@@ -508,11 +508,10 @@ let ParseInput
508508type Tokenizer = unit -> Parser.token
509509
510510// Show all tokens in the stream, for testing purposes
511- let ShowAllTokensAndExit (shortFilename, tokenizer: Tokenizer, lexbuf: LexBuffer<char>, exiter: Exiter) =
511+ let ShowAllTokensAndExit (tokenizer: Tokenizer, lexbuf: LexBuffer<char>, exiter: Exiter) =
512512 while true do
513- printf " tokenize - getting one token from %s \n " shortFilename
514513 let t = tokenizer ()
515- printf " tokenize - got %s @ %a \n " ( Parser.token_ to_ string t) outputRange lexbuf.LexemeRange
514+ printfn $ " { Parser.token_to_string t} { lexbuf.LexemeRange} "
516515
517516 match t with
518517 | Parser.EOF _ -> exiter.Exit 0
@@ -609,9 +608,6 @@ let ParseOneInputLexbuf (tcConfig: TcConfig, lexResourceManager, lexbuf, fileNam
609608 tcConfig.applyLineDirectives
610609 )
611610
612- // Set up the initial lexer arguments
613- let shortFilename = SanitizeFileName fileName tcConfig.implicitIncludeDir
614-
615611 let input =
616612 usingLexbufForParsing (lexbuf, fileName) (fun lexbuf ->
617613
@@ -642,7 +638,7 @@ let ParseOneInputLexbuf (tcConfig: TcConfig, lexResourceManager, lexbuf, fileNam
642638
643639 // If '--tokenize' then show the tokens now and exit
644640 if tokenizeOnly then
645- ShowAllTokensAndExit(shortFilename, tokenizer, lexbuf, tcConfig.exiter)
641+ ShowAllTokensAndExit(tokenizer, lexbuf, tcConfig.exiter)
646642
647643 // Test hook for one of the parser entry points
648644 if tcConfig.testInteractionParser then
0 commit comments