@@ -566,52 +566,52 @@ pub(super) fn write_code(
     };
     let mut current_expansion = get_expansion(&mut token_handler, expanded_codes, file_span);
 
-    Classifier::new(
+    classify(
         &src,
-        token_handler.href_context.as_ref().map(|c| c.file_span).unwrap_or(DUMMY_SP),
+        token_handler.href_context.as_ref().map_or(DUMMY_SP, |c| c.file_span),
         decoration_info,
-    )
-    .highlight(&mut |span, highlight| match highlight {
-        Highlight::Token { text, class } => {
-            token_handler.push_token(class, Cow::Borrowed(text));
-
-            if text == "\n" {
-                if current_expansion.is_none() {
-                    current_expansion = get_expansion(&mut token_handler, expanded_codes, span);
-                }
-                if let Some(ref current_expansion) = current_expansion
-                    && current_expansion.span.lo() == span.hi()
-                {
-                    token_handler.add_expanded_code(current_expansion);
-                }
-            } else {
-                let mut need_end = false;
-                if let Some(ref current_expansion) = current_expansion {
-                    if current_expansion.span.lo() == span.hi() {
-                        token_handler.add_expanded_code(current_expansion);
-                    } else if current_expansion.end_line == token_handler.line
-                        && span.hi() >= current_expansion.span.hi()
+        &mut |span, highlight| match highlight {
+            Highlight::Token { text, class } => {
+                token_handler.push_token(class, Cow::Borrowed(text));
+
+                if text == "\n" {
+                    if current_expansion.is_none() {
+                        current_expansion = get_expansion(&mut token_handler, expanded_codes, span);
+                    }
+                    if let Some(ref current_expansion) = current_expansion
+                        && current_expansion.span.lo() == span.hi()
                     {
-                        need_end = true;
+                        token_handler.add_expanded_code(current_expansion);
+                    }
+                } else {
+                    let mut need_end = false;
+                    if let Some(ref current_expansion) = current_expansion {
+                        if current_expansion.span.lo() == span.hi() {
+                            token_handler.add_expanded_code(current_expansion);
+                        } else if current_expansion.end_line == token_handler.line
+                            && span.hi() >= current_expansion.span.hi()
+                        {
+                            need_end = true;
+                        }
+                    }
+                    if need_end {
+                        current_expansion = end_expansion(&mut token_handler, expanded_codes, span);
                     }
-                }
-                if need_end {
-                    current_expansion = end_expansion(&mut token_handler, expanded_codes, span);
                 }
             }
-        }
-        Highlight::EnterSpan { class } => {
-            token_handler.class_stack.enter_elem(
-                token_handler.out,
-                &token_handler.href_context,
-                class,
-                None,
-            );
-        }
-        Highlight::ExitSpan => {
-            token_handler.class_stack.exit_elem();
-        }
-    });
+            Highlight::EnterSpan { class } => {
+                token_handler.class_stack.enter_elem(
+                    token_handler.out,
+                    &token_handler.href_context,
+                    class,
+                    None,
+                );
+            }
+            Highlight::ExitSpan => {
+                token_handler.class_stack.exit_elem();
+            }
+        },
+    );
 }
 
 fn write_footer(playground_button: Option<&str>) -> impl Display {
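
For readers skimming the hunk above: the old builder call (`Classifier::new(..).highlight(sink)`) becomes a free `classify(src, file_span, decoration_info, sink)` call, and the sink closure keeps owning all rendering state. Below is a minimal, self-contained sketch of that callback shape; the `Highlight` enum and `classify_sketch` here are simplified stand-ins invented for illustration, not the rustdoc definitions.

```rust
// Simplified stand-ins for the rustdoc types; this only illustrates the
// "free function + &mut dyn FnMut sink" shape that replaces the old
// Classifier::new(..).highlight(..) call, not the real highlighter.
#[allow(dead_code)]
enum Highlight<'a> {
    Token { text: &'a str, class: Option<&'static str> },
    EnterSpan { class: &'static str },
    ExitSpan,
}

// Stand-in for `classify`: it pushes events into the caller-provided sink
// instead of returning a collection, so the caller keeps all of its state
// (here `out`; in rustdoc, `token_handler` and `current_expansion`) local.
fn classify_sketch<'src>(src: &'src str, sink: &mut dyn FnMut(Highlight<'src>)) {
    for word in src.split_whitespace() {
        let class = if word == "fn" { Some("kw") } else { None };
        sink(Highlight::Token { text: word, class });
    }
}

fn main() {
    let mut out = String::new();
    classify_sketch("fn main", &mut |highlight| {
        if let Highlight::Token { text, class } = highlight {
            match class {
                Some(c) => out.push_str(&format!("<span class=\"{c}\">{text}</span>")),
                None => out.push_str(text),
            }
        }
    });
    assert_eq!(out, "<span class=\"kw\">fn</span>main");
}
```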
@@ -735,6 +735,12 @@ struct TokenIter<'a> {
     cursor: Cursor<'a>,
 }
 
+impl<'a> TokenIter<'a> {
+    fn new(src: &'a str) -> Self {
+        Self { src, cursor: Cursor::new(src, FrontmatterAllowed::Yes) }
+    }
+}
+
 impl<'a> Iterator for TokenIter<'a> {
     type Item = (TokenKind, &'a str);
     fn next(&mut self) -> Option<(TokenKind, &'a str)> {
@@ -843,6 +849,54 @@ fn new_span(lo: u32, text: &str, file_span: Span) -> Span {
     file_span.with_lo(file_lo + BytePos(lo)).with_hi(file_lo + BytePos(hi))
 }
 
+fn classify<'src>(
+    src: &'src str,
+    file_span: Span,
+    decoration_info: Option<&DecorationInfo>,
+    sink: &mut dyn FnMut(Span, Highlight<'src>),
+) {
+    let offset = rustc_lexer::strip_shebang(src);
+
+    if let Some(offset) = offset {
+        sink(DUMMY_SP, Highlight::Token { text: &src[..offset], class: Some(Class::Comment) });
+    }
+
+    let mut classifier =
+        Classifier::new(src, offset.unwrap_or_default(), file_span, decoration_info);
+
+    loop {
+        if let Some(decs) = classifier.decorations.as_mut() {
+            let byte_pos = classifier.byte_pos;
+            let n_starts = decs.starts.iter().filter(|(i, _)| byte_pos >= *i).count();
+            for (_, kind) in decs.starts.drain(0..n_starts) {
+                sink(DUMMY_SP, Highlight::EnterSpan { class: Class::Decoration(kind) });
+            }
+
+            let n_ends = decs.ends.iter().filter(|i| byte_pos >= **i).count();
+            for _ in decs.ends.drain(0..n_ends) {
+                sink(DUMMY_SP, Highlight::ExitSpan);
+            }
+        }
+
+        if let Some((TokenKind::Colon | TokenKind::Ident, _)) = classifier.tokens.peek() {
+            let tokens = classifier.get_full_ident_path();
+            for &(token, start, end) in &tokens {
+                let text = &classifier.src[start..end];
+                classifier.advance(token, text, sink, start as u32);
+                classifier.byte_pos += text.len() as u32;
+            }
+            if !tokens.is_empty() {
+                continue;
+            }
+        }
+        if let Some((token, text, before)) = classifier.next() {
+            classifier.advance(token, text, sink, before);
+        } else {
+            break;
+        }
+    }
+}
+
 /// Processes program tokens, classifying strings of text by highlighting
 /// category (`Class`).
 struct Classifier<'src> {
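
The new `classify` entry point is also where shebang handling now lives: the shebang prefix never reaches the lexer, so it is emitted to the sink up front as a comment-classified token and the `Classifier` starts at the offset past it. Here is a self-contained sketch of that control flow; `strip_shebang_sketch` and `classify_sketch` are crude stand-ins invented for illustration (the real code uses `rustc_lexer::strip_shebang` and the actual lexer), and the "lexer" below is just a whitespace splitter.

```rust
// Crude stand-in for rustc_lexer::strip_shebang: treat a leading "#!" line as
// the shebang and return the number of bytes it occupies.
fn strip_shebang_sketch(src: &str) -> Option<usize> {
    src.starts_with("#!").then(|| src.find('\n').map_or(src.len(), |i| i + 1))
}

// Stand-in for `classify`: report the shebang as one comment token, then lex
// only the remainder of the input.
fn classify_sketch(src: &str, sink: &mut dyn FnMut(&str, Option<&'static str>)) {
    let offset = strip_shebang_sketch(src);
    if let Some(offset) = offset {
        sink(&src[..offset], Some("comment"));
    }
    for word in src[offset.unwrap_or(0)..].split_whitespace() {
        sink(word, None);
    }
}

fn main() {
    let mut events = Vec::new();
    classify_sketch("#!/usr/bin/env rust\nfn main() {}", &mut |text, class| {
        events.push((text.to_string(), class));
    });
    assert_eq!(events[0], ("#!/usr/bin/env rust\n".to_string(), Some("comment")));
    assert_eq!(events[1], ("fn".to_string(), None));
}
```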
@@ -857,21 +911,23 @@ struct Classifier<'src> {
857911}
858912
859913impl < ' src > Classifier < ' src > {
860- /// Takes as argument the source code to HTML-ify, the rust edition to use and the source code
861- /// file span which will be used later on by the `span_correspondence_map`.
862- fn new ( src : & ' src str , file_span : Span , decoration_info : Option < & DecorationInfo > ) -> Self {
863- let tokens =
864- PeekIter :: new ( TokenIter { src, cursor : Cursor :: new ( src, FrontmatterAllowed :: Yes ) } ) ;
865- let decorations = decoration_info. map ( Decorations :: new) ;
914+ /// Takes as argument the source code to HTML-ify and the source code file span
915+ /// which will be used later on by the `span_correspondence_map`.
916+ fn new (
917+ src : & ' src str ,
918+ byte_pos : usize ,
919+ file_span : Span ,
920+ decoration_info : Option < & DecorationInfo > ,
921+ ) -> Self {
866922 Classifier {
867- tokens,
923+ tokens : PeekIter :: new ( TokenIter :: new ( & src [ byte_pos.. ] ) ) ,
868924 in_attribute : false ,
869925 in_macro : false ,
870926 in_macro_nonterminal : false ,
871- byte_pos : 0 ,
927+ byte_pos : byte_pos as u32 ,
872928 file_span,
873929 src,
874- decorations,
930+ decorations : decoration_info . map ( Decorations :: new ) ,
875931 }
876932 }
877933
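
The new `byte_pos` parameter is what keeps positions absolute even though the lexer only sees `&src[byte_pos..]`: the classifier starts counting at the prefix length instead of zero. A small worked example of that arithmetic, in plain Rust with hypothetical values rather than rustdoc code:

```rust
// With a 20-byte shebang stripped, the lexer only sees src[20..], but
// starting byte_pos at 20 keeps every accumulated position absolute with
// respect to the full source.
fn main() {
    let src = "#!/usr/bin/env rust\nfn main() {}";
    let offset = src.find('\n').map_or(0, |i| i + 1); // 20 bytes of shebang
    let rest = &src[offset..];                        // what the lexer sees
    let local = rest.find("fn").unwrap();             // 0 in the sliced input
    let absolute = offset + local;                    // 20 in the full source
    assert_eq!(&src[absolute..absolute + 2], "fn");
}
```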
@@ -938,50 +994,6 @@ impl<'src> Classifier<'src> {
         }
     }
 
-    /// Exhausts the `Classifier` writing the output into `sink`.
-    ///
-    /// The general structure for this method is to iterate over each token,
-    /// possibly giving it an HTML span with a class specifying what flavor of
-    /// token is used.
-    fn highlight(mut self, sink: &mut dyn FnMut(Span, Highlight<'src>)) {
-        loop {
-            if let Some(decs) = self.decorations.as_mut() {
-                let byte_pos = self.byte_pos;
-                let n_starts = decs.starts.iter().filter(|(i, _)| byte_pos >= *i).count();
-                for (_, kind) in decs.starts.drain(0..n_starts) {
-                    sink(DUMMY_SP, Highlight::EnterSpan { class: Class::Decoration(kind) });
-                }
-
-                let n_ends = decs.ends.iter().filter(|i| byte_pos >= **i).count();
-                for _ in decs.ends.drain(0..n_ends) {
-                    sink(DUMMY_SP, Highlight::ExitSpan);
-                }
-            }
-
-            if self
-                .tokens
-                .peek()
-                .map(|t| matches!(t.0, TokenKind::Colon | TokenKind::Ident))
-                .unwrap_or(false)
-            {
-                let tokens = self.get_full_ident_path();
-                for (token, start, end) in &tokens {
-                    let text = &self.src[*start..*end];
-                    self.advance(*token, text, sink, *start as u32);
-                    self.byte_pos += text.len() as u32;
-                }
-                if !tokens.is_empty() {
-                    continue;
-                }
-            }
-            if let Some((token, text, before)) = self.next() {
-                self.advance(token, text, sink, before);
-            } else {
-                break;
-            }
-        }
-    }
-
     /// Single step of highlighting. This will classify `token`, but maybe also a couple of
     /// following ones as well.
     ///
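
The decoration-flushing loop removed here (and re-added inside `classify` above) relies on a small "drain the ready prefix" idiom: count how many queued positions the cursor has already passed, then drain exactly that many from the front. A standalone sketch of the idiom using a plain `Vec<u32>` instead of rustdoc's `Decorations`; `flush_ready` is a name invented for this example, and it assumes the queue is sorted by position, as the original lists appear to be:

```rust
// Count the queued positions already reached, then drain that many from the
// front. Assumes `queue` is sorted ascending, matching how the decoration
// start/end lists are consumed in order.
fn flush_ready(queue: &mut Vec<u32>, byte_pos: u32, mut emit: impl FnMut(u32)) {
    let n_ready = queue.iter().filter(|&&pos| byte_pos >= pos).count();
    for pos in queue.drain(0..n_ready) {
        emit(pos);
    }
}

fn main() {
    let mut starts = vec![0, 4, 10];
    let mut fired = Vec::new();
    flush_ready(&mut starts, 5, |pos| fired.push(pos));
    assert_eq!(fired, vec![0, 4]); // positions 0 and 4 are flushed at byte 5
    assert_eq!(starts, vec![10]); // the rest stays queued for a later pass
}
```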
@@ -1019,6 +1031,7 @@ impl<'src> Classifier<'src> {
                     Class::Comment
                 }
             }
+            TokenKind::Frontmatter { .. } => Class::Comment,
             // Consider this as part of a macro invocation if there was a
             // leading identifier.
             TokenKind::Bang if self.in_macro => {
@@ -1117,7 +1130,6 @@ impl<'src> Classifier<'src> {
             | TokenKind::At
             | TokenKind::Tilde
             | TokenKind::Colon
-            | TokenKind::Frontmatter { .. }
             | TokenKind::Unknown => return no_highlight(sink),
 
             TokenKind::Question => Class::QuestionMark,
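
For context on the last two hunks: `TokenKind::Frontmatter` covers the cargo-script metadata block at the top of a file, which the lexer hands over as a single token, and moving it from the "no highlight" arm to `Class::Comment` keeps that block visually distinct from code. Roughly, such a file looks like the sketch below (an illustration of the frontmatter syntax, not code from this PR, and it needs cargo's script support rather than plain `rustc`):

```rust
#!/usr/bin/env cargo
---
[dependencies]
regex = "1"
---

fn main() {
    // Everything between the `---` fences above is one Frontmatter token,
    // now rendered with the comment class by the highlighter.
    println!("hello");
}
```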