Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 30 additions & 5 deletions compiler/rustc_parse/src/lexer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,19 +44,44 @@ pub(crate) struct UnmatchedDelim {
pub candidate_span: Option<Span>,
}

/// Selects which leading pseudo-tokens, if any, are stripped from the
/// source before lexing proper begins.
pub(crate) enum StripTokens {
    /// Strip both a leading shebang line and a frontmatter block.
    ShebangAndFrontmatter,
    /// Strip a leading shebang line but not frontmatter.
    ///
    /// That means that char sequences looking like frontmatter are simply
    /// interpreted as regular Rust lexemes.
    Shebang,
    /// Strip nothing.
    ///
    /// In other words, char sequences looking like a shebang or frontmatter
    /// are simply interpreted as regular Rust lexemes.
    Nothing,
}

pub(crate) fn lex_token_trees<'psess, 'src>(
psess: &'psess ParseSess,
mut src: &'src str,
mut start_pos: BytePos,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
// Skip `#!`, if present.
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
start_pos = start_pos + BytePos::from_usize(shebang_len);
match strip_tokens {
StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => {
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
start_pos = start_pos + BytePos::from_usize(shebang_len);
}
}
StripTokens::Nothing => {}
}

let frontmatter_allowed = match strip_tokens {
StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
};

let cursor = Cursor::new(src, frontmatter_allowed);
let mut lexer = Lexer {
psess,
Expand Down
29 changes: 12 additions & 17 deletions compiler/rustc_parse/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
use rustc_ast_pretty::pprust;
use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
use rustc_lexer::FrontmatterAllowed;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::{FileName, SourceFile, Span};
Expand All @@ -34,6 +33,8 @@ pub mod parser;
use parser::Parser;
use rustc_ast::token::Delimiter;

use crate::lexer::StripTokens;

pub mod lexer;

mod errors;
Expand Down Expand Up @@ -62,10 +63,10 @@ pub fn new_parser_from_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}

/// Creates a new parser from a simple (no frontmatter) source string.
/// Creates a new parser from a simple (no shebang, no frontmatter) source string.
///
/// On failure, the errors must be consumed via `unwrap_or_emit_fatal`, `emit`, `cancel`,
/// etc., otherwise a panic will occur when they are dropped.
Expand All @@ -75,7 +76,7 @@ pub fn new_parser_from_simple_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::No)
new_parser_from_source_file(psess, source_file, StripTokens::Nothing)
}

/// Creates a new parser from a filename. On failure, the errors must be consumed via
Expand Down Expand Up @@ -109,7 +110,7 @@ pub fn new_parser_from_file<'a>(
}
err.emit();
});
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}

pub fn utf8_error<E: EmissionGuarantee>(
Expand Down Expand Up @@ -160,10 +161,10 @@ pub fn utf8_error<E: EmissionGuarantee>(
fn new_parser_from_source_file(
psess: &ParseSess,
source_file: Arc<SourceFile>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let end_pos = source_file.end_position();
let stream = source_file_to_stream(psess, source_file, None, frontmatter_allowed)?;
let stream = source_file_to_stream(psess, source_file, None, strip_tokens)?;
let mut parser = Parser::new(psess, stream, None);
if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
Expand All @@ -179,8 +180,8 @@ pub fn source_str_to_stream(
) -> Result<TokenStream, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
// used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
// frontmatters as frontmatters.
source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
// frontmatters as frontmatters, but for compatibility reasons we still strip the shebang
source_file_to_stream(psess, source_file, override_span, StripTokens::Shebang)
}

/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
Expand All @@ -189,7 +190,7 @@ fn source_file_to_stream<'psess>(
psess: &'psess ParseSess,
source_file: Arc<SourceFile>,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
psess.dcx().bug(format!(
Expand All @@ -198,13 +199,7 @@ fn source_file_to_stream<'psess>(
));
});

lexer::lex_token_trees(
psess,
src.as_str(),
source_file.start_pos,
override_span,
frontmatter_allowed,
)
lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span, strip_tokens)
}

/// Runs the given subparser `f` on the tokens of the given `attr`'s item.
Expand Down
3 changes: 3 additions & 0 deletions tests/run-make/multiline-args-value/cfg-shebang.stderr
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
error: invalid `--cfg` argument: `#!/usr/bin/shebang
key` (expected `key` or `key="value"`)

6 changes: 6 additions & 0 deletions tests/run-make/multiline-args-value/check-cfg-shebang.stderr
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
error: invalid `--check-cfg` argument: `#!/usr/bin/shebang
cfg(key)`
|
= note: expected `cfg(name, values("value1", "value2", ... "valueN"))`
= note: visit <https://doc.rust-lang.org/nightly/rustc/check-cfg.html> for more details

31 changes: 24 additions & 7 deletions tests/run-make/multiline-args-value/rmake.rs
Original file line number Diff line number Diff line change
@@ -1,23 +1,23 @@
use run_make_support::{cwd, diff, rustc};

fn test_and_compare(flag: &str, val: &str) {
fn test_and_compare(test_name: &str, flag: &str, val: &str) {
let mut cmd = rustc();

let output =
cmd.input("").arg("--crate-type=lib").arg(&format!("--{flag}")).arg(val).run_fail();
let output = cmd.input("").arg("--crate-type=lib").arg(flag).arg(val).run_fail();

assert_eq!(output.stdout_utf8(), "");
diff()
.expected_file(format!("{flag}.stderr"))
.actual_text("output", output.stderr_utf8())
.expected_file(format!("{test_name}.stderr"))
.actual_text("stderr", output.stderr_utf8())
.run();
}

fn main() {
// Verify that frontmatter isn't allowed in `--cfg` arguments.
// https://github.com/rust-lang/rust/issues/146130
test_and_compare(
"cfg",
"cfg-frontmatter",
"--cfg",
r#"---
---
key"#,
Expand All @@ -26,9 +26,26 @@ key"#,
// Verify that frontmatter isn't allowed in `--check-cfg` arguments.
// https://github.com/rust-lang/rust/issues/146130
test_and_compare(
"check-cfg",
"check-cfg-frontmatter",
"--check-cfg",
r#"---
---
cfg(key)"#,
);

// Verify that shebang isn't allowed in `--cfg` arguments.
test_and_compare(
"cfg-shebang",
"--cfg",
r#"#!/usr/bin/shebang
key"#,
);

// Verify that shebang isn't allowed in `--check-cfg` arguments.
test_and_compare(
"check-cfg-shebang",
"--check-cfg",
r#"#!/usr/bin/shebang
cfg(key)"#,
);
}
Loading