Merged
Changes from 1 commit
22 commits
3d8ce0a
rustdoc: Use diagnostics for error when including sources
camelid Mar 10, 2021
b362958
Add function core::iter::zip
cuviper Mar 8, 2021
3b1f5e3
Use iter::zip in library/
cuviper Mar 8, 2021
72ebebe
Use iter::zip in compiler/
cuviper Mar 8, 2021
e82e812
Use iter::zip in src/tools/clippy/
cuviper Mar 8, 2021
addc51a
update array missing `IntoIterator` msg
lcnr Feb 28, 2021
5ac917d
fix rustc_on_implemented `_Self` paths
lcnr Mar 12, 2021
f94360f
Always preserve `None`-delimited groups in a captured `TokenStream`
Aaron1011 Mar 27, 2021
ee1b33c
Add #[inline] to io::Error methods.
m-ou-se Mar 27, 2021
6c6ef73
Improve fs error open_from unix
pickfire Mar 26, 2021
42150fb
combine: stop eagerly evaluating consts
lcnr Jan 24, 2021
e461ddd
update tests
lcnr Mar 15, 2021
fb4f48e
make unaligned_references future-incompat lint warn-by-default, and re…
RalfJung Feb 25, 2021
f0a6052
Add the tracking issue for `#![feature(iter_zip)]`
cuviper Mar 27, 2021
520c9a2
Rollup merge of #81351 - lcnr:big-money-big-prices, r=oli-obk
Dylan-DPC Mar 27, 2021
a900677
Rollup merge of #82525 - RalfJung:unaligned-ref-warn, r=petrochenkov
Dylan-DPC Mar 27, 2021
ebea9d9
Rollup merge of #82626 - lcnr:encode_with_shorthandb, r=estebank
Dylan-DPC Mar 27, 2021
b2e2543
Rollup merge of #82917 - cuviper:iter-zip, r=m-ou-se
Dylan-DPC Mar 27, 2021
f665e5a
Rollup merge of #82993 - camelid:source-use-diag, r=jyn514
Dylan-DPC Mar 27, 2021
aee7b9e
Rollup merge of #83522 - pickfire:patch-6, r=JohnTitor
Dylan-DPC Mar 27, 2021
1115acc
Rollup merge of #83548 - Aaron1011:capture-none-delims, r=petrochenkov
Dylan-DPC Mar 27, 2021
7d6af67
Rollup merge of #83555 - m-ou-se:inline-io-error-new-const, r=jackh726
Dylan-DPC Mar 27, 2021
Always preserve None-delimited groups in a captured TokenStream
Previously, we would silently remove any `None`-delimited groups when
capturing a `TokenStream`, 'flattening' them to their inner tokens.
This was not normally visible, since we usually have
`TokenKind::Interpolated` (which gets converted to a `None`-delimited
group during macro invocation) instead of an actual `None`-delimited
group.

However, there are a couple of cases where this becomes visible to
proc-macros:
1. A cross-crate `macro_rules!` macro has a `None`-delimited group
   stored in its body (as a result of being produced by another
   `macro_rules!` macro). The cross-crate `macro_rules!` invocation
   can then expand to an attribute macro invocation, which needs
   to be able to see the `None`-delimited group.
2. A proc-macro can invoke an attribute proc-macro with its re-collected
   input. If there are any nonterminals present in the input, they will
   get re-collected to `None`-delimited groups, which will then get
   captured as part of the attribute macro invocation.

Both of these cases are incredibly obscure, so there hopefully won't be
any breakage. This change will allow more aggressive 'flattening' of
nonterminals in #82608 without losing `None`-delimited groups.
Aaron1011 committed Mar 27, 2021
commit f94360fd83b49554b6c26999a0030e9cfe800f32
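To make case 1 concrete from the proc-macro's side: once a `None`-delimited group survives capture, an attribute macro observes it as a `Group` with `Delimiter::None` in its input. The following is a minimal sketch of such an observer; it is not the actual `print_attr` from the `test-macros` auxiliary crate used in the tests below, just an illustrative attribute macro in a hypothetical proc-macro crate.

    // Sketch only: lives at the root of a proc-macro crate.
    use proc_macro::{Delimiter, TokenStream, TokenTree};

    #[proc_macro_attribute]
    pub fn print_attr(_args: TokenStream, input: TokenStream) -> TokenStream {
        for tree in input.clone() {
            if let TokenTree::Group(group) = tree {
                if group.delimiter() == Delimiter::None {
                    // With this change, groups like this one survive capture
                    // and are visible here instead of being flattened away.
                    eprintln!("saw a None-delimited group: {}", group.stream());
                }
            }
        }
        input
    }

Before this change, a group produced by re-collecting a nonterminal would have been flattened to its inner tokens before the attribute macro ever saw it.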
45 changes: 35 additions & 10 deletions compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -98,21 +98,46 @@ impl<'a> Parser<'a> {
}
impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> TokenStream {
// The token produced by the final call to `next` or `next_desugared`
// was not actually consumed by the callback. The combination
// of chaining the initial token and using `take` produces the desired
// result - we produce an empty `TokenStream` if no calls were made,
// and omit the final token otherwise.
if self.num_calls == 0 {
return TokenStream::new(vec![]);
}

let mut cursor_snapshot = self.cursor_snapshot.clone();
let tokens = std::iter::once(self.start_token.clone())
.chain((0..self.num_calls).map(|_| {
if self.desugar_doc_comments {
// Don't skip `None` delimiters, since we want to pass them to
// proc macros. Normally, we'll end up capturing `TokenKind::Interpolated`,
// which gets converted to a `None`-delimited group when we invoke
// a proc-macro. However, it's possible to already have a `None`-delimited
// group in the stream (such as when parsing the output of a proc-macro,
// or in certain unusual cases with cross-crate `macro_rules!` macros).
cursor_snapshot.skip_none_delims = false;

// The token produced by the final call to `next` or `next_desugared`
// was not actually consumed by the callback.
let num_calls = self.num_calls - 1;
let mut i = 0;
let tokens =
std::iter::once(self.start_token.clone()).chain(std::iter::from_fn(|| {
if i >= num_calls {
return None;
}

let token = if self.desugar_doc_comments {
cursor_snapshot.next_desugared()
} else {
cursor_snapshot.next()
};

// When the `LazyTokenStreamImpl` was originally produced, we did *not*
// include `NoDelim` tokens in `num_calls`, since they are normally ignored
// by the parser. Therefore, we only increment our counter for other types of tokens.
if !matches!(
token.0.kind,
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
) {
i += 1;
}
}))
.take(self.num_calls);
Some(token)
}));

make_token_stream(tokens, self.append_unglued_token.clone())
}
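The replay logic above is easier to follow in isolation. Below is a self-contained sketch of the idea using plain enums rather than rustc's token types; it assumes, as the new comment states, that `num_calls` counted only tokens the parser actually consumed, while `NoDelim` open/close tokens were skipped and therefore went uncounted.

    // Simplified stand-ins for rustc's token kinds (sketch only).
    #[allow(dead_code)]
    #[derive(Clone, Debug, PartialEq)]
    enum Tok {
        Ident(&'static str),
        OpenNoDelim,  // opening token of a `None`-delimited group
        CloseNoDelim, // closing token of a `None`-delimited group
    }

    /// Replay tokens from a snapshot, stopping after `num_calls` tokens that
    /// the parser originally counted. Invisible (`NoDelim`) delimiters are
    /// passed through to the output but do *not* count toward `num_calls`,
    /// mirroring the fact that the parser skipped them when the count was made.
    fn replay(snapshot: Vec<Tok>, num_calls: usize) -> Vec<Tok> {
        let mut iter = snapshot.into_iter();
        let mut counted = 0;
        std::iter::from_fn(|| {
            if counted >= num_calls {
                return None;
            }
            let tok = iter.next()?;
            if !matches!(tok, Tok::OpenNoDelim | Tok::CloseNoDelim) {
                counted += 1;
            }
            Some(tok)
        })
        .collect()
    }

    fn main() {
        let toks = vec![Tok::OpenNoDelim, Tok::Ident("struct"), Tok::Ident("Foo")];
        // Only the two idents count toward `num_calls`, but the group's opening
        // delimiter is still replayed into the captured stream.
        let out = replay(toks, 2);
        assert_eq!(out, vec![Tok::OpenNoDelim, Tok::Ident("struct"), Tok::Ident("Foo")]);
        println!("{:?}", out);
    }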
25 changes: 20 additions & 5 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -172,6 +172,13 @@ struct TokenCursor {
// appended to the captured stream when
// we evaluate a `LazyTokenStream`
append_unglued_token: Option<TreeAndSpacing>,
// If `true`, skip the delimiters for `None`-delimited groups,
// and just yield the inner tokens. This is `true` during
// normal parsing, since the parser code is not currently prepared
// to handle `None` delimiters. When capturing a `TokenStream`,
// however, we want to handle `None`-delimiters, since
// proc-macros always see `None`-delimited groups.
skip_none_delims: bool,
}

#[derive(Clone)]
@@ -184,13 +191,13 @@ struct TokenCursorFrame {
}

impl TokenCursorFrame {
fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream, skip_none_delims: bool) -> Self {
TokenCursorFrame {
delim,
span,
open_delim: delim == token::NoDelim,
open_delim: delim == token::NoDelim && skip_none_delims,
tree_cursor: tts.into_trees(),
close_delim: delim == token::NoDelim,
close_delim: delim == token::NoDelim && skip_none_delims,
}
}
}
@@ -218,7 +225,7 @@ impl TokenCursor {
return (token, spacing);
}
TokenTree::Delimited(sp, delim, tts) => {
let frame = TokenCursorFrame::new(sp, delim, tts);
let frame = TokenCursorFrame::new(sp, delim, tts, self.skip_none_delims);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
@@ -276,6 +283,7 @@ impl TokenCursor {
.cloned()
.collect::<TokenStream>()
},
self.skip_none_delims,
),
));

@@ -371,12 +379,19 @@ impl<'a> Parser<'a> {
prev_token: Token::dummy(),
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
// Skip over the delimiters for `None`-delimited groups
token_cursor: TokenCursor {
frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens),
frame: TokenCursorFrame::new(
DelimSpan::dummy(),
token::NoDelim,
tokens,
/* skip_none_delims */ true,
),
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
append_unglued_token: None,
skip_none_delims: true,
},
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
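In other words, `skip_none_delims` only controls whether a frame's invisible delimiters are treated as already consumed. A stripped-down sketch of that flag's effect, using stand-in types rather than rustc's `TokenCursorFrame`:

    // Stand-ins for rustc's `DelimToken` / `TokenCursorFrame` (sketch only).
    #[allow(dead_code)]
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Delim {
        None,
        Brace,
    }

    #[allow(dead_code)]
    struct Frame {
        delim: Delim,
        // Whether the opening / closing delimiter should be treated as already
        // yielded, i.e. never handed out by the cursor.
        open_done: bool,
        close_done: bool,
    }

    impl Frame {
        fn new(delim: Delim, skip_none_delims: bool) -> Self {
            Frame {
                delim,
                // Normal parsing (`skip_none_delims == true`): invisible
                // delimiters are pre-marked as consumed, so the parser never
                // sees them. Capturing (`skip_none_delims == false`): they are
                // kept, so the captured stream retains the `None`-delimited group.
                open_done: delim == Delim::None && skip_none_delims,
                close_done: delim == Delim::None && skip_none_delims,
            }
        }
    }

    fn main() {
        let parsing = Frame::new(Delim::None, true);
        let capturing = Frame::new(Delim::None, false);
        assert!(parsing.open_done && parsing.close_done);
        assert!(!capturing.open_done && !capturing.close_done);
    }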
9 changes: 5 additions & 4 deletions src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs
@@ -2,14 +2,15 @@ pub struct FirstStruct;

#[macro_export]
macro_rules! outer_macro {
($name:ident) => {
($name:ident, $attr_struct_name:ident) => {
#[macro_export]
macro_rules! inner_macro {
($wrapper:ident) => {
$wrapper!($name)
($bang_macro:ident, $attr_macro:ident) => {
$bang_macro!($name);
#[$attr_macro] struct $attr_struct_name {}
}
}
}
}

outer_macro!(FirstStruct);
outer_macro!(FirstStruct, FirstAttrStruct);
10 changes: 5 additions & 5 deletions src/test/ui/proc-macro/nested-macro-rules.rs
@@ -1,7 +1,7 @@
// run-pass
// aux-build:nested-macro-rules.rs
// aux-build:test-macros.rs
// compile-flags: -Z span-debug
// compile-flags: -Z span-debug -Z macro-backtrace
// edition:2018

#![no_std] // Don't load unnecessary hygiene information from std
@@ -10,14 +10,14 @@ extern crate std;
extern crate nested_macro_rules;
extern crate test_macros;

use test_macros::print_bang;
use test_macros::{print_bang, print_attr};

use nested_macro_rules::FirstStruct;
struct SecondStruct;

fn main() {
nested_macro_rules::inner_macro!(print_bang);
nested_macro_rules::inner_macro!(print_bang, print_attr);

nested_macro_rules::outer_macro!(SecondStruct);
inner_macro!(print_bang);
nested_macro_rules::outer_macro!(SecondStruct, SecondAttrStruct);
inner_macro!(print_bang, print_attr);
}
52 changes: 48 additions & 4 deletions src/test/ui/proc-macro/nested-macro-rules.stdout
@@ -5,10 +5,32 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
stream: TokenStream [
Ident {
ident: "FirstStruct",
span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#7),
span: $DIR/auxiliary/nested-macro-rules.rs:16:14: 16:25 (#7),
},
],
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#6),
span: $DIR/auxiliary/nested-macro-rules.rs:9:30: 9:35 (#6),
},
]
PRINT-ATTR INPUT (DISPLAY): struct FirstAttrStruct { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#6),
},
Group {
delimiter: None,
stream: TokenStream [
Ident {
ident: "FirstAttrStruct",
span: $DIR/auxiliary/nested-macro-rules.rs:16:27: 16:42 (#7),
},
],
span: $DIR/auxiliary/nested-macro-rules.rs:10:39: 10:56 (#6),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#6),
},
]
PRINT-BANG INPUT (DISPLAY): SecondStruct
@@ -18,9 +40,31 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
stream: TokenStream [
Ident {
ident: "SecondStruct",
span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#13),
span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#16),
},
],
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#12),
span: $DIR/auxiliary/nested-macro-rules.rs:9:30: 9:35 (#15),
},
]
PRINT-ATTR INPUT (DISPLAY): struct SecondAttrStruct { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#15),
},
Group {
delimiter: None,
stream: TokenStream [
Ident {
ident: "SecondAttrStruct",
span: $DIR/nested-macro-rules.rs:21:52: 21:68 (#16),
},
],
span: $DIR/auxiliary/nested-macro-rules.rs:10:39: 10:56 (#15),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#15),
},
]