Skip to content

Commit

Permalink
Use common variants for open and close delimiters
Browse files Browse the repository at this point in the history
This common representation for delimiters should make pattern matching easier. Having a separate `token::DelimToken` enum also allows us to enforce the invariant that the opening and closing delimiters must be the same in `ast::TtDelimited`, removing the need to ensure matched delimiters when working with token trees.
  • Loading branch information
brendanzab committed Oct 29, 2014
1 parent 77f44d4 commit 936d999
Show file tree
Hide file tree
Showing 17 changed files with 328 additions and 315 deletions.
12 changes: 6 additions & 6 deletions src/grammar/verify.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,20 +59,20 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
"FLOAT_SUFFIX" => id(),
"INT_SUFFIX" => id(),
"SHL" => token::BinOp(token::Shl),
"LBRACE" => token::LBrace,
"LBRACE" => token::OpenDelim(token::Brace),
"RARROW" => token::Rarrow,
"LIT_STR" => token::LitStr(Name(0)),
"DOTDOT" => token::DotDot,
"MOD_SEP" => token::ModSep,
"DOTDOTDOT" => token::DotDotDot,
"NOT" => token::Not,
"AND" => token::BinOp(token::And),
"LPAREN" => token::LParen,
"LPAREN" => token::OpenDelim(token::Paren),
"ANDAND" => token::AndAnd,
"AT" => token::At,
"LBRACKET" => token::LBracket,
"LBRACKET" => token::OpenDelim(token::Bracket),
"LIT_STR_RAW" => token::LitStrRaw(Name(0), 0),
"RPAREN" => token::RParen,
"RPAREN" => token::CloseDelim(token::Paren),
"SLASH" => token::BinOp(token::Slash),
"COMMA" => token::Comma,
"LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
Expand All @@ -83,15 +83,15 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
"LIT_CHAR" => token::LitChar(Name(0)),
"LIT_BYTE" => token::LitByte(Name(0)),
"EQ" => token::Eq,
"RBRACKET" => token::RBracket,
"RBRACKET" => token::CloseDelim(token::Bracket),
"COMMENT" => token::Comment,
"DOC_COMMENT" => token::DocComment(Name(0)),
"DOT" => token::Dot,
"EQEQ" => token::EqEq,
"NE" => token::Ne,
"GE" => token::Ge,
"PERCENT" => token::BinOp(token::Percent),
"RBRACE" => token::RBrace,
"RBRACE" => token::CloseDelim(token::Brace),
"BINOP" => token::BinOp(token::Plus),
"POUND" => token::Pound,
"OROR" => token::OrOr,
Expand Down
6 changes: 3 additions & 3 deletions src/librustc/middle/save/span_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ impl<'a> SpanUtils<'a> {
last_span = None;
let mut next = toks.next_token();

if (next.tok == token::LParen ||
if (next.tok == token::OpenDelim(token::Paren) ||
next.tok == token::Lt) &&
bracket_count == 0 &&
prev.tok.is_ident() {
Expand All @@ -164,8 +164,8 @@ impl<'a> SpanUtils<'a> {
}

bracket_count += match prev.tok {
token::LParen | token::Lt => 1,
token::RParen | token::Gt => -1,
token::OpenDelim(token::Paren) | token::Lt => 1,
token::CloseDelim(token::Paren) | token::Gt => -1,
token::BinOp(token::Shr) => -2,
_ => 0
};
Expand Down
6 changes: 3 additions & 3 deletions src/librustdoc/html/highlight.rs
Original file line number Diff line number Diff line change
Expand Up @@ -97,8 +97,8 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,

// miscellaneous, no highlighting
token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
token::Colon | token::ModSep | token::LArrow | token::LParen |
token::RParen | token::LBracket | token::LBrace | token::RBrace |
token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) |
token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) |
token::Question => "",
token::Dollar => {
if lexer.peek().tok.is_ident() {
Expand All @@ -118,7 +118,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
try!(write!(out, r"<span class='attribute'>#"));
continue
}
token::RBracket => {
token::CloseDelim(token::Bracket) => {
if is_attribute {
is_attribute = false;
try!(write!(out, "]</span>"));
Expand Down
51 changes: 36 additions & 15 deletions src/libsyntax/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -595,17 +595,38 @@ pub enum CaptureClause {
CaptureByRef,
}

/// A token that delimits a sequence of token trees
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Delimiter {
pub span: Span,
pub token: ::parse::token::Token,
}
/// A delimited sequence of token trees
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Delimited {
/// The type of delimiter
pub delim: token::DelimToken,
/// The span covering the opening delimiter
pub open_span: Span,
/// The delimited sequence of token trees
pub tts: Vec<TokenTree>,
/// The span covering the closing delimiter
pub close_span: Span,
}

impl Delimited {
/// Returns the opening delimiter as a token.
pub fn open_token(&self) -> token::Token {
token::OpenDelim(self.delim)
}

/// Returns the closing delimiter as a token.
pub fn close_token(&self) -> token::Token {
token::CloseDelim(self.delim)
}

/// Returns the opening delimiter as a token tree.
pub fn open_tt(&self) -> TokenTree {
TtToken(self.open_span, self.open_token())
}

impl Delimiter {
/// Convert the delimiter to a `TtToken`
pub fn to_tt(&self) -> TokenTree {
TtToken(self.span, self.token.clone())
/// Returns the closing delimiter as a token tree.
pub fn close_tt(&self) -> TokenTree {
TtToken(self.close_span, self.close_token())
}
}

Expand Down Expand Up @@ -635,15 +656,15 @@ pub enum KleeneOp {
#[doc="For macro invocations; parsing is delegated to the macro"]
pub enum TokenTree {
/// A single token
TtToken(Span, ::parse::token::Token),
TtToken(Span, token::Token),
/// A delimited sequence of token trees
TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
TtDelimited(Span, Rc<Delimited>),

// These only make sense for right-hand-sides of MBE macros:

/// A Kleene-style repetition sequence with an optional separator.
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp),
TtSequence(Span, Rc<Vec<TokenTree>>, Option<token::Token>, KleeneOp),
/// A syntactic variable that will be filled in by macro expansion.
TtNonterminal(Span, Ident)
}
Expand Down Expand Up @@ -715,10 +736,10 @@ pub type Matcher = Spanned<Matcher_>;
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub enum Matcher_ {
/// Match one token
MatchTok(::parse::token::Token),
MatchTok(token::Token),
/// Match repetitions of a sequence: body, separator, Kleene operator,
/// lo, hi position-in-match-array used:
MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, KleeneOp, uint, uint),
MatchSeq(Vec<Matcher>, Option<token::Token>, KleeneOp, uint, uint),
/// Parse a Rust NT: name to bind, name of NT, position in match array:
MatchNonterminal(Ident, Ident, uint)
}
Expand Down
8 changes: 4 additions & 4 deletions src/libsyntax/ext/asm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -84,9 +84,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])

let span = p.last_span;

p.expect(&token::LParen);
p.expect(&token::OpenDelim(token::Paren));
let out = p.parse_expr();
p.expect(&token::RParen);
p.expect(&token::CloseDelim(token::Paren));

// Expands a read+write operand into two operands.
//
Expand Down Expand Up @@ -129,9 +129,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
cx.span_err(p.last_span, "input operand constraint contains '+'");
}

p.expect(&token::LParen);
p.expect(&token::OpenDelim(token::Paren));
let input = p.parse_expr();
p.expect(&token::RParen);
p.expect(&token::CloseDelim(token::Paren));

inputs.push((constraint, input));
}
Expand Down
32 changes: 21 additions & 11 deletions src/libsyntax/ext/quote.rs
Original file line number Diff line number Diff line change
Expand Up @@ -531,6 +531,15 @@ fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
mk_token_path(cx, sp, name)
}

fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> {
let name = match delim {
token::Paren => "Paren",
token::Bracket => "Bracket",
token::Brace => "Brace",
};
mk_token_path(cx, sp, name)
}

#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
match *tok {
Expand All @@ -542,6 +551,15 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
vec!(mk_binop(cx, sp, binop)));
}

token::OpenDelim(delim) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "OpenDelim"),
vec![mk_delim(cx, sp, delim)]);
}
token::CloseDelim(delim) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "CloseDelim"),
vec![mk_delim(cx, sp, delim)]);
}

token::LitByte(i) => {
let e_byte = mk_name(cx, sp, i.ident());

Expand Down Expand Up @@ -625,12 +643,6 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::RArrow => "RArrow",
token::LArrow => "LArrow",
token::FatArrow => "FatArrow",
token::LParen => "LParen",
token::RParen => "RParen",
token::LBracket => "LBracket",
token::RBracket => "RBracket",
token::LBrace => "LBrace",
token::RBrace => "RBrace",
token::Pound => "Pound",
token::Dollar => "Dollar",
token::Underscore => "Underscore",
Expand All @@ -640,7 +652,6 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
mk_token_path(cx, sp, name)
}


fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
match *tt {
ast::TtToken(sp, ref tok) => {
Expand All @@ -656,10 +667,9 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
vec!(cx.stmt_expr(e_push))
},
ast::TtDelimited(sp, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
mk_tt(cx, sp, &open.to_tt()).into_iter()
.chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
.chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
mk_tt(cx, sp, &delimed.open_tt()).into_iter()
.chain(delimed.tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
.chain(mk_tt(cx, sp, &delimed.close_tt()).into_iter())
.collect()
},
ast::TtSequence(..) => panic!("TtSequence in quote!"),
Expand Down
6 changes: 2 additions & 4 deletions src/libsyntax/ext/tt/macro_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -355,10 +355,8 @@ pub fn parse(sess: &ParseSess,
// Built-in nonterminals never start with these tokens,
// so we can eliminate them from consideration.
match tok {
token::RParen |
token::RBrace |
token::RBracket => {},
_ => bb_eis.push(ei)
token::CloseDelim(_) => {},
_ => bb_eis.push(ei),
}
}
MatchTok(ref t) => {
Expand Down
5 changes: 1 addition & 4 deletions src/libsyntax/ext/tt/macro_rules.rs
Original file line number Diff line number Diff line change
Expand Up @@ -172,10 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
MatchedNonterminal(NtTT(ref tt)) => {
match **tt {
// ignore delimiters
TtDelimited(_, ref delimed) => {
let (_, ref tts, _) = **delimed;
tts.clone()
},
TtDelimited(_, ref delimed) => delimed.tts.clone(),
_ => cx.span_fatal(sp, "macro rhs must be delimited"),
}
},
Expand Down
14 changes: 6 additions & 8 deletions src/libsyntax/ext/tt/transcribe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,8 +129,7 @@ impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {
fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
match *t {
TtDelimited(_, ref delimed) => {
let (_, ref tts, _) = **delimed;
tts.iter().fold(LisUnconstrained, |size, tt| {
delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
size + lockstep_iter_size(tt, r)
})
},
Expand Down Expand Up @@ -207,14 +206,13 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
};
match t {
TtDelimited(_, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
let mut forest = Vec::with_capacity(1 + tts.len() + 1);
forest.push(open.to_tt());
forest.extend(tts.iter().map(|x| (*x).clone()));
forest.push(close.to_tt());
let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
tts.push(delimed.open_tt());
tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
tts.push(delimed.close_tt());

r.stack.push(TtFrame {
forest: Rc::new(forest),
forest: Rc::new(tts),
idx: 0,
dotdotdoted: false,
sep: None
Expand Down
20 changes: 8 additions & 12 deletions src/libsyntax/fold.rs
Original file line number Diff line number Diff line change
Expand Up @@ -572,18 +572,14 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
TtToken(span, ref tok) =>
TtToken(span, fld.fold_token(tok.clone())),
TtDelimited(span, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
TtDelimited(span, Rc::new((
Delimiter {
span: open.span,
token: fld.fold_token(open.token.clone())
},
fld.fold_tts(tts.as_slice()),
Delimiter {
span: close.span,
token: fld.fold_token(close.token.clone())
},
)))
TtDelimited(span, Rc::new(
Delimited {
delim: delimed.delim,
open_span: delimed.open_span,
tts: fld.fold_tts(delimed.tts.as_slice()),
close_span: delimed.close_span,
}
))
},
TtSequence(span, ref pattern, ref sep, is_optional) =>
TtSequence(span,
Expand Down
12 changes: 6 additions & 6 deletions src/libsyntax/parse/attr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,10 +81,10 @@ impl<'a> ParserAttr for Parser<'a> {
ast::AttrOuter
};

self.expect(&token::LBracket);
self.expect(&token::OpenDelim(token::Bracket));
let meta_item = self.parse_meta_item();
let hi = self.span.hi;
self.expect(&token::RBracket);
self.expect(&token::CloseDelim(token::Bracket));

(mk_sp(lo, hi), meta_item, style)
}
Expand Down Expand Up @@ -194,7 +194,7 @@ impl<'a> ParserAttr for Parser<'a> {
let hi = self.span.hi;
P(spanned(lo, hi, ast::MetaNameValue(name, lit)))
}
token::LParen => {
token::OpenDelim(token::Paren) => {
let inner_items = self.parse_meta_seq();
let hi = self.span.hi;
P(spanned(lo, hi, ast::MetaList(name, inner_items)))
Expand All @@ -208,15 +208,15 @@ impl<'a> ParserAttr for Parser<'a> {

/// matches meta_seq = ( COMMASEP(meta_item) )
fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
self.parse_seq(&token::LParen,
&token::RParen,
self.parse_seq(&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_disallowed(token::Comma),
|p| p.parse_meta_item()).node
}

fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {
match self.token {
token::LParen => self.parse_meta_seq(),
token::OpenDelim(token::Paren) => self.parse_meta_seq(),
_ => Vec::new()
}
}
Expand Down
12 changes: 6 additions & 6 deletions src/libsyntax/parse/lexer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -967,12 +967,12 @@ impl<'a> StringReader<'a> {
token::Dot
};
}
'(' => { self.bump(); return token::LParen; }
')' => { self.bump(); return token::RParen; }
'{' => { self.bump(); return token::LBrace; }
'}' => { self.bump(); return token::RBrace; }
'[' => { self.bump(); return token::LBracket; }
']' => { self.bump(); return token::RBracket; }
'(' => { self.bump(); return token::OpenDelim(token::Paren); }
')' => { self.bump(); return token::CloseDelim(token::Paren); }
'{' => { self.bump(); return token::OpenDelim(token::Brace); }
'}' => { self.bump(); return token::CloseDelim(token::Brace); }
'[' => { self.bump(); return token::OpenDelim(token::Bracket); }
']' => { self.bump(); return token::CloseDelim(token::Bracket); }
'@' => { self.bump(); return token::At; }
'#' => { self.bump(); return token::Pound; }
'~' => { self.bump(); return token::Tilde; }
Expand Down
Loading

0 comments on commit 936d999

Please sign in to comment.