diff --git a/syntex_syntax/Cargo.toml b/syntex_syntax/Cargo.toml
index 0faa9788..1a94c065 100644
--- a/syntex_syntax/Cargo.toml
+++ b/syntex_syntax/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "syntex_syntax"
-version = "0.19.1"
+version = "0.20.0"
authors = [ "erick.tryzelaar@gmail.com" ]
license = "MIT/Apache-2.0"
description = "Export of libsyntax for code generation"
@@ -8,8 +8,8 @@ repository = "https://github.com/erickt/rust-syntex"
[dependencies]
bitflags = "^0.3.2"
-libc = "^0.1.10"
-log = "^0.3.2"
+libc = "^0.2.1"
+log = "^0.3.3"
rustc-serialize = "^0.3.16"
-term = "^0.2.11"
+term = "^0.2.13"
unicode-xid = "^0.0.3"
diff --git a/syntex_syntax/src/ast.rs b/syntex_syntax/src/ast.rs
index 7b3c33d5..8c9c8835 100644
--- a/syntex_syntax/src/ast.rs
+++ b/syntex_syntax/src/ast.rs
@@ -31,13 +31,11 @@ pub use self::MetaItem_::*;
pub use self::Mutability::*;
pub use self::Pat_::*;
pub use self::PathListItem_::*;
-pub use self::PatWildKind::*;
pub use self::PrimTy::*;
pub use self::Sign::*;
pub use self::Stmt_::*;
pub use self::StrStyle::*;
pub use self::StructFieldKind::*;
-pub use self::TokenTree::*;
pub use self::TraitItem_::*;
pub use self::Ty_::*;
pub use self::TyParamBound::*;
@@ -569,19 +567,10 @@ pub enum BindingMode {
BindByValue(Mutability),
}
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum PatWildKind {
- /// Represents the wildcard pattern `_`
- PatWildSingle,
-
- /// Represents the wildcard pattern `..`
- PatWildMulti,
-}
-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Pat_ {
- /// Represents a wildcard pattern (either `_` or `..`)
- PatWild(PatWildKind),
+ /// Represents a wildcard pattern (`_`)
+ PatWild,
/// A PatIdent may either be a new bound variable,
/// or a nullary enum (in which case the third field
@@ -593,7 +582,7 @@ pub enum Pat_ {
/// set (of "PatIdents that refer to nullary enums")
PatIdent(BindingMode, SpannedIdent, Option<P<Pat>>),
- /// "None" means a * pattern where we don't bind the fields to names.
+ /// "None" means a `Variant(..)` pattern where we don't bind the fields to names.
PatEnum(Path, Option<Vec<P<Pat>>>),
/// An associated const named using the qualified path `<T>::CONST` or
@@ -615,8 +604,8 @@ pub enum Pat_ {
PatLit(P<Expr>),
/// A range pattern, e.g. `1...2`
PatRange(P<Expr>, P<Expr>),
- /// [a, b, ..i, y, z] is represented as:
- /// PatVec(box [a, b], Some(i), box [y, z])
+ /// `[a, b, ..i, y, z]` is represented as:
+ /// `PatVec(box [a, b], Some(i), box [y, z])`
PatVec(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>),
diff --git a/syntex_syntax/src/ext/quote.rs b/syntex_syntax/src/ext/quote.rs
--- a/syntex_syntax/src/ext/quote.rs
+++ b/syntex_syntax/src/ext/quote.rs
-fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
-                            -> (P<ast::Expr>, Vec<ast::TokenTree>) {
+fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
+    -> (P<ast::Expr>, Vec<TokenTree>) {
// NB: It appears that the main parser loses its mind if we consider
// $foo as a SubstNt during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
@@ -694,7 +701,7 @@ fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
let mut p = cx.new_parser_from_tts(tts);
p.quote_depth += 1;
- let cx_expr = p.parse_expr();
+ let cx_expr = panictry!(p.parse_expr_nopanic());
if !panictry!(p.eat(&token::Comma)) {
panic!(p.fatal("expected token `,`"));
}
@@ -746,7 +753,7 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<P<ast::Stmt>> {
diff --git a/syntex_syntax/src/ext/source_util.rs b/syntex_syntax/src/ext/source_util.rs
--- a/syntex_syntax/src/ext/source_util.rs
+++ b/syntex_syntax/src/ext/source_util.rs
fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
- Some(self.p.parse_expr())
+ Some(panictry!(self.p.parse_expr_nopanic()))
}
fn make_items(mut self: Box<ExpandResult<'a>>)
-> Option<SmallVector<P<ast::Item>>> {
let mut ret = SmallVector::zero();
while self.p.token != token::Eof {
- match self.p.parse_item() {
+ match panictry!(self.p.parse_item_nopanic()) {
Some(item) => ret.push(item),
None => panic!(self.p.span_fatal(
self.p.span,
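
Note on the recurring pattern above: the quote and include expanders now call the Result-returning `*_nopanic` parser entry points and wrap them in `panictry!` to keep the old panic-on-error behavior at the call site. A minimal sketch of that convention, with deliberately simplified types (the real `PResult` carries a diagnostic, not a `String`):

// Simplified model of the Result-based parser API adopted in this diff.
type PResult<T> = Result<T, String>; // libsyntax's error type is richer than String

// Unwrap a PResult, panicking on Err -- the behavior the old
// panicking parser methods had built in.
macro_rules! panictry {
    ($e:expr) => {
        match $e {
            Ok(v) => v,
            Err(e) => panic!("{}", e),
        }
    };
}

struct Parser;

impl Parser {
    // New-style entry point: surfaces failure to the caller.
    fn parse_expr_nopanic(&mut self) -> PResult<i32> {
        Ok(42)
    }
}

fn main() {
    let mut p = Parser;
    // Call sites that still want to abort on error wrap the call,
    // exactly as the hunks above do.
    let expr = panictry!(p.parse_expr_nopanic());
    println!("{}", expr);
}
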
diff --git a/syntex_syntax/src/ext/trace_macros.rs b/syntex_syntax/src/ext/trace_macros.rs
index ab34f41d..628b88d1 100644
--- a/syntex_syntax/src/ext/trace_macros.rs
+++ b/syntex_syntax/src/ext/trace_macros.rs
@@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast;
+use ast::TokenTree;
use codemap::Span;
use ext::base::ExtCtxt;
use ext::base;
@@ -18,7 +18,7 @@ use parse::token::keywords;
pub fn expand_trace_macros(cx: &mut ExtCtxt,
sp: Span,
- tt: &[ast::TokenTree])
+ tt: &[TokenTree])
-> Box<base::MacResult+'static> {
if !cx.ecfg.enable_trace_macros() {
feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
@@ -30,10 +30,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
}
match (tt.len(), tt.first()) {
- (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::True) => {
+ (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::True) => {
cx.set_trace_macros(true);
}
- (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::False) => {
+ (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::False) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
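
The `ast::TtToken(..)` to `TokenTree::Token(..)` rewrites in this file and the ones below all stem from dropping `pub use self::TokenTree::*;` in ast.rs: without the glob re-export, variants must be qualified by the enum name. A toy enum illustrating the before/after (hypothetical names, not from this codebase):

// With `pub use self::Shape::*;` in scope, call sites could match on
// bare `Circle(..)`. Once the glob re-export is dropped, the variant
// must be written `Shape::Circle(..)`, as the diff does for TokenTree.
enum Shape {
    Circle(f64),
    Square(f64),
}

fn area(s: &Shape) -> f64 {
    match *s {
        Shape::Circle(r) => std::f64::consts::PI * r * r,
        Shape::Square(side) => side * side,
    }
}

fn main() {
    println!("{}", area(&Shape::Circle(1.0)));
}
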
diff --git a/syntex_syntax/src/ext/tt/macro_parser.rs b/syntex_syntax/src/ext/tt/macro_parser.rs
index 8dec9ae1..0e69edd7 100644
--- a/syntex_syntax/src/ext/tt/macro_parser.rs
+++ b/syntex_syntax/src/ext/tt/macro_parser.rs
@@ -80,12 +80,10 @@ use self::TokenTreeOrTokenTreeVec::*;
use ast;
use ast::{TokenTree, Name};
-use ast::{TtDelimited, TtSequence, TtToken};
use codemap::{BytePos, mk_sp, Span};
use codemap;
use parse::lexer::*; //resolve bug?
use parse::ParseSess;
-use parse::attr::ParserAttr;
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
use parse::token::{Eof, DocComment, MatchNt, SubstNt};
use parse::token::{Token, Nonterminal};
@@ -147,16 +145,16 @@ pub struct MatcherPos {
pub fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, |count, elt| {
count + match elt {
- &TtSequence(_, ref seq) => {
+ &TokenTree::Sequence(_, ref seq) => {
seq.num_captures
}
- &TtDelimited(_, ref delim) => {
+ &TokenTree::Delimited(_, ref delim) => {
count_names(&delim.tts)
}
- &TtToken(_, MatchNt(..)) => {
+ &TokenTree::Token(_, MatchNt(..)) => {
1
}
- &TtToken(_, _) => 0,
+ &TokenTree::Token(_, _) => 0,
}
})
}
@@ -206,17 +204,17 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc],
ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize) {
match m {
- &TtSequence(_, ref seq) => {
+ &TokenTree::Sequence(_, ref seq) => {
for next_m in &seq.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
- &TtDelimited(_, ref delim) => {
+ &TokenTree::Delimited(_, ref delim) => {
for next_m in &delim.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
- &TtToken(sp, MatchNt(bind_name, _, _, _)) => {
+ &TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => {
match ret_val.entry(bind_name.name) {
Vacant(spot) => {
spot.insert(res[*idx].clone());
@@ -230,8 +228,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
}
}
}
- &TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
- &TtToken(_, _) => (),
+ &TokenTree::Token(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
+ &TokenTree::Token(_, _) => (),
}
}
let mut ret_val = HashMap::new();
@@ -363,7 +361,7 @@ pub fn parse(sess: &ParseSess,
} else {
match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */
- TtSequence(sp, seq) => {
+ TokenTree::Sequence(sp, seq) => {
if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures;
@@ -389,10 +387,10 @@ pub fn parse(sess: &ParseSess,
match_hi: ei_t.match_cur + seq.num_captures,
up: Some(ei_t),
sp_lo: sp.lo,
- top_elts: Tt(TtSequence(sp, seq)),
+ top_elts: Tt(TokenTree::Sequence(sp, seq)),
}));
}
- TtToken(_, MatchNt(..)) => {
+ TokenTree::Token(_, MatchNt(..)) => {
// Built-in nonterminals never start with these tokens,
// so we can eliminate them from consideration.
match tok {
@@ -400,10 +398,10 @@ pub fn parse(sess: &ParseSess,
_ => bb_eis.push(ei),
}
}
- TtToken(sp, SubstNt(..)) => {
+ TokenTree::Token(sp, SubstNt(..)) => {
return Error(sp, "missing fragment specifier".to_string())
}
- seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
+ seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
let idx = ei.idx;
ei.stack.push(MatcherTtFrame {
@@ -413,7 +411,7 @@ pub fn parse(sess: &ParseSess,
ei.idx = 0;
cur_eis.push(ei);
}
- TtToken(_, ref t) => {
+ TokenTree::Token(_, ref t) => {
let mut ei_t = ei.clone();
if token_name_eq(t,&tok) {
ei_t.idx += 1;
@@ -441,7 +439,7 @@ pub fn parse(sess: &ParseSess,
if (!bb_eis.is_empty() && !next_eis.is_empty())
|| bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
- TtToken(_, MatchNt(bind, name, _, _)) => {
+ TokenTree::Token(_, MatchNt(bind, name, _, _)) => {
format!("{} ('{}')", name, bind)
}
_ => panic!()
@@ -469,7 +467,7 @@ pub fn parse(sess: &ParseSess,
let mut ei = bb_eis.pop().unwrap();
match ei.top_elts.get_tt(ei.idx) {
- TtToken(span, MatchNt(_, ident, _, _)) => {
+ TokenTree::Token(span, MatchNt(_, ident, _, _)) => {
let match_cur = ei.match_cur;
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
@@ -503,18 +501,18 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
// check at the beginning and the parser checks after each bump
panictry!(p.check_unknown_macro_variable());
match name {
- "item" => match p.parse_item() {
+ "item" => match panictry!(p.parse_item_nopanic()) {
Some(i) => token::NtItem(i),
None => panic!(p.fatal("expected an item keyword"))
},
"block" => token::NtBlock(panictry!(p.parse_block())),
- "stmt" => match p.parse_stmt() {
+ "stmt" => match panictry!(p.parse_stmt_nopanic()) {
Some(s) => token::NtStmt(s),
None => panic!(p.fatal("expected a statement"))
},
- "pat" => token::NtPat(p.parse_pat()),
- "expr" => token::NtExpr(p.parse_expr()),
- "ty" => token::NtTy(p.parse_ty()),
+ "pat" => token::NtPat(panictry!(p.parse_pat_nopanic())),
+ "expr" => token::NtExpr(panictry!(p.parse_expr_nopanic())),
+ "ty" => token::NtTy(panictry!(p.parse_ty_nopanic())),
// this could be handled like a token, since it is one
"ident" => match p.token {
token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(Box::new(sn),b) }
@@ -527,7 +525,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
"path" => {
token::NtPath(Box::new(panictry!(p.parse_path(LifetimeAndTypesWithoutColons))))
},
- "meta" => token::NtMeta(p.parse_meta_item()),
+ "meta" => token::NtMeta(panictry!(p.parse_meta_item())),
_ => {
panic!(p.span_fatal_help(sp,
&format!("invalid fragment specifier `{}`", name),
diff --git a/syntex_syntax/src/ext/tt/macro_rules.rs b/syntex_syntax/src/ext/tt/macro_rules.rs
index cce4450b..4e5825d1 100644
--- a/syntex_syntax/src/ext/tt/macro_rules.rs
+++ b/syntex_syntax/src/ext/tt/macro_rules.rs
@@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken};
+use ast::{self, TokenTree};
use codemap::{Span, DUMMY_SP};
use ext::base::{ExtCtxt, MacResult, SyntaxExtension};
use ext::base::{NormalTT, TTMacroExpander};
@@ -26,6 +26,7 @@ use util::small_vector::SmallVector;
use std::cell::RefCell;
use std::rc::Rc;
+use std::iter::once;
struct ParserAnyMacro<'a> {
parser: RefCell<Parser<'a>>,
@@ -66,18 +67,18 @@ impl<'a> ParserAnyMacro<'a> {
impl<'a> MacResult for ParserAnyMacro<'a> {
fn make_expr(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Expr>> {
- let ret = self.parser.borrow_mut().parse_expr();
+ let ret = panictry!(self.parser.borrow_mut().parse_expr_nopanic());
self.ensure_complete_parse(true);
Some(ret)
}
fn make_pat(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Pat>> {
- let ret = self.parser.borrow_mut().parse_pat();
+ let ret = panictry!(self.parser.borrow_mut().parse_pat_nopanic());
self.ensure_complete_parse(false);
Some(ret)
}
fn make_items(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Item>>> {
let mut ret = SmallVector::zero();
- while let Some(item) = self.parser.borrow_mut().parse_item() {
+ while let Some(item) = panictry!(self.parser.borrow_mut().parse_item_nopanic()) {
ret.push(item);
}
self.ensure_complete_parse(false);
@@ -119,7 +120,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> {
}
fn make_ty(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Ty>> {
- let ret = self.parser.borrow_mut().parse_ty();
+ let ret = panictry!(self.parser.borrow_mut().parse_ty_nopanic());
self.ensure_complete_parse(true);
Some(ret)
}
@@ -171,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
match **lhs {
MatchedNonterminal(NtTT(ref lhs_tt)) => {
let lhs_tt = match **lhs_tt {
- TtDelimited(_, ref delim) => &delim.tts[..],
+ TokenTree::Delimited(_, ref delim) => &delim.tts[..],
_ => panic!(cx.span_fatal(sp, "malformed macro lhs"))
};
@@ -182,7 +183,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
MatchedNonterminal(NtTT(ref tt)) => {
match **tt {
// ignore delimiters
- TtDelimited(_, ref delimed) => delimed.tts.clone(),
+ TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
_ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")),
}
},
@@ -243,21 +244,21 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
let argument_gram = vec!(
- TtSequence(DUMMY_SP,
+ TokenTree::Sequence(DUMMY_SP,
Rc::new(ast::SequenceRepetition {
tts: vec![
- TtToken(DUMMY_SP, match_lhs_tok),
- TtToken(DUMMY_SP, token::FatArrow),
- TtToken(DUMMY_SP, match_rhs_tok)],
+ TokenTree::Token(DUMMY_SP, match_lhs_tok),
+ TokenTree::Token(DUMMY_SP, token::FatArrow),
+ TokenTree::Token(DUMMY_SP, match_rhs_tok)],
separator: Some(token::Semi),
op: ast::OneOrMore,
num_captures: 2
})),
//to phase into semicolon-termination instead of
//semicolon-separation
- TtSequence(DUMMY_SP,
+ TokenTree::Sequence(DUMMY_SP,
Rc::new(ast::SequenceRepetition {
- tts: vec![TtToken(DUMMY_SP, token::Semi)],
+ tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
separator: None,
op: ast::ZeroOrMore,
num_captures: 0
@@ -307,14 +308,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
}
fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
- // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where the entire lhs is
- // those tts. Or, it can be a "bare sequence", not wrapped in parens.
+ // lhs is going to be like MatchedNonterminal(NtTT(TokenTree::Delimited(...))), where the
+ // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
match lhs {
&MatchedNonterminal(NtTT(ref inner)) => match &**inner {
- &TtDelimited(_, ref tts) => {
+ &TokenTree::Delimited(_, ref tts) => {
check_matcher(cx, tts.tts.iter(), &Eof);
},
- tt @ &TtSequence(..) => {
+ tt @ &TokenTree::Sequence(..) => {
check_matcher(cx, Some(tt).into_iter(), &Eof);
},
_ => cx.span_err(sp, "Invalid macro matcher; matchers must be contained \
@@ -327,7 +328,7 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
// after parsing/expansion. we can report every error in every macro this way.
}
-// returns the last token that was checked, for TtSequence. this gets used later on.
+// returns the last token that was checked, for TokenTree::Sequence. this gets used later on.
fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
-> Option<(Span, Token)> where I: Iterator<Item=&'a TokenTree> {
use print::pprust::token_to_string;
@@ -338,7 +339,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
let mut tokens = matcher.peekable();
while let Some(token) = tokens.next() {
last = match *token {
- TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
+ TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
// ii. If T is a simple NT, look ahead to the next token T' in
// M. If T' is in the set FOLLOW(NT), continue. Else; reject.
if can_be_followed_by_any(&frag_spec.name.as_str()) {
@@ -346,9 +347,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
} else {
let next_token = match tokens.peek() {
// If T' closes a complex NT, replace T' with F
- Some(&&TtToken(_, CloseDelim(_))) => follow.clone(),
- Some(&&TtToken(_, ref tok)) => tok.clone(),
- Some(&&TtSequence(sp, _)) => {
+ Some(&&TokenTree::Token(_, CloseDelim(_))) => follow.clone(),
+ Some(&&TokenTree::Token(_, ref tok)) => tok.clone(),
+ Some(&&TokenTree::Sequence(sp, _)) => {
// Be conservative around sequences: to be
// more specific, we would need to
// consider FIRST sets, but also the
@@ -366,12 +367,16 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
Eof
},
// die next iteration
- Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+ Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(),
// else, we're at the end of the macro or sequence
None => follow.clone()
};
- let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() };
+ let tok = if let TokenTree::Token(_, ref tok) = *token {
+ tok
+ } else {
+ unreachable!()
+ };
// If T' is in the set FOLLOW(NT), continue. Else, reject.
match (&next_token, is_in_follow(cx, &next_token, &frag_spec.name.as_str())) {
@@ -391,7 +396,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
}
}
},
- TtSequence(sp, ref seq) => {
+ TokenTree::Sequence(sp, ref seq) => {
// iii. Else, T is a complex NT.
match seq.separator {
// If T has the form $(...)U+ or $(...)U* for some token U,
@@ -408,8 +413,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
// but conservatively correct.
Some((span, tok)) => {
let fol = match tokens.peek() {
- Some(&&TtToken(_, ref tok)) => tok.clone(),
- Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+ Some(&&TokenTree::Token(_, ref tok)) => tok.clone(),
+ Some(&&TokenTree::Delimited(_, ref delim)) =>
+ delim.close_token(),
Some(_) => {
cx.span_err(sp, "sequence repetition followed by \
another sequence repetition, which is not allowed");
@@ -417,7 +423,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
},
None => Eof
};
- check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(),
+ check_matcher(cx, once(&TokenTree::Token(span, tok.clone())),
&fol)
},
None => last,
@@ -428,8 +434,8 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
// sequence. If it accepts, continue, else, reject.
None => {
let fol = match tokens.peek() {
- Some(&&TtToken(_, ref tok)) => tok.clone(),
- Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+ Some(&&TokenTree::Token(_, ref tok)) => tok.clone(),
+ Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(),
Some(_) => {
cx.span_err(sp, "sequence repetition followed by another \
sequence repetition, which is not allowed");
@@ -441,11 +447,11 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
}
}
},
- TtToken(..) => {
+ TokenTree::Token(..) => {
// i. If T is not an NT, continue.
continue
},
- TtDelimited(_, ref tts) => {
+ TokenTree::Delimited(_, ref tts) => {
// if we don't pass in that close delimiter, we'll incorrectly consider the matcher
// `{ $foo:ty }` as having a follow that isn't `RBrace`
check_matcher(cx, tts.tts.iter(), &tts.close_token())
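
One small cleanup in this file deserves a note: `Some(&tt).into_iter()` and `std::iter::once(&tt)` produce the same one-element iterator, so the `check_matcher` change is purely stylistic. For example:

use std::iter::once;

fn main() {
    let x = 1;
    // Both iterators yield exactly one &i32.
    let via_option: Vec<&i32> = Some(&x).into_iter().collect();
    let via_once: Vec<&i32> = once(&x).collect();
    assert_eq!(via_option, via_once);
}
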
diff --git a/syntex_syntax/src/ext/tt/transcribe.rs b/syntex_syntax/src/ext/tt/transcribe.rs
index d1e48eda..0fc31f3f 100644
--- a/syntex_syntax/src/ext/tt/transcribe.rs
+++ b/syntex_syntax/src/ext/tt/transcribe.rs
@@ -10,7 +10,7 @@
use self::LockstepIterSize::*;
use ast;
-use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name};
+use ast::{TokenTree, Ident, Name};
use codemap::{Span, DUMMY_SP};
use diagnostic::SpanHandler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@@ -53,7 +53,7 @@ pub struct TtReader<'a> {
}
/// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None.
pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Name, Rc<NamedMatch>>>,
@@ -67,7 +67,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
/// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
///
/// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None.
pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Name, Rc<NamedMatch>>>,
@@ -78,7 +78,7 @@ pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
let mut r = TtReader {
sp_diag: sp_diag,
stack: vec!(TtFrame {
- forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
+ forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
tts: src,
// doesn't matter. This merely holds the root unzipping.
separator: None, op: ast::ZeroOrMore, num_captures: 0
@@ -151,17 +151,17 @@ impl Add for LockstepIterSize {
fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
match *t {
- TtDelimited(_, ref delimed) => {
+ TokenTree::Delimited(_, ref delimed) => {
delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
size + lockstep_iter_size(tt, r)
})
},
- TtSequence(_, ref seq) => {
+ TokenTree::Sequence(_, ref seq) => {
seq.tts.iter().fold(LisUnconstrained, |size, tt| {
size + lockstep_iter_size(tt, r)
})
},
- TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) =>
+ TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) =>
match lookup_cur_matched(r, name) {
Some(matched) => match *matched {
MatchedNonterminal(_) => LisUnconstrained,
@@ -169,7 +169,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
},
_ => LisUnconstrained
},
- TtToken(..) => LisUnconstrained,
+ TokenTree::Token(..) => LisUnconstrained,
}
}
@@ -232,17 +232,17 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
}
}
- loop { /* because it's easiest, this handles `TtDelimited` not starting
- with a `TtToken`, even though it won't happen */
+ loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
+ with a `TokenTree::Token`, even though it won't happen */
let t = {
let frame = r.stack.last().unwrap();
// FIXME(pcwalton): Bad copy.
frame.forest.get_tt(frame.idx)
};
match t {
- TtSequence(sp, seq) => {
+ TokenTree::Sequence(sp, seq) => {
// FIXME(pcwalton): Bad copy.
- match lockstep_iter_size(&TtSequence(sp, seq.clone()),
+ match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
r) {
LisUnconstrained => {
panic!(r.sp_diag.span_fatal(
@@ -272,20 +272,20 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
idx: 0,
dotdotdoted: true,
sep: seq.separator.clone(),
- forest: TtSequence(sp, seq),
+ forest: TokenTree::Sequence(sp, seq),
});
}
}
}
// FIXME #2887: think about span stuff here
- TtToken(sp, SubstNt(ident, namep)) => {
+ TokenTree::Token(sp, SubstNt(ident, namep)) => {
r.stack.last_mut().unwrap().idx += 1;
match lookup_cur_matched(r, ident) {
None => {
r.cur_span = sp;
r.cur_tok = SubstNt(ident, namep);
return ret_val;
- // this can't be 0 length, just like TtDelimited
+ // this can't be 0 length, just like TokenTree::Delimited
}
Some(cur_matched) => {
match *cur_matched {
@@ -313,8 +313,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
}
}
- // TtDelimited or any token that can be unzipped
- seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
+ // TokenTree::Delimited or any token that can be unzipped
+ seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
// do not advance the idx yet
r.stack.push(TtFrame {
forest: seq,
@@ -324,15 +324,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
});
// if this could be 0-length, we'd need to potentially recur here
}
- TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
+ TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
r.stack.push(TtFrame {
- forest: TtToken(sp, DocComment(name)),
+ forest: TokenTree::Token(sp, DocComment(name)),
idx: 0,
dotdotdoted: false,
sep: None
});
}
- TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
+ TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
r.stack.last_mut().unwrap().idx += 1;
if r.imported_from.is_some() {
@@ -344,7 +344,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
// otherwise emit nothing and proceed to the next token
}
- TtToken(sp, tok) => {
+ TokenTree::Token(sp, tok) => {
r.cur_span = sp;
r.cur_tok = tok;
r.stack.last_mut().unwrap().idx += 1;
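
`lockstep_iter_size` folds subtree sizes together so that transcription can detect `$var`s repeated a mismatched number of times. A sketch of that combining logic under simplified names (an illustration, not the real `LockstepIterSize`):

// A sequence's repeat count is unconstrained until some $var pins it;
// two different pins are a contradiction.
#[derive(Debug, PartialEq)]
enum Size {
    Unconstrained,
    Constraint(usize),
    Contradiction,
}

fn combine(a: Size, b: Size) -> Size {
    match (a, b) {
        (Size::Unconstrained, x) | (x, Size::Unconstrained) => x,
        (Size::Constraint(m), Size::Constraint(n)) if m == n => Size::Constraint(m),
        _ => Size::Contradiction,
    }
}

fn main() {
    // Mirrors delimed.tts.iter().fold(LisUnconstrained, ..) above.
    let sizes = vec![Size::Unconstrained, Size::Constraint(3), Size::Constraint(3)];
    assert_eq!(sizes.into_iter().fold(Size::Unconstrained, combine),
               Size::Constraint(3));
}
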
diff --git a/syntex_syntax/src/fold.rs b/syntex_syntax/src/fold.rs
index 282c69d2..a1a237a3 100644
--- a/syntex_syntax/src/fold.rs
+++ b/syntex_syntax/src/fold.rs
@@ -579,10 +579,10 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
match *tt {
- TtToken(span, ref tok) =>
- TtToken(span, fld.fold_token(tok.clone())),
- TtDelimited(span, ref delimed) => {
- TtDelimited(span, Rc::new(
+ TokenTree::Token(span, ref tok) =>
+ TokenTree::Token(span, fld.fold_token(tok.clone())),
+ TokenTree::Delimited(span, ref delimed) => {
+ TokenTree::Delimited(span, Rc::new(
Delimited {
delim: delimed.delim,
open_span: delimed.open_span,
@@ -591,8 +591,8 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
}
))
},
- TtSequence(span, ref seq) =>
- TtSequence(span,
+ TokenTree::Sequence(span, ref seq) =>
+ TokenTree::Sequence(span,
Rc::new(SequenceRepetition {
tts: fld.fold_tts(&seq.tts),
separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
@@ -1118,7 +1118,7 @@ pub fn noop_fold_pat<T: Folder>(p: P<Pat>, folder: &mut T) -> P<Pat> {
p.map(|Pat {id, node, span}| Pat {
id: folder.new_id(id),
node: match node {
- PatWild(k) => PatWild(k),
+ PatWild => PatWild,
PatIdent(binding_mode, pth1, sub) => {
PatIdent(binding_mode,
Spanned{span: folder.new_span(pth1.span),
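
`noop_fold_tt` and `noop_fold_pat` show the standard fold shape: rebuild each node while recursing into children through the `Folder`, so an implementation can override just the cases it cares about. A minimal version of the idea on a toy tree (hypothetical names, not the real `Folder` trait):

#[derive(Debug)]
enum Expr {
    Lit(i64),
    Neg(Box<Expr>),
    Add(Box<Expr>, Box<Expr>),
}

trait Folder {
    // The default is the "noop" fold; implementations override the
    // hooks they care about, like fold.rs's Folder methods.
    fn fold_lit(&mut self, n: i64) -> i64 { n }

    fn fold_expr(&mut self, e: Expr) -> Expr {
        match e {
            Expr::Lit(n) => Expr::Lit(self.fold_lit(n)),
            Expr::Neg(inner) => Expr::Neg(Box::new(self.fold_expr(*inner))),
            Expr::Add(l, r) => Expr::Add(Box::new(self.fold_expr(*l)),
                                         Box::new(self.fold_expr(*r))),
        }
    }
}

struct Doubler;
impl Folder for Doubler {
    fn fold_lit(&mut self, n: i64) -> i64 { n * 2 }
}

fn main() {
    let e = Expr::Add(Box::new(Expr::Lit(1)), Box::new(Expr::Lit(2)));
    let mut f = Doubler;
    println!("{:?}", f.fold_expr(e)); // Add(Lit(2), Lit(4))
}
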
diff --git a/syntex_syntax/src/parse/attr.rs b/syntex_syntax/src/parse/attr.rs
index 21936009..5df2478d 100644
--- a/syntex_syntax/src/parse/attr.rs
+++ b/syntex_syntax/src/parse/attr.rs
@@ -12,30 +12,21 @@ use attr;
use ast;
use codemap::{spanned, Spanned, mk_sp, Span};
use parse::common::*; //resolve bug?
+use parse::PResult;
use parse::token;
use parse::parser::{Parser, TokenType};
use ptr::P;
-/// A parser that can parse attributes.
-pub trait ParserAttr {
- fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute>;
- fn parse_inner_attributes(&mut self) -> Vec<ast::Attribute>;
- fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute;
- fn parse_meta_item(&mut self) -> P<ast::MetaItem>;
- fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>>;
- fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>>;
-}
-
-impl<'a> ParserAttr for Parser<'a> {
+impl<'a> Parser<'a> {
/// Parse attributes that appear before an item
- fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> {
+ pub fn parse_outer_attributes(&mut self) -> PResult<Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = Vec::new();
loop {
debug!("parse_outer_attributes: self.token={:?}",
self.token);
match self.token {
token::Pound => {
- attrs.push(self.parse_attribute(false));
+ attrs.push(try!(self.parse_attribute(false)));
}
token::DocComment(s) => {
let attr = ::attr::mk_sugared_doc_attr(
@@ -45,32 +36,32 @@ impl<'a> ParserAttr for Parser<'a> {
self.span.hi
);
if attr.node.style != ast::AttrStyle::Outer {
- panic!(self.fatal("expected outer comment"));
+ return Err(self.fatal("expected outer comment"));
}
attrs.push(attr);
- panictry!(self.bump());
+ try!(self.bump());
}
_ => break
}
}
- return attrs;
+ return Ok(attrs);
}
/// Matches `attribute = # ! [ meta_item ]`
///
/// If permit_inner is true, then a leading `!` indicates an inner
/// attribute
- fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute {
+ pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult {
debug!("parse_attributes: permit_inner={:?} self.token={:?}",
permit_inner, self.token);
let (span, value, mut style) = match self.token {
token::Pound => {
let lo = self.span.lo;
- panictry!(self.bump());
+ try!(self.bump());
if permit_inner { self.expected_tokens.push(TokenType::Token(token::Not)); }
let style = if self.token == token::Not {
- panictry!(self.bump());
+ try!(self.bump());
if !permit_inner {
let span = self.span;
self.span_err(span,
@@ -84,27 +75,27 @@ impl<'a> ParserAttr for Parser<'a> {
ast::AttrStyle::Outer
};
- panictry!(self.expect(&token::OpenDelim(token::Bracket)));
- let meta_item = self.parse_meta_item();
+ try!(self.expect(&token::OpenDelim(token::Bracket)));
+ let meta_item = try!(self.parse_meta_item());
let hi = self.span.hi;
- panictry!(self.expect(&token::CloseDelim(token::Bracket)));
+ try!(self.expect(&token::CloseDelim(token::Bracket)));
(mk_sp(lo, hi), meta_item, style)
}
_ => {
let token_str = self.this_token_to_string();
- panic!(self.fatal(&format!("expected `#`, found `{}`", token_str)));
+ return Err(self.fatal(&format!("expected `#`, found `{}`", token_str)));
}
};
if permit_inner && self.token == token::Semi {
- panictry!(self.bump());
+ try!(self.bump());
self.span_warn(span, "this inner attribute syntax is deprecated. \
The new syntax is `#![foo]`, with a bang and no semicolon");
style = ast::AttrStyle::Inner;
}
- return Spanned {
+ Ok(Spanned {
span: span,
node: ast::Attribute_ {
id: attr::mk_attr_id(),
@@ -112,7 +103,7 @@ impl<'a> ParserAttr for Parser<'a> {
value: value,
is_sugared_doc: false
}
- };
+ })
}
/// Parse attributes that appear after the opening of an item. These should
@@ -120,7 +111,7 @@ impl<'a> ParserAttr for Parser<'a> {
/// terminated by a semicolon.
/// matches inner_attrs*
- fn parse_inner_attributes(&mut self) -> Vec<ast::Attribute> {
+ pub fn parse_inner_attributes(&mut self) -> PResult<Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = vec![];
loop {
match self.token {
@@ -130,7 +121,7 @@ impl<'a> ParserAttr for Parser<'a> {
break;
}
- let attr = self.parse_attribute(true);
+ let attr = try!(self.parse_attribute(true));
assert!(attr.node.style == ast::AttrStyle::Inner);
attrs.push(attr);
}
@@ -141,7 +132,7 @@ impl<'a> ParserAttr for Parser<'a> {
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
if attr.node.style == ast::AttrStyle::Inner {
attrs.push(attr);
- panictry!(self.bump());
+ try!(self.bump());
} else {
break;
}
@@ -149,13 +140,13 @@ impl<'a> ParserAttr for Parser<'a> {
_ => break
}
}
- attrs
+ Ok(attrs)
}
/// matches meta_item = IDENT
/// | IDENT = lit
/// | IDENT meta_seq
- fn parse_meta_item(&mut self) -> P<ast::MetaItem> {
+ pub fn parse_meta_item(&mut self) -> PResult<P<ast::MetaItem>> {
let nt_meta = match self.token {
token::Interpolated(token::NtMeta(ref e)) => {
Some(e.clone())
@@ -165,19 +156,19 @@ impl<'a> ParserAttr for Parser<'a> {
match nt_meta {
Some(meta) => {
- panictry!(self.bump());
- return meta;
+ try!(self.bump());
+ return Ok(meta);
}
None => {}
}
let lo = self.span.lo;
- let ident = panictry!(self.parse_ident());
+ let ident = try!(self.parse_ident());
let name = self.id_to_interned_str(ident);
match self.token {
token::Eq => {
- panictry!(self.bump());
- let lit = panictry!(self.parse_lit());
+ try!(self.bump());
+ let lit = try!(self.parse_lit());
// FIXME #623 Non-string meta items are not serialized correctly;
// just forbid them for now
match lit.node {
@@ -189,32 +180,25 @@ impl<'a> ParserAttr for Parser<'a> {
}
}
let hi = self.span.hi;
- P(spanned(lo, hi, ast::MetaNameValue(name, lit)))
+ Ok(P(spanned(lo, hi, ast::MetaNameValue(name, lit))))
}
token::OpenDelim(token::Paren) => {
- let inner_items = self.parse_meta_seq();
+ let inner_items = try!(self.parse_meta_seq());
let hi = self.span.hi;
- P(spanned(lo, hi, ast::MetaList(name, inner_items)))
+ Ok(P(spanned(lo, hi, ast::MetaList(name, inner_items))))
}
_ => {
let hi = self.last_span.hi;
- P(spanned(lo, hi, ast::MetaWord(name)))
+ Ok(P(spanned(lo, hi, ast::MetaWord(name))))
}
}
}
/// matches meta_seq = ( COMMASEP(meta_item) )
- fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {