diff --git a/syntex_syntax/Cargo.toml b/syntex_syntax/Cargo.toml
index 0faa9788..1a94c065 100644
--- a/syntex_syntax/Cargo.toml
+++ b/syntex_syntax/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "syntex_syntax"
-version = "0.19.1"
+version = "0.20.0"
 authors = [ "erick.tryzelaar@gmail.com" ]
 license = "MIT/Apache-2.0"
 description = "Export of libsyntax for code generation"
@@ -8,8 +8,8 @@ repository = "https://github.com/erickt/rust-syntex"
 
 [dependencies]
 bitflags = "^0.3.2"
-libc = "^0.1.10"
-log = "^0.3.2"
+libc = "^0.2.1"
+log = "^0.3.3"
 rustc-serialize = "^0.3.16"
-term = "^0.2.11"
+term = "^0.2.13"
 unicode-xid = "^0.0.3"
diff --git a/syntex_syntax/src/ast.rs b/syntex_syntax/src/ast.rs
index 7b3c33d5..8c9c8835 100644
--- a/syntex_syntax/src/ast.rs
+++ b/syntex_syntax/src/ast.rs
@@ -31,13 +31,11 @@ pub use self::MetaItem_::*;
 pub use self::Mutability::*;
 pub use self::Pat_::*;
 pub use self::PathListItem_::*;
-pub use self::PatWildKind::*;
 pub use self::PrimTy::*;
 pub use self::Sign::*;
 pub use self::Stmt_::*;
 pub use self::StrStyle::*;
 pub use self::StructFieldKind::*;
-pub use self::TokenTree::*;
 pub use self::TraitItem_::*;
 pub use self::Ty_::*;
 pub use self::TyParamBound::*;
@@ -569,19 +567,10 @@ pub enum BindingMode {
     BindByValue(Mutability),
 }
 
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum PatWildKind {
-    /// Represents the wildcard pattern `_`
-    PatWildSingle,
-
-    /// Represents the wildcard pattern `..`
-    PatWildMulti,
-}
-
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum Pat_ {
-    /// Represents a wildcard pattern (either `_` or `..`)
-    PatWild(PatWildKind),
+    /// Represents a wildcard pattern (`_`)
+    PatWild,
 
     /// A PatIdent may either be a new bound variable,
     /// or a nullary enum (in which case the third field
@@ -593,7 +582,7 @@
     /// set (of "PatIdents that refer to nullary enums")
     PatIdent(BindingMode, SpannedIdent, Option<P<Pat>>),
 
-    /// "None" means a * pattern where we don't bind the fields to names.
+    /// "None" means a `Variant(..)` pattern where we don't bind the fields to names.
     PatEnum(Path, Option<Vec<P<Pat>>>),
 
     /// An associated const named using the qualified path `<T>::CONST` or
@@ -615,8 +604,8 @@
     PatLit(P<Expr>),
     /// A range pattern, e.g. `1...2`
     PatRange(P<Expr>, P<Expr>),
-    /// [a, b, ..i, y, z] is represented as:
-    /// PatVec(box [a, b], Some(i), box [y, z])
+    /// `[a, b, ..i, y, z]` is represented as:
+    /// `PatVec(box [a, b], Some(i), box [y, z])`
     PatVec(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>),
     /// A macro pattern; pre-expansion
     PatMac(Mac),
@@ -964,12 +953,12 @@ impl Delimited {
 
     /// Returns the opening delimiter as a token tree.
     pub fn open_tt(&self) -> TokenTree {
-        TtToken(self.open_span, self.open_token())
+        TokenTree::Token(self.open_span, self.open_token())
     }
 
     /// Returns the closing delimiter as a token tree.
     pub fn close_tt(&self) -> TokenTree {
-        TtToken(self.close_span, self.close_token())
+        TokenTree::Token(self.close_span, self.close_token())
     }
 }
 
@@ -1009,61 +998,61 @@
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum TokenTree {
     /// A single token
-    TtToken(Span, token::Token),
+    Token(Span, token::Token),
     /// A delimited sequence of token trees
-    TtDelimited(Span, Rc<Delimited>),
+    Delimited(Span, Rc<Delimited>),
 
     // This only makes sense in MBE macros.
 
     /// A kleene-style repetition sequence with a span
     // FIXME(eddyb) #12938 Use DST.
-    TtSequence(Span, Rc<SequenceRepetition>),
+    Sequence(Span, Rc<SequenceRepetition>),
 }
 
 impl TokenTree {
     pub fn len(&self) -> usize {
         match *self {
-            TtToken(_, token::DocComment(name)) => {
+            TokenTree::Token(_, token::DocComment(name)) => {
                 match doc_comment_style(&name.as_str()) {
                     AttrStyle::Outer => 2,
                     AttrStyle::Inner => 3
                 }
             }
-            TtToken(_, token::SpecialVarNt(..)) => 2,
-            TtToken(_, token::MatchNt(..)) => 3,
-            TtDelimited(_, ref delimed) => {
+            TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
+            TokenTree::Token(_, token::MatchNt(..)) => 3,
+            TokenTree::Delimited(_, ref delimed) => {
                 delimed.tts.len() + 2
             }
-            TtSequence(_, ref seq) => {
+            TokenTree::Sequence(_, ref seq) => {
                 seq.tts.len()
             }
-            TtToken(..) => 0
+            TokenTree::Token(..) => 0
         }
     }
 
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
-            (&TtToken(sp, token::DocComment(_)), 0) => {
-                TtToken(sp, token::Pound)
+            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
+                TokenTree::Token(sp, token::Pound)
             }
-            (&TtToken(sp, token::DocComment(name)), 1)
+            (&TokenTree::Token(sp, token::DocComment(name)), 1)
             if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
-                TtToken(sp, token::Not)
+                TokenTree::Token(sp, token::Not)
             }
-            (&TtToken(sp, token::DocComment(name)), _) => {
+            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
                 let stripped = strip_doc_comment_decoration(&name.as_str());
-                TtDelimited(sp, Rc::new(Delimited {
+                TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
                     open_span: sp,
-                    tts: vec![TtToken(sp, token::Ident(token::str_to_ident("doc"),
-                                                       token::Plain)),
-                              TtToken(sp, token::Eq),
-                              TtToken(sp, token::Literal(
+                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"),
+                                                                token::Plain)),
+                              TokenTree::Token(sp, token::Eq),
+                              TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(token::intern(&stripped), 0), None))],
                     close_span: sp,
                 }))
             }
-            (&TtDelimited(_, ref delimed), _) => {
+            (&TokenTree::Delimited(_, ref delimed), _) => {
                 if index == 0 {
                     return delimed.open_tt();
                 }
@@ -1072,19 +1061,19 @@ impl TokenTree {
                 }
                 delimed.tts[index - 1].clone()
             }
-            (&TtToken(sp, token::SpecialVarNt(var)), _) => {
-                let v = [TtToken(sp, token::Dollar),
-                         TtToken(sp, token::Ident(token::str_to_ident(var.as_str()),
-                                                  token::Plain))];
+            (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
+                let v = [TokenTree::Token(sp, token::Dollar),
+                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()),
+                                                           token::Plain))];
                 v[index].clone()
             }
-            (&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
-                let v = [TtToken(sp, token::SubstNt(name, name_st)),
-                         TtToken(sp, token::Colon),
-                         TtToken(sp, token::Ident(kind, kind_st))];
+            (&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
+                let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)),
+                         TokenTree::Token(sp, token::Colon),
+                         TokenTree::Token(sp, token::Ident(kind, kind_st))];
                 v[index].clone()
             }
-            (&TtSequence(_, ref seq), _) => {
+            (&TokenTree::Sequence(_, ref seq), _) => {
                 seq.tts[index].clone()
             }
             _ => panic!("Cannot expand a token tree")
@@ -1094,9 +1083,9 @@ impl TokenTree {
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
-            TtToken(span, _) => span,
-            TtDelimited(span, _) => span,
-            TtSequence(span, _) => span,
+            TokenTree::Token(span, _) => span,
+            TokenTree::Delimited(span, _) => span,
+            TokenTree::Sequence(span, _) => span,
         }
     }
 
@@ -1736,6 +1725,12 @@ impl StructFieldKind {
             NamedField(..)
=> false, } } + + pub fn visibility(&self) -> Visibility { + match *self { + NamedField(_, vis) | UnnamedField(vis) => vis + } + } } /// Fields and Ids of enum variants and structs diff --git a/syntex_syntax/src/diagnostic.rs b/syntex_syntax/src/diagnostic.rs index 9fce2043..b7775105 100644 --- a/syntex_syntax/src/diagnostic.rs +++ b/syntex_syntax/src/diagnostic.rs @@ -206,9 +206,9 @@ impl Handler { can_emit_warnings: can_emit_warnings } } - pub fn fatal(&self, msg: &str) -> ! { + pub fn fatal(&self, msg: &str) -> FatalError { self.emit.borrow_mut().emit(None, msg, None, Fatal); - panic!(FatalError); + FatalError } pub fn err(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Error); @@ -226,14 +226,15 @@ impl Handler { pub fn abort_if_errors(&self) { let s; match self.err_count.get() { - 0 => return, - 1 => s = "aborting due to previous error".to_string(), - _ => { - s = format!("aborting due to {} previous errors", - self.err_count.get()); - } + 0 => return, + 1 => s = "aborting due to previous error".to_string(), + _ => { + s = format!("aborting due to {} previous errors", + self.err_count.get()); + } } - self.fatal(&s[..]); + + panic!(self.fatal(&s[..])); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); diff --git a/syntex_syntax/src/diagnostics/plugin.rs b/syntex_syntax/src/diagnostics/plugin.rs index a276765e..be0d5729 100644 --- a/syntex_syntax/src/diagnostics/plugin.rs +++ b/syntex_syntax/src/diagnostics/plugin.rs @@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let code = match (token_tree.len(), token_tree.get(0)) { - (1, Some(&ast::TtToken(_, token::Ident(code, _)))) => code, + (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code, _ => unreachable!() }; @@ -92,12 +92,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, token_tree.get(1), token_tree.get(2) ) { - (1, Some(&ast::TtToken(_, token::Ident(ref code, _))), None, None) => { + (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => { (code, None) }, - (3, Some(&ast::TtToken(_, token::Ident(ref code, _))), - Some(&ast::TtToken(_, token::Comma)), - Some(&ast::TtToken(_, token::Literal(token::StrRaw(description, _), None)))) => { + (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))), + Some(&TokenTree::Token(_, token::Comma)), + Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => { (code, Some(description)) } _ => unreachable!() @@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, let (crate_name, name) = match (&token_tree[0], &token_tree[2]) { ( // Crate name. - &ast::TtToken(_, token::Ident(ref crate_name, _)), + &TokenTree::Token(_, token::Ident(ref crate_name, _)), // DIAGNOSTICS ident. 
- &ast::TtToken(_, token::Ident(ref name, _)) + &TokenTree::Token(_, token::Ident(ref name, _)) ) => (*&crate_name, name), _ => unreachable!() }; diff --git a/syntex_syntax/src/ext/asm.rs b/syntex_syntax/src/ext/asm.rs index 8b2196bc..b4b51188 100644 --- a/syntex_syntax/src/ext/asm.rs +++ b/syntex_syntax/src/ext/asm.rs @@ -80,7 +80,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, "malformed inline assembly"); return DummyResult::expr(sp); } - let (s, style) = match expr_to_string(cx, p.parse_expr(), + let (s, style) = match expr_to_string(cx, panictry!(p.parse_expr_nopanic()), "inline assembly must be a string literal") { Some((s, st)) => (s, st), // let compilation continue @@ -103,7 +103,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let span = p.last_span; panictry!(p.expect(&token::OpenDelim(token::Paren))); - let out = p.parse_expr(); + let out = panictry!(p.parse_expr_nopanic()); panictry!(p.expect(&token::CloseDelim(token::Paren))); // Expands a read+write operand into two operands. @@ -140,14 +140,14 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let (constraint, _str_style) = panictry!(p.parse_str()); - if constraint.starts_with("=") { + if constraint.starts_with("=") && !constraint.contains("*") { cx.span_err(p.last_span, "input operand constraint contains '='"); - } else if constraint.starts_with("+") { + } else if constraint.starts_with("+") && !constraint.contains("*") { cx.span_err(p.last_span, "input operand constraint contains '+'"); } panictry!(p.expect(&token::OpenDelim(token::Paren))); - let input = p.parse_expr(); + let input = panictry!(p.parse_expr_nopanic()); panictry!(p.expect(&token::CloseDelim(token::Paren))); inputs.push((constraint, input)); diff --git a/syntex_syntax/src/ext/base.rs b/syntex_syntax/src/ext/base.rs index 1a64bdcf..ea2a0378 100644 --- a/syntex_syntax/src/ext/base.rs +++ b/syntex_syntax/src/ext/base.rs @@ -356,7 +356,7 @@ impl DummyResult { pub fn raw_pat(sp: Span) -> ast::Pat { ast::Pat { id: ast::DUMMY_NODE_ID, - node: ast::PatWild(ast::PatWildSingle), + node: ast::PatWild, span: sp, } } @@ -811,7 +811,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt, cx.span_err(sp, &format!("{} takes 1 argument", name)); return None } - let ret = cx.expander().fold_expr(p.parse_expr()); + let ret = cx.expander().fold_expr(panictry!(p.parse_expr_nopanic())); if p.token != token::Eof { cx.span_err(sp, &format!("{} takes 1 argument", name)); } @@ -828,7 +828,7 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt, let mut p = cx.new_parser_from_tts(tts); let mut es = Vec::new(); while p.token != token::Eof { - es.push(cx.expander().fold_expr(p.parse_expr())); + es.push(cx.expander().fold_expr(panictry!(p.parse_expr_nopanic()))); if panictry!(p.eat(&token::Comma)){ continue; } diff --git a/syntex_syntax/src/ext/build.rs b/syntex_syntax/src/ext/build.rs index 16a5eb05..4c10a749 100644 --- a/syntex_syntax/src/ext/build.rs +++ b/syntex_syntax/src/ext/build.rs @@ -801,7 +801,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span }) } fn pat_wild(&self, span: Span) -> P { - self.pat(span, ast::PatWild(ast::PatWildSingle)) + self.pat(span, ast::PatWild) } fn pat_lit(&self, span: Span, expr: P) -> P { self.pat(span, ast::PatLit(expr)) diff --git a/syntex_syntax/src/ext/cfg.rs b/syntex_syntax/src/ext/cfg.rs index aa654e30..d354a4ae 100644 --- a/syntex_syntax/src/ext/cfg.rs +++ b/syntex_syntax/src/ext/cfg.rs 
@@ -19,7 +19,6 @@ use ext::base; use ext::build::AstBuilder; use attr; use attr::*; -use parse::attr::ParserAttr; use parse::token; pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, @@ -27,7 +26,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Box { let mut p = cx.new_parser_from_tts(tts); - let cfg = p.parse_meta_item(); + let cfg = panictry!(p.parse_meta_item()); if !panictry!(p.eat(&token::Eof)){ cx.span_err(sp, "expected 1 cfg-pattern"); diff --git a/syntex_syntax/src/ext/concat_idents.rs b/syntex_syntax/src/ext/concat_idents.rs index c31a7673..e9e36546 100644 --- a/syntex_syntax/src/ext/concat_idents.rs +++ b/syntex_syntax/src/ext/concat_idents.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, TokenTree}; use codemap::Span; use ext::base::*; use ext::base; @@ -17,7 +17,7 @@ use parse::token; use parse::token::str_to_ident; use ptr::P; -pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { if !cx.ecfg.enable_concat_idents() { feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, @@ -32,7 +32,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match *e { - ast::TtToken(_, token::Comma) => {}, + TokenTree::Token(_, token::Comma) => {}, _ => { cx.span_err(sp, "concat_idents! expecting comma."); return DummyResult::expr(sp); @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } else { match *e { - ast::TtToken(_, token::Ident(ident, _)) => { + TokenTree::Token(_, token::Ident(ident, _)) => { res_str.push_str(&ident.name.as_str()) }, _ => { diff --git a/syntex_syntax/src/ext/deriving/debug.rs b/syntex_syntax/src/ext/deriving/debug.rs index 537375f7..8a180e6c 100644 --- a/syntex_syntax/src/ext/deriving/debug.rs +++ b/syntex_syntax/src/ext/deriving/debug.rs @@ -72,34 +72,40 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let span = Span { expn_id: cx.backtrace(), .. 
span }; let name = cx.expr_lit(span, ast::Lit_::LitStr(ident.name.as_str(), ast::StrStyle::CookedStr)); - let mut expr = substr.nonself_args[0].clone(); + let builder = token::str_to_ident("builder"); + let builder_expr = cx.expr_ident(span, builder.clone()); - match *substr.fields { - Struct(ref fields) | EnumMatching(_, _, ref fields) => { + let fmt = substr.nonself_args[0].clone(); + let stmts = match *substr.fields { + Struct(ref fields) | EnumMatching(_, _, ref fields) => { + let mut stmts = vec![]; if fields.is_empty() || fields[0].name.is_none() { // tuple struct/"normal" variant - expr = cx.expr_method_call(span, - expr, - token::str_to_ident("debug_tuple"), - vec![name]); + let expr = cx.expr_method_call(span, + fmt, + token::str_to_ident("debug_tuple"), + vec![name]); + stmts.push(cx.stmt_let(span, true, builder, expr)); for field in fields { // Use double indirection to make sure this works for unsized types let field = cx.expr_addr_of(field.span, field.self_.clone()); let field = cx.expr_addr_of(field.span, field); - expr = cx.expr_method_call(span, - expr, - token::str_to_ident("field"), - vec![field]); + let expr = cx.expr_method_call(span, + builder_expr.clone(), + token::str_to_ident("field"), + vec![field]); + stmts.push(cx.stmt_expr(expr)); } } else { // normal struct/struct variant - expr = cx.expr_method_call(span, - expr, - token::str_to_ident("debug_struct"), - vec![name]); + let expr = cx.expr_method_call(span, + fmt, + token::str_to_ident("debug_struct"), + vec![name]); + stmts.push(cx.stmt_let(span, true, builder, expr)); for field in fields { let name = cx.expr_lit(field.span, ast::Lit_::LitStr( @@ -109,18 +115,23 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, // Use double indirection to make sure this works for unsized types let field = cx.expr_addr_of(field.span, field.self_.clone()); let field = cx.expr_addr_of(field.span, field); - expr = cx.expr_method_call(span, - expr, - token::str_to_ident("field"), - vec![name, field]); + let expr = cx.expr_method_call(span, + builder_expr.clone(), + token::str_to_ident("field"), + vec![name, field]); + stmts.push(cx.stmt_expr(expr)); } } + stmts } _ => unreachable!() - } + }; + + let expr = cx.expr_method_call(span, + builder_expr, + token::str_to_ident("finish"), + vec![]); - cx.expr_method_call(span, - expr, - token::str_to_ident("finish"), - vec![]) + let block = cx.block(span, stmts, Some(expr)); + cx.expr_block(block) } diff --git a/syntex_syntax/src/ext/expand.rs b/syntex_syntax/src/ext/expand.rs index d3a8ae4f..6406fa4f 100644 --- a/syntex_syntax/src/ext/expand.rs +++ b/syntex_syntax/src/ext/expand.rs @@ -408,9 +408,7 @@ pub fn expand_item_mac(it: P, } MacroRulesTT => { if ident.name == parse::token::special_idents::invalid.name { - fld.cx.span_err(path_span, - &format!("macro_rules! expects an ident argument") - ); + fld.cx.span_err(path_span, "macro_rules! 
expects an ident argument"); return SmallVector::zero(); } diff --git a/syntex_syntax/src/ext/format.rs b/syntex_syntax/src/ext/format.rs index cc2f94f0..c5634237 100644 --- a/syntex_syntax/src/ext/format.rs +++ b/syntex_syntax/src/ext/format.rs @@ -93,7 +93,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ecx.span_err(sp, "requires at least a format string argument"); return None; } - let fmtstr = p.parse_expr(); + let fmtstr = panictry!(p.parse_expr_nopanic()); let mut named = false; while p.token != token::Eof { if !panictry!(p.eat(&token::Comma)) { @@ -124,7 +124,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let name: &str = &ident.name.as_str(); panictry!(p.expect(&token::Eq)); - let e = p.parse_expr(); + let e = panictry!(p.parse_expr_nopanic()); match names.get(name) { None => {} Some(prev) => { @@ -138,7 +138,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) order.push(name.to_string()); names.insert(name.to_string(), e); } else { - args.push(p.parse_expr()); + args.push(panictry!(p.parse_expr_nopanic())); } } Some((fmtstr, args, order, names)) diff --git a/syntex_syntax/src/ext/quote.rs b/syntex_syntax/src/ext/quote.rs index 43c8d7af..7abc79f7 100644 --- a/syntex_syntax/src/ext/quote.rs +++ b/syntex_syntax/src/ext/quote.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, TokenTree}; use codemap::Span; use ext::base::ExtCtxt; use ext::base; @@ -71,67 +71,69 @@ pub mod rt { impl ToTokens for ast::Ident { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(DUMMY_SP, token::Ident(*self, token::Plain))] + vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))] } } impl ToTokens for ast::Path { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))] + vec![TokenTree::Token(DUMMY_SP, + token::Interpolated(token::NtPath(Box::new(self.clone()))))] } } impl ToTokens for ast::Ty { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] } } impl ToTokens for ast::Block { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] } } impl ToTokens for ast::Generics { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))] } } impl ToTokens for ast::WhereClause { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtWhereClause(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, + token::Interpolated(token::NtWhereClause(self.clone())))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(self.span, token::Interpolated(token::NtItem(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(self.span, token::Interpolated(token::NtImplItem(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))] 
} } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let mut tts = vec![ - ast::TtToken(self.span, token::Interpolated(token::NtStmt(self.clone()))) + TokenTree::Token(self.span, token::Interpolated(token::NtStmt(self.clone()))) ]; // Some statements require a trailing semicolon. if classify::stmt_ends_with_semi(&self.node) { - tts.push(ast::TtToken(self.span, token::Semi)); + tts.push(TokenTree::Token(self.span, token::Semi)); } tts @@ -140,19 +142,19 @@ pub mod rt { impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(self.span, token::Interpolated(token::NtExpr(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(self.span, token::Interpolated(token::NtPat(self.clone())))] + vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))] } } impl ToTokens for ast::Arm { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] } } @@ -173,12 +175,12 @@ pub mod rt { }; } - impl_to_tokens_slice! { ast::Ty, [ast::TtToken(DUMMY_SP, token::Comma)] } + impl_to_tokens_slice! { ast::Ty, [TokenTree::Token(DUMMY_SP, token::Comma)] } impl_to_tokens_slice! { P, [] } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] + vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] } } @@ -186,11 +188,11 @@ pub mod rt { fn to_tokens(&self, cx: &ExtCtxt) -> Vec { let mut r = vec![]; // FIXME: The spans could be better - r.push(ast::TtToken(self.span, token::Pound)); + r.push(TokenTree::Token(self.span, token::Pound)); if self.node.style == ast::AttrStyle::Inner { - r.push(ast::TtToken(self.span, token::Not)); + r.push(TokenTree::Token(self.span, token::Not)); } - r.push(ast::TtDelimited(self.span, Rc::new(ast::Delimited { + r.push(TokenTree::Delimited(self.span, Rc::new(ast::Delimited { delim: token::Bracket, open_span: self.span, tts: self.node.value.to_tokens(cx), @@ -210,7 +212,7 @@ pub mod rt { impl ToTokens for () { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![ast::TtDelimited(DUMMY_SP, Rc::new(ast::Delimited { + vec![TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited { delim: token::Paren, open_span: DUMMY_SP, tts: vec![], @@ -278,7 +280,7 @@ pub mod rt { fn parse_item(&self, s: String) -> P; fn parse_expr(&self, s: String) -> P; fn parse_stmt(&self, s: String) -> P; - fn parse_tts(&self, s: String) -> Vec; + fn parse_tts(&self, s: String) -> Vec; } impl<'a> ExtParseUtils for ExtCtxt<'a> { @@ -305,7 +307,7 @@ pub mod rt { self.parse_sess()) } - fn parse_tts(&self, s: String) -> Vec { + fn parse_tts(&self, s: String) -> Vec { parse::parse_tts_from_source_str("".to_string(), s, self.cfg(), @@ -316,7 +318,7 @@ pub mod rt { pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { let (cx_expr, expr) = expand_tts(cx, sp, tts); let expanded = expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]); @@ 
-325,57 +327,57 @@ pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_expr", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec!(), tts); base::MacEager::expr(expanded) } pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_item", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec!(), tts); base::MacEager::expr(expanded) } pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_pat", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec!(), tts); base::MacEager::expr(expanded) } pub fn expand_quote_arm(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_arm", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec!(), tts); base::MacEager::expr(expanded) } pub fn expand_quote_ty(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_ty", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec!(), tts); base::MacEager::expr(expanded) } pub fn expand_quote_stmt(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_stmt", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec!(), tts); base::MacEager::expr(expanded) } pub fn expand_quote_attr(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_attribute", + let expanded = expand_parse_call(cx, sp, "parse_attribute_panic", vec!(cx.expr_bool(sp, true)), tts); base::MacEager::expr(expanded) @@ -383,7 +385,7 @@ pub fn expand_quote_attr(cx: &mut ExtCtxt, pub fn expand_quote_matcher(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[TokenTree]) -> Box { let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); let mut vector = mk_stmts_let(cx, sp); @@ -423,6 +425,11 @@ fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { vec!(e_str)) } +fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { + let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext("TokenTree"), id_ext(name)); + cx.expr_path(cx.path_global(sp, idents)) +} + fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext(name)); cx.expr_path(cx.path_global(sp, idents)) @@ -591,9 +598,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { mk_token_path(cx, sp, name) } -fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec> { +fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec> { match *tt { - ast::TtToken(sp, SubstNt(ident, _)) => { + TokenTree::Token(sp, SubstNt(ident, _)) => { // tt.extend($ident.to_tokens(ext_cx)) let e_to_toks = @@ -612,17 +619,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec { + ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => { let mut seq = vec![]; for i in 0..tt.len() { seq.push(tt.get_tt(i)); } statements_mk_tts(cx, &seq[..], 
matcher) } - ast::TtToken(sp, ref tok) => { + TokenTree::Token(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call(sp, - mk_ast_path(cx, sp, "TtToken"), + mk_tt_path(cx, sp, "Token"), vec!(e_sp, expr_mk_token(cx, sp, tok))); let e_push = cx.expr_method_call(sp, @@ -631,16 +638,16 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec { + TokenTree::Delimited(_, ref delimed) => { statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter() .chain(delimed.tts.iter() .flat_map(|tt| statements_mk_tt(cx, tt, matcher))) .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher)) .collect() }, - ast::TtSequence(sp, ref seq) => { + TokenTree::Sequence(sp, ref seq) => { if !matcher { - panic!("TtSequence in quote!"); + panic!("TokenTree::Sequence in quote!"); } let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -671,7 +678,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec Vec (P, Vec) { +fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree]) + -> (P, Vec) { // NB: It appears that the main parser loses its mind if we consider // $foo as a SubstNt during the main parse, so we have to re-parse // under quote_depth > 0. This is silly and should go away; the _guess_ is @@ -694,7 +701,7 @@ fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree]) let mut p = cx.new_parser_from_tts(tts); p.quote_depth += 1; - let cx_expr = p.parse_expr(); + let cx_expr = panictry!(p.parse_expr_nopanic()); if !panictry!(p.eat(&token::Comma)) { panic!(p.fatal("expected token `,`")); } @@ -746,7 +753,7 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec> { vec!(stmt_let_sp, stmt_let_tt) } -fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec> { +fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec> { let mut ss = Vec::new(); for tt in tts { ss.extend(statements_mk_tt(cx, tt, matcher)); @@ -754,7 +761,7 @@ fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec ss } -fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P, P) { let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); @@ -790,7 +797,7 @@ fn expand_parse_call(cx: &ExtCtxt, sp: Span, parse_method: &str, arg_exprs: Vec> , - tts: &[ast::TokenTree]) -> P { + tts: &[TokenTree]) -> P { let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); let cfg_call = || cx.expr_method_call( diff --git a/syntex_syntax/src/ext/source_util.rs b/syntex_syntax/src/ext/source_util.rs index 25063e7b..72ba7359 100644 --- a/syntex_syntax/src/ext/source_util.rs +++ b/syntex_syntax/src/ext/source_util.rs @@ -109,13 +109,13 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree } impl<'a> base::MacResult for ExpandResult<'a> { fn make_expr(mut self: Box>) -> Option> { - Some(self.p.parse_expr()) + Some(panictry!(self.p.parse_expr_nopanic())) } fn make_items(mut self: Box>) -> Option>> { let mut ret = SmallVector::zero(); while self.p.token != token::Eof { - match self.p.parse_item() { + match panictry!(self.p.parse_item_nopanic()) { Some(item) => ret.push(item), None => panic!(self.p.span_fatal( self.p.span, diff --git a/syntex_syntax/src/ext/trace_macros.rs b/syntex_syntax/src/ext/trace_macros.rs index ab34f41d..628b88d1 100644 --- a/syntex_syntax/src/ext/trace_macros.rs +++ b/syntex_syntax/src/ext/trace_macros.rs @@ -8,7 +8,7 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::TokenTree; use codemap::Span; use ext::base::ExtCtxt; use ext::base; @@ -18,7 +18,7 @@ use parse::token::keywords; pub fn expand_trace_macros(cx: &mut ExtCtxt, sp: Span, - tt: &[ast::TokenTree]) + tt: &[TokenTree]) -> Box { if !cx.ecfg.enable_trace_macros() { feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, @@ -30,10 +30,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, } match (tt.len(), tt.first()) { - (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::True) => { + (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::True) => { cx.set_trace_macros(true); } - (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::False) => { + (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::False) => { cx.set_trace_macros(false); } _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"), diff --git a/syntex_syntax/src/ext/tt/macro_parser.rs b/syntex_syntax/src/ext/tt/macro_parser.rs index 8dec9ae1..0e69edd7 100644 --- a/syntex_syntax/src/ext/tt/macro_parser.rs +++ b/syntex_syntax/src/ext/tt/macro_parser.rs @@ -80,12 +80,10 @@ use self::TokenTreeOrTokenTreeVec::*; use ast; use ast::{TokenTree, Name}; -use ast::{TtDelimited, TtSequence, TtToken}; use codemap::{BytePos, mk_sp, Span}; use codemap; use parse::lexer::*; //resolve bug? use parse::ParseSess; -use parse::attr::ParserAttr; use parse::parser::{LifetimeAndTypesWithoutColons, Parser}; use parse::token::{Eof, DocComment, MatchNt, SubstNt}; use parse::token::{Token, Nonterminal}; @@ -147,16 +145,16 @@ pub struct MatcherPos { pub fn count_names(ms: &[TokenTree]) -> usize { ms.iter().fold(0, |count, elt| { count + match elt { - &TtSequence(_, ref seq) => { + &TokenTree::Sequence(_, ref seq) => { seq.num_captures } - &TtDelimited(_, ref delim) => { + &TokenTree::Delimited(_, ref delim) => { count_names(&delim.tts) } - &TtToken(_, MatchNt(..)) => { + &TokenTree::Token(_, MatchNt(..)) => { 1 } - &TtToken(_, _) => 0, + &TokenTree::Token(_, _) => 0, } }) } @@ -206,17 +204,17 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc], ret_val: &mut HashMap>, idx: &mut usize) { match m { - &TtSequence(_, ref seq) => { + &TokenTree::Sequence(_, ref seq) => { for next_m in &seq.tts { n_rec(p_s, next_m, res, ret_val, idx) } } - &TtDelimited(_, ref delim) => { + &TokenTree::Delimited(_, ref delim) => { for next_m in &delim.tts { n_rec(p_s, next_m, res, ret_val, idx) } } - &TtToken(sp, MatchNt(bind_name, _, _, _)) => { + &TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => { match ret_val.entry(bind_name.name) { Vacant(spot) => { spot.insert(res[*idx].clone()); @@ -230,8 +228,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) } } } - &TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"), - &TtToken(_, _) => (), + &TokenTree::Token(_, SubstNt(..)) => panic!("Cannot fill in a NT"), + &TokenTree::Token(_, _) => (), } } let mut ret_val = HashMap::new(); @@ -363,7 +361,7 @@ pub fn parse(sess: &ParseSess, } else { match ei.top_elts.get_tt(idx) { /* need to descend into sequence */ - TtSequence(sp, seq) => { + TokenTree::Sequence(sp, seq) => { if seq.op == ast::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; @@ -389,10 +387,10 @@ pub fn parse(sess: &ParseSess, match_hi: ei_t.match_cur + seq.num_captures, up: Some(ei_t), sp_lo: sp.lo, - top_elts: Tt(TtSequence(sp, seq)), 
+ top_elts: Tt(TokenTree::Sequence(sp, seq)), })); } - TtToken(_, MatchNt(..)) => { + TokenTree::Token(_, MatchNt(..)) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. match tok { @@ -400,10 +398,10 @@ pub fn parse(sess: &ParseSess, _ => bb_eis.push(ei), } } - TtToken(sp, SubstNt(..)) => { + TokenTree::Token(sp, SubstNt(..)) => { return Error(sp, "missing fragment specifier".to_string()) } - seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => { + seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq)); let idx = ei.idx; ei.stack.push(MatcherTtFrame { @@ -413,7 +411,7 @@ pub fn parse(sess: &ParseSess, ei.idx = 0; cur_eis.push(ei); } - TtToken(_, ref t) => { + TokenTree::Token(_, ref t) => { let mut ei_t = ei.clone(); if token_name_eq(t,&tok) { ei_t.idx += 1; @@ -441,7 +439,7 @@ pub fn parse(sess: &ParseSess, if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 { let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) { - TtToken(_, MatchNt(bind, name, _, _)) => { + TokenTree::Token(_, MatchNt(bind, name, _, _)) => { format!("{} ('{}')", name, bind) } _ => panic!() @@ -469,7 +467,7 @@ pub fn parse(sess: &ParseSess, let mut ei = bb_eis.pop().unwrap(); match ei.top_elts.get_tt(ei.idx) { - TtToken(span, MatchNt(_, ident, _, _)) => { + TokenTree::Token(span, MatchNt(_, ident, _, _)) => { let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut rust_parser, span, &ident.name.as_str())))); @@ -503,18 +501,18 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal { // check at the beginning and the parser checks after each bump panictry!(p.check_unknown_macro_variable()); match name { - "item" => match p.parse_item() { + "item" => match panictry!(p.parse_item_nopanic()) { Some(i) => token::NtItem(i), None => panic!(p.fatal("expected an item keyword")) }, "block" => token::NtBlock(panictry!(p.parse_block())), - "stmt" => match p.parse_stmt() { + "stmt" => match panictry!(p.parse_stmt_nopanic()) { Some(s) => token::NtStmt(s), None => panic!(p.fatal("expected a statement")) }, - "pat" => token::NtPat(p.parse_pat()), - "expr" => token::NtExpr(p.parse_expr()), - "ty" => token::NtTy(p.parse_ty()), + "pat" => token::NtPat(panictry!(p.parse_pat_nopanic())), + "expr" => token::NtExpr(panictry!(p.parse_expr_nopanic())), + "ty" => token::NtTy(panictry!(p.parse_ty_nopanic())), // this could be handled like a token, since it is one "ident" => match p.token { token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(Box::new(sn),b) } @@ -527,7 +525,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal { "path" => { token::NtPath(Box::new(panictry!(p.parse_path(LifetimeAndTypesWithoutColons)))) }, - "meta" => token::NtMeta(p.parse_meta_item()), + "meta" => token::NtMeta(panictry!(p.parse_meta_item())), _ => { panic!(p.span_fatal_help(sp, &format!("invalid fragment specifier `{}`", name), diff --git a/syntex_syntax/src/ext/tt/macro_rules.rs b/syntex_syntax/src/ext/tt/macro_rules.rs index cce4450b..4e5825d1 100644 --- a/syntex_syntax/src/ext/tt/macro_rules.rs +++ b/syntex_syntax/src/ext/tt/macro_rules.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken}; +use ast::{self, TokenTree}; use codemap::{Span, DUMMY_SP}; use ext::base::{ExtCtxt, MacResult, SyntaxExtension}; use ext::base::{NormalTT, TTMacroExpander}; @@ -26,6 +26,7 @@ use util::small_vector::SmallVector; use std::cell::RefCell; use std::rc::Rc; +use std::iter::once; struct ParserAnyMacro<'a> { parser: RefCell>, @@ -66,18 +67,18 @@ impl<'a> ParserAnyMacro<'a> { impl<'a> MacResult for ParserAnyMacro<'a> { fn make_expr(self: Box>) -> Option> { - let ret = self.parser.borrow_mut().parse_expr(); + let ret = panictry!(self.parser.borrow_mut().parse_expr_nopanic()); self.ensure_complete_parse(true); Some(ret) } fn make_pat(self: Box>) -> Option> { - let ret = self.parser.borrow_mut().parse_pat(); + let ret = panictry!(self.parser.borrow_mut().parse_pat_nopanic()); self.ensure_complete_parse(false); Some(ret) } fn make_items(self: Box>) -> Option>> { let mut ret = SmallVector::zero(); - while let Some(item) = self.parser.borrow_mut().parse_item() { + while let Some(item) = panictry!(self.parser.borrow_mut().parse_item_nopanic()) { ret.push(item); } self.ensure_complete_parse(false); @@ -119,7 +120,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> { } fn make_ty(self: Box>) -> Option> { - let ret = self.parser.borrow_mut().parse_ty(); + let ret = panictry!(self.parser.borrow_mut().parse_ty_nopanic()); self.ensure_complete_parse(true); Some(ret) } @@ -171,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => &delim.tts[..], + TokenTree::Delimited(_, ref delim) => &delim.tts[..], _ => panic!(cx.span_fatal(sp, "malformed macro lhs")) }; @@ -182,7 +183,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, MatchedNonterminal(NtTT(ref tt)) => { match **tt { // ignore delimiters - TtDelimited(_, ref delimed) => delimed.tts.clone(), + TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")), } }, @@ -243,21 +244,21 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain); let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain); let argument_gram = vec!( - TtSequence(DUMMY_SP, + TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { tts: vec![ - TtToken(DUMMY_SP, match_lhs_tok), - TtToken(DUMMY_SP, token::FatArrow), - TtToken(DUMMY_SP, match_rhs_tok)], + TokenTree::Token(DUMMY_SP, match_lhs_tok), + TokenTree::Token(DUMMY_SP, token::FatArrow), + TokenTree::Token(DUMMY_SP, match_rhs_tok)], separator: Some(token::Semi), op: ast::OneOrMore, num_captures: 2 })), //to phase into semicolon-termination instead of //semicolon-separation - TtSequence(DUMMY_SP, + TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { - tts: vec![TtToken(DUMMY_SP, token::Semi)], + tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, op: ast::ZeroOrMore, num_captures: 0 @@ -307,14 +308,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, } fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) { - // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where the entire lhs is - // those tts. Or, it can be a "bare sequence", not wrapped in parens. + // lhs is going to be like MatchedNonterminal(NtTT(TokenTree::Delimited(...))), where the + // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. 
match lhs { &MatchedNonterminal(NtTT(ref inner)) => match &**inner { - &TtDelimited(_, ref tts) => { + &TokenTree::Delimited(_, ref tts) => { check_matcher(cx, tts.tts.iter(), &Eof); }, - tt @ &TtSequence(..) => { + tt @ &TokenTree::Sequence(..) => { check_matcher(cx, Some(tt).into_iter(), &Eof); }, _ => cx.span_err(sp, "Invalid macro matcher; matchers must be contained \ @@ -327,7 +328,7 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) { // after parsing/expansion. we can report every error in every macro this way. } -// returns the last token that was checked, for TtSequence. this gets used later on. +// returns the last token that was checked, for TokenTree::Sequence. this gets used later on. fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) -> Option<(Span, Token)> where I: Iterator { use print::pprust::token_to_string; @@ -338,7 +339,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) let mut tokens = matcher.peekable(); while let Some(token) = tokens.next() { last = match *token { - TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => { + TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => { // ii. If T is a simple NT, look ahead to the next token T' in // M. If T' is in the set FOLLOW(NT), continue. Else; reject. if can_be_followed_by_any(&frag_spec.name.as_str()) { @@ -346,9 +347,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } else { let next_token = match tokens.peek() { // If T' closes a complex NT, replace T' with F - Some(&&TtToken(_, CloseDelim(_))) => follow.clone(), - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtSequence(sp, _)) => { + Some(&&TokenTree::Token(_, CloseDelim(_))) => follow.clone(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Sequence(sp, _)) => { // Be conservative around sequences: to be // more specific, we would need to // consider FIRST sets, but also the @@ -366,12 +367,16 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) Eof }, // die next iteration - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(), // else, we're at the end of the macro or sequence None => follow.clone() }; - let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() }; + let tok = if let TokenTree::Token(_, ref tok) = *token { + tok + } else { + unreachable!() + }; // If T' is in the set FOLLOW(NT), continue. Else, reject. match (&next_token, is_in_follow(cx, &next_token, &frag_spec.name.as_str())) { @@ -391,7 +396,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } } }, - TtSequence(sp, ref seq) => { + TokenTree::Sequence(sp, ref seq) => { // iii. Else, T is a complex NT. match seq.separator { // If T has the form $(...)U+ or $(...)U* for some token U, @@ -408,8 +413,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) // but conservatively correct. 
Some((span, tok)) => { let fol = match tokens.peek() { - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Delimited(_, ref delim)) => + delim.close_token(), Some(_) => { cx.span_err(sp, "sequence repetition followed by \ another sequence repetition, which is not allowed"); @@ -417,7 +423,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) }, None => Eof }; - check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(), + check_matcher(cx, once(&TokenTree::Token(span, tok.clone())), &fol) }, None => last, @@ -428,8 +434,8 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) // sequence. If it accepts, continue, else, reject. None => { let fol = match tokens.peek() { - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(), Some(_) => { cx.span_err(sp, "sequence repetition followed by another \ sequence repetition, which is not allowed"); @@ -441,11 +447,11 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } } }, - TtToken(..) => { + TokenTree::Token(..) => { // i. If T is not an NT, continue. continue }, - TtDelimited(_, ref tts) => { + TokenTree::Delimited(_, ref tts) => { // if we don't pass in that close delimiter, we'll incorrectly consider the matcher // `{ $foo:ty }` as having a follow that isn't `RBrace` check_matcher(cx, tts.tts.iter(), &tts.close_token()) diff --git a/syntex_syntax/src/ext/tt/transcribe.rs b/syntex_syntax/src/ext/tt/transcribe.rs index d1e48eda..0fc31f3f 100644 --- a/syntex_syntax/src/ext/tt/transcribe.rs +++ b/syntex_syntax/src/ext/tt/transcribe.rs @@ -10,7 +10,7 @@ use self::LockstepIterSize::*; use ast; -use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name}; +use ast::{TokenTree, Ident, Name}; use codemap::{Span, DUMMY_SP}; use diagnostic::SpanHandler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; @@ -53,7 +53,7 @@ pub struct TtReader<'a> { } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// (and should) be None. pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, interp: Option>>, @@ -67,7 +67,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, /// like any other attribute which consists of `meta` and surrounding #[ ] tokens. /// /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// (and should) be None. pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, interp: Option>>, @@ -78,7 +78,7 @@ pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, let mut r = TtReader { sp_diag: sp_diag, stack: vec!(TtFrame { - forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { + forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { tts: src, // doesn't matter. This merely holds the root unzipping. 
separator: None, op: ast::ZeroOrMore, num_captures: 0 @@ -151,17 +151,17 @@ impl Add for LockstepIterSize { fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { match *t { - TtDelimited(_, ref delimed) => { + TokenTree::Delimited(_, ref delimed) => { delimed.tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TtSequence(_, ref seq) => { + TokenTree::Sequence(_, ref seq) => { seq.tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) => + TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) => match lookup_cur_matched(r, name) { Some(matched) => match *matched { MatchedNonterminal(_) => LisUnconstrained, @@ -169,7 +169,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { }, _ => LisUnconstrained }, - TtToken(..) => LisUnconstrained, + TokenTree::Token(..) => LisUnconstrained, } } @@ -232,17 +232,17 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - loop { /* because it's easiest, this handles `TtDelimited` not starting - with a `TtToken`, even though it won't happen */ + loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting + with a `TokenTree::Token`, even though it won't happen */ let t = { let frame = r.stack.last().unwrap(); // FIXME(pcwalton): Bad copy. frame.forest.get_tt(frame.idx) }; match t { - TtSequence(sp, seq) => { + TokenTree::Sequence(sp, seq) => { // FIXME(pcwalton): Bad copy. - match lockstep_iter_size(&TtSequence(sp, seq.clone()), + match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()), r) { LisUnconstrained => { panic!(r.sp_diag.span_fatal( @@ -272,20 +272,20 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { idx: 0, dotdotdoted: true, sep: seq.separator.clone(), - forest: TtSequence(sp, seq), + forest: TokenTree::Sequence(sp, seq), }); } } } // FIXME #2887: think about span stuff here - TtToken(sp, SubstNt(ident, namep)) => { + TokenTree::Token(sp, SubstNt(ident, namep)) => { r.stack.last_mut().unwrap().idx += 1; match lookup_cur_matched(r, ident) { None => { r.cur_span = sp; r.cur_tok = SubstNt(ident, namep); return ret_val; - // this can't be 0 length, just like TtDelimited + // this can't be 0 length, just like TokenTree::Delimited } Some(cur_matched) => { match *cur_matched { @@ -313,8 +313,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - // TtDelimited or any token that can be unzipped - seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => { + // TokenTree::Delimited or any token that can be unzipped + seq @ TokenTree::Delimited(..) 
| seq @ TokenTree::Token(_, MatchNt(..)) => { // do not advance the idx yet r.stack.push(TtFrame { forest: seq, @@ -324,15 +324,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { }); // if this could be 0-length, we'd need to potentially recur here } - TtToken(sp, DocComment(name)) if r.desugar_doc_comments => { + TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => { r.stack.push(TtFrame { - forest: TtToken(sp, DocComment(name)), + forest: TokenTree::Token(sp, DocComment(name)), idx: 0, dotdotdoted: false, sep: None }); } - TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => { + TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => { r.stack.last_mut().unwrap().idx += 1; if r.imported_from.is_some() { @@ -344,7 +344,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { // otherwise emit nothing and proceed to the next token } - TtToken(sp, tok) => { + TokenTree::Token(sp, tok) => { r.cur_span = sp; r.cur_tok = tok; r.stack.last_mut().unwrap().idx += 1; diff --git a/syntex_syntax/src/fold.rs b/syntex_syntax/src/fold.rs index 282c69d2..a1a237a3 100644 --- a/syntex_syntax/src/fold.rs +++ b/syntex_syntax/src/fold.rs @@ -579,10 +579,10 @@ pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { match *tt { - TtToken(span, ref tok) => - TtToken(span, fld.fold_token(tok.clone())), - TtDelimited(span, ref delimed) => { - TtDelimited(span, Rc::new( + TokenTree::Token(span, ref tok) => + TokenTree::Token(span, fld.fold_token(tok.clone())), + TokenTree::Delimited(span, ref delimed) => { + TokenTree::Delimited(span, Rc::new( Delimited { delim: delimed.delim, open_span: delimed.open_span, @@ -591,8 +591,8 @@ pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { } )) }, - TtSequence(span, ref seq) => - TtSequence(span, + TokenTree::Sequence(span, ref seq) => + TokenTree::Sequence(span, Rc::new(SequenceRepetition { tts: fld.fold_tts(&seq.tts), separator: seq.separator.clone().map(|tok| fld.fold_token(tok)), @@ -1118,7 +1118,7 @@ pub fn noop_fold_pat(p: P, folder: &mut T) -> P { p.map(|Pat {id, node, span}| Pat { id: folder.new_id(id), node: match node { - PatWild(k) => PatWild(k), + PatWild => PatWild, PatIdent(binding_mode, pth1, sub) => { PatIdent(binding_mode, Spanned{span: folder.new_span(pth1.span), diff --git a/syntex_syntax/src/parse/attr.rs b/syntex_syntax/src/parse/attr.rs index 21936009..5df2478d 100644 --- a/syntex_syntax/src/parse/attr.rs +++ b/syntex_syntax/src/parse/attr.rs @@ -12,30 +12,21 @@ use attr; use ast; use codemap::{spanned, Spanned, mk_sp, Span}; use parse::common::*; //resolve bug? +use parse::PResult; use parse::token; use parse::parser::{Parser, TokenType}; use ptr::P; -/// A parser that can parse attributes. 
-pub trait ParserAttr { - fn parse_outer_attributes(&mut self) -> Vec; - fn parse_inner_attributes(&mut self) -> Vec; - fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute; - fn parse_meta_item(&mut self) -> P; - fn parse_meta_seq(&mut self) -> Vec>; - fn parse_optional_meta(&mut self) -> Vec>; -} - -impl<'a> ParserAttr for Parser<'a> { +impl<'a> Parser<'a> { /// Parse attributes that appear before an item - fn parse_outer_attributes(&mut self) -> Vec { + pub fn parse_outer_attributes(&mut self) -> PResult> { let mut attrs: Vec = Vec::new(); loop { debug!("parse_outer_attributes: self.token={:?}", self.token); match self.token { token::Pound => { - attrs.push(self.parse_attribute(false)); + attrs.push(try!(self.parse_attribute(false))); } token::DocComment(s) => { let attr = ::attr::mk_sugared_doc_attr( @@ -45,32 +36,32 @@ impl<'a> ParserAttr for Parser<'a> { self.span.hi ); if attr.node.style != ast::AttrStyle::Outer { - panic!(self.fatal("expected outer comment")); + return Err(self.fatal("expected outer comment")); } attrs.push(attr); - panictry!(self.bump()); + try!(self.bump()); } _ => break } } - return attrs; + return Ok(attrs); } /// Matches `attribute = # ! [ meta_item ]` /// /// If permit_inner is true, then a leading `!` indicates an inner /// attribute - fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute { + pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult { debug!("parse_attributes: permit_inner={:?} self.token={:?}", permit_inner, self.token); let (span, value, mut style) = match self.token { token::Pound => { let lo = self.span.lo; - panictry!(self.bump()); + try!(self.bump()); if permit_inner { self.expected_tokens.push(TokenType::Token(token::Not)); } let style = if self.token == token::Not { - panictry!(self.bump()); + try!(self.bump()); if !permit_inner { let span = self.span; self.span_err(span, @@ -84,27 +75,27 @@ impl<'a> ParserAttr for Parser<'a> { ast::AttrStyle::Outer }; - panictry!(self.expect(&token::OpenDelim(token::Bracket))); - let meta_item = self.parse_meta_item(); + try!(self.expect(&token::OpenDelim(token::Bracket))); + let meta_item = try!(self.parse_meta_item()); let hi = self.span.hi; - panictry!(self.expect(&token::CloseDelim(token::Bracket))); + try!(self.expect(&token::CloseDelim(token::Bracket))); (mk_sp(lo, hi), meta_item, style) } _ => { let token_str = self.this_token_to_string(); - panic!(self.fatal(&format!("expected `#`, found `{}`", token_str))); + return Err(self.fatal(&format!("expected `#`, found `{}`", token_str))); } }; if permit_inner && self.token == token::Semi { - panictry!(self.bump()); + try!(self.bump()); self.span_warn(span, "this inner attribute syntax is deprecated. \ The new syntax is `#![foo]`, with a bang and no semicolon"); style = ast::AttrStyle::Inner; } - return Spanned { + Ok(Spanned { span: span, node: ast::Attribute_ { id: attr::mk_attr_id(), @@ -112,7 +103,7 @@ impl<'a> ParserAttr for Parser<'a> { value: value, is_sugared_doc: false } - }; + }) } /// Parse attributes that appear after the opening of an item. These should @@ -120,7 +111,7 @@ impl<'a> ParserAttr for Parser<'a> { /// terminated by a semicolon. 
/// matches inner_attrs* - fn parse_inner_attributes(&mut self) -> Vec<ast::Attribute> { + pub fn parse_inner_attributes(&mut self) -> PResult<Vec<ast::Attribute>> { let mut attrs: Vec<ast::Attribute> = vec![]; loop { match self.token { @@ -130,7 +121,7 @@ impl<'a> ParserAttr for Parser<'a> { break; } - let attr = self.parse_attribute(true); + let attr = try!(self.parse_attribute(true)); assert!(attr.node.style == ast::AttrStyle::Inner); attrs.push(attr); } @@ -141,7 +132,7 @@ impl<'a> ParserAttr for Parser<'a> { let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi); if attr.node.style == ast::AttrStyle::Inner { attrs.push(attr); - panictry!(self.bump()); + try!(self.bump()); } else { break; } @@ -149,13 +140,13 @@ impl<'a> ParserAttr for Parser<'a> { _ => break } } - attrs + Ok(attrs) } /// matches meta_item = IDENT /// | IDENT = lit /// | IDENT meta_seq - fn parse_meta_item(&mut self) -> P<ast::MetaItem> { + pub fn parse_meta_item(&mut self) -> PResult<P<ast::MetaItem>> { let nt_meta = match self.token { token::Interpolated(token::NtMeta(ref e)) => { Some(e.clone()) } _ => None }; match nt_meta { Some(meta) => { - panictry!(self.bump()); - return meta; + try!(self.bump()); + return Ok(meta); } None => {} } let lo = self.span.lo; - let ident = panictry!(self.parse_ident()); + let ident = try!(self.parse_ident()); let name = self.id_to_interned_str(ident); match self.token { token::Eq => { - panictry!(self.bump()); - let lit = panictry!(self.parse_lit()); + try!(self.bump()); + let lit = try!(self.parse_lit()); // FIXME #623 Non-string meta items are not serialized correctly; // just forbid them for now match lit.node { @@ -189,32 +180,25 @@ impl<'a> ParserAttr for Parser<'a> { } } let hi = self.span.hi; - P(spanned(lo, hi, ast::MetaNameValue(name, lit))) + Ok(P(spanned(lo, hi, ast::MetaNameValue(name, lit)))) } token::OpenDelim(token::Paren) => { - let inner_items = self.parse_meta_seq(); + let inner_items = try!(self.parse_meta_seq()); let hi = self.span.hi; - P(spanned(lo, hi, ast::MetaList(name, inner_items))) + Ok(P(spanned(lo, hi, ast::MetaList(name, inner_items)))) } _ => { let hi = self.last_span.hi; - P(spanned(lo, hi, ast::MetaWord(name))) + Ok(P(spanned(lo, hi, ast::MetaWord(name)))) } } } /// matches meta_seq = ( COMMASEP(meta_item) ) - fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> { - panictry!(self.parse_seq(&token::OpenDelim(token::Paren), - &token::CloseDelim(token::Paren), - seq_sep_trailing_allowed(token::Comma), - |p| Ok(p.parse_meta_item()))).node - } - - fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> { - match self.token { - token::OpenDelim(token::Paren) => self.parse_meta_seq(), - _ => Vec::new() - } + fn parse_meta_seq(&mut self) -> PResult<Vec<P<ast::MetaItem>>> { + self.parse_unspanned_seq(&token::OpenDelim(token::Paren), + &token::CloseDelim(token::Paren), + seq_sep_trailing_allowed(token::Comma), + |p| p.parse_meta_item()) } } diff --git a/syntex_syntax/src/parse/lexer/comments.rs b/syntex_syntax/src/parse/lexer/comments.rs index 137996a3..e5e2c3a9 100644 --- a/syntex_syntax/src/parse/lexer/comments.rs +++ b/syntex_syntax/src/parse/lexer/comments.rs @@ -270,7 +270,7 @@ fn read_block_comment(rdr: &mut StringReader, while level > 0 { debug!("=== block comment level {}", level); if rdr.is_eof() { - rdr.fatal("unterminated block comment"); + panic!(rdr.fatal("unterminated block comment")); } if rdr.curr_is('\n') { trim_whitespace_prefix_and_push_line(&mut lines, diff --git a/syntex_syntax/src/parse/lexer/mod.rs b/syntex_syntax/src/parse/lexer/mod.rs index e25b240d..84c8f8a5 100644 --- a/syntex_syntax/src/parse/lexer/mod.rs
+++ b/syntex_syntax/src/parse/lexer/mod.rs @@ -11,6 +11,7 @@ use ast; use codemap::{BytePos, CharPos, CodeMap, Pos, Span}; use codemap; +use diagnostic::FatalError; use diagnostic::SpanHandler; use ext::tt::transcribe::tt_next_token; use parse::token::str_to_ident; @@ -32,7 +33,7 @@ pub trait Reader { fn is_eof(&self) -> bool; fn next_token(&mut self) -> TokenAndSpan; /// Report a fatal error with the current span. - fn fatal(&self, &str) -> !; + fn fatal(&self, &str) -> FatalError; /// Report a non-fatal error with the current span. fn err(&self, &str); fn peek(&self) -> TokenAndSpan; @@ -88,7 +89,7 @@ impl<'a> Reader for StringReader<'a> { self.advance_token(); ret_val } - fn fatal(&self, m: &str) -> ! { + fn fatal(&self, m: &str) -> FatalError { self.fatal_span(self.peek_span, m) } fn err(&self, m: &str) { @@ -112,8 +113,8 @@ impl<'a> Reader for TtReader<'a> { debug!("TtReader: r={:?}", r); r } - fn fatal(&self, m: &str) -> ! { - panic!(self.sp_diag.span_fatal(self.cur_span, m)); + fn fatal(&self, m: &str) -> FatalError { + self.sp_diag.span_fatal(self.cur_span, m) } fn err(&self, m: &str) { self.sp_diag.span_err(self.cur_span, m); @@ -165,8 +166,8 @@ impl<'a> StringReader<'a> { } /// Report a fatal lexical error with a given span. - pub fn fatal_span(&self, sp: Span, m: &str) -> ! { - panic!(self.span_diagnostic.span_fatal(sp, m)) + pub fn fatal_span(&self, sp: Span, m: &str) -> FatalError { + self.span_diagnostic.span_fatal(sp, m) } /// Report a lexical error with a given span. @@ -180,7 +181,7 @@ impl<'a> StringReader<'a> { } /// Report a fatal error spanning [`from_pos`, `to_pos`). - fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> ! { + fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { self.fatal_span(codemap::mk_sp(from_pos, to_pos), m) } @@ -196,11 +197,11 @@ impl<'a> StringReader<'a> { /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an /// escaped character to the error message - fn fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> ! { + fn fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> FatalError { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, &m[..]); + self.fatal_span_(from_pos, to_pos, &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -214,12 +215,12 @@ impl<'a> StringReader<'a> { /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the /// offending string to the error message - fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) -> ! 
{ + fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) -> FatalError { m.push_str(": "); let from = self.byte_offset(from_pos).to_usize(); let to = self.byte_offset(to_pos).to_usize(); m.push_str(&self.source_text[from..to]); - self.fatal_span_(from_pos, to_pos, &m[..]); + self.fatal_span_(from_pos, to_pos, &m[..]) } /// Advance peek_tok and peek_span to refer to the next token, and @@ -540,7 +541,7 @@ impl<'a> StringReader<'a> { "unterminated block comment" }; let last_bpos = self.last_pos; - self.fatal_span_(start_bpos, last_bpos, msg); + panic!(self.fatal_span_(start_bpos, last_bpos, msg)); } let n = self.curr.unwrap(); match n { @@ -685,7 +686,9 @@ impl<'a> StringReader<'a> { for _ in 0..n_digits { if self.is_eof() { let last_bpos = self.last_pos; - self.fatal_span_(start_bpos, last_bpos, "unterminated numeric character escape"); + panic!(self.fatal_span_(start_bpos, + last_bpos, + "unterminated numeric character escape")); } if self.curr_is(delim) { let last_bpos = self.last_pos; @@ -838,15 +841,15 @@ impl<'a> StringReader<'a> { let c = match self.curr { Some(c) => c, None => { - self.fatal_span_(start_bpos, self.last_pos, - "unterminated unicode escape (found EOF)"); + panic!(self.fatal_span_(start_bpos, self.last_pos, + "unterminated unicode escape (found EOF)")); } }; accum_int *= 16; accum_int += c.to_digit(16).unwrap_or_else(|| { if c == delim { - self.fatal_span_(self.last_pos, self.pos, - "unterminated unicode escape (needed a `}`)"); + panic!(self.fatal_span_(self.last_pos, self.pos, + "unterminated unicode escape (needed a `}`)")); } else { self.err_span_char(self.last_pos, self.pos, "invalid character in unicode escape", c); @@ -1080,12 +1083,13 @@ impl<'a> StringReader<'a> { let valid = self.scan_char_or_byte(start, c2, /* ascii_only = */ false, '\''); if !self.curr_is('\'') { let last_bpos = self.last_pos; - self.fatal_span_verbose( - // Byte offsetting here is okay because the - // character before position `start` is an - // ascii single quote. - start - BytePos(1), last_bpos, - "unterminated character constant".to_string()); + panic!(self.fatal_span_verbose( + // Byte offsetting here is okay because the + // character before position `start` is an + // ascii single quote. 
+ start - BytePos(1), last_bpos, + + String::from("character literal may only contain one codepoint"))); } let id = if valid { self.name_from(start) } else { token::intern("0") }; self.bump(); // advance curr past token @@ -1110,7 +1114,9 @@ impl<'a> StringReader<'a> { while !self.curr_is('"') { if self.is_eof() { let last_bpos = self.last_pos; - self.fatal_span_(start_bpos, last_bpos, "unterminated double quote string"); + panic!(self.fatal_span_(start_bpos, + last_bpos, + "unterminated double quote string")); } let ch_start = self.last_pos; @@ -1136,14 +1142,14 @@ impl<'a> StringReader<'a> { if self.is_eof() { let last_bpos = self.last_pos; - self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"); + panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string")); } else if !self.curr_is('"') { let last_bpos = self.last_pos; let curr_char = self.curr.unwrap(); - self.fatal_span_char(start_bpos, last_bpos, + panic!(self.fatal_span_char(start_bpos, last_bpos, "found invalid character; \ only `#` is allowed in raw string delimitation", - curr_char); + curr_char)); } self.bump(); let content_start_bpos = self.last_pos; @@ -1152,7 +1158,7 @@ impl<'a> StringReader<'a> { 'outer: loop { if self.is_eof() { let last_bpos = self.last_pos; - self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"); + panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string")); } //if self.curr_is('"') { //content_end_bpos = self.last_pos; @@ -1221,7 +1227,7 @@ impl<'a> StringReader<'a> { c => { let last_bpos = self.last_pos; let bpos = self.pos; - self.fatal_span_char(last_bpos, bpos, "unknown start of token", c); + panic!(self.fatal_span_char(last_bpos, bpos, "unknown start of token", c)); } } } @@ -1274,9 +1280,9 @@ impl<'a> StringReader<'a> { // character before position `start` are an // ascii single quote and ascii 'b'. 
let last_pos = self.last_pos; - self.fatal_span_verbose( + panic!(self.fatal_span_verbose( start - BytePos(2), last_pos, - "unterminated byte constant".to_string()); + "unterminated byte constant".to_string())); } let id = if valid { self.name_from(start) } else { token::intern("?") }; @@ -1296,8 +1302,7 @@ impl<'a> StringReader<'a> { while !self.curr_is('"') { if self.is_eof() { let last_pos = self.last_pos; - self.fatal_span_(start, last_pos, - "unterminated double quote byte string"); + panic!(self.fatal_span_(start, last_pos, "unterminated double quote byte string")); } let ch_start = self.last_pos; @@ -1321,14 +1326,14 @@ impl<'a> StringReader<'a> { if self.is_eof() { let last_pos = self.last_pos; - self.fatal_span_(start_bpos, last_pos, "unterminated raw string"); + panic!(self.fatal_span_(start_bpos, last_pos, "unterminated raw string")); } else if !self.curr_is('"') { let last_pos = self.last_pos; let ch = self.curr.unwrap(); - self.fatal_span_char(start_bpos, last_pos, + panic!(self.fatal_span_char(start_bpos, last_pos, "found invalid character; \ only `#` is allowed in raw string delimitation", - ch); + ch)); } self.bump(); let content_start_bpos = self.last_pos; @@ -1337,7 +1342,7 @@ impl<'a> StringReader<'a> { match self.curr { None => { let last_pos = self.last_pos; - self.fatal_span_(start_bpos, last_pos, "unterminated raw string") + panic!(self.fatal_span_(start_bpos, last_pos, "unterminated raw string")) }, Some('"') => { content_end_bpos = self.last_pos; diff --git a/syntex_syntax/src/parse/mod.rs b/syntex_syntax/src/parse/mod.rs index 2320de4b..86297c14 100644 --- a/syntex_syntax/src/parse/mod.rs +++ b/syntex_syntax/src/parse/mod.rs @@ -13,7 +13,6 @@ use ast; use codemap::{self, Span, CodeMap, FileMap}; use diagnostic::{SpanHandler, Handler, Auto, FatalError}; -use parse::attr::ParserAttr; use parse::parser::Parser; use parse::token::InternedString; use ptr::P; @@ -83,7 +82,8 @@ pub fn parse_crate_attrs_from_file( cfg: ast::CrateConfig, sess: &ParseSess ) -> Vec<ast::Attribute> { - new_parser_from_file(sess, cfg, input).parse_inner_attributes() + // FIXME: maybe_aborted?
+ panictry!(new_parser_from_file(sess, cfg, input).parse_inner_attributes()) } pub fn parse_crate_from_source_str(name: String, @@ -107,7 +107,7 @@ pub fn parse_crate_attrs_from_source_str(name: String, cfg, name, source); - maybe_aborted(p.parse_inner_attributes(), p) + maybe_aborted(panictry!(p.parse_inner_attributes()), p) } pub fn parse_expr_from_source_str(name: String, @@ -116,7 +116,7 @@ pub fn parse_expr_from_source_str(name: String, sess: &ParseSess) -> P<ast::Expr> { let mut p = new_parser_from_source_str(sess, cfg, name, source); - maybe_aborted(p.parse_expr(), p) + maybe_aborted(panictry!(p.parse_expr_nopanic()), p) } pub fn parse_item_from_source_str(name: String, @@ -125,7 +125,7 @@ pub fn parse_item_from_source_str(name: String, sess: &ParseSess) -> Option<P<ast::Item>> { let mut p = new_parser_from_source_str(sess, cfg, name, source); - maybe_aborted(p.parse_item(),p) + maybe_aborted(panictry!(p.parse_item_nopanic()), p) } pub fn parse_meta_from_source_str(name: String, @@ -134,7 +134,7 @@ pub fn parse_meta_from_source_str(name: String, sess: &ParseSess) -> P<ast::MetaItem> { let mut p = new_parser_from_source_str(sess, cfg, name, source); - maybe_aborted(p.parse_meta_item(),p) + maybe_aborted(panictry!(p.parse_meta_item()), p) } pub fn parse_stmt_from_source_str(name: String, @@ -148,7 +148,7 @@ pub fn parse_stmt_from_source_str(name: String, name, source ); - maybe_aborted(p.parse_stmt(), p) + maybe_aborted(panictry!(p.parse_stmt_nopanic()), p) } // Warning: This parses with quote_depth > 0, which is not the default. @@ -235,7 +235,7 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) let msg = format!("couldn't read {:?}: {}", path.display(), e); match spanopt { Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, &msg)), - None => sess.span_diagnostic.handler().fatal(&msg) + None => panic!(sess.span_diagnostic.handler().fatal(&msg)) } } } @@ -671,7 +671,7 @@ mod tests { use std::rc::Rc; use codemap::{Span, BytePos, Pos, Spanned, NO_EXPANSION}; use owned_slice::OwnedSlice; - use ast; + use ast::{self, TokenTree}; use abi; use attr::{first_attr_value_str_by_name, AttrMetaMethods}; use parse; @@ -741,10 +741,10 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( 4, - Some(&ast::TtToken(_, token::Ident(name_macro_rules, token::Plain))), - Some(&ast::TtToken(_, token::Not)), - Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))), - Some(&ast::TtDelimited(_, ref macro_delimed)), + Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))), + Some(&TokenTree::Token(_, token::Not)), + Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))), + Some(&TokenTree::Delimited(_, ref macro_delimed)), ) if name_macro_rules.name.as_str() == "macro_rules" && name_zip.name.as_str() == "zip" => { @@ -752,17 +752,17 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( 3, - Some(&ast::TtDelimited(_, ref first_delimed)), - Some(&ast::TtToken(_, token::FatArrow)), - Some(&ast::TtDelimited(_, ref second_delimed)), + Some(&TokenTree::Delimited(_, ref first_delimed)), + Some(&TokenTree::Token(_, token::FatArrow)), + Some(&TokenTree::Delimited(_, ref second_delimed)), ) if macro_delimed.delim == token::Paren => { let tts = &first_delimed.tts[..]; match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&ast::TtToken(_, token::Dollar)), - Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), ) if first_delimed.delim ==
token::Paren && ident.name.as_str() == "a" => {}, @@ -772,8 +772,8 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&ast::TtToken(_, token::Dollar)), - Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), ) if second_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, @@ -792,39 +792,39 @@ mod tests { let tts = string_to_tts("fn a (b : i32) { b; }".to_string()); let expected = vec![ - ast::TtToken(sp(0, 2), + TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"), token::IdentStyle::Plain)), - ast::TtToken(sp(3, 4), + TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"), token::IdentStyle::Plain)), - ast::TtDelimited( + TokenTree::Delimited( sp(5, 14), Rc::new(ast::Delimited { delim: token::DelimToken::Paren, open_span: sp(5, 6), tts: vec![ - ast::TtToken(sp(6, 7), + TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"), token::IdentStyle::Plain)), - ast::TtToken(sp(8, 9), + TokenTree::Token(sp(8, 9), token::Colon), - ast::TtToken(sp(10, 13), + TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"), token::IdentStyle::Plain)), ], close_span: sp(13, 14), })), - ast::TtDelimited( + TokenTree::Delimited( sp(15, 21), Rc::new(ast::Delimited { delim: token::DelimToken::Brace, open_span: sp(15, 16), tts: vec![ - ast::TtToken(sp(17, 18), + TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"), token::IdentStyle::Plain)), - ast::TtToken(sp(18, 19), + TokenTree::Token(sp(18, 19), token::Semi) ], close_span: sp(20, 21), @@ -858,7 +858,7 @@ mod tests { #[test] fn parse_stmt_1 () { assert!(string_to_stmt("b;".to_string()) == - P(Spanned{ + Some(P(Spanned{ node: ast::StmtExpr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprPath(None, ast::Path { @@ -873,7 +873,7 @@ mod tests { }), span: sp(0,1)}), ast::DUMMY_NODE_ID), - span: sp(0,1)})) + span: sp(0,1)}))) } diff --git a/syntex_syntax/src/parse/parser.rs b/syntex_syntax/src/parse/parser.rs index e5f306a8..cacb4059 100644 --- a/syntex_syntax/src/parse/parser.rs +++ b/syntex_syntax/src/parse/parser.rs @@ -41,15 +41,13 @@ use ast::{MutImmutable, MutMutable, Mac_}; use ast::{MutTy, BiMul, Mutability}; use ast::{MethodImplItem, NamedField, UnNeg, NoReturn, UnNot}; use ast::{Pat, PatBox, PatEnum, PatIdent, PatLit, PatQPath, PatMac, PatRange}; -use ast::{PatRegion, PatStruct, PatTup, PatVec, PatWild, PatWildMulti}; -use ast::PatWildSingle; +use ast::{PatRegion, PatStruct, PatTup, PatVec, PatWild}; use ast::{PolyTraitRef, QSelf}; use ast::{Return, BiShl, BiShr, Stmt, StmtDecl}; use ast::{StmtExpr, StmtSemi, StmtMac, VariantData, StructField}; use ast::{BiSub, StrStyle}; use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue}; use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef}; -use ast::{TtDelimited, TtSequence, TtToken}; use ast::{Ty, Ty_, TypeBinding, TyMac}; use ast::{TyFixedLengthVec, TyBareFn, TyTypeof, TyInfer}; use ast::{TyParam, TyParamBound, TyParen, TyPath, TyPolyTraitRef, TyPtr}; @@ -64,7 +62,6 @@ use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp, CodeMap}; use diagnostic; use ext::tt::macro_parser; use parse; -use parse::attr::ParserAttr; use parse::classify; use parse::common::{SeqSep, seq_sep_none, seq_sep_trailing_allowed}; use parse::lexer::{Reader, TokenAndSpan}; @@ -359,31 +356,36 @@ impl<'a> Parser<'a> { } // Panicing fns (for now!) 
- // This is so that the quote_*!() syntax extensions - pub fn parse_expr(&mut self) -> P<Expr> { + // These functions are used by the quote_*!() syntax extensions, but shouldn't + // be used otherwise. + pub fn parse_expr_panic(&mut self) -> P<Expr> { panictry!(self.parse_expr_nopanic()) } - pub fn parse_item(&mut self) -> Option<P<Item>> { + pub fn parse_item_panic(&mut self) -> Option<P<Item>> { panictry!(self.parse_item_nopanic()) } - pub fn parse_pat(&mut self) -> P<Pat> { + pub fn parse_pat_panic(&mut self) -> P<Pat> { panictry!(self.parse_pat_nopanic()) } - pub fn parse_arm(&mut self) -> Arm { + pub fn parse_arm_panic(&mut self) -> Arm { panictry!(self.parse_arm_nopanic()) } - pub fn parse_ty(&mut self) -> P<Ty> { + pub fn parse_ty_panic(&mut self) -> P<Ty> { panictry!(self.parse_ty_nopanic()) } - pub fn parse_stmt(&mut self) -> Option<P<Stmt>> { + pub fn parse_stmt_panic(&mut self) -> Option<P<Stmt>> { panictry!(self.parse_stmt_nopanic()) } + pub fn parse_attribute_panic(&mut self, permit_inner: bool) -> ast::Attribute { + panictry!(self.parse_attribute(permit_inner)) + } + /// Convert a token to a string using self's reader pub fn token_to_string(token: &token::Token) -> String { pprust::token_to_string(token) @@ -1174,7 +1176,7 @@ impl<'a> Parser<'a> { seq_sep_none(), |p| -> PResult<P<TraitItem>> { maybe_whole!(no_clone p, NtTraitItem); - let mut attrs = p.parse_outer_attributes(); + let mut attrs = try!(p.parse_outer_attributes()); let lo = p.span.lo; let (name, node) = if try!(p.eat_keyword(keywords::Type)) { @@ -1840,7 +1842,7 @@ impl<'a> Parser<'a> { }); } _ => { - return Err(self.fatal(&format!("expected a lifetime name"))); + return Err(self.fatal("expected a lifetime name")); } } } @@ -2425,7 +2427,7 @@ impl<'a> Parser<'a> { )); let (sep, repeat) = try!(self.parse_sep_and_kleene_op()); let name_num = macro_parser::count_names(&seq); - return Ok(TtSequence(mk_sp(sp.lo, seq_span.hi), + return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), Rc::new(SequenceRepetition { tts: seq, separator: sep, @@ -2434,7 +2436,7 @@ impl<'a> Parser<'a> { }))); } else if self.token.is_keyword_allow_following_colon(keywords::Crate) { try!(self.bump()); - return Ok(TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); + return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); } else { sp = mk_sp(sp.lo, self.span.hi); let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain }; @@ -2456,9 +2458,9 @@ impl<'a> Parser<'a> { sp = mk_sp(sp.lo, self.span.hi); let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain }; let nt_kind = try!(self.parse_ident()); - Ok(TtToken(sp, MatchNt(name, nt_kind, namep, kindp))) + Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp))) } else { - Ok(TtToken(sp, SubstNt(name, namep))) + Ok(TokenTree::Token(sp, SubstNt(name, namep))) } } @@ -2506,7 +2508,7 @@ impl<'a> Parser<'a> { /// parse a single token tree from the input. pub fn parse_token_tree(&mut self) -> PResult<TokenTree> { // FIXME #6994: currently, this is too eager. It - // parses token trees but also identifies TtSequence's + // parses token trees but also identifies TokenTree::Sequence's // and token::SubstNt's; it's too early to know yet // whether something will be a nonterminal or a seq // yet.
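Taken together, the parser.rs hunks above replace each panicking entry point with a `parse_*_panic` wrapper around the fallible `parse_*_nopanic` form, which returns `PResult` (a `Result` whose error side is `diagnostic::FatalError`), with `panictry!` bridging the two. A minimal sketch of what this means for a caller, assuming a parser built with `new_parser_from_source_str` as in the tests; the session setup here is illustrative, not part of this patch:

    // Hypothetical caller; `sess` and `cfg` stand in for a real ParseSess
    // and crate config set up elsewhere.
    let mut p = new_parser_from_source_str(&sess, cfg,
                                           "<example>".to_string(),
                                           "1 + 1".to_string());

    // Before this patch, p.parse_expr() aborted with a panic on bad input.
    // Now the caller sees the error and decides what to do with it:
    let expr = match p.parse_expr_nopanic() {
        Ok(e) => e,
        Err(fatal) => panic!(fatal), // what panictry! expands to
    };

Callers that genuinely want the old behavior (the quote_*! extensions) keep it through the `parse_*_panic` wrappers; everything else can propagate the error upward with `try!`.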
@@ -2537,7 +2539,7 @@ impl<'a> Parser<'a> { p.parse_unquoted() } _ => { - Ok(TtToken(p.span, try!(p.bump_and_get()))) + Ok(TokenTree::Token(p.span, try!(p.bump_and_get()))) } } } @@ -2576,7 +2578,7 @@ impl<'a> Parser<'a> { // Expand to cover the entire delimited token tree let span = Span { hi: close_span.hi, ..pre_span }; - Ok(TtDelimited(span, Rc::new(Delimited { + Ok(TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, open_span: open_span, tts: tts, @@ -2962,7 +2964,7 @@ impl<'a> Parser<'a> { pub fn parse_arm_nopanic(&mut self) -> PResult<Arm> { maybe_whole!(no_clone self, NtArm); - let attrs = self.parse_outer_attributes(); + let attrs = try!(self.parse_outer_attributes()); let pats = try!(self.parse_pats()); let mut guard = None; if try!(self.eat_keyword(keywords::If) ){ @@ -3078,7 +3080,7 @@ impl<'a> Parser<'a> { self.check(&token::CloseDelim(token::Bracket)) { slice = Some(P(ast::Pat { id: ast::DUMMY_NODE_ID, - node: PatWild(PatWildMulti), + node: PatWild, span: self.span, })); before_slice = false; @@ -3215,7 +3217,7 @@ impl<'a> Parser<'a> { token::Underscore => { // Parse _ try!(self.bump()); - pat = PatWild(PatWildSingle); + pat = PatWild; } token::BinOp(token::And) | token::AndAnd => { // Parse &pat / &mut pat @@ -3471,7 +3473,7 @@ impl<'a> Parser<'a> { } } - let attrs = self.parse_outer_attributes(); + let attrs = try!(self.parse_outer_attributes()); let lo = self.span.lo; Ok(Some(if self.check_keyword(keywords::Let) { @@ -3613,7 +3615,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; try!(self.expect(&token::OpenDelim(token::Brace))); - Ok((self.parse_inner_attributes(), + Ok((try!(self.parse_inner_attributes()), try!(self.parse_block_tail(lo, DefaultBlock)))) } @@ -4437,7 +4439,7 @@ impl<'a> Parser<'a> { pub fn parse_impl_item(&mut self) -> PResult<P<ImplItem>> { maybe_whole!(no_clone self, NtImplItem); - let mut attrs = self.parse_outer_attributes(); + let mut attrs = try!(self.parse_outer_attributes()); let lo = self.span.lo; let vis = try!(self.parse_visibility()); let (name, node) = if try!(self.eat_keyword(keywords::Type)) { @@ -4614,7 +4616,7 @@ impl<'a> Parser<'a> { generics.where_clause = try!(self.parse_where_clause()); try!(self.expect(&token::OpenDelim(token::Brace))); - let attrs = self.parse_inner_attributes(); + let attrs = try!(self.parse_inner_attributes()); let mut impl_items = vec![]; while !try!(self.eat(&token::CloseDelim(token::Brace))) { @@ -4733,7 +4735,7 @@ impl<'a> Parser<'a> { &token::CloseDelim(token::Paren), seq_sep_trailing_allowed(token::Comma), |p| { - let attrs = p.parse_outer_attributes(); + let attrs = try!(p.parse_outer_attributes()); let lo = p.span.lo; let struct_field_ = ast::StructField_ { kind: UnnamedField(try!(p.parse_visibility())), @@ -4775,7 +4777,7 @@ impl<'a> Parser<'a> { /// Parse an element of a struct definition fn parse_struct_decl_field(&mut self, allow_pub: bool) -> PResult<StructField> { - let attrs = self.parse_outer_attributes(); + let attrs = try!(self.parse_outer_attributes()); if try!(self.eat_keyword(keywords::Pub) ){ if !allow_pub { @@ -4847,7 +4849,7 @@ impl<'a> Parser<'a> { let mod_inner_lo = self.span.lo; let old_owns_directory = self.owns_directory; self.owns_directory = true; - let attrs = self.parse_inner_attributes(); + let attrs = try!(self.parse_inner_attributes()); let m = try!(self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)); self.owns_directory = old_owns_directory; self.pop_mod_path(); @@ -4996,7 +4998,7 @@ impl<'a> Parser<'a> { Some(name), id_sp); let mod_inner_lo = p0.span.lo; - let mod_attrs =
p0.parse_inner_attributes(); + let mod_attrs = try!(p0.parse_inner_attributes()); let m0 = try!(p0.parse_mod_items(&token::Eof, mod_inner_lo)); self.sess.included_mod_stack.borrow_mut().pop(); Ok((ast::ItemMod(m0), mod_attrs)) @@ -5099,7 +5101,7 @@ impl<'a> Parser<'a> { let abi = opt_abi.unwrap_or(abi::C); - attrs.extend(self.parse_inner_attributes()); + attrs.extend(try!(self.parse_inner_attributes())); let mut foreign_items = vec![]; while let Some(item) = try!(self.parse_foreign_item()) { @@ -5149,7 +5151,7 @@ impl<'a> Parser<'a> { let mut all_nullary = true; let mut any_disr = None; while self.token != token::CloseDelim(token::Brace) { - let variant_attrs = self.parse_outer_attributes(); + let variant_attrs = try!(self.parse_outer_attributes()); let vlo = self.span.lo; let struct_def; @@ -5511,7 +5513,7 @@ impl<'a> Parser<'a> { /// Parse a foreign item. fn parse_foreign_item(&mut self) -> PResult<Option<P<ForeignItem>>> { - let attrs = self.parse_outer_attributes(); + let attrs = try!(self.parse_outer_attributes()); let lo = self.span.lo; let visibility = try!(self.parse_visibility()); @@ -5611,7 +5613,7 @@ impl<'a> Parser<'a> { } pub fn parse_item_nopanic(&mut self) -> PResult<Option<P<Item>>> { - let attrs = self.parse_outer_attributes(); + let attrs = try!(self.parse_outer_attributes()); self.parse_item_(attrs, true) } @@ -5730,7 +5732,7 @@ impl<'a> Parser<'a> { pub fn parse_crate_mod(&mut self) -> PResult<Crate> { let lo = self.span.lo; Ok(ast::Crate { - attrs: self.parse_inner_attributes(), + attrs: try!(self.parse_inner_attributes()), module: try!(self.parse_mod_items(&token::Eof, lo)), config: self.cfg.clone(), span: mk_sp(lo, self.span.lo), diff --git a/syntex_syntax/src/print/pprust.rs b/syntex_syntax/src/print/pprust.rs index 3903ed62..630453a4 100644 --- a/syntex_syntax/src/print/pprust.rs +++ b/syntex_syntax/src/print/pprust.rs @@ -11,7 +11,7 @@ pub use self::AnnNode::*; use abi; -use ast; +use ast::{self, TokenTree}; use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier}; use ast_util; use util::parser::AssocOp; @@ -1463,7 +1463,7 @@ impl<'a> State<'a> { /// expression arguments as expressions). It can be done! I think. pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> { match *tt { - ast::TtToken(_, ref tk) => { + TokenTree::Token(_, ref tk) => { try!(word(&mut self.s, &token_to_string(tk))); match *tk { parse::token::DocComment(..)
=> { @@ -1472,14 +1472,14 @@ impl<'a> State<'a> { _ => Ok(()) } } - ast::TtDelimited(_, ref delimed) => { + TokenTree::Delimited(_, ref delimed) => { try!(word(&mut self.s, &token_to_string(&delimed.open_token()))); try!(space(&mut self.s)); try!(self.print_tts(&delimed.tts)); try!(space(&mut self.s)); word(&mut self.s, &token_to_string(&delimed.close_token())) }, - ast::TtSequence(_, ref seq) => { + TokenTree::Sequence(_, ref seq) => { try!(word(&mut self.s, "$(")); for tt_elt in &seq.tts { try!(self.print_tt(tt_elt)); } @@ -1510,9 +1510,9 @@ impl<'a> State<'a> { // There should be no space between the module name and the following `::` in paths, // otherwise imported macros get re-parsed from crate metadata incorrectly (#20701) suppress_space = match tt { - &ast::TtToken(_, token::Ident(_, token::ModName)) | - &ast::TtToken(_, token::MatchNt(_, _, _, token::ModName)) | - &ast::TtToken(_, token::SubstNt(_, token::ModName)) => true, + &TokenTree::Token(_, token::Ident(_, token::ModName)) | + &TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) | + &TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true, _ => false } } @@ -2417,8 +2417,7 @@ impl<'a> State<'a> { /* Pat isn't normalized, but the beauty of it is that it doesn't matter */ match pat.node { - ast::PatWild(ast::PatWildSingle) => try!(word(&mut self.s, "_")), - ast::PatWild(ast::PatWildMulti) => try!(word(&mut self.s, "..")), + ast::PatWild => try!(word(&mut self.s, "_")), ast::PatIdent(binding_mode, ref path1, ref sub) => { match binding_mode { ast::BindByRef(mutbl) => { @@ -2514,13 +2513,10 @@ impl<'a> State<'a> { |s, p| s.print_pat(&**p))); if let Some(ref p) = *slice { if !before.is_empty() { try!(self.word_space(",")); } - try!(self.print_pat(&**p)); - match **p { - ast::Pat { node: ast::PatWild(ast::PatWildMulti), .. } => { - // this case is handled by print_pat - } - _ => try!(word(&mut self.s, "..")), + if p.node != ast::PatWild { + try!(self.print_pat(&**p)); } + try!(word(&mut self.s, "..")); if !after.is_empty() { try!(self.word_space(",")); } } try!(self.commasep(Inconsistent, diff --git a/syntex_syntax/src/util/parser_testing.rs b/syntex_syntax/src/util/parser_testing.rs index 6e6e2d18..57138bf2 100644 --- a/syntex_syntax/src/util/parser_testing.rs +++ b/syntex_syntax/src/util/parser_testing.rs @@ -9,7 +9,7 @@ // except according to those terms. use ast; -use parse::{ParseSess,filemap_to_tts}; +use parse::{ParseSess,PResult,filemap_to_tts}; use parse::new_parser_from_source_str; use parse::parser::Parser; use parse::token; @@ -31,11 +31,11 @@ pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> } fn with_error_checking_parse<T, F>(s: String, f: F) -> T where - F: FnOnce(&mut Parser) -> T, + F: FnOnce(&mut Parser) -> PResult<T>, { let ps = ParseSess::new(); let mut p = string_to_parser(&ps, s); - let x = f(&mut p); + let x = panictry!(f(&mut p)); p.abort_if_errors(); x } @@ -43,39 +43,37 @@ fn with_error_checking_parse<T, F>(s: String, f: F) -> T where /// Parse a string, return a crate.
pub fn string_to_crate (source_str : String) -> ast::Crate { with_error_checking_parse(source_str, |p| { - panictry!(p.parse_crate_mod()) + p.parse_crate_mod() }) } /// Parse a string, return an expr pub fn string_to_expr (source_str : String) -> P<ast::Expr> { with_error_checking_parse(source_str, |p| { - p.parse_expr() + p.parse_expr_nopanic() }) } /// Parse a string, return an item pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> { with_error_checking_parse(source_str, |p| { - p.parse_item() + p.parse_item_nopanic() }) } /// Parse a string, return a stmt -pub fn string_to_stmt(source_str : String) -> P<ast::Stmt> { +pub fn string_to_stmt(source_str : String) -> Option<P<ast::Stmt>> { with_error_checking_parse(source_str, |p| { - p.parse_stmt().unwrap() + p.parse_stmt_nopanic() }) } /// Parse a string, return a pat. Uses "irrefutable"... which doesn't /// (currently) affect parsing. pub fn string_to_pat(source_str: String) -> P<ast::Pat> { - // Binding `sess` and `parser` works around dropck-injected - // region-inference issues; see #25212, #22323, #22321. - let sess = ParseSess::new(); - let mut parser = string_to_parser(&sess, source_str); - parser.parse_pat() + with_error_checking_parse(source_str, |p| { + p.parse_pat_nopanic() + }) } /// Convert a vector of strings to a vector of ast::Ident's diff --git a/syntex_syntax/src/visit.rs b/syntex_syntax/src/visit.rs index 67a8eca8..9eb0a050 100644 --- a/syntex_syntax/src/visit.rs +++ b/syntex_syntax/src/visit.rs @@ -459,7 +459,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { visitor.visit_expr(lower_bound); visitor.visit_expr(upper_bound) } - PatWild(_) => (), + PatWild => (), PatVec(ref prepatterns, ref slice_pattern, ref postpatterns) => { walk_list!(visitor, visit_pat, prepatterns); walk_opt!(visitor, visit_pat, slice_pattern);
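With `PatWildKind` gone, `_` is the only wildcard the AST encodes directly: the former `PatWild(PatWildSingle)` becomes plain `PatWild`, and the `..` of a slice pattern is represented by a `PatWild` sitting in the slice slot of `PatVec`, which is what the parser, pretty-printer, and visitor hunks above all rely on. A rough sketch of how downstream code might match over the new `Pat_`; the match body here is illustrative only, not part of this patch:

    // Field layout follows the ast.rs definitions in this patch:
    // PatVec(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>).
    match pat.node {
        // Formerly PatWild(PatWildSingle): the `_` pattern.
        ast::PatWild => {}
        // `[a, .., z]` now surfaces with a PatWild in the middle slot,
        // replacing the old PatWild(PatWildMulti).
        ast::PatVec(_, ref slice, _) => {
            if slice.as_ref().map_or(false, |p| p.node == ast::PatWild) {
                // a bare `..` with no sub-pattern bound to it
            }
        }
        _ => {}
    }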