diff --git a/syntex_errors/.gitignore b/syntex_errors/.gitignore
new file mode 100644
index 00000000..1b72444a
--- /dev/null
+++ b/syntex_errors/.gitignore
@@ -0,0 +1,2 @@
+/Cargo.lock
+/target
diff --git a/syntex_pos/.gitignore b/syntex_pos/.gitignore
new file mode 100644
index 00000000..1b72444a
--- /dev/null
+++ b/syntex_pos/.gitignore
@@ -0,0 +1,2 @@
+/Cargo.lock
+/target
diff --git a/syntex_syntax/.gitignore b/syntex_syntax/.gitignore
index 5a44eef0..1b72444a 100644
--- a/syntex_syntax/.gitignore
+++ b/syntex_syntax/.gitignore
@@ -1 +1,2 @@
 /Cargo.lock
+/target
diff --git a/syntex_syntax/src/attr.rs b/syntex_syntax/src/attr.rs
index 3c88fb8f..e01bd2a9 100644
--- a/syntex_syntax/src/attr.rs
+++ b/syntex_syntax/src/attr.rs
@@ -34,6 +34,27 @@ thread_local! { static USED_ATTRS: RefCell> = RefCell::new(Vec::new()) }
+enum AttrError {
+    MultipleItem(InternedString),
+    UnknownMetaItem(InternedString),
+    MissingSince,
+    MissingFeature,
+    MultipleStabilityLevels,
+}
+
+fn handle_errors(diag: &Handler, span: Span, error: AttrError) {
+    match error {
+        AttrError::MultipleItem(item) => span_err!(diag, span, E0538,
+                                                   "multiple '{}' items", item),
+        AttrError::UnknownMetaItem(item) => span_err!(diag, span, E0541,
+                                                      "unknown meta item '{}'", item),
+        AttrError::MissingSince => span_err!(diag, span, E0542, "missing 'since'"),
+        AttrError::MissingFeature => span_err!(diag, span, E0546, "missing 'feature'"),
+        AttrError::MultipleStabilityLevels => span_err!(diag, span, E0544,
+                                                        "multiple stability levels"),
+    }
+}
+
 pub fn mark_used(attr: &Attribute) {
     let AttrId(id) = attr.node.id;
     USED_ATTRS.with(|slot| {
@@ -303,10 +324,10 @@ pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option, attrs: &[Attribute]) -> In
             MetaItemKind::List(ref n, ref items) if n == "inline" => {
                 mark_used(attr);
                 if items.len() != 1 {
-                    diagnostic.map(|d|{ d.span_err(attr.span, "expected one argument"); });
+                    diagnostic.map(|d|{ span_err!(d, attr.span, E0534, "expected one argument"); });
                     InlineAttr::None
                 } else if contains_name(&items[..], "always") {
                     InlineAttr::Always
                 } else if contains_name(&items[..], "never") {
                     InlineAttr::Never
                 } else {
-                    diagnostic.map(|d|{ d.span_err((*items[0]).span, "invalid argument"); });
+                    diagnostic.map(|d| {
+                        span_err!(d, (*items[0]).span, E0535, "invalid argument");
+                    });
                     InlineAttr::None
                 }
             }
@@ -374,13 +397,13 @@ pub fn cfg_matches(cfgs: &[P], cfg: &ast::MetaItem,
             mis.iter().all(|mi| cfg_matches(cfgs, &mi, sess, features)),
         ast::MetaItemKind::List(ref pred, ref mis) if &pred[..] == "not" => {
             if mis.len() != 1 {
-                sess.span_diagnostic.span_err(cfg.span, "expected 1 cfg-pattern");
+                span_err!(sess.span_diagnostic, cfg.span, E0536, "expected 1 cfg-pattern");
                 return false;
             }
             !cfg_matches(cfgs, &mis[0], sess, features)
         }
         ast::MetaItemKind::List(ref pred, _) => {
-            sess.span_diagnostic.span_err(cfg.span, &format!("invalid predicate `{}`", pred));
+            span_err!(sess.span_diagnostic, cfg.span, E0537, "invalid predicate `{}`", pred);
             false
         },
         ast::MetaItemKind::Word(_) | ast::MetaItemKind::NameValue(..) => {
@@ -446,15 +469,14 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
         if let Some(metas) = attr.meta_item_list() {
             let get = |meta: &MetaItem, item: &mut Option| {
                 if item.is_some() {
-                    diagnostic.span_err(meta.span, &format!("multiple '{}' items",
-                                                            meta.name()));
+                    handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
                     return false
                 }
                 if let Some(v) = meta.value_str() {
                     *item = Some(v);
                     true
                 } else {
-                    diagnostic.span_err(meta.span, "incorrect meta item");
+                    span_err!(diagnostic, meta.span, E0539, "incorrect meta item");
                     false
                 }
             };
@@ -462,7 +484,8 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
             match tag {
                 "rustc_deprecated" => {
                     if rustc_depr.is_some() {
-                        diagnostic.span_err(item_sp, "multiple rustc_deprecated attributes");
+                        span_err!(diagnostic, item_sp, E0540,
+                                  "multiple rustc_deprecated attributes");
                         break
                     }
@@ -473,8 +496,8 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             "since" => if !get(meta, &mut since) { continue 'outer },
                             "reason" => if !get(meta, &mut reason) { continue 'outer },
                             _ => {
-                                diagnostic.span_err(meta.span, &format!("unknown meta item '{}'",
-                                                                        meta.name()));
+                                handle_errors(diagnostic, meta.span,
+                                              AttrError::UnknownMetaItem(meta.name()));
                                 continue 'outer
                             }
                         }
@@ -488,18 +511,18 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             })
                         }
                         (None, _) => {
-                            diagnostic.span_err(attr.span(), "missing 'since'");
+                            handle_errors(diagnostic, attr.span(), AttrError::MissingSince);
                             continue
                         }
                         _ => {
-                            diagnostic.span_err(attr.span(), "missing 'reason'");
+                            span_err!(diagnostic, attr.span(), E0543, "missing 'reason'");
                            continue
                         }
                     }
                 }
                 "unstable" => {
                     if stab.is_some() {
-                        diagnostic.span_err(item_sp, "multiple stability levels");
+                        handle_errors(diagnostic, attr.span(), AttrError::MultipleStabilityLevels);
                         break
                     }
@@ -512,8 +535,8 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             "reason" => if !get(meta, &mut reason) { continue 'outer },
                             "issue" => if !get(meta, &mut issue) { continue 'outer },
                             _ => {
-                                diagnostic.span_err(meta.span, &format!("unknown meta item '{}'",
-                                                                        meta.name()));
+                                handle_errors(diagnostic, meta.span,
+                                              AttrError::UnknownMetaItem(meta.name()));
                                 continue 'outer
                             }
                         }
@@ -528,7 +551,8 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                                 if let Ok(issue) = issue.parse() {
                                     issue
                                 } else {
-                                    diagnostic.span_err(attr.span(), "incorrect 'issue'");
+                                    span_err!(diagnostic, attr.span(), E0545,
+                                              "incorrect 'issue'");
                                     continue
                                 }
                             }
@@ -538,18 +562,18 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             })
                         }
                         (None, _, _) => {
-                            diagnostic.span_err(attr.span(), "missing 'feature'");
+                            handle_errors(diagnostic, attr.span(), AttrError::MissingFeature);
                            continue
                         }
                         _ => {
-                            diagnostic.span_err(attr.span(), "missing 'issue'");
+                            span_err!(diagnostic, attr.span(), E0547, "missing 'issue'");
                            continue
                         }
                     }
                 }
                 "stable" => {
                     if stab.is_some() {
-                        diagnostic.span_err(item_sp, "multiple stability levels");
+                        handle_errors(diagnostic, attr.span(), AttrError::MultipleStabilityLevels);
                         break
                     }
@@ -560,8 +584,8 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             "feature" => if !get(meta, &mut feature) { continue 'outer },
                             "since" => if !get(meta, &mut since) { continue 'outer },
                             _ => {
-                                diagnostic.span_err(meta.span, &format!("unknown meta item '{}'",
-                                                                        meta.name()));
+                                handle_errors(diagnostic, meta.span,
+                                              AttrError::UnknownMetaItem(meta.name()));
                                 continue 'outer
                             }
                         }
@@ -578,11 +602,11 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                             })
                         }
                         (None, _) => {
-                            diagnostic.span_err(attr.span(), "missing 'feature'");
+                            handle_errors(diagnostic, attr.span(), AttrError::MissingFeature);
                             continue
                         }
                         _ => {
-                            diagnostic.span_err(attr.span(), "missing 'since'");
+                            handle_errors(diagnostic, attr.span(), AttrError::MissingSince);
                             continue
                         }
                     }
@@ -590,7 +614,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                 _ => unreachable!()
             }
         } else {
-            diagnostic.span_err(attr.span(), "incorrect stability attribute type");
+            span_err!(diagnostic, attr.span(), E0548, "incorrect stability attribute type");
             continue
         }
     }
@@ -603,8 +627,9 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
             }
             stab.rustc_depr = Some(rustc_depr);
         } else {
-            diagnostic.span_err(item_sp, "rustc_deprecated attribute must be paired with \
-                                          either stable or unstable attribute");
+            span_err!(diagnostic, item_sp, E0549,
+                      "rustc_deprecated attribute must be paired with \
+                       either stable or unstable attribute");
         }
     }
@@ -627,22 +652,21 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
         mark_used(attr);
         if depr.is_some() {
-            diagnostic.span_err(item_sp, "multiple deprecated attributes");
+            span_err!(diagnostic, item_sp, E0550, "multiple deprecated attributes");
             break
         }
         depr = if let Some(metas) = attr.meta_item_list() {
             let get = |meta: &MetaItem, item: &mut Option| {
                 if item.is_some() {
-                    diagnostic.span_err(meta.span, &format!("multiple '{}' items",
-                                                            meta.name()));
+                    handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
                     return false
                 }
                 if let Some(v) = meta.value_str() {
                     *item = Some(v);
                     true
                 } else {
-                    diagnostic.span_err(meta.span, "incorrect meta item");
+                    span_err!(diagnostic, meta.span, E0551, "incorrect meta item");
                     false
                 }
             };
@@ -654,8 +678,8 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
                         "since" => if !get(meta, &mut since) { continue 'outer },
                         "note" => if !get(meta, &mut note) { continue 'outer },
                         _ => {
-                            diagnostic.span_err(meta.span, &format!("unknown meta item '{}'",
-                                                                    meta.name()));
+                            handle_errors(diagnostic, meta.span,
+                                          AttrError::UnknownMetaItem(meta.name()));
                             continue 'outer
                         }
                     }
@@ -689,7 +713,7 @@ pub fn require_unique_names(diagnostic: &Handler, metas: &[P]) {
         if !set.insert(name.clone()) {
             panic!(diagnostic.span_fatal(meta.span,
-                                  &format!("duplicate meta item `{}`", name)));
+                                         &format!("duplicate meta item `{}`", name)));
         }
     }
 }
@@ -718,8 +742,8 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec
                             Some(ity) => Some(ReprInt(item.span, ity)),
                             None => {
                                 // Not a word we recognize
-                                diagnostic.span_err(item.span,
-                                                    "unrecognized representation hint");
+                                span_err!(diagnostic, item.span, E0552,
+                                          "unrecognized representation hint");
                                 None
                             }
                         }
@@ -731,7 +755,8 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec
                     }
                 }
                 // Not a word:
-                _ => diagnostic.span_err(item.span, "unrecognized enum representation hint")
+                _ => span_err!(diagnostic, item.span, E0553,
+                               "unrecognized enum representation hint"),
             }
         }
     }
diff --git a/syntex_syntax/src/config.rs b/syntex_syntax/src/config.rs
index eaf82f5f..ff75149f 100644
--- a/syntex_syntax/src/config.rs
+++ b/syntex_syntax/src/config.rs
@@ -33,7 +33,7 @@ impl<'a> StripUnconfigured<'a> {
         if self.in_cfg(node.attrs()) { Some(node) } else { None }
     }
-    fn process_cfg_attrs(&mut self, node: T) -> T {
+    pub fn process_cfg_attrs(&mut self, node: T) -> T {
         node.map_attrs(|attrs| {
             attrs.into_iter().filter_map(|attr| self.process_cfg_attr(attr)).collect()
         })
diff --git a/syntex_syntax/src/diagnostic_list.rs b/syntex_syntax/src/diagnostic_list.rs
new file mode 100644
index 00000000..eb30657b
--- /dev/null
+++ b/syntex_syntax/src/diagnostic_list.rs
@@ -0,0 +1,54 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 or the MIT license
+// , at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(non_snake_case)]
+
+// Error messages for EXXXX errors.
+// Each message should start and end with a new line, and be wrapped to 80 characters.
+// In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.
+register_long_diagnostics! {
+
+E0533: r##"
+```compile_fail,E0533
+#[export_name]
+pub fn something() {}
+
+fn main() {}
+```
+"##,
+
+}
+
+register_diagnostics! {
+    E0534, // expected one argument
+    E0535, // invalid argument
+    E0536, // expected 1 cfg-pattern
+    E0537, // invalid predicate
+    E0538, // multiple [same] items
+    E0539, // incorrect meta item
+    E0540, // multiple rustc_deprecated attributes
+    E0541, // unknown meta item
+    E0542, // missing 'since'
+    E0543, // missing 'reason'
+    E0544, // multiple stability levels
+    E0545, // incorrect 'issue'
+    E0546, // missing 'feature'
+    E0547, // missing 'issue'
+    E0548, // incorrect stability attribute type
+    E0549, // rustc_deprecated attribute must be paired with either stable or unstable attribute
+    E0550, // multiple deprecated attributes
+    E0551, // incorrect meta item
+    E0552, // unrecognized representation hint
+    E0553, // unrecognized enum representation hint
+    E0554, // #[feature] may not be used on the [] release channel
+    E0555, // malformed feature attribute, expected #![feature(...)]
+    E0556, // malformed feature, expected just one word
+    E0557, // feature has been removed
+}
diff --git a/syntex_syntax/src/ext/base.rs b/syntex_syntax/src/ext/base.rs
index ca38ef06..92670cd9 100644
--- a/syntex_syntax/src/ext/base.rs
+++ b/syntex_syntax/src/ext/base.rs
@@ -944,9 +944,8 @@ impl SyntaxEnv {
     pub fn find(&self, k: Name) -> Option> {
         for frame in self.chain.iter().rev() {
-            match frame.map.get(&k) {
-                Some(v) => return Some(v.clone()),
-                None => {}
+            if let Some(v) = frame.map.get(&k) {
+                return Some(v.clone());
             }
         }
         None
diff --git a/syntex_syntax/src/ext/expand.rs b/syntex_syntax/src/ext/expand.rs
index c670283e..b2b63d0d 100644
--- a/syntex_syntax/src/ext/expand.rs
+++ b/syntex_syntax/src/ext/expand.rs
@@ -237,7 +237,7 @@ fn expand_mac_invoc(mac: ast::Mac, ident: Option, attrs: Vec(mac: ast::Mac, ident: Option, attrs: Vec SmallVe
     };
     fld.cx.bt_pop();
-    modified.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
+    let configured = modified.into_iter().flat_map(|it| {
+        it.fold_with(&mut fld.strip_unconfigured())
+    }).collect::>();
+
+    configured.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
 }
 }
 }
@@ -1126,7 +1130,7 @@ impl Folder for Marker {
         Spanned {
             node: Mac_ {
                 path: self.fold_path(node.path),
-                tts: self.fold_tts(node.tts),
+                tts: self.fold_tts(&node.tts),
             },
             span: self.new_span(span),
         }
@@ -1141,7 +1145,7 @@ impl Folder for Marker {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-fn mark_tts(tts: Vec, m: Mrk) -> Vec {
+fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
diff --git a/syntex_syntax/src/ext/quote.rs b/syntex_syntax/src/ext/quote.rs
index 68527b07..ffc950d7 100644
--- a/syntex_syntax/src/ext/quote.rs
+++ b/syntex_syntax/src/ext/quote.rs
@@ -32,6 +32,7 @@ pub mod rt {
     use ext::base::ExtCtxt;
     use parse::{self, token, classify};
     use ptr::P;
+    use std::rc::Rc;
     use tokenstream::{self, TokenTree};
@@ -215,12 +216,12 @@ pub mod rt {
             if self.node.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
+            r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
                 open_span: self.span,
                 tts: self.node.value.to_tokens(cx),
                 close_span: self.span,
-            }));
+            })));
             r
         }
     }
@@ -235,12 +236,12 @@ pub mod rt {
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec {
-            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
                 open_span: DUMMY_SP,
                 tts: vec![],
                 close_span: DUMMY_SP,
-            })]
+            }))]
         }
     }
@@ -791,9 +792,14 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec {
     parser: RefCell>,
@@ -262,7 +263,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
     let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
     let argument_gram = vec![
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![
                 TokenTree::Token(DUMMY_SP, match_lhs_tok),
                 TokenTree::Token(DUMMY_SP, token::FatArrow),
@@ -271,14 +272,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
             separator: Some(token::Semi),
             op: tokenstream::KleeneOp::OneOrMore,
             num_captures: 2,
-        }),
+        })),
         // to phase into semicolon-termination instead of semicolon-separation
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
             op: tokenstream::KleeneOp::ZeroOrMore,
             num_captures: 0
-        }),
+        })),
     ];
     // Parse the macro_rules! invocation (`none` is for no interpolations):
diff --git a/syntex_syntax/src/ext/tt/transcribe.rs b/syntex_syntax/src/ext/tt/transcribe.rs
index 58328eb4..7c0d1066 100644
--- a/syntex_syntax/src/ext/tt/transcribe.rs
+++ b/syntex_syntax/src/ext/tt/transcribe.rs
@@ -79,11 +79,11 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
                 tts: src,
                 // doesn't matter. This merely holds the root unzipping.
                 separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
-            }),
+            })),
             idx: 0,
             dotdotdoted: false,
             sep: None,
@@ -225,12 +225,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             } else { /* repeat */
                 *r.repeat_idx.last_mut().unwrap() += 1;
                 r.stack.last_mut().unwrap().idx = 0;
-                match r.stack.last().unwrap().sep.clone() {
-                    Some(tk) => {
-                        r.cur_tok = tk; /* repeat same span, I guess */
-                        return ret_val;
-                    }
-                    None => {}
+                if let Some(tk) = r.stack.last().unwrap().sep.clone() {
+                    r.cur_tok = tk; // repeat same span, I guess
+                    return ret_val;
                 }
             }
         }
diff --git a/syntex_syntax/src/feature_gate.rs b/syntex_syntax/src/feature_gate.rs
index d6476fdb..27485ee6 100644
--- a/syntex_syntax/src/feature_gate.rs
+++ b/syntex_syntax/src/feature_gate.rs
@@ -1103,17 +1103,16 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F
         match attr.meta_item_list() {
             None => {
-                span_handler.span_err(attr.span, "malformed feature attribute, \
-                                                  expected #![feature(...)]");
+                span_err!(span_handler, attr.span, E0555,
+                          "malformed feature attribute, expected #![feature(...)]");
             }
             Some(list) => {
                 for mi in list {
                     let name = match mi.node {
                         ast::MetaItemKind::Word(ref word) => (*word).clone(),
                         _ => {
-                            span_handler.span_err(mi.span,
-                                                  "malformed feature, expected just \
-                                                   one word");
+                            span_err!(span_handler, mi.span, E0556,
+                                      "malformed feature, expected just one word");
                             continue
                         }
                     };
@@ -1123,7 +1122,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F
                     } else if let Some(&(_, _, _)) = REMOVED_FEATURES.iter()
                         .find(|& &(n, _, _)| name == n) {
-                        span_handler.span_err(mi.span, "feature has been removed");
+                        span_err!(span_handler, mi.span, E0557, "feature has been removed");
                     } else if let Some(&(_, _, _)) = ACCEPTED_FEATURES.iter()
                         .find(|& &(n, _, _)| name == n) {
@@ -1179,9 +1178,9 @@ fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate,
     for attr in &krate.attrs {
         if attr.check_name("feature") {
             let release_channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)");
-            let ref msg = format!("#[feature] may not be used on the {} release channel",
-                                  release_channel);
-            span_handler.span_err(attr.span, msg);
+            span_err!(span_handler, attr.span, E0554,
+                      "#[feature] may not be used on the {} release channel",
+                      release_channel);
         }
     }
 }
diff --git a/syntex_syntax/src/fold.rs b/syntex_syntax/src/fold.rs
index ed6f09ee..ac3d643b 100644
--- a/syntex_syntax/src/fold.rs
+++ b/syntex_syntax/src/fold.rs
@@ -28,6 +28,8 @@ use tokenstream::*;
 use util::small_vector::SmallVector;
 use util::move_map::MoveMap;
+use std::rc::Rc;
+
 pub trait Folder : Sized {
     // Any additions to this trait should happen in form
     // of a call to a public `noop_*` function that only calls
@@ -222,11 +224,11 @@ pub trait Folder : Sized {
         noop_fold_ty_params(tps, self)
     }
-    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
+    fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
         noop_fold_tt(tt, self)
     }
-    fn fold_tts(&mut self, tts: Vec) -> Vec {
+    fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec {
         noop_fold_tts(tts, self)
     }
@@ -501,7 +503,7 @@ pub fn noop_fold_mac(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
     Spanned {
         node: Mac_ {
             path: fld.fold_path(node.path),
-            tts: fld.fold_tts(node.tts),
+            tts: fld.fold_tts(&node.tts),
         },
         span: fld.new_span(span)
     }
@@ -528,26 +530,32 @@ pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
     }
 }
-pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree {
-    match tt {
+pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree {
+    match *tt {
         TokenTree::Token(span, ref tok) => TokenTree::Token(span, fld.fold_token(tok.clone())),
-        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(span, Delimited {
-            delim: delimed.delim,
-            open_span: delimed.open_span,
-            tts: fld.fold_tts(delimed.tts),
-            close_span: delimed.close_span,
-        }),
-        TokenTree::Sequence(span, seq) => TokenTree::Sequence(span, SequenceRepetition {
-            tts: fld.fold_tts(seq.tts),
-            separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
-            ..seq
-        }),
+        TokenTree::Delimited(span, ref delimed) => {
+            TokenTree::Delimited(span, Rc::new(
+                Delimited {
+                    delim: delimed.delim,
+                    open_span: delimed.open_span,
+                    tts: fld.fold_tts(&delimed.tts),
+                    close_span: delimed.close_span,
+                }
+            ))
+        },
+        TokenTree::Sequence(span, ref seq) =>
+            TokenTree::Sequence(span,
+                                Rc::new(SequenceRepetition {
+                                    tts: fld.fold_tts(&seq.tts),
+                                    separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
+                                    ..**seq
+                                })),
     }
 }
-pub fn noop_fold_tts(tts: Vec, fld: &mut T) -> Vec {
-    tts.move_map(|tt| fld.fold_tt(tt))
+pub fn noop_fold_tts(tts: &[TokenTree], fld: &mut T) -> Vec {
+    tts.iter().map(|tt| fld.fold_tt(tt)).collect()
 }
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
@@ -605,7 +613,7 @@ pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T)
             token::NtIdent(Box::new(Spanned::{node: fld.fold_ident(id.node), ..*id})),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
-        token::NtTT(tt) => token::NtTT(tt.map(|tt| fld.fold_tt(tt))),
+        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
         token::NtImplItem(arm) => token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm)
diff --git a/syntex_syntax/src/lib.rs b/syntex_syntax/src/lib.rs
index 652cf68d..8febf1c4 100644
--- a/syntex_syntax/src/lib.rs
+++ b/syntex_syntax/src/lib.rs
@@ -33,6 +33,7 @@
 #![feature(str_escape)]
 #![feature(unicode)]
 #![feature(question_mark)]
+#![feature(rustc_diagnostic_macros)]
 extern crate serialize;
 extern crate term;
@@ -66,6 +67,18 @@ macro_rules! panictry {
     })
 }
+#[macro_use]
+pub mod diagnostics {
+    #[macro_use]
+    pub mod macros;
+    pub mod plugin;
+    pub mod metadata;
+}
+
+// NB: This module needs to be declared first so diagnostics are
+// registered before they are used.
+pub mod diagnostic_list;
+
 pub mod util {
     pub mod interner;
     pub mod lev_distance;
@@ -80,12 +93,6 @@ pub mod util {
     pub use self::thin_vec::ThinVec;
 }
-pub mod diagnostics {
-    pub mod macros;
-    pub mod plugin;
-    pub mod metadata;
-}
-
 pub mod json;
 pub mod syntax {
@@ -130,3 +137,5 @@ pub mod ext {
         pub mod macro_rules;
     }
 }
+
+// __build_diagnostic_array! { libsyntax, DIAGNOSTICS }
diff --git a/syntex_syntax/src/parse/attr.rs b/syntex_syntax/src/parse/attr.rs
index f6e94b7c..15344cef 100644
--- a/syntex_syntax/src/parse/attr.rs
+++ b/syntex_syntax/src/parse/attr.rs
@@ -160,12 +160,9 @@ impl<'a> Parser<'a> {
             _ => None,
         };
-        match nt_meta {
-            Some(meta) => {
-                self.bump();
-                return Ok(meta);
-            }
-            None => {}
+        if let Some(meta) = nt_meta {
+            self.bump();
+            return Ok(meta);
         }
         let lo = self.span.lo;
diff --git a/syntex_syntax/src/parse/lexer/mod.rs b/syntex_syntax/src/parse/lexer/mod.rs
index 809f4daa..77b5c108 100644
--- a/syntex_syntax/src/parse/lexer/mod.rs
+++ b/syntex_syntax/src/parse/lexer/mod.rs
@@ -470,15 +470,12 @@ impl<'a> StringReader<'a> {
     /// PRECONDITION: self.curr is not whitespace
     /// Eats any kind of comment.
     fn scan_comment(&mut self) -> Option {
-        match self.curr {
-            Some(c) => {
-                if c.is_whitespace() {
-                    self.span_diagnostic.span_err(syntax_pos::mk_sp(self.last_pos, self.last_pos),
-                                                  "called consume_any_line_comment, but there \
-                                                   was whitespace");
-                }
+        if let Some(c) = self.curr {
+            if c.is_whitespace() {
+                self.span_diagnostic.span_err(syntax_pos::mk_sp(self.last_pos, self.last_pos),
+                                              "called consume_any_line_comment, but there \
+                                               was whitespace");
             }
-            None => {}
         }
         if self.curr_is('/') {
diff --git a/syntex_syntax/src/parse/mod.rs b/syntex_syntax/src/parse/mod.rs
index bbcc044d..9502bc48 100644
--- a/syntex_syntax/src/parse/mod.rs
+++ b/syntex_syntax/src/parse/mod.rs
@@ -662,6 +662,7 @@ pub fn integer_lit(s: &str,
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::rc::Rc;
     use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, PatKind};
@@ -763,7 +764,7 @@ mod tests {
                         )
                         if first_delimed.delim == token::Paren
                         && ident.name.as_str() == "a" => {},
-                        _ => panic!("value 3: {:?}", *first_delimed),
+                        _ => panic!("value 3: {:?}", **first_delimed),
                     }
                     let tts = &second_delimed.tts[..];
                     match (tts.len(), tts.get(0), tts.get(1)) {
@@ -774,10 +775,10 @@ mod tests {
                         )
                         if second_delimed.delim == token::Paren
                         && ident.name.as_str() == "a" => {},
-                        _ => panic!("value 4: {:?}", *second_delimed),
+                        _ => panic!("value 4: {:?}", **second_delimed),
                     }
                 },
-                _ => panic!("value 2: {:?}", *macro_delimed),
+                _ => panic!("value 2: {:?}", **macro_delimed),
             }
         },
         _ => panic!("value: {:?}",tts),
@@ -793,7 +794,7 @@ mod tests {
             TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
@@ -802,10 +803,10 @@ mod tests {
                        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
                     ],
                     close_span: sp(13, 14),
-                }),
+                })),
             TokenTree::Delimited(
                 sp(15, 21),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
@@ -813,7 +814,7 @@ mod tests {
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
-                })
+                }))
         ];
         assert_eq!(tts, expected);
diff --git a/syntex_syntax/src/parse/parser.rs b/syntex_syntax/src/parse/parser.rs
index 20a54228..4cf14e62 100644
--- a/syntex_syntax/src/parse/parser.rs
+++ b/syntex_syntax/src/parse/parser.rs
@@ -2688,12 +2688,13 @@ impl<'a> Parser<'a> {
             )?;
             let (sep, repeat) = self.parse_sep_and_kleene_op()?;
             let name_num = macro_parser::count_names(&seq);
-            return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), SequenceRepetition {
-                tts: seq,
-                separator: sep,
-                op: repeat,
-                num_captures: name_num
-            }));
+            return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
+                                          Rc::new(SequenceRepetition {
+                                              tts: seq,
+                                              separator: sep,
+                                              op: repeat,
+                                              num_captures: name_num
+                                          })));
         } else if self.token.is_keyword(keywords::Crate) {
             self.bump();
             return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
@@ -2752,9 +2753,8 @@ impl<'a> Parser<'a> {
             }
         };
-        match parse_kleene_op(self)? {
-            Some(kleene_op) => return Ok((None, kleene_op)),
-            None => {}
+        if let Some(kleene_op) = parse_kleene_op(self)? {
+            return Ok((None, kleene_op));
         }
         let separator = self.bump_and_get();
@@ -2849,12 +2849,12 @@ impl<'a> Parser<'a> {
                 _ => {}
             }
-            Ok(TokenTree::Delimited(span, Delimited {
+            Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                 delim: delim,
                 open_span: open_span,
                 tts: tts,
                 close_span: close_span,
-            }))
+            })))
         },
         _ => {
             // invariants: the current token is not a left-delimiter,
@@ -5297,15 +5297,22 @@ impl<'a> Parser<'a> {
     /// Parse a `mod  { ... }` or `mod ;` item
     fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
+        let outer_attrs = ::config::StripUnconfigured {
+            config: &self.cfg,
+            sess: self.sess,
+            should_test: false, // irrelevant
+            features: None, // don't perform gated feature checking
+        }.process_cfg_attrs(outer_attrs.to_owned());
+
         let id_span = self.span;
         let id = self.parse_ident()?;
         if self.check(&token::Semi) {
             self.bump();
             // This mod is in an external file. Let's go get it!
-            let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span)?;
+            let (m, attrs) = self.eval_src_mod(id, &outer_attrs, id_span)?;
             Ok((id, m, Some(attrs)))
         } else {
-            self.push_mod_path(id, outer_attrs);
+            self.push_mod_path(id, &outer_attrs);
             self.expect(&token::OpenDelim(token::Brace))?;
             let mod_inner_lo = self.span.lo;
             let attrs = self.parse_inner_attributes()?;
@@ -5691,15 +5698,12 @@ impl<'a> Parser<'a> {
             }
             _ => None
         };
-        match nt_item {
-            Some(mut item) => {
-                self.bump();
-                let mut attrs = attrs;
-                mem::swap(&mut item.attrs, &mut attrs);
-                item.attrs.extend(attrs);
-                return Ok(Some(P(item)));
-            }
-            None => {}
+        if let Some(mut item) = nt_item {
+            self.bump();
+            let mut attrs = attrs;
+            mem::swap(&mut item.attrs, &mut attrs);
+            item.attrs.extend(attrs);
+            return Ok(Some(P(item)));
         }
         let lo = self.span.lo;
diff --git a/syntex_syntax/src/print/pprust.rs b/syntex_syntax/src/print/pprust.rs
index 94b71661..ce30c3de 100644
--- a/syntex_syntax/src/print/pprust.rs
+++ b/syntex_syntax/src/print/pprust.rs
@@ -1264,13 +1264,10 @@ impl<'a> State<'a> {
             _ => {}
         }
-        match *opt_trait {
-            Some(ref t) => {
-                try!(self.print_trait_ref(t));
-                try!(space(&mut self.s));
-                try!(self.word_space("for"));
-            }
-            None => {}
+        if let Some(ref t) = *opt_trait {
+            try!(self.print_trait_ref(t));
+            try!(space(&mut self.s));
+            try!(self.word_space("for"));
         }
         try!(self.print_type(&ty));
@@ -1470,11 +1467,8 @@ impl<'a> State<'a> {
                     try!(self.print_tt(tt_elt));
                 }
                 try!(word(&mut self.s, ")"));
-                match seq.separator {
-                    Some(ref tk) => {
-                        try!(word(&mut self.s, &token_to_string(tk)));
-                    }
-                    None => {},
+                if let Some(ref tk) = seq.separator {
+                    try!(word(&mut self.s, &token_to_string(tk)));
                 }
                 match seq.op {
                     tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
diff --git a/syntex_syntax/src/tokenstream.rs b/syntex_syntax/src/tokenstream.rs
index 35377d14..f0f0a7bc 100644
--- a/syntex_syntax/src/tokenstream.rs
+++ b/syntex_syntax/src/tokenstream.rs
@@ -21,6 +21,8 @@ use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::lexer;
 use parse::token;
+use std::rc::Rc;
+
 /// A delimited sequence of token trees
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct Delimited {
@@ -94,13 +96,13 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Delimited),
+    Delimited(Span, Rc),
     // This only makes sense in MBE macros.
     /// A kleene-style repetition sequence with a span
     // FIXME(eddyb) #12938 Use DST.
-    Sequence(Span, SequenceRepetition),
+    Sequence(Span, Rc),
 }
 impl TokenTree {
@@ -149,7 +151,7 @@ impl TokenTree {
                 Some(*cnt)
             }).max().unwrap_or(0);
-            TokenTree::Delimited(sp, Delimited {
+            TokenTree::Delimited(sp, Rc::new(Delimited {
                 delim: token::Bracket,
                 open_span: sp,
                 tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
@@ -157,7 +159,7 @@ impl TokenTree {
                           TokenTree::Token(sp, token::Literal(
                               token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
                 close_span: sp,
-            })
+            }))
         }
         (&TokenTree::Delimited(_, ref delimed), _) => {
             if index == 0 {