From cb8728f8430dc3820440f829d9e493735494673d Mon Sep 17 00:00:00 2001
From: David Tolnay
Date: Sat, 2 Jul 2016 18:07:45 -0700
Subject: [PATCH] Update syntex to 0.37

---
 quasi/Cargo.toml         |  7 ++--
 quasi/src/lib.rs         | 59 +++++++++++++++++---------------
 quasi_codegen/Cargo.toml | 11 +++---
 quasi_codegen/src/lib.rs | 74 ++++++++++++++++++++--------------------
 quasi_macros/Cargo.toml  |  8 ++---
 quasi_tests/Cargo.toml   | 10 +++---
 6 files changed, 88 insertions(+), 81 deletions(-)

diff --git a/quasi/Cargo.toml b/quasi/Cargo.toml
index fd0ec42d..fd13cdf8 100644
--- a/quasi/Cargo.toml
+++ b/quasi/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "quasi"
-version = "0.13.0"
+version = "0.14.0"
 authors = ["Erick Tryzelaar "]
 license = "MIT/Apache-2.0"
 description = "A quasi-quoting macro system"
@@ -8,9 +8,10 @@ repository = "https://github.com/serde-rs/quasi"
 include = ["Cargo.toml", "src/**/*.rs"]

 [features]
-with-syntex = ["syntex_syntax"]
+with-syntex = ["syntex_syntax", "syntex_errors"]
 unstable-testing = ["clippy"]

 [dependencies]
-syntex_syntax = { version = "^0.36.0", optional = true }
 clippy = { version = "^0.*", optional = true }
+syntex_errors = { version = "^0.37.0", optional = true }
+syntex_syntax = { version = "^0.37.0", optional = true }
diff --git a/quasi/src/lib.rs b/quasi/src/lib.rs
index bace1f4c..6aea928f 100644
--- a/quasi/src/lib.rs
+++ b/quasi/src/lib.rs
@@ -15,23 +15,28 @@
 #[macro_use]
 #[cfg(feature = "with-syntex")]
 extern crate syntex_syntax as syntax;
+#[cfg(feature = "with-syntex")]
+extern crate syntex_errors as errors;

 #[macro_use]
 #[cfg(not(feature = "with-syntex"))]
 extern crate syntax;
+#[cfg(not(feature = "with-syntex"))]
+extern crate rustc_errors as errors;

 use std::iter;
 use std::marker;
-use std::rc::Rc;
 use std::usize;

-use syntax::ast::{self, TokenTree};
+use syntax::ast;
 use syntax::codemap::{DUMMY_SP, Spanned, dummy_spanned};
 use syntax::ext::base::ExtCtxt;
-use syntax::parse::{self, classify, parse_tts_from_source_str, token};
 use syntax::parse::parser::Parser;
+use syntax::parse::{self, classify, parse_tts_from_source_str, token};
 use syntax::print::pprust;
 use syntax::ptr::P;
+use syntax::tokenstream::{self, TokenTree};
+use syntax::util::ThinVec;

 pub trait ToTokens {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree>;
 }
@@ -81,49 +86,49 @@ impl<T: ToTokens> ToTokens for Option<T> {
 impl ToTokens for ast::Ident {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(DUMMY_SP, token::Ident(*self))]
+        vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
     }
 }

 impl ToTokens for ast::Path {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))]
+        vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))]
     }
 }

 impl ToTokens for ast::Ty {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
     }
 }

 impl ToTokens for P<ast::Ty> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtTy(self.clone())))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(self.clone())))]
     }
 }

 impl ToTokens for P<ast::Block> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtBlock(self.clone())))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(self.clone())))]
     }
 }

 impl ToTokens for P<ast::Item> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))]
     }
 }

 impl ToTokens for P<ast::ImplItem> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
     }
 }

 impl ToTokens for P<ast::TraitItem> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtTraitItem(self.clone())))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtTraitItem(self.clone())))]
     }
 }
@@ -157,12 +162,12 @@ impl ToTokens for ast::WhereClause {
 impl ToTokens for ast::Stmt {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
         let mut tts = vec![
-            ast::TokenTree::Token(self.span, token::Interpolated(token::NtStmt(P(self.clone()))))
+            TokenTree::Token(self.span, token::Interpolated(token::NtStmt(P(self.clone()))))
         ];

         // Some statements require a trailing semicolon.
         if classify::stmt_ends_with_semi(&self.node) {
-            tts.push(ast::TokenTree::Token(self.span, token::Semi));
+            tts.push(TokenTree::Token(self.span, token::Semi));
         }

         tts
@@ -171,19 +176,19 @@ impl ToTokens for ast::Stmt {
 impl ToTokens for P<ast::Expr> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))]
     }
 }

 impl ToTokens for P<ast::Pat> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))]
+        vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))]
     }
 }

 impl ToTokens for ast::Arm {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
+        vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
     }
 }
@@ -204,12 +209,12 @@ macro_rules! impl_to_tokens_slice {
     };
 }

-impl_to_tokens_slice! { ast::Ty, [ast::TokenTree::Token(DUMMY_SP, token::Comma)] }
+impl_to_tokens_slice! { ast::Ty, [TokenTree::Token(DUMMY_SP, token::Comma)] }

 impl_to_tokens_slice! { P<ast::Item>, [] }

 impl ToTokens for P<ast::MetaItem> {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
+        vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
     }
 }
@@ -217,16 +222,16 @@ impl ToTokens for ast::Attribute {
     fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
         let mut r = vec![];
         // FIXME: The spans could be better
-        r.push(ast::TokenTree::Token(self.span, token::Pound));
+        r.push(TokenTree::Token(self.span, token::Pound));
         if self.node.style == ast::AttrStyle::Inner {
-            r.push(ast::TokenTree::Token(self.span, token::Not));
+            r.push(TokenTree::Token(self.span, token::Not));
         }
-        r.push(ast::TokenTree::Delimited(self.span, Rc::new(ast::Delimited {
+        r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
             delim: token::Bracket,
             open_span: self.span,
             tts: self.node.value.to_tokens(cx),
             close_span: self.span,
-        })));
+        }));
         r
     }
 }
@@ -242,12 +247,12 @@ impl ToTokens for str {
 impl ToTokens for () {
     fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-        vec![ast::TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited {
+        vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
             delim: token::Paren,
             open_span: DUMMY_SP,
             tts: vec![],
             close_span: DUMMY_SP,
-        }))]
+        })]
     }
 }
@@ -258,7 +263,7 @@ impl ToTokens for ast::Lit {
             id: ast::DUMMY_NODE_ID,
             node: ast::ExprKind::Lit(P(self.clone())),
             span: DUMMY_SP,
-            attrs: None,
+            attrs: ThinVec::new(),
         }).to_tokens(cx)
     }
 }
@@ -329,7 +334,7 @@ pub trait ExtParseUtils {
     fn parse_item(&self, s: String) -> P<ast::Item>;
     fn parse_expr(&self, s: String) -> P<ast::Expr>;
     fn parse_stmt(&self, s: String) -> ast::Stmt;
-    fn parse_tts(&self, s: String) -> Vec<ast::TokenTree>;
+    fn parse_tts(&self, s: String) -> Vec<TokenTree>;
 }

 impl<'a> ExtParseUtils for ExtCtxt<'a> {
@@ -358,7 +363,7 @@ impl<'a> ExtParseUtils for ExtCtxt<'a> {
                                  self.parse_sess()))
     }

-    fn parse_tts(&self, s: String) -> Vec<ast::TokenTree> {
+    fn parse_tts(&self, s: String) -> Vec<TokenTree> {
         panictry!(parse::parse_tts_from_source_str(
             "".to_string(),
             s,
diff --git a/quasi_codegen/Cargo.toml b/quasi_codegen/Cargo.toml
index 2b4c289e..b725f187 100644
--- a/quasi_codegen/Cargo.toml
+++ b/quasi_codegen/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "quasi_codegen"
-version = "0.13.0"
+version = "0.14.0"
 authors = ["Erick Tryzelaar "]
 license = "MIT/Apache-2.0"
 description = "A quasi-quoting macro system"
@@ -9,11 +9,12 @@ include = ["Cargo.toml", "src/**/*.rs"]

 [features]
 default = ["with-syntex"]
-with-syntex = ["syntex", "syntex_syntax", "aster/with-syntex"]
+with-syntex = ["syntex", "syntex_errors", "syntex_syntax", "aster/with-syntex"]
 unstable-testing = ["clippy"]

 [dependencies]
-aster = { version = "^0.19.0", default-features = false }
+aster = { version = "^0.20.0", default-features = false }
 clippy = { version = "^0.*", optional = true }
-syntex = { version = "^0.36.0", optional = true }
-syntex_syntax = { version = "^0.36.0", optional = true }
+syntex = { version = "^0.37.0", optional = true }
+syntex_errors = { version = "^0.37.0", optional = true }
+syntex_syntax = { version = "^0.37.0", optional = true }
diff --git a/quasi_codegen/src/lib.rs b/quasi_codegen/src/lib.rs
index 32c0e691..59dc77b2 100644
--- a/quasi_codegen/src/lib.rs
+++ b/quasi_codegen/src/lib.rs
@@ -16,15 +16,17 @@ extern crate aster;

 #[cfg(feature = "with-syntex")]
 extern crate syntex;
-
 #[macro_use]
 #[cfg(feature = "with-syntex")]
 extern crate syntex_syntax as syntax;
+#[cfg(feature = "with-syntex")]
+extern crate syntex_errors as errors;

 #[macro_use]
 #[cfg(not(feature = "with-syntex"))]
 extern crate syntax;
-
+#[cfg(not(feature = "with-syntex"))]
+extern crate rustc_errors as errors;
 #[cfg(not(feature = "with-syntex"))]
 extern crate rustc_plugin;
@@ -38,6 +40,7 @@ use syntax::ext::base;
 use syntax::parse::token::*;
 use syntax::parse::token;
 use syntax::ptr::P;
+use syntax::tokenstream::{self, TokenTree};

 /// Quasiquoting works via token trees.
 ///
@@ -49,7 +52,7 @@ use syntax::ptr::P;
 fn expand_quote_tokens<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree],
+    tts: &[TokenTree],
 ) -> Box<base::MacResult+'cx> {
     let (cx_expr, expr) = expand_tts(cx, sp, tts);
     let expanded = expand_wrapper(sp, cx_expr, expr);
@@ -59,7 +62,7 @@ fn expand_quote_ty<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -73,7 +76,7 @@ fn expand_quote_expr<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -87,7 +90,7 @@ fn expand_quote_stmt<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -101,7 +104,7 @@ fn expand_quote_attr<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let builder = aster::AstBuilder::new().span(sp);
@@ -118,7 +121,7 @@ fn expand_quote_matcher<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let builder = aster::AstBuilder::new().span(sp);
@@ -141,7 +144,7 @@ fn expand_quote_pat<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -155,7 +158,7 @@ fn expand_quote_arm<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -169,7 +172,7 @@ fn expand_quote_block<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -183,7 +186,7 @@ fn expand_quote_item<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -197,7 +200,7 @@ fn expand_quote_impl_item<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
-    tts: &[ast::TokenTree]
+    tts: &[TokenTree]
 ) -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(
         cx,
@@ -211,7 +214,7 @@
 /*
 fn expand_quote_where_clause<'cx>(cx: &mut ExtCtxt,
                                   sp: Span,
-                                  tts: &[ast::TokenTree])
+                                  tts: &[TokenTree])
                                   -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(cx, sp, "parse_where_clause",
                                      vec!(), tts);
@@ -238,7 +241,7 @@ fn mk_name<T>(builder: &aster::AstBuilder, name: T) -> P<ast::Expr>
 fn mk_tt_path(builder: &aster::AstBuilder, name: &str) -> P<ast::Expr> {
     builder.expr().path()
         .global()
-        .ids(&["syntax", "ast", "TokenTree", name])
+        .ids(&["syntax", "tokenstream", "TokenTree", name])
         .build()
 }
@@ -446,11 +449,11 @@ struct QuoteStmts {
     idents: Vec<ast::Ident>,
 }

-fn statements_mk_tt(tt: &ast::TokenTree, matcher: bool) -> Result {
+fn statements_mk_tt(tt: &TokenTree, matcher: bool) -> Result {
     let builder = aster::AstBuilder::new();

     match *tt {
-        ast::TokenTree::Token(sp, SubstNt(ident)) => {
+        TokenTree::Token(sp, SubstNt(ident)) => {
             // tt.extend($ident.to_tokens(ext_cx).into_iter())

             let builder = builder.span(sp);
@@ -480,14 +483,14 @@ fn statements_mk_tt(tt: &ast::TokenTree, matcher: bool) -> Result
-        ref tt @ ast::TokenTree::Token(_, MatchNt(..)) if !matcher => {
+        ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
             let mut seq = vec![];
             for i in 0..tt.len() {
                 seq.push(tt.get_tt(i));
             }
             statements_mk_tts(&seq[..], matcher)
         }
-        ast::TokenTree::Token(sp, ref tok) => {
+        TokenTree::Token(sp, ref tok) => {
             let builder = builder.span(sp);

             let e_tok = builder.expr().call()
@@ -506,7 +509,7 @@ fn statements_mk_tt(tt: &ast::TokenTree, matcher: bool) -> Result
-        ast::TokenTree::Delimited(_, ref delimed) => {
+        TokenTree::Delimited(_, ref delimed) => {
             let delimited = try!(statements_mk_tts(&delimed.tts[..], matcher));
             let open = try!(statements_mk_tt(&delimed.open_tt(), matcher)).stmts.into_iter();
             let close = try!(statements_mk_tt(&delimed.close_tt(), matcher)).stmts.into_iter();
@@ -515,7 +518,7 @@ fn statements_mk_tt(tt: &ast::TokenTree, matcher: bool) -> Result
-        ast::TokenTree::Sequence(sp, ref seq) if matcher => {
+        TokenTree::Sequence(sp, ref seq) if matcher => {
             let builder = builder.span(sp);

             let e_sp = builder.expr().id("_sp");
@@ -537,29 +540,26 @@ fn statements_mk_tt(tt: &ast::TokenTree, matcher: bool) -> Result
-            ast::KleeneOp::ZeroOrMore => "ZeroOrMore",
-            ast::KleeneOp::OneOrMore => "OneOrMore",
+            tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore",
+            tokenstream::KleeneOp::OneOrMore => "OneOrMore",
             };
             let e_op = builder.expr().path()
                 .global()
-                .ids(&["syntax", "ast", "KleeneOp", e_op])
+                .ids(&["syntax", "tokenstream", "KleeneOp", e_op])
                 .build();

             let e_seq_struct = builder.expr().struct_()
-                .global().ids(&["syntax", "ast", "SequenceRepetition"]).build()
+                .global().ids(&["syntax", "tokenstream", "SequenceRepetition"]).build()
                 .field("tts").build(e_tts)
                 .field("separator").build(e_separator)
                 .field("op").build(e_op)
                 .field("num_captures").usize(seq.num_captures)
                 .build();

-            let e_rc_new = builder.expr().rc()
-                .build(e_seq_struct);
-
             let e_tok = builder.expr().call()
                 .build(mk_tt_path(&builder, "Sequence"))
                 .arg().build(e_sp)
-                .arg().build(e_rc_new)
+                .arg().build(e_seq_struct)
                 .build();

             let e_push = builder.expr().method_call("push").id("tt")
@@ -571,7 +571,7 @@ fn statements_mk_tt(tt: &ast::TokenTree, matcher: bool) -> Result
-        ast::TokenTree::Sequence(sp, ref seq) => {
+        TokenTree::Sequence(sp, ref seq) => {
             // Repeating fragments in a loop:
             // for (...(a, b), ...) in a.into_wrapped_iter()
             //         .zip_wrap(b.into_wrapped_iter())...
@@ -583,7 +583,7 @@ fn statements_mk_tt(tt: &ast::TokenTree, matcher: bool) -> Result
-fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
-                            -> (P<ast::Expr>, Vec<ast::TokenTree>) {
+fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
+                            -> (P<ast::Expr>, Vec<TokenTree>) {
     // NB: It appears that the main parser loses its mind if we consider
     // $foo as a SubstNt during the main parse, so we have to re-parse
     // under quote_depth > 0. This is silly and should go away; the _guess_ is
@@ -715,7 +715,7 @@ fn mk_stmts_let(builder: &aster::AstBuilder) -> Vec<ast::Stmt> {
     vec!(stmt_let_sp, stmt_let_tt)
 }

-fn statements_mk_tts(tts: &[ast::TokenTree], matcher: bool) -> Result {
+fn statements_mk_tts(tts: &[TokenTree], matcher: bool) -> Result {
     let mut ret = QuoteStmts { stmts: vec![], idents: vec![] };
     for tt in tts {
         let QuoteStmts { stmts, idents } = try!(statements_mk_tt(tt, matcher));
@@ -725,7 +725,7 @@ fn statements_mk_tts(tts: &[ast::TokenTree], matcher: bool) -> Result
-fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-              -> (P<ast::Expr>, P<ast::Expr>) {
+fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree])
+              -> (P<ast::Expr>, P<ast::Expr>) {
     let builder = aster::AstBuilder::new().span(sp);
@@ -771,7 +771,7 @@ fn expand_parse_call(cx: &ExtCtxt,
                      sp: Span,
                      parse_method: &[&str],
                      arg_exprs: Vec<P<ast::Expr>> ,
-    tts: &[ast::TokenTree]) -> P<ast::Expr> {
+    tts: &[TokenTree]) -> P<ast::Expr> {
     let builder = aster::AstBuilder::new().span(sp);

     let (cx_expr, tts_expr) = expand_tts(cx, sp, tts);
diff --git a/quasi_macros/Cargo.toml b/quasi_macros/Cargo.toml
index b8ab85c1..7c83d7dc 100644
--- a/quasi_macros/Cargo.toml
+++ b/quasi_macros/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "quasi_macros"
-version = "0.13.0"
+version = "0.14.0"
 authors = ["Erick Tryzelaar "]
 license = "MIT/Apache-2.0"
 description = "A quasi-quoting macro system"
@@ -16,8 +16,8 @@ unstable-testing = ["clippy", "quasi/unstable-testing", "quasi_codegen/unstable-

 [dependencies]
 clippy = { version = "^0.*", optional = true }
-quasi_codegen = { version = "^0.13.0", path = "../quasi_codegen", default-features = false }
+quasi_codegen = { version = "^0.14.0", path = "../quasi_codegen", default-features = false }

 [dev-dependencies]
-aster = "^0.19.0"
-quasi = { version = "^0.13.0", path = "../quasi" }
+aster = "^0.20.0"
+quasi = { version = "^0.14.0", path = "../quasi" }
diff --git a/quasi_tests/Cargo.toml b/quasi_tests/Cargo.toml
index cb0e67d4..6b192f2d 100644
--- a/quasi_tests/Cargo.toml
+++ b/quasi_tests/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "quasi_tests"
-version = "0.13.0"
+version = "0.14.0"
 authors = ["Erick Tryzelaar "]
 license = "MIT/Apache-2.0"
 description = "A quasi-quoting macro system"
@@ -12,13 +12,13 @@ unstable-testing = ["clippy", "quasi/unstable-testing", "quasi_codegen/unstable-

 [build-dependencies]
 quasi_codegen = { path = "../quasi_codegen" }
-syntex = { version = "^0.36.0" }
+syntex = { version = "^0.37.0" }

 [dependencies]
 clippy = { version = "^0.*", optional = true }

 [dev-dependencies]
-aster = { version = "^0.19.0", features = ["with-syntex"] }
+aster = { version = "^0.20.0", features = ["with-syntex"] }
 quasi = { path = "../quasi", features = ["with-syntex"] }
-syntex = { version = "^0.36.0" }
-syntex_syntax = { version = "^0.36.0" }
+syntex = { version = "^0.37.0" }
+syntex_syntax = { version = "^0.37.0" }