This repository has been archived by the owner on May 6, 2020. It is now read-only.

Commit

Merge branch 'rust'
# Conflicts:
#	syntex_syntax/src/diagnostic.rs
#	syntex_syntax/src/lib.rs
erickt committed Nov 9, 2015
2 parents 80b52b0 + 6ab68f0 commit a26e399
Showing 27 changed files with 445 additions and 445 deletions.
8 changes: 4 additions & 4 deletions syntex_syntax/Cargo.toml
@@ -1,15 +1,15 @@
[package]
name = "syntex_syntax"
version = "0.19.1"
version = "0.20.0"
authors = [ "[email protected]" ]
license = "MIT/Apache-2.0"
description = "Export of libsyntax for code generation"
repository = "https://github.com/erickt/rust-syntex"

[dependencies]
bitflags = "^0.3.2"
libc = "^0.1.10"
log = "^0.3.2"
libc = "^0.2.1"
log = "^0.3.3"
rustc-serialize = "^0.3.16"
term = "^0.2.11"
term = "^0.2.13"
unicode-xid = "^0.0.3"
93 changes: 44 additions & 49 deletions syntex_syntax/src/ast.rs
@@ -31,13 +31,11 @@ pub use self::MetaItem_::*;
pub use self::Mutability::*;
pub use self::Pat_::*;
pub use self::PathListItem_::*;
pub use self::PatWildKind::*;
pub use self::PrimTy::*;
pub use self::Sign::*;
pub use self::Stmt_::*;
pub use self::StrStyle::*;
pub use self::StructFieldKind::*;
pub use self::TokenTree::*;
pub use self::TraitItem_::*;
pub use self::Ty_::*;
pub use self::TyParamBound::*;
@@ -569,19 +567,10 @@ pub enum BindingMode {
BindByValue(Mutability),
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum PatWildKind {
/// Represents the wildcard pattern `_`
PatWildSingle,

/// Represents the wildcard pattern `..`
PatWildMulti,
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Pat_ {
/// Represents a wildcard pattern (either `_` or `..`)
PatWild(PatWildKind),
/// Represents a wildcard pattern (`_`)
PatWild,

/// A PatIdent may either be a new bound variable,
/// or a nullary enum (in which case the third field
@@ -593,7 +582,7 @@ pub enum Pat_ {
/// set (of "PatIdents that refer to nullary enums")
PatIdent(BindingMode, SpannedIdent, Option<P<Pat>>),

/// "None" means a * pattern where we don't bind the fields to names.
/// "None" means a `Variant(..)` pattern where we don't bind the fields to names.
PatEnum(Path, Option<Vec<P<Pat>>>),

/// An associated const named using the qualified path `<T>::CONST` or
@@ -615,8 +604,8 @@ pub enum Pat_ {
PatLit(P<Expr>),
/// A range pattern, e.g. `1...2`
PatRange(P<Expr>, P<Expr>),
/// [a, b, ..i, y, z] is represented as:
/// PatVec(box [a, b], Some(i), box [y, z])
/// `[a, b, ..i, y, z]` is represented as:
/// `PatVec(box [a, b], Some(i), box [y, z])`
PatVec(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>),
/// A macro pattern; pre-expansion
PatMac(Mac),
@@ -964,12 +953,12 @@ impl Delimited {

/// Returns the opening delimiter as a token tree.
pub fn open_tt(&self) -> TokenTree {
TtToken(self.open_span, self.open_token())
TokenTree::Token(self.open_span, self.open_token())
}

/// Returns the closing delimiter as a token tree.
pub fn close_tt(&self) -> TokenTree {
TtToken(self.close_span, self.close_token())
TokenTree::Token(self.close_span, self.close_token())
}
}

@@ -1009,61 +998,61 @@ pub enum KleeneOp {
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TokenTree {
/// A single token
TtToken(Span, token::Token),
Token(Span, token::Token),
/// A delimited sequence of token trees
TtDelimited(Span, Rc<Delimited>),
Delimited(Span, Rc<Delimited>),

// This only makes sense in MBE macros.

/// A kleene-style repetition sequence with a span
// FIXME(eddyb) #12938 Use DST.
TtSequence(Span, Rc<SequenceRepetition>),
Sequence(Span, Rc<SequenceRepetition>),
}

impl TokenTree {
pub fn len(&self) -> usize {
match *self {
TtToken(_, token::DocComment(name)) => {
TokenTree::Token(_, token::DocComment(name)) => {
match doc_comment_style(&name.as_str()) {
AttrStyle::Outer => 2,
AttrStyle::Inner => 3
}
}
TtToken(_, token::SpecialVarNt(..)) => 2,
TtToken(_, token::MatchNt(..)) => 3,
TtDelimited(_, ref delimed) => {
TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
TokenTree::Token(_, token::MatchNt(..)) => 3,
TokenTree::Delimited(_, ref delimed) => {
delimed.tts.len() + 2
}
TtSequence(_, ref seq) => {
TokenTree::Sequence(_, ref seq) => {
seq.tts.len()
}
TtToken(..) => 0
TokenTree::Token(..) => 0
}
}

pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TtToken(sp, token::DocComment(_)), 0) => {
TtToken(sp, token::Pound)
(&TokenTree::Token(sp, token::DocComment(_)), 0) => {
TokenTree::Token(sp, token::Pound)
}
(&TtToken(sp, token::DocComment(name)), 1)
(&TokenTree::Token(sp, token::DocComment(name)), 1)
if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
TtToken(sp, token::Not)
TokenTree::Token(sp, token::Not)
}
(&TtToken(sp, token::DocComment(name)), _) => {
(&TokenTree::Token(sp, token::DocComment(name)), _) => {
let stripped = strip_doc_comment_decoration(&name.as_str());
TtDelimited(sp, Rc::new(Delimited {
TokenTree::Delimited(sp, Rc::new(Delimited {
delim: token::Bracket,
open_span: sp,
tts: vec![TtToken(sp, token::Ident(token::str_to_ident("doc"),
token::Plain)),
TtToken(sp, token::Eq),
TtToken(sp, token::Literal(
tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"),
token::Plain)),
TokenTree::Token(sp, token::Eq),
TokenTree::Token(sp, token::Literal(
token::StrRaw(token::intern(&stripped), 0), None))],
close_span: sp,
}))
}
(&TtDelimited(_, ref delimed), _) => {
(&TokenTree::Delimited(_, ref delimed), _) => {
if index == 0 {
return delimed.open_tt();
}
@@ -1072,19 +1061,19 @@ impl TokenTree {
}
delimed.tts[index - 1].clone()
}
(&TtToken(sp, token::SpecialVarNt(var)), _) => {
let v = [TtToken(sp, token::Dollar),
TtToken(sp, token::Ident(token::str_to_ident(var.as_str()),
(&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
let v = [TokenTree::Token(sp, token::Dollar),
TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()),
token::Plain))];
v[index].clone()
}
(&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
let v = [TtToken(sp, token::SubstNt(name, name_st)),
TtToken(sp, token::Colon),
TtToken(sp, token::Ident(kind, kind_st))];
(&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)),
TokenTree::Token(sp, token::Colon),
TokenTree::Token(sp, token::Ident(kind, kind_st))];
v[index].clone()
}
(&TtSequence(_, ref seq), _) => {
(&TokenTree::Sequence(_, ref seq), _) => {
seq.tts[index].clone()
}
_ => panic!("Cannot expand a token tree")
@@ -1094,9 +1083,9 @@ impl TokenTree {
/// Returns the `Span` corresponding to this token tree.
pub fn get_span(&self) -> Span {
match *self {
TtToken(span, _) => span,
TtDelimited(span, _) => span,
TtSequence(span, _) => span,
TokenTree::Token(span, _) => span,
TokenTree::Delimited(span, _) => span,
TokenTree::Sequence(span, _) => span,
}
}

@@ -1736,6 +1725,12 @@ impl StructFieldKind {
NamedField(..) => false,
}
}

pub fn visibility(&self) -> Visibility {
match *self {
NamedField(_, vis) | UnnamedField(vis) => vis
}
}
}

/// Fields and Ids of enum variants and structs
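For downstream users of syntex_syntax, the ast.rs hunks amount to two breaking changes: `PatWild(PatWildKind)` collapses into a unit-like `PatWild`, and the glob-exported `TtToken`/`TtDelimited`/`TtSequence` names become namespaced `TokenTree` variants. A minimal migration sketch against the 0.20 API shown above (the function names and match arms are illustrative, not part of the crate):

use syntex_syntax::ast::{self, TokenTree};

// 0.19.x wrote `TtToken(..)`, `TtDelimited(..)`, `TtSequence(..)` via the glob re-exports.
fn describe(tt: &TokenTree) -> &'static str {
    match *tt {
        TokenTree::Token(_, _) => "a single token",
        TokenTree::Delimited(_, _) => "a delimited group",
        TokenTree::Sequence(_, _) => "a macro repetition",
    }
}

// 0.19.x wrote `ast::PatWild(ast::PatWildSingle)`; the `..` wildcard kind is gone.
fn is_wild(pat: &ast::Pat) -> bool {
    match pat.node {
        ast::PatWild => true,
        _ => false,
    }
}
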
19 changes: 10 additions & 9 deletions syntex_syntax/src/diagnostic.rs
@@ -206,9 +206,9 @@ impl Handler {
can_emit_warnings: can_emit_warnings
}
}
pub fn fatal(&self, msg: &str) -> ! {
pub fn fatal(&self, msg: &str) -> FatalError {
self.emit.borrow_mut().emit(None, msg, None, Fatal);
panic!(FatalError);
FatalError
}
pub fn err(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Error);
@@ -226,14 +226,15 @@ impl Handler {
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
}
self.fatal(&s[..]);

panic!(self.fatal(&s[..]));
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
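The diagnostic change is a signature break: `Handler::fatal` now returns a `FatalError` value instead of diverging, and `abort_if_errors` panics on that value explicitly. A hedged sketch of a caller that wants the old abort-on-fatal behaviour back (the helper is illustrative, not part of the crate):

use syntex_syntax::diagnostic::Handler;

// Report a fatal diagnostic and unwind, mirroring the new
// `panic!(self.fatal(..))` call in `abort_if_errors` above.
fn report_and_abort(handler: &Handler, msg: &str) -> ! {
    // `fatal` emits the message and hands back a `FatalError`; panicking on the
    // returned value restores the pre-0.20 "never returns" behaviour.
    panic!(handler.fatal(msg))
}
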
14 changes: 7 additions & 7 deletions syntex_syntax/src/diagnostics/plugin.rs
@@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match (token_tree.len(), token_tree.get(0)) {
(1, Some(&ast::TtToken(_, token::Ident(code, _)))) => code,
(1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
_ => unreachable!()
};

@@ -92,12 +92,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
token_tree.get(1),
token_tree.get(2)
) {
(1, Some(&ast::TtToken(_, token::Ident(ref code, _))), None, None) => {
(1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
(code, None)
},
(3, Some(&ast::TtToken(_, token::Ident(ref code, _))),
Some(&ast::TtToken(_, token::Comma)),
Some(&ast::TtToken(_, token::Literal(token::StrRaw(description, _), None)))) => {
(3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
Some(&TokenTree::Token(_, token::Comma)),
Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
(code, Some(description))
}
_ => unreachable!()
@@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
(
// Crate name.
&ast::TtToken(_, token::Ident(ref crate_name, _)),
&TokenTree::Token(_, token::Ident(ref crate_name, _)),
// DIAGNOSTICS ident.
&ast::TtToken(_, token::Ident(ref name, _))
&TokenTree::Token(_, token::Ident(ref name, _))
) => (*&crate_name, name),
_ => unreachable!()
};
10 changes: 5 additions & 5 deletions syntex_syntax/src/ext/asm.rs
@@ -80,7 +80,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
cx.span_err(sp, "malformed inline assembly");
return DummyResult::expr(sp);
}
let (s, style) = match expr_to_string(cx, p.parse_expr(),
let (s, style) = match expr_to_string(cx, panictry!(p.parse_expr_nopanic()),
"inline assembly must be a string literal") {
Some((s, st)) => (s, st),
// let compilation continue
@@ -103,7 +103,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let span = p.last_span;

panictry!(p.expect(&token::OpenDelim(token::Paren)));
let out = p.parse_expr();
let out = panictry!(p.parse_expr_nopanic());
panictry!(p.expect(&token::CloseDelim(token::Paren)));

// Expands a read+write operand into two operands.
@@ -140,14 +140,14 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])

let (constraint, _str_style) = panictry!(p.parse_str());

if constraint.starts_with("=") {
if constraint.starts_with("=") && !constraint.contains("*") {
cx.span_err(p.last_span, "input operand constraint contains '='");
} else if constraint.starts_with("+") {
} else if constraint.starts_with("+") && !constraint.contains("*") {
cx.span_err(p.last_span, "input operand constraint contains '+'");
}

panictry!(p.expect(&token::OpenDelim(token::Paren)));
let input = p.parse_expr();
let input = panictry!(p.parse_expr_nopanic());
panictry!(p.expect(&token::CloseDelim(token::Paren)));

inputs.push((constraint, input));
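The asm.rs hunks, and the base.rs and cfg.rs hunks below, all follow one pattern: panicking parser entry points such as `parse_expr` are replaced by `Result`-returning `*_nopanic` variants, unwrapped at the call site with the crate's `panictry!` macro. A self-contained sketch of that shape with stand-in types; the macro body here is the obvious one and not necessarily the crate's exact definition:

// Stand-in for syntex_syntax's `diagnostic::FatalError`.
#[derive(Debug)]
struct FatalError;

// Stand-in for the crate-internal `panictry!`: unwrap an Ok value or turn the
// error back into a panic, preserving the old panicking behaviour at call sites.
macro_rules! panictry {
    ($e:expr) => {
        match $e {
            Ok(v) => v,
            Err(err) => panic!("{:?}", err),
        }
    };
}

// Toy `parse_expr_nopanic`: failure is reported through `Result` instead of a
// panic inside the parser.
fn parse_expr_nopanic(src: &str) -> Result<i64, FatalError> {
    src.trim().parse().map_err(|_| FatalError)
}

fn main() {
    // Call sites such as `expand_asm` wrap the new entry points like this.
    let expr = panictry!(parse_expr_nopanic("42"));
    println!("parsed {}", expr);
}
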
6 changes: 3 additions & 3 deletions syntex_syntax/src/ext/base.rs
@@ -356,7 +356,7 @@ impl DummyResult {
pub fn raw_pat(sp: Span) -> ast::Pat {
ast::Pat {
id: ast::DUMMY_NODE_ID,
node: ast::PatWild(ast::PatWildSingle),
node: ast::PatWild,
span: sp,
}
}
@@ -811,7 +811,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
cx.span_err(sp, &format!("{} takes 1 argument", name));
return None
}
let ret = cx.expander().fold_expr(p.parse_expr());
let ret = cx.expander().fold_expr(panictry!(p.parse_expr_nopanic()));
if p.token != token::Eof {
cx.span_err(sp, &format!("{} takes 1 argument", name));
}
@@ -828,7 +828,7 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
let mut p = cx.new_parser_from_tts(tts);
let mut es = Vec::new();
while p.token != token::Eof {
es.push(cx.expander().fold_expr(p.parse_expr()));
es.push(cx.expander().fold_expr(panictry!(p.parse_expr_nopanic())));
if panictry!(p.eat(&token::Comma)){
continue;
}
2 changes: 1 addition & 1 deletion syntex_syntax/src/ext/build.rs
@@ -801,7 +801,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span })
}
fn pat_wild(&self, span: Span) -> P<ast::Pat> {
self.pat(span, ast::PatWild(ast::PatWildSingle))
self.pat(span, ast::PatWild)
}
fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat> {
self.pat(span, ast::PatLit(expr))
3 changes: 1 addition & 2 deletions syntex_syntax/src/ext/cfg.rs
@@ -19,15 +19,14 @@ use ext::base;
use ext::build::AstBuilder;
use attr;
use attr::*;
use parse::attr::ParserAttr;
use parse::token;

pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let mut p = cx.new_parser_from_tts(tts);
let cfg = p.parse_meta_item();
let cfg = panictry!(p.parse_meta_item());

if !panictry!(p.eat(&token::Eof)){
cx.span_err(sp, "expected 1 cfg-pattern");