From 294c7fc606bacbc8e29214eec7aea55dcf320b76 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Wed, 29 Jul 2015 10:25:23 -0700 Subject: [PATCH] Sync with Rust HEAD (ddbce11) --- syntex_syntax/src/ast.rs | 58 +- syntex_syntax/src/ast_util.rs | 4 +- syntex_syntax/src/codemap.rs | 198 +++-- syntex_syntax/src/diagnostic.rs | 830 ++++++++++---------- syntex_syntax/src/diagnostics/macros.rs | 6 + syntex_syntax/src/diagnostics/plugin.rs | 25 +- syntex_syntax/src/ext/base.rs | 6 + syntex_syntax/src/ext/build.rs | 5 +- syntex_syntax/src/ext/concat_idents.rs | 4 +- syntex_syntax/src/ext/deriving/decodable.rs | 16 +- syntex_syntax/src/ext/deriving/encodable.rs | 8 +- syntex_syntax/src/ext/deriving/show.rs | 6 +- syntex_syntax/src/ext/expand.rs | 222 +++++- syntex_syntax/src/ext/format.rs | 3 +- syntex_syntax/src/ext/pushpop_safe.rs | 94 +++ syntex_syntax/src/ext/quote.rs | 4 +- syntex_syntax/src/ext/source_util.rs | 8 +- syntex_syntax/src/ext/tt/macro_parser.rs | 14 +- syntex_syntax/src/ext/tt/macro_rules.rs | 25 +- syntex_syntax/src/ext/tt/transcribe.rs | 10 +- syntex_syntax/src/feature_gate.rs | 81 +- syntex_syntax/src/fold.rs | 3 +- syntex_syntax/src/lib.rs | 2 + syntex_syntax/src/parse/lexer/comments.rs | 2 +- syntex_syntax/src/parse/lexer/mod.rs | 61 +- syntex_syntax/src/parse/mod.rs | 64 +- syntex_syntax/src/parse/obsolete.rs | 6 +- syntex_syntax/src/parse/parser.rs | 337 ++++---- syntex_syntax/src/parse/token.rs | 26 +- syntex_syntax/src/print/pprust.rs | 36 +- syntex_syntax/src/std_inject.rs | 3 +- 31 files changed, 1305 insertions(+), 862 deletions(-) create mode 100644 syntex_syntax/src/ext/pushpop_safe.rs diff --git a/syntex_syntax/src/ast.rs b/syntex_syntax/src/ast.rs index e844b206..db173d08 100644 --- a/syntex_syntax/src/ast.rs +++ b/syntex_syntax/src/ast.rs @@ -29,7 +29,6 @@ pub use self::Item_::*; pub use self::KleeneOp::*; pub use self::Lit_::*; pub use self::LitIntType::*; -pub use self::LocalSource::*; pub use self::Mac_::*; pub use 
self::MacStmtStyle::*; pub use self::MetaItem_::*; @@ -63,6 +62,7 @@ use owned_slice::OwnedSlice; use parse::token::{InternedString, str_to_ident}; use parse::token; use parse::lexer; +use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use print::pprust; use ptr::P; @@ -87,10 +87,6 @@ pub struct Ident { impl Ident { /// Construct an identifier with the given name and an empty context: pub fn new(name: Name) -> Ident { Ident {name: name, ctxt: EMPTY_CTXT}} - - pub fn as_str<'a>(&'a self) -> &'a str { - self.name.as_str() - } } impl fmt::Debug for Ident { @@ -108,13 +104,13 @@ impl fmt::Display for Ident { impl fmt::Debug for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let Name(nm) = *self; - write!(f, "{:?}({})", token::get_name(*self), nm) + write!(f, "{}({})", self, nm) } } impl fmt::Display for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&token::get_name(*self), f) + fmt::Display::fmt(&self.as_str(), f) } } @@ -134,13 +130,10 @@ impl PartialEq for Ident { // one example and its non-hygienic counterpart would be: // syntax::parse::token::Token::mtwt_eq // syntax::ext::tt::macro_parser::token_name_eq - panic!("not allowed to compare these idents: {}, {}. \ + panic!("not allowed to compare these idents: {:?}, {:?}. \ Probably related to issue \\#6993", self, other); } } - fn ne(&self, other: &Ident) -> bool { - ! 
self.eq(other) - } } /// A SyntaxContext represents a chain of macro-expandings @@ -166,12 +159,15 @@ pub const ILLEGAL_CTXT : SyntaxContext = 1; RustcEncodable, RustcDecodable, Clone, Copy)] pub struct Name(pub u32); +impl> PartialEq for Name { + fn eq(&self, other: &T) -> bool { + self.as_str() == other.as_ref() + } +} + impl Name { - pub fn as_str<'a>(&'a self) -> &'a str { - unsafe { - // FIXME #12938: can't use copy_lifetime since &str isn't a &T - ::std::mem::transmute::<&str,&str>(&token::get_name(*self)) - } + pub fn as_str(&self) -> token::InternedString { + token::InternedString::new_from_name(*self) } pub fn usize(&self) -> usize { @@ -189,7 +185,7 @@ pub type Mrk = u32; impl Encodable for Ident { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&token::get_ident(*self)) + s.emit_str(&self.name.as_str()) } } @@ -755,14 +751,6 @@ pub enum MacStmtStyle { MacStmtWithoutBraces, } -/// Where a local declaration came from: either a true `let ... = -/// ...;`, or one desugared from the pattern of a for loop. -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] -pub enum LocalSource { - LocalLet, - LocalFor, -} - // FIXME (pending discussion of #1697, #2178...): local should really be // a refinement on pat. 
/// Local represents a `let` statement, e.g., `let : = ;` @@ -774,7 +762,6 @@ pub struct Local { pub init: Option>, pub id: NodeId, pub span: Span, - pub source: LocalSource, } pub type Decl = Spanned; @@ -809,6 +796,8 @@ pub type SpannedIdent = Spanned; pub enum BlockCheckMode { DefaultBlock, UnsafeBlock(UnsafeSource), + PushUnsafeBlock(UnsafeSource), + PopUnsafeBlock(UnsafeSource), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] @@ -1079,7 +1068,12 @@ pub enum TokenTree { impl TokenTree { pub fn len(&self) -> usize { match *self { - TtToken(_, token::DocComment(_)) => 2, + TtToken(_, token::DocComment(name)) => { + match doc_comment_style(&name.as_str()) { + AttrOuter => 2, + AttrInner => 3 + } + } TtToken(_, token::SpecialVarNt(..)) => 2, TtToken(_, token::MatchNt(..)) => 3, TtDelimited(_, ref delimed) => { @@ -1097,14 +1091,20 @@ impl TokenTree { (&TtToken(sp, token::DocComment(_)), 0) => { TtToken(sp, token::Pound) } - (&TtToken(sp, token::DocComment(name)), 1) => { + (&TtToken(sp, token::DocComment(name)), 1) + if doc_comment_style(&name.as_str()) == AttrInner => { + TtToken(sp, token::Not) + } + (&TtToken(sp, token::DocComment(name)), _) => { + let stripped = strip_doc_comment_decoration(&name.as_str()); TtDelimited(sp, Rc::new(Delimited { delim: token::Bracket, open_span: sp, tts: vec![TtToken(sp, token::Ident(token::str_to_ident("doc"), token::Plain)), TtToken(sp, token::Eq), - TtToken(sp, token::Literal(token::Str_(name), None))], + TtToken(sp, token::Literal( + token::StrRaw(token::intern(&stripped), 0), None))], close_span: sp, })) } diff --git a/syntex_syntax/src/ast_util.rs b/syntex_syntax/src/ast_util.rs index 7d7ea371..83d3c9c4 100644 --- a/syntex_syntax/src/ast_util.rs +++ b/syntex_syntax/src/ast_util.rs @@ -25,9 +25,7 @@ use std::u32; pub fn path_name_i(idents: &[Ident]) -> String { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") - idents.iter().map(|i| { - 
token::get_ident(*i).to_string() - }).collect::>().connect("::") + idents.iter().map(|i| i.to_string()).collect::>().join("::") } pub fn local_def(id: NodeId) -> DefId { diff --git a/syntex_syntax/src/codemap.rs b/syntex_syntax/src/codemap.rs index 5ddcfaef..17e6b2c2 100644 --- a/syntex_syntax/src/codemap.rs +++ b/syntex_syntax/src/codemap.rs @@ -115,6 +115,10 @@ impl Sub for CharPos { /// are *absolute* positions from the beginning of the codemap, not positions /// relative to FileMaps. Methods on the CodeMap can be used to relate spans back /// to the original source. +/// You must be careful if the span crosses more than one file - you will not be +/// able to use many of the functions on spans in codemap and you cannot assume +/// that the length of the span = hi - lo; there may be space in the BytePos +/// range between files. #[derive(Clone, Copy, Hash)] pub struct Span { pub lo: BytePos, @@ -339,7 +343,7 @@ pub struct MultiByteChar { pub bytes: usize, } -/// A single source in the CodeMap +/// A single source in the CodeMap. pub struct FileMap { /// The name of the file that the source came from, source that doesn't /// originate from files has names between angle brackets by convention, @@ -508,6 +512,9 @@ impl FileMap { lines.get(line_number).map(|&line| { let begin: BytePos = line - self.start_pos; let begin = begin.to_usize(); + // We can't use `lines.get(line_number+1)` because we might + // be parsing when we call this function and thus the current + // line is the last one we have line info for. let slice = &src[begin..]; match slice.find('\n') { Some(e) => &slice[..e], @@ -598,27 +605,27 @@ impl CodeMap { Ok(self.new_filemap(path.to_str().unwrap().to_string(), src)) } + fn next_start_pos(&self) -> usize { + let files = self.files.borrow(); + match files.last() { + None => 0, + // Add one so there is some space between files. This lets us distinguish + // positions in the codemap, even in the presence of zero-length files. 
+ Some(last) => last.end_pos.to_usize() + 1, + } + } + + /// Creates a new filemap without setting its line information. If you don't + /// intend to set the line information yourself, you should use new_filemap_and_lines. pub fn new_filemap(&self, filename: FileName, mut src: String) -> Rc { + let start_pos = self.next_start_pos(); let mut files = self.files.borrow_mut(); - let start_pos = match files.last() { - None => 0, - Some(last) => last.end_pos.to_usize(), - }; // Remove utf-8 BOM if any. if src.starts_with("\u{feff}") { src.drain(..3); } - // Append '\n' in case it's not already there. - // This is a workaround to prevent CodeMap.lookup_filemap_idx from - // accidentally overflowing into the next filemap in case the last byte - // of span is also the last byte of filemap, which leads to incorrect - // results from CodeMap.span_to_*. - if !src.is_empty() && !src.ends_with("\n") { - src.push('\n'); - } - let end_pos = start_pos + src.len(); let filemap = Rc::new(FileMap { @@ -635,6 +642,21 @@ impl CodeMap { filemap } + /// Creates a new filemap and sets its line information. + pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Rc { + let fm = self.new_filemap(filename.to_string(), src.to_owned()); + let mut byte_pos: u32 = 0; + for line in src.lines() { + // register the start of this line + fm.next_line(BytePos(byte_pos)); + + // update byte_pos to include this line and the \n at the end + byte_pos += line.len() as u32 + 1; + } + fm + } + + /// Allocates a new FileMap representing a source file from an external /// crate. 
The source code of such an "imported filemap" is not available, /// but we still know enough to generate accurate debuginfo location @@ -645,11 +667,8 @@ impl CodeMap { mut file_local_lines: Vec, mut file_local_multibyte_chars: Vec) -> Rc { + let start_pos = self.next_start_pos(); let mut files = self.files.borrow_mut(); - let start_pos = match files.last() { - None => 0, - Some(last) => last.end_pos.to_usize(), - }; let end_pos = Pos::from_usize(start_pos + source_len); let start_pos = Pos::from_usize(start_pos); @@ -686,39 +705,61 @@ impl CodeMap { /// Lookup source information about a BytePos pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { - let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos); - let line = a + 1; // Line numbers start at 1 let chpos = self.bytepos_to_file_charpos(pos); - let linebpos = (*f.lines.borrow())[a]; - let linechpos = self.bytepos_to_file_charpos(linebpos); - debug!("byte pos {:?} is on the line at byte pos {:?}", - pos, linebpos); - debug!("char pos {:?} is on the line at char pos {:?}", - chpos, linechpos); - debug!("byte is on line: {}", line); - assert!(chpos >= linechpos); - Loc { - file: f, - line: line, - col: chpos - linechpos + match self.lookup_line(pos) { + Ok(FileMapAndLine { fm: f, line: a }) => { + let line = a + 1; // Line numbers start at 1 + let linebpos = (*f.lines.borrow())[a]; + let linechpos = self.bytepos_to_file_charpos(linebpos); + debug!("byte pos {:?} is on the line at byte pos {:?}", + pos, linebpos); + debug!("char pos {:?} is on the line at char pos {:?}", + chpos, linechpos); + debug!("byte is on line: {}", line); + assert!(chpos >= linechpos); + Loc { + file: f, + line: line, + col: chpos - linechpos, + } + } + Err(f) => { + Loc { + file: f, + line: 0, + col: chpos, + } + } } } - fn lookup_line(&self, pos: BytePos) -> FileMapAndLine { + // If the relevant filemap is empty, we don't return a line number. 
+ fn lookup_line(&self, pos: BytePos) -> Result> { let idx = self.lookup_filemap_idx(pos); let files = self.files.borrow(); let f = (*files)[idx].clone(); + + let len = f.lines.borrow().len(); + if len == 0 { + return Err(f); + } + let mut a = 0; { let lines = f.lines.borrow(); let mut b = lines.len(); while b - a > 1 { let m = (a + b) / 2; - if (*lines)[m] > pos { b = m; } else { a = m; } + if (*lines)[m] > pos { + b = m; + } else { + a = m; + } } + assert!(a <= lines.len()); } - FileMapAndLine {fm: f, line: a} + Ok(FileMapAndLine { fm: f, line: a }) } pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { @@ -853,7 +894,7 @@ impl CodeMap { FileMapAndBytePos {fm: fm, pos: offset} } - /// Converts an absolute BytePos to a CharPos relative to the filemap and above. + /// Converts an absolute BytePos to a CharPos relative to the filemap. pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { let idx = self.lookup_filemap_idx(bpos); let files = self.files.borrow(); @@ -880,12 +921,15 @@ impl CodeMap { CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes) } + // Return the index of the filemap (in self.files) which contains pos. fn lookup_filemap_idx(&self, pos: BytePos) -> usize { let files = self.files.borrow(); let files = &*files; - let len = files.len(); + let count = files.len(); + + // Binary search for the filemap. let mut a = 0; - let mut b = len; + let mut b = count; while b - a > 1 { let m = (a + b) / 2; if files[m].start_pos > pos { @@ -894,26 +938,8 @@ impl CodeMap { a = m; } } - // There can be filemaps with length 0. These have the same start_pos as - // the previous filemap, but are not the filemaps we want (because they - // are length 0, they cannot contain what we are looking for). So, - // rewind until we find a useful filemap. 
- loop { - let lines = files[a].lines.borrow(); - let lines = lines; - if !lines.is_empty() { - break; - } - if a == 0 { - panic!("position {} does not resolve to a source location", - pos.to_usize()); - } - a -= 1; - } - if a >= len { - panic!("position {} does not resolve to a source location", - pos.to_usize()) - } + + assert!(a < count, "position {} does not resolve to a source location", pos.to_usize()); return a; } @@ -954,6 +980,10 @@ impl CodeMap { mac_span.lo <= span.lo && span.hi <= mac_span.hi }); + debug!("span_allows_unstable: span: {:?} call_site: {:?} callee: {:?}", + (span.lo, span.hi), + (info.call_site.lo, info.call_site.hi), + info.callee.span.map(|x| (x.lo, x.hi))); debug!("span_allows_unstable: from this expansion? {}, allows unstable? {}", span_comes_from_this_expansion, info.callee.allow_internal_unstable); @@ -1027,10 +1057,13 @@ mod tests { let fm = cm.new_filemap("blork.rs".to_string(), "first line.\nsecond line".to_string()); fm.next_line(BytePos(0)); + // Test we can get lines with partial line info. assert_eq!(fm.get_line(0), Some("first line.")); - // TESTING BROKEN BEHAVIOR: + // TESTING BROKEN BEHAVIOR: line break declared before actual line break. 
fm.next_line(BytePos(10)); assert_eq!(fm.get_line(1), Some(".")); + fm.next_line(BytePos(12)); + assert_eq!(fm.get_line(2), Some("second line")); } #[test] @@ -1056,9 +1089,9 @@ mod tests { fm1.next_line(BytePos(0)); fm1.next_line(BytePos(12)); - fm2.next_line(BytePos(24)); - fm3.next_line(BytePos(24)); - fm3.next_line(BytePos(34)); + fm2.next_line(fm2.start_pos); + fm3.next_line(fm3.start_pos); + fm3.next_line(fm3.start_pos + BytePos(12)); cm } @@ -1068,11 +1101,15 @@ mod tests { // Test lookup_byte_offset let cm = init_code_map(); - let fmabp1 = cm.lookup_byte_offset(BytePos(22)); + let fmabp1 = cm.lookup_byte_offset(BytePos(23)); assert_eq!(fmabp1.fm.name, "blork.rs"); - assert_eq!(fmabp1.pos, BytePos(22)); + assert_eq!(fmabp1.pos, BytePos(23)); + + let fmabp1 = cm.lookup_byte_offset(BytePos(24)); + assert_eq!(fmabp1.fm.name, "empty.rs"); + assert_eq!(fmabp1.pos, BytePos(0)); - let fmabp2 = cm.lookup_byte_offset(BytePos(24)); + let fmabp2 = cm.lookup_byte_offset(BytePos(25)); assert_eq!(fmabp2.fm.name, "blork2.rs"); assert_eq!(fmabp2.pos, BytePos(0)); } @@ -1085,7 +1122,7 @@ mod tests { let cp1 = cm.bytepos_to_file_charpos(BytePos(22)); assert_eq!(cp1, CharPos(22)); - let cp2 = cm.bytepos_to_file_charpos(BytePos(24)); + let cp2 = cm.bytepos_to_file_charpos(BytePos(25)); assert_eq!(cp2, CharPos(0)); } @@ -1099,7 +1136,7 @@ mod tests { assert_eq!(loc1.line, 2); assert_eq!(loc1.col, CharPos(10)); - let loc2 = cm.lookup_char_pos(BytePos(24)); + let loc2 = cm.lookup_char_pos(BytePos(25)); assert_eq!(loc2.file.name, "blork2.rs"); assert_eq!(loc2.line, 1); assert_eq!(loc2.col, CharPos(0)); @@ -1115,18 +1152,18 @@ mod tests { "first line€€.\n€ second line".to_string()); fm1.next_line(BytePos(0)); - fm1.next_line(BytePos(22)); - fm2.next_line(BytePos(40)); - fm2.next_line(BytePos(58)); + fm1.next_line(BytePos(28)); + fm2.next_line(fm2.start_pos); + fm2.next_line(fm2.start_pos + BytePos(20)); fm1.record_multibyte_char(BytePos(3), 3); fm1.record_multibyte_char(BytePos(9), 
3); fm1.record_multibyte_char(BytePos(12), 3); fm1.record_multibyte_char(BytePos(15), 3); fm1.record_multibyte_char(BytePos(18), 3); - fm2.record_multibyte_char(BytePos(50), 3); - fm2.record_multibyte_char(BytePos(53), 3); - fm2.record_multibyte_char(BytePos(58), 3); + fm2.record_multibyte_char(fm2.start_pos + BytePos(10), 3); + fm2.record_multibyte_char(fm2.start_pos + BytePos(13), 3); + fm2.record_multibyte_char(fm2.start_pos + BytePos(18), 3); cm } @@ -1172,19 +1209,6 @@ mod tests { Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION } } - fn new_filemap_and_lines(cm: &CodeMap, filename: &str, input: &str) -> Rc { - let fm = cm.new_filemap(filename.to_string(), input.to_string()); - let mut byte_pos: u32 = 0; - for line in input.lines() { - // register the start of this line - fm.next_line(BytePos(byte_pos)); - - // update byte_pos to include this line and the \n at the end - byte_pos += line.len() as u32 + 1; - } - fm - } - /// Test span_to_snippet and span_to_lines for a span coverting 3 /// lines in the middle of a file. #[test] @@ -1192,7 +1216,7 @@ mod tests { let cm = CodeMap::new(); let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; let selection = " \n ^~\n~~~\n~~~~~ \n \n"; - new_filemap_and_lines(&cm, "blork.rs", inputtext); + cm.new_filemap_and_lines("blork.rs", inputtext); let span = span_from_selection(inputtext, selection); // check that we are extracting the text we thought we were extracting diff --git a/syntex_syntax/src/diagnostic.rs b/syntex_syntax/src/diagnostic.rs index ec93d2c5..7476302b 100644 --- a/syntex_syntax/src/diagnostic.rs +++ b/syntex_syntax/src/diagnostic.rs @@ -208,6 +208,10 @@ impl Handler { } pub fn fatal(&self, msg: &str) -> ! { self.emit.borrow_mut().emit(None, msg, None, Fatal); + + // Suppress the fatal error message from the panic below as we've + // already terminated in our own "legitimate" fashion. 
+ io::set_panic(Box::new(io::sink())); panic!(FatalError); } pub fn err(&self, msg: &str) { @@ -308,63 +312,6 @@ impl Level { } } -fn print_maybe_styled(w: &mut EmitterWriter, - msg: &str, - color: term::attr::Attr) -> io::Result<()> { - match w.dst { - Terminal(ref mut t) => { - try!(t.attr(color)); - // If `msg` ends in a newline, we need to reset the color before - // the newline. We're making the assumption that we end up writing - // to a `LineBufferedWriter`, which means that emitting the reset - // after the newline ends up buffering the reset until we print - // another line or exit. Buffering the reset is a problem if we're - // sharing the terminal with any other programs (e.g. other rustc - // instances via `make -jN`). - // - // Note that if `msg` contains any internal newlines, this will - // result in the `LineBufferedWriter` flushing twice instead of - // once, which still leaves the opportunity for interleaved output - // to be miscolored. We assume this is rare enough that we don't - // have to worry about it. 
- if msg.ends_with("\n") { - try!(t.write_all(msg[..msg.len()-1].as_bytes())); - try!(t.reset()); - try!(t.write_all(b"\n")); - } else { - try!(t.write_all(msg.as_bytes())); - try!(t.reset()); - } - Ok(()) - } - Raw(ref mut w) => w.write_all(msg.as_bytes()), - } -} - -fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, - msg: &str, code: Option<&str>) -> io::Result<()> { - if !topic.is_empty() { - try!(write!(&mut dst.dst, "{} ", topic)); - } - - try!(print_maybe_styled(dst, - &format!("{}: ", lvl.to_string()), - term::attr::ForegroundColor(lvl.color()))); - try!(print_maybe_styled(dst, - &format!("{}", msg), - term::attr::Bold)); - - match code { - Some(code) => { - let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); - try!(print_maybe_styled(dst, &format!(" [{}]", code.clone()), style)); - } - None => () - } - try!(write!(&mut dst.dst, "\n")); - Ok(()) -} - pub struct EmitterWriter { dst: Destination, registry: Option @@ -375,6 +322,20 @@ enum Destination { Raw(Box), } +/// Do not use this for messages that end in `\n` – use `println_maybe_styled` instead. See +/// `EmitterWriter::print_maybe_styled` for details. +macro_rules! print_maybe_styled { + ($writer: expr, $style: expr, $($arg: tt)*) => { + $writer.print_maybe_styled(format_args!($($arg)*), $style, false) + } +} + +macro_rules! 
println_maybe_styled { + ($writer: expr, $style: expr, $($arg: tt)*) => { + $writer.print_maybe_styled(format_args!($($arg)*), $style, true) + } +} + impl EmitterWriter { pub fn stderr(color_config: ColorConfig, registry: Option) -> EmitterWriter { @@ -401,6 +362,396 @@ impl EmitterWriter { registry: Option) -> EmitterWriter { EmitterWriter { dst: Raw(dst), registry: registry } } + + fn print_maybe_styled(&mut self, + args: fmt::Arguments, + color: term::attr::Attr, + print_newline_at_end: bool) -> io::Result<()> { + match self.dst { + Terminal(ref mut t) => { + try!(t.attr(color)); + // If `msg` ends in a newline, we need to reset the color before + // the newline. We're making the assumption that we end up writing + // to a `LineBufferedWriter`, which means that emitting the reset + // after the newline ends up buffering the reset until we print + // another line or exit. Buffering the reset is a problem if we're + // sharing the terminal with any other programs (e.g. other rustc + // instances via `make -jN`). + // + // Note that if `msg` contains any internal newlines, this will + // result in the `LineBufferedWriter` flushing twice instead of + // once, which still leaves the opportunity for interleaved output + // to be miscolored. We assume this is rare enough that we don't + // have to worry about it. 
+ try!(t.write_fmt(args)); + try!(t.reset()); + if print_newline_at_end { + t.write_all(b"\n") + } else { + Ok(()) + } + } + Raw(ref mut w) => { + try!(w.write_fmt(args)); + if print_newline_at_end { + w.write_all(b"\n") + } else { + Ok(()) + } + } + } + } + + fn print_diagnostic(&mut self, topic: &str, lvl: Level, + msg: &str, code: Option<&str>) -> io::Result<()> { + if !topic.is_empty() { + try!(write!(&mut self.dst, "{} ", topic)); + } + + try!(print_maybe_styled!(self, term::attr::ForegroundColor(lvl.color()), + "{}: ", lvl.to_string())); + try!(print_maybe_styled!(self, term::attr::Bold, "{}", msg)); + + match code { + Some(code) => { + let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); + try!(print_maybe_styled!(self, style, " [{}]", code.clone())); + } + None => () + } + try!(write!(&mut self.dst, "\n")); + Ok(()) + } + + fn emit_(&mut self, cm: &codemap::CodeMap, rsp: RenderSpan, + msg: &str, code: Option<&str>, lvl: Level) -> io::Result<()> { + let sp = rsp.span(); + + // We cannot check equality directly with COMMAND_LINE_SP + // since PartialEq is manually implemented to ignore the ExpnId + let ss = if sp.expn_id == COMMAND_LINE_EXPN { + "".to_string() + } else if let EndSpan(_) = rsp { + let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; + cm.span_to_string(span_end) + } else { + cm.span_to_string(sp) + }; + + try!(self.print_diagnostic(&ss[..], lvl, msg, code)); + + match rsp { + FullSpan(_) => { + try!(self.highlight_lines(cm, sp, lvl, cm.span_to_lines(sp))); + try!(self.print_macro_backtrace(cm, sp)); + } + EndSpan(_) => { + try!(self.end_highlight_lines(cm, sp, lvl, cm.span_to_lines(sp))); + try!(self.print_macro_backtrace(cm, sp)); + } + Suggestion(_, ref suggestion) => { + try!(self.highlight_suggestion(cm, sp, suggestion)); + try!(self.print_macro_backtrace(cm, sp)); + } + FileLine(..) => { + // no source text in this case! 
+ } + } + + match code { + Some(code) => + match self.registry.as_ref().and_then(|registry| registry.find_description(code)) { + Some(_) => { + try!(self.print_diagnostic(&ss[..], Help, + &format!("run `rustc --explain {}` to see a \ + detailed explanation", code), None)); + } + None => () + }, + None => (), + } + Ok(()) + } + + fn highlight_suggestion(&mut self, + cm: &codemap::CodeMap, + sp: Span, + suggestion: &str) + -> io::Result<()> + { + let lines = cm.span_to_lines(sp).unwrap(); + assert!(!lines.lines.is_empty()); + + // To build up the result, we want to take the snippet from the first + // line that precedes the span, prepend that with the suggestion, and + // then append the snippet from the last line that trails the span. + let fm = &lines.file; + + let first_line = &lines.lines[0]; + let prefix = fm.get_line(first_line.line_index) + .map(|l| &l[..first_line.start_col.0]) + .unwrap_or(""); + + let last_line = lines.lines.last().unwrap(); + let suffix = fm.get_line(last_line.line_index) + .map(|l| &l[last_line.end_col.0..]) + .unwrap_or(""); + + let complete = format!("{}{}{}", prefix, suggestion, suffix); + + // print the suggestion without any line numbers, but leave + // space for them. This helps with lining up with previous + // snippets from the actual error being reported. + let fm = &*lines.file; + let mut lines = complete.lines(); + for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) 
{ + let elided_line_num = format!("{}", line_index+1); + try!(write!(&mut self.dst, "{0}:{1:2$} {3}\n", + fm.name, "", elided_line_num.len(), line)); + } + + // if we elided some lines, add an ellipsis + if lines.next().is_some() { + let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1); + try!(write!(&mut self.dst, "{0:1$} {0:2$} ...\n", + "", fm.name.len(), elided_line_num.len())); + } + + Ok(()) + } + + fn highlight_lines(&mut self, + cm: &codemap::CodeMap, + sp: Span, + lvl: Level, + lines: codemap::FileLinesResult) + -> io::Result<()> + { + let lines = match lines { + Ok(lines) => lines, + Err(_) => { + try!(write!(&mut self.dst, "(internal compiler error: unprintable span)\n")); + return Ok(()); + } + }; + + let fm = &*lines.file; + + let line_strings: Option> = + lines.lines.iter() + .map(|info| fm.get_line(info.line_index)) + .collect(); + + let line_strings = match line_strings { + None => { return Ok(()); } + Some(line_strings) => line_strings + }; + + // Display only the first MAX_LINES lines. + let all_lines = lines.lines.len(); + let display_lines = cmp::min(all_lines, MAX_LINES); + let display_line_infos = &lines.lines[..display_lines]; + let display_line_strings = &line_strings[..display_lines]; + + // Calculate the widest number to format evenly and fix #11715 + assert!(display_line_infos.len() > 0); + let mut max_line_num = display_line_infos[display_line_infos.len() - 1].line_index + 1; + let mut digits = 0; + while max_line_num > 0 { + max_line_num /= 10; + digits += 1; + } + + // Print the offending lines + for (line_info, line) in display_line_infos.iter().zip(display_line_strings) { + try!(write!(&mut self.dst, "{}:{:>width$} {}\n", + fm.name, + line_info.line_index + 1, + line, + width=digits)); + } + + // If we elided something, put an ellipsis. 
+ if display_lines < all_lines { + let last_line_index = display_line_infos.last().unwrap().line_index; + let s = format!("{}:{} ", fm.name, last_line_index + 1); + try!(write!(&mut self.dst, "{0:1$}...\n", "", s.len())); + } + + // FIXME (#3260) + // If there's one line at fault we can easily point to the problem + if lines.lines.len() == 1 { + let lo = cm.lookup_char_pos(sp.lo); + let mut digits = 0; + let mut num = (lines.lines[0].line_index + 1) / 10; + + // how many digits must be indent past? + while num > 0 { num /= 10; digits += 1; } + + let mut s = String::new(); + // Skip is the number of characters we need to skip because they are + // part of the 'filename:line ' part of the previous line. + let skip = fm.name.chars().count() + digits + 3; + for _ in 0..skip { + s.push(' '); + } + if let Some(orig) = fm.get_line(lines.lines[0].line_index) { + let mut col = skip; + let mut lastc = ' '; + let mut iter = orig.chars().enumerate(); + for (pos, ch) in iter.by_ref() { + lastc = ch; + if pos >= lo.col.to_usize() { break; } + // Whenever a tab occurs on the previous line, we insert one on + // the error-point-squiggly-line as well (instead of a space). + // That way the squiggly line will usually appear in the correct + // position. 
+ match ch { + '\t' => { + col += 8 - col%8; + s.push('\t'); + }, + _ => { + col += 1; + s.push(' '); + }, + } + } + + try!(write!(&mut self.dst, "{}", s)); + let mut s = String::from("^"); + let count = match lastc { + // Most terminals have a tab stop every eight columns by default + '\t' => 8 - col%8, + _ => 1, + }; + col += count; + s.extend(::std::iter::repeat('~').take(count)); + + let hi = cm.lookup_char_pos(sp.hi); + if hi.col != lo.col { + for (pos, ch) in iter { + if pos >= hi.col.to_usize() { break; } + let count = match ch { + '\t' => 8 - col%8, + _ => 1, + }; + col += count; + s.extend(::std::iter::repeat('~').take(count)); + } + } + + if s.len() > 1 { + // One extra squiggly is replaced by a "^" + s.pop(); + } + + try!(println_maybe_styled!(self, term::attr::ForegroundColor(lvl.color()), + "{}", s)); + } + } + Ok(()) + } + + /// Here are the differences between this and the normal `highlight_lines`: + /// `end_highlight_lines` will always put arrow on the last byte of the + /// span (instead of the first byte). Also, when the span is too long (more + /// than 6 lines), `end_highlight_lines` will print the first line, then + /// dot dot dot, then last line, whereas `highlight_lines` prints the first + /// six lines. 
+ #[allow(deprecated)] + fn end_highlight_lines(&mut self, + cm: &codemap::CodeMap, + sp: Span, + lvl: Level, + lines: codemap::FileLinesResult) + -> io::Result<()> { + let lines = match lines { + Ok(lines) => lines, + Err(_) => { + try!(write!(&mut self.dst, "(internal compiler error: unprintable span)\n")); + return Ok(()); + } + }; + + let fm = &*lines.file; + + let lines = &lines.lines[..]; + if lines.len() > MAX_LINES { + if let Some(line) = fm.get_line(lines[0].line_index) { + try!(write!(&mut self.dst, "{}:{} {}\n", fm.name, + lines[0].line_index + 1, line)); + } + try!(write!(&mut self.dst, "...\n")); + let last_line_index = lines[lines.len() - 1].line_index; + if let Some(last_line) = fm.get_line(last_line_index) { + try!(write!(&mut self.dst, "{}:{} {}\n", fm.name, + last_line_index + 1, last_line)); + } + } else { + for line_info in lines { + if let Some(line) = fm.get_line(line_info.line_index) { + try!(write!(&mut self.dst, "{}:{} {}\n", fm.name, + line_info.line_index + 1, line)); + } + } + } + let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1); + let hi = cm.lookup_char_pos(sp.hi); + let skip = last_line_start.chars().count(); + let mut s = String::new(); + for _ in 0..skip { + s.push(' '); + } + if let Some(orig) = fm.get_line(lines[0].line_index) { + let iter = orig.chars().enumerate(); + for (pos, ch) in iter { + // Span seems to use half-opened interval, so subtract 1 + if pos >= hi.col.to_usize() - 1 { break; } + // Whenever a tab occurs on the previous line, we insert one on + // the error-point-squiggly-line as well (instead of a space). + // That way the squiggly line will usually appear in the correct + // position. 
+ match ch { + '\t' => s.push('\t'), + _ => s.push(' '), + } + } + } + s.push('^'); + println_maybe_styled!(self, term::attr::ForegroundColor(lvl.color()), + "{}", s) + } + + fn print_macro_backtrace(&mut self, + cm: &codemap::CodeMap, + sp: Span) + -> io::Result<()> { + let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> { + match expn_info { + Some(ei) => { + let ss = ei.callee.span.map_or(String::new(), + |span| cm.span_to_string(span)); + let (pre, post) = match ei.callee.format { + codemap::MacroAttribute => ("#[", "]"), + codemap::MacroBang => ("", "!"), + codemap::CompilerExpansion => ("", ""), + }; + try!(self.print_diagnostic(&ss, Note, + &format!("in expansion of {}{}{}", + pre, + ei.callee.name, + post), + None)); + let ss = cm.span_to_string(ei.call_site); + try!(self.print_diagnostic(&ss, Note, "expansion site", None)); + Ok(Some(ei.call_site)) + } + None => Ok(None) + } + })); + cs.map_or(Ok(()), |call_site| self.print_macro_backtrace(cm, call_site)) + } } #[cfg(unix)] @@ -442,11 +793,11 @@ impl Emitter for EmitterWriter { cmsp: Option<(&codemap::CodeMap, Span)>, msg: &str, code: Option<&str>, lvl: Level) { let error = match cmsp { - Some((cm, COMMAND_LINE_SP)) => emit(self, cm, + Some((cm, COMMAND_LINE_SP)) => self.emit_(cm, FileLine(COMMAND_LINE_SP), msg, code, lvl), - Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl), - None => print_diagnostic(self, "", lvl, msg, code), + Some((cm, sp)) => self.emit_(cm, FullSpan(sp), msg, code, lvl), + None => self.print_diagnostic("", lvl, msg, code), }; match error { @@ -457,346 +808,13 @@ impl Emitter for EmitterWriter { fn custom_emit(&mut self, cm: &codemap::CodeMap, sp: RenderSpan, msg: &str, lvl: Level) { - match emit(self, cm, sp, msg, None, lvl) { + match self.emit_(cm, sp, msg, None, lvl) { Ok(()) => {} Err(e) => panic!("failed to print diagnostics: {:?}", e), } } } -fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, - msg: &str, code: 
Option<&str>, lvl: Level) -> io::Result<()> { - let sp = rsp.span(); - - // We cannot check equality directly with COMMAND_LINE_SP - // since PartialEq is manually implemented to ignore the ExpnId - let ss = if sp.expn_id == COMMAND_LINE_EXPN { - "".to_string() - } else if let EndSpan(_) = rsp { - let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; - cm.span_to_string(span_end) - } else { - cm.span_to_string(sp) - }; - - try!(print_diagnostic(dst, &ss[..], lvl, msg, code)); - - match rsp { - FullSpan(_) => { - try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); - try!(print_macro_backtrace(dst, cm, sp)); - } - EndSpan(_) => { - try!(end_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); - try!(print_macro_backtrace(dst, cm, sp)); - } - Suggestion(_, ref suggestion) => { - try!(highlight_suggestion(dst, cm, sp, suggestion)); - try!(print_macro_backtrace(dst, cm, sp)); - } - FileLine(..) => { - // no source text in this case! - } - } - - match code { - Some(code) => - match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { - Some(_) => { - try!(print_diagnostic(dst, &ss[..], Help, - &format!("run `rustc --explain {}` to see a detailed \ - explanation", code), None)); - } - None => () - }, - None => (), - } - Ok(()) -} - -fn highlight_suggestion(err: &mut EmitterWriter, - cm: &codemap::CodeMap, - sp: Span, - suggestion: &str) - -> io::Result<()> -{ - let lines = cm.span_to_lines(sp).unwrap(); - assert!(!lines.lines.is_empty()); - - // To build up the result, we want to take the snippet from the first - // line that precedes the span, prepend that with the suggestion, and - // then append the snippet from the last line that trails the span. 
- let fm = &lines.file; - - let first_line = &lines.lines[0]; - let prefix = fm.get_line(first_line.line_index) - .map(|l| &l[..first_line.start_col.0]) - .unwrap_or(""); - - let last_line = lines.lines.last().unwrap(); - let suffix = fm.get_line(last_line.line_index) - .map(|l| &l[last_line.end_col.0..]) - .unwrap_or(""); - - let complete = format!("{}{}{}", prefix, suggestion, suffix); - - // print the suggestion without any line numbers, but leave - // space for them. This helps with lining up with previous - // snippets from the actual error being reported. - let fm = &*lines.file; - let mut lines = complete.lines(); - for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) { - let elided_line_num = format!("{}", line_index+1); - try!(write!(&mut err.dst, "{0}:{1:2$} {3}\n", - fm.name, "", elided_line_num.len(), line)); - } - - // if we elided some lines, add an ellipsis - if lines.next().is_some() { - let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1); - try!(write!(&mut err.dst, "{0:1$} {0:2$} ...\n", - "", fm.name.len(), elided_line_num.len())); - } - - Ok(()) -} - -fn highlight_lines(err: &mut EmitterWriter, - cm: &codemap::CodeMap, - sp: Span, - lvl: Level, - lines: codemap::FileLinesResult) - -> io::Result<()> -{ - let lines = match lines { - Ok(lines) => lines, - Err(_) => { - try!(write!(&mut err.dst, "(internal compiler error: unprintable span)\n")); - return Ok(()); - } - }; - - let fm = &*lines.file; - - let line_strings: Option> = - lines.lines.iter() - .map(|info| fm.get_line(info.line_index)) - .collect(); - - let line_strings = match line_strings { - None => { return Ok(()); } - Some(line_strings) => line_strings - }; - - // Display only the first MAX_LINES lines. 
- let all_lines = lines.lines.len(); - let display_lines = cmp::min(all_lines, MAX_LINES); - let display_line_infos = &lines.lines[..display_lines]; - let display_line_strings = &line_strings[..display_lines]; - - // Calculate the widest number to format evenly and fix #11715 - assert!(display_line_infos.len() > 0); - let mut max_line_num = display_line_infos[display_line_infos.len() - 1].line_index + 1; - let mut digits = 0; - while max_line_num > 0 { - max_line_num /= 10; - digits += 1; - } - - // Print the offending lines - for (line_info, line) in display_line_infos.iter().zip(display_line_strings) { - try!(write!(&mut err.dst, "{}:{:>width$} {}\n", - fm.name, - line_info.line_index + 1, - line, - width=digits)); - } - - // If we elided something, put an ellipsis. - if display_lines < all_lines { - let last_line_index = display_line_infos.last().unwrap().line_index; - let s = format!("{}:{} ", fm.name, last_line_index + 1); - try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len())); - } - - // FIXME (#3260) - // If there's one line at fault we can easily point to the problem - if lines.lines.len() == 1 { - let lo = cm.lookup_char_pos(sp.lo); - let mut digits = 0; - let mut num = (lines.lines[0].line_index + 1) / 10; - - // how many digits must be indent past? - while num > 0 { num /= 10; digits += 1; } - - let mut s = String::new(); - // Skip is the number of characters we need to skip because they are - // part of the 'filename:line ' part of the previous line. - let skip = fm.name.chars().count() + digits + 3; - for _ in 0..skip { - s.push(' '); - } - if let Some(orig) = fm.get_line(lines.lines[0].line_index) { - let mut col = skip; - let mut lastc = ' '; - let mut iter = orig.chars().enumerate(); - for (pos, ch) in iter.by_ref() { - lastc = ch; - if pos >= lo.col.to_usize() { break; } - // Whenever a tab occurs on the previous line, we insert one on - // the error-point-squiggly-line as well (instead of a space). 
- // That way the squiggly line will usually appear in the correct - // position. - match ch { - '\t' => { - col += 8 - col%8; - s.push('\t'); - }, - _ => { - col += 1; - s.push(' '); - }, - } - } - - try!(write!(&mut err.dst, "{}", s)); - let mut s = String::from("^"); - let count = match lastc { - // Most terminals have a tab stop every eight columns by default - '\t' => 8 - col%8, - _ => 1, - }; - col += count; - s.extend(::std::iter::repeat('~').take(count)); - - let hi = cm.lookup_char_pos(sp.hi); - if hi.col != lo.col { - for (pos, ch) in iter { - if pos >= hi.col.to_usize() { break; } - let count = match ch { - '\t' => 8 - col%8, - _ => 1, - }; - col += count; - s.extend(::std::iter::repeat('~').take(count)); - } - } - - if s.len() > 1 { - // One extra squiggly is replaced by a "^" - s.pop(); - } - - try!(print_maybe_styled(err, - &format!("{}\n", s), - term::attr::ForegroundColor(lvl.color()))); - } - } - Ok(()) -} - -/// Here are the differences between this and the normal `highlight_lines`: -/// `end_highlight_lines` will always put arrow on the last byte of the -/// span (instead of the first byte). Also, when the span is too long (more -/// than 6 lines), `end_highlight_lines` will print the first line, then -/// dot dot dot, then last line, whereas `highlight_lines` prints the first -/// six lines. 
-#[allow(deprecated)] -fn end_highlight_lines(w: &mut EmitterWriter, - cm: &codemap::CodeMap, - sp: Span, - lvl: Level, - lines: codemap::FileLinesResult) - -> io::Result<()> { - let lines = match lines { - Ok(lines) => lines, - Err(_) => { - try!(write!(&mut w.dst, "(internal compiler error: unprintable span)\n")); - return Ok(()); - } - }; - - let fm = &*lines.file; - - let lines = &lines.lines[..]; - if lines.len() > MAX_LINES { - if let Some(line) = fm.get_line(lines[0].line_index) { - try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, - lines[0].line_index + 1, line)); - } - try!(write!(&mut w.dst, "...\n")); - let last_line_index = lines[lines.len() - 1].line_index; - if let Some(last_line) = fm.get_line(last_line_index) { - try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, - last_line_index + 1, last_line)); - } - } else { - for line_info in lines { - if let Some(line) = fm.get_line(line_info.line_index) { - try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, - line_info.line_index + 1, line)); - } - } - } - let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1); - let hi = cm.lookup_char_pos(sp.hi); - let skip = last_line_start.chars().count(); - let mut s = String::new(); - for _ in 0..skip { - s.push(' '); - } - if let Some(orig) = fm.get_line(lines[0].line_index) { - let iter = orig.chars().enumerate(); - for (pos, ch) in iter { - // Span seems to use half-opened interval, so subtract 1 - if pos >= hi.col.to_usize() - 1 { break; } - // Whenever a tab occurs on the previous line, we insert one on - // the error-point-squiggly-line as well (instead of a space). - // That way the squiggly line will usually appear in the correct - // position. 
- match ch { - '\t' => s.push('\t'), - _ => s.push(' '), - } - } - } - s.push('^'); - s.push('\n'); - print_maybe_styled(w, - &s[..], - term::attr::ForegroundColor(lvl.color())) -} - -fn print_macro_backtrace(w: &mut EmitterWriter, - cm: &codemap::CodeMap, - sp: Span) - -> io::Result<()> { - let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> { - match expn_info { - Some(ei) => { - let ss = ei.callee.span.map_or(String::new(), - |span| cm.span_to_string(span)); - let (pre, post) = match ei.callee.format { - codemap::MacroAttribute => ("#[", "]"), - codemap::MacroBang => ("", "!"), - codemap::CompilerExpansion => ("", ""), - }; - try!(print_diagnostic(w, &ss, Note, - &format!("in expansion of {}{}{}", - pre, - ei.callee.name, - post), - None)); - let ss = cm.span_to_string(ei.call_site); - try!(print_diagnostic(w, &ss, Note, "expansion site", None)); - Ok(Some(ei.call_site)) - } - None => Ok(None) - } - })); - cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site)) -} - pub fn expect(diag: &SpanHandler, opt: Option, msg: M) -> T where M: FnOnce() -> String, { @@ -808,7 +826,7 @@ pub fn expect(diag: &SpanHandler, opt: Option, msg: M) -> T where #[cfg(test)] mod test { - use super::{EmitterWriter, highlight_lines, Level}; + use super::{EmitterWriter, Level}; use codemap::{mk_sp, CodeMap, BytePos}; use std::sync::{Arc, Mutex}; use std::io::{self, Write}; @@ -841,12 +859,7 @@ mod test { tolv dreizehn "; - let file = cm.new_filemap("dummy.txt".to_string(), content.to_string()); - for (i, b) in content.bytes().enumerate() { - if b == b'\n' { - file.next_line(BytePos(i as u32)); - } - } + let file = cm.new_filemap_and_lines("dummy.txt", content); let start = file.lines.borrow()[7]; let end = file.lines.borrow()[11]; let sp = mk_sp(start, end); @@ -854,15 +867,16 @@ mod test { println!("span_to_lines"); let lines = cm.span_to_lines(sp); println!("highlight_lines"); - highlight_lines(&mut ew, &cm, sp, lvl, lines).unwrap(); + 
ew.highlight_lines(&cm, sp, lvl, lines).unwrap(); println!("done"); let vec = data.lock().unwrap().clone(); let vec: &[u8] = &vec; - println!("{}", from_utf8(vec).unwrap()); - assert_eq!(vec, "dummy.txt: 8 \n\ - dummy.txt: 9 \n\ - dummy.txt:10 \n\ - dummy.txt:11 \n\ - dummy.txt:12 \n".as_bytes()); + let str = from_utf8(vec).unwrap(); + println!("{}", str); + assert_eq!(str, "dummy.txt: 8 line8\n\ + dummy.txt: 9 line9\n\ + dummy.txt:10 line10\n\ + dummy.txt:11 e-lä-vän\n\ + dummy.txt:12 tolv\n"); } } diff --git a/syntex_syntax/src/diagnostics/macros.rs b/syntex_syntax/src/diagnostics/macros.rs index 055ade46..669b930e 100644 --- a/syntex_syntax/src/diagnostics/macros.rs +++ b/syntex_syntax/src/diagnostics/macros.rs @@ -63,6 +63,9 @@ macro_rules! fileline_help { macro_rules! register_diagnostics { ($($code:tt),*) => ( $(register_diagnostic! { $code })* + ); + ($($code:tt),*,) => ( + $(register_diagnostic! { $code })* ) } @@ -70,5 +73,8 @@ macro_rules! register_diagnostics { macro_rules! register_long_diagnostics { ($($code:tt: $description:tt),*) => ( $(register_diagnostic! { $code, $description })* + ); + ($($code:tt: $description:tt),*,) => ( + $(register_diagnostic! { $code, $description })* ) } diff --git a/syntex_syntax/src/diagnostics/plugin.rs b/syntex_syntax/src/diagnostics/plugin.rs index aee06680..48fd05a7 100644 --- a/syntex_syntax/src/diagnostics/plugin.rs +++ b/syntex_syntax/src/diagnostics/plugin.rs @@ -63,7 +63,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, // Previously used errors. Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => { ecx.span_warn(span, &format!( - "diagnostic code {} already used", &token::get_ident(code) + "diagnostic code {} already used", code )); ecx.span_note(previous_span, "previous invocation"); } @@ -74,7 +74,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, // Unregistered errors. 
None => { ecx.span_err(span, &format!( - "used diagnostic code {} not registered", &token::get_ident(code) + "used diagnostic code {} not registered", code )); } } @@ -110,7 +110,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, if !msg.starts_with("\n") || !msg.ends_with("\n") { ecx.span_err(span, &format!( "description for error code {} doesn't start and end with a newline", - token::get_ident(*code) + code )); } @@ -122,7 +122,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, ecx.span_err(span, &format!( "description for error code {} contains a line longer than {} characters.\n\ if you're inserting a long URL use the footnote style to bypass this check.", - token::get_ident(*code), MAX_DESCRIPTION_WIDTH + code, MAX_DESCRIPTION_WIDTH )); } }); @@ -134,12 +134,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, }; if diagnostics.insert(code.name, info).is_some() { ecx.span_err(span, &format!( - "diagnostic code {} already registered", &token::get_ident(*code) + "diagnostic code {} already registered", code )); } }); - let sym = Ident::new(token::gensym(&( - "__register_diagnostic_".to_string() + &token::get_ident(*code) + let sym = Ident::new(token::gensym(&format!( + "__register_diagnostic_{}", code ))); MacEager::items(SmallVector::many(vec![ ecx.item_mod( @@ -163,7 +163,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, &ast::TtToken(_, token::Ident(ref crate_name, _)), // DIAGNOSTICS ident. 
&ast::TtToken(_, token::Ident(ref name, _)) - ) => (crate_name.as_str(), name), + ) => (*&crate_name, name), _ => unreachable!() }; @@ -172,7 +172,10 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, .ok().expect("unable to determine target arch from $CFG_COMPILER_HOST_TRIPLE"); with_registered_diagnostics(|diagnostics| { - if let Err(e) = output_metadata(ecx, &target_triple, crate_name, &diagnostics) { + if let Err(e) = output_metadata(ecx, + &target_triple, + &crate_name.name.as_str(), + &diagnostics) { ecx.span_bug(span, &format!( "error writing metadata for triple `{}` and crate `{}`, error: {}, cause: {:?}", target_triple, crate_name, e.description(), e.cause() @@ -187,8 +190,8 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, diagnostics.iter().filter_map(|(code, info)| { info.description.map(|description| { ecx.expr_tuple(span, vec![ - ecx.expr_str(span, token::get_name(*code)), - ecx.expr_str(span, token::get_name(description)) + ecx.expr_str(span, code.as_str()), + ecx.expr_str(span, description.as_str()) ]) }) }).collect(); diff --git a/syntex_syntax/src/ext/base.rs b/syntex_syntax/src/ext/base.rs index 499562ed..409ae86d 100644 --- a/syntex_syntax/src/ext/base.rs +++ b/syntex_syntax/src/ext/base.rs @@ -591,6 +591,12 @@ fn initial_syntax_expander_table<'feat>(ecfg: &expand::ExpansionConfig<'feat>) syntax_expanders.insert(intern("cfg"), builtin_normal_expander( ext::cfg::expand_cfg)); + syntax_expanders.insert(intern("push_unsafe"), + builtin_normal_expander( + ext::pushpop_safe::expand_push_unsafe)); + syntax_expanders.insert(intern("pop_unsafe"), + builtin_normal_expander( + ext::pushpop_safe::expand_pop_unsafe)); syntax_expanders.insert(intern("trace_macros"), builtin_normal_expander( ext::trace_macros::expand_trace_macros)); diff --git a/syntex_syntax/src/ext/build.rs b/syntex_syntax/src/ext/build.rs index 8a80e291..b91c54ae 100644 --- a/syntex_syntax/src/ext/build.rs +++ b/syntex_syntax/src/ext/build.rs @@ -538,7 
+538,6 @@ impl<'a> AstBuilder for ExtCtxt<'a> { init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, - source: ast::LocalLet, }); let decl = respan(sp, ast::DeclLocal(local)); P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID))) @@ -562,7 +561,6 @@ impl<'a> AstBuilder for ExtCtxt<'a> { init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, - source: ast::LocalLet, }); let decl = respan(sp, ast::DeclLocal(local)); P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID))) @@ -631,9 +629,8 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn expr_field_access(&self, sp: Span, expr: P, ident: ast::Ident) -> P { - let field_name = token::get_ident(ident); let field_span = Span { - lo: sp.lo - Pos::from_usize(field_name.len()), + lo: sp.lo - Pos::from_usize(ident.name.as_str().len()), hi: sp.hi, expn_id: sp.expn_id, }; diff --git a/syntex_syntax/src/ext/concat_idents.rs b/syntex_syntax/src/ext/concat_idents.rs index 5d07c36c..24436c45 100644 --- a/syntex_syntax/src/ext/concat_idents.rs +++ b/syntex_syntax/src/ext/concat_idents.rs @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } else { match *e { ast::TtToken(_, token::Ident(ident, _)) => { - res_str.push_str(&token::get_ident(ident)) + res_str.push_str(&ident.name.as_str()) }, _ => { cx.span_err(sp, "concat_idents! 
requires ident args."); @@ -49,7 +49,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(&res_str[..]); + let res = str_to_ident(&res_str); let e = P(ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/syntex_syntax/src/ext/deriving/decodable.rs b/syntex_syntax/src/ext/deriving/decodable.rs index 99f217a4..085d9d60 100644 --- a/syntex_syntax/src/ext/deriving/decodable.rs +++ b/syntex_syntax/src/ext/deriving/decodable.rs @@ -128,7 +128,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, decoder, cx.ident_of("read_struct"), vec!( - cx.expr_str(trait_span, token::get_ident(substr.type_ident)), + cx.expr_str(trait_span, substr.type_ident.name.as_str()), cx.expr_usize(trait_span, nfields), cx.lambda_expr_1(trait_span, result, blkarg) )) @@ -140,10 +140,10 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let mut variants = Vec::new(); let rvariant_arg = cx.ident_of("read_enum_variant_arg"); - for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() { - variants.push(cx.expr_str(v_span, token::get_ident(name))); + for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() { + variants.push(cx.expr_str(v_span, ident.name.as_str())); - let path = cx.path(trait_span, vec![substr.type_ident, name]); + let path = cx.path(trait_span, vec![substr.type_ident, ident]); let decoded = decode_static_fields(cx, v_span, path, @@ -175,7 +175,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, decoder, cx.ident_of("read_enum"), vec!( - cx.expr_str(trait_span, token::get_ident(substr.type_ident)), + cx.expr_str(trait_span, substr.type_ident.name.as_str()), cx.lambda_expr_1(trait_span, result, blkarg) )) } @@ -211,9 +211,9 @@ fn decode_static_fields(cx: &mut ExtCtxt, } Named(ref fields) => { // use the field's span to get nicer error messages. 
- let fields = fields.iter().enumerate().map(|(i, &(name, span))| { - let arg = getarg(cx, span, token::get_ident(name), i); - cx.field_imm(span, name, arg) + let fields = fields.iter().enumerate().map(|(i, &(ident, span))| { + let arg = getarg(cx, span, ident.name.as_str(), i); + cx.field_imm(span, ident, arg) }).collect(); cx.expr_struct(trait_span, outer_pat_path, fields) } diff --git a/syntex_syntax/src/ext/deriving/encodable.rs b/syntex_syntax/src/ext/deriving/encodable.rs index 0ef23705..ae4d337b 100644 --- a/syntex_syntax/src/ext/deriving/encodable.rs +++ b/syntex_syntax/src/ext/deriving/encodable.rs @@ -186,7 +186,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, .. }) in fields.iter().enumerate() { let name = match name { - Some(id) => token::get_ident(id), + Some(id) => id.name.as_str(), None => { token::intern_and_get_ident(&format!("_field{}", i)) } @@ -223,7 +223,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, encoder, cx.ident_of("emit_struct"), vec!( - cx.expr_str(trait_span, token::get_ident(substr.type_ident)), + cx.expr_str(trait_span, substr.type_ident.name.as_str()), cx.expr_usize(trait_span, fields.len()), blk )) @@ -263,7 +263,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - let name = cx.expr_str(trait_span, token::get_ident(variant.node.name)); + let name = cx.expr_str(trait_span, variant.node.name.name.as_str()); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), vec!(name, @@ -275,7 +275,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, encoder, cx.ident_of("emit_enum"), vec!( - cx.expr_str(trait_span, token::get_ident(substr.type_ident)), + cx.expr_str(trait_span, substr.type_ident.name.as_str()), blk )); cx.expr_block(cx.block(trait_span, vec!(me), Some(ret))) diff --git a/syntex_syntax/src/ext/deriving/show.rs b/syntex_syntax/src/ext/deriving/show.rs index 
dbde963b..3a478884 100644 --- a/syntex_syntax/src/ext/deriving/show.rs +++ b/syntex_syntax/src/ext/deriving/show.rs @@ -59,7 +59,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, // build fmt.debug_struct().field(, &)....build() // or fmt.debug_tuple().field(&)....build() // based on the "shape". - let name = match *substr.fields { + let ident = match *substr.fields { Struct(_) => substr.type_ident, EnumMatching(_, v, _) => v.node.name, EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) => { @@ -69,7 +69,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, // We want to make sure we have the expn_id set so that we can use unstable methods let span = Span { expn_id: cx.backtrace(), .. span }; - let name = cx.expr_lit(span, ast::Lit_::LitStr(token::get_ident(name), + let name = cx.expr_lit(span, ast::Lit_::LitStr(ident.name.as_str(), ast::StrStyle::CookedStr)); let mut expr = substr.nonself_args[0].clone(); @@ -102,7 +102,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, for field in fields { let name = cx.expr_lit(field.span, ast::Lit_::LitStr( - token::get_ident(field.name.clone().unwrap()), + field.name.unwrap().name.as_str(), ast::StrStyle::CookedStr)); // Use double indirection to make sure this works for unsized types diff --git a/syntex_syntax/src/ext/expand.rs b/syntex_syntax/src/ext/expand.rs index 53befc09..6e49b190 100644 --- a/syntex_syntax/src/ext/expand.rs +++ b/syntex_syntax/src/ext/expand.rs @@ -33,6 +33,16 @@ use visit; use visit::Visitor; use std_inject; +// Given suffix ["b","c","d"], returns path `::std::b::c::d` when +// `fld.cx.use_std`, and `::core::b::c::d` otherwise. 
+fn mk_core_path(fld: &mut MacroExpander, + span: Span, + suffix: &[&'static str]) -> ast::Path { + let mut idents = vec![fld.cx.ident_of_std("core")]; + for s in suffix.iter() { idents.push(fld.cx.ident_of(*s)); } + fld.cx.path_global(span, idents) +} + pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { fn push_compiler_expansion(fld: &mut MacroExpander, span: Span, expansion_desc: &str) { fld.cx.bt_push(ExpnInfo { @@ -40,13 +50,26 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { callee: NameAndSpan { name: expansion_desc.to_string(), format: CompilerExpansion, + + // This does *not* mean code generated after + // `push_compiler_expansion` is automatically exempt + // from stability lints; must also tag such code with + // an appropriate span from `fld.cx.backtrace()`. allow_internal_unstable: true, + span: None, }, }); } - e.and_then(|ast::Expr {id, node, span}| match node { + // Sets the expn_id so that we can use unstable methods. + fn allow_unstable(fld: &mut MacroExpander, span: Span) -> Span { + Span { expn_id: fld.cx.backtrace(), ..span } + } + + let expr_span = e.span; + return e.and_then(|ast::Expr {id, node, span}| match node { + // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. 
ast::ExprMac(mac) => { @@ -71,6 +94,118 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { }) } + // Desugar ExprBox: `in (PLACE) EXPR` + ast::ExprBox(Some(placer), value_expr) => { + // to: + // + // let p = PLACE; + // let mut place = Placer::make_place(p); + // let raw_place = Place::pointer(&mut place); + // push_unsafe!({ + // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR )); + // InPlace::finalize(place) + // }) + + // Ensure feature-gate is enabled + feature_gate::check_for_placement_in( + fld.cx.ecfg.features, + &fld.cx.parse_sess.span_diagnostic, + expr_span); + + push_compiler_expansion(fld, expr_span, "placement-in expansion"); + + let value_span = value_expr.span; + let placer_span = placer.span; + + let placer_expr = fld.fold_expr(placer); + let value_expr = fld.fold_expr(value_expr); + + let placer_ident = token::gensym_ident("placer"); + let agent_ident = token::gensym_ident("place"); + let p_ptr_ident = token::gensym_ident("p_ptr"); + + let placer = fld.cx.expr_ident(span, placer_ident); + let agent = fld.cx.expr_ident(span, agent_ident); + let p_ptr = fld.cx.expr_ident(span, p_ptr_ident); + + let make_place = ["ops", "Placer", "make_place"]; + let place_pointer = ["ops", "Place", "pointer"]; + let move_val_init = ["intrinsics", "move_val_init"]; + let inplace_finalize = ["ops", "InPlace", "finalize"]; + + let make_call = |fld: &mut MacroExpander, p, args| { + // We feed in the `expr_span` because codemap's span_allows_unstable + // allows the call_site span to inherit the `allow_internal_unstable` + // setting. 
+ let span_unstable = allow_unstable(fld, expr_span); + let path = mk_core_path(fld, span_unstable, p); + let path = fld.cx.expr_path(path); + let expr_span_unstable = allow_unstable(fld, span); + fld.cx.expr_call(expr_span_unstable, path, args) + }; + + let stmt_let = |fld: &mut MacroExpander, bind, expr| { + fld.cx.stmt_let(placer_span, false, bind, expr) + }; + let stmt_let_mut = |fld: &mut MacroExpander, bind, expr| { + fld.cx.stmt_let(placer_span, true, bind, expr) + }; + + // let placer = ; + let s1 = stmt_let(fld, placer_ident, placer_expr); + + // let mut place = Placer::make_place(placer); + let s2 = { + let call = make_call(fld, &make_place, vec![placer]); + stmt_let_mut(fld, agent_ident, call) + }; + + // let p_ptr = Place::pointer(&mut place); + let s3 = { + let args = vec![fld.cx.expr_mut_addr_of(placer_span, agent.clone())]; + let call = make_call(fld, &place_pointer, args); + stmt_let(fld, p_ptr_ident, call) + }; + + // pop_unsafe!(EXPR)); + let pop_unsafe_expr = pop_unsafe_expr(fld.cx, value_expr, value_span); + + // push_unsafe!({ + // ptr::write(p_ptr, pop_unsafe!()); + // InPlace::finalize(place) + // }) + let expr = { + let call_move_val_init = StmtSemi(make_call( + fld, &move_val_init, vec![p_ptr, pop_unsafe_expr]), ast::DUMMY_NODE_ID); + let call_move_val_init = codemap::respan(value_span, call_move_val_init); + + let call = make_call(fld, &inplace_finalize, vec![agent]); + Some(push_unsafe_expr(fld.cx, vec![P(call_move_val_init)], call, span)) + }; + + let block = fld.cx.block_all(span, vec![s1, s2, s3], expr); + let result = fld.cx.expr_block(block); + fld.cx.bt_pop(); + result + } + + // Issue #22181: + // Eventually a desugaring for `box EXPR` + // (similar to the desugaring above for `in PLACE BLOCK`) + // should go here, desugaring + // + // to: + // + // let mut place = BoxPlace::make_place(); + // let raw_place = Place::pointer(&mut place); + // let value = $value; + // unsafe { + // ::std::ptr::write(raw_place, value); + // 
Boxed::finalize(place) + // } + // + // But for now there are type-inference issues doing that. + ast::ExprWhile(cond, body, opt_ident) => { let cond = fld.fold_expr(cond); let (body, opt_ident) = expand_loop_block(body, opt_ident, fld); @@ -360,7 +495,26 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { span: span }, fld)) } - }) + }); + + fn push_unsafe_expr(cx: &mut ExtCtxt, stmts: Vec>, + expr: P, span: Span) + -> P { + let rules = ast::PushUnsafeBlock(ast::CompilerGenerated); + cx.expr_block(P(ast::Block { + rules: rules, span: span, id: ast::DUMMY_NODE_ID, + stmts: stmts, expr: Some(expr), + })) + } + + fn pop_unsafe_expr(cx: &mut ExtCtxt, expr: P, span: Span) + -> P { + let rules = ast::PopUnsafeBlock(ast::CompilerGenerated); + cx.expr_block(P(ast::Block { + rules: rules, span: span, id: ast::DUMMY_NODE_ID, + stmts: vec![], expr: Some(expr), + })) + } } /// Expand a (not-ident-style) macro invocation. Returns the result @@ -391,14 +545,13 @@ fn expand_mac_invoc(mac: ast::Mac, // let compilation continue return None; } - let extname = pth.segments[0].identifier; - let extnamestr = token::get_ident(extname); - match fld.cx.syntax_env.find(&extname.name) { + let extname = pth.segments[0].identifier.name; + match fld.cx.syntax_env.find(&extname) { None => { fld.cx.span_err( pth.span, &format!("macro undefined: '{}!'", - &extnamestr)); + &extname)); // let compilation continue None @@ -408,7 +561,7 @@ fn expand_mac_invoc(mac: ast::Mac, fld.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - name: extnamestr.to_string(), + name: extname.to_string(), format: MacroBang, span: exp_span, allow_internal_unstable: allow_internal_unstable, @@ -435,7 +588,7 @@ fn expand_mac_invoc(mac: ast::Mac, fld.cx.span_err( pth.span, &format!("non-expression macro in expression position: {}", - &extnamestr[..] 
+ extname )); return None; } @@ -446,7 +599,7 @@ fn expand_mac_invoc(mac: ast::Mac, fld.cx.span_err( pth.span, &format!("'{}' is not a tt-style macro", - &extnamestr)); + extname)); None } } @@ -558,19 +711,18 @@ pub fn expand_item_mac(it: P, node: MacInvocTT(ref pth, ref tts, _), .. }) => { - (pth.segments[0].identifier, pth.span, (*tts).clone()) + (pth.segments[0].identifier.name, pth.span, (*tts).clone()) } _ => fld.cx.span_bug(it.span, "invalid item macro invocation") }; - let extnamestr = token::get_ident(extname); let fm = fresh_mark(); let items = { - let expanded = match fld.cx.syntax_env.find(&extname.name) { + let expanded = match fld.cx.syntax_env.find(&extname) { None => { fld.cx.span_err(path_span, &format!("macro undefined: '{}!'", - extnamestr)); + extname)); // let compilation continue return SmallVector::zero(); } @@ -581,14 +733,14 @@ pub fn expand_item_mac(it: P, fld.cx .span_err(path_span, &format!("macro {}! expects no ident argument, given '{}'", - extnamestr, - token::get_ident(it.ident))); + extname, + it.ident)); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr.to_string(), + name: extname.to_string(), format: MacroBang, span: span, allow_internal_unstable: allow_internal_unstable, @@ -602,13 +754,13 @@ pub fn expand_item_mac(it: P, if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, &format!("macro {}! 
expects an ident argument", - &extnamestr)); + extname)); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr.to_string(), + name: extname.to_string(), format: MacroBang, span: span, allow_internal_unstable: allow_internal_unstable, @@ -629,7 +781,7 @@ pub fn expand_item_mac(it: P, fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr.to_string(), + name: extname.to_string(), format: MacroBang, span: None, // `macro_rules!` doesn't directly allow @@ -674,7 +826,7 @@ pub fn expand_item_mac(it: P, _ => { fld.cx.span_err(it.span, &format!("{}! is not legal in item position", - &extnamestr)); + extname)); return SmallVector::zero(); } } @@ -693,7 +845,7 @@ pub fn expand_item_mac(it: P, None => { fld.cx.span_err(path_span, &format!("non-item macro in item position: {}", - &extnamestr)); + extname)); return SmallVector::zero(); } }; @@ -757,7 +909,7 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE StmtDecl(decl, node_id) => decl.and_then(|Spanned {node: decl, span}| match decl { DeclLocal(local) => { // take it apart: - let rewritten_local = local.map(|Local {id, pat, ty, init, source, span}| { + let rewritten_local = local.map(|Local {id, pat, ty, init, span}| { // expand the ty since TyFixedLengthVec contains an Expr // and thus may have a macro use let expanded_ty = ty.map(|t| fld.fold_ty(t)); @@ -787,7 +939,6 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE pat: rewritten_pat, // also, don't forget to expand the init: init: init.map(|e| fld.fold_expr(e)), - source: source, span: span } }); @@ -943,13 +1094,12 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { fld.cx.span_err(pth.span, "expected macro name without module separators"); return DummyResult::raw_pat(span); } - let extname = pth.segments[0].identifier; - let extnamestr = token::get_ident(extname); - let marked_after = match 
fld.cx.syntax_env.find(&extname.name) { + let extname = pth.segments[0].identifier.name; + let marked_after = match fld.cx.syntax_env.find(&extname) { None => { fld.cx.span_err(pth.span, &format!("macro undefined: '{}!'", - extnamestr)); + extname)); // let compilation continue return DummyResult::raw_pat(span); } @@ -959,7 +1109,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { fld.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - name: extnamestr.to_string(), + name: extname.to_string(), format: MacroBang, span: tt_span, allow_internal_unstable: allow_internal_unstable, @@ -979,7 +1129,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { pth.span, &format!( "non-pattern macro in pattern position: {}", - &extnamestr + extname ) ); return DummyResult::raw_pat(span); @@ -992,7 +1142,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { _ => { fld.cx.span_err(span, &format!("{}! is not legal in pattern position", - &extnamestr)); + extname)); return DummyResult::raw_pat(span); } } @@ -1504,6 +1654,7 @@ impl<'feat> ExpansionConfig<'feat> { fn enable_trace_macros = allow_trace_macros, fn enable_allow_internal_unstable = allow_internal_unstable, fn enable_custom_derive = allow_custom_derive, + fn enable_pushpop_unsafe = allow_pushpop_unsafe, } } @@ -1967,8 +2118,7 @@ mod tests { = varref.segments.iter().map(|s| s.identifier) .collect(); println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name); - let string = token::get_ident(final_varref_ident); - println!("varref's first segment's string: \"{}\"", &string[..]); + println!("varref's first segment's string: \"{}\"", final_varref_ident); println!("binding #{}: {}, resolves to {}", binding_idx, bindings[binding_idx], binding_name); mtwt::with_sctable(|x| mtwt::display_sctable(x)); @@ -2020,11 +2170,7 @@ foo_module!(); // find the xx binding let bindings = crate_bindings(&cr); let cxbinds: Vec<&ast::Ident> = - bindings.iter().filter(|b| { - let ident = token::get_ident(**b); - 
let string = &ident[..]; - "xx" == string - }).collect(); + bindings.iter().filter(|b| b.name == "xx").collect(); let cxbinds: &[&ast::Ident] = &cxbinds[..]; let cxbind = match (cxbinds.len(), cxbinds.get(0)) { (1, Some(b)) => *b, @@ -2036,7 +2182,7 @@ foo_module!(); // the xx binding should bind all of the xx varrefs: for (idx,v) in varrefs.iter().filter(|p| { p.segments.len() == 1 - && "xx" == &*token::get_ident(p.segments[0].identifier) + && p.segments[0].identifier.name == "xx" }).enumerate() { if mtwt::resolve(v.segments[0].identifier) != resolved_binding { println!("uh oh, xx binding didn't match xx varref:"); diff --git a/syntex_syntax/src/ext/format.rs b/syntex_syntax/src/ext/format.rs index 5b972b46..5a2b9c0e 100644 --- a/syntex_syntax/src/ext/format.rs +++ b/syntex_syntax/src/ext/format.rs @@ -121,8 +121,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) return None; } }; - let interned_name = token::get_ident(ident); - let name = &interned_name[..]; + let name: &str = &ident.name.as_str(); panictry!(p.expect(&token::Eq)); let e = p.parse_expr(); diff --git a/syntex_syntax/src/ext/pushpop_safe.rs b/syntex_syntax/src/ext/pushpop_safe.rs new file mode 100644 index 00000000..a67d550d --- /dev/null +++ b/syntex_syntax/src/ext/pushpop_safe.rs @@ -0,0 +1,94 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +/* + * The compiler code necessary to support the `push_unsafe!` and + * `pop_unsafe!` macros. + * + * This is a hack to allow a kind of "safety hygiene", where a macro + * can generate code with an interior expression that inherits the + * safety of some outer context. 
+ * + * For example, in: + * + * ```rust + * fn foo() { push_unsafe!( { EXPR_1; pop_unsafe!( EXPR_2 ) } ) } + * ``` + * + * the `EXPR_1` is considered to be in an `unsafe` context, + * but `EXPR_2` is considered to be in a "safe" (i.e. checked) context. + * + * For comparison, in: + * + * ```rust + * fn foo() { unsafe { push_unsafe!( { EXPR_1; pop_unsafe!( EXPR_2 ) } ) } } + * ``` + * + * both `EXPR_1` and `EXPR_2` are considered to be in `unsafe` + * contexts. + * + */ + +use ast; +use codemap::Span; +use ext::base::*; +use ext::base; +use ext::build::AstBuilder; +use feature_gate; +use ptr::P; + +enum PushPop { Push, Pop } + +pub fn expand_push_unsafe<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) + -> Box { + expand_pushpop_unsafe(cx, sp, tts, PushPop::Push) +} + +pub fn expand_pop_unsafe<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) + -> Box { + expand_pushpop_unsafe(cx, sp, tts, PushPop::Pop) +} + +fn expand_pushpop_unsafe<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree], + pp: PushPop) -> Box { + feature_gate::check_for_pushpop_syntax( + cx.ecfg.features, &cx.parse_sess.span_diagnostic, sp); + + let mut exprs = match get_exprs_from_tts(cx, sp, tts) { + Some(exprs) => exprs.into_iter(), + None => return DummyResult::expr(sp), + }; + + let expr = match (exprs.next(), exprs.next()) { + (Some(expr), None) => expr, + _ => { + let msg = match pp { + PushPop::Push => "push_unsafe! takes 1 arguments", + PushPop::Pop => "pop_unsafe! 
takes 1 arguments", + }; + cx.span_err(sp, msg); + return DummyResult::expr(sp); + } + }; + + let source = ast::UnsafeSource::CompilerGenerated; + let check_mode = match pp { + PushPop::Push => ast::BlockCheckMode::PushUnsafeBlock(source), + PushPop::Pop => ast::BlockCheckMode::PopUnsafeBlock(source), + }; + + MacEager::expr(cx.expr_block(P(ast::Block { + stmts: vec![], + expr: Some(expr), + id: ast::DUMMY_NODE_ID, + rules: check_mode, + span: sp + }))) +} diff --git a/syntex_syntax/src/ext/quote.rs b/syntex_syntax/src/ext/quote.rs index 82c249d2..b8168297 100644 --- a/syntex_syntax/src/ext/quote.rs +++ b/syntex_syntax/src/ext/quote.rs @@ -407,7 +407,7 @@ fn id_ext(str: &str) -> ast::Ident { // Lift an ident to the expr that evaluates to that ident. fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { - let e_str = cx.expr_str(sp, token::get_ident(ident)); + let e_str = cx.expr_str(sp, ident.name.as_str()); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), @@ -416,7 +416,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { // Lift a name to the expr that evaluates to that name fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { - let e_str = cx.expr_str(sp, token::get_ident(ident)); + let e_str = cx.expr_str(sp, ident.name.as_str()); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("name_of"), diff --git a/syntex_syntax/src/ext/source_util.rs b/syntex_syntax/src/ext/source_util.rs index 3866f553..8da36b2c 100644 --- a/syntex_syntax/src/ext/source_util.rs +++ b/syntex_syntax/src/ext/source_util.rs @@ -76,9 +76,9 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) base::check_zero_tts(cx, sp, tts, "module_path!"); let string = cx.mod_path() .iter() - .map(|x| token::get_ident(*x).to_string()) + .map(|x| x.to_string()) .collect::>() - .connect("::"); + .join("::"); base::MacEager::expr(cx.expr_str( sp, token::intern_and_get_ident(&string[..]))) @@ -156,7 +156,7 @@ 
pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // dependency information let filename = format!("{}", file.display()); let interned = token::intern_and_get_ident(&src[..]); - cx.codemap().new_filemap(filename, src); + cx.codemap().new_filemap_and_lines(&filename, &src); base::MacEager::expr(cx.expr_str(sp, interned)) } @@ -187,7 +187,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information, but don't enter it's contents let filename = format!("{}", file.display()); - cx.codemap().new_filemap(filename, "".to_string()); + cx.codemap().new_filemap_and_lines(&filename, ""); base::MacEager::expr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes)))) } diff --git a/syntex_syntax/src/ext/tt/macro_parser.rs b/syntex_syntax/src/ext/tt/macro_parser.rs index 5521c68e..4556bd5f 100644 --- a/syntex_syntax/src/ext/tt/macro_parser.rs +++ b/syntex_syntax/src/ext/tt/macro_parser.rs @@ -223,11 +223,10 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) *idx += 1; } Occupied(..) 
=> { - let string = token::get_ident(bind_name); panic!(p_s.span_diagnostic .span_fatal(sp, &format!("duplicated bind name: {}", - &string))) + bind_name))) } } } @@ -460,12 +459,10 @@ pub fn parse(sess: &ParseSess, let nts = bb_eis.iter().map(|ei| { match ei.top_elts.get_tt(ei.idx) { TtToken(_, MatchNt(bind, name, _, _)) => { - (format!("{} ('{}')", - token::get_ident(name), - token::get_ident(bind))).to_string() + format!("{} ('{}')", name, bind) } _ => panic!() - } }).collect::>().connect(" or "); + } }).collect::>().join(" or "); return Error(sp, format!( "local ambiguity: multiple parsing options: \ built-in NTs {} or {} other options.", @@ -484,11 +481,10 @@ pub fn parse(sess: &ParseSess, let mut ei = bb_eis.pop().unwrap(); match ei.top_elts.get_tt(ei.idx) { - TtToken(span, MatchNt(_, name, _, _)) => { - let name_string = token::get_ident(name); + TtToken(span, MatchNt(_, ident, _, _)) => { let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( - parse_nt(&mut rust_parser, span, &name_string)))); + parse_nt(&mut rust_parser, span, &ident.name.as_str())))); ei.idx += 1; ei.match_cur += 1; } diff --git a/syntex_syntax/src/ext/tt/macro_rules.rs b/syntex_syntax/src/ext/tt/macro_rules.rs index 03d4e21a..adc88c32 100644 --- a/syntex_syntax/src/ext/tt/macro_rules.rs +++ b/syntex_syntax/src/ext/tt/macro_rules.rs @@ -56,10 +56,9 @@ impl<'a> ParserAnyMacro<'a> { let span = parser.span; parser.span_err(span, &msg[..]); - let name = token::get_ident(self.macro_ident); let msg = format!("caused by the macro expansion here; the usage \ of `{}` is likely invalid in this context", - name); + self.macro_ident); parser.span_note(self.site_span, &msg[..]); } } @@ -154,7 +153,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, -> Box { if cx.trace_macros() { println!("{}! 
{{ {} }}", - token::get_ident(name), + name, print::pprust::tts_to_string(arg)); } @@ -302,8 +301,8 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) { tt @ &TtSequence(..) => { check_matcher(cx, Some(tt).into_iter(), &Eof); }, - _ => cx.span_bug(sp, "wrong-structured lhs for follow check (didn't find \ - a TtDelimited or TtSequence)") + _ => cx.span_err(sp, "Invalid macro matcher; matchers must be contained \ + in balanced delimiters or a repetition indicator") }, _ => cx.span_bug(sp, "wrong-structured lhs for follow check (didn't find a \ MatchedNonterminal)") @@ -326,7 +325,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => { // ii. If T is a simple NT, look ahead to the next token T' in // M. If T' is in the set FOLLOW(NT), continue. Else; reject. - if can_be_followed_by_any(frag_spec.as_str()) { + if can_be_followed_by_any(&frag_spec.name.as_str()) { continue } else { let next_token = match tokens.peek() { @@ -340,13 +339,13 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) // possibility that the sequence occurred // zero times (in which case we need to // look at the token that follows the - // sequence, which may itself a sequence, + // sequence, which may itself be a sequence, // and so on). cx.span_err(sp, &format!("`${0}:{1}` is followed by a \ sequence repetition, which is not \ allowed for `{1}` fragments", - name.as_str(), frag_spec.as_str()) + name, frag_spec) ); Eof }, @@ -359,7 +358,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() }; // If T' is in the set FOLLOW(NT), continue. Else, reject. 
- match (&next_token, is_in_follow(cx, &next_token, frag_spec.as_str())) { + match (&next_token, is_in_follow(cx, &next_token, &frag_spec.name.as_str())) { (_, Err(msg)) => { cx.span_err(sp, &msg); continue @@ -369,7 +368,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) (next, Ok(false)) => { cx.span_err(sp, &format!("`${0}:{1}` is followed by `{2}`, which \ is not allowed for `{1}` fragments", - name.as_str(), frag_spec.as_str(), + name, frag_spec, token_to_string(next))); continue }, @@ -495,14 +494,14 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result { "pat" => { match *tok { FatArrow | Comma | Eq => Ok(true), - Ident(i, _) if i.as_str() == "if" || i.as_str() == "in" => Ok(true), + Ident(i, _) if i.name == "if" || i.name == "in" => Ok(true), _ => Ok(false) } }, "path" | "ty" => { match *tok { - Comma | FatArrow | Colon | Eq | Gt => Ok(true), - Ident(i, _) if i.as_str() == "as" => Ok(true), + Comma | FatArrow | Colon | Eq | Gt | Semi => Ok(true), + Ident(i, _) if i.name == "as" => Ok(true), _ => Ok(false) } }, diff --git a/syntex_syntax/src/ext/tt/transcribe.rs b/syntex_syntax/src/ext/tt/transcribe.rs index 368a9f0c..0ca755c9 100644 --- a/syntex_syntax/src/ext/tt/transcribe.rs +++ b/syntex_syntax/src/ext/tt/transcribe.rs @@ -140,11 +140,9 @@ impl Add for LockstepIterSize { LisContradiction(_) => other, LisConstraint(r_len, _) if l_len == r_len => self.clone(), LisConstraint(r_len, r_id) => { - let l_n = token::get_ident(l_id.clone()); - let r_n = token::get_ident(r_id); LisContradiction(format!("inconsistent lockstep iteration: \ - '{:?}' has {} items, but '{:?}' has {}", - l_n, l_len, r_n, r_len).to_string()) + '{}' has {} items, but '{}' has {}", + l_id, l_len, r_id, r_len)) } }, } @@ -308,8 +306,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { MatchedSeq(..) 
=> { panic!(r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ - &format!("variable '{:?}' is still repeating at this depth", - token::get_ident(ident)))); + &format!("variable '{}' is still repeating at this depth", + ident))); } } } } diff --git a/syntex_syntax/src/feature_gate.rs b/syntex_syntax/src/feature_gate.rs index ab8cf9ae..53b57cdf 100644 --- a/syntex_syntax/src/feature_gate.rs +++ b/syntex_syntax/src/feature_gate.rs @@ -80,6 +80,8 @@ const KNOWN_FEATURES: &'static [(&'static str, &'static str, Status)] = &[ ("visible_private_types", "1.0.0", Active), ("slicing_syntax", "1.0.0", Accepted), ("box_syntax", "1.0.0", Active), + ("placement_in_syntax", "1.0.0", Active), + ("pushpop_unsafe", "1.2.0", Active), ("on_unimplemented", "1.0.0", Active), ("simd_ffi", "1.0.0", Active), ("allocator", "1.0.0", Active), @@ -158,6 +160,11 @@ const KNOWN_FEATURES: &'static [(&'static str, &'static str, Status)] = &[ // Allows using #[prelude_import] on glob `use` items. ("prelude_import", "1.2.0", Active), + + // Allows the definition of recursive static items. +// Allows default type parameters to influence type inference.
+ ("default_type_parameter_fallback", "1.3.0", Active) ]; // (changing above list without updating src/doc/reference.md makes @cmr sad) @@ -325,6 +332,9 @@ pub struct Features { pub allow_trace_macros: bool, pub allow_internal_unstable: bool, pub allow_custom_derive: bool, + pub allow_placement_in: bool, + pub allow_box: bool, + pub allow_pushpop_unsafe: bool, pub simd_ffi: bool, pub unmarked_api: bool, pub negate_unsigned: bool, @@ -333,6 +343,8 @@ pub struct Features { /// #![feature] attrs for non-language (library) features pub declared_lib_features: Vec<(InternedString, Span)>, pub const_fn: bool, + pub static_recursion: bool, + pub default_type_parameter_fallback: bool, } impl Features { @@ -348,16 +360,51 @@ impl Features { allow_trace_macros: false, allow_internal_unstable: false, allow_custom_derive: false, + allow_placement_in: false, + allow_box: false, + allow_pushpop_unsafe: false, simd_ffi: false, unmarked_api: false, negate_unsigned: false, declared_stable_lang_features: Vec::new(), declared_lib_features: Vec::new(), const_fn: false, + static_recursion: false, + default_type_parameter_fallback: false, } } } +const EXPLAIN_BOX_SYNTAX: &'static str = + "box expression syntax is experimental; you can call `Box::new` instead."; + +const EXPLAIN_PLACEMENT_IN: &'static str = + "placement-in expression syntax is experimental and subject to change."; + +const EXPLAIN_PUSHPOP_UNSAFE: &'static str = + "push/pop_unsafe macros are experimental and subject to change."; + +pub fn check_for_box_syntax(f: Option<&Features>, diag: &SpanHandler, span: Span) { + if let Some(&Features { allow_box: true, .. }) = f { + return; + } + emit_feature_err(diag, "box_syntax", span, EXPLAIN_BOX_SYNTAX); +} + +pub fn check_for_placement_in(f: Option<&Features>, diag: &SpanHandler, span: Span) { + if let Some(&Features { allow_placement_in: true, .. 
}) = f { + return; + } + emit_feature_err(diag, "placement_in_syntax", span, EXPLAIN_PLACEMENT_IN); +} + +pub fn check_for_pushpop_syntax(f: Option<&Features>, diag: &SpanHandler, span: Span) { + if let Some(&Features { allow_pushpop_unsafe: true, .. }) = f { + return; + } + emit_feature_err(diag, "pushpop_unsafe", span, EXPLAIN_PUSHPOP_UNSAFE); +} + struct Context<'a> { features: Vec<&'static str>, span_handler: &'a SpanHandler, @@ -366,6 +413,11 @@ struct Context<'a> { } impl<'a> Context<'a> { + fn enable_feature(&mut self, feature: &'static str) { + debug!("enabling feature: {}", feature); + self.features.push(feature); + } + fn gate_feature(&self, feature: &str, span: Span, explain: &str) { let has_feature = self.has_feature(feature); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", feature, span, has_feature); @@ -488,6 +540,26 @@ impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> { fn visit_attribute(&mut self, attr: &'v ast::Attribute) { self.context.check_attribute(attr, true); } + + fn visit_expr(&mut self, e: &ast::Expr) { + // Issue 22181: overloaded-`box` and placement-`in` are + // implemented via a desugaring expansion, so their feature + // gates go into MacroVisitor since that works pre-expansion. + // + // Issue 22234: we also check during expansion as well. + // But we keep these checks as a pre-expansion check to catch + // uses in e.g. conditionalized code. 
+ + if let ast::ExprBox(None, _) = e.node { + self.context.gate_feature("box_syntax", e.span, EXPLAIN_BOX_SYNTAX); + } + + if let ast::ExprBox(Some(_), _) = e.node { + self.context.gate_feature("placement_in_syntax", e.span, EXPLAIN_PLACEMENT_IN); + } + + visit::walk_expr(self, e); + } } struct PostExpansionVisitor<'a> { @@ -510,7 +582,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } fn visit_name(&mut self, sp: Span, name: ast::Name) { - if !token::get_name(name).is_ascii() { + if !name.as_str().is_ascii() { self.gate_feature("non_ascii_idents", sp, "non-ascii idents are not fully supported."); } @@ -754,7 +826,7 @@ fn check_crate_inner(cm: &CodeMap, span_handler: &SpanHandler, match KNOWN_FEATURES.iter() .find(|& &(n, _, _)| name == n) { Some(&(name, _, Active)) => { - cx.features.push(name); + cx.enable_feature(name); } Some(&(_, _, Removed)) => { span_handler.span_err(mi.span, "feature has been removed"); @@ -787,12 +859,17 @@ fn check_crate_inner(cm: &CodeMap, span_handler: &SpanHandler, allow_trace_macros: cx.has_feature("trace_macros"), allow_internal_unstable: cx.has_feature("allow_internal_unstable"), allow_custom_derive: cx.has_feature("custom_derive"), + allow_placement_in: cx.has_feature("placement_in_syntax"), + allow_box: cx.has_feature("box_syntax"), + allow_pushpop_unsafe: cx.has_feature("pushpop_unsafe"), simd_ffi: cx.has_feature("simd_ffi"), unmarked_api: cx.has_feature("unmarked_api"), negate_unsigned: cx.has_feature("negate_unsigned"), declared_stable_lang_features: accepted_features, declared_lib_features: unknown_features, const_fn: cx.has_feature("const_fn"), + static_recursion: cx.has_feature("static_recursion"), + default_type_parameter_fallback: cx.has_feature("default_type_parameter_fallback"), } } diff --git a/syntex_syntax/src/fold.rs b/syntex_syntax/src/fold.rs index 14742d2e..dab6d41d 100644 --- a/syntex_syntax/src/fold.rs +++ b/syntex_syntax/src/fold.rs @@ -515,12 +515,11 @@ pub fn 
noop_fold_parenthesized_parameter_data(data: ParenthesizedPara } pub fn noop_fold_local(l: P, fld: &mut T) -> P { - l.map(|Local {id, pat, ty, init, source, span}| Local { + l.map(|Local {id, pat, ty, init, span}| Local { id: fld.new_id(id), ty: ty.map(|t| fld.fold_ty(t)), pat: fld.fold_pat(pat), init: init.map(|e| fld.fold_expr(e)), - source: source, span: fld.new_span(span) }) } diff --git a/syntex_syntax/src/lib.rs b/syntex_syntax/src/lib.rs index 7333265b..5424c0b2 100644 --- a/syntex_syntax/src/lib.rs +++ b/syntex_syntax/src/lib.rs @@ -32,6 +32,7 @@ #![feature(libc)] #![feature(ref_slice)] #![feature(rustc_private)] +#![feature(set_stdio)] #![feature(staged_api)] #![feature(str_char)] #![feature(str_escape)] @@ -119,6 +120,7 @@ pub mod ext { pub mod log_syntax; pub mod mtwt; pub mod quote; + pub mod pushpop_safe; pub mod source_util; pub mod trace_macros; diff --git a/syntex_syntax/src/parse/lexer/comments.rs b/syntex_syntax/src/parse/lexer/comments.rs index 1577b50a..46734562 100644 --- a/syntex_syntax/src/parse/lexer/comments.rs +++ b/syntex_syntax/src/parse/lexer/comments.rs @@ -139,7 +139,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { let lines = vertical_trim(lines); let lines = horizontal_trim(lines); - return lines.connect("\n"); + return lines.join("\n"); } panic!("not a doc-comment: {}", comment); diff --git a/syntex_syntax/src/parse/lexer/mod.rs b/syntex_syntax/src/parse/lexer/mod.rs index 507bd9de..019a8404 100644 --- a/syntex_syntax/src/parse/lexer/mod.rs +++ b/syntex_syntax/src/parse/lexer/mod.rs @@ -172,6 +172,11 @@ impl<'a> StringReader<'a> { self.span_diagnostic.span_err(sp, m) } + /// Suggest some help with a given span. + pub fn help_span(&self, sp: Span, m: &str) { + self.span_diagnostic.span_help(sp, m) + } + /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> ! 
{ self.fatal_span(codemap::mk_sp(from_pos, to_pos), m) @@ -182,6 +187,11 @@ impl<'a> StringReader<'a> { self.err_span(codemap::mk_sp(from_pos, to_pos), m) } + /// Suggest some help spanning [`from_pos`, `to_pos`). + fn help_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { + self.help_span(codemap::mk_sp(from_pos, to_pos), m) + } + /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an /// escaped character to the error message fn fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> ! { @@ -221,6 +231,7 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; + self.peek_span = codemap::mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.last_pos; self.peek_tok = self.next_token_inner(); @@ -683,7 +694,7 @@ impl<'a> StringReader<'a> { accum_int *= 16; accum_int += c.to_digit(16).unwrap_or_else(|| { self.err_span_char(self.last_pos, self.pos, - "illegal character in numeric character escape", c); + "invalid character in numeric character escape", c); valid = false; 0 @@ -703,7 +714,7 @@ impl<'a> StringReader<'a> { Some(_) => valid, None => { let last_bpos = self.last_pos; - self.err_span_(start_bpos, last_bpos, "illegal numeric character escape"); + self.err_span_(start_bpos, last_bpos, "invalid numeric character escape"); false } } @@ -728,19 +739,24 @@ impl<'a> StringReader<'a> { return match e { 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0' => true, 'x' => self.scan_byte_escape(delim, !ascii_only), - 'u' if self.curr_is('{') => { - let valid = self.scan_unicode_escape(delim); - if valid && ascii_only { - self.err_span_( - escaped_pos, - self.last_pos, + 'u' => { + let valid = if self.curr_is('{') { + self.scan_unicode_escape(delim) && !ascii_only + } else { + self.err_span_(start, self.last_pos, + "incorrect unicode escape sequence"); + self.help_span_(start, self.last_pos, + "format of unicode escape sequences is `\\u{…}`"); + false + }; + if 
ascii_only { + self.err_span_(start, self.last_pos, "unicode escape sequences cannot be used as a byte or in \ a byte string" ); - false - } else { - valid } + valid + } '\n' if delim == '"' => { self.consume_whitespace(); @@ -757,16 +773,13 @@ impl<'a> StringReader<'a> { if ascii_only { "unknown byte escape" } else { "unknown character escape" }, c); - let sp = codemap::mk_sp(escaped_pos, last_pos); if e == '\r' { - self.span_diagnostic.span_help( - sp, + self.help_span_(escaped_pos, last_pos, "this is an isolated carriage return; consider checking \ your editor and version control settings") } if (e == '{' || e == '}') && !ascii_only { - self.span_diagnostic.span_help( - sp, + self.help_span_(escaped_pos, last_pos, "if used in a formatting string, \ curly braces are escaped with `{{` and `}}`") } @@ -833,7 +846,7 @@ impl<'a> StringReader<'a> { "unterminated unicode escape (needed a `}`)"); } else { self.err_span_char(self.last_pos, self.pos, - "illegal character in unicode escape", c); + "invalid character in unicode escape", c); } valid = false; 0 @@ -848,14 +861,12 @@ impl<'a> StringReader<'a> { valid = false; } - self.bump(); // past the ending } - if valid && (char::from_u32(accum_int).is_none() || count == 0) { - self.err_span_(start_bpos, self.last_pos, "illegal unicode character escape"); + self.err_span_(start_bpos, self.last_pos, "invalid unicode character escape"); valid = false; } - + self.bump(); // past the ending } valid } @@ -1127,8 +1138,8 @@ impl<'a> StringReader<'a> { let last_bpos = self.last_pos; let curr_char = self.curr.unwrap(); self.fatal_span_char(start_bpos, last_bpos, - "only `#` is allowed in raw string delimitation; \ - found illegal character", + "found invalid character; \ + only `#` is allowed in raw string delimitation", curr_char); } self.bump(); @@ -1312,8 +1323,8 @@ impl<'a> StringReader<'a> { let last_pos = self.last_pos; let ch = self.curr.unwrap(); self.fatal_span_char(start_bpos, last_pos, - "only `#` is allowed in raw 
string delimitation; \ - found illegal character", + "found invalid character; \ + only `#` is allowed in raw string delimitation", ch); } self.bump(); diff --git a/syntex_syntax/src/parse/mod.rs b/syntex_syntax/src/parse/mod.rs index d6c28d41..c5a73601 100644 --- a/syntex_syntax/src/parse/mod.rs +++ b/syntex_syntax/src/parse/mod.rs @@ -11,10 +11,11 @@ //! The main parser interface use ast; -use codemap::{Span, CodeMap, FileMap}; +use codemap::{self, Span, CodeMap, FileMap}; use diagnostic::{SpanHandler, Handler, Auto, FatalError}; use parse::attr::ParserAttr; use parse::parser::Parser; +use parse::token::InternedString; use ptr::P; use str::char_at; @@ -203,7 +204,14 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, cfg: ast::CrateConfig) -> Parser<'a> { - tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg) + let end_pos = filemap.end_pos; + let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg); + + if parser.token == token::Eof && parser.span == codemap::DUMMY_SP { + parser.span = codemap::mk_sp(end_pos, end_pos); + } + + parser } // must preserve old name for now, because quote! from the *existing* @@ -432,17 +440,17 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> ast::Lit_ { debug!("filtered_float_lit: {}, {:?}", data, suffix); - match suffix { + match suffix.as_ref().map(|s| &**s) { Some("f32") => ast::LitFloat(data, ast::TyF32), Some("f64") => ast::LitFloat(data, ast::TyF64), Some(suf) => { if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it looks like a width, lets try to be helpful. 
- sd.span_err(sp, &*format!("illegal width `{}` for float literal, \ - valid widths are 32 and 64", &suf[1..])); + sd.span_err(sp, &*format!("invalid width `{}` for float literal", &suf[1..])); + sd.fileline_help(sp, "valid widths are 32 and 64"); } else { - sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \ - valid suffixes are `f32` and `f64`", suf)); + sd.span_err(sp, &*format!("invalid suffix `{}` for float literal", suf)); + sd.fileline_help(sp, "valid suffixes are `f32` and `f64`"); } ast::LitFloatUnsuffixed(data) @@ -450,12 +458,13 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, None => ast::LitFloatUnsuffixed(data) } } -pub fn float_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> ast::Lit_ { +pub fn float_lit(s: &str, suffix: Option, + sd: &SpanHandler, sp: Span) -> ast::Lit_ { debug!("float_lit: {:?}, {:?}", s, suffix); // FIXME #2252: bounds checking float literals is deferred until trans let s = s.chars().filter(|&c| c != '_').collect::(); - let data = token::intern_and_get_ident(&*s); - filtered_float_lit(data, suffix, sd, sp) + let data = token::intern_and_get_ident(&s); + filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp) } /// Parse a string representing a byte literal into its final form. Similar to `char_lit` @@ -550,7 +559,11 @@ pub fn binary_lit(lit: &str) -> Rc> { Rc::new(res) } -pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> ast::Lit_ { +pub fn integer_lit(s: &str, + suffix: Option, + sd: &SpanHandler, + sp: Span) + -> ast::Lit_ { // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); @@ -572,8 +585,8 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> } // 1f64 and 2f32 etc. are valid float literals. 
- match suffix { - Some(suf) if looks_like_width_suffix(&['f'], suf) => { + if let Some(ref suf) = suffix { + if looks_like_width_suffix(&['f'], suf) { match base { 16 => sd.span_err(sp, "hexadecimal float literal is not supported"), 8 => sd.span_err(sp, "octal float literal is not supported"), @@ -581,18 +594,17 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> _ => () } let ident = token::intern_and_get_ident(&*s); - return filtered_float_lit(ident, suffix, sd, sp) + return filtered_float_lit(ident, Some(&**suf), sd, sp) } - _ => {} } if base != 10 { s = &s[2..]; } - if let Some(suf) = suffix { + if let Some(ref suf) = suffix { if suf.is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")} - ty = match suf { + ty = match &**suf { "isize" => ast::SignedIntLit(ast::TyIs, ast::Plus), "i8" => ast::SignedIntLit(ast::TyI8, ast::Plus), "i16" => ast::SignedIntLit(ast::TyI16, ast::Plus), @@ -607,11 +619,11 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // i and u look like widths, so lets // give an error message along those lines if looks_like_width_suffix(&['i', 'u'], suf) { - sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \ - valid widths are 8, 16, 32 and 64", + sd.span_err(sp, &*format!("invalid width `{}` for integer literal", &suf[1..])); + sd.fileline_help(sp, "valid widths are 8, 16, 32 and 64"); } else { - sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf)); + sd.span_err(sp, &*format!("invalid suffix `{}` for numeric literal", suf)); sd.fileline_help(sp, "the suffix must be one of the integral types \ (`u32`, `isize`, etc)"); } @@ -732,8 +744,8 @@ mod tests { Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))), Some(&ast::TtDelimited(_, ref macro_delimed)), ) - if name_macro_rules.as_str() == "macro_rules" - && name_zip.as_str() == "zip" => { + if name_macro_rules.name == "macro_rules" + && name_zip.name == "zip" => { let tts = 
¯o_delimed.tts[..]; match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( @@ -748,10 +760,10 @@ mod tests { ( 2, Some(&ast::TtToken(_, token::Dollar)), - Some(&ast::TtToken(_, token::Ident(name, token::Plain))), + Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), ) if first_delimed.delim == token::Paren - && name.as_str() == "a" => {}, + && ident.name == "a" => {}, _ => panic!("value 3: {:?}", **first_delimed), } let tts = &second_delimed.tts[..]; @@ -759,10 +771,10 @@ mod tests { ( 2, Some(&ast::TtToken(_, token::Dollar)), - Some(&ast::TtToken(_, token::Ident(name, token::Plain))), + Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), ) if second_delimed.delim == token::Paren - && name.as_str() == "a" => {}, + && ident.name == "a" => {}, _ => panic!("value 4: {:?}", **second_delimed), } }, diff --git a/syntex_syntax/src/parse/obsolete.rs b/syntex_syntax/src/parse/obsolete.rs index 00d9b7f4..5a72477d 100644 --- a/syntex_syntax/src/parse/obsolete.rs +++ b/syntex_syntax/src/parse/obsolete.rs @@ -92,10 +92,8 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { fn is_obsolete_ident(&mut self, ident: &str) -> bool { match self.token { - token::Ident(sid, _) => { - token::get_ident(sid) == ident - } - _ => false + token::Ident(sid, _) => sid.name == ident, + _ => false, } } diff --git a/syntex_syntax/src/parse/parser.rs b/syntex_syntax/src/parse/parser.rs index 81ae607f..11611c9a 100644 --- a/syntex_syntax/src/parse/parser.rs +++ b/syntex_syntax/src/parse/parser.rs @@ -35,7 +35,7 @@ use ast::{ItemMac, ItemMod, ItemStruct, ItemTrait, ItemTy, ItemDefaultImpl}; use ast::{ItemExternCrate, ItemUse}; use ast::{LifetimeDef, Lit, Lit_}; use ast::{LitBool, LitChar, LitByte, LitBinary}; -use ast::{LitStr, LitInt, Local, LocalLet}; +use ast::{LitStr, LitInt, Local}; use ast::{MacStmtWithBraces, MacStmtWithSemicolon, MacStmtWithoutBraces}; use ast::{MutImmutable, MutMutable, Mac_, MacInvocTT, MatchSource}; use ast::{MutTy, BiMul, Mutability}; @@ -60,7 +60,7 
@@ use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; use ast::{Visibility, WhereClause}; use ast; use ast_util::{self, AS_PREC, ident_to_path, operator_prec}; -use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp}; +use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp, CodeMap}; use diagnostic; use ext::tt::macro_parser; use parse; @@ -288,7 +288,7 @@ impl TokenType { match *self { TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)), TokenType::Operator => "an operator".to_string(), - TokenType::Keyword(kw) => format!("`{}`", token::get_name(kw.to_name())), + TokenType::Keyword(kw) => format!("`{}`", kw.to_name()), } } } @@ -297,6 +297,24 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool { t.is_plain_ident() || *t == token::Underscore } +/// Information about the path to a module. +pub struct ModulePath { + pub name: String, + pub path_exists: bool, + pub result: Result, +} + +pub struct ModulePathSuccess { + pub path: ::std::path::PathBuf, + pub owns_directory: bool, +} + +pub struct ModulePathError { + pub err_msg: String, + pub help_msg: String, +} + + impl<'a> Parser<'a> { pub fn new(sess: &'a ParseSess, cfg: ast::CrateConfig, @@ -663,7 +681,7 @@ impl<'a> Parser<'a> { if text.is_empty() { self.span_bug(sp, "found empty literal suffix in Some") } - self.span_err(sp, &*format!("{} with a suffix is illegal", kind)); + self.span_err(sp, &*format!("{} with a suffix is invalid", kind)); } } } @@ -1005,7 +1023,7 @@ impl<'a> Parser<'a> { } pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString { - token::get_ident(id) + id.name.as_str() } /// Is the current token one of the keywords that signals a bare function @@ -1480,20 +1498,20 @@ impl<'a> Parser<'a> { } token::Literal(lit, suf) => { let (suffix_illegal, out) = match lit { - token::Byte(i) => (true, LitByte(parse::byte_lit(i.as_str()).0)), - token::Char(i) => (true, LitChar(parse::char_lit(i.as_str()).0)), + token::Byte(i) => (true, 
LitByte(parse::byte_lit(&i.as_str()).0)), + token::Char(i) => (true, LitChar(parse::char_lit(&i.as_str()).0)), // there are some valid suffixes for integer and // float literals, so all the handling is done // internally. token::Integer(s) => { - (false, parse::integer_lit(s.as_str(), + (false, parse::integer_lit(&s.as_str(), suf.as_ref().map(|s| s.as_str()), &self.sess.span_diagnostic, self.last_span)) } token::Float(s) => { - (false, parse::float_lit(s.as_str(), + (false, parse::float_lit(&s.as_str(), suf.as_ref().map(|s| s.as_str()), &self.sess.span_diagnostic, self.last_span)) @@ -1501,20 +1519,20 @@ impl<'a> Parser<'a> { token::Str_(s) => { (true, - LitStr(token::intern_and_get_ident(&parse::str_lit(s.as_str())), + LitStr(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), ast::CookedStr)) } token::StrRaw(s, n) => { (true, LitStr( - token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())), + token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), ast::RawStr(n))) } token::Binary(i) => - (true, LitBinary(parse::binary_lit(i.as_str()))), + (true, LitBinary(parse::binary_lit(&i.as_str()))), token::BinaryRaw(i, _) => (true, - LitBinary(Rc::new(i.as_str().as_bytes().iter().cloned().collect()))), + LitBinary(Rc::new(i.to_string().into_bytes()))), }; if suffix_illegal { @@ -2081,28 +2099,32 @@ impl<'a> Parser<'a> { return self.parse_if_expr(); } if try!(self.eat_keyword(keywords::For) ){ - return self.parse_for_expr(None); + let lo = self.last_span.lo; + return self.parse_for_expr(None, lo); } if try!(self.eat_keyword(keywords::While) ){ - return self.parse_while_expr(None); + let lo = self.last_span.lo; + return self.parse_while_expr(None, lo); } if self.token.is_lifetime() { let lifetime = self.get_lifetime(); + let lo = self.span.lo; try!(self.bump()); try!(self.expect(&token::Colon)); if try!(self.eat_keyword(keywords::While) ){ - return self.parse_while_expr(Some(lifetime)) + return self.parse_while_expr(Some(lifetime), lo) } if 
try!(self.eat_keyword(keywords::For) ){ - return self.parse_for_expr(Some(lifetime)) + return self.parse_for_expr(Some(lifetime), lo) } if try!(self.eat_keyword(keywords::Loop) ){ - return self.parse_loop_expr(Some(lifetime)) + return self.parse_loop_expr(Some(lifetime), lo) } return Err(self.fatal("expected `while`, `for`, or `loop` after a label")) } if try!(self.eat_keyword(keywords::Loop) ){ - return self.parse_loop_expr(None); + let lo = self.last_span.lo; + return self.parse_loop_expr(None, lo); } if try!(self.eat_keyword(keywords::Continue) ){ let lo = self.span.lo; @@ -2426,7 +2448,7 @@ impl<'a> Parser<'a> { match self.token { token::SubstNt(name, _) => return Err(self.fatal(&format!("unknown macro variable `{}`", - token::get_ident(name)))), + name))), _ => {} } } @@ -2590,19 +2612,40 @@ impl<'a> Parser<'a> { ex = ExprAddrOf(m, e); } token::Ident(_, _) => { - if !self.check_keyword(keywords::Box) { + if !self.check_keyword(keywords::Box) && !self.check_keyword(keywords::In) { return self.parse_dot_or_call_expr(); } let lo = self.span.lo; - let box_hi = self.span.hi; + let keyword_hi = self.span.hi; + let is_in = self.token.is_keyword(keywords::In); try!(self.bump()); - // Check for a place: `box(PLACE) EXPR`. - if try!(self.eat(&token::OpenDelim(token::Paren)) ){ - // Support `box() EXPR` as the default. - if !try!(self.eat(&token::CloseDelim(token::Paren)) ){ + if is_in { + let place = try!(self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL)); + let blk = try!(self.parse_block()); + hi = blk.span.hi; + let blk_expr = self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk)); + ex = ExprBox(Some(place), blk_expr); + return Ok(self.mk_expr(lo, hi, ex)); + } + + // FIXME (#22181) Remove `box (PLACE) EXPR` support + // entirely after next release (enabling `(box (EXPR))`), + // since it will be replaced by `in PLACE { EXPR }`, ... + // + // ... but for now: check for a place: `box(PLACE) EXPR`. 
+ + if try!(self.eat(&token::OpenDelim(token::Paren))) { + let box_span = mk_sp(lo, self.last_span.hi); + self.span_warn(box_span, + "deprecated syntax; use the `in` keyword now \ + (e.g. change `box () ` to \ + `in { }`)"); + + // Continue supporting `box () EXPR` (temporarily) + if !try!(self.eat(&token::CloseDelim(token::Paren))) { let place = try!(self.parse_expr_nopanic()); try!(self.expect(&token::CloseDelim(token::Paren))); // Give a suggestion to use `box()` when a parenthesised expression is used @@ -2612,10 +2655,15 @@ impl<'a> Parser<'a> { self.span_err(span, &format!("expected expression, found `{}`", this_token_to_string)); - let box_span = mk_sp(lo, box_hi); + + // Spanning just keyword avoids constructing + // printout of arg expression (which starts + // with parenthesis, as established above). + + let box_span = mk_sp(lo, keyword_hi); self.span_suggestion(box_span, - "try using `box()` instead:", - "box()".to_string()); + "try using `box ()` instead:", + format!("box ()")); self.abort_if_errors(); } let subexpression = try!(self.parse_prefix_expr()); @@ -2628,6 +2676,7 @@ impl<'a> Parser<'a> { // Otherwise, we use the unique pointer default. let subexpression = try!(self.parse_prefix_expr()); hi = subexpression.span.hi; + // FIXME (pnkfelix): After working out kinks with box // desugaring, should be `ExprBox(None, subexpression)` // instead. @@ -2716,14 +2765,15 @@ impl<'a> Parser<'a> { // (much lower than other prefix expressions) to be consistent // with the postfix-form 'expr..' let lo = self.span.lo; + let mut hi = self.span.hi; try!(self.bump()); let opt_end = if self.is_at_start_of_range_notation_rhs() { let end = try!(self.parse_binops()); + hi = end.span.hi; Some(end) } else { None }; - let hi = self.span.hi; let ex = self.mk_range(None, opt_end); Ok(self.mk_expr(lo, hi, ex)) } @@ -2765,17 +2815,17 @@ impl<'a> Parser<'a> { } // A range expression, either `expr..expr` or `expr..`. 
token::DotDot => { + let lo = lhs.span.lo; + let mut hi = self.span.hi; try!(self.bump()); let opt_end = if self.is_at_start_of_range_notation_rhs() { let end = try!(self.parse_binops()); + hi = end.span.hi; Some(end) } else { None }; - - let lo = lhs.span.lo; - let hi = self.span.hi; let range = self.mk_range(Some(lhs), opt_end); return Ok(self.mk_expr(lo, hi, range)); } @@ -2874,48 +2924,48 @@ impl<'a> Parser<'a> { } /// Parse a 'for' .. 'in' expression ('for' token already eaten) - pub fn parse_for_expr(&mut self, opt_ident: Option) -> PResult> { + pub fn parse_for_expr(&mut self, opt_ident: Option, + span_lo: BytePos) -> PResult> { // Parse: `for in ` - let lo = self.last_span.lo; let pat = try!(self.parse_pat_nopanic()); try!(self.expect_keyword(keywords::In)); let expr = try!(self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL)); let loop_block = try!(self.parse_block()); let hi = self.last_span.hi; - Ok(self.mk_expr(lo, hi, ExprForLoop(pat, expr, loop_block, opt_ident))) + Ok(self.mk_expr(span_lo, hi, ExprForLoop(pat, expr, loop_block, opt_ident))) } /// Parse a 'while' or 'while let' expression ('while' token already eaten) - pub fn parse_while_expr(&mut self, opt_ident: Option) -> PResult> { + pub fn parse_while_expr(&mut self, opt_ident: Option, + span_lo: BytePos) -> PResult> { if self.token.is_keyword(keywords::Let) { - return self.parse_while_let_expr(opt_ident); + return self.parse_while_let_expr(opt_ident, span_lo); } - let lo = self.last_span.lo; let cond = try!(self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL)); let body = try!(self.parse_block()); let hi = body.span.hi; - return Ok(self.mk_expr(lo, hi, ExprWhile(cond, body, opt_ident))); + return Ok(self.mk_expr(span_lo, hi, ExprWhile(cond, body, opt_ident))); } /// Parse a 'while let' expression ('while' token already eaten) - pub fn parse_while_let_expr(&mut self, opt_ident: Option) -> PResult> { - let lo = self.last_span.lo; + pub fn parse_while_let_expr(&mut self, 
opt_ident: Option, + span_lo: BytePos) -> PResult> { try!(self.expect_keyword(keywords::Let)); let pat = try!(self.parse_pat_nopanic()); try!(self.expect(&token::Eq)); let expr = try!(self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL)); let body = try!(self.parse_block()); let hi = body.span.hi; - return Ok(self.mk_expr(lo, hi, ExprWhileLet(pat, expr, body, opt_ident))); + return Ok(self.mk_expr(span_lo, hi, ExprWhileLet(pat, expr, body, opt_ident))); } - pub fn parse_loop_expr(&mut self, opt_ident: Option) -> PResult> { - let lo = self.last_span.lo; + pub fn parse_loop_expr(&mut self, opt_ident: Option, + span_lo: BytePos) -> PResult> { let body = try!(self.parse_block()); let hi = body.span.hi; - Ok(self.mk_expr(lo, hi, ExprLoop(body, opt_ident))) + Ok(self.mk_expr(span_lo, hi, ExprLoop(body, opt_ident))) } fn parse_match_expr(&mut self) -> PResult> { @@ -3378,7 +3428,6 @@ impl<'a> Parser<'a> { init: init, id: ast::DUMMY_NODE_ID, span: mk_sp(lo, self.last_span.hi), - source: LocalLet, })) } @@ -4687,7 +4736,7 @@ impl<'a> Parser<'a> { if fields.is_empty() { return Err(self.fatal(&format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone())))); + class_name))); } try!(self.bump()); @@ -4726,7 +4775,7 @@ impl<'a> Parser<'a> { if fields.is_empty() { return Err(self.fatal(&format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone())))); + class_name))); } generics.where_clause = try!(self.parse_where_clause()); @@ -4802,8 +4851,14 @@ impl<'a> Parser<'a> { return Err(self.fatal(&format!("expected item, found `{}`", token_str))); } + let hi = if self.span == codemap::DUMMY_SP { + inner_lo + } else { + self.span.lo + }; + Ok(ast::Mod { - inner: mk_sp(inner_lo, self.span.lo), + inner: mk_sp(inner_lo, hi), items: items }) } @@ -4847,8 +4902,7 @@ impl<'a> Parser<'a> { fn push_mod_path(&mut self, id: Ident, attrs: &[Attribute]) { let default_path = 
self.id_to_interned_str(id); - let file_path = match ::attr::first_attr_value_str_by_name(attrs, - "path") { + let file_path = match ::attr::first_attr_value_str_by_name(attrs, "path") { Some(d) => d, None => default_path, }; @@ -4859,82 +4913,103 @@ impl<'a> Parser<'a> { self.mod_path_stack.pop().unwrap(); } - /// Read a module from a source file. - fn eval_src_mod(&mut self, - id: ast::Ident, - outer_attrs: &[ast::Attribute], - id_sp: Span) - -> PResult<(ast::Item_, Vec )> { + pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option { + ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d)) + } + + /// Returns either a path to a module, or . + pub fn default_submod_path(id: ast::Ident, dir_path: &Path, codemap: &CodeMap) -> ModulePath + { + let mod_name = id.to_string(); + let default_path_str = format!("{}.rs", mod_name); + let secondary_path_str = format!("{}/mod.rs", mod_name); + let default_path = dir_path.join(&default_path_str); + let secondary_path = dir_path.join(&secondary_path_str); + let default_exists = codemap.file_exists(&default_path); + let secondary_exists = codemap.file_exists(&secondary_path); + + let result = match (default_exists, secondary_exists) { + (true, false) => Ok(ModulePathSuccess { path: default_path, owns_directory: false }), + (false, true) => Ok(ModulePathSuccess { path: secondary_path, owns_directory: true }), + (false, false) => Err(ModulePathError { + err_msg: format!("file not found for module `{}`", mod_name), + help_msg: format!("name the file either {} or {} inside the directory {:?}", + default_path_str, + secondary_path_str, + dir_path.display()), + }), + (true, true) => Err(ModulePathError { + err_msg: format!("file for module `{}` found at both {} and {}", + mod_name, + default_path_str, + secondary_path_str), + help_msg: "delete or rename one of them to remove the ambiguity".to_owned(), + }), + }; + + ModulePath { + name: mod_name, + path_exists: default_exists || 
secondary_exists, + result: result, + } + } + + fn submod_path(&mut self, + id: ast::Ident, + outer_attrs: &[ast::Attribute], + id_sp: Span) -> PResult { let mut prefix = PathBuf::from(&self.sess.codemap().span_to_filename(self.span)); prefix.pop(); let mut dir_path = prefix; for part in &self.mod_path_stack { dir_path.push(&**part); } - let mod_string = token::get_ident(id); - let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name( - outer_attrs, "path") { - Some(d) => (dir_path.join(&*d), true), - None => { - let mod_name = mod_string.to_string(); - let default_path_str = format!("{}.rs", mod_name); - let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(&default_path_str[..]); - let secondary_path = dir_path.join(&secondary_path_str[..]); - let default_exists = self.sess.codemap().file_exists(&default_path); - let secondary_exists = self.sess.codemap().file_exists(&secondary_path); - - if !self.owns_directory { - self.span_err(id_sp, - "cannot declare a new module at this location"); - let this_module = match self.mod_path_stack.last() { - Some(name) => name.to_string(), - None => self.root_module_name.as_ref().unwrap().clone(), - }; - self.span_note(id_sp, - &format!("maybe move this module `{0}` \ - to its own directory via \ - `{0}/mod.rs`", - this_module)); - if default_exists || secondary_exists { - self.span_note(id_sp, - &format!("... 
or maybe `use` the module \ - `{}` instead of possibly \ - redeclaring it", - mod_name)); - } - self.abort_if_errors(); - } - match (default_exists, secondary_exists) { - (true, false) => (default_path, false), - (false, true) => (secondary_path, true), - (false, false) => { - return Err(self.span_fatal_help(id_sp, - &format!("file not found for module `{}`", - mod_name), - &format!("name the file either {} or {} inside \ - the directory {:?}", - default_path_str, - secondary_path_str, - dir_path.display()))); - } - (true, true) => { - return Err(self.span_fatal_help( - id_sp, - &format!("file for module `{}` found at both {} \ - and {}", - mod_name, - default_path_str, - secondary_path_str), - "delete or rename one of them to remove the ambiguity")); - } - } + if let Some(p) = Parser::submod_path_from_attr(outer_attrs, &dir_path) { + return Ok(ModulePathSuccess { path: p, owns_directory: true }); + } + + let paths = Parser::default_submod_path(id, &dir_path, self.sess.codemap()); + + if !self.owns_directory { + self.span_err(id_sp, "cannot declare a new module at this location"); + let this_module = match self.mod_path_stack.last() { + Some(name) => name.to_string(), + None => self.root_module_name.as_ref().unwrap().clone(), + }; + self.span_note(id_sp, + &format!("maybe move this module `{0}` to its own directory \ + via `{0}/mod.rs`", + this_module)); + if paths.path_exists { + self.span_note(id_sp, + &format!("... or maybe `use` the module `{}` instead \ + of possibly redeclaring it", + paths.name)); } - }; + self.abort_if_errors(); + } + + match paths.result { + Ok(succ) => Ok(succ), + Err(err) => Err(self.span_fatal_help(id_sp, &err.err_msg, &err.help_msg)), + } + } + + /// Read a module from a source file. 
+ fn eval_src_mod(&mut self, + id: ast::Ident, + outer_attrs: &[ast::Attribute], + id_sp: Span) + -> PResult<(ast::Item_, Vec )> { + let ModulePathSuccess { path, owns_directory } = try!(self.submod_path(id, + outer_attrs, + id_sp)); - self.eval_src_mod_from_path(file_path, owns_directory, - mod_string.to_string(), id_sp) + self.eval_src_mod_from_path(path, + owns_directory, + id.to_string(), + id_sp) } fn eval_src_mod_from_path(&mut self, @@ -4959,13 +5034,12 @@ impl<'a> Parser<'a> { included_mod_stack.push(path.clone()); drop(included_mod_stack); - let mut p0 = - new_sub_parser_from_file(self.sess, - self.cfg.clone(), - &path, - owns_directory, - Some(name), - id_sp); + let mut p0 = new_sub_parser_from_file(self.sess, + self.cfg.clone(), + &path, + owns_directory, + Some(name), + id_sp); let mod_inner_lo = p0.span.lo; let mod_attrs = p0.parse_inner_attributes(); let m0 = try!(p0.parse_mod_items(&token::Eof, mod_inner_lo)); @@ -5138,7 +5212,7 @@ impl<'a> Parser<'a> { self.span_err(start_span, &format!("unit-like struct variant should be written \ without braces, as `{},`", - token::get_ident(ident))); + ident)); } kind = StructVariantKind(struct_def); } else if self.check(&token::OpenDelim(token::Paren)) { @@ -5206,17 +5280,16 @@ impl<'a> Parser<'a> { let sp = self.span; self.expect_no_suffix(sp, "ABI spec", suf); try!(self.bump()); - let the_string = s.as_str(); - match abi::lookup(the_string) { + match abi::lookup(&s.as_str()) { Some(abi) => Ok(Some(abi)), None => { let last_span = self.last_span; self.span_err( last_span, - &format!("illegal ABI: expected one of [{}], \ + &format!("invalid ABI: expected one of [{}], \ found `{}`", - abi::all_names().connect(", "), - the_string)); + abi::all_names().join(", "), + s)); Ok(None) } } diff --git a/syntex_syntax/src/parse/token.rs b/syntex_syntax/src/parse/token.rs index 832fec40..bd479255 100644 --- a/syntex_syntax/src/parse/token.rs +++ b/syntex_syntax/src/parse/token.rs @@ -647,6 +647,12 @@ impl InternedString { 
string: string, } } + + #[inline] + pub fn new_from_name(name: ast::Name) -> InternedString { + let interner = get_ident_interner(); + InternedString::new_from_rc_str(interner.get(name)) + } } impl Deref for InternedString { @@ -678,7 +684,7 @@ impl<'a> PartialEq<&'a str> for InternedString { } } -impl<'a> PartialEq for &'a str { +impl<'a> PartialEq for &'a str { #[inline(always)] fn eq(&self, other: &InternedString) -> bool { PartialEq::eq(*self, &other.string[..]) @@ -691,7 +697,7 @@ impl<'a> PartialEq for &'a str { impl Decodable for InternedString { fn decode(d: &mut D) -> Result { - Ok(get_name(get_ident_interner().intern(&try!(d.read_str())[..]))) + Ok(intern(try!(d.read_str()).as_ref()).as_str()) } } @@ -701,25 +707,11 @@ impl Encodable for InternedString { } } -/// Returns the string contents of a name, using the thread-local interner. -#[inline] -pub fn get_name(name: ast::Name) -> InternedString { - let interner = get_ident_interner(); - InternedString::new_from_rc_str(interner.get(name)) -} - -/// Returns the string contents of an identifier, using the thread-local -/// interner. -#[inline] -pub fn get_ident(ident: ast::Ident) -> InternedString { - get_name(ident.name) -} - /// Interns and returns the string contents of an identifier, using the /// thread-local interner. #[inline] pub fn intern_and_get_ident(s: &str) -> InternedString { - get_name(intern(s)) + intern(s).as_str() } /// Maps a string to its interned representation. 
diff --git a/syntex_syntax/src/print/pprust.rs b/syntex_syntax/src/print/pprust.rs index 6693eed6..6cfe85bc 100644 --- a/syntex_syntax/src/print/pprust.rs +++ b/syntex_syntax/src/print/pprust.rs @@ -251,40 +251,40 @@ pub fn token_to_string(tok: &Token) -> String { /* Literals */ token::Literal(lit, suf) => { let mut out = match lit { - token::Byte(b) => format!("b'{}'", b.as_str()), - token::Char(c) => format!("'{}'", c.as_str()), - token::Float(c) => c.as_str().to_string(), - token::Integer(c) => c.as_str().to_string(), - token::Str_(s) => format!("\"{}\"", s.as_str()), + token::Byte(b) => format!("b'{}'", b), + token::Char(c) => format!("'{}'", c), + token::Float(c) => c.to_string(), + token::Integer(c) => c.to_string(), + token::Str_(s) => format!("\"{}\"", s), token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}", delim=repeat("#", n), - string=s.as_str()), - token::Binary(v) => format!("b\"{}\"", v.as_str()), + string=s), + token::Binary(v) => format!("b\"{}\"", v), token::BinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}", delim=repeat("#", n), - string=s.as_str()), + string=s), }; if let Some(s) = suf { - out.push_str(s.as_str()) + out.push_str(&s.as_str()) } out } /* Name components */ - token::Ident(s, _) => token::get_ident(s).to_string(), - token::Lifetime(s) => format!("{}", token::get_ident(s)), + token::Ident(s, _) => s.to_string(), + token::Lifetime(s) => s.to_string(), token::Underscore => "_".to_string(), /* Other */ - token::DocComment(s) => s.as_str().to_string(), + token::DocComment(s) => s.to_string(), token::SubstNt(s, _) => format!("${}", s), token::MatchNt(s, t, _, _) => format!("${}:{}", s, t), token::Eof => "".to_string(), token::Whitespace => " ".to_string(), token::Comment => "/* */".to_string(), - token::Shebang(s) => format!("/* shebang: {}*/", s.as_str()), + token::Shebang(s) => format!("/* shebang: {}*/", s), token::SpecialVarNt(var) => format!("${}", var.as_str()), @@ -819,7 +819,7 @@ impl<'a> State<'a> { 
try!(self.head(&visibility_qualified(item.vis, "extern crate"))); if let Some(p) = *optional_path { - let val = token::get_name(p); + let val = p.as_str(); if val.contains("-") { try!(self.print_string(&val, ast::CookedStr)); } else { @@ -1434,8 +1434,8 @@ impl<'a> State<'a> { attrs: &[ast::Attribute], close_box: bool) -> io::Result<()> { match blk.rules { - ast::UnsafeBlock(..) => try!(self.word_space("unsafe")), - ast::DefaultBlock => () + ast::UnsafeBlock(..) | ast::PushUnsafeBlock(..) => try!(self.word_space("unsafe")), + ast::DefaultBlock | ast::PopUnsafeBlock(..) => () } try!(self.maybe_print_comment(blk.span.lo)); try!(self.ann.pre(self, NodeBlock(blk))); @@ -2009,7 +2009,7 @@ impl<'a> State<'a> { } pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> { - try!(word(&mut self.s, &token::get_ident(ident))); + try!(word(&mut self.s, &ident.name.as_str())); self.ann.post(self, NodeIdent(&ident)) } @@ -2018,7 +2018,7 @@ impl<'a> State<'a> { } pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> { - try!(word(&mut self.s, &token::get_name(name))); + try!(word(&mut self.s, &name.as_str())); self.ann.post(self, NodeName(&name)) } diff --git a/syntex_syntax/src/std_inject.rs b/syntex_syntax/src/std_inject.rs index 36550586..9787f253 100644 --- a/syntex_syntax/src/std_inject.rs +++ b/syntex_syntax/src/std_inject.rs @@ -160,8 +160,7 @@ impl fold::Folder for PreludeInjector { style: ast::AttrOuter, value: P(ast::MetaItem { span: self.span, - node: ast::MetaWord(token::get_name( - special_idents::prelude_import.name)), + node: ast::MetaWord(special_idents::prelude_import.name.as_str()), }), is_sugared_doc: false, },