From 0beba7cfc4df26c49f875fdbc8f46e35e395227f Mon Sep 17 00:00:00 2001 From: Jinoh Kang Date: Tue, 14 Nov 2023 15:41:39 +0900 Subject: [PATCH 01/28] fix: Fix incorrectly replacing references in macro invocation in "Convert to named struct" assist --- .../src/handlers/convert_tuple_struct_to_named_struct.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 017853a4a2023..435d7c4a5377d 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -216,7 +216,7 @@ fn edit_field_references( edit.edit_file(file_id); for r in refs { if let Some(name_ref) = r.name.as_name_ref() { - edit.replace(name_ref.syntax().text_range(), name.text()); + edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text()); } } } From bf645be37b8ff3b0cd46fa6ea6512370cecf7039 Mon Sep 17 00:00:00 2001 From: "Jonathan Pallant (Ferrous Systems)" Date: Wed, 22 Nov 2023 17:20:54 +0000 Subject: [PATCH 02/28] Put copyright on a line by itself. --- crates/stdx/src/anymap.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs index fd44e6c6d0f4c..4d9125d4528dd 100644 --- a/crates/stdx/src/anymap.rs +++ b/crates/stdx/src/anymap.rs @@ -1,5 +1,6 @@ //! This file is a port of only the necessary features from https://github.com/chris-morgan/anymap version 1.0.0-beta.2 for use within rust-analyzer. -//! Copyright © 2014–2022 Chris Morgan. COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING" +//! Copyright © 2014–2022 Chris Morgan. +//! COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING //! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0 //! //! This implementation provides a safe and convenient store for one value of each type. 
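A note on the first patch above: a name reference found inside a macro invocation belongs to the macro's expansion, so `name_ref.syntax().text_range()` is an offset into the expanded text, not into the file being edited; `ctx.sema.original_range(..)` first maps the node back to coordinates in the original file. Below is a minimal, self-contained sketch of that idea — `TextRange`, `FileKind`, `SyntaxNode`, and `original_range` are hypothetical stand-ins, not rust-analyzer's real types, and the plain-offset mapping is a deliberate simplification of the real expansion-info walk.

// Hypothetical model of why the assist must map ranges before editing.
#[derive(Debug, Clone, Copy, PartialEq)]
struct TextRange { start: u32, end: u32 }

// Where a syntax node lives: the user-written file, or a synthetic
// expansion buffer produced by a macro invoked at `call_site_offset`.
enum FileKind {
    Real,
    Expansion { call_site_offset: u32 },
}

struct SyntaxNode { file: FileKind, range: TextRange }

// Stand-in for `ctx.sema.original_range(..)`: translate an
// expansion-relative range into real-file coordinates.
fn original_range(node: &SyntaxNode) -> TextRange {
    match node.file {
        FileKind::Real => node.range,
        FileKind::Expansion { call_site_offset } => TextRange {
            start: node.range.start + call_site_offset,
            end: node.range.end + call_site_offset,
        },
    }
}

fn main() {
    // A field reference at bytes 4..9 of an expansion, for a macro
    // invoked at byte 100 of the source file.
    let node = SyntaxNode {
        file: FileKind::Expansion { call_site_offset: 100 },
        range: TextRange { start: 4, end: 9 },
    };
    // Editing at `node.range` would patch the wrong bytes of the file;
    // the mapped range is the one the assist must replace.
    assert_eq!(original_range(&node), TextRange { start: 104, end: 109 });
}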
From 11a70f6f698227144fd8694bfa7613b250714e88 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 22 Nov 2023 07:30:09 +0100 Subject: [PATCH 03/28] also make 'core_intrinsics' internal --- crates/proc-macro-srv/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 84bd15efb8bee..58833cb7e9264 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -13,7 +13,7 @@ #![cfg(feature = "sysroot-abi")] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] -#![allow(unreachable_pub)] +#![allow(unreachable_pub, internal_features)] extern crate proc_macro; From 5ef5e557495e55833b1c1a12dadab3a673d00b2a Mon Sep 17 00:00:00 2001 From: Ben Kimock Date: Sun, 26 Nov 2023 20:45:56 -0500 Subject: [PATCH 04/28] Use a u64 for the rmeta root position --- crates/proc-macro-api/src/version.rs | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs index 48efbf589c6af..238fb561554c9 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-api/src/version.rs @@ -120,13 +120,18 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]); // Last supported version is: // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632 - let snappy_portion = match version { - 5 | 6 => &dot_rustc[8..], + let (snappy_portion, bytes_before_version) = match version { + 5 | 6 => (&dot_rustc[8..], 13), 7 | 8 => { let len_bytes = &dot_rustc[8..12]; let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize; - &dot_rustc[12..data_len + 12] + (&dot_rustc[12..data_len + 12], 13) } + 9 => { + let len_bytes = &dot_rustc[8..16]; + let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize; + (&dot_rustc[16..data_len + 12], 17) + } _ => { return Err(io::Error::new( io::ErrorKind::InvalidData, @@ -142,15 +147,15 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { Box::new(SnapDecoder::new(snappy_portion)) }; - // the bytes before version string bytes, so this basically is: + // We're going to skip over the bytes before the version string, so basically: // 8 bytes for [b'r',b'u',b's',b't',0,0,0,5] - // 4 bytes for [crate root bytes] + // 4 or 8 bytes for [crate root bytes] // 1 byte for length of version string - // so 13 bytes in total, and we should check the 13th byte + // so 13 or 17 bytes in total, and we should check the last of those bytes // to know the length - let mut bytes_before_version = [0u8; 13]; + let mut bytes_before_version = vec![0u8; bytes_before_version]; uncompressed.read_exact(&mut bytes_before_version)?; - let length = bytes_before_version[12]; + let length = *bytes_before_version.last().unwrap(); let mut version_string_utf8 = vec![0u8; length as usize]; uncompressed.read_exact(&mut version_string_utf8)?; From 5f957658c1fad9597a9df332fa1267edafa35ec8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 8 Dec 2023 20:38:26 +0100 Subject: [PATCH 05/28] fix: Fix fragment parser replacing matches with dummies on incomplete parses --- .../macro_expansion_tests/mbe/regression.rs | 96 +++++++++++++++---- crates/mbe/src/tt_iter.rs | 11 +-- 2 files changed, 82 insertions(+), 25 deletions(-) diff --git 
a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 9010050ee6788..71ba49721746d 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -13,37 +13,97 @@ fn test_vec() {
     check(
         r#"
 macro_rules! vec {
-    ($($item:expr),*) => {{
-        let mut v = Vec::new();
-        $( v.push($item); )*
-        v
-    }};
+    () => (
+        $crate::__rust_force_expr!($crate::vec::Vec::new())
+    );
+    ($elem:expr; $n:expr) => (
+        $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+    );
+    ($($x:expr),+ $(,)?) => (
+        $crate::__rust_force_expr!(<[_]>::into_vec(
+            // This rustc_box is not required, but it produces a dramatic improvement in compile
+            // time when constructing arrays with many elements.
+            #[rustc_box]
+            $crate::boxed::Box::new([$($x),+])
+        ))
+    );
+}
+
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
+    };
 }
+
 fn main() {
     vec!();
     vec![1u32,2];
+    vec![a.];
 }
 "#,
         expect![[r#"
 macro_rules! vec {
-    ($($item:expr),*) => {{
-        let mut v = Vec::new();
-        $( v.push($item); )*
-        v
-    }};
+    () => (
+        $crate::__rust_force_expr!($crate::vec::Vec::new())
+    );
+    ($elem:expr; $n:expr) => (
+        $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+    );
+    ($($x:expr),+ $(,)?) => (
+        $crate::__rust_force_expr!(<[_]>::into_vec(
+            // This rustc_box is not required, but it produces a dramatic improvement in compile
+            // time when constructing arrays with many elements.
+            #[rustc_box]
+            $crate::boxed::Box::new([$($x),+])
+        ))
+    );
+}
+
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
+    };
 }
+
 fn main() {
-    {
-        let mut v = Vec::new();
-        v
+    $crate::__rust_force_expr!($crate:: vec:: Vec:: new());
+    $crate::__rust_force_expr!(<[_]>:: into_vec(#[rustc_box]$crate:: boxed:: Box:: new([1u32, 2])));
+    /* error: expected Expr */$crate::__rust_force_expr!($crate:: vec:: from_elem((a.), $n));
+}
+"#]],
+    );
+    // FIXME we should have testing infra for multi level expansion tests
+    check(
+        r#"
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
     };
-    {
-        let mut v = Vec::new();
-        v.push(1u32);
-        v.push(2);
-        v
+}
+
+fn main() {
+    __rust_force_expr!(crate:: vec:: Vec:: new());
+    __rust_force_expr!(<[_]>:: into_vec(#[rustc_box] crate:: boxed:: Box:: new([1u32, 2])));
+    __rust_force_expr/*+errors*/!(crate:: vec:: from_elem((a.), $n));
+}
+"#,
+        expect![[r#"
+macro_rules! 
__rust_force_expr { + ($e:expr) => { + $e }; } + +fn main() { + (crate ::vec::Vec::new()); + (<[_]>::into_vec(#[rustc_box] crate ::boxed::Box::new([1u32, 2]))); + /* error: expected Expr *//* parse error: expected field name or number */ +/* parse error: expected expression */ +/* parse error: expected R_PAREN */ +/* parse error: expected COMMA */ +/* parse error: expected expression, item or let statement */ +(crate ::vec::from_elem((a.), $n)); +} "#]], ); } diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 595691b177368..40e8a2385f461 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -131,7 +131,6 @@ impl<'a, S: Span> TtIter<'a, S> { let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice()); let parser_input = to_parser_input(&buffer); let tree_traversal = entry_point.parse(&parser_input); - let mut cursor = buffer.begin(); let mut error = false; for step in tree_traversal.iter() { @@ -163,12 +162,10 @@ impl<'a, S: Span> TtIter<'a, S> { let mut curr = buffer.begin(); let mut res = vec![]; - if cursor.is_root() { - while curr != cursor { - let Some(token) = curr.token_tree() else { break }; - res.push(token.cloned()); - curr = curr.bump(); - } + while curr != cursor { + let Some(token) = curr.token_tree() else { break }; + res.push(token.cloned()); + curr = curr.bump(); } self.inner = self.inner.as_slice()[res.len()..].iter(); From 5285df4f6cdfc9048b108157e3c70b67473d5f0b Mon Sep 17 00:00:00 2001 From: surechen Date: Fri, 10 Nov 2023 10:11:24 +0800 Subject: [PATCH 06/28] remove redundant imports detects redundant imports that can be eliminated. for #117772 : In order to facilitate review and modification, split the checking code and removing redundant imports code into two PR. --- crates/stdx/src/anymap.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs index fd44e6c6d0f4c..7dba002c6ec6f 100644 --- a/crates/stdx/src/anymap.rs +++ b/crates/stdx/src/anymap.rs @@ -17,7 +17,6 @@ #![warn(missing_docs, unused_results)] -use core::convert::TryInto; use core::hash::Hasher; /// A hasher designed to eke a little more speed out, given `TypeId`’s known characteristics. From f9453e33c0a15a86a5044663bf91d605f01636e3 Mon Sep 17 00:00:00 2001 From: Ben Kimock Date: Sun, 10 Dec 2023 23:04:50 -0500 Subject: [PATCH 07/28] Apply suggestions from code review Co-authored-by: Waffle Maybe --- crates/proc-macro-api/src/version.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs index 238fb561554c9..87118a62650cc 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-api/src/version.rs @@ -85,8 +85,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&' } /// Check the version of rustc that was used to compile a proc macro crate's -/// /// binary file. 
+/// /// A proc macro crate binary's ".rustc" section has following byte layout: /// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes /// * ff060000 734e6150 is followed, it's the snappy format magic bytes, @@ -96,8 +96,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&' /// The bytes you get after decompressing the snappy format portion has /// following layout: /// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again) -/// * [crate root bytes] next 4 bytes is to store crate root position, -/// according to rustc's source code comment +/// * [crate root bytes] next 8 bytes (4 in old versions) is to store +/// crate root position, according to rustc's source code comment /// * [length byte] next 1 byte tells us how many bytes we should read next /// for the version string's utf8 bytes /// * [version string bytes encoded in utf8] <- GET THIS BOI @@ -119,7 +119,7 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { } let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]); // Last supported version is: - // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632 + // https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318 let (snappy_portion, bytes_before_version) = match version { 5 | 6 => (&dot_rustc[8..], 13), 7 | 8 => { @@ -153,9 +153,9 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { // 1 byte for length of version string // so 13 or 17 bytes in total, and we should check the last of those bytes // to know the length - let mut bytes_before_version = vec![0u8; bytes_before_version]; - uncompressed.read_exact(&mut bytes_before_version)?; - let length = *bytes_before_version.last().unwrap(); + let mut bytes = [0u8; 17]; + uncompressed.read_exact(&mut bytes[..bytes_before_version])?; + let length = bytes[bytes_before_version - 1]; let mut version_string_utf8 = vec![0u8; length as usize]; uncompressed.read_exact(&mut version_string_utf8)?; From f532576ac53ddcc666bc8d59e0b6437065e2f599 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 11 Dec 2023 11:16:01 +0200 Subject: [PATCH 08/28] Merge commit '457b966b171b09a7e57acb710fbca29a4b3526f0' into sync-from-ra --- Cargo.lock | 115 +- Cargo.toml | 26 +- crates/base-db/Cargo.toml | 8 +- crates/base-db/src/fixture.rs | 52 +- crates/base-db/src/input.rs | 39 +- crates/base-db/src/lib.rs | 15 +- crates/base-db/src/span.rs | 203 ++++ crates/cfg/src/lib.rs | 2 +- crates/cfg/src/tests.rs | 34 +- crates/flycheck/src/lib.rs | 2 +- crates/hir-def/Cargo.toml | 3 +- crates/hir-def/src/attr.rs | 8 +- crates/hir-def/src/attr/tests.rs | 13 +- crates/hir-def/src/body.rs | 12 + crates/hir-def/src/body/lower.rs | 40 +- crates/hir-def/src/body/tests.rs | 115 +- crates/hir-def/src/data.rs | 7 +- crates/hir-def/src/expander.rs | 26 +- crates/hir-def/src/find_path.rs | 5 +- crates/hir-def/src/generics.rs | 22 +- crates/hir-def/src/hir/format_args.rs | 28 +- crates/hir-def/src/item_scope.rs | 9 + crates/hir-def/src/item_tree.rs | 18 +- crates/hir-def/src/item_tree/lower.rs | 143 ++- crates/hir-def/src/item_tree/pretty.rs | 2 +- crates/hir-def/src/item_tree/tests.rs | 12 + crates/hir-def/src/lib.rs | 53 +- crates/hir-def/src/lower.rs | 21 +- .../macro_expansion_tests/builtin_fn_macro.rs | 4 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 160 ++- .../macro_expansion_tests/mbe/regression.rs | 26 + .../hir-def/src/macro_expansion_tests/mod.rs | 174 ++- .../src/macro_expansion_tests/proc_macros.rs 
| 59 + crates/hir-def/src/nameres/collector.rs | 113 +- crates/hir-def/src/nameres/mod_resolution.rs | 6 +- crates/hir-def/src/nameres/path_resolution.rs | 2 +- crates/hir-def/src/nameres/tests.rs | 4 +- .../hir-def/src/nameres/tests/incremental.rs | 241 +++- crates/hir-def/src/path.rs | 4 +- crates/hir-def/src/path/lower.rs | 70 +- crates/hir-def/src/resolver.rs | 8 + crates/hir-def/src/test_db.rs | 1 + crates/hir-def/src/visibility.rs | 18 +- crates/hir-expand/src/ast_id_map.rs | 63 +- crates/hir-expand/src/attrs.rs | 195 ++- crates/hir-expand/src/builtin_attr_macro.rs | 39 +- crates/hir-expand/src/builtin_derive_macro.rs | 460 ++++---- crates/hir-expand/src/builtin_fn_macro.rs | 223 ++-- crates/hir-expand/src/db.rs | 687 ++++++----- crates/hir-expand/src/eager.rs | 189 ++- crates/hir-expand/src/files.rs | 375 ++++++ crates/hir-expand/src/fixup.rs | 355 +++--- crates/hir-expand/src/hygiene.rs | 423 ++++--- crates/hir-expand/src/lib.rs | 986 +++++----------- crates/hir-expand/src/mod_path.rs | 85 +- crates/hir-expand/src/name.rs | 1 + crates/hir-expand/src/proc_macro.rs | 26 +- crates/hir-expand/src/quote.rs | 170 +-- crates/hir-expand/src/span.rs | 111 ++ crates/hir-ty/src/consteval.rs | 28 +- crates/hir-ty/src/diagnostics/decl_check.rs | 4 +- crates/hir-ty/src/display.rs | 15 +- crates/hir-ty/src/infer/expr.rs | 101 +- crates/hir-ty/src/infer/path.rs | 1 + crates/hir-ty/src/layout.rs | 3 +- crates/hir-ty/src/layout/adt.rs | 3 +- crates/hir-ty/src/lib.rs | 14 +- crates/hir-ty/src/lower.rs | 115 +- crates/hir-ty/src/method_resolution.rs | 9 +- crates/hir-ty/src/mir.rs | 3 +- crates/hir-ty/src/mir/eval.rs | 2 +- crates/hir-ty/src/mir/lower.rs | 4 +- crates/hir-ty/src/mir/monomorphization.rs | 3 +- crates/hir-ty/src/test_db.rs | 1 + crates/hir-ty/src/tests/regression.rs | 12 + crates/hir-ty/src/traits.rs | 22 +- crates/hir/src/attrs.rs | 12 +- crates/hir/src/db.rs | 23 +- crates/hir/src/diagnostics.rs | 11 +- crates/hir/src/lib.rs | 122 +- crates/hir/src/semantics.rs | 597 ++++++---- crates/hir/src/semantics/source_to_def.rs | 6 +- crates/hir/src/source_analyzer.rs | 162 ++- crates/hir/src/symbols.rs | 19 +- .../ide-assists/src/handlers/bool_to_enum.rs | 2 + .../convert_tuple_return_type_to_struct.rs | 2 + .../extract_expressions_from_format_string.rs | 4 +- .../src/handlers/extract_function.rs | 34 +- .../src/handlers/extract_module.rs | 2 +- .../src/handlers/fix_visibility.rs | 4 +- .../ide-assists/src/handlers/flip_binexpr.rs | 31 +- .../src/handlers/generate_constant.rs | 2 +- .../src/handlers/generate_delegate_methods.rs | 216 +++- .../src/handlers/generate_enum_variant.rs | 6 +- .../src/handlers/generate_function.rs | 5 +- .../ide-assists/src/handlers/inline_call.rs | 8 +- .../src/handlers/inline_local_variable.rs | 4 +- .../src/handlers/remove_unused_imports.rs | 4 +- .../replace_derive_with_manual_impl.rs | 10 +- .../replace_named_generic_with_impl.rs | 5 +- .../src/handlers/unnecessary_async.rs | 6 +- crates/ide-assists/src/lib.rs | 2 +- crates/ide-completion/src/completions/dot.rs | 12 +- .../src/completions/item_list/trait_impl.rs | 30 +- crates/ide-completion/src/completions/mod_.rs | 2 +- .../ide-completion/src/completions/record.rs | 25 + crates/ide-completion/src/completions/use_.rs | 4 +- crates/ide-completion/src/item.rs | 9 +- crates/ide-completion/src/lib.rs | 2 +- crates/ide-completion/src/render.rs | 412 ++++++- crates/ide-completion/src/render/function.rs | 65 +- crates/ide-completion/src/tests/expression.rs | 252 +++- crates/ide-completion/src/tests/flyimport.rs | 54 
+- crates/ide-completion/src/tests/item.rs | 24 +- crates/ide-completion/src/tests/pattern.rs | 4 +- crates/ide-completion/src/tests/predicate.rs | 42 +- crates/ide-completion/src/tests/record.rs | 4 +- crates/ide-completion/src/tests/special.rs | 94 +- crates/ide-completion/src/tests/type_pos.rs | 486 ++++---- crates/ide-completion/src/tests/use_tree.rs | 20 +- crates/ide-db/src/apply_change.rs | 17 +- crates/ide-db/src/defs.rs | 11 +- crates/ide-db/src/helpers.rs | 4 +- crates/ide-db/src/lib.rs | 13 +- crates/ide-db/src/path_transform.rs | 40 +- crates/ide-db/src/rename.rs | 93 +- crates/ide-db/src/search.rs | 123 +- .../ide-db/src/test_data/test_doc_alias.txt | 98 +- .../test_symbol_index_collection.txt | 408 ++++--- .../src/handlers/field_shorthand.rs | 9 +- .../src/handlers/inactive_code.rs | 2 +- .../src/handlers/invalid_derive_target.rs | 2 +- .../src/handlers/json_is_not_rust.rs | 4 +- .../src/handlers/macro_error.rs | 24 +- .../src/handlers/malformed_derive.rs | 2 +- .../src/handlers/mismatched_arg_count.rs | 5 +- .../src/handlers/missing_fields.rs | 2 +- .../src/handlers/missing_unsafe.rs | 1 + .../src/handlers/no_such_field.rs | 2 +- .../replace_filter_map_next_with_find_map.rs | 2 +- .../handlers/trait_impl_missing_assoc_item.rs | 14 + .../trait_impl_redundant_assoc_item.rs | 79 ++ .../src/handlers/type_mismatch.rs | 23 +- .../src/handlers/typed_hole.rs | 4 +- .../src/handlers/unlinked_file.rs | 10 +- .../src/handlers/unresolved_extern_crate.rs | 15 + .../src/handlers/unresolved_field.rs | 25 +- .../src/handlers/unresolved_method.rs | 55 +- .../src/handlers/unresolved_module.rs | 15 +- .../src/handlers/useless_braces.rs | 7 +- crates/ide-diagnostics/src/lib.rs | 45 +- crates/ide-diagnostics/src/tests.rs | 49 +- crates/ide-ssr/src/lib.rs | 2 +- crates/ide/Cargo.toml | 1 + crates/ide/src/annotations.rs | 6 +- crates/ide/src/call_hierarchy.rs | 18 +- crates/ide/src/doc_links.rs | 8 +- crates/ide/src/doc_links/tests.rs | 8 +- crates/ide/src/expand_macro.rs | 16 +- crates/ide/src/extend_selection.rs | 14 +- crates/ide/src/goto_declaration.rs | 5 +- crates/ide/src/goto_definition.rs | 60 +- crates/ide/src/goto_implementation.rs | 14 +- crates/ide/src/goto_type_definition.rs | 92 +- crates/ide/src/highlight_related.rs | 77 +- crates/ide/src/hover.rs | 49 +- crates/ide/src/hover/tests.rs | 112 ++ crates/ide/src/inlay_hints.rs | 1 + .../ide/src/inlay_hints/closure_captures.rs | 5 +- crates/ide/src/inlay_hints/implicit_drop.rs | 22 +- crates/ide/src/interpret_function.rs | 14 +- crates/ide/src/lib.rs | 7 +- crates/ide/src/moniker.rs | 4 +- crates/ide/src/navigation_target.rs | 620 ++++++---- crates/ide/src/parent_module.rs | 4 +- crates/ide/src/references.rs | 103 +- crates/ide/src/rename.rs | 93 +- crates/ide/src/runnables.rs | 20 +- crates/ide/src/signature_help.rs | 7 +- crates/ide/src/static_index.rs | 11 +- crates/ide/src/status.rs | 10 +- crates/ide/src/syntax_highlighting.rs | 23 +- crates/ide/src/syntax_highlighting/format.rs | 20 +- .../ide/src/syntax_highlighting/highlight.rs | 9 +- .../test_data/highlight_macros.html | 14 +- .../test_data/highlight_strings.html | 48 +- crates/ide/src/syntax_highlighting/tests.rs | 47 +- crates/ide/src/view_hir.rs | 6 +- crates/ide/src/view_mir.rs | 6 +- crates/intern/Cargo.toml | 2 +- crates/limit/src/lib.rs | 2 +- crates/load-cargo/src/lib.rs | 37 +- crates/mbe/src/benchmark.rs | 62 +- crates/mbe/src/expander.rs | 46 +- crates/mbe/src/expander/matcher.rs | 175 +-- crates/mbe/src/expander/transcriber.rs | 338 +++--- crates/mbe/src/lib.rs | 157 
+-- crates/mbe/src/parser.rs | 68 +- crates/mbe/src/syntax_bridge.rs | 1041 ++++++++--------- crates/mbe/src/syntax_bridge/tests.rs | 15 +- crates/mbe/src/to_parser_input.rs | 4 +- crates/mbe/src/token_map.rs | 156 +-- crates/mbe/src/tt_iter.rs | 37 +- crates/parser/src/lib.rs | 2 +- crates/paths/src/lib.rs | 2 +- crates/proc-macro-api/Cargo.toml | 8 +- crates/proc-macro-api/src/lib.rs | 41 +- crates/proc-macro-api/src/msg.rs | 121 +- crates/proc-macro-api/src/msg/flat.rs | 196 +++- crates/proc-macro-srv-cli/src/main.rs | 4 +- crates/proc-macro-srv/src/dylib.rs | 12 +- crates/proc-macro-srv/src/lib.rs | 44 +- crates/proc-macro-srv/src/proc_macros.rs | 85 +- crates/proc-macro-srv/src/server.rs | 45 +- .../proc-macro-srv/src/server/token_stream.rs | 64 +- crates/proc-macro-srv/src/tests/mod.rs | 98 +- crates/proc-macro-srv/src/tests/utils.rs | 26 +- crates/proc-macro-test/imp/src/lib.rs | 2 +- crates/proc-macro-test/src/lib.rs | 2 +- crates/profile/src/lib.rs | 2 +- crates/project-model/src/lib.rs | 2 +- crates/project-model/src/tests.rs | 4 +- crates/rust-analyzer/Cargo.toml | 5 - crates/rust-analyzer/src/bin/main.rs | 2 +- crates/rust-analyzer/src/cargo_target_spec.rs | 4 +- .../rust-analyzer/src/cli/analysis_stats.rs | 2 +- crates/rust-analyzer/src/cli/diagnostics.rs | 2 +- crates/rust-analyzer/src/diagnostics.rs | 69 +- crates/rust-analyzer/src/global_state.rs | 8 +- crates/rust-analyzer/src/handlers/request.rs | 8 +- .../src/integrated_benchmarks.rs | 3 +- crates/rust-analyzer/src/lib.rs | 2 +- crates/rust-analyzer/src/lsp/to_proto.rs | 2 +- crates/rust-analyzer/src/reload.rs | 38 +- crates/rust-analyzer/tests/slow-tests/main.rs | 2 +- crates/rust-analyzer/tests/slow-tests/tidy.rs | 1 + crates/rustc-dependencies/Cargo.toml | 8 +- crates/sourcegen/src/lib.rs | 2 +- crates/stdx/Cargo.toml | 3 +- crates/stdx/src/lib.rs | 21 +- crates/syntax/Cargo.toml | 2 +- crates/syntax/src/ast/token_ext.rs | 6 + crates/syntax/src/lib.rs | 48 +- crates/syntax/src/parsing/reparsing.rs | 12 +- crates/syntax/src/ptr.rs | 8 +- crates/syntax/src/tests.rs | 2 +- crates/syntax/src/token_text.rs | 2 +- crates/test-utils/src/lib.rs | 2 +- crates/test-utils/src/minicore.rs | 38 +- crates/text-edit/src/lib.rs | 2 +- crates/toolchain/src/lib.rs | 2 +- crates/tt/Cargo.toml | 1 + crates/tt/src/lib.rs | 264 +++-- crates/vfs-notify/src/lib.rs | 2 +- crates/vfs/src/lib.rs | 19 +- editors/code/src/run.ts | 151 ++- editors/code/src/toolchain.ts | 2 +- lib/la-arena/src/lib.rs | 2 +- lib/line-index/src/lib.rs | 5 +- lib/line-index/src/tests.rs | 28 +- lib/lsp-server/Cargo.toml | 2 +- lib/lsp-server/src/lib.rs | 2 +- xtask/src/main.rs | 2 +- 263 files changed, 9743 insertions(+), 6213 deletions(-) create mode 100644 crates/base-db/src/span.rs create mode 100644 crates/hir-expand/src/files.rs create mode 100644 crates/hir-expand/src/span.rs create mode 100644 crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs diff --git a/Cargo.lock b/Cargo.lock index 876fd93aab713..46efbdd93c97f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -276,7 +276,7 @@ dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.9.0", + "memoffset", "scopeguard", ] @@ -301,12 +301,12 @@ dependencies = [ [[package]] name = "dashmap" -version = "5.4.0" +version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - 
"hashbrown 0.12.3", + "hashbrown", "lock_api", "once_cell", "parking_lot_core", @@ -448,15 +448,9 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "hashbrown" -version = "0.12.3" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" [[package]] name = "heck" @@ -509,7 +503,7 @@ dependencies = [ "either", "expect-test", "fst", - "hashbrown 0.12.3", + "hashbrown", "hir-expand", "indexmap", "intern", @@ -539,7 +533,7 @@ dependencies = [ "cov-mark", "either", "expect-test", - "hashbrown 0.12.3", + "hashbrown", "intern", "itertools", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -608,6 +602,7 @@ dependencies = [ name = "ide" version = "0.0.0" dependencies = [ + "arrayvec", "cfg", "cov-mark", "crossbeam-channel", @@ -764,7 +759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown", ] [[package]] @@ -792,7 +787,7 @@ name = "intern" version = "0.0.0" dependencies = [ "dashmap", - "hashbrown 0.12.3", + "hashbrown", "rustc-hash", "triomphe", ] @@ -938,23 +933,23 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "lsp-server" version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928" dependencies = [ "crossbeam-channel", - "ctrlc", "log", - "lsp-types", "serde", "serde_json", ] [[package]] name = "lsp-server" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928" +version = "0.7.5" dependencies = [ "crossbeam-channel", + "ctrlc", "log", + "lsp-types", "serde", "serde_json", ] @@ -1002,15 +997,6 @@ dependencies = [ "libc", ] -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] - [[package]] name = "memoffset" version = "0.9.0" @@ -1061,11 +1047,11 @@ dependencies = [ [[package]] name = "miow" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123" +checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044" dependencies = [ - "windows-sys 0.42.0", + "windows-sys 0.48.0", ] [[package]] @@ -1177,15 +1163,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.6" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall 0.4.1", "smallvec", - "windows-sys 0.42.0", + "windows-targets", ] [[package]] @@ -1255,6 +1241,9 @@ checksum = 
"e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" name = "proc-macro-api" version = "0.0.0" dependencies = [ + "base-db", + "indexmap", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "memmap2", "object 0.32.0", "paths", @@ -1263,6 +1252,7 @@ dependencies = [ "serde_json", "snap", "stdx", + "text-size", "tracing", "triomphe", "tt", @@ -1402,9 +1392,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5f38444d48da534b3bb612713fce9b0aeeffb2e0dfa242764f55482acc5b52d" +checksum = "7816f980fab89e878ff2e916e2077d484e3aa1c619a3cc982c8a417c3dfe45fa" dependencies = [ "bitflags 1.3.2", "ra-ap-rustc_index", @@ -1413,9 +1403,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69fb5da07e1a39222d9c311203123c3b6a86420fa06dc695aa1661b0aecf8d16" +checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24" dependencies = [ "arrayvec", "ra-ap-rustc_index_macros", @@ -1424,9 +1414,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d69f9f6af58124f2da0cb8b0c3d8494e0d883a5fe0c6732258bde81ac5a87cc" +checksum = "66a9424018828155a3e3596515598f90e68427d8f35eff6df7f0856c73fc58a8" dependencies = [ "proc-macro2", "quote", @@ -1436,9 +1426,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5e8650195795c4023d8321846466994a975bc457cb8a91c0b3b17a5fc8ba40" +checksum = "dc741c7a78103efab416b562e35bd73c8d4967478575010c86c6062f8d3cbf29" dependencies = [ "unicode-properties", "unicode-xid", @@ -1446,9 +1436,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6b325ee1ec90e4dbd4394913adf4ef32e4fcf2b311ec9563a0fa50cd549af6" +checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f" dependencies = [ "ra-ap-rustc_index", "ra-ap-rustc_lexer", @@ -1476,31 +1466,31 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.16" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "rowan" -version = "0.15.11" +version = "0.15.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf" +checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49" dependencies = [ "countme", - "hashbrown 0.12.3", - "memoffset 0.8.0", + "hashbrown", + "memoffset", "rustc-hash", "text-size", ] @@ -1524,16 +1514,14 @@ dependencies = [ "ide-ssr", 
"itertools", "load-cargo", - "lsp-server 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lsp-server 0.7.4", "lsp-types", "mbe", "mimalloc", - "mio", "nohash-hasher", "num_cpus", "oorandom", "parking_lot", - "parking_lot_core", "parser", "proc-macro-api", "profile", @@ -1564,11 +1552,10 @@ dependencies = [ [[package]] name = "rust-analyzer-salsa" -version = "0.17.0-pre.3" +version = "0.17.0-pre.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca92b657d614d076800aa7bf5d5ba33564e71fa7f16cd79eacdfe301a50ab1c" +checksum = "16c42b8737c320578b441a82daf7cdf8d897468de64e8a774fa54b53a50b6cc0" dependencies = [ - "crossbeam-utils", "indexmap", "lock_api", "log", @@ -1581,9 +1568,9 @@ dependencies = [ [[package]] name = "rust-analyzer-salsa-macros" -version = "0.17.0-pre.3" +version = "0.17.0-pre.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b190359266d293f2ee13eaa502a766dc8b77b63fbaa5d460d24fd0210675ceef" +checksum = "db72b0883f3592ade2be15a10583c75e0b269ec26e1190800fda2e2ce5ae6634" dependencies = [ "heck", "proc-macro2", @@ -1751,6 +1738,7 @@ dependencies = [ "always-assert", "backtrace", "crossbeam-channel", + "itertools", "jod-thread", "libc", "miow", @@ -2010,6 +1998,7 @@ version = "0.0.0" dependencies = [ "smol_str", "stdx", + "text-size", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 272f456bf9f86..f3f01aab8eee6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,7 +42,7 @@ debug = 0 # ungrammar = { path = "../ungrammar" } -# salsa = { path = "../salsa" } +# rust-analyzer-salsa = { path = "../salsa" } [workspace.dependencies] # local crates @@ -95,14 +95,25 @@ bitflags = "2.4.1" cargo_metadata = "0.18.1" dissimilar = "1.0.7" either = "1.9.0" +hashbrown = { version = "0.14", features = [ + "inline-more", +], default-features = false } indexmap = "2.1.0" itertools = "0.12.0" libc = "0.2.150" +nohash-hasher = "0.2.0" +rayon = "1.8.0" +rust-analyzer-salsa = "0.17.0-pre.4" +rustc-hash = "1.1.0" +serde = { version = "1.0.192", features = ["derive"] } +serde_json = "1.0.108" smallvec = { version = "1.10.0", features = [ "const_new", "union", "const_generics", ] } +smol_str = "0.2.0" +text-size = "1.1.1" tracing = "0.1.40" tracing-tree = "0.3.0" tracing-subscriber = { version = "0.3.18", default-features = false, features = [ @@ -110,15 +121,8 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features = "fmt", "tracing-log", ] } -smol_str = "0.2.0" -nohash-hasher = "0.2.0" -text-size = "1.1.1" -rayon = "1.8.0" -serde = { version = "1.0.192", features = ["derive"] } -serde_json = "1.0.108" triomphe = { version = "0.1.10", default-features = false, features = ["std"] } -# can't upgrade due to dashmap depending on 0.12.3 currently -hashbrown = { version = "0.12.3", features = [ - "inline-more", -], default-features = false } xshell = "0.2.5" + +# We need to freeze the version of the crate, as the raw-api feature is considered unstable +dashmap = { version = "=5.5.3", features = ["raw-api"] } diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml index 5ad88f65188e0..393ffe155ba88 100644 --- a/crates/base-db/Cargo.toml +++ b/crates/base-db/Cargo.toml @@ -12,12 +12,10 @@ rust-version.workspace = true doctest = false [dependencies] -rust-analyzer-salsa = "0.17.0-pre.3" -rustc-hash = "1.1.0" - -triomphe.workspace = true - la-arena.workspace = true +rust-analyzer-salsa.workspace = true +rustc-hash.workspace = true +triomphe.workspace = true # local deps cfg.workspace = true diff --git 
a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 3da555a47acb2..bfdd21555f0aa 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -8,11 +8,12 @@ use test_utils::{ ESCAPED_CURSOR_MARKER, }; use triomphe::Arc; -use tt::token_id::{Leaf, Subtree, TokenTree}; +use tt::{Leaf, Subtree, TokenTree}; use vfs::{file_set::FileSet, VfsPath}; use crate::{ input::{CrateName, CrateOrigin, LangCrateOrigin}, + span::SpanData, Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env, FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId, @@ -134,7 +135,7 @@ impl ChangeFixture { let mut file_set = FileSet::default(); let mut current_source_root_kind = SourceRootKind::Local; - let mut file_id = FileId(0); + let mut file_id = FileId::from_raw(0); let mut roots = Vec::new(); let mut file_position = None; @@ -209,7 +210,7 @@ impl ChangeFixture { let path = VfsPath::new_virtual_path(meta.path); file_set.insert(file_id, path); files.push(file_id); - file_id.0 += 1; + file_id = FileId::from_raw(file_id.index() + 1); } if crates.is_empty() { @@ -254,7 +255,7 @@ impl ChangeFixture { if let Some(mini_core) = mini_core { let core_file = file_id; - file_id.0 += 1; + file_id = FileId::from_raw(file_id.index() + 1); let mut fs = FileSet::default(); fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string())); @@ -295,7 +296,6 @@ impl ChangeFixture { let mut proc_macros = ProcMacros::default(); if !proc_macro_names.is_empty() { let proc_lib_file = file_id; - file_id.0 += 1; proc_macro_defs.extend(default_test_proc_macros()); let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs); @@ -539,10 +539,13 @@ struct IdentityProcMacroExpander; impl ProcMacroExpander for IdentityProcMacroExpander { fn expand( &self, - subtree: &Subtree, - _: Option<&Subtree>, + subtree: &Subtree, + _: Option<&Subtree>, _: &Env, - ) -> Result { + _: SpanData, + _: SpanData, + _: SpanData, + ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -553,10 +556,13 @@ struct AttributeInputReplaceProcMacroExpander; impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander { fn expand( &self, - _: &Subtree, - attrs: Option<&Subtree>, + _: &Subtree, + attrs: Option<&Subtree>, _: &Env, - ) -> Result { + _: SpanData, + _: SpanData, + _: SpanData, + ) -> Result, ProcMacroExpansionError> { attrs .cloned() .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into())) @@ -568,11 +574,14 @@ struct MirrorProcMacroExpander; impl ProcMacroExpander for MirrorProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree, + _: Option<&Subtree>, _: &Env, - ) -> Result { - fn traverse(input: &Subtree) -> Subtree { + _: SpanData, + _: SpanData, + _: SpanData, + ) -> Result, ProcMacroExpansionError> { + fn traverse(input: &Subtree) -> Subtree { let mut token_trees = vec![]; for tt in input.token_trees.iter().rev() { let tt = match tt { @@ -595,13 +604,16 @@ struct ShortenProcMacroExpander; impl ProcMacroExpander for ShortenProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree, + _: Option<&Subtree>, _: &Env, - ) -> Result { + _: SpanData, + _: SpanData, + _: SpanData, + ) -> Result, ProcMacroExpansionError> { return Ok(traverse(input)); - fn traverse(input: &Subtree) -> Subtree { + fn traverse(input: &Subtree) -> 
Subtree { let token_trees = input .token_trees .iter() @@ -613,7 +625,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander { Subtree { delimiter: input.delimiter, token_trees } } - fn modify_leaf(leaf: &Leaf) -> Leaf { + fn modify_leaf(leaf: &Leaf) -> Leaf { let mut leaf = leaf.clone(); match &mut leaf { Leaf::Literal(it) => { diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index e4f78321e215a..c2472363aacd3 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -13,9 +13,10 @@ use la_arena::{Arena, Idx}; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::SmolStr; use triomphe::Arc; -use tt::token_id::Subtree; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; +use crate::span::SpanData; + // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, // then the crate for the proc-macro hasn't been build yet as the build data is missing. pub type ProcMacroPaths = FxHashMap, AbsPathBuf), String>>; @@ -242,6 +243,9 @@ impl CrateDisplayName { } } +// FIXME: These should not be defined in here? Why does base db know about proc-macros +// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either. + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct ProcMacroId(pub u32); @@ -255,10 +259,13 @@ pub enum ProcMacroKind { pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { fn expand( &self, - subtree: &Subtree, - attrs: Option<&Subtree>, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, env: &Env, - ) -> Result; + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, + ) -> Result, ProcMacroExpansionError>; } #[derive(Debug)] @@ -323,7 +330,9 @@ pub struct CrateData { pub dependencies: Vec, pub origin: CrateOrigin, pub is_proc_macro: bool, - // FIXME: These things should not be per crate! These are more per workspace crate graph level things + // FIXME: These things should not be per crate! These are more per workspace crate graph level + // things. This info does need to be somewhat present though as to prevent deduplication from + // happening across different workspaces with different layouts. 
pub target_layout: TargetLayoutLoadResult, pub channel: Option, } @@ -871,7 +880,7 @@ mod tests { fn detect_cyclic_dependency_indirect() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -884,7 +893,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, @@ -897,7 +906,7 @@ mod tests { None, ); let crate3 = graph.add_crate_root( - FileId(3u32), + FileId::from_raw(3u32), Edition2018, None, None, @@ -933,7 +942,7 @@ mod tests { fn detect_cyclic_dependency_direct() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -946,7 +955,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, @@ -976,7 +985,7 @@ mod tests { fn it_works() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -989,7 +998,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, @@ -1002,7 +1011,7 @@ mod tests { None, ); let crate3 = graph.add_crate_root( - FileId(3u32), + FileId::from_raw(3u32), Edition2018, None, None, @@ -1032,7 +1041,7 @@ mod tests { fn dashes_are_normalized() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -1045,7 +1054,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 40cfab88afdbe..57e7934367bb2 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -1,10 +1,11 @@ //! base_db defines basic database traits. The concrete DB is defined by ide. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod input; mod change; pub mod fixture; +pub mod span; use std::panic; @@ -12,14 +13,13 @@ use rustc_hash::FxHashSet; use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; use triomphe::Arc; -pub use crate::input::DependencyKind; pub use crate::{ change::Change, input::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, - Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, - ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult, + DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, + ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, + ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult, }, }; pub use salsa::{self, Cancelled}; @@ -68,8 +68,7 @@ pub trait FileLoader { /// model. Everything else in rust-analyzer is derived from these queries. #[salsa::query_group(SourceDatabaseStorage)] pub trait SourceDatabase: FileLoader + std::fmt::Debug { - // Parses the file into the syntax tree. - #[salsa::invoke(parse_query)] + /// Parses the file into the syntax tree. fn parse(&self, file_id: FileId) -> Parse; /// The crate graph. 
@@ -81,7 +80,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
     fn proc_macros(&self) -> Arc<ProcMacros>;
 }
 
-fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
     let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
     let text = db.file_text(file_id);
     SourceFile::parse(&text)
diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
new file mode 100644
index 0000000000000..3464f4cb6d1c7
--- /dev/null
+++ b/crates/base-db/src/span.rs
@@ -0,0 +1,203 @@
+//! File and span related types.
+// FIXME: This should probably be moved into its own crate.
+use std::fmt;
+
+use salsa::InternId;
+use tt::SyntaxContext;
+use vfs::FileId;
+
+pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
+
+// The first index is always the root node's AstId
+pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
+    la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
+
+pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct SyntaxContextId(InternId);
+
+impl fmt::Debug for SyntaxContextId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if *self == Self::SELF_REF {
+            f.debug_tuple("SyntaxContextId")
+                .field(&{
+                    #[derive(Debug)]
+                    #[allow(non_camel_case_types)]
+                    struct SELF_REF;
+                    SELF_REF
+                })
+                .finish()
+        } else {
+            f.debug_tuple("SyntaxContextId").field(&self.0).finish()
+        }
+    }
+}
+crate::impl_intern_key!(SyntaxContextId);
+
+impl fmt::Display for SyntaxContextId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", self.0.as_u32())
+    }
+}
+
+impl SyntaxContext for SyntaxContextId {
+    const DUMMY: Self = Self::ROOT;
+}
+// inherent trait impls please tyvm
+impl SyntaxContextId {
+    pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
+    // veykril(HACK): FIXME salsa doesn't allow us to fetch the id of the current input to be allocated so
+    // we need a special value that behaves as the current context.
+    pub const SELF_REF: Self =
+        SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
+
+    pub fn is_root(self) -> bool {
+        self == Self::ROOT
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct SpanAnchor {
+    pub file_id: FileId,
+    pub ast_id: ErasedFileAstId,
+}
+
+impl fmt::Debug for SpanAnchor {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
+    }
+}
+
+impl tt::SpanAnchor for SpanAnchor {
+    const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID };
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust are macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
+/// +/// The two variants are encoded in a single u32 which are differentiated by the MSB. +/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a +/// `MacroCallId`. +#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct HirFileId(u32); + +impl From for u32 { + fn from(value: HirFileId) -> Self { + value.0 + } +} + +impl From for HirFileId { + fn from(value: MacroCallId) -> Self { + value.as_file() + } +} + +impl fmt::Debug for HirFileId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.repr().fmt(f) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroFileId { + pub macro_call_id: MacroCallId, +} + +/// `MacroCallId` identifies a particular macro invocation, like +/// `println!("Hello, {}", world)`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct MacroCallId(salsa::InternId); +crate::impl_intern_key!(MacroCallId); + +impl MacroCallId { + pub fn as_file(self) -> HirFileId { + MacroFileId { macro_call_id: self }.into() + } + + pub fn as_macro_file(self) -> MacroFileId { + MacroFileId { macro_call_id: self } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFileId), +} + +impl fmt::Debug for HirFileIdRepr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(), + Self::MacroFile(arg0) => { + f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish() + } + } + } +} + +impl From for HirFileId { + fn from(id: FileId) -> Self { + assert!(id.index() < Self::MAX_FILE_ID); + HirFileId(id.index()) + } +} + +impl From for HirFileId { + fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self { + let id = id.as_u32(); + assert!(id < Self::MAX_FILE_ID); + HirFileId(id | Self::MACRO_FILE_TAG_MASK) + } +} + +impl HirFileId { + const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; + const MACRO_FILE_TAG_MASK: u32 = 1 << 31; + + #[inline] + pub fn is_macro(self) -> bool { + self.0 & Self::MACRO_FILE_TAG_MASK != 0 + } + + #[inline] + pub fn macro_file(self) -> Option { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => None, + _ => Some(MacroFileId { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } + + #[inline] + pub fn file_id(self) -> Option { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => Some(FileId::from_raw(self.0)), + _ => None, + } + } + + #[inline] + pub fn repr(self) -> HirFileIdRepr { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)), + _ => HirFileIdRepr::MacroFile(MacroFileId { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } +} diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index 8bbe5e2a8c29b..6b178e7b04a76 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -1,6 +1,6 @@ //! 
cfg defines conditional compiling options, `cfg` attribute parser and evaluator -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod cfg_expr; mod dnf; diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index bdc3f854e0866..c7ac1af934a08 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,37 +1,31 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; -use mbe::syntax_node_to_token_tree; +use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; use syntax::{ast, AstNode}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; fn assert_parse_result(input: &str, expected: CfgExpr) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } fn check_dnf(input: &str, expect: Expect) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); expect.assert_eq(&actual); } fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); @@ -40,11 +34,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { #[track_caller] fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 0749d91eb32a1..68faca51e8263 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -2,7 +2,7 @@ //! another compatible command (f.x. clippy) in a background thread and provide //! LSP diagnostics based on the output of the command. 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::{ ffi::OsString, diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index e4f2e14c51c72..2d174517605f1 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -15,8 +15,7 @@ doctest = false arrayvec = "0.7.2" bitflags.workspace = true cov-mark = "2.0.0-pre.1" -# We need to freeze the version of the crate, as the raw-api feature is considered unstable -dashmap = { version = "=5.4.0", features = ["raw-api"] } +dashmap.workspace = true drop_bomb = "0.1.5" either.workspace = true fst = { version = "0.4.7", default-features = false } diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index fa3025e0303d1..942b28fc1450e 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -421,6 +421,7 @@ impl AttrsWithOwner { RawAttrs::from_attrs_owner( db.upcast(), src.with_value(&src.value[it.local_id()]), + db.span_map(src.file_id).as_ref(), ) } GenericParamId::TypeParamId(it) => { @@ -428,11 +429,16 @@ impl AttrsWithOwner { RawAttrs::from_attrs_owner( db.upcast(), src.with_value(&src.value[it.local_id()]), + db.span_map(src.file_id).as_ref(), ) } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id])) + RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(&src.value[it.local_id]), + db.span_map(src.file_id).as_ref(), + ) } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index e4c8d446af7bd..0f98a4ec93c63 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -1,17 +1,20 @@ //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. +use base_db::FileId; +use hir_expand::span::{RealSpanMap, SpanMapRef}; use mbe::syntax_node_to_token_tree; use syntax::{ast, AstNode}; use crate::attr::{DocAtom, DocExpr}; fn assert_parse_result(input: &str, expected: DocExpr) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree( + tt.syntax(), + SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))), + ); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); } diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 1942c60c075d7..db28c6731ece1 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -95,6 +95,8 @@ pub struct BodySourceMap { field_map_back: FxHashMap, pat_field_map_back: FxHashMap, + format_args_template_map: FxHashMap>, + expansions: FxHashMap>, HirFileId>, /// Diagnostics accumulated during body lowering. 
These contain `AstPtr`s and so are stored in @@ -387,6 +389,14 @@ impl BodySourceMap { self.expr_map.get(&src).copied() } + pub fn implicit_format_args( + &self, + node: InFile<&ast::FormatArgsExpr>, + ) -> Option<&[(syntax::TextRange, Name)]> { + let src = node.map(AstPtr::new).map(AstPtr::upcast::); + self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref) + } + /// Get a reference to the body source map's diagnostics. pub fn diagnostics(&self) -> &[BodyDiagnostic] { &self.diagnostics @@ -403,8 +413,10 @@ impl BodySourceMap { field_map_back, pat_field_map_back, expansions, + format_args_template_map, diagnostics, } = self; + format_args_template_map.shrink_to_fit(); expr_map.shrink_to_fit(); expr_map_back.shrink_to_fit(); pat_map.shrink_to_fit(); diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index e4158d7564bdb..c6a9093201592 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1025,7 +1025,7 @@ impl ExprCollector<'_> { let id = collector(self, Some(expansion.tree())); self.ast_id_map = prev_ast_id_map; - self.expander.exit(self.db, mark); + self.expander.exit(mark); id } None => collector(self, None), @@ -1597,13 +1597,25 @@ impl ExprCollector<'_> { }); let template = f.template(); let fmt_snippet = template.as_ref().map(ToString::to_string); + let mut mappings = vec![]; let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) { - Some((s, is_direct_literal)) => { - format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| { - self.alloc_expr_desugared(Expr::Path(Path::from(name))) - }) - } - None => FormatArgs { template: Default::default(), arguments: args.finish() }, + Some((s, is_direct_literal)) => format_args::parse( + &s, + fmt_snippet, + args, + is_direct_literal, + |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), + |name, span| { + if let Some(span) = span { + mappings.push((span, name.clone())) + } + }, + ), + None => FormatArgs { + template: Default::default(), + arguments: args.finish(), + orphans: Default::default(), + }, }; // Create a list of all _unique_ (argument, format trait) combinations. @@ -1742,18 +1754,26 @@ impl ExprCollector<'_> { }); let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe { id: None, - statements: Box::default(), + // We collect the unused expressions here so that we still infer them instead of + // dropping them out of the expression tree + statements: fmt + .orphans + .into_iter() + .map(|expr| Statement::Expr { expr, has_semi: true }) + .collect(), tail: Some(unsafe_arg_new), }); - self.alloc_expr( + let idx = self.alloc_expr( Expr::Call { callee: new_v1_formatted, args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]), is_assignee_expr: false, }, syntax_ptr, - ) + ); + self.source_map.format_args_template_map.insert(idx, mappings); + idx } /// Generate a hir expression for a format_args placeholder specification. diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs index 1658757d2b6ed..2b432dfbb92bc 100644 --- a/crates/hir-def/src/body/tests.rs +++ b/crates/hir-def/src/body/tests.rs @@ -143,7 +143,6 @@ mod m { #[test] fn desugar_builtin_format_args() { - // Regression test for a path resolution bug introduced with inner item handling. 
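The new `format_args_template_map` field on `BodySourceMap` is a side table from a lowered `format_args!` expression to the `(TextRange, Name)` pairs of its implicitly captured arguments, and `implicit_format_args` is its read path. A minimal standalone model of that lookup, using plain std types in place of the real arena ids and interned names (all hypothetical):

use std::collections::HashMap;

type ExprId = u32;

// Hypothetical stand-in for the new side table: expression id ->
// (range-in-template, captured name), queried later by the IDE layer.
struct SourceMap {
    format_args_template_map: HashMap<ExprId, Box<[((u32, u32), String)]>>,
}

impl SourceMap {
    fn implicit_format_args(&self, expr: ExprId) -> Option<&[((u32, u32), String)]> {
        self.format_args_template_map.get(&expr).map(|it| &**it)
    }
}

fn main() {
    let mut map = HashMap::new();
    map.insert(0, vec![((1, 6), String::from("count"))].into_boxed_slice());
    let sm = SourceMap { format_args_template_map: map };
    assert_eq!(sm.implicit_format_args(0).unwrap().len(), 1);
}
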
let (db, body, def) = lower( r#" //- minicore: fmt @@ -161,7 +160,7 @@ fn main() { let count = 10; builtin#lang(Arguments::new_v1_formatted)( &[ - "\"hello ", " ", " friends, we ", " ", "", "\"", + "hello ", " ", " friends, we ", " ", "", ], &[ builtin#lang(Argument::new_display)( @@ -221,3 +220,115 @@ fn main() { }"#]] .assert_eq(&body.pretty_print(&db, def)) } + +#[test] +fn test_macro_hygiene() { + let (db, body, def) = lower( + r##" +//- minicore: fmt, from +//- /main.rs +mod error; + +use crate::error::error; + +fn main() { + // _ = forces body expansion instead of block def map expansion + _ = error!("Failed to resolve path `{}`", node.text()); +} +//- /error.rs +macro_rules! _error { + ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))} +} +pub(crate) use _error as error; +macro_rules! _intermediate { + ($arg:expr) => {$crate::error::SsrError::new($arg)} +} +pub(crate) use _intermediate as intermediate; + +pub struct SsrError(pub(crate) core::fmt::Arguments); + +impl SsrError { + pub(crate) fn new(message: impl Into) -> SsrError { + SsrError(message.into()) + } +} +"##, + ); + + assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]); + expect![[r#" + fn main() { + _ = $crate::error::SsrError::new( + builtin#lang(Arguments::new_v1_formatted)( + &[ + "Failed to resolve path `", "`", + ], + &[ + builtin#lang(Argument::new_display)( + &node.text(), + ), + ], + &[ + builtin#lang(Placeholder::new)( + 0usize, + ' ', + builtin#lang(Alignment::Unknown), + 0u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Implied), + ), + ], + unsafe { + builtin#lang(UnsafeArg::new)() + }, + ), + ); + }"#]] + .assert_eq(&body.pretty_print(&db, def)) +} + +#[test] +fn regression_10300() { + let (db, body, def) = lower( + r#" +//- minicore: concat, panic +mod private { + pub use core::concat; +} + +macro_rules! 
m { + () => { + panic!(concat!($crate::private::concat!("cc"))); + }; +} + +fn f() { + m!(); +} +"#, + ); + + let (_, source_map) = db.body_with_source_map(def.into()); + assert_eq!(source_map.diagnostics(), &[]); + + for (_, def_map) in body.blocks(&db) { + assert_eq!(def_map.diagnostics(), &[]); + } + + expect![[r#" + fn f() { + $crate::panicking::panic_fmt( + builtin#lang(Arguments::new_v1_formatted)( + &[ + "cc", + ], + &[], + &[], + unsafe { + builtin#lang(UnsafeArg::new)() + }, + ), + ); + }"#]] + .assert_eq(&body.pretty_print(&db, def)) +} diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 72ccc17486f0d..635d13f24ad81 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> { self.module_id.local_id, MacroCallKind::Attr { ast_id, - attr_args: Arc::new((tt::Subtree::empty(), Default::default())), + attr_args: None, invoc_attr_index: attr.id, }, attr.path().clone(), @@ -706,7 +706,7 @@ impl<'a> AssocItemCollector<'a> { } AssocItem::MacroCall(call) => { let file_id = self.expander.current_file_id(); - let MacroCall { ast_id, expand_to, ref path } = item_tree[call]; + let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call]; let module = self.expander.module.local_id; let resolver = |path| { @@ -725,6 +725,7 @@ impl<'a> AssocItemCollector<'a> { match macro_call_as_call_id( self.db.upcast(), &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)), + call_site, expand_to, self.expander.module.krate(), resolver, @@ -793,7 +794,7 @@ impl<'a> AssocItemCollector<'a> { self.collect(&item_tree, tree_id, &iter); - self.expander.exit(self.db, mark); + self.expander.exit(mark); } } diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index 6db8398bc9867..398f116d83135 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -4,21 +4,21 @@ use base_db::CrateId; use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ - attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId, - InFile, MacroCallId, UnresolvedMacro, + attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId, + InFile, MacroCallId, }; use limit::Limit; use syntax::{ast, Parse, SyntaxNode}; use crate::{ attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall, - MacroId, ModuleId, + MacroId, ModuleId, UnresolvedMacro, }; #[derive(Debug)] pub struct Expander { cfg_options: CfgOptions, - hygiene: Hygiene, + span_map: SpanMap, krate: CrateId, pub(crate) current_file_id: HirFileId, pub(crate) module: ModuleId, @@ -41,7 +41,7 @@ impl Expander { recursion_depth: 0, recursion_limit, cfg_options: db.crate_graph()[module.krate].cfg_options.clone(), - hygiene: Hygiene::new(db.upcast(), current_file_id), + span_map: db.span_map(current_file_id), krate: module.krate, } } @@ -94,8 +94,8 @@ impl Expander { ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) } } - pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { - self.hygiene = Hygiene::new(db.upcast(), mark.file_id); + pub fn exit(&mut self, mut mark: Mark) { + self.span_map = mark.span_map; self.current_file_id = mark.file_id; if self.recursion_depth == u32::MAX { // Recursion limit has been reached somewhere in the macro expansion tree. 
Reset the @@ -110,7 +110,7 @@ impl Expander { } pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> { - LowerCtx::new(db, &self.hygiene, self.current_file_id) + LowerCtx::new(db, self.span_map.clone(), self.current_file_id) } pub(crate) fn to_source(&self, value: T) -> InFile { @@ -118,7 +118,7 @@ impl Expander { } pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene)) + Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref())) } pub(crate) fn cfg_options(&self) -> &CfgOptions { @@ -130,8 +130,8 @@ impl Expander { } pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option { - let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id); - Path::from_src(path, &ctx) + let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id); + Path::from_src(&ctx, path) } fn within_limit( @@ -174,10 +174,11 @@ impl Expander { let parse = value.cast::()?; self.recursion_depth += 1; - self.hygiene = Hygiene::new(db.upcast(), file_id); + let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id)); let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); let mark = Mark { file_id: old_file_id, + span_map: old_span_map, bomb: DropBomb::new("expansion mark dropped"), }; Some((mark, parse)) @@ -190,5 +191,6 @@ impl Expander { #[derive(Debug)] pub struct Mark { file_id: HirFileId, + span_map: SpanMap, bomb: DropBomb, } diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 1ebd1ba0e66db..13af0b0218e89 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -586,7 +586,7 @@ fn find_local_import_locations( #[cfg(test)] mod tests { use base_db::fixture::WithFixture; - use hir_expand::hygiene::Hygiene; + use hir_expand::db::ExpandDatabase; use syntax::ast::AstNode; use crate::test_db::TestDB; @@ -608,7 +608,8 @@ mod tests { let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};")); let ast_path = parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap(); - let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap(); + let mod_path = + ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap(); let def_map = module.def_map(&db); let resolved = def_map diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index fac90e6630452..0d95d916ff99b 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -21,7 +21,7 @@ use crate::{ db::DefDatabase, dyn_map::{keys, DynMap}, expander::Expander, - item_tree::{AttrOwner, ItemTree}, + item_tree::ItemTree, lower::LowerCtx, nameres::{DefMap, MacroSubNs}, src::{HasChildSource, HasSource}, @@ -250,7 +250,10 @@ impl GenericParams { &mut self, lower_ctx: &LowerCtx<'_>, node: &dyn HasGenericParams, - add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam), + add_param_attrs: impl FnMut( + Either, Idx>, + ast::GenericParam, + ), ) { if let Some(params) = node.generic_param_list() { self.fill_params(lower_ctx, params, add_param_attrs) @@ -275,7 +278,10 @@ impl GenericParams { &mut self, lower_ctx: &LowerCtx<'_>, params: ast::GenericParamList, - mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam), + mut add_param_attrs: impl FnMut( + Either, Idx>, + ast::GenericParam, + ), ) { for type_or_const_param in params.type_or_const_params() { match 
type_or_const_param { @@ -297,7 +303,7 @@ impl GenericParams { type_param.type_bound_list(), Either::Left(type_ref), ); - add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param)); + add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param)); } ast::TypeOrConstParam::Const(const_param) => { let name = const_param.name().map_or_else(Name::missing, |it| it.as_name()); @@ -310,7 +316,7 @@ impl GenericParams { default: ConstRef::from_const_param(lower_ctx, &const_param), }; let idx = self.type_or_consts.alloc(param.into()); - add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param)); + add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param)); } } } @@ -325,7 +331,7 @@ impl GenericParams { lifetime_param.type_bound_list(), Either::Right(lifetime_ref), ); - add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param)); + add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param)); } } @@ -433,7 +439,7 @@ impl GenericParams { let ctx = expander.ctx(db); let type_ref = TypeRef::from_ast(&ctx, expanded.tree()); self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref); - exp.1.exit(db, mark); + exp.1.exit(mark); } } }); @@ -518,7 +524,7 @@ fn file_id_and_params_of( (src.file_id, src.value.generic_param_list()) } // We won't be using this ID anyway - GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None), + GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None), } } diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs index 46d24bd4a6142..7fc33abc7c9a1 100644 --- a/crates/hir-def/src/hir/format_args.rs +++ b/crates/hir-def/src/hir/format_args.rs @@ -3,9 +3,10 @@ use std::mem; use hir_expand::name::Name; use rustc_dependencies::parse_format as parse; +use stdx::TupleExt; use syntax::{ ast::{self, IsString}, - AstToken, SmolStr, TextRange, + SmolStr, TextRange, TextSize, }; use crate::hir::ExprId; @@ -14,6 +15,7 @@ use crate::hir::ExprId; pub struct FormatArgs { pub template: Box<[FormatArgsPiece]>, pub arguments: FormatArguments, + pub orphans: Vec, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -170,15 +172,18 @@ pub(crate) fn parse( mut args: FormatArgumentsCollector, is_direct_literal: bool, mut synth: impl FnMut(Name) -> ExprId, + mut record_usage: impl FnMut(Name, Option), ) -> FormatArgs { - let text = s.text(); + let text = s.text_without_quotes(); let str_style = match s.quote_offsets() { Some(offsets) => { let raw = u32::from(offsets.quotes.0.len()) - 1; - (raw != 0).then_some(raw as usize) + // subtract 1 for the `r` prefix + (raw != 0).then(|| raw as usize - 1) } None => None, }; + let mut parser = parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format); @@ -193,12 +198,17 @@ pub(crate) fn parse( let is_source_literal = parser.is_source_literal; if !parser.errors.is_empty() { // FIXME: Diagnose - return FormatArgs { template: Default::default(), arguments: args.finish() }; + return FormatArgs { + template: Default::default(), + arguments: args.finish(), + orphans: vec![], + }; } let to_span = |inner_span: parse::InnerSpan| { is_source_literal.then(|| { TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap()) + - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1) }) }; @@ -230,9 +240,10 @@ pub(crate) fn parse( Err(index) } } - ArgRef::Name(name, _span) => { + ArgRef::Name(name, span) => { let name = 
Name::new_text_dont_use(SmolStr::new(name)); if let Some((index, _)) = args.by_name(&name) { + record_usage(name, span); // Name found in `args`, so we resolve it to its index. if index < args.explicit_args().len() { // Mark it as used, if it was an explicit argument. @@ -246,6 +257,7 @@ pub(crate) fn parse( // disabled (see RFC #2795) // FIXME: Diagnose } + record_usage(name.clone(), span); Ok(args.add(FormatArgument { kind: FormatArgumentKind::Captured(name.clone()), // FIXME: This is problematic, we might want to synthesize a dummy @@ -413,7 +425,11 @@ pub(crate) fn parse( // FIXME: Diagnose } - FormatArgs { template: template.into_boxed_slice(), arguments: args.finish() } + FormatArgs { + template: template.into_boxed_slice(), + arguments: args.finish(), + orphans: unused.into_iter().map(TupleExt::head).collect(), + } } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index 7c11fb9d13676..ce83cb435e2e8 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -112,6 +112,7 @@ pub struct ItemScope { #[derive(Debug, PartialEq, Eq)] struct DeriveMacroInvocation { attr_id: AttrId, + /// The `#[derive]` call attr_call_id: MacroCallId, derive_call_ids: SmallVec<[Option; 1]>, } @@ -401,6 +402,14 @@ impl ItemScope { }) } + pub fn derive_macro_invoc( + &self, + ast_id: AstId, + attr_id: AttrId, + ) -> Option { + Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id) + } + // FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option { self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a) diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 473ae298c7744..16144394e3b1b 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -42,12 +42,11 @@ use std::{ }; use ast::{AstNode, HasName, StructKind}; -use base_db::CrateId; +use base_db::{span::SyntaxContextId, CrateId}; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, attrs::RawAttrs, - hygiene::Hygiene, name::{name, AsName, Name}, ExpandTo, HirFileId, InFile, }; @@ -118,7 +117,7 @@ impl ItemTree { let mut item_tree = match_ast! { match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene())); + top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { @@ -749,6 +748,7 @@ pub struct MacroCall { pub path: Interned, pub ast_id: FileAstId, pub expand_to: ExpandTo, + pub call_site: SyntaxContextId, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -778,9 +778,9 @@ impl Use { // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); - let hygiene = Hygiene::new(db.upcast(), file_id); - let (_, source_map) = - lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree"); + let span_map = db.span_map(file_id); + let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree) + .expect("failed to lower use tree"); source_map[index].clone() } /// Maps a `UseTree` contained in this import back to its AST node. 
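The `to_span` adjustment in the `format_args` parser above follows from switching to `text_without_quotes`: inner spans are now relative to the string contents, so mapping them back to the literal means adding the prefix length — one byte for the opening quote, plus the `r` and the hashes for raw strings. A standalone check of that arithmetic (the helper name is made up; `str_style` is `None` for plain strings and `Some(number_of_hashes)` for raw strings, as computed in the diff):

// Mirrors `str_style.map(|it| it + 1).unwrap_or(0) + 1` from the patch:
// the prefix is whatever sits before the string contents.
fn prefix_len(str_style: Option<u32>) -> u32 {
    match str_style {
        Some(hashes) => hashes + 2, // `r`, the hashes, then the opening quote
        None => 1,                  // just the opening quote
    }
}

fn main() {
    assert_eq!(prefix_len(None), 1);    // "hello"
    assert_eq!(prefix_len(Some(0)), 2); // r"hello"
    assert_eq!(prefix_len(Some(2)), 4); // r##"hello"##
}
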
@@ -793,8 +793,10 @@ impl Use { // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); - let hygiene = Hygiene::new(db.upcast(), file_id); - lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1 + let span_map = db.span_map(file_id); + lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree) + .expect("failed to lower use tree") + .1 } } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 6807326be5aef..83a2790ce8f1f 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -2,12 +2,13 @@ use std::collections::hash_map::Entry; -use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId}; +use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId}; use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use crate::{ generics::{GenericParams, TypeParamData, TypeParamProvenance}, type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, + LocalLifetimeParamId, LocalTypeOrConstParamId, }; use super::*; @@ -33,8 +34,8 @@ impl<'a> Ctx<'a> { } } - pub(super) fn hygiene(&self) -> &Hygiene { - self.body_ctx.hygiene() + pub(super) fn span_map(&self) -> SpanMapRef<'_> { + self.body_ctx.span_map() } pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree { @@ -79,7 +80,7 @@ impl<'a> Ctx<'a> { pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { self.tree .attrs - .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene())); + .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map())); self.tree.top_level = block .statements() .filter_map(|stmt| match stmt { @@ -109,8 +110,7 @@ impl<'a> Ctx<'a> { } fn lower_mod_item(&mut self, item: &ast::Item) -> Option { - let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene()); - let item: ModItem = match item { + let mod_item: ModItem = match item { ast::Item::Struct(ast) => self.lower_struct(ast)?.into(), ast::Item::Union(ast) => self.lower_union(ast)?.into(), ast::Item::Enum(ast) => self.lower_enum(ast)?.into(), @@ -129,10 +129,10 @@ impl<'a> Ctx<'a> { ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(), }; + let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map()); + self.add_attrs(mod_item.into(), attrs); - self.add_attrs(item.into(), attrs); - - Some(item) + Some(mod_item) } fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) { @@ -146,21 +146,32 @@ impl<'a> Ctx<'a> { } } - fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option { - match item { + fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option { + let item: AssocItem = match item_node { ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into), ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), - } + }?; + let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map()); + self.add_attrs( + match item { + AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)), + AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)), + AssocItem::Const(it) => 
AttrOwner::ModItem(ModItem::Const(it)), + AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)), + }, + attrs, + ); + Some(item) } fn lower_struct(&mut self, strukt: &ast::Struct) -> Option> { let visibility = self.lower_visibility(strukt); let name = strukt.name()?.as_name(); + let ast_id = self.source_ast_id_map.ast_id(strukt); let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt); let fields = self.lower_fields(&strukt.kind()); - let ast_id = self.source_ast_id_map.ast_id(strukt); let res = Struct { name, visibility, generic_params, fields, ast_id }; Some(id(self.data().structs.alloc(res))) } @@ -184,7 +195,10 @@ impl<'a> Ctx<'a> { for field in fields.fields() { if let Some(data) = self.lower_record_field(&field) { let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new(self.db.upcast(), &field, self.span_map()), + ); } } let end = self.next_field_idx(); @@ -205,7 +219,7 @@ impl<'a> Ctx<'a> { for (i, field) in fields.fields().enumerate() { let data = self.lower_tuple_field(i, &field); let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map())); } let end = self.next_field_idx(); IdxRange::new(start..end) @@ -222,12 +236,12 @@ impl<'a> Ctx<'a> { fn lower_union(&mut self, union: &ast::Union) -> Option> { let visibility = self.lower_visibility(union); let name = union.name()?.as_name(); + let ast_id = self.source_ast_id_map.ast_id(union); let generic_params = self.lower_generic_params(HasImplicitSelf::No, union); let fields = match union.record_field_list() { Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())), }; - let ast_id = self.source_ast_id_map.ast_id(union); let res = Union { name, visibility, generic_params, fields, ast_id }; Some(id(self.data().unions.alloc(res))) } @@ -235,12 +249,12 @@ impl<'a> Ctx<'a> { fn lower_enum(&mut self, enum_: &ast::Enum) -> Option> { let visibility = self.lower_visibility(enum_); let name = enum_.name()?.as_name(); + let ast_id = self.source_ast_id_map.ast_id(enum_); let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_); let variants = match &enum_.variant_list() { Some(variant_list) => self.lower_variants(variant_list), None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()), }; - let ast_id = self.source_ast_id_map.ast_id(enum_); let res = Enum { name, visibility, generic_params, variants, ast_id }; Some(id(self.data().enums.alloc(res))) } @@ -252,7 +266,7 @@ impl<'a> Ctx<'a> { let idx = self.data().variants.alloc(data); self.add_attrs( idx.into(), - RawAttrs::new(self.db.upcast(), &variant, self.hygiene()), + RawAttrs::new(self.db.upcast(), &variant, self.span_map()), ); } } @@ -303,28 +317,29 @@ impl<'a> Ctx<'a> { }); self.add_attrs( idx.into(), - RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()), + RawAttrs::new(self.db.upcast(), &self_param, self.span_map()), ); has_self_param = true; } for param in param_list.params() { + let ast_id = self.source_ast_id_map.ast_id(¶m); let idx = match param.dotdotdot_token() { - Some(_) => { - let ast_id = self.source_ast_id_map.ast_id(¶m); - self.data() - .params - .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }) - } + 
Some(_) => self + .data() + .params + .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }), None => { let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); let ty = Interned::new(type_ref); - let ast_id = self.source_ast_id_map.ast_id(¶m); self.data() .params .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) }) } }; - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), ¶m, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new(self.db.upcast(), ¶m, self.span_map()), + ); } } let end_param = self.next_param_idx(); @@ -394,8 +409,8 @@ impl<'a> Ctx<'a> { let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it)); let visibility = self.lower_visibility(type_alias); let bounds = self.lower_type_bounds(type_alias); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias); let ast_id = self.source_ast_id_map.ast_id(type_alias); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias); let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id }; Some(id(self.data().type_aliases.alloc(res))) } @@ -443,23 +458,17 @@ impl<'a> Ctx<'a> { fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option> { let name = trait_def.name()?.as_name(); let visibility = self.lower_visibility(trait_def); + let ast_id = self.source_ast_id_map.ast_id(trait_def); let generic_params = self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def); let is_auto = trait_def.auto_token().is_some(); let is_unsafe = trait_def.unsafe_token().is_some(); - let ast_id = self.source_ast_id_map.ast_id(trait_def); let items = trait_def .assoc_item_list() .into_iter() .flat_map(|list| list.assoc_items()) - .filter_map(|item| { - let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - self.lower_assoc_item(&item).map(|item| { - self.add_attrs(ModItem::from(item).into(), attrs); - item - }) - }) + .filter_map(|item_node| self.lower_assoc_item(&item_node)) .collect(); let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id }; @@ -472,17 +481,18 @@ impl<'a> Ctx<'a> { ) -> Option> { let name = trait_alias_def.name()?.as_name(); let visibility = self.lower_visibility(trait_alias_def); + let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); let generic_params = self.lower_generic_params( HasImplicitSelf::Yes(trait_alias_def.type_bound_list()), trait_alias_def, ); - let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); let alias = TraitAlias { name, visibility, generic_params, ast_id }; Some(id(self.data().trait_aliases.alloc(alias))) } fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option> { + let ast_id = self.source_ast_id_map.ast_id(impl_def); // Note that trait impls don't get implicit `Self` unlike traits, because here they are a // type alias rather than a type parameter, so this is handled by the resolver. 
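Several lowering functions above (`lower_struct`, `lower_union`, `lower_enum`, `lower_type_alias`, `lower_trait`, `lower_trait_alias`, `lower_impl`, and the params loop) hoist the `source_ast_id_map.ast_id(..)` call ahead of the rest of the lowering. The patch does not say why; presumably, with spans now anchored to AST ids, allocation order matters and the item's own id should be handed out before anything allocated while lowering its innards — that reading is an assumption. The invariant, reduced to a toy:

// Illustrative only: if id allocation is order-sensitive, hoisting the
// item's own id above any nested lowering keeps it stable. Hypothetical ids.
struct IdAlloc(u32);

impl IdAlloc {
    fn next(&mut self) -> u32 {
        self.0 += 1;
        self.0
    }
}

fn main() {
    let mut alloc = IdAlloc(0);
    let item_id = alloc.next(); // allocate the item's id first...
    let nested_id = alloc.next(); // ...so nested allocations come later
    assert!(item_id < nested_id);
}
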
let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def); @@ -499,14 +509,8 @@ impl<'a> Ctx<'a> { .assoc_item_list() .into_iter() .flat_map(|it| it.assoc_items()) - .filter_map(|item| { - let assoc = self.lower_assoc_item(&item)?; - let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - self.add_attrs(ModItem::from(assoc).into(), attrs); - Some(assoc) - }) + .filter_map(|item| self.lower_assoc_item(&item)) .collect(); - let ast_id = self.source_ast_id_map.ast_id(impl_def); let res = Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id }; Some(id(self.data().impls.alloc(res))) @@ -515,7 +519,7 @@ impl<'a> Ctx<'a> { fn lower_use(&mut self, use_item: &ast::Use) -> Option> { let visibility = self.lower_visibility(use_item); let ast_id = self.source_ast_id_map.ast_id(use_item); - let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?; + let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?; let res = Use { visibility, ast_id, use_tree }; Some(id(self.data().uses.alloc(res))) @@ -537,10 +541,16 @@ impl<'a> Ctx<'a> { } fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option> { - let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?); + let span_map = self.span_map(); + let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?); let ast_id = self.source_ast_id_map.ast_id(m); let expand_to = hir_expand::ExpandTo::from_call_site(m); - let res = MacroCall { path, ast_id, expand_to }; + let res = MacroCall { + path, + ast_id, + expand_to, + call_site: span_map.span_for_range(m.syntax().text_range()).ctx, + }; Some(id(self.data().macro_calls.alloc(res))) } @@ -572,15 +582,15 @@ impl<'a> Ctx<'a> { // (in other words, the knowledge that they're in an extern block must not be used). // This is because an extern block can contain macros whose ItemTree's top-level items // should be considered to be in an extern block too. - let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - let id: ModItem = match item { - ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(), - ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(), - ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(), - ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(), + let mod_item: ModItem = match &item { + ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(), + ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(), + ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(), + ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(), }; - self.add_attrs(id.into(), attrs); - Some(id) + let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map()); + self.add_attrs(mod_item.into(), attrs); + Some(mod_item) }) .collect() }); @@ -612,12 +622,16 @@ impl<'a> Ctx<'a> { generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param)); } - let add_param_attrs = |item, param| { - let attrs = RawAttrs::new(self.db.upcast(), ¶m, self.body_ctx.hygiene()); + let add_param_attrs = |item: Either, + param| { + let attrs = RawAttrs::new(self.db.upcast(), ¶m, self.body_ctx.span_map()); // This is identical to the body of `Ctx::add_attrs()` but we can't call that here // because it requires `&mut self` and the call to `generics.fill()` below also // references `self`. 
- match self.tree.attrs.entry(item) { + match self.tree.attrs.entry(match item { + Either::Right(id) => id.into(), + Either::Left(id) => id.into(), + }) { Entry::Occupied(mut entry) => { *entry.get_mut() = entry.get().merge(attrs); } @@ -643,7 +657,8 @@ impl<'a> Ctx<'a> { } fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId { - let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene()); + let vis = + RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map()); self.data().vis.alloc(vis) } @@ -721,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned { struct UseTreeLowering<'a> { db: &'a dyn DefDatabase, - hygiene: &'a Hygiene, + span_map: SpanMapRef<'a>, mapping: Arena, } @@ -734,7 +749,7 @@ impl UseTreeLowering<'_> { // E.g. `use something::{inner}` (prefix is `None`, path is `something`) // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) Some(path) => { - match ModPath::from_src(self.db.upcast(), path, self.hygiene) { + match ModPath::from_src(self.db.upcast(), path, self.span_map) { Some(it) => Some(it), None => return None, // FIXME: report errors somewhere } @@ -753,7 +768,7 @@ impl UseTreeLowering<'_> { } else { let is_glob = tree.star_token().is_some(); let path = match tree.path() { - Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?), + Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?), None => None, }; let alias = tree.rename().map(|a| { @@ -789,10 +804,10 @@ impl UseTreeLowering<'_> { pub(crate) fn lower_use_tree( db: &dyn DefDatabase, - hygiene: &Hygiene, + span_map: SpanMapRef<'_>, tree: ast::UseTree, ) -> Option<(UseTree, Arena)> { - let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() }; + let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() }; let tree = lowering.lower_use_tree(tree)?; Some((tree, lowering.mapping)) } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index ca3785bf28dfa..244111d202ceb 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -457,7 +457,7 @@ impl Printer<'_> { } } ModItem::MacroCall(it) => { - let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it]; + let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it]; wln!(self, "{}!(...);", path.display(self.db.upcast())); } ModItem::MacroRules(it) => { diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs index 4180f817209e8..96c65b941c1d0 100644 --- a/crates/hir-def/src/item_tree/tests.rs +++ b/crates/hir-def/src/item_tree/tests.rs @@ -370,3 +370,15 @@ struct S<#[cfg(never)] T>; "#]], ) } + +#[test] +fn pub_self() { + check( + r#" +pub(self) struct S; + "#, + expect![[r#" + pub(self) struct S; + "#]], + ) +} diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index fd8f64d6705b0..7cf13a202e02c 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -7,7 +7,7 @@ //! Note that `hir_def` is a work in progress, so not all of the above is //! actually true. 
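`add_param_attrs` above inlines the body of `Ctx::add_attrs` because, as its comment explains, the closure cannot call a `&mut self` method while `generics.fill()` also borrows `self`. The underlying pattern is merge-or-insert on an entry API; on std's `HashMap` it looks like this, with `Vec<u32>` standing in for `RawAttrs` and `append` standing in for the real `merge`:

use std::collections::hash_map::Entry;
use std::collections::HashMap;

// Merge-or-insert, as in the inlined `add_attrs` body above: extend an
// existing attribute list or insert a fresh one.
fn add_attrs(map: &mut HashMap<u32, Vec<u32>>, owner: u32, mut attrs: Vec<u32>) {
    match map.entry(owner) {
        Entry::Occupied(mut entry) => entry.get_mut().append(&mut attrs),
        Entry::Vacant(entry) => {
            entry.insert(attrs);
        }
    }
}

fn main() {
    let mut map = HashMap::new();
    add_attrs(&mut map, 1, vec![10]);
    add_attrs(&mut map, 1, vec![11]);
    assert_eq!(map[&1], vec![10, 11]);
}
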
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[allow(unused)] @@ -63,7 +63,7 @@ use std::{ panic::{RefUnwindSafe, UnwindSafe}, }; -use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind}; +use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind}; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, attrs::{Attr, AttrId, AttrInput}, @@ -72,19 +72,18 @@ use hir_expand::{ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, eager::expand_eager_macro_input, - hygiene::Hygiene, name::Name, proc_macro::ProcMacroExpander, AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, - MacroDefId, MacroDefKind, UnresolvedMacro, + MacroDefId, MacroDefKind, }; use item_tree::ExternBlock; use la_arena::Idx; use nameres::DefMap; use stdx::impl_from; -use syntax::ast; +use syntax::{ast, AstNode}; -use ::tt::token_id as tt; +pub use hir_expand::tt; use crate::{ builtin_type::BuiltinType, @@ -1166,16 +1165,20 @@ impl AsMacroCall for InFile<&ast::MacroCall> { ) -> Result>, UnresolvedMacro> { let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); - let h = Hygiene::new(db, self.file_id); - let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h)); + let span_map = db.span_map(self.file_id); + let path = + self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref())); let Some(path) = path else { return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); }; + let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx; + macro_call_as_call_id_with_eager( db, &AstIdWithPath::new(ast_id.file_id, ast_id.value, path), + call_site, expands_to, krate, resolver, @@ -1200,17 +1203,19 @@ impl AstIdWithPath { fn macro_call_as_call_id( db: &dyn ExpandDatabase, call: &AstIdWithPath, + call_site: SyntaxContextId, expand_to: ExpandTo, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option + Copy, ) -> Result, UnresolvedMacro> { - macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver) + macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver) .map(|res| res.value) } fn macro_call_as_call_id_with_eager( db: &dyn ExpandDatabase, call: &AstIdWithPath, + call_site: SyntaxContextId, expand_to: ExpandTo, krate: CrateId, resolver: impl FnOnce(path::ModPath) -> Option, @@ -1222,7 +1227,7 @@ fn macro_call_as_call_id_with_eager( let res = match def.kind { MacroDefKind::BuiltInEager(..) 
=> { let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db)); - expand_eager_macro_input(db, krate, macro_call, def, &|path| { + expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| { eager_resolver(path).filter(MacroDefId::is_fn_like) }) } @@ -1231,6 +1236,7 @@ fn macro_call_as_call_id_with_eager( db, krate, MacroCallKind::FnLike { ast_id: call.ast_id, expand_to }, + call_site, )), err: None, }, @@ -1315,6 +1321,7 @@ fn derive_macro_as_call_id( item_attr: &AstIdWithPath, derive_attr_index: AttrId, derive_pos: u32, + call_site: SyntaxContextId, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { @@ -1329,6 +1336,7 @@ fn derive_macro_as_call_id( derive_index: derive_pos, derive_attr_index, }, + call_site, ); Ok((macro_id, def_id, call_id)) } @@ -1341,15 +1349,13 @@ fn attr_macro_as_call_id( def: MacroDefId, ) -> MacroCallId { let arg = match macro_attr.input.as_deref() { - Some(AttrInput::TokenTree(tt)) => ( - { - let mut tt = tt.0.clone(); - tt.delimiter = tt::Delimiter::UNSPECIFIED; - tt - }, - tt.1.clone(), - ), - _ => (tt::Subtree::empty(), Default::default()), + Some(AttrInput::TokenTree(tt)) => { + let mut tt = tt.as_ref().clone(); + tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + Some(tt) + } + + _ => None, }; def.as_lazy_macro( @@ -1357,11 +1363,18 @@ fn attr_macro_as_call_id( krate, MacroCallKind::Attr { ast_id: item_attr.ast_id, - attr_args: Arc::new(arg), + attr_args: arg.map(Arc::new), invoc_attr_index: macro_attr.id, }, + macro_attr.ctxt, ) } + +#[derive(Debug)] +pub struct UnresolvedMacro { + pub path: hir_expand::mod_path::ModPath, +} + intern::impl_internable!( crate::type_ref::TypeRef, crate::type_ref::TraitRef, diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs index 52781d9889212..a3505b65fe722 100644 --- a/crates/hir-def/src/lower.rs +++ b/crates/hir-def/src/lower.rs @@ -3,7 +3,7 @@ use std::cell::OnceCell; use hir_expand::{ ast_id_map::{AstIdMap, AstIdNode}, - hygiene::Hygiene, + span::{SpanMap, SpanMapRef}, AstId, HirFileId, InFile, }; use syntax::ast; @@ -13,33 +13,34 @@ use crate::{db::DefDatabase, path::Path}; pub struct LowerCtx<'a> { pub db: &'a dyn DefDatabase, - hygiene: Hygiene, + span_map: SpanMap, + // FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways. 
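`attr_macro_as_call_id` above also changes how absent attribute arguments are modelled: `attr_args` becomes an `Option<Arc<..>>` that is `None` when there is nothing to pass, instead of always allocating an `Arc` around an empty-subtree sentinel. A sketch of the before/after shape, with stand-in types (not the real `tt::Subtree`):

use std::sync::Arc;

#[derive(Clone, Debug, PartialEq)]
struct Subtree(Vec<u32>);

// Before (conceptually): always allocate, even to say "no arguments".
fn args_old(input: Option<&Subtree>) -> Arc<Subtree> {
    Arc::new(input.cloned().unwrap_or(Subtree(Vec::new())))
}

// After: absence is represented in the type, and nothing is allocated for it.
fn args_new(input: Option<&Subtree>) -> Option<Arc<Subtree>> {
    input.map(|tt| Arc::new(tt.clone()))
}

fn main() {
    assert_eq!(args_old(None), Arc::new(Subtree(Vec::new())));
    assert!(args_new(None).is_none());
}
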
ast_id_map: Option<(HirFileId, OnceCell>)>, } impl<'a> LowerCtx<'a> { - pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self { - LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) } + pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self { + LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) } } pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self { LowerCtx { db, - hygiene: Hygiene::new(db.upcast(), file_id), + span_map: db.span_map(file_id), ast_id_map: Some((file_id, OnceCell::new())), } } - pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self { - LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None } + pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self { + LowerCtx { db, span_map, ast_id_map: None } } - pub(crate) fn hygiene(&self) -> &Hygiene { - &self.hygiene + pub(crate) fn span_map(&self) -> SpanMapRef<'_> { + self.span_map.as_ref() } pub(crate) fn lower_path(&self, ast: ast::Path) -> Option { - Path::from_src(ast, self) + Path::from_src(self, ast) } pub(crate) fn ast_id(&self, item: &N) -> Option> { diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 106ead83fad76..514219ee71505 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -468,12 +468,12 @@ macro_rules! concat_bytes {} fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); } "##, - expect![[r##" + expect![[r#" #[rustc_builtin_macro] macro_rules! concat_bytes {} fn main() { [b'A', 66, 67, 68, b'E', 70]; } -"##]], +"#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index d0906213243d6..9bf2a50d57c96 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check; fn token_mapping_smoke_test() { check( r#" -// +tokenids macro_rules! f { ( struct $ident:ident ) => { struct $ident { @@ -24,26 +23,22 @@ macro_rules! f { }; } -// +tokenids +// +spans+syntaxctxt f!(struct MyTraitMap2); "#, - expect![[r##" -// call ids will be shifted by Shift(30) -// +tokenids -macro_rules! f {#0 - (#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9 - struct#10 $#11ident#12 {#13 - map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28 - }#13 - }#9;#29 -}#0 - -// // +tokenids -// f!(struct#1 MyTraitMap2#2); -struct#10 MyTraitMap2#32 {#13 - map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28 -}#13 -"##]], + expect![[r#" +macro_rules! f { + ( struct $ident:ident ) => { + struct $ident { + map: ::std::collections::HashSet<()>, + } + }; +} + +struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2# + map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2# +}#FileId(0):1@132..133\2# +"#]], ); } @@ -53,49 +48,42 @@ fn token_mapping_floats() { // (and related issues) check( r#" -// +tokenids +// +spans+syntaxctxt macro_rules! 
f { ($($tt:tt)*) => { $($tt)* }; } -// +tokenids +// +spans+syntaxctxt f! { fn main() { 1; 1.0; + ((1,),).0.0; let x = 1; } } "#, - expect![[r##" -// call ids will be shifted by Shift(18) -// +tokenids -macro_rules! f {#0 - (#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11 - $#12(#13$#14tt#15)#13*#16 - }#11;#17 -}#0 - -// // +tokenids -// f! { -// fn#1 main#2() { -// 1#5;#6 -// 1.0#7;#8 -// let#9 x#10 =#11 1#12;#13 -// } -// } -fn#19 main#20(#21)#21 {#22 - 1#23;#24 - 1.0#25;#26 - let#27 x#28 =#29 1#30;#31 -}#22 + expect![[r#" +// +spans+syntaxctxt +macro_rules! f { + ($($tt:tt)*) => { + $($tt)* + }; +} +fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0# + 1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0# + 1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0# + (#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0# + let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0# +}#FileId(0):2@110..111\0# -"##]], + +"#]], ); } @@ -105,59 +93,115 @@ fn eager_expands_with_unresolved_within() { r#" #[rustc_builtin_macro] #[macro_export] -macro_rules! format_args {} +macro_rules! concat {} +macro_rules! identity { + ($tt:tt) => { + $tt + } +} fn main(foo: ()) { - format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + concat!("hello", identity!("world"), unresolved!(), identity!("!")); } "#, expect![[r##" #[rustc_builtin_macro] #[macro_export] -macro_rules! format_args {} +macro_rules! concat {} +macro_rules! identity { + ($tt:tt) => { + $tt + } +} fn main(foo: ()) { - builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + /* error: unresolved macro unresolved */"helloworld!"; } "##]], ); } #[test] -fn token_mapping_eager() { +fn concat_spans() { check( r#" #[rustc_builtin_macro] #[macro_export] -macro_rules! format_args {} - +macro_rules! concat {} macro_rules! identity { - ($expr:expr) => { $expr }; + ($tt:tt) => { + $tt + } } fn main(foo: ()) { - format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + #[rustc_builtin_macro] + #[macro_export] + macro_rules! concat {} + macro_rules! identity { + ($tt:tt) => { + $tt + } + } + + fn main(foo: ()) { + concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!")); + } } "#, expect![[r##" #[rustc_builtin_macro] #[macro_export] -macro_rules! format_args {} - +macro_rules! concat {} macro_rules! identity { - ($expr:expr) => { $expr }; + ($tt:tt) => { + $tt + } } fn main(foo: ()) { - // format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17) -builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0 + #[rustc_builtin_macro] + #[macro_export] + macro_rules! concat {} + macro_rules! 
identity { + ($tt:tt) => { + $tt + } + } + + fn main(foo: ()) { + /* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#; + } } "##]], ); } +#[test] +fn token_mapping_across_files() { + check( + r#" +//- /lib.rs +#[macro_use] +mod foo; + +mk_struct/*+spans+syntaxctxt*/!(Foo with u32); +//- /foo.rs +macro_rules! mk_struct { + ($foo:ident with $ty:ty) => { struct $foo($ty); } +} +"#, + expect![[r#" +#[macro_use] +mod foo; + +struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2# +"#]], + ); +} + #[test] fn float_field_access_macro_input() { check( diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 2886b2a366c04..9010050ee6788 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -1004,3 +1004,29 @@ fn main() { "##]], ); } + +#[test] +fn eager_concat_bytes_panic() { + check( + r#" +#[rustc_builtin_macro] +#[macro_export] +macro_rules! concat_bytes {} + +fn main() { + let x = concat_bytes!(2); +} + +"#, + expect![[r#" +#[rustc_builtin_macro] +#[macro_export] +macro_rules! concat_bytes {} + +fn main() { + let x = /* error: unexpected token in input */[]; +} + +"#]], + ); +} diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 8adced4e08244..be2a503d82b15 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,21 +16,16 @@ mod proc_macros; use std::{iter, ops::Range, sync}; -use ::mbe::TokenMap; -use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; +use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase}; use expect_test::Expect; -use hir_expand::{ - db::{DeclarativeMacroExpander, ExpandDatabase}, - AstId, InFile, MacroFile, -}; +use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt}; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, - AstNode, SyntaxElement, - SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT}, - SyntaxNode, TextRange, T, + AstNode, + SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT}, + SyntaxNode, T, }; -use tt::token_id::{Subtree, TokenId}; use crate::{ db::DefDatabase, @@ -39,6 +34,7 @@ use crate::{ resolver::HasResolver, src::HasSource, test_db::TestDB, + tt::Subtree, AdtId, AsMacroCall, Lookup, ModuleDefId, }; @@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let mut text_edits = Vec::new(); let mut expansions = Vec::new(); - for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) { - let mut show_token_ids = false; - for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) { - show_token_ids |= comment.to_string().contains("+tokenids"); - } - if !show_token_ids { - continue; - } - - let call_offset = macro_.syntax().text_range().start().into(); - let file_ast_id = db.ast_id_map(source.file_id).ast_id(¯o_); - let ast_id = AstId::new(source.file_id, file_ast_id.upcast()); - - let DeclarativeMacroExpander { mac, def_site_token_map } = - &*db.decl_macro_expander(krate, ast_id); - assert_eq!(mac.err(), None); - let tt = match ¯o_ { - ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(), - ast::Macro::MacroDef(_) => unimplemented!(""), - }; - - let tt_start = 
tt.syntax().text_range().start(); - tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each( - |token| { - let range = token.text_range().checked_sub(tt_start).unwrap(); - if let Some(id) = def_site_token_map.token_by_range(range) { - let offset = (range.end() + tt_start).into(); - text_edits.push((offset..offset, format!("#{}", id.0))); - } - }, - ); - text_edits.push(( - call_offset..call_offset, - format!("// call ids will be shifted by {:?}\n", mac.shift()), - )); - } - for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { let macro_call = InFile::new(source.file_id, ¯o_call); let res = macro_call @@ -135,20 +94,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream }) .unwrap(); let macro_call_id = res.value.unwrap(); - let macro_file = MacroFile { macro_call_id }; + let macro_file = MacroFileId { macro_call_id }; let mut expansion_result = db.parse_macro_expansion(macro_file); expansion_result.err = expansion_result.err.or(res.err); - expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id))); + expansions.push((macro_call.value.clone(), expansion_result)); } - for (call, exp, arg) in expansions.into_iter().rev() { + for (call, exp) in expansions.into_iter().rev() { let mut tree = false; let mut expect_errors = false; - let mut show_token_ids = false; + let mut show_spans = false; + let mut show_ctxt = false; for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) { tree |= comment.to_string().contains("+tree"); expect_errors |= comment.to_string().contains("+errors"); - show_token_ids |= comment.to_string().contains("+tokenids"); + show_spans |= comment.to_string().contains("+spans"); + show_ctxt |= comment.to_string().contains("+syntaxctxt"); } let mut expn_text = String::new(); @@ -164,13 +125,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } else { assert!( parse.errors().is_empty(), - "parse errors in expansion: \n{:#?}", - parse.errors() + "parse errors in expansion: \n{:#?}\n```\n{}\n```", + parse.errors(), + parse.syntax_node(), ); } let pp = pretty_print_macro_expansion( parse.syntax_node(), - show_token_ids.then_some(&*token_map), + SpanMapRef::ExpansionSpanMap(&token_map), + show_spans, + show_ctxt, ); let indent = IndentLevel::from_node(call.syntax()); let pp = reindent(indent, pp); @@ -185,27 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } let range = call.syntax().text_range(); let range: Range = range.into(); - - if show_token_ids { - if let Some((tree, map, _)) = arg.value.as_deref() { - let tt_range = call.token_tree().unwrap().syntax().text_range(); - let mut ranges = Vec::new(); - extract_id_ranges(&mut ranges, map, tree); - for (range, id) in ranges { - let idx = (tt_range.start() + range.end()).into(); - text_edits.push((idx..idx, format!("#{}", id.0))); - } - } - text_edits.push((range.start..range.start, "// ".into())); - call.to_string().match_indices('\n').for_each(|(offset, _)| { - let offset = offset + 1 + range.start; - text_edits.push((offset..offset, "// ".into())); - }); - text_edits.push((range.end..range.end, "\n".into())); - text_edits.push((range.end..range.end, expn_text)); - } else { - text_edits.push((range, expn_text)); - } + text_edits.push((range, expn_text)); } text_edits.sort_by_key(|(range, _)| range.start); @@ -226,19 +170,43 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> 
TokenStream } _ => None, }; + if let Some(src) = src { - if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) { - let pp = pretty_print_macro_expansion(src.value, None); - format_to!(expanded_text, "\n{}", pp) + if let Some(file_id) = src.file_id.macro_file() { + if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) { + let call = file_id.call_node(&db); + let mut show_spans = false; + let mut show_ctxt = false; + for comment in + call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) + { + show_spans |= comment.to_string().contains("+spans"); + show_ctxt |= comment.to_string().contains("+syntaxctxt"); + } + let pp = pretty_print_macro_expansion( + src.value, + db.span_map(src.file_id).as_ref(), + show_spans, + show_ctxt, + ); + format_to!(expanded_text, "\n{}", pp) + } } } } for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); - if src.file_id.is_builtin_derive(&db) { - let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None); - format_to!(expanded_text, "\n{}", pp) + if let Some(macro_file) = src.file_id.macro_file() { + if macro_file.is_builtin_derive(&db) { + let pp = pretty_print_macro_expansion( + src.value.syntax().clone(), + db.span_map(macro_file.into()).as_ref(), + false, + false, + ); + format_to!(expanded_text, "\n{}", pp) + } } } @@ -246,20 +214,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream expect.assert_eq(&expanded_text); } -fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) { - tree.token_trees.iter().for_each(|tree| match tree { - tt::TokenTree::Leaf(leaf) => { - let id = match leaf { - tt::Leaf::Literal(it) => it.span, - tt::Leaf::Punct(it) => it.span, - tt::Leaf::Ident(it) => it.span, - }; - ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id))); - } - tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree), - }); -} - fn reindent(indent: IndentLevel, pp: String) -> String { if !pp.contains('\n') { return pp; @@ -276,7 +230,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String { res } -fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String { +fn pretty_print_macro_expansion( + expn: SyntaxNode, + map: SpanMapRef<'_>, + show_spans: bool, + show_ctxt: bool, +) -> String { let mut res = String::new(); let mut prev_kind = EOF; let mut indent_level = 0; @@ -322,10 +281,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str } prev_kind = curr_kind; format_to!(res, "{}", token); - if let Some(map) = map { - if let Some(id) = map.token_by_range(token.text_range()) { - format_to!(res, "#{}", id.0); + if show_spans || show_ctxt { + let span = map.span_for_range(token.text_range()); + format_to!(res, "#"); + if show_spans { + format_to!( + res, + "{:?}:{:?}@{:?}", + span.anchor.file_id, + span.anchor.ast_id.into_raw(), + span.range, + ); + } + if show_ctxt { + format_to!(res, "\\{}", span.ctx); } + format_to!(res, "#"); } } res @@ -342,6 +313,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander { subtree: &Subtree, _: Option<&Subtree>, _: &base_db::Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result { let (parse, _) = ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 822bdcc122dc6..060b8aa8c1931 100644 --- 
a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -93,6 +93,41 @@ fn foo() { ); } +#[test] +fn macro_rules_in_attr() { + // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211 + check( + r#" +//- proc_macros: identity +macro_rules! id { + ($($t:tt)*) => { + $($t)* + }; +} +id! { + #[proc_macros::identity] + impl Foo for WrapBj { + async fn foo(&self) { + self.id().await; + } + } +} +"#, + expect![[r#" +macro_rules! id { + ($($t:tt)*) => { + $($t)* + }; +} +#[proc_macros::identity] impl Foo for WrapBj { + async fn foo(&self ) { + self .id().await ; + } +} +"#]], + ); +} + #[test] fn float_parsing_panic() { // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211 @@ -127,3 +162,27 @@ macro_rules! id { "#]], ); } + +#[test] +fn float_attribute_mapping() { + check( + r#" +//- proc_macros: identity +//+spans+syntaxctxt +#[proc_macros::identity] +fn foo(&self) { + self.0. 1; +} +"#, + expect![[r#" +//+spans+syntaxctxt +#[proc_macros::identity] +fn foo(&self) { + self.0. 1; +} + +fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0# + self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0# +}#FileId(0):1@76..77\0#"#]], + ); +} diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 2d4586146db02..b3a10a3869a43 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -5,7 +5,7 @@ use std::{cmp::Ordering, iter, mem}; -use base_db::{CrateId, Dependency, Edition, FileId}; +use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -14,7 +14,6 @@ use hir_expand::{ builtin_attr_macro::find_builtin_attr, builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, - hygiene::Hygiene, name::{name, AsName, Name}, proc_macro::ProcMacroExpander, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, @@ -85,8 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI .enumerate() .map(|(idx, it)| { // FIXME: a hacky way to create a Name from string. 
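// Review note: this dummy-span construction (empty range, bogus anchor, root hygiene
// context) is repeated verbatim at the `rustc_builtin_macro` site later in this patch.
// A hypothetical helper that could keep the two sites in sync — a sketch using only the
// types this series introduces, not an existing API:
fn dummy_span() -> tt::SpanData {
    tt::SpanData {
        // nothing real to point at: an empty range anchored to a bogus file
        range: syntax::TextRange::empty(syntax::TextSize::new(0)),
        anchor: base_db::span::SpanAnchor {
            file_id: FileId::BOGUS,
            ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
        },
        // freshly invented identifiers get the root (unhygienic) context
        ctx: SyntaxContextId::ROOT,
    }
}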
- let name = - tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; + let name = tt::Ident { + text: it.name.clone(), + span: tt::SpanData { + range: syntax::TextRange::empty(syntax::TextSize::new(0)), + anchor: base_db::span::SpanAnchor { + file_id: FileId::BOGUS, + ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: SyntaxContextId::ROOT, + }, + }; (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) }) .collect()) @@ -112,7 +120,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI from_glob_import: Default::default(), skip_attrs: Default::default(), is_proc_macro, - hygienes: FxHashMap::default(), }; if tree_id.is_block() { collector.seed_with_inner(tree_id); @@ -212,9 +219,23 @@ struct MacroDirective { #[derive(Clone, Debug, Eq, PartialEq)] enum MacroDirectiveKind { - FnLike { ast_id: AstIdWithPath, expand_to: ExpandTo }, - Derive { ast_id: AstIdWithPath, derive_attr: AttrId, derive_pos: usize }, - Attr { ast_id: AstIdWithPath, attr: Attr, mod_item: ModItem, tree: TreeId }, + FnLike { + ast_id: AstIdWithPath, + expand_to: ExpandTo, + call_site: SyntaxContextId, + }, + Derive { + ast_id: AstIdWithPath, + derive_attr: AttrId, + derive_pos: usize, + call_site: SyntaxContextId, + }, + Attr { + ast_id: AstIdWithPath, + attr: Attr, + mod_item: ModItem, + /* is this needed? */ tree: TreeId, + }, } /// Walks the tree of module recursively @@ -242,12 +263,6 @@ struct DefCollector<'a> { /// This also stores the attributes to skip when we resolve derive helpers and non-macro /// non-builtin attributes in general. skip_attrs: FxHashMap, AttrId>, - /// `Hygiene` cache, because `Hygiene` construction is expensive. - /// - /// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction. - /// However, `DefCollector` still needs to lower paths in attributes, in particular those in - /// derive meta item list. - hygienes: FxHashMap, } impl DefCollector<'_> { @@ -315,12 +330,11 @@ impl DefCollector<'_> { } if *attr_name == hir_expand::name![feature] { - let hygiene = &Hygiene::new_unhygienic(); let features = attr - .parse_path_comma_token_tree(self.db.upcast(), hygiene) + .parse_path_comma_token_tree(self.db.upcast()) .into_iter() .flatten() - .filter_map(|feat| match feat.segments() { + .filter_map(|(feat, _)| match feat.segments() { [name] => Some(name.to_smol_str()), _ => None, }); @@ -471,7 +485,7 @@ impl DefCollector<'_> { directive.module_id, MacroCallKind::Attr { ast_id: ast_id.ast_id, - attr_args: Arc::new((tt::Subtree::empty(), Default::default())), + attr_args: None, invoc_attr_index: attr.id, }, attr.path().clone(), @@ -1119,10 +1133,11 @@ impl DefCollector<'_> { let resolver_def_id = |path| resolver(path).map(|(_, it)| it); match &directive.kind { - MacroDirectiveKind::FnLike { ast_id, expand_to } => { + MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => { let call_id = macro_call_as_call_id( self.db.upcast(), ast_id, + *call_site, *expand_to, self.def_map.krate, resolver_def_id, @@ -1134,12 +1149,13 @@ impl DefCollector<'_> { return false; } } - MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { + MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => { let id = derive_macro_as_call_id( self.db, ast_id, *derive_attr, *derive_pos as u32, + *call_site, self.def_map.krate, resolver, ); @@ -1212,7 +1228,7 @@ impl DefCollector<'_> { }; if matches!( def, - MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. 
} + MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. } if expander.is_derive() ) { // Resolved to `#[derive]` @@ -1234,22 +1250,10 @@ impl DefCollector<'_> { }; let ast_id = ast_id.with_value(ast_adt_id); - let extend_unhygenic; - let hygiene = if file_id.is_macro() { - self.hygienes - .entry(file_id) - .or_insert_with(|| Hygiene::new(self.db.upcast(), file_id)) - } else { - // Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry - // when we're in an oridinary (non-macro) file. - extend_unhygenic = Hygiene::new_unhygienic(); - &extend_unhygenic - }; - - match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) { + match attr.parse_path_comma_token_tree(self.db.upcast()) { Some(derive_macros) => { let mut len = 0; - for (idx, path) in derive_macros.enumerate() { + for (idx, (path, call_site)) in derive_macros.enumerate() { let ast_id = AstIdWithPath::new(file_id, ast_id.value, path); self.unresolved_macros.push(MacroDirective { module_id: directive.module_id, @@ -1258,6 +1262,7 @@ impl DefCollector<'_> { ast_id, derive_attr: attr.id, derive_pos: idx, + call_site, }, container: directive.container, }); @@ -1414,11 +1419,12 @@ impl DefCollector<'_> { for directive in &self.unresolved_macros { match &directive.kind { - MacroDirectiveKind::FnLike { ast_id, expand_to } => { + MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => { // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error! let macro_call_as_call_id = macro_call_as_call_id( self.db.upcast(), ast_id, + *call_site, *expand_to, self.def_map.krate, |path| { @@ -1444,7 +1450,7 @@ impl DefCollector<'_> { )); } } - MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { + MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( directive.module_id, MacroCallKind::Derive { @@ -1823,9 +1829,8 @@ impl ModCollector<'_, '_> { cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use); let mut single_imports = Vec::new(); - let hygiene = Hygiene::new_unhygienic(); for attr in macro_use_attrs { - let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else { + let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else { // `#[macro_use]` (without any paths) found, forget collected names and just import // all visible macros. self.def_collector.import_macros_from_extern_crate( @@ -1835,7 +1840,7 @@ impl ModCollector<'_, '_> { ); return; }; - for path in paths { + for (path, _) in paths { if let Some(name) = path.as_ident() { single_imports.push(name.clone()); } @@ -2083,8 +2088,18 @@ impl ModCollector<'_, '_> { let name = match attrs.by_key("rustc_builtin_macro").string_value() { Some(it) => { // FIXME: a hacky way to create a Name from string. 
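// Review note: same dummy-span pattern as the proc-macro site above — the hypothetical
// `dummy_span()` helper sketched there would cover this site as well. For readers of the
// new `+spans+syntaxctxt` test expectations in proc_macros.rs: each token is suffixed
// with `#<anchor file>:<erased ast id>@<range>\<syntax context>#`, so
// `fn#FileId(0):1@45..47\0#` reads as "anchored in FileId(0), erased AST id 1, byte
// range 45..47, root hygiene context". That is exactly what the reworked
// `pretty_print_macro_expansion` emits, condensed:
format_to!(res, "{:?}:{:?}@{:?}", span.anchor.file_id, span.anchor.ast_id.into_raw(), span.range);
format_to!(res, "\\{}", span.ctx);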
- name = - tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name(); + name = tt::Ident { + text: it.clone(), + span: tt::SpanData { + range: syntax::TextRange::empty(syntax::TextSize::new(0)), + anchor: base_db::span::SpanAnchor { + file_id: FileId::BOGUS, + ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: SyntaxContextId::ROOT, + }, + } + .as_name(); &name } None => { @@ -2210,8 +2225,12 @@ impl ModCollector<'_, '_> { } } - fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) { - let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path)); + fn collect_macro_call( + &mut self, + &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall, + container: ItemContainerId, + ) { + let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path)); let db = self.def_collector.db; // FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define @@ -2222,7 +2241,8 @@ impl ModCollector<'_, '_> { if let Ok(res) = macro_call_as_call_id_with_eager( db.upcast(), &ast_id, - mac.expand_to, + call_site, + expand_to, self.def_collector.def_map.krate, |path| { path.as_ident().and_then(|name| { @@ -2276,7 +2296,7 @@ impl ModCollector<'_, '_> { self.def_collector.unresolved_macros.push(MacroDirective { module_id: self.module_id, depth: self.macro_depth + 1, - kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to }, + kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site }, container, }); } @@ -2363,7 +2383,6 @@ mod tests { from_glob_import: Default::default(), skip_attrs: Default::default(), is_proc_macro: false, - hygienes: FxHashMap::default(), }; collector.seed_with_top_level(); collector.collect(); diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 2dcc2c30fe169..c45200e2de9df 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -1,7 +1,7 @@ //! This module resolves `mod foo;` declaration to file. 
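// Review note on the hunk below: swapping `original_file` for
// `original_file_respecting_includes` appears to anchor the lookup, for an `include!`
// expansion, to the *included* file rather than ascending to the file containing the
// `include!` call, so a `mod` declaration inside an included file resolves relative to
// that file. Illustrative layout (paths hypothetical, not from this patch):
//
//   src/lib.rs          include!("gen/defs.rs");
//   src/gen/defs.rs     mod foo;    // candidates: gen/foo.rs and gen/foo/mod.rs,
//                                   // per the `is_include_macro` branch below
//
// The `is_include_macro` check itself now goes through `HirFileId::macro_file()`, since
// only macro files can be `include!` expansions.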
use arrayvec::ArrayVec; use base_db::{AnchoredPath, FileId}; -use hir_expand::name::Name; +use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt}; use limit::Limit; use syntax::SmolStr; @@ -66,14 +66,14 @@ impl ModDir { attr_path: Option<&SmolStr>, ) -> Result<(FileId, bool, ModDir), Box<[String]>> { let name = name.unescaped(); - let orig_file_id = file_id.original_file(db.upcast()); + let orig_file_id = file_id.original_file_respecting_includes(db.upcast()); let mut candidate_files = ArrayVec::<_, 2>::new(); match attr_path { Some(attr_path) => { candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) } - None if file_id.is_include_macro(db.upcast()) => { + None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => { candidate_files.push(format!("{}.rs", name.display(db.upcast()))); candidate_files.push(format!("{}/mod.rs", name.display(db.upcast()))); } diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs index 4c1b8f306c50f..be3438e427dba 100644 --- a/crates/hir-def/src/nameres/path_resolution.rs +++ b/crates/hir-def/src/nameres/path_resolution.rs @@ -96,8 +96,8 @@ impl DefMap { let types = result.take_types()?; match types { ModuleDefId::ModuleId(m) => Visibility::Module(m), + // error: visibility needs to refer to module _ => { - // error: visibility needs to refer to module return None; } } diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs index e7cc44b04da80..b2ffbbe4c5d8f 100644 --- a/crates/hir-def/src/nameres/tests.rs +++ b/crates/hir-def/src/nameres/tests.rs @@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase}; use expect_test::{expect, Expect}; use triomphe::Arc; -use crate::{db::DefDatabase, test_db::TestDB}; - -use super::DefMap; +use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB}; fn compute_crate_def_map(ra_fixture: &str) -> Arc { let db = TestDB::with_files(ra_fixture); diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 4a86f88e57aff..78cb78e833ec5 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -1,13 +1,19 @@ -use base_db::SourceDatabaseExt; +use base_db::{SourceDatabase, SourceDatabaseExt}; use triomphe::Arc; -use crate::{db::DefDatabase, AdtId, ModuleDefId}; - -use super::*; +use crate::{ + db::DefDatabase, + nameres::tests::{TestDB, WithFixture}, + AdtId, ModuleDefId, +}; fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { let (mut db, pos) = TestDB::with_position(ra_fixture_initial); - let krate = db.test_crate(); + let krate = { + let crate_graph = db.crate_graph(); + // Some of these tests use minicore/proc-macros which will be injected as the first crate + crate_graph.iter().last().unwrap() + }; { let events = db.log_executed(|| { db.crate_def_map(krate); @@ -28,84 +34,199 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: fn typing_inside_a_function_should_not_invalidate_def_map() { check_def_map_is_not_recomputed( r" - //- /lib.rs - mod foo;$0 +//- /lib.rs +mod foo;$0 - use crate::foo::bar::Baz; +use crate::foo::bar::Baz; - enum E { A, B } - use E::*; +enum E { A, B } +use E::*; - fn foo() -> i32 { - 1 + 1 - } +fn foo() -> i32 { + 1 + 1 +} - #[cfg(never)] - fn no() {} - //- /foo/mod.rs - pub mod bar; +#[cfg(never)] +fn no() {} +//- /foo/mod.rs +pub mod bar; - //- /foo/bar.rs - pub struct 
Baz; - ", +//- /foo/bar.rs +pub struct Baz; +", r" - mod foo; +mod foo; - use crate::foo::bar::Baz; +use crate::foo::bar::Baz; - enum E { A, B } - use E::*; +enum E { A, B } +use E::*; - fn foo() -> i32 { 92 } +fn foo() -> i32 { 92 } - #[cfg(never)] - fn no() {} - ", +#[cfg(never)] +fn no() {} +", ); } #[test] fn typing_inside_a_macro_should_not_invalidate_def_map() { - let (mut db, pos) = TestDB::with_position( + check_def_map_is_not_recomputed( r" - //- /lib.rs - macro_rules! m { - ($ident:ident) => { - fn f() { - $ident + $ident; - }; - } - } - mod foo; +//- /lib.rs +macro_rules! m { + ($ident:ident) => { + fn f() { + $ident + $ident; + }; + } +} +mod foo; - //- /foo/mod.rs - pub mod bar; +//- /foo/mod.rs +pub mod bar; - //- /foo/bar.rs - $0 - m!(X); - ", +//- /foo/bar.rs +$0 +m!(X); + +pub struct S {} +", + r" +m!(Y); + +pub struct S {} +", ); - let krate = db.test_crate(); - { - let events = db.log_executed(|| { - let crate_def_map = db.crate_def_map(krate); - let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); - assert_eq!(module_data.scope.resolutions().count(), 1); - }); - assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") +} + +#[test] +fn typing_inside_an_attribute_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: identity +//- /lib.rs +mod foo; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +$0 +#[proc_macros::identity] +fn f() {} +", + r" +#[proc_macros::identity] +fn f() { foo } +", + ); +} + +#[test] +fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: identity +//- /lib.rs +mod foo; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +$0 +#[proc_macros::identity] +fn f() {} +", + r" +#[proc_macros::identity(foo)] +fn f() {} +", + ); +} +#[test] +fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: identity, derive_identity +//- /lib.rs +macro_rules! m { + ($ident:ident) => { + fn fm() { + $ident + $ident; + }; } - db.set_file_text(pos.file_id, Arc::from("m!(Y);")); +} +mod foo; - { - let events = db.log_executed(|| { - let crate_def_map = db.crate_def_map(krate); - let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); - assert_eq!(module_data.scope.resolutions().count(), 1); - }); - assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}") +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +$0 +fn f() {} + +m!(X); +macro_rules! m2 { + ($ident:ident) => { + fn f2() { + $ident + $ident; + }; } } +m2!(X); + +#[proc_macros::identity] +#[derive(proc_macros::DeriveIdentity)] +pub struct S {} +", + r" +fn f() {0} + +m!(X); +macro_rules! 
m2 { + ($ident:ident) => { + fn f2() { + $ident + $ident; + }; + } +} +m2!(X); + +#[proc_macros::identity] +#[derive(proc_macros::DeriveIdentity)] +pub struct S {} +", + ); +} + +#[test] +fn typing_inside_a_derive_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: derive_identity +//- minicore:derive +//- /lib.rs +mod foo; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +$0 +#[derive(proc_macros::DeriveIdentity)] +#[allow()] +struct S; +", + r" +#[derive(proc_macros::DeriveIdentity)] +#[allow(dead_code)] +struct S; +", + ); +} #[test] fn typing_inside_a_function_should_not_invalidate_item_expansions() { diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs index 3894172a5ad86..215c49d4c2ce4 100644 --- a/crates/hir-def/src/path.rs +++ b/crates/hir-def/src/path.rs @@ -96,8 +96,8 @@ pub enum GenericArg { impl Path { /// Converts an `ast::Path` to `Path`. Works with use trees. /// It correctly handles `$crate` based path from macro call. - pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option { - lower::lower_path(path, ctx) + pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option { + lower::lower_path(ctx, path) } /// Converts a known mod path to `Path`. diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index abd817893cc4c..39f1b6f1c06db 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -4,8 +4,10 @@ use std::iter; use crate::{lower::LowerCtx, type_ref::ConstRef}; -use either::Either; -use hir_expand::name::{name, AsName}; +use hir_expand::{ + mod_path::resolve_crate_root, + name::{name, AsName}, +}; use intern::Interned; use syntax::ast::{self, AstNode, HasTypeBounds}; @@ -16,12 +18,12 @@ use crate::{ /// Converts an `ast::Path` to `Path`. Works with use trees. /// It correctly handles `$crate` based path from macro call. 
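// Review note on the `lower_path` hunk below: `$crate` is no longer special-cased
// through `Hygiene`; the token's syntax context is read from the span map and resolved
// to its defining crate. Condensed from the new code (surrounding control flow elided):
let span_ctx = span_map.span_for_range(name_ref.syntax().text_range()).ctx;
kind = resolve_crate_root(ctx.db.upcast(), span_ctx)
    .map(PathKind::DollarCrate)   // `$crate` written in a macro: remember whose crate
    .unwrap_or(PathKind::Crate);  // no resolvable context: degrade to plain `crate`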
-pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { +pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option { let mut kind = PathKind::Plain; let mut type_anchor = None; let mut segments = Vec::new(); let mut generic_args = Vec::new(); - let hygiene = ctx.hygiene(); + let span_map = ctx.span_map(); loop { let segment = path.segment()?; @@ -31,31 +33,31 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { - // FIXME: this should just return name - match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) { - Either::Left(name) => { - let args = segment - .generic_arg_list() - .and_then(|it| lower_generic_args(ctx, it)) - .or_else(|| { - lower_generic_args_from_fn_path( - ctx, - segment.param_list(), - segment.ret_type(), - ) - }) - .map(Interned::new); - if let Some(_) = args { - generic_args.resize(segments.len(), None); - generic_args.push(args); - } - segments.push(name); - } - Either::Right(crate_id) => { - kind = PathKind::DollarCrate(crate_id); - break; - } + if name_ref.text() == "$crate" { + break kind = resolve_crate_root( + ctx.db.upcast(), + span_map.span_for_range(name_ref.syntax().text_range()).ctx, + ) + .map(PathKind::DollarCrate) + .unwrap_or(PathKind::Crate); + } + let name = name_ref.as_name(); + let args = segment + .generic_arg_list() + .and_then(|it| lower_generic_args(ctx, it)) + .or_else(|| { + lower_generic_args_from_fn_path( + ctx, + segment.param_list(), + segment.ret_type(), + ) + }) + .map(Interned::new); + if let Some(_) = args { + generic_args.resize(segments.len(), None); + generic_args.push(args); } + segments.push(name); } ast::PathSegmentKind::SelfTypeKw => { segments.push(name![Self]); @@ -74,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option>::Foo desugars to Trait::Foo Some(trait_ref) => { let Path::Normal { mod_path, generic_args: path_generic_args, .. } = - Path::from_src(trait_ref.path()?, ctx)? + Path::from_src(ctx, trait_ref.path()?)? 
else { return None; }; @@ -151,8 +153,14 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option PathKind::DollarCrate(crate_root), + None => PathKind::Crate, + } + } } } } diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 50da9ed06a0d9..ba0a2c0224a05 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -588,6 +588,14 @@ impl Resolver { _ => None, }) } + + pub fn impl_def(&self) -> Option { + self.scopes().find_map(|scope| match scope { + Scope::ImplDefScope(def) => Some(*def), + _ => None, + }) + } + /// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver #[must_use] pub fn update_to_inner_scope( diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs index a6befc8a81a81..f4a6b61f7af5e 100644 --- a/crates/hir-def/src/test_db.rs +++ b/crates/hir-def/src/test_db.rs @@ -34,6 +34,7 @@ pub(crate) struct TestDB { impl Default for TestDB { fn default() -> Self { let mut this = Self { storage: Default::default(), events: Default::default() }; + this.setup_syntax_context_root(); this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); this } diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index 30f48de61f2e8..f5803653c73be 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -2,7 +2,7 @@ use std::iter; -use hir_expand::{hygiene::Hygiene, InFile}; +use hir_expand::{span::SpanMapRef, InFile}; use la_arena::ArenaMap; use syntax::ast; use triomphe::Arc; @@ -34,22 +34,22 @@ impl RawVisibility { db: &dyn DefDatabase, node: InFile>, ) -> RawVisibility { - Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id)) + Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref()) } - pub(crate) fn from_ast_with_hygiene( + pub(crate) fn from_ast_with_span_map( db: &dyn DefDatabase, node: Option, - hygiene: &Hygiene, + span_map: SpanMapRef<'_>, ) -> RawVisibility { - Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene) + Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), span_map) } - pub(crate) fn from_ast_with_hygiene_and_default( + pub(crate) fn from_ast_with_span_map_and_default( db: &dyn DefDatabase, node: Option, default: RawVisibility, - hygiene: &Hygiene, + span_map: SpanMapRef<'_>, ) -> RawVisibility { let node = match node { None => return default, @@ -57,7 +57,7 @@ impl RawVisibility { }; match node.kind() { ast::VisibilityKind::In(path) => { - let path = ModPath::from_src(db.upcast(), path, hygiene); + let path = ModPath::from_src(db.upcast(), path, span_map); let path = match path { None => return RawVisibility::private(), Some(path) => path, @@ -73,7 +73,7 @@ impl RawVisibility { RawVisibility::Module(path) } ast::VisibilityKind::PubSelf => { - let path = ModPath::from_kind(PathKind::Plain); + let path = ModPath::from_kind(PathKind::Super(0)); RawVisibility::Module(path) } ast::VisibilityKind::Pub => RawVisibility::Public, diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index 40726505491b6..be0b72f9dfa43 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -12,11 +12,40 @@ use std::{ marker::PhantomData, }; -use la_arena::{Arena, Idx}; +use la_arena::{Arena, Idx, RawIdx}; use profile::Count; use rustc_hash::FxHasher; use syntax::{ast, AstNode, AstPtr, SyntaxNode, 
SyntaxNodePtr}; +use crate::db; + +pub use base_db::span::ErasedFileAstId; + +/// `AstId` points to an AST node in any file. +/// +/// It is stable across reparses, and can be used as salsa key/value. +pub type AstId = crate::InFile>; + +impl AstId { + pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { + self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) + } + pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile { + crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) + } + pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr { + db.ast_id_map(self.file_id).get(self.value) + } +} + +pub type ErasedAstId = crate::InFile; + +impl ErasedAstId { + pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { + db.ast_id_map(self.file_id).get_erased(self.value) + } +} + /// `AstId` points to an AST node in a specific file. pub struct FileAstId { raw: ErasedFileAstId, @@ -62,8 +91,6 @@ impl FileAstId { } } -pub type ErasedFileAstId = Idx; - pub trait AstIdNode: AstNode {} macro_rules! register_ast_id_node { (impl AstIdNode for $($ident:ident),+ ) => { @@ -129,6 +156,11 @@ impl AstIdMap { pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { assert!(node.parent().is_none()); let mut res = AstIdMap::default(); + + // make sure to allocate the root node + if !should_alloc_id(node.kind()) { + res.alloc(node); + } // By walking the tree in breadth-first order we make sure that parents // get lower ids then children. That is, adding a new child does not // change parent's id. This means that, say, adding a new function to a @@ -136,9 +168,9 @@ impl AstIdMap { bdfs(node, |it| { if should_alloc_id(it.kind()) { res.alloc(&it); - true + TreeOrder::BreadthFirst } else { - false + TreeOrder::DepthFirst } }); res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ()); @@ -155,6 +187,11 @@ impl AstIdMap { res } + /// The [`AstId`] of the root node + pub fn root(&self) -> SyntaxNodePtr { + self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone() + } + pub fn ast_id(&self, item: &N) -> FileAstId { let raw = self.erased_ast_id(item.syntax()); FileAstId { raw, covariant: PhantomData } @@ -164,7 +201,7 @@ impl AstIdMap { AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() } - pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr { + pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr { self.arena[id].clone() } @@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 { hasher.finish() } +#[derive(Copy, Clone, PartialEq, Eq)] +enum TreeOrder { + BreadthFirst, + DepthFirst, +} + /// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs /// order? It is a mix of breadth-first and depth first orders. Nodes for which -/// `f` returns true are visited breadth-first, all the other nodes are explored -/// depth-first. +/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored +/// [`TreeOrder::DepthFirst`]. /// /// In other words, the size of the bfs queue is bound by the number of "true" /// nodes. 
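// Review nit on the `bdfs` doc comment above: the sentence still says the queue is
// bound by the number of "true" nodes; with `f` now returning `TreeOrder` it should
// read "the number of `BreadthFirst` nodes". The layering guarantee itself is
// unchanged (ids illustrative):
//
//   fn a() {}               // layer 1 -> ErasedFileAstId 1 (index 0 is the root node)
//   fn b() { fn c() {} }    // b: layer 1 -> 2; c: layer 2 -> 3
//
// Adding another item inside `b` only extends layer 2, so `a` and `b` keep their ids —
// which is what keeps `AstIdMap` stable across edits to item bodies.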
-fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) { +fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) { let mut curr_layer = vec![node.clone()]; let mut next_layer = vec![]; while !curr_layer.is_empty() { @@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) { while let Some(event) = preorder.next() { match event { syntax::WalkEvent::Enter(node) => { - if f(node.clone()) { + if f(node.clone()) == TreeOrder::BreadthFirst { next_layer.extend(node.children()); preorder.skip_subtree(); } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 0ec2422b30cf8..b8fc30c91189a 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,19 +1,19 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. use std::{fmt, ops}; -use base_db::CrateId; +use base_db::{span::SyntaxContextId, CrateId}; use cfg::CfgExpr; use either::Either; use intern::Interned; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; -use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode}; +use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; use triomphe::Arc; use crate::{ db::ExpandDatabase, - hygiene::Hygiene, mod_path::ModPath, + span::SpanMapRef, tt::{self, Subtree}, InFile, }; @@ -39,28 +39,33 @@ impl ops::Deref for RawAttrs { impl RawAttrs { pub const EMPTY: Self = Self { entries: None }; - pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { - let entries = collect_attrs(owner) - .filter_map(|(id, attr)| match attr { - Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) - } - Either::Right(comment) => comment.doc_comment().map(|doc| Attr { - id, - input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), - path: Interned::new(ModPath::from(crate::name!(doc))), - }), - }) - .collect::>(); - // FIXME: use `Arc::from_iter` when it becomes available - let entries: Arc<[Attr]> = Arc::from(entries); + pub fn new( + db: &dyn ExpandDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + ) -> Self { + let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr { + Either::Left(attr) => { + attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) + } + Either::Right(comment) => comment.doc_comment().map(|doc| Attr { + id, + input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), + path: Interned::new(ModPath::from(crate::name!(doc))), + ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx, + }), + }); + let entries: Arc<[Attr]> = Arc::from_iter(entries); Self { entries: if entries.is_empty() { None } else { Some(entries) } } } - pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { - let hygiene = Hygiene::new(db, owner.file_id); - Self::new(db, owner.value, &hygiene) + pub fn from_attrs_owner( + db: &dyn ExpandDatabase, + owner: InFile<&dyn ast::HasAttrs>, + span_map: SpanMapRef<'_>, + ) -> Self { + Self::new(db, owner.value, span_map) } pub fn merge(&self, other: Self) -> Self { @@ -71,19 +76,13 @@ impl RawAttrs { (Some(a), Some(b)) => { let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32; Self { - entries: Some(Arc::from( - a.iter() - .cloned() - .chain(b.iter().map(|it| { - let mut it = it.clone(); - it.id.id = it.id.ast_index() as u32 + last_ast_index - | (it.id.cfg_attr_index().unwrap_or(0) as u32) - << 
AttrId::AST_INDEX_BITS; - it - })) - // FIXME: use `Arc::from_iter` when it becomes available - .collect::>(), - )), + entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| { + let mut it = it.clone(); + it.id.id = it.id.ast_index() as u32 + last_ast_index + | (it.id.cfg_attr_index().unwrap_or(0) as u32) + << AttrId::AST_INDEX_BITS; + it + })))), } } } @@ -100,51 +99,43 @@ impl RawAttrs { } let crate_graph = db.crate_graph(); - let new_attrs = Arc::from( - self.iter() - .flat_map(|attr| -> SmallVec<[_; 1]> { - let is_cfg_attr = - attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]); - if !is_cfg_attr { - return smallvec![attr.clone()]; - } - - let subtree = match attr.token_tree_value() { - Some(it) => it, - _ => return smallvec![attr.clone()], - }; + let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> { + let is_cfg_attr = + attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]); + if !is_cfg_attr { + return smallvec![attr.clone()]; + } - let (cfg, parts) = match parse_cfg_attr_input(subtree) { - Some(it) => it, - None => return smallvec![attr.clone()], + let subtree = match attr.token_tree_value() { + Some(it) => it, + _ => return smallvec![attr.clone()], + }; + + let (cfg, parts) = match parse_cfg_attr_input(subtree) { + Some(it) => it, + None => return smallvec![attr.clone()], + }; + let index = attr.id; + let attrs = + parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { + let tree = Subtree { + delimiter: tt::Delimiter::dummy_invisible(), + token_trees: attr.to_vec(), }; - let index = attr.id; - let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map( - |(idx, attr)| { - let tree = Subtree { - delimiter: tt::Delimiter::unspecified(), - token_trees: attr.to_vec(), - }; - // FIXME hygiene - let hygiene = Hygiene::new_unhygienic(); - Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx)) - }, - ); - - let cfg_options = &crate_graph[krate].cfg_options; - let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; - let cfg = CfgExpr::parse(&cfg); - if cfg_options.check(&cfg) == Some(false) { - smallvec![] - } else { - cov_mark::hit!(cfg_attr_active); - - attrs.collect() - } - }) - // FIXME: use `Arc::from_iter` when it becomes available - .collect::>(), - ); + Attr::from_tt(db, &tree, index.with_cfg_attr(idx)) + }); + + let cfg_options = &crate_graph[krate].cfg_options; + let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; + let cfg = CfgExpr::parse(&cfg); + if cfg_options.check(&cfg) == Some(false) { + smallvec![] + } else { + cov_mark::hit!(cfg_attr_active); + + attrs.collect() + } + })); RawAttrs { entries: Some(new_attrs) } } @@ -185,21 +176,23 @@ pub struct Attr { pub id: AttrId, pub path: Interned, pub input: Option>, + pub ctxt: SyntaxContextId, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum AttrInput { /// `#[attr = "string"]` + // FIXME: This is losing span Literal(SmolStr), /// `#[attr(subtree)]` - TokenTree(Box<(tt::Subtree, mbe::TokenMap)>), + TokenTree(Box), } impl fmt::Display for AttrInput { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), - AttrInput::TokenTree(tt) => tt.0.fmt(f), + AttrInput::TokenTree(tt) => tt.fmt(f), } } } @@ -208,10 +201,10 @@ impl Attr { fn from_src( db: &dyn ExpandDatabase, ast: ast::Meta, - hygiene: &Hygiene, + span_map: SpanMapRef<'_>, id: AttrId, ) -> Option { - let 
path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?); + let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?); let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let value = match lit.kind() { ast::LiteralKind::String(string) => string.value()?.into(), @@ -219,24 +212,20 @@ impl Attr { }; Some(Interned::new(AttrInput::Literal(value))) } else if let Some(tt) = ast.token_tree() { - let (tree, map) = syntax_node_to_token_tree(tt.syntax()); - Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map))))) + let tree = syntax_node_to_token_tree(tt.syntax(), span_map); + Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) } else { None }; - Some(Attr { id, path, input }) + Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx }) } - fn from_tt( - db: &dyn ExpandDatabase, - tt: &tt::Subtree, - hygiene: &Hygiene, - id: AttrId, - ) -> Option { - let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); + fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option { + // FIXME: Unecessary roundtrip tt -> ast -> tt + let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); let ast = ast::Meta::cast(parse.syntax_node())?; - Self::from_src(db, ast, hygiene, id) + Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id) } pub fn path(&self) -> &ModPath { @@ -256,7 +245,7 @@ impl Attr { /// #[path(ident)] pub fn single_ident_value(&self) -> Option<&tt::Ident> { match self.input.as_deref()? { - AttrInput::TokenTree(tt) => match &*tt.0.token_trees { + AttrInput::TokenTree(tt) => match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), _ => None, }, @@ -267,7 +256,7 @@ impl Attr { /// #[path TokenTree] pub fn token_tree_value(&self) -> Option<&Subtree> { match self.input.as_deref()? { - AttrInput::TokenTree(tt) => Some(&tt.0), + AttrInput::TokenTree(tt) => Some(tt), _ => None, } } @@ -276,8 +265,7 @@ impl Attr { pub fn parse_path_comma_token_tree<'a>( &'a self, db: &'a dyn ExpandDatabase, - hygiene: &'a Hygiene, - ) -> Option + 'a> { + ) -> Option + 'a> { let args = self.token_tree_value()?; if args.delimiter.kind != DelimiterKind::Parenthesis { @@ -290,12 +278,13 @@ impl Attr { if tts.is_empty() { return None; } - // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here. + // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation + // here or maybe just parse a mod path from a token tree directly let subtree = tt::Subtree { - delimiter: tt::Delimiter::unspecified(), - token_trees: tts.into_iter().cloned().collect(), + delimiter: tt::Delimiter::dummy_invisible(), + token_trees: tts.to_vec(), }; - let (parse, _) = + let (parse, span_map) = mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem); let meta = ast::Meta::cast(parse.syntax_node())?; // Only simple paths are allowed. @@ -304,7 +293,11 @@ impl Attr { return None; } let path = meta.path()?; - ModPath::from_src(db, path, hygiene) + let call_site = span_map.span_at(path.syntax().text_range().start()).ctx; + Some(( + ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?, + call_site, + )) }); Some(paths) diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index 4ee12e2f21290..de58a495fef4f 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,16 +1,22 @@ //! 
Builtin attributes. +use base_db::{ + span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + FileId, +}; +use syntax::{TextRange, TextSize}; + use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; macro_rules! register_builtin { - ( $(($name:ident, $variant:ident) => $expand:ident),* ) => { + ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum BuiltinAttrExpander { $($variant),* } impl BuiltinAttrExpander { - pub fn expand( + pub fn $expand_fn( &self, db: &dyn ExpandDatabase, id: MacroCallId, @@ -45,7 +51,7 @@ impl BuiltinAttrExpander { } } -register_builtin! { +register_builtin! { expand: (bench, Bench) => dummy_attr_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand, @@ -77,9 +83,8 @@ fn dummy_attr_expand( /// /// As such, we expand `#[derive(Foo, bar::Bar)]` into /// ``` -/// #[Foo] -/// #[bar::Bar] -/// (); +/// #![Foo] +/// #![bar::Bar] /// ``` /// which allows fallback path resolution in hir::Semantics to properly identify our derives. /// Since we do not expand the attribute in nameres though, we keep the original item. @@ -98,21 +103,31 @@ fn derive_attr_expand( ) -> ExpandResult { let loc = db.lookup_intern_macro_call(id); let derives = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0, - _ => return ExpandResult::ok(tt::Subtree::empty()), + MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => { + attr_args + } + _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)), }; - pseudo_derive_attr_expansion(tt, derives) + pseudo_derive_attr_expansion(tt, derives, loc.call_site) } pub fn pseudo_derive_attr_expansion( tt: &tt::Subtree, args: &tt::Subtree, + call_site: SyntaxContextId, ) -> ExpandResult { let mk_leaf = |char| { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, - span: tt::TokenId::unspecified(), + span: tt::SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: base_db::span::SpanAnchor { + file_id: FileId::BOGUS, + ast_id: ROOT_ERASED_FILE_AST_ID, + }, + ctx: call_site, + }, })) }; @@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion( .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. })))) { token_trees.push(mk_leaf('#')); + token_trees.push(mk_leaf('!')); token_trees.push(mk_leaf('[')); token_trees.extend(tt.iter().cloned()); token_trees.push(mk_leaf(']')); } - token_trees.push(mk_leaf('(')); - token_trees.push(mk_leaf(')')); - token_trees.push(mk_leaf(';')); ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees }) } diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index ecc8b407a9c87..410aa4d289ebc 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -1,16 +1,16 @@ //! Builtin derives. -use ::tt::Ident; -use base_db::{CrateOrigin, LangCrateOrigin}; +use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin}; use itertools::izip; -use mbe::TokenMap; use rustc_hash::FxHashSet; use stdx::never; use tracing::debug; use crate::{ + hygiene::span_with_def_site_ctxt, name::{AsName, Name}, - tt::{self, TokenId}, + span::SpanMapRef, + tt, }; use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; @@ -29,12 +29,15 @@ macro_rules! 
register_builtin { db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - token_map: &TokenMap, + token_map: SpanMapRef<'_>, ) -> ExpandResult { let expander = match *self { $( BuiltinDeriveExpander::$trait => $expand, )* }; - expander(db, id, tt, token_map) + + let span = db.lookup_intern_macro_call(id).span(db); + let span = span_with_def_site_ctxt(db, span, id); + expander(db, id, span, tt, token_map) } fn find_by_name(name: &name::Name) -> Option { @@ -70,19 +73,19 @@ enum VariantShape { Unit, } -fn tuple_field_iterator(n: usize) -> impl Iterator { - (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified())) +fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator { + (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span)) } impl VariantShape { - fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree { - self.as_pattern_map(path, |it| quote!(#it)) + fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree { + self.as_pattern_map(path, span, |it| quote!(span => #it)) } - fn field_names(&self) -> Vec { + fn field_names(&self, span: SpanData) -> Vec { match self { VariantShape::Struct(s) => s.clone(), - VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(), + VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(), VariantShape::Unit => vec![], } } @@ -90,26 +93,27 @@ impl VariantShape { fn as_pattern_map( &self, path: tt::Subtree, + span: SpanData, field_map: impl Fn(&tt::Ident) -> tt::Subtree, ) -> tt::Subtree { match self { VariantShape::Struct(fields) => { let fields = fields.iter().map(|it| { let mapped = field_map(it); - quote! { #it : #mapped , } + quote! {span => #it : #mapped , } }); - quote! { + quote! {span => #path { ##fields } } } &VariantShape::Tuple(n) => { - let fields = tuple_field_iterator(n).map(|it| { + let fields = tuple_field_iterator(span, n).map(|it| { let mapped = field_map(&it); - quote! { + quote! {span => #mapped , } }); - quote! { + quote! {span => #path ( ##fields ) } } @@ -117,7 +121,7 @@ impl VariantShape { } } - fn from(tm: &TokenMap, value: Option) -> Result { + fn from(tm: SpanMapRef<'_>, value: Option) -> Result { let r = match value { None => VariantShape::Unit, Some(FieldList::RecordFieldList(it)) => VariantShape::Struct( @@ -139,17 +143,17 @@ enum AdtShape { } impl AdtShape { - fn as_pattern(&self, name: &tt::Ident) -> Vec { - self.as_pattern_map(name, |it| quote!(#it)) + fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec { + self.as_pattern_map(name, |it| quote!(span =>#it), span) } - fn field_names(&self) -> Vec> { + fn field_names(&self, span: SpanData) -> Vec> { match self { AdtShape::Struct(s) => { - vec![s.field_names()] + vec![s.field_names(span)] } AdtShape::Enum { variants, .. } => { - variants.iter().map(|(_, fields)| fields.field_names()).collect() + variants.iter().map(|(_, fields)| fields.field_names(span)).collect() } AdtShape::Union => { never!("using fields of union in derive is always wrong"); @@ -162,18 +166,21 @@ impl AdtShape { &self, name: &tt::Ident, field_map: impl Fn(&tt::Ident) -> tt::Subtree, + span: SpanData, ) -> Vec { match self { AdtShape::Struct(s) => { - vec![s.as_pattern_map(quote! { #name }, field_map)] + vec![s.as_pattern_map(quote! {span => #name }, span, field_map)] } AdtShape::Enum { variants, .. } => variants .iter() - .map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map)) + .map(|(v, fields)| { + fields.as_pattern_map(quote! 
{span => #name :: #v }, span, &field_map) + }) .collect(), AdtShape::Union => { never!("pattern matching on union is always wrong"); - vec![quote! { un }] + vec![quote! {span => un }] } } } @@ -189,8 +196,12 @@ struct BasicAdtInfo { associated_types: Vec, } -fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result { - let (name, generic_param_list, shape) = match &adt { +fn parse_adt( + tm: SpanMapRef<'_>, + adt: &ast::Adt, + call_site: SpanData, +) -> Result { + let (name, generic_param_list, shape) = match adt { ast::Adt::Struct(it) => ( it.name(), it.generic_param_list(), @@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result match this { Some(it) => { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree(it.syntax()).0 + mbe::syntax_node_to_token_tree(it.syntax(), tm) + } + None => { + tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) } - None => tt::Subtree::empty(), } }; let bounds = match ¶m { ast::TypeOrConstParam::Type(it) => { - it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) + it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) } ast::TypeOrConstParam::Const(_) => None, }; let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = param .ty() - .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0) - .unwrap_or_else(tt::Subtree::empty); + .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm)) + .unwrap_or_else(|| { + tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) + }); Some(ty) } else { None @@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); param_type_set.contains(&name).then_some(p) }) - .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) + .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) .collect(); - let name_token = name_to_token(&tm, name)?; + let name_token = name_to_token(tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) } -fn name_to_token(token_map: &TokenMap, name: Option) -> Result { +fn name_to_token( + token_map: SpanMapRef<'_>, + name: Option, +) -> Result { let name = name.ok_or_else(|| { debug!("parsed item has no name"); ExpandError::other("missing name") })?; - let name_token_id = - token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified); - let name_token = tt::Ident { span: name_token_id, text: name.text().into() }; + let span = token_map.span_for_range(name.syntax().text_range()); + let name_token = tt::Ident { span, text: name.text().into() }; Ok(name_token) } @@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option) -> Result, trait_path: tt::Subtree, make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree, ) -> ExpandResult { - let info = match parse_adt(tm, tt) { + let info = match parse_adt(tm, tt, invoc_span) { Ok(info) => info, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }), + e, + ) + } }; let trait_body = make_trait_body(&info); let mut where_block = vec![]; @@ -349,13 +373,13 @@ fn expand_simple_derive( let ident_ = ident.clone(); if let Some(b) = bound { let ident = ident.clone(); - where_block.push(quote! { #ident : #b , }); + where_block.push(quote! {invoc_span => #ident : #b , }); } if let Some(ty) = param_ty { - (quote! { const #ident : #ty , }, quote! 
{ #ident_ , }) + (quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , }) } else { let bound = trait_path.clone(); - (quote! { #ident : #bound , }, quote! { #ident_ , }) + (quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , }) } }) .unzip(); @@ -363,17 +387,17 @@ fn expand_simple_derive( where_block.extend(info.associated_types.iter().map(|it| { let it = it.clone(); let bound = trait_path.clone(); - quote! { #it : #bound , } + quote! {invoc_span => #it : #bound , } })); let name = info.name; - let expanded = quote! { + let expanded = quote! {invoc_span => impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body } }; ExpandResult::ok(expanded) } -fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree { +fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree { // FIXME: make hygiene works for builtin derive macro // such that $crate can be used here. let cg = db.crate_graph(); @@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) { cov_mark::hit!(test_copy_expand_in_core); - quote! { crate } + quote! {span => crate } } else { - quote! { core } + quote! {span => core } }; tt.token_trees[0].clone() @@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree fn copy_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>}) } fn clone_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| { + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| { if matches!(adt.shape, AdtShape::Union) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn clone(&self) -> Self { #star self } }; } if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn clone(&self) -> Self { match #star self {} } }; } let name = &adt.name; - let patterns = adt.shape.as_pattern(name); - let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() }); + let patterns = adt.shape.as_pattern(span, name); + let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span); let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| { - let fat_arrow = fat_arrow(); - quote! { + let fat_arrow = fat_arrow(span); + quote! {span => #pat #fat_arrow #expr, } }); - quote! { + quote! 
{span => fn clone(&self) -> Self { match self { ##arms @@ -451,53 +469,56 @@ fn clone_expand( }) } -/// This function exists since `quote! { => }` doesn't work. -fn fat_arrow() -> ::tt::Subtree { - let eq = - tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; - quote! { #eq> } +/// This function exists since `quote! {span => => }` doesn't work. +fn fat_arrow(span: SpanData) -> tt::Subtree { + let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span }; + quote! {span => #eq> } } -/// This function exists since `quote! { && }` doesn't work. -fn and_and() -> ::tt::Subtree { - let and = - tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; - quote! { #and& } +/// This function exists since `quote! {span => && }` doesn't work. +fn and_and(span: SpanData) -> tt::Subtree { + let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span }; + quote! {span => #and& } } fn default_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| { let body = match &adt.shape { AdtShape::Struct(fields) => { let name = &adt.name; - fields - .as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default())) + fields.as_pattern_map( + quote!(span =>#name), + span, + |_| quote!(span =>#krate::default::Default::default()), + ) } AdtShape::Enum { default_variant, variants } => { if let Some(d) = default_variant { let (name, fields) = &variants[*d]; let adt_name = &adt.name; fields.as_pattern_map( - quote!(#adt_name :: #name), - |_| quote!(#krate::default::Default::default()), + quote!(span =>#adt_name :: #name), + span, + |_| quote!(span =>#krate::default::Default::default()), ) } else { // FIXME: Return expand error here - quote!() + quote!(span =>) } } AdtShape::Union => { // FIXME: Return expand error here - quote!() + quote!(span =>) } }; - quote! { + quote! {span => fn default() -> Self { #body } @@ -508,44 +529,41 @@ fn default_expand( fn debug_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| { let for_variant = |name: String, v: &VariantShape| match v { VariantShape::Struct(fields) => { let for_fields = fields.iter().map(|it| { let x_string = it.to_string(); - quote! { + quote! {span => .field(#x_string, & #it) } }); - quote! { + quote! {span => f.debug_struct(#name) ##for_fields .finish() } } VariantShape::Tuple(n) => { - let for_fields = tuple_field_iterator(*n).map(|it| { - quote! { + let for_fields = tuple_field_iterator(span, *n).map(|it| { + quote! {span => .field( & #it) } }); - quote! { + quote! {span => f.debug_tuple(#name) ##for_fields .finish() } } - VariantShape::Unit => quote! { + VariantShape::Unit => quote! {span => f.write_str(#name) }, }; if matches!(&adt.shape, AdtShape::Enum { variants, .. 
} if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { match #star self {} } @@ -553,20 +571,20 @@ fn debug_expand( } let arms = match &adt.shape { AdtShape::Struct(fields) => { - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let name = &adt.name; - let pat = fields.as_pattern(quote!(#name)); + let pat = fields.as_pattern(quote!(span =>#name), span); let expr = for_variant(name.to_string(), fields); - vec![quote! { #pat #fat_arrow #expr }] + vec![quote! {span => #pat #fat_arrow #expr }] } AdtShape::Enum { variants, .. } => variants .iter() .map(|(name, v)| { - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let adt_name = &adt.name; - let pat = v.as_pattern(quote!(#adt_name :: #name)); + let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span); let expr = for_variant(name.to_string(), v); - quote! { + quote! {span => #pat #fat_arrow #expr , } }) @@ -576,7 +594,7 @@ fn debug_expand( vec![] } }; - quote! { + quote! {span => fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { match self { ##arms @@ -589,47 +607,46 @@ fn debug_expand( fn hash_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote! {}; + return quote! {span =>}; } if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn hash(&self, ra_expand_state: &mut H) { match #star self {} } }; } - let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map( - |(pat, names)| { - let expr = { - let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); }); - quote! { { - ##it - } } - }; - let fat_arrow = fat_arrow(); - quote! { - #pat #fat_arrow #expr , - } - }, - ); + let arms = + adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map( + |(pat, names)| { + let expr = { + let it = + names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); }); + quote! {span => { + ##it + } } + }; + let fat_arrow = fat_arrow(span); + quote! {span => + #pat #fat_arrow #expr , + } + }, + ); let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) { - quote! { #krate::mem::discriminant(self).hash(ra_expand_state); } + quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); } } else { - quote! {} + quote! {span =>} }; - quote! { + quote! 
{span => fn hash(&self, ra_expand_state: &mut H) { #check_discriminant match self { @@ -643,56 +660,58 @@ fn hash_expand( fn eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {}) + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>}) } fn partial_eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote! {}; + return quote! {span =>}; } let name = &adt.name; - let (self_patterns, other_patterns) = self_and_other_patterns(adt, name); - let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( + let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span); + let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map( |(pat1, pat2, names)| { - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let body = match &*names { [] => { - quote!(true) + quote!(span =>true) } [first, rest @ ..] => { let rest = rest.iter().map(|it| { - let t1 = Ident::new(format!("{}_self", it.text), it.span); - let t2 = Ident::new(format!("{}_other", it.text), it.span); - let and_and = and_and(); - quote!(#and_and #t1 .eq( #t2 )) + let t1 = tt::Ident::new(format!("{}_self", it.text), it.span); + let t2 = tt::Ident::new(format!("{}_other", it.text), it.span); + let and_and = and_and(span); + quote!(span =>#and_and #t1 .eq( #t2 )) }); let first = { - let t1 = Ident::new(format!("{}_self", first.text), first.span); - let t2 = Ident::new(format!("{}_other", first.text), first.span); - quote!(#t1 .eq( #t2 )) + let t1 = tt::Ident::new(format!("{}_self", first.text), first.span); + let t2 = tt::Ident::new(format!("{}_other", first.text), first.span); + quote!(span =>#t1 .eq( #t2 )) }; - quote!(#first ##rest) + quote!(span =>#first ##rest) } }; - quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } + quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , } }, ); - let fat_arrow = fat_arrow(); - quote! { + let fat_arrow = fat_arrow(span); + quote! 
{span => fn eq(&self, other: &Self) -> bool { match (self, other) { ##arms @@ -706,35 +725,46 @@ fn partial_eq_expand( fn self_and_other_patterns( adt: &BasicAdtInfo, name: &tt::Ident, + span: SpanData, ) -> (Vec, Vec) { - let self_patterns = adt.shape.as_pattern_map(name, |it| { - let t = Ident::new(format!("{}_self", it.text), it.span); - quote!(#t) - }); - let other_patterns = adt.shape.as_pattern_map(name, |it| { - let t = Ident::new(format!("{}_other", it.text), it.span); - quote!(#t) - }); + let self_patterns = adt.shape.as_pattern_map( + name, + |it| { + let t = tt::Ident::new(format!("{}_self", it.text), it.span); + quote!(span =>#t) + }, + span, + ); + let other_patterns = adt.shape.as_pattern_map( + name, + |it| { + let t = tt::Ident::new(format!("{}_other", it.text), it.span); + quote!(span =>#t) + }, + span, + ); (self_patterns, other_patterns) } fn ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, + span: SpanData, ) -> tt::Subtree { - let fat_arrow1 = fat_arrow(); - let fat_arrow2 = fat_arrow(); - quote! { + let fat_arrow1 = fat_arrow(span); + let fat_arrow2 = fat_arrow(span); + quote! {span => match #left.cmp(&#right) { #krate::cmp::Ordering::Equal #fat_arrow1 { #rest @@ -745,34 +775,34 @@ fn ord_expand( } if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote!(); + return quote!(span =>); } - let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name); - let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( + let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span); + let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map( |(pat1, pat2, fields)| { - let mut body = quote!(#krate::cmp::Ordering::Equal); + let mut body = quote!(span =>#krate::cmp::Ordering::Equal); for f in fields.into_iter().rev() { - let t1 = Ident::new(format!("{}_self", f.text), f.span); - let t2 = Ident::new(format!("{}_other", f.text), f.span); - body = compare(krate, quote!(#t1), quote!(#t2), body); + let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); + let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); + body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span); } - let fat_arrow = fat_arrow(); - quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } + let fat_arrow = fat_arrow(span); + quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , } }, ); - let fat_arrow = fat_arrow(); - let mut body = quote! { + let fat_arrow = fat_arrow(span); + let mut body = quote! {span => match (self, other) { ##arms _unused #fat_arrow #krate::cmp::Ordering::Equal } }; if matches!(&adt.shape, AdtShape::Enum { .. }) { - let left = quote!(#krate::intrinsics::discriminant_value(self)); - let right = quote!(#krate::intrinsics::discriminant_value(other)); - body = compare(krate, left, right, body); + let left = quote!(span =>#krate::intrinsics::discriminant_value(self)); + let right = quote!(span =>#krate::intrinsics::discriminant_value(other)); + body = compare(krate, left, right, body, span); } - quote! { + quote! 
{span => fn cmp(&self, other: &Self) -> #krate::cmp::Ordering { #body } @@ -783,20 +813,22 @@ fn ord_expand( fn partial_ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &TokenMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, + span: SpanData, ) -> tt::Subtree { - let fat_arrow1 = fat_arrow(); - let fat_arrow2 = fat_arrow(); - quote! { + let fat_arrow1 = fat_arrow(span); + let fat_arrow2 = fat_arrow(span); + quote! {span => match #left.partial_cmp(&#right) { #krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 { #rest @@ -807,37 +839,39 @@ fn partial_ord_expand( } if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote!(); + return quote!(span =>); } - let left = quote!(#krate::intrinsics::discriminant_value(self)); - let right = quote!(#krate::intrinsics::discriminant_value(other)); + let left = quote!(span =>#krate::intrinsics::discriminant_value(self)); + let right = quote!(span =>#krate::intrinsics::discriminant_value(other)); - let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name); - let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( + let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span); + let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map( |(pat1, pat2, fields)| { - let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); + let mut body = + quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); for f in fields.into_iter().rev() { - let t1 = Ident::new(format!("{}_self", f.text), f.span); - let t2 = Ident::new(format!("{}_other", f.text), f.span); - body = compare(krate, quote!(#t1), quote!(#t2), body); + let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); + let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); + body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span); } - let fat_arrow = fat_arrow(); - quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } + let fat_arrow = fat_arrow(span); + quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , } }, ); - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let body = compare( krate, left, right, - quote! { + quote! {span => match (self, other) { ##arms _unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal) } }, + span, ); - quote! { + quote! {span => fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> { #body } diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index a04de10b899cf..c8f04bfee54f1 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,17 +1,24 @@ //! 
Builtin macro -use base_db::{AnchoredPath, Edition, FileId}; +use base_db::{ + span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + AnchoredPath, Edition, FileId, +}; use cfg::CfgExpr; use either::Either; -use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap}; +use itertools::Itertools; +use mbe::{parse_exprs_with_sep, parse_to_token_tree}; use syntax::{ ast::{self, AstToken}, SmolStr, }; use crate::{ - db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId, - MacroCallLoc, + db::ExpandDatabase, + hygiene::span_with_def_site_ctxt, + name, quote, + tt::{self, DelimSpan}, + ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, }; macro_rules! register_builtin { @@ -36,7 +43,10 @@ macro_rules! register_builtin { let expander = match *self { $( BuiltinFnLikeExpander::$kind => $expand, )* }; - expander(db, id, tt) + + let span = db.lookup_intern_macro_call(id).span(db); + let span = span_with_def_site_ctxt(db, span, id); + expander(db, id, tt, span) } } @@ -44,13 +54,16 @@ macro_rules! register_builtin { pub fn expand( &self, db: &dyn ExpandDatabase, - arg_id: MacroCallId, + id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { let expander = match *self { $( EagerExpander::$e_kind => $e_expand, )* }; - expander(db, arg_id, tt) + + let span = db.lookup_intern_macro_call(id).span(db); + let span = span_with_def_site_ctxt(db, span, id); + expander(db, id, tt, span) } } @@ -109,29 +122,44 @@ register_builtin! { (option_env, OptionEnv) => option_env_expand } -const DOLLAR_CRATE: tt::Ident = - tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() }; +fn mk_pound(span: SpanData) -> tt::Subtree { + crate::quote::IntoTt::to_subtree( + vec![crate::tt::Leaf::Punct(crate::tt::Punct { + char: '#', + spacing: crate::tt::Spacing::Alone, + span: span, + }) + .into()], + span, + ) +} fn module_path_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // Just return a dummy result. - ExpandResult::ok(quote! { "module::path" }) + ExpandResult::ok(quote! {span => + "module::path" + }) } fn line_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // dummy implementation for type-checking purposes + // Note that `line!` and `column!` will never be implemented properly, as they are by definition + // not incremental ExpandResult::ok(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: "0u32".into(), - span: tt::Span::UNSPECIFIED, + span, }))], }) } @@ -140,26 +168,29 @@ fn log_syntax_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - ExpandResult::ok(quote! {}) + ExpandResult::ok(quote! {span =>}) } fn trace_macros_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - ExpandResult::ok(quote! {}) + ExpandResult::ok(quote! {span =>}) } fn stringify_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let pretty = ::tt::pretty(&tt.token_trees); - let expanded = quote! { + let expanded = quote! 
{span => #pretty }; @@ -170,27 +201,29 @@ fn assert_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let args = parse_exprs_with_sep(tt, ','); + let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let expanded = match &*args { [cond, panic_args @ ..] => { let comma = tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, - span: tt::TokenId::unspecified(), + span, }))], }; let cond = cond.clone(); let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); - quote! {{ + quote! {span =>{ if !(#cond) { - #DOLLAR_CRATE::panic!(##panic_args); + #dollar_crate::panic!(##panic_args); } }} } - [] => quote! {{}}, + [] => quote! {span =>{}}, }; ExpandResult::ok(expanded) @@ -200,12 +233,13 @@ fn file_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // FIXME: RA purposefully lacks knowledge of absolute file names // so just return "". let file_name = ""; - let expanded = quote! { + let expanded = quote! {span => #file_name }; @@ -216,16 +250,18 @@ fn format_args_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - format_args_expand_general(db, id, tt, "") + format_args_expand_general(db, id, tt, "", span) } fn format_args_nl_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - format_args_expand_general(db, id, tt, "\\n") + format_args_expand_general(db, id, tt, "\\n", span) } fn format_args_expand_general( @@ -234,11 +270,12 @@ fn format_args_expand_general( tt: &tt::Subtree, // FIXME: Make use of this so that mir interpretation works properly _end_string: &str, + span: SpanData, ) -> ExpandResult { - let pound = quote! {@PUNCT '#'}; + let pound = mk_pound(span); let mut tt = tt.clone(); tt.delimiter.kind = tt::DelimiterKind::Parenthesis; - return ExpandResult::ok(quote! { + return ExpandResult::ok(quote! {span => builtin #pound format_args #tt }); } @@ -247,25 +284,25 @@ fn asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // We expand all assembly snippets to `format_args!` invocations to get format syntax // highlighting for them. - let mut literals = Vec::new(); for tt in tt.token_trees.chunks(2) { match tt { [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))] | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] => { - let krate = DOLLAR_CRATE.clone(); - literals.push(quote!(#krate::format_args!(#lit);)); + let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; + literals.push(quote!(span=>#dollar_krate::format_args!(#lit);)); } _ => break, } } - let pound = quote! {@PUNCT '#'}; - let expanded = quote! { + let pound = mk_pound(span); + let expanded = quote! {span => builtin #pound asm ( {##literals} ) @@ -277,20 +314,22 @@ fn global_asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // Expand to nothing (at item-level) - ExpandResult::ok(quote! {}) + ExpandResult::ok(quote! 
{span =>}) } fn cfg_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { let loc = db.lookup_intern_macro_call(id); let expr = CfgExpr::parse(tt); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false); - let expanded = if enabled { quote!(true) } else { quote!(false) }; + let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) }; ExpandResult::ok(expanded) } @@ -298,13 +337,15 @@ fn panic_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { let loc: MacroCallLoc = db.lookup_intern_macro_call(id); + let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; // Expand to a macro call `$crate::panic::panic_{edition}` let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(#DOLLAR_CRATE::panic::panic_2021!) + quote!(span =>#dollar_crate::panic::panic_2021!) } else { - quote!(#DOLLAR_CRATE::panic::panic_2015!) + quote!(span =>#dollar_crate::panic::panic_2015!) }; // Pass the original arguments @@ -316,13 +357,15 @@ fn unreachable_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { let loc: MacroCallLoc = db.lookup_intern_macro_call(id); // Expand to a macro call `$crate::panic::unreachable_{edition}` + let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(#DOLLAR_CRATE::panic::unreachable_2021!) + quote!(span =>#dollar_crate::panic::unreachable_2021!) } else { - quote!(#DOLLAR_CRATE::panic::unreachable_2015!) + quote!(span =>#dollar_crate::panic::unreachable_2015!) }; // Pass the original arguments @@ -352,6 +395,7 @@ fn compile_error_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { _ => ExpandError::other("`compile_error!` argument must be a string"), }; - ExpandResult { value: quote! 
{span =>}, err: Some(err) } } fn concat_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { let mut err = None; let mut text = String::new(); @@ -407,13 +452,14 @@ fn concat_expand( } } } - ExpandResult { value: quote!(#text), err } + ExpandResult { value: quote!(span =>#text), err } } fn concat_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { let mut bytes = Vec::new(); let mut err = None; @@ -446,8 +492,25 @@ fn concat_bytes_expand( } } } - let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() }; - ExpandResult { value: quote!([#ident]), err } + let value = tt::Subtree { + delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, + token_trees: { + Itertools::intersperse_with( + bytes.into_iter().map(|it| { + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span })) + }), + || { + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + char: ',', + spacing: tt::Spacing::Alone, + span, + })) + }, + ) + .collect() + }, + }; + ExpandResult { value, err } } fn concat_bytes_expand_subtree( @@ -480,6 +543,7 @@ fn concat_idents_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { let mut err = None; let mut ident = String::new(); @@ -494,8 +558,9 @@ fn concat_idents_expand( } } } - let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() }; - ExpandResult { value: quote!(#ident), err } + // FIXME merge spans + let ident = tt::Ident { text: ident.into(), span }; + ExpandResult { value: quote!(span =>#ident), err } } fn relative_file( @@ -530,45 +595,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> { fn include_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, - _tt: &tt::Subtree, + tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult<tt::Subtree> { - match db.include_expand(arg_id) { - Ok((res, _)) => ExpandResult::ok(res.0.clone()), - Err(e) => ExpandResult::new(tt::Subtree::empty(), e), + let file_id = match include_input_to_file_id(db, arg_id, tt) { + Ok(it) => it, + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } + }; + match parse_to_token_tree( + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + SyntaxContextId::ROOT, + &db.file_text(file_id), + ) { + Some(it) => ExpandResult::ok(it), + None => ExpandResult::new( + tt::Subtree::empty(DelimSpan { open: span, close: span }), + ExpandError::other("failed to parse included file"), + ), } } -pub(crate) fn include_arg_to_tt( +pub fn include_input_to_file_id( db: &dyn ExpandDatabase, arg_id: MacroCallId, -) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> { - let loc = db.lookup_intern_macro_call(arg_id); - let Some(EagerCallInfo { arg, arg_id, .. 
}) = loc.eager.as_deref() else { - panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager); - }; - let path = parse_string(&arg.0)?; - let file_id = relative_file(db, *arg_id, &path, false)?; - - let (subtree, map) = - parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?; - Ok((triomphe::Arc::new((subtree, map)), file_id)) + arg: &tt::Subtree, +) -> Result { + relative_file(db, arg_id, &parse_string(arg)?, false) } fn include_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, - tt: &tt::Subtree, + _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - if let Err(e) = parse_string(tt) { - return ExpandResult::new(tt::Subtree::empty(), e); - } - // FIXME: actually read the file here if the user asked for macro expansion let res = tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), - span: tt::TokenId::unspecified(), + span, }))], }; ExpandResult::ok(res) @@ -578,10 +646,13 @@ fn include_str_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let path = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } }; // FIXME: we're not able to read excluded files (which is most of them because @@ -591,14 +662,14 @@ fn include_str_expand( let file_id = match relative_file(db, arg_id, &path, true) { Ok(file_id) => file_id, Err(_) => { - return ExpandResult::ok(quote!("")); + return ExpandResult::ok(quote!(span =>"")); } }; let text = db.file_text(file_id); let text = &*text; - ExpandResult::ok(quote!(#text)) + ExpandResult::ok(quote!(span =>#text)) } fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option { @@ -610,10 +681,13 @@ fn env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } }; let mut err = None; @@ -630,7 +704,7 @@ fn env_expand( // `include!("foo.rs"), which might go to infinite loop "UNRESOLVED_ENV_VAR".to_string() }); - let expanded = quote! { #s }; + let expanded = quote! {span => #s }; ExpandResult { value: expanded, err } } @@ -639,15 +713,18 @@ fn option_env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } }; // FIXME: Use `DOLLAR_CRATE` when that works in eager macros. let expanded = match get_env_inner(db, arg_id, &key) { - None => quote! { ::core::option::Option::None::<&str> }, - Some(s) => quote! { ::core::option::Option::Some(#s) }, + None => quote! {span => ::core::option::Option::None::<&str> }, + Some(s) => quote! 
{span => ::core::option::Option::Some(#s) }, }; ExpandResult::ok(expanded) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index ff0d279d8cce1..d2c6559b06b16 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,22 +1,31 @@ //! Defines database & queries for macro expansion. -use base_db::{salsa, CrateId, Edition, SourceDatabase}; +use base_db::{ + salsa::{self, debug::DebugQueryTable}, + span::SyntaxContextId, + CrateId, Edition, FileId, SourceDatabase, +}; use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; use syntax::{ ast::{self, HasAttrs, HasDocComments}, - AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, + AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, }; use triomphe::Arc; use crate::{ - ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, - builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, - BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, - ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, - MacroDefKind, MacroFile, ProcMacroExpander, + ast_id_map::AstIdMap, + attrs::RawAttrs, + builtin_attr_macro::pseudo_derive_attr_expansion, + builtin_fn_macro::EagerExpander, + fixup::{self, SyntaxFixupUndoInfo}, + hygiene::{apply_mark, SyntaxContextData, Transparency}, + span::{RealSpanMap, SpanMap, SpanMapRef}, + tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, + ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId, + MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander, }; /// Total limit on the number of tokens produced by any macro invocation. @@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576); #[derive(Debug, Clone, Eq, PartialEq)] /// Old-style `macro_rules` or the new macros 2.0 pub struct DeclarativeMacroExpander { - pub mac: mbe::DeclarativeMacro, - pub def_site_token_map: mbe::TokenMap, + pub mac: mbe::DeclarativeMacro, + pub transparency: Transparency, } impl DeclarativeMacroExpander { - pub fn expand(&self, tt: tt::Subtree) -> ExpandResult { + pub fn expand( + &self, + db: &dyn ExpandDatabase, + tt: tt::Subtree, + call_id: MacroCallId, + ) -> ExpandResult { match self.mac.err() { Some(e) => ExpandResult::new( - tt::Subtree::empty(), + tt::Subtree::empty(tt::DelimSpan::DUMMY), ExpandError::other(format!("invalid macro definition: {e}")), ), - None => self.mac.expand(tt).map_err(Into::into), + None => self + .mac + .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency)) + .map_err(Into::into), } } - pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId { - self.mac.map_id_down(token_id) - } - - pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { - self.mac.map_id_up(token_id) + pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult { + match self.mac.err() { + Some(e) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan::DUMMY), + ExpandError::other(format!("invalid macro definition: {e}")), + ), + None => self.mac.expand(&tt, |_| ()).map_err(Into::into), + } } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum TokenExpander { + /// Old-style `macro_rules` or the new macros 2.0 DeclarativeMacro(Arc), /// Stuff like `line!` and `file!`. 
BuiltIn(BuiltinFnLikeExpander), @@ -69,31 +89,6 @@ pub enum TokenExpander { ProcMacro(ProcMacroExpander), } -// FIXME: Get rid of these methods -impl TokenExpander { - pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { - match self { - TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id), - TokenExpander::BuiltIn(..) - | TokenExpander::BuiltInEager(..) - | TokenExpander::BuiltInAttr(..) - | TokenExpander::BuiltInDerive(..) - | TokenExpander::ProcMacro(..) => id, - } - } - - pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { - match self { - TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id), - TokenExpander::BuiltIn(..) - | TokenExpander::BuiltInEager(..) - | TokenExpander::BuiltInAttr(..) - | TokenExpander::BuiltInDerive(..) - | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), - } - } -} - #[salsa::query_group(ExpandDatabaseStorage)] pub trait ExpandDatabase: SourceDatabase { fn ast_id_map(&self, file_id: HirFileId) -> Arc; @@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase { // This query is LRU cached fn parse_macro_expansion( &self, - macro_file: MacroFile, - ) -> ExpandResult<(Parse, Arc)>; + macro_file: MacroFileId, + ) -> ExpandResult<(Parse, Arc)>; + #[salsa::transparent] + fn span_map(&self, file_id: HirFileId) -> SpanMap; + + fn real_span_map(&self, file_id: FileId) -> Arc; /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the /// reason why we use salsa at all. @@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase { /// to be incremental. #[salsa::interned] fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId; + #[salsa::interned] + fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId; - /// Lowers syntactic macro call to a token tree representation. #[salsa::transparent] - fn macro_arg( - &self, - id: MacroCallId, - ) -> ValueResult< - Option>, - Arc>, - >; - /// Extracts syntax node, corresponding to a macro call. That's a firewall + fn setup_syntax_context_root(&self) -> (); + #[salsa::transparent] + fn dump_syntax_contexts(&self) -> String; + + /// Lowers syntactic macro call to a token tree representation. That's a firewall /// query, only typing in the macro call itself changes the returned /// subtree. - fn macro_arg_node( + fn macro_arg( &self, id: MacroCallId, - ) -> ValueResult, Arc>>; + ) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>>; /// Fetches the expander for this macro. #[salsa::transparent] fn macro_expander(&self, id: MacroDefId) -> TokenExpander; @@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase { def_crate: CrateId, id: AstId, ) -> Arc; - - /// Expand macro call to a token tree. - // This query is LRU cached - fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult>; - #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)] - fn include_expand( - &self, - arg_id: MacroCallId, - ) -> Result< - (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId), - ExpandError, - >; /// Special case of the previous query for procedural macros. We can't LRU /// proc macros, since they are not deterministic in general, and /// non-determinism breaks salsa in a very, very, very bad way. 
@@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase { &self, macro_call: MacroCallId, ) -> ExpandResult>; +} - fn hygiene_frame(&self, file_id: HirFileId) -> Arc; +#[inline] +pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { + match file_id.repr() { + HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)), + HirFileIdRepr::MacroFile(m) => { + SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1) + } + } +} + +pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc { + Arc::new(RealSpanMap::from_file(db, file_id)) } /// This expands the given macro call, but with different arguments. This is @@ -181,21 +178,36 @@ pub fn expand_speculative( token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let loc = db.lookup_intern_macro_call(actual_macro_call); - let token_range = token_to_map.text_range(); + + let span_map = RealSpanMap::absolute(FileId::BOGUS); + let span_map = SpanMapRef::RealSpanMap(&span_map); // Build the subtree and token mapping for the speculative args - let censor = censor_for_macro_input(&loc, speculative_args); - let mut fixups = fixup::fixup_syntax(speculative_args); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( - speculative_args, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, - ); + let (mut tt, undo_info) = match loc.kind { + MacroCallKind::FnLike { .. } => { + (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE) + } + MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { + let censor = censor_for_macro_input(&loc, speculative_args); + let mut fixups = fixup::fixup_syntax(span_map, speculative_args); + fixups.append.retain(|it, _| match it { + syntax::NodeOrToken::Node(it) => !censor.contains(it), + syntax::NodeOrToken::Token(_) => true, + }); + fixups.remove.extend(censor); + ( + mbe::syntax_node_to_token_tree_modified( + speculative_args, + span_map, + fixups.append, + fixups.remove, + ), + fixups.undo_info, + ) + } + }; - let (attr_arg, token_id) = match loc.kind { + let attr_arg = match loc.kind { MacroCallKind::Attr { invoc_attr_index, .. 
} => { let attr = if loc.def.is_attribute_derive() { // for pseudo-derive expansion we actually pass the attribute itself only @@ -210,59 +222,45 @@ pub fn expand_speculative( }?; match attr.token_tree() { Some(token_tree) => { - let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax()); - tree.delimiter = tt::Delimiter::unspecified(); - - let shift = mbe::Shift::new(&tt); - shift.shift_all(&mut tree); - - let token_id = if token_tree.syntax().text_range().contains_range(token_range) { - let attr_input_start = - token_tree.left_delimiter_token()?.text_range().start(); - let range = token_range.checked_sub(attr_input_start)?; - let token_id = shift.shift(map.token_by_range(range)?); - Some(token_id) - } else { - None - }; - (Some(tree), token_id) - } - _ => (None, None), - } - } - _ => (None, None), - }; - let token_id = match token_id { - Some(token_id) => token_id, - // token wasn't inside an attribute input so it has to be in the general macro input - None => { - let range = token_range.checked_sub(speculative_args.text_range().start())?; - let token_id = spec_args_tmap.token_by_range(range)?; - match loc.def.kind { - MacroDefKind::Declarative(it) => { - db.decl_macro_expander(loc.krate, it).map_id_down(token_id) + let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map); + tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + + Some(tree) } - _ => token_id, + _ => None, } } + _ => None, }; // Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. let mut speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { - tt.delimiter = tt::Delimiter::unspecified(); - expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref()) + tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + let call_site = loc.span(db); + expander.expand( + db, + loc.def.krate, + loc.krate, + &tt, + attr_arg.as_ref(), + call_site, + call_site, + call_site, + ) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { - pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?) + pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site) } MacroDefKind::BuiltInDerive(expander, ..) => { // this cast is a bit sus, can we avoid losing the typedness here? 
let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); - expander.expand(db, actual_macro_call, &adt, &spec_args_tmap) + expander.expand(db, actual_macro_call, &adt, span_map) + } + MacroDefKind::Declarative(it) => { + db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt) } - MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt), MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), MacroDefKind::BuiltInEager(it, _) => { it.expand(db, actual_macro_call, &tt).map_err(Into::into) @@ -270,13 +268,14 @@ pub fn expand_speculative( MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt), }; - let expand_to = macro_expand_to(db, actual_macro_call); - fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info); + let expand_to = loc.expand_to(); + + fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info); let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); let syntax_node = node.syntax_node(); let token = rev_tmap - .ranges_by_token(token_id, token_to_map.kind()) + .ranges_with_span(span_map.span_for_range(token_to_map.text_range())) .filter_map(|range| syntax_node.covering_element(range).into_token()) .min_by_key(|t| { // prefer tokens of the same kind and text @@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { match file_id.repr() { - HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(), + HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(), HirFileIdRepr::MacroFile(macro_file) => { db.parse_macro_expansion(macro_file).value.0.syntax_node() } @@ -312,17 +311,16 @@ fn parse_or_expand_with_err( } } +// FIXME: We should verify that the parsed node is one of the many macro node variants we expect +// instead of having it be untyped fn parse_macro_expansion( db: &dyn ExpandDatabase, - macro_file: MacroFile, -) -> ExpandResult<(Parse, Arc)> { + macro_file: MacroFileId, +) -> ExpandResult<(Parse, Arc)> { let _p = profile::span("parse_macro_expansion"); - let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id); - - let expand_to = macro_expand_to(db, macro_file.macro_call_id); - - tracing::debug!("expanded = {}", tt.as_debug_string()); - tracing::debug!("kind = {:?}", expand_to); + let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let expand_to = loc.expand_to(); + let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc); let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); @@ -333,51 +331,129 @@ fn parse_macro_expansion_error( db: &dyn ExpandDatabase, macro_call_id: MacroCallId, ) -> ExpandResult> { - db.parse_macro_expansion(MacroFile { macro_call_id }) + db.parse_macro_expansion(MacroFileId { macro_call_id }) .map(|it| it.0.errors().to_vec().into_boxed_slice()) } +fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse, SpanMap) { + match file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id))) + } + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse, SpanMap::ExpansionSpanMap(map)) + } + } +} + fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, -) -> ValueResult< - Option>, - Arc>, -> { - let loc = 
db.lookup_intern_macro_call(id); - - if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() { - return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())))); - } - - let ValueResult { value, err } = db.macro_arg_node(id); - let Some(arg) = value else { - return ValueResult { value: None, err }; + // FIXME: consider the following by putting fixup info into eager call info args + // ) -> ValueResult>, Arc>> { +) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>> { + let mismatched_delimiters = |arg: &SyntaxNode| { + let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); + let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); + let well_formed_tt = + matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); + if !well_formed_tt { + // Don't expand malformed (unbalanced) macro invocations. This is + // less than ideal, but trying to expand unbalanced macro calls + // sometimes produces pathological, deeply nested code which breaks + // all kinds of things. + // + // Some day, we'll have explicit recursion counters for all + // recursive things, at which point this code might be removed. + cov_mark::hit!(issue9358_bad_macro_stack_overflow); + Some(Arc::new(Box::new([SyntaxError::new( + "unbalanced token tree".to_owned(), + arg.text_range(), + )]) as Box<[_]>)) + } else { + None + } }; + let loc = db.lookup_intern_macro_call(id); + if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) + .then(|| loc.eager.as_deref()) + .flatten() + { + ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE))) + } else { + let (parse, map) = parse_with_map(db, loc.kind.file_id()); + let root = parse.syntax_node(); + + let syntax = match loc.kind { + MacroCallKind::FnLike { ast_id, .. } => { + let node = &ast_id.to_ptr(db).to_node(&root); + let offset = node.syntax().text_range().start(); + match node.token_tree() { + Some(tt) => { + let tt = tt.syntax(); + if let Some(e) = mismatched_delimiters(tt) { + return ValueResult::only_err(e); + } + tt.clone() + } + None => { + return ValueResult::only_err(Arc::new(Box::new([ + SyntaxError::new_at_offset("missing token tree".to_owned(), offset), + ]))); + } + } + } + MacroCallKind::Derive { ast_id, .. } => { + ast_id.to_ptr(db).to_node(&root).syntax().clone() + } + MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), + }; + let (mut tt, undo_info) = match loc.kind { + MacroCallKind::FnLike { .. } => { + (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE) + } + MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. 
} => { + let censor = censor_for_macro_input(&loc, &syntax); + let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax); + fixups.append.retain(|it, _| match it { + syntax::NodeOrToken::Node(it) => !censor.contains(it), + syntax::NodeOrToken::Token(_) => true, + }); + fixups.remove.extend(censor); + ( + mbe::syntax_node_to_token_tree_modified( + &syntax, + map, + fixups.append, + fixups.remove, + ), + fixups.undo_info, + ) + } + }; - let node = SyntaxNode::new_root(arg); - let censor = censor_for_macro_input(&loc, &node); - let mut fixups = fixup::fixup_syntax(&node); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( - &node, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, - ); + if loc.def.is_proc_macro() { + // proc macros expect their inputs without parentheses, MBEs expect it with them included + tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + } - if loc.def.is_proc_macro() { - // proc macros expect their inputs without parentheses, MBEs expect it with them included - tt.delimiter = tt::Delimiter::unspecified(); - } - let val = Some(Arc::new((tt, tmap, fixups.undo_info))); - match err { - Some(err) => ValueResult::new(val, err), - None => ValueResult::ok(val), + if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { + match parse.errors() { + [] => ValueResult::ok(Some((Arc::new(tt), undo_info))), + errors => ValueResult::new( + Some((Arc::new(tt), undo_info)), + // Box::<[_]>::from(res.errors()), not stable yet + Arc::new(errors.to_vec().into_boxed_slice()), + ), + } + } else { + ValueResult::ok(Some((Arc::new(tt), undo_info))) + } } } +// FIXME: Censoring info should be calculated by the caller! Namely by name resolution /// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped /// - attributes expect the invoking attribute to be stripped @@ -417,103 +493,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet ValueResult, Arc>> { - let err = || -> Arc> { - Arc::new(Box::new([SyntaxError::new_at_offset( - "invalid macro call".to_owned(), - syntax::TextSize::from(0), - )])) - }; - let loc = db.lookup_intern_macro_call(id); - let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind { - let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() { - Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0) - } else { - loc.kind - .arg(db) - .and_then(|arg| ast::TokenTree::cast(arg.value)) - .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax()) - }; - match res { - Some(res) if res.errors().is_empty() => res.syntax_node(), - Some(res) => { - return ValueResult::new( - Some(res.syntax_node().green().into()), - // Box::<[_]>::from(res.errors()), not stable yet - Arc::new(res.errors().to_vec().into_boxed_slice()), - ); - } - None => return ValueResult::only_err(err()), - } - } else { - match loc.kind.arg(db) { - Some(res) => res.value, - None => return ValueResult::only_err(err()), - } - }; - if matches!(loc.kind, MacroCallKind::FnLike { .. 
}) { - let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); - let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); - let well_formed_tt = - matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); - if !well_formed_tt { - // Don't expand malformed (unbalanced) macro invocations. This is - // less than ideal, but trying to expand unbalanced macro calls - // sometimes produces pathological, deeply nested code which breaks - // all kinds of things. - // - // Some day, we'll have explicit recursion counters for all - // recursive things, at which point this code might be removed. - cov_mark::hit!(issue9358_bad_macro_stack_overflow); - return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new( - "unbalanced token tree".to_owned(), - arg.text_range(), - )]))); - } - } - ValueResult::ok(Some(arg.green().into())) -} - fn decl_macro_expander( db: &dyn ExpandDatabase, def_crate: CrateId, id: AstId, ) -> Arc { let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; - let (mac, def_site_token_map) = match id.to_node(db) { - ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() { - Some(arg) => { - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); - let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); - (mac, def_site_token_map) - } - None => ( - mbe::DeclarativeMacro::from_err( + let (root, map) = parse_with_map(db, id.file_id); + let root = root.syntax_node(); + + let transparency = |node| { + // ... would be nice to have the item tree here + let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate); + match &*attrs + .iter() + .find(|it| { + it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency") + })? + .token_tree_value()? + .token_trees + { + [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] 
=> match &*i.text { + "transparent" => Some(Transparency::Transparent), + "semitransparent" => Some(Transparency::SemiTransparent), + "opaque" => Some(Transparency::Opaque), + _ => None, + }, + _ => None, + } + }; + + let (mac, transparency) = match id.to_ptr(db).to_node(&root) { + ast::Macro::MacroRules(macro_rules) => ( + match macro_rules.token_tree() { + Some(arg) => { + let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); + let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); + mac + } + None => mbe::DeclarativeMacro::from_err( mbe::ParseError::Expected("expected a token tree".into()), is_2021, ), - Default::default(), - ), - }, - ast::Macro::MacroDef(macro_def) => match macro_def.body() { - Some(arg) => { - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); - let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); - (mac, def_site_token_map) - } - None => ( - mbe::DeclarativeMacro::from_err( + }, + transparency(¯o_rules).unwrap_or(Transparency::SemiTransparent), + ), + ast::Macro::MacroDef(macro_def) => ( + match macro_def.body() { + Some(arg) => { + let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); + let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); + mac + } + None => mbe::DeclarativeMacro::from_err( mbe::ParseError::Expected("expected a token tree".into()), is_2021, ), - Default::default(), - ), - }, + }, + transparency(¯o_def).unwrap_or(Transparency::Opaque), + ), }; - Arc::new(DeclarativeMacroExpander { mac, def_site_token_map }) + Arc::new(DeclarativeMacroExpander { mac, transparency }) } fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { @@ -529,39 +569,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { } } -fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { +fn macro_expand( + db: &dyn ExpandDatabase, + macro_call_id: MacroCallId, + loc: MacroCallLoc, +) -> ExpandResult> { let _p = profile::span("macro_expand"); - let loc = db.lookup_intern_macro_call(id); let ExpandResult { value: tt, mut err } = match loc.def.kind { - MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id), + MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id), MacroDefKind::BuiltInDerive(expander, ..) => { - let arg = db.macro_arg_node(id).value.unwrap(); - - let node = SyntaxNode::new_root(arg); - let censor = censor_for_macro_input(&loc, &node); - let mut fixups = fixup::fixup_syntax(&node); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications( - &node, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, - ); - - // this cast is a bit sus, can we avoid losing the typedness here? - let adt = ast::Adt::cast(node).unwrap(); - let mut res = expander.expand(db, id, &adt, &tmap); - fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info); - res + let (root, map) = parse_with_map(db, loc.kind.file_id()); + let root = root.syntax_node(); + let MacroCallKind::Derive { ast_id, .. 
} = loc.kind else { unreachable!() }; + let node = ast_id.to_ptr(db).to_node(&root); + + // FIXME: Use censoring + let _censor = censor_for_macro_input(&loc, node.syntax()); + expander.expand(db, macro_call_id, &node, map.as_ref()) } _ => { - let ValueResult { value, err } = db.macro_arg(id); - let Some(macro_arg) = value else { + let ValueResult { value, err } = db.macro_arg(macro_call_id); + let Some((macro_arg, undo_info)) = value else { return ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: Vec::new(), }), // FIXME: We should make sure to enforce an invariant that invalid macro @@ -570,12 +602,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { - db.decl_macro_expander(loc.def.krate, id).expand(arg.clone()) + db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) + } + MacroDefKind::BuiltIn(it, _) => { + it.expand(db, macro_call_id, &arg).map_err(Into::into) } - MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into), // This might look a bit odd, but we do not expand the inputs to eager macros here. // Eager macros inputs are expanded, well, eagerly when we collect the macro calls. // That kind of expansion uses the ast id map of an eager macros input though which goes through @@ -583,11 +617,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { - let mut arg = arg.clone(); - fixup::reverse_fixups(&mut arg, arg_tm, undo_info); - return ExpandResult { - value: Arc::new(arg), + value: macro_arg.clone(), err: err.map(|err| { let mut buf = String::new(); for err in &**err { @@ -600,12 +631,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult it.expand(db, id, &arg).map_err(Into::into), - MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg), + MacroDefKind::BuiltInEager(it, _) => { + it.expand(db, macro_call_id, &arg).map_err(Into::into) + } + MacroDefKind::BuiltInAttr(it, _) => { + let mut res = it.expand(db, macro_call_id, &arg); + fixup::reverse_fixups(&mut res.value, &undo_info); + res + } _ => unreachable!(), - }; - fixup::reverse_fixups(&mut res.value, arg_tm, undo_info); - res + } } }; @@ -627,10 +662,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandResult> { let loc = db.lookup_intern_macro_call(id); - let Some(macro_arg) = db.macro_arg(id).value else { + let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { return ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: Vec::new(), }), // FIXME: We should make sure to enforce an invariant that invalid macro @@ -639,47 +674,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult expander, _ => unreachable!(), }; let attr_arg = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } => { - let mut attr_args = attr_args.0.clone(); - mbe::Shift::new(arg_tt).shift_all(&mut attr_args); - Some(attr_args) - } + MacroCallKind::Attr { attr_args: Some(attr_args), .. 
} => Some(&**attr_args), _ => None, }; - let ExpandResult { value: mut tt, err } = - expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref()); + let call_site = loc.span(db); + let ExpandResult { value: mut tt, err } = expander.expand( + db, + loc.def.krate, + loc.krate, + ¯o_arg, + attr_arg, + // FIXME + call_site, + call_site, + // FIXME + call_site, + ); // Set a hard limit for the expanded tt if let Err(value) = check_tt_count(&tt) { return value; } - fixup::reverse_fixups(&mut tt, arg_tm, undo_info); + fixup::reverse_fixups(&mut tt, &undo_info); ExpandResult { value: Arc::new(tt), err } } -fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { - Arc::new(HygieneFrame::new(db, file_id)) -} - -fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo { - db.lookup_intern_macro_call(id).expand_to() -} - fn token_tree_to_syntax_node( tt: &tt::Subtree, expand_to: ExpandTo, -) -> (Parse, mbe::TokenMap) { +) -> (Parse, ExpansionSpanMap) { let entry_point = match expand_to { ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, ExpandTo::Items => mbe::TopEntryPoint::MacroItems, @@ -695,7 +727,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult> if TOKEN_LIMIT.check(count).is_err() { Err(ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![], }), err: Some(ExpandError::other(format!( @@ -708,3 +740,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult> Ok(()) } } + +fn setup_syntax_context_root(db: &dyn ExpandDatabase) { + db.intern_syntax_context(SyntaxContextData::root()); +} + +fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String { + let mut s = String::from("Expansions:"); + let mut entries = InternMacroCallLookupQuery.in_db(db).entries::>(); + entries.sort_by_key(|e| e.key); + for e in entries { + let id = e.key; + let expn_data = e.value.as_ref().unwrap(); + s.push_str(&format!( + "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}", + id, + expn_data.kind.file_id(), + expn_data.call_site, + SyntaxContextId::ROOT, // FIXME expn_data.def_site, + expn_data.kind.descr(), + )); + } + + s.push_str("\n\nSyntaxContexts:\n"); + let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::>(); + entries.sort_by_key(|e| e.key); + for e in entries { + struct SyntaxContextDebug<'a>( + &'a dyn ExpandDatabase, + SyntaxContextId, + &'a SyntaxContextData, + ); + + impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.2.fancy_debug(self.1, self.0, f) + } + } + stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap())); + } + s +} diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 4110f2847592d..ef7200f615ccf 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -18,18 +18,17 @@ //! //! //! 
See the full discussion : -use base_db::CrateId; -use rustc_hash::{FxHashMap, FxHashSet}; -use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent}; +use base_db::{span::SyntaxContextId, CrateId}; +use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; use triomphe::Arc; use crate::{ ast::{self, AstNode}, db::ExpandDatabase, - hygiene::Hygiene, mod_path::ModPath, - EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind, - MacroCallLoc, MacroDefId, MacroDefKind, + span::SpanMapRef, + EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId, + MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; pub fn expand_eager_macro_input( @@ -37,6 +36,7 @@ pub fn expand_eager_macro_input( krate: CrateId, macro_call: InFile<ast::MacroCall>, def: MacroDefId, + call_site: SyntaxContextId, resolver: &dyn Fn(ModPath) -> Option<MacroCallId>, ) -> ExpandResult<Option<MacroCallId>> { let ast_map = db.ast_id_map(macro_call.file_id); @@ -53,75 +53,44 @@ pub fn expand_eager_macro_input( krate, eager: None, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, + call_site, }); let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); - // we need this map here as the expansion of the eager input fake file loses whitespace ... - let mut ws_mapping = FxHashMap::default(); - if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() { - ws_mapping.extend(tm.entries().filter_map(|(id, range)| { - Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range)) - })); - } + + let mut arg_map = ExpansionSpanMap::empty(); let ExpandResult { value: expanded_eager_input, err } = { eager_macro_recur( db, - &Hygiene::new(db, macro_call.file_id), + &arg_exp_map, + &mut arg_map, + TextSize::new(0), InFile::new(arg_id.as_file(), arg_exp.syntax_node()), krate, + call_site, resolver, ) }; let err = parse_err.or(err); + if cfg!(debug_assertions) { + arg_map.finish(); + } - let Some((expanded_eager_input, mapping)) = expanded_eager_input else { + let Some((expanded_eager_input, _mapping)) = expanded_eager_input else { return ExpandResult { value: None, err }; }; - let (mut subtree, expanded_eager_input_token_map) = - mbe::syntax_node_to_token_tree(&expanded_eager_input); + let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map); - let og_tmap = if let Some(tt) = macro_call.value.token_tree() { - let mut ids_used = FxHashSet::default(); - let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax()); - // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside - so we need to remap them to the original input of the eager macro. 
- subtree.visit_ids(&mut |id| { - // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix - - if let Some(range) = expanded_eager_input_token_map - .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE) - { - // remap from expanded eager input to eager input expansion - if let Some(og_range) = mapping.get(&range) { - // remap from eager input expansion to original eager input - if let Some(&og_range) = ws_mapping.get(og_range) { - if let Some(og_token) = og_tmap.token_by_range(og_range) { - ids_used.insert(og_token); - return og_token; - } - } - } - } - tt::TokenId::UNSPECIFIED - }); - og_tmap.filter(|id| ids_used.contains(&id)); - og_tmap - } else { - Default::default() - }; - subtree.delimiter = crate::tt::Delimiter::unspecified(); + subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE; let loc = MacroCallLoc { def, krate, - eager: Some(Box::new(EagerCallInfo { - arg: Arc::new((subtree, og_tmap)), - arg_id, - error: err.clone(), - })), + eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })), kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, + call_site, }; ExpandResult { value: Some(db.intern_macro_call(loc)), err } @@ -132,12 +101,13 @@ fn lazy_expand( def: &MacroDefId, macro_call: InFile<ast::MacroCall>, krate: CrateId, -) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> { + call_site: SyntaxContextId, +) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> { let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value); let expand_to = ExpandTo::from_call_site(&macro_call.value); let ast_id = macro_call.with_value(ast_id); - let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }); + let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site); let macro_file = id.as_macro_file(); db.parse_macro_expansion(macro_file) @@ -146,57 +116,59 @@ fn eager_macro_recur( db: &dyn ExpandDatabase, - hygiene: &Hygiene, + span_map: &ExpansionSpanMap, + expanded_map: &mut ExpansionSpanMap, + mut offset: TextSize, curr: InFile<SyntaxNode>, krate: CrateId, + call_site: SyntaxContextId, macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, -) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> { +) -> ExpandResult<Option<(SyntaxNode, TextSize)>> { let original = curr.value.clone_for_update(); - let mut mapping = FxHashMap::default(); let mut replacements = Vec::new(); // FIXME: We only report a single error inside of eager expansions let mut error = None; - let mut offset = 0i32; - let apply_offset = |it: TextSize, offset: i32| { - TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default()) - }; let mut children = original.preorder_with_tokens(); // Collect replacement while let Some(child) = children.next() { - let WalkEvent::Enter(child) = child else { continue }; let call = match child { - syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) { + WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) { Some(it) => { children.skip_subtree(); it } - None => continue, + _ => continue, }, - syntax::NodeOrToken::Token(t) => { - mapping.insert( - TextRange::new( - apply_offset(t.text_range().start(), offset), - apply_offset(t.text_range().end(), offset), - ), - t.text_range(), - ); + WalkEvent::Enter(_) => continue, + WalkEvent::Leave(child) => { + if let SyntaxElement::Token(t) = child { + let start = t.text_range().start(); + offset += t.text_range().len(); + expanded_map.push(offset, span_map.span_at(start)); + } continue; } }; - let def = match call.path().and_then(|path| ModPath::from_src(db,
path, hygiene)) { + + let def = match call + .path() + .and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map))) + { Some(path) => match macro_resolver(path.clone()) { Some(def) => def, None => { error = Some(ExpandError::other(format!("unresolved macro {}", path.display(db)))); + offset += call.syntax().text_range().len(); continue; } }, None => { error = Some(ExpandError::other("malformed macro invocation")); + offset += call.syntax().text_range().len(); continue; } }; @@ -207,29 +179,22 @@ fn eager_macro_recur( krate, curr.with_value(call.clone()), def, + call_site, macro_resolver, ); match value { Some(call_id) => { - let ExpandResult { value, err: err2 } = + let ExpandResult { value: (parse, map), err: err2 } = db.parse_macro_expansion(call_id.as_macro_file()); - if let Some(tt) = call.token_tree() { - let call_tt_start = tt.syntax().text_range().start(); - let call_start = - apply_offset(call.syntax().text_range().start(), offset); - if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() { - mapping.extend(arg_map.entries().filter_map(|(tid, range)| { - value - .1 - .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE) - .map(|r| (r + call_start, range + call_tt_start)) - })); - } - } + map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span)); + let syntax_node = parse.syntax_node(); ExpandResult { - value: Some(value.0.syntax_node().clone_for_update()), + value: Some(( + syntax_node.clone_for_update(), + offset + syntax_node.text_range().len(), + )), err: err.or(err2), } } @@ -242,45 +207,23 @@ fn eager_macro_recur( | MacroDefKind::BuiltInDerive(..) | MacroDefKind::ProcMacro(..) => { let ExpandResult { value: (parse, tm), err } = - lazy_expand(db, &def, curr.with_value(call.clone()), krate); - let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind { - Some(db.decl_macro_expander(def.krate, ast_id)) - } else { - None - }; + lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site); // replace macro inside - let hygiene = Hygiene::new(db, parse.file_id); let ExpandResult { value, err: error } = eager_macro_recur( db, - &hygiene, + &tm, + expanded_map, + offset, // FIXME: We discard parse errors here parse.as_ref().map(|it| it.syntax_node()), krate, + call_site, macro_resolver, ); let err = err.or(error); - if let Some(tt) = call.token_tree() { - let call_tt_start = tt.syntax().text_range().start(); - let call_start = apply_offset(call.syntax().text_range().start(), offset); - if let Some((_tt, arg_map, _)) = parse - .file_id - .macro_file() - .and_then(|id| db.macro_arg(id.macro_call_id).value) - .as_deref() - { - mapping.extend(arg_map.entries().filter_map(|(tid, range)| { - tm.first_range_by_token( - decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid), - syntax::SyntaxKind::TOMBSTONE, - ) - .map(|r| (r + call_start, range + call_tt_start)) - })); - } - } - // FIXME: Do we need to re-use _m here? 
- ExpandResult { value: value.map(|(n, _m)| n), err } + ExpandResult { value, err } } }; if err.is_some() { error = err; } // check if the whole original syntax is replaced if call.syntax() == &original { - return ExpandResult { value: value.zip(Some(mapping)), err: error }; + return ExpandResult { value, err: error }; } - if let Some(insert) = value { - offset += u32::from(insert.text_range().len()) as i32 - - u32::from(call.syntax().text_range().len()) as i32; - replacements.push((call, insert)); + match value { + Some((insert, new_offset)) => { + replacements.push((call, insert)); + offset = new_offset; + } + None => offset += call.syntax().text_range().len(), } } replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new)); - ExpandResult { value: Some((original, mapping)), err: error } + ExpandResult { value: Some((original, offset)), err: error } } diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs new file mode 100644 index 0000000000000..89f0685d5b679 --- /dev/null +++ b/crates/hir-expand/src/files.rs @@ -0,0 +1,375 @@ +//! Things to wrap other things in file ids. +use std::iter; + +use base_db::{ + span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}, + FileId, FileRange, +}; +use either::Either; +use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize}; + +use crate::{db, ExpansionInfo, MacroFileIdExt}; + +/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree. +/// +/// Typical usages are: +/// +/// * `InFile<SyntaxNode>` -- syntax node in a file +/// * `InFile<ast::FnDef>` -- ast node in a file +/// * `InFile<TextSize>` -- offset in a file +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct InFileWrapper<FileKind, T> { + pub file_id: FileKind, + pub value: T, +} +pub type InFile<T> = InFileWrapper<HirFileId, T>; +pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>; +pub type InRealFile<T> = InFileWrapper<FileId, T>; + +impl<FileKind, T> InFileWrapper<FileKind, T> { + pub fn new(file_id: FileKind, value: T) -> Self { + Self { file_id, value } + } + + pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> { + InFileWrapper::new(self.file_id, f(self.value)) + } +} + +impl<FileKind: Copy, T> InFileWrapper<FileKind, T> { + pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> { + InFileWrapper::new(self.file_id, value) + } + + pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> { + self.with_value(&self.value) + } +} + +impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> { + pub fn cloned(&self) -> InFileWrapper<FileKind, T> { + self.with_value(self.value.clone()) + } +} + +impl<T> From<InMacroFile<T>> for InFile<T> { + fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self { + InFile { file_id: file_id.into(), value } + } +} + +impl<T> From<InRealFile<T>> for InFile<T> { + fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self { + InFile { file_id: file_id.into(), value } + } +} + +// region:transpose impls + +impl<FileKind, T> InFileWrapper<FileKind, Option<T>> { + pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> { + Some(InFileWrapper::new(self.file_id, self.value?)) + } +} + +impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> { + pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> { + match self.value { + Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)), + Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)), + } + } +} + +// endregion:transpose impls + +trait FileIdToSyntax: Copy { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode; +} + +impl FileIdToSyntax for FileId { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse(self).syntax_node() + } +} +impl FileIdToSyntax for MacroFileId { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node() + } +} +impl FileIdToSyntax for HirFileId { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse_or_expand(self) + } +} + +#[allow(private_bounds)] +impl InFileWrapper { + pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + FileIdToSyntax::file_syntax(self.file_id, db) + } +} + +impl InFileWrapper { + pub fn syntax(&self) -> InFileWrapper { + self.with_value(self.value.syntax()) + } +} + +// region:specific impls + +impl InFile<&SyntaxNode> { + /// Skips the attributed item that caused the macro invocation we are climbing up + pub fn ancestors_with_macros_skip_attr_item( + self, + db: &dyn db::ExpandDatabase, + ) -> impl Iterator> + '_ { + let succ = move |node: &InFile| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => { + let macro_file_id = node.file_id.macro_file()?; + let parent_node = macro_file_id.call_node(db); + if macro_file_id.is_attr_macro(db) { + // macro call was an attributed item, skip it + // FIXME: does this fail if this is a direct expansion of another macro? + parent_node.map(|node| node.parent()).transpose() + } else { + Some(parent_node) + } + } + }; + iter::successors(succ(&self.cloned()), succ) + } + + /// Falls back to the macro call range if the node cannot be mapped up fully. + /// + /// For attributes and derives, this will point back to the attribute only. + /// For the entire item use [`InFile::original_file_range_full`]. + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, + HirFileIdRepr::MacroFile(mac_file) => { + if let Some((res, ctxt)) = + ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + { + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if ctxt.is_root() { + return res; + } + } + // Fall back to whole macro call. + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range(db) + } + } + } + + /// Falls back to the macro call range if the node cannot be mapped up fully. + pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, + HirFileIdRepr::MacroFile(mac_file) => { + if let Some((res, ctxt)) = + ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + { + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if ctxt.is_root() { + return res; + } + } + // Fall back to whole macro call. + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range_with_body(db) + } + } + } + + /// Attempts to map the syntax node back up its macro calls. 
+ pub fn original_file_range_opt( + self, + db: &dyn db::ExpandDatabase, + ) -> Option<(FileRange, SyntaxContextId)> { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT)) + } + HirFileIdRepr::MacroFile(mac_file) => { + ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + } + } + } + + pub fn original_syntax_node( + self, + db: &dyn db::ExpandDatabase, + ) -> Option> { + // This kind of upmapping can only be achieved in attribute expanded files, + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input + let file_id = match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + return Some(InRealFile { file_id, value: self.value.clone() }) + } + HirFileIdRepr::MacroFile(m) => m, + }; + if !file_id.is_attr_macro(db) { + return None; + } + + let (FileRange { file_id, range }, ctx) = + ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?; + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if !ctx.is_root() { + return None; + } + + let anc = db.parse(file_id).syntax_node().covering_element(range); + let kind = self.value.kind(); + // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? + let value = anc.ancestors().find(|it| it.kind() == kind)?; + Some(InRealFile::new(file_id, value)) + } +} + +impl InMacroFile { + pub fn upmap_once( + self, + db: &dyn db::ExpandDatabase, + ) -> InFile> { + self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range()) + } +} + +impl InFile { + /// Falls back to the macro call range if the node cannot be mapped up fully. + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, + HirFileIdRepr::MacroFile(mac_file) => { + let (range, ctxt) = ExpansionInfo::new(db, mac_file) + .span_for_offset(db, self.value.text_range().start()); + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if ctxt.is_root() { + return range; + } + + // Fall back to whole macro call. + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range(db) + } + } + } + + /// Attempts to map the syntax node back up its macro calls. + pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + Some(FileRange { file_id, range: self.value.text_range() }) + } + HirFileIdRepr::MacroFile(mac_file) => { + let (range, ctxt) = ExpansionInfo::new(db, mac_file) + .span_for_offset(db, self.value.text_range().start()); + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. 
+ if ctxt.is_root() { + Some(range) + } else { + None + } + } + } + } +} + +impl InMacroFile { + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) { + ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value) + } +} + +impl InFile { + pub fn original_node_file_range( + self, + db: &dyn db::ExpandDatabase, + ) -> (FileRange, SyntaxContextId) { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT) + } + HirFileIdRepr::MacroFile(mac_file) => { + match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) { + Some(it) => it, + None => { + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + (loc.kind.original_call_range(db), SyntaxContextId::ROOT) + } + } + } + } + } + + pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, + HirFileIdRepr::MacroFile(mac_file) => { + match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) { + Some((it, SyntaxContextId::ROOT)) => it, + _ => { + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range(db) + } + } + } + } + } + + pub fn original_node_file_range_opt( + self, + db: &dyn db::ExpandDatabase, + ) -> Option<(FileRange, SyntaxContextId)> { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)) + } + HirFileIdRepr::MacroFile(mac_file) => { + ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) + } + } + } +} + +impl InFile { + pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option> { + // This kind of upmapping can only be achieved in attribute expanded files, + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input + let file_id = match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + return Some(InRealFile { file_id, value: self.value }) + } + HirFileIdRepr::MacroFile(m) => m, + }; + if !file_id.is_attr_macro(db) { + return None; + } + + let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id) + .map_node_range_up(db, self.value.syntax().text_range())?; + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if !ctx.is_root() { + return None; + } + + // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? + let anc = db.parse(file_id).syntax_node().covering_element(range); + let value = anc.ancestors().find_map(N::cast)?; + Some(InRealFile::new(file_id, value)) + } +} diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index e6e8d8c029922..11775c531d4cd 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -1,111 +1,124 @@ //! To make attribute macros work reliably when typing, we need to take care to //! fix up syntax errors in the code we're passing to them. 
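// ---------------------------------------------------------------------------
// Editor's note (an illustrative sketch, not part of the patch): how the fixup
// round-trip is meant to be used after this change, based on the entry points
// this hunk introduces (`fixup_syntax`, `mbe::syntax_node_to_token_tree_modified`,
// `reverse_fixups`); the `span_map` value is assumed to be supplied by the caller.
//
// let parsed = syntax::SourceFile::parse("fn foo() { a. }");
// // `a.` is incomplete, so fixup appends a fake `__ra_fixup` identifier:
// let fixups = fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
// let mut tt = mbe::syntax_node_to_token_tree_modified(
//     &parsed.syntax_node(),
//     span_map.as_ref(),
//     fixups.append,
//     fixups.remove,
// );
// // ... hand `tt` to the macro expander, then strip the fake tokens again:
// reverse_fixups(&mut tt, &fixups.undo_info);
// ---------------------------------------------------------------------------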
-use std::mem; -use mbe::{SyntheticToken, SyntheticTokenId, TokenMap}; -use rustc_hash::FxHashMap; +use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData}, + FileId, +}; +use la_arena::RawIdx; +use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; use syntax::{ ast::{self, AstNode, HasLoopBody}, - match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, + match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, +}; +use triomphe::Arc; +use tt::Spacing; + +use crate::{ + span::SpanMapRef, + tt::{Ident, Leaf, Punct, Subtree}, }; -use tt::token_id::Subtree; /// The result of calculating fixes for a syntax node -- a bunch of changes /// (appending to and replacing nodes), the information that is needed to /// reverse those changes afterwards, and a token map. #[derive(Debug, Default)] pub(crate) struct SyntaxFixups { - pub(crate) append: FxHashMap>, - pub(crate) replace: FxHashMap>, + pub(crate) append: FxHashMap>, + pub(crate) remove: FxHashSet, pub(crate) undo_info: SyntaxFixupUndoInfo, - pub(crate) token_map: TokenMap, - pub(crate) next_id: u32, } /// This is the information needed to reverse the fixups. -#[derive(Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct SyntaxFixupUndoInfo { - original: Box<[Subtree]>, + // FIXME: ThinArc<[Subtree]> + original: Option>>, } -const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0); +impl SyntaxFixupUndoInfo { + pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None }; +} -pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { +// censoring -> just don't convert the node +// replacement -> censor + append +// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how +// to remove later + +pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups { let mut append = FxHashMap::::default(); - let mut replace = FxHashMap::::default(); + let mut remove = FxHashSet::::default(); let mut preorder = node.preorder(); let mut original = Vec::new(); - let mut token_map = TokenMap::default(); - let mut next_id = 0; + let dummy_range = TextRange::empty(TextSize::new(0)); + // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as + // the index into the replacement vec but only if the end points to !0 + let dummy_anchor = SpanAnchor { + file_id: FileId::from_raw(!0), + ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)), + }; + let fake_span = |range| SpanData { + range: dummy_range, + anchor: dummy_anchor, + ctx: span_map.span_for_range(range).ctx, + }; while let Some(event) = preorder.next() { - let node = match event { - syntax::WalkEvent::Enter(node) => node, - syntax::WalkEvent::Leave(_) => continue, - }; + let syntax::WalkEvent::Enter(node) = event else { continue }; + let node_range = node.text_range(); if can_handle_error(&node) && has_error_to_handle(&node) { + remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid - let (original_tree, new_tmap, new_next_id) = - mbe::syntax_node_to_token_tree_with_modifications( - &node, - mem::take(&mut token_map), - next_id, - Default::default(), - Default::default(), - ); - token_map = new_tmap; - next_id = new_next_id; + let original_tree = mbe::syntax_node_to_token_tree(&node, span_map); let idx = original.len() as u32; original.push(original_tree); - let replacement = SyntheticToken { - kind: SyntaxKind::IDENT, + let replacement = 
Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: node.text_range(), - id: SyntheticTokenId(idx), - }; - replace.insert(node.clone().into(), vec![replacement]); + span: SpanData { + range: TextRange::new(TextSize::new(idx), TextSize::new(!0)), + anchor: dummy_anchor, + ctx: span_map.span_for_range(node_range).ctx, + }, + }); + append.insert(node.clone().into(), vec![replacement]); preorder.skip_subtree(); continue; } + // In some other situations, we can fix things by just appending some tokens. - let end_range = TextRange::empty(node.text_range().end()); match_ast! { match node { ast::FieldExpr(it) => { if it.name_ref().is_none() { // incomplete field access: some_expr.| append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span(node_range), + }), ]); } }, ast::ExprStmt(it) => { if it.semicolon_token().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::SEMICOLON, - text: ";".into(), - range: end_range, - id: EMPTY_ID, - }, + Leaf::Punct(Punct { + char: ';', + spacing: Spacing::Alone, + span: fake_span(node_range), + }), ]); } }, ast::LetStmt(it) => { if it.semicolon_token().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::SEMICOLON, - text: ";".into(), - range: end_range, - id: EMPTY_ID, - }, + Leaf::Punct(Punct { + char: ';', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), ]); } }, @@ -117,28 +130,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue, }; append.insert(if_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span(node_range) + }), ]); } if it.then_branch().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), ]); } }, @@ -150,46 +160,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue, }; append.insert(while_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span(node_range) + }), ]); } if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), ]); } }, ast::LoopExpr(it) => { if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), ]); } }, @@ -201,29 +207,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue }; append.insert(match_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID - }, + span: fake_span(node_range) + }), ]); } if it.match_arm_list().is_none() { // No match arms append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), ]); } }, @@ -234,10 +237,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { }; let [pat, in_token, iter] = [ - (SyntaxKind::UNDERSCORE, "_"), - (SyntaxKind::IN_KW, "in"), - (SyntaxKind::IDENT, "__ra_fixup") - ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID}); + "_", + "in", + "__ra_fixup" + ].map(|text| + Leaf::Ident(Ident { + text: text.into(), + span: fake_span(node_range) + }), + ); if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { append.insert(for_token.into(), vec![pat, in_token, iter]); @@ -248,18 +256,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span(node_range) + }), ]); } }, @@ -267,12 +274,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { } } } + let needs_fixups = !append.is_empty() || !original.is_empty(); SyntaxFixups { append, - replace, - token_map, - next_id, - undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() }, + remove, + undo_info: SyntaxFixupUndoInfo { + original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())), + }, } } @@ -288,30 +296,32 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool { has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c)) } -pub(crate) fn reverse_fixups( - tt: &mut Subtree, - token_map: &TokenMap, - undo_info: &SyntaxFixupUndoInfo, -) { +pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) { + let Some(undo_info) = undo_info.original.as_deref() else { return }; + let undo_info = &**undo_info; + reverse_fixups_(tt, undo_info); +} + +fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { let tts = std::mem::take(&mut tt.token_trees); tt.token_trees = tts .into_iter() + // delete all fake nodes .filter(|tt| match tt { tt::TokenTree::Leaf(leaf) => { - token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID) - } - tt::TokenTree::Subtree(st) => { - token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID) + let span = leaf.span(); + span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0) } + tt::TokenTree::Subtree(_) => true, }) .flat_map(|tt| match tt { tt::TokenTree::Subtree(mut tt) => { - reverse_fixups(&mut tt, token_map, undo_info); + reverse_fixups_(&mut tt, undo_info); SmallVec::from_const([tt.into()]) } tt::TokenTree::Leaf(leaf) => { - if let Some(id) = token_map.synthetic_token_id(*leaf.span()) { - let original = undo_info.original[id.0 as usize].clone(); + if leaf.span().anchor.file_id == FileId::from_raw(!0) { + let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone(); if original.delimiter.kind == tt::DelimiterKind::Invisible { original.token_trees.into() } else { @@ -327,11 +337,15 @@ pub(crate) fn reverse_fixups( #[cfg(test)] mod tests { + use base_db::FileId; use expect_test::{expect, Expect}; + use triomphe::Arc; - use crate::tt; - - use super::reverse_fixups; + use crate::{ + fixup::reverse_fixups, + span::{RealSpanMap, SpanMap}, + tt, + }; // The following three functions are only meant to check partial structural equivalence of // `TokenTree`s, see the last assertion in `check()`. 
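// Editor's note (an illustrative sketch, not part of the patch): "partial
// structural equivalence" here means comparing token trees while ignoring
// everything the fixup pass is allowed to change -- spans and `Punct` spacing.
// A hypothetical leaf comparison in that spirit (the real helpers are in the
// surrounding file and are unchanged by this diff):
//
// fn leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool {
//     match (a, b) {
//         // idents and literals must agree on their text; spans are ignored
//         (tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.text == b.text,
//         (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text == b.text,
//         // puncts must agree on the character; spacing may differ
//         (tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char,
//         _ => false,
//     }
// }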
@@ -361,13 +375,13 @@ mod tests { #[track_caller] fn check(ra_fixture: &str, mut expect: Expect) { let parsed = syntax::SourceFile::parse(ra_fixture); - let fixups = super::fixup_syntax(&parsed.syntax_node()); - let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); + let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node()); + let mut tt = mbe::syntax_node_to_token_tree_modified( &parsed.syntax_node(), - fixups.token_map, - fixups.next_id, - fixups.replace, + span_map.as_ref(), fixups.append, + fixups.remove, ); let actual = format!("{tt}\n"); @@ -383,14 +397,15 @@ mod tests { parse.syntax_node() ); - reverse_fixups(&mut tt, &tmap, &fixups.undo_info); + reverse_fixups(&mut tt, &fixups.undo_info); // the fixed-up + reversed version should be equivalent to the original input // modulo token IDs and `Punct`s' spacing. - let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node()); + let original_as_tt = + mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref()); assert!( check_subtree_eq(&tt, &original_as_tt), - "different token tree: {tt:?},\n{original_as_tt:?}" + "different token tree:\n{tt:?}\n\n{original_as_tt:?}" ); } @@ -403,7 +418,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {for _ in __ra_fixup {}} +fn foo () {for _ in __ra_fixup { }} "#]], ) } @@ -431,7 +446,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {for bar in qux {}} +fn foo () {for bar in qux { }} "#]], ) } @@ -462,7 +477,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {match __ra_fixup {}} +fn foo () {match __ra_fixup { }} "#]], ) } @@ -494,7 +509,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {match __ra_fixup {}} +fn foo () {match __ra_fixup { }} "#]], ) } @@ -609,7 +624,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {if a {}} +fn foo () {if a { }} "#]], ) } @@ -623,7 +638,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {if __ra_fixup {}} +fn foo () {if __ra_fixup { }} "#]], ) } @@ -637,7 +652,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {if __ra_fixup {} {}} +fn foo () {if __ra_fixup {} { }} "#]], ) } @@ -651,7 +666,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {while __ra_fixup {}} +fn foo () {while __ra_fixup { }} "#]], ) } @@ -665,7 +680,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {while foo {}} +fn foo () {while foo { }} "#]], ) } @@ -692,7 +707,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {loop {}} +fn foo () {loop { }} "#]], ) } diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index ca65db1136ce1..7b03709aced00 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -2,252 +2,247 @@ //! //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! this moment, this is horribly incomplete and handles only `$crate`. 
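// Editor's note (an illustrative sketch, not part of the patch): the mark
// chains that replace the old `Hygiene` type below. Each expansion adds one
// `(MacroCallId, Transparency)` mark to a syntax context, and `marks` walks
// the chain back to the root; `call_a`/`call_b` are hypothetical
// `MacroCallId`s and `db` a `dyn ExpandDatabase`.
//
// let ctx1 = apply_mark(db, SyntaxContextId::ROOT, call_a, Transparency::SemiTransparent);
// let ctx2 = apply_mark(db, ctx1, call_b, Transparency::Opaque);
// assert_eq!(
//     ctx2.marks(db),
//     vec![
//         (Some(call_a), Transparency::SemiTransparent),
//         (Some(call_b), Transparency::Opaque),
//     ]
// );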
-use base_db::CrateId; -use db::TokenExpander; -use either::Either; -use mbe::Origin; -use syntax::{ - ast::{self, HasDocComments}, - AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, -}; -use triomphe::Arc; - -use crate::{ - db::{self, ExpandDatabase}, - fixup, - name::{AsName, Name}, - HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, -}; - -#[derive(Clone, Debug)] -pub struct Hygiene { - frames: Option, +use std::iter; + +use base_db::span::{MacroCallId, SpanData, SyntaxContextId}; + +use crate::db::ExpandDatabase; + +#[derive(Copy, Clone, Hash, PartialEq, Eq)] +pub struct SyntaxContextData { + pub outer_expn: Option, + pub outer_transparency: Transparency, + pub parent: SyntaxContextId, + /// This context, but with all transparent and semi-transparent expansions filtered away. + pub opaque: SyntaxContextId, + /// This context, but with all transparent expansions filtered away. + pub opaque_and_semitransparent: SyntaxContextId, } -impl Hygiene { - pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene { - Hygiene { frames: Some(HygieneFrames::new(db, file_id)) } +impl std::fmt::Debug for SyntaxContextData { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SyntaxContextData") + .field("outer_expn", &self.outer_expn) + .field("outer_transparency", &self.outer_transparency) + .field("parent", &self.parent) + .field("opaque", &self.opaque) + .field("opaque_and_semitransparent", &self.opaque_and_semitransparent) + .finish() } +} - pub fn new_unhygienic() -> Hygiene { - Hygiene { frames: None } +impl SyntaxContextData { + pub fn root() -> Self { + SyntaxContextData { + outer_expn: None, + outer_transparency: Transparency::Opaque, + parent: SyntaxContextId::ROOT, + opaque: SyntaxContextId::ROOT, + opaque_and_semitransparent: SyntaxContextId::ROOT, + } } - // FIXME: this should just return name - pub fn name_ref_to_name( - &self, + pub fn fancy_debug( + self, + self_id: SyntaxContextId, db: &dyn ExpandDatabase, - name_ref: ast::NameRef, - ) -> Either { - if let Some(frames) = &self.frames { - if name_ref.text() == "$crate" { - if let Some(krate) = frames.root_crate(db, name_ref.syntax()) { - return Either::Right(krate); - } + f: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?; + match self.outer_expn { + Some(id) => { + write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)? } + None => write!(f, "root")?, } - - Either::Left(name_ref.as_name()) + write!(f, ", {:?})", self.outer_transparency) } +} - pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option { - let mut token = path.syntax().first_token()?.text_range(); - let frames = self.frames.as_ref()?; - let mut current = &frames.0; - - loop { - let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?; - if origin == Origin::Def { - return if current.local_inner { - frames.root_crate(db, path.syntax()) - } else { - None - }; - } - current = current.call_site.as_ref()?; - token = mapped.value; - } - } +/// A property of a macro expansion that determines how identifiers +/// produced by that expansion are resolved. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)] +pub enum Transparency { + /// Identifier produced by a transparent expansion is always resolved at call-site. + /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this. 
+ Transparent, + /// Identifier produced by a semi-transparent expansion may be resolved + /// either at call-site or at definition-site. + /// If it's a local variable, label or `$crate` then it's resolved at def-site. + /// Otherwise it's resolved at call-site. + /// `macro_rules` macros behave like this, built-in macros currently behave like this too, + /// but that's an implementation detail. + SemiTransparent, + /// Identifier produced by an opaque expansion is always resolved at definition-site. + /// Def-site spans in procedural macros, identifiers from `macro` by default use this. + Opaque, } -#[derive(Clone, Debug)] -struct HygieneFrames(Arc); +pub fn span_with_def_site_ctxt( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, +) -> SpanData { + span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque) +} -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct HygieneFrame { - expansion: Option, +pub fn span_with_call_site_ctxt( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, +) -> SpanData { + span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent) +} - // Indicate this is a local inner macro - local_inner: bool, - krate: Option, +pub fn span_with_mixed_site_ctxt( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, +) -> SpanData { + span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent) +} - call_site: Option>, - def_site: Option>, +fn span_with_ctxt_from_mark( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, + transparency: Transparency, +) -> SpanData { + SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span } } -impl HygieneFrames { - fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self { - // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory - // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work. - HygieneFrames(Arc::new(HygieneFrame::new(db, file_id))) +pub(super) fn apply_mark( + db: &dyn ExpandDatabase, + ctxt: SyntaxContextId, + call_id: MacroCallId, + transparency: Transparency, +) -> SyntaxContextId { + if transparency == Transparency::Opaque { + return apply_mark_internal(db, ctxt, Some(call_id), transparency); } - fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option { - let mut token = node.first_token()?.text_range(); - let mut result = self.0.krate; - let mut current = self.0.clone(); - - while let Some((mapped, origin)) = - current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token)) - { - result = current.krate; - - let site = match origin { - Origin::Def => ¤t.def_site, - Origin::Call => ¤t.call_site, - }; + let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site; + let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { + call_site_ctxt.normalize_to_macros_2_0(db) + } else { + call_site_ctxt.normalize_to_macro_rules(db) + }; - let site = match site { - None => break, - Some(it) => it, - }; - - current = site.clone(); - token = mapped.value; - } + if call_site_ctxt.is_root() { + return apply_mark_internal(db, ctxt, Some(call_id), transparency); + } - result + // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a + // macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition. + // + // In this case, the tokens from the macros 1.0 definition inherit the hygiene + // at their invocation. 
That is, we pretend that the macros 1.0 definition + // was defined at its invocation (i.e., inside the macros 2.0 definition) + // so that the macros 2.0 definition remains hygienic. + // + // See the example at `test/ui/hygiene/legacy_interaction.rs`. + for (call_id, transparency) in ctxt.marks(db) { + call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency); } + apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency) } -#[derive(Debug, Clone, PartialEq, Eq)] -struct HygieneInfo { - file: MacroFile, - /// The start offset of the `macro_rules!` arguments or attribute input. - attr_input_or_mac_def_start: Option>, +fn apply_mark_internal( + db: &dyn ExpandDatabase, + ctxt: SyntaxContextId, + call_id: Option, + transparency: Transparency, +) -> SyntaxContextId { + let syntax_context_data = db.lookup_intern_syntax_context(ctxt); + let mut opaque = syntax_context_data.opaque; + let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent; + + if transparency >= Transparency::Opaque { + let parent = opaque; + let new_opaque = SyntaxContextId::SELF_REF; + // But we can't just grab the to be allocated ID either as that would not deduplicate + // things! + // So we need a new salsa store type here ... + opaque = db.intern_syntax_context(SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque: new_opaque, + opaque_and_semitransparent: new_opaque, + }); + } + + if transparency >= Transparency::SemiTransparent { + let parent = opaque_and_semitransparent; + let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF; + opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque, + opaque_and_semitransparent: new_opaque_and_semitransparent, + }); + } - macro_def: TokenExpander, - macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, - macro_arg_shift: mbe::Shift, - exp_map: Arc, + let parent = ctxt; + db.intern_syntax_context(SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque, + opaque_and_semitransparent, + }) +} +pub trait SyntaxContextExt { + fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self; + fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self; + fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self; + fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option, Transparency); + fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option, Transparency); + fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)>; } -impl HygieneInfo { - fn map_ident_up( - &self, - db: &dyn ExpandDatabase, - token: TextRange, - ) -> Option<(InFile, Origin)> { - let token_id = self.exp_map.token_by_range(token)?; - let (mut token_id, origin) = self.macro_def.map_id_up(token_id); - - let loc = db.lookup_intern_macro_call(self.file.macro_call_id); - - let (token_map, tt) = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) { - Some(unshifted) => { - token_id = unshifted; - (&attr_args.1, self.attr_input_or_mac_def_start?) 
- } - None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())), - }, - _ => match origin { - mbe::Origin::Call => { - (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())) - } - mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) { - (TokenExpander::DeclarativeMacro(expander), Some(tt)) => { - (&expander.def_site_token_map, *tt) - } - _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), - }, - }, - }; - - let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?; - Some((tt.with_value(range + tt.value), origin)) } } -fn make_hygiene_info( - db: &dyn ExpandDatabase, - macro_file: MacroFile, - loc: &MacroCallLoc, -) -> HygieneInfo { - let def = loc.def.ast_id().left().and_then(|id| { - let def_tt = match id.to_node(db) { - ast::Macro::MacroRules(mac) => mac.token_tree()?, - ast::Macro::MacroDef(mac) => mac.body()?, - }; - Some(InFile::new(id.file_id, def_tt)) - }); - let attr_input_or_mac_def = def.or_else(|| match loc.kind { - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - let tt = ast_id - .to_node(db) - .doc_comments_and_attrs() - .nth(invoc_attr_index.ast_index()) - .and_then(Either::left)? - .token_tree()?; - Some(InFile::new(ast_id.file_id, tt)) - } - _ => None, - }); - - let macro_def = db.macro_expander(loc.def); - let (_, exp_map) = db.parse_macro_expansion(macro_file).value; - let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - Arc::new(( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, - Default::default(), - Default::default(), - )) - }); - - HygieneInfo { - file: macro_file, - attr_input_or_mac_def_start: attr_input_or_mac_def - .map(|it| it.map(|tt| tt.syntax().text_range().start())), - macro_arg_shift: mbe::Shift::new(&macro_arg.0), - macro_arg, - macro_def, - exp_map, - } -} - -impl HygieneFrame { - pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame { - let (info, krate, local_inner) = match file_id.macro_file() { - None => (None, None, false), - Some(macro_file) => { - let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id())); - match loc.def.kind { - MacroDefKind::Declarative(_) => { - (info, Some(loc.def.krate), loc.def.local_inner) - } - MacroDefKind::BuiltIn(..)
=> (info, Some(loc.def.krate), false), - MacroDefKind::BuiltInAttr(..) => (info, None, false), - MacroDefKind::BuiltInDerive(..) => (info, None, false), - MacroDefKind::BuiltInEager(..) => (info, None, false), - MacroDefKind::ProcMacro(..) => (info, None, false), - } - } - }; - - let Some((info, calling_file)) = info else { - return HygieneFrame { - expansion: None, - local_inner, - krate, - call_site: None, - def_site: None, - }; - }; - - let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id)); - let call_site = Some(db.hygiene_frame(calling_file)); - - HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site } - } +// FIXME: Make this a SyntaxContextExt method once we have RPIT +pub fn marks_rev( + ctxt: SyntaxContextId, + db: &dyn ExpandDatabase, +) -> impl Iterator, Transparency)> + '_ { + iter::successors(Some(ctxt), move |&mark| { + Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT) + }) + .map(|ctx| ctx.outer_mark(db)) } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 4be55126b8621..74089593ac035 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -4,7 +4,7 @@ //! tree originates not from the text of some `FileId`, but from some macro //! expansion. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub mod db; pub mod ast_id_map; @@ -18,39 +18,58 @@ pub mod quote; pub mod eager; pub mod mod_path; pub mod attrs; +pub mod span; +pub mod files; mod fixup; -use mbe::TokenMap; -pub use mbe::{Origin, ValueResult}; - -use ::tt::token_id as tt; use triomphe::Arc; -use std::{fmt, hash::Hash, iter}; +use std::{fmt, hash::Hash}; use base_db::{ - impl_intern_key, - salsa::{self, InternId}, + span::{HirFileIdRepr, SpanData, SyntaxContextId}, CrateId, FileId, FileRange, ProcMacroKind, }; use either::Either; use syntax::{ - algo::{self, skip_trivia_token}, ast::{self, AstNode, HasDocComments}, - AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, + SyntaxNode, SyntaxToken, TextRange, TextSize, }; use crate::{ - ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId}, attrs::AttrId, builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::TokenExpander, + fixup::SyntaxFixupUndoInfo, mod_path::ModPath, proc_macro::ProcMacroExpander, + span::{ExpansionSpanMap, SpanMap}, }; +pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId}; +pub use crate::files::{InFile, InMacroFile, InRealFile}; + +pub use base_db::span::{HirFileId, MacroCallId, MacroFileId}; +pub use mbe::ValueResult; + +pub type DeclarativeMacro = ::mbe::DeclarativeMacro; + +pub mod tt { + pub use base_db::span::SpanData; + pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor}; + + pub type Delimiter = ::tt::Delimiter; + pub type DelimSpan = ::tt::DelimSpan; + pub type Subtree = ::tt::Subtree; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; + pub type TokenTree = ::tt::TokenTree; +} + pub type ExpandResult = ValueResult; #[derive(Debug, PartialEq, Eq, Clone, Hash)] @@ -59,6 +78,7 @@ pub enum ExpandError { Mbe(mbe::ExpandError), RecursionOverflowPoisoned, Other(Box>), + ProcMacroPanic(Box>), } impl ExpandError { @@ -81,56 +101,24 @@ impl fmt::Display for ExpandError { ExpandError::RecursionOverflowPoisoned => { f.write_str("overflow expanding 
the original macro") } + ExpandError::ProcMacroPanic(it) => { + f.write_str("proc-macro panicked: ")?; + f.write_str(it) + } ExpandError::Other(it) => f.write_str(it), } } } -/// Input to the analyzer is a set of files, where each file is identified by -/// `FileId` and contains source code. However, another source of source code in -/// Rust are macros: each macro can be thought of as producing a "temporary -/// file". To assign an id to such a file, we use the id of the macro call that -/// produced the file. So, a `HirFileId` is either a `FileId` (source code -/// written by user), or a `MacroCallId` (source code produced by macro). -/// -/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file -/// containing the call plus the offset of the macro call in the file. Note that -/// this is a recursive definition! However, the size_of of `HirFileId` is -/// finite (because everything bottoms out at the real `FileId`) and small -/// (`MacroCallId` uses the location interning. You can check details here: -/// ). -/// -/// The two variants are encoded in a single u32 which are differentiated by the MSB. -/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a -/// `MacroCallId`. -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(u32); - -impl fmt::Debug for HirFileId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.repr().fmt(f) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroFile { - pub macro_call_id: MacroCallId, -} - -/// `MacroCallId` identifies a particular macro invocation, like -/// `println!("Hello, {}", world)`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroCallId(salsa::InternId); -impl_intern_key!(MacroCallId); - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroCallLoc { pub def: MacroDefId, - pub(crate) krate: CrateId, + pub krate: CrateId, /// Some if this is a macro call for an eager macro. Note that this is `None` /// for the eager input macro file. eager: Option>, pub kind: MacroCallKind, + pub call_site: SyntaxContextId, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -139,6 +127,7 @@ pub struct MacroDefId { pub kind: MacroDefKind, pub local_inner: bool, pub allow_internal_unsafe: bool, + // pub def_site: SyntaxContextId, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -152,9 +141,9 @@ pub enum MacroDefKind { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -struct EagerCallInfo { +pub struct EagerCallInfo { /// The expanded argument of the eager macro. - arg: Arc<(tt::Subtree, TokenMap)>, + arg: Arc, /// Call id of the eager macro's input file (this is the macro file for its fully expanded input). arg_id: MacroCallId, error: Option, @@ -178,7 +167,7 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - attr_args: Arc<(tt::Subtree, mbe::TokenMap)>, + attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. /// /// Outer attributes are counted first, then inner attributes. This does not support @@ -187,76 +176,68 @@ pub enum MacroCallKind { }, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -enum HirFileIdRepr { - FileId(FileId), - MacroFile(MacroFile), -} +pub trait HirFileIdExt { + /// Returns the original file of this macro call hierarchy. 
+ fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; -impl From for HirFileId { - fn from(FileId(id): FileId) -> Self { - assert!(id < Self::MAX_FILE_ID); - HirFileId(id) - } -} + /// Returns the original file of this macro call hierarchy while going into the included file if + /// one of the calls comes from an `include!``. + fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId; -impl From for HirFileId { - fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { - let id = id.as_u32(); - assert!(id < Self::MAX_FILE_ID); - HirFileId(id | Self::MACRO_FILE_TAG_MASK) - } -} + /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. + fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option>; + + /// Return expansion information if it is a macro-expansion file + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option; -impl HirFileId { - const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; - const MACRO_FILE_TAG_MASK: u32 = 1 << 31; + fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase) + -> Option>; +} - /// For macro-expansion files, returns the file original source file the - /// expansion originated from. - pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { +impl HirFileIdExt for HirFileId { + fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { let mut file_id = self; loop { match file_id.repr() { HirFileIdRepr::FileId(id) => break id, - HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id); - let is_include_expansion = loc.def.is_include() && loc.eager.is_some(); - file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) { - Some(Ok((_, file))) => file.into(), - _ => loc.kind.file_id(), - } + HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { + file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id(); } } } } - pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { - let mut level = 0; - let mut curr = self; - while let Some(macro_file) = curr.macro_file() { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - - level += 1; - curr = loc.kind.file_id(); + fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId { + loop { + match self.repr() { + base_db::span::HirFileIdRepr::FileId(id) => break id, + base_db::span::HirFileIdRepr::MacroFile(file) => { + let loc = db.lookup_intern_macro_call(file.macro_call_id); + if loc.def.is_include() { + if let Some(eager) = &loc.eager { + if let Ok(it) = builtin_fn_macro::include_input_to_file_id( + db, + file.macro_call_id, + &eager.arg, + ) { + break it; + } + } + } + self = loc.kind.file_id(); + } + } } - level } - /// If this is a macro call, returns the syntax node of the call. - pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option> { - let macro_file = self.macro_file()?; - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - Some(loc.to_node(db)) - } - - /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. 
- pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> { + fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); loop { match call.file_id.repr() { - HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)), - HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { + HirFileIdRepr::FileId(file_id) => { + break Some(InRealFile { file_id, value: call.value }) + } + HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { call = db.lookup_intern_macro_call(macro_call_id).to_node(db); } } @@ -264,12 +245,11 @@ impl HirFileId { } /// Return expansion information if it is a macro-expansion file - pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { - let macro_file = self.macro_file()?; - ExpansionInfo::new(db, macro_file) + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { + Some(ExpansionInfo::new(db, self.macro_file()?)) } - pub fn as_builtin_derive_attr_node( + fn as_builtin_derive_attr_node( &self, db: &dyn db::ExpandDatabase, ) -> Option> { @@ -281,104 +261,84 @@ impl HirFileId { }; Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } +} - pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - matches!( - db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind, - MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) - ) - } - None => false, - } - } +pub trait MacroFileIdExt { + fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; + /// If this is a macro call, returns the syntax node of the call. + fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile; - pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - matches!( - db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind, - MacroDefKind::BuiltInDerive(..) - ) - } - None => false, - } - } + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo; + + fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool; /// Return whether this file is an include macro - pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include() - } - _ => false, - } + fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + + fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool; + /// Return whether this file is an attr macro + fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + + /// Return whether this file is the pseudo expansion of the derive attribute. + /// See [`crate::builtin_attr_macro::derive_attr_expand`]. 
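// Migration sketch for callers (illustrative; the decl_check change later in
// this series follows the same shape): predicates that used to live on
// `HirFileId` are now reached via `HirFileId::macro_file` first:
//     matches!(file_id.macro_file(), Some(it) if it.is_custom_derive(db))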
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool; +} + +impl MacroFileIdExt for MacroFileId { + fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile { + db.lookup_intern_macro_call(self.macro_call_id).to_node(db) } + fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { + let mut level = 0; + let mut macro_file = self; + loop { + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) - } - _ => false, + level += 1; + macro_file = match loc.kind.file_id().repr() { + HirFileIdRepr::FileId(_) => break level, + HirFileIdRepr::MacroFile(it) => it, + }; } } - /// Return whether this file is an attr macro - pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - matches!(loc.kind, MacroCallKind::Attr { .. }) - } - _ => false, - } + /// Return expansion information if it is a macro-expansion file + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo { + ExpansionInfo::new(db, self) } - /// Return whether this file is the pseudo expansion of the derive attribute. - /// See [`crate::builtin_attr_macro::derive_attr_expand`]. - pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - loc.def.is_attribute_derive() - } - None => false, - } + fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + matches!( + db.lookup_intern_macro_call(self.macro_call_id).def.kind, + MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) + ) } - #[inline] - pub fn is_macro(self) -> bool { - self.0 & Self::MACRO_FILE_TAG_MASK != 0 + fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + matches!( + db.lookup_intern_macro_call(self.macro_call_id).def.kind, + MacroDefKind::BuiltInDerive(..) + ) } - #[inline] - pub fn macro_file(self) -> Option { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => None, - _ => Some(MacroFile { - macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), - }), - } + fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + db.lookup_intern_macro_call(self.macro_call_id).def.is_include() } - #[inline] - pub fn file_id(self) -> Option { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => Some(FileId(self.0)), - _ => None, - } + fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { + let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); + matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) } - fn repr(self) -> HirFileIdRepr { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => HirFileIdRepr::FileId(FileId(self.0)), - _ => HirFileIdRepr::MacroFile(MacroFile { - macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), - }), - } + fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); + matches!(loc.kind, MacroCallKind::Attr { .. 
}) + } + + fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { + let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); + loc.def.is_attribute_derive() } } @@ -388,20 +348,35 @@ impl MacroDefId { db: &dyn db::ExpandDatabase, krate: CrateId, kind: MacroCallKind, + call_site: SyntaxContextId, ) -> MacroCallId { - db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind }) + db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site }) + } + + pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile { + match self.kind { + MacroDefKind::Declarative(id) + | MacroDefKind::BuiltIn(_, id) + | MacroDefKind::BuiltInAttr(_, id) + | MacroDefKind::BuiltInDerive(_, id) + | MacroDefKind::BuiltInEager(_, id) => { + id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range()) + } + MacroDefKind::ProcMacro(_, _, id) => { + id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range()) + } + } } pub fn ast_id(&self) -> Either, AstId> { - let id = match self.kind { + match self.kind { MacroDefKind::ProcMacro(.., id) => return Either::Right(id), MacroDefKind::Declarative(id) | MacroDefKind::BuiltIn(_, id) | MacroDefKind::BuiltInAttr(_, id) | MacroDefKind::BuiltInDerive(_, id) - | MacroDefKind::BuiltInEager(_, id) => id, - }; - Either::Left(id) + | MacroDefKind::BuiltInEager(_, id) => Either::Left(id), + } } pub fn is_proc_macro(&self) -> bool { @@ -443,6 +418,18 @@ impl MacroDefId { } impl MacroCallLoc { + pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData { + let ast_id = self.kind.erased_ast_id(); + let file_id = self.kind.file_id(); + let range = db.ast_id_map(file_id).get_erased(ast_id).text_range(); + match file_id.repr() { + HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range), + HirFileIdRepr::MacroFile(m) => { + db.parse_macro_expansion(m).value.1.span_at(range.start()) + } + } + } + pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile { match self.kind { MacroCallKind::FnLike { ast_id, .. } => { @@ -483,20 +470,26 @@ impl MacroCallLoc { match self.kind { MacroCallKind::FnLike { expand_to, .. } => expand_to, MacroCallKind::Derive { .. } => ExpandTo::Items, - MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Statements, + MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Items, MacroCallKind::Attr { .. } => { - // is this always correct? + // FIXME(stmt_expr_attributes) ExpandTo::Items } } } } -// FIXME: attribute indices do not account for nested `cfg_attr` - impl MacroCallKind { + fn descr(&self) -> &'static str { + match self { + MacroCallKind::FnLike { .. } => "macro call", + MacroCallKind::Derive { .. } => "derive macro", + MacroCallKind::Attr { .. } => "attribute macro", + } + } + /// Returns the file containing the macro invocation. - fn file_id(&self) -> HirFileId { + pub fn file_id(&self) -> HirFileId { match *self { MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. } | MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. } @@ -504,6 +497,14 @@ impl MacroCallKind { } } + fn erased_ast_id(&self) -> ErasedFileAstId { + match *self { + MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(), + MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(), + MacroCallKind::Attr { ast_id: InFile { value, .. }, .. } => value.erase(), + } + } + /// Returns the original file range that best describes the location of this macro call. 
/// /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives. @@ -571,219 +572,156 @@ impl MacroCallKind { FileRange { range, file_id } } - fn arg(&self, db: &dyn db::ExpandDatabase) -> Option> { + fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile> { match self { - MacroCallKind::FnLike { ast_id, .. } => ast_id - .to_in_file_node(db) - .map(|it| Some(it.token_tree()?.syntax().clone())) - .transpose(), + MacroCallKind::FnLike { ast_id, .. } => { + ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone())) + } MacroCallKind::Derive { ast_id, .. } => { - Some(ast_id.to_in_file_node(db).syntax().cloned()) + ast_id.to_in_file_node(db).syntax().cloned().map(Some) } MacroCallKind::Attr { ast_id, .. } => { - Some(ast_id.to_in_file_node(db).syntax().cloned()) + ast_id.to_in_file_node(db).syntax().cloned().map(Some) } } } } -impl MacroCallId { - pub fn as_file(self) -> HirFileId { - MacroFile { macro_call_id: self }.into() - } - - pub fn as_macro_file(self) -> MacroFile { - MacroFile { macro_call_id: self } - } -} - /// ExpansionInfo mainly describes how to map text range between src and expanded macro +// FIXME: can be expensive to create, we should check the use sites and maybe replace them with +// simpler function calls if the map is only used once #[derive(Debug, Clone, PartialEq, Eq)] pub struct ExpansionInfo { - expanded: InMacroFile, + pub expanded: InMacroFile, /// The argument TokenTree or item for attributes - arg: InFile, + arg: InFile>, /// The `macro_rules!` or attribute input. attr_input_or_mac_def: Option>, macro_def: TokenExpander, - macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, - /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg - /// and as such we need to shift tokens if they are part of an attributes input instead of their item. - macro_arg_shift: mbe::Shift, - exp_map: Arc, + macro_arg: Arc, + pub exp_map: Arc, + arg_map: SpanMap, } impl ExpansionInfo { - pub fn expanded(&self) -> InFile { - self.expanded.clone().into() + pub fn expanded(&self) -> InMacroFile { + self.expanded.clone() } pub fn call_node(&self) -> Option> { - Some(self.arg.with_value(self.arg.value.parent()?)) + Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?)) } - /// Map a token down from macro input into the macro expansion. - /// - /// The inner workings of this function differ slightly depending on the type of macro we are dealing with: - /// - declarative: - /// For declarative macros, we need to accommodate for the macro definition site(which acts as a second unchanging input) - /// , as tokens can mapped in and out of it. - /// To do this we shift all ids in the expansion by the maximum id of the definition site giving us an easy - /// way to map all the tokens. - /// - attribute: - /// Attributes have two different inputs, the input tokentree in the attribute node and the item - /// the attribute is annotating. Similarly as for declarative macros we need to do a shift here - /// as well. Currently this is done by shifting the attribute input by the maximum id of the item. - /// - function-like and derives: - /// Both of these only have one simple call site input so no special handling is required here. 
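// With spans carrying their anchor and syntax context directly, the shifting
// machinery described above disappears and down-mapping becomes a single
// span-map query. A hedged sketch; the helper name and setup are
// illustrative, only `map_range_down` below is real:
fn tokens_in_expansion<'a>(
    info: &'a ExpansionInfo,
    call_site_span: SpanData,
) -> Option<impl Iterator<Item = SyntaxToken> + 'a> {
    // Every expanded range carrying `call_site_span` yields its covering token.
    Some(info.map_range_down(call_site_span)?.value)
}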
- pub fn map_token_down( - &self, - db: &dyn db::ExpandDatabase, - item: Option, - token: InFile<&SyntaxToken>, - // FIXME: use this for range mapping, so that we can resolve inline format args - _relative_token_offset: Option, - ) -> Option> + '_> { - assert_eq!(token.file_id, self.arg.file_id); - let token_id_in_attr_input = if let Some(item) = item { - // check if we are mapping down in an attribute input - // this is a special case as attributes can have two inputs - let call_id = self.expanded.file_id.macro_call_id; - let loc = db.lookup_intern_macro_call(call_id); - - let token_range = token.value.text_range(); - match &loc.kind { - MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => { - // FIXME: handle `cfg_attr` - let attr = item - .doc_comments_and_attrs() - .nth(invoc_attr_index.ast_index()) - .and_then(Either::left)?; - match attr.token_tree() { - Some(token_tree) - if token_tree.syntax().text_range().contains_range(token_range) => - { - let attr_input_start = - token_tree.left_delimiter_token()?.text_range().start(); - let relative_range = - token.value.text_range().checked_sub(attr_input_start)?; - // shift by the item's tree's max id - let token_id = attr_args.1.token_by_range(relative_range)?; - - let token_id = if loc.def.is_attribute_derive() { - // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens - token_id - } else { - self.macro_arg_shift.shift(token_id) - }; - Some(token_id) - } - _ => None, - } - } - _ => None, - } - } else { - None - }; - - let token_id = match token_id_in_attr_input { - Some(token_id) => token_id, - // the token is not inside `an attribute's input so do the lookup in the macro_arg as usual - None => { - let relative_range = - token.value.text_range().checked_sub(self.arg.value.text_range().start())?; - let token_id = self.macro_arg.1.token_by_range(relative_range)?; - // conditionally shift the id by a declarative macro definition - self.macro_def.map_id_down(token_id) - } - }; - + /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. + pub fn map_range_down<'a>( + &'a self, + span: SpanData, + ) -> Option + 'a>> { let tokens = self .exp_map - .ranges_by_token(token_id, token.value.kind()) + .ranges_with_span(span) .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); - Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token))) + Some(InMacroFile::new(self.expanded.file_id, tokens)) } - /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion. - pub fn map_token_up( + /// Looks up the span at the given offset. 
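// (Spans store ranges relative to an anchor AST item, so the inverse mapping
// below is `anchor_start + relative_range`: a span `5..9` anchored at an item
// starting at offset 120 denotes `125..129` in the anchoring file;
// illustrative numbers.)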
+ pub fn span_for_offset( &self, db: &dyn db::ExpandDatabase, - token: InFile<&SyntaxToken>, - ) -> Option<(InFile<SyntaxToken>, Origin)> { - assert_eq!(token.file_id, self.expanded.file_id.into()); - // Fetch the id through its text range, - let token_id = self.exp_map.token_by_range(token.value.text_range())?; - // conditionally unshifting the id to accommodate for macro-rules def site - let (mut token_id, origin) = self.macro_def.map_id_up(token_id); - - let call_id = self.expanded.file_id.macro_call_id; - let loc = db.lookup_intern_macro_call(call_id); - - // Special case: map tokens from `include!` expansions to the included file - if loc.def.is_include() { - if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) { - let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?; - let source = db.parse(file_id); - - let token = source.syntax_node().covering_element(range).into_token()?; - - return Some((InFile::new(file_id.into(), token), Origin::Call)); + offset: TextSize, + ) -> (FileRange, SyntaxContextId) { + debug_assert!(self.expanded.value.text_range().contains(offset)); + let span = self.exp_map.span_at(offset); + let anchor_offset = db + .ast_id_map(span.anchor.file_id.into()) + .get_erased(span.anchor.ast_id) + .text_range() + .start(); + (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx) + } + + /// Maps up the text range out of the expansion hierarchy back into the original file it's from. + pub fn map_node_range_up( + &self, + db: &dyn db::ExpandDatabase, + range: TextRange, + ) -> Option<(FileRange, SyntaxContextId)> { + debug_assert!(self.expanded.value.text_range().contains_range(range)); + let mut spans = self.exp_map.spans_for_range(range); + let SpanData { range, anchor, ctx } = spans.next()?; + let mut start = range.start(); + let mut end = range.end(); + + for span in spans { + if span.anchor != anchor || span.ctx != ctx { + return None; } + start = start.min(span.range.start()); + end = end.max(span.range.end()); } - - // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item. - let (token_map, tt) = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } => { - if loc.def.is_attribute_derive() { - (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) - } else { - // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input - // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this - match self.macro_arg_shift.unshift(token_id) { - Some(unshifted) => { - token_id = unshifted; - (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) - } - None => (&self.macro_arg.1, self.arg.clone()), - } - } - } - _ => match origin { - mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), - mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) { - (TokenExpander::DeclarativeMacro(expander), Some(tt)) => { - (&expander.def_site_token_map, tt.syntax().cloned()) - } - _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), - }, + let anchor_offset = + db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); + Some(( + FileRange { + file_id: anchor.file_id, + range: TextRange::new(start, end) + anchor_offset, }, - }; + ctx, + )) + } - let range = token_map.first_range_by_token(token_id, token.value.kind())?; - let token = - tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; - Some((tt.with_value(token), origin)) + /// Maps up the text range out of the expansion into its macro call. + pub fn map_range_up_once( + &self, + db: &dyn db::ExpandDatabase, + token: TextRange, + ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> { + debug_assert!(self.expanded.value.text_range().contains_range(token)); + let span = self.exp_map.span_at(token.start()); + match &self.arg_map { + SpanMap::RealSpanMap(_) => { + let file_id = span.anchor.file_id.into(); + let anchor_offset = + db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start(); + InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] } + } + SpanMap::ExpansionSpanMap(arg_map) => { + let arg_range = self + .arg + .value + .as_ref() + .map_or_else(|| TextRange::empty(TextSize::from(0)), |it| it.text_range()); + InFile::new( + self.arg.file_id, + arg_map + .ranges_with_span(span) + .filter(|range| range.intersect(arg_range).is_some()) + .collect(), + ) + } + } } - fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> { + pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let arg_tt = loc.kind.arg(db)?; + let arg_tt = loc.kind.arg(db); + let arg_map = db.span_map(arg_tt.file_id); let macro_def = db.macro_expander(loc.def); let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; - let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - Arc::new(( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, - Default::default(), - Default::default(), - )) + let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { + ( + Arc::new(tt::Subtree { + delimiter: tt::Delimiter::DUMMY_INVISIBLE, + token_trees: Vec::new(), + }), + SyntaxFixupUndoInfo::NONE, + ) }); let def = loc.def.ast_id().left().and_then(|id| { @@ -810,331 +748,18 @@ impl ExpansionInfo { _ => None, }); - Some(ExpansionInfo { + ExpansionInfo { expanded, arg: arg_tt, attr_input_or_mac_def, - macro_arg_shift: mbe::Shift::new(&macro_arg.0), macro_arg,
macro_def, exp_map, - }) - } -} - -/// `AstId` points to an AST node in any file. -/// -/// It is stable across reparses, and can be used as salsa key/value. -pub type AstId = InFile>; - -impl AstId { - pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { - self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) - } - pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile { - InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) - } - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr { - db.ast_id_map(self.file_id).get(self.value) - } -} - -pub type ErasedAstId = InFile; - -impl ErasedAstId { - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { - db.ast_id_map(self.file_id).get_raw(self.value) - } -} - -/// `InFile` stores a value of `T` inside a particular file/syntax tree. -/// -/// Typical usages are: -/// -/// * `InFile` -- syntax node in a file -/// * `InFile` -- ast node in a file -/// * `InFile` -- offset in a file -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -pub struct InFile { - pub file_id: HirFileId, - pub value: T, -} - -impl InFile { - pub fn new(file_id: HirFileId, value: T) -> InFile { - InFile { file_id, value } - } - - pub fn with_value(&self, value: U) -> InFile { - InFile::new(self.file_id, value) - } - - pub fn map U, U>(self, f: F) -> InFile { - InFile::new(self.file_id, f(self.value)) - } - - pub fn as_ref(&self) -> InFile<&T> { - self.with_value(&self.value) - } - - pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { - db.parse_or_expand(self.file_id) - } -} - -impl InFile<&T> { - pub fn cloned(&self) -> InFile { - self.with_value(self.value.clone()) - } -} - -impl InFile> { - pub fn transpose(self) -> Option> { - let value = self.value?; - Some(InFile::new(self.file_id, value)) - } -} - -impl InFile> { - pub fn transpose(self) -> Either, InFile> { - match self.value { - Either::Left(l) => Either::Left(InFile::new(self.file_id, l)), - Either::Right(r) => Either::Right(InFile::new(self.file_id, r)), + arg_map, } } } -impl InFile<&SyntaxNode> { - pub fn ancestors_with_macros( - self, - db: &dyn db::ExpandDatabase, - ) -> impl Iterator> + Clone + '_ { - iter::successors(Some(self.cloned()), move |node| match node.value.parent() { - Some(parent) => Some(node.with_value(parent)), - None => node.file_id.call_node(db), - }) - } - - /// Skips the attributed item that caused the macro invocation we are climbing up - pub fn ancestors_with_macros_skip_attr_item( - self, - db: &dyn db::ExpandDatabase, - ) -> impl Iterator> + '_ { - let succ = move |node: &InFile| match node.value.parent() { - Some(parent) => Some(node.with_value(parent)), - None => { - let parent_node = node.file_id.call_node(db)?; - if node.file_id.is_attr_macro(db) { - // macro call was an attributed item, skip it - // FIXME: does this fail if this is a direct expansion of another macro? - parent_node.map(|node| node.parent()).transpose() - } else { - Some(parent_node) - } - } - }; - iter::successors(succ(&self.cloned()), succ) - } - - /// Falls back to the macro call range if the node cannot be mapped up fully. - /// - /// For attributes and derives, this will point back to the attribute only. - /// For the entire item use [`InFile::original_file_range_full`]. 
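// Usage sketch (illustrative): wrapping a node once keeps all the mapping
// helpers at hand, e.g.
//     let range = InFile::new(file_id, &node).original_file_range(db);
// which maps up as far as possible and otherwise falls back as documented below.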
- pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some(res) = self.original_file_range_opt(db) { - return res; - } - // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range(db) - } - } - } - - /// Falls back to the macro call range if the node cannot be mapped up fully. - pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some(res) = self.original_file_range_opt(db) { - return res; - } - // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range_with_body(db) - } - } - } - - /// Attempts to map the syntax node back up its macro calls. - pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { - match ascend_node_border_tokens(db, self) { - Some(InFile { file_id, value: (first, last) }) => { - let original_file = file_id.original_file(db); - let range = first.text_range().cover(last.text_range()); - if file_id != original_file.into() { - tracing::error!("Failed mapping up more for {:?}", range); - return None; - } - Some(FileRange { file_id: original_file, range }) - } - _ if !self.file_id.is_macro() => Some(FileRange { - file_id: self.file_id.original_file(db), - range: self.value.text_range(), - }), - _ => None, - } - } - - pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option> { - // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefore can't find an `N` node in the input - if !self.file_id.is_macro() { - return Some(self.map(Clone::clone)); - } else if !self.file_id.is_attr_macro(db) { - return None; - } - - if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self) - { - if file_id.is_macro() { - let range = first.text_range().cover(last.text_range()); - tracing::error!("Failed mapping out of macro file for {:?}", range); - return None; - } - // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes - let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?; - let kind = self.value.kind(); - let value = anc.ancestors().find(|it| it.kind() == kind)?; - return Some(InFile::new(file_id, value)); - } - None - } -} - -impl InFile { - pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option> { - let expansion = self.file_id.expansion_info(db)?; - expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it) - } - - /// Falls back to the macro call range if the node cannot be mapped up fully. - pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some(res) = self.original_file_range_opt(db) { - return res; - } - // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range(db) - } - } - } - - /// Attempts to map the syntax node back up its macro calls. 
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - Some(FileRange { file_id, range: self.value.text_range() }) - } - HirFileIdRepr::MacroFile(_) => { - let expansion = self.file_id.expansion_info(db)?; - let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?; - let original_file = file_id.original_file(db); - if file_id != original_file.into() { - return None; - } - Some(FileRange { file_id: original_file, range: value.text_range() }) - } - } - } -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -pub struct InMacroFile { - pub file_id: MacroFile, - pub value: T, -} - -impl From> for InFile { - fn from(macro_file: InMacroFile) -> Self { - InFile { file_id: macro_file.file_id.into(), value: macro_file.value } - } -} - -fn ascend_node_border_tokens( - db: &dyn db::ExpandDatabase, - InFile { file_id, value: node }: InFile<&SyntaxNode>, -) -> Option> { - let expansion = file_id.expansion_info(db)?; - - let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next); - let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev); - - // FIXME: Once the token map rewrite is done, this shouldnt need to rely on syntax nodes and tokens anymore - let first = first_token(node)?; - let last = last_token(node)?; - let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?; - let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?; - (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value))) -} - -fn ascend_call_token( - db: &dyn db::ExpandDatabase, - expansion: &ExpansionInfo, - token: InFile, -) -> Option> { - let mut mapping = expansion.map_token_up(db, token.as_ref())?; - while let (mapped, Origin::Call) = mapping { - match mapped.file_id.expansion_info(db) { - Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?, - None => return Some(mapped), - } - } - None -} - -impl InFile { - pub fn descendants(self) -> impl Iterator> { - self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n)) - } - - // FIXME: this should return `Option>` - pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option> { - // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefore can't find an `N` node in the input - if !self.file_id.is_macro() { - return Some(self); - } else if !self.file_id.is_attr_macro(db) { - return None; - } - - if let Some(InFile { file_id, value: (first, last) }) = - ascend_node_border_tokens(db, self.syntax()) - { - if file_id.is_macro() { - let range = first.text_range().cover(last.text_range()); - tracing::error!("Failed mapping out of macro file for {:?}", range); - return None; - } - // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes - let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?; - let value = anc.ancestors().find_map(N::cast)?; - return Some(InFile::new(file_id, value)); - } - None - } - - pub fn syntax(&self) -> InFile<&SyntaxNode> { - self.with_value(self.value.syntax()) - } -} - /// In Rust, macros expand token trees to token trees. When we want to turn a /// token tree into an AST node, we need to figure out what kind of AST node we /// want: something like `foo` can be a type, an expression, or a pattern. 
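The `ExpandTo` kind recorded on each fn-like macro call is what answers that question. A hedged sketch of the dispatch a consumer of this crate might perform; the `parse_as_*` helpers are illustrative stand-ins, not APIs defined here:

fn parse_expansion(expand_to: ExpandTo, tt: &tt::Subtree) -> SyntaxNode {
    match expand_to {
        // Each arm hands the token tree to a parser entry point matching the
        // syntactic position of the macro call.
        ExpandTo::Statements => parse_as_statements(tt),
        ExpandTo::Items => parse_as_items(tt),
        ExpandTo::Pattern => parse_as_pattern(tt),
        ExpandTo::Type => parse_as_type(tt),
        ExpandTo::Expr => parse_as_expr(tt),
    }
}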
@@ -1199,9 +824,4 @@ impl ExpandTo { } } -#[derive(Debug)] -pub struct UnresolvedMacro { - pub path: ModPath, -} - intern::impl_internable!(ModPath, attrs::AttrInput); diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index 69aa09c4a5212..9534b5039f682 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -7,11 +7,11 @@ use std::{ use crate::{ db::ExpandDatabase, - hygiene::Hygiene, - name::{known, Name}, + hygiene::{marks_rev, SyntaxContextExt, Transparency}, + name::{known, AsName, Name}, + span::SpanMapRef, }; -use base_db::CrateId; -use either::Either; +use base_db::{span::SyntaxContextId, CrateId}; use smallvec::SmallVec; use syntax::{ast, AstNode}; @@ -38,6 +38,7 @@ pub enum PathKind { Crate, /// Absolute path (::foo) Abs, + // FIXME: Remove this /// `$crate` from macro expansion DollarCrate(CrateId), } @@ -46,9 +47,9 @@ impl ModPath { pub fn from_src( db: &dyn ExpandDatabase, path: ast::Path, - hygiene: &Hygiene, + span_map: SpanMapRef<'_>, ) -> Option { - convert_path(db, None, path, hygiene) + convert_path(db, None, path, span_map) } pub fn from_segments(kind: PathKind, segments: impl IntoIterator) -> ModPath { @@ -193,33 +194,36 @@ fn convert_path( db: &dyn ExpandDatabase, prefix: Option, path: ast::Path, - hygiene: &Hygiene, + span_map: SpanMapRef<'_>, ) -> Option { let prefix = match path.qualifier() { - Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?), + Some(qual) => Some(convert_path(db, prefix, qual, span_map)?), None => prefix, }; let segment = path.segment()?; let mut mod_path = match segment.kind()? { ast::PathSegmentKind::Name(name_ref) => { - match hygiene.name_ref_to_name(db, name_ref) { - Either::Left(name) => { - // no type args in use - let mut res = prefix.unwrap_or_else(|| { - ModPath::from_kind( - segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs), - ) - }); - res.segments.push(name); - res - } - Either::Right(crate_id) => { - return Some(ModPath::from_segments( - PathKind::DollarCrate(crate_id), - iter::empty(), - )) + if name_ref.text() == "$crate" { + if prefix.is_some() { + return None; } + ModPath::from_kind( + resolve_crate_root( + db, + span_map.span_for_range(name_ref.syntax().text_range()).ctx, + ) + .map(PathKind::DollarCrate) + .unwrap_or(PathKind::Crate), + ) + } else { + let mut res = prefix.unwrap_or_else(|| { + ModPath::from_kind( + segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs), + ) + }); + res.segments.push(name_ref.as_name()); + res } } ast::PathSegmentKind::SelfTypeKw => { @@ -261,8 +265,14 @@ fn convert_path( // We follow what it did anyway :) if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - if let Some(crate_id) = hygiene.local_inner_macros(db, path) { - mod_path.kind = PathKind::DollarCrate(crate_id); + let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx; + if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn { + if db.lookup_intern_macro_call(macro_call_id).def.local_inner { + mod_path.kind = match resolve_crate_root(db, syn_ctx) { + Some(crate_root) => PathKind::DollarCrate(crate_root), + None => PathKind::Crate, + } + } } } } @@ -270,6 +280,29 @@ fn convert_path( Some(mod_path) } +pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option { + // When resolving `$crate` from a `macro_rules!` invoked in a `macro`, + // we don't want to 
pretend that the `macro_rules!` definition is in the `macro` + // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks. + // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!` + // definitions actually produced by `macro` and `macro` definitions produced by + // `macro_rules!`, but at least such configurations are not stable yet. + ctxt = ctxt.normalize_to_macro_rules(db); + let mut iter = marks_rev(ctxt, db).peekable(); + let mut result_mark = None; + // Find the last opaque mark from the end if it exists. + while let Some(&(mark, Transparency::Opaque)) = iter.peek() { + result_mark = Some(mark); + iter.next(); + } + // Then find the last semi-transparent mark from the end if it exists. + while let Some((mark, Transparency::SemiTransparent)) = iter.next() { + result_mark = Some(mark); + } + + result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate) +} + pub use crate::name as __name; #[macro_export] diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index a876f48bda4e6..a321f94cd7553 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -470,6 +470,7 @@ pub mod known { pub const SELF_TYPE: super::Name = super::Name::new_inline("Self"); pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static"); + pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate"); #[macro_export] macro_rules! name { diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 41675c630dcfe..de577796831fd 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -1,6 +1,6 @@ //! Proc Macro Expander stub -use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; +use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; use stdx::never; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; @@ -33,11 +33,15 @@ impl ProcMacroExpander { calling_crate: CrateId, tt: &tt::Subtree, attr_arg: Option<&tt::Subtree>, + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, ) -> ExpandResult { match self.proc_macro_id { - ProcMacroId(DUMMY_ID) => { - ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate)) - } + ProcMacroId(DUMMY_ID) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::UnresolvedProcMacro(def_crate), + ), ProcMacroId(id) => { let proc_macros = db.proc_macros(); let proc_macros = match proc_macros.get(&def_crate) { @@ -45,7 +49,7 @@ impl ProcMacroExpander { Some(Err(_)) | None => { never!("Non-dummy expander even though there are no proc macros"); return ExpandResult::new( - tt::Subtree::empty(), + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::other("Internal error"), ); } @@ -59,7 +63,7 @@ impl ProcMacroExpander { id ); return ExpandResult::new( - tt::Subtree::empty(), + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::other("Internal error"), ); } @@ -68,7 +72,8 @@ impl ProcMacroExpander { let krate_graph = db.crate_graph(); // Proc macros have access to the environment variables of the invoking crate. 
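// Note: every expansion request now threads three spans (def_site, call_site,
// mixed_site), mirroring rustc's hygiene model; as the error paths above and
// below show, failures produce an empty subtree delimited at call_site.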
let env = &krate_graph[calling_crate].env; - match proc_macro.expander.expand(tt, attr_arg, env) { + match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site) + { Ok(t) => ExpandResult::ok(t), Err(err) => match err { // Don't discard the item in case something unexpected happened while expanding attributes @@ -78,9 +83,10 @@ impl ProcMacroExpander { ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) } } ProcMacroExpansionError::System(text) - | ProcMacroExpansionError::Panic(text) => { - ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text)) - } + | ProcMacroExpansionError::Panic(text) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::ProcMacroPanic(Box::new(text.into_boxed_str())), + ), }, } } diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index ab3809abc7a26..acbde26c8ddf0 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -1,5 +1,7 @@ //! A simplified version of quote-crate like quasi quote macro +use base_db::span::SpanData; + // A helper macro quote macro // FIXME: // 1. Not all puncts are handled @@ -8,109 +10,109 @@ #[doc(hidden)] #[macro_export] macro_rules! __quote { - () => { + ($span:ident) => { Vec::::new() }; - ( @SUBTREE $delim:ident $($tt:tt)* ) => { + ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => { { - let children = $crate::__quote!($($tt)*); + let children = $crate::__quote!($span $($tt)*); crate::tt::Subtree { delimiter: crate::tt::Delimiter { kind: crate::tt::DelimiterKind::$delim, - open: crate::tt::TokenId::unspecified(), - close: crate::tt::TokenId::unspecified(), + open: $span, + close: $span, }, token_trees: $crate::quote::IntoTt::to_tokens(children), } } }; - ( @PUNCT $first:literal ) => { + ( @PUNCT($span:ident) $first:literal ) => { { vec![ crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Alone, - span: crate::tt::TokenId::unspecified(), + span: $span, }).into() ] } }; - ( @PUNCT $first:literal, $sec:literal ) => { + ( @PUNCT($span:ident) $first:literal, $sec:literal ) => { { vec![ crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Joint, - span: crate::tt::TokenId::unspecified(), + span: $span, }).into(), crate::tt::Leaf::Punct(crate::tt::Punct { char: $sec, spacing: crate::tt::Spacing::Alone, - span: crate::tt::TokenId::unspecified(), + span: $span, }).into() ] } }; // hash variable - ( # $first:ident $($tail:tt)* ) => { + ($span:ident # $first:ident $($tail:tt)* ) => { { - let token = $crate::quote::ToTokenTree::to_token($first); + let token = $crate::quote::ToTokenTree::to_token($first, $span); let mut tokens = vec![token.into()]; - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*)); + let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens } }; - ( ## $first:ident $($tail:tt)* ) => { + ($span:ident ## $first:ident $($tail:tt)* ) => { { - let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::>(); - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*)); + let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::>(); + let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens } }; // Brace - ( { $($tt:tt)* } ) => { 
$crate::__quote!(@SUBTREE Brace $($tt)*) }; + ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) }; // Bracket - ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) }; + ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) }; // Parenthesis - ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) }; + ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) }; // Literal - ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] }; + ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] }; // Ident - ( $tt:ident ) => { + ($span:ident $tt:ident ) => { vec![ { crate::tt::Leaf::Ident(crate::tt::Ident { text: stringify!($tt).into(), - span: crate::tt::TokenId::unspecified(), + span: $span, }).into() }] }; // Puncts // FIXME: Not all puncts are handled - ( -> ) => {$crate::__quote!(@PUNCT '-', '>')}; - ( & ) => {$crate::__quote!(@PUNCT '&')}; - ( , ) => {$crate::__quote!(@PUNCT ',')}; - ( : ) => {$crate::__quote!(@PUNCT ':')}; - ( ; ) => {$crate::__quote!(@PUNCT ';')}; - ( :: ) => {$crate::__quote!(@PUNCT ':', ':')}; - ( . ) => {$crate::__quote!(@PUNCT '.')}; - ( < ) => {$crate::__quote!(@PUNCT '<')}; - ( > ) => {$crate::__quote!(@PUNCT '>')}; - ( ! ) => {$crate::__quote!(@PUNCT '!')}; - - ( $first:tt $($tail:tt)+ ) => { + ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')}; + ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')}; + ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')}; + ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')}; + ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')}; + ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')}; + ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')}; + ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')}; + ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')}; + ($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')}; + + ($span:ident $first:tt $($tail:tt)+ ) => { { - let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first)); - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*)); + let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first )); + let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens @@ -122,19 +124,22 @@ macro_rules! __quote { /// It probably should implement in proc-macro #[macro_export] macro_rules! 
quote { - ( $($tt:tt)* ) => { - $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*)) + ($span:ident=> $($tt:tt)* ) => { + $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span) } } pub(crate) trait IntoTt { - fn to_subtree(self) -> crate::tt::Subtree; + fn to_subtree(self, span: SpanData) -> crate::tt::Subtree; fn to_tokens(self) -> Vec; } impl IntoTt for Vec { - fn to_subtree(self) -> crate::tt::Subtree { - crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self } + fn to_subtree(self, span: SpanData) -> crate::tt::Subtree { + crate::tt::Subtree { + delimiter: crate::tt::Delimiter::invisible_spanned(span), + token_trees: self, + } } fn to_tokens(self) -> Vec { @@ -143,7 +148,7 @@ impl IntoTt for Vec { } impl IntoTt for crate::tt::Subtree { - fn to_subtree(self) -> crate::tt::Subtree { + fn to_subtree(self, _: SpanData) -> crate::tt::Subtree { self } @@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree { } pub(crate) trait ToTokenTree { - fn to_token(self) -> crate::tt::TokenTree; + fn to_token(self, span: SpanData) -> crate::tt::TokenTree; } impl ToTokenTree for crate::tt::TokenTree { - fn to_token(self) -> crate::tt::TokenTree { + fn to_token(self, _: SpanData) -> crate::tt::TokenTree { self } } impl ToTokenTree for &crate::tt::TokenTree { - fn to_token(self) -> crate::tt::TokenTree { + fn to_token(self, _: SpanData) -> crate::tt::TokenTree { self.clone() } } impl ToTokenTree for crate::tt::Subtree { - fn to_token(self) -> crate::tt::TokenTree { + fn to_token(self, _: SpanData) -> crate::tt::TokenTree { self.into() } } macro_rules! impl_to_to_tokentrees { - ($($ty:ty => $this:ident $im:block);*) => { + ($($span:ident: $ty:ty => $this:ident $im:block);*) => { $( impl ToTokenTree for $ty { - fn to_token($this) -> crate::tt::TokenTree { + fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.into(); leaf.into() } } impl ToTokenTree for &$ty { - fn to_token($this) -> crate::tt::TokenTree { + fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.clone().into(); leaf.into() } @@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees { } impl_to_to_tokentrees! 
{ - u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - crate::tt::Leaf => self { self }; - crate::tt::Literal => self { self }; - crate::tt::Ident => self { self }; - crate::tt::Punct => self { self }; - &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}; - String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}} + span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} }; + span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} }; + span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} }; + span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} }; + _span: crate::tt::Leaf => self { self }; + _span: crate::tt::Literal => self { self }; + _span: crate::tt::Ident => self { self }; + _span: crate::tt::Punct => self { self }; + span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}; + span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}} } #[cfg(test)] mod tests { + use crate::tt; + use base_db::{ + span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + FileId, + }; + use expect_test::expect; + use syntax::{TextRange, TextSize}; + + const DUMMY: tt::SpanData = tt::SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, + ctx: SyntaxContextId::ROOT, + }; + #[test] fn test_quote_delimiters() { - assert_eq!(quote!({}).to_string(), "{}"); - assert_eq!(quote!(()).to_string(), "()"); - assert_eq!(quote!([]).to_string(), "[]"); + assert_eq!(quote!(DUMMY =>{}).to_string(), "{}"); + assert_eq!(quote!(DUMMY =>()).to_string(), "()"); + assert_eq!(quote!(DUMMY =>[]).to_string(), "[]"); } #[test] fn test_quote_idents() { - assert_eq!(quote!(32).to_string(), "32"); - assert_eq!(quote!(struct).to_string(), "struct"); + assert_eq!(quote!(DUMMY =>32).to_string(), "32"); + assert_eq!(quote!(DUMMY =>struct).to_string(), "struct"); } #[test] fn test_quote_hash_simple_literal() { let a = 20; - assert_eq!(quote!(#a).to_string(), "20"); + assert_eq!(quote!(DUMMY =>#a).to_string(), "20"); let s: String = "hello".into(); - assert_eq!(quote!(#s).to_string(), "\"hello\""); + assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\""); } fn mk_ident(name: &str) -> crate::tt::Ident { - crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() } + crate::tt::Ident { text: name.into(), span: DUMMY } } #[test] fn test_quote_hash_token_tree() { let a = mk_ident("hello"); - let quoted = quote!(#a); + let quoted = quote!(DUMMY =>#a); assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); - assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295"); + expect![[r#" + SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } + IDENT hello SpanData { 
range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t); } #[test] fn test_quote_simple_derive_copy() { let name = mk_ident("Foo"); - let quoted = quote! { + let quoted = quote! {DUMMY => impl Clone for #name { fn clone(&self) -> Self { Self {} @@ -268,18 +289,19 @@ mod tests { // } let struct_name = mk_ident("Foo"); let fields = [mk_ident("name"), mk_ident("id")]; - let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees); + let fields = + fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees); let list = crate::tt::Subtree { delimiter: crate::tt::Delimiter { kind: crate::tt::DelimiterKind::Brace, - open: crate::tt::TokenId::unspecified(), - close: crate::tt::TokenId::unspecified(), + open: DUMMY, + close: DUMMY, }, token_trees: fields.collect(), }; - let quoted = quote! { + let quoted = quote! {DUMMY => impl Clone for #struct_name { fn clone(&self) -> Self { Self #list diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs new file mode 100644 index 0000000000000..0a6c22fe42dc4 --- /dev/null +++ b/crates/hir-expand/src/span.rs @@ -0,0 +1,111 @@ +//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well +//! as associating spans with text ranges in a particular file. +use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + FileId, +}; +use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize}; +use triomphe::Arc; + +use crate::db::ExpandDatabase; + +pub type ExpansionSpanMap = mbe::SpanMap; + +/// Spanmap for a macro file or a real file +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum SpanMap { + /// Spanmap for a macro file + ExpansionSpanMap(Arc), + /// Spanmap for a real file + RealSpanMap(Arc), +} + +#[derive(Copy, Clone)] +pub enum SpanMapRef<'a> { + /// Spanmap for a macro file + ExpansionSpanMap(&'a ExpansionSpanMap), + /// Spanmap for a real file + RealSpanMap(&'a RealSpanMap), +} + +impl mbe::SpanMapper for SpanMap { + fn span_for(&self, range: TextRange) -> SpanData { + self.span_for_range(range) + } +} +impl mbe::SpanMapper for SpanMapRef<'_> { + fn span_for(&self, range: TextRange) -> SpanData { + self.span_for_range(range) + } +} +impl mbe::SpanMapper for RealSpanMap { + fn span_for(&self, range: TextRange) -> SpanData { + self.span_for_range(range) + } +} + +impl SpanMap { + pub fn span_for_range(&self, range: TextRange) -> SpanData { + match self { + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } + + pub fn as_ref(&self) -> SpanMapRef<'_> { + match self { + Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map), + Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), + } + } +} + +impl SpanMapRef<'_> { + pub fn span_for_range(self, range: TextRange) -> SpanData { + match self { + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } +} + +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct RealSpanMap { + file_id: FileId, + /// Invariant: Sorted vec over TextSize + // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? + pairs: Box<[(TextSize, ErasedFileAstId)]>, +} + +impl RealSpanMap { + /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id). 
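// Worked example with illustrative offsets: for a file whose two top-level
// items start at offsets 100 and 250, `from_file` below records
// [(0, ROOT), (100, item0), (250, item1)]; `span_for_range(260..270)` then
// binary-searches to the (250, item1) pair and yields the relative range
// 10..20 anchored at item1.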
+ pub fn absolute(file_id: FileId) -> Self { + RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) } + } + + pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self { + let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]; + let ast_id_map = db.ast_id_map(file_id.into()); + pairs.extend( + db.parse(file_id) + .tree() + .items() + .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())), + ); + RealSpanMap { file_id, pairs: pairs.into_boxed_slice() } + } + + pub fn span_for_range(&self, range: TextRange) -> SpanData { + let start = range.start(); + let idx = self + .pairs + .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less)) + .unwrap_err(); + let (offset, ast_id) = self.pairs[idx - 1]; + SpanData { + range: range - offset, + anchor: SpanAnchor { file_id: self.file_id, ast_id }, + ctx: SyntaxContextId::ROOT, + } + } +} diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 0348680e5da19..9792d945eb8f5 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -1,9 +1,10 @@ //! Constant evaluation details -use base_db::CrateId; +use base_db::{salsa::Cycle, CrateId}; use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex}; use hir_def::{ - hir::Expr, + body::Body, + hir::{Expr, ExprId}, path::Path, resolver::{Resolver, ValueNs}, type_ref::LiteralConstRef, @@ -136,7 +137,7 @@ pub fn intern_const_ref( ty: Ty, krate: CrateId, ) -> Const { - let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate))); + let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate)); let bytes = match value { LiteralConstRef::Int(i) => { // FIXME: We should handle failure of layout better. @@ -184,7 +185,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> { pub(crate) fn const_eval_recover( _: &dyn HirDatabase, - _: &[String], + _: &Cycle, _: &GeneralConstId, _: &Substitution, _: &Option<Arc<TraitEnvironment>>, ) -> Result<Const, ConstEvalError> { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } pub(crate) fn const_eval_static_recover( _: &dyn HirDatabase, - _: &[String], + _: &Cycle, _: &StaticId, ) -> Result<Const, ConstEvalError> { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } pub(crate) fn const_eval_discriminant_recover( _: &dyn HirDatabase, - _: &[String], + _: &Cycle, _: &EnumVariantId, ) -> Result<i128, ConstEvalError> { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } @@ -280,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant( // get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here // and make this function private. See the fixme comment on `InferenceContext::resolve_all`. pub(crate) fn eval_to_const( - expr: Idx<Expr>, + expr: ExprId, mode: ParamLoweringMode, ctx: &mut InferenceContext<'_>, args: impl FnOnce() -> Generics, ) -> Const { let db = ctx.db; let infer = ctx.clone().resolve_all(); + fn has_closure(body: &Body, expr: ExprId) -> bool { + if matches!(body[expr], Expr::Closure { .. }) { + return true; + } + let mut r = false; + body[expr].walk_child_exprs(|idx| r |= has_closure(body, idx)); + r + } + if has_closure(&ctx.body, expr) { + // Type checking closures needs an isolated body (see the above FIXME). Bail out early to prevent a panic.
+ return unknown_const(infer[expr].clone()); + } if let Expr::Path(p) = &ctx.body.exprs[expr] { let resolver = &ctx.resolver; if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) { return c; } } - let infer = ctx.clone().resolve_all(); if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) { if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 { return result; diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index f4c079b48c58c..51a044d8ef562 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs @@ -24,7 +24,7 @@ use hir_def::{ }; use hir_expand::{ name::{AsName, Name}, - HirFileId, + HirFileId, MacroFileIdExt, }; use stdx::{always, never}; use syntax::{ @@ -196,7 +196,7 @@ impl<'a> DeclValidator<'a> { AttrDefId::GenericParamId(_) => None, } .map_or(false, |file_id| { - file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()) + matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast())) }) }; diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 9ccf467358ece..d81926f7c9762 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -23,7 +23,7 @@ use hir_def::{ EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId, }; -use hir_expand::{hygiene::Hygiene, name::Name}; +use hir_expand::name::Name; use intern::{Internable, Interned}; use itertools::Itertools; use la_arena::ArenaMap; @@ -448,9 +448,8 @@ fn render_const_scalar( ) -> Result<(), HirDisplayError> { // FIXME: We need to get krate from the final callers of the hir display // infrastructure and have it here as a field on `f`. 
- let trait_env = Arc::new(TraitEnvironment::empty( - *f.db.crate_graph().crates_in_topological_order().last().unwrap(), - )); + let trait_env = + TraitEnvironment::empty(*f.db.crate_graph().crates_in_topological_order().last().unwrap()); match ty.kind(Interner) { TyKind::Scalar(s) => match s { Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }), @@ -1732,13 +1731,13 @@ impl HirDisplay for TypeRef { f.write_joined(bounds, " + ")?; } TypeRef::Macro(macro_call) => { - let macro_call = macro_call.to_node(f.db.upcast()); - let ctx = hir_def::lower::LowerCtx::with_hygiene( + let ctx = hir_def::lower::LowerCtx::with_span_map( f.db.upcast(), - &Hygiene::new_unhygienic(), + f.db.span_map(macro_call.file_id), ); + let macro_call = macro_call.to_node(f.db.upcast()); match macro_call.path() { - Some(path) => match Path::from_src(path, &ctx) { + Some(path) => match Path::from_src(&ctx, path) { Some(path) => path.hir_fmt(f)?, None => write!(f, "{{macro}}")?, }, diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 0c3c725a7c743..a5e77a12d8c50 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -18,7 +18,6 @@ use hir_def::{ use hir_expand::name::{name, Name}; use stdx::always; use syntax::ast::RangeOp; -use triomphe::Arc; use crate::{ autoderef::{builtin_deref, deref_by_trait, Autoderef}, @@ -40,7 +39,8 @@ use crate::{ traits::FnTrait, utils::{generics, Generics}, Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst, - Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, + Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, + TyKind, }; use super::{ @@ -579,7 +579,7 @@ impl InferenceContext<'_> { } ty } - Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name), + Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name, expected), Expr::Await { expr } => { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) @@ -1291,7 +1291,7 @@ impl InferenceContext<'_> { let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr); let prev_env = block_id.map(|block_id| { let prev_env = self.table.trait_env.clone(); - Arc::make_mut(&mut self.table.trait_env).block = Some(block_id); + TraitEnvironment::with_block(&mut self.table.trait_env, block_id); prev_env }); @@ -1456,7 +1456,13 @@ impl InferenceContext<'_> { }) } - fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty { + fn infer_field_access( + &mut self, + tgt_expr: ExprId, + receiver: ExprId, + name: &Name, + expected: &Expectation, + ) -> Ty { let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none()); if name.is_missing() { @@ -1482,28 +1488,42 @@ impl InferenceContext<'_> { ty } None => { - // no field found, - let method_with_same_name_exists = { - self.get_traits_in_scope(); - - let canonicalized_receiver = self.canonicalize(receiver_ty.clone()); - method_resolution::lookup_method( - self.db, - &canonicalized_receiver.value, - self.table.trait_env.clone(), - self.get_traits_in_scope().as_ref().left_or_else(|&it| it), - VisibleFromModule::Filter(self.resolver.module()), - name, - ) - .is_some() - }; + // no field found, lets attempt to resolve it like a function so that IDE things + // work out while people are typing + let canonicalized_receiver = 
self.canonicalize(receiver_ty.clone()); + let resolved = method_resolution::lookup_method( + self.db, + &canonicalized_receiver.value, + self.table.trait_env.clone(), + self.get_traits_in_scope().as_ref().left_or_else(|&it| it), + VisibleFromModule::Filter(self.resolver.module()), + name, + ); self.result.diagnostics.push(InferenceDiagnostic::UnresolvedField { expr: tgt_expr, - receiver: receiver_ty, + receiver: receiver_ty.clone(), name: name.clone(), - method_with_same_name_exists, + method_with_same_name_exists: resolved.is_some(), }); - self.err_ty() + match resolved { + Some((adjust, func, _)) => { + let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); + let generics = generics(self.db.upcast(), func.into()); + let substs = self.substs_for_method_call(generics, None); + self.write_expr_adj(receiver, adjustments); + self.write_method_resolution(tgt_expr, func, substs.clone()); + + self.check_method_call( + tgt_expr, + &[], + self.db.value_ty(func.into()), + substs, + ty, + expected, + ) + } + None => self.err_ty(), + } } } } @@ -1517,7 +1537,7 @@ impl InferenceContext<'_> { generic_args: Option<&GenericArgs>, expected: &Expectation, ) -> Ty { - let receiver_ty = self.infer_expr(receiver, &Expectation::none()); + let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none()); let canonicalized_receiver = self.canonicalize(receiver_ty.clone()); let resolved = method_resolution::lookup_method( @@ -1568,23 +1588,32 @@ impl InferenceContext<'_> { ) } }; + self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected) + } + + fn check_method_call( + &mut self, + tgt_expr: ExprId, + args: &[ExprId], + method_ty: Binders, + substs: Substitution, + receiver_ty: Ty, + expected: &Expectation, + ) -> Ty { let method_ty = method_ty.substitute(Interner, &substs); self.register_obligations_for_call(&method_ty); - let (formal_receiver_ty, param_tys, ret_ty, is_varargs) = + let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) = match method_ty.callable_sig(self.db) { - Some(sig) => { + Some(sig) => ( if !sig.params().is_empty() { - ( - sig.params()[0].clone(), - sig.params()[1..].to_vec(), - sig.ret().clone(), - sig.is_varargs, - ) + (sig.params()[0].clone(), sig.params()[1..].to_vec()) } else { - (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs) - } - } - None => (self.err_ty(), Vec::new(), self.err_ty(), true), + (self.err_ty(), Vec::new()) + }, + sig.ret().clone(), + sig.is_varargs, + ), + None => ((self.err_ty(), Vec::new()), self.err_ty(), true), }; self.unify(&formal_receiver_ty, &receiver_ty); diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index c6bbf2f614071..fcfe1a3b5cf45 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -390,6 +390,7 @@ impl InferenceContext<'_> { } } +#[derive(Debug)] enum ValuePathResolution { // It's awkward to wrap a single ID in two enums, but we need both and this saves fallible // conversion between them + `unwrap()`. 
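The `RealSpanMap::span_for_range` implementation added in `crates/hir-expand/src/span.rs` above leans on a small `binary_search_by` idiom: forcing every comparison to `Ordering::Less` means the search never reports an exact match, so `unwrap_err()` always returns the partition point, i.e. the index of the first pair whose offset is strictly greater than `range.start()`. The anchor entry is then `pairs[idx - 1]`, which is in bounds because both constructors seed the table with an entry at offset 0. A minimal self-contained sketch of the same idiom, with a hypothetical `(offset, name)` table standing in for the real `(TextSize, ErasedFileAstId)` pairs:

    use std::cmp::Ordering;

    // Map every probe to Less so binary_search_by always "fails";
    // unwrap_err() then yields the index of the first entry whose
    // offset is strictly greater than `start`.
    fn anchor_index(pairs: &[(u32, &str)], start: u32) -> usize {
        let idx = pairs
            .binary_search_by(|&(offset, _)| offset.cmp(&start).then(Ordering::Less))
            .unwrap_err();
        idx - 1 // in bounds: the table always contains an entry at offset 0
    }

    fn main() {
        let pairs = [(0, "root"), (10, "item_a"), (25, "item_b")];
        assert_eq!(anchor_index(&pairs, 0), 0); // at the root anchor
        assert_eq!(anchor_index(&pairs, 12), 1); // inside item_a
        assert_eq!(anchor_index(&pairs, 25), 2); // exactly at item_b's start
    }

Keeping the pairs in a sorted boxed slice makes each span lookup a logarithmic binary search over the file's top-level items rather than a linear scan.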
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 27c7949986878..bfc4f1383ec6a 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -2,6 +2,7 @@ use std::fmt; +use base_db::salsa::Cycle; use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy}; use hir_def::{ layout::{ @@ -431,7 +432,7 @@ pub fn layout_of_ty_query( pub fn layout_of_ty_recover( _: &dyn HirDatabase, - _: &[String], + _: &Cycle, _: &Ty, _: &Arc, ) -> Result, LayoutError> { diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs index 58a06dc643542..39788a9502993 100644 --- a/crates/hir-ty/src/layout/adt.rs +++ b/crates/hir-ty/src/layout/adt.rs @@ -2,6 +2,7 @@ use std::{cmp, ops::Bound}; +use base_db::salsa::Cycle; use hir_def::{ data::adt::VariantData, layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout}, @@ -140,7 +141,7 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, pub fn layout_of_adt_recover( _: &dyn HirDatabase, - _: &[String], + _: &Cycle, _: &AdtId, _: &Substitution, _: &Arc, diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index bcf7bfa0d2730..cf174feed24b8 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -1,6 +1,6 @@ //! The type system. We currently use this to infer types for completion, hover //! information and various assists. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[allow(unused)] @@ -73,8 +73,8 @@ pub use infer::{ }; pub use interner::Interner; pub use lower::{ - associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId, - TyLoweringContext, ValueTyDefId, + associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode, + TyDefId, TyLoweringContext, ValueTyDefId, }; pub use mapping::{ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, @@ -122,7 +122,7 @@ pub type TyKind = chalk_ir::TyKind; pub type TypeFlags = chalk_ir::TypeFlags; pub type DynTy = chalk_ir::DynTy; pub type FnPointer = chalk_ir::FnPointer; -// pub type FnSubst = chalk_ir::FnSubst; +// pub type FnSubst = chalk_ir::FnSubst; // a re-export so we don't lose the tuple constructor pub use chalk_ir::FnSubst; pub type ProjectionTy = chalk_ir::ProjectionTy; pub type AliasTy = chalk_ir::AliasTy; @@ -322,8 +322,7 @@ impl CallableSig { pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig { CallableSig { // FIXME: what to do about lifetime params? 
-> return PolyFnSig - // FIXME: use `Arc::from_iter` when it becomes available - params_and_return: Arc::from( + params_and_return: Arc::from_iter( fn_ptr .substitution .clone() @@ -332,8 +331,7 @@ impl CallableSig { .0 .as_slice(Interner) .iter() - .map(|arg| arg.assert_ty_ref(Interner).clone()) - .collect::>(), + .map(|arg| arg.assert_ty_ref(Interner).clone()), ), is_varargs: fn_ptr.sig.variadic, safety: fn_ptr.sig.safety, diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 9f5b59b239a4b..c86fe9adff866 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -10,7 +10,7 @@ use std::{ iter, }; -use base_db::CrateId; +use base_db::{salsa::Cycle, CrateId}; use chalk_ir::{ cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety, }; @@ -407,11 +407,7 @@ impl<'a> TyLoweringContext<'a> { drop(expander); let ty = self.lower_ty(&type_ref); - self.expander - .borrow_mut() - .as_mut() - .unwrap() - .exit(self.db.upcast(), mark); + self.expander.borrow_mut().as_mut().unwrap().exit(mark); Some(ty) } _ => { @@ -1458,13 +1454,12 @@ pub(crate) fn generic_predicates_for_param_query( pub(crate) fn generic_predicates_for_param_recover( _db: &dyn HirDatabase, - _cycle: &[String], + _cycle: &Cycle, _def: &GenericDefId, _param_id: &TypeOrConstParamId, _assoc_name: &Option, ) -> Arc<[Binders]> { - // FIXME: use `Arc::from_iter` when it becomes available - Arc::from(vec![]) + Arc::from_iter(None) } pub(crate) fn trait_environment_for_body_query( @@ -1473,7 +1468,7 @@ pub(crate) fn trait_environment_for_body_query( ) -> Arc { let Some(def) = def.as_generic_def_id() else { let krate = def.module(db.upcast()).krate(); - return Arc::new(TraitEnvironment::empty(krate)); + return TraitEnvironment::empty(krate); }; db.trait_environment(def) } @@ -1533,12 +1528,7 @@ pub(crate) fn trait_environment_query( let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses); - Arc::new(TraitEnvironment { - krate, - block: None, - traits_from_clauses: traits_in_scope.into_boxed_slice(), - env, - }) + TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env) } /// Resolve the where clause(s) of an item with generics. @@ -1607,69 +1597,54 @@ pub(crate) fn generic_defaults_query( let generic_params = generics(db.upcast(), def); let parent_start_idx = generic_params.len_self(); - let defaults = Arc::from( - generic_params - .iter() - .enumerate() - .map(|(idx, (id, p))| { - match p { - TypeOrConstParamData::TypeParamData(p) => { - let mut ty = p - .default - .as_ref() - .map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t)); - // Each default can only refer to previous parameters. - // Type variable default referring to parameter coming - // after it is forbidden (FIXME: report diagnostic) - ty = fallback_bound_vars(ty, idx, parent_start_idx); - crate::make_binders(db, &generic_params, ty.cast(Interner)) - } - TypeOrConstParamData::ConstParamData(p) => { - let mut val = p.default.as_ref().map_or_else( - || { - unknown_const_as_generic( - db.const_param_ty(ConstParamId::from_unchecked(id)), - ) - }, - |c| { - let c = ctx.lower_const(c, ctx.lower_ty(&p.ty)); - c.cast(Interner) - }, - ); - // Each default can only refer to previous parameters, see above. 
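The `Arc::from_iter` rewrites in this hunk (and the matching ones elsewhere in the patch) resolve the old `FIXME: use Arc::from_iter when it becomes available` comments: instead of collecting into a `Vec` and then converting with `Arc::from`, the iterator is collected directly into the shared slice. A minimal sketch using `std::sync::Arc` for illustration (the patch itself uses `triomphe::Arc`, which offers the same `FromIterator`-based constructor):

    use std::sync::Arc;

    fn main() {
        let squares = (0u32..4).map(|x| x * x);

        // Old shape: materialize a Vec, then convert it into Arc<[T]>.
        let via_vec: Arc<[u32]> = Arc::from(squares.clone().collect::<Vec<_>>());

        // New shape: collect straight into Arc<[T]> via its FromIterator impl.
        let direct: Arc<[u32]> = Arc::from_iter(squares);

        assert_eq!(via_vec, direct);
    }

Besides reading better, this can let the implementation fill the `Arc` allocation directly when the iterator's length is known up front, skipping the intermediate buffer.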
- val = fallback_bound_vars(val, idx, parent_start_idx); - make_binders(db, &generic_params, val) - } - } - }) - // FIXME: use `Arc::from_iter` when it becomes available - .collect::>(), - ); + let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| { + match p { + TypeOrConstParamData::TypeParamData(p) => { + let mut ty = + p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t)); + // Each default can only refer to previous parameters. + // Type variable default referring to parameter coming + // after it is forbidden (FIXME: report diagnostic) + ty = fallback_bound_vars(ty, idx, parent_start_idx); + crate::make_binders(db, &generic_params, ty.cast(Interner)) + } + TypeOrConstParamData::ConstParamData(p) => { + let mut val = p.default.as_ref().map_or_else( + || { + unknown_const_as_generic( + db.const_param_ty(ConstParamId::from_unchecked(id)), + ) + }, + |c| { + let c = ctx.lower_const(c, ctx.lower_ty(&p.ty)); + c.cast(Interner) + }, + ); + // Each default can only refer to previous parameters, see above. + val = fallback_bound_vars(val, idx, parent_start_idx); + make_binders(db, &generic_params, val) + } + } + })); defaults } pub(crate) fn generic_defaults_recover( db: &dyn HirDatabase, - _cycle: &[String], + _cycle: &Cycle, def: &GenericDefId, ) -> Arc<[Binders]> { let generic_params = generics(db.upcast(), *def); // FIXME: this code is not covered in tests. // we still need one default per parameter - let defaults = Arc::from( - generic_params - .iter_id() - .map(|id| { - let val = match id { - Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner), - Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)), - }; - crate::make_binders(db, &generic_params, val) - }) - // FIXME: use `Arc::from_iter` when it becomes available - .collect::>(), - ); + let defaults = Arc::from_iter(generic_params.iter_id().map(|id| { + let val = match id { + Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner), + Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)), + }; + crate::make_binders(db, &generic_params, val) + })); defaults } @@ -1885,7 +1860,7 @@ pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders { } } -pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders { +pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders { let generics = match *def { TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)), TyDefId::AdtId(it) => generics(db.upcast(), it.into()), @@ -1935,7 +1910,7 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T pub(crate) fn impl_self_ty_recover( db: &dyn HirDatabase, - _cycle: &[String], + _cycle: &Cycle, impl_id: &ImplId, ) -> Binders { let generics = generics(db.upcast(), (*impl_id).into()); diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 732643566a2d6..041d61c1b153d 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -168,12 +168,9 @@ impl TraitImpls { ) -> Arc<[Arc]> { let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}")); let crate_graph = db.crate_graph(); - // FIXME: use `Arc::from_iter` when it becomes available - Arc::from( - crate_graph - .transitive_deps(krate) - .map(|krate| db.trait_impls_in_crate(krate)) - .collect::>(), + + Arc::from_iter( + crate_graph.transitive_deps(krate).map(|krate| 
db.trait_impls_in_crate(krate)), ) } diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs index 2e6fe59d3bd80..f1795e71d945c 100644 --- a/crates/hir-ty/src/mir.rs +++ b/crates/hir-ty/src/mir.rs @@ -40,7 +40,6 @@ pub use monomorphization::{ use rustc_hash::FxHashMap; use smallvec::{smallvec, SmallVec}; use stdx::{impl_from, never}; -use triomphe::Arc; use super::consteval::{intern_const_scalar, try_const_usize}; @@ -147,7 +146,7 @@ impl ProjectionElem { base = normalize( db, // FIXME: we should get this from caller - Arc::new(TraitEnvironment::empty(krate)), + TraitEnvironment::empty(krate), base, ); } diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 62efb858511b4..fbfb6ff8cddde 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -21,7 +21,7 @@ use hir_def::{ AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, VariantId, }; -use hir_expand::{mod_path::ModPath, InFile}; +use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile}; use intern::Interned; use la_arena::ArenaMap; use rustc_hash::{FxHashMap, FxHashSet}; diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 922aee011cf33..639fabc198c15 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -2,7 +2,7 @@ use std::{fmt::Write, iter, mem}; -use base_db::FileId; +use base_db::{salsa::Cycle, FileId}; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ body::Body, @@ -2110,7 +2110,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result Result> { Err(MirLowerError::Loop) diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs index 7d2bb95d931c2..8da03eef2e0c6 100644 --- a/crates/hir-ty/src/mir/monomorphization.rs +++ b/crates/hir-ty/src/mir/monomorphization.rs @@ -9,6 +9,7 @@ use std::mem; +use base_db::salsa::Cycle; use chalk_ir::{ fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable}, ConstData, DebruijnIndex, @@ -300,7 +301,7 @@ pub fn monomorphized_mir_body_query( pub fn monomorphized_mir_body_recover( _: &dyn HirDatabase, - _: &[String], + _: &Cycle, _: &DefWithBodyId, _: &Substitution, _: &Arc, diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs index 7d19e0a19169a..6f4aef22d2f78 100644 --- a/crates/hir-ty/src/test_db.rs +++ b/crates/hir-ty/src/test_db.rs @@ -30,6 +30,7 @@ pub(crate) struct TestDB { impl Default for TestDB { fn default() -> Self { let mut this = Self { storage: Default::default(), events: Default::default() }; + this.setup_syntax_context_root(); this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); this } diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 6ea059065e935..35079e70946d5 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -2000,3 +2000,15 @@ fn test() { "#, ); } + +#[test] +fn rustc_test_issue_52437() { + check_types( + r#" + fn main() { + let x = [(); &(&'static: loop { |x| {}; }) as *const _ as usize] + //^ [(); _] + } + "#, + ); +} diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index 467b94a2662a1..b6bc76bc98d53 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -48,18 +48,32 @@ pub struct TraitEnvironment { pub krate: CrateId, pub block: Option, // FIXME make this a BTreeMap - pub(crate) traits_from_clauses: Box<[(Ty, TraitId)]>, + 
traits_from_clauses: Box<[(Ty, TraitId)]>, pub env: chalk_ir::Environment, } impl TraitEnvironment { - pub fn empty(krate: CrateId) -> Self { - TraitEnvironment { + pub fn empty(krate: CrateId) -> Arc { + Arc::new(TraitEnvironment { krate, block: None, traits_from_clauses: Box::default(), env: chalk_ir::Environment::new(Interner), - } + }) + } + + pub fn new( + krate: CrateId, + block: Option, + traits_from_clauses: Box<[(Ty, TraitId)]>, + env: chalk_ir::Environment, + ) -> Arc { + Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env }) + } + + // pub fn with_block(self: &mut Arc, block: BlockId) { + pub fn with_block(this: &mut Arc, block: BlockId) { + Arc::make_mut(this).block = Some(block); } pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator + '_ { diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 796490abd7f23..185853353181d 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -1,5 +1,6 @@ //! Attributes & documentation for hir types. +use base_db::FileId; use hir_def::{ attr::AttrsWithOwner, item_scope::ItemInNs, @@ -8,7 +9,10 @@ use hir_def::{ resolver::{HasResolver, Resolver, TypeNs}, AssocItemId, AttrDefId, ModuleDefId, }; -use hir_expand::{hygiene::Hygiene, name::Name}; +use hir_expand::{ + name::Name, + span::{RealSpanMap, SpanMapRef}, +}; use hir_ty::db::HirDatabase; use syntax::{ast, AstNode}; @@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option { if ast_path.syntax().text() != link { return None; } - ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic()) + ModPath::from_src( + db.upcast(), + ast_path, + SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)), + ) }; let full = try_get_modpath(link); diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index 936581bfe32c3..d98e3decd21ed 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -3,10 +3,27 @@ //! we didn't do that. //! //! But we need this for at least LRU caching at the query level. 
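The `TraitEnvironment::with_block` helper above is a copy-on-write update: `Arc::make_mut` mutates in place when the handle is unique and clones the inner value first when it is shared, so cached environments handed out elsewhere stay untouched. The commented-out `self: &mut Arc` signature left in the hunk presumably records the intended method form, which stable Rust does not accept as a receiver type, hence the free-function-style `this` parameter. A minimal sketch of the pattern with `std::sync::Arc` and a hypothetical stand-in struct (the patch itself uses `triomphe::Arc`, whose `make_mut` behaves the same way):

    use std::sync::Arc;

    // Hypothetical stand-in for TraitEnvironment.
    #[derive(Clone)]
    struct Env {
        block: Option<u32>,
    }

    // Copy-on-write update in the style of TraitEnvironment::with_block.
    fn with_block(this: &mut Arc<Env>, block: u32) {
        // Clones the inner Env only if `this` is currently shared.
        Arc::make_mut(this).block = Some(block);
    }

    fn main() {
        let mut env = Arc::new(Env { block: None });
        let snapshot = env.clone(); // a second handle forces a clone below
        with_block(&mut env, 1);
        assert_eq!(env.block, Some(1));
        assert_eq!(snapshot.block, None); // the shared snapshot is unaffected
    }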
-pub use hir_def::db::*; +pub use hir_def::db::{ + AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery, + ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery, + CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery, + EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery, + FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, + FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery, + ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery, + InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery, + InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery, + InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, + InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery, + InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery, + Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery, + StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery, + TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery, + UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery, +}; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery, - MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, + ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, + ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, }; pub use hir_ty::db::*; diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index cf9a2b73d9b1f..1cb36f9b021fe 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -12,7 +12,7 @@ use hir_def::path::ModPath; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange}; -use crate::{AssocItem, Field, Local, MacroKind, Type}; +use crate::{AssocItem, Field, Local, MacroKind, Trait, Type}; macro_rules! diagnostics { ($($diag:ident,)*) => { @@ -55,6 +55,7 @@ diagnostics![ ReplaceFilterMapNextWithFindMap, TraitImplIncorrectSafety, TraitImplMissingAssocItems, + TraitImplRedundantAssocItems, TraitImplOrphan, TypedHole, TypeMismatch, @@ -310,3 +311,11 @@ pub struct TraitImplMissingAssocItems { pub impl_: AstPtr, pub missing: Vec<(Name, AssocItem)>, } + +#[derive(Debug, PartialEq, Eq)] +pub struct TraitImplRedundantAssocItems { + pub file_id: HirFileId, + pub trait_: Trait, + pub impl_: AstPtr, + pub assoc_item: (Name, AssocItem), +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 908027a2026d1..e0230fa3761b2 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -17,7 +17,7 @@ //! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: //! . 
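The new `TraitImplRedundantAssocItems` diagnostic declared in the `diagnostics.rs` hunk above is populated in the `hir/src/lib.rs` hunk further below by pairing impl items with trait items of the same variant and name, using `std::mem::discriminant` for the variant check. A minimal sketch of that comparison pattern, with a hypothetical `AssocItem` enum standing in for the real id types:

    use std::mem::discriminant;

    // Hypothetical stand-in for the assoc-item id enum; the payloads
    // exist only to show that they are ignored by the comparison.
    #[allow(dead_code)]
    enum AssocItem {
        Fn(u32),
        Const(u32),
    }

    // Two items are "the same kind" when their enum variants match,
    // regardless of the ids the variants carry.
    fn same_kind(a: &AssocItem, b: &AssocItem) -> bool {
        discriminant(a) == discriminant(b)
    }

    fn main() {
        assert!(same_kind(&AssocItem::Fn(0), &AssocItem::Fn(1)));
        assert!(!same_kind(&AssocItem::Fn(0), &AssocItem::Const(0)));
    }

Comparing discriminants sidesteps writing out a `match` over every variant pair and stays correct if new item kinds are added later.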
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "512"] @@ -59,7 +59,7 @@ use hir_def::{ Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, }; -use hir_expand::{name::name, MacroCallKind}; +use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind}; use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, @@ -81,7 +81,7 @@ use once_cell::unsync::Lazy; use rustc_hash::FxHashSet; use stdx::{impl_from, never}; use syntax::{ - ast::{self, HasAttrs as _, HasDocComments, HasName}, + ast::{self, HasAttrs as _, HasName}, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T, }; use triomphe::Arc; @@ -92,7 +92,9 @@ pub use crate::{ attrs::{resolve_doc_path_on, HasAttrs}, diagnostics::*, has_source::HasSource, - semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, + semantics::{ + DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits, + }, }; // Be careful with these re-exports. @@ -123,8 +125,10 @@ pub use { }, hir_expand::{ attrs::{Attr, AttrId}, + hygiene::{marks_rev, SyntaxContextExt}, name::{known, Name}, - ExpandResult, HirFileId, InFile, MacroFile, Origin, + tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, + MacroFileIdExt, }, hir_ty::{ display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, @@ -140,7 +144,10 @@ pub use { #[allow(unused)] use { hir_def::path::Path, - hir_expand::{hygiene::Hygiene, name::AsName}, + hir_expand::{ + name::AsName, + span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, + }, }; /// hir::Crate describes a single crate. 
It's the main interface with which @@ -601,7 +608,7 @@ impl Module { let tree = loc.id.item_tree(db.upcast()); let node = &tree[loc.id.value]; let file_id = loc.id.file_id(); - if file_id.is_builtin_derive(db.upcast()) { + if file_id.macro_file().map_or(false, |it| it.is_builtin_derive(db.upcast())) { // these expansion come from us, diagnosing them is a waste of resources // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow continue; @@ -664,7 +671,8 @@ impl Module { _ => (), }; - if let Some(trait_) = trait_ { + // Negative impls can't have items, don't emit missing items diagnostic for them + if let (false, Some(trait_)) = (impl_is_negative, trait_) { let items = &db.trait_data(trait_.into()).items; let required_items = items.iter().filter(|&(_, assoc)| match *assoc { AssocItemId::FunctionId(it) => !db.function_data(it).has_body(), @@ -686,6 +694,26 @@ impl Module { }, )); + let redundant = impl_assoc_items_scratch + .iter() + .filter(|(id, name)| { + !items.iter().any(|(impl_name, impl_item)| { + discriminant(impl_item) == discriminant(id) && impl_name == name + }) + }) + .map(|(item, name)| (name.clone(), AssocItem::from(*item))); + for (name, assoc_item) in redundant { + acc.push( + TraitImplRedundantAssocItems { + trait_, + file_id, + impl_: ast_id_map.get(node.ast_id()), + assoc_item: (name, assoc_item), + } + .into(), + ) + } + let missing: Vec<_> = required_items .filter(|(name, id)| { !impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| { @@ -947,10 +975,9 @@ fn precise_macro_call_location( // Compute the precise location of the macro name's token in the derive // list. let token = (|| { - let derive_attr = node - .doc_comments_and_attrs() + let derive_attr = collect_attrs(&node) .nth(derive_attr_index.ast_index()) - .and_then(Either::left)?; + .and_then(|x| Either::left(x.1))?; let token_tree = derive_attr.meta()?.token_tree()?; let group_by = token_tree .syntax() @@ -975,10 +1002,9 @@ fn precise_macro_call_location( } MacroCallKind::Attr { ast_id, invoc_attr_index, .. 
} => { let node = ast_id.to_node(db.upcast()); - let attr = node - .doc_comments_and_attrs() + let attr = collect_attrs(&node) .nth(invoc_attr_index.ast_index()) - .and_then(Either::left) + .and_then(|x| Either::left(x.1)) .unwrap_or_else(|| { panic!("cannot find attribute #{}", invoc_attr_index.ast_index()) }); @@ -3490,9 +3516,34 @@ impl Impl { self.id.lookup(db.upcast()).container.into() } - pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option> { + pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option> { let src = self.source(db)?; - src.file_id.as_builtin_derive_attr_node(db.upcast()) + + let macro_file = src.file_id.macro_file()?; + let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let (derive_attr, derive_index) = match loc.kind { + MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { + let module_id = self.id.lookup(db.upcast()).container; + ( + db.crate_def_map(module_id.krate())[module_id.local_id] + .scope + .derive_macro_invoc(ast_id, derive_attr_index)?, + derive_index, + ) + } + _ => return None, + }; + let file_id = MacroFileId { macro_call_id: derive_attr }; + let path = db + .parse_macro_expansion(file_id) + .value + .0 + .syntax_node() + .children() + .nth(derive_index as usize) + .and_then(::cast) + .and_then(|it| it.path())?; + Some(InMacroFile { file_id, value: path }) } pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool { @@ -3512,10 +3563,9 @@ impl TraitRef { resolver: &Resolver, trait_ref: hir_ty::TraitRef, ) -> TraitRef { - let env = resolver.generic_def().map_or_else( - || Arc::new(TraitEnvironment::empty(resolver.krate())), - |d| db.trait_environment(d), - ); + let env = resolver + .generic_def() + .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d)); TraitRef { env, trait_ref } } @@ -3655,15 +3705,14 @@ impl Type { resolver: &Resolver, ty: Ty, ) -> Type { - let environment = resolver.generic_def().map_or_else( - || Arc::new(TraitEnvironment::empty(resolver.krate())), - |d| db.trait_environment(d), - ); + let environment = resolver + .generic_def() + .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d)); Type { env: environment, ty } } pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type { - Type { env: Arc::new(TraitEnvironment::empty(krate)), ty } + Type { env: TraitEnvironment::empty(krate), ty } } pub fn reference(inner: &Type, m: Mutability) -> Type { @@ -3679,10 +3728,9 @@ impl Type { fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type { let resolver = lexical_env.resolver(db.upcast()); - let environment = resolver.generic_def().map_or_else( - || Arc::new(TraitEnvironment::empty(resolver.krate())), - |d| db.trait_environment(d), - ); + let environment = resolver + .generic_def() + .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d)); Type { env: environment, ty } } @@ -4252,10 +4300,10 @@ impl Type { let canonical = hir_ty::replace_errors_with_variables(&self.ty); let krate = scope.krate(); - let environment = scope.resolver().generic_def().map_or_else( - || Arc::new(TraitEnvironment::empty(krate.id)), - |d| db.trait_environment(d), - ); + let environment = scope + .resolver() + .generic_def() + .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); method_resolution::iterate_method_candidates_dyn( &canonical, @@ -4309,10 +4357,10 @@ impl Type { let canonical = hir_ty::replace_errors_with_variables(&self.ty); let krate = 
scope.krate(); - let environment = scope.resolver().generic_def().map_or_else( - || Arc::new(TraitEnvironment::empty(krate.id)), - |d| db.trait_environment(d), - ); + let environment = scope + .resolver() + .generic_def() + .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); method_resolution::iterate_path_candidates( &canonical, diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index a42e0978b25f3..92fa76c96fbdb 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -2,7 +2,11 @@ mod source_to_def; -use std::{cell::RefCell, fmt, iter, mem, ops}; +use std::{ + cell::RefCell, + fmt, iter, mem, + ops::{self, ControlFlow, Not}, +}; use base_db::{FileId, FileRange}; use either::Either; @@ -13,16 +17,21 @@ use hir_def::{ nameres::MacroSubNs, resolver::{self, HasResolver, Resolver, TypeNs}, type_ref::Mutability, - AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId, + AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, +}; +use hir_expand::{ + db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId, + MacroFileId, MacroFileIdExt, }; -use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; +use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody}, - match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, + ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _}, + match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, + TextRange, TextSize, }; use crate::{ @@ -35,7 +44,13 @@ use crate::{ TypeAlias, TypeParam, VariantDef, }; -#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DescendPreference { + SameText, + SameKind, + None, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item Def(ModuleDef), @@ -114,11 +129,12 @@ pub struct Semantics<'db, DB> { pub struct SemanticsImpl<'db> { pub db: &'db dyn HirDatabase, s2d_cache: RefCell, - expansion_info_cache: RefCell>>, - // Rootnode to HirFileId cache + /// Rootnode to HirFileId cache cache: RefCell>, - // MacroCall to its expansion's HirFileId cache - macro_call_cache: RefCell, HirFileId>>, + // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens + expansion_info_cache: RefCell>, + /// MacroCall to its expansion's MacroFileId cache + macro_call_cache: RefCell, MacroFileId>>, } impl fmt::Debug for Semantics<'_, DB> { @@ -182,20 +198,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast)) } - pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { - self.imp.resolve_method_call(call).map(Function::from) - } - - /// Attempts to resolve this call expression as a method call falling back to resolving it as a field. 
- pub fn resolve_method_call_field_fallback( - &self, - call: &ast::MethodCallExpr, - ) -> Option> { - self.imp - .resolve_method_call_fallback(call) - .map(|it| it.map_left(Function::from).map_right(Field::from)) - } - pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { self.imp.resolve_await_to_poll(await_expr).map(Function::from) } @@ -255,7 +257,7 @@ impl<'db> SemanticsImpl<'db> { pub fn expand(&self, macro_call: &ast::MacroCall) -> Option { let sa = self.analyze_no_infer(macro_call.syntax())?; let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?; - let node = self.parse_or_expand(file_id); + let node = self.parse_or_expand(file_id.into()); Some(node) } @@ -388,11 +390,72 @@ impl<'db> SemanticsImpl<'db> { ) } + pub fn as_format_args_parts( + &self, + string: &ast::String, + ) -> Option)>> { + if let Some(quote) = string.open_quote_text_range() { + return self + .descend_into_macros(DescendPreference::SameText, string.syntax().clone()) + .into_iter() + .find_map(|token| { + let string = ast::String::cast(token)?; + let literal = + string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + let res = source_analyzer + .as_format_args_parts(self.db, format_args.as_ref())? + .map(|(range, res)| (range + quote.end(), res)) + .collect(); + Some(res) + }); + } + None + } + + pub fn check_for_format_args_template( + &self, + original_token: SyntaxToken, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + if let Some(original_string) = ast::String::cast(original_token.clone()) { + if let Some(quote) = original_string.open_quote_text_range() { + return self + .descend_into_macros(DescendPreference::SameText, original_token.clone()) + .into_iter() + .find_map(|token| { + self.resolve_offset_in_format_args( + ast::String::cast(token)?, + offset - quote.end(), + ) + }) + .map(|(range, res)| (range + quote.end(), res)); + } + } + None + } + + fn resolve_offset_in_format_args( + &self, + string: ast::String, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + debug_assert!(offset <= string.syntax().text_range().len()); + let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) + } + /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now let mut res = smallvec![]; let tokens = (|| { + // FIXME: the trivia skipping should not be necessary let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?; let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?; Some((first, last)) @@ -403,24 +466,28 @@ impl<'db> SemanticsImpl<'db> { }; if first == last { - self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| { - if let Some(node) = value.parent_ancestors().find_map(N::cast) { + // node is just the token, so descend the token + self.descend_into_macros_impl(first, &mut |InFile { value, .. 
}| { + if let Some(node) = value + .parent_ancestors() + .take_while(|it| it.text_range() == value.text_range()) + .find_map(N::cast) + { res.push(node) } - false + ControlFlow::Continue(()) }); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(first, 0.into(), &mut |token| { + self.descend_into_macros_impl(first, &mut |token| { scratch.push(token); - false + ControlFlow::Continue(()) }); let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( last, - 0.into(), &mut |InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { @@ -437,7 +504,7 @@ impl<'db> SemanticsImpl<'db> { } } } - false + ControlFlow::Continue(()) }, ); } @@ -449,32 +516,42 @@ impl<'db> SemanticsImpl<'db> { /// be considered for the mapping in case of inline format args. pub fn descend_into_macros( &self, + mode: DescendPreference, token: SyntaxToken, - offset: TextSize, - ) -> SmallVec<[SyntaxToken; 1]> { - let mut res = smallvec![]; - self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| { - res.push(value); - false - }); - res - } - - /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token. - /// - /// Returns the original non descended token if none of the mapped counterparts have the same text. - pub fn descend_into_macros_with_same_text( - &self, - token: SyntaxToken, - offset: TextSize, ) -> SmallVec<[SyntaxToken; 1]> { - let text = token.text(); + enum Dp<'t> { + SameText(&'t str), + SameKind(SyntaxKind), + None, + } + let fetch_kind = |token: &SyntaxToken| match token.parent() { + Some(node) => match node.kind() { + kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, + _ => token.kind(), + }, + None => token.kind(), + }; + let mode = match mode { + DescendPreference::SameText => Dp::SameText(token.text()), + DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), + DescendPreference::None => Dp::None, + }; let mut res = smallvec![]; - self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { - if value.text() == text { + self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { + let is_a_match = match mode { + Dp::SameText(text) => value.text() == text, + Dp::SameKind(preferred_kind) => { + let kind = fetch_kind(&value); + kind == preferred_kind + // special case for derive macros + || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) + } + Dp::None => true, + }; + if is_a_match { res.push(value); } - false + ControlFlow::Continue(()) }); if res.is_empty() { res.push(token); @@ -482,44 +559,46 @@ impl<'db> SemanticsImpl<'db> { res } - pub fn descend_into_macros_with_kind_preference( + pub fn descend_into_macros_single( &self, + mode: DescendPreference, token: SyntaxToken, - offset: TextSize, ) -> SyntaxToken { + enum Dp<'t> { + SameText(&'t str), + SameKind(SyntaxKind), + None, + } let fetch_kind = |token: &SyntaxToken| match token.parent() { Some(node) => match node.kind() { - kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => { - node.parent().map_or(kind, |it| it.kind()) - } + kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, _ => token.kind(), }, None => token.kind(), }; - let preferred_kind = fetch_kind(&token); - let mut res = None; - self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. 
}| { - if fetch_kind(&value) == preferred_kind { - res = Some(value); - true - } else { - if let None = res { - res = Some(value) - } - false - } - }); - res.unwrap_or(token) - } - - /// Descend the token into its macro call if it is part of one, returning the token in the - /// expansion that it is associated with. If `offset` points into the token's range, it will - /// be considered for the mapping in case of inline format args. - pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken { + let mode = match mode { + DescendPreference::SameText => Dp::SameText(token.text()), + DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), + DescendPreference::None => Dp::None, + }; let mut res = token.clone(); - self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| { + self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { + let is_a_match = match mode { + Dp::SameText(text) => value.text() == text, + Dp::SameKind(preferred_kind) => { + let kind = fetch_kind(&value); + kind == preferred_kind + // special case for derive macros + || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) + } + Dp::None => true, + }; res = value; - true + if is_a_match { + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } }); res } @@ -527,177 +606,193 @@ impl<'db> SemanticsImpl<'db> { fn descend_into_macros_impl( &self, token: SyntaxToken, - // FIXME: We might want this to be Option to be able to opt out of subrange - // mapping, specifically for node downmapping - offset: TextSize, - f: &mut dyn FnMut(InFile) -> bool, + f: &mut dyn FnMut(InFile) -> ControlFlow<()>, ) { let _p = profile::span("descend_into_macros"); - let relative_token_offset = token.text_range().start().checked_sub(offset); - let parent = match token.parent() { + let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { Some(it) => it, None => return, }; - let sa = match self.analyze_no_infer(&parent) { - Some(it) => it, - None => return, + + let span = match sa.file_id.file_id() { + Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()), + None => { + stdx::never!(); + return; + } }; - let def_map = sa.resolver.def_map(); - let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)]; let mut cache = self.expansion_info_cache.borrow_mut(); let mut mcache = self.macro_call_cache.borrow_mut(); + let def_map = sa.resolver.def_map(); - let mut process_expansion_for_token = - |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| { - let expansion_info = cache - .entry(macro_file) - .or_insert_with(|| macro_file.expansion_info(self.db.upcast())) - .as_ref()?; + let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { + let expansion_info = cache + .entry(macro_file) + .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); - { - let InFile { file_id, value } = expansion_info.expanded(); - self.cache(value, file_id); - } + { + let InMacroFile { file_id, value } = expansion_info.expanded(); + self.cache(value, file_id.into()); + } - let mapped_tokens = expansion_info.map_token_down( - self.db.upcast(), - item, - token, - relative_token_offset, - )?; - let len = stack.len(); - - // requeue the tokens we got from mapping our current token down - stack.extend(mapped_tokens); - // if the length changed we have found a mapping for the token - (stack.len() != len).then_some(()) - }; + let InMacroFile { file_id, value: mapped_tokens } = + 
expansion_info.map_range_down(span)?; + let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect(); - // Remap the next token in the queue into a macro call its in, if it is not being remapped - // either due to not being in a macro-call or because its unused push it into the result vec, - // otherwise push the remapped tokens back into the queue as they can potentially be remapped again. - while let Some(token) = stack.pop() { - self.db.unwind_if_cancelled(); - let was_not_remapped = (|| { - // First expand into attribute invocations - let containing_attribute_macro_call = self.with_ctx(|ctx| { - token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { - if item.attrs().next().is_none() { - // Don't force populate the dyn cache for items that don't have an attribute anyways - return None; - } - Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item)) - }) - }); - if let Some((call_id, item)) = containing_attribute_macro_call { - let file_id = call_id.as_file(); - return process_expansion_for_token( - &mut stack, - file_id, - Some(item), - token.as_ref(), - ); - } + // if the length changed we have found a mapping for the token + let res = mapped_tokens.is_empty().not().then_some(()); + // requeue the tokens we got from mapping our current token down + stack.push((HirFileId::from(file_id), mapped_tokens)); + res + }; - // Then check for token trees, that means we are either in a function-like macro or - // secondary attribute inputs - let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?; - let parent = tt.syntax().parent()?; + let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])]; + + while let Some((file_id, mut tokens)) = stack.pop() { + while let Some(token) = tokens.pop() { + let was_not_remapped = (|| { + // First expand into attribute invocations + let containing_attribute_macro_call = self.with_ctx(|ctx| { + token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { + if item.attrs().next().is_none() { + // Don't force populate the dyn cache for items that don't have an attribute anyways + return None; + } + Some(( + ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?, + item, + )) + }) + }); + if let Some((call_id, item)) = containing_attribute_macro_call { + let file_id = call_id.as_macro_file(); + let attr_id = match self.db.lookup_intern_macro_call(call_id).kind { + hir_expand::MacroCallKind::Attr { invoc_attr_index, .. 
} => { + invoc_attr_index.ast_index() + } + _ => 0, + }; + let text_range = item.syntax().text_range(); + let start = item + .doc_comments_and_attrs() + .nth(attr_id) + .map(|attr| match attr { + Either::Left(it) => it.syntax().text_range().start(), + Either::Right(it) => it.syntax().text_range().start(), + }) + .unwrap_or_else(|| text_range.start()); + let text_range = TextRange::new(start, text_range.end()); + // remove any other token in this macro input, all their mappings are the + // same as this one + tokens.retain(|t| !text_range.contains_range(t.text_range())); + return process_expansion_for_token(&mut stack, file_id); + } - if tt.left_delimiter_token().map_or(false, |it| it == token.value) { - return None; - } - if tt.right_delimiter_token().map_or(false, |it| it == token.value) { - return None; - } + // Then check for token trees, that means we are either in a function-like macro or + // secondary attribute inputs + let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?; + let parent = tt.syntax().parent()?; - if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) { - let mcall = token.with_value(macro_call); - let file_id = match mcache.get(&mcall) { - Some(&it) => it, - None => { - let it = sa.expand(self.db, mcall.as_ref())?; - mcache.insert(mcall, it); - it - } - }; - process_expansion_for_token(&mut stack, file_id, None, token.as_ref()) - } else if let Some(meta) = ast::Meta::cast(parent) { - // attribute we failed expansion for earlier, this might be a derive invocation - // or derive helper attribute - let attr = meta.parent_attr()?; - - let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) { - // this might be a derive, or a derive helper on an ADT - let derive_call = self.with_ctx(|ctx| { - // so try downmapping the token into the pseudo derive expansion - // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works - ctx.attr_to_derive_macro_call( - token.with_value(&adt), - token.with_value(attr.clone()), - ) - .map(|(_, call_id, _)| call_id) - }); - - match derive_call { - Some(call_id) => { - // resolved to a derive - let file_id = call_id.as_file(); - return process_expansion_for_token( - &mut stack, - file_id, - Some(adt.into()), - token.as_ref(), - ); + if tt.left_delimiter_token().map_or(false, |it| it == token) { + return None; + } + if tt.right_delimiter_token().map_or(false, |it| it == token) { + return None; + } + + if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) { + let mcall: hir_expand::files::InFileWrapper = + InFile::new(file_id, macro_call); + let file_id = match mcache.get(&mcall) { + Some(&it) => it, + None => { + let it = sa.expand(self.db, mcall.as_ref())?; + mcache.insert(mcall, it); + it } - None => Some(adt), - } - } else { - // Otherwise this could be a derive helper on a variant or field - if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast) + }; + let text_range = tt.syntax().text_range(); + // remove any other token in this macro input, all their mappings are the + // same as this one + tokens.retain(|t| !text_range.contains_range(t.text_range())); + process_expansion_for_token(&mut stack, file_id) + } else if let Some(meta) = ast::Meta::cast(parent) { + // attribute we failed expansion for earlier, this might be a derive invocation + // or derive helper attribute + let attr = meta.parent_attr()?; + + let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) { - 
field.syntax().ancestors().take(4).find_map(ast::Adt::cast) - } else if let Some(field) = - attr.syntax().parent().and_then(ast::TupleField::cast) - { - field.syntax().ancestors().take(4).find_map(ast::Adt::cast) - } else if let Some(variant) = - attr.syntax().parent().and_then(ast::Variant::cast) - { - variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast) + // this might be a derive, or a derive helper on an ADT + let derive_call = self.with_ctx(|ctx| { + // so try downmapping the token into the pseudo derive expansion + // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works + ctx.attr_to_derive_macro_call( + InFile::new(file_id, &adt), + InFile::new(file_id, attr.clone()), + ) + .map(|(_, call_id, _)| call_id) + }); + + match derive_call { + Some(call_id) => { + // resolved to a derive + let file_id = call_id.as_macro_file(); + let text_range = attr.syntax().text_range(); + // remove any other token in this macro input, all their mappings are the + // same as this one + tokens.retain(|t| !text_range.contains_range(t.text_range())); + return process_expansion_for_token(&mut stack, file_id); + } + None => Some(adt), + } } else { - None + // Otherwise this could be a derive helper on a variant or field + if let Some(field) = + attr.syntax().parent().and_then(ast::RecordField::cast) + { + field.syntax().ancestors().take(4).find_map(ast::Adt::cast) + } else if let Some(field) = + attr.syntax().parent().and_then(ast::TupleField::cast) + { + field.syntax().ancestors().take(4).find_map(ast::Adt::cast) + } else if let Some(variant) = + attr.syntax().parent().and_then(ast::Variant::cast) + { + variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast) + } else { + None + } + }?; + if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) { + return None; } - }?; - if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) { - return None; - } - // Not an attribute, nor a derive, so it's either a builtin or a derive helper - // Try to resolve to a derive helper and downmap - let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); - let id = self.db.ast_id_map(token.file_id).ast_id(&adt); - let helpers = - def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; - let item = Some(adt.into()); - let mut res = None; - for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { - res = res.or(process_expansion_for_token( - &mut stack, - derive.as_file(), - item.clone(), - token.as_ref(), - )); + // Not an attribute, nor a derive, so it's either a builtin or a derive helper + // Try to resolve to a derive helper and downmap + let attr_name = + attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); + let id = self.db.ast_id_map(file_id).ast_id(&adt); + let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?; + let mut res = None; + for (.., derive) in + helpers.iter().filter(|(helper, ..)| *helper == attr_name) + { + res = res.or(process_expansion_for_token( + &mut stack, + derive.as_macro_file(), + )); + } + res + } else { + None } - res - } else { - None - } - })() - .is_none(); + })() + .is_none(); - if was_not_remapped && f(token) { - break; + if was_not_remapped && f(InFile::new(file_id, token)).is_break() { + break; + } } } } @@ -712,7 +807,7 @@ impl<'db> SemanticsImpl<'db> { offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) - .map(move |token| self.descend_into_macros(token, offset)) + .map(move |token| 
self.descend_into_macros(DescendPreference::None, token)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) @@ -737,14 +832,16 @@ impl<'db> SemanticsImpl<'db> { pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> { let node = self.find_file(node); node.original_file_range_opt(self.db.upcast()) + .filter(|(_, ctx)| ctx.is_root()) + .map(TupleExt::head) } /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> { self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map( - |InFile { file_id, value }| { - self.cache(find_root(value.syntax()), file_id); + |InRealFile { file_id, value }| { + self.cache(find_root(value.syntax()), file_id.into()); value }, ) @@ -755,8 +852,8 @@ impl<'db> SemanticsImpl<'db> { pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> { let InFile { file_id, .. } = self.find_file(node); InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map( - |InFile { file_id, value }| { - self.cache(find_root(&value), file_id); + |InRealFile { file_id, value }| { + self.cache(find_root(&value), file_id.into()); value }, ) @@ -787,7 +884,7 @@ impl<'db> SemanticsImpl<'db> { Some(parent) => Some(InFile::new(file_id, parent)), None => { self.cache(value.clone(), file_id); - file_id.call_node(db) + Some(file_id.macro_file()?.call_node(db)) } } }) @@ -851,9 +948,9 @@ impl<'db> SemanticsImpl<'db> { pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> { let analyze = self.analyze(path.syntax())?; - let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id); - let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene); - let hir_path = Path::from_src(path.clone(), &ctx)?; + let span_map = self.db.span_map(analyze.file_id); + let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map); + let hir_path = Path::from_src(&ctx, path.clone())?; match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? { TypeNs::TraitId(id) => Some(Trait { id }), _ => None, @@ -937,14 +1034,15 @@ impl<'db> SemanticsImpl<'db> { self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat) } - fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> { + pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { self.analyze(call.syntax())?.resolve_method_call(self.db, call) } - fn resolve_method_call_fallback( + /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
+ pub fn resolve_method_call_fallback( &self, call: &ast::MethodCallExpr, - ) -> Option<Either<FunctionId, FieldId>> { + ) -> Option<Either<Function, Field>> { self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call) } @@ -976,6 +1074,13 @@ impl<'db> SemanticsImpl<'db> { self.analyze(field.syntax())?.resolve_field(self.db, field) } + pub fn resolve_field_fallback( + &self, + field: &ast::FieldExpr, + ) -> Option<Either<Field, Function>> { + self.analyze(field.syntax())?.resolve_field_fallback(self.db, field) + } + pub fn resolve_record_field( &self, field: &ast::RecordExprField, @@ -1037,7 +1142,7 @@ impl<'db> SemanticsImpl<'db> { fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T { let mut cache = self.s2d_cache.borrow_mut(); - let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache }; + let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache }; f(&mut ctx) } @@ -1187,7 +1292,7 @@ impl<'db> SemanticsImpl<'db> { return None; } - let func = self.resolve_method_call(method_call_expr).map(Function::from)?; + let func = self.resolve_method_call(method_call_expr)?; let res = match func.self_param(self.db)?.access(self.db) { Access::Shared | Access::Exclusive => true, Access::Owned => false, @@ -1451,7 +1556,7 @@ impl SemanticsScope<'_> { /// necessary a heuristic, as it doesn't take hygiene into account. pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> { let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id); - let path = Path::from_src(path.clone(), &ctx)?; + let path = Path::from_src(&ctx, path.clone())?; resolve_hir_path(self.db, &self.resolver, &path) } @@ -1478,6 +1583,10 @@ impl SemanticsScope<'_> { pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ { self.resolver.extern_crate_decls_in_scope(self.db.upcast()) } + + pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool { + self.resolver.impl_def() == other.resolver.impl_def() + } } #[derive(Debug)] diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index aabda3655602a..df8c1e904fe89 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -97,7 +97,7 @@ use hir_def::{ FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, }; -use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId}; +use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use stdx::{impl_from, never}; @@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap pub(super) struct SourceToDefCtx<'a, 'b> { pub(super) db: &'b dyn HirDatabase, - pub(super) cache: &'a mut SourceToDefCache, + pub(super) dynmap_cache: &'a mut SourceToDefCache, } impl SourceToDefCtx<'_, '_> { @@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> { fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap { let db = self.db; - self.cache + self.dynmap_cache .entry((container, file_id)) .or_insert_with(|| container.child_by_source(db, file_id)) } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 55c2f8324c6d0..73db6f8f0b86b 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -26,11 +26,10 @@ use hir_def::{ }; use hir_expand::{ builtin_fn_macro::BuiltinFnLikeExpander, - hygiene::Hygiene, mod_path::path, name, name::{AsName, Name}, - HirFileId, InFile, + HirFileId,
InFile, MacroFileId, MacroFileIdExt, }; use hir_ty::{ diagnostics::{ @@ -281,25 +280,49 @@ impl SourceAnalyzer { &self, db: &dyn HirDatabase, call: &ast::MethodCallExpr, - ) -> Option<FunctionId> { + ) -> Option<Function> { let expr_id = self.expr_id(db, &call.clone().into())?; let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?; - Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)) + Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into()) } pub(crate) fn resolve_method_call_fallback( &self, db: &dyn HirDatabase, call: &ast::MethodCallExpr, - ) -> Option<Either<FunctionId, FieldId>> { + ) -> Option<Either<Function, Field>> { let expr_id = self.expr_id(db, &call.clone().into())?; let inference_result = self.infer.as_ref()?; match inference_result.method_resolution(expr_id) { - Some((f_in_trait, substs)) => { - Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))) - } - None => inference_result.field_resolution(expr_id).map(Either::Right), + Some((f_in_trait, substs)) => Some(Either::Left( + self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(), + )), + None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right), + } + } + + pub(crate) fn resolve_field( + &self, + db: &dyn HirDatabase, + field: &ast::FieldExpr, + ) -> Option<Field> { + let expr_id = self.expr_id(db, &field.clone().into())?; + self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) + } + + pub(crate) fn resolve_field_fallback( + &self, + db: &dyn HirDatabase, + field: &ast::FieldExpr, + ) -> Option<Either<Field, Function>> { + let expr_id = self.expr_id(db, &field.clone().into())?; + let inference_result = self.infer.as_ref()?; + match inference_result.field_resolution(expr_id) { + Some(field) => Some(Either::Left(field.into())), + None => inference_result.method_resolution(expr_id).map(|(f, substs)| { + Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into()) + }), } } @@ -418,15 +441,6 @@ impl SourceAnalyzer { Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } - pub(crate) fn resolve_field( - &self, - db: &dyn HirDatabase, - field: &ast::FieldExpr, - ) -> Option<Field> { - let expr_id = self.expr_id(db, &field.clone().into())?; - self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) - } - pub(crate) fn resolve_record_field( &self, db: &dyn HirDatabase, @@ -484,7 +498,7 @@ impl SourceAnalyzer { macro_call: InFile<&ast::MacroCall>, ) -> Option<Macro> { let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id); - let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?; + let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?; self.resolver .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang)) .map(|(it, _)| it.into()) @@ -596,9 +610,8 @@ impl SourceAnalyzer { } // This must be a normal source file rather than macro file. - let hygiene = Hygiene::new(db.upcast(), self.file_id); - let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene); - let hir_path = Path::from_src(path.clone(), &ctx)?; + let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id)); + let hir_path = Path::from_src(&ctx, path.clone())?; // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // trying to resolve foo::bar.
@@ -755,14 +768,15 @@ impl SourceAnalyzer { &self, db: &dyn HirDatabase, macro_call: InFile<&ast::MacroCall>, - ) -> Option<HirFileId> { + ) -> Option<MacroFileId> { let krate = self.resolver.krate(); let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { self.resolver .resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang)) .map(|(it, _)| macro_id_to_def_id(db.upcast(), it)) })?; - Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64) + // why the 64? + Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) } pub(crate) fn resolve_variant( @@ -821,6 +835,52 @@ impl SourceAnalyzer { false } + pub(crate) fn resolve_offset_in_format_args( + &self, + db: &dyn HirDatabase, + format_args: InFile<&ast::FormatArgsExpr>, + offset: TextSize, + ) -> Option<(TextRange, Option<PathResolution>)> { + let implicits = self.body_source_map()?.implicit_format_args(format_args)?; + implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| { + ( + *range, + resolve_hir_value_path( + db, + &self.resolver, + self.resolver.body_owner(), + &Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + Some(name.clone()), + )), + ), + ) + }) + } + + pub(crate) fn as_format_args_parts<'a>( + &'a self, + db: &'a dyn HirDatabase, + format_args: InFile<&ast::FormatArgsExpr>, + ) -> Option<impl Iterator<Item = (TextRange, Option<PathResolution>)> + 'a> { + Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map( + move |(range, name)| { + ( + *range, + resolve_hir_value_path( + db, + &self.resolver, + self.resolver.body_owner(), + &Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + Some(name.clone()), + )), + ), + ) + }, + )) + } + fn resolve_impl_method_or_trait_def( + &self, + db: &dyn HirDatabase, @@ -894,11 +954,12 @@ fn scope_for_offset( } // FIXME handle attribute expansion - let source = iter::successors(file_id.call_node(db.upcast()), |it| { - it.file_id.call_node(db.upcast()) - }) - .find(|it| it.file_id == from_file) - .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?; + let source = + iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| { + Some(it.file_id.macro_file()?.call_node(db.upcast())) + }) + .find(|it| it.file_id == from_file) + .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?; Some((source.value.text_range(), scope)) }) .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end()) @@ -1039,24 +1100,7 @@ fn resolve_hir_path_( }; let body_owner = resolver.body_owner(); - let values = || { - resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { - let res = match val { - ValueNs::LocalBinding(binding_id) => { - let var = Local { parent: body_owner?, binding_id }; - PathResolution::Local(var) - } - ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), - ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), - ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), - ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), - ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()), - ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()), - ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()), - }; - Some(res) - }) - }; + let values = || resolve_hir_value_path(db, resolver, body_owner, path); let items = || { resolver @@ -1076,6 +1120,30 @@ fn resolve_hir_path_( .or_else(macros) } +fn resolve_hir_value_path( + db:
&dyn HirDatabase, + resolver: &Resolver, + body_owner: Option<DefWithBodyId>, + path: &Path, +) -> Option<PathResolution> { + resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { + let res = match val { + ValueNs::LocalBinding(binding_id) => { + let var = Local { parent: body_owner?, binding_id }; + PathResolution::Local(var) + } + ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), + ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), + ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), + ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), + ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()), + ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()), + ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()), + }; + Some(res) + }) +} + /// Resolves a path where we know it is a qualifier of another path. /// /// For example, if we have: diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index ca7874c3683c5..03112f6de5afe 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -9,7 +9,7 @@ use hir_def::{ }; use hir_expand::{HirFileId, InFile}; use hir_ty::db::HirDatabase; -use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr}; +use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr}; use crate::{Module, ModuleDef, Semantics}; @@ -32,7 +32,7 @@ pub struct DeclarationLocation { /// This points to the whole syntax node of the declaration. pub ptr: SyntaxNodePtr, /// This points to the [`syntax::ast::Name`] identifier of the declaration. - pub name_ptr: SyntaxNodePtr, + pub name_ptr: AstPtr<syntax::ast::Name>, } impl DeclarationLocation { @@ -49,15 +49,6 @@ impl DeclarationLocation { let node = resolve_node(db, self.hir_file_id, &self.ptr); node.as_ref().original_file_range(db.upcast()) } - - pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> { - if let Some(file_id) = self.hir_file_id.file_id() { - // fast path to prevent parsing - return Some(FileRange { file_id, range: self.name_ptr.text_range() }); - } - let node = resolve_node(db, self.hir_file_id, &self.name_ptr); - node.as_ref().original_file_range_opt(db.upcast()) - } } fn resolve_node( @@ -190,7 +181,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: source.file_id, ptr: SyntaxNodePtr::new(use_tree_src.syntax()), - name_ptr: SyntaxNodePtr::new(name.syntax()), + name_ptr: AstPtr::new(&name), }; self.symbols.push(FileSymbol { @@ -294,7 +285,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: source.file_id, ptr: SyntaxNodePtr::new(source.value.syntax()), - name_ptr: SyntaxNodePtr::new(name_node.syntax()), + name_ptr: AstPtr::new(&name_node), }; if let Some(attrs) = def.attrs(self.db) { @@ -327,7 +318,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: declaration.file_id, ptr: SyntaxNodePtr::new(module.syntax()), - name_ptr: SyntaxNodePtr::new(name_node.syntax()), + name_ptr: AstPtr::new(&name_node), }; let def = ModuleDef::Module(module_id.into()); diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index 11facc5bee2ac..0f2d1057c0a45 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -328,6 +328,7 @@ fn augment_references_with_imports( references .iter() .filter_map(|FileReference { range, name, ..
}| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module())) }) .map(|(range, name, ref_module)| { @@ -455,6 +456,7 @@ fn add_enum_def( .iter() .flat_map(|(_, refs)| refs) .filter_map(|FileReference { name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 1f3caa7db33f1..79b46d66121eb 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -186,6 +186,7 @@ fn augment_references_with_imports( references .iter() .filter_map(|FileReference { name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module())) }) .map(|(name, ref_module)| { @@ -238,6 +239,7 @@ fn add_tuple_struct_def( .iter() .flat_map(|(_, refs)| refs) .filter_map(|FileReference { name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 31a1ff496e133..9d72d3af096a7 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -1,4 +1,5 @@ use crate::{AssistContext, Assists}; +use hir::DescendPreference; use ide_db::{ assists::{AssistId, AssistKind}, syntax_helpers::{ @@ -35,7 +36,8 @@ pub(crate) fn extract_expressions_from_format_string( let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; let expanded_t = ast::String::cast( - ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()), + ctx.sema + .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()), )?; if !is_format_string(&expanded_t) { return None; diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 6b48d15881523..347a3e9ba0747 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -3,8 +3,8 @@ use std::iter; use ast::make; use either::Either; use hir::{ - HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics, - TypeInfo, TypeParam, + DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, + PathResolution, Semantics, TypeInfo, TypeParam, }; use ide_db::{ defs::{Definition, NameRefClass}, @@ -147,7 +147,12 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op _ => format_function(ctx, module, &fun, old_indent, new_indent), }; - if fn_def.contains("ControlFlow") { + // There are external control flows + if fun + .control_flow + .kind + .is_some_and(|kind| matches!(kind, FlowKind::Break(_, _) | FlowKind::Continue(_))) + { let scope = match scope { ImportScope::File(it) => ImportScope::File(builder.make_mut(it)), ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)), @@ -751,7 +756,7 @@ impl FunctionBody { 
.descendants_with_tokens() .filter_map(SyntaxElement::into_token) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) - .flat_map(|t| sema.descend_into_macros(t, 0.into())) + .flat_map(|t| sema.descend_into_macros(DescendPreference::None, t)) .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); } } @@ -4968,6 +4973,27 @@ pub fn testfn(arg: &mut Foo) { fn $0fun_name(arg: &mut Foo) { arg.field = 8; } +"#, + ); + } + #[test] + fn does_not_import_control_flow() { + check_assist( + extract_function, + r#" +//- minicore: try +fn func() { + $0let cf = "I'm ControlFlow";$0 +} +"#, + r#" +fn func() { + fun_name(); +} + +fn $0fun_name() { + let cf = "I'm ControlFlow"; +} "#, ); } diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs index 6839c5820dc99..4b9fedc7e8557 100644 --- a/crates/ide-assists/src/handlers/extract_module.rs +++ b/crates/ide-assists/src/handlers/extract_module.rs @@ -3,7 +3,7 @@ use std::{ iter, }; -use hir::{HasSource, ModuleSource}; +use hir::{HasSource, HirFileIdExt, ModuleSource}; use ide_db::{ assists::{AssistId, AssistKind}, base_db::FileId, diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs index c9f272474e7e1..204e796fa2c0d 100644 --- a/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/crates/ide-assists/src/handlers/fix_visibility.rs @@ -1,4 +1,6 @@ -use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef}; +use hir::{ + db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, +}; use ide_db::base_db::FileId; use syntax::{ ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, diff --git a/crates/ide-assists/src/handlers/flip_binexpr.rs b/crates/ide-assists/src/handlers/flip_binexpr.rs index 2ea6f58fa0f1e..8b46a23f9a64d 100644 --- a/crates/ide-assists/src/handlers/flip_binexpr.rs +++ b/crates/ide-assists/src/handlers/flip_binexpr.rs @@ -19,8 +19,19 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // ``` pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let expr = ctx.find_node_at_offset::()?; - let lhs = expr.lhs()?.syntax().clone(); let rhs = expr.rhs()?.syntax().clone(); + let lhs = expr.lhs()?.syntax().clone(); + + let lhs = if let Some(bin_expr) = BinExpr::cast(lhs.clone()) { + if bin_expr.op_kind() == expr.op_kind() { + bin_expr.rhs()?.syntax().clone() + } else { + lhs + } + } else { + lhs + }; + let op_range = expr.op_token()?.text_range(); // The assist should be applied only if the cursor is on the operator let cursor_in_range = op_range.contains_range(ctx.selection_trimmed()); @@ -114,6 +125,24 @@ mod tests { ) } + #[test] + fn flip_binexpr_works_for_lhs_arith() { + check_assist( + flip_binexpr, + r"fn f() { let res = 1 + (2 - 3) +$0 4 + 5; }", + r"fn f() { let res = 1 + 4 + (2 - 3) + 5; }", + ) + } + + #[test] + fn flip_binexpr_works_for_lhs_cmp() { + check_assist( + flip_binexpr, + r"fn f() { let res = 1 + (2 - 3) >$0 4 + 5; }", + r"fn f() { let res = 4 + 5 < 1 + (2 - 3); }", + ) + } + #[test] fn flip_binexpr_works_inside_match() { check_assist( diff --git a/crates/ide-assists/src/handlers/generate_constant.rs b/crates/ide-assists/src/handlers/generate_constant.rs index eccd7675fbaae..a4e8e7388f624 100644 --- a/crates/ide-assists/src/handlers/generate_constant.rs +++ b/crates/ide-assists/src/handlers/generate_constant.rs @@ -1,5 +1,5 @@ use 
crate::assist_context::{AssistContext, Assists}; -use hir::{HasVisibility, HirDisplay, Module}; +use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module}; use ide_db::{ assists::{AssistId, AssistKind}, base_db::{FileId, Upcast}, diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index bbac0a26ea4ca..db1e0ceaec1e7 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -1,6 +1,7 @@ use std::collections::HashSet; -use hir::{self, HasCrate, HasSource, HasVisibility}; +use hir::{self, HasCrate, HasVisibility}; +use ide_db::path_transform::PathTransform; use syntax::{ ast::{ self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _, @@ -105,7 +106,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' target, |edit| { // Create the function - let method_source = match method.source(ctx.db()) { + let method_source = match ctx.sema.source(method) { Some(source) => source.value, None => return, }; @@ -130,7 +131,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' vis, fn_name, type_params, - None, + method_source.where_clause(), params, body, ret_type, @@ -183,6 +184,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let assoc_items = impl_def.get_or_create_assoc_item_list(); assoc_items.add_item(f.clone().into()); + if let Some((target, source)) = + ctx.sema.scope(strukt.syntax()).zip(ctx.sema.scope(method_source.syntax())) + { + PathTransform::generic_transformation(&target, &source).apply(f.syntax()); + } + if let Some(cap) = ctx.config.snippet_cap { edit.add_tabstop_before(cap, f) } @@ -454,6 +461,209 @@ impl Person { ); } + #[test] + fn test_preserve_where_clause() { + check_assist( + generate_delegate_methods, + r#" +struct Inner(T); +impl Inner { + fn get(&self) -> T + where + T: Copy, + T: PartialEq, + { + self.0 + } +} + +struct Struct { + $0field: Inner, +} +"#, + r#" +struct Inner(T); +impl Inner { + fn get(&self) -> T + where + T: Copy, + T: PartialEq, + { + self.0 + } +} + +struct Struct { + field: Inner, +} + +impl Struct { + $0fn get(&self) -> T where + T: Copy, + T: PartialEq, { + self.field.get() + } +} +"#, + ); + } + + #[test] + fn test_fixes_basic_self_references() { + check_assist( + generate_delegate_methods, + r#" +struct Foo { + field: $0Bar, +} + +struct Bar; + +impl Bar { + fn bar(&self, other: Self) -> Self { + other + } +} +"#, + r#" +struct Foo { + field: Bar, +} + +impl Foo { + $0fn bar(&self, other: Bar) -> Bar { + self.field.bar(other) + } +} + +struct Bar; + +impl Bar { + fn bar(&self, other: Self) -> Self { + other + } +} +"#, + ); + } + + #[test] + fn test_fixes_nested_self_references() { + check_assist( + generate_delegate_methods, + r#" +struct Foo { + field: $0Bar, +} + +struct Bar; + +impl Bar { + fn bar(&mut self, a: (Self, [Self; 4]), b: Vec) {} +} +"#, + r#" +struct Foo { + field: Bar, +} + +impl Foo { + $0fn bar(&mut self, a: (Bar, [Bar; 4]), b: Vec) { + self.field.bar(a, b) + } +} + +struct Bar; + +impl Bar { + fn bar(&mut self, a: (Self, [Self; 4]), b: Vec) {} +} +"#, + ); + } + + #[test] + fn test_fixes_self_references_with_lifetimes_and_generics() { + check_assist( + generate_delegate_methods, + r#" +struct Foo<'a, T> { + $0field: Bar<'a, T>, +} + +struct Bar<'a, T>(&'a T); + +impl<'a, T> Bar<'a, T> { + fn bar(self, mut b: Vec<&'a Self>) -> &'a 
Self { + b.pop().unwrap() + } +} +"#, + r#" +struct Foo<'a, T> { + field: Bar<'a, T>, +} + +impl<'a, T> Foo<'a, T> { + $0fn bar(self, mut b: Vec<&'a Bar<'_, T>>) -> &'a Bar<'_, T> { + self.field.bar(b) + } +} + +struct Bar<'a, T>(&'a T); + +impl<'a, T> Bar<'a, T> { + fn bar(self, mut b: Vec<&'a Self>) -> &'a Self { + b.pop().unwrap() + } +} +"#, + ); + } + + #[test] + fn test_fixes_self_references_across_macros() { + check_assist( + generate_delegate_methods, + r#" +//- /bar.rs +macro_rules! test_method { + () => { + pub fn test(self, b: Bar) -> Self { + self + } + }; +} + +pub struct Bar; + +impl Bar { + test_method!(); +} + +//- /main.rs +mod bar; + +struct Foo { + $0bar: bar::Bar, +} +"#, + r#" +mod bar; + +struct Foo { + bar: bar::Bar, +} + +impl Foo { + $0pub fn test(self,b:bar::Bar) ->bar::Bar { + self.bar.test(b) + } +} +"#, + ); + } + #[test] fn test_generate_delegate_visibility() { check_assist_not_applicable( diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 184f523e01bd9..1a1e992e28a48 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -1,4 +1,4 @@ -use hir::{HasSource, HirDisplay, InFile}; +use hir::{HasSource, HirDisplay, InRealFile}; use ide_db::assists::{AssistId, AssistKind}; use syntax::{ ast::{self, make, HasArgList}, @@ -114,14 +114,14 @@ fn add_variant_to_accumulator( parent: PathParent, ) -> Option<()> { let db = ctx.db(); - let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?; + let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?; acc.add( AssistId("generate_enum_variant", AssistKind::Generate), "Generate variant", target, |builder| { - builder.edit_file(file_id.original_file(db)); + builder.edit_file(file_id); let node = builder.make_mut(enum_node); let variant = make_variant(ctx, name_ref, parent); node.variant_list().map(|it| it.add_variant(variant.clone_for_update())); diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index f74fc5df4bd26..a113c817f7e94 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -1,5 +1,6 @@ use hir::{ - Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo, + Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type, + TypeInfo, }; use ide_db::{ base_db::FileId, @@ -510,7 +511,7 @@ fn assoc_fn_target_info( } fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { - match &target { + match target { GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(), GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'), } diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index f8c75bdb0de1c..5b9cc5f66cde1 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -8,7 +8,7 @@ use ide_db::{ defs::Definition, imports::insert_use::remove_path_if_in_use_stmt, path_transform::PathTransform, - search::{FileReference, SearchScope}, + search::{FileReference, FileReferenceNode, SearchScope}, source_change::SourceChangeBuilder, syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref}, RootDatabase, @@ -148,7 
+148,7 @@ pub(super) fn split_refs_and_uses( ) -> (Vec, Vec) { iter.into_iter() .filter_map(|file_ref| match file_ref.name { - ast::NameLike::NameRef(name_ref) => Some(name_ref), + FileReferenceNode::NameRef(name_ref) => Some(name_ref), _ => None, }) .filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) { @@ -346,7 +346,7 @@ fn inline( match param.as_local(sema.db) { Some(l) => usages_for_locals(l) .map(|FileReference { name, range, .. }| match name { - ast::NameLike::NameRef(_) => body + FileReferenceNode::NameRef(_) => body .syntax() .covering_element(range) .ancestors() @@ -372,7 +372,7 @@ fn inline( if let Some(self_local) = params[0].2.as_local(sema.db) { usages_for_locals(self_local) .filter_map(|FileReference { name, range, .. }| match name { - ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)), + FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)), _ => None, }) .for_each(|usage| { diff --git a/crates/ide-assists/src/handlers/inline_local_variable.rs b/crates/ide-assists/src/handlers/inline_local_variable.rs index 49dcde75d2b31..5d8ba43ec8461 100644 --- a/crates/ide-assists/src/handlers/inline_local_variable.rs +++ b/crates/ide-assists/src/handlers/inline_local_variable.rs @@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics}; use ide_db::{ base_db::FileId, defs::Definition, - search::{FileReference, UsageSearchResult}, + search::{FileReference, FileReferenceNode, UsageSearchResult}, RootDatabase, }; use syntax::{ @@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) let wrap_in_parens = references .into_iter() .filter_map(|FileReference { range, name, .. }| match name { - ast::NameLike::NameRef(name) => Some((range, name)), + FileReferenceNode::NameRef(name) => Some((range, name)), _ => None, }) .map(|(range, name_ref)| { diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs index 5fcab8c02b06d..ee44064e7c5e7 100644 --- a/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,6 +1,6 @@ use std::collections::{hash_map::Entry, HashMap}; -use hir::{InFile, Module, ModuleSource}; +use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; use ide_db::{ base_db::FileRange, defs::Definition, @@ -167,7 +167,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec Vec { let (file_id, range) = { let InFile { file_id, value } = module.definition_source(db); - if let Some((file_id, call_source)) = file_id.original_call_node(db) { + if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) { (file_id, Some(call_source.text_range())) } else { ( diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 69a4e748b7c5d..b54e4204e3f39 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1,4 +1,4 @@ -use hir::{InFile, ModuleDef}; +use hir::{InFile, MacroFileIdExt, ModuleDef}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use itertools::Itertools; use syntax::{ @@ -43,12 +43,12 @@ pub(crate) fn replace_derive_with_manual_impl( ) -> Option<()> { let attr = ctx.find_node_at_offset_with_descend::()?; let path = attr.path()?; - let 
hir_file = ctx.sema.hir_file_for(attr.syntax()); - if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) { + let macro_file = ctx.sema.hir_file_for(attr.syntax()).macro_file()?; + if !macro_file.is_derive_attr_pseudo_expansion(ctx.db()) { return None; } - let InFile { file_id, value } = hir_file.call_node(ctx.db())?; + let InFile { file_id, value } = macro_file.call_node(ctx.db()); if file_id.is_macro() { // FIXME: make this work in macro files return None; @@ -56,7 +56,7 @@ pub(crate) fn replace_derive_with_manual_impl( // collect the derive paths from the #[derive] expansion let current_derives = ctx .sema - .parse_or_expand(hir_file) + .parse_or_expand(macro_file.into()) .descendants() .filter_map(ast::Attr::cast) .filter_map(|attr| attr.path()) diff --git a/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs index c7c0be4c7d4f8..e61ce481727a1 100644 --- a/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs +++ b/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs @@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl( let mut path_types_to_replace = Vec::new(); for (_a, refs) in usage_refs.iter() { for usage_ref in refs { - let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?; + let Some(name_like) = usage_ref.name.clone().into_name_like() else { + continue; + }; + let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?; path_types_to_replace.push(param_node); } } diff --git a/crates/ide-assists/src/handlers/unnecessary_async.rs b/crates/ide-assists/src/handlers/unnecessary_async.rs index 7f612c2a142c7..1cfa291a29d8e 100644 --- a/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -2,11 +2,11 @@ use ide_db::{ assists::{AssistId, AssistKind}, base_db::FileId, defs::Definition, - search::FileReference, + search::{FileReference, FileReferenceNode}, syntax_helpers::node_ext::full_path_of_name_ref, }; use syntax::{ - ast::{self, NameLike, NameRef}, + ast::{self, NameRef}, AstNode, SyntaxKind, TextRange, }; @@ -76,7 +76,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O for await_expr in find_all_references(ctx, &Definition::Function(fn_def)) // Keep only references that correspond NameRefs. .filter_map(|(_, reference)| match reference.name { - NameLike::NameRef(nameref) => Some(nameref), + FileReferenceNode::NameRef(nameref) => Some(nameref), _ => None, }) // Keep only references that correspond to await expressions diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index e6f03214ed30d..1e4d1c94f5bee 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -58,7 +58,7 @@ //! See also this post: //! -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[allow(unused)] macro_rules! 
eprintln { diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 57e06461099e5..613a35dcb1088 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -940,9 +940,9 @@ impl Foo { fn foo(&self) { $0 } }"#, expect![[r#" fd self.field i32 lc self &Foo - sp Self - st Foo - bt u32 + sp Self Foo + st Foo Foo + bt u32 u32 me self.foo() fn(&self) "#]], ); @@ -954,9 +954,9 @@ impl Foo { fn foo(&mut self) { $0 } }"#, expect![[r#" fd self.0 i32 lc self &mut Foo - sp Self - st Foo - bt u32 + sp Self Foo + st Foo Foo + bt u32 u32 me self.foo() fn(&mut self) "#]], ); diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 42dfbfc7d9a14..b0e4d8a5acd10 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -417,10 +417,10 @@ impl Test for T { } ", expect![[r#" - sp Self - st T + sp Self T + st T T tt Test - bt u32 + bt u32 u32 "#]], ); @@ -526,10 +526,10 @@ impl Test for T { } ", expect![[r#" - sp Self - st T + sp Self T + st T T tt Test - bt u32 + bt u32 u32 "#]], ); @@ -543,10 +543,10 @@ impl Test for T { } ", expect![[r#" - sp Self - st T + sp Self T + st T T tt Test - bt u32 + bt u32 u32 "#]], ); @@ -562,10 +562,10 @@ impl Test for T { } ", expect![[r#" - sp Self - st T + sp Self T + st T T tt Test - bt u32 + bt u32 u32 "#]], ); @@ -610,10 +610,10 @@ impl Test for T { } ", expect![[r#" - sp Self - st T + sp Self T + st T T tt Test - bt u32 + bt u32 u32 "#]], ); diff --git a/crates/ide-completion/src/completions/mod_.rs b/crates/ide-completion/src/completions/mod_.rs index 1e09894059d5a..5d138eea46f4b 100644 --- a/crates/ide-completion/src/completions/mod_.rs +++ b/crates/ide-completion/src/completions/mod_.rs @@ -2,7 +2,7 @@ use std::iter; -use hir::{Module, ModuleSource}; +use hir::{HirFileIdExt, Module, ModuleSource}; use ide_db::{ base_db::{SourceDatabaseExt, VfsPath}, FxHashSet, RootDatabase, SymbolKind, diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index 945c3945bfa39..46213deb0afef 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -427,6 +427,31 @@ fn foo() { ..Default::default() }; } +"#, + ); + } + + #[test] + fn callable_field_struct_init() { + check_edit( + "field", + r#" +struct S { + field: fn(), +} + +fn main() { + S {fi$0 +} +"#, + r#" +struct S { + field: fn(), +} + +fn main() { + S {field +} "#, ); } diff --git a/crates/ide-completion/src/completions/use_.rs b/crates/ide-completion/src/completions/use_.rs index 7a60030e9ef72..81107c1f419d2 100644 --- a/crates/ide-completion/src/completions/use_.rs +++ b/crates/ide-completion/src/completions/use_.rs @@ -71,9 +71,9 @@ pub(crate) fn complete_use_path( if add_resolution { let mut builder = Builder::from_resolution(ctx, path_ctx, name, def); - builder.set_relevance(CompletionRelevance { + builder.with_relevance(|r| CompletionRelevance { is_name_already_imported, - ..Default::default() + ..r }); acc.add(builder.build(ctx.db)); } diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 99b895eed4d23..b982322a73499 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -1,6 +1,6 @@ //! See `CompletionItem` structure. 
-use std::fmt; +use std::{fmt, mem}; use hir::Mutability; use ide_db::{ @@ -570,6 +570,13 @@ impl Builder { self.relevance = relevance; self } + pub(crate) fn with_relevance( + &mut self, + relevance: impl FnOnce(CompletionRelevance) -> CompletionRelevance, + ) -> &mut Builder { + self.relevance = relevance(mem::take(&mut self.relevance)); + self + } pub(crate) fn trigger_call_info(&mut self) -> &mut Builder { self.trigger_call_info = true; self diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index aaf7cd7843afe..37a2828e8dc8f 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -1,6 +1,6 @@ //! `completions` crate provides utilities for generating completions of user input. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod completions; mod config; diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 048730c078d7b..2ea3f74d18bce 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -10,7 +10,7 @@ pub(crate) mod variant; pub(crate) mod union_literal; pub(crate) mod literal; -use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef}; +use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; use ide_db::{ documentation::{Documentation, HasDocs}, helpers::item_name, @@ -169,14 +169,14 @@ pub(crate) fn render_field( if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) { builder.insert(receiver.syntax().text_range().start(), "(".to_string()); builder.insert(ctx.source_range().end(), ")".to_string()); - } - } - let is_parens_needed = - !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); + let is_parens_needed = + !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); - if is_parens_needed { - builder.insert(ctx.source_range().end(), "()".to_string()); + if is_parens_needed { + builder.insert(ctx.source_range().end(), "()".to_string()); + } + } } } @@ -340,6 +340,7 @@ fn render_resolution_path( let cap = ctx.snippet_cap(); let db = completion.db; let config = completion.config; + let requires_import = import_to_add.is_some(); let name = local_name.to_smol_str(); let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution); @@ -370,8 +371,8 @@ fn render_resolution_path( } } } - if let ScopeDef::Local(local) = resolution { - let ty = local.ty(db); + + let mut set_item_relevance = |ty: Type| { if !ty.is_unknown() { item.detail(ty.display(db).to_string()); } @@ -379,12 +380,38 @@ fn render_resolution_path( item.set_relevance(CompletionRelevance { type_match: compute_type_match(completion, &ty), exact_name_match: compute_exact_name_match(completion, &name), - is_local: true, + is_local: matches!(resolution, ScopeDef::Local(_)), + requires_import, ..CompletionRelevance::default() }); path_ref_match(completion, path_ctx, &ty, &mut item); }; + + match resolution { + ScopeDef::Local(local) => set_item_relevance(local.ty(db)), + ScopeDef::ModuleDef(ModuleDef::Adt(adt)) | ScopeDef::AdtSelfType(adt) => { + set_item_relevance(adt.ty(db)) + } + // Filtered out above + ScopeDef::ModuleDef( + ModuleDef::Function(_) | ModuleDef::Variant(_) | ModuleDef::Macro(_), + ) => (), + ScopeDef::ModuleDef(ModuleDef::Const(konst)) => set_item_relevance(konst.ty(db)), + ScopeDef::ModuleDef(ModuleDef::Static(stat)) => set_item_relevance(stat.ty(db)), + ScopeDef::ModuleDef(ModuleDef::BuiltinType(bt)) 
=> set_item_relevance(bt.ty(db)), + ScopeDef::ImplSelfType(imp) => set_item_relevance(imp.self_ty(db)), + ScopeDef::GenericParam(_) + | ScopeDef::Label(_) + | ScopeDef::Unknown + | ScopeDef::ModuleDef( + ModuleDef::Trait(_) + | ModuleDef::TraitAlias(_) + | ModuleDef::Module(_) + | ModuleDef::TypeAlias(_), + ) => (), + }; + item } @@ -471,6 +498,21 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo } } +// FIXME: This checks types without possible coercions which some completions might want to do +fn match_types( + ctx: &CompletionContext<'_>, + ty1: &hir::Type, + ty2: &hir::Type, +) -> Option { + if ty1 == ty2 { + Some(CompletionRelevanceTypeMatch::Exact) + } else if ty1.could_unify_with(ctx.db, ty2) { + Some(CompletionRelevanceTypeMatch::CouldUnify) + } else { + None + } +} + fn compute_type_match( ctx: &CompletionContext<'_>, completion_ty: &hir::Type, @@ -483,13 +525,7 @@ fn compute_type_match( return None; } - if completion_ty == expected_type { - Some(CompletionRelevanceTypeMatch::Exact) - } else if expected_type.could_unify_with(ctx.db, completion_ty) { - Some(CompletionRelevanceTypeMatch::CouldUnify) - } else { - None - } + match_types(ctx, expected_type, completion_ty) } fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool { @@ -635,6 +671,330 @@ mod tests { } } + #[test] + fn set_struct_type_completion_info() { + check_relevance( + r#" +//- /lib.rs crate:dep + +pub mod test_mod_b { + pub struct Struct {} +} + +pub mod test_mod_a { + pub struct Struct {} +} + +//- /main.rs crate:main deps:dep + +fn test(input: dep::test_mod_b::Struct) { } + +fn main() { + test(Struct$0); +} +"#, + expect![[r#" + st dep::test_mod_b::Struct {…} [type_could_unify] + st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import] + fn main() [] + fn test(…) [] + md dep [] + st Struct (use dep::test_mod_a::Struct) [requires_import] + "#]], + ); + } + + #[test] + fn set_union_type_completion_info() { + check_relevance( + r#" +//- /lib.rs crate:dep + +pub mod test_mod_b { + pub union Union { + a: i32, + b: i32 + } +} + +pub mod test_mod_a { + pub enum Union { + a: i32, + b: i32 + } +} + +//- /main.rs crate:main deps:dep + +fn test(input: dep::test_mod_b::Union) { } + +fn main() { + test(Union$0); +} +"#, + expect![[r#" + un Union (use dep::test_mod_b::Union) [type_could_unify+requires_import] + fn main() [] + fn test(…) [] + md dep [] + en Union (use dep::test_mod_a::Union) [requires_import] + "#]], + ); + } + + #[test] + fn set_enum_type_completion_info() { + check_relevance( + r#" +//- /lib.rs crate:dep + +pub mod test_mod_b { + pub enum Enum { + variant + } +} + +pub mod test_mod_a { + pub enum Enum { + variant + } +} + +//- /main.rs crate:main deps:dep + +fn test(input: dep::test_mod_b::Enum) { } + +fn main() { + test(Enum$0); +} +"#, + expect![[r#" + ev dep::test_mod_b::Enum::variant [type_could_unify] + en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import] + fn main() [] + fn test(…) [] + md dep [] + en Enum (use dep::test_mod_a::Enum) [requires_import] + "#]], + ); + } + + #[test] + fn set_enum_variant_type_completion_info() { + check_relevance( + r#" +//- /lib.rs crate:dep + +pub mod test_mod_b { + pub enum Enum { + Variant + } +} + +pub mod test_mod_a { + pub enum Enum { + Variant + } +} + +//- /main.rs crate:main deps:dep + +fn test(input: dep::test_mod_b::Enum) { } + +fn main() { + test(Variant$0); +} +"#, + expect![[r#" + ev dep::test_mod_b::Enum::Variant [type_could_unify] + fn main() [] + fn 
test(…) [] + md dep [] + "#]], + ); + } + + #[test] + fn set_fn_type_completion_info() { + check_relevance( + r#" +//- /lib.rs crate:dep + +pub mod test_mod_b { + pub fn function(j: isize) -> i32 {} +} + +pub mod test_mod_a { + pub fn function(i: usize) -> i32 {} +} + +//- /main.rs crate:main deps:dep + +fn test(input: fn(usize) -> i32) { } + +fn main() { + test(function$0); +} +"#, + expect![[r#" + fn main [] + fn test [] + md dep [] + fn function (use dep::test_mod_a::function) [requires_import] + fn function (use dep::test_mod_b::function) [requires_import] + "#]], + ); + } + + #[test] + fn set_const_type_completion_info() { + check_relevance( + r#" +//- /lib.rs crate:dep + +pub mod test_mod_b { + pub const CONST: i32 = 1; +} + +pub mod test_mod_a { + pub const CONST: i64 = 2; +} + +//- /main.rs crate:main deps:dep + +fn test(input: i32) { } + +fn main() { + test(CONST$0); +} +"#, + expect![[r#" + ct CONST (use dep::test_mod_b::CONST) [type_could_unify+requires_import] + fn main() [] + fn test(…) [] + md dep [] + ct CONST (use dep::test_mod_a::CONST) [requires_import] + "#]], + ); + } + + #[test] + fn set_static_type_completion_info() { + check_relevance( + r#" +//- /lib.rs crate:dep + +pub mod test_mod_b { + pub static STATIC: i32 = 5; +} + +pub mod test_mod_a { + pub static STATIC: i64 = 5; +} + +//- /main.rs crate:main deps:dep + +fn test(input: i32) { } + +fn main() { + test(STATIC$0); +} +"#, + expect![[r#" + sc STATIC (use dep::test_mod_b::STATIC) [type_could_unify+requires_import] + fn main() [] + fn test(…) [] + md dep [] + sc STATIC (use dep::test_mod_a::STATIC) [requires_import] + "#]], + ); + } + + #[test] + fn set_self_type_completion_info_with_params() { + check_relevance( + r#" +//- /lib.rs crate:dep +pub struct Struct; + +impl Struct { + pub fn Function(&self, input: i32) -> bool { + false + } +} + + +//- /main.rs crate:main deps:dep + +use dep::Struct; + + +fn test(input: fn(&dep::Struct, i32) -> bool) { } + +fn main() { + test(Struct::Function$0); +} + +"#, + expect![[r#" + me Function [] + "#]], + ); + } + + #[test] + fn set_self_type_completion_info() { + check_relevance( + r#" +//- /main.rs crate:main + +struct Struct; + +impl Struct { +fn test(&self) { + func(Self$0); + } +} + +fn func(input: Struct) { } + +"#, + expect![[r#" + st Struct [type] + st Self [type] + sp Self [type] + st Struct [type] + lc self [local] + fn func(…) [] + me self.test() [] + "#]], + ); + } + + #[test] + fn set_builtin_type_completion_info() { + check_relevance( + r#" +//- /main.rs crate:main + +fn test(input: bool) { } + pub Input: bool = false; + +fn main() { + let input = false; + let inputbad = 3; + test(inp$0); +} +"#, + expect![[r#" + lc input [type+name+local] + lc inputbad [local] + fn main() [] + fn test(…) [] + "#]], + ); + } + #[test] fn enum_detail_includes_record_fields() { check( @@ -1026,6 +1386,7 @@ use self::E::*; kind: SymbolKind( Enum, ), + detail: "E", documentation: Documentation( "enum docs", ), @@ -1270,6 +1631,7 @@ fn go(world: &WorldSnapshot) { go(w$0) } st WorldSnapshot {…} [] st &WorldSnapshot {…} [type] st WorldSnapshot [] + st &WorldSnapshot [type] fn go(…) [] "#]], ); @@ -1369,6 +1731,7 @@ fn main() { st S [] st &mut S [type] st S [] + st &mut S [type] fn foo(…) [] fn main() [] "#]], @@ -1385,7 +1748,7 @@ fn main() { expect![[r#" lc s [type+name+local] st S [type] - st S [] + st S [type] fn foo(…) [] fn main() [] "#]], @@ -1402,7 +1765,7 @@ fn main() { expect![[r#" lc ssss [type+local] st S [type] - st S [] + st S [type] fn foo(…) [] fn main() [] "#]], @@ 
-1441,7 +1804,9 @@ fn main() { st S [] st &S [type] st S [] + st &S [type] st T [] + st &T [type] fn foo(…) [] fn main() [] md core [] @@ -1487,7 +1852,9 @@ fn main() { st S [] st &mut S [type] st S [] + st &mut S [type] st T [] + st &mut T [type] fn foo(…) [] fn main() [] md core [] @@ -1526,7 +1893,7 @@ fn bar(t: Foo) {} expect![[r#" ev Foo::A [type] ev Foo::B [type] - en Foo [] + en Foo [type] fn bar(…) [] fn foo() [] "#]], @@ -1549,6 +1916,7 @@ fn bar(t: &Foo) {} ev Foo::B [] ev &Foo::B [type] en Foo [] + en &Foo [type] fn bar(…) [] fn foo() [] "#]], @@ -1582,7 +1950,9 @@ fn main() { st S [] st &S [type] st S [] + st &S [type] st T [] + st &T [type] fn bar() [] fn &bar() [type] fn foo(…) [] @@ -1791,8 +2161,8 @@ fn foo() { lc foo [type+local] ev Foo::A(…) [type_could_unify] ev Foo::B [type_could_unify] + en Foo [type_could_unify] fn foo() [] - en Foo [] fn bar() [] fn baz() [] "#]], diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index dfae715afe36d..d23ed71fdcc65 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -62,6 +62,7 @@ fn render( ), _ => (name.unescaped().to_smol_str(), name.to_smol_str()), }; + let mut item = CompletionItem::new( if func.self_param(db).is_some() { CompletionItemKind::Method @@ -77,8 +78,31 @@ fn render( .as_assoc_item(ctx.db()) .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db())) .map_or(false, |trait_| completion.is_ops_trait(trait_)); + + let (has_dot_receiver, has_call_parens, cap) = match func_kind { + FuncKind::Function(&PathCompletionCtx { + kind: PathKind::Expr { .. }, + has_call_parens, + .. + }) => (false, has_call_parens, ctx.completion.config.snippet_cap), + FuncKind::Method(&DotAccess { kind: DotAccessKind::Method { has_parens }, .. }, _) => { + (true, has_parens, ctx.completion.config.snippet_cap) + } + FuncKind::Method(DotAccess { kind: DotAccessKind::Field { .. }, .. }, _) => { + (true, false, ctx.completion.config.snippet_cap) + } + _ => (false, false, None), + }; + let complete_call_parens = cap + .filter(|_| !has_call_parens) + .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?))); + item.set_relevance(CompletionRelevance { - type_match: compute_type_match(completion, &ret_type), + type_match: if has_call_parens || complete_call_parens.is_some() { + compute_type_match(completion, &ret_type) + } else { + compute_type_match(completion, &func.ty(db)) + }, exact_name_match: compute_exact_name_match(completion, &call), is_op_method, ..ctx.completion_relevance() @@ -108,42 +132,9 @@ fn render( .detail(detail) .lookup_by(name.unescaped().to_smol_str()); - match ctx.completion.config.snippet_cap { - Some(cap) => { - let complete_params = match func_kind { - FuncKind::Function(PathCompletionCtx { - kind: PathKind::Expr { .. }, - has_call_parens: false, - .. - }) => Some(false), - FuncKind::Method( - DotAccess { - kind: - DotAccessKind::Method { has_parens: false } | DotAccessKind::Field { .. }, - .. 
- }, - _, - ) => Some(true), - _ => None, - }; - if let Some(has_dot_receiver) = complete_params { - if let Some((self_param, params)) = - params(ctx.completion, func, &func_kind, has_dot_receiver) - { - add_call_parens( - &mut item, - completion, - cap, - call, - escaped_call, - self_param, - params, - ); - } - } - } - _ => (), - }; + if let Some((cap, (self_param, params))) = complete_call_parens { + add_call_parens(&mut item, completion, cap, call, escaped_call, self_param, params); + } match ctx.import_to_add { Some(import_to_add) => { diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index be5b7f8a3404e..b4f936b35aead 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -26,22 +26,22 @@ fn baz() { "#, // This should not contain `FooDesc {…}`. expect![[r#" - ct CONST - en Enum + ct CONST Unit + en Enum Enum fn baz() fn() fn create_foo(…) fn(&FooDesc) fn function() fn() ma makro!(…) macro_rules! makro md _69latrick md module - sc STATIC - st FooDesc - st Record - st Tuple - st Unit - un Union + sc STATIC Unit + st FooDesc FooDesc + st Record Record + st Tuple Tuple + st Unit Unit + un Union Union ev TupleV(…) TupleV(u32) - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -83,7 +83,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) { lc param0 (i32, i32) lc param1 i32 lc param2 i32 - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -117,24 +117,24 @@ impl Unit { "#, // `self` is in here twice, once as the module, once as the local expect![[r#" - ct CONST + ct CONST Unit cp CONST_PARAM - en Enum + en Enum Enum fn function() fn() fn local_func() fn() lc self Unit ma makro!(…) macro_rules! makro md module md qualified - sp Self - sc STATIC - st Record - st Tuple - st Unit + sp Self Unit + sc STATIC Unit + st Record Record + st Tuple Tuple + st Unit Unit tp TypeParam - un Union + un Union Union ev TupleV(…) TupleV(u32) - bt u32 + bt u32 u32 kw const kw crate:: kw enum @@ -181,18 +181,18 @@ impl Unit { } "#, expect![[r#" - ct CONST - en Enum + ct CONST Unit + en Enum Enum fn function() fn() ma makro!(…) macro_rules! makro md module md qualified - sc STATIC - st Record - st Tuple - st Unit + sc STATIC Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union + un Union Union ev TupleV(…) TupleV(u32) ?? Unresolved "#]], @@ -211,7 +211,7 @@ fn complete_in_block() { "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw enum @@ -256,7 +256,7 @@ fn complete_after_if_expr() { "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw else @@ -304,7 +304,7 @@ fn complete_in_match_arm() { "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -328,7 +328,7 @@ fn completes_in_loop_ctx() { r"fn my() { loop { $0 } }", expect![[r#" fn my() fn() - bt u32 + bt u32 u32 kw break kw const kw continue @@ -370,7 +370,7 @@ fn completes_in_let_initializer() { r#"fn main() { let _ = $0 }"#, expect![[r#" fn main() fn() - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -403,8 +403,8 @@ fn foo() { "#, expect![[r#" fn foo() fn() - st Foo - bt u32 + st Foo Foo + bt u32 u32 kw crate:: kw false kw for @@ -439,7 +439,7 @@ fn foo() { expect![[r#" fn foo() fn() lc bar i32 - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -470,7 +470,7 @@ fn quux(x: i32) { fn quux(…) fn(i32) lc x i32 ma m!(…) macro_rules! 
m - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -497,7 +497,7 @@ fn quux(x: i32) { fn quux(…) fn(i32) lc x i32 ma m!(…) macro_rules! m - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -683,11 +683,11 @@ fn brr() { } "#, expect![[r#" - en HH + en HH HH fn brr() fn() - st YoloVariant + st YoloVariant YoloVariant st YoloVariant {…} YoloVariant { f: usize } - bt u32 + bt u32 u32 kw crate:: kw false kw for @@ -749,7 +749,7 @@ fn foo() { if foo {} $0 } "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw else @@ -789,7 +789,7 @@ fn foo() { if foo {} el$0 } "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw else @@ -829,7 +829,7 @@ fn foo() { bar(if foo {} $0) } "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw crate:: kw else kw else if @@ -853,7 +853,7 @@ fn foo() { bar(if foo {} el$0) } "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw crate:: kw else kw else if @@ -877,7 +877,7 @@ fn foo() { if foo {} $0 let x = 92; } "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw else @@ -917,7 +917,7 @@ fn foo() { if foo {} el$0 let x = 92; } "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw else @@ -957,7 +957,7 @@ fn foo() { if foo {} el$0 { let x = 92; } } "#, expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw else @@ -1009,7 +1009,7 @@ pub struct UnstableThisShouldNotBeListed; expect![[r#" fn main() fn() md std - bt u32 + bt u32 u32 kw const kw crate:: kw enum @@ -1060,8 +1060,8 @@ pub struct UnstableButWeAreOnNightlyAnyway; expect![[r#" fn main() fn() md std - st UnstableButWeAreOnNightlyAnyway - bt u32 + st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway + bt u32 u32 kw const kw crate:: kw enum @@ -1094,3 +1094,157 @@ pub struct UnstableButWeAreOnNightlyAnyway; "#]], ); } + +#[test] +fn inside_format_args_completions_work() { + check_empty( + r#" +//- minicore: fmt +struct Foo; +impl Foo { + fn foo(&self) {} +} + +fn main() { + format_args!("{}", Foo.$0); +} +"#, + expect![[r#" + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + "#]], + ); + check_empty( + r#" +//- minicore: fmt +struct Foo; +impl Foo { + fn foo(&self) {} +} + +fn main() { + format_args!("{}", Foo.f$0); +} +"#, + expect![[r#" + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + "#]], + ); +} + +#[test] +fn inside_faulty_format_args_completions_work() { + check_empty( + r#" +//- minicore: fmt +struct Foo; +impl Foo { + fn foo(&self) {} +} + +fn main() { + format_args!("", Foo.$0); +} +"#, + expect![[r#" + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + "#]], + ); + check_empty( + r#" +//- minicore: fmt +struct Foo; +impl Foo { + fn foo(&self) {} +} + +fn main() { + format_args!("", Foo.f$0); +} +"#, + expect![[r#" + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + "#]], + ); + check_empty( + r#" +//- minicore: fmt +struct Foo; +impl Foo { + fn foo(&self) {} +} + +fn main() { + format_args!("{} {named} 
{captured} {named} {}", a, named = c, Foo.f$0); +} +"#, + expect![[r#" + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + "#]], + ); + check_empty( + r#" +//- minicore: fmt +struct Foo; +impl Foo { + fn foo(&self) {} +} + +fn main() { + format_args!("{", Foo.f$0); +} +"#, + expect![[r#" + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn if if expr {} + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + sn while while expr {} + "#]], + ); +} diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index 21f693d79f1db..9a4a94a24566c 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -139,10 +139,10 @@ fn main() { } "#, expect![[r#" - st Rc (use dep::Rc) - st Rcar (use dep::Rcar) - st Rc (use dep::some_module::Rc) - st Rcar (use dep::some_module::Rcar) + st Rc (use dep::Rc) Rc + st Rcar (use dep::Rcar) Rcar + st Rc (use dep::some_module::Rc) Rc + st Rcar (use dep::some_module::Rcar) Rcar "#]], ); check( @@ -165,12 +165,12 @@ fn main() { } "#, expect![[r#" - ct RC (use dep::RC) - st Rc (use dep::Rc) - st Rcar (use dep::Rcar) - ct RC (use dep::some_module::RC) - st Rc (use dep::some_module::Rc) - st Rcar (use dep::some_module::Rcar) + ct RC (use dep::RC) () + st Rc (use dep::Rc) Rc + st Rcar (use dep::Rcar) Rcar + ct RC (use dep::some_module::RC) () + st Rc (use dep::some_module::Rc) Rc + st Rcar (use dep::some_module::Rcar) Rcar "#]], ); check( @@ -193,8 +193,8 @@ fn main() { } "#, expect![[r#" - ct RC (use dep::RC) - ct RC (use dep::some_module::RC) + ct RC (use dep::RC) () + ct RC (use dep::some_module::RC) () "#]], ); } @@ -227,10 +227,10 @@ fn main() { } "#, expect![[r#" - st ThirdStruct (use dep::some_module::ThirdStruct) - st AfterThirdStruct (use dep::some_module::AfterThirdStruct) - st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct) - "#]], + st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct + st AfterThirdStruct (use dep::some_module::AfterThirdStruct) AfterThirdStruct + st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct) ThiiiiiirdStruct + "#]], ); } @@ -309,7 +309,7 @@ fn trait_const_fuzzy_completion() { check( fixture, expect![[r#" - ct SPECIAL_CONST (use dep::test_mod::TestTrait) + ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 "#]], ); @@ -597,7 +597,7 @@ fn main() { } "#, expect![[r#" - ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED + ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED "#]], ); @@ -717,7 +717,7 @@ fn main() { check( fixture, expect![[r#" - st Item (use foo::bar::baz::Item) + st Item (use foo::bar::baz::Item) Item "#]], ); @@ -759,7 +759,7 @@ fn main() { check( fixture, expect![[r#" - ct TEST_ASSOC (use foo::Item) + ct TEST_ASSOC (use foo::Item) usize "#]], ); @@ -803,8 +803,8 @@ fn main() { check( fixture, expect![[r#" - ct TEST_ASSOC (use foo::bar::Item) - "#]], + ct TEST_ASSOC (use foo::bar::Item) usize + "#]], ); check_edit( @@ -897,7 +897,7 @@ fn main() { TES$0 }"#, expect![[r#" - ct TEST_CONST (use foo::TEST_CONST) + ct TEST_CONST (use foo::TEST_CONST) usize "#]], ); @@ -914,7 +914,7 @@ fn main() { tes$0 }"#, expect![[r#" - ct TEST_CONST (use foo::TEST_CONST) + ct TEST_CONST (use foo::TEST_CONST) 
usize fn test_function() (use foo::test_function) fn() -> i32 "#]], ); @@ -1138,8 +1138,8 @@ mod mud { } "#, expect![[r#" - st Struct (use crate::Struct) - "#]], + st Struct (use crate::Struct) Struct + "#]], ); } @@ -1250,7 +1250,7 @@ enum Foo { } }"#, expect![[r#" - st Barbara (use foo::Barbara) + st Barbara (use foo::Barbara) Barbara "#]], ) } diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs index 3ef2a7c942bce..de3fd05189f37 100644 --- a/crates/ide-completion/src/tests/item.rs +++ b/crates/ide-completion/src/tests/item.rs @@ -18,15 +18,15 @@ fn target_type_or_trait_in_impl_block() { impl Tra$0 "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -40,15 +40,15 @@ fn target_type_in_trait_impl_block() { impl Trait for Str$0 "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index b2e8274a84d7d..67cf551fce84c 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -435,7 +435,7 @@ fn foo() { } "#, expect![[r#" - st Bar + st Bar Bar kw crate:: kw self:: "#]], @@ -450,7 +450,7 @@ fn foo() { } "#, expect![[r#" - st Foo + st Foo Foo kw crate:: kw self:: "#]], diff --git a/crates/ide-completion/src/tests/predicate.rs b/crates/ide-completion/src/tests/predicate.rs index 789ad66345b12..46a3e97d3e92d 100644 --- a/crates/ide-completion/src/tests/predicate.rs +++ b/crates/ide-completion/src/tests/predicate.rs @@ -16,16 +16,16 @@ fn predicate_start() { struct Foo<'lt, T, const C: usize> where $0 {} "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> - st Record - st Tuple - st Unit + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -89,16 +89,16 @@ fn param_list_for_for_pred() { struct Foo<'lt, T, const C: usize> where for<'a> $0 {} "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> - st Record - st Tuple - st Unit + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -114,16 +114,16 @@ impl Record { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! 
makro md module - sp Self - st Record - st Tuple - st Unit + sp Self Record + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index 65cefdb0856d2..18afde1b7cefd 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -186,10 +186,10 @@ fn main() { lc foo Foo lc thing i32 md core - st Foo + st Foo Foo st Foo {…} Foo { foo1: u32, foo2: u32 } tt Default - bt u32 + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index d3dbd7cc22777..f96fb71f28932 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -84,10 +84,10 @@ pub mod prelude { } "#, expect![[r#" - md std - st Option - bt u32 - "#]], + md std + st Option Option + bt u32 u32 + "#]], ); } @@ -112,11 +112,11 @@ mod macros { } "#, expect![[r#" - fn f() fn() - ma concat!(…) macro_rules! concat - md std - bt u32 - "#]], + fn f() fn() + ma concat!(…) macro_rules! concat + md std + bt u32 u32 + "#]], ); } @@ -142,11 +142,11 @@ pub mod prelude { } "#, expect![[r#" - md core - md std - st String - bt u32 - "#]], + md core + md std + st String String + bt u32 u32 + "#]], ); } @@ -171,10 +171,10 @@ pub mod prelude { } "#, expect![[r#" - fn f() fn() - md std - bt u32 - "#]], + fn f() fn() + md std + bt u32 u32 + "#]], ); } @@ -446,10 +446,10 @@ mod p { } "#, expect![[r#" - ct RIGHT_CONST - fn right_fn() fn() - st RightType - "#]], + ct RIGHT_CONST u32 + fn right_fn() fn() + st RightType WrongType + "#]], ); check_edit( @@ -881,7 +881,7 @@ fn main() { fn main() fn() lc foobar i32 ma x!(…) macro_rules! 
x - bt u32 + bt u32 u32 "#]], ) } @@ -1008,8 +1008,8 @@ fn here_we_go() { "#, expect![[r#" fn here_we_go() fn() - st Foo (alias Bar) - bt u32 + st Foo (alias Bar) Foo + bt u32 u32 kw const kw crate:: kw enum @@ -1057,8 +1057,8 @@ fn here_we_go() { "#, expect![[r#" fn here_we_go() fn() - st Foo (alias Bar, Qux, Baz) - bt u32 + st Foo (alias Bar, Qux, Baz) Foo + bt u32 u32 kw const kw crate:: kw enum @@ -1178,7 +1178,7 @@ fn bar() { qu$0 } expect![[r#" fn bar() fn() fn foo() (alias qux) fn() - bt u32 + bt u32 u32 kw const kw crate:: kw enum @@ -1227,7 +1227,7 @@ fn here_we_go() { } "#, expect![[r#" - st Bar (alias Qux) + st Bar (alias Qux) Bar "#]], ); } @@ -1246,7 +1246,7 @@ fn here_we_go() { } "#, expect![[r#" - st Bar (alias Qux) + st Bar (alias Qux) Bar "#]], ); } @@ -1267,8 +1267,8 @@ fn here_we_go() { expect![[r#" fn here_we_go() fn() md foo - st Bar (alias Qux) (use foo::Bar) - bt u32 + st Bar (alias Qux) (use foo::Bar) Bar + bt u32 u32 kw crate:: kw false kw for @@ -1286,6 +1286,30 @@ fn here_we_go() { ); } +#[test] +fn completes_only_public() { + check( + r#" +//- /e.rs +pub(self) fn i_should_be_hidden() {} +pub(in crate::e) fn i_should_also_be_hidden() {} +pub fn i_am_public () {} + +//- /lib.rs crate:krate +pub mod e; + +//- /main.rs deps:krate crate:main +use krate::e; +fn main() { + e::$0 +}"#, + expect![ + "fn i_am_public() fn() +" + ], + ) +} + #[test] fn completion_filtering_excludes_non_identifier_doc_aliases() { check_edit( @@ -1409,7 +1433,7 @@ fn foo() { Some('_'), expect![[r#" fn foo() fn() - bt u32 + bt u32 u32 kw const kw crate:: kw enum @@ -1461,7 +1485,7 @@ fn foo(_: a_$0) { } "#, Some('_'), expect![[r#" - bt u32 + bt u32 u32 kw crate:: kw self:: "#]], @@ -1475,7 +1499,7 @@ fn foo() { Some('_'), expect![[r#" tp T - bt u32 + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index d518dd7641020..c7161f82ce74f 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -17,18 +17,18 @@ struct Foo<'lt, T, const C: usize> { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self - st Foo<…> - st Record - st Tuple - st Unit + sp Self Foo<'_, {unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -42,18 +42,18 @@ fn tuple_struct_field() { struct Foo<'lt, T, const C: usize>(f$0); "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self - st Foo<…> - st Record - st Tuple - st Unit + sp Self Foo<'_, {unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw pub kw pub(crate) @@ -70,16 +70,16 @@ fn fn_return_type() { fn x<'lt, T, const C: usize>() -> $0 "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -100,19 +100,19 @@ fn foo() -> B$0 { } "#, expect![[r#" - en Enum - ma makro!(…) macro_rules! makro - md module - st Record - st Tuple - st Unit - tt Trait - un Union - bt u32 - it () - kw crate:: - kw self:: - "#]], + en Enum Enum + ma makro!(…) macro_rules! 
makro + md module + st Record Record + st Tuple Tuple + st Unit Unit + tt Trait + un Union Union + bt u32 u32 + it () + kw crate:: + kw self:: + "#]], ) } @@ -124,16 +124,16 @@ struct Foo(T); const FOO: $0 = Foo(2); "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> - st Record - st Tuple - st Unit + st Foo<…> Foo<{unknown}> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 it Foo kw crate:: kw self:: @@ -151,15 +151,15 @@ fn f2() { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 it i32 kw crate:: kw self:: @@ -179,15 +179,15 @@ fn f2() { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 it u64 kw crate:: kw self:: @@ -204,15 +204,15 @@ fn f2(x: u64) -> $0 { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 it u64 kw crate:: kw self:: @@ -230,15 +230,15 @@ fn f2(x: $0) { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 it i32 kw crate:: kw self:: @@ -262,17 +262,17 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md a md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union - bt u32 + un Union Union + bt u32 u32 it a::Foo> kw crate:: kw self:: @@ -291,17 +291,17 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> - st Record - st Tuple - st Unit + st Foo<…> Foo<{unknown}> + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union - bt u32 + un Union Union + bt u32 u32 it Foo kw crate:: kw self:: @@ -319,16 +319,16 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tp T - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -341,14 +341,14 @@ fn foo<'lt, T, const C: usize>() { } "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait - un Union + un Union Union "#]], ); } @@ -384,18 +384,18 @@ trait Trait2: Trait1 { fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {} "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tt Trait1 tt Trait2 tp T - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -409,15 +409,15 @@ trait Trait2 { fn foo<'lt, T: Trait2, const CONST_PARAM: usize>(_: T) {} "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! 
makro md module - st Record - st Tuple - st Unit + st Record Record + st Tuple Tuple + st Unit Unit tt Trait tt Trait2 - un Union + un Union Union "#]], ); } @@ -434,18 +434,18 @@ trait Tr { impl Tr<$0 "#, expect![[r#" - en Enum + en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self - st Record - st S - st Tuple - st Unit + sp Self dyn Tr<{unknown}> + st Record Record + st S S + st Tuple Tuple + st Unit Unit tt Tr tt Trait - un Union - bt u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], @@ -481,16 +481,16 @@ trait MyTrait { fn f(t: impl MyTrait { fn f(t: impl MyTrait { fn f(t: impl MyTrait { fn f(t: impl MyTrait = ()>) {} "#, expect![[r#" - en Enum - ma makro!(…) macro_rules! makro - md module - st Foo - st Record - st Tuple - st Unit - tt Bar - tt Trait - un Union - bt u32 - kw crate:: - kw self:: - "#]], + en Enum Enum + ma makro!(…) macro_rules! makro + md module + st Foo Foo + st Record Record + st Tuple Tuple + st Unit Unit + tt Bar + tt Trait + un Union Union + bt u32 u32 + kw crate:: + kw self:: + "#]], ); check( r#" @@ -853,12 +853,12 @@ fn completes_const_and_type_generics_separately() { fn foo = ()>>() {} "#, expect![[r#" - ct CONST - ct X - ma makro!(…) macro_rules! makro - kw crate:: - kw self:: - "#]], + ct CONST Unit + ct X usize + ma makro!(…) macro_rules! makro + kw crate:: + kw self:: + "#]], ); // Type generic params @@ -871,12 +871,12 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST - ct X - ma makro!(…) macro_rules! makro - kw crate:: - kw self:: - "#]], + ct CONST Unit + ct X usize + ma makro!(…) macro_rules! makro + kw crate:: + kw self:: + "#]], ); // Type alias generic params @@ -890,12 +890,12 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST - ct X - ma makro!(…) macro_rules! makro - kw crate:: - kw self:: - "#]], + ct CONST Unit + ct X usize + ma makro!(…) macro_rules! makro + kw crate:: + kw self:: + "#]], ); // Enum variant params @@ -908,12 +908,12 @@ fn completes_const_and_type_generics_separately() { } "#, expect![[r#" - ct CONST - ct X - ma makro!(…) macro_rules! makro - kw crate:: - kw self:: - "#]], + ct CONST Unit + ct X usize + ma makro!(…) macro_rules! makro + kw crate:: + kw self:: + "#]], ); // Trait params @@ -924,12 +924,12 @@ fn completes_const_and_type_generics_separately() { impl Foo<(), $0> for () {} "#, expect![[r#" - ct CONST - ct X - ma makro!(…) macro_rules! makro - kw crate:: - kw self:: - "#]], + ct CONST Unit + ct X usize + ma makro!(…) macro_rules! makro + kw crate:: + kw self:: + "#]], ); // Trait alias params @@ -942,12 +942,12 @@ fn completes_const_and_type_generics_separately() { fn foo>() {} "#, expect![[r#" - ct CONST - ct X - ma makro!(…) macro_rules! makro - kw crate:: - kw self:: - "#]], + ct CONST Unit + ct X usize + ma makro!(…) macro_rules! makro + kw crate:: + kw self:: + "#]], ); // Omitted lifetime params @@ -957,7 +957,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>); fn foo<'a>() { S::; } "#, expect![[r#" - ct CONST + ct CONST Unit ma makro!(…) macro_rules! makro kw crate:: kw self:: @@ -970,7 +970,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>); fn foo<'a>() { S::<'static, 'static, F$0, _>; } "#, expect![[r#" - ct CONST + ct CONST Unit ma makro!(…) macro_rules! 
makro kw crate:: kw self:: diff --git a/crates/ide-completion/src/tests/use_tree.rs b/crates/ide-completion/src/tests/use_tree.rs index 4c74dba526b6f..167bdec546d63 100644 --- a/crates/ide-completion/src/tests/use_tree.rs +++ b/crates/ide-completion/src/tests/use_tree.rs @@ -65,7 +65,7 @@ use self::{foo::*, bar$0}; "#, expect![[r#" md foo - st S + st S S "#]], ); } @@ -82,7 +82,7 @@ mod foo { use foo::{bar::$0} "#, expect![[r#" - st FooBar + st FooBar FooBar "#]], ); check( @@ -115,7 +115,7 @@ mod foo { use foo::{bar::{baz::$0}} "#, expect![[r#" - st FooBarBaz + st FooBarBaz FooBarBaz "#]], ); check( @@ -152,7 +152,7 @@ struct Bar; "#, expect![[r#" ma foo macro_rules! foo_ - st Foo + st Foo Foo "#]], ); } @@ -193,7 +193,7 @@ struct Bar; "#, expect![[r#" md foo - st Bar + st Bar Bar "#]], ); } @@ -212,7 +212,7 @@ struct Bar; expect![[r#" md bar md foo - st Bar + st Bar Bar "#]], ); } @@ -230,7 +230,7 @@ mod a { } "#, expect![[r#" - ct A + ct A usize md b kw super:: "#]], @@ -248,7 +248,7 @@ struct Bar; "#, expect![[r#" md foo - st Bar + st Bar Bar "#]], ); } @@ -265,7 +265,7 @@ pub mod foo {} "#, expect![[r#" md foo - st Foo + st Foo Foo "#]], ); } @@ -425,7 +425,7 @@ marco_rules! m { () => {} } expect![[r#" fn foo fn() md simd - st S + st S S "#]], ); } diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index a0b05c87ae73f..343be870c9eea 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -97,13 +97,13 @@ impl RootDatabase { // ExpandDatabase hir::db::AstIdMapQuery - hir::db::ParseMacroExpansionQuery - hir::db::InternMacroCallQuery - hir::db::MacroArgNodeQuery hir::db::DeclMacroExpanderQuery - hir::db::MacroExpandQuery hir::db::ExpandProcMacroQuery - hir::db::HygieneFrameQuery + hir::db::InternMacroCallQuery + hir::db::InternSyntaxContextQuery + hir::db::MacroArgQuery + hir::db::ParseMacroExpansionQuery + hir::db::RealSpanMapQuery // DefDatabase hir::db::FileItemTreeQuery @@ -143,6 +143,13 @@ impl RootDatabase { hir::db::FunctionVisibilityQuery hir::db::ConstVisibilityQuery hir::db::CrateSupportsNoStdQuery + hir::db::BlockItemTreeQueryQuery + hir::db::ExternCrateDeclDataQuery + hir::db::LangAttrQuery + hir::db::InternAnonymousConstQuery + hir::db::InternExternCrateQuery + hir::db::InternInTypeConstQuery + hir::db::InternUseQuery // HirDatabase hir::db::InferQueryQuery diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index ef72fc3861a7f..ded5d4e3db534 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -492,7 +492,7 @@ impl NameRefClass { match_ast! 
{ match parent { ast::MethodCallExpr(method_call) => { - sema.resolve_method_call_field_fallback(&method_call) + sema.resolve_method_call_fallback(&method_call) .map(|it| { it.map_left(Definition::Function) .map_right(Definition::Field) @@ -500,9 +500,12 @@ impl NameRefClass { }) }, ast::FieldExpr(field_expr) => { - sema.resolve_field(&field_expr) - .map(Definition::Field) - .map(NameRefClass::Definition) + sema.resolve_field_fallback(&field_expr) + .map(|it| { + it.map_left(Definition::Field) + .map_right(Definition::Function) + .either(NameRefClass::Definition, NameRefClass::Definition) + }) }, ast::RecordPatField(record_pat_field) => { sema.resolve_record_pat_field(&record_pat_field) diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs index 330af442f754d..9363bdfa14b2a 100644 --- a/crates/ide-db/src/helpers.rs +++ b/crates/ide-db/src/helpers.rs @@ -3,7 +3,7 @@ use std::collections::VecDeque; use base_db::{FileId, SourceDatabaseExt}; -use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics}; +use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; use syntax::{ ast::{self, make}, AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, @@ -117,7 +117,7 @@ pub fn get_definition( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, ) -> Option { - for token in sema.descend_into_macros(token, 0.into()) { + for token in sema.descend_into_macros(DescendPreference::None, token) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 226def4d52684..fefc05e535505 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -2,7 +2,7 @@ //! //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod apply_change; @@ -144,6 +144,7 @@ impl RootDatabase { db.set_library_roots_with_durability(Default::default(), Durability::HIGH); db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH); db.update_parse_query_lru_capacity(lru_capacity); + db.setup_syntax_context_root(); db } @@ -156,7 +157,6 @@ impl RootDatabase { base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); // macro expansions are usually rather small, so we can afford to keep more of them alive hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); - hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); } pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap, usize>) { @@ -174,12 +174,6 @@ impl RootDatabase { .copied() .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP), ); - hir_db::MacroExpandQuery.in_db_mut(self).set_lru_capacity( - lru_capacities - .get(stringify!(MacroExpandQuery)) - .copied() - .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP), - ); macro_rules! 
update_lru_capacity_per_query { ($( $module:ident :: $query:ident )*) => {$( @@ -204,11 +198,10 @@ impl RootDatabase { hir_db::AstIdMapQuery // hir_db::ParseMacroExpansionQuery // hir_db::InternMacroCallQuery - hir_db::MacroArgNodeQuery + hir_db::MacroArgQuery hir_db::DeclMacroExpanderQuery // hir_db::MacroExpandQuery hir_db::ExpandProcMacroQuery - hir_db::HygieneFrameQuery hir_db::ParseMacroExpansionErrorQuery // DefDatabase diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs index fa9339f30f206..fb4c0c12691db 100644 --- a/crates/ide-db/src/path_transform.rs +++ b/crates/ide-db/src/path_transform.rs @@ -2,7 +2,7 @@ use crate::helpers::mod_path_to_ast; use either::Either; -use hir::{AsAssocItem, HirDisplay, SemanticsScope}; +use hir::{AsAssocItem, HirDisplay, ModuleDef, SemanticsScope}; use rustc_hash::FxHashMap; use syntax::{ ast::{self, make, AstNode}, @@ -183,6 +183,7 @@ impl<'a> PathTransform<'a> { lifetime_substs, target_module, source_scope: self.source_scope, + same_self_type: self.target_scope.has_same_self_type(self.source_scope), }; ctx.transform_default_values(defaulted_params); ctx @@ -195,6 +196,7 @@ struct Ctx<'a> { lifetime_substs: FxHashMap, target_module: hir::Module, source_scope: &'a SemanticsScope<'a>, + same_self_type: bool, } fn postorder(item: &SyntaxNode) -> impl Iterator { @@ -332,8 +334,42 @@ impl Ctx<'_> { ted::replace(path.syntax(), subst.clone_subtree().clone_for_update()); } } + hir::PathResolution::SelfType(imp) => { + // keep Self type if it does not need to be replaced + if self.same_self_type { + return None; + } + + let ty = imp.self_ty(self.source_scope.db); + let ty_str = &ty + .display_source_code( + self.source_scope.db, + self.source_scope.module().into(), + true, + ) + .ok()?; + let ast_ty = make::ty(&ty_str).clone_for_update(); + + if let Some(adt) = ty.as_adt() { + if let ast::Type::PathType(path_ty) = &ast_ty { + let found_path = self.target_module.find_use_path( + self.source_scope.db.upcast(), + ModuleDef::from(adt), + false, + true, + )?; + + if let Some(qual) = mod_path_to_ast(&found_path).qualifier() { + let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); + ted::replace(path.syntax(), res.syntax()); + return Some(()); + } + } + } + + ted::replace(path.syntax(), ast_ty.syntax()); + } hir::PathResolution::Local(_) - | hir::PathResolution::SelfType(_) | hir::PathResolution::Def(_) | hir::PathResolution::BuiltinAttr(_) | hir::PathResolution::ToolModule(_) diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 353a9749a37d9..d2b6a732689c1 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -22,10 +22,10 @@ //! Our current behavior is ¯\_(ツ)_/¯. 
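The rename.rs hunks below thread a SyntaxContextId (imported from base_db::span in this patch) through range_for_rename, so a rename target is only accepted when the name is written literally in source, i.e. its syntax context is the root context, rather than being synthesized by a macro expansion. A minimal runnable sketch of that guard, using toy stand-in types rather than rust-analyzer's actual SyntaxContextId and range types:

    // Toy stand-ins; the real SyntaxContextId lives in base_db::span.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct SyntaxContextId(u32);

    impl SyntaxContextId {
        const ROOT: SyntaxContextId = SyntaxContextId(0);
        fn is_root(self) -> bool {
            self == Self::ROOT
        }
    }

    type TextRange = (u32, u32);

    // Mirrors the closure in the patch:
    // `|(range, ctx)| ctx.is_root().then(|| range)`
    fn syn_ctx_is_root((range, ctx): (TextRange, SyntaxContextId)) -> Option<TextRange> {
        ctx.is_root().then(|| range)
    }

    fn main() {
        // A name typed by the user keeps its rename range...
        assert_eq!(syn_ctx_is_root(((10, 15), SyntaxContextId::ROOT)), Some((10, 15)));
        // ...while a macro-generated name yields None, so no rename is offered.
        assert_eq!(syn_ctx_is_root(((10, 15), SyntaxContextId(1))), None);
    }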
use std::fmt; -use base_db::{AnchoredPathBuf, FileId, FileRange}; +use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange}; use either::Either; -use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics}; -use stdx::never; +use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics}; +use stdx::{never, TupleExt}; use syntax::{ ast::{self, HasName}, AstNode, SyntaxKind, TextRange, T, @@ -34,7 +34,7 @@ use text_edit::{TextEdit, TextEditBuilder}; use crate::{ defs::Definition, - search::FileReference, + search::{FileReference, FileReferenceNode}, source_change::{FileSystemEdit, SourceChange}, syntax_helpers::node_ext::expr_as_name_ref, traits::convert_to_def_in_trait, @@ -103,6 +103,7 @@ impl Definition { /// renamed and extern crate names will report its range, though a rename will introduce /// an alias instead. pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option { + let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then(|| range); let res = match self { Definition::Macro(mac) => { let src = mac.source(sema.db)?; @@ -110,14 +111,18 @@ impl Definition { Either::Left(it) => it.name()?, Either::Right(it) => it.name()?, }; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } Definition::Field(field) => { let src = field.source(sema.db)?; match &src.value { FieldSource::Named(record_field) => { let name = record_field.name()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } FieldSource::Pos(_) => None, } @@ -125,25 +130,31 @@ impl Definition { Definition::Module(module) => { let src = module.declaration_source(sema.db)?; let name = src.value.name()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } - Definition::Function(it) => name_range(it, sema), + Definition::Function(it) => name_range(it, sema).and_then(syn_ctx_is_root), Definition::Adt(adt) => match adt { - hir::Adt::Struct(it) => name_range(it, sema), - hir::Adt::Union(it) => name_range(it, sema), - hir::Adt::Enum(it) => name_range(it, sema), + hir::Adt::Struct(it) => name_range(it, sema).and_then(syn_ctx_is_root), + hir::Adt::Union(it) => name_range(it, sema).and_then(syn_ctx_is_root), + hir::Adt::Enum(it) => name_range(it, sema).and_then(syn_ctx_is_root), }, - Definition::Variant(it) => name_range(it, sema), - Definition::Const(it) => name_range(it, sema), - Definition::Static(it) => name_range(it, sema), - Definition::Trait(it) => name_range(it, sema), - Definition::TraitAlias(it) => name_range(it, sema), - Definition::TypeAlias(it) => name_range(it, sema), - Definition::Local(it) => name_range(it.primary_source(sema.db), sema), + Definition::Variant(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Const(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Static(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Trait(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::TraitAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::TypeAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Local(it) => { + name_range(it.primary_source(sema.db), sema).and_then(syn_ctx_is_root) + } 
Definition::GenericParam(generic_param) => match generic_param { hir::GenericParam::LifetimeParam(lifetime_param) => { let src = lifetime_param.source(sema.db)?; - src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db) + src.with_value(src.value.lifetime()?.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } _ => { let x = match generic_param { @@ -156,22 +167,30 @@ impl Definition { Either::Left(x) => x.name()?, Either::Right(_) => return None, }; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } }, Definition::Label(label) => { let src = label.source(sema.db); let lifetime = src.value.lifetime()?; - src.with_value(lifetime.syntax()).original_file_range_opt(sema.db) + src.with_value(lifetime.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } Definition::ExternCrateDecl(it) => { let src = it.source(sema.db)?; if let Some(rename) = src.value.rename() { let name = rename.name()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } else { let name = src.value.name_ref()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } } Definition::BuiltinType(_) => return None, @@ -183,7 +202,10 @@ impl Definition { }; return res; - fn name_range(def: D, sema: &Semantics<'_, RootDatabase>) -> Option + fn name_range( + def: D, + sema: &Semantics<'_, RootDatabase>, + ) -> Option<(FileRange, SyntaxContextId)> where D: HasSource, D::Ast: ast::HasName, @@ -256,8 +278,10 @@ fn rename_mod( let file_id = src.file_id.original_file(sema.db); match src.value.name() { Some(name) => { - if let Some(file_range) = - src.with_value(name.syntax()).original_file_range_opt(sema.db) + if let Some(file_range) = src + .with_value(name.syntax()) + .original_file_range_opt(sema.db) + .map(TupleExt::head) { source_change.insert_source_edit( file_id, @@ -337,7 +361,7 @@ pub fn source_edit_from_references( // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far let mut edited_ranges = Vec::new(); for &FileReference { range, ref name, .. 
} in references { - let name_range = name.syntax().text_range(); + let name_range = name.text_range(); if name_range.len() != range.len() { // This usage comes from a different token kind that was downmapped to a NameLike in a macro // Renaming this will most likely break things syntax-wise @@ -347,17 +371,17 @@ pub fn source_edit_from_references( // if the ranges differ then the node is inside a macro call, we can't really attempt // to make special rewrites like shorthand syntax and such, so just rename the node in // the macro input - ast::NameLike::NameRef(name_ref) if name_range == range => { + FileReferenceNode::NameRef(name_ref) if name_range == range => { source_edit_from_name_ref(&mut edit, name_ref, new_name, def) } - ast::NameLike::Name(name) if name_range == range => { + FileReferenceNode::Name(name) if name_range == range => { source_edit_from_name(&mut edit, name, new_name) } _ => false, }; if !has_emitted_edit && !edited_ranges.contains(&range.start()) { let (range, new_name) = match name { - ast::NameLike::Lifetime(_) => ( + FileReferenceNode::Lifetime(_) => ( TextRange::new(range.start() + syntax::TextSize::from(1), range.end()), new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(), ), @@ -493,7 +517,12 @@ fn source_edit_from_def( for source in local.sources(sema.db) { let source = match source.source.clone().original_ast_node(sema.db) { Some(source) => source, - None => match source.source.syntax().original_file_range_opt(sema.db) { + None => match source + .source + .syntax() + .original_file_range_opt(sema.db) + .map(TupleExt::head) + { Some(FileRange { file_id: file_id2, range }) => { file_id = Some(file_id2); edit.replace(range, new_name.to_owned()); @@ -504,7 +533,7 @@ fn source_edit_from_def( } }, }; - file_id = source.file_id.file_id(); + file_id = Some(source.file_id); if let Either::Left(pat) = source.value { let name_range = pat.name().unwrap().syntax().text_range(); // special cases required for renaming fields/locals in Record patterns diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 22438a203bd78..dbef360268224 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -8,13 +8,14 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ - AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility, + AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile, + InRealFile, ModuleSource, PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; use once_cell::unsync::Lazy; use parser::SyntaxKind; -use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; +use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize}; use triomphe::Arc; use crate::{ @@ -62,10 +63,67 @@ pub struct FileReference { /// The range of the reference in the original file pub range: TextRange, /// The node of the reference in the (macro-)file - pub name: ast::NameLike, + pub name: FileReferenceNode, pub category: Option, } +#[derive(Debug, Clone)] +pub enum FileReferenceNode { + Name(ast::Name), + NameRef(ast::NameRef), + Lifetime(ast::Lifetime), + FormatStringEntry(ast::String, TextRange), +} + +impl FileReferenceNode { + pub fn text_range(&self) -> TextRange { + match self { + FileReferenceNode::Name(it) => it.syntax().text_range(), + FileReferenceNode::NameRef(it) => it.syntax().text_range(), + FileReferenceNode::Lifetime(it) => it.syntax().text_range(), + 
FileReferenceNode::FormatStringEntry(_, range) => *range, + } + } + pub fn syntax(&self) -> SyntaxElement { + match self { + FileReferenceNode::Name(it) => it.syntax().clone().into(), + FileReferenceNode::NameRef(it) => it.syntax().clone().into(), + FileReferenceNode::Lifetime(it) => it.syntax().clone().into(), + FileReferenceNode::FormatStringEntry(it, _) => it.syntax().clone().into(), + } + } + pub fn into_name_like(self) -> Option { + match self { + FileReferenceNode::Name(it) => Some(ast::NameLike::Name(it)), + FileReferenceNode::NameRef(it) => Some(ast::NameLike::NameRef(it)), + FileReferenceNode::Lifetime(it) => Some(ast::NameLike::Lifetime(it)), + FileReferenceNode::FormatStringEntry(_, _) => None, + } + } + pub fn as_name_ref(&self) -> Option<&ast::NameRef> { + match self { + FileReferenceNode::NameRef(name_ref) => Some(name_ref), + _ => None, + } + } + pub fn as_lifetime(&self) -> Option<&ast::Lifetime> { + match self { + FileReferenceNode::Lifetime(lifetime) => Some(lifetime), + _ => None, + } + } + pub fn text(&self) -> syntax::TokenText<'_> { + match self { + FileReferenceNode::NameRef(name_ref) => name_ref.text(), + FileReferenceNode::Name(name) => name.text(), + FileReferenceNode::Lifetime(lifetime) => lifetime.text(), + FileReferenceNode::FormatStringEntry(it, range) => { + syntax::TokenText::borrowed(&it.text()[*range - it.syntax().text_range().start()]) + } + } + } +} + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum ReferenceCategory { // FIXME: Add this variant and delete the `retain_adt_literal_usages` function. @@ -132,7 +190,8 @@ impl SearchScope { let (file_id, range) = { let InFile { file_id, value } = module.definition_source(db); - if let Some((file_id, call_source)) = file_id.original_call_node(db) { + if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) + { (file_id, Some(call_source.text_range())) } else { ( @@ -465,7 +524,9 @@ impl<'a> FindUsages<'a> { // every textual hit. That function is notoriously // expensive even for things that do not get down mapped // into macros. 
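Besides switching descend_into_macros to take a DescendPreference, this patch teaches the usage search to look inside string literals: since Rust 1.58, format strings capture identifiers implicitly, so a `{name}` placeholder is a genuine reference, which is what `FileReferenceNode::FormatStringEntry` and `found_format_args_ref` record. A plain-Rust illustration (runnable as-is, nothing rust-analyzer-specific):

    fn main() {
        let name = "world";
        // Renaming `name` must also rewrite the `{name}` inside the literal,
        // which is why find-usages now scans string tokens for captures.
        println!("hello, {name}!");
        // An explicit argument is an ordinary name reference outside the
        // literal; only the implicit capture needs the new code path.
        println!("hello, {}!", name);
    }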
- sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent()) + sema.descend_into_macros(DescendPreference::None, token) + .into_iter() + .filter_map(|it| it.parent()) }) }; @@ -475,6 +536,17 @@ impl<'a> FindUsages<'a> { // Search for occurrences of the items name for offset in match_indices(&text, finder, search_range) { + tree.token_at_offset(offset).into_iter().for_each(|token| { + let Some(str_token) = ast::String::cast(token.clone()) else { return }; + if let Some((range, nameres)) = + sema.check_for_format_args_template(token.clone(), offset) + { + if self.found_format_args_ref(file_id, range, str_token, nameres, sink) { + return; + } + } + }); + for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) { if match name { ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), @@ -589,7 +661,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: None, }; sink(file_id, reference) @@ -608,7 +680,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import), }; sink(file_id, reference) @@ -617,6 +689,27 @@ impl<'a> FindUsages<'a> { } } + fn found_format_args_ref( + &self, + file_id: FileId, + range: TextRange, + token: ast::String, + res: Option, + sink: &mut dyn FnMut(FileId, FileReference) -> bool, + ) -> bool { + match res.map(Definition::from) { + Some(def) if def == self.def => { + let reference = FileReference { + range, + name: FileReferenceNode::FormatStringEntry(token, range), + category: Some(ReferenceCategory::Read), + }; + sink(file_id, reference) + } + _ => false, + } + } + fn found_lifetime( &self, lifetime: &ast::Lifetime, @@ -627,7 +720,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax()); let reference = FileReference { range, - name: ast::NameLike::Lifetime(lifetime.clone()), + name: FileReferenceNode::Lifetime(lifetime.clone()), category: None, }; sink(file_id, reference) @@ -651,7 +744,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -667,7 +760,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -677,7 +770,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -701,7 +794,7 @@ impl<'a> FindUsages<'a> { }; let reference = FileReference { range, - name: 
ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: access, }; sink(file_id, reference) @@ -724,7 +817,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), // FIXME: mutable patterns should have `Write` access category: Some(ReferenceCategory::Read), }; @@ -734,7 +827,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), category: None, }; sink(file_id, reference) @@ -759,7 +852,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), category: None, }; sink(file_id, reference) diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt index 7834c66033c00..4a72881fe5e4a 100644 --- a/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/crates/ide-db/src/test_data/test_doc_alias.txt @@ -21,18 +21,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 83..119, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 109..118, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 109..118, + }, + ), }, container_name: None, is_alias: false, @@ -50,18 +50,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: false, @@ -79,18 +79,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, @@ -108,18 +108,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, @@ -137,18 +137,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, @@ -166,18 +166,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 83..119, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 109..118, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 109..118, + }, + ), }, container_name: None, is_alias: true, @@ -195,18 +195,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - 
kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 87ad5844c64cf..da1f3167d7d49 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -19,18 +19,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, range: 397..417, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 402..407, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 402..407, + }, + ), }, container_name: None, is_alias: false, @@ -46,18 +46,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, range: 340..361, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 346..351, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 346..351, + }, + ), }, container_name: None, is_alias: false, @@ -73,18 +73,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, range: 520..592, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 526..542, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 526..542, + }, + ), }, container_name: None, is_alias: false, @@ -102,18 +102,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: ENUM, range: 185..207, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 190..194, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 190..194, + }, + ), }, container_name: None, is_alias: false, @@ -131,18 +131,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, range: 654..676, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 663..676, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 663..676, + }, + ), }, container_name: None, is_alias: false, @@ -160,18 +160,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, range: 153..168, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 159..164, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 159..164, + }, + ), }, container_name: None, is_alias: false, @@ -187,18 +187,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STATIC, range: 362..396, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 369..375, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 369..375, + }, + ), }, container_name: None, is_alias: false, @@ -216,18 +216,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 170..184, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 177..183, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 177..183, + }, + ), }, container_name: None, is_alias: false, @@ -245,20 +245,18 @@ ), loc: DeclarationLocation { hir_file_id: MacroFile( - MacroFile { - macro_call_id: MacroCallId( - 0, - ), - }, + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 0..22, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 6..21, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 6..21, + 
}, + ), }, container_name: None, is_alias: false, @@ -276,18 +274,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 318..336, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 325..335, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 325..335, + }, + ), }, container_name: Some( "main", @@ -307,18 +305,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 555..581, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 562..580, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 562..580, + }, + ), }, container_name: Some( "CONST_WITH_INNER", @@ -338,18 +336,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 479..507, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 486..506, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 486..506, + }, + ), }, container_name: None, is_alias: false, @@ -365,18 +363,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: TRAIT, range: 261..300, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 267..272, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 267..272, + }, + ), }, container_name: None, is_alias: false, @@ -394,18 +392,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, range: 682..696, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 691..696, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 691..696, + }, + ), }, container_name: None, is_alias: false, @@ -423,18 +421,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: UNION, range: 208..222, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 214..219, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 214..219, + }, + ), }, container_name: None, is_alias: false, @@ -452,18 +450,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, range: 419..457, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 423..428, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 423..428, + }, + ), }, container_name: None, is_alias: false, @@ -481,18 +479,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, range: 594..604, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 598..603, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 598..603, + }, + ), }, container_name: None, is_alias: false, @@ -510,18 +508,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, range: 51..131, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 64..77, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 64..77, + }, + ), }, container_name: None, is_alias: false, @@ -537,18 +535,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, range: 242..257, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 245..252, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 245..252, + }, + ), }, container_name: None, is_alias: false, @@ -566,18 +564,18 @@ ), loc: DeclarationLocation { hir_file_id: 
FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, range: 1..48, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 14..31, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 14..31, + }, + ), }, container_name: None, is_alias: false, @@ -593,18 +591,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, range: 302..338, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 305..309, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 305..309, + }, + ), }, container_name: None, is_alias: false, @@ -622,18 +620,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, range: 611..648, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 628..648, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 628..648, + }, + ), }, container_name: None, is_alias: false, @@ -649,18 +647,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, range: 279..298, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 282..290, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 282..290, + }, + ), }, container_name: Some( "Trait", @@ -691,18 +689,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 435..455, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 442..454, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 442..454, + }, + ), }, container_name: None, is_alias: false, @@ -731,18 +729,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, range: 111..143, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 127..143, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 127..143, + }, + ), }, container_name: None, is_alias: false, @@ -760,18 +758,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: STRUCT, range: 0..20, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 7..19, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 7..19, + }, + ), }, container_name: None, is_alias: false, @@ -789,18 +787,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, range: 25..59, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 41..59, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 41..59, + }, + ), }, container_name: None, is_alias: false, @@ -818,18 +816,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, range: 65..105, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 95..105, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 95..105, + }, + ), }, container_name: None, is_alias: false, @@ -847,18 +845,18 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, range: 65..105, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 95..105, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 95..105, + }, + ), }, container_name: None, is_alias: false, diff --git a/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/crates/ide-diagnostics/src/handlers/field_shorthand.rs index 9ed8199ae4d0c..45fc6f8e68d0e 100644 --- 
a/crates/ide-diagnostics/src/handlers/field_shorthand.rs +++ b/crates/ide-diagnostics/src/handlers/field_shorthand.rs @@ -1,7 +1,10 @@ //! Suggests shortening `Foo { field: field }` to `Foo { field }` in both //! expressions and patterns. -use ide_db::{base_db::FileId, source_change::SourceChange}; +use ide_db::{ + base_db::{FileId, FileRange}, + source_change::SourceChange, +}; use syntax::{ast, match_ast, AstNode, SyntaxNode}; use text_edit::TextEdit; @@ -49,7 +52,7 @@ fn check_expr_field_shorthand( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct initialization", - field_range, + FileRange { file_id, range: field_range }, ) .with_fixes(Some(vec![fix( "use_expr_field_shorthand", @@ -93,7 +96,7 @@ fn check_pat_field_shorthand( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct pattern", - field_range, + FileRange { file_id, range: field_range }, ) .with_fixes(Some(vec![fix( "use_pat_field_shorthand", diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 9eb763d3e2c23..3b2e15a17887b 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -31,7 +31,7 @@ pub(crate) fn inactive_code( let res = Diagnostic::new( DiagnosticCode::Ra("inactive-code", Severity::WeakWarning), message, - ctx.sema.diagnostics_display_range(d.node.clone()).range, + ctx.sema.diagnostics_display_range(d.node.clone()), ) .with_unused(true); Some(res) diff --git a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs index 1ec17952b238d..f68f5b44b11bd 100644 --- a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs +++ b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs @@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target( ctx: &DiagnosticsContext<'_>, d: &hir::InvalidDeriveTarget, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range; + let display_range = ctx.sema.diagnostics_display_range(d.node.clone()); Diagnostic::new( DiagnosticCode::RustcHardError("E0774"), diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 659b74445f8fe..d330973aaaa30 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -3,7 +3,7 @@ use hir::{PathResolution, Semantics}; use ide_db::{ - base_db::FileId, + base_db::{FileId, FileRange}, helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, source_change::SourceChangeBuilder, @@ -119,7 +119,7 @@ pub(crate) fn json_in_items( Diagnostic::new( DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning), "JSON syntax is not valid as a Rust item", - range, + FileRange { file_id, range }, ) .with_fixes(Some(vec![{ let mut scb = SourceChangeBuilder::new(file_id); diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index 7ca0a0eab2b6f..099de4528d468 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -60,9 +60,6 @@ macro_rules! compile_error { () => {} } #[test] fn eager_macro_concat() { - // FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic. 
- // See: https://github.com/rust-lang/rust-analyzer/issues/10300 - check_diagnostics( r#" //- /lib.rs crate:lib deps:core @@ -80,7 +77,6 @@ macro_rules! m { fn f() { m!(); - //^^^^ error: unresolved macro $crate::private::concat } //- /core.rs crate:core @@ -268,4 +264,24 @@ fn f() { "#, ) } + + #[test] + fn include_does_not_break_diagnostics() { + let mut config = DiagnosticsConfig::test_sample(); + config.disabled.insert("inactive-code".to_string()); + config.disabled.insert("unlinked-file".to_string()); + check_diagnostics_with_config( + config, + r#" +//- minicore: include +//- /lib.rs crate:lib +include!("include-me.rs"); +//- /include-me.rs +/// long doc that pushes the diagnostic range beyond the first file's text length + #[err] +//^^^^^^error: unresolved macro `err` +mod prim_never {} +"#, + ); + } } diff --git a/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/crates/ide-diagnostics/src/handlers/malformed_derive.rs index fc57dde69f2a0..6202d15853967 100644 --- a/crates/ide-diagnostics/src/handlers/malformed_derive.rs +++ b/crates/ide-diagnostics/src/handlers/malformed_derive.rs @@ -7,7 +7,7 @@ pub(crate) fn malformed_derive( ctx: &DiagnosticsContext<'_>, d: &hir::MalformedDerive, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range; + let display_range = ctx.sema.diagnostics_display_range(d.node.clone()); Diagnostic::new( DiagnosticCode::RustcHardError("E0777"), diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 06ba13bcc55c4..8296018022cb2 100644 --- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -1,8 +1,9 @@ use either::Either; use hir::InFile; +use ide_db::base_db::FileRange; use syntax::{ ast::{self, HasArgList}, - AstNode, SyntaxNodePtr, TextRange, + AstNode, SyntaxNodePtr, }; use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; @@ -48,7 +49,7 @@ fn invalid_args_range( source: InFile, expected: usize, found: usize, -) -> TextRange { +) -> FileRange { adjusted_display_range::>(ctx, source, &|expr| { let (text_range, r_paren_token, expected_arg) = match expr { Either::Left(ast::Expr::CallExpr(call)) => { diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index d7dca1083a076..cb38bc54d7d61 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -1,7 +1,7 @@ use either::Either; use hir::{ db::{ExpandDatabase, HirDatabase}, - known, AssocItem, HirDisplay, InFile, Type, + known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type, }; use ide_db::{ assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 0f695b2745a8a..f93a35cf181c2 100644 --- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -1,4 +1,5 @@ use hir::db::ExpandDatabase; +use hir::HirFileIdExt; use ide_db::{assists::Assist, source_change::SourceChange}; use syntax::{ast, SyntaxNode}; use syntax::{match_ast, AstNode}; diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs index ee8a9c95793c1..0abcbffe72b6b 100644 --- 
a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics}; +use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics}; use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase}; use syntax::{ ast::{self, edit::IndentLevel, make}, diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs index d15233d15c2c5..258ac6cd82338 100644 --- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs +++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, InFile}; +use hir::{db::ExpandDatabase, HirFileIdExt, InFile}; use ide_db::source_change::SourceChange; use syntax::{ ast::{self, HasArgList}, diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs index 51923797ac91a..56188cddf0b2e 100644 --- a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs @@ -112,4 +112,18 @@ impl Trait for () { "#, ); } + + #[test] + fn negative_impl() { + check_diagnostics( + r#" +trait Trait { + fn item(); +} + +// Negative impls don't require any items (in fact, they forbid providing any) +impl !Trait for () {} +"#, + ) + } } diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs new file mode 100644 index 0000000000000..820014391467e --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -0,0 +1,79 @@ +use hir::{Const, Function, HasSource, TypeAlias}; +use ide_db::base_db::FileRange; + +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: trait-impl-redundant-assoc_item +// +// Diagnoses redundant trait items in a trait impl.
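As an illustrative aside (editorial, not part of the patch): the handler below flags functions, consts, and type aliases alike, while the test at the end of the new file only exercises the `fn` and `type` cases. A minimal fixture in the same style, assuming the handler's message format, for the `const` case:

```rust
// Hypothetical fixture in the crate's test style: `Marker` declares no
// associated const, so the `const` in the impl is flagged as redundant.
trait Marker {
    fn boo();
}
struct Foo;
impl Marker for Foo {
    const FLAG: bool = true;
 // ^^^^^^^^^^^^^^^^^^^^^^^^ error: `const FLAG` is not a member of trait `Marker`
    fn boo() {}
}
```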
+pub(crate) fn trait_impl_redundant_assoc_item( + ctx: &DiagnosticsContext<'_>, + d: &hir::TraitImplRedundantAssocItems, +) -> Diagnostic { + let name = d.assoc_item.0.clone(); + let assoc_item = d.assoc_item.1; + let db = ctx.sema.db; + + let default_range = d.impl_.syntax_node_ptr().text_range(); + let trait_name = d.trait_.name(db).to_smol_str(); + + let (redundant_item_name, diagnostic_range) = match assoc_item { + hir::AssocItem::Function(id) => ( + format!("`fn {}`", name.display(db)), + Function::from(id) + .source(db) + .map(|it| it.syntax().value.text_range()) + .unwrap_or(default_range), + ), + hir::AssocItem::Const(id) => ( + format!("`const {}`", name.display(db)), + Const::from(id) + .source(db) + .map(|it| it.syntax().value.text_range()) + .unwrap_or(default_range), + ), + hir::AssocItem::TypeAlias(id) => ( + format!("`type {}`", name.display(db)), + TypeAlias::from(id) + .source(db) + .map(|it| it.syntax().value.text_range()) + .unwrap_or(default_range), + ), + }; + + Diagnostic::new( + DiagnosticCode::RustcHardError("E0407"), + format!("{redundant_item_name} is not a member of trait `{trait_name}`"), + FileRange { file_id: d.file_id.file_id().unwrap(), range: diagnostic_range }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn trait_with_default_value() { + check_diagnostics( + r#" +trait Marker { + const FLAG: bool = false; + fn boo(); + fn foo () {} +} +struct Foo; +impl Marker for Foo { + type T = i32; + //^^^^^^^^^^^^^ error: `type T` is not a member of trait `Marker` + + const FLAG: bool = true; + + fn bar() {} + //^^^^^^^^^^^ error: `fn bar` is not a member of trait `Marker` + + fn boo() {} +} + "#, + ) + } +} diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index c92d92ceae8c8..70beb9468938c 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type}; +use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ ast::{self, BlockExpr, ExprStmt}, @@ -35,14 +35,10 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) Some(salient_token_range) }, ), - pat => { - ctx.sema - .diagnostics_display_range(InFile { - file_id: d.expr_or_pat.file_id, - value: pat.syntax_node_ptr(), - }) - .range - } + pat => ctx.sema.diagnostics_display_range(InFile { + file_id: d.expr_or_pat.file_id, + value: pat.syntax_node_ptr(), + }), }; let mut diag = Diagnostic::new( DiagnosticCode::RustcHardError("E0308"), @@ -84,7 +80,7 @@ fn add_reference( expr_ptr: &InFile>, acc: &mut Vec, ) -> Option<()> { - let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())).range; + let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())); let (_, mutability) = d.expected.as_reference()?; let actual_with_ref = Type::reference(&d.actual, mutability); @@ -94,10 +90,9 @@ fn add_reference( let ampersands = format!("&{}", mutability.as_keyword_for_ref()); - let edit = TextEdit::insert(range.start(), ampersands); - let source_change = - SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit); - acc.push(fix("add_reference_here", "Add reference here", source_change, range)); + let edit = TextEdit::insert(range.range.start(), ampersands); + let 
source_change = SourceChange::from_text_edit(range.file_id, edit); + acc.push(fix("add_reference_here", "Add reference here", source_change, range.range)); Some(()) } diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs index 4e215a89d7932..a740e332bbddf 100644 --- a/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -26,14 +26,14 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di ) }; - Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range) + Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range) .with_fixes(fixes) } fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option> { let db = ctx.sema.db; let root = db.parse_or_expand(d.expr.file_id); - let original_range = + let (original_range, _) = d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?; let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?; let mut assists = vec![]; diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index e04f27c27fdf7..becc24ab21ecb 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -4,7 +4,7 @@ use std::iter; use hir::{db::DefDatabase, DefMap, InFile, ModuleSource}; use ide_db::{ - base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt}, + base_db::{FileId, FileLoader, FileRange, SourceDatabase, SourceDatabaseExt}, source_change::SourceChange, RootDatabase, }; @@ -46,8 +46,12 @@ pub(crate) fn unlinked_file( .unwrap_or(range); acc.push( - Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range) - .with_fixes(fixes), + Diagnostic::new( + DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), + message, + FileRange { file_id, range }, + ) + .with_fixes(fixes), ); } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs index f8265b63275fc..71c501a336b1e 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs @@ -44,6 +44,21 @@ extern crate core; extern crate self as foo; struct Foo; use foo::Foo as Bar; +"#, + ); + } + + #[test] + fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() { + check_diagnostics( + r#" +//- /lib.rs + #[macro_use] extern crate doesnotexist; +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate + mod _test_inner { + #![empty_attr] + //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr` + } "#, ); } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 0758706e45a27..321459412182f 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -8,7 +8,7 @@ use ide_db::{ use syntax::{ast, AstNode, AstPtr}; use text_edit::TextEdit; -use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-field // @@ -22,15 +22,24 @@ pub(crate) fn unresolved_field( } else { "" }; - Diagnostic::new_with_syntax_node_ptr( - ctx, + Diagnostic::new( 
DiagnosticCode::RustcHardError("E0559"), format!( "no field `{}` on type `{}`{method_suffix}", d.name.display(ctx.sema.db), d.receiver.display(ctx.sema.db) ), - d.expr.clone().map(|it| it.into()), + adjusted_display_range_new(ctx, d.expr, &|expr| { + Some( + match expr { + ast::Expr::MethodCallExpr(it) => it.name_ref(), + ast::Expr::FieldExpr(it) => it.name_ref(), + _ => None, + }? + .syntax() + .text_range(), + ) + }), ) .with_fixes(fixes(ctx, d)) .experimental() @@ -79,7 +88,7 @@ mod tests { r#" fn main() { ().foo; - // ^^^^^^ error: no field `foo` on type `()` + // ^^^ error: no field `foo` on type `()` } "#, ); @@ -95,7 +104,7 @@ impl Foo { } fn foo() { Foo.bar; - // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists + // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists } "#, ); @@ -112,7 +121,7 @@ trait Bar { impl Bar for Foo {} fn foo() { Foo.bar; - // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists + // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists } "#, ); @@ -131,7 +140,7 @@ impl Bar for Foo { } fn foo() { Foo.bar; - // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists + // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists } "#, ); diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs index ae9f6744c40f9..464b0a710ea7b 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -8,7 +8,7 @@ use ide_db::{ use syntax::{ast, AstNode, TextRange}; use text_edit::TextEdit; -use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-method // @@ -22,15 +22,24 @@ pub(crate) fn unresolved_method( } else { "" }; - Diagnostic::new_with_syntax_node_ptr( - ctx, + Diagnostic::new( DiagnosticCode::RustcHardError("E0599"), format!( "no method `{}` on type `{}`{field_suffix}", d.name.display(ctx.sema.db), d.receiver.display(ctx.sema.db) ), - d.expr.clone().map(|it| it.into()), + adjusted_display_range_new(ctx, d.expr, &|expr| { + Some( + match expr { + ast::Expr::MethodCallExpr(it) => it.name_ref(), + ast::Expr::FieldExpr(it) => it.name_ref(), + _ => None, + }? + .syntax() + .text_range(), + ) + }), ) .with_fixes(fixes(ctx, d)) .experimental() @@ -92,7 +101,41 @@ mod tests { r#" fn main() { ().foo(); - // ^^^^^^^^ error: no method `foo` on type `()` + // ^^^ error: no method `foo` on type `()` +} +"#, + ); + } + + #[test] + fn smoke_test_in_macro_def_site() { + check_diagnostics( + r#" +macro_rules! m { + ($rcv:expr) => { + $rcv.foo() + } +} +fn main() { + m!(()); + // ^^^^^^ error: no method `foo` on type `()` +} +"#, + ); + } + + #[test] + fn smoke_test_in_macro_call_site() { + check_diagnostics( + r#" +macro_rules! 
m { + ($ident:ident) => { + ().$ident() + } +} +fn main() { + m!(foo); + // ^^^ error: no method `foo` on type `()` } "#, ); @@ -105,7 +148,7 @@ fn main() { struct Foo { bar: i32 } fn foo() { Foo { bar: i32 }.bar(); - // ^^^^^^^^^^^^^^^^^^^^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists + // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists } "#, ); diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index be24e50c9871d..e90d385bab8c5 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -1,4 +1,4 @@ -use hir::db::ExpandDatabase; +use hir::{db::ExpandDatabase, HirFileIdExt}; use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit}; use itertools::Itertools; use syntax::AstNode; @@ -87,7 +87,12 @@ mod baz {} "E0583", ), message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs", - range: 0..8, + range: FileRange { + file_id: FileId( + 0, + ), + range: 0..8, + }, severity: Error, unused: false, experimental: false, @@ -150,11 +155,9 @@ mod baz {} ], ), main_node: Some( - InFile { + InFileWrapper { file_id: FileId( - FileId( - 0, - ), + 0, ), value: MODULE@0..8 MOD_KW@0..3 "mod" diff --git a/crates/ide-diagnostics/src/handlers/useless_braces.rs b/crates/ide-diagnostics/src/handlers/useless_braces.rs index c4ac59ec2a4d0..8dce2af23e328 100644 --- a/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -1,5 +1,8 @@ use hir::InFile; -use ide_db::{base_db::FileId, source_change::SourceChange}; +use ide_db::{ + base_db::{FileId, FileRange}, + source_change::SourceChange, +}; use itertools::Itertools; use syntax::{ast, AstNode, SyntaxNode}; use text_edit::TextEdit; @@ -38,7 +41,7 @@ pub(crate) fn useless_braces( Diagnostic::new( DiagnosticCode::RustcLint("unused_braces"), "Unnecessary braces in use statement".to_string(), - use_range, + FileRange { file_id, range: use_range }, ) .with_main_node(InFile::new(file_id.into(), node.clone())) .with_fixes(Some(vec![fix( diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 6744895f3cd2e..6541bf605794a 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -23,7 +23,7 @@ //! There are also a couple of ad-hoc diagnostics implemented directly here, we //! don't yet have a great pattern for how to do them properly. 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod handlers { pub(crate) mod break_outside_of_loop; @@ -47,6 +47,7 @@ mod handlers { pub(crate) mod trait_impl_orphan; pub(crate) mod trait_impl_incorrect_safety; pub(crate) mod trait_impl_missing_assoc_item; + pub(crate) mod trait_impl_redundant_assoc_item; pub(crate) mod typed_hole; pub(crate) mod type_mismatch; pub(crate) mod unimplemented_builtin_macro; @@ -89,7 +90,7 @@ use stdx::never; use syntax::{ algo::find_node_at_range, ast::{self, AstNode}, - SyntaxNode, SyntaxNodePtr, TextRange, + AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, }; // FIXME: Make this an enum @@ -133,7 +134,7 @@ impl DiagnosticCode { pub struct Diagnostic { pub code: DiagnosticCode, pub message: String, - pub range: TextRange, + pub range: FileRange, pub severity: Severity, pub unused: bool, pub experimental: bool, @@ -143,7 +144,7 @@ pub struct Diagnostic { } impl Diagnostic { - fn new(code: DiagnosticCode, message: impl Into<String>, range: TextRange) -> Diagnostic { + fn new(code: DiagnosticCode, message: impl Into<String>, range: FileRange) -> Diagnostic { let message = message.into(); Diagnostic { code, @@ -172,7 +173,7 @@ impl Diagnostic { node: InFile<SyntaxNodePtr>, ) -> Diagnostic { let file_id = node.file_id; - Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()).range) + Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone())) .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id)))) } @@ -267,7 +268,7 @@ impl DiagnosticsContext<'_> { &self, node: &InFile<SyntaxNodePtr>, precise_location: Option<TextRange>, - ) -> TextRange { + ) -> FileRange { let sema = &self.sema; (|| { let precise_location = precise_location?; @@ -280,10 +281,11 @@ impl DiagnosticsContext<'_> { } })() .unwrap_or_else(|| sema.diagnostics_display_range(node.clone())) - .range } } +/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files +/// due to macros.
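Because `Diagnostic` now carries a `FileRange` rather than a bare `TextRange`, callers can no longer assume every result lies in the file they queried. A minimal consumer-side sketch (editorial, not part of the patch; it assumes only the public items visible in this file) of bucketing results by the file their range actually points into, much as the reworked test harness below does with `into_group_map`:

```rust
use std::collections::HashMap;

fn diagnostics_by_file(
    db: &RootDatabase,
    config: &DiagnosticsConfig,
    file_id: FileId,
) -> HashMap<FileId, Vec<Diagnostic>> {
    let mut by_file: HashMap<FileId, Vec<Diagnostic>> = HashMap::new();
    // A diagnostic's range may point into another file, e.g. for errors
    // produced inside an `include!`d file, so group by `range.file_id`.
    for d in diagnostics(db, config, &AssistResolveStrategy::None, file_id) {
        by_file.entry(d.range.file_id).or_default().push(d);
    }
    by_file
}
```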
pub fn diagnostics( db: &RootDatabase, config: &DiagnosticsConfig, @@ -300,7 +302,7 @@ pub fn diagnostics( Diagnostic::new( DiagnosticCode::RustcHardError("syntax-error"), format!("Syntax Error: {err}"), - err.range(), + FileRange { file_id, range: err.range() }, ) })); @@ -363,6 +365,7 @@ pub fn diagnostics( AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::TraitImplIncorrectSafety(d) => handlers::trait_impl_incorrect_safety::trait_impl_incorrect_safety(&ctx, &d), AnyDiagnostic::TraitImplMissingAssocItems(d) => handlers::trait_impl_missing_assoc_item::trait_impl_missing_assoc_item(&ctx, &d), + AnyDiagnostic::TraitImplRedundantAssocItems(d) => handlers::trait_impl_redundant_assoc_item::trait_impl_redundant_assoc_item(&ctx, &d), AnyDiagnostic::TraitImplOrphan(d) => handlers::trait_impl_orphan::trait_impl_orphan(&ctx, &d), AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), @@ -569,12 +572,28 @@ fn adjusted_display_range<N: AstNode>( ctx: &DiagnosticsContext<'_>, diag_ptr: InFile<SyntaxNodePtr>, adj: &dyn Fn(N) -> Option<TextRange>, -) -> TextRange { +) -> FileRange { let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr); let source_file = ctx.sema.db.parse(file_id); - find_node_at_range::<N>(&source_file.syntax_node(), range) - .filter(|it| it.syntax().text_range() == range) - .and_then(adj) - .unwrap_or(range) + FileRange { + file_id, + range: find_node_at_range::<N>(&source_file.syntax_node(), range) + .filter(|it| it.syntax().text_range() == range) + .and_then(adj) + .unwrap_or(range), + } +} + +// FIXME Replace the one above with this one? +fn adjusted_display_range_new<N: AstNode>( + ctx: &DiagnosticsContext<'_>, + diag_ptr: InFile<AstPtr<N>>, + adj: &dyn Fn(N) -> Option<TextRange>, +) -> FileRange { + let source_file = ctx.sema.parse_or_expand(diag_ptr.file_id); + let node = diag_ptr.value.to_node(&source_file); + diag_ptr + .with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range())) + .original_node_file_range_rooted(ctx.sema.db) } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index c766a018bfd05..48e0363c9ca8d 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -7,6 +7,7 @@ use ide_db::{ base_db::{fixture::WithFixture, SourceDatabaseExt}, LineIndexDatabase, RootDatabase, }; +use itertools::Itertools; use stdx::trim_indent; use test_utils::{assert_eq_text, extract_annotations, MiniCore}; @@ -103,33 +104,39 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) { #[track_caller] pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) { let (db, files) = RootDatabase::with_many_files(ra_fixture); + let mut annotations = files + .iter() + .copied() + .flat_map(|file_id| { + super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id).into_iter().map( + |d| { + let mut annotation = String::new(); + if let Some(fixes) = &d.fixes { + assert!(!fixes.is_empty()); + annotation.push_str("💡 ") + } + annotation.push_str(match d.severity { + Severity::Error => "error", + Severity::WeakWarning => "weak", + Severity::Warning => "warn", + Severity::Allow => "allow", + }); + annotation.push_str(": "); + annotation.push_str(&d.message); + (d.range, annotation) + }, + ) + }) + .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation))) + .into_group_map(); for file_id in files
{ let line_index = db.line_index(file_id); - let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); + let mut actual = annotations.remove(&file_id).unwrap_or_default(); let expected = extract_annotations(&db.file_text(file_id)); - let mut actual = diagnostics - .into_iter() - .map(|d| { - let mut annotation = String::new(); - if let Some(fixes) = &d.fixes { - assert!(!fixes.is_empty()); - annotation.push_str("💡 ") - } - annotation.push_str(match d.severity { - Severity::Error => "error", - Severity::WeakWarning => "weak", - Severity::Warning => "warn", - Severity::Allow => "allow", - }); - annotation.push_str(": "); - annotation.push_str(&d.message); - (d.range, annotation) - }) - .collect::>(); actual.sort_by_key(|(range, _)| range.start()); if expected.is_empty() { - // makes minicore smoke test debugable + // makes minicore smoke test debuggable for (e, _) in &actual { eprintln!( "Code in range {e:?} = {}", diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs index 66832a0bee496..d756e7a63eb9b 100644 --- a/crates/ide-ssr/src/lib.rs +++ b/crates/ide-ssr/src/lib.rs @@ -3,7 +3,7 @@ //! Allows searching the AST for code that matches one or more patterns and then replacing that code //! based on a template. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] // Feature: Structural Search and Replace // diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index d5c3439f95638..0943574ec1b5b 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" crossbeam-channel = "0.5.5" +arrayvec = "0.7.4" either.workspace = true itertools.workspace = true tracing.workspace = true diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs index fb79b5dc211a4..d7f82b4af3e10 100644 --- a/crates/ide/src/annotations.rs +++ b/crates/ide/src/annotations.rs @@ -1,4 +1,4 @@ -use hir::{HasSource, InFile, Semantics}; +use hir::{HasSource, InFile, InRealFile, Semantics}; use ide_db::{ base_db::{FileId, FilePosition, FileRange}, defs::Definition, @@ -149,8 +149,8 @@ pub(crate) fn annotations( node: InFile, source_file_id: FileId, ) -> Option<(TextRange, Option)> { - if let Some(InFile { file_id, value }) = node.original_ast_node(db) { - if file_id == source_file_id.into() { + if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) { + if file_id == source_file_id { return Some(( value.syntax().text_range(), value.name().map(|name| name.syntax().text_range()), diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index f834f2ce59279..458b852e2a1e7 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -1,6 +1,8 @@ //! 
Entry point for call-hierarchy -use hir::Semantics; +use std::iter; + +use hir::{DescendPreference, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -66,7 +68,10 @@ pub(crate) fn incoming_calls( def.try_to_nav(sema.db) }); if let Some(nav) = nav { - calls.add(nav, sema.original_range(name.syntax()).range); + calls.add(nav.call_site, sema.original_range(name.syntax()).range); + if let Some(other) = nav.def_site { + calls.add(other, sema.original_range(name.syntax()).range); + } } } } @@ -87,7 +92,7 @@ pub(crate) fn outgoing_calls( })?; let mut calls = CallLocations::default(); - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast)) .filter_map(|item| match item { @@ -117,8 +122,9 @@ pub(crate) fn outgoing_calls( function.try_to_nav(db).zip(Some(range)) } }?; - Some((nav_target, range)) + Some(nav_target.into_iter().zip(iter::repeat(range))) }) + .flatten() .for_each(|(nav, range)| calls.add(nav, range)); Some(calls.into_items()) @@ -149,7 +155,7 @@ mod tests { fn check_hierarchy( ra_fixture: &str, - expected: Expect, + expected_nav: Expect, expected_incoming: Expect, expected_outgoing: Expect, ) { @@ -158,7 +164,7 @@ mod tests { let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; assert_eq!(navs.len(), 1); let nav = navs.pop().unwrap(); - expected.assert_eq(&nav.debug_render()); + expected_nav.assert_eq(&nav.debug_render()); let item_pos = FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index ac15b6aba6189..9760f9daf0a39 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -12,7 +12,9 @@ use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions use stdx::format_to; use url::Url; -use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs}; +use hir::{ + db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, HasAttrs, +}; use ide_db::{ base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase}, defs::{Definition, NameClass, NameRefClass}, @@ -144,7 +146,7 @@ pub(crate) fn external_docs( kind if kind.is_trivia() => 0, _ => 1, })?; - let token = sema.descend_into_macros_single(token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token); let node = token.parent()?; let definition = match_ast! { @@ -286,7 +288,7 @@ impl DocCommentToken { let original_start = doc_token.text_range().start(); let relative_comment_offset = offset - original_start - prefix_len; - sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| { + sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| { let (node, descended_prefix_len) = match_ast! 
{ match t { ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index 9ae70ae66f504..f388aea4c379b 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -1,4 +1,4 @@ -use std::ffi::OsStr; +use std::{ffi::OsStr, iter}; use expect_test::{expect, Expect}; use hir::Semantics; @@ -63,10 +63,12 @@ fn check_doc_links(ra_fixture: &str) { let defs = extract_definitions_from_docs(&docs); let actual: Vec<_> = defs .into_iter() - .map(|(_, link, ns)| { + .flat_map(|(_, link, ns)| { let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns) .unwrap_or_else(|| panic!("Failed to resolve {link}")); - let nav_target = def.try_to_nav(sema.db).unwrap(); + def.try_to_nav(sema.db).unwrap().into_iter().zip(iter::repeat(link)) + }) + .map(|(nav_target, link)| { let range = FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() }; (range, link) diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index 3220774567750..024053effe423 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::Semantics; +use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics}; use ide_db::{ base_db::FileId, helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, @@ -40,16 +40,20 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // struct Bar; // ``` - let derive = - sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| { - let hir_file = sema.hir_file_for(&descended.parent()?); - if !hir_file.is_derive_attr_pseudo_expansion(db) { + let derive = sema + .descend_into_macros(DescendPreference::None, tok.clone()) + .into_iter() + .find_map(|descended| { + let macro_file = sema.hir_file_for(&descended.parent()?).macro_file()?; + if !macro_file.is_derive_attr_pseudo_expansion(db) { return None; } let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string(); // up map out of the #[derive] expansion - let token = hir::InFile::new(hir_file, descended).upmap(db)?.value; + let InFile { file_id, value: tokens } = + hir::InMacroFile::new(macro_file, descended).upmap_once(db); + let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?; let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; let expansions = sema.expand_derive_macro(&attr)?; let idx = attr diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 9b2ff070c74b1..b706e959d34ef 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -1,6 +1,6 @@ use std::iter::successors; -use hir::Semantics; +use hir::{DescendPreference, Semantics}; use ide_db::RootDatabase; use syntax::{ algo::{self, skip_trivia_token}, @@ -141,9 +141,9 @@ fn extend_tokens_from_range( // compute original mapped token range let extended = { let fst_expanded = - sema.descend_into_macros_single(first_token.clone(), original_range.start()); + sema.descend_into_macros_single(DescendPreference::None, first_token.clone()); let lst_expanded = - sema.descend_into_macros_single(last_token.clone(), original_range.end()); + sema.descend_into_macros_single(DescendPreference::None, last_token.clone()); let mut lca = algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?; lca = shallowest_node(&lca); @@ 
-154,10 +154,10 @@ fn extend_tokens_from_range( }; // Compute parent node range - let validate = |offset: TextSize| { + let validate = || { let extended = &extended; move |token: &SyntaxToken| -> bool { - let expanded = sema.descend_into_macros_single(token.clone(), offset); + let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone()); let parent = match expanded.parent() { Some(it) => it, None => return false, @@ -171,14 +171,14 @@ fn extend_tokens_from_range( let token = token.prev_token()?; skip_trivia_token(token, Direction::Prev) }) - .take_while(validate(original_range.start())) + .take_while(validate()) .last()?; let last = successors(Some(last_token), |token| { let token = token.next_token()?; skip_trivia_token(token, Direction::Next) }) - .take_while(validate(original_range.end())) + .take_while(validate()) .last()?; let range = first.text_range().cover(last.text_range()); diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index 7e0fab42608b8..fae1007435432 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, Semantics}; +use hir::{AsAssocItem, DescendPreference, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, RootDatabase, @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec = sema - .descend_into_macros(original_token, offset) + .descend_into_macros(DescendPreference::None, original_token) .iter() .filter_map(|token| { let parent = token.parent()?; @@ -66,6 +66,7 @@ pub(crate) fn goto_declaration( let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; item.try_to_nav(db) }) + .flatten() .collect(); if info.is_empty() { diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index e09b9f3914820..7491879a67fb4 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -4,7 +4,7 @@ use crate::{ doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget, RangeInfo, TryToNav, }; -use hir::{AsAssocItem, AssocItem, Semantics}; +use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics}; use ide_db::{ base_db::{AnchoredPath, FileId, FileLoader}, defs::{Definition, IdentClass}, @@ -52,21 +52,34 @@ pub(crate) fn goto_definition( if let Some(doc_comment) = token_as_doc_comment(&original_token) { return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| { let nav = def.try_to_nav(db)?; - Some(RangeInfo::new(link_range, vec![nav])) + Some(RangeInfo::new(link_range, nav.collect())) }); } + + if let Some((range, resolution)) = + sema.check_for_format_args_template(original_token.clone(), offset) + { + return Some(RangeInfo::new( + range, + match resolution { + Some(res) => def_to_nav(db, Definition::from(res)), + None => vec![], + }, + )); + } + let navs = sema - .descend_into_macros(original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone()) .into_iter() .filter_map(|token| { let parent = token.parent()?; - if let Some(tt) = ast::TokenTree::cast(parent) { + if let Some(tt) = ast::TokenTree::cast(parent.clone()) { if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) { return Some(vec![x]); } } Some( - IdentClass::classify_token(sema, &token)? + IdentClass::classify_node(sema, &parent)? 
.definitions() .into_iter() .flat_map(|def| { @@ -75,6 +88,7 @@ pub(crate) fn goto_definition( .resolved_crate(db) .map(|it| it.root_module().to_nav(sema.db)) .into_iter() + .flatten() .collect(); } try_filter_trait_item_definition(sema, &def) @@ -125,6 +139,7 @@ fn try_lookup_include_path( docs: None, }) } + /// finds the trait definition of an impl'd item, except function /// e.g. /// ```rust @@ -153,13 +168,13 @@ fn try_filter_trait_item_definition( .iter() .filter(|itm| discriminant(*itm) == discri_value) .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten()) - .map(|it| vec![it]) + .map(|it| it.collect()) } } } fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec { - def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default() + def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default() } #[cfg(test)] @@ -399,11 +414,11 @@ fn bar() { //- /lib.rs macro_rules! define_fn { () => (fn foo() {}) + //^^^ } define_fn!(); //^^^^^^^^^^^^^ - fn bar() { $0foo(); } @@ -807,18 +822,13 @@ mod confuse_index { fn foo(); } fn goto_through_format() { check( r#" +//- minicore: fmt #[macro_export] macro_rules! format { ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*))) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args { - ($fmt:expr) => ({ /* compiler built-in */ }); - ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) -} pub mod __export { - pub use crate::format_args; + pub use core::format_args; fn foo() {} // for index confusion } fn foo() -> i8 {} @@ -1738,9 +1748,9 @@ macro_rules! foo { fn $ident(Foo { $ident }: Foo) {} } } -foo!(foo$0); - //^^^ - //^^^ + foo!(foo$0); + //^^^ + //^^^ "#, ); check( @@ -2054,6 +2064,20 @@ fn f2() { struct S2; S1::e$0(); } +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "world"; + // ^ + format_args!("hello {a$0}"); +} "#, ); } diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 544c6b42317eb..6384db39d7c62 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, Impl, Semantics}; +use hir::{AsAssocItem, DescendPreference, Impl, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -34,7 +34,7 @@ pub(crate) fn goto_implementation( })?; let range = original_token.text_range(); let navs = - sema.descend_into_macros(original_token, offset) + sema.descend_into_macros(DescendPreference::None, original_token) .into_iter() .filter_map(|token| token.parent().and_then(ast::NameLike::cast)) .filter_map(|node| match &node { @@ -82,7 +82,11 @@ pub(crate) fn goto_implementation( } fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec { - Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect() + Impl::all_for_type(sema.db, ty) + .into_iter() + .filter_map(|imp| imp.try_to_nav(sema.db)) + .flatten() + .collect() } fn impls_for_trait( @@ -92,6 +96,7 @@ fn impls_for_trait( Impl::all_for_trait(sema.db, trait_) .into_iter() .filter_map(|imp| imp.try_to_nav(sema.db)) + .flatten() .collect() } @@ -109,6 +114,7 @@ fn impls_for_trait_item( })?; item.try_to_nav(sema.db) }) + .flatten() .collect() } @@ -249,7 +255,7 @@ impl T for &Foo {} r#" //- minicore: copy, derive #[derive(Copy)] -//^^^^^^^^^^^^^^^ + //^^^^ struct Foo$0; "#, ); diff --git a/crates/ide/src/goto_type_definition.rs 
b/crates/ide/src/goto_type_definition.rs index 955923d76910d..ad393d98001b2 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -1,3 +1,4 @@ +use hir::{DescendPreference, GenericParam}; use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; @@ -30,14 +31,45 @@ pub(crate) fn goto_type_definition( let mut res = Vec::new(); let mut push = |def: Definition| { - if let Some(nav) = def.try_to_nav(db) { - if !res.contains(&nav) { - res.push(nav); + if let Some(navs) = def.try_to_nav(db) { + for nav in navs { + if !res.contains(&nav) { + res.push(nav); + } } } }; + let mut process_ty = |ty: hir::Type| { + // collect from each `ty` into the `res` result vec + let ty = ty.strip_references(); + ty.walk(db, |t| { + if let Some(adt) = t.as_adt() { + push(adt.into()); + } else if let Some(trait_) = t.as_dyn_trait() { + push(trait_.into()); + } else if let Some(traits) = t.as_impl_traits(db) { + traits.for_each(|it| push(it.into())); + } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { + push(trait_.into()); + } + }); + }; + if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) { + if let Some(ty) = resolution.and_then(|res| match Definition::from(res) { + Definition::Const(it) => Some(it.ty(db)), + Definition::Static(it) => Some(it.ty(db)), + Definition::GenericParam(GenericParam::ConstParam(it)) => Some(it.ty(db)), + Definition::Local(it) => Some(it.ty(db)), + Definition::Adt(hir::Adt::Struct(it)) => Some(it.ty(db)), + _ => None, + }) { + process_ty(ty); + } + return Some(RangeInfo::new(range, res)); + } + let range = token.text_range(); - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|token| { let ty = sema @@ -75,21 +107,7 @@ pub(crate) fn goto_type_definition( }); ty }) - .for_each(|ty| { - // collect from each `ty` into the `res` result vec - let ty = ty.strip_references(); - ty.walk(db, |t| { - if let Some(adt) = t.as_adt() { - push(adt.into()); - } else if let Some(trait_) = t.as_dyn_trait() { - push(trait_.into()); - } else if let Some(traits) = t.as_impl_traits(db) { - traits.for_each(|it| push(it.into())); - } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { - push(trait_.into()); - } - }); - }); + .for_each(process_ty); Some(RangeInfo::new(range, res)) } @@ -325,6 +343,42 @@ struct Baz(T); //^^^ fn foo(x$0: Bar, Baz) {} +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ + fn test() { + let a = Bar; + format_args!("hello {a$0}"); +} +"#, + ); + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ + fn test() { + format_args!("hello {Bar$0}"); +} +"#, + ); + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ +const BAR: Bar = Bar; +fn test() { + format_args!("hello {BAR$0}"); +} "#, ); } diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index a7f5ae92a4cac..3aed007f3ea55 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1,4 +1,6 @@ -use hir::Semantics; +use std::iter; + +use hir::{DescendPreference, Semantics}; use ide_db::{ base_db::{FileId, FilePosition, FileRange}, defs::{Definition, IdentClass}, @@ -15,7 +17,6 @@ use syntax::{ SyntaxKind::{self, IDENT, INT_NUMBER}, SyntaxNode, SyntaxToken, TextRange, T, }; -use text_edit::TextSize; use 
crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav}; @@ -116,7 +117,7 @@ fn highlight_closure_captures( local .sources(sema.db) .into_iter() - .map(|x| x.to_nav(sema.db)) + .flat_map(|x| x.to_nav(sema.db)) .filter(|decl| decl.file_id == file_id) .filter_map(|decl| decl.focus_range) .map(move |range| HighlightedRange { range, category }) @@ -132,7 +133,16 @@ fn highlight_references( token: SyntaxToken, FilePosition { file_id, offset }: FilePosition, ) -> Option> { - let defs = find_defs(sema, token.clone(), offset); + let defs = if let Some((range, resolution)) = + sema.check_for_format_args_template(token.clone(), offset) + { + match resolution.map(Definition::from) { + Some(def) => iter::once(def).collect(), + None => return Some(vec![HighlightedRange { range, category: None }]), + } + } else { + find_defs(sema, token.clone()) + }; let usages = defs .iter() .filter_map(|&d| { @@ -206,7 +216,7 @@ fn highlight_references( local .sources(sema.db) .into_iter() - .map(|x| x.to_nav(sema.db)) + .flat_map(|x| x.to_nav(sema.db)) .filter(|decl| decl.file_id == file_id) .filter_map(|decl| decl.focus_range) .map(|range| HighlightedRange { range, category }) @@ -215,21 +225,27 @@ fn highlight_references( }); } def => { - let hl_range = match def { + let navs = match def { Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(sema.db, module)) + NavigationTarget::from_module_to_decl(sema.db, module) + } + def => match def.try_to_nav(sema.db) { + Some(it) => it, + None => continue, + }, + }; + for nav in navs { + if nav.file_id != file_id { + continue; + } + let hl_range = nav.focus_range.map(|range| { + let category = references::decl_mutability(&def, node, range) + .then_some(ReferenceCategory::Write); + HighlightedRange { range, category } + }); + if let Some(hl_range) = hl_range { + res.insert(hl_range); } - def => def.try_to_nav(sema.db), - } - .filter(|decl| decl.file_id == file_id) - .and_then(|decl| decl.focus_range) - .map(|range| { - let category = references::decl_mutability(&def, node, range) - .then_some(ReferenceCategory::Write); - HighlightedRange { range, category } - }); - if let Some(hl_range) = hl_range { - res.insert(hl_range); } } } @@ -456,12 +472,8 @@ fn cover_range(r0: Option, r1: Option) -> Option, - token: SyntaxToken, - offset: TextSize, -) -> FxHashSet { - sema.descend_into_macros(token, offset) +fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet { + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|token| IdentClass::classify_token(sema, &token)) .map(IdentClass::definitions_no_ops) @@ -1620,6 +1632,23 @@ fn f2(t: T) { T::C; T::f(); } +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "foo"; + // ^ + format_args!("hello {a} {a$0} {}", a); + // ^read + // ^read + // ^read +} "#, ); } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index e0b64fe7988e5..5ad119ace89db 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -6,7 +6,7 @@ mod tests; use std::iter; use either::Either; -use hir::{db::DefDatabase, HasSource, LangItem, Semantics}; +use hir::{db::DefDatabase, DescendPreference, HasSource, LangItem, Semantics}; use ide_db::{ base_db::FileRange, defs::{Definition, IdentClass, NameRefClass, OperatorClass}, @@ -21,6 +21,7 @@ use crate::{ doc_links::token_as_doc_comment, markdown_remove::remove_markdown, markup::Markup, + navigation_target::UpmappingResult, 
runnables::{runnable_fn, runnable_mod}, FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, }; @@ -73,7 +74,7 @@ impl HoverAction { it.module(db)?, it.name(db).map(|name| name.display(db).to_string()), ), - nav: it.try_to_nav(db)?, + nav: it.try_to_nav(db)?.call_site(), }) }) .collect(); @@ -150,6 +151,19 @@ fn hover_simple( }); } + if let Some((range, resolution)) = + sema.check_for_format_args_template(original_token.clone(), offset) + { + let res = hover_for_definition( + sema, + file_id, + Definition::from(resolution?), + &original_token.parent()?, + config, + )?; + return Some(RangeInfo::new(range, res)); + } + let in_attr = original_token .parent_ancestors() .filter_map(ast::Item::cast) @@ -161,11 +175,10 @@ fn hover_simple( // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important - let descended = if in_attr { - [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into() - } else { - sema.descend_into_macros_with_same_text(original_token.clone(), offset) - }; + let descended = sema.descend_into_macros( + if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText }, + original_token.clone(), + ); let descended = || descended.iter(); let result = descended() @@ -298,11 +311,11 @@ pub(crate) fn hover_for_definition( sema: &Semantics<'_, RootDatabase>, file_id: FileId, definition: Definition, - node: &SyntaxNode, + scope_node: &SyntaxNode, config: &HoverConfig, ) -> Option { let famous_defs = match &definition { - Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())), + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), _ => None, }; render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| { @@ -330,22 +343,26 @@ fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option return it.try_to_nav(db).map(to_action), + Definition::Trait(it) => { + return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action) + } Definition::Adt(it) => Some(it), Definition::SelfType(it) => it.self_ty(db).as_adt(), _ => None, }?; - adt.try_to_nav(db).map(to_action) + adt.try_to_nav(db).map(UpmappingResult::call_site).map(to_action) } fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option { match def { - Definition::Function(it) => it.try_to_nav(db).map(|nav_target| { - HoverAction::Reference(FilePosition { - file_id: nav_target.file_id, - offset: nav_target.focus_or_full_range().start(), + Definition::Function(it) => { + it.try_to_nav(db).map(UpmappingResult::call_site).map(|nav_target| { + HoverAction::Reference(FilePosition { + file_id: nav_target.file_id, + offset: nav_target.focus_or_full_range().start(), + }) }) - }), + } _ => None, } } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index d3d492f3fdef0..d5ec336fc7ed7 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -6613,3 +6613,115 @@ fn test() { "#]], ); } + +#[test] +fn format_args_implicit() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!("{aaaaa$0}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn format_args_implicit2() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!("{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), 
align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn format_args_implicit_raw() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!(r"{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn format_args_implicit_nested() { + check( + r#" +//- minicore: fmt +macro_rules! foo { + ($($tt:tt)*) => { + format_args!($($tt)*) + } +} +fn test() { +let aaaaa = "foo"; +foo!(r"{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn method_call_without_parens() { + check( + r#" +struct S; +impl S { + fn foo(&self, t: T) {} +} + +fn main() { + S.foo$0; +} +"#, + expect![[r#" + *foo* + + ```rust + test::S + ``` + + ```rust + fn foo(&self, t: T) + ``` + "#]], + ); +} diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 7ea9d4f1038b4..ca334e9157977 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -315,6 +315,7 @@ impl HirWrite for InlayHintLabelBuilder<'_> { } self.make_new_part(); let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return }; + let location = location.call_site(); let location = FileRange { file_id: location.file_id, range: location.focus_or_full_range() }; self.location = Some(location); diff --git a/crates/ide/src/inlay_hints/closure_captures.rs b/crates/ide/src/inlay_hints/closure_captures.rs index d691303c18b81..2f8b959516df8 100644 --- a/crates/ide/src/inlay_hints/closure_captures.rs +++ b/crates/ide/src/inlay_hints/closure_captures.rs @@ -2,6 +2,7 @@ //! //! Tests live in [`bind_pat`][super::bind_pat] module. use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use stdx::TupleExt; use syntax::ast::{self, AstNode}; use text_edit::{TextRange, TextSize}; @@ -73,7 +74,9 @@ pub(super) fn hints( capture.display_place(sema.db) ), None, - source.name().and_then(|name| name.syntax().original_file_range_opt(sema.db)), + source.name().and_then(|name| { + name.syntax().original_file_range_opt(sema.db).map(TupleExt::head) + }), ); acc.push(InlayHint { needs_resolve: label.needs_resolve(), diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs index 60f1f3496f6cd..9cbaed090dc78 100644 --- a/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/crates/ide/src/inlay_hints/implicit_drop.rs @@ -62,7 +62,11 @@ pub(super) fn hints( match_ast! 
{
         match expr {
             ast::BlockExpr(x) => x.stmt_list().and_then(|x| x.r_curly_token()).map(|x| x.text_range()).unwrap_or_else(|| expr.text_range()),
-            _ => expr.text_range(),
+            // make the inlay hint appear after the semicolon if there is one
+            _ => {
+                let nearest_semicolon = nearest_token_after_node(expr, syntax::SyntaxKind::SEMICOLON);
+                nearest_semicolon.map(|x| x.text_range()).unwrap_or_else(|| expr.text_range())
+            },
         }
     }
 }
@@ -95,7 +99,7 @@ pub(super) fn hints(
         label.append_str(")");
     acc.push(InlayHint {
         range,
-        position: InlayHintPosition::Before,
+        position: InlayHintPosition::After,
         pad_left: true,
         pad_right: true,
         kind: InlayKind::Drop,
@@ -109,6 +113,16 @@ pub(super) fn hints(
     Some(())
 }
 
+fn nearest_token_after_node(
+    node: &syntax::SyntaxNode,
+    token_type: syntax::SyntaxKind,
+) -> Option<syntax::SyntaxToken> {
+    node.siblings_with_tokens(syntax::Direction::Next)
+        .filter_map(|it| it.as_token().map(|it| it.clone()))
+        .filter(|it| it.kind() == token_type)
+        .next()
+}
+
 #[cfg(test)]
 mod tests {
     use crate::{
@@ -129,7 +143,7 @@ mod tests {
     let x = X;
     if 2 == 5 {
         return;
-        //^^^^^^ drop(x)
+      //^ drop(x)
     }
 }
 //^ drop(x)
@@ -176,7 +190,7 @@ mod tests {
     let x = X;
     let t_opt = Some(2);
     let t = t_opt?;
-    //^^^^^^ drop(x)
+  //^ drop(x)
     Some(())
 }
 //^ drop(x)
diff --git a/crates/ide/src/interpret_function.rs b/crates/ide/src/interpret_function.rs
index d06ffd535758b..21697490482ec 100644
--- a/crates/ide/src/interpret_function.rs
+++ b/crates/ide/src/interpret_function.rs
@@ -1,10 +1,10 @@
 use hir::Semantics;
-use ide_db::base_db::SourceDatabaseExt;
-use ide_db::RootDatabase;
-use ide_db::{base_db::FilePosition, LineIndexDatabase};
+use ide_db::{
+    base_db::{FilePosition, SourceDatabaseExt},
+    LineIndexDatabase, RootDatabase,
+};
 use std::{fmt::Write, time::Instant};
-use syntax::TextRange;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
 
 // Feature: Interpret Function
 //
@@ -28,7 +28,9 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<String> {
     let sema = Semantics::new(db);
     let source_file = sema.parse(position.file_id);
-    let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+    let item = ancestors_at_offset(source_file.syntax(), position.offset)
+        .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+        .find_map(ast::Item::cast)?;
     let def = match item {
         ast::Item::Fn(it) => sema.to_def(&it)?,
         _ => return None,
     };
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 2320c95b4a1a3..d8f6e4e1b1b15 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -8,7 +8,7 @@
 //! in this crate.
 
 // For proving that RootDatabase is RefUnwindSafe.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![recursion_limit = "128"]
 
@@ -100,7 +100,7 @@ pub use crate::{
     markup::Markup,
     moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
     move_item::Direction,
-    navigation_target::NavigationTarget,
+    navigation_target::{NavigationTarget, UpmappingResult},
     prime_caches::ParallelPrimeCachesProgress,
     references::ReferenceSearchResult,
     rename::RenameError,
@@ -230,7 +230,7 @@ impl Analysis {
     // `AnalysisHost` for creating a fully-featured analysis.
pub fn from_single_file(text: String) -> (Analysis, FileId) { let mut host = AnalysisHost::default(); - let file_id = FileId(0); + let file_id = FileId::from_raw(0); let mut file_set = FileSet::default(); file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string())); let source_root = SourceRoot::new_local(file_set); @@ -413,6 +413,7 @@ impl Analysis { symbol_index::world_symbols(db, query) .into_iter() // xx: should we make this a par iter? .filter_map(|s| s.try_to_nav(db)) + .map(UpmappingResult::call_site) .collect::>() }) } diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 2ca2b5b1d5f3e..8e8bb5e0139ea 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -1,7 +1,7 @@ //! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports) //! for LSIF and LSP. -use hir::{AsAssocItem, AssocItemContainer, Crate, Semantics}; +use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics}; use ide_db::{ base_db::{CrateOrigin, FilePosition, LangCrateOrigin}, defs::{Definition, IdentClass}, @@ -99,7 +99,7 @@ pub(crate) fn moniker( }); } let navs = sema - .descend_into_macros(original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone()) .into_iter() .filter_map(|token| { IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 32f211c6b289c..6cb7d7724d5f7 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -2,10 +2,11 @@ use std::fmt; +use arrayvec::ArrayVec; use either::Either; use hir::{ - symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, HirDisplay, HirFileId, - InFile, LocalSource, ModuleSource, + db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, + HirDisplay, HirFileId, InFile, LocalSource, ModuleSource, }; use ide_db::{ base_db::{FileId, FileRange}, @@ -40,6 +41,8 @@ pub struct NavigationTarget { /// comments, and `focus_range` is the range of the identifier. /// /// Clients should place the cursor on this range when navigating to this target. + /// + /// This range must be contained within [`Self::full_range`]. 
pub focus_range: Option, pub name: SmolStr, pub kind: Option, @@ -70,15 +73,15 @@ impl fmt::Debug for NavigationTarget { } pub(crate) trait ToNav { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget; + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult; } pub(crate) trait TryToNav { - fn try_to_nav(&self, db: &RootDatabase) -> Option; + fn try_to_nav(&self, db: &RootDatabase) -> Option>; } impl TryToNav for Either { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { Either::Left(it) => it.try_to_nav(db), Either::Right(it) => it.try_to_nav(db), @@ -91,23 +94,30 @@ impl NavigationTarget { self.focus_range.unwrap_or(self.full_range) } - pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { + pub(crate) fn from_module_to_decl( + db: &RootDatabase, + module: hir::Module, + ) -> UpmappingResult { let name = module.name(db).map(|it| it.to_smol_str()).unwrap_or_default(); - if let Some(InFile { value, file_id }) = &module.declaration_source(db) { - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, *file_id, value.syntax(), value.name()); - let mut res = NavigationTarget::from_syntax( - file_id, - name, - focus_range, - full_range, - SymbolKind::Module, - ); - res.docs = module.docs(db); - res.description = Some(module.display(db).to_string()); - return res; + match module.declaration_source(db) { + Some(InFile { value, file_id }) => { + orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + let mut res = NavigationTarget::from_syntax( + file_id, + name.clone(), + focus_range, + full_range, + SymbolKind::Module, + ); + res.docs = module.docs(db); + res.description = Some(module.display(db).to_string()); + res + }, + ) + } + _ => module.to_nav(db), } - module.to_nav(db) } #[cfg(test)] @@ -133,13 +143,14 @@ impl NavigationTarget { db: &RootDatabase, InFile { file_id, value }: InFile<&dyn ast::HasName>, kind: SymbolKind, - ) -> NavigationTarget { - let name = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into()); + ) -> UpmappingResult { + let name: SmolStr = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into()); - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), value.name()); - - NavigationTarget::from_syntax(file_id, name, focus_range, full_range, kind) + orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax(file_id, name.clone(), focus_range, full_range, kind) + }, + ) } fn from_syntax( @@ -164,48 +175,51 @@ impl NavigationTarget { } impl TryToNav for FileSymbol { - fn try_to_nav(&self, db: &RootDatabase) -> Option { - let full_range = self.loc.original_range(db); - let focus_range = self.loc.original_name_range(db).and_then(|it| { - if it.file_id == full_range.file_id { - Some(it.range) - } else { - None - } - }); - - Some(NavigationTarget { - file_id: full_range.file_id, - name: self - .is_alias - .then(|| self.def.name(db)) - .flatten() - .map_or_else(|| self.name.clone(), |it| it.to_smol_str()), - alias: self.is_alias.then(|| self.name.clone()), - kind: Some(hir::ModuleDefId::from(self.def).into()), - full_range: full_range.range, - focus_range, - container_name: self.container_name.clone(), - description: match self.def { - hir::ModuleDef::Module(it) => Some(it.display(db).to_string()), - 
hir::ModuleDef::Function(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Const(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Static(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()), - hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()), - hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()), - hir::ModuleDef::BuiltinType(_) => None, - }, - docs: None, - }) + fn try_to_nav(&self, db: &RootDatabase) -> Option> { + let root = db.parse_or_expand(self.loc.hir_file_id); + self.loc.ptr.to_node(&root); + Some( + orig_range_with_focus( + db, + self.loc.hir_file_id, + &self.loc.ptr.to_node(&root), + Some(self.loc.name_ptr.to_node(&root)), + ) + .map(|(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget { + file_id, + name: self + .is_alias + .then(|| self.def.name(db)) + .flatten() + .map_or_else(|| self.name.clone(), |it| it.to_smol_str()), + alias: self.is_alias.then(|| self.name.clone()), + kind: Some(hir::ModuleDefId::from(self.def).into()), + full_range, + focus_range, + container_name: self.container_name.clone(), + description: match self.def { + hir::ModuleDef::Module(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Function(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Const(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Static(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()), + hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()), + hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()), + hir::ModuleDef::BuiltinType(_) => None, + }, + docs: None, + } + }), + ) } } impl TryToNav for Definition { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { Definition::Local(it) => Some(it.to_nav(db)), Definition::Label(it) => Some(it.to_nav(db)), @@ -233,7 +247,7 @@ impl TryToNav for Definition { } impl TryToNav for hir::ModuleDef { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { hir::ModuleDef::Module(it) => Some(it.to_nav(db)), hir::ModuleDef::Function(it) => it.try_to_nav(db), @@ -331,22 +345,26 @@ where D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay, D::Ast: ast::HasName, { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; - let mut res = NavigationTarget::from_named( - db, - src.as_ref().map(|it| it as &dyn ast::HasName), - D::KIND, - ); - res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); - res.container_name = self.container_name(db); - Some(res) + Some( + NavigationTarget::from_named( + db, + src.as_ref().map(|it| it as &dyn ast::HasName), + D::KIND, + ) + .map(|mut res| { + res.docs = self.docs(db); + res.description = Some(self.display(db).to_string()); + res.container_name = self.container_name(db); + res + }), + ) } } impl ToNav for hir::Module { - fn to_nav(&self, db: &RootDatabase) -> 
NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { let InFile { file_id, value } = self.definition_source(db); let name = self.name(db).map(|it| it.to_smol_str()).unwrap_or_default(); @@ -355,97 +373,125 @@ impl ToNav for hir::Module { ModuleSource::Module(node) => (node.syntax(), node.name()), ModuleSource::BlockExpr(node) => (node.syntax(), None), }; - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus); - NavigationTarget::from_syntax(file_id, name, focus_range, full_range, SymbolKind::Module) + + orig_range_with_focus(db, file_id, syntax, focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax( + file_id, + name.clone(), + focus_range, + full_range, + SymbolKind::Module, + ) + }, + ) } } impl TryToNav for hir::Impl { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.source(db)?; - let derive_attr = self.as_builtin_derive(db); + let derive_path = self.as_builtin_derive_path(db); - let (focus, syntax) = match &derive_attr { - Some(attr) => (None, attr.value.syntax()), - None => (value.self_ty(), value.syntax()), + let (file_id, focus, syntax) = match &derive_path { + Some(attr) => (attr.file_id.into(), None, attr.value.syntax()), + None => (file_id, value.self_ty(), value.syntax()), }; - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus); - Some(NavigationTarget::from_syntax( - file_id, - "impl".into(), - focus_range, - full_range, - SymbolKind::Impl, + Some(orig_range_with_focus(db, file_id, syntax, focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax( + file_id, + "impl".into(), + focus_range, + full_range, + SymbolKind::Impl, + ) + }, )) } } impl TryToNav for hir::ExternCrateDecl { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; let InFile { file_id, value } = src; let focus = value .rename() .map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right)); - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), focus); - let mut res = NavigationTarget::from_syntax( - file_id, - self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(), - focus_range, - full_range, - SymbolKind::Module, - ); - res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); - res.container_name = container_name(db, *self); - Some(res) + Some(orig_range_with_focus(db, file_id, value.syntax(), focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + let mut res = NavigationTarget::from_syntax( + file_id, + self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(), + focus_range, + full_range, + SymbolKind::Module, + ); + + res.docs = self.docs(db); + res.description = Some(self.display(db).to_string()); + res.container_name = container_name(db, *self); + res + }, + )) } } impl TryToNav for hir::Field { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; let field_source = match &src.value { FieldSource::Named(it) => { - let mut res = - NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field); - res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); - res - } - 
FieldSource::Pos(it) => { - let FileRange { file_id, range } = - src.with_value(it.syntax()).original_file_range(db); - NavigationTarget::from_syntax(file_id, "".into(), None, range, SymbolKind::Field) + NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map( + |mut res| { + res.docs = self.docs(db); + res.description = Some(self.display(db).to_string()); + res + }, + ) } + FieldSource::Pos(it) => orig_range(db, src.file_id, it.syntax()).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax( + file_id, + format!("{}", self.index()).into(), + focus_range, + full_range, + SymbolKind::Field, + ) + }, + ), }; Some(field_source) } } impl TryToNav for hir::Macro { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; let name_owner: &dyn ast::HasName = match &src.value { Either::Left(it) => it, Either::Right(it) => it, }; - let mut res = NavigationTarget::from_named( - db, - src.as_ref().with_value(name_owner), - self.kind(db).into(), - ); - res.docs = self.docs(db); - Some(res) + Some( + NavigationTarget::from_named( + db, + src.as_ref().with_value(name_owner), + self.kind(db).into(), + ) + .map(|mut res| { + res.docs = self.docs(db); + res + }), + ) } } impl TryToNav for hir::Adt { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { hir::Adt::Struct(it) => it.try_to_nav(db), hir::Adt::Union(it) => it.try_to_nav(db), @@ -455,7 +501,7 @@ impl TryToNav for hir::Adt { } impl TryToNav for hir::AssocItem { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { AssocItem::Function(it) => it.try_to_nav(db), AssocItem::Const(it) => it.try_to_nav(db), @@ -465,7 +511,7 @@ impl TryToNav for hir::AssocItem { } impl TryToNav for hir::GenericParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { hir::GenericParam::TypeParam(it) => it.try_to_nav(db), hir::GenericParam::ConstParam(it) => it.try_to_nav(db), @@ -475,7 +521,7 @@ impl TryToNav for hir::GenericParam { } impl ToNav for LocalSource { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { let InFile { file_id, value } = &self.source; let file_id = *file_id; let local = self.local; @@ -484,60 +530,61 @@ impl ToNav for LocalSource { Either::Right(it) => (it.syntax(), it.name()), }; - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, node, name); - - let name = local.name(db).to_smol_str(); - let kind = if local.is_self(db) { - SymbolKind::SelfParam - } else if local.is_param(db) { - SymbolKind::ValueParam - } else { - SymbolKind::Local - }; - NavigationTarget { - file_id, - name, - alias: None, - kind: Some(kind), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } + orig_range_with_focus(db, file_id, node, name).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + let name = local.name(db).to_smol_str(); + let kind = if local.is_self(db) { + SymbolKind::SelfParam + } else if local.is_param(db) { + SymbolKind::ValueParam + } else { + SymbolKind::Local + }; + NavigationTarget { + file_id, + name, + alias: None, + kind: Some(kind), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + } + }, + ) } } impl ToNav for 
hir::Local { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { self.primary_source(db).to_nav(db) } } impl ToNav for hir::Label { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { let InFile { file_id, value } = self.source(db); let name = self.name(db).to_smol_str(); - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()); - - NavigationTarget { - file_id, - name, - alias: None, - kind: Some(SymbolKind::Label), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } + orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map( + |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::Label), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + }, + ) } } impl TryToNav for hir::TypeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.merge().source(db)?; let name = self.name(db).to_smol_str(); @@ -556,51 +603,51 @@ impl TryToNav for hir::TypeParam { }; let focus = value.as_ref().either(|it| it.name(), |it| it.name()); - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus); - - Some(NavigationTarget { - file_id, - name, - alias: None, - kind: Some(SymbolKind::TypeParam), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - }) + Some(orig_range_with_focus(db, file_id, syntax, focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::TypeParam), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + }, + )) } } impl TryToNav for hir::TypeOrConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { self.split(db).try_to_nav(db) } } impl TryToNav for hir::LifetimeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.source(db)?; let name = self.name(db).to_smol_str(); - let FileRange { file_id, range } = - InFile::new(file_id, value.syntax()).original_file_range(db); - Some(NavigationTarget { - file_id, - name, - alias: None, - kind: Some(SymbolKind::LifetimeParam), - full_range: range, - focus_range: Some(range), - container_name: None, - description: None, - docs: None, - }) + Some(orig_range(db, file_id, value.syntax()).map( + |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::LifetimeParam), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + }, + )) } } impl TryToNav for hir::ConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.merge().source(db)?; let name = self.name(db).to_smol_str(); @@ -612,35 +659,178 @@ impl TryToNav for hir::ConstParam { } }; - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), value.name()); - Some(NavigationTarget { - file_id, - name, - 
alias: None,
-            kind: Some(SymbolKind::ConstParam),
-            full_range,
-            focus_range,
-            container_name: None,
-            description: None,
-            docs: None,
-        })
+        Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map(
+            |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+                file_id,
+                name: name.clone(),
+                alias: None,
+                kind: Some(SymbolKind::ConstParam),
+                full_range,
+                focus_range,
+                container_name: None,
+                description: None,
+                docs: None,
+            },
+        ))
+    }
+}
+
+#[derive(Debug)]
+pub struct UpmappingResult<T> {
+    /// The macro call site.
+    pub call_site: T,
+    /// The macro definition site, if relevant.
+    pub def_site: Option<T>,
+}
+
+impl<T> UpmappingResult<T> {
+    pub fn call_site(self) -> T {
+        self.call_site
+    }
+
+    pub fn collect<FI: FromIterator<T>>(self) -> FI {
+        FI::from_iter(self.into_iter())
+    }
+}
+
+impl<T> IntoIterator for UpmappingResult<T> {
+    type Item = T;
+
+    type IntoIter = <ArrayVec<T, 2> as IntoIterator>::IntoIter;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.def_site
+            .into_iter()
+            .chain(Some(self.call_site))
+            .collect::<ArrayVec<_, 2>>()
+            .into_iter()
     }
 }
 
+impl<T> UpmappingResult<T> {
+    fn map<U>(self, f: impl Fn(T) -> U) -> UpmappingResult<U> {
+        UpmappingResult { call_site: f(self.call_site), def_site: self.def_site.map(f) }
+    }
+}
+
+/// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions.
+/// May return two results if the mapped node originates from a macro definition, in which case the
+/// second result is the creating macro call.
 fn orig_range_with_focus(
     db: &RootDatabase,
     hir_file: HirFileId,
     value: &SyntaxNode,
     name: Option<impl AstNode>,
-) -> (FileId, TextRange, Option<TextRange>) {
-    let FileRange { file_id, range: full_range } =
-        InFile::new(hir_file, value).original_file_range(db);
-    let focus_range = name
-        .and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db))
-        .and_then(|range| if range.file_id == file_id { Some(range.range) } else { None });
-
-    (file_id, full_range, focus_range)
+) -> UpmappingResult<(FileRange, Option<FileRange>)> {
+    let Some(name) = name else { return orig_range(db, hir_file, value) };
+
+    let call_range = || {
+        db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+            .kind
+            .original_call_range(db)
+    };
+
+    let def_range = || {
+        db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+            .def
+            .definition_range(db)
+    };
+
+    let value_range = InFile::new(hir_file, value).original_file_range_opt(db);
+    let ((call_site_range, call_site_focus), def_site) =
+        match InFile::new(hir_file, name.syntax()).original_file_range_opt(db) {
+            // call site name
+            Some((focus_range, ctxt)) if ctxt.is_root() => {
+                // Try to upmap the node as well, if it ends up in the def site, go back to the call site
+                (
+                    (
+                        match value_range {
+                            // name is in the node in the macro input so we can return it
+                            Some((range, ctxt))
+                                if ctxt.is_root()
+                                    && range.file_id == focus_range.file_id
+                                    && range.range.contains_range(focus_range.range) =>
+                            {
+                                range
+                            }
+                            // name lies outside the node, so instead point to the macro call which
+                            // *should* contain the name
+                            _ => call_range(),
+                        },
+                        Some(focus_range),
+                    ),
+                    // no def site relevant
+                    None,
+                )
+            }
+
+            // def site name
+            // FIXME: This can be improved
+            Some((focus_range, _ctxt)) => {
+                match value_range {
+                    // but overall node is in macro input
+                    Some((range, ctxt)) if ctxt.is_root() => (
+                        // node mapped up in call site, show the node
+                        (range, None),
+                        // def site, if the name is in the (possibly) upmapped def site range, show the
+                        // def site
+                        {
+                            let (def_site, _) = def_range().original_node_file_range(db);
+                            (def_site.file_id == focus_range.file_id
+                                && def_site.range.contains_range(focus_range.range))
+                                .then_some((def_site, Some(focus_range)))
+                        },
+                    ),
+                    // node is in macro def, just show the focus
+                    _ => (
+                        // show the macro call
+                        (call_range(), None),
+                        Some((focus_range, Some(focus_range))),
+                    ),
+                }
+            }
+            // lost name? can't happen for single tokens
+            None => return orig_range(db, hir_file, value),
+        };
+
+    UpmappingResult {
+        call_site: (
+            call_site_range,
+            call_site_focus.and_then(|FileRange { file_id, range }| {
+                if call_site_range.file_id == file_id && call_site_range.range.contains_range(range)
+                {
+                    Some(range)
+                } else {
+                    None
+                }
+            }),
+        ),
+        def_site: def_site.map(|(def_site_range, def_site_focus)| {
+            (
+                def_site_range,
+                def_site_focus.and_then(|FileRange { file_id, range }| {
+                    if def_site_range.file_id == file_id
+                        && def_site_range.range.contains_range(range)
+                    {
+                        Some(range)
+                    } else {
+                        None
+                    }
+                }),
+            )
+        }),
+    }
+}
+
+fn orig_range(
+    db: &RootDatabase,
+    hir_file: HirFileId,
+    value: &SyntaxNode,
+) -> UpmappingResult<(FileRange, Option<FileRange>)> {
+    UpmappingResult {
+        call_site: (InFile::new(hir_file, value).original_file_range(db), None),
+        def_site: None,
+    }
 }
 
 #[cfg(test)]
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index 506f9452cf196..413dbf9c5dfc6 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -45,11 +45,11 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
         Some(module) => sema
             .to_def(&module)
             .into_iter()
-            .map(|module| NavigationTarget::from_module_to_decl(db, module))
+            .flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
             .collect(),
         None => sema
             .to_module_defs(position.file_id)
-            .map(|module| NavigationTarget::from_module_to_decl(db, module))
+            .flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
             .collect(),
     }
 }
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index f387bbf6b0143..6c0fb0baf2e2b 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -9,7 +9,9 @@
 //! at the index that the match starts at and its tree parent is
 //! resolved to the search element definition, we get a reference.
-use hir::{PathResolution, Semantics}; +use std::collections::HashMap; + +use hir::{DescendPreference, PathResolution, Semantics}; use ide_db::{ base_db::FileId, defs::{Definition, NameClass, NameRefClass}, @@ -60,19 +62,6 @@ pub(crate) fn find_all_refs( let syntax = sema.parse(position.file_id).syntax().clone(); let make_searcher = |literal_search: bool| { move |def: Definition| { - let declaration = match def { - Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(sema.db, module)) - } - def => def.try_to_nav(sema.db), - } - .map(|nav| { - let decl_range = nav.focus_or_full_range(); - Declaration { - is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range), - nav, - } - }); let mut usages = def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all(); @@ -80,7 +69,7 @@ pub(crate) fn find_all_refs( retain_adt_literal_usages(&mut usages, def, sema); } - let references = usages + let mut references = usages .into_iter() .map(|(file_id, refs)| { ( @@ -91,8 +80,30 @@ pub(crate) fn find_all_refs( .collect(), ) }) - .collect(); - + .collect::, _>>(); + let declaration = match def { + Definition::Module(module) => { + Some(NavigationTarget::from_module_to_decl(sema.db, module)) + } + def => def.try_to_nav(sema.db), + } + .map(|nav| { + let (nav, extra_ref) = match nav.def_site { + Some(call) => (call, Some(nav.call_site)), + None => (nav.call_site, None), + }; + if let Some(extra_ref) = extra_ref { + references + .entry(extra_ref.file_id) + .or_default() + .push((extra_ref.focus_or_full_range(), None)); + } + let decl_range = nav.focus_or_full_range(); + Declaration { + is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range), + nav, + } + }); ReferenceSearchResult { declaration, references } } }; @@ -109,7 +120,7 @@ pub(crate) fn find_all_refs( } None => { let search = make_searcher(false); - Some(find_defs(sema, &syntax, position.offset)?.map(search).collect()) + Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect()) } } } @@ -118,15 +129,27 @@ pub(crate) fn find_defs<'a>( sema: &'a Semantics<'_, RootDatabase>, syntax: &SyntaxNode, offset: TextSize, -) -> Option + 'a> { +) -> Option + 'a> { let token = syntax.token_at_offset(offset).find(|t| { matches!( t.kind(), - IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] + IDENT + | INT_NUMBER + | LIFETIME_IDENT + | STRING + | T![self] + | T![super] + | T![crate] + | T![Self] ) - }); - token.map(|token| { - sema.descend_into_macros_with_same_text(token, offset) + })?; + + if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) { + return resolution.map(Definition::from).map(|it| vec![it]); + } + + Some( + sema.descend_into_macros(DescendPreference::SameText, token) .into_iter() .filter_map(|it| ast::NameLike::cast(it.parent()?)) .filter_map(move |name_like| { @@ -162,7 +185,8 @@ pub(crate) fn find_defs<'a>( }; Some(def) }) - }) + .collect(), + ) } pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool { @@ -869,7 +893,7 @@ pub(super) struct Foo$0 { check_with_scope( code, - Some(SearchScope::single_file(FileId(2))), + Some(SearchScope::single_file(FileId::from_raw(2))), expect![[r#" quux Function FileId(0) 19..35 26..30 @@ -1168,7 +1192,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> { } "#, expect![[r#" - 'a LifetimeParam FileId(0) 55..57 55..57 + 'a LifetimeParam FileId(0) 55..57 FileId(0) 63..65 FileId(0) 71..73 @@ 
-1186,7 +1210,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> { type Foo<'a, T> where T: 'a$0 = &'a T; "#, expect![[r#" - 'a LifetimeParam FileId(0) 9..11 9..11 + 'a LifetimeParam FileId(0) 9..11 FileId(0) 25..27 FileId(0) 31..33 @@ -1208,7 +1232,7 @@ impl<'a> Foo<'a> for &'a () { } "#, expect![[r#" - 'a LifetimeParam FileId(0) 47..49 47..49 + 'a LifetimeParam FileId(0) 47..49 FileId(0) 55..57 FileId(0) 64..66 @@ -2092,4 +2116,27 @@ fn main() { r#fn(); } "#]], ); } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "foo"; + format_args!("hello {a} {a$0} {}", a); + // ^ + // ^ + // ^ +} +"#, + expect![[r#" + a Local FileId(0) 20..21 20..21 + + FileId(0) 56..57 Read + FileId(0) 60..61 Read + FileId(0) 68..69 Read + "#]], + ); + } } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index ac9df5ed6d1f0..1febfabfcb7f4 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -4,16 +4,18 @@ //! tests. This module also implements a couple of magic tricks, like renaming //! `self` and to `self` (to switch between associated function and method). -use hir::{AsAssocItem, InFile, Semantics}; +use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ - base_db::FileId, + base_db::{FileId, FileRange}, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, RootDatabase, }; use itertools::Itertools; use stdx::{always, never}; -use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize}; +use syntax::{ + ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize, +}; use text_edit::TextEdit; @@ -34,23 +36,20 @@ pub(crate) fn prepare_rename( let syntax = source_file.syntax(); let res = find_definitions(&sema, syntax, position)? 
- .map(|(name_like, def)| { + .map(|(frange, kind, def)| { // ensure all ranges are valid if def.range_for_rename(&sema).is_none() { bail!("No references found at position") } - let Some(frange) = sema.original_range_opt(name_like.syntax()) else { - bail!("No references found at position"); - }; always!( frange.range.contains_inclusive(position.offset) && frange.file_id == position.file_id ); - Ok(match name_like { - ast::NameLike::Lifetime(_) => { + Ok(match kind { + SyntaxKind::LIFETIME => { TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end()) } _ => frange.range, @@ -93,7 +92,7 @@ pub(crate) fn rename( let defs = find_definitions(&sema, syntax, position)?; let ops: RenameResult> = defs - .map(|(_namelike, def)| { + .map(|(.., def)| { if let Definition::Local(local) = def { if let Some(self_param) = local.as_self_param(sema.db) { cov_mark::hit!(rename_self_to_param); @@ -134,11 +133,27 @@ pub(crate) fn will_rename_file( fn find_definitions( sema: &Semantics<'_, RootDatabase>, syntax: &SyntaxNode, - position: FilePosition, -) -> RenameResult> { - let symbols = sema - .find_nodes_at_offset_with_descend::(syntax, position.offset) - .map(|name_like| { + FilePosition { file_id, offset }: FilePosition, +) -> RenameResult> { + let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING)); + + if let Some((range, Some(resolution))) = + token.and_then(|token| sema.check_for_format_args_template(token, offset)) + { + return Ok(vec![( + FileRange { file_id, range }, + SyntaxKind::STRING, + Definition::from(resolution), + )] + .into_iter()); + } + + let symbols = + sema.find_nodes_at_offset_with_descend::(syntax, offset).map(|name_like| { + let kind = name_like.syntax().kind(); + let range = sema + .original_range_opt(name_like.syntax()) + .ok_or_else(|| format_err!("No references found at position"))?; let res = match &name_like { // renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet ast::NameLike::Name(name) @@ -163,7 +178,6 @@ fn find_definitions( Definition::Local(local_def) } }) - .map(|def| (name_like.clone(), def)) .ok_or_else(|| format_err!("No references found at position")), ast::NameLike::NameRef(name_ref) => { NameRefClass::classify(sema, name_ref) @@ -187,7 +201,7 @@ fn find_definitions( { Err(format_err!("Renaming aliases is currently unsupported")) } else { - Ok((name_like.clone(), def)) + Ok(def) } }) } @@ -203,11 +217,10 @@ fn find_definitions( _ => None, }) }) - .map(|def| (name_like, def)) .ok_or_else(|| format_err!("No references found at position")) } }; - res + res.map(|def| (range, kind, def)) }); let res: RenameResult> = symbols.collect(); @@ -218,7 +231,7 @@ fn find_definitions( Err(format_err!("No references found at position")) } else { // remove duplicates, comparing `Definition`s - Ok(v.into_iter().unique_by(|t| t.1)) + Ok(v.into_iter().unique_by(|&(.., def)| def).collect::>().into_iter()) } } Err(e) => Err(e), @@ -2663,4 +2676,44 @@ struct A; "error: Cannot rename a non-local definition.", ) } + + #[test] + fn implicit_format_args() { + check( + "fbar", + r#" +//- minicore: fmt +fn test() { + let foo = "foo"; + format_args!("hello {foo} {foo$0} {}", foo); +} +"#, + r#" +fn test() { + let fbar = "foo"; + format_args!("hello {fbar} {fbar} {}", fbar); +} +"#, + ); + } + + #[test] + fn implicit_format_args2() { + check( + "fo", + r#" +//- minicore: fmt +fn test() { + let foo = "foo"; + format_args!("hello {foo} {foo$0} {}", foo); +} +"#, + r#" +fn test() { + let fo = "foo"; + 
format_args!("hello {fo} {fo} {}", fo); +} +"#, + ); + } } diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 07cdddd15f82e..d334e66d3dd6e 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -2,14 +2,14 @@ use std::fmt; use ast::HasName; use cfg::CfgExpr; -use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics}; +use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics}; use ide_assists::utils::test_related_attribute; use ide_db::{ base_db::{FilePosition, FileRange}, defs::Definition, documentation::docs_from_attrs, helpers::visit_file_defs, - search::SearchScope, + search::{FileReferenceNode, SearchScope}, FxHashMap, FxHashSet, RootDatabase, SymbolKind, }; use itertools::Itertools; @@ -142,7 +142,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { Definition::Function(it) => it.source(db).map(|src| src.file_id), _ => None, }; - if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) { + if let Some(file_id) = file_id.filter(|file| file.macro_file().is_some()) { in_macro_expansion.entry(file_id).or_default().push(runnable); return; } @@ -240,7 +240,7 @@ fn find_related_tests( .flatten(); for ref_ in defs { let name_ref = match ref_.name { - ast::NameLike::NameRef(name_ref) => name_ref, + FileReferenceNode::NameRef(name_ref) => name_ref, _ => continue, }; if let Some(fn_def) = @@ -335,7 +335,8 @@ pub(crate) fn runnable_fn( sema.db, def.source(sema.db)?.as_ref().map(|it| it as &dyn ast::HasName), SymbolKind::Function, - ); + ) + .call_site(); let cfg = def.attrs(sema.db).cfg(); Some(Runnable { use_name_in_title: false, nav, kind, cfg }) } @@ -357,7 +358,7 @@ pub(crate) fn runnable_mod( let attrs = def.attrs(sema.db); let cfg = attrs.cfg(); - let nav = NavigationTarget::from_module_to_decl(sema.db, def); + let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site(); Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg }) } @@ -370,7 +371,7 @@ pub(crate) fn runnable_impl( return None; } let cfg = attrs.cfg(); - let nav = def.try_to_nav(sema.db)?; + let nav = def.try_to_nav(sema.db)?.call_site(); let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); let mut ty_args = ty.generic_parameters(sema.db).peekable(); @@ -407,7 +408,7 @@ fn runnable_mod_outline_definition( match def.definition_source(sema.db).value { hir::ModuleSource::SourceFile(_) => Some(Runnable { use_name_in_title: false, - nav: def.to_nav(sema.db), + nav: def.to_nav(sema.db).call_site(), kind: RunnableKind::TestMod { path }, cfg, }), @@ -465,7 +466,8 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { let mut nav = match def { Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def), def => def.try_to_nav(db)?, - }; + } + .call_site(); nav.focus_range = None; nav.description = None; nav.docs = None; diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index e020b52e17104..990376a49659d 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -4,7 +4,10 @@ use std::collections::BTreeSet; use either::Either; -use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait}; +use hir::{ + AssocItem, DescendPreference, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, + Trait, +}; use ide_db::{ active_parameter::{callable_for_node, generic_def_for_node}, base_db::FilePosition, @@ -79,7 +82,7 @@ 
pub(crate) fn signature_help( // if the cursor is sandwiched between two space tokens and the call is unclosed // this prevents us from leaving the CallExpression .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; - let token = sema.descend_into_macros_single(token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token); for node in token.parent_ancestors() { match_ast! { diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index b54874d59f8bc..3724dc2822117 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; -use hir::{db::HirDatabase, Crate, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide_db::helpers::get_definition; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, @@ -13,6 +13,7 @@ use ide_db::{ use syntax::{AstNode, SyntaxKind::*, TextRange, T}; use crate::inlay_hints::InlayFieldsToResolve; +use crate::navigation_target::UpmappingResult; use crate::{ hover::hover_for_definition, inlay_hints::AdjustmentHintsMode, @@ -166,9 +167,8 @@ impl StaticIndex<'_> { } else { let it = self.tokens.insert(TokenStaticData { hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), - definition: def.try_to_nav(self.db).map(|it| FileRange { - file_id: it.file_id, - range: it.focus_or_full_range(), + definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { + FileRange { file_id: it.file_id, range: it.focus_or_full_range() } }), references: vec![], moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), @@ -179,7 +179,7 @@ impl StaticIndex<'_> { let token = self.tokens.get_mut(id).unwrap(); token.references.push(ReferenceData { range: FileRange { range, file_id }, - is_definition: match def.try_to_nav(self.db) { + is_definition: match def.try_to_nav(self.db).map(UpmappingResult::call_site) { Some(it) => it.file_id == file_id && it.focus_or_full_range() == range, None => false, }, @@ -243,6 +243,7 @@ mod tests { } } + #[track_caller] fn check_definitions(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis); diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index c9ee460a1c261..e7f97ebe6f7bd 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -2,7 +2,7 @@ use std::{fmt, marker::PhantomData}; use hir::{ db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery}, - Attr, Attrs, ExpandResult, MacroFile, Module, + Attr, Attrs, ExpandResult, MacroFileId, Module, }; use ide_db::{ base_db::{ @@ -199,8 +199,12 @@ impl StatCollect> for SyntaxTreeStats { } } -impl StatCollect, M)>> for SyntaxTreeStats { - fn collect_entry(&mut self, _: MacroFile, value: Option, M)>>) { +impl StatCollect, M)>> for SyntaxTreeStats { + fn collect_entry( + &mut self, + _: MacroFileId, + value: Option, M)>>, + ) { self.total += 1; self.retained += value.is_some() as usize; } diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index dd72484b3807c..307812156e92b 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -13,7 +13,7 @@ mod html; #[cfg(test)] mod tests; -use hir::{Name, Semantics}; +use hir::{DescendPreference, Name, Semantics}; use ide_db::{FxHashMap, RootDatabase, SymbolKind}; use syntax::{ ast::{self, IsString}, @@ -393,13 +393,18 @@ fn traverse( // Attempt to descend tokens into 
macro-calls. let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { - let token = match attr_or_derive_item { - Some(AttrOrDerive::Attr(_)) => { - sema.descend_into_macros_with_kind_preference(token, 0.into()) - } - Some(AttrOrDerive::Derive(_)) | None => { - sema.descend_into_macros_single(token, 0.into()) - } + let token = if token.kind() == STRING { + // for strings, try to prefer a string that has not been lost in a token + // tree + // FIXME: This should be done for everything, but check perf first + sema.descend_into_macros(DescendPreference::SameKind, token) + .into_iter() + .max_by_key(|it| { + it.parent().map_or(false, |it| it.kind() != TOKEN_TREE) + }) + .unwrap() + } else { + sema.descend_into_macros_single(DescendPreference::SameKind, token) }; match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes @@ -441,7 +446,7 @@ fn traverse( { continue; } - highlight_format_string(hl, &string, &expanded_string, range); + highlight_format_string(hl, sema, krate, &string, &expanded_string, range); if !string.is_raw() { highlight_escape_string(hl, &string, range.start()); diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs index 2ef1315945a04..518e71454798f 100644 --- a/crates/ide/src/syntax_highlighting/format.rs +++ b/crates/ide/src/syntax_highlighting/format.rs @@ -1,14 +1,20 @@ //! Syntax highlighting for format macro strings. use ide_db::{ + defs::Definition, syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier}, SymbolKind, }; use syntax::{ast, TextRange}; -use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag}; +use crate::{ + syntax_highlighting::{highlight::highlight_def, highlights::Highlights}, + HlRange, HlTag, +}; pub(super) fn highlight_format_string( stack: &mut Highlights, + sema: &hir::Semantics<'_, ide_db::RootDatabase>, + krate: hir::Crate, string: &ast::String, expanded_string: &ast::String, range: TextRange, @@ -27,6 +33,18 @@ pub(super) fn highlight_format_string( }); } }); + + if let Some(parts) = sema.as_format_args_parts(string) { + parts.into_iter().for_each(|(range, res)| { + if let Some(res) = res { + stack.add(HlRange { + range, + highlight: highlight_def(sema, krate, Definition::from(res)), + binding_hash: None, + }) + } + }) + } } fn highlight_format_specifier(kind: FormatSpecifier) -> Option { diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 7d00282fc14bd..0558f658fd190 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -1,6 +1,6 @@ //! Computes color for a single element. -use hir::{AsAssocItem, HasVisibility, Semantics}; +use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics}; use ide_db::{ defs::{Definition, IdentClass, NameClass, NameRefClass}, FxHashMap, RootDatabase, SymbolKind, @@ -218,7 +218,10 @@ fn highlight_name_ref( // We can fix this for derive attributes since derive helpers are recorded, but not for // general attributes. 
None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR) - && !sema.hir_file_for(name_ref.syntax()).is_derive_attr_pseudo_expansion(sema.db) => + && !sema + .hir_file_for(name_ref.syntax()) + .macro_file() + .map_or(false, |it| it.is_derive_attr_pseudo_expansion(sema.db)) => { return HlTag::Symbol(SymbolKind::Attribute).into(); } @@ -348,7 +351,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 { hash((name, shadow_count)) } -fn highlight_def( +pub(super) fn highlight_def( sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, def: Definition, diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 06b66b302ae02..e8b3a38c9e0f4 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -43,7 +43,9 @@ .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } -
proc_macros::mirror! {
+
use proc_macros::{mirror, identity, DeriveIdentity};
+
+mirror! {
     {
         ,i32 :x pub
         ,i32 :y pub
@@ -90,17 +92,11 @@
     }
 }
 
-#[rustc_builtin_macro]
-macro_rules! concat {}
-#[rustc_builtin_macro]
-macro_rules! include {}
-#[rustc_builtin_macro]
-macro_rules! format_args {}
 
-include!(concat!("foo/", "foo.rs"));
+include!(concat!("foo/", "foo.rs"));
 
 fn main() {
-    format_args!("Hello, {}!", 92);
+    format_args!("Hello, {}!", 92);
     dont_color_me_braces!();
     noop!(noop!(1));
 }
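
The HTML fixture diffs on either side of this point record the highlighting fallout from a refactor that recurs throughout this series: `Semantics::descend_into_macros` now takes an explicit `DescendPreference` instead of the separate `_with_same_text`/`_with_kind_preference` entry points. The sketch below is a rough, self-contained model of that selection logic. The variant names are taken from the patches; `Token`, `descend`, and the fall-back-to-everything rule are illustrative stand-ins, not the real `Semantics` API.

#[derive(Clone, Copy)]
enum DescendPreference {
    SameText, // normal macros: textual equivalence matters most
    SameKind, // attribute expansions: matching token kind is preferred
    None,     // no preference: keep every descendant
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    text: String,
    kind: &'static str,
}

// Stand-in for descend_into_macros: filter the expansion tokens by the
// preference, keeping everything when the filter would drop them all.
fn descend(pref: DescendPreference, input: &Token, expanded: &[Token]) -> Vec<Token> {
    let picked: Vec<Token> = expanded
        .iter()
        .filter(|t| match pref {
            DescendPreference::SameText => t.text == input.text,
            DescendPreference::SameKind => t.kind == input.kind,
            DescendPreference::None => true,
        })
        .cloned()
        .collect();
    if picked.is_empty() { expanded.to_vec() } else { picked }
}

fn main() {
    let input = Token { text: "foo".into(), kind: "IDENT" };
    let expanded = vec![
        Token { text: "foo".into(), kind: "IDENT" },
        Token { text: "0".into(), kind: "INT_NUMBER" },
    ];
    // Hover and references ask for SameText; attribute highlighting
    // prefers SameKind; monikers take everything.
    assert_eq!(descend(DescendPreference::SameText, &input, &expanded).len(), 1);
    assert_eq!(descend(DescendPreference::None, &input, &expanded).len(), 2);
}
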
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 64e614cecd20f..84a823363f68f 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -48,47 +48,38 @@
         $crate::io::_print(format_args_nl!($($arg)*));
     })
 }
-#[rustc_builtin_macro]
-#[macro_export]
-macro_rules! format_args_nl {}
 
 mod panic {
     pub macro panic_2015 {
         () => (
-            $crate::panicking::panic("explicit panic")
+            panic("explicit panic")
         ),
         ($msg:literal $(,)?) => (
-            $crate::panicking::panic($msg)
+            panic($msg)
         ),
         // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
         ($msg:expr $(,)?) => (
-            $crate::panicking::panic_str($msg)
+            panic_str($msg)
         ),
         // Special-case the single-argument case for const_panic.
         ("{}", $arg:expr $(,)?) => (
-            $crate::panicking::panic_display(&$arg)
+            panic_display(&$arg)
         ),
         ($fmt:expr, $($arg:tt)+) => (
-            $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+))
+            panic_fmt(const_format_args!($fmt, $($arg)+))
         ),
     }
 }
 
-#[rustc_builtin_macro(std_panic)]
-#[macro_export]
-macro_rules! panic {}
-#[rustc_builtin_macro]
-macro_rules! assert {}
-#[rustc_builtin_macro]
-macro_rules! asm {}
-#[rustc_builtin_macro]
-macro_rules! concat {}
-
 macro_rules! toho {
     () => ($crate::panic!("not yet implemented"));
     ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+)));
 }
 
+macro_rules! reuse_twice {
+    ($literal:literal) => {{stringify!($literal); format_args!($literal)}};
+}
+
 fn main() {
     let a = '\n';
     let a = '\t';
@@ -165,20 +156,23 @@
     println!("{ничоси}", ничоси = 92);
 
     println!("{:x?} {} ", thingy, n2);
-    panic!("{}", 0);
-    panic!("more {}", 1);
-    assert!(true, "{}", 1);
-    assert!(true, "{} asdasd", 1);
+    panic!("{}", 0);
+    panic!("more {}", 1);
+    assert!(true, "{}", 1);
+    assert!(true, "{} asdasd", 1);
     toho!("{}fmt", 0);
     let i: u64 = 3;
     let o: u64;
-    asm!(
-        "mov {0}, {1}",
-        "add {0}, 5",
+    asm!(
+        "mov {0}, {1}",
+        "add {0}, 5",
         out(reg) o,
         in(reg) i,
     );
 
-    format_args!(concat!("{}"), "{}");
-    format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
+    const CONSTANT: () = ():
+    let mut m = ();
+    format_args!(concat!("{}"), "{}");
+    format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
+    reuse_twice!("{backslash}");
 }
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 542d8992531f5..afb6c555b4afd 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -47,9 +47,12 @@ struct Foo; fn macros() { check_highlighting( r#" -//- proc_macros: mirror +//- proc_macros: mirror, identity, derive_identity +//- minicore: fmt, include, concat //- /lib.rs crate:lib -proc_macros::mirror! { +use proc_macros::{mirror, identity, DeriveIdentity}; + +mirror! { { ,i32 :x pub ,i32 :y pub @@ -96,12 +99,6 @@ macro without_args { } } -#[rustc_builtin_macro] -macro_rules! concat {} -#[rustc_builtin_macro] -macro_rules! include {} -#[rustc_builtin_macro] -macro_rules! format_args {} include!(concat!("foo/", "foo.rs")); @@ -401,53 +398,44 @@ fn test_string_highlighting() { // thus, we have to copy the macro definition from `std` check_highlighting( r#" -//- minicore: fmt +//- minicore: fmt, assert, asm, concat, panic macro_rules! println { ($($arg:tt)*) => ({ $crate::io::_print(format_args_nl!($($arg)*)); }) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args_nl {} mod panic { pub macro panic_2015 { () => ( - $crate::panicking::panic("explicit panic") + panic("explicit panic") ), ($msg:literal $(,)?) => ( - $crate::panicking::panic($msg) + panic($msg) ), // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint. ($msg:expr $(,)?) => ( - $crate::panicking::panic_str($msg) + panic_str($msg) ), // Special-case the single-argument case for const_panic. ("{}", $arg:expr $(,)?) => ( - $crate::panicking::panic_display(&$arg) + panic_display(&$arg) ), ($fmt:expr, $($arg:tt)+) => ( - $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+)) + panic_fmt(const_format_args!($fmt, $($arg)+)) ), } } -#[rustc_builtin_macro(std_panic)] -#[macro_export] -macro_rules! panic {} -#[rustc_builtin_macro] -macro_rules! assert {} -#[rustc_builtin_macro] -macro_rules! asm {} -#[rustc_builtin_macro] -macro_rules! concat {} - macro_rules! toho { () => ($crate::panic!("not yet implemented")); ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); } +macro_rules! 
reuse_twice { + ($literal:literal) => {{stringify!($literal); format_args!($literal)}}; +} + fn main() { let a = '\n'; let a = '\t'; @@ -538,8 +526,11 @@ fn main() { in(reg) i, ); + const CONSTANT: () = (): + let mut m = (); format_args!(concat!("{}"), "{}"); - format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + reuse_twice!("{backslash}"); }"#, expect_file!["./test_data/highlight_strings.html"], false, diff --git a/crates/ide/src/view_hir.rs b/crates/ide/src/view_hir.rs index d2bbbf6d26ab4..9abe54cd39036 100644 --- a/crates/ide/src/view_hir.rs +++ b/crates/ide/src/view_hir.rs @@ -1,7 +1,7 @@ use hir::{DefWithBody, Semantics}; use ide_db::base_db::FilePosition; use ide_db::RootDatabase; -use syntax::{algo::find_node_at_offset, ast, AstNode}; +use syntax::{algo::ancestors_at_offset, ast, AstNode}; // Feature: View Hir // @@ -19,7 +19,9 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option { let sema = Semantics::new(db); let source_file = sema.parse(position.file_id); - let item = find_node_at_offset::(source_file.syntax(), position.offset)?; + let item = ancestors_at_offset(source_file.syntax(), position.offset) + .filter(|it| !ast::MacroCall::can_cast(it.kind())) + .find_map(ast::Item::cast)?; let def: DefWithBody = match item { ast::Item::Fn(it) => sema.to_def(&it)?.into(), ast::Item::Const(it) => sema.to_def(&it)?.into(), diff --git a/crates/ide/src/view_mir.rs b/crates/ide/src/view_mir.rs index a36aba58bc0ed..08d810c134628 100644 --- a/crates/ide/src/view_mir.rs +++ b/crates/ide/src/view_mir.rs @@ -1,7 +1,7 @@ use hir::{DefWithBody, Semantics}; use ide_db::base_db::FilePosition; use ide_db::RootDatabase; -use syntax::{algo::find_node_at_offset, ast, AstNode}; +use syntax::{algo::ancestors_at_offset, ast, AstNode}; // Feature: View Mir // @@ -18,7 +18,9 @@ fn body_mir(db: &RootDatabase, position: FilePosition) -> Option { let sema = Semantics::new(db); let source_file = sema.parse(position.file_id); - let item = find_node_at_offset::(source_file.syntax(), position.offset)?; + let item = ancestors_at_offset(source_file.syntax(), position.offset) + .filter(|it| !ast::MacroCall::can_cast(it.kind())) + .find_map(ast::Item::cast)?; let def: DefWithBody = match item { ast::Item::Fn(it) => sema.to_def(&it)?.into(), ast::Item::Const(it) => sema.to_def(&it)?.into(), diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml index 89b302c796b52..d9184b0fb6fe5 100644 --- a/crates/intern/Cargo.toml +++ b/crates/intern/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] # We need to freeze the version of the crate, as the raw-api feature is considered unstable -dashmap = { version = "=5.4.0", features = ["raw-api"] } +dashmap.workspace = true hashbrown.workspace = true rustc-hash = "1.1.0" triomphe.workspace = true diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs index 7fb4b513a7157..7f4b00df0bac8 100644 --- a/crates/limit/src/lib.rs +++ b/crates/limit/src/lib.rs @@ -1,6 +1,6 @@ //! limit defines a struct to enforce limits. 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[cfg(feature = "tracking")] use std::sync::atomic::AtomicUsize; diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 68b592ffaa4de..db9654220dd74 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -4,19 +4,19 @@ // to run rust-analyzer as a library. use std::{collections::hash_map::Entry, mem, path::Path, sync}; -use ::tt::token_id as tt; use crossbeam_channel::{unbounded, Receiver}; use ide::{AnalysisHost, Change, SourceRoot}; use ide_db::{ base_db::{ - CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, - ProcMacroLoadResult, ProcMacros, + span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, + ProcMacroKind, ProcMacroLoadResult, ProcMacros, }, FxHashMap, }; use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; +use tt::DelimSpan; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; pub struct LoadCargoConfig { @@ -374,12 +374,15 @@ struct Expander(proc_macro_api::ProcMacro); impl ProcMacroExpander for Expander { fn expand( &self, - subtree: &tt::Subtree, - attrs: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, env: &Env, - ) -> Result { + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, + ) -> Result, ProcMacroExpansionError> { let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); - match self.0.expand(subtree, attrs, env) { + match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) { Ok(Ok(subtree)) => Ok(subtree), Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), @@ -394,10 +397,13 @@ struct IdentityExpander; impl ProcMacroExpander for IdentityExpander { fn expand( &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, + subtree: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - ) -> Result { + _: SpanData, + _: SpanData, + _: SpanData, + ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -409,11 +415,14 @@ struct EmptyExpander; impl ProcMacroExpander for EmptyExpander { fn expand( &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, + _: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - ) -> Result { - Ok(tt::Subtree::empty()) + call_site: SpanData, + _: SpanData, + _: SpanData, + ) -> Result, ProcMacroExpansionError> { + Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site })) } } diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 9d43e130457dd..f503aecce2c2f 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, tt, DeclarativeMacro, + syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY, }; #[test] @@ -38,7 +38,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(tt); + let res = rules[&id].expand(&tt, |_| ()); assert!(res.err.is_none()); res.value.token_trees.len() }) @@ -47,14 +47,14 @@ fn benchmark_expand_macro_rules() { assert_eq!(hash, 69413); } -fn macro_rules_fixtures() -> FxHashMap { +fn 
macro_rules_fixtures() -> FxHashMap> { macro_rules_fixtures_tt() .into_iter() .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) .collect() } -fn macro_rules_fixtures_tt() -> FxHashMap { +fn macro_rules_fixtures_tt() -> FxHashMap> { let fixture = bench_fixture::numerous_macro_rules(); let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); @@ -64,14 +64,17 @@ fn macro_rules_fixtures_tt() -> FxHashMap { .filter_map(ast::MacroRules::cast) .map(|rule| { let id = rule.name().unwrap().to_string(); - let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax()); + let def_tt = + syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap); (id, def_tt) }) .collect() } /// Generate random invocation fixtures from rules -fn invocation_fixtures(rules: &FxHashMap) -> Vec<(String, tt::Subtree)> { +fn invocation_fixtures( + rules: &FxHashMap>, +) -> Vec<(String, tt::Subtree)> { let mut seed = 123456789; let mut res = Vec::new(); @@ -93,8 +96,8 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri loop { let mut subtree = tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::UNSPECIFIED, - close: tt::TokenId::UNSPECIFIED, + open: DUMMY, + close: DUMMY, kind: tt::DelimiterKind::Invisible, }, token_trees: vec![], @@ -102,7 +105,7 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } - if it.expand(subtree.clone()).err.is_none() { + if it.expand(&subtree, |_| ()).err.is_none() { res.push((name.clone(), subtree)); break; } @@ -116,7 +119,11 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri } return res; - fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { + fn collect_from_op( + op: &Op, + parent: &mut tt::Subtree, + seed: &mut usize, + ) { return match op { Op::Var { kind, .. 
} => match kind.as_ref() { Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), @@ -202,38 +209,21 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); *seed } - fn make_ident(ident: &str) -> tt::TokenTree { - tt::Leaf::Ident(tt::Ident { - span: tt::TokenId::unspecified(), - text: SmolStr::new(ident), - }) - .into() + fn make_ident(ident: &str) -> tt::TokenTree { + tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into() } - fn make_punct(char: char) -> tt::TokenTree { - tt::Leaf::Punct(tt::Punct { - span: tt::TokenId::unspecified(), - char, - spacing: tt::Spacing::Alone, - }) - .into() + fn make_punct(char: char) -> tt::TokenTree { + tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into() } - fn make_literal(lit: &str) -> tt::TokenTree { - tt::Leaf::Literal(tt::Literal { - span: tt::TokenId::unspecified(), - text: SmolStr::new(lit), - }) - .into() + fn make_literal(lit: &str) -> tt::TokenTree { + tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into() } fn make_subtree( kind: tt::DelimiterKind, - token_trees: Option>, - ) -> tt::TokenTree { + token_trees: Option>>, + ) -> tt::TokenTree { tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), - kind, - }, + delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind }, token_trees: token_trees.unwrap_or_default(), } .into() diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 908048c990424..0e755f69bf7d3 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -7,15 +7,17 @@ mod transcriber; use rustc_hash::FxHashMap; use syntax::SmolStr; +use tt::Span; -use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult}; +use crate::{parser::MetaVarKind, ExpandError, ExpandResult}; -pub(crate) fn expand_rules( - rules: &[crate::Rule], - input: &tt::Subtree, +pub(crate) fn expand_rules( + rules: &[crate::Rule], + input: &tt::Subtree, + marker: impl Fn(&mut S) + Copy, is_2021: bool, -) -> ExpandResult { - let mut match_: Option<(matcher::Match, &crate::Rule)> = None; +) -> ExpandResult> { + let mut match_: Option<(matcher::Match, &crate::Rule)> = None; for rule in rules { let new_match = matcher::match_(&rule.lhs, input, is_2021); @@ -24,7 +26,7 @@ pub(crate) fn expand_rules( // Unconditionally returning the transcription here makes the // `test_repeat_bad_var` test fail. 
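// (If transcription of this first error-free match itself reports an error,
// the match is kept only as a fallback below, so that a later rule still has
// the chance to produce a clean expansion.)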
let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &new_match.bindings); + transcriber::transcribe(&rule.rhs, &new_match.bindings, marker); if transcribe_err.is_none() { return ExpandResult::ok(value); } @@ -43,11 +45,11 @@ pub(crate) fn expand_rules( if let Some((match_, rule)) = match_ { // if we got here, there was no match without errors let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &match_.bindings); + transcriber::transcribe(&rule.rhs, &match_.bindings, marker); ExpandResult { value, err: match_.err.or(transcribe_err) } } else { ExpandResult::new( - tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }, + tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }, ExpandError::NoMatchingRule, ) } @@ -98,23 +100,29 @@ pub(crate) fn expand_rules( /// In other words, `Bindings` is a *multi* mapping from `SmolStr` to /// `tt::TokenTree`, where the index to select a particular `TokenTree` among /// many is not a plain `usize`, but a `&[usize]`. -#[derive(Debug, Default, Clone, PartialEq, Eq)] -struct Bindings { - inner: FxHashMap, +#[derive(Debug, Clone, PartialEq, Eq)] +struct Bindings { + inner: FxHashMap>, +} + +impl Default for Bindings { + fn default() -> Self { + Self { inner: Default::default() } + } } #[derive(Debug, Clone, PartialEq, Eq)] -enum Binding { - Fragment(Fragment), - Nested(Vec), +enum Binding { + Fragment(Fragment), + Nested(Vec>), Empty, Missing(MetaVarKind), } #[derive(Debug, Clone, PartialEq, Eq)] -enum Fragment { +enum Fragment { /// token fragments are just copy-pasted into the output - Tokens(tt::TokenTree), + Tokens(tt::TokenTree), /// Expr ast fragments are surrounded with `()` on insertion to preserve /// precedence. Note that this impl is different from the one currently in /// `rustc` -- `rustc` doesn't translate fragments into token trees at all. @@ -122,7 +130,7 @@ enum Fragment { /// At one point in time, we tried to use "fake" delimiters here à la /// proc-macro delimiter=none. As we later discovered, "none" delimiters are /// tricky to handle in the parser, and rustc doesn't handle those either. - Expr(tt::TokenTree), + Expr(tt::Subtree), /// There are roughly two types of paths: paths in expression context, where a /// separator `::` between an identifier and its following generic argument list /// is mandatory, and paths in type context, where `::` can be omitted. @@ -132,5 +140,5 @@ enum Fragment { /// and is trasncribed as an expression-context path, verbatim transcription /// would cause a syntax error. We need to fix it up just before transcribing; /// see `transcriber::fix_up_and_push_path_tt()`. - Path(tt::TokenTree), + Path(tt::Subtree), } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 1471af98b75b8..012b02a3f87ab 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -63,21 +63,21 @@ use std::rc::Rc; use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; +use tt::Span; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, - tt, tt_iter::TtIter, ExpandError, MetaTemplate, ValueResult, }; -impl Bindings { +impl Bindings { fn push_optional(&mut self, name: &SmolStr) { // FIXME: Do we have a better way to represent an empty token ? 
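// (An invisible-delimiter subtree with no token trees transcribes to nothing,
// which is exactly what an absent optional repetition should produce.)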
// Insert an empty subtree for empty token let tt = - tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into(); + tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into(); self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); } @@ -85,14 +85,14 @@ impl Bindings { self.inner.insert(name.clone(), Binding::Empty); } - fn bindings(&self) -> impl Iterator { + fn bindings(&self) -> impl Iterator> { self.inner.values() } } -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub(super) struct Match { - pub(super) bindings: Bindings, +#[derive(Clone, Debug, PartialEq, Eq)] +pub(super) struct Match { + pub(super) bindings: Bindings, /// We currently just keep the first error and count the rest to compare matches. pub(super) err: Option, pub(super) err_count: usize, @@ -102,7 +102,19 @@ pub(super) struct Match { pub(super) bound_count: usize, } -impl Match { +impl Default for Match { + fn default() -> Self { + Self { + bindings: Default::default(), + err: Default::default(), + err_count: Default::default(), + unmatched_tts: Default::default(), + bound_count: Default::default(), + } + } +} + +impl Match { fn add_err(&mut self, err: ExpandError) { let prev_err = self.err.take(); self.err = prev_err.or(Some(err)); @@ -111,12 +123,16 @@ impl Match { } /// Matching errors are added to the `Match`. -pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match { +pub(super) fn match_( + pattern: &MetaTemplate, + input: &tt::Subtree, + is_2021: bool, +) -> Match { let mut res = match_loop(pattern, input, is_2021); res.bound_count = count(res.bindings.bindings()); return res; - fn count<'a>(bindings: impl Iterator) -> usize { + fn count<'a, S: 'a>(bindings: impl Iterator>) -> usize { bindings .map(|it| match it { Binding::Fragment(_) => 1, @@ -129,10 +145,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) } #[derive(Debug, Clone)] -enum BindingKind { +enum BindingKind { Empty(SmolStr), Optional(SmolStr), - Fragment(SmolStr, Fragment), + Fragment(SmolStr, Fragment), Missing(SmolStr, MetaVarKind), Nested(usize, usize), } @@ -146,13 +162,18 @@ enum LinkNode { Parent { idx: usize, len: usize }, } -#[derive(Default)] -struct BindingsBuilder { - nodes: Vec>>>, +struct BindingsBuilder { + nodes: Vec>>>>, nested: Vec>>, } -impl BindingsBuilder { +impl Default for BindingsBuilder { + fn default() -> Self { + Self { nodes: Default::default(), nested: Default::default() } + } +} + +impl BindingsBuilder { fn alloc(&mut self) -> BindingsIdx { let idx = self.nodes.len(); self.nodes.push(Vec::new()); @@ -189,7 +210,7 @@ impl BindingsBuilder { self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone())))); } - fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { + fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { self.nodes[idx.0] .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment)))); } @@ -210,11 +231,11 @@ impl BindingsBuilder { idx.0 = new_idx; } - fn build(self, idx: &BindingsIdx) -> Bindings { + fn build(self, idx: &BindingsIdx) -> Bindings { self.build_inner(&self.nodes[idx.0]) } - fn build_inner(&self, link_nodes: &[LinkNode>]) -> Bindings { + fn build_inner(&self, link_nodes: &[LinkNode>>]) -> Bindings { let mut bindings = Bindings::default(); let mut nodes = Vec::new(); self.collect_nodes(link_nodes, &mut nodes); @@ -264,7 +285,7 @@ impl BindingsBuilder { &'a self, 
id: usize, len: usize, - nested_refs: &mut Vec<&'a [LinkNode>]>, + nested_refs: &mut Vec<&'a [LinkNode>>]>, ) { self.nested[id].iter().take(len).for_each(|it| match it { LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]), @@ -272,7 +293,7 @@ impl BindingsBuilder { }); } - fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec) { + fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec>) { let last = &self.nodes[idx]; let mut nested_refs: Vec<&[_]> = Vec::new(); self.nested[nested_idx].iter().for_each(|it| match *it { @@ -283,7 +304,7 @@ impl BindingsBuilder { nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter))); } - fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) { + fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) { self.nodes[id].iter().take(len).for_each(|it| match it { LinkNode::Node(it) => nodes.push(it), LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes), @@ -292,8 +313,8 @@ impl BindingsBuilder { fn collect_nodes<'a>( &'a self, - link_nodes: &'a [LinkNode>], - nodes: &mut Vec<&'a BindingKind>, + link_nodes: &'a [LinkNode>>], + nodes: &mut Vec<&'a BindingKind>, ) { link_nodes.iter().for_each(|it| match it { LinkNode::Node(it) => nodes.push(it), @@ -303,22 +324,22 @@ impl BindingsBuilder { } #[derive(Debug, Clone)] -struct MatchState<'t> { +struct MatchState<'t, S> { /// The position of the "dot" in this matcher - dot: OpDelimitedIter<'t>, + dot: OpDelimitedIter<'t, S>, /// Token subtree stack /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. ) /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does /// that where the bottom of the stack is the outermost matcher. - stack: SmallVec<[OpDelimitedIter<'t>; 4]>, + stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>, /// The "parent" matcher position if we are in a repetition. That is, the matcher position just /// before we enter the repetition. - up: Option>>, + up: Option>>, /// The separator if we are in a repetition. - sep: Option, + sep: Option>, /// The KleeneOp of this sequence if we are in a repetition. sep_kind: Option, @@ -330,7 +351,7 @@ struct MatchState<'t> { bindings: BindingsIdx, /// Cached result of meta variable parsing - meta_result: Option<(TtIter<'t>, ExpandResult>)>, + meta_result: Option<(TtIter<'t, S>, ExpandResult>>)>, /// Is error occurred in this state, will `poised` to "parent" is_error: bool, @@ -355,16 +376,16 @@ struct MatchState<'t> { /// - `bb_items`: the set of items that are waiting for the black-box parser. /// - `error_items`: the set of items in errors, used for error-resilient parsing #[inline] -fn match_loop_inner<'t>( - src: TtIter<'t>, - stack: &[TtIter<'t>], - res: &mut Match, - bindings_builder: &mut BindingsBuilder, - cur_items: &mut SmallVec<[MatchState<'t>; 1]>, - bb_items: &mut SmallVec<[MatchState<'t>; 1]>, - next_items: &mut Vec>, - eof_items: &mut SmallVec<[MatchState<'t>; 1]>, - error_items: &mut SmallVec<[MatchState<'t>; 1]>, +fn match_loop_inner<'t, S: Span>( + src: TtIter<'t, S>, + stack: &[TtIter<'t, S>], + res: &mut Match, + bindings_builder: &mut BindingsBuilder, + cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>, + bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>, + next_items: &mut Vec>, + eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>, + error_items: &mut SmallVec<[MatchState<'t, S>; 1]>, is_2021: bool, ) { macro_rules! 
try_push { @@ -468,7 +489,7 @@ fn match_loop_inner<'t>( if let Ok(subtree) = src.clone().expect_subtree() { if subtree.delimiter.kind == delimiter.kind { item.stack.push(item.dot); - item.dot = tokens.iter_delimited(Some(delimiter)); + item.dot = tokens.iter_delimited(Some(*delimiter)); cur_items.push(item); } } @@ -587,9 +608,9 @@ fn match_loop_inner<'t>( } } -fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match { +fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match { let mut src = TtIter::new(src); - let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new(); + let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new(); let mut res = Match::default(); let mut error_recover_item = None; @@ -736,16 +757,16 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match } } -fn match_meta_var( +fn match_meta_var( kind: MetaVarKind, - input: &mut TtIter<'_>, + input: &mut TtIter<'_, S>, is_2021: bool, -) -> ExpandResult> { +) -> ExpandResult>> { let fragment = match kind { MetaVarKind::Path => { return input .expect_fragment(parser::PrefixEntryPoint::Path) - .map(|it| it.map(Fragment::Path)); + .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path)); } MetaVarKind::Ty => parser::PrefixEntryPoint::Ty, MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop, @@ -771,9 +792,21 @@ fn match_meta_var( } _ => {} }; - return input - .expect_fragment(parser::PrefixEntryPoint::Expr) - .map(|tt| tt.map(Fragment::Expr)); + return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| { + tt.map(|tt| match tt { + tt::TokenTree::Leaf(leaf) => tt::Subtree { + delimiter: tt::Delimiter::dummy_invisible(), + token_trees: vec![leaf.into()], + }, + tt::TokenTree::Subtree(mut s) => { + if s.delimiter.kind == tt::DelimiterKind::Invisible { + s.delimiter.kind = tt::DelimiterKind::Parenthesis; + } + s + } + }) + .map(Fragment::Expr) + }); } MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => { let tt_result = match kind { @@ -796,7 +829,7 @@ fn match_meta_var( match neg { None => lit.into(), Some(neg) => tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![neg, lit.into()], }), } @@ -811,7 +844,7 @@ fn match_meta_var( input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens)) } -fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { +fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { for op in pattern.iter() { match op { Op::Var { name, .. 
} => collector_fun(name.clone()), @@ -824,38 +857,38 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) } } } -impl MetaTemplate { - fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> { +impl MetaTemplate { + fn iter_delimited(&self, delimited: Option>) -> OpDelimitedIter<'_, S> { OpDelimitedIter { inner: &self.0, idx: 0, - delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED), + delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE), } } } #[derive(Debug, Clone, Copy)] -enum OpDelimited<'a> { - Op(&'a Op), +enum OpDelimited<'a, S> { + Op(&'a Op), Open, Close, } #[derive(Debug, Clone, Copy)] -struct OpDelimitedIter<'a> { - inner: &'a [Op], - delimited: &'a tt::Delimiter, +struct OpDelimitedIter<'a, S> { + inner: &'a [Op], + delimited: tt::Delimiter, idx: usize, } -impl<'a> OpDelimitedIter<'a> { +impl<'a, S: Span> OpDelimitedIter<'a, S> { fn is_eof(&self) -> bool { let len = self.inner.len() + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 }; self.idx >= len } - fn peek(&self) -> Option> { + fn peek(&self) -> Option> { match self.delimited.kind { tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op), _ => match self.idx { @@ -871,8 +904,8 @@ impl<'a> OpDelimitedIter<'a> { } } -impl<'a> Iterator for OpDelimitedIter<'a> { - type Item = OpDelimited<'a>; +impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> { + type Item = OpDelimited<'a, S>; fn next(&mut self) -> Option { let res = self.peek(); @@ -888,8 +921,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } } -impl TtIter<'_> { - fn expect_separator(&mut self, separator: &Separator) -> bool { +impl TtIter<'_, S> { + fn expect_separator(&mut self, separator: &Separator) -> bool { let mut fork = self.clone(); let ok = match separator { Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { @@ -919,7 +952,7 @@ impl TtIter<'_> { ok } - fn expect_tt(&mut self) -> Result { + fn expect_tt(&mut self) -> Result, ()> { if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) { if punct.char == '\'' { self.expect_lifetime() @@ -927,7 +960,7 @@ impl TtIter<'_> { let puncts = self.expect_glued_punct()?; let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); Ok(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees, })) } @@ -936,7 +969,7 @@ impl TtIter<'_> { } } - fn expect_lifetime(&mut self) -> Result { + fn expect_lifetime(&mut self) -> Result, ()> { let punct = self.expect_single_punct()?; if punct.char != '\'' { return Err(()); @@ -944,7 +977,7 @@ impl TtIter<'_> { let ident = self.expect_ident_or_underscore()?; Ok(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![ tt::Leaf::Punct(*punct).into(), tt::Leaf::Ident(ident.clone()).into(), @@ -953,7 +986,7 @@ impl TtIter<'_> { .into()) } - fn eat_char(&mut self, c: char) -> Option { + fn eat_char(&mut self, c: char) -> Option> { let mut fork = self.clone(); match fork.expect_char(c) { Ok(_) => { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index cdac2f1e3bb8c..7a3e8653c28ff 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -2,31 +2,29 @@ //! 
`$ident => foo`, interpolates variables in the template, to get `fn foo() {}` use syntax::SmolStr; +use tt::{Delimiter, Span}; use crate::{ expander::{Binding, Bindings, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, - tt::{self, Delimiter}, CountError, ExpandError, ExpandResult, MetaTemplate, }; -impl Bindings { - fn contains(&self, name: &str) -> bool { - self.inner.contains_key(name) - } - - fn get(&self, name: &str) -> Result<&Binding, ExpandError> { +impl Bindings { + fn get(&self, name: &str) -> Result<&Binding, ExpandError> { match self.inner.get(name) { Some(binding) => Ok(binding), - None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))), + None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))), } } fn get_fragment( &self, name: &str, + mut span: S, nesting: &mut [NestingState], - ) -> Result { + marker: impl Fn(&mut S), + ) -> Result, ExpandError> { macro_rules! binding_err { ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) }; } @@ -48,54 +46,75 @@ impl Bindings { }; } match b { - Binding::Fragment(it) => Ok(it.clone()), - // emit some reasonable default expansion for missing bindings, - // this gives better recovery than emitting the `$fragment-name` verbatim - Binding::Missing(it) => Ok(match it { - MetaVarKind::Stmt => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - span: tt::TokenId::unspecified(), - char: ';', - spacing: tt::Spacing::Alone, - }))) - } - MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + Binding::Fragment(f @ (Fragment::Path(sub) | Fragment::Expr(sub))) => { + let tt::Subtree { delimiter, token_trees } = sub; + marker(&mut span); + let subtree = tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), - kind: tt::DelimiterKind::Brace, + // FIXME split span + open: span, + close: span, + kind: delimiter.kind, }, - token_trees: vec![], - })), - // FIXME: Meta and Item should get proper defaults - MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { - Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: token_trees.clone(), + }; + Ok(match f { + Fragment::Tokens(_) => unreachable!(), + Fragment::Expr(_) => Fragment::Expr, + Fragment::Path(_) => Fragment::Path, + }(subtree)) + } + Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()), + // emit some reasonable default expansion for missing bindings, + // this gives better recovery than emitting the `$fragment-name` verbatim + Binding::Missing(it) => Ok({ + marker(&mut span); + match it { + MetaVarKind::Stmt => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + span, + char: ';', + spacing: tt::Spacing::Alone, + }))) + } + MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: span, + close: span, + kind: tt::DelimiterKind::Brace, + }, token_trees: vec![], - })) - } - MetaVarKind::Path - | MetaVarKind::Ty - | MetaVarKind::Pat - | MetaVarKind::PatParam - | MetaVarKind::Expr - | MetaVarKind::Ident => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_inline("missing"), - span: tt::TokenId::unspecified(), - }))) - } - MetaVarKind::Lifetime => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_inline("'missing"), - span: tt::TokenId::unspecified(), - }))) - } - MetaVarKind::Literal => { - 
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_inline("\"missing\""), - span: tt::TokenId::unspecified(), - }))) + })), + // FIXME: Meta and Item should get proper defaults + MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { + Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::DUMMY_INVISIBLE, + token_trees: vec![], + })) + } + MetaVarKind::Path + | MetaVarKind::Ty + | MetaVarKind::Pat + | MetaVarKind::PatParam + | MetaVarKind::Expr + | MetaVarKind::Ident => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("missing"), + span, + }))) + } + MetaVarKind::Lifetime => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("'missing"), + span, + }))) + } + MetaVarKind::Literal => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("\"missing\""), + span, + }))) + } } }), Binding::Nested(_) => { @@ -108,13 +127,14 @@ impl Bindings { } } -pub(super) fn transcribe( - template: &MetaTemplate, - bindings: &Bindings, -) -> ExpandResult { +pub(super) fn transcribe( + template: &MetaTemplate, + bindings: &Bindings, + marker: impl Fn(&mut S) + Copy, +) -> ExpandResult> { let mut ctx = ExpandCtx { bindings, nesting: Vec::new() }; - let mut arena: Vec = Vec::new(); - expand_subtree(&mut ctx, template, None, &mut arena) + let mut arena: Vec> = Vec::new(); + expand_subtree(&mut ctx, template, None, &mut arena, marker) } #[derive(Debug)] @@ -129,50 +149,75 @@ struct NestingState { } #[derive(Debug)] -struct ExpandCtx<'a> { - bindings: &'a Bindings, +struct ExpandCtx<'a, S> { + bindings: &'a Bindings, nesting: Vec, } -fn expand_subtree( - ctx: &mut ExpandCtx<'_>, - template: &MetaTemplate, - delimiter: Option, - arena: &mut Vec, -) -> ExpandResult { +fn expand_subtree( + ctx: &mut ExpandCtx<'_, S>, + template: &MetaTemplate, + delimiter: Option>, + arena: &mut Vec>, + marker: impl Fn(&mut S) + Copy, +) -> ExpandResult> { // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation let start_elements = arena.len(); let mut err = None; 'ops: for op in template.iter() { match op { - Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()), - Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()), + Op::Literal(it) => arena.push( + tt::Leaf::from({ + let mut it = it.clone(); + marker(&mut it.span); + it + }) + .into(), + ), + Op::Ident(it) => arena.push( + tt::Leaf::from({ + let mut it = it.clone(); + marker(&mut it.span); + it + }) + .into(), + ), Op::Punct(puncts) => { for punct in puncts { - arena.push(tt::Leaf::from(*punct).into()); + arena.push( + tt::Leaf::from({ + let mut it = punct.clone(); + marker(&mut it.span); + it + }) + .into(), + ); } } Op::Subtree { tokens, delimiter } => { + let mut delimiter = *delimiter; + marker(&mut delimiter.open); + marker(&mut delimiter.close); let ExpandResult { value: tt, err: e } = - expand_subtree(ctx, tokens, Some(*delimiter), arena); + expand_subtree(ctx, tokens, Some(delimiter), arena, marker); err = err.or(e); arena.push(tt.into()); } Op::Var { name, id, .. 
} => { - let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id); + let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker); err = err.or(e); push_fragment(arena, fragment); } Op::Repeat { tokens: subtree, kind, separator } => { let ExpandResult { value: fragment, err: e } = - expand_repeat(ctx, subtree, *kind, separator, arena); + expand_repeat(ctx, subtree, *kind, separator, arena, marker); err = err.or(e); push_fragment(arena, fragment) } Op::Ignore { name, id } => { // Expand the variable, but ignore the result. This registers the repetition count. // FIXME: Any emitted errors are dropped. - expand_var(ctx, name, *id); + expand_var(ctx, name, *id, marker); } Op::Index { depth } => { let index = @@ -180,7 +225,8 @@ fn expand_subtree( arena.push( tt::Leaf::Literal(tt::Literal { text: index.to_string().into(), - span: tt::TokenId::unspecified(), + // FIXME + span: S::DUMMY, }) .into(), ); @@ -239,7 +285,8 @@ fn expand_subtree( arena.push( tt::Leaf::Literal(tt::Literal { text: c.to_string().into(), - span: tt::TokenId::unspecified(), + // FIXME + span: S::DUMMY, }) .into(), ); @@ -250,60 +297,70 @@ fn expand_subtree( let tts = arena.drain(start_elements..).collect(); ExpandResult { value: tt::Subtree { - delimiter: delimiter.unwrap_or_else(tt::Delimiter::unspecified), + delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible), token_trees: tts, }, err, } } -fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult { +fn expand_var( + ctx: &mut ExpandCtx<'_, S>, + v: &SmolStr, + id: S, + marker: impl Fn(&mut S), +) -> ExpandResult> { // We already handle $crate case in mbe parser debug_assert!(v != "crate"); - if !ctx.bindings.contains(v) { - // Note that it is possible to have a `$var` inside a macro which is not bound. - // For example: - // ``` - // macro_rules! foo { - // ($a:ident, $b:ident, $c:tt) => { - // macro_rules! bar { - // ($bi:ident) => { - // fn $bi() -> u8 {$c} - // } - // } - // } - // ``` - // We just treat it a normal tokens - let tt = tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, - token_trees: vec![ - tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) - .into(), - tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(), - ], + match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) { + Ok(it) => ExpandResult::ok(it), + Err(ExpandError::UnresolvedBinding(_)) => { + // Note that it is possible to have a `$var` inside a macro which is not bound. + // For example: + // ``` + // macro_rules! foo { + // ($a:ident, $b:ident, $c:tt) => { + // macro_rules! 
bar {
+            //             ($bi:ident) => {
+            //                 fn $bi() -> u8 {$c}
+            //             }
+            //         }
+            //     }
+            // ```
+            // We just treat it as normal tokens
+            let tt = tt::Subtree {
+                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                token_trees: vec![
+                    tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
+                        .into(),
+                    tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
+                ],
+            }
+            .into();
+            ExpandResult::ok(Fragment::Tokens(tt))
+        }
-        .into();
-        ExpandResult::ok(Fragment::Tokens(tt))
-    } else {
-        ctx.bindings.get_fragment(v, &mut ctx.nesting).map_or_else(
-            |e| ExpandResult {
-                value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())),
-                err: Some(e),
-            },
-            ExpandResult::ok,
-        )
+        Err(e) => ExpandResult {
+            value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
+                // FIXME
+                open: S::DUMMY,
+                // FIXME
+                close: S::DUMMY,
+            }))),
+            err: Some(e),
+        },
+    }
 }

-fn expand_repeat(
-    ctx: &mut ExpandCtx<'_>,
-    template: &MetaTemplate,
+fn expand_repeat<S: Span>(
+    ctx: &mut ExpandCtx<'_, S>,
+    template: &MetaTemplate<S>,
     kind: RepeatKind,
-    separator: &Option<Separator>,
-    arena: &mut Vec<tt::TokenTree>,
-) -> ExpandResult<Fragment> {
-    let mut buf: Vec<tt::TokenTree> = Vec::new();
+    separator: &Option<Separator<S>>,
+    arena: &mut Vec<tt::TokenTree<S>>,
+    marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<Fragment<S>> {
+    let mut buf: Vec<tt::TokenTree<S>> = Vec::new();
     ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
     // Dirty hack to make macro-expansion terminate.
     // This should be replaced by a proper macro-by-example implementation
@@ -313,7 +370,8 @@ fn expand_repeat(
     let mut err = None;
     loop {
-        let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
+        let ExpandResult { value: mut t, err: e } =
+            expand_subtree(ctx, template, None, arena, marker);
         let nesting_state = ctx.nesting.last_mut().unwrap();
         if nesting_state.at_end || !nesting_state.hit {
             break;
@@ -330,8 +388,11 @@ fn expand_repeat(
         );
         return ExpandResult {
             value: Fragment::Tokens(
-                tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }
-                    .into(),
+                tt::Subtree {
+                    delimiter: tt::Delimiter::dummy_invisible(),
+                    token_trees: vec![],
+                }
+                .into(),
             ),
             err: Some(ExpandError::LimitExceeded),
         };
@@ -342,7 +403,7 @@ fn expand_repeat(
             continue;
         }

-        t.delimiter = tt::Delimiter::unspecified();
+        t.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
         push_subtree(&mut buf, t);

         if let Some(sep) = separator {
@@ -376,7 +437,7 @@ fn expand_repeat(

     // Check if it is a single token subtree without any delimiter
     // e.g {Delimiter:None> ['>'] /Delimiter:None>}
-    let tt = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: buf }.into();
+    let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into();

     if RepeatKind::OneOrMore == kind && counter == 0 {
         return ExpandResult {
@@ -387,25 +448,18 @@ fn expand_repeat(
     ExpandResult { value: Fragment::Tokens(tt), err }
 }

-fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
+fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
     match fragment {
         Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
-        Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
-            if tt.delimiter.kind == tt::DelimiterKind::Invisible {
-                tt.delimiter = tt::Delimiter {
-                    open: tt::TokenId::UNSPECIFIED,
-                    close: tt::TokenId::UNSPECIFIED,
-                    kind: tt::DelimiterKind::Parenthesis,
-                };
-            }
-            buf.push(tt.into())
+        Fragment::Expr(sub) => {
+            push_subtree(buf, sub);
         }
-        Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt),
-        Fragment::Tokens(tt) | Fragment::Expr(tt) |
Fragment::Path(tt) => buf.push(tt), + Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt), + Fragment::Tokens(tt) => buf.push(tt), } } -fn push_subtree(buf: &mut Vec, tt: tt::Subtree) { +fn push_subtree(buf: &mut Vec>, tt: tt::Subtree) { match tt.delimiter.kind { tt::DelimiterKind::Invisible => buf.extend(tt.token_trees), _ => buf.push(tt.into()), @@ -415,7 +469,7 @@ fn push_subtree(buf: &mut Vec, tt: tt::Subtree) { /// Inserts the path separator `::` between an identifier and its following generic /// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why /// we need this fixup. -fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { +fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt::Subtree) { stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible)); let mut prev_was_ident = false; // Note that we only need to fix up the top-level `TokenTree`s because the @@ -432,7 +486,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Joint, - span: tt::Span::unspecified(), + // FIXME + span: S::DUMMY, }) .into(), ); @@ -440,7 +495,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Alone, - span: tt::Span::unspecified(), + // FIXME + span: S::DUMMY, }) .into(), ); @@ -453,9 +509,9 @@ fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { /// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth /// defined by the metavar expression. -fn count( - ctx: &ExpandCtx<'_>, - binding: &Binding, +fn count( + ctx: &ExpandCtx<'_, S>, + binding: &Binding, our_depth: usize, count_depth: Option, ) -> Result { diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index a439c9c50d6c6..9331798589fcc 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -6,7 +6,7 @@ //! The tests for this functionality live in another crate: //! `hir_def::macro_expansion_tests::mbe`. 
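// An illustration (not part of this patch) of the expression-position problem
// that `fix_up_and_push_path_tt` above addresses: rustc transcribes `$p:path`
// as an opaque AST node, but a token-based expander has to re-parse its own
// output, and in expression position `Option<u8>::default()` does not parse
// while `Option::<u8>::default()` does.
macro_rules! default_of {
    ($p:path) => {
        $p::default()
    };
}

fn main() {
    // `$p` is bound to `Option<u8>`, a path whose generic arguments appear
    // without `::`, as type-context path grammar allows; the fixup inserts
    // the `::` when this is transcribed into an expression.
    let x = default_of!(Option<u8>);
    assert_eq!(x, None);
}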
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod parser; mod expander; @@ -18,8 +18,8 @@ mod to_parser_input; mod benchmark; mod token_map; -use ::tt::token_id as tt; use stdx::impl_from; +use tt::Span; use std::fmt; @@ -28,19 +28,21 @@ use crate::{ tt_iter::TtIter, }; -pub use self::tt::{Delimiter, DelimiterKind, Punct}; +// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces pub use ::parser::TopEntryPoint; +pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext}; pub use crate::{ syntax_bridge::{ - parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map, - syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree, - syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken, - SyntheticTokenId, + parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, + syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, + SpanMapper, }, - token_map::TokenMap, + token_map::SpanMap, }; +pub use crate::syntax_bridge::dummy_test_span_utils::*; + #[derive(Debug, PartialEq, Eq, Clone)] pub enum ParseError { UnexpectedToken(Box), @@ -73,6 +75,7 @@ impl fmt::Display for ParseError { #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum ExpandError { BindingError(Box>), + UnresolvedBinding(Box>), LeftoverTokens, ConversionError, LimitExceeded, @@ -95,6 +98,10 @@ impl fmt::Display for ExpandError { ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"), ExpandError::UnexpectedToken => f.write_str("unexpected token in input"), ExpandError::BindingError(e) => f.write_str(e), + ExpandError::UnresolvedBinding(binding) => { + f.write_str("could not find binding ")?; + f.write_str(binding) + } ExpandError::ConversionError => f.write_str("could not convert tokens"), ExpandError::LimitExceeded => f.write_str("Expand exceed limit"), ExpandError::LeftoverTokens => f.write_str("leftover tokens"), @@ -124,10 +131,8 @@ impl fmt::Display for CountError { /// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident` /// and `$()*` have special meaning (see `Var` and `Repeat` data structures) #[derive(Clone, Debug, PartialEq, Eq)] -pub struct DeclarativeMacro { - rules: Box<[Rule]>, - /// Highest id of the token we have in TokenMap - shift: Shift, +pub struct DeclarativeMacro { + rules: Box<[Rule]>, // This is used for correctly determining the behavior of the pat fragment // FIXME: This should be tracked by hygiene of the fragment identifier! is_2021: bool, @@ -135,96 +140,18 @@ pub struct DeclarativeMacro { } #[derive(Clone, Debug, PartialEq, Eq)] -struct Rule { - lhs: MetaTemplate, - rhs: MetaTemplate, +struct Rule { + lhs: MetaTemplate, + rhs: MetaTemplate, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Shift(u32); - -impl Shift { - pub fn new(tt: &tt::Subtree) -> Shift { - // Note that TokenId is started from zero, - // We have to add 1 to prevent duplication. 
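// (Context for the `Shift` machinery being deleted here: it renumbered all
// call-site token ids to start above the highest id used in the definition,
// so that a token in the expansion could later be attributed to the call site
// or the definition via `map_id_up`. With real spans carried on every token,
// that bookkeeping is no longer needed.)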
- let value = max_id(tt).map_or(0, |it| it + 1); - return Shift(value); - - // Find the max token id inside a subtree - fn max_id(subtree: &tt::Subtree) -> Option { - let filter = - |tt: &_| match tt { - tt::TokenTree::Subtree(subtree) => { - let tree_id = max_id(subtree); - if subtree.delimiter.open != tt::TokenId::unspecified() { - Some(tree_id.map_or(subtree.delimiter.open.0, |t| { - t.max(subtree.delimiter.open.0) - })) - } else { - tree_id - } - } - tt::TokenTree::Leaf(leaf) => { - let &(tt::Leaf::Ident(tt::Ident { span, .. }) - | tt::Leaf::Punct(tt::Punct { span, .. }) - | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf; - - (span != tt::TokenId::unspecified()).then_some(span.0) - } - }; - subtree.token_trees.iter().filter_map(filter).max() - } - } - - /// Shift given TokenTree token id - pub fn shift_all(self, tt: &mut tt::Subtree) { - for t in &mut tt.token_trees { - match t { - tt::TokenTree::Leaf( - tt::Leaf::Ident(tt::Ident { span, .. }) - | tt::Leaf::Punct(tt::Punct { span, .. }) - | tt::Leaf::Literal(tt::Literal { span, .. }), - ) => *span = self.shift(*span), - tt::TokenTree::Subtree(tt) => { - tt.delimiter.open = self.shift(tt.delimiter.open); - tt.delimiter.close = self.shift(tt.delimiter.close); - self.shift_all(tt) - } - } - } - } - - pub fn shift(self, id: tt::TokenId) -> tt::TokenId { - if id == tt::TokenId::unspecified() { - id - } else { - tt::TokenId(id.0 + self.0) - } - } - - pub fn unshift(self, id: tt::TokenId) -> Option { - id.0.checked_sub(self.0).map(tt::TokenId) - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Origin { - Def, - Call, -} - -impl DeclarativeMacro { - pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { - DeclarativeMacro { - rules: Box::default(), - shift: Shift(0), - is_2021, - err: Some(Box::new(err)), - } +impl DeclarativeMacro { + pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { + DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) } } /// The old, `macro_rules! m {}` flavor. - pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. @@ -256,11 +183,11 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } + DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } } /// The new, unstable `macro m {}` flavor. 
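// A sketch of driving the reworked, span-generic API, using the dummy span
// helpers this patch series introduces (`definition` and `invocation` are
// placeholder inputs; `mbe` and `tt` items are assumed to be in scope):
fn expand_with_dummy_spans(
    definition: &tt::Subtree<DummyTestSpanData>,
    invocation: &tt::Subtree<DummyTestSpanData>,
) {
    let mac = DeclarativeMacro::parse_macro_rules(definition, /* is_2021 */ true);
    assert!(mac.err().is_none());
    // The marker is a hook for adjusting the spans of transcribed tokens;
    // for dummy spans there is nothing to adjust.
    let expansion = mac.expand(invocation, |_span| ());
    assert!(expansion.err.is_none());
}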
- pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; @@ -307,36 +234,24 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } - } - - pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult { - self.shift.shift_all(&mut tt); - expander::expand_rules(&self.rules, &tt, self.is_2021) + DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } } pub fn err(&self) -> Option<&ParseError> { self.err.as_deref() } - pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { - self.shift.shift(id) - } - - pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) { - match self.shift.unshift(id) { - Some(id) => (id, Origin::Call), - None => (id, Origin::Def), - } - } - - pub fn shift(&self) -> Shift { - self.shift + pub fn expand( + &self, + tt: &tt::Subtree, + marker: impl Fn(&mut S) + Copy, + ) -> ExpandResult> { + expander::expand_rules(&self.rules, &tt, marker, self.is_2021) } } -impl Rule { - fn parse(src: &mut TtIter<'_>, expect_arrow: bool) -> Result { +impl Rule { + fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result { let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; if expect_arrow { src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; @@ -351,7 +266,7 @@ impl Rule { } } -fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { +fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { for op in pattern.iter() { match op { Op::Subtree { tokens, .. } => validate(tokens)?, diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 7a143e7466a93..00ba35377a427 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -3,8 +3,9 @@ use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; +use tt::Span; -use crate::{tt, tt_iter::TtIter, ParseError}; +use crate::{tt_iter::TtIter, ParseError}; /// Consider /// @@ -20,22 +21,22 @@ use crate::{tt, tt_iter::TtIter, ParseError}; /// Stuff to the right is a [`MetaTemplate`] template which is used to produce /// output. 
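// (Concretely, the `an_macro` pattern above parses into a single
// `Op::Subtree` with a parenthesis delimiter whose tokens are
// `Op::Var { name: "x", kind: Some(Expr), .. }`, an `Op::Punct` for `+`, and
// `Op::Var { name: "y", kind: Some(Expr), .. }`; the template parses the same
// way, except that template-mode variables carry no fragment kind.)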
#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); +pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); -impl MetaTemplate { - pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result { +impl MetaTemplate { + pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result { MetaTemplate::parse(pattern, Mode::Pattern) } - pub(crate) fn parse_template(template: &tt::Subtree) -> Result { + pub(crate) fn parse_template(template: &tt::Subtree) -> Result { MetaTemplate::parse(template, Mode::Template) } - pub(crate) fn iter(&self) -> impl Iterator { + pub(crate) fn iter(&self) -> impl Iterator> { self.0.iter() } - fn parse(tt: &tt::Subtree, mode: Mode) -> Result { + fn parse(tt: &tt::Subtree, mode: Mode) -> Result { let mut src = TtIter::new(tt); let mut res = Vec::new(); @@ -49,16 +50,16 @@ impl MetaTemplate { } #[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) enum Op { - Var { name: SmolStr, kind: Option, id: tt::TokenId }, - Ignore { name: SmolStr, id: tt::TokenId }, +pub(crate) enum Op { + Var { name: SmolStr, kind: Option, id: S }, + Ignore { name: SmolStr, id: S }, Index { depth: usize }, Count { name: SmolStr, depth: Option }, - Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option }, - Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter }, - Literal(tt::Literal), - Punct(SmallVec<[tt::Punct; 3]>), - Ident(tt::Ident), + Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option> }, + Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter }, + Literal(tt::Literal), + Punct(SmallVec<[tt::Punct; 3]>), + Ident(tt::Ident), } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -87,15 +88,15 @@ pub(crate) enum MetaVarKind { } #[derive(Clone, Debug, Eq)] -pub(crate) enum Separator { - Literal(tt::Literal), - Ident(tt::Ident), - Puncts(SmallVec<[tt::Punct; 3]>), +pub(crate) enum Separator { + Literal(tt::Literal), + Ident(tt::Ident), + Puncts(SmallVec<[tt::Punct; 3]>), } // Note that when we compare a Separator, we just care about its textual value. -impl PartialEq for Separator { - fn eq(&self, other: &Separator) -> bool { +impl PartialEq for Separator { + fn eq(&self, other: &Separator) -> bool { use Separator::*; match (self, other) { @@ -117,11 +118,11 @@ enum Mode { Template, } -fn next_op( - first_peeked: &tt::TokenTree, - src: &mut TtIter<'_>, +fn next_op( + first_peeked: &tt::TokenTree, + src: &mut TtIter<'_, S>, mode: Mode, -) -> Result { +) -> Result, ParseError> { let res = match first_peeked { tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. 
})) => { src.next().expect("first token already peeked"); @@ -212,7 +213,10 @@ fn next_op( Ok(res) } -fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result, ParseError> { +fn eat_fragment_kind( + src: &mut TtIter<'_, S>, + mode: Mode, +) -> Result, ParseError> { if let Mode::Pattern = mode { src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?; let ident = src @@ -240,11 +244,13 @@ fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result bool { +fn is_boolean_literal(lit: &tt::Literal) -> bool { matches!(lit.text.as_str(), "true" | "false") } -fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option, RepeatKind), ParseError> { +fn parse_repeat( + src: &mut TtIter<'_, S>, +) -> Result<(Option>, RepeatKind), ParseError> { let mut separator = Separator::Puncts(SmallVec::new()); for tt in src { let tt = match tt { @@ -281,7 +287,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option, RepeatKind), Err(ParseError::InvalidRepeat) } -fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result { +fn parse_metavar_expr(src: &mut TtIter<'_, S>) -> Result, ()> { let func = src.expect_ident()?; let args = src.expect_subtree()?; @@ -314,7 +320,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result { Ok(op) } -fn parse_depth(src: &mut TtIter<'_>) -> Result { +fn parse_depth(src: &mut TtIter<'_, S>) -> Result { if src.len() == 0 { Ok(0) } else if let tt::Leaf::Literal(lit) = src.expect_literal()? { @@ -325,7 +331,7 @@ fn parse_depth(src: &mut TtIter<'_>) -> Result { } } -fn try_eat_comma(src: &mut TtIter<'_>) -> bool { +fn try_eat_comma(src: &mut TtIter<'_, S>) -> bool { if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) { let _ = src.next(); return true; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 7b9bb61e696ad..1c46471a38320 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -1,98 +1,102 @@ //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. -use rustc_hash::FxHashMap; -use stdx::{always, non_empty_vec::NonEmptyVec}; +use rustc_hash::{FxHashMap, FxHashSet}; +use stdx::{never, non_empty_vec::NonEmptyVec}; use syntax::{ ast::{self, make::tokens::doc_comment}, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; - -use crate::{ - to_parser_input::to_parser_input, - tt::{ - self, - buffer::{Cursor, TokenBuffer}, - }, - tt_iter::TtIter, - TokenMap, +use tt::{ + buffer::{Cursor, TokenBuffer}, + Span, SpanData, SyntaxContext, }; +use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap}; + #[cfg(test)] mod tests; -/// Convert the syntax node to a `TokenTree` (what macro -/// will consume). -pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { - let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications( - node, - Default::default(), - 0, - Default::default(), - Default::default(), - ); - (subtree, token_map) +pub trait SpanMapper { + fn span_for(&self, range: TextRange) -> S; } -/// Convert the syntax node to a `TokenTree` (what macro will consume) -/// with the censored range excluded. 
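// A minimal `SpanMapper` implementation for illustration, assuming the
// trait's span-type parameter `S: Span` (the type and its name are
// hypothetical, not part of this patch): every token, whatever its range,
// gets one constant span.
struct ConstantSpanMap<S>(S);

impl<S: Span> SpanMapper<S> for ConstantSpanMap<S> {
    fn span_for(&self, _range: TextRange) -> S {
        self.0
    }
}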
-pub fn syntax_node_to_token_tree_with_modifications( - node: &SyntaxNode, - existing_token_map: TokenMap, - next_id: u32, - replace: FxHashMap>, - append: FxHashMap>, -) -> (tt::Subtree, TokenMap, u32) { - let global_offset = node.text_range().start(); - let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append); - let subtree = convert_tokens(&mut c); - c.id_alloc.map.shrink_to_fit(); - always!(c.replace.is_empty(), "replace: {:?}", c.replace); - always!(c.append.is_empty(), "append: {:?}", c.append); - (subtree, c.id_alloc.map, c.id_alloc.next_id) +impl SpanMapper for SpanMap { + fn span_for(&self, range: TextRange) -> S { + self.span_at(range.start()) + } } -/// Convert the syntax node to a `TokenTree` (what macro -/// will consume). -pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap { - syntax_node_to_token_map_with_modifications( - node, - Default::default(), - 0, - Default::default(), - Default::default(), - ) - .0 +impl> SpanMapper for &SM { + fn span_for(&self, range: TextRange) -> S { + SM::span_for(self, range) + } } -/// Convert the syntax node to a `TokenTree` (what macro will consume) -/// with the censored range excluded. -pub fn syntax_node_to_token_map_with_modifications( - node: &SyntaxNode, - existing_token_map: TokenMap, - next_id: u32, - replace: FxHashMap>, - append: FxHashMap>, -) -> (TokenMap, u32) { - let global_offset = node.text_range().start(); - let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append); - collect_tokens(&mut c); - c.id_alloc.map.shrink_to_fit(); - always!(c.replace.is_empty(), "replace: {:?}", c.replace); - always!(c.append.is_empty(), "append: {:?}", c.append); - (c.id_alloc.map, c.id_alloc.next_id) +/// Dummy things for testing where spans don't matter. +pub(crate) mod dummy_test_span_utils { + use super::*; + + pub type DummyTestSpanData = tt::SpanData; + pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY; + + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub struct DummyTestSpanAnchor; + impl tt::SpanAnchor for DummyTestSpanAnchor { + const DUMMY: Self = DummyTestSpanAnchor; + } + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub struct DummyTestSyntaxContext; + impl SyntaxContext for DummyTestSyntaxContext { + const DUMMY: Self = DummyTestSyntaxContext; + } + + pub struct DummyTestSpanMap; + + impl SpanMapper> for DummyTestSpanMap { + fn span_for( + &self, + range: syntax::TextRange, + ) -> tt::SpanData { + tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext } + } + } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct SyntheticTokenId(pub u32); +/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the +/// subtree's spans. +pub fn syntax_node_to_token_tree( + node: &SyntaxNode, + map: SpanMap, +) -> tt::Subtree> +where + SpanData: Span, + Anchor: Copy, + Ctx: SyntaxContext, + SpanMap: SpanMapper>, +{ + let mut c = Converter::new(node, map, Default::default(), Default::default()); + convert_tokens(&mut c) +} -#[derive(Debug, Clone)] -pub struct SyntheticToken { - pub kind: SyntaxKind, - pub text: SmolStr, - pub range: TextRange, - pub id: SyntheticTokenId, +/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the +/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can +/// be injected or hidden from the output. 
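// Assumed usage of the plain entry point above in a test, with the dummy span
// map from `dummy_test_span_utils` (the helper name is illustrative):
fn to_subtree(node: &syntax::SyntaxNode) -> tt::Subtree<DummyTestSpanData> {
    syntax_node_to_token_tree(node, DummyTestSpanMap)
}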
+pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
+    node: &SyntaxNode,
+    map: SpanMap,
+    append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>,
+    remove: FxHashSet<SyntaxNode>,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
+where
+    SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
+    Ctx: SyntaxContext,
+{
+    let mut c = Converter::new(node, map, append, remove);
+    convert_tokens(&mut c)
 }
 
 // The following items are what `rustc` macro can be parsed into :
@@ -107,10 +111,17 @@ pub struct SyntheticToken {
 // * AssocItems(SmallVec<[ast::AssocItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
 
-pub fn token_tree_to_syntax_node(
-    tt: &tt::Subtree,
+/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
+/// The produced `SpanMap` contains a mapping from the syntax node's offsets to the subtree's spans.
+pub fn token_tree_to_syntax_node<Anchor, Ctx>(
+    tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
     entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, TokenMap) {
+) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>)
+where
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
+    Ctx: SyntaxContext,
+{
     let buffer = match tt {
         tt::Subtree {
             delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. },
@@ -137,29 +148,41 @@ pub fn token_tree_to_syntax_node(
     tree_sink.finish()
 }
 
-/// Convert a string to a `TokenTree`
-pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
+/// anchor with the given context.
+pub fn parse_to_token_tree<Anchor, Ctx>(
+    anchor: Anchor,
+    ctx: Ctx,
+    text: &str,
+) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>>
+where
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
+    Ctx: SyntaxContext,
+{
     let lexed = parser::LexedStr::new(text);
     if lexed.errors().next().is_some() {
         return None;
     }
+    let mut conv = RawConverter { lexed, pos: 0, anchor, ctx };
+    Some(convert_tokens(&mut conv))
+}
-
-    let mut conv = RawConverter {
-        lexed,
-        pos: 0,
-        id_alloc: TokenIdAlloc {
-            map: Default::default(),
-            global_offset: TextSize::default(),
-            next_id: 0,
-        },
-    };
-
-    let subtree = convert_tokens(&mut conv);
-    Some((subtree, conv.id_alloc.map))
+/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
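+///
+/// A minimal sketch of a call site (`my_span` is a placeholder for whatever `Span`
+/// implementation the caller uses):
+///
+/// ```ignore
+/// let tt = parse_to_token_tree_static_span(my_span, "struct Foo;")?;
+/// ```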
+pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+where
+    S: Span,
+{
+    let lexed = parser::LexedStr::new(text);
+    if lexed.errors().next().is_some() {
+        return None;
+    }
+    let mut conv = StaticRawConverter { lexed, pos: 0, span };
+    Some(convert_tokens(&mut conv))
 }
 
 /// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
     if tt.token_trees.is_empty() {
         return Vec::new();
     }
@@ -172,10 +195,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
         res.push(match expanded.value {
             None => break,
-            Some(tt @ tt::TokenTree::Leaf(_)) => {
-                tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] }
-            }
-            Some(tt::TokenTree::Subtree(tt)) => tt,
+            Some(tt) => tt.subtree_or_wrap(),
         });
 
         let mut fork = iter.clone();
@@ -187,7 +207,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
 
     if iter.peek_n(0).is_some() {
         res.push(tt::Subtree {
-            delimiter: tt::Delimiter::unspecified(),
+            delimiter: tt::Delimiter::DUMMY_INVISIBLE,
             token_trees: iter.cloned().collect(),
         });
     }
@@ -195,136 +215,118 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }
 
-fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
-    struct StackEntry {
-        subtree: tt::Subtree,
-        idx: usize,
-        open_range: TextRange,
-    }
-
-    let entry = StackEntry {
-        subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
-        // never used (delimiter is `None`)
-        idx: !0,
-        open_range: TextRange::empty(TextSize::of('.')),
-    };
+fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
+where
+    C: TokenConverter<S>,
+    S: Span,
+{
+    let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] };
     let mut stack = NonEmptyVec::new(entry);
 
-    loop {
-        let StackEntry { subtree, .. } = stack.last_mut();
-        let result = &mut subtree.token_trees;
-        let (token, range) = match conv.bump() {
-            Some(it) => it,
-            None => break,
-        };
-        let synth_id = token.synthetic_id(conv);
-
-        let kind = token.kind(conv);
-        if kind == COMMENT {
-            // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
-            // figure out which token id to use for the doc comment, if it is converted successfully.
- let next_id = conv.id_alloc().peek_next_id(); - if let Some(tokens) = conv.convert_doc_comment(&token, next_id) { - let id = conv.id_alloc().alloc(range, synth_id); - debug_assert_eq!(id, next_id); - result.extend(tokens); - } - continue; - } - let tt = if kind.is_punct() && kind != UNDERSCORE { - if synth_id.is_none() { - assert_eq!(range.len(), TextSize::of('.')); - } - - let expected = match subtree.delimiter.kind { - tt::DelimiterKind::Parenthesis => Some(T![')']), - tt::DelimiterKind::Brace => Some(T!['}']), - tt::DelimiterKind::Bracket => Some(T![']']), - tt::DelimiterKind::Invisible => None, - }; - - if let Some(expected) = expected { - if kind == expected { - if let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, Some(range)); - stack.last_mut().subtree.token_trees.push(entry.subtree.into()); + while let Some((token, abs_range)) = conv.bump() { + let tt::Subtree { delimiter, token_trees: result } = stack.last_mut(); + + let tt = match token.as_leaf() { + Some(leaf) => tt::TokenTree::Leaf(leaf.clone()), + None => match token.kind(conv) { + // Desugar doc comments into doc attributes + COMMENT => { + let span = conv.span_for(abs_range); + if let Some(tokens) = conv.convert_doc_comment(&token, span) { + result.extend(tokens); } continue; } - } - - let delim = match kind { - T!['('] => Some(tt::DelimiterKind::Parenthesis), - T!['{'] => Some(tt::DelimiterKind::Brace), - T!['['] => Some(tt::DelimiterKind::Bracket), - _ => None, - }; + kind if kind.is_punct() && kind != UNDERSCORE => { + let expected = match delimiter.kind { + tt::DelimiterKind::Parenthesis => Some(T![')']), + tt::DelimiterKind::Brace => Some(T!['}']), + tt::DelimiterKind::Bracket => Some(T![']']), + tt::DelimiterKind::Invisible => None, + }; + + // Current token is a closing delimiter that we expect, fix up the closing span + // and end the subtree here + if matches!(expected, Some(expected) if expected == kind) { + if let Some(mut subtree) = stack.pop() { + subtree.delimiter.close = conv.span_for(abs_range); + stack.last_mut().token_trees.push(subtree.into()); + } + continue; + } - if let Some(kind) = delim { - let (id, idx) = conv.id_alloc().open_delim(range, synth_id); - let subtree = tt::Subtree { - delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind }, - token_trees: vec![], - }; - stack.push(StackEntry { subtree, idx, open_range: range }); - continue; - } + let delim = match kind { + T!['('] => Some(tt::DelimiterKind::Parenthesis), + T!['{'] => Some(tt::DelimiterKind::Brace), + T!['['] => Some(tt::DelimiterKind::Bracket), + _ => None, + }; + + // Start a new subtree + if let Some(kind) = delim { + let open = conv.span_for(abs_range); + stack.push(tt::Subtree { + delimiter: tt::Delimiter { + open, + // will be overwritten on subtree close above + close: open, + kind, + }, + token_trees: vec![], + }); + continue; + } - let spacing = match conv.peek().map(|next| next.kind(conv)) { - Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint, - _ => tt::Spacing::Alone, - }; - let char = match token.to_char(conv) { - Some(c) => c, - None => { - panic!("Token from lexer must be single char: token = {token:#?}"); + let spacing = match conv.peek().map(|next| next.kind(conv)) { + Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint, + _ => tt::Spacing::Alone, + }; + let Some(char) = token.to_char(conv) else { + panic!("Token from lexer must be single char: token = {token:#?}") + }; + tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) }) + .into() 
} - }; - tt::Leaf::from(tt::Punct { - char, - spacing, - span: conv.id_alloc().alloc(range, synth_id), - }) - .into() - } else { - macro_rules! make_leaf { - ($i:ident) => { - tt::$i { - span: conv.id_alloc().alloc(range, synth_id), - text: token.to_text(conv), + kind => { + macro_rules! make_leaf { + ($i:ident) => { + tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) } + .into() + }; } - .into() - }; - } - let leaf: tt::Leaf = match kind { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - UNDERSCORE => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), - LIFETIME_IDENT => { - let char_unit = TextSize::of('\''); - let r = TextRange::at(range.start(), char_unit); - let apostrophe = tt::Leaf::from(tt::Punct { - char: '\'', - spacing: tt::Spacing::Joint, - span: conv.id_alloc().alloc(r, synth_id), - }); - result.push(apostrophe.into()); - - let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); - let ident = tt::Leaf::from(tt::Ident { - text: SmolStr::new(&token.to_text(conv)[1..]), - span: conv.id_alloc().alloc(r, synth_id), - }); - result.push(ident.into()); - continue; - } - _ => continue, - }; + let leaf: tt::Leaf<_> = match kind { + T![true] | T![false] => make_leaf!(Ident), + IDENT => make_leaf!(Ident), + UNDERSCORE => make_leaf!(Ident), + k if k.is_keyword() => make_leaf!(Ident), + k if k.is_literal() => make_leaf!(Literal), + LIFETIME_IDENT => { + let apostrophe = tt::Leaf::from(tt::Punct { + char: '\'', + spacing: tt::Spacing::Joint, + span: conv + .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))), + }); + result.push(apostrophe.into()); + + let ident = tt::Leaf::from(tt::Ident { + text: SmolStr::new(&token.to_text(conv)[1..]), + span: conv.span_for(TextRange::at( + abs_range.start() + TextSize::of('\''), + abs_range.end(), + )), + }); + result.push(ident.into()); + continue; + } + _ => continue, + }; - leaf.into() + leaf.into() + } + }, }; + result.push(tt); } @@ -334,10 +336,9 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { while let Some(entry) = stack.pop() { let parent = stack.last_mut(); - conv.id_alloc().close_delim(entry.idx, None); - let leaf: tt::Leaf = tt::Punct { - span: conv.id_alloc().alloc(entry.open_range, None), - char: match entry.subtree.delimiter.kind { + let leaf: tt::Leaf<_> = tt::Punct { + span: entry.delimiter.open, + char: match entry.delimiter.kind { tt::DelimiterKind::Parenthesis => '(', tt::DelimiterKind::Brace => '{', tt::DelimiterKind::Bracket => '[', @@ -346,11 +347,11 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { spacing: tt::Spacing::Alone, } .into(); - parent.subtree.token_trees.push(leaf.into()); - parent.subtree.token_trees.extend(entry.subtree.token_trees); + parent.token_trees.push(leaf.into()); + parent.token_trees.extend(entry.token_trees); } - let subtree = stack.into_last().subtree; + let subtree = stack.into_last(); if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees { first.clone() } else { @@ -358,111 +359,6 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { } } -fn collect_tokens(conv: &mut C) { - struct StackEntry { - idx: usize, - open_range: TextRange, - delimiter: tt::DelimiterKind, - } - - let entry = StackEntry { - delimiter: tt::DelimiterKind::Invisible, - // never used (delimiter is `None`) - idx: !0, - open_range: TextRange::empty(TextSize::of('.')), - }; - let mut stack = NonEmptyVec::new(entry); - - loop { - let StackEntry { delimiter, .. 
} = stack.last_mut(); - let (token, range) = match conv.bump() { - Some(it) => it, - None => break, - }; - let synth_id = token.synthetic_id(conv); - - let kind = token.kind(conv); - if kind == COMMENT { - // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can - // figure out which token id to use for the doc comment, if it is converted successfully. - let next_id = conv.id_alloc().peek_next_id(); - if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) { - let id = conv.id_alloc().alloc(range, synth_id); - debug_assert_eq!(id, next_id); - } - continue; - } - if kind.is_punct() && kind != UNDERSCORE { - if synth_id.is_none() { - assert_eq!(range.len(), TextSize::of('.')); - } - - let expected = match delimiter { - tt::DelimiterKind::Parenthesis => Some(T![')']), - tt::DelimiterKind::Brace => Some(T!['}']), - tt::DelimiterKind::Bracket => Some(T![']']), - tt::DelimiterKind::Invisible => None, - }; - - if let Some(expected) = expected { - if kind == expected { - if let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, Some(range)); - } - continue; - } - } - - let delim = match kind { - T!['('] => Some(tt::DelimiterKind::Parenthesis), - T!['{'] => Some(tt::DelimiterKind::Brace), - T!['['] => Some(tt::DelimiterKind::Bracket), - _ => None, - }; - - if let Some(kind) = delim { - let (_id, idx) = conv.id_alloc().open_delim(range, synth_id); - - stack.push(StackEntry { idx, open_range: range, delimiter: kind }); - continue; - } - - conv.id_alloc().alloc(range, synth_id); - } else { - macro_rules! make_leaf { - ($i:ident) => {{ - conv.id_alloc().alloc(range, synth_id); - }}; - } - match kind { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - UNDERSCORE => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), - LIFETIME_IDENT => { - let char_unit = TextSize::of('\''); - let r = TextRange::at(range.start(), char_unit); - conv.id_alloc().alloc(r, synth_id); - - let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); - conv.id_alloc().alloc(r, synth_id); - continue; - } - _ => continue, - }; - }; - - // If we get here, we've consumed all input tokens. - // We might have more than one subtree in the stack, if the delimiters are improperly balanced. - // Merge them so we're left with one. 
- while let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, None); - conv.id_alloc().alloc(entry.open_range, None); - } - } -} - fn is_single_token_op(kind: SyntaxKind) -> bool { matches!( kind, @@ -511,162 +407,126 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { text.into() } -fn convert_doc_comment( +fn convert_doc_comment( token: &syntax::SyntaxToken, - span: tt::TokenId, -) -> Option> { + span: S, +) -> Option>> { cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; - // Make `doc="\" Comments\"" - let meta_tkns = - vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)]; + let mk_ident = + |s: &str| tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })); - // Make `#![]` - let mut token_trees = Vec::with_capacity(3); - token_trees.push(mk_punct('#', span)); - if let ast::CommentPlacement::Inner = doc { - token_trees.push(mk_punct('!', span)); - } - token_trees.push(tt::TokenTree::from(tt::Subtree { - delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, - token_trees: meta_tkns, - })); - - return Some(token_trees); - - // Helper functions - fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree { - tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })) - } - - fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree { + let mk_punct = |c: char| { tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone, span, })) - } + }; - fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree { + let mk_doc_literal = |comment: &ast::Comment| { let lit = tt::Literal { text: doc_comment_text(comment), span }; tt::TokenTree::from(tt::Leaf::from(lit)) - } -} - -struct TokenIdAlloc { - map: TokenMap, - global_offset: TextSize, - next_id: u32, -} - -impl TokenIdAlloc { - fn alloc( - &mut self, - absolute_range: TextRange, - synthetic_id: Option, - ) -> tt::TokenId { - let relative_range = absolute_range - self.global_offset; - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - self.map.insert(token_id, relative_range); - if let Some(id) = synthetic_id { - self.map.insert_synthetic(token_id, id); - } - token_id - } + }; - fn open_delim( - &mut self, - open_abs_range: TextRange, - synthetic_id: Option, - ) -> (tt::TokenId, usize) { - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - let idx = self.map.insert_delim( - token_id, - open_abs_range - self.global_offset, - open_abs_range - self.global_offset, - ); - if let Some(id) = synthetic_id { - self.map.insert_synthetic(token_id, id); - } - (token_id, idx) - } + // Make `doc="\" Comments\"" + let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]; - fn close_delim(&mut self, idx: usize, close_abs_range: Option) { - match close_abs_range { - None => { - self.map.remove_delim(idx); - } - Some(close) => { - self.map.update_close_delim(idx, close - self.global_offset); - } - } + // Make `#![]` + let mut token_trees = Vec::with_capacity(3); + token_trees.push(mk_punct('#')); + if let ast::CommentPlacement::Inner = doc { + token_trees.push(mk_punct('!')); } + token_trees.push(tt::TokenTree::from(tt::Subtree { + delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, + token_trees: meta_tkns, + })); - fn peek_next_id(&self) -> tt::TokenId { - tt::TokenId(self.next_id) - } + Some(token_trees) } /// A raw token (straight from lexer) 
converter -struct RawConverter<'a> { +struct RawConverter<'a, Anchor, Ctx> { + lexed: parser::LexedStr<'a>, + pos: usize, + anchor: Anchor, + ctx: Ctx, +} +/// A raw token (straight from lexer) converter that gives every token the same span. +struct StaticRawConverter<'a, S> { lexed: parser::LexedStr<'a>, pos: usize, - id_alloc: TokenIdAlloc, + span: S, } -trait SrcToken: std::fmt::Debug { +trait SrcToken: std::fmt::Debug { fn kind(&self, ctx: &Ctx) -> SyntaxKind; fn to_char(&self, ctx: &Ctx) -> Option; fn to_text(&self, ctx: &Ctx) -> SmolStr; - fn synthetic_id(&self, ctx: &Ctx) -> Option; + fn as_leaf(&self) -> Option<&tt::Leaf> { + None + } } -trait TokenConverter: Sized { - type Token: SrcToken; +trait TokenConverter: Sized { + type Token: SrcToken; - fn convert_doc_comment( - &self, - token: &Self::Token, - span: tt::TokenId, - ) -> Option>; + fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option>>; fn bump(&mut self) -> Option<(Self::Token, TextRange)>; fn peek(&self) -> Option; - fn id_alloc(&mut self) -> &mut TokenIdAlloc; + fn span_for(&self, range: TextRange) -> S; } -impl SrcToken> for usize { - fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind { +impl SrcToken, S> for usize { + fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_>) -> Option { + fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr { ctx.lexed.text(*self).into() } +} - fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option { - None +impl SrcToken, S> for usize { + fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind { + ctx.lexed.kind(*self) + } + + fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option { + ctx.lexed.text(*self).chars().next() + } + + fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr { + ctx.lexed.text(*self).into() } } -impl TokenConverter for RawConverter<'_> { +impl TokenConverter> + for RawConverter<'_, Anchor, Ctx> +where + SpanData: Span, +{ type Token = usize; - fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option> { + fn convert_doc_comment( + &self, + &token: &usize, + span: SpanData, + ) -> Option>>> { let text = self.lexed.text(token); convert_doc_comment(&doc_comment(text), span) } @@ -678,7 +538,7 @@ impl TokenConverter for RawConverter<'_> { let token = self.pos; self.pos += 1; let range = self.lexed.text_range(token); - let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap()); + let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?); Some((token, range)) } @@ -690,137 +550,184 @@ impl TokenConverter for RawConverter<'_> { Some(self.pos) } - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - &mut self.id_alloc + fn span_for(&self, range: TextRange) -> SpanData { + SpanData { range, anchor: self.anchor, ctx: self.ctx } } } -struct Converter { - id_alloc: TokenIdAlloc, +impl TokenConverter for StaticRawConverter<'_, S> +where + S: Span, +{ + type Token = usize; + + fn convert_doc_comment(&self, &token: &usize, span: S) -> Option>> { + let text = self.lexed.text(token); + convert_doc_comment(&doc_comment(text), span) + } + + fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + if self.pos == self.lexed.len() { + return None; + } + let token = self.pos; + self.pos += 1; + let range = 
self.lexed.text_range(token); + let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?); + + Some((token, range)) + } + + fn peek(&self) -> Option { + if self.pos == self.lexed.len() { + return None; + } + Some(self.pos) + } + + fn span_for(&self, _: TextRange) -> S { + self.span + } +} + +struct Converter { current: Option, - current_synthetic: Vec, + current_leafs: Vec>, preorder: PreorderWithTokens, - replace: FxHashMap>, - append: FxHashMap>, range: TextRange, punct_offset: Option<(SyntaxToken, TextSize)>, + /// Used to make the emitted text ranges in the spans relative to the span anchor. + map: SpanMap, + append: FxHashMap>>, + remove: FxHashSet, } -impl Converter { +impl Converter { fn new( node: &SyntaxNode, - global_offset: TextSize, - existing_token_map: TokenMap, - next_id: u32, - mut replace: FxHashMap>, - mut append: FxHashMap>, - ) -> Converter { - let range = node.text_range(); - let mut preorder = node.preorder_with_tokens(); - let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append); - Converter { - id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } }, - current: first, - current_synthetic: synthetic, - preorder, - range, - replace, - append, + map: SpanMap, + append: FxHashMap>>, + remove: FxHashSet, + ) -> Self { + let mut this = Converter { + current: None, + preorder: node.preorder_with_tokens(), + range: node.text_range(), punct_offset: None, - } - } - - fn next_token( - preorder: &mut PreorderWithTokens, - replace: &mut FxHashMap>, - append: &mut FxHashMap>, - ) -> (Option, Vec) { - while let Some(ev) = preorder.next() { - let ele = match ev { - WalkEvent::Enter(ele) => ele, - WalkEvent::Leave(ele) => { - if let Some(mut v) = append.remove(&ele) { - if !v.is_empty() { - v.reverse(); - return (None, v); - } + map, + append, + remove, + current_leafs: vec![], + }; + let first = this.next_token(); + this.current = first; + this + } + + fn next_token(&mut self) -> Option { + // while let Some(ev) = self.preorder.next() { + // match ev { + // WalkEvent::Enter(SyntaxElement::Token(t)) => { + // if let Some(leafs) = self.append.remove(&t.clone().into()) { + // self.current_leafs.extend(leafs); + // } + // return Some(t); + // } + // WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => { + // self.preorder.skip_subtree(); + // if let Some(leafs) = self.append.remove(&n.into()) { + // self.current_leafs.extend(leafs); + // } + // } + // _ => (), + // } + // } + // None; + + while let Some(ev) = self.preorder.next() { + match ev { + WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t), + WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => { + self.preorder.skip_subtree(); + if let Some(mut v) = self.append.remove(&n.into()) { + v.reverse(); + self.current_leafs.extend(v); + return None; } - continue; } - }; - if let Some(mut v) = replace.remove(&ele) { - preorder.skip_subtree(); - if !v.is_empty() { - v.reverse(); - return (None, v); + WalkEvent::Enter(SyntaxElement::Node(_)) => (), + WalkEvent::Leave(ele) => { + if let Some(mut v) = self.append.remove(&ele) { + v.reverse(); + self.current_leafs.extend(v); + return None; + } } } - match ele { - SyntaxElement::Token(t) => return (Some(t), Vec::new()), - _ => {} - } } - (None, Vec::new()) + None } } #[derive(Debug)] -enum SynToken { +enum SynToken { Ordinary(SyntaxToken), - // FIXME is this supposed to be `Punct`? 
- Punch(SyntaxToken, TextSize), - Synthetic(SyntheticToken), + Punct { token: SyntaxToken, offset: usize }, + Leaf(tt::Leaf), } -impl SynToken { - fn token(&self) -> Option<&SyntaxToken> { +impl SynToken { + fn token(&self) -> &SyntaxToken { match self { - SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it), - SynToken::Synthetic(_) => None, + SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it, + SynToken::Leaf(_) => unreachable!(), } } } -impl SrcToken for SynToken { - fn kind(&self, ctx: &Converter) -> SyntaxKind { +impl SrcToken, S> for SynToken { + fn kind(&self, ctx: &Converter) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), - SynToken::Punch(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), - SynToken::Synthetic(token) => token.kind, + SynToken::Punct { .. } => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), + SynToken::Leaf(_) => { + never!(); + SyntaxKind::ERROR + } } } - fn to_char(&self, _ctx: &Converter) -> Option { + fn to_char(&self, _ctx: &Converter) -> Option { match self { SynToken::Ordinary(_) => None, - SynToken::Punch(it, i) => it.text().chars().nth((*i).into()), - SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(), - SynToken::Synthetic(_) => None, + SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i), + SynToken::Leaf(_) => None, } } - fn to_text(&self, _ctx: &Converter) -> SmolStr { + fn to_text(&self, _ctx: &Converter) -> SmolStr { match self { - SynToken::Ordinary(token) => token.text().into(), - SynToken::Punch(token, _) => token.text().into(), - SynToken::Synthetic(token) => token.text.clone(), + SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(), + SynToken::Leaf(_) => { + never!(); + "".into() + } } } - - fn synthetic_id(&self, _ctx: &Converter) -> Option { + fn as_leaf(&self) -> Option<&tt::Leaf> { match self { - SynToken::Synthetic(token) => Some(token.id), - _ => None, + SynToken::Ordinary(_) | SynToken::Punct { .. 
} => None, + SynToken::Leaf(it) => Some(it), } } } -impl TokenConverter for Converter { - type Token = SynToken; - fn convert_doc_comment( - &self, - token: &Self::Token, - span: tt::TokenId, - ) -> Option> { - convert_doc_comment(token.token()?, span) +impl TokenConverter for Converter +where + S: Span, + SpanMap: SpanMapper, +{ + type Token = SynToken; + fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option>> { + convert_doc_comment(token.token(), span) } fn bump(&mut self) -> Option<(Self::Token, TextRange)> { @@ -830,34 +737,31 @@ impl TokenConverter for Converter { let range = punct.text_range(); self.punct_offset = Some((punct.clone(), offset)); let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some((SynToken::Punch(punct, offset), range)); + return Some(( + SynToken::Punct { token: punct, offset: u32::from(offset) as usize }, + range, + )); } } - if let Some(synth_token) = self.current_synthetic.pop() { - if self.current_synthetic.is_empty() { - let (new_current, new_synth) = - Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append); - self.current = new_current; - self.current_synthetic = new_synth; + if let Some(leaf) = self.current_leafs.pop() { + if self.current_leafs.is_empty() { + self.current = self.next_token(); } - let range = synth_token.range; - return Some((SynToken::Synthetic(synth_token), range)); + return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0)))); } let curr = self.current.clone()?; if !self.range.contains_range(curr.text_range()) { return None; } - let (new_current, new_synth) = - Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append); - self.current = new_current; - self.current_synthetic = new_synth; + + self.current = self.next_token(); let token = if curr.kind().is_punct() { self.punct_offset = Some((curr.clone(), 0.into())); let range = curr.text_range(); let range = TextRange::at(range.start(), TextSize::of('.')); - (SynToken::Punch(curr, 0.into()), range) + (SynToken::Punct { token: curr, offset: 0 as usize }, range) } else { self.punct_offset = None; let range = curr.text_range(); @@ -871,55 +775,55 @@ impl TokenConverter for Converter { if let Some((punct, mut offset)) = self.punct_offset.clone() { offset += TextSize::of('.'); if usize::from(offset) < punct.text().len() { - return Some(SynToken::Punch(punct, offset)); + return Some(SynToken::Punct { token: punct, offset: usize::from(offset) }); } } - if let Some(synth_token) = self.current_synthetic.last() { - return Some(SynToken::Synthetic(synth_token.clone())); - } - let curr = self.current.clone()?; if !self.range.contains_range(curr.text_range()) { return None; } let token = if curr.kind().is_punct() { - SynToken::Punch(curr, 0.into()) + SynToken::Punct { token: curr, offset: 0 as usize } } else { SynToken::Ordinary(curr) }; Some(token) } - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - &mut self.id_alloc + fn span_for(&self, range: TextRange) -> S { + self.map.span_for(range) } } -struct TtTreeSink<'a> { +struct TtTreeSink<'a, Anchor, Ctx> +where + SpanData: Span, +{ buf: String, - cursor: Cursor<'a>, - open_delims: FxHashMap, + cursor: Cursor<'a, SpanData>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: TokenMap, + token_map: SpanMap>, } -impl<'a> TtTreeSink<'a> { - fn new(cursor: Cursor<'a>) -> Self { +impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx> +where + SpanData: Span, +{ + fn new(cursor: Cursor<'a, SpanData>) -> Self { TtTreeSink { buf: String::new(), cursor, - open_delims: 
FxHashMap::default(), text_pos: 0.into(), inner: SyntaxTreeBuilder::default(), - token_map: TokenMap::default(), + token_map: SpanMap::empty(), } } - fn finish(mut self) -> (Parse, TokenMap) { - self.token_map.shrink_to_fit(); + fn finish(mut self) -> (Parse, SpanMap>) { + self.token_map.finish(); (self.inner.finish(), self.token_map) } } @@ -936,27 +840,34 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { Some(&texts[idx..texts.len() - (1 - idx)]) } -impl TtTreeSink<'_> { +impl TtTreeSink<'_, Anchor, Ctx> +where + SpanData: Span, +{ /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. fn float_split(&mut self, has_pseudo_dot: bool) { - let (text, _span) = match self.cursor.token_tree() { + let (text, span) = match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => { (lit.text.as_str(), lit.span) } _ => unreachable!(), }; + // FIXME: Span splitting match text.split_once('.') { Some((left, right)) => { assert!(!left.is_empty()); + self.inner.start_node(SyntaxKind::NAME_REF); self.inner.token(SyntaxKind::INT_NUMBER, left); self.inner.finish_node(); + self.token_map.push(self.text_pos + TextSize::of(left), span); // here we move the exit up, the original exit has been deleted in process self.inner.finish_node(); self.inner.token(SyntaxKind::DOT, "."); + self.token_map.push(self.text_pos + TextSize::of(left) + TextSize::of("."), span); if has_pseudo_dot { assert!(right.is_empty(), "{left}.{right}"); @@ -964,11 +875,13 @@ impl TtTreeSink<'_> { assert!(!right.is_empty(), "{left}.{right}"); self.inner.start_node(SyntaxKind::NAME_REF); self.inner.token(SyntaxKind::INT_NUMBER, right); + self.token_map.push(self.text_pos + TextSize::of(text), span); self.inner.finish_node(); // the parser creates an unbalanced start node, we are required to close it here self.inner.finish_node(); } + self.text_pos += TextSize::of(text); } None => unreachable!(), } @@ -987,11 +900,11 @@ impl TtTreeSink<'_> { break; } last = self.cursor; - let text: &str = loop { + let (text, span) = loop { break match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { // Mark the range if needed - let (text, id) = match leaf { + let (text, span) = match leaf { tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span), tt::Leaf::Punct(punct) => { assert!(punct.char.is_ascii()); @@ -1003,18 +916,13 @@ impl TtTreeSink<'_> { } tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span), }; - let range = TextRange::at(self.text_pos, TextSize::of(text)); - self.token_map.insert(id, range); self.cursor = self.cursor.bump(); - text + (text, span) } Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { self.cursor = self.cursor.subtree().unwrap(); match delim_to_str(subtree.delimiter.kind, false) { - Some(it) => { - self.open_delims.insert(subtree.delimiter.open, self.text_pos); - it - } + Some(it) => (it, subtree.delimiter.open), None => continue, } } @@ -1022,21 +930,7 @@ impl TtTreeSink<'_> { let parent = self.cursor.end().unwrap(); self.cursor = self.cursor.bump(); match delim_to_str(parent.delimiter.kind, true) { - Some(it) => { - if let Some(open_delim) = - self.open_delims.get(&parent.delimiter.open) - { - let open_range = TextRange::at(*open_delim, TextSize::of('(')); - let close_range = - TextRange::at(self.text_pos, TextSize::of('(')); - self.token_map.insert_delim( - parent.delimiter.open, - open_range, - close_range, - ); - } - it 
- } + Some(it) => (it, parent.delimiter.close), None => continue, } } @@ -1044,10 +938,12 @@ impl TtTreeSink<'_> { }; self.buf += text; self.text_pos += TextSize::of(text); + self.token_map.push(self.text_pos, span); } self.inner.token(kind, self.buf.as_str()); self.buf.clear(); + // FIXME: Emitting whitespace for this is really just a hack, we should get rid of it. // Add whitespace between adjoint puncts let next = last.bump(); if let ( @@ -1063,6 +959,7 @@ impl TtTreeSink<'_> { if curr.spacing == tt::Spacing::Alone && curr.char != ';' && next.char != '\'' { self.inner.token(WHITESPACE, " "); self.text_pos += TextSize::of(' '); + self.token_map.push(self.text_pos, curr.span); } } } diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index fa0125f3e9e04..bd8187a148a5e 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -7,21 +7,20 @@ use tt::{ Leaf, Punct, Spacing, }; -use super::syntax_node_to_token_tree; +use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap}; fn check_punct_spacing(fixture: &str) { let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); - let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax()); + let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap); let mut annotations: HashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { - let token = token_map.token_by_range(range).expect("no token found"); let spacing = match annotation.as_str() { "Alone" => Spacing::Alone, "Joint" => Spacing::Joint, a => panic!("unknown annotation: {a}"), }; - (token, spacing) + (range, spacing) }) .collect(); @@ -29,8 +28,12 @@ fn check_punct_spacing(fixture: &str) { let mut cursor = buf.begin(); while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { - if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree { - if let Some(expected) = annotations.remove(span) { + if let TokenTreeRef::Leaf( + Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }), + _, + ) = token_tree + { + if let Some(expected) = annotations.remove(range) { assert_eq!(expected, *spacing); } } diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index 051e20b3a3f9c..00a14f04686e2 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -3,9 +3,9 @@ use syntax::{SyntaxKind, SyntaxKind::*, T}; -use crate::tt::buffer::TokenBuffer; +use tt::{buffer::TokenBuffer, Span}; -pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { +pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_, S>) -> parser::Input { let mut res = parser::Input::default(); let mut current = buffer.begin(); diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 73a27df5dbca6..7d15812f8cb60 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -2,123 +2,75 @@ use std::hash::Hash; -use parser::{SyntaxKind, T}; +use stdx::{always, itertools::Itertools}; use syntax::{TextRange, TextSize}; +use tt::Span; -use crate::syntax_bridge::SyntheticTokenId; - -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -enum TokenTextRange { - Token(TextRange), - Delimiter(TextRange), +/// Maps absolute text ranges for the corresponding file to the relevant span data. 
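+///
+/// Conceptually this is a run-length encoding: the backing vector holds `(end_offset, span)`
+/// pairs sorted by offset, and a span covers the text from the previous entry's offset up to
+/// its own. A sketch with made-up offsets (not taken from any real file):
+///
+/// ```ignore
+/// // spans = [(10, s0), (25, s1)]
+/// // offsets 0..10  map to s0
+/// // offsets 10..25 map to s1
+/// ```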
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub struct SpanMap<S: Span> {
+    spans: Vec<(TextSize, S)>,
 }
 
-impl TokenTextRange {
-    fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
-        match self {
-            TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(it) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
-                T!['}'] | T![')'] | T![']'] => {
-                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
-                }
-                _ => None,
-            },
-        }
+impl<S: Span> SpanMap<S> {
+    /// Creates a new empty [`SpanMap`].
+    pub fn empty() -> Self {
+        Self { spans: Vec::new() }
     }
-}
 
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
-pub struct TokenMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TokenTextRange)>,
-    pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
-}
-
-impl TokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
-            TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(it) => {
-                let open = TextRange::at(it.start(), 1.into());
-                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
-                open == relative_range || close == relative_range
-            }
-        })?;
-        Some(token_id)
+    /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
+    /// in order.
+    pub fn finish(&mut self) {
+        always!(
+            self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0),
+            "spans are not in order"
+        );
+        self.spans.shrink_to_fit();
     }
 
-    pub fn ranges_by_token(
-        &self,
-        token_id: tt::TokenId,
-        kind: SyntaxKind,
-    ) -> impl Iterator<Item = TextRange> + '_ {
-        self.entries
-            .iter()
-            .filter(move |&&(tid, _)| tid == token_id)
-            .filter_map(move |(_, range)| range.by_kind(kind))
-    }
-
-    pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
-        self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
-    }
-
-    pub fn first_range_by_token(
-        &self,
-        token_id: tt::TokenId,
-        kind: SyntaxKind,
-    ) -> Option<TextRange> {
-        self.ranges_by_token(token_id, kind).next()
-    }
-
-    pub(crate) fn shrink_to_fit(&mut self) {
-        self.entries.shrink_to_fit();
-        self.synthetic_entries.shrink_to_fit();
-    }
-
-    pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
-    }
-
-    pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
-        self.synthetic_entries.push((token_id, id));
-    }
-
-    pub(crate) fn insert_delim(
-        &mut self,
-        token_id: tt::TokenId,
-        open_relative_range: TextRange,
-        close_relative_range: TextRange,
-    ) -> usize {
-        let res = self.entries.len();
-        let cover = open_relative_range.cover(close_relative_range);
-
-        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
-        res
+    /// Pushes a new span onto the [`SpanMap`].
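+    ///
+    /// Offsets must be pushed in strictly increasing order, matching the debug assertion
+    /// below; a hypothetical call sequence might be:
+    ///
+    /// ```ignore
+    /// map.push(TextSize::new(6), span_of_struct_kw);  // token ends at offset 6
+    /// map.push(TextSize::new(10), span_of_name);      // next token ends at offset 10
+    /// ```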
+    pub fn push(&mut self, offset: TextSize, span: S) {
+        if cfg!(debug_assertions) {
+            if let Some(&(last_offset, _)) = self.spans.last() {
+                assert!(
+                    last_offset < offset,
+                    "last_offset({last_offset:?}) must be smaller than offset({offset:?})"
+                );
+            }
+        }
+        self.spans.push((offset, span));
     }
 
-    pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
-        let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim) = token_text_range {
-            let cover = dim.cover(close_relative_range);
-            *token_text_range = TokenTextRange::Delimiter(cover);
-        }
+    /// Returns all [`TextRange`]s that correspond to the given span.
+    ///
+    /// Note this does a linear search through the entire backing vector.
+    pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
+        // FIXME: This should ignore the syntax context!
+        self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
+            if s != span {
+                return None;
+            }
+            let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
+            Some(TextRange::new(start, end))
+        })
     }
 
-    pub(crate) fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidentally quadratic
-        self.entries.remove(idx);
+    /// Returns the span at the given position.
+    pub fn span_at(&self, offset: TextSize) -> S {
+        let entry = self.spans.partition_point(|&(it, _)| it <= offset);
+        self.spans[entry].1
     }
 
-    pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
-        self.entries.iter().filter_map(|&(tid, tr)| match tr {
-            TokenTextRange::Token(range) => Some((tid, range)),
-            TokenTextRange::Delimiter(_) => None,
-        })
+    /// Returns the spans associated with the given range.
+    /// In other words, this will return all spans that correspond to all offsets within the given range.
+    pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = S> + '_ {
+        let (start, end) = (range.start(), range.end());
+        let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
+        let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
+        (&self.spans[start_entry..][..end_entry]).iter().map(|&(_, s)| s)
     }
 
-    pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
-        self.entries.retain(|&(tid, _)| id(tid));
+    pub fn iter(&self) -> impl Iterator<Item = (TextSize, S)> + '_ {
+        self.spans.iter().copied()
     }
 }
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index 79ff8ca28e863..595691b177368 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -3,16 +3,17 @@
 
 use smallvec::{smallvec, SmallVec};
 use syntax::SyntaxKind;
+use tt::Span;
 
-use crate::{to_parser_input::to_parser_input, tt, ExpandError, ExpandResult};
+use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
 
 #[derive(Debug, Clone)]
-pub(crate) struct TtIter<'a> {
-    pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
+pub(crate) struct TtIter<'a, S> {
+    pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>,
 }
 
-impl<'a> TtIter<'a> {
-    pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> {
+impl<'a, S: Span> TtIter<'a, S> {
+    pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> {
         TtIter { inner: subtree.token_trees.iter() }
     }
 
@@ -36,35 +37,35 @@ impl<'a> TtIter<'a> {
         }
     }
 
-    pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> {
+    pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree<S>, ()> {
         match self.next() {
             Some(tt::TokenTree::Subtree(it)) => Ok(it),
             _ => Err(()),
         }
     }
 
-    pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> {
+    pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
         match self.next() {
             Some(tt::TokenTree::Leaf(it)) => Ok(it),
             _ => Err(()),
         }
     }
 
-    pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> {
+    pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident<S>, ()> {
         match self.expect_leaf()? {
             tt::Leaf::Ident(it) if it.text != "_" => Ok(it),
             _ => Err(()),
         }
     }
 
-    pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> {
+    pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident<S>, ()> {
         match self.expect_leaf()? {
             tt::Leaf::Ident(it) => Ok(it),
            _ => Err(()),
         }
     }
 
-    pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
+    pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
         let it = self.expect_leaf()?;
         match it {
             tt::Leaf::Literal(_) => Ok(it),
@@ -73,7 +74,7 @@ impl<'a> TtIter<'a> {
         }
     }
 
-    pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> {
+    pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct<S>, ()> {
         match self.expect_leaf()? {
             tt::Leaf::Punct(it) => Ok(it),
             _ => Err(()),
@@ -84,7 +85,7 @@ impl<'a> TtIter<'a> {
     ///
     /// This method currently may return a single quotation, which is part of lifetime ident and
     /// conceptually not a punct in the context of mbe. Callers should handle this.
-    pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct; 3]>, ()> {
+    pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct<S>; 3]>, ()> {
         let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
             return Err(());
         };
@@ -126,7 +127,7 @@ impl<'a> TtIter<'a> {
     pub(crate) fn expect_fragment(
         &mut self,
         entry_point: parser::PrefixEntryPoint,
-    ) -> ExpandResult<Option<tt::TokenTree>> {
+    ) -> ExpandResult<Option<tt::TokenTree<S>>> {
         let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
         let parser_input = to_parser_input(&buffer);
         let tree_traversal = entry_point.parse(&parser_input);
@@ -174,20 +175,20 @@ impl<'a> TtIter<'a> {
         let res = match res.len() {
             0 | 1 => res.pop(),
             _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: tt::Delimiter::unspecified(),
+                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
                 token_trees: res,
             })),
         };
         ExpandResult { value: res, err }
     }
 
-    pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> {
+    pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
         self.inner.as_slice().get(n)
     }
 }
 
-impl<'a> Iterator for TtIter<'a> {
-    type Item = &'a tt::TokenTree;
+impl<'a, S> Iterator for TtIter<'a, S> {
+    type Item = &'a tt::TokenTree<S>;
     fn next(&mut self) -> Option<Self::Item> {
         self.inner.next()
     }
@@ -197,4 +198,4 @@ impl<'a> Iterator for TtIter<'a> {
     }
 }
 
-impl std::iter::ExactSizeIterator for TtIter<'_> {}
+impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {}
diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs
index fcfd1a50719bd..d9b3f46f2001c 100644
--- a/crates/parser/src/lib.rs
+++ b/crates/parser/src/lib.rs
@@ -17,7 +17,7 @@
 //!
 //! [`Parser`]: crate::parser::Parser
 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
 #![allow(rustdoc::private_intra_doc_links)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 
diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs
index 88b8d0aee3a49..db705a7b69ec5 100644
--- a/crates/paths/src/lib.rs
+++ b/crates/paths/src/lib.rs
@@ -1,7 +1,7 @@
 //! Thin wrappers around `std::path`, distinguishing between absolute and
 //! relative paths.
 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
 
 use std::{
     borrow::Borrow,
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index 2c2d2e8a94525..2cbbc9489a294 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -25,11 +25,15 @@ tracing.workspace = true
 triomphe.workspace = true
 memmap2 = "0.5.4"
 snap = "1.1.0"
+indexmap = "2.1.0"
 
 # local deps
 paths.workspace = true
 tt.workspace = true
 stdx.workspace = true
 profile.workspace = true
-# Intentionally *not* depend on anything salsa-related
-# base-db.workspace = true
+text-size.workspace = true
+# Ideally this crate would not depend on salsa things, but we need span information here which wraps
+# InternIds for the syntax context
+base-db.workspace = true
+la-arena.workspace = true
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 1603458f756ee..f697ecd3518f1 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -5,22 +5,22 @@
 //! is used to provide basic infrastructure for communication between two
processes: Client (RA itself), Server (the external program) -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub mod msg; mod process; mod version; +use base_db::span::SpanData; +use indexmap::IndexSet; use paths::AbsPathBuf; use std::{fmt, io, sync::Mutex}; use triomphe::Arc; use serde::{Deserialize, Serialize}; -use ::tt::token_id as tt; - use crate::{ - msg::{ExpandMacro, FlatTree, PanicMessage}, + msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS}, process::ProcMacroProcessSrv, }; @@ -136,30 +136,47 @@ impl ProcMacro { pub fn expand( &self, - subtree: &tt::Subtree, - attr: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attr: Option<&tt::Subtree>, env: Vec<(String, String)>, - ) -> Result, ServerError> { + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, + ) -> Result, PanicMessage>, ServerError> { let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); let current_dir = env .iter() .find(|(name, _)| name == "CARGO_MANIFEST_DIR") .map(|(_, value)| value.clone()); + let mut span_data_table = IndexSet::default(); + let def_site = span_data_table.insert_full(def_site).0; + let call_site = span_data_table.insert_full(call_site).0; + let mixed_site = span_data_table.insert_full(mixed_site).0; let task = ExpandMacro { - macro_body: FlatTree::new(subtree, version), + macro_body: FlatTree::new(subtree, version, &mut span_data_table), macro_name: self.name.to_string(), - attributes: attr.map(|subtree| FlatTree::new(subtree, version)), + attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), lib: self.dylib_path.to_path_buf().into(), env, current_dir, + has_global_spans: ExpnGlobals { + serialize: version >= HAS_GLOBAL_SPANS, + def_site, + call_site, + mixed_site, + }, }; - let request = msg::Request::ExpandMacro(task); - let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?; + let response = self + .process + .lock() + .unwrap_or_else(|e| e.into_inner()) + .send_task(msg::Request::ExpandMacro(task))?; + match response { msg::Response::ExpandMacro(it) => { - Ok(it.map(|tree| FlatTree::to_subtree(tree, version))) + Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table))) } msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) 
=> { Err(ServerError { message: "unexpected response".to_string(), io: None }) diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 4b01643c2a298..1d3e45aff385e 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -10,14 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use crate::ProcMacroKind; -pub use crate::msg::flat::FlatTree; +pub use crate::msg::flat::{FlatTree, TokenId}; // The versions of the server protocol pub const NO_VERSION_CHECK_VERSION: u32 = 0; pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; +pub const HAS_GLOBAL_SPANS: u32 = 3; -pub const CURRENT_API_VERSION: u32 = ENCODE_CLOSE_SPAN_VERSION; +pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS; #[derive(Debug, Serialize, Deserialize)] pub enum Request { @@ -59,6 +60,26 @@ pub struct ExpandMacro { pub env: Vec<(String, String)>, pub current_dir: Option, + /// marker for serde skip stuff + #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] + #[serde(default)] + pub has_global_spans: ExpnGlobals, +} + +#[derive(Default, Debug, Serialize, Deserialize)] +pub struct ExpnGlobals { + #[serde(skip_serializing)] + #[serde(default)] + pub serialize: bool, + pub def_site: usize, + pub call_site: usize, + pub mixed_site: usize, +} + +impl ExpnGlobals { + fn skip_serializing_if(&self) -> bool { + !self.serialize + } } pub trait Message: Serialize + DeserializeOwned { @@ -115,30 +136,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { #[cfg(test)] mod tests { + use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId}, + FileId, + }; + use la_arena::RawIdx; + use text_size::{TextRange, TextSize}; + use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree}; + use super::*; - use crate::tt::*; - fn fixture_token_tree() -> Subtree { - let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() }; - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into())); - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into())); + fn fixture_token_tree() -> Subtree { + let anchor = SpanAnchor { + file_id: FileId::from_raw(0), + ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)), + }; + let mut subtree = Subtree { + delimiter: Delimiter { + open: SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor, + ctx: SyntaxContextId::ROOT, + }, + close: SpanData { + range: TextRange::empty(TextSize::new(13)), + anchor, + ctx: SyntaxContextId::ROOT, + }, + kind: DelimiterKind::Invisible, + }, + token_trees: Vec::new(), + }; + subtree.token_trees.push(TokenTree::Leaf( + Ident { + text: "struct".into(), + span: SpanData { + range: TextRange::at(TextSize::new(0), TextSize::of("struct")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + )); + subtree.token_trees.push(TokenTree::Leaf( + Ident { + text: "Foo".into(), + span: SpanData { + range: TextRange::at(TextSize::new(5), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + )); subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { text: "Foo".into(), - span: TokenId::unspecified(), + + span: SpanData { + range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, }))); subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { char: '@', - span: 
TokenId::unspecified(), + span: SpanData { + range: TextRange::at(TextSize::new(11), TextSize::of('@')), + anchor, + ctx: SyntaxContextId::ROOT, + }, spacing: Spacing::Joint, }))); subtree.token_trees.push(TokenTree::Subtree(Subtree { delimiter: Delimiter { - open: TokenId(2), - close: TokenId::UNSPECIFIED, + open: SpanData { + range: TextRange::at(TextSize::new(12), TextSize::of('{')), + anchor, + ctx: SyntaxContextId::ROOT, + }, + close: SpanData { + range: TextRange::at(TextSize::new(13), TextSize::of('}')), + anchor, + ctx: SyntaxContextId::ROOT, + }, kind: DelimiterKind::Brace, }, token_trees: vec![], @@ -149,19 +229,26 @@ mod tests { #[test] fn test_proc_macro_rpc_works() { let tt = fixture_token_tree(); + let mut span_data_table = Default::default(); let task = ExpandMacro { - macro_body: FlatTree::new(&tt, CURRENT_API_VERSION), + macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), macro_name: Default::default(), attributes: None, lib: std::env::current_dir().unwrap(), env: Default::default(), current_dir: Default::default(), + has_global_spans: ExpnGlobals { + serialize: true, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, }; let json = serde_json::to_string(&task).unwrap(); // println!("{}", json); let back: ExpandMacro = serde_json::from_str(&json).unwrap(); - assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION)); + assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); } } diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 44245336f0166..5835718628e55 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -37,12 +37,26 @@ use std::collections::{HashMap, VecDeque}; +use base_db::span::SpanData; +use indexmap::IndexSet; use serde::{Deserialize, Serialize}; -use crate::{ - msg::ENCODE_CLOSE_SPAN_VERSION, - tt::{self, TokenId}, -}; +use crate::msg::ENCODE_CLOSE_SPAN_VERSION; + +type SpanDataIndexMap = IndexSet; + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct TokenId(pub u32); + +impl std::fmt::Debug for TokenId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl tt::Span for TokenId { + const DUMMY: Self = TokenId(!0); +} #[derive(Serialize, Deserialize, Debug)] pub struct FlatTree { @@ -55,33 +69,38 @@ pub struct FlatTree { } struct SubtreeRepr { - open: tt::TokenId, - close: tt::TokenId, + open: TokenId, + close: TokenId, kind: tt::DelimiterKind, tt: [u32; 2], } struct LiteralRepr { - id: tt::TokenId, + id: TokenId, text: u32, } struct PunctRepr { - id: tt::TokenId, + id: TokenId, char: char, spacing: tt::Spacing, } struct IdentRepr { - id: tt::TokenId, + id: TokenId, text: u32, } impl FlatTree { - pub fn new(subtree: &tt::Subtree, version: u32) -> FlatTree { + pub fn new( + subtree: &tt::Subtree, + version: u32, + span_data_table: &mut SpanDataIndexMap, + ) -> FlatTree { let mut w = Writer { string_table: HashMap::new(), work: VecDeque::new(), + span_data_table, subtree: Vec::new(), literal: Vec::new(), @@ -92,7 +111,7 @@ impl FlatTree { }; w.write(subtree); - return FlatTree { + FlatTree { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { write_vec(w.subtree, SubtreeRepr::write_with_close_span) } else { @@ -103,15 +122,44 @@ impl FlatTree { ident: write_vec(w.ident, IdentRepr::write), token_tree: w.token_tree, text: w.text, + } + } + + pub fn new_raw(subtree: &tt::Subtree, version: u32) -> FlatTree { + let mut w = Writer { + string_table: HashMap::new(), 
+ work: VecDeque::new(), + span_data_table: &mut (), + + subtree: Vec::new(), + literal: Vec::new(), + punct: Vec::new(), + ident: Vec::new(), + token_tree: Vec::new(), + text: Vec::new(), }; + w.write(subtree); - fn write_vec [u32; N], const N: usize>(xs: Vec, f: F) -> Vec { - xs.into_iter().flat_map(f).collect() + FlatTree { + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + write_vec(w.subtree, SubtreeRepr::write_with_close_span) + } else { + write_vec(w.subtree, SubtreeRepr::write) + }, + literal: write_vec(w.literal, LiteralRepr::write), + punct: write_vec(w.punct, PunctRepr::write), + ident: write_vec(w.ident, IdentRepr::write), + token_tree: w.token_tree, + text: w.text, } } - pub fn to_subtree(self, version: u32) -> tt::Subtree { - return Reader { + pub fn to_subtree_resolved( + self, + version: u32, + span_data_table: &SpanDataIndexMap, + ) -> tt::Subtree { + Reader { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { read_vec(self.subtree, SubtreeRepr::read_with_close_span) } else { @@ -122,18 +170,40 @@ impl FlatTree { ident: read_vec(self.ident, IdentRepr::read), token_tree: self.token_tree, text: self.text, + span_data_table, } - .read(); + .read() + } - fn read_vec T, const N: usize>(xs: Vec, f: F) -> Vec { - let mut chunks = xs.chunks_exact(N); - let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect(); - assert!(chunks.remainder().is_empty()); - res + pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree { + Reader { + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + read_vec(self.subtree, SubtreeRepr::read_with_close_span) + } else { + read_vec(self.subtree, SubtreeRepr::read) + }, + literal: read_vec(self.literal, LiteralRepr::read), + punct: read_vec(self.punct, PunctRepr::read), + ident: read_vec(self.ident, IdentRepr::read), + token_tree: self.token_tree, + text: self.text, + span_data_table: &(), } + .read() } } +fn read_vec T, const N: usize>(xs: Vec, f: F) -> Vec { + let mut chunks = xs.chunks_exact(N); + let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect(); + assert!(chunks.remainder().is_empty()); + res +} + +fn write_vec [u32; N], const N: usize>(xs: Vec, f: F) -> Vec { + xs.into_iter().flat_map(f).collect() +} + impl SubtreeRepr { fn write(self) -> [u32; 4] { let kind = match self.kind { @@ -152,7 +222,7 @@ impl SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: TokenId(open), close: TokenId::UNSPECIFIED, kind, tt: [lo, len] } + SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] } } fn write_with_close_span(self) -> [u32; 5] { let kind = match self.kind { @@ -211,9 +281,36 @@ impl IdentRepr { } } -struct Writer<'a> { - work: VecDeque<(usize, &'a tt::Subtree)>, +trait Span: Copy { + type Table; + fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId; + fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self; +} + +impl Span for TokenId { + type Table = (); + fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId { + token_id + } + + fn span_for_token_id((): &Self::Table, id: TokenId) -> Self { + id + } +} +impl Span for SpanData { + type Table = IndexSet; + fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { + TokenId(table.insert_full(span).0 as u32) + } + fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self { + *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0]) + } +} + +struct Writer<'a, 'span, S: Span> { + work: VecDeque<(usize, &'a tt::Subtree)>, string_table: 
HashMap<&'a str, u32>, + span_data_table: &'span mut S::Table, subtree: Vec, literal: Vec, @@ -223,15 +320,19 @@ struct Writer<'a> { text: Vec, } -impl<'a> Writer<'a> { - fn write(&mut self, root: &'a tt::Subtree) { +impl<'a, 'span, S: Span> Writer<'a, 'span, S> { + fn write(&mut self, root: &'a tt::Subtree) { self.enqueue(root); while let Some((idx, subtree)) = self.work.pop_front() { self.subtree(idx, subtree); } } - fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) { + fn token_id_of(&mut self, span: S) -> TokenId { + S::token_id_of(self.span_data_table, span) + } + + fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) { let mut first_tt = self.token_tree.len(); let n_tt = subtree.token_trees.len(); self.token_tree.resize(first_tt + n_tt, !0); @@ -248,22 +349,21 @@ impl<'a> Writer<'a> { tt::Leaf::Literal(lit) => { let idx = self.literal.len() as u32; let text = self.intern(&lit.text); - self.literal.push(LiteralRepr { id: lit.span, text }); + let id = self.token_id_of(lit.span); + self.literal.push(LiteralRepr { id, text }); idx << 2 | 0b01 } tt::Leaf::Punct(punct) => { let idx = self.punct.len() as u32; - self.punct.push(PunctRepr { - char: punct.char, - spacing: punct.spacing, - id: punct.span, - }); + let id = self.token_id_of(punct.span); + self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id }); idx << 2 | 0b10 } tt::Leaf::Ident(ident) => { let idx = self.ident.len() as u32; let text = self.intern(&ident.text); - self.ident.push(IdentRepr { id: ident.span, text }); + let id = self.token_id_of(ident.span); + self.ident.push(IdentRepr { id, text }); idx << 2 | 0b11 } }, @@ -273,10 +373,10 @@ impl<'a> Writer<'a> { } } - fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 { + fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 { let idx = self.subtree.len(); - let open = subtree.delimiter.open; - let close = subtree.delimiter.close; + let open = self.token_id_of(subtree.delimiter.open); + let close = self.token_id_of(subtree.delimiter.close); let delimiter_kind = subtree.delimiter.kind; self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] }); self.work.push_back((idx, subtree)); @@ -293,23 +393,29 @@ impl<'a> Writer<'a> { } } -struct Reader { +struct Reader<'span, S: Span> { subtree: Vec, literal: Vec, punct: Vec, ident: Vec, token_tree: Vec, text: Vec, + span_data_table: &'span S::Table, } -impl Reader { - pub(crate) fn read(self) -> tt::Subtree { - let mut res: Vec> = vec![None; self.subtree.len()]; +impl<'span, S: Span> Reader<'span, S> { + pub(crate) fn read(self) -> tt::Subtree { + let mut res: Vec>> = vec![None; self.subtree.len()]; + let read_span = |id| S::span_for_token_id(self.span_data_table, id); for i in (0..self.subtree.len()).rev() { let repr = &self.subtree[i]; let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize]; let s = tt::Subtree { - delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind }, + delimiter: tt::Delimiter { + open: read_span(repr.open), + close: read_span(repr.close), + kind: repr.kind, + }, token_trees: token_trees .iter() .copied() @@ -324,7 +430,7 @@ impl Reader { let repr = &self.literal[idx]; tt::Leaf::Literal(tt::Literal { text: self.text[repr.text as usize].as_str().into(), - span: repr.id, + span: read_span(repr.id), }) .into() } @@ -333,7 +439,7 @@ impl Reader { tt::Leaf::Punct(tt::Punct { char: repr.char, spacing: repr.spacing, - span: repr.id, + span: read_span(repr.id), }) .into() } @@ -341,7 +447,7 @@ impl Reader { let repr 
= &self.ident[idx]; tt::Leaf::Ident(tt::Ident { text: self.text[repr.text as usize].as_str().into(), - span: repr.id, + span: read_span(repr.id), }) .into() } diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index bece1951872c4..50ce586fc429b 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -18,12 +18,12 @@ fn main() -> std::io::Result<()> { run() } -#[cfg(not(feature = "sysroot-abi"))] +#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))] fn run() -> io::Result<()> { panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled"); } -#[cfg(feature = "sysroot-abi")] +#[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn run() -> io::Result<()> { use proc_macro_api::msg::{self, Message}; diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index dd05e250c2def..f20e6832f6e98 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -11,7 +11,7 @@ use libloading::Library; use memmap2::Mmap; use object::Object; use paths::AbsPath; -use proc_macro_api::{read_dylib_info, ProcMacroKind}; +use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind}; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; @@ -152,9 +152,15 @@ impl Expander { macro_name: &str, macro_body: &crate::tt::Subtree, attributes: Option<&crate::tt::Subtree>, + def_site: TokenId, + call_site: TokenId, + mixed_site: TokenId, ) -> Result { - let result = self.inner.proc_macros.expand(macro_name, macro_body, attributes); - result.map_err(|e| e.as_str().unwrap_or_else(|| "".to_string())) + let result = self + .inner + .proc_macros + .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site); + result.map_err(|e| e.into_string().unwrap_or_default()) } pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 58833cb7e9264..56529f71d855e 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -10,9 +10,9 @@ //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)… -#![cfg(feature = "sysroot-abi")] +#![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unreachable_pub, internal_features)] extern crate proc_macro; @@ -32,11 +32,23 @@ use std::{ }; use proc_macro_api::{ - msg::{self, CURRENT_API_VERSION}, + msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION}, ProcMacroKind, }; -use ::tt::token_id as tt; +mod tt { + pub use proc_macro_api::msg::TokenId; + + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree; + pub type TokenTree = ::tt::TokenTree; + pub type Delimiter = ::tt::Delimiter; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; +} // see `build.rs` include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); @@ -70,16 +82,28 @@ impl ProcMacroSrv { None => None, }; - let macro_body = task.macro_body.to_subtree(CURRENT_API_VERSION); - let attributes = task.attributes.map(|it| it.to_subtree(CURRENT_API_VERSION)); + let ExpnGlobals { def_site, call_site, mixed_site, .. 
} = task.has_global_spans; + let def_site = TokenId(def_site as u32); + let call_site = TokenId(call_site as u32); + let mixed_site = TokenId(mixed_site as u32); + + let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) .name(task.macro_name.clone()) .spawn_scoped(s, || { expander - .expand(&task.macro_name, ¯o_body, attributes.as_ref()) - .map(|it| msg::FlatTree::new(&it, CURRENT_API_VERSION)) + .expand( + &task.macro_name, + ¯o_body, + attributes.as_ref(), + def_site, + call_site, + mixed_site, + ) + .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) }); let res = match thread { Ok(handle) => handle.join(), @@ -136,8 +160,8 @@ pub struct PanicMessage { } impl PanicMessage { - pub fn as_str(&self) -> Option { - self.message.clone() + pub fn into_string(self) -> Option { + self.message } } diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 3c6f320331928..716b85d096d07 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -1,16 +1,17 @@ //! Proc macro ABI use libloading::Library; -use proc_macro_api::{ProcMacroKind, RustCInfo}; +use proc_macro::bridge; +use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo}; use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt}; pub(crate) struct ProcMacros { - exported_macros: Vec, + exported_macros: Vec, } -impl From for crate::PanicMessage { - fn from(p: proc_macro::bridge::PanicMessage) -> Self { +impl From for crate::PanicMessage { + fn from(p: bridge::PanicMessage) -> Self { Self { message: p.as_str().map(|s| s.to_string()) } } } @@ -31,9 +32,8 @@ impl ProcMacros { info: RustCInfo, ) -> Result { if info.version_string == crate::RUSTC_VERSION_STRING { - let macros = unsafe { - lib.get::<&&[proc_macro::bridge::client::ProcMacro]>(symbol_name.as_bytes()) - }?; + let macros = + unsafe { lib.get::<&&[bridge::client::ProcMacro]>(symbol_name.as_bytes()) }?; return Ok(Self { exported_macros: macros.to_vec() }); } @@ -45,6 +45,9 @@ impl ProcMacros { macro_name: &str, macro_body: &tt::Subtree, attributes: Option<&tt::Subtree>, + def_site: TokenId, + call_site: TokenId, + mixed_site: TokenId, ) -> Result { let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone()); @@ -54,58 +57,76 @@ impl ProcMacros { for proc_macro in &self.exported_macros { match proc_macro { - proc_macro::bridge::client::ProcMacro::CustomDerive { - trait_name, client, .. - } if *trait_name == macro_name => { + bridge::client::ProcMacro::CustomDerive { trait_name, client, .. 
} + if *trait_name == macro_name => + { let res = client.run( - &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + &bridge::server::SameThread, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + }, parsed_body, - true, + false, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } - proc_macro::bridge::client::ProcMacro::Bang { name, client } - if *name == macro_name => - { + bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { let res = client.run( - &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + &bridge::server::SameThread, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + }, parsed_body, - true, + false, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } - proc_macro::bridge::client::ProcMacro::Attr { name, client } - if *name == macro_name => - { + bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { let res = client.run( - &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + &bridge::server::SameThread, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + + call_site, + def_site, + mixed_site, + }, parsed_attributes, parsed_body, - true, + false, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } _ => continue, } } - Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into()) + Err(bridge::PanicMessage::String("Nothing to expand".to_string()).into()) } pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { self.exported_macros .iter() .map(|proc_macro| match proc_macro { - proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { + bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { (trait_name.to_string(), ProcMacroKind::CustomDerive) } - proc_macro::bridge::client::ProcMacro::Bang { name, .. } => { + bridge::client::ProcMacro::Bang { name, .. } => { (name.to_string(), ProcMacroKind::FuncLike) } - proc_macro::bridge::client::ProcMacro::Attr { name, .. } => { + bridge::client::ProcMacro::Attr { name, .. } => { (name.to_string(), ProcMacroKind::Attr) } }) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index fe18451d38482..54430e0d19051 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -11,6 +11,7 @@ use proc_macro::bridge::{self, server}; mod token_stream; +use proc_macro_api::msg::TokenId; pub use token_stream::TokenStream; use token_stream::TokenStreamBuilder; @@ -43,6 +44,9 @@ pub struct FreeFunctions; pub struct RustAnalyzer { // FIXME: store span information here. 
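
For context on the span plumbing here: the proc-macro bridge requests three "global" spans once per expansion (def site, call site, mixed site), and every span the server has to invent now falls back to the call site instead of a dummy id. A minimal sketch of that contract, using the `TokenId` newtype from `proc_macro_api::msg`; the surrounding type is made up for illustration and is not the actual rust-analyzer server state:

    use proc_macro_api::msg::TokenId;

    /// Hypothetical stand-in for the per-expansion span state.
    struct ExpnGlobalsDemo {
        def_site: TokenId,   // hygiene of the macro definition site
        call_site: TokenId,  // hygiene of the invocation; the default
        mixed_site: TokenId, // `macro_rules!`-style mixed hygiene
    }

    impl ExpnGlobalsDemo {
        /// Spans fabricated during expansion (tokens parsed from strings,
        /// stubbed-out span methods, error literals) all resolve here.
        fn fallback_span(&self) -> TokenId {
            self.call_site
        }
    }

Keeping the three ids explicit is what later lets the client resolve them back to real `SpanData` entries.
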
pub(crate) interner: SymbolInternerRef, + pub call_site: TokenId, + pub def_site: TokenId, + pub mixed_site: TokenId, } impl server::Types for RustAnalyzer { @@ -69,7 +73,7 @@ impl server::FreeFunctions for RustAnalyzer { kind: bridge::LitKind::Err, symbol: Symbol::intern(self.interner, s), suffix: None, - span: tt::TokenId::unspecified(), + span: self.call_site, }) } @@ -83,7 +87,7 @@ impl server::TokenStream for RustAnalyzer { stream.is_empty() } fn from_str(&mut self, src: &str) -> Self::TokenStream { - src.parse().expect("cannot parse string") + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() @@ -280,7 +284,7 @@ impl server::Span for RustAnalyzer { } fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { // FIXME stub - tt::TokenId::unspecified() + self.call_site } /// Recent feature, not yet in the proc_macro /// @@ -317,15 +321,15 @@ impl server::Span for RustAnalyzer { } fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { // FIXME handle span - tt::TokenId::unspecified() + self.call_site } fn end(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() + self.call_site } fn start(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() + self.call_site } fn line(&mut self, _span: Self::Span) -> usize { @@ -349,9 +353,9 @@ impl server::Symbol for RustAnalyzer { impl server::Server for RustAnalyzer { fn globals(&mut self) -> bridge::ExpnGlobals { bridge::ExpnGlobals { - def_site: Span::unspecified(), - call_site: Span::unspecified(), - mixed_site: Span::unspecified(), + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, } } @@ -430,16 +434,16 @@ mod tests { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "struct".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId(0), })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "T".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId(0), })), tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: tt::TokenId(0), + close: tt::TokenId(0), kind: tt::DelimiterKind::Brace, }, token_trees: vec![], @@ -452,33 +456,32 @@ mod tests { #[test] fn test_ra_server_from_str() { - use std::str::FromStr; let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: tt::TokenId(0), + close: tt::TokenId(0), kind: tt::DelimiterKind::Parenthesis, }, token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "a".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId(0), }))], }); - let t1 = TokenStream::from_str("(a)").unwrap(); + let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); assert_eq!(t1.token_trees.len(), 1); assert_eq!(t1.token_trees[0], subtree_paren_a); - let t2 = TokenStream::from_str("(a);").unwrap(); + let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); assert_eq!(t2.token_trees.len(), 2); assert_eq!(t2.token_trees[0], subtree_paren_a); - let underscore = TokenStream::from_str("_").unwrap(); + let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); assert_eq!( underscore.token_trees[0], tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "_".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId(0), })) ); } diff 
--git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 2589d8b64d489..36be88250388d 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -1,5 +1,7 @@ //! TokenStream implementation used by sysroot ABI +use proc_macro_api::msg::TokenId; + use crate::tt::{self, TokenTree}; #[derive(Debug, Default, Clone)] @@ -20,8 +22,15 @@ impl TokenStream { } } - pub(crate) fn into_subtree(self) -> tt::Subtree { - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees } + pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree { + tt::Subtree { + delimiter: tt::Delimiter { + open: call_site, + close: call_site, + kind: tt::DelimiterKind::Invisible, + }, + token_trees: self.token_trees, + } } pub(super) fn is_empty(&self) -> bool { @@ -84,7 +93,7 @@ pub(super) struct TokenStreamBuilder { /// pub(super)lic implementation details for the `TokenStream` type, such as iterators. pub(super) mod token_stream { - use std::str::FromStr; + use proc_macro_api::msg::TokenId; use super::{tt, TokenStream, TokenTree}; @@ -109,14 +118,15 @@ pub(super) mod token_stream { /// /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. - impl FromStr for TokenStream { - type Err = LexError; + #[rustfmt::skip] + impl /*FromStr for*/ TokenStream { + // type Err = LexError; - fn from_str(src: &str) -> Result { - let (subtree, _token_map) = - mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; + pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result { + let subtree = + mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?; - let subtree = subtree_replace_token_ids_with_unspecified(subtree); + let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site); Ok(TokenStream::with_subtree(subtree)) } } @@ -127,43 +137,39 @@ pub(super) mod token_stream { } } - fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree { + fn subtree_replace_token_ids_with_call_site( + subtree: tt::Subtree, + call_site: TokenId, + ) -> tt::Subtree { tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId::UNSPECIFIED, - close: tt::TokenId::UNSPECIFIED, - ..subtree.delimiter - }, + delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter }, token_trees: subtree .token_trees .into_iter() - .map(token_tree_replace_token_ids_with_unspecified) + .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site)) .collect(), } } - fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree { + fn token_tree_replace_token_ids_with_call_site( + tt: tt::TokenTree, + call_site: TokenId, + ) -> tt::TokenTree { match tt { tt::TokenTree::Leaf(leaf) => { - tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf)) + tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site)) } tt::TokenTree::Subtree(subtree) => { - tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree)) + tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site)) } } } - fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf { + fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf { match leaf { - tt::Leaf::Literal(lit) => { - tt::Leaf::Literal(tt::Literal { span: 
tt::TokenId::unspecified(), ..lit }) - } - tt::Leaf::Punct(punct) => { - tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct }) - } - tt::Leaf::Ident(ident) => { - tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident }) - } + tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }), + tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }), + tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }), } } } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 04a0ae7bc7201..b04e3ca19ac1b 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -8,7 +8,7 @@ use expect_test::expect; #[test] fn test_derive_empty() { - assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 4294967295 4294967295"]); + assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]); } #[test] @@ -17,12 +17,12 @@ fn test_derive_error() { "DeriveError", r#"struct S;"#, expect![[r##" - SUBTREE $$ 4294967295 4294967295 - IDENT compile_error 4294967295 - PUNCH ! [alone] 4294967295 - SUBTREE () 4294967295 4294967295 - LITERAL "#[derive(DeriveError)] struct S ;" 4294967295 - PUNCH ; [alone] 4294967295"##]], + SUBTREE $$ 1 1 + IDENT compile_error 1 + PUNCH ! [alone] 1 + SUBTREE () 1 1 + LITERAL "#[derive(DeriveError)] struct S ;" 1 + PUNCH ; [alone] 1"##]], ); } @@ -32,14 +32,14 @@ fn test_fn_like_macro_noop() { "fn_like_noop", r#"ident, 0, 1, []"#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT ident 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 0 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 1 4294967295 - PUNCH , [alone] 4294967295 - SUBTREE [] 4294967295 4294967295"#]], + SUBTREE $$ 1 1 + IDENT ident 1 + PUNCH , [alone] 1 + LITERAL 0 1 + PUNCH , [alone] 1 + LITERAL 1 1 + PUNCH , [alone] 1 + SUBTREE [] 1 1"#]], ); } @@ -49,10 +49,10 @@ fn test_fn_like_macro_clone_ident_subtree() { "fn_like_clone_tokens", r#"ident, []"#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT ident 4294967295 - PUNCH , [alone] 4294967295 - SUBTREE [] 4294967295 4294967295"#]], + SUBTREE $$ 1 1 + IDENT ident 1 + PUNCH , [alone] 1 + SUBTREE [] 1 1"#]], ); } @@ -62,8 +62,8 @@ fn test_fn_like_macro_clone_raw_ident() { "fn_like_clone_tokens", "r#async", expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT r#async 4294967295"#]], + SUBTREE $$ 1 1 + IDENT r#async 1"#]], ); } @@ -73,14 +73,14 @@ fn test_fn_like_mk_literals() { "fn_like_mk_literals", r#""#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - LITERAL b"byte_string" 4294967295 - LITERAL 'c' 4294967295 - LITERAL "string" 4294967295 - LITERAL 3.14f64 4294967295 - LITERAL 3.14 4294967295 - LITERAL 123i64 4294967295 - LITERAL 123 4294967295"#]], + SUBTREE $$ 1 1 + LITERAL b"byte_string" 1 + LITERAL 'c' 1 + LITERAL "string" 1 + LITERAL 3.14f64 1 + LITERAL 3.14 1 + LITERAL 123i64 1 + LITERAL 123 1"#]], ); } @@ -90,9 +90,9 @@ fn test_fn_like_mk_idents() { "fn_like_mk_idents", r#""#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT standard 4294967295 - IDENT r#raw 4294967295"#]], + SUBTREE $$ 1 1 + IDENT standard 1 + IDENT r#raw 1"#]], ); } @@ -102,17 +102,17 @@ fn test_fn_like_macro_clone_literals() { "fn_like_clone_tokens", r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - LITERAL 1u16 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 2_u32 4294967295 - PUNCH , [alone] 4294967295 - PUNCH - 
[alone] 4294967295 - LITERAL 4i64 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 3.14f32 4294967295 - PUNCH , [alone] 4294967295 - LITERAL "hello bridge" 4294967295"#]], + SUBTREE $$ 1 1 + LITERAL 1u16 1 + PUNCH , [alone] 1 + LITERAL 2_u32 1 + PUNCH , [alone] 1 + PUNCH - [alone] 1 + LITERAL 4i64 1 + PUNCH , [alone] 1 + LITERAL 3.14f32 1 + PUNCH , [alone] 1 + LITERAL "hello bridge" 1"#]], ); } @@ -126,12 +126,12 @@ fn test_attr_macro() { r#"mod m {}"#, r#"some arguments"#, expect![[r##" - SUBTREE $$ 4294967295 4294967295 - IDENT compile_error 4294967295 - PUNCH ! [alone] 4294967295 - SUBTREE () 4294967295 4294967295 - LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295 - PUNCH ; [alone] 4294967295"##]], + SUBTREE $$ 1 1 + IDENT compile_error 1 + PUNCH ! [alone] 1 + SUBTREE () 1 1 + LITERAL "#[attr_error(some arguments)] mod m {}" 1 + PUNCH ; [alone] 1"##]], ); } diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index 49b4d973b63bb..c12096d140c98 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -1,18 +1,18 @@ //! utils used in proc-macro tests use expect_test::Expect; -use std::str::FromStr; +use proc_macro_api::msg::TokenId; use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv}; -fn parse_string(code: &str) -> Option { +fn parse_string(code: &str, call_site: TokenId) -> Option { // This is a bit strange. We need to parse a string into a token stream into // order to create a tt::SubTree from it in fixtures. `into_subtree` is // implemented by all the ABIs we have so we arbitrarily choose one ABI to // write a `parse_string` function for and use that. The tests don't really // care which ABI we're using as the `into_subtree` function isn't part of // the ABI and shouldn't change between ABI versions. - crate::server::TokenStream::from_str(code).ok() + crate::server::TokenStream::from_str(code, call_site).ok() } pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) { @@ -24,12 +24,24 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e } fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) { + let def_site = TokenId(0); + let call_site = TokenId(1); + let mixed_site = TokenId(2); let path = proc_macro_test_dylib_path(); let expander = dylib::Expander::new(&path).unwrap(); - let fixture = parse_string(input).unwrap(); - let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree()); - - let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap(); + let fixture = parse_string(input, call_site).unwrap(); + let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site)); + + let res = expander + .expand( + macro_name, + &fixture.into_subtree(call_site), + attr.as_ref(), + def_site, + call_site, + mixed_site, + ) + .unwrap(); expect.assert_eq(&format!("{res:?}")); } diff --git a/crates/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-test/imp/src/lib.rs index feeacdb6407af..32510fba2f8ca 100644 --- a/crates/proc-macro-test/imp/src/lib.rs +++ b/crates/proc-macro-test/imp/src/lib.rs @@ -1,6 +1,6 @@ //! Exports a few trivial procedural macros for testing. 
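
A note on the expectation churn in these tests: `4294967295` is `TokenId(!0)`, the old "unspecified" id, while the harness now pins the global spans to fixed values, so every fixture token is stamped with the call-site id and the dumps print `1`. A small sketch of the arithmetic, assuming only the public `TokenId` newtype:

    use proc_macro_api::msg::TokenId;

    fn demo() {
        // The old "unspecified" id is !0, which prints as 4294967295:
        assert_eq!(TokenId(!0).0, 4_294_967_295);
        // The tests instead pin the three globals:
        let (def_site, call_site, mixed_site) = (TokenId(0), TokenId(1), TokenId(2));
        // Fixture tokens are stamped with `call_site`, hence the `1`s above.
        let _ = (def_site, call_site, mixed_site);
    }
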
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; diff --git a/crates/proc-macro-test/src/lib.rs b/crates/proc-macro-test/src/lib.rs index 6d57bc81e0e33..739c6ec6f4494 100644 --- a/crates/proc-macro-test/src/lib.rs +++ b/crates/proc-macro-test/src/lib.rs @@ -1,6 +1,6 @@ //! Exports a few trivial procedural macros for testing. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub static PROC_MACRO_TEST_LOCATION: &str = include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt")); diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs index e7fc3d970bfff..fdd724e2aab45 100644 --- a/crates/profile/src/lib.rs +++ b/crates/profile/src/lib.rs @@ -1,6 +1,6 @@ //! A collection of tools for profiling rust-analyzer. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod stop_watch; mod memory_usage; diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 901dcfd2b1102..5f9b708289d16 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -15,7 +15,7 @@ //! procedural macros). //! * Lowering of concrete model to a [`base_db::CrateGraph`] -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod manifest_path; mod cargo_workspace; diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 98f3063bb9829..4887b29815ae2 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -58,7 +58,7 @@ fn load_cargo_with_sysroot( &mut { |path| { let len = file_map.len(); - Some(*file_map.entry(path.to_path_buf()).or_insert(FileId(len as u32))) + Some(*file_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) } }, &Default::default(), @@ -142,7 +142,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacro let mut counter = 0; move |_path| { counter += 1; - Some(FileId(counter)) + Some(FileId::from_raw(counter)) } }, &Default::default(), diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 76f764460329b..408c1fb6f39b7 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -43,11 +43,6 @@ triomphe.workspace = true nohash-hasher.workspace = true always-assert = "0.1.2" -# These 3 deps are not used by r-a directly, but we list them here to lock in their versions -# in our transitive deps to prevent them from pulling in windows-sys 0.45.0 -mio = "=0.8.5" -parking_lot_core = "=0.9.6" - cfg.workspace = true flycheck.workspace = true hir-def.workspace = true diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index a7d0a0b0dfc47..29bd02f92da70 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -2,7 +2,7 @@ //! //! 
Based on cli flags, either spawns an LSP server, or runs a batch analysis -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[cfg(feature = "in-rust-tree")] #[allow(unused_extern_crates)] diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index c7b84c41b33e9..728bade0d0a5f 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -209,7 +209,7 @@ mod tests { use super::*; use cfg::CfgExpr; - use mbe::syntax_node_to_token_tree; + use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; use syntax::{ ast::{self, AstNode}, SmolStr, @@ -219,7 +219,7 @@ mod tests { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let (tt, _) = syntax_node_to_token_tree(tt.syntax()); + let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap); CfgExpr::parse(&tt) }; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index b4debba38c974..1908c73b3b46e 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -8,7 +8,7 @@ use std::{ use hir::{ db::{DefDatabase, ExpandDatabase, HirDatabase}, - Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name, + Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name, }; use hir_def::{ body::{BodySourceMap, SyntheticSyntax}, diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 8541be715a938..abec2679464ea 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -4,7 +4,7 @@ use project_model::{CargoConfig, RustLibSource}; use rustc_hash::FxHashSet; -use hir::{db::HirDatabase, Crate, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity}; use ide_db::base_db::SourceDatabaseExt; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index 71701ef16179f..f80beb9caadd8 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -5,6 +5,7 @@ use std::mem; use ide::FileId; use ide_db::FxHashMap; +use itertools::Itertools; use nohash_hasher::{IntMap, IntSet}; use rustc_hash::FxHashSet; use triomphe::Arc; @@ -129,8 +130,28 @@ pub(crate) fn fetch_native_diagnostics( ) -> Vec<(FileId, Vec)> { let _p = profile::span("fetch_native_diagnostics"); let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned()); - subscriptions - .into_iter() + + let convert_diagnostic = + |line_index: &crate::line_index::LineIndex, d: ide::Diagnostic| lsp_types::Diagnostic { + range: lsp::to_proto::range(&line_index, d.range.range), + severity: Some(lsp::to_proto::diagnostic_severity(d.severity)), + code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())), + code_description: Some(lsp_types::CodeDescription { + href: lsp_types::Url::parse(&d.code.url()).unwrap(), + }), + source: Some("rust-analyzer".to_string()), + message: d.message, + related_information: None, + tags: d.unused.then(|| 
vec![lsp_types::DiagnosticTag::UNNECESSARY]), + data: None, + }; + + // the diagnostics produced may point to different files not requested by the concrete request, + // put those into here and filter later + let mut odd_ones = Vec::new(); + let mut diagnostics = subscriptions + .iter() + .copied() .filter_map(|file_id| { let line_index = snapshot.file_line_index(file_id).ok()?; let diagnostics = snapshot @@ -142,21 +163,39 @@ pub(crate) fn fetch_native_diagnostics( ) .ok()? .into_iter() - .map(move |d| lsp_types::Diagnostic { - range: lsp::to_proto::range(&line_index, d.range), - severity: Some(lsp::to_proto::diagnostic_severity(d.severity)), - code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())), - code_description: Some(lsp_types::CodeDescription { - href: lsp_types::Url::parse(&d.code.url()).unwrap(), - }), - source: Some("rust-analyzer".to_string()), - message: d.message, - related_information: None, - tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]), - data: None, + .filter_map(|d| { + if d.range.file_id == file_id { + Some(convert_diagnostic(&line_index, d)) + } else { + odd_ones.push(d); + None + } }) .collect::>(); Some((file_id, diagnostics)) }) - .collect() + .collect::>(); + + // Add back any diagnostics that point to files we are subscribed to + for (file_id, group) in odd_ones + .into_iter() + .sorted_by_key(|it| it.range.file_id) + .group_by(|it| it.range.file_id) + .into_iter() + { + if !subscriptions.contains(&file_id) { + continue; + } + let Some((_, diagnostics)) = diagnostics.iter_mut().find(|&&mut (id, _)| id == file_id) + else { + continue; + }; + let Some(line_index) = snapshot.file_line_index(file_id).ok() else { + break; + }; + for diagnostic in group { + diagnostics.push(convert_diagnostic(&line_index, diagnostic)); + } + } + diagnostics } diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index c09f57252ce96..0f31fe16054a5 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -187,11 +187,9 @@ impl GlobalState { config_errors: Default::default(), proc_macro_changed: false, - // FIXME: use `Arc::from_iter` when it becomes available - proc_macro_clients: Arc::from(Vec::new()), + proc_macro_clients: Arc::from_iter([]), - // FIXME: use `Arc::from_iter` when it becomes available - flycheck: Arc::from(Vec::new()), + flycheck: Arc::from_iter([]), flycheck_sender, flycheck_receiver, last_flycheck_error: None, @@ -202,7 +200,7 @@ impl GlobalState { vfs_progress_n_total: 0, vfs_progress_n_done: 0, - workspaces: Arc::new(Vec::new()), + workspaces: Arc::from(Vec::new()), crate_graph_file_dependencies: FxHashSet::default(), fetch_workspaces_queue: OpQueue::default(), fetch_build_data_queue: OpQueue::default(), diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 06c27332d4406..57955ebf897e3 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -51,8 +51,7 @@ use crate::{ }; pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { - // FIXME: use `Arc::from_iter` when it becomes available - state.proc_macro_clients = Arc::from(Vec::new()); + state.proc_macro_clients = Arc::from_iter([]); state.proc_macro_changed = false; state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), false); @@ -60,8 +59,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: 
()) -> anyhow: } pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { - // FIXME: use `Arc::from_iter` when it becomes available - state.proc_macro_clients = Arc::from(Vec::new()); + state.proc_macro_clients = Arc::from_iter([]); state.proc_macro_changed = false; state.fetch_build_data_queue.request_op("rebuild proc macros request".to_string(), ()); @@ -1438,7 +1436,7 @@ pub(crate) fn handle_inlay_hints_resolve( }; let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?; - let file_id = FileId(resolve_data.file_id); + let file_id = FileId::from_raw(resolve_data.file_id); anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data"); let line_index = snap.file_line_index(file_id)?; diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index b7e839ac2a5e2..ed2cf07551b0c 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -30,7 +30,7 @@ fn integrated_highlighting_benchmark() { // Load rust-analyzer itself. let workspace_to_load = project_root(); - let file = "./crates/ide-db/src/apply_change.rs"; + let file = "./crates/rust-analyzer/src/config.rs"; let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { @@ -57,7 +57,6 @@ fn integrated_highlighting_benchmark() { } profile::init_from("*>100"); - // let _s = profile::heartbeat_span(); { let _it = stdx::timeit("change"); diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 6c62577f696e2..29bc0b80d8a1d 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -9,7 +9,7 @@ //! The `cli` submodule implements some batch-processing analysis, primarily as //! a debugging aid. 
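
Worth a gloss on the diagnostics change a few hunks up: native diagnostics can point into a different file than the one that produced them, so they are first set aside as "odd ones", then sorted and grouped by file id and merged back into the buckets of files the client is subscribed to. The shape of that regrouping, reduced to plain data (illustrative only; `u32` stands in for `FileId`):

    use itertools::Itertools;

    fn regroup<'a>(
        odd_ones: Vec<(u32, &'a str)>,
        per_file: &mut [(u32, Vec<&'a str>)],
    ) {
        // Sorting first makes `group_by` emit one contiguous group per file.
        let sorted = odd_ones.into_iter().sorted_by_key(|&(file, _)| file);
        for (file, group) in &sorted.group_by(|&(file, _)| file) {
            // Drop diagnostics for files nobody asked about.
            if let Some((_, bucket)) = per_file.iter_mut().find(|(id, _)| *id == file) {
                bucket.extend(group.map(|(_, msg)| msg));
            }
        }
    }
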
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub mod cli; diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index fb366fd5cc41f..dae560c5de12b 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -457,7 +457,7 @@ pub(crate) fn inlay_hint( inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it)) }; let data = if needs_resolve && something_to_resolve { - Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.0 }).unwrap()) + Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.index() }).unwrap()) } else { None }; diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index abe2191f40024..7ab528f497511 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -437,28 +437,22 @@ impl GlobalState { if self.config.expand_proc_macros() { tracing::info!("Spawning proc-macro servers"); - // FIXME: use `Arc::from_iter` when it becomes available - self.proc_macro_clients = Arc::from( - self.workspaces - .iter() - .map(|ws| { - let path = match self.config.proc_macro_srv() { - Some(path) => path, - None => ws.find_sysroot_proc_macro_srv()?, - }; - - tracing::info!("Using proc-macro server at {path}"); - ProcMacroServer::spawn(path.clone()).map_err(|err| { - tracing::error!( - "Failed to run proc-macro server from path {path}, error: {err:?}", - ); - anyhow::format_err!( - "Failed to run proc-macro server from path {path}, error: {err:?}", - ) - }) - }) - .collect::>(), - ) + self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| { + let path = match self.config.proc_macro_srv() { + Some(path) => path, + None => ws.find_sysroot_proc_macro_srv()?, + }; + + tracing::info!("Using proc-macro server at {path}"); + ProcMacroServer::spawn(path.clone()).map_err(|err| { + tracing::error!( + "Failed to run proc-macro server from path {path}, error: {err:?}", + ); + anyhow::format_err!( + "Failed to run proc-macro server from path {path}, error: {err:?}", + ) + }) + })) }; } diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 5cd02f7840460..ec8e5c6dd9681 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -8,7 +8,7 @@ //! specific JSON shapes here -- there's little value in such tests, as we can't //! be sure without a real client anyway. 
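
Also visible throughout these hunks: `FileId` no longer exposes its representation as a public tuple field, so raw numbers cross API boundaries only through an explicit constructor/accessor pair (`from_raw`/`index`). A self-contained sketch with a hypothetical stand-in type, not the real `FileId`:

    /// Minimal stand-in for an opaque index newtype like `FileId`.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct RawIdx(u32);

    impl RawIdx {
        fn from_raw(raw: u32) -> RawIdx {
            RawIdx(raw)
        }
        fn index(self) -> u32 {
            self.0
        }
    }

    // Round-tripping at a serialization boundary, e.g. LSP resolve data:
    fn demo() {
        let id = RawIdx::from_raw(42);
        assert_eq!(RawIdx::from_raw(id.index()), id);
    }

Funneling construction through `from_raw` keeps the call sites greppable and leaves the underlying representation free to change.
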
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[cfg(not(feature = "in-rust-tree"))] mod sourcegen; diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 45adbf5c573be..dba336ea7d6dc 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -250,6 +250,7 @@ fn check_dbg(path: &Path, text: &str) { // We have .dbg postfix "ide-completion/src/completions/postfix.rs", "ide-completion/src/completions/keyword.rs", + "ide-completion/src/tests/expression.rs", "ide-completion/src/tests/proc_macros.rs", // The documentation in string literals may contain anything for its own purposes "ide-completion/src/lib.rs", diff --git a/crates/rustc-dependencies/Cargo.toml b/crates/rustc-dependencies/Cargo.toml index cd7ec30593656..1b3b6ec735e75 100644 --- a/crates/rustc-dependencies/Cargo.toml +++ b/crates/rustc-dependencies/Cargo.toml @@ -11,10 +11,10 @@ authors.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -ra-ap-rustc_lexer = { version = "0.20.0" } -ra-ap-rustc_parse_format = { version = "0.20.0", default-features = false } -ra-ap-rustc_index = { version = "0.20.0", default-features = false } -ra-ap-rustc_abi = { version = "0.20.0", default-features = false } +ra-ap-rustc_lexer = { version = "0.21.0" } +ra-ap-rustc_parse_format = { version = "0.21.0", default-features = false } +ra-ap-rustc_index = { version = "0.21.0", default-features = false } +ra-ap-rustc_abi = { version = "0.21.0", default-features = false } [features] in-rust-tree = [] diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs index 1514c6c7d4c18..18fa77fd9743f 100644 --- a/crates/sourcegen/src/lib.rs +++ b/crates/sourcegen/src/lib.rs @@ -6,7 +6,7 @@ //! //! This crate contains utilities to make this kind of source-gen easy. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::{ fmt, fs, mem, diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml index 35986799fdd4d..c914ae2144b59 100644 --- a/crates/stdx/Cargo.toml +++ b/crates/stdx/Cargo.toml @@ -17,10 +17,11 @@ always-assert = { version = "0.1.2", features = ["log"] } jod-thread = "0.1.2" libc.workspace = true crossbeam-channel = "0.5.5" +itertools.workspace = true # Think twice before adding anything here [target.'cfg(windows)'.dependencies] -miow = "0.5.0" +miow = "0.6.0" winapi = { version = "0.3.9", features = ["winerror"] } [features] diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index f26c7f6dfc2d5..71e269f74bc76 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -1,6 +1,6 @@ //! Missing batteries for standard libraries. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::io as sio; use std::process::Command; @@ -15,6 +15,7 @@ pub mod thread; pub mod anymap; pub use always_assert::{always, never}; +pub use itertools; #[inline(always)] pub fn is_ci() -> bool { @@ -40,6 +41,24 @@ Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`. 
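
The `stdx` hunk just below introduces a small `TupleExt` trait for pairs; it only adds named, by-value accessors. A usage sketch, assuming the trait lands as shown:

    use stdx::TupleExt;

    fn demo() {
        let pair = ("answer", 42);
        // `head`/`tail` take the tuple by value; for `Copy` tuples the
        // original remains usable, and the names read better than
        // `.0`/`.1` at the end of long iterator chains.
        assert_eq!(pair.head(), "answer");
        assert_eq!(pair.tail(), 42);
    }
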
); } +pub trait TupleExt { + type Head; + type Tail; + fn head(self) -> Self::Head; + fn tail(self) -> Self::Tail; +} + +impl TupleExt for (T, U) { + type Head = T; + type Tail = U; + fn head(self) -> Self::Head { + self.0 + } + fn tail(self) -> Self::Tail { + self.1 + } +} + pub fn to_lower_snake_case(s: &str) -> String { to_snake_case(s, char::to_lowercase) } diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 3b55921dc759e..7a7c0d267fede 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -16,7 +16,7 @@ doctest = false cov-mark = "2.0.0-pre.1" either.workspace = true itertools.workspace = true -rowan = "0.15.11" +rowan = "0.15.15" rustc-hash = "1.1.0" once_cell = "1.17.0" indexmap.workspace = true diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 8cc271d226c4e..d5d565a015a0d 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -121,6 +121,7 @@ impl ast::Whitespace { } } +#[derive(Debug)] pub struct QuoteOffsets { pub quotes: (TextRange, TextRange), pub contents: TextRange, @@ -167,6 +168,11 @@ pub trait IsString: AstToken { fn text_range_between_quotes(&self) -> Option { self.quote_offsets().map(|it| it.contents) } + fn text_without_quotes(&self) -> &str { + let text = self.text(); + let Some(offsets) = self.text_range_between_quotes() else { return text }; + &text[offsets - self.syntax().text_range().start()] + } fn open_quote_text_range(&self) -> Option { self.quote_offsets().map(|it| it.quotes.0) } diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 4939ab39049c0..d600698040dc7 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -20,7 +20,7 @@ //! [Swift]: #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[allow(unused)] macro_rules! 
eprintln { @@ -75,7 +75,7 @@ pub use smol_str::SmolStr; #[derive(Debug, PartialEq, Eq)] pub struct Parse { green: GreenNode, - errors: Arc<[SyntaxError]>, + errors: Option>, _ty: PhantomData T>, } @@ -87,14 +87,18 @@ impl Clone for Parse { impl Parse { fn new(green: GreenNode, errors: Vec) -> Parse { - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } pub fn syntax_node(&self) -> SyntaxNode { SyntaxNode::new_root(self.green.clone()) } pub fn errors(&self) -> &[SyntaxError] { - &self.errors + self.errors.as_deref().unwrap_or_default() } } @@ -108,10 +112,9 @@ impl Parse { } pub fn ok(self) -> Result> { - if self.errors.is_empty() { - Ok(self.tree()) - } else { - Err(self.errors) + match self.errors { + Some(e) => Err(e), + None => Ok(self.tree()), } } } @@ -129,7 +132,7 @@ impl Parse { impl Parse { pub fn debug_dump(&self) -> String { let mut buf = format!("{:#?}", self.tree().syntax()); - for err in self.errors.iter() { + for err in self.errors.as_deref().into_iter().flat_map(<[_]>::iter) { format_to!(buf, "error {:?}: {}\n", err.range(), err); } buf @@ -141,13 +144,16 @@ impl Parse { fn incremental_reparse(&self, indel: &Indel) -> Option> { // FIXME: validation errors are not handled here - parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map( - |(green_node, errors, _reparsed_range)| Parse { - green: green_node, - errors: errors.into(), - _ty: PhantomData, - }, + parsing::incremental_reparse( + self.tree().syntax(), + indel, + self.errors.as_deref().unwrap_or_default().iter().cloned(), ) + .map(|(green_node, errors, _reparsed_range)| Parse { + green: green_node, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + }) } fn full_reparse(&self, indel: &Indel) -> Parse { @@ -168,7 +174,11 @@ impl SourceFile { errors.extend(validation::validate(&root)); assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } } @@ -275,7 +285,11 @@ impl ast::TokenTree { let (green, errors) = builder.finish_raw(); - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } } diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs index 45e5916098282..0ddc641711fb9 100644 --- a/crates/syntax/src/parsing/reparsing.rs +++ b/crates/syntax/src/parsing/reparsing.rs @@ -20,7 +20,7 @@ use crate::{ pub(crate) fn incremental_reparse( node: &SyntaxNode, edit: &Indel, - errors: Vec, + errors: impl IntoIterator, ) -> Option<(GreenNode, Vec, TextRange)> { if let Some((green, new_errors, old_range)) = reparse_token(node, edit) { return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); @@ -147,7 +147,7 @@ fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool { } fn merge_errors( - old_errors: Vec, + old_errors: impl IntoIterator, new_errors: Vec, range_before_reparse: TextRange, edit: &Indel, @@ -191,8 +191,12 @@ mod tests { let fully_reparsed = SourceFile::parse(&after); let incrementally_reparsed: Parse = { let before = SourceFile::parse(&before); - let (green, new_errors, range) = - incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap(); + let (green, new_errors, 
range) = incremental_reparse( + before.tree().syntax(), + &edit, + before.errors.as_deref().unwrap_or_default().iter().cloned(), + ) + .unwrap(); assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); Parse::new(green, new_errors) }; diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs index 71762996cd7d1..8750147ee11af 100644 --- a/crates/syntax/src/ptr.rs +++ b/crates/syntax/src/ptr.rs @@ -22,12 +22,18 @@ use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode}; pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr; /// Like `SyntaxNodePtr`, but remembers the type of node. -#[derive(Debug)] pub struct AstPtr { raw: SyntaxNodePtr, _ty: PhantomData N>, } +impl std::fmt::Debug for AstPtr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_tuple("AstPtr").field(&self.raw).finish() + } +} + +impl Copy for AstPtr {} impl Clone for AstPtr { fn clone(&self) -> AstPtr { AstPtr { raw: self.raw.clone(), _ty: PhantomData } diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs index 3010d77d827e1..8ae1242cf7fd4 100644 --- a/crates/syntax/src/tests.rs +++ b/crates/syntax/src/tests.rs @@ -38,7 +38,7 @@ fn benchmark_parser() { let tree = { let _b = bench("parsing"); let p = SourceFile::parse(&data); - assert!(p.errors.is_empty()); + assert!(p.errors.is_none()); assert_eq!(p.tree().syntax.text_range().len(), 352474.into()); p.tree() }; diff --git a/crates/syntax/src/token_text.rs b/crates/syntax/src/token_text.rs index 09c080c0c2301..e69deb49ce142 100644 --- a/crates/syntax/src/token_text.rs +++ b/crates/syntax/src/token_text.rs @@ -13,7 +13,7 @@ pub(crate) enum Repr<'a> { } impl<'a> TokenText<'a> { - pub(crate) fn borrowed(text: &'a str) -> Self { + pub fn borrowed(text: &'a str) -> Self { TokenText(Repr::Borrowed(text)) } diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index fd3e68e2d2c63..e48b27313068c 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -6,7 +6,7 @@ //! * Extracting markup (mainly, `$0` markers) out of fixture strings. //! * marks (see the eponymous module). -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod assert_linear; pub mod bench_fixture; diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index ba5c86db0e2ea..f766747d707ca 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -9,12 +9,15 @@ //! //! Available flags: //! add: +//! asm: +//! assert: //! as_ref: sized //! bool_impl: option, fn //! builtin_impls: //! cell: copy, drop //! clone: sized //! coerce_unsized: unsize +//! concat: //! copy: clone //! default: sized //! deref_mut: deref @@ -1357,7 +1360,7 @@ mod panicking { mod macros { // region:panic #[macro_export] - #[rustc_builtin_macro(std_panic)] + #[rustc_builtin_macro(core_panic)] macro_rules! panic { ($($arg:tt)*) => { /* compiler built-in */ @@ -1365,6 +1368,26 @@ mod macros { } // endregion:panic + // region:asm + #[macro_export] + #[rustc_builtin_macro] + macro_rules! asm { + ($($arg:tt)*) => { + /* compiler built-in */ + }; + } + // endregion:asm + + // region:assert + #[macro_export] + #[rustc_builtin_macro] + macro_rules! 
+        ($($arg:tt)*) => {
+            /* compiler built-in */
+        };
+    }
+    // endregion:assert
+
     // region:fmt
     #[macro_export]
     #[rustc_builtin_macro]
@@ -1380,6 +1403,13 @@ mod macros {
         ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }};
     }
 
+    #[macro_export]
+    #[rustc_builtin_macro]
+    macro_rules! format_args_nl {
+        ($fmt:expr) => {{ /* compiler built-in */ }};
+        ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }};
+    }
+
     #[macro_export]
     macro_rules! print {
         ($($arg:tt)*) => {{
@@ -1410,6 +1440,12 @@ mod macros {
         ($file:expr $(,)?) => {{ /* compiler built-in */ }};
     }
     // endregion:include
+
+    // region:concat
+    #[rustc_builtin_macro]
+    #[macro_export]
+    macro_rules! concat {}
+    // endregion:concat
 }
 
 // region:non_zero
diff --git a/crates/text-edit/src/lib.rs b/crates/text-edit/src/lib.rs
index 4705d18187af8..fb52a50f0b3f1 100644
--- a/crates/text-edit/src/lib.rs
+++ b/crates/text-edit/src/lib.rs
@@ -4,7 +4,7 @@
 //! so `TextEdit` is the ultimate representation of the work done by
 //! rust-analyzer.
 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
 
 use itertools::Itertools;
 use std::cmp::max;
diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs
index 729f84a8150c5..997f339edc4d7 100644
--- a/crates/toolchain/src/lib.rs
+++ b/crates/toolchain/src/lib.rs
@@ -1,6 +1,6 @@
 //! Discovery of `cargo` & `rustc` executables.
 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
 
 use std::{env, iter, path::PathBuf};
 
diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml
index a28ee5f1ca2bb..57222449790ec 100644
--- a/crates/tt/Cargo.toml
+++ b/crates/tt/Cargo.toml
@@ -13,5 +13,6 @@ doctest = false
 
 [dependencies]
 smol_str.workspace = true
+text-size.workspace = true
 
 stdx.workspace = true
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index b5a72bec079a4..481d575403aca 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -2,115 +2,98 @@
 //! input and output) of macros. It closely mirrors `proc_macro` crate's
 //! `TokenTree`.
 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
 
 use std::fmt;
 
 use stdx::impl_from;
 
 pub use smol_str::SmolStr;
+pub use text_size::{TextRange, TextSize};
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub struct SpanData<Anchor, Ctx> {
+    /// The text range of this span, relative to the anchor.
+    /// We need the anchor for incrementality, as storing absolute ranges will require
+    /// recomputation on every change in a file at all times.
+    pub range: TextRange,
+    pub anchor: Anchor,
+    /// The syntax context of the span.
+    pub ctx: Ctx,
+}
 
-/// Represents identity of the token.
-///
-/// For hygiene purposes, we need to track which expanded tokens originated from
-/// which source tokens. We do it by assigning a distinct identity to each
-/// source token and making sure that identities are preserved during macro
-/// expansion.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(pub u32);
+impl<Anchor: SpanAnchor, Ctx: SyntaxContext> Span for SpanData<Anchor, Ctx> {
+    #[allow(deprecated)]
+    const DUMMY: Self = SpanData {
+        range: TextRange::empty(TextSize::new(0)),
+        anchor: Anchor::DUMMY,
+        ctx: Ctx::DUMMY,
+    };
+}
 
-impl fmt::Debug for TokenId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.0.fmt(f)
-    }
+pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
+    // FIXME: Should not exist. Dummy spans will always be wrong if they leak somewhere. Instead,
+    // the call site or def site spans should be used in relevant places; it's just that we
+    // don't expose those everywhere yet.
+    const DUMMY: Self;
 }
 
-impl TokenId {
-    pub const UNSPECIFIED: TokenId = TokenId(!0);
-    pub const fn unspecified() -> TokenId {
-        Self::UNSPECIFIED
-    }
+// FIXME: Should not exist
+pub trait SpanAnchor:
+    std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash
+{
+    #[deprecated(note = "this should not exist")]
+    const DUMMY: Self;
 }
 
-pub mod token_id {
-    pub use crate::{DelimiterKind, Spacing, TokenId};
-    pub type Span = crate::TokenId;
-    pub type Subtree = crate::Subtree<Span>;
-    pub type Punct = crate::Punct<Span>;
-    pub type Delimiter = crate::Delimiter<Span>;
-    pub type Leaf = crate::Leaf<Span>;
-    pub type Ident = crate::Ident<Span>;
-    pub type Literal = crate::Literal<Span>;
-    pub type TokenTree = crate::TokenTree<Span>;
-    pub mod buffer {
-        pub type TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>;
-        pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>;
-        pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>;
-    }
+// FIXME: Should not exist
+pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq {
+    #[deprecated(note = "this should not exist")]
+    const DUMMY: Self;
+}
 
-    impl Delimiter {
-        pub const UNSPECIFIED: Self = Self {
-            open: TokenId::UNSPECIFIED,
-            close: TokenId::UNSPECIFIED,
-            kind: DelimiterKind::Invisible,
-        };
-        pub const fn unspecified() -> Self {
-            Self::UNSPECIFIED
-        }
-    }
-    impl Subtree {
-        pub const fn empty() -> Self {
-            Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }
-        }
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TokenTree<S> {
+    Leaf(Leaf<S>),
+    Subtree(Subtree<S>),
+}
+impl_from!(Leaf<S>, Subtree<S> for TokenTree<S>);
+impl<S: Span> TokenTree<S> {
+    pub const fn empty(span: S) -> Self {
+        Self::Subtree(Subtree {
+            delimiter: Delimiter::invisible_spanned(span),
+            token_trees: vec![],
+        })
     }
-    impl TokenTree {
-        pub const fn empty() -> Self {
-            Self::Subtree(Subtree::empty())
+
+    pub fn subtree_or_wrap(self) -> Subtree<S> {
+        match self {
+            TokenTree::Leaf(_) => {
+                Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] }
+            }
+            TokenTree::Subtree(s) => s,
         }
     }
-
-    impl Subtree {
-        pub fn visit_ids(&mut self, f: &mut impl FnMut(TokenId) -> TokenId) {
-            self.delimiter.open = f(self.delimiter.open);
-            self.delimiter.close = f(self.delimiter.close);
-            self.token_trees.iter_mut().for_each(|tt| match tt {
-                crate::TokenTree::Leaf(leaf) => match leaf {
-                    crate::Leaf::Literal(it) => it.span = f(it.span),
-                    crate::Leaf::Punct(it) => it.span = f(it.span),
-                    crate::Leaf::Ident(it) => it.span = f(it.span),
-                },
-                crate::TokenTree::Subtree(s) => s.visit_ids(f),
-            })
+    pub fn subtree_or_wrap2(self, span: DelimSpan<S>) -> Subtree<S> {
+        match self {
+            TokenTree::Leaf(_) => Subtree {
+                delimiter: Delimiter::invisible_delim_spanned(span),
+                token_trees: vec![self],
+            },
+            TokenTree::Subtree(s) => s,
         }
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct SyntaxContext(pub u32);
-
-// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-// pub struct Span {
-//     pub id: TokenId,
-//     pub ctx: SyntaxContext,
-// }
-// pub type Span = (TokenId, SyntaxContext);
-
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum TokenTree<Span> {
-    Leaf(Leaf<Span>),
-    Subtree(Subtree<Span>),
+pub enum Leaf<S> {
+    Literal(Literal<S>),
+    Punct(Punct<S>),
+    Ident(Ident<S>),
 }
-impl_from!(Leaf<Span>, Subtree<Span> for TokenTree<Span>);
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum Leaf<Span> {
-    Literal(Literal<Span>),
-    Punct(Punct<Span>),
-    Ident(Ident<Span>),
-}
-
-impl<Span> Leaf<Span> {
-    pub fn span(&self) -> &Span {
+impl<S> Leaf<S> {
+    pub fn span(&self) -> &S {
         match self {
             Leaf::Literal(it) => &it.span,
             Leaf::Punct(it) => &it.span,
@@ -118,21 +101,74 @@ impl<S> Leaf<S> {
         }
     }
 }
-impl_from!(Literal<Span>, Punct<Span>, Ident<Span> for Leaf<Span>);
+impl_from!(Literal<S>, Punct<S>, Ident<S> for Leaf<S>);
 
 #[derive(Clone, PartialEq, Eq, Hash)]
-pub struct Subtree<Span> {
-    pub delimiter: Delimiter<Span>,
-    pub token_trees: Vec<TokenTree<Span>>,
+pub struct Subtree<S> {
+    pub delimiter: Delimiter<S>,
+    pub token_trees: Vec<TokenTree<S>>,
+}
+
+impl<S: Span> Subtree<S> {
+    pub const fn empty(span: DelimSpan<S>) -> Self {
+        Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: vec![] }
+    }
+
+    pub fn visit_ids(&mut self, f: &mut impl FnMut(S) -> S) {
+        self.delimiter.open = f(self.delimiter.open);
+        self.delimiter.close = f(self.delimiter.close);
+        self.token_trees.iter_mut().for_each(|tt| match tt {
+            crate::TokenTree::Leaf(leaf) => match leaf {
+                crate::Leaf::Literal(it) => it.span = f(it.span),
+                crate::Leaf::Punct(it) => it.span = f(it.span),
+                crate::Leaf::Ident(it) => it.span = f(it.span),
+            },
+            crate::TokenTree::Subtree(s) => s.visit_ids(f),
+        })
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct DelimSpan<S> {
+    pub open: S,
+    pub close: S,
+}
+
+impl<S: Span> DelimSpan<S> {
+    // FIXME should not exist
+    pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY };
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Delimiter<Span> {
-    pub open: Span,
-    pub close: Span,
+pub struct Delimiter<S> {
+    pub open: S,
+    pub close: S,
     pub kind: DelimiterKind,
 }
 
+impl<S: Span> Delimiter<S> {
+    // FIXME should not exist
+    pub const DUMMY_INVISIBLE: Self =
+        Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible };
+
+    // FIXME should not exist
+    pub const fn dummy_invisible() -> Self {
+        Self::DUMMY_INVISIBLE
+    }
+
+    pub const fn invisible_spanned(span: S) -> Self {
+        Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
+    }
+
+    pub const fn invisible_delim_spanned(span: DelimSpan<S>) -> Self {
+        Delimiter { open: span.open, close: span.close, kind: DelimiterKind::Invisible }
+    }
+
+    pub fn delim_span(&self) -> DelimSpan<S> {
+        DelimSpan { open: self.open, close: self.close }
+    }
+}
+
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub enum DelimiterKind {
     Parenthesis,
@@ -142,16 +178,16 @@ pub enum DelimiterKind {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Literal<Span> {
+pub struct Literal<S> {
     pub text: SmolStr,
-    pub span: Span,
+    pub span: S,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Punct<Span> {
+pub struct Punct<S> {
     pub char: char,
     pub spacing: Spacing,
-    pub span: Span,
+    pub span: S,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -162,9 +198,9 @@ pub enum Spacing {
 }
 
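// --- Illustrative sketch, not part of the patch above. It shows how the now
// --- span-generic `tt` API is meant to be used. `FakeSpan` is a stand-in type
// --- invented for this sketch; real callers instantiate `SpanData<Anchor, Ctx>`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FakeSpan(u32);

impl tt::Span for FakeSpan {
    const DUMMY: Self = FakeSpan(0);
}

// Build an invisible-delimited subtree holding a single identifier, carrying
// the given span on both the delimiter and the leaf.
fn ident_subtree(text: &str, span: FakeSpan) -> tt::Subtree<FakeSpan> {
    tt::Subtree {
        delimiter: tt::Delimiter::invisible_spanned(span),
        token_trees: vec![tt::TokenTree::Leaf(tt::Ident { text: text.into(), span }.into())],
    }
}
// --- end of sketch; the patch resumes below. ---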
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 /// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier.
-pub struct Ident<Span> {
+pub struct Ident<S> {
     pub text: SmolStr,
-    pub span: Span,
+    pub span: S,
 }
 
 impl<S> Ident<S> {
@@ -173,9 +209,9 @@ impl<S> Ident<S> {
     }
 }
 
-fn print_debug_subtree<Span: fmt::Debug>(
+fn print_debug_subtree<S: fmt::Debug>(
     f: &mut fmt::Formatter<'_>,
-    subtree: &Subtree<Span>,
+    subtree: &Subtree<S>,
     level: usize,
 ) -> fmt::Result {
     let align = "  ".repeat(level);
@@ -203,9 +239,9 @@ fn print_debug_subtree(
     Ok(())
 }
 
-fn print_debug_token<Span: fmt::Debug>(
+fn print_debug_token<S: fmt::Debug>(
     f: &mut fmt::Formatter<'_>,
-    tkn: &TokenTree<Span>,
+    tkn: &TokenTree<S>,
     level: usize,
 ) -> fmt::Result {
     let align = "  ".repeat(level);
@@ -231,13 +267,13 @@ fn print_debug_token(
     Ok(())
 }
 
-impl<Span: fmt::Debug> fmt::Debug for Subtree<Span> {
+impl<S: fmt::Debug> fmt::Debug for Subtree<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         print_debug_subtree(f, self, 0)
     }
 }
 
-impl<Span> fmt::Display for TokenTree<Span> {
+impl<S> fmt::Display for TokenTree<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             TokenTree::Leaf(it) => fmt::Display::fmt(it, f),
@@ -246,7 +282,7 @@ impl fmt::Display for TokenTree {
     }
 }
 
-impl<Span> fmt::Display for Subtree<Span> {
+impl<S> fmt::Display for Subtree<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let (l, r) = match self.delimiter.kind {
             DelimiterKind::Parenthesis => ("(", ")"),
@@ -274,7 +310,7 @@ impl fmt::Display for Subtree {
     }
 }
 
-impl<Span> fmt::Display for Leaf<Span> {
+impl<S> fmt::Display for Leaf<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Leaf::Ident(it) => fmt::Display::fmt(it, f),
@@ -284,25 +320,25 @@ impl fmt::Display for Leaf {
     }
 }
 
-impl<Span> fmt::Display for Ident<Span> {
+impl<S> fmt::Display for Ident<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.text, f)
     }
 }
 
-impl<Span> fmt::Display for Literal<Span> {
+impl<S> fmt::Display for Literal<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.text, f)
     }
 }
 
-impl<Span> fmt::Display for Punct<Span> {
+impl<S> fmt::Display for Punct<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.char, f)
     }
 }
 
-impl<Span> Subtree<Span> {
+impl<S> Subtree<S> {
     /// Count the number of tokens recursively
     pub fn count(&self) -> usize {
         let children_count = self
@@ -318,7 +354,7 @@ impl Subtree {
     }
 }
 
-impl<Span> Subtree<Span> {
+impl<S> Subtree<S> {
     /// A simple line string used for debugging
     pub fn as_debug_string(&self) -> String {
         let delim = match self.delimiter.kind {
@@ -366,8 +402,8 @@ impl Subtree {
 
 pub mod buffer;
 
-pub fn pretty<Span>(tkns: &[TokenTree<Span>]) -> String {
-    fn tokentree_to_text<Span>(tkn: &TokenTree<Span>) -> String {
+pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
+    fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String {
         match tkn {
             TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(),
             TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(),
diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs
index abfc51dfec66f..0306504371465 100644
--- a/crates/vfs-notify/src/lib.rs
+++ b/crates/vfs-notify/src/lib.rs
@@ -7,7 +7,7 @@
 //! Hopefully, one day a reliable file watching/walking crate appears on
 //! crates.io, and we can reduce this to trivial glue code.
 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
 
 use std::fs;
 
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index 06004adad34a1..8ffda5d78d13e 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -38,7 +38,7 @@
 //! [`Handle`]: loader::Handle
 //!
[`Entries`]: loader::Entry -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod anchored_path; pub mod file_set; @@ -60,7 +60,22 @@ pub use paths::{AbsPath, AbsPathBuf}; /// /// Most functions in rust-analyzer use this when they need to refer to a file. #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct FileId(pub u32); +pub struct FileId(u32); + +impl FileId { + /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics! + pub const BOGUS: FileId = FileId(0xe4e4e); + + #[inline] + pub fn from_raw(raw: u32) -> FileId { + FileId(raw) + } + + #[inline] + pub fn index(self) -> u32 { + self.0 + } +} /// safe because `FileId` is a newtype of `u32` impl nohash_hasher::IsEnabled for FileId {} diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts index 57881803a6a04..778cbc5762aeb 100644 --- a/editors/code/src/run.ts +++ b/editors/code/src/run.ts @@ -7,6 +7,8 @@ import type { CtxInit } from "./ctx"; import { makeDebugConfig } from "./debug"; import type { Config, RunnableEnvCfg, RunnableEnvCfgItem } from "./config"; import { unwrapUndefinable } from "./undefinable"; +import type { LanguageClient } from "vscode-languageclient/node"; +import type { RustEditor } from "./util"; const quickPickButtons = [ { iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." }, @@ -21,73 +23,36 @@ export async function selectRunnable( const editor = ctx.activeRustEditor; if (!editor) return; - const client = ctx.client; - const textDocument: lc.TextDocumentIdentifier = { - uri: editor.document.uri.toString(), - }; - - const runnables = await client.sendRequest(ra.runnables, { - textDocument, - position: client.code2ProtocolConverter.asPosition(editor.selection.active), - }); - const items: RunnableQuickPick[] = []; - if (prevRunnable) { - items.push(prevRunnable); + // show a placeholder while we get the runnables from the server + const quickPick = vscode.window.createQuickPick(); + quickPick.title = "Select Runnable"; + if (showButtons) { + quickPick.buttons = quickPickButtons; } - for (const r of runnables) { - if (prevRunnable && JSON.stringify(prevRunnable.runnable) === JSON.stringify(r)) { - continue; - } + quickPick.items = [{ label: "Looking for runnables..." }]; + quickPick.activeItems = []; + quickPick.show(); - if (debuggeeOnly && (r.label.startsWith("doctest") || r.label.startsWith("cargo"))) { - continue; - } - items.push(new RunnableQuickPick(r)); - } + const runnables = await getRunnables(ctx.client, editor, prevRunnable, debuggeeOnly); - if (items.length === 0) { + if (runnables.length === 0) { // it is the debug case, run always has at least 'cargo check ...' 
// see crates\rust-analyzer\src\main_loop\handlers.rs, handle_runnables await vscode.window.showErrorMessage("There's no debug target!"); + quickPick.dispose(); return; } - return await new Promise((resolve) => { - const disposables: vscode.Disposable[] = []; - const close = (result?: RunnableQuickPick) => { - resolve(result); - disposables.forEach((d) => d.dispose()); - }; + // clear the list before we hook up listeners to to avoid invoking them + // if the user happens to accept the placeholder item + quickPick.items = []; - const quickPick = vscode.window.createQuickPick(); - quickPick.items = items; - quickPick.title = "Select Runnable"; - if (showButtons) { - quickPick.buttons = quickPickButtons; - } - disposables.push( - quickPick.onDidHide(() => close()), - quickPick.onDidAccept(() => close(quickPick.selectedItems[0])), - quickPick.onDidTriggerButton(async (_button) => { - const runnable = unwrapUndefinable(quickPick.activeItems[0]).runnable; - await makeDebugConfig(ctx, runnable); - close(); - }), - quickPick.onDidChangeActive((activeList) => { - if (showButtons && activeList.length > 0) { - const active = unwrapUndefinable(activeList[0]); - if (active.label.startsWith("cargo")) { - // save button makes no sense for `cargo test` or `cargo check` - quickPick.buttons = []; - } else if (quickPick.buttons.length === 0) { - quickPick.buttons = quickPickButtons; - } - } - }), - quickPick, - ); - quickPick.show(); - }); + return await populateAndGetSelection( + quickPick as vscode.QuickPick, + runnables, + ctx, + showButtons, + ); } export class RunnableQuickPick implements vscode.QuickPickItem { @@ -187,3 +152,75 @@ export function createArgs(runnable: ra.Runnable): string[] { } return args; } + +async function getRunnables( + client: LanguageClient, + editor: RustEditor, + prevRunnable?: RunnableQuickPick, + debuggeeOnly = false, +): Promise { + const textDocument: lc.TextDocumentIdentifier = { + uri: editor.document.uri.toString(), + }; + + const runnables = await client.sendRequest(ra.runnables, { + textDocument, + position: client.code2ProtocolConverter.asPosition(editor.selection.active), + }); + const items: RunnableQuickPick[] = []; + if (prevRunnable) { + items.push(prevRunnable); + } + for (const r of runnables) { + if (prevRunnable && JSON.stringify(prevRunnable.runnable) === JSON.stringify(r)) { + continue; + } + + if (debuggeeOnly && (r.label.startsWith("doctest") || r.label.startsWith("cargo"))) { + continue; + } + items.push(new RunnableQuickPick(r)); + } + + return items; +} + +async function populateAndGetSelection( + quickPick: vscode.QuickPick, + runnables: RunnableQuickPick[], + ctx: CtxInit, + showButtons: boolean, +): Promise { + return new Promise((resolve) => { + const disposables: vscode.Disposable[] = []; + const close = (result?: RunnableQuickPick) => { + resolve(result); + disposables.forEach((d) => d.dispose()); + }; + disposables.push( + quickPick.onDidHide(() => close()), + quickPick.onDidAccept(() => close(quickPick.selectedItems[0] as RunnableQuickPick)), + quickPick.onDidTriggerButton(async (_button) => { + const runnable = unwrapUndefinable( + quickPick.activeItems[0] as RunnableQuickPick, + ).runnable; + await makeDebugConfig(ctx, runnable); + close(); + }), + quickPick.onDidChangeActive((activeList) => { + if (showButtons && activeList.length > 0) { + const active = unwrapUndefinable(activeList[0]); + if (active.label.startsWith("cargo")) { + // save button makes no sense for `cargo test` or `cargo check` + quickPick.buttons = []; + } else if 
(quickPick.buttons.length === 0) { + quickPick.buttons = quickPickButtons; + } + } + }), + quickPick, + ); + // populate the list with the actual runnables + quickPick.items = runnables; + }); +} diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts index 1037e513aa106..a0b34406c1b08 100644 --- a/editors/code/src/toolchain.ts +++ b/editors/code/src/toolchain.ts @@ -74,7 +74,7 @@ export class Cargo { artifacts.push({ fileName: message.executable, name: message.target.name, - workspace: message.manifest_path.replace(/\/Cargo\.toml$/, ""), + workspace: path.dirname(message.manifest_path), kind: message.target.kind[0], isTest: message.profile.test, }); diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs index f39c3a3e4ca12..d195bdd156bea 100644 --- a/lib/la-arena/src/lib.rs +++ b/lib/la-arena/src/lib.rs @@ -1,6 +1,6 @@ //! Yet another index-based arena. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(missing_docs)] use std::{ diff --git a/lib/line-index/src/lib.rs b/lib/line-index/src/lib.rs index 03371c9c87af9..58f266d67f629 100644 --- a/lib/line-index/src/lib.rs +++ b/lib/line-index/src/lib.rs @@ -363,7 +363,10 @@ fn analyze_source_file_generic( let c = src[i..].chars().next().unwrap(); char_len = c.len_utf8(); - let pos = TextSize::from(i as u32) + output_offset; + // The last element of `lines` represents the offset of the start of + // current line. To get the offset inside the line, we subtract it. + let pos = TextSize::from(i as u32) + output_offset + - lines.last().unwrap_or(&TextSize::default()); if char_len > 1 { assert!((2..=4).contains(&char_len)); diff --git a/lib/line-index/src/tests.rs b/lib/line-index/src/tests.rs index 8f3762d191064..981008e346ba4 100644 --- a/lib/line-index/src/tests.rs +++ b/lib/line-index/src/tests.rs @@ -1,4 +1,4 @@ -use crate::{LineIndex, TextSize, WideChar}; +use crate::{LineCol, LineIndex, TextSize, WideChar, WideEncoding, WideLineCol}; macro_rules! test { ( @@ -102,7 +102,7 @@ test!( case: multi_byte_with_new_lines, text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf", lines: vec![7, 27], - multi_byte_chars: vec![(1, (13, 15)), (2, (29, 31))], + multi_byte_chars: vec![(1, (6, 8)), (2, (2, 4))], ); test!( @@ -118,3 +118,27 @@ test!( lines: vec![16], multi_byte_chars: vec![], ); + +#[test] +fn test_try_line_col() { + let text = "\n\n\n\n\n宽3456"; + assert_eq!(&text[5..8], "宽"); + assert_eq!(&text[11..12], "6"); + let line_index = LineIndex::new(text); + let before_6 = TextSize::from(11); + let line_col = line_index.try_line_col(before_6); + assert_eq!(line_col, Some(LineCol { line: 5, col: 6 })); +} + +#[test] +fn test_to_wide() { + let text = "\n\n\n\n\n宽3456"; + assert_eq!(&text[5..8], "宽"); + assert_eq!(&text[11..12], "6"); + let line_index = LineIndex::new(text); + let before_6 = TextSize::from(11); + let line_col = line_index.try_line_col(before_6); + assert_eq!(line_col, Some(LineCol { line: 5, col: 6 })); + let wide_line_col = line_index.to_wide(WideEncoding::Utf16, line_col.unwrap()); + assert_eq!(wide_line_col, Some(WideLineCol { line: 5, col: 4 })); +} diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml index be1573913ff24..2a70aedbe8ec1 100644 --- a/lib/lsp-server/Cargo.toml +++ b/lib/lsp-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lsp-server" -version = "0.7.4" +version = "0.7.5" description = "Generic LSP server scaffold." 
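// --- Illustrative sketch, not part of the patch: the `line-index` fix above in
// --- action. Multi-byte columns now come out relative to the line start, as the
// --- new tests check. API (`LineIndex::new`, `line_col`, `to_wide`) as used in
// --- the tests above; standalone program for clarity.
use line_index::{LineIndex, TextSize, WideEncoding};

fn main() {
    let text = "\n\n\n\n\n宽3456";
    let index = LineIndex::new(text);
    // Offset 11 points at '6'. '宽' is 3 UTF-8 bytes, so the UTF-8 column is 6.
    let line_col = index.line_col(TextSize::from(11));
    assert_eq!((line_col.line, line_col.col), (5, 6));
    // In UTF-16 '宽' is a single unit, so the wide column shrinks to 4.
    let wide = index.to_wide(WideEncoding::Utf16, line_col).unwrap();
    assert_eq!((wide.line, wide.col), (5, 4));
}
// --- end of sketch; the patch resumes below. ---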
license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server" diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs index b190c0af73d73..2797a6b60de46 100644 --- a/lib/lsp-server/src/lib.rs +++ b/lib/lsp-server/src/lib.rs @@ -4,7 +4,7 @@ //! //! Run with `RUST_LOG=lsp_server=debug` to see all the messages. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod msg; mod stdio; diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 6a45033ada3ba..49f8ae79baf0e 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -8,7 +8,7 @@ //! This binary is integrated into the `cargo` command line by using an alias in //! `.cargo/config`. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod flags; From 71d98c53bd63d9c79b8a8a4d99eb8e4779ae4214 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 11 Dec 2023 11:55:43 +0200 Subject: [PATCH 09/28] Fix typo in cfg --- crates/hir-expand/src/eager.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index ef7200f615ccf..1e2722e846463 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -73,7 +73,7 @@ pub fn expand_eager_macro_input( ) }; let err = parse_err.or(err); - if cfg!(debug) { + if cfg!(debug_assertions) { arg_map.finish(); } From be6d34b810141710f60a94959a8cc67adceb1fca Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 11 Dec 2023 14:50:41 +0200 Subject: [PATCH 10/28] Query for nearest parent block around the hint to resolve This way, parameter hints will be found for resolution --- crates/ide/src/inlay_hints.rs | 19 +++++++++++++++++-- crates/ide/src/inlay_hints/bind_pat.rs | 8 ++++++-- crates/ide/src/lib.rs | 4 ++-- crates/rust-analyzer/src/handlers/request.rs | 19 +++++-------------- 4 files changed, 30 insertions(+), 20 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index ca334e9157977..cdf83ff7d2832 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -422,6 +422,11 @@ fn ty_to_text_edit( Some(builder.finish()) } +pub enum RangeLimit { + Fixed(TextRange), + NearestParentBlock(TextSize), +} + // Feature: Inlay Hints // // rust-analyzer shows additional information inline with the source code. 
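// --- Illustrative sketch, not part of the patch: the intended use of the new
// --- `RangeLimit` above. `Analysis`, `InlayHintsConfig`, and the re-exports are
// --- assumed from the surrounding diff; error handling is elided.
fn demo(
    analysis: &ide::Analysis,
    config: &ide::InlayHintsConfig,
    file_id: ide::FileId,
) -> ide::Cancellable<()> {
    // Editor viewport: compute hints for exactly the visible range.
    let _visible = analysis.inlay_hints(
        config,
        file_id,
        Some(ide::RangeLimit::Fixed(ide::TextRange::new(0.into(), 100.into()))),
    )?;
    // Resolving a single hint: only the nearest enclosing block around the
    // hint's offset is traversed, replacing the old "position ± 1" range hack.
    let _resolved = analysis.inlay_hints(
        config,
        file_id,
        Some(ide::RangeLimit::NearestParentBlock(42.into())),
    )?;
    Ok(())
}
// --- end of sketch; the patch resumes below. ---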
@@ -443,7 +448,7 @@ fn ty_to_text_edit( pub(crate) fn inlay_hints( db: &RootDatabase, file_id: FileId, - range_limit: Option, + range_limit: Option, config: &InlayHintsConfig, ) -> Vec { let _p = profile::span("inlay_hints"); @@ -458,13 +463,23 @@ pub(crate) fn inlay_hints( let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node); match range_limit { - Some(range) => match file.covering_element(range) { + Some(RangeLimit::Fixed(range)) => match file.covering_element(range) { NodeOrToken::Token(_) => return acc, NodeOrToken::Node(n) => n .descendants() .filter(|descendant| range.intersect(descendant.text_range()).is_some()) .for_each(hints), }, + Some(RangeLimit::NearestParentBlock(position)) => { + match file + .token_at_offset(position) + .left_biased() + .and_then(|token| token.parent_ancestors().find_map(ast::BlockExpr::cast)) + { + Some(parent_block) => parent_block.syntax().descendants().for_each(hints), + None => return acc, + } + } None => file.descendants().for_each(hints), }; } diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index 680035c721b35..45b51e3557034 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -177,7 +177,11 @@ mod tests { use syntax::{TextRange, TextSize}; use test_utils::extract_annotations; - use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints}; + use crate::{ + fixture, + inlay_hints::{InlayHintsConfig, RangeLimit}, + ClosureReturnTypeHints, + }; use crate::inlay_hints::tests::{ check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG, @@ -400,7 +404,7 @@ fn main() { .inlay_hints( &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, file_id, - Some(TextRange::new(TextSize::from(500), TextSize::from(600))), + Some(RangeLimit::Fixed(TextRange::new(TextSize::from(500), TextSize::from(600)))), ) .unwrap(); let actual = diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index d8f6e4e1b1b15..e3548f3f0cbf4 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -94,7 +94,7 @@ pub use crate::{ inlay_hints::{ AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition, - InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, + InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, RangeLimit, }, join_lines::JoinLinesConfig, markup::Markup, @@ -397,7 +397,7 @@ impl Analysis { &self, config: &InlayHintsConfig, file_id: FileId, - range: Option, + range: Option, ) -> Cancellable> { self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config)) } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 57955ebf897e3..6ec9fe153b92b 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -12,8 +12,8 @@ use anyhow::Context; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange, - HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory, - Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, + HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit, + ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; use ide_db::SymbolKind; use lsp_server::ErrorCode; @@ -1409,7 +1409,7 @@ pub(crate) fn handle_inlay_hints( let 
inlay_hints_config = snap.config.inlay_hints(); Ok(Some( snap.analysis - .inlay_hints(&inlay_hints_config, file_id, Some(range))? + .inlay_hints(&inlay_hints_config, file_id, Some(RangeLimit::Fixed(range)))? .into_iter() .map(|it| { to_proto::inlay_hint( @@ -1440,22 +1440,13 @@ pub(crate) fn handle_inlay_hints_resolve( anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data"); let line_index = snap.file_line_index(file_id)?; - let range = from_proto::text_range( - &line_index, - lsp_types::Range { start: original_hint.position, end: original_hint.position }, - )?; - let range_start = range.start(); - let range_end = range.end(); - let large_range = TextRange::new( - range_start.checked_sub(1.into()).unwrap_or(range_start), - range_end.checked_add(1.into()).unwrap_or(range_end), - ); + let hint_position = from_proto::offset(&line_index, original_hint.position)?; let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(); forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty(); let resolve_hints = snap.analysis.inlay_hints( &forced_resolve_inlay_hints_config, file_id, - Some(large_range), + Some(RangeLimit::NearestParentBlock(hint_position)), )?; let mut resolved_hints = resolve_hints From 8ae42b55e79a127c811d85411ecea204b9529091 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 11 Dec 2023 15:16:55 +0200 Subject: [PATCH 11/28] Search for parent blocks and items when resolving inlay hints --- crates/ide/src/inlay_hints.rs | 24 +++++++++++++------- crates/rust-analyzer/src/handlers/request.rs | 2 +- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index cdf83ff7d2832..e82d730e4a367 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -424,7 +424,7 @@ fn ty_to_text_edit( pub enum RangeLimit { Fixed(TextRange), - NearestParentBlock(TextSize), + NearestParent(TextSize), } // Feature: Inlay Hints @@ -470,13 +470,21 @@ pub(crate) fn inlay_hints( .filter(|descendant| range.intersect(descendant.text_range()).is_some()) .for_each(hints), }, - Some(RangeLimit::NearestParentBlock(position)) => { - match file - .token_at_offset(position) - .left_biased() - .and_then(|token| token.parent_ancestors().find_map(ast::BlockExpr::cast)) - { - Some(parent_block) => parent_block.syntax().descendants().for_each(hints), + Some(RangeLimit::NearestParent(position)) => { + match file.token_at_offset(position).left_biased() { + Some(token) => { + if let Some(parent_block) = + token.parent_ancestors().find_map(ast::BlockExpr::cast) + { + parent_block.syntax().descendants().for_each(hints) + } else if let Some(parent_item) = + token.parent_ancestors().find_map(ast::Item::cast) + { + parent_item.syntax().descendants().for_each(hints) + } else { + return acc; + } + } None => return acc, } } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 6ec9fe153b92b..d8a590c808849 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -1446,7 +1446,7 @@ pub(crate) fn handle_inlay_hints_resolve( let resolve_hints = snap.analysis.inlay_hints( &forced_resolve_inlay_hints_config, file_id, - Some(RangeLimit::NearestParentBlock(hint_position)), + Some(RangeLimit::NearestParent(hint_position)), )?; let mut resolved_hints = resolve_hints From 18591ae5c8f1f8f649cab46193a226e740545c68 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 11 Dec 2023 18:37:12 +0100 
Subject: [PATCH 12/28] Always render the path to be imported in the completion detail --- crates/ide-completion/src/item.rs | 12 +++--- crates/ide-completion/src/snippet.rs | 2 +- crates/ide-completion/src/tests/flyimport.rs | 26 ++++++------ crates/ide-db/src/imports/import_assets.rs | 42 ++++++-------------- 4 files changed, 31 insertions(+), 51 deletions(-) diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index b982322a73499..de41a5bd70c4d 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -458,13 +458,11 @@ impl Builder { } if let [import_edit] = &*self.imports_to_add { // snippets can have multiple imports, but normal completions only have up to one - if let Some(original_path) = import_edit.original_path.as_ref() { - label_detail.replace(SmolStr::from(format!( - "{} (use {})", - label_detail.as_deref().unwrap_or_default(), - original_path.display(db) - ))); - } + label_detail.replace(SmolStr::from(format!( + "{} (use {})", + label_detail.as_deref().unwrap_or_default(), + import_edit.import_path.display(db) + ))); } else if let Some(trait_name) = self.trait_name { label_detail.replace(SmolStr::from(format!( "{} (as {trait_name})", diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs index 50618296ee437..e667e2e01683b 100644 --- a/crates/ide-completion/src/snippet.rs +++ b/crates/ide-completion/src/snippet.rs @@ -181,7 +181,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option 1).then(|| LocatedImport::new(path.clone(), item, item, None))) + Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item))) }; let mut res = Vec::with_capacity(requires.len()); for import in requires { diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index 9a4a94a24566c..c58374f2e8339 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -597,8 +597,8 @@ fn main() { } "#, expect![[r#" - ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED + ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED "#]], ); } @@ -717,7 +717,7 @@ fn main() { check( fixture, expect![[r#" - st Item (use foo::bar::baz::Item) Item + st Item (use foo::bar) Item "#]], ); @@ -725,19 +725,19 @@ fn main() { "Item", fixture, r#" - use foo::bar; +use foo::bar; - mod foo { - pub mod bar { - pub mod baz { - pub struct Item; - } - } +mod foo { + pub mod bar { + pub mod baz { + pub struct Item; } + } +} - fn main() { - bar::baz::Item - }"#, +fn main() { + bar::baz::Item +}"#, ); } @@ -803,7 +803,7 @@ fn main() { check( fixture, expect![[r#" - ct TEST_ASSOC (use foo::bar::Item) usize + ct TEST_ASSOC (use foo::bar) usize "#]], ); diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs index 04263d15d0a5a..e92af1298003c 100644 --- a/crates/ide-db/src/imports/import_assets.rs +++ b/crates/ide-db/src/imports/import_assets.rs @@ -195,18 +195,11 @@ pub struct LocatedImport { /// the original item is the associated constant, but the import has to be a trait that /// defines this constant. pub original_item: ItemInNs, - /// A path of the original item. 
- pub original_path: Option, } impl LocatedImport { - pub fn new( - import_path: ModPath, - item_to_import: ItemInNs, - original_item: ItemInNs, - original_path: Option, - ) -> Self { - Self { import_path, item_to_import, original_item, original_path } + pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self { + Self { import_path, item_to_import, original_item } } } @@ -351,7 +344,7 @@ fn path_applicable_imports( ) .filter_map(|item| { let mod_path = mod_path(item)?; - Some(LocatedImport::new(mod_path.clone(), item, item, Some(mod_path))) + Some(LocatedImport::new(mod_path, item, item)) }) .collect() } @@ -416,24 +409,15 @@ fn import_for_item( // especially in case of lazy completion edit resolutions. return None; } - (false, Some(trait_to_import)) => LocatedImport::new( - mod_path(trait_to_import)?, - trait_to_import, - original_item, - mod_path(original_item), - ), - (true, None) => LocatedImport::new( - import_path_candidate, - original_item_candidate, - original_item, - mod_path(original_item), - ), - (false, None) => LocatedImport::new( - mod_path(segment_import)?, - segment_import, - original_item, - mod_path(original_item), - ), + (false, Some(trait_to_import)) => { + LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item) + } + (true, None) => { + LocatedImport::new(import_path_candidate, original_item_candidate, original_item) + } + (false, None) => { + LocatedImport::new(mod_path(segment_import)?, segment_import, original_item) + } }) } @@ -550,7 +534,6 @@ fn trait_applicable_items( mod_path(trait_item)?, trait_item, original_item, - mod_path(original_item), )); } None::<()> @@ -573,7 +556,6 @@ fn trait_applicable_items( mod_path(trait_item)?, trait_item, original_item, - mod_path(original_item), )); } None::<()> From 801c0ea47808a241eaecc8ba81c6feb1df1af38d Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Mon, 11 Dec 2023 22:56:50 +0330 Subject: [PATCH 13/28] Replace `doc_comments_and_attrs` with `collect_attrs`, 2nd round --- crates/hir-def/src/child_by_source.rs | 7 ++-- crates/hir-expand/src/db.rs | 13 +++---- crates/hir-expand/src/lib.rs | 39 +++++++++---------- crates/hir/src/semantics.rs | 23 ++++++++--- .../src/handlers/unresolved_extern_crate.rs | 15 ------- .../src/handlers/unresolved_macro_call.rs | 12 ++++++ crates/syntax/src/ast/traits.rs | 3 -- 7 files changed, 57 insertions(+), 55 deletions(-) diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index 4cfd318a43377..c82d2347de5c6 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -5,8 +5,7 @@ //! node for a *child*, and get its hir. 
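// --- Illustrative sketch, not part of the patch: what `collect_attrs` (which
// --- replaces `doc_comments_and_attrs` in the changes below) yields. Doc
// --- comments and attributes are enumerated together, so `AttrId::ast_index()`
// --- keeps lining up with the stored `(attr_id, ..)` pairs. `dump_attrs` is a
// --- hypothetical helper written for this sketch.
fn dump_attrs(adt: &ast::Adt) {
    for (attr_id, attr_or_comment) in collect_attrs(adt) {
        match attr_or_comment {
            // A real attribute such as `#[derive(..)]`.
            Either::Left(attr) => eprintln!("{}: {attr}", attr_id.ast_index()),
            // A doc comment, which occupies an attribute slot as well.
            Either::Right(comment) => eprintln!("{}: {}", attr_id.ast_index(), comment.text()),
        }
    }
}
// --- end of sketch; the patch resumes below. ---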
use either::Either; -use hir_expand::HirFileId; -use syntax::ast::HasDocComments; +use hir_expand::{attrs::collect_attrs, HirFileId}; use crate::{ db::DefDatabase, @@ -118,8 +117,8 @@ impl ChildBySource for ItemScope { |(ast_id, calls)| { let adt = ast_id.to_node(db.upcast()); calls.for_each(|(attr_id, call_id, calls)| { - if let Some(Either::Left(attr)) = - adt.doc_comments_and_attrs().nth(attr_id.ast_index()) + if let Some((_, Either::Left(attr))) = + collect_attrs(&adt).nth(attr_id.ast_index()) { res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into())); } diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index d2c6559b06b16..32baa6694b4df 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -10,14 +10,14 @@ use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; use syntax::{ - ast::{self, HasAttrs, HasDocComments}, + ast::{self, HasAttrs}, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, }; use triomphe::Arc; use crate::{ ast_id_map::AstIdMap, - attrs::RawAttrs, + attrs::{collect_attrs, RawAttrs}, builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, fixup::{self, SyntaxFixupUndoInfo}, @@ -216,9 +216,9 @@ pub fn expand_speculative( // Attributes may have an input token tree, build the subtree and map for this as well // then try finding a token id for our token if it is inside this input subtree. let item = ast::Item::cast(speculative_args.clone())?; - item.doc_comments_and_attrs() + collect_attrs(&item) .nth(invoc_attr_index.ast_index()) - .and_then(Either::left) + .and_then(|x| Either::left(x.1)) }?; match attr.token_tree() { Some(token_tree) => { @@ -479,10 +479,9 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet return None, MacroCallKind::Attr { invoc_attr_index, .. } => { cov_mark::hit!(attribute_macro_attr_censoring); - ast::Item::cast(node.clone())? - .doc_comments_and_attrs() + collect_attrs(&ast::Item::cast(node.clone())?) .nth(invoc_attr_index.ast_index()) - .and_then(Either::left) + .and_then(|x| Either::left(x.1)) .map(|attr| attr.syntax().clone()) .into_iter() .collect() diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 74089593ac035..f5e9cd33f2b66 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -22,6 +22,7 @@ pub mod span; pub mod files; mod fixup; +use attrs::collect_attrs; use triomphe::Arc; use std::{fmt, hash::Hash}; @@ -32,7 +33,7 @@ use base_db::{ }; use either::Either; use syntax::{ - ast::{self, AstNode, HasDocComments}, + ast::{self, AstNode}, SyntaxNode, SyntaxToken, TextRange, TextSize, }; @@ -438,9 +439,9 @@ impl MacroCallLoc { MacroCallKind::Derive { ast_id, derive_attr_index, .. 
} => { // FIXME: handle `cfg_attr` ast_id.with_value(ast_id.to_node(db)).map(|it| { - it.doc_comments_and_attrs() + collect_attrs(&it) .nth(derive_attr_index.ast_index()) - .and_then(|it| match it { + .and_then(|it| match it.1 { Either::Left(attr) => Some(attr.syntax().clone()), Either::Right(_) => None, }) @@ -451,9 +452,9 @@ impl MacroCallLoc { if self.def.is_attribute_derive() { // FIXME: handle `cfg_attr` ast_id.with_value(ast_id.to_node(db)).map(|it| { - it.doc_comments_and_attrs() + collect_attrs(&it) .nth(invoc_attr_index.ast_index()) - .and_then(|it| match it { + .and_then(|it| match it.1 { Either::Left(attr) => Some(attr.syntax().clone()), Either::Right(_) => None, }) @@ -549,24 +550,24 @@ impl MacroCallKind { MacroCallKind::Derive { ast_id, derive_attr_index, .. } => { // FIXME: should be the range of the macro name, not the whole derive // FIXME: handle `cfg_attr` - ast_id - .to_node(db) - .doc_comments_and_attrs() + collect_attrs(&ast_id.to_node(db)) .nth(derive_attr_index.ast_index()) .expect("missing derive") + .1 .expect_left("derive is a doc comment?") .syntax() .text_range() } // FIXME: handle `cfg_attr` - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id - .to_node(db) - .doc_comments_and_attrs() - .nth(invoc_attr_index.ast_index()) - .expect("missing attribute") - .expect_left("attribute macro is a doc comment?") - .syntax() - .text_range(), + MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + collect_attrs(&ast_id.to_node(db)) + .nth(invoc_attr_index.ast_index()) + .expect("missing attribute") + .1 + .expect_left("attribute macro is a doc comment?") + .syntax() + .text_range() + } }; FileRange { range, file_id } @@ -737,11 +738,9 @@ impl ExpansionInfo { let attr_input_or_mac_def = def.or_else(|| match loc.kind { MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { // FIXME: handle `cfg_attr` - let tt = ast_id - .to_node(db) - .doc_comments_and_attrs() + let tt = collect_attrs(&ast_id.to_node(db)) .nth(invoc_attr_index.ast_index()) - .and_then(Either::left)? + .and_then(|x| Either::left(x.1))? .token_tree()?; Some(InFile::new(ast_id.file_id, tt)) } diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 92fa76c96fbdb..dcf8ba27a68a0 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -20,8 +20,8 @@ use hir_def::{ AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, }; use hir_expand::{ - db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId, - MacroFileId, MacroFileIdExt, + attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, + InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; @@ -29,7 +29,7 @@ use smallvec::{smallvec, SmallVec}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _}, + ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, }; @@ -673,11 +673,22 @@ impl<'db> SemanticsImpl<'db> { } _ => 0, }; + // FIXME: here, the attribute's text range is used to strip away all + // entries from the start of the attribute "list" up the the invoking + // attribute. 
But in + // ``` + // mod foo { + // #![inner] + // } + // ``` + // we don't wanna strip away stuff in the `mod foo {` range, that is + // here if the id corresponds to an inner attribute we got strip all + // text ranges of the outer ones, and then all of the inner ones up + // to the invoking attribute so that the inbetween is ignored. let text_range = item.syntax().text_range(); - let start = item - .doc_comments_and_attrs() + let start = collect_attrs(&item) .nth(attr_id) - .map(|attr| match attr { + .map(|attr| match attr.1 { Either::Left(it) => it.syntax().text_range().start(), Either::Right(it) => it.syntax().text_range().start(), }) diff --git a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs index 71c501a336b1e..f8265b63275fc 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs @@ -44,21 +44,6 @@ extern crate core; extern crate self as foo; struct Foo; use foo::Foo as Bar; -"#, - ); - } - - #[test] - fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() { - check_diagnostics( - r#" -//- /lib.rs - #[macro_use] extern crate doesnotexist; -//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate - mod _test_inner { - #![empty_attr] - //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr` - } "#, ); } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 33e7c2e37c3b0..c8ff54cba3a74 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -67,6 +67,18 @@ macro_rules! m { () => {} } } self::m!(); self::m2!(); //^^ error: unresolved macro `self::m2!` +"#, + ); + } + + #[test] + fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() { + check_diagnostics( + r#" + mod _test_inner { + #![empty_attr] + //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr` + } "#, ); } diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs index 3e43df2d0d52b..16f7356b1e3db 100644 --- a/crates/syntax/src/ast/traits.rs +++ b/crates/syntax/src/ast/traits.rs @@ -76,9 +76,6 @@ pub trait HasDocComments: HasAttrs { fn doc_comments(&self) -> DocCommentIter { DocCommentIter { iter: self.syntax().children_with_tokens() } } - fn doc_comments_and_attrs(&self) -> AttrDocCommentIter { - AttrDocCommentIter { iter: self.syntax().children_with_tokens() } - } } impl DocCommentIter { From 54acf874a6f67189c2edc25a17d21d19f745fbe6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 12 Dec 2023 12:26:01 +0200 Subject: [PATCH 14/28] Fix formatting --- crates/proc-macro-api/src/version.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs index 87118a62650cc..5ff1f36c545e6 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-api/src/version.rs @@ -131,7 +131,7 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { let len_bytes = &dot_rustc[8..16]; let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize; (&dot_rustc[16..data_len + 12], 17) - } + } _ => { return Err(io::Error::new( io::ErrorKind::InvalidData, From 34ec665ba1d39be2dd01775b96b63344cb38a57d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 12 Dec 2023 11:35:34 +0100 Subject: [PATCH 
15/28] Simplify and improve perf of import_assets::import_for_item --- crates/ide-db/src/imports/import_assets.rs | 160 +++++++++++---------- crates/syntax/src/ast/node_ext.rs | 13 ++ crates/syntax/src/utils.rs | 42 +----- 3 files changed, 99 insertions(+), 116 deletions(-) diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs index e92af1298003c..a4f0a6df7813f 100644 --- a/crates/ide-db/src/imports/import_assets.rs +++ b/crates/ide-db/src/imports/import_assets.rs @@ -1,14 +1,14 @@ //! Look up accessible paths for items. + use hir::{ - AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef, + AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type, }; -use itertools::Itertools; -use rustc_hash::FxHashSet; +use itertools::{EitherOrBoth, Itertools}; +use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{ ast::{self, make, HasName}, - utils::path_to_string_stripping_turbo_fish, - AstNode, SyntaxNode, + AstNode, SmolStr, SyntaxNode, }; use crate::{ @@ -51,18 +51,11 @@ pub struct TraitImportCandidate { #[derive(Debug)] pub struct PathImportCandidate { /// Optional qualifier before name. - pub qualifier: Option, + pub qualifier: Option>, /// The name the item (struct, trait, enum, etc.) should have. pub name: NameToImport, } -/// A qualifier that has a first segment and it's unresolved. -#[derive(Debug)] -pub struct FirstSegmentUnresolved { - fist_segment: ast::NameRef, - full_qualifier: ast::Path, -} - /// A name that will be used during item lookups. #[derive(Debug, Clone)] pub enum NameToImport { @@ -348,60 +341,71 @@ fn path_applicable_imports( }) .collect() } - Some(first_segment_unresolved) => { - let unresolved_qualifier = - path_to_string_stripping_turbo_fish(&first_segment_unresolved.full_qualifier); - let unresolved_first_segment = first_segment_unresolved.fist_segment.text(); - items_locator::items_with_name( - sema, - current_crate, - path_candidate.name.clone(), - AssocSearchMode::Include, - Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), - ) - .filter_map(|item| { - import_for_item( - sema.db, - mod_path, - &unresolved_first_segment, - &unresolved_qualifier, - item, - ) - }) - .collect() - } + Some(qualifier) => items_locator::items_with_name( + sema, + current_crate, + path_candidate.name.clone(), + AssocSearchMode::Include, + Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), + ) + .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item)) + .collect(), } } fn import_for_item( db: &RootDatabase, mod_path: impl Fn(ItemInNs) -> Option, - unresolved_first_segment: &str, - unresolved_qualifier: &str, + unresolved_qualifier: &[SmolStr], original_item: ItemInNs, ) -> Option { let _p = profile::span("import_assets::import_for_item"); + let [first_segment, ..] 
= unresolved_qualifier else { return None }; - let original_item_candidate = item_for_path_search(db, original_item)?; + let item_as_assoc = item_as_assoc(db, original_item); + + let (original_item_candidate, trait_item_to_import) = match item_as_assoc { + Some(assoc_item) => match assoc_item.container(db) { + AssocItemContainer::Trait(trait_) => { + let trait_ = ItemInNs::from(ModuleDef::from(trait_)); + (trait_, Some(trait_)) + } + AssocItemContainer::Impl(impl_) => { + (ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None) + } + }, + None => (original_item, None), + }; let import_path_candidate = mod_path(original_item_candidate)?; - let import_path_string = import_path_candidate.display(db).to_string(); - let expected_import_end = if item_as_assoc(db, original_item).is_some() { - unresolved_qualifier.to_string() - } else { - format!("{unresolved_qualifier}::{}", item_name(db, original_item)?.display(db)) + let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev(); + let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it { + // segments match, check next one + EitherOrBoth::Both(a, b) if b.as_str() == Some(&**a) => None, + // segments mismatch / qualifier is longer than the path, bail out + EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false), + // all segments match and we have exhausted the qualifier, proceed + EitherOrBoth::Right(_) => Some(true), }; - if !import_path_string.contains(unresolved_first_segment) - || !import_path_string.ends_with(&expected_import_end) - { + if item_as_assoc.is_none() { + let item_name = item_name(db, original_item)?.as_text()?; + let last_segment = import_path_candidate_segments.next()?; + if last_segment.as_str() != Some(&*item_name) { + return None; + } + } + let ends_with = unresolved_qualifier + .iter() + .rev() + .zip_longest(import_path_candidate_segments) + .find_map(predicate) + .unwrap_or(true); + if !ends_with { return None; } - let segment_import = - find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?; - let trait_item_to_import = item_as_assoc(db, original_item) - .and_then(|assoc| assoc.containing_trait(db)) - .map(|trait_| ItemInNs::from(ModuleDef::from(trait_))); + let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?; + Some(match (segment_import == original_item_candidate, trait_item_to_import) { (true, Some(_)) => { // FIXME we should be able to import both the trait and the segment, @@ -424,18 +428,22 @@ fn import_for_item( pub fn item_for_path_search(db: &RootDatabase, item: ItemInNs) -> Option { Some(match item { ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) { - Some(assoc_item) => match assoc_item.container(db) { - AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)), - AssocItemContainer::Impl(impl_) => { - ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)) - } - }, + Some(assoc_item) => item_for_path_search_assoc(db, assoc_item)?, None => item, }, ItemInNs::Macros(_) => item, }) } +fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Option { + Some(match assoc_item.container(db) { + AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)), + AssocItemContainer::Impl(impl_) => { + ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)) + } + }) +} + fn find_import_for_segment( db: &RootDatabase, original_item: ItemInNs, @@ -512,6 +520,7 @@ fn trait_applicable_items( .collect(); let mut located_imports = 
FxHashSet::default(); + let mut trait_import_paths = FxHashMap::default(); if trait_assoc_item { trait_candidate.receiver_ty.iterate_path_candidates( @@ -529,11 +538,14 @@ fn trait_applicable_items( } let located_trait = assoc.containing_trait(db)?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); - let original_item = assoc_to_item(assoc); + let import_path = trait_import_paths + .entry(trait_item) + .or_insert_with(|| mod_path(trait_item)) + .clone()?; located_imports.insert(LocatedImport::new( - mod_path(trait_item)?, + import_path, trait_item, - original_item, + assoc_to_item(assoc), )); } None::<()> @@ -551,11 +563,14 @@ fn trait_applicable_items( if required_assoc_items.contains(&assoc) { let located_trait = assoc.containing_trait(db)?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); - let original_item = assoc_to_item(assoc); + let import_path = trait_import_paths + .entry(trait_item) + .or_insert_with(|| mod_path(trait_item)) + .clone()?; located_imports.insert(LocatedImport::new( - mod_path(trait_item)?, + import_path, trait_item, - original_item, + assoc_to_item(assoc), )); } None::<()> @@ -653,18 +668,13 @@ fn path_import_candidate( Some(match qualifier { Some(qualifier) => match sema.resolve_path(&qualifier) { None => { - let qualifier_start = - qualifier.syntax().descendants().find_map(ast::NameRef::cast)?; - let qualifier_start_path = - qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?; - if sema.resolve_path(&qualifier_start_path).is_none() { - ImportCandidate::Path(PathImportCandidate { - qualifier: Some(FirstSegmentUnresolved { - fist_segment: qualifier_start, - full_qualifier: qualifier, - }), - name, - }) + if qualifier.first_qualifier().map_or(true, |it| sema.resolve_path(&it).is_none()) { + let mut qualifier = qualifier + .segments_of_this_path_only_rev() + .map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text()))) + .collect::>>()?; + qualifier.reverse(); + ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name }) } else { return None; } diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index be5b954ad345f..f81dff8840ccc 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -275,10 +275,19 @@ impl ast::Path { successors(Some(self.clone()), ast::Path::qualifier).last().unwrap() } + pub fn first_qualifier(&self) -> Option { + successors(self.qualifier(), ast::Path::qualifier).last() + } + pub fn first_segment(&self) -> Option { self.first_qualifier_or_self().segment() } + // FIXME: Check usages of Self::segments, they might be wrong because of the logic of the bloew function + pub fn segments_of_this_path_only_rev(&self) -> impl Iterator + Clone { + self.qualifiers_and_self().filter_map(|it| it.segment()) + } + pub fn segments(&self) -> impl Iterator + Clone { successors(self.first_segment(), |p| { p.parent_path().parent_path().and_then(|p| p.segment()) @@ -289,6 +298,10 @@ impl ast::Path { successors(self.qualifier(), |p| p.qualifier()) } + pub fn qualifiers_and_self(&self) -> impl Iterator + Clone { + successors(Some(self.clone()), |p| p.qualifier()) + } + pub fn top_path(&self) -> ast::Path { let mut this = self.clone(); while let Some(path) = this.parent_path() { diff --git a/crates/syntax/src/utils.rs b/crates/syntax/src/utils.rs index 25f34ea9d397d..a38f8b2b55d4e 100644 --- a/crates/syntax/src/utils.rs +++ b/crates/syntax/src/utils.rs @@ -1,48 +1,8 @@ //! 
diff --git a/crates/syntax/src/utils.rs b/crates/syntax/src/utils.rs
index 25f34ea9d397d..a38f8b2b55d4e 100644
--- a/crates/syntax/src/utils.rs
+++ b/crates/syntax/src/utils.rs
@@ -1,48 +1,8 @@
 //! A set of utils methods to reuse on other abstraction levels
 
-use itertools::Itertools;
-
-use crate::{ast, match_ast, AstNode, SyntaxKind};
-
-pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
-    path.syntax()
-        .children()
-        .filter_map(|node| {
-            match_ast! {
-                match node {
-                    ast::PathSegment(it) => {
-                        Some(it.name_ref()?.to_string())
-                    },
-                    ast::Path(it) => {
-                        Some(path_to_string_stripping_turbo_fish(&it))
-                    },
-                    _ => None,
-                }
-            }
-        })
-        .join("::")
-}
+use crate::SyntaxKind;
 
 pub fn is_raw_identifier(name: &str) -> bool {
     let is_keyword = SyntaxKind::from_keyword(name).is_some();
     is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
 }
-
-#[cfg(test)]
-mod tests {
-    use super::path_to_string_stripping_turbo_fish;
-    use crate::ast::make;
-
-    #[test]
-    fn turbofishes_are_stripped() {
-        assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
-        assert_eq!(
-            "Vec::new",
-            path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
-        );
-        assert_eq!(
-            "Vec::new",
-            path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
-        );
-    }
-}
From 1604ad1a6d7918d96db1e51f2a8ed3902d436cd1 Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Tue, 12 Dec 2023 12:03:37 +0100
Subject: [PATCH 16/28] Bump DEFAULT_QUERY_SEARCH_LIMIT from 40 to 100

---
 crates/hir/src/symbols.rs                     | 44 +++++++++++--------
 crates/ide-db/src/items_locator.rs            |  8 ++--
 crates/ide-db/src/symbol_index.rs             |  2 +-
 .../ide-db/src/test_data/test_doc_alias.txt   |  7 +++
 .../test_symbol_index_collection.txt          | 29 ++++++++++++
 5 files changed, 66 insertions(+), 24 deletions(-)

diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index 03112f6de5afe..a2a30edeb0397 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -23,6 +23,7 @@ pub struct FileSymbol {
     pub loc: DeclarationLocation,
     pub container_name: Option<SmolStr>,
     pub is_alias: bool,
+    pub is_assoc: bool,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -121,34 +122,34 @@ impl<'a> SymbolCollector<'a> {
         match module_def_id {
             ModuleDefId::ModuleId(id) => self.push_module(id),
             ModuleDefId::FunctionId(id) => {
-                self.push_decl(id);
+                self.push_decl(id, false);
                 self.collect_from_body(id);
             }
-            ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id),
-            ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id),
-            ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id),
+            ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
+            ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
+            ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
             ModuleDefId::ConstId(id) => {
-                self.push_decl(id);
+                self.push_decl(id, false);
                 self.collect_from_body(id);
             }
             ModuleDefId::StaticId(id) => {
-                self.push_decl(id);
+                self.push_decl(id, false);
                 self.collect_from_body(id);
             }
             ModuleDefId::TraitId(id) => {
-                self.push_decl(id);
+                self.push_decl(id, false);
                 self.collect_from_trait(id);
             }
             ModuleDefId::TraitAliasId(id) => {
-                self.push_decl(id);
+                self.push_decl(id, false);
             }
             ModuleDefId::TypeAliasId(id) => {
-                self.push_decl(id);
+                self.push_decl(id, false);
             }
             ModuleDefId::MacroId(id) => match id {
-                MacroId::Macro2Id(id) => self.push_decl(id),
-                MacroId::MacroRulesId(id) => self.push_decl(id),
-                MacroId::ProcMacroId(id) => self.push_decl(id),
+                MacroId::Macro2Id(id) => self.push_decl(id, false),
+                MacroId::MacroRulesId(id) => self.push_decl(id, false),
+                MacroId::ProcMacroId(id) => self.push_decl(id, false),
             },
             // Don't index these.
            ModuleDefId::BuiltinType(_) => {}
@@ -190,6 +191,7 @@ impl<'a> SymbolCollector<'a> {
                     container_name: self.current_container_name.clone(),
                     loc: dec_loc,
                     is_alias: false,
+                    is_assoc: false,
                 });
             });
     }
@@ -202,9 +204,9 @@ impl<'a> SymbolCollector<'a> {
         for &id in id {
             if id.module(self.db.upcast()) == module_id {
                 match id {
-                    MacroId::Macro2Id(id) => self.push_decl(id),
-                    MacroId::MacroRulesId(id) => self.push_decl(id),
-                    MacroId::ProcMacroId(id) => self.push_decl(id),
+                    MacroId::Macro2Id(id) => self.push_decl(id, false),
+                    MacroId::MacroRulesId(id) => self.push_decl(id, false),
+                    MacroId::ProcMacroId(id) => self.push_decl(id, false),
                 }
             }
         }
@@ -266,13 +268,13 @@ impl<'a> SymbolCollector<'a> {
     fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
         match assoc_item_id {
-            AssocItemId::FunctionId(id) => self.push_decl(id),
-            AssocItemId::ConstId(id) => self.push_decl(id),
-            AssocItemId::TypeAliasId(id) => self.push_decl(id),
+            AssocItemId::FunctionId(id) => self.push_decl(id, true),
+            AssocItemId::ConstId(id) => self.push_decl(id, true),
+            AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
         }
     }
 
-    fn push_decl<L>(&mut self, id: L)
+    fn push_decl<L>(&mut self, id: L, is_assoc: bool)
     where
         L: Lookup + Into<ModuleDefId>,
         <L as Lookup>::Data: HasSource,
@@ -296,6 +298,7 @@ impl<'a> SymbolCollector<'a> {
                     loc: dec_loc.clone(),
                     container_name: self.current_container_name.clone(),
                     is_alias: true,
+                    is_assoc,
                 });
             }
         }
@@ -306,6 +309,7 @@ impl<'a> SymbolCollector<'a> {
             container_name: self.current_container_name.clone(),
             loc: dec_loc,
             is_alias: false,
+            is_assoc,
         });
     }
 
@@ -331,6 +335,7 @@ impl<'a> SymbolCollector<'a> {
                     loc: dec_loc.clone(),
                     container_name: self.current_container_name.clone(),
                     is_alias: true,
+                    is_assoc: false,
                 });
             }
         }
@@ -341,6 +346,7 @@ impl<'a> SymbolCollector<'a> {
             container_name: self.current_container_name.clone(),
             loc: dec_loc,
             is_alias: false,
+            is_assoc: false,
         });
     }
 }
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 67ed44f08b7f7..022b167d492e5 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -3,13 +3,13 @@
 //! The main reason for this module to exist is the fact that project's items and dependencies' items
 //! are located in different caches, with different APIs.
 use either::Either;
-use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics};
+use hir::{import_map, Crate, ItemInNs, Semantics};
 use limit::Limit;
 
 use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
 
 /// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40); +pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100); pub use import_map::AssocSearchMode; @@ -101,8 +101,8 @@ fn find_items<'a>( .into_iter() .filter(move |candidate| match assoc_item_search { AssocSearchMode::Include => true, - AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(), - AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(), + AssocSearchMode::Exclude => !candidate.is_assoc, + AssocSearchMode::AssocItemsOnly => candidate.is_assoc, }) .map(|local_candidate| match local_candidate.def { hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index 3e89159c2c6e5..be8566b759cf3 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -50,7 +50,7 @@ enum SearchMode { Prefix, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Query { query: String, lowercased: String, diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt index 4a72881fe5e4a..7c01ac0693959 100644 --- a/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/crates/ide-db/src/test_data/test_doc_alias.txt @@ -36,6 +36,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "Struct", @@ -65,6 +66,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "mul1", @@ -94,6 +96,7 @@ }, container_name: None, is_alias: true, + is_assoc: false, }, FileSymbol { name: "mul2", @@ -123,6 +126,7 @@ }, container_name: None, is_alias: true, + is_assoc: false, }, FileSymbol { name: "s1", @@ -152,6 +156,7 @@ }, container_name: None, is_alias: true, + is_assoc: false, }, FileSymbol { name: "s1", @@ -181,6 +186,7 @@ }, container_name: None, is_alias: true, + is_assoc: false, }, FileSymbol { name: "s2", @@ -210,6 +216,7 @@ }, container_name: None, is_alias: true, + is_assoc: false, }, ], ), diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index da1f3167d7d49..c9875c7f8f29c 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -34,6 +34,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "CONST", @@ -61,6 +62,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "CONST_WITH_INNER", @@ -88,6 +90,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "Enum", @@ -117,6 +120,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "ItemLikeMacro", @@ -146,6 +150,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "Macro", @@ -175,6 +180,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "STATIC", @@ -202,6 +208,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "Struct", @@ -231,6 +238,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "StructFromMacro", @@ -260,6 +268,7 @@ }, container_name: None, is_alias: false, + is_assoc: false, }, FileSymbol { name: "StructInFn", @@ -291,6 +300,7 @@ "main", ), is_alias: false, + is_assoc: false, }, FileSymbol { name: "StructInNamedConst", @@ -322,6 +332,7 @@ "CONST_WITH_INNER", ), is_alias: false, + 
is_assoc: false,
                },
                FileSymbol {
                    name: "StructInUnnamedConst",
@@ -351,6 +362,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "Trait",
@@ -378,6 +390,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "Trait",
@@ -407,6 +420,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "Union",
@@ -436,6 +450,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "a_mod",
@@ -465,6 +480,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "b_mod",
@@ -494,6 +510,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "define_struct",
@@ -523,6 +540,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "impl_fn",
@@ -550,6 +568,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: true,
                },
                FileSymbol {
                    name: "macro_rules_macro",
@@ -579,6 +598,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "main",
@@ -606,6 +626,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "really_define_struct",
@@ -635,6 +656,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "trait_fn",
@@ -664,6 +686,7 @@
                        "Trait",
                    ),
                    is_alias: false,
+                    is_assoc: true,
                },
            ],
        ),
@@ -704,6 +727,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
            ],
        ),
@@ -744,6 +768,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "StructInModB",
@@ -773,6 +798,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "SuperItemLikeMacro",
@@ -802,6 +828,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "ThisStruct",
@@ -831,6 +858,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
                FileSymbol {
                    name: "ThisStruct",
@@ -860,6 +888,7 @@
                    },
                    container_name: None,
                    is_alias: false,
+                    is_assoc: false,
                },
            ],
        ),
From ca995d765d37cdcd18705ecdbbe712370f0f6bc4 Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Tue, 12 Dec 2023 15:44:27 +0100
Subject: [PATCH 17/28] fix: Fix `import_map::search_dependencies` getting
 confused by assoc and non assoc items with the same name

---
 crates/hir-def/src/import_map.rs              | 87 ++++++++++++-------
 crates/ide-db/src/items_locator.rs            |  3 +-
 .../src/integrated_benchmarks.rs              | 10 ++-
 3 files changed, 65 insertions(+), 35 deletions(-)

diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index 649ea13888dde..62ee45bff62b4 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -9,6 +9,7 @@ use indexmap::IndexMap;
 use itertools::Itertools;
 use rustc_hash::{FxHashSet, FxHasher};
 use smallvec::SmallVec;
+use stdx::format_to;
 use triomphe::Arc;
 
 use crate::{
@@ -53,13 +54,25 @@ pub struct ImportMap {
     fst: fst::Map<Vec<u8>>,
 }
 
-#[derive(Copy, Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
 enum IsTraitAssocItem {
     Yes,
     No,
 }
 
 impl ImportMap {
+    pub fn dump(&self, db: &dyn DefDatabase) -> String {
+        let mut out = String::new();
+        for (k, v) in self.map.iter() {
+            format_to!(out, "{:?} ({:?}) -> ", k, v.1);
+            for v in &v.0 {
+                format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
+            }
+            format_to!(out, "\n");
+        }
+        out
+    }
+
     pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
         let _p = profile::span("import_map_query");
 
@@
-68,26 +81,31 @@ impl ImportMap { let mut importables: Vec<_> = map .iter() // We've only collected items, whose name cannot be tuple field. - .flat_map(|(&item, (info, _))| { - info.iter() - .map(move |info| (item, info.name.as_str().unwrap().to_ascii_lowercase())) + .flat_map(|(&item, (info, is_assoc))| { + info.iter().map(move |info| { + (item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase()) + }) }) .collect(); - importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name)); + importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| { + lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc)) + }); importables.dedup(); // Build the FST, taking care not to insert duplicate values. let mut builder = fst::MapBuilder::memory(); - let iter = - importables.iter().enumerate().dedup_by(|(_, (_, lhs)), (_, (_, rhs))| lhs == rhs); - for (start_idx, (_, name)) in iter { + let iter = importables + .iter() + .enumerate() + .dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs); + for (start_idx, (_, _, name)) in iter { let _ = builder.insert(name, start_idx as u64); } Arc::new(ImportMap { map, fst: builder.into_map(), - importables: importables.into_iter().map(|(item, _)| item).collect(), + importables: importables.into_iter().map(|(item, _, _)| item).collect(), }) } @@ -332,20 +350,20 @@ impl Query { } /// Checks whether the import map entry matches the query. - fn import_matches( - &self, - db: &dyn DefDatabase, - import: &ImportInfo, - enforce_lowercase: bool, - ) -> bool { + fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool { let _p = profile::span("import_map::Query::import_matches"); // FIXME: Can we get rid of the alloc here? - let mut input = import.name.display(db.upcast()).to_string(); + let input = import.name.to_smol_str(); + let mut _s_slot; let case_insensitive = enforce_lowercase || !self.case_sensitive; - if case_insensitive { - input.make_ascii_lowercase(); - } + let input = if case_insensitive { + _s_slot = String::from(input); + _s_slot.make_ascii_lowercase(); + &*_s_slot + } else { + &*input + }; let query_string = if case_insensitive { &self.lowercased } else { &self.query }; @@ -355,7 +373,7 @@ impl Query { SearchMode::Fuzzy => { let mut input_chars = input.chars(); for query_char in query_string.chars() { - if input_chars.find(|&it| it == query_char).is_none() { + if !input_chars.any(|it| it == query_char) { return false; } } @@ -376,6 +394,7 @@ pub fn search_dependencies( let _p = profile::span("search_dependencies").detail(|| format!("{query:?}")); let graph = db.crate_graph(); + let import_maps: Vec<_> = graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect(); @@ -390,22 +409,28 @@ pub fn search_dependencies( let mut res = FxHashSet::default(); let mut common_importable_data_scratch = vec![]; + // FIXME: Improve this, its rather unreadable and does duplicate amount of work while let Some((_, indexed_values)) = stream.next() { for &IndexedValue { index, value } in indexed_values { let import_map = &import_maps[index]; - let importables @ [importable, ..] = &import_map.importables[value as usize..] 
else { + let importables = &import_map.importables[value as usize..]; + + // Find the first item in this group that has a matching assoc mode and slice the rest away + let Some(importable) = + importables.iter().position(|it| query.matches_assoc_mode(import_map.map[it].1)) + else { continue; }; - - let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable]; - if !query.matches_assoc_mode(is_trait_assoc_item) { + let importables @ [importable, ..] = &importables[importable..] else { continue; - } + }; + // Fetch all the known names of this importable item (to handle import aliases/renames) common_importable_data_scratch.extend( - importable_data + import_map.map[importable] + .0 .iter() - .filter(|&info| query.import_matches(db, info, true)) + .filter(|&info| query.import_matches(info, true)) // Name shared by the importable items in this group. .map(|info| info.name.to_smol_str()), ); @@ -419,6 +444,7 @@ pub fn search_dependencies( common_importable_data_scratch.drain(..).flat_map(|common_importable_name| { // Add the items from this name group. Those are all subsequent items in // `importables` whose name match `common_importable_name`. + importables .iter() .copied() @@ -434,11 +460,8 @@ pub fn search_dependencies( .filter(move |item| { !query.case_sensitive || { // we've already checked the common importables name case-insensitively - let &(ref import_infos, assoc_mode) = &import_map.map[item]; - query.matches_assoc_mode(assoc_mode) - && import_infos - .iter() - .any(|info| query.import_matches(db, info, false)) + let &(ref import_infos, _) = &import_map.map[item]; + import_infos.iter().any(|info| query.import_matches(info, false)) } }) }); diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs index 67ed44f08b7f7..15522d553c57d 100644 --- a/crates/ide-db/src/items_locator.rs +++ b/crates/ide-db/src/items_locator.rs @@ -36,7 +36,8 @@ pub fn items_with_name<'a>( NameToImport::Prefix(exact_name, case_sensitive) | NameToImport::Exact(exact_name, case_sensitive) => { let mut local_query = symbol_index::Query::new(exact_name.clone()); - let mut external_query = import_map::Query::new(exact_name); + let mut external_query = + import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search); if prefix { local_query.prefix(); external_query = external_query.prefix(); diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index ed2cf07551b0c..3d0ebf9bde615 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -32,7 +32,10 @@ fn integrated_highlighting_benchmark() { let workspace_to_load = project_root(); let file = "./crates/rust-analyzer/src/config.rs"; - let cargo_config = CargoConfig::default(); + let cargo_config = CargoConfig { + sysroot: Some(project_model::RustLibSource::Discover), + ..CargoConfig::default() + }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro_server: ProcMacroServerChoice::None, @@ -85,7 +88,10 @@ fn integrated_completion_benchmark() { let workspace_to_load = project_root(); let file = "./crates/hir/src/lib.rs"; - let cargo_config = CargoConfig::default(); + let cargo_config = CargoConfig { + sysroot: Some(project_model::RustLibSource::Discover), + ..CargoConfig::default() + }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro_server: ProcMacroServerChoice::None, From c30fd42685bf515b282f431e67d427649cbf9ae6 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 12 Dec 2023 16:52:18 +0200 Subject: [PATCH 18/28] Publish line-index --- Cargo.lock | 2 +- lib/line-index/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 46efbdd93c97f..87401d8b3dfd6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -891,7 +891,7 @@ dependencies = [ [[package]] name = "line-index" -version = "0.1.0" +version = "0.1.1" dependencies = [ "nohash-hasher", "text-size", diff --git a/lib/line-index/Cargo.toml b/lib/line-index/Cargo.toml index b7b4a01818ea7..494a7fa979a78 100644 --- a/lib/line-index/Cargo.toml +++ b/lib/line-index/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "line-index" -version = "0.1.0" +version = "0.1.1" description = "Maps flat `TextSize` offsets to/from `(line, column)` representation." license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index" From 3b23e9aacce54a38165b3d630f57275740154bfa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 12 Dec 2023 17:18:08 +0200 Subject: [PATCH 19/28] Bump line-index --- Cargo.lock | 8 ++++---- Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 87401d8b3dfd6..f94b855ca7d18 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -684,7 +684,7 @@ dependencies = [ "indexmap", "itertools", "limit", - "line-index 0.1.0-pre.1", + "line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "memchr", "nohash-hasher", "once_cell", @@ -881,9 +881,7 @@ version = "0.0.0" [[package]] name = "line-index" -version = "0.1.0-pre.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cad96769710c1745e11d4f940a8ff36000ade4bbada4285b001cb8aa2f745ce" +version = "0.1.1" dependencies = [ "nohash-hasher", "text-size", @@ -892,6 +890,8 @@ dependencies = [ [[package]] name = "line-index" version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67d61795376ae2683928c218fda7d7d7db136fd38c06b7552904667f0d55580a" dependencies = [ "nohash-hasher", "text-size", diff --git a/Cargo.toml b/Cargo.toml index f3f01aab8eee6..1213979c390f6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -85,7 +85,7 @@ rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" } proc-macro-test = { path = "./crates/proc-macro-test" } # In-tree crates that are published separately and follow semver. See lib/README.md -line-index = { version = "0.1.0-pre.1" } +line-index = { version = "0.1.1" } la-arena = { version = "0.3.1" } lsp-server = { version = "0.7.4" } From c209b5f97c35c6d5e9a0f6868edbed1665acbdc7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 12 Dec 2023 21:54:27 +0100 Subject: [PATCH 20/28] fix: Fix syntax bridge assigning invalid span to lifetime tokens --- crates/hir-expand/src/span.rs | 29 +++++++++++++++++++++-------- crates/mbe/src/syntax_bridge.rs | 21 +-------------------- 2 files changed, 22 insertions(+), 28 deletions(-) diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs index 0a6c22fe42dc4..fe476a40febf6 100644 --- a/crates/hir-expand/src/span.rs +++ b/crates/hir-expand/src/span.rs @@ -75,27 +75,40 @@ pub struct RealSpanMap { /// Invariant: Sorted vec over TextSize // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? 
    pairs: Box<[(TextSize, ErasedFileAstId)]>,
+    end: TextSize,
 }
 
 impl RealSpanMap {
     /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
     pub fn absolute(file_id: FileId) -> Self {
-        RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
+        RealSpanMap {
+            file_id,
+            pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]),
+            end: TextSize::new(!0),
+        }
     }
 
     pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
         let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
         let ast_id_map = db.ast_id_map(file_id.into());
-        pairs.extend(
-            db.parse(file_id)
-                .tree()
-                .items()
-                .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
-        );
-        RealSpanMap { file_id, pairs: pairs.into_boxed_slice() }
+        let tree = db.parse(file_id).tree();
+        pairs
+            .extend(tree.items().map(|item| {
+                (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
+            }));
+        RealSpanMap {
+            file_id,
+            pairs: pairs.into_boxed_slice(),
+            end: tree.syntax().text_range().end(),
+        }
     }
 
     pub fn span_for_range(&self, range: TextRange) -> SpanData {
+        assert!(
+            range.end() <= self.end,
+            "range {range:?} goes beyond the end of the file {:?}",
+            self.end
+        );
         let start = range.start();
         let idx = self
             .pairs
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 1c46471a38320..b89bfd74a6e04 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -311,7 +311,7 @@ where
                     let ident = tt::Leaf::from(tt::Ident {
                         text: SmolStr::new(&token.to_text(conv)[1..]),
-                        span: conv.span_for(TextRange::at(
+                        span: conv.span_for(TextRange::new(
                             abs_range.start() + TextSize::of('\''),
                             abs_range.end(),
                         )),
@@ -625,25 +625,6 @@ impl Converter {
     }
 
     fn next_token(&mut self) -> Option<SyntaxToken> {
-        // while let Some(ev) = self.preorder.next() {
-        //     match ev {
-        //         WalkEvent::Enter(SyntaxElement::Token(t)) => {
-        //             if let Some(leafs) = self.append.remove(&t.clone().into()) {
-        //                 self.current_leafs.extend(leafs);
-        //             }
-        //             return Some(t);
-        //         }
-        //         WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
-        //             self.preorder.skip_subtree();
-        //             if let Some(leafs) = self.append.remove(&n.into()) {
-        //                 self.current_leafs.extend(leafs);
-        //             }
-        //         }
-        //         _ => (),
-        //     }
-        // }
-        // None;
-
         while let Some(ev) = self.preorder.next() {
             match ev {
                 WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
From 7cc6b0f2e98ee5045e9ecc6c7cb9c2183fbc9b50 Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Tue, 12 Dec 2023 22:43:33 +0100
Subject: [PATCH 21/28] Partially revert #16101

---
 crates/hir-def/src/import_map.rs              | 15 +++----
 crates/ide-db/src/items_locator.rs            |  3 +-
 .../src/integrated_benchmarks.rs              | 44 +++++++++++++++++--
 3 files changed, 47 insertions(+), 15 deletions(-)

diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index 62ee45bff62b4..fbd754c30f572 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -413,22 +413,17 @@ pub fn search_dependencies(
     while let Some((_, indexed_values)) = stream.next() {
         for &IndexedValue { index, value } in indexed_values {
             let import_map = &import_maps[index];
-            let importables
@ [importable, ..] = &import_map.importables[value as usize..] else { continue; }; - let importables @ [importable, ..] = &importables[importable..] else { + let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable]; + if !query.matches_assoc_mode(is_trait_assoc_item) { continue; - }; + } // Fetch all the known names of this importable item (to handle import aliases/renames) common_importable_data_scratch.extend( - import_map.map[importable] - .0 + importable_data .iter() .filter(|&info| query.import_matches(info, true)) // Name shared by the importable items in this group. diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs index 93ea08b700146..4a5d234f73d2e 100644 --- a/crates/ide-db/src/items_locator.rs +++ b/crates/ide-db/src/items_locator.rs @@ -37,7 +37,8 @@ pub fn items_with_name<'a>( | NameToImport::Exact(exact_name, case_sensitive) => { let mut local_query = symbol_index::Query::new(exact_name.clone()); let mut external_query = - import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search); + // import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search); + import_map::Query::new(exact_name); if prefix { local_query.prefix(); external_query = external_query.prefix(); diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 3d0ebf9bde615..41ff17f5e4386 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -109,10 +109,46 @@ fn integrated_completion_benchmark() { vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; + // kick off parsing and index population + + let completion_offset = { + let _it = stdx::timeit("change"); + let mut text = host.analysis().file_text(file_id).unwrap().to_string(); + let completion_offset = + patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)") + + "sel".len(); + let mut change = Change::new(); + change.change_file(file_id, Some(Arc::from(text))); + host.apply_change(change); + completion_offset + }; + { - let _it = stdx::timeit("initial"); + let _span = profile::cpu_span(); let analysis = host.analysis(); - analysis.highlight_as_html(file_id, false).unwrap(); + let config = CompletionConfig { + enable_postfix_completions: true, + enable_imports_on_the_fly: true, + enable_self_on_the_fly: true, + enable_private_editable: true, + full_function_signatures: false, + callable: Some(CallableSnippets::FillArguments), + snippet_cap: SnippetCap::new(true), + insert_use: InsertUseConfig { + granularity: ImportGranularity::Crate, + prefix_kind: hir::PrefixKind::ByCrate, + enforce_granularity: true, + group: true, + skip_glob_imports: true, + }, + snippets: Vec::new(), + prefer_no_std: false, + prefer_prelude: true, + limit: None, + }; + let position = + FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; + analysis.completions(&config, position, None).unwrap(); } profile::init_from("*>5"); @@ -122,8 +158,8 @@ fn integrated_completion_benchmark() { let _it = stdx::timeit("change"); let mut text = host.analysis().file_text(file_id).unwrap().to_string(); let completion_offset = - patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)") - + "sel".len(); + patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)") + + ";sel".len(); let mut change = Change::new(); change.change_file(file_id, Some(Arc::from(text))); 
host.apply_change(change); From 6a40400c139da1f2a6e68c26c00b28bd28cf002d Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Wed, 13 Dec 2023 21:18:24 +0330 Subject: [PATCH 22/28] Update builtin attrs from rustc --- crates/hir-def/src/attr/builtin.rs | 34 +++++++++++++++++++++++++----- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-def/src/attr/builtin.rs index 15dceeb8af254..48a596f7f53a7 100644 --- a/crates/hir-def/src/attr/builtin.rs +++ b/crates/hir-def/src/attr/builtin.rs @@ -2,7 +2,7 @@ //! //! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`. //! -//! It was last synchronized with upstream commit e29821ff85a2a3000d226f99f62f89464028d5d6. +//! It was last synchronized with upstream commit c3def263a44e07e09ae6d57abfc8650227fb4972. //! //! The macros were adjusted to only expand to the attribute name, since that is all we need to do //! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to @@ -240,7 +240,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ template!(List: "address, kcfi, memory, thread"), DuplicatesOk, experimental!(no_sanitize) ), - gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, experimental!(coverage)), + gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)), ungated!( doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk @@ -364,7 +364,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ allow_internal_unsafe, Normal, template!(Word), WarnFollowing, "allow_internal_unsafe side-steps the unsafe_code lint", ), - ungated!(rustc_safe_intrinsic, Normal, template!(Word), DuplicatesOk), rustc_attr!(rustc_allowed_through_unstable_modules, Normal, template!(Word), WarnFollowing, "rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \ through unstable paths"), @@ -453,6 +452,12 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ ErrorFollowing, INTERNAL_UNSTABLE ), + rustc_attr!( + rustc_confusables, Normal, + template!(List: r#""name1", "name2", ..."#), + ErrorFollowing, + INTERNAL_UNSTABLE, + ), // Enumerates "identity-like" conversion methods to suggest on type mismatch. rustc_attr!( rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE @@ -488,6 +493,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_attr!( rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE ), + // Ensure the argument to this function is &&str during const-check. + rustc_attr!( + rustc_const_panic_str, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE + ), // ========================================================================== // Internal attributes, Layout related: @@ -520,6 +529,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_pass_by_value, Normal, template!(Word), ErrorFollowing, "#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference." ), + rustc_attr!( + rustc_never_returns_null_ptr, Normal, template!(Word), ErrorFollowing, + "#[rustc_never_returns_null_ptr] is used to mark functions returning non-null pointers." 
+    ),
     rustc_attr!(
         rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true,
         "#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`."
@@ -533,7 +546,11 @@
         "#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
     ),
     rustc_attr!(
-        rustc_deny_explicit_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: false,
+        rustc_deny_explicit_impl,
+        AttributeType::Normal,
+        template!(List: "implement_via_object = (true|false)"),
+        ErrorFollowing,
+        @only_local: true,
         "#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls"
     ),
     rustc_attr!(
@@ -614,6 +631,10 @@
         rustc_doc_primitive, Normal, template!(NameValueStr: "primitive name"), ErrorFollowing,
         r#"`rustc_doc_primitive` is a rustc internal attribute"#,
     ),
+    rustc_attr!(
+        rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
+        "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
+    ),
 
     // ==========================================================================
     // Internal attributes, Testing:
@@ -625,13 +646,16 @@
     rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing),
+    rustc_attr!(TEST, rustc_variance_of_opaques, Normal, template!(Word), WarnFollowing),
+    rustc_attr!(TEST, rustc_hidden_type_of_opaques, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
+    rustc_attr!(TEST, rustc_abi, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
     rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing),
     rustc_attr!(
         TEST, rustc_error, Normal,
         template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly
     ),
-    rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing),
+    rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing),
     rustc_attr!(
         TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk
From 070cd4e8b0c22444aea71f9a8b6fe7ae9847ebeb Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Thu, 14 Dec 2023 10:24:41 +0100
Subject: [PATCH 23/28] minor: Add messages to some asserts for better debugging

---
 crates/base-db/src/span.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
index 3464f4cb6d1c7..6f027ce9394f1 100644
--- a/crates/base-db/src/span.rs
+++ b/crates/base-db/src/span.rs
@@ -151,7 +151,7 @@ impl fmt::Debug for HirFileIdRepr {
 impl From<FileId> for HirFileId {
     fn from(id: FileId) -> Self {
-        assert!(id.index() < Self::MAX_FILE_ID);
+        assert!(id.index() < Self::MAX_FILE_ID, "FileId index {} is too large", id.index());
         HirFileId(id.index())
     }
 }
@@ -159,7 +159,7 @@ impl From<MacroFileId> for HirFileId {
     fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
         let id = id.as_u32();
-        assert!(id < Self::MAX_FILE_ID);
+        assert!(id < Self::MAX_FILE_ID, "MacroCallId index {} is too large", id);
         HirFileId(id | Self::MACRO_FILE_TAG_MASK)
     }
 }
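
The asserts patched above guard a small bit-packing scheme: `HirFileId` stores either a real file id or a macro-expansion id in one `u32`, with the top bit as the tag. A self-contained sketch of the idea (constants and method names are illustrative, not the exact rust-analyzer definitions):

    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
    const MAX_FILE_ID: u32 = u32::MAX ^ MACRO_FILE_TAG_MASK;

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct HirFileId(u32);

    impl HirFileId {
        fn from_file(index: u32) -> HirFileId {
            assert!(index < MAX_FILE_ID, "FileId index {index} is too large");
            HirFileId(index) // tag bit clear: a real file
        }
        fn from_macro(index: u32) -> HirFileId {
            assert!(index < MAX_FILE_ID, "MacroCallId index {index} is too large");
            HirFileId(index | MACRO_FILE_TAG_MASK) // tag bit set: a macro expansion
        }
        fn is_macro(self) -> bool {
            self.0 & MACRO_FILE_TAG_MASK != 0
        }
        fn index(self) -> u32 {
            self.0 & !MACRO_FILE_TAG_MASK
        }
    }

    fn main() {
        let file = HirFileId::from_file(42);
        let mac = HirFileId::from_macro(42);
        assert!(!file.is_macro() && mac.is_macro());
        assert_eq!(file.index(), mac.index());
    }

The assertion messages added by this patch matter precisely because an index that collides with the tag bit would silently change meaning.
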
- ModuleId(ModuleId), } impl TypeOwnerId { @@ -597,9 +596,7 @@ impl TypeOwnerId { TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it), TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it), TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it), - TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => { - return None - } + TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None, }) } } @@ -614,8 +611,7 @@ impl_from!( TraitAliasId, TypeAliasId, ImplId, - EnumVariantId, - ModuleId + EnumVariantId for TypeOwnerId ); @@ -713,12 +709,15 @@ pub struct InTypeConstLoc { pub id: AstId, /// The thing this const arg appears in pub owner: TypeOwnerId, - pub thing: Box, + // FIXME(const-generic-body): The expected type should not be + pub expected_ty: Box, } impl PartialEq for InTypeConstLoc { fn eq(&self, other: &Self) -> bool { - self.id == other.id && self.owner == other.owner && &*self.thing == &*other.thing + self.id == other.id + && self.owner == other.owner + && &*self.expected_ty == &*other.expected_ty } } @@ -1041,7 +1040,6 @@ impl HasModule for TypeOwnerId { TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db), TypeOwnerId::ImplId(it) => it.lookup(db).container, TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container, - TypeOwnerId::ModuleId(it) => *it, } } } diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index ba0a2c0224a05..2ac1516ec07be 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -589,6 +589,16 @@ impl Resolver { }) } + pub fn type_owner(&self) -> Option { + self.scopes().find_map(|scope| match scope { + Scope::BlockScope(_) => None, + &Scope::GenericParams { def, .. } => Some(def.into()), + &Scope::ImplDefScope(id) => Some(id.into()), + &Scope::AdtScope(adt) => Some(adt.into()), + Scope::ExprScope(it) => Some(it.owner.into()), + }) + } + pub fn impl_def(&self) -> Option { self.scopes().find_map(|scope| match scope { Scope::ImplDefScope(def) => Some(*def), @@ -1079,7 +1089,6 @@ impl HasResolver for TypeOwnerId { TypeOwnerId::TypeAliasId(it) => it.resolver(db), TypeOwnerId::ImplId(it) => it.resolver(db), TypeOwnerId::EnumVariantId(it) => it.resolver(db), - TypeOwnerId::ModuleId(it) => it.resolver(db), } } } diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 1e2722e846463..8d55240aef57b 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -88,7 +88,7 @@ pub fn expand_eager_macro_input( let loc = MacroCallLoc { def, krate, - eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })), + eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })), kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, call_site, }; diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index f5e9cd33f2b66..d7819b315c494 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -117,7 +117,7 @@ pub struct MacroCallLoc { pub krate: CrateId, /// Some if this is a macro call for an eager macro. Note that this is `None` /// for the eager input macro file. 
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index ba0a2c0224a05..2ac1516ec07be 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -589,6 +589,16 @@ impl Resolver {
         })
     }
 
+    pub fn type_owner(&self) -> Option<TypeOwnerId> {
+        self.scopes().find_map(|scope| match scope {
+            Scope::BlockScope(_) => None,
+            &Scope::GenericParams { def, .. } => Some(def.into()),
+            &Scope::ImplDefScope(id) => Some(id.into()),
+            &Scope::AdtScope(adt) => Some(adt.into()),
+            Scope::ExprScope(it) => Some(it.owner.into()),
+        })
+    }
+
     pub fn impl_def(&self) -> Option<ImplId> {
         self.scopes().find_map(|scope| match scope {
             Scope::ImplDefScope(def) => Some(*def),
@@ -1079,7 +1089,6 @@ impl HasResolver for TypeOwnerId {
             TypeOwnerId::TypeAliasId(it) => it.resolver(db),
             TypeOwnerId::ImplId(it) => it.resolver(db),
             TypeOwnerId::EnumVariantId(it) => it.resolver(db),
-            TypeOwnerId::ModuleId(it) => it.resolver(db),
         }
     }
 }
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 1e2722e846463..8d55240aef57b 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -88,7 +88,7 @@ pub fn expand_eager_macro_input(
     let loc = MacroCallLoc {
         def,
         krate,
-        eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
+        eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
         call_site,
     };
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index f5e9cd33f2b66..d7819b315c494 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -117,7 +117,7 @@ pub struct MacroCallLoc {
     pub krate: CrateId,
     /// Some if this is a macro call for an eager macro. Note that this is `None`
     /// for the eager input macro file.
-    eager: Option<Box<EagerCallInfo>>,
+    eager: Option<Arc<EagerCallInfo>>,
     pub kind: MacroCallKind,
     pub call_site: SyntaxContextId,
 }
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 8262edec22c1e..6f724e4587440 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -113,7 +113,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
                 .lookup(db.upcast())
-                .thing
+                .expected_ty
                 .box_any()
                 .downcast::<InTypeConstIdMetadata>()
                 .unwrap()
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index c86fe9adff866..97c4a741ff2a9 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -113,7 +113,9 @@ pub struct TyLoweringContext<'a> {
     pub db: &'a dyn HirDatabase,
     resolver: &'a Resolver,
     in_binders: DebruijnIndex,
-    owner: TypeOwnerId,
+    // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
+    // where expected
+    owner: Option<TypeOwnerId>,
     /// Note: Conceptually, it's thinkable that we could be in a location where
     /// some type params should be represented as placeholders, and others
     /// should be converted to variables. I think in practice, this isn't
@@ -127,6 +129,14 @@ pub struct TyLoweringContext<'a> {
 impl<'a> TyLoweringContext<'a> {
     pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self {
+        Self::new_maybe_unowned(db, resolver, Some(owner))
+    }
+
+    pub fn new_maybe_unowned(
+        db: &'a dyn HirDatabase,
+        resolver: &'a Resolver,
+        owner: Option<TypeOwnerId>,
+    ) -> Self {
         let impl_trait_mode = ImplTraitLoweringState::Disallowed;
         let type_param_mode = ParamLoweringMode::Placeholder;
         let in_binders = DebruijnIndex::INNERMOST;
@@ -213,10 +223,11 @@ impl<'a> TyLoweringContext<'a> {
     }
 
     pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const {
+        let Some(owner) = self.owner else { return unknown_const(const_type) };
         const_or_path_to_chalk(
             self.db,
             self.resolver,
-            self.owner,
+            owner,
             const_type,
             const_ref,
             self.type_param_mode,
@@ -1768,10 +1779,11 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
     let resolver = t.resolver(db.upcast());
     let ctx = TyLoweringContext::new(db, &resolver, t.into())
         .with_type_param_mode(ParamLoweringMode::Variable);
-    if db.type_alias_data(t).is_extern {
+    let type_alias_data = db.type_alias_data(t);
+    if type_alias_data.is_extern {
         Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
     } else {
-        let type_ref = &db.type_alias_data(t).type_ref;
+        let type_ref = &type_alias_data.type_ref;
         let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
         make_binders(db, &generics, inner)
     }
@@ -2042,7 +2054,7 @@ pub(crate) fn const_or_path_to_chalk(
                 .intern_in_type_const(InTypeConstLoc {
                     id: it,
                     owner,
-                    thing: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
+                    expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
                 })
                 .into();
             intern_const_scalar(
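
With the owner made optional, const-argument lowering can degrade gracefully instead of needing a synthetic module owner: when no `TypeOwnerId` is available, `lower_const` above returns an unknown const. A reduced sketch of that control flow, with all types standing in for the hir-ty ones:

    #[derive(Debug)]
    struct Const(String);

    fn unknown_const() -> Const {
        Const("{unknown}".into())
    }

    struct LoweringCtx {
        owner: Option<u32>, // `Option<TypeOwnerId>` in the real code
    }

    impl LoweringCtx {
        fn lower_const(&self, text: &str) -> Const {
            // No owner means we cannot intern an in-type const, so bail out politely.
            let Some(owner) = self.owner else { return unknown_const() };
            Const(format!("const `{text}` owned by {owner}"))
        }
    }

    fn main() {
        println!("{:?}", LoweringCtx { owner: Some(7) }.lower_const("N + 1"));
        println!("{:?}", LoweringCtx { owner: None }.lower_const("N + 1"));
    }
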
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index bb15ca8c436a2..28e84e480d775 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -9,11 +9,10 @@ use super::visit_module;
 fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     let (mut db, pos) = TestDB::with_position(
         "
-        //- /lib.rs
-        fn foo() -> i32 {
-            $01 + 1
-        }
-        ",
+//- /lib.rs
+fn foo() -> i32 {
+    $01 + 1
+}",
     );
     {
         let events = db.log_executed(|| {
@@ -27,12 +26,11 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     }
 
     let new_text = "
-        fn foo() -> i32 {
-            1
-            +
-            1
-        }
-        ";
+fn foo() -> i32 {
+    1
+    +
+    1
+}";
 
     db.set_file_text(pos.file_id, Arc::from(new_text));
 
@@ -47,3 +45,55 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
         assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
     }
 }
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_types_in_another() {
+    let (mut db, pos) = TestDB::with_position(
+        "
+//- /lib.rs
+fn foo() -> f32 {
+    1.0 + 2.0
+}
+fn bar() -> i32 {
+    $01 + 1
+}
+fn baz() -> i32 {
+    1 + 1
+}",
+    );
+    {
+        let events = db.log_executed(|| {
+            let module = db.module_for_file(pos.file_id);
+            let crate_def_map = module.def_map(&db);
+            visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+                db.infer(def);
+            });
+        });
+        assert!(format!("{events:?}").contains("infer"))
+    }
+
+    let new_text = "
+fn foo() -> f32 {
+    1.0 + 2.0
+}
+fn bar() -> i32 {
+    53
+}
+fn baz() -> i32 {
+    1 + 1
+}
+";
+
+    db.set_file_text(pos.file_id, Arc::from(new_text));
+
+    {
+        let events = db.log_executed(|| {
+            let module = db.module_for_file(pos.file_id);
+            let crate_def_map = module.def_map(&db);
+            visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+                db.infer(def);
+            });
+        });
+        assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
+    }
+}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index dcf8ba27a68a0..a03ff22074577 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -948,10 +948,10 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
         let analyze = self.analyze(ty.syntax())?;
         let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
-        let ty = hir_ty::TyLoweringContext::new(
+        let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
             self.db,
             &analyze.resolver,
-            analyze.resolver.module().into(),
+            analyze.resolver.type_owner(),
         )
         .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
         Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 73db6f8f0b86b..d05118bbc28b4 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -1040,8 +1040,9 @@ fn resolve_hir_path_(
     let types = || {
         let (ty, unresolved) = match path.type_anchor() {
             Some(type_ref) => {
-                let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into())
-                    .lower_ty_ext(type_ref);
+                let (_, res) =
+                    TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
+                        .lower_ty_ext(type_ref);
                 res.map(|ty_ns| (ty_ns, path.segments().first()))
             }
             None => {
From 7b9595a5ab2c6eb455ea10f3708c9fb97a430978 Mon Sep 17 00:00:00 2001
From: hkalbasi
Date: Fri, 15 Dec 2023 02:10:25 +0330
Subject: [PATCH 25/28] Run rust-analyzer on rustc tests in metrics

---
 .github/workflows/metrics.yaml              |   9 +-
 Cargo.lock                                  |   1 +
 crates/ide-diagnostics/src/lib.rs           |   4 +-
 crates/ide/src/lib.rs                       |   4 +-
 crates/rust-analyzer/Cargo.toml             |   1 +
 crates/rust-analyzer/src/bin/main.rs        |   1 +
 crates/rust-analyzer/src/cli.rs             |   1 +
 crates/rust-analyzer/src/cli/flags.rs       |  16 ++
 crates/rust-analyzer/src/cli/rustc_tests.rs | 236 ++++++++++++++++++++
 xtask/src/flags.rs                          |   3 +
 xtask/src/metrics.rs                        |  17 ++
 11 files changed, 288 insertions(+), 5 deletions(-)
 create mode 100644 crates/rust-analyzer/src/cli/rustc_tests.rs

diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml
index 741e559953fc8..e6a9917a0bf3d 100644
--- a/.github/workflows/metrics.yaml
+++ b/.github/workflows/metrics.yaml
@@ -67,7 +67,7 @@ jobs:
   other_metrics:
     strategy:
       matrix:
        names:
[self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] + names: [self, rustc_tests, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] runs-on: ubuntu-latest needs: [setup_cargo, build_metrics] @@ -118,6 +118,11 @@ jobs: with: name: self-${{ github.sha }} + - name: Download rustc_tests metrics + uses: actions/download-artifact@v3 + with: + name: rustc_tests-${{ github.sha }} + - name: Download ripgrep-13.0.0 metrics uses: actions/download-artifact@v3 with: @@ -146,7 +151,7 @@ jobs: chmod 700 ~/.ssh git clone --depth 1 git@github.com:rust-analyzer/metrics.git - jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json + jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json rustc_tests.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json cd metrics git add . git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 diff --git a/Cargo.lock b/Cargo.lock index f94b855ca7d18..227d1db0ec72d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1545,6 +1545,7 @@ dependencies = [ "triomphe", "vfs", "vfs-notify", + "walkdir", "winapi", "xflags", "xshell", diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 6541bf605794a..579386c72ef4d 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -94,7 +94,7 @@ use syntax::{ }; // FIXME: Make this an enum -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum DiagnosticCode { RustcHardError(&'static str), RustcLint(&'static str), @@ -198,7 +198,7 @@ impl Diagnostic { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Severity { Error, Warning, diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index e3548f3f0cbf4..a19952e4cae97 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -133,7 +133,9 @@ pub use ide_db::{ symbol_index::Query, RootDatabase, SymbolKind, }; -pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity}; +pub use ide_diagnostics::{ + Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity, +}; pub use ide_ssr::SsrError; pub use syntax::{TextRange, TextSize}; pub use text_edit::{Indel, TextEdit}; diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 408c1fb6f39b7..39ac338aa1a92 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -42,6 +42,7 @@ tracing-tree.workspace = true triomphe.workspace = true nohash-hasher.workspace = true always-assert = "0.1.2" +walkdir = "2.3.2" cfg.workspace = true flycheck.workspace = true diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 29bd02f92da70..8472e49de9838 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -87,6 +87,7 @@ fn main() -> anyhow::Result<()> { flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?, flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?, + flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?, } Ok(()) } diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index 64646b33ad480..de00c4192b46a 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs @@ -10,6 +10,7 @@ 
 mod ssr;
 mod lsif;
 mod scip;
 mod run_tests;
+mod rustc_tests;
 
 mod progress_report;
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index fe5022f8606d1..5633c0c488aa7 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -98,6 +98,15 @@ xflags::xflags! {
             required path: PathBuf
         }
 
+        /// Run unit tests of the project using mir interpreter
+        cmd rustc-tests {
+            /// Directory with Cargo.toml.
+            required rustc_repo: PathBuf
+
+            /// Only run tests with filter as substring
+            optional --filter path: String
+        }
+
         cmd diagnostics {
             /// Directory with Cargo.toml.
             required path: PathBuf
@@ -159,6 +168,7 @@ pub enum RustAnalyzerCmd {
     Highlight(Highlight),
     AnalysisStats(AnalysisStats),
     RunTests(RunTests),
+    RustcTests(RustcTests),
     Diagnostics(Diagnostics),
     Ssr(Ssr),
     Search(Search),
@@ -211,6 +221,12 @@ pub struct RunTests {
     pub path: PathBuf,
 }
 
+#[derive(Debug)]
+pub struct RustcTests {
+    pub rustc_repo: PathBuf,
+    pub filter: Option<String>,
+}
+
 #[derive(Debug)]
 pub struct Diagnostics {
     pub path: PathBuf,
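
The runner added below wants `&'static str` diagnostic codes, so it leaks each distinct rustc error code once and memoizes the result in a thread-local map. The trick in isolation, as a sketch rather than the exact rust-analyzer code:

    use std::cell::RefCell;
    use std::collections::HashMap;

    fn leaky_code(code: &str) -> &'static str {
        thread_local! {
            static CACHE: RefCell<HashMap<String, &'static str>> = RefCell::new(HashMap::new());
        }
        CACHE.with(|cache| {
            let mut cache = cache.borrow_mut();
            match cache.get(code) {
                Some(&interned) => interned,
                None => {
                    // Leak exactly once per distinct code; the map caps total leakage.
                    let interned: &'static str = Box::leak(format!("E{code}").into_boxed_str());
                    cache.insert(code.to_owned(), interned);
                    interned
                }
            }
        })
    }

    fn main() {
        let a = leaky_code("0308");
        let b = leaky_code("0308");
        assert!(std::ptr::eq(a, b)); // second call hits the cache
    }
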
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
new file mode 100644
index 0000000000000..c89b88ac0f9e6
--- /dev/null
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -0,0 +1,236 @@
+//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
+
+use std::{
+    cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
+};
+
+use hir::Crate;
+use ide::{AnalysisHost, Change, DiagnosticCode, DiagnosticsConfig};
+use profile::StopWatch;
+use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
+
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use triomphe::Arc;
+use vfs::{AbsPathBuf, FileId};
+use walkdir::WalkDir;
+
+use crate::cli::{flags, report_metric, Result};
+
+struct Tester {
+    host: AnalysisHost,
+    root_file: FileId,
+    pass_count: u64,
+    ignore_count: u64,
+    fail_count: u64,
+    stopwatch: StopWatch,
+}
+
+fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
+    thread_local! {
+        static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
+    }
+    LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
+        Some(c) => *c,
+        None => {
+            let v = DiagnosticCode::RustcHardError(format!("E{code}").leak());
+            s.insert(code.to_owned(), v);
+            v
+        }
+    })
+}
+
+fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
+    let text = read_to_string(p).unwrap();
+    let mut result = HashMap::new();
+    {
+        let mut text = &*text;
+        while let Some(p) = text.find("error[E") {
+            text = &text[p + 7..];
+            let code = string_to_diagnostic_code_leaky(&text[..4]);
+            *result.entry(code).or_insert(0) += 1;
+        }
+    }
+    result
+}
+
+impl Tester {
+    fn new() -> Result<Self> {
+        let tmp_file = AbsPathBuf::assert("/tmp/ra-rustc-test.rs".into());
+        std::fs::write(&tmp_file, "")?;
+        let mut cargo_config = CargoConfig::default();
+        cargo_config.sysroot = Some(RustLibSource::Discover);
+        let workspace = ProjectWorkspace::DetachedFiles {
+            files: vec![tmp_file.clone()],
+            sysroot: Ok(
+                Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env).unwrap()
+            ),
+            rustc_cfg: vec![],
+        };
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: false,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+            prefill_caches: false,
+        };
+        let (host, _vfs, _proc_macro) =
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+        let db = host.raw_database();
+        let krates = Crate::all(db);
+        let root_crate = krates.iter().cloned().find(|krate| krate.origin(db).is_local()).unwrap();
+        let root_file = root_crate.root_file(db);
+        Ok(Self {
+            host,
+            root_file,
+            pass_count: 0,
+            ignore_count: 0,
+            fail_count: 0,
+            stopwatch: StopWatch::start(),
+        })
+    }
+
+    fn test(&mut self, p: PathBuf) {
+        if p.parent().unwrap().file_name().unwrap() == "auxiliary" {
+            // These are not tests
+            return;
+        }
+        if IGNORED_TESTS.iter().any(|ig| p.file_name().is_some_and(|x| x == *ig)) {
+            println!("{p:?} IGNORE");
+            self.ignore_count += 1;
+            return;
+        }
+        let stderr_path = p.with_extension("stderr");
+        let expected = if stderr_path.exists() {
+            detect_errors_from_rustc_stderr_file(stderr_path)
+        } else {
+            HashMap::new()
+        };
+        let text = read_to_string(&p).unwrap();
+        let mut change = Change::new();
+        // Ignore unstable tests, since they move too fast and we do not intend to support all of them.
+        let mut ignore_test = text.contains("#![feature");
+        // Ignore tests with extern crates, as this infra doesn't support them yet.
+        ignore_test |= text.contains("// aux-build:") || text.contains("// aux-crate:");
+        // Ignore tests with extern modules similarly.
+        ignore_test |= text.contains("mod ");
+        // These should work, but they don't, and I don't know why, so ignore them.
+        ignore_test |= text.contains("extern crate proc_macro");
+        let should_have_no_error = text.contains("// check-pass")
+            || text.contains("// build-pass")
+            || text.contains("// run-pass");
+        change.change_file(self.root_file, Some(Arc::from(text)));
+        self.host.apply_change(change);
+        let diagnostic_config = DiagnosticsConfig::test_sample();
+        let diags = self
+            .host
+            .analysis()
+            .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file)
+            .unwrap();
+        let mut actual = HashMap::new();
+        for diag in diags {
+            if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) {
+                continue;
+            }
+            if !should_have_no_error && !SUPPORTED_DIAGNOSTICS.contains(&diag.code) {
+                continue;
+            }
+            *actual.entry(diag.code).or_insert(0) += 1;
+        }
+        // Ignore tests with diagnostics that we don't emit.
diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs
index e52cbfca3e6fb..092ab8c593ce8 100644
--- a/xtask/src/flags.rs
+++ b/xtask/src/flags.rs
@@ -110,6 +110,7 @@ pub struct PublishReleaseNotes {
 #[derive(Debug)]
 pub enum MeasurementType {
     Build,
+    RustcTests,
     AnalyzeSelf,
     AnalyzeRipgrep,
     AnalyzeWebRender,
@@ -122,6 +123,7 @@ impl FromStr for MeasurementType {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s {
             "build" => Ok(Self::Build),
+            "rustc_tests" => Ok(Self::RustcTests),
             "self" => Ok(Self::AnalyzeSelf),
             "ripgrep-13.0.0" => Ok(Self::AnalyzeRipgrep),
             "webrender-2022" => Ok(Self::AnalyzeWebRender),
@@ -135,6 +137,7 @@ impl AsRef<str> for MeasurementType {
     fn as_ref(&self) -> &str {
         match self {
             Self::Build => "build",
+            Self::RustcTests => "rustc_tests",
             Self::AnalyzeSelf => "self",
             Self::AnalyzeRipgrep => "ripgrep-13.0.0",
             Self::AnalyzeWebRender => "webrender-2022",

diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 59d41d8e4b842..3d28ecdb0eb2b 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -36,6 +36,9 @@ impl flags::Metrics {
                 MeasurementType::Build => {
                     metrics.measure_build(sh)?;
                 }
+                MeasurementType::RustcTests => {
+                    metrics.measure_rustc_tests(sh)?;
+                }
                 MeasurementType::AnalyzeSelf => {
                     metrics.measure_analysis_stats_self(sh)?;
                 }
@@ -50,6 +53,7 @@ impl flags::Metrics {
             }
             None => {
                 metrics.measure_build(sh)?;
+                metrics.measure_rustc_tests(sh)?;
                 metrics.measure_analysis_stats_self(sh)?;
                 metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeRipgrep.as_ref())?;
                 metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeWebRender.as_ref())?;
@@ -78,6 +82,19 @@ impl Metrics {
         self.report("build", time.as_millis() as u64, "ms".into());
         Ok(())
     }
+
+    fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> {
+        eprintln!("\nMeasuring rustc tests");
+
+        cmd!(sh, "git clone https://github.com/rust-lang/rust").run()?;
+
+        let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?;
+        for (metric, value, unit) in parse_metrics(&output) {
+            self.report(metric, value, unit.into());
+        }
+        Ok(())
+    }
+
     fn measure_analysis_stats_self(&mut self, sh: &Shell) -> anyhow::Result<()> {
         self.measure_analysis_stats_path(sh, "self", ".")
     }
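
`measure_rustc_tests` shells out to the freshly built `rust-analyzer rustc-tests` binary and re-reports whatever metrics it printed. The `parse_metrics` helper it calls lives elsewhere in xtask and is not shown in this series; a plausible sketch, assuming one `METRIC:<name>:<value>:<unit>` record per line (the exact line format is an assumption here, not taken from the patch):

    // Hypothetical stand-in for the real xtask helper.
    fn parse_metrics(output: &str) -> Vec<(&str, u64, &str)> {
        output
            .lines()
            .filter_map(|line| {
                let mut fields = line.strip_prefix("METRIC:")?.split(':');
                let name = fields.next()?;
                let value = fields.next()?.parse::<u64>().ok()?;
                let unit = fields.next()?;
                Some((name, value, unit))
            })
            .collect()
    }
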
From 4c45d239489e764aee9b609baaa95bd5f830bbef Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Fri, 15 Dec 2023 13:52:49 +0100
Subject: [PATCH 26/28] fix: Syntax fixup now removes subtrees with fake spans

---
 crates/base-db/src/span.rs         | 11 ++++++---
 crates/hir-expand/src/db.rs        | 11 ++++++++-
 crates/hir-expand/src/fixup.rs     | 39 +++++++++++++++++++++++-------
 crates/rust-analyzer/src/config.rs |  1 +
 crates/vfs/src/lib.rs              |  6 ++++-
 5 files changed, 54 insertions(+), 14 deletions(-)

diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
index 6f027ce9394f1..d8990eb7cae0d 100644
--- a/crates/base-db/src/span.rs
+++ b/crates/base-db/src/span.rs
@@ -151,21 +151,26 @@ impl fmt::Debug for HirFileIdRepr {

 impl From<FileId> for HirFileId {
     fn from(id: FileId) -> Self {
-        assert!(id.index() < Self::MAX_FILE_ID, "FileId index {} is too large", id.index());
+        _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+        assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index());
         HirFileId(id.index())
     }
 }

 impl From<MacroFileId> for HirFileId {
     fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
+        _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
         let id = id.as_u32();
-        assert!(id < Self::MAX_FILE_ID, "MacroCallId index {} is too large", id);
+        assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
         HirFileId(id | Self::MACRO_FILE_TAG_MASK)
     }
 }

 impl HirFileId {
-    const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
+    const ASSERT_MAX_FILE_ID_IS_SAME: () =
+        [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
+
+    const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;

     const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

     #[inline]
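
`ASSERT_MAX_FILE_ID_IS_SAME` is a compile-time assertion built without any const-panic machinery: indexing the one-element array `[()]` with `cond as usize` is out of bounds exactly when `cond` is true, so a mismatch between the two constants fails const evaluation instead of panicking at runtime. The trick in isolation (illustrative stand-in types, not the real ones):

    struct FileId;
    impl FileId {
        const MAX_FILE_ID: u32 = 0x7fff_ffff;
    }

    struct HirFileId;
    impl HirFileId {
        const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
        const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
        // Index 0 (condition false) is fine; index 1 (condition true) is a
        // const-eval error, i.e. a build failure.
        const ASSERT: () = [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
    }

    fn main() {
        // Referencing the constant forces the check, just as the From impls do.
        _ = HirFileId::ASSERT;
    }
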
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 32baa6694b4df..935669d49b5b3 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -20,7 +20,7 @@ use crate::{
     attrs::{collect_attrs, RawAttrs},
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
-    fixup::{self, SyntaxFixupUndoInfo},
+    fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{apply_mark, SyntaxContextData, Transparency},
     span::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
@@ -421,6 +421,15 @@ fn macro_arg(
         syntax::NodeOrToken::Token(_) => true,
     });
     fixups.remove.extend(censor);
+    {
+        let mut tt = mbe::syntax_node_to_token_tree_modified(
+            &syntax,
+            map.as_ref(),
+            fixups.append.clone(),
+            fixups.remove.clone(),
+        );
+        reverse_fixups(&mut tt, &fixups.undo_info);
+    }
     (
         mbe::syntax_node_to_token_tree_modified(
             &syntax,

diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 11775c531d4cd..346cd39a7675d 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -8,12 +8,13 @@ use base_db::{
 use la_arena::RawIdx;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
+use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
 use triomphe::Arc;
-use tt::Spacing;
+use tt::{Spacing, Span};

 use crate::{
     span::SpanMapRef,
@@ -45,19 +46,20 @@ impl SyntaxFixupUndoInfo {
 // replacement -> censor + append
 // append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
 // to remove later
+const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);

 pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
     let mut remove = FxHashSet::<SyntaxNode>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
-    let dummy_range = TextRange::empty(TextSize::new(0));
+    let dummy_range = FIXUP_DUMMY_RANGE;
     // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
     // the index into the replacement vec but only if the end points to !0
-    let dummy_anchor = SpanAnchor {
-        file_id: FileId::from_raw(!0),
-        ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)),
-    };
+    let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
     let fake_span = |range| SpanData {
         range: dummy_range,
         anchor: dummy_anchor,
@@ -76,7 +78,7 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta
             let replacement = Leaf::Ident(Ident {
                 text: "__ra_fixup".into(),
                 span: SpanData {
-                    range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
+                    range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
                     anchor: dummy_anchor,
                     ctx: span_map.span_for_range(node_range).ctx,
                 },
@@ -299,6 +301,13 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
 pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
     let Some(undo_info) = undo_info.original.as_deref() else { return };
     let undo_info = &**undo_info;
+    if never!(
+        tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+            || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+    ) {
+        tt.delimiter.close = SpanData::DUMMY;
+        tt.delimiter.open = SpanData::DUMMY;
+    }
     reverse_fixups_(tt, undo_info);
 }

@@ -310,17 +319,28 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
         .filter(|tt| match tt {
             tt::TokenTree::Leaf(leaf) => {
                 let span = leaf.span();
-                span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0)
+                let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+                let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
+                is_real_leaf || is_replaced_node
             }
             tt::TokenTree::Subtree(_) => true,
         })
         .flat_map(|tt| match tt {
             tt::TokenTree::Subtree(mut tt) => {
+                if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+                    || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+                {
+                    // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
+                    // might copy them if the proc-macro asks for it, so we need to filter those out
+                    // here as well.
+                    return SmallVec::new_const();
+                }
                 reverse_fixups_(&mut tt, undo_info);
                 SmallVec::from_const([tt.into()])
             }
             tt::TokenTree::Leaf(leaf) => {
-                if leaf.span().anchor.file_id == FileId::from_raw(!0) {
+                if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+                    // we have a fake node here; replace it with the original
                     let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
                     if original.delimiter.kind == tt::DelimiterKind::Invisible {
                         original.token_trees.into()
@@ -328,6 +348,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
                         SmallVec::from_const([original.into()])
                     }
                 } else {
+                    // just a normal leaf
                     SmallVec::from_const([leaf.into()])
                 }
             }
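
The dummy-span scheme that `reverse_fixups` undoes is worth restating: fixup-generated nodes carry a reserved file id, and a replaced node additionally stores its index into the undo vector in the range start, recognizable because the range end is the `!0` sentinel. A simplified standalone sketch of the encoding and its check (flattened field layout, not the real `SpanData`/`SpanAnchor` types):

    const DUMMY_FILE: u32 = 0x7fff_ffff; // mirrors FileId::MAX_FILE_ID below
    const DUMMY_RANGE_END: u32 = !0;

    #[derive(Clone, Copy)]
    struct FakeSpan {
        file_id: u32,
        start: u32,
        end: u32,
    }

    fn span_for_replacement(undo_index: u32) -> FakeSpan {
        FakeSpan { file_id: DUMMY_FILE, start: undo_index, end: DUMMY_RANGE_END }
    }

    fn undo_index(span: FakeSpan) -> Option<usize> {
        (span.file_id == DUMMY_FILE && span.end == DUMMY_RANGE_END).then_some(span.start as usize)
    }
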
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 90d1d6b055594..258f74106395d 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1354,6 +1354,7 @@ impl Config {
         }
     }

+    // FIXME: This should be an AbsolutePathBuf
     fn target_dir_from_config(&self) -> Option<PathBuf> {
         self.data.rust_analyzerTargetDir.as_ref().and_then(|target_dir| match target_dir {
             TargetDirectory::UseSubdirectory(yes) if *yes => {

diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index 8ffda5d78d13e..ef5b10ee9db2f 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -61,13 +61,17 @@ pub use paths::{AbsPath, AbsPathBuf};
 /// Most functions in rust-analyzer use this when they need to refer to a file.
 #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
 pub struct FileId(u32);
+// pub struct FileId(NonMaxU32);

 impl FileId {
     /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
+    // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
     pub const BOGUS: FileId = FileId(0xe4e4e);
+    pub const MAX_FILE_ID: u32 = 0x7fff_ffff;

     #[inline]
-    pub fn from_raw(raw: u32) -> FileId {
+    pub const fn from_raw(raw: u32) -> FileId {
+        assert!(raw <= Self::MAX_FILE_ID);
         FileId(raw)
     }
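
This patch also pins down why `FileId::MAX_FILE_ID` is `0x7fff_ffff`: `HirFileId` packs both real files and macro files into a single `u32` and spends the top bit as the tag, so plain file ids must fit in the remaining 31 bits. A sketch of the packing with bare `u32`s instead of the real newtypes:

    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

    fn pack_macro_file(macro_call_id: u32) -> u32 {
        // Same bound the From impls assert against MAX_HIR_FILE_ID.
        assert!(macro_call_id <= u32::MAX ^ MACRO_FILE_TAG_MASK);
        macro_call_id | MACRO_FILE_TAG_MASK
    }

    fn unpack(packed: u32) -> (bool, u32) {
        // (is_macro_file, index)
        (packed & MACRO_FILE_TAG_MASK != 0, packed & !MACRO_FILE_TAG_MASK)
    }
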
From 4f722165b66612e8103299972f644d94da93b78b Mon Sep 17 00:00:00 2001
From: hkalbasi
Date: Fri, 15 Dec 2023 19:09:07 +0330
Subject: [PATCH 27/28] Fix false positive type mismatch in const reference
 patterns

---
 crates/hir-ty/src/infer/pat.rs      | 40 +++++++++---------
 crates/hir-ty/src/infer/path.rs     | 63 ++++++++++++++++-------------
 crates/hir-ty/src/tests/patterns.rs | 38 +++++++++++++++++
 3 files changed, 96 insertions(+), 45 deletions(-)

diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 7ff12e5b7f851..acdb540289d71 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -262,7 +262,7 @@ impl InferenceContext<'_> {
     fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
         let mut expected = self.resolve_ty_shallow(expected);

-        if is_non_ref_pat(self.body, pat) {
+        if self.is_non_ref_pat(self.body, pat) {
             let mut pat_adjustments = Vec::new();
             while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
                 pat_adjustments.push(expected.clone());
@@ -496,24 +496,28 @@ impl InferenceContext<'_> {
         self.infer_expr(expr, &Expectation::has_type(expected.clone()))
     }
-}

-fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
-    match &body[pat] {
-        Pat::Tuple { .. }
-        | Pat::TupleStruct { .. }
-        | Pat::Record { .. }
-        | Pat::Range { .. }
-        | Pat::Slice { .. } => true,
-        Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
-        // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
-        Pat::Path(..) => true,
-        Pat::ConstBlock(..) => true,
-        Pat::Lit(expr) => !matches!(
-            body[*expr],
-            Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
-        ),
-        Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+    fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool {
+        match &body[pat] {
+            Pat::Tuple { .. }
+            | Pat::TupleStruct { .. }
+            | Pat::Record { .. }
+            | Pat::Range { .. }
+            | Pat::Slice { .. } => true,
+            Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)),
+            Pat::Path(p) => {
+                let v = self.resolve_value_path_inner(p, pat.into());
+                v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_)))
+            }
+            Pat::ConstBlock(..) => false,
+            Pat::Lit(expr) => !matches!(
+                body[*expr],
+                Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
+            ),
+            Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => {
+                false
+            }
+        }
     }
 }
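
The rewritten `Pat::Path` arm is the heart of this fix: a path pattern that resolves to a `const` may itself have reference type, and unconditionally treating it as a non-ref pattern made inference strip a layer of references from the scrutinee, yielding a false "expected &i32, got i32" style mismatch. The regression test added to `tests/patterns.rs` below exercises both cases; the shape of the problem in plain Rust:

    // A const of reference type used as a pattern: `TEST_STR` matches the
    // `&str` scrutinee directly, so no deref adjustment may be inserted.
    const TEST_STR: &'static str = "abcd";

    fn is_abcd(s: &str) -> bool {
        match s {
            TEST_STR => true,
            _ => false,
        }
    }
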
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index fcfe1a3b5cf45..49fb78f67a656 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -40,33 +40,7 @@ impl InferenceContext<'_> {
     }

     fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
-        let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
-            let last = path.segments().last()?;
-
-            // Don't use `self.make_ty()` here as we need `orig_ns`.
-            let ctx =
-                crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
-            let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
-            let ty = self.table.insert_type_vars(ty);
-            let ty = self.table.normalize_associated_types_in(ty);
-
-            let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
-            let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
-            let ty = self.table.insert_type_vars(ty);
-            let ty = self.table.normalize_associated_types_in(ty);
-            self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
-        } else {
-            // FIXME: report error, unresolved first path segment
-            let value_or_partial =
-                self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
-
-            match value_or_partial {
-                ResolveValueResult::ValueNs(it, _) => (it, None),
-                ResolveValueResult::Partial(def, remaining_index, _) => self
-                    .resolve_assoc_item(def, path, remaining_index, id)
-                    .map(|(it, substs)| (it, Some(substs)))?,
-            }
-        };
+        let (value, self_subst) = self.resolve_value_path_inner(path, id)?;

         let value_def = match value {
             ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
@@ -144,6 +118,41 @@ impl InferenceContext<'_> {
         Some(ValuePathResolution::GenericDef(value_def, generic_def, substs))
     }

+    pub(super) fn resolve_value_path_inner(
+        &mut self,
+        path: &Path,
+        id: ExprOrPatId,
+    ) -> Option<(ValueNs, Option<Substitution>)> {
+        let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+            let last = path.segments().last()?;
+
+            // Don't use `self.make_ty()` here as we need `orig_ns`.
+            let ctx =
+                crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
+            let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
+            let ty = self.table.insert_type_vars(ty);
+            let ty = self.table.normalize_associated_types_in(ty);
+
+            let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+            let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
+            let ty = self.table.insert_type_vars(ty);
+            let ty = self.table.normalize_associated_types_in(ty);
+            self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
+        } else {
+            // FIXME: report error, unresolved first path segment
+            let value_or_partial =
+                self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
+
+            match value_or_partial {
+                ResolveValueResult::ValueNs(it, _) => (it, None),
+                ResolveValueResult::Partial(def, remaining_index, _) => self
+                    .resolve_assoc_item(def, path, remaining_index, id)
+                    .map(|(it, substs)| (it, Some(substs)))?,
+            }
+        };
+        Some((value, self_subst))
+    }
+
     fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
         let predicates = self.db.generic_predicates(def);
         for predicate in predicates.iter() {

diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs
index 5d7bab09c26e6..7234af2d68348 100644
--- a/crates/hir-ty/src/tests/patterns.rs
+++ b/crates/hir-ty/src/tests/patterns.rs
@@ -1153,3 +1153,41 @@ fn main() {
     "#,
     );
 }
+
+#[test]
+fn type_mismatch_pat_const_reference() {
+    check_no_mismatches(
+        r#"
+const TEST_STR: &'static str = "abcd";
+
+fn main() {
+    let s = "abcd";
+    match s {
+        TEST_STR => (),
+        _ => (),
+    }
+}
+
+    "#,
+    );
+    check(
+        r#"
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+    const TEST_I32_REF: &'static i32 = &3;
+    const TEST_I32: i32 = 3;
+}
+
+fn main() {
+    match &6 {
+        Foo::<i32>::TEST_I32_REF => (),
+        Foo::<i32>::TEST_I32 => (),
+      //^^^^^^^^^^^^^^^^^^^^ expected &i32, got i32
+        _ => (),
+    }
+}
+
+    "#,
+    );
+}

From fa5a327786c4a4552602f2df4eb374d0b172dab2 Mon Sep 17 00:00:00 2001
From: hkalbasi
Date: Fri, 15 Dec 2023 19:09:42 +0330
Subject: [PATCH 28/28] Use depth 1 in git clone

---
 xtask/src/metrics.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 3d28ecdb0eb2b..845928432c4fd 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -86,7 +86,7 @@ impl Metrics {
     fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> {
         eprintln!("\nMeasuring rustc tests");

-        cmd!(sh, "git clone https://github.com/rust-lang/rust").run()?;
+        cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?;

         let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?;
         for (metric, value, unit) in parse_metrics(&output) {