diff --git a/crates/argus-cli/src/plugin.rs b/crates/argus-cli/src/plugin.rs index 2a42b90..a9f0084 100644 --- a/crates/argus-cli/src/plugin.rs +++ b/crates/argus-cli/src/plugin.rs @@ -122,7 +122,7 @@ impl RustcPlugin for ArgusPlugin { match &args.command { AC::Preload => { let mut cmd = Command::new(cargo_path); - // Note: this command must share certain parameters with rustc_plugin so Cargo will not recompute + // NOTE: this command must share certain parameters with rustc_plugin so Cargo will not recompute // dependencies when actually running the driver, e.g. RUSTFLAGS. cmd .args(["check", "--all", "--all-features", "--target-dir"]) @@ -136,14 +136,14 @@ impl RustcPlugin for ArgusPlugin { println!("{commit_hash}"); exit(0); } - _ => {} + AC::Obligations { .. } | AC::Tree { .. } | AC::Bundle => {} }; let file = match &args.command { AC::Tree { file, .. } => Some(file), AC::Obligations { file } => file.as_ref(), AC::Bundle => None, - _ => unreachable!(), + AC::Preload | AC::RustcVersion => unreachable!(), }; let filter = file.map_or(CrateFilter::OnlyWorkspace, |file| { @@ -159,6 +159,7 @@ impl RustcPlugin for ArgusPlugin { plugin_args: ArgusPluginArgs, ) -> RustcResult<()> { use ArgusCommand as AC; + let no_target = || None::<(ObligationHash, CharRange)>; match &plugin_args.command { AC::Tree { file, @@ -192,11 +193,10 @@ impl RustcPlugin for ArgusPlugin { postprocess(v) } AC::Obligations { file, .. } => { - let nothing = || None::<(ObligationHash, CharRange)>; let v = run( analysis::obligations, file.as_ref().map(PathBuf::from), - nothing, + no_target, &plugin_args, &compiler_args, ); @@ -204,17 +204,16 @@ impl RustcPlugin for ArgusPlugin { } AC::Bundle => { log::warn!("Bundling takes an enormous amount of time."); - let nothing = || None::<(ObligationHash, CharRange)>; let v = run( analysis::bundle, None, - nothing, + no_target, &plugin_args, &compiler_args, ); postprocess(v) } - _ => unreachable!(), + AC::Preload | AC::RustcVersion => unreachable!(), } } } diff --git a/crates/argus-ext/src/ty/impl.rs b/crates/argus-ext/src/ty/impl.rs index 8286a4e..40b5cca 100644 --- a/crates/argus-ext/src/ty/impl.rs +++ b/crates/argus-ext/src/ty/impl.rs @@ -118,11 +118,13 @@ impl<'tcx> TyCtxtExt<'tcx> for TyCtxt<'tcx> { fn to_local(&self, body_id: BodyId, span: Span) -> Span { use rustc_utils::source_map::span::SpanExt; + let hir = self.hir(); let mut local_body_span = hir.body(body_id).value.span; while local_body_span.from_expansion() { local_body_span = local_body_span.source_callsite(); } + span.as_local(local_body_span).unwrap_or(span) } diff --git a/crates/argus-ser/src/argus.rs b/crates/argus-ser/src/argus.rs index c22e383..5e83ef5 100644 --- a/crates/argus-ser/src/argus.rs +++ b/crates/argus-ser/src/argus.rs @@ -183,7 +183,7 @@ pub enum ClauseBound<'tcx> { ), } -pub fn group_predicates_by_ty<'tcx>( +pub(crate) fn group_predicates_by_ty<'tcx>( tcx: ty::TyCtxt<'tcx>, predicates: impl IntoIterator>, ) -> GroupedClauses<'tcx> { diff --git a/crates/argus/src/aadebug/mod.rs b/crates/argus/src/aadebug/mod.rs index d37665c..3b69702 100644 --- a/crates/argus/src/aadebug/mod.rs +++ b/crates/argus/src/aadebug/mod.rs @@ -6,12 +6,10 @@ use std::time::Instant; use anyhow::Result; use argus_ext::ty::EvaluationResultExt; use index_vec::IndexVec; -use rustc_data_structures::fx::FxHashMap as HashMap; use rustc_infer::traits::solve::GoalSource; use rustc_trait_selection::solve::inspect::{InspectCandidate, InspectGoal}; use rustc_utils::timer; use serde::Serialize; -use serde_json as json; #[cfg(feature = 
"testing")] use ts_rs::TS; @@ -28,12 +26,6 @@ pub struct Storage<'tcx> { #[cfg_attr(feature = "testing", ts(export))] pub struct AnalysisResults { pub problematic_sets: Vec, - - #[cfg_attr( - feature = "testing", - ts(type = "Record") - )] - pub impl_candidates: HashMap>, } impl<'tcx> Storage<'tcx> { @@ -118,15 +110,12 @@ impl<'tcx> Storage<'tcx> { let tree_start = Instant::now(); let mut sets = vec![]; - tree.for_correction_set(|conjunct| { - sets.push(tree.weight(&conjunct)); - }); + tree.for_correction_set(|conjunct| sets.push(tree.weight(conjunct))); timer::elapsed("aadeg::into_results", tree_start); AnalysisResults { problematic_sets: sets, - impl_candidates: tree.reportable_impl_candidates(), } } } diff --git a/crates/argus/src/aadebug/tree.rs b/crates/argus/src/aadebug/tree.rs index 54a6267..4d46b9b 100644 --- a/crates/argus/src/aadebug/tree.rs +++ b/crates/argus/src/aadebug/tree.rs @@ -1,12 +1,7 @@ use std::{cell::RefCell, ops::Deref, time::Instant}; -use argus_ext::{ - rustc::InferCtxtExt, - ty::{EvaluationResultExt, PredicateExt, TyCtxtExt, TyExt}, -}; -use argus_ser as ser; +use argus_ext::ty::{EvaluationResultExt, TyCtxtExt, TyExt}; use index_vec::IndexVec; -use rustc_data_structures::fx::FxHashMap as HashMap; use rustc_infer::infer::InferCtxt; use rustc_middle::{ traits::solve::{CandidateSource, Goal as RGoal}, @@ -22,7 +17,6 @@ use super::dnf::{And, Dnf}; use crate::{ analysis::EvaluationResult, proof_tree::{topology::TreeTopology, ProofNodeIdx}, - tls, }; pub type I = ProofNodeIdx; @@ -175,6 +169,8 @@ impl<'a, 'tcx> Goal<'a, 'tcx> { } fn analyze(&self) -> Heuristic { + use std::cmp::Ordering; + // We should only be analyzing failed predicates assert!(!self.result.is_yes()); @@ -194,16 +190,14 @@ impl<'a, 'tcx> Goal<'a, 'tcx> { log::debug!("Fn Args {:?}", t.trait_ref.args.into_type_list(tcx)); log::debug!("{} v {}", fn_arity, trait_arity); - if fn_arity > trait_arity { - GoalKind::DeleteFnParams { - delta: fn_arity - trait_arity, - } - } else if fn_arity < trait_arity { - GoalKind::AddFnParams { + match fn_arity.cmp(&trait_arity) { + Ordering::Less => GoalKind::AddFnParams { delta: trait_arity - fn_arity, - } - } else { - GoalKind::IncorrectParams { arity: fn_arity } + }, + Ordering::Greater => GoalKind::DeleteFnParams { + delta: fn_arity - trait_arity, + }, + Ordering::Equal => GoalKind::IncorrectParams { arity: fn_arity }, } } @@ -404,10 +398,6 @@ impl<'a, 'tcx: 'a> T<'a, 'tcx> { } pub fn dnf(&self) -> impl Deref> + '_ { - if self.dnf.borrow().is_some() { - return self.expect_dnf(); - } - fn _goal(this: &T, goal: &Goal) -> Option> { if !((this.maybe_ambiguous && goal.result.is_maybe()) || goal.result.is_no()) @@ -436,6 +426,10 @@ impl<'a, 'tcx: 'a> T<'a, 'tcx> { Dnf::and(goals.filter_map(|g| _goal(this, &g))) } + if self.dnf.borrow().is_some() { + return self.expect_dnf(); + } + let dnf_report_msg = format!("Normalizing to DNF from {} nodes", self.ns.len()); let dnf_start = Instant::now(); @@ -487,43 +481,6 @@ impl<'a, 'tcx: 'a> T<'a, 'tcx> { goals, } } - - pub fn reportable_impl_candidates( - &self, - ) -> HashMap> { - let mut indices = Vec::default(); - self.for_correction_set(|and| indices.extend(and.iter().copied())); - - let goals_only = indices.iter().filter_map(|&idx| self.goal(idx)); - - let trait_goals = goals_only.filter(|g| { - matches!( - g.analyze().kind, - GoalKind::Trait { .. } | GoalKind::FnToTrait { .. 
} - ) - }); - - trait_goals - .filter_map(|g| { - g.predicate().as_trait_predicate().map(|tp| { - let candidates = g - .infcx - .find_similar_impl_candidates(tp) - .into_iter() - .filter_map(|can| { - let header = - ser::argus::get_opt_impl_header(g.infcx.tcx, can.impl_def_id)?; - Some(tls::unsafe_access_interner(|ty_interner| { - ser::to_value_expect(g.infcx, ty_interner, &header) - })) - }) - .collect(); - - (g.idx, candidates) - }) - }) - .collect() - } } // ------------------ diff --git a/crates/argus/src/analysis/entry.rs b/crates/argus/src/analysis/entry.rs index 5ce7504..61f8d6d 100644 --- a/crates/argus/src/analysis/entry.rs +++ b/crates/argus/src/analysis/entry.rs @@ -55,39 +55,42 @@ pub fn process_obligation<'tcx>( log::trace!("RECV OBLIGATION {result:?} {obl:?}"); - // Use this to get rid of any resolved inference variables, - // these could have been resolved while trying to solve the obligation - // and we want to present it as such to the user. - let obl = &infcx.resolve_vars_if_possible(obl.clone()); - - // HACK: Remove ambiguous obligations if a "stronger" result was found and - // the predicate implies the previous. This is necessary because we - // can't (currently) distinguish between a subsequent solving attempt - // of a previous obligation. - if result.is_yes() || result.is_no() { - tls::drain_implied_ambiguities(infcx, obl); - } + // Anything we accidentally do in here should not affect type checking + infcx.probe(|_| { + // Use this to get rid of any resolved inference variables, + // these could have been resolved while trying to solve the obligation + // and we want to present it as such to the user. + let obl = &infcx.resolve_vars_if_possible(obl.clone()); - if !INCLUDE_SUCCESSES.copied().unwrap_or(false) && result.is_yes() { - log::debug!("Skipping successful obligation {obl:?}"); - return; - } + // HACK: Remove ambiguous obligations if a "stronger" result was found and + // the predicate implies the previous. This is necessary because we + // can't (currently) distinguish between a subsequent solving attempt + // of a previous obligation. + // if result.is_yes() || result.is_no() { + // tls::drain_implied_ambiguities(infcx, obl); + // } - let necessity = infcx.obligation_necessity(obl); - let dataid = if matches!(necessity, ObligationNecessity::Yes) - || (matches!(necessity, ObligationNecessity::OnError) && result.is_no()) - { - Some(tls::unsafe_store_data(infcx, obl, result)) - } else { - None - }; + if !INCLUDE_SUCCESSES.copied().unwrap_or(false) && result.is_yes() { + log::debug!("Skipping successful obligation {obl:?}"); + return; + } - let obligation = - transform::compute_provenance(body_id, infcx, obl, result, dataid); + let necessity = infcx.obligation_necessity(obl); + let dataid = if matches!(necessity, ObligationNecessity::Yes) + || (matches!(necessity, ObligationNecessity::OnError) && result.is_no()) + { + Some(tls::unsafe_store_data(infcx, obl, result)) + } else { + None + }; - tls::store_obligation(obligation); + let obligation = + transform::compute_provenance(body_id, infcx, obl, result, dataid); - tls::replace_reported_errors(infcx); + tls::store_obligation(obligation); + + tls::replace_reported_errors(infcx); + }); } pub fn process_obligation_for_tree<'tcx>( @@ -101,23 +104,25 @@ pub fn process_obligation_for_tree<'tcx>( // Must go after the synthetic check. guard_inspection! 
{} - // Use this to get rid of any resolved inference variables, - // these could have been resolved while trying to solve the obligation - // and we want to present it as such to the user. - let obl = &infcx.resolve_vars_if_possible(obl.clone()); + infcx.probe(|_| { + // Use this to get rid of any resolved inference variables, + // these could have been resolved while trying to solve the obligation + // and we want to present it as such to the user. + let obl = &infcx.resolve_vars_if_possible(obl.clone()); - let fdata = infcx.bless_fulfilled(obl, result); + let fdata = infcx.bless_fulfilled(obl, result); - if fdata.hash != target.hash { - return; - } + if fdata.hash != target.hash { + return; + } - match generate_tree(infcx, obl, fdata.result) { - Ok(stree) => tls::store_tree(stree), - Err(e) => { - log::error!("matching target tree not generated {e:?}"); + match generate_tree(infcx, obl, fdata.result) { + Ok(stree) => tls::store_tree(stree), + Err(e) => { + log::error!("matching target tree not generated {e:?}"); + } } - } + }); }); } @@ -214,10 +219,13 @@ pub(in crate::analysis) fn build_obligations_in_body<'tcx>( // so as a first heuristic, if the body isn't tainted by errors, we'll just remove // all non-successful obligations. if typeck_results.tainted_by_errors.is_none() { - log::info!("BODY HAS NO ERRORS"); + log::debug!( + "Removing failures: Body not-tainted {:?}", + typeck_results.hir_owner + ); obligations.retain(|prov| prov.it.result.is_yes()); } else { - log::info!("BODY TAINTED! {:?}", typeck_results.hir_owner); + log::debug!("Body tainted! {:?}", typeck_results.hir_owner); } let ctx = ErrorAssemblyCtx { diff --git a/crates/argus/src/analysis/hir.rs b/crates/argus/src/analysis/hir.rs index 9a519a7..6b7f6b4 100644 --- a/crates/argus/src/analysis/hir.rs +++ b/crates/argus/src/analysis/hir.rs @@ -3,7 +3,7 @@ use rustc_data_structures::fx::FxHashMap as HashMap; use rustc_hir::{ self as hir, intravisit::Visitor as HirVisitor, BodyId, HirId, }; -use rustc_middle::ty::TyCtxt; +use rustc_middle::{hir::nested_filter, ty::TyCtxt}; use rustc_span::Span; use crate::types::intermediate::ErrorAssemblyCtx; @@ -48,6 +48,8 @@ fn bin_expressions( }; binner.visit_body(ctx.tcx.hir().body(ctx.body_id)); + + // Add remaining miscellaneous unbinned obligations let mut bins = binner.bins; for (hir_id, obligations) in map { bins.push(Bin { @@ -65,14 +67,13 @@ pub enum BinKind { CallableExpr, CallArg, Call, - // MethodCall, - // MethodReceiver, Misc, } pub struct Bin { pub hir_id: HirId, - // TODO: use IndexVec for obligations. + // TODO: use IndexVec for obligations instead of the-- + // // usize indexes into the obligation vec pub obligations: Vec, pub kind: BinKind, @@ -110,14 +111,25 @@ impl BinCreator<'_, '_> { } } -impl<'a, 'tcx: 'a> HirVisitor<'_> for BinCreator<'a, 'tcx> { +impl<'a, 'tcx: 'a> HirVisitor<'tcx> for BinCreator<'a, 'tcx> { + type NestedFilter = nested_filter::All; + + fn nested_visit_map(&mut self) -> Self::Map { + self.ctx.tcx.hir() + } + // FIXME: after updating to nightly-2024-05-20 this binning logic broke slightly. // Obligations associated with parameters are now being assigned to the overall call, // this makes more things use a method call table than necessary. - fn visit_expr(&mut self, ex: &hir::Expr) { + fn visit_expr(&mut self, ex: &'tcx hir::Expr) { // Drain nested obligations first to match the most specific node possible. 
hir::intravisit::walk_expr(self, ex); + log::debug!( + "Visiting expression: {}", + self.ctx.tcx.hir().node_to_string(ex.hir_id) + ); + match ex.kind { hir::ExprKind::Call(callable, args) => { for arg in args { @@ -150,29 +162,41 @@ pub fn find_most_enclosing_node( span: Span, ) -> Option { let hir = tcx.hir(); - let mut node_finder = FindNodeBySpan::new(span); + let mut node_finder = FindNodeBySpan::new(tcx, span); + + log::trace!( + "Finding HirId for span: {:?}, in body {:?}", + span, + hir.body(body_id) + ); + node_finder.visit_body(hir.body(body_id)); node_finder .result - // NOTE: this should not happen because there must *at least* be an enclosing item. + // NOTE: there should always be an enclosing body somewhere, this could be an expect .map(|t| t.0) } -// NOTE: this probably needs to be expanded to account for all nodes, not just expressions. -struct FindNodeBySpan { +/// Visitor for finding a `HirId` given a span. +/// +/// Similar to what happens in `rustc_trait_selection::traits::error_reporting`, but we +/// find spans that match as closely as possible and not just those that match exactly. +struct FindNodeBySpan<'tcx> { + tcx: TyCtxt<'tcx>, pub span: Span, pub result: Option<(HirId, Span)>, } -// Code taken from rustc_trait_selection::traits::error_reporting, -// modified to find items that enclose the span, not just match it -// exactly. -// TODO: this should work on all nodes, not just expressions. -impl FindNodeBySpan { - pub fn new(span: Span) -> Self { - Self { span, result: None } +impl<'tcx> FindNodeBySpan<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>, span: Span) -> Self { + Self { + tcx, + span, + result: None, + } } + /// Is span `s` a closer match than the current best? fn is_better_match(&self, s: Span) -> bool { s.overlaps(self.span) && match self.result { @@ -195,7 +219,7 @@ impl FindNodeBySpan { macro_rules! simple_visitors { ( $( [$visitor:ident, $walker:ident, $t:ty], )* ) => {$( - fn $visitor(&mut self, v: &$t) { + fn $visitor(&mut self, v: &'tcx $t) { hir::intravisit::$walker(self, v); if self.is_better_match(v.span) { self.result = Some((v.hir_id, v.span)); @@ -204,7 +228,13 @@ macro_rules! simple_visitors { }; } -impl HirVisitor<'_> for FindNodeBySpan { +impl<'tcx> HirVisitor<'tcx> for FindNodeBySpan<'tcx> { + type NestedFilter = nested_filter::All; + + fn nested_visit_map(&mut self) -> Self::Map { + self.tcx.hir() + } + simple_visitors! 
{ [visit_param, walk_param, hir::Param], [visit_local, walk_local, hir::LetStmt], diff --git a/crates/argus/src/analysis/mod.rs b/crates/argus/src/analysis/mod.rs index eae0150..2e3cfa5 100644 --- a/crates/argus/src/analysis/mod.rs +++ b/crates/argus/src/analysis/mod.rs @@ -49,6 +49,7 @@ pub fn tree(tcx: TyCtxt, body_id: BodyId) -> Result { let typeck_results = tcx.inspect_typeck(body_id, entry::process_obligation_for_tree); + // tcx.inspect_typeck(body_id, entry::process_obligation); entry::build_tree_output(tcx, body_id, typeck_results) } diff --git a/crates/argus/src/analysis/transform.rs b/crates/argus/src/analysis/transform.rs index b85d60d..4317f4f 100644 --- a/crates/argus/src/analysis/transform.rs +++ b/crates/argus/src/analysis/transform.rs @@ -8,7 +8,7 @@ use argus_ext::{ use index_vec::IndexVec; use indexmap::IndexSet; use rustc_data_structures::fx::{FxHashMap as HashMap, FxIndexMap}; -use rustc_hir::{BodyId, HirId}; +use rustc_hir::{self as hir, intravisit::Map, BodyId, HirId}; use rustc_infer::{infer::InferCtxt, traits::PredicateObligation}; use rustc_middle::ty::{TyCtxt, TypeckResults}; use rustc_span::Span; @@ -46,9 +46,11 @@ pub fn compute_provenance<'tcx>( ) -> Provenance { let hir = infcx.tcx.hir(); let fdata = infcx.bless_fulfilled(obligation, result); + // If the span is coming from a macro, point to the callsite. let callsite_cause_span = infcx.tcx.to_local(body_id, fdata.obligation.cause.span); + let hir_id = hier_hir::find_most_enclosing_node(infcx.tcx, body_id, callsite_cause_span) .unwrap_or_else(|| hir.body_owner(body_id)); @@ -182,6 +184,17 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { span.sanitized_snippet(source_map) } + fn hir_id_to_span(&self, hir_id: HirId) -> Span { + let hir = self.tcx.hir(); + match hir.hir_node(hir_id) { + hir::Node::Expr(hir::Expr { + kind: hir::ExprKind::MethodCall(_, _, _, span), + .. + }) => self.to_local(*span), + _ => self.to_local(hir.span_with_body(hir_id)), + } + } + fn sort_bins(&mut self, bins: Vec) { use ExprKind as EK; @@ -193,7 +206,7 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { mut obligations, kind, } = bin; - let span = self.to_local(hir.span_with_body(hir_id)); + let span = self.hir_id_to_span(hir_id); let snippet = self.local_snip(span); let Ok(range) = CharRange::from_span(span, source_map) else { log::error!( @@ -286,9 +299,7 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { let uoidx = prov.full_data?; let full_data = self.full_data.get(uoidx); tree_search::tree_contains_in_branchless( - // something &full_data.infcx, - // something &full_data.obligation, needle, ) @@ -297,15 +308,7 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { } fn relate_trait_bounds(&mut self) { - for (&span, predicates) in self.reported_trait_errors { - let span = self.to_local(span); - let range = CharRange::from_span(span, self.tcx.sess.source_map()) - .expect("failed to get range for reported trait error"); - - log::debug!( - "Relating trait bounds:\nrange {range:?}\nspan: {span:?}\n{predicates:#?}" - ); - + for (error_span, predicates) in self.reported_trait_errors { // Search for the obligation hash in our set of computed obligations. 
let predicates = predicates .iter() @@ -334,18 +337,20 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { self.trait_errors.push(TraitError { idx: expr_id, - range, + range: self.exprs[expr_id].range, hashes, }); continue; } - log::error!("failed to find root expression for {span:?} {predicates:?}"); + log::error!( + "failed to find root expression for {error_span:?} {predicates:?}" + ); // A predicate did not match exactly, now we're scrambling // to find an expression by span, and pick an obligation. let Some(err_hir_id) = - hier_hir::find_most_enclosing_node(self.tcx, self.body_id, span) + hier_hir::find_most_enclosing_node(self.tcx, self.body_id, *error_span) else { log::error!("reported error doesn't have an associated span ..."); continue; @@ -377,7 +382,7 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { // Mark the found Expr as containing an error. self.trait_errors.push(TraitError { idx: *expr_id, - range, + range: self.exprs[*expr_id].range, hashes: vec![], }); } diff --git a/crates/argus/src/proof_tree/interners.rs b/crates/argus/src/proof_tree/interners.rs index f2b4e9b..55d25ee 100644 --- a/crates/argus/src/proof_tree/interners.rs +++ b/crates/argus/src/proof_tree/interners.rs @@ -183,7 +183,11 @@ impl Interners { .insert(CanKey::ParamEnv(idx), CandidateData::ParamEnv(idx)) } - fn intern_impl(&mut self, infcx: &InferCtxt, def_id: DefId) -> CandidateIdx { + pub(super) fn intern_impl( + &mut self, + infcx: &InferCtxt, + def_id: DefId, + ) -> CandidateIdx { if let Some(i) = self.candidates.get_idx(&CanKey::Impl(def_id)) { return i; } diff --git a/crates/argus/src/proof_tree/mod.rs b/crates/argus/src/proof_tree/mod.rs index 26c579c..8f71fc6 100644 --- a/crates/argus/src/proof_tree/mod.rs +++ b/crates/argus/src/proof_tree/mod.rs @@ -110,9 +110,13 @@ pub struct SerializedTree { pub projection_values: HashMap, + pub all_impl_candidates: HashMap>, + pub topology: TreeTopology, + #[serde(skip_serializing_if = "Option::is_none")] pub cycle: Option, + pub analysis: aadebug::AnalysisResults, } diff --git a/crates/argus/src/proof_tree/serialize.rs b/crates/argus/src/proof_tree/serialize.rs index fd7b062..da61ec2 100644 --- a/crates/argus/src/proof_tree/serialize.rs +++ b/crates/argus/src/proof_tree/serialize.rs @@ -1,5 +1,8 @@ use anyhow::{bail, Result}; -use argus_ext::ty::{EvaluationResultExt, TyExt}; +use argus_ext::{ + rustc::InferCtxtExt, + ty::{EvaluationResultExt, PredicateExt, TyExt}, +}; use index_vec::IndexVec; use rustc_hir::def_id::DefId; use rustc_infer::infer::InferCtxt; @@ -22,6 +25,7 @@ pub struct SerializedTreeVisitor<'tcx> { pub topology: TreeTopology, pub cycle: Option, pub projection_values: HashMap, + pub all_impl_candidates: HashMap>, deferred_leafs: Vec<(ProofNodeIdx, EvaluationResult)>, interners: Interners, @@ -37,6 +41,7 @@ impl SerializedTreeVisitor<'_> { topology: TreeTopology::new(), cycle: None, projection_values: HashMap::default(), + all_impl_candidates: HashMap::default(), deferred_leafs: Vec::default(), interners: Interners::default(), @@ -45,11 +50,12 @@ impl SerializedTreeVisitor<'_> { } fn check_goal_projection(&mut self, goal: &InspectGoal) { - if let ty::PredicateKind::AliasRelate( - t1, - t2, - ty::AliasRelationDirection::Equate, - ) = goal.goal().predicate.kind().skip_binder() + if goal.result().is_yes() + && let ty::PredicateKind::AliasRelate( + t1, + t2, + ty::AliasRelationDirection::Equate, + ) = goal.goal().predicate.kind().skip_binder() && let Some(mut t1) = t1.ty() && let Some(mut t2) = t2.ty() // Disallow projections involving two 
aliases @@ -117,6 +123,7 @@ impl SerializedTreeVisitor<'_> { mut interners, aadebug, deferred_leafs, + all_impl_candidates, .. } = self else { @@ -143,6 +150,7 @@ impl SerializedTreeVisitor<'_> { results, tys, projection_values, + all_impl_candidates, topology, cycle, analysis, @@ -154,7 +162,7 @@ impl SerializedTreeVisitor<'_> { // interned keys does essentially). We should wait until the new trait solver // has some mechanism for detecting cycles and piggy back off that. // FIXME: this is currently dissabled but we should check for cycles again... - #[allow(dead_code)] + #[allow(dead_code, unused)] fn check_for_cycle_from(&mut self, from: ProofNodeIdx) { if self.cycle.is_some() { return; @@ -226,6 +234,25 @@ impl<'tcx> SerializedTreeVisitor<'tcx> { } } } + + fn record_all_impls( + &mut self, + idx: ProofNodeIdx, + goal: &InspectGoal<'_, 'tcx>, + ) { + // If the Goal is a TraitPredicate we will cache *all* possible implementors + if let Some(tp) = goal.goal().predicate.as_trait_predicate() { + let infcx = goal.infcx(); + for can in infcx.find_similar_impl_candidates(tp) { + let can_idx = self.interners.intern_impl(infcx, can.impl_def_id); + self + .all_impl_candidates + .entry(idx) + .or_default() + .push(can_idx); + } + } + } } impl<'tcx> ProofTreeVisitor<'tcx> for SerializedTreeVisitor<'tcx> { @@ -240,6 +267,10 @@ impl<'tcx> ProofTreeVisitor<'tcx> for SerializedTreeVisitor<'tcx> { let here_node = self.interners.mk_goal_node(goal); let here_idx = self.nodes.push(here_node); + + // Record all the possible candidate impls for this goal. + self.record_all_impls(here_idx, goal); + // Push node into the analysis tree. self.aadebug.push_goal(here_idx, goal).unwrap(); diff --git a/crates/argus/src/tls.rs b/crates/argus/src/tls.rs index a4ed6f8..cfb6b39 100644 --- a/crates/argus/src/tls.rs +++ b/crates/argus/src/tls.rs @@ -29,8 +29,6 @@ const DRAIN_WINDOW: usize = 100; // NOTE: we use thread local storage to accumulate obligations // accross call to the obligation inspector in `typeck_inspect`. // DO NOT set this directly, make sure to use the function `push_obligaion`. -// -// TODO: documentation thread_local! { static BODY_DEF_PATH: RefCell> = RefCell::default(); @@ -49,13 +47,19 @@ pub fn store_obligation(obl: Provenance) { }); } -// TODO: using `infcx` for error implication panics, but using +// FIXME: using `infcx` for error implication panics, but using // stored contexts doesn't. Investigate why, as this certainly // isn't a "solution." +// +// NOTE this causes an internal compiler error that *should not happen*, it's setting +// tainted by errors to be true when it shouldn't. Disabling for now. +#[allow(unused)] pub fn drain_implied_ambiguities<'tcx>( _infcx: &InferCtxt<'tcx>, obligation: &PredicateObligation<'tcx>, ) { + return; + OBLIGATIONS.with(|obls| { let mut obls = obls.borrow_mut(); diff --git a/crates/argus/src/types.rs b/crates/argus/src/types.rs index 2777ccc..d19e664 100644 --- a/crates/argus/src/types.rs +++ b/crates/argus/src/types.rs @@ -354,17 +354,18 @@ pub(super) mod intermediate { use super::*; - // The provenance about where an element came from, - // or was "spawned from," in the HIR. This type is intermediate - // but stored in the TLS, it shouldn't capture lifetimes but - // can capture unstable hashes. + /// The provenance from where an element came, or was "spawned from," + /// in the HIR. This type is intermediate but stored in the TLS, it + /// shouldn't capture lifetimes but can capture unstable hashes. 
pub(crate) struct Provenance { - // The expression from whence `it` came, the - // referenced element is expected to be an - // expression. + /// The expression from whence `it` came, the referenced element + /// is expected to be an expression. pub hir_id: HirId, - // Index into the full provenance data, this is stored for interesting obligations. + + /// Index into the full provenance data, this is stored for interesting obligations. pub full_data: Option, + + /// The actual element. pub it: T, } diff --git a/ide/packages/common/src/BodyInfo.ts b/ide/packages/common/src/BodyInfo.ts index 72ddbb6..1d7cad1 100644 --- a/ide/packages/common/src/BodyInfo.ts +++ b/ide/packages/common/src/BodyInfo.ts @@ -5,18 +5,29 @@ import type { Expr, ExprIdx, Obligation, - ObligationHash, ObligationIdx, ObligationsInBody } from "./bindings"; -import { isHiddenObl } from "./func"; +import { isVisibleObligation } from "./func"; class BodyInfo { + private existsImportantFailure; + constructor( private readonly oib: ObligationsInBody, - readonly idx: number, public readonly showHidden: boolean - ) {} + ) { + // An important failure is a *necessary* and *failing* obligation. We say that + // there exists an important failure if any of the expressions has an obligation + // that meets this criteria. + this.existsImportantFailure = false; + this.existsImportantFailure = _.some(this.exprs(), eidx => + _.some(this.obligations(eidx), oidx => { + const o = this.obligation(oidx)!; + return o.result === "no" && o.necessity === "Yes"; + }) + ); + } get hash(): BodyHash { return this.oib.hash; @@ -46,44 +57,21 @@ class BodyInfo { return this.oib.tys; } - notHidden(hash: ObligationIdx): boolean { - const o = this.getObligation(hash); - if (o === undefined) { - return false; - } - return this.showHidden || isHiddenObl(o); - } - exprs(): ExprIdx[] { - return _.map(this.oib.exprs, (_, idx) => idx); - } - - hasVisibleExprs() { - return _.some(this.exprs(), idx => this.hasVisibleObligations(idx)); - } - - hasVisibleObligations(idx: ExprIdx) { - return _.some(this.oib.exprs[idx].obligations, i => this.notHidden(i)); + return _.range(0, this.oib.exprs.length); } - byHash(hash: ObligationHash): Obligation | undefined { - return this.oib.obligations.find(o => o.hash === hash); + expr(idx: ExprIdx): Expr | undefined { + return this.oib.exprs[idx]; } - getObligation(idx: ObligationIdx): Obligation { - return this.oib.obligations[idx]; + private visibleObligations(idx: ExprIdx): ObligationIdx[] { + return _.filter(this.oib.exprs[idx].obligations, i => this.isVisible(i)); } - getExpr(idx: ExprIdx): Expr { - return this.oib.exprs[idx]; - } - - visibleObligations(idx: ExprIdx): ObligationIdx[] { - const filtered = _.filter(this.oib.exprs[idx].obligations, i => - this.notHidden(i) - ); - const sorted = _.sortBy(filtered, i => { - switch (this.getObligation(i).result) { + obligations(idx: ExprIdx): ObligationIdx[] { + return _.sortBy(this.visibleObligations(idx), i => { + switch (this.obligation(i)!.result) { case "no": return 0; case "yes": @@ -92,7 +80,38 @@ class BodyInfo { return 1; } }); - return sorted; + } + + obligation(idx: ObligationIdx): Obligation | undefined { + return this.oib.obligations[idx]; + } + + // Does this body have any expressions that have visible obligations? + hasVisibleExprs() { + return _.some(this.exprs(), idx => this.hasVisibleObligations(idx)); + } + + // Does the given expression have any visible obligations? 
+ hasVisibleObligations(idx: ExprIdx) { + return this.visibleObligations(idx).length > 0; + } + + // Is the given obligation visible? + isVisible(idx: ObligationIdx) { + const o = this.obligation(idx); + if (o === undefined) return false; + + return ( + this.showHidden || + isVisibleObligation( + o, + // HACK: If there is a failing obligation, we filter ambiguities. This is + // a short workaround for a backend incompleteness. We can't filter obligations + // that get resolved in a second round of trait solving, this leaves Argus with + // more "failures" than rustc shows. + this.existsImportantFailure + ) + ); } } diff --git a/ide/packages/common/src/TreeInfo.ts b/ide/packages/common/src/TreeInfo.ts index feaef1d..ce6cffe 100644 --- a/ide/packages/common/src/TreeInfo.ts +++ b/ide/packages/common/src/TreeInfo.ts @@ -6,7 +6,6 @@ import type { EvaluationResult, GoalIdx, GoalKind, - ImplHeader, ProofNodeIdx, ResultIdx, SerializedTree, @@ -443,8 +442,8 @@ export class TreeInfo { return _.min(_.map(hs, TreeInfo.setInertia)) ?? 10_000; } - public implCandidates(idx: ProofNodeIdx): ImplHeader[] | undefined { - return this.tree.analysis.implCandidates[idx]; + public implCandidates(idx: ProofNodeIdx): CandidateIdx[] | undefined { + return this.tree.allImplCandidates[idx]; } } diff --git a/ide/packages/common/src/func.ts b/ide/packages/common/src/func.ts index ae81a5e..98ab8c7 100644 --- a/ide/packages/common/src/func.ts +++ b/ide/packages/common/src/func.ts @@ -5,8 +5,10 @@ import type { BoundTyKind, BoundVariableKind, CharRange, + EvaluationResult, GenericArg, ObligationHash, + ObligationNecessity, Predicate, Region, Ty, @@ -65,11 +67,19 @@ export function makeHighlightPosters( return [addHighlight, removeHighlight]; } -export function isHiddenObl(o: { necessity: string; result: string }) { - return ( - o.necessity === "Yes" || (o.necessity === "OnError" && o.result === "no") - ); -} +export const isVisibleObligation = ( + o: { necessity: ObligationNecessity; result: EvaluationResult }, + filterAmbiguities = false +) => + // Short-circuit ambiguities if we're filtering them + !( + (o.result === "maybe-ambiguity" || o.result === "maybe-overflow") && + filterAmbiguities + ) && + // If the obligation is listed as necessary, it's visible + (o.necessity === "Yes" || + // If the obligation is listed as necessary on error, and it failed, it's visible + (o.necessity === "OnError" && o.result === "no")); export function searchObject(obj: any, target: any) { for (let key in obj) { diff --git a/ide/packages/panoptes/src/App.css b/ide/packages/panoptes/src/App.css index e184195..bff7a5d 100644 --- a/ide/packages/panoptes/src/App.css +++ b/ide/packages/panoptes/src/App.css @@ -23,4 +23,5 @@ .DefinitionWrapper.hovered.meta-pressed { color: var(--vscode-editorLink-activeForeground); text-decoration: underline; + cursor: pointer; } \ No newline at end of file diff --git a/ide/packages/panoptes/src/Expr.tsx b/ide/packages/panoptes/src/Expr.tsx index 95099d2..850f4e2 100644 --- a/ide/packages/panoptes/src/Expr.tsx +++ b/ide/packages/panoptes/src/Expr.tsx @@ -16,7 +16,9 @@ import { HighlightTargetStore } from "./signals"; const Expr = observer(({ idx }: { idx: ExprIdx }) => { const bodyInfo = useContext(BodyInfoContext)!; const file = useContext(FileContext)!; - const expr = bodyInfo.getExpr(idx); + const expr = bodyInfo.expr(idx); + if (expr === undefined) return null; + const messageSystem = useContext(AppContext.MessageSystemContext)!; const [addHighlight, removeHighlight] = makeHighlightPosters( messageSystem, 
@@ -28,7 +30,7 @@ const Expr = observer(({ idx }: { idx: ExprIdx }) => { return null; } - const visibleObligations = bodyInfo.visibleObligations(idx); + const visibleObligations = bodyInfo.obligations(idx); if (visibleObligations.length === 0) { return null; } diff --git a/ide/packages/panoptes/src/File.tsx b/ide/packages/panoptes/src/File.tsx index 85fb21e..c7b5b5a 100644 --- a/ide/packages/panoptes/src/File.tsx +++ b/ide/packages/panoptes/src/File.tsx @@ -74,10 +74,7 @@ export interface FileProps { const File = ({ file, osibs }: FileProps) => { const showHidden = useContext(AppContext.ShowHiddenObligationsContext); - const bodyInfos = _.map( - osibs, - (osib, idx) => new BodyInfo(osib, idx, showHidden) - ); + const bodyInfos = _.map(osibs, osib => new BodyInfo(osib, showHidden)); const noBodiesFound = ( diff --git a/ide/packages/panoptes/src/MiniBuffer.css b/ide/packages/panoptes/src/MiniBuffer.css index cb75f1e..ee1907c 100644 --- a/ide/packages/panoptes/src/MiniBuffer.css +++ b/ide/packages/panoptes/src/MiniBuffer.css @@ -1,4 +1,7 @@ #MiniBuffer { + /* show the minibuffer on top of all other content, + * and stick it to the bottom of the screen. + */ z-index: 1; position: fixed; bottom: 0; @@ -18,10 +21,11 @@ #MiniBuffer > h2 { padding: 0; margin: 0 0 0.5em 0; - font-size: 0.6em; + font-size: 0.8em; } #MiniBuffer > i { + /* Stick the tack icon to the top-right of the buffer */ position: absolute; top: 0; right: 0; diff --git a/ide/packages/panoptes/src/Obligation.tsx b/ide/packages/panoptes/src/Obligation.tsx index 9a03c7f..a15f7da 100644 --- a/ide/packages/panoptes/src/Obligation.tsx +++ b/ide/packages/panoptes/src/Obligation.tsx @@ -28,13 +28,17 @@ import { HighlightTargetStore } from "./signals"; export const ObligationFromIdx = ({ idx }: { idx: ObligationIdx }) => { const bodyInfo = useContext(BodyInfoContext)!; - const o = bodyInfo.getObligation(idx); + const o = bodyInfo.obligation(idx); + if (o === undefined) return null; + return ; }; export const ObligationResultFromIdx = ({ idx }: { idx: ObligationIdx }) => { const bodyInfo = useContext(BodyInfoContext)!; - const o = bodyInfo.getObligation(idx); + const o = bodyInfo.obligation(idx); + if (o === undefined) return null; + return ; }; diff --git a/ide/packages/panoptes/src/TreeView/BottomUp.css b/ide/packages/panoptes/src/TreeView/BottomUp.css index 4f3da03..da2e5d8 100644 --- a/ide/packages/panoptes/src/TreeView/BottomUp.css +++ b/ide/packages/panoptes/src/TreeView/BottomUp.css @@ -1,13 +1,11 @@ .FailingSet { position: relative; margin-bottom: 0.25em; - - transition: all 0.25s ease; } .FailingSet.is-hovered { - border: 2px dashed var(--vscode-focusBorder); - padding: 0.25em; + outline: 2pt dashed var(--vscode-focusBorder); + border-radius: 3pt; } .FailingSet > i { diff --git a/ide/packages/panoptes/src/TreeView/Directory.css b/ide/packages/panoptes/src/TreeView/Directory.css index a949641..29ac034 100644 --- a/ide/packages/panoptes/src/TreeView/Directory.css +++ b/ide/packages/panoptes/src/TreeView/Directory.css @@ -29,10 +29,6 @@ gap: 10px; } -.DirNode.WhereConstraintArea { - flex-direction: column; -} - .DirNodeLabel { display: flex; } diff --git a/ide/packages/panoptes/src/TreeView/Directory.tsx b/ide/packages/panoptes/src/TreeView/Directory.tsx index be58457..206d551 100644 --- a/ide/packages/panoptes/src/TreeView/Directory.tsx +++ b/ide/packages/panoptes/src/TreeView/Directory.tsx @@ -99,15 +99,12 @@ export const DirNode = ({ ); + const info = ( + + {infoChild} + + ); - const info = - Wrappers === undefined ? 
( - infoChild - ) : ( - - {infoChild} - - ); const startOpen = startOpenP ? startOpenP(idx) : false; return ( diff --git a/ide/packages/panoptes/src/TreeView/Panels.tsx b/ide/packages/panoptes/src/TreeView/Panels.tsx index f6caf50..18a8034 100644 --- a/ide/packages/panoptes/src/TreeView/Panels.tsx +++ b/ide/packages/panoptes/src/TreeView/Panels.tsx @@ -72,7 +72,7 @@ const Panels = ({ // NOTE: rerenders should not occur if the user clicks on a tab. We cache the // elements in state to avoid this. IFF the change is *programatic*, meaning // some GUI action caused the change, we always want to force a rerender so that - // state change visuals are shown. + // state change animations are shown. useEffect(() => { console.debug(`Panel(${id}) params changed`, active, programaticSwitch); if (programaticSwitch) { @@ -126,6 +126,7 @@ const Panels = ({ if (idx < 0 || description.length <= idx) { setTabs(idx, undefined); setPanels(idx, undefined); + return; } const d = desc ?? description[idx]; diff --git a/ide/packages/panoptes/src/TreeView/TopDown.tsx b/ide/packages/panoptes/src/TreeView/TopDown.tsx index d35a8d4..328fef5 100644 --- a/ide/packages/panoptes/src/TreeView/TopDown.tsx +++ b/ide/packages/panoptes/src/TreeView/TopDown.tsx @@ -6,6 +6,7 @@ import _ from "lodash"; import React, { useContext } from "react"; import { DirRecursive } from "./Directory"; +import { WrapImplCandidates } from "./Wrappers"; const TopDown = ({ start }: { start?: ProofNodeIdx }) => { const tree = useContext(TreeAppContext.TreeContext)!; @@ -64,6 +65,7 @@ const TopDown = ({ start }: { start?: ProofNodeIdx }) => { })(); const renderParams: TreeRenderParams = { + Wrappers: [WrapImplCandidates], styleEdges: true, ...ops }; diff --git a/ide/packages/panoptes/src/TreeView/Wrappers.css b/ide/packages/panoptes/src/TreeView/Wrappers.css index 9d122ce..bffb6f7 100644 --- a/ide/packages/panoptes/src/TreeView/Wrappers.css +++ b/ide/packages/panoptes/src/TreeView/Wrappers.css @@ -24,10 +24,22 @@ } .WrapperBox { - display: inline-flex; - flex-direction: row; - justify-content: space-evenly; - gap: 0.25em; + display: none; + padding-left: 0.5em; +} + +.WrapperBox.is-hovered { + display: inline; +} + +.WrapperBox i.codicon { + /* Removes the inline-block default that causes the + icons to overflow the bounding box of the proof node labels + + TODO: why is this default necessary for the codicons, and how + come it breaks our bounding box here? CSS is a mystery. 
+ */ + display: inline; } .WrapperBox i:hover { diff --git a/ide/packages/panoptes/src/TreeView/Wrappers.tsx b/ide/packages/panoptes/src/TreeView/Wrappers.tsx index e1bdf13..9c25041 100644 --- a/ide/packages/panoptes/src/TreeView/Wrappers.tsx +++ b/ide/packages/panoptes/src/TreeView/Wrappers.tsx @@ -6,7 +6,6 @@ import type { import { TreeAppContext } from "@argus/common/context"; import { arrUpdate } from "@argus/common/func"; import { IcoListUL, IcoTreeDown } from "@argus/print/Icons"; -import { PrintImplHeader } from "@argus/print/lib"; import { FloatingArrow, FloatingFocusManager, @@ -23,6 +22,7 @@ import classNames from "classnames"; import _ from "lodash"; import React, { type ReactElement, useState, useContext, useRef } from "react"; import Graph from "./Graph"; +import { Candidate } from "./Node"; import "./Wrappers.css"; @@ -33,7 +33,11 @@ export const WrapNode = ({ }: React.PropsWithChildren<{ wrappers: InfoWrapper[]; n: ProofNodeIdx }>) => { const [hovered, setHovered] = useState(false); const [actives, setActives] = useState(Array(wrappers.length).fill(false)); + const active = _.some(actives); + const className = classNames("WrapperBox", { + "is-hovered": hovered || active + }); return ( setHovered(false)} > {children} - {(hovered || active) && ( - - {_.map(wrappers, (W, i) => ( - setActives(a => arrUpdate(a, i, b))} - /> - ))} - - )} + + {_.map(wrappers, (W, i) => ( + setActives(a => arrUpdate(a, i, b))} + /> + ))} + ); }; @@ -112,6 +114,7 @@ const DetailsPortal = ({ ref={refs.setFloating} style={floatingStyles} {...getFloatingProps()} + onClick={e => e.stopPropagation()} > ( export const WrapImplCandidates = ({ n, reportActive }: InfoWrapperProps) => { const tree = useContext(TreeAppContext.TreeContext)!; const candidates = tree.implCandidates(n); - - if (candidates === undefined || candidates.length === 0) { - return null; - } + if (candidates === undefined || candidates.length === 0) return null; return ( }> @@ -149,7 +149,7 @@ export const WrapImplCandidates = ({ n, reportActive }: InfoWrapperProps) => {
{_.map(candidates, (c, i) => (
- +
))}
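
The TypeScript changes earlier in this patch (in `func.ts` and `BodyInfo.ts`) replace `isHiddenObl` with `isVisibleObligation` and have `BodyInfo` precompute `existsImportantFailure`, so ambiguous results are hidden whenever a body already contains a necessary, failing obligation. Below is a minimal standalone sketch of that filtering rule; the `Necessity` and `Result` unions are simplified stand-ins for the generated binding types, and the helper names are illustrative rather than the package's actual exports.

```ts
// Standalone sketch of the visibility rule, with simplified stand-in types.
type Necessity = "Yes" | "OnError" | "No";
type Result = "yes" | "no" | "maybe-ambiguity" | "maybe-overflow";

interface Obl {
  necessity: Necessity;
  result: Result;
}

const isVisible = (o: Obl, filterAmbiguities: boolean): boolean =>
  // When a real failure exists elsewhere in the body, ambiguous results are noise.
  !(filterAmbiguities && (o.result === "maybe-ambiguity" || o.result === "maybe-overflow")) &&
  // Otherwise show necessary obligations, and necessary-on-error obligations that failed.
  (o.necessity === "Yes" || (o.necessity === "OnError" && o.result === "no"));

// A body has an "important failure" if any obligation is both necessary and failing.
const existsImportantFailure = (obls: Obl[]): boolean =>
  obls.some(o => o.result === "no" && o.necessity === "Yes");

// Example: once a real failure exists, the ambiguous obligation is filtered out.
const obls: Obl[] = [
  { necessity: "Yes", result: "no" },
  { necessity: "Yes", result: "maybe-ambiguity" }
];
const filter = existsImportantFailure(obls);
console.log(obls.filter(o => isVisible(o, filter))); // only the failing obligation remains
```
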
diff --git a/ide/packages/print/src/Attention.tsx b/ide/packages/print/src/Attention.tsx index 3d6a99e..53e7f05 100644 --- a/ide/packages/print/src/Attention.tsx +++ b/ide/packages/print/src/Attention.tsx @@ -1,13 +1,32 @@ -import React from "react"; +import React, { useEffect, useRef } from "react"; import "./Attention.css"; +const DURATION = 1_000; +const CN = "Attention"; + +const Attn = ({ + children, + className = CN +}: React.PropsWithChildren<{ className?: string }>) => { + const ref = useRef(null); + useEffect(() => { + setTimeout(() => ref.current?.classList.remove(className), DURATION); + }, []); + + return ( + + {children} + + ); +}; + export const TextEmphasis = ({ children }: React.PropsWithChildren) => ( - {children} + {children} ); const Attention = ({ children }: React.PropsWithChildren) => ( - {children} + {children} ); export default Attention; diff --git a/ide/packages/print/src/private/argus.css b/ide/packages/print/src/private/argus.css new file mode 100644 index 0000000..9344216 --- /dev/null +++ b/ide/packages/print/src/private/argus.css @@ -0,0 +1,11 @@ +.WhereConstraintArea { + display: flex; + flex-direction: column; + gap: 0.5em; +} + +/* FIXME: this should be handled by the CommaSeparated component in `syntax.tsx`, but where + constraints are in divs which made the commas land in weird places. */ +.WhereConstraintArea > .WhereConstraint:not(:last-child, :empty):after { + content: ','; +} \ No newline at end of file diff --git a/ide/packages/print/src/private/argus.tsx b/ide/packages/print/src/private/argus.tsx index b404b7f..db43c27 100644 --- a/ide/packages/print/src/private/argus.tsx +++ b/ide/packages/print/src/private/argus.tsx @@ -3,12 +3,19 @@ import type { ClauseWithBounds, GroupedClauses, ImplHeader, + PolyClauseKind, + PolyClauseWithBounds, Ty } from "@argus/common/bindings"; import { anyElems, isUnitTy } from "@argus/common/func"; import _ from "lodash"; -import React, { type PropsWithChildren, useContext } from "react"; +import React, { + type PropsWithChildren, + type ReactElement, + useContext +} from "react"; +import classNames from "classnames"; import { Toggle } from "../Toggle"; import { AllowProjectionSubst, LocationActionable, TyCtxt } from "../context"; import { PrintDefinitionPath } from "./path"; @@ -23,15 +30,30 @@ import { PrintTyKind } from "./ty"; +import "./argus.css"; + +export const WhereConstraintArea = ({ + className, + children +}: React.PropsWithChildren<{ className?: string }>) => ( +
<div className={classNames("WhereConstraintArea", className)}>{children}</div>
+); + +export const WhereConstraint = ({ children }: React.PropsWithChildren) => ( +
<div className="WhereConstraint">{children}</div>
+); + // NOTE: it looks ugly, but we need to disable projection substitution for all parts // of the impl blocks. export const PrintImplHeader = ({ o }: { o: ImplHeader }) => { console.debug("Printing ImplHeader", o); + const genArgs = _.map(o.args, arg => ( )); + const argsWAngle = genArgs.length === 0 ? null : ( @@ -69,68 +91,98 @@ export const PrintImplHeader = ({ o }: { o: ImplHeader }) => { ); }; -export const PrintGroupedClauses = ({ o }: { o: GroupedClauses }) => { - console.debug("Printing GroupedClauses", o); - const Inner = ({ value }: { value: ClauseWithBounds }) => ( - +export const PrintClauses = ({ + grouped, + ungrouped, + tysWOBound +}: { + grouped: PolyClauseWithBounds[]; + ungrouped: PolyClauseKind[]; + tysWOBound: Ty[]; +}) => { + const Group = ({ value }: { value: PolyClauseWithBounds }) => ( + } + /> ); - const groupedClauses = _.map(o.grouped, (group, idx) => ( -
- -
- )); - const noGroupedClauses = _.map(o.other, (clause, idx) => ( -
- -
- )); - return ( + const Ungrouped = ({ value }: { value: PolyClauseKind }) => ( + + ); + const Unsized = ({ value }: { value: Ty }) => ( <> - {groupedClauses} - {noGroupedClauses} + : ?Sized ); + + const rawElements /*: (for [T, React.FC<{ value: T }>])[] */ = [ + ..._.map(grouped, group => [group, Group] as const), + ..._.map(ungrouped, ungroup => [ungroup, Ungrouped] as const), + ..._.map(tysWOBound, ty => [ty, Unsized] as const) + ] as const; + + const elements = _.map( + rawElements, + ( + [value, C] /*: for [T, React.FC<{ value: T }> */, + idx + ): ReactElement => ( + + + + ) + ); + + // TODO: the `{elements}` should be wrapped in a `CommaSeparated` component, + // but comman placement is done manually in the WhereConstraintsArea ... for now. See CSS + // file for more comments. + return {elements}; }; -export const PrintWhereClause = ({ - predicates, +const PrintWhereClause = ({ + predicates: { grouped, other: ungrouped }, tysWOBound }: { predicates: GroupedClauses; tysWOBound: Ty[]; }) => { - if (!anyElems(predicates.grouped, predicates.other, tysWOBound)) { + if (!anyElems(grouped, ungrouped, tysWOBound)) { return null; } - const whereHoverContent = () => ( -
- - {_.map(tysWOBound, (ty, idx) => ( -
- : ?Sized -
- ))} -
+ const content = ( + ); return ( <> - {" "} +
where {nbsp} - + content} /> ); }; const PrintClauseWithBounds = ({ o }: { o: ClauseWithBounds }) => { - const [traits, lifetimes] = _.partition(o.bounds, bound => "Trait" in bound); - const traitBounds = _.map(traits, bound => ); - const lifetimeBounds = _.map(lifetimes, bound => ( + // Sort the bounds to be Ty: Fn() + Trait + Region + const sortedBounds = _.sortBy(o.bounds, bound => + "FnTrait" in bound + ? 0 + : "Trait" in bound + ? 1 + : "Region" in bound + ? 2 + : undefined + ); + + const boundComponents = _.map(sortedBounds, bound => ( )); - const boundComponents = _.concat(traitBounds, lifetimeBounds); return ( <> diff --git a/ide/packages/print/src/private/predicate.tsx b/ide/packages/print/src/private/predicate.tsx index dfce307..2e3b8ca 100644 --- a/ide/packages/print/src/private/predicate.tsx +++ b/ide/packages/print/src/private/predicate.tsx @@ -15,7 +15,8 @@ import React from "react"; import { HoverInfo } from "../HoverInfo"; import { IcoNote } from "../Icons"; -import { PrintGroupedClauses } from "./argus"; +import MonoSpace from "../MonoSpace"; +import { PrintClauses } from "./argus"; import { PrintConst } from "./const"; import { PrintDefinitionPath } from "./path"; import { PrintTerm } from "./term"; @@ -33,7 +34,13 @@ export const PrintPredicateObligation = ({ o }: { o: PredicateObligation }) => { o.paramEnv.grouped, o.paramEnv.other ) ? null : ( - }> + ( + + + + )} + > {" "} @@ -47,19 +54,17 @@ export const PrintPredicateObligation = ({ o }: { o: PredicateObligation }) => { ); }; -export const PrintGoalPredicate = ({ o }: { o: GoalPredicate }) => { +export const PrintGoalPredicate = ({ o }: { o: GoalPredicate }) => ( // NOTE: goals and obligations aren't the same thing, but they // currently have the same semantic structure. - return ; -}; + +); -export const PrintParamEnv = ({ o }: { o: ParamEnv }) => { - return ( -
- -
- ); -}; +export const PrintParamEnv = ({ o }: { o: ParamEnv }) => ( +
+ +
+); export const PrintBinderPredicateKind = ({ o }: { o: PolyPredicateKind }) => { const Inner = ({ value }: { value: PredicateKind }) => (
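
The reworked `PrintClauseWithBounds` above orders a type's bounds for display as `Ty: Fn() + Trait + Region` by mapping each variant of the bound union to a rank before sorting. The sketch below shows the same ranking idea in isolation; `Bound` is a simplified stand-in for the generated `ClauseBound` binding, and the helper names are illustrative only.

```ts
// Minimal sketch: rank discriminated-union variants to get a stable display order
// (Fn-sugar traits first, then other traits, then regions).
type Bound =
  | { FnTrait: string }
  | { Trait: string }
  | { Region: string };

const rank = (b: Bound): number =>
  "FnTrait" in b ? 0 : "Trait" in b ? 1 : 2;

const sortBounds = (bounds: Bound[]): Bound[] =>
  [...bounds].sort((a, b) => rank(a) - rank(b));

// Example: `T: Clone + Fn() + 'static` renders as `T: Fn() + Clone + 'static`.
console.log(
  sortBounds([{ Trait: "Clone" }, { FnTrait: "Fn()" }, { Region: "'static" }])
);
```
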