Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

tweaks and fixes #28

Merged
merged 13 commits into from
Aug 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 7 additions & 8 deletions crates/argus-cli/src/plugin.rs
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ impl RustcPlugin for ArgusPlugin {
match &args.command {
AC::Preload => {
let mut cmd = Command::new(cargo_path);
// Note: this command must share certain parameters with rustc_plugin so Cargo will not recompute
// NOTE: this command must share certain parameters with rustc_plugin so Cargo will not recompute
// dependencies when actually running the driver, e.g. RUSTFLAGS.
cmd
.args(["check", "--all", "--all-features", "--target-dir"])
Expand All @@ -136,14 +136,14 @@ impl RustcPlugin for ArgusPlugin {
println!("{commit_hash}");
exit(0);
}
_ => {}
AC::Obligations { .. } | AC::Tree { .. } | AC::Bundle => {}
};

let file = match &args.command {
AC::Tree { file, .. } => Some(file),
AC::Obligations { file } => file.as_ref(),
AC::Bundle => None,
_ => unreachable!(),
AC::Preload | AC::RustcVersion => unreachable!(),
};

let filter = file.map_or(CrateFilter::OnlyWorkspace, |file| {
Expand All @@ -159,6 +159,7 @@ impl RustcPlugin for ArgusPlugin {
plugin_args: ArgusPluginArgs,
) -> RustcResult<()> {
use ArgusCommand as AC;
let no_target = || None::<(ObligationHash, CharRange)>;
match &plugin_args.command {
AC::Tree {
file,
Expand Down Expand Up @@ -192,29 +193,27 @@ impl RustcPlugin for ArgusPlugin {
postprocess(v)
}
AC::Obligations { file, .. } => {
let nothing = || None::<(ObligationHash, CharRange)>;
let v = run(
analysis::obligations,
file.as_ref().map(PathBuf::from),
nothing,
no_target,
&plugin_args,
&compiler_args,
);
postprocess(v)
}
AC::Bundle => {
log::warn!("Bundling takes an enormous amount of time.");
let nothing = || None::<(ObligationHash, CharRange)>;
let v = run(
analysis::bundle,
None,
nothing,
no_target,
&plugin_args,
&compiler_args,
);
postprocess(v)
}
_ => unreachable!(),
AC::Preload | AC::RustcVersion => unreachable!(),
}
}
}
Expand Down
2 changes: 2 additions & 0 deletions crates/argus-ext/src/ty/impl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,11 +118,13 @@ impl<'tcx> TyCtxtExt<'tcx> for TyCtxt<'tcx> {

/// Re-express `span` relative to the source-level span of `body_id`'s body,
/// falling back to `span` unchanged when that mapping fails.
fn to_local(&self, body_id: BodyId, span: Span) -> Span {
use rustc_utils::source_map::span::SpanExt;

let hir = self.hir();
// Start from the span of the body's value expression.
let mut local_body_span = hir.body(body_id).value.span;
// If the body span comes from a macro expansion, walk outward to the call
// site repeatedly until we reach a span in the actual source text.
while local_body_span.from_expansion() {
local_body_span = local_body_span.source_callsite();
}

// `as_local` semantics come from `rustc_utils::SpanExt`; on failure we
// return the original span rather than erroring.
span.as_local(local_body_span).unwrap_or(span)
}

Expand Down
2 changes: 1 addition & 1 deletion crates/argus-ser/src/argus.rs
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ pub enum ClauseBound<'tcx> {
),
}

pub fn group_predicates_by_ty<'tcx>(
pub(crate) fn group_predicates_by_ty<'tcx>(
tcx: ty::TyCtxt<'tcx>,
predicates: impl IntoIterator<Item = ty::Clause<'tcx>>,
) -> GroupedClauses<'tcx> {
Expand Down
13 changes: 1 addition & 12 deletions crates/argus/src/aadebug/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,10 @@ use std::time::Instant;
use anyhow::Result;
use argus_ext::ty::EvaluationResultExt;
use index_vec::IndexVec;
use rustc_data_structures::fx::FxHashMap as HashMap;
use rustc_infer::traits::solve::GoalSource;
use rustc_trait_selection::solve::inspect::{InspectCandidate, InspectGoal};
use rustc_utils::timer;
use serde::Serialize;
use serde_json as json;
#[cfg(feature = "testing")]
use ts_rs::TS;

Expand All @@ -28,12 +26,6 @@ pub struct Storage<'tcx> {
#[cfg_attr(feature = "testing", ts(export))]
pub struct AnalysisResults {
pub problematic_sets: Vec<tree::SetHeuristic>,

#[cfg_attr(
feature = "testing",
ts(type = "Record<ProofNodeIdx, ImplHeader[]>")
)]
pub impl_candidates: HashMap<ProofNodeIdx, Vec<json::Value>>,
}

impl<'tcx> Storage<'tcx> {
Expand Down Expand Up @@ -118,15 +110,12 @@ impl<'tcx> Storage<'tcx> {
let tree_start = Instant::now();

let mut sets = vec![];
tree.for_correction_set(|conjunct| {
sets.push(tree.weight(&conjunct));
});
tree.for_correction_set(|conjunct| sets.push(tree.weight(conjunct)));

timer::elapsed("aadeg::into_results", tree_start);

AnalysisResults {
problematic_sets: sets,
impl_candidates: tree.reportable_impl_candidates(),
}
}
}
71 changes: 14 additions & 57 deletions crates/argus/src/aadebug/tree.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,7 @@
use std::{cell::RefCell, ops::Deref, time::Instant};

use argus_ext::{
rustc::InferCtxtExt,
ty::{EvaluationResultExt, PredicateExt, TyCtxtExt, TyExt},
};
use argus_ser as ser;
use argus_ext::ty::{EvaluationResultExt, TyCtxtExt, TyExt};
use index_vec::IndexVec;
use rustc_data_structures::fx::FxHashMap as HashMap;
use rustc_infer::infer::InferCtxt;
use rustc_middle::{
traits::solve::{CandidateSource, Goal as RGoal},
Expand All @@ -22,7 +17,6 @@ use super::dnf::{And, Dnf};
use crate::{
analysis::EvaluationResult,
proof_tree::{topology::TreeTopology, ProofNodeIdx},
tls,
};

pub type I = ProofNodeIdx;
Expand Down Expand Up @@ -175,6 +169,8 @@ impl<'a, 'tcx> Goal<'a, 'tcx> {
}

fn analyze(&self) -> Heuristic {
use std::cmp::Ordering;

// We should only be analyzing failed predicates
assert!(!self.result.is_yes());

Expand All @@ -194,16 +190,14 @@ impl<'a, 'tcx> Goal<'a, 'tcx> {
log::debug!("Fn Args {:?}", t.trait_ref.args.into_type_list(tcx));
log::debug!("{} v {}", fn_arity, trait_arity);

if fn_arity > trait_arity {
GoalKind::DeleteFnParams {
delta: fn_arity - trait_arity,
}
} else if fn_arity < trait_arity {
GoalKind::AddFnParams {
match fn_arity.cmp(&trait_arity) {
Ordering::Less => GoalKind::AddFnParams {
delta: trait_arity - fn_arity,
}
} else {
GoalKind::IncorrectParams { arity: fn_arity }
},
Ordering::Greater => GoalKind::DeleteFnParams {
delta: fn_arity - trait_arity,
},
Ordering::Equal => GoalKind::IncorrectParams { arity: fn_arity },
}
}

Expand Down Expand Up @@ -404,10 +398,6 @@ impl<'a, 'tcx: 'a> T<'a, 'tcx> {
}

pub fn dnf(&self) -> impl Deref<Target = Dnf<I>> + '_ {
if self.dnf.borrow().is_some() {
return self.expect_dnf();
}

fn _goal(this: &T, goal: &Goal) -> Option<Dnf<I>> {
if !((this.maybe_ambiguous && goal.result.is_maybe())
|| goal.result.is_no())
Expand Down Expand Up @@ -436,6 +426,10 @@ impl<'a, 'tcx: 'a> T<'a, 'tcx> {
Dnf::and(goals.filter_map(|g| _goal(this, &g)))
}

if self.dnf.borrow().is_some() {
return self.expect_dnf();
}

let dnf_report_msg =
format!("Normalizing to DNF from {} nodes", self.ns.len());
let dnf_start = Instant::now();
Expand Down Expand Up @@ -487,43 +481,6 @@ impl<'a, 'tcx: 'a> T<'a, 'tcx> {
goals,
}
}

/// Collect, for each failed trait goal in the correction sets, the impl
/// headers of "similar" candidate impls, serialized to JSON for reporting.
///
/// Only goals whose analyzed kind is `Trait` or `FnToTrait`, and whose
/// predicate is a trait predicate, contribute an entry; the map is keyed by
/// the goal's proof-node index.
pub fn reportable_impl_candidates(
&self,
) -> HashMap<I, Vec<serde_json::Value>> {
// Gather every node index appearing in any correction-set conjunct.
let mut indices = Vec::default();
self.for_correction_set(|and| indices.extend(and.iter().copied()));

// Keep only indices that resolve to goals (non-goal nodes are dropped).
let goals_only = indices.iter().filter_map(|&idx| self.goal(idx));

// Restrict to trait-shaped failures; other goal kinds have no impl
// candidates worth reporting.
let trait_goals = goals_only.filter(|g| {
matches!(
g.analyze().kind,
GoalKind::Trait { .. } | GoalKind::FnToTrait { .. }
)
});

trait_goals
.filter_map(|g| {
g.predicate().as_trait_predicate().map(|tp| {
let candidates = g
.infcx
.find_similar_impl_candidates(tp)
.into_iter()
.filter_map(|can| {
// Candidates without a recoverable impl header are skipped.
let header =
ser::argus::get_opt_impl_header(g.infcx.tcx, can.impl_def_id)?;
// NOTE(review): serialization goes through a thread-local type
// interner; the safety contract of `unsafe_access_interner` is
// defined in `tls`, not visible here.
Some(tls::unsafe_access_interner(|ty_interner| {
ser::to_value_expect(g.infcx, ty_interner, &header)
}))
})
.collect();

(g.idx, candidates)
})
})
.collect()
}
}

// ------------------
Expand Down
94 changes: 51 additions & 43 deletions crates/argus/src/analysis/entry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,39 +55,42 @@ pub fn process_obligation<'tcx>(

log::trace!("RECV OBLIGATION {result:?} {obl:?}");

// Use this to get rid of any resolved inference variables,
// these could have been resolved while trying to solve the obligation
// and we want to present it as such to the user.
let obl = &infcx.resolve_vars_if_possible(obl.clone());

// HACK: Remove ambiguous obligations if a "stronger" result was found and
// the predicate implies the previous. This is necessary because we
// can't (currently) distinguish between a subsequent solving attempt
// of a previous obligation.
if result.is_yes() || result.is_no() {
tls::drain_implied_ambiguities(infcx, obl);
}
// Anything we accidentally do in here should not affect type checking
infcx.probe(|_| {
// Use this to get rid of any resolved inference variables,
// these could have been resolved while trying to solve the obligation
// and we want to present it as such to the user.
let obl = &infcx.resolve_vars_if_possible(obl.clone());

if !INCLUDE_SUCCESSES.copied().unwrap_or(false) && result.is_yes() {
log::debug!("Skipping successful obligation {obl:?}");
return;
}
// HACK: Remove ambiguous obligations if a "stronger" result was found and
// the predicate implies the previous. This is necessary because we
// can't (currently) distinguish between a subsequent solving attempt
// of a previous obligation.
// if result.is_yes() || result.is_no() {
// tls::drain_implied_ambiguities(infcx, obl);
// }

let necessity = infcx.obligation_necessity(obl);
let dataid = if matches!(necessity, ObligationNecessity::Yes)
|| (matches!(necessity, ObligationNecessity::OnError) && result.is_no())
{
Some(tls::unsafe_store_data(infcx, obl, result))
} else {
None
};
if !INCLUDE_SUCCESSES.copied().unwrap_or(false) && result.is_yes() {
log::debug!("Skipping successful obligation {obl:?}");
return;
}

let obligation =
transform::compute_provenance(body_id, infcx, obl, result, dataid);
let necessity = infcx.obligation_necessity(obl);
let dataid = if matches!(necessity, ObligationNecessity::Yes)
|| (matches!(necessity, ObligationNecessity::OnError) && result.is_no())
{
Some(tls::unsafe_store_data(infcx, obl, result))
} else {
None
};

tls::store_obligation(obligation);
let obligation =
transform::compute_provenance(body_id, infcx, obl, result, dataid);

tls::replace_reported_errors(infcx);
tls::store_obligation(obligation);

tls::replace_reported_errors(infcx);
});
}

pub fn process_obligation_for_tree<'tcx>(
Expand All @@ -101,23 +104,25 @@ pub fn process_obligation_for_tree<'tcx>(
// Must go after the synthetic check.
guard_inspection! {}

// Use this to get rid of any resolved inference variables,
// these could have been resolved while trying to solve the obligation
// and we want to present it as such to the user.
let obl = &infcx.resolve_vars_if_possible(obl.clone());
infcx.probe(|_| {
// Use this to get rid of any resolved inference variables,
// these could have been resolved while trying to solve the obligation
// and we want to present it as such to the user.
let obl = &infcx.resolve_vars_if_possible(obl.clone());

let fdata = infcx.bless_fulfilled(obl, result);
let fdata = infcx.bless_fulfilled(obl, result);

if fdata.hash != target.hash {
return;
}
if fdata.hash != target.hash {
return;
}

match generate_tree(infcx, obl, fdata.result) {
Ok(stree) => tls::store_tree(stree),
Err(e) => {
log::error!("matching target tree not generated {e:?}");
match generate_tree(infcx, obl, fdata.result) {
Ok(stree) => tls::store_tree(stree),
Err(e) => {
log::error!("matching target tree not generated {e:?}");
}
}
}
});
});
}

Expand Down Expand Up @@ -214,10 +219,13 @@ pub(in crate::analysis) fn build_obligations_in_body<'tcx>(
// so as a first heuristic, if the body isn't tainted by errors, we'll just remove
// all non-successful obligations.
if typeck_results.tainted_by_errors.is_none() {
log::info!("BODY HAS NO ERRORS");
log::debug!(
"Removing failures: Body not-tainted {:?}",
typeck_results.hir_owner
);
obligations.retain(|prov| prov.it.result.is_yes());
} else {
log::info!("BODY TAINTED! {:?}", typeck_results.hir_owner);
log::debug!("Body tainted! {:?}", typeck_results.hir_owner);
}

let ctx = ErrorAssemblyCtx {
Expand Down
Loading
Loading