Backup for debugging signals
gavinleroy committed Feb 21, 2024
1 parent 9c0afd1 commit 8280a1c
Showing 5 changed files with 100 additions and 113 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[workspace]
members = [ "crates/*" ]
exclude = [ "crates/argus_cli/tests/workspaces" ]
exclude = [ "crates/argus_cli/tests/workspaces", "examples" ]
resolver = "2"

[profile.dev.package.similar]
181 changes: 83 additions & 98 deletions crates/argus/src/analysis/transform.rs
@@ -133,7 +133,7 @@ struct ObligationsBuilder<'a, 'tcx: 'a> {
// Structures to be filled in
exprs_to_hir_id: HashMap<ExprIdx, HirId>,
ambiguity_errors: IndexSet<ExprIdx>,
trait_errors: IndexSet<ExprIdx>,
trait_errors: Vec<(ExprIdx, Vec<ObligationHash>)>,
exprs: IndexVec<ExprIdx, Expr>,
method_lookups: IndexVec<MethodLookupIdx, MethodLookup>,
}
@@ -151,7 +151,8 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> {
kind,
} in bins
{
let span = hir.span_with_body(hir_id);
// let span = hir.span_with_body(hir_id);
let span = hir.span(hir_id);
if let Some((range, snippet)) =
CharRange::from_span(span, source_map).ok().and_then(|r| {
let snip = source_map
@@ -231,107 +232,91 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> {
// FIXME: this isn't efficient, but the number of obligations per
// body isn't large, so it shouldn't be an issue.
fn relate_trait_bound(&mut self) {
// 1. Take the expressions from the "reported_trait_errors" and find
// all the expressions they correspond to. We should also
// maintain the order in which they are reported and use this
// ordering to present errors.
for (span, predicates) in self.reported_trait_errors.iter() {
let Some(this_id) =
hier_hir::find_most_enclosing_node(&self.tcx, self.body_id, *span)
else {
log::error!("reported error doesn't have an associated span ...");
continue;
};

let matching_expressions = self
.exprs_to_hir_id
// Search for the obligation hash in our set of computed obligations.
let predicates = predicates
.iter()
.filter(|(_, that_id)| self.tcx.is_parent_of(**that_id, this_id))
.filter_map(|&p| {
self
.raw_obligations
.iter_enumerated()
.find_map(|(obl_id, obl)| (obl.hash == p).then(|| (obl_id, p)))
})
.collect::<Vec<_>>();

let Some((expr_id, _hir_id)) =
matching_expressions.iter().copied().find(|(_, this_id)| {
matching_expressions
.iter()
.all(|(_, that_id)| self.tcx.is_parent_of(**that_id, **this_id))
})
else {
log::error!(
"failed to find most enclosing hir id for {:?}",
matching_expressions
);
continue;
};

// Mark the found Expr as containing an error.
self.trait_errors.insert(*expr_id);

// Sort the Expr obligations according to the reported order.
let expr_obligations = &mut self.exprs[*expr_id].obligations;
let num_errs = predicates.len();
expr_obligations.sort_by_key(|&obl_idx| {
let obl = &self.raw_obligations[obl_idx];
let obl_hash = obl.hash;
let obl_is_certain = obl.result.is_certain();
predicates
.iter()
.position(|&h| h == obl_hash)
.unwrap_or_else(|| {
if obl_is_certain {
// push successful obligations down
num_errs + 1
} else {
num_errs
}
})
})
}
// Associate these with an expression: first come, first served.
let mut root_expr = None;
'outer: for (expr_id, expr) in self.exprs.iter_enumerated() {
for (p, _) in predicates.iter() {
if expr.obligations.contains(p) {
root_expr = Some(expr_id);
break 'outer;
}
}
}

// 2. We also need to search for expressions that are "ambiguous"; these
// don't always have associated reported errors. My current thought is
// to find obligations that are unsuccessful and have a concrete
// obligation cause code.
//
// TODO: this isn't quite doing what I want. We need a way to figure
// out which obligations are "reruns" of a previous goal, and
// then remove the prior 'ambiguous' answer from the list.
//
// let is_important_failed_query = |obl_idx: ObligationIdx| {
// use rustc_infer::traits::ObligationCauseCode::*;
// if let Some(prov) = self.obligations.iter().find(|p| ***p == obl_idx)
// && let Some(uodidx) = prov.full_data
// {
// let full_data = self.full_data.get(uodidx);
// !(full_data.result.is_certain()
// || matches!(full_data.obligation.cause.code(), MiscObligation))
// } else {
// false
// }
// };
// let lift_failed_obligations = |v: &mut Vec<ObligationIdx>| {
// v.sort_by_key(|&idx| {
// if self.raw_obligations[idx].result.is_certain() {
// 1
// } else {
// 0
// }
// })
// };
// let unmarked_exprs = self
// .exprs
// .iter_mut_enumerated()
// .filter(|(id, _)| !self.ambiguity_errors.contains(id));
// for (expr_id, expr) in unmarked_exprs {
// let contains_failed = expr
// .obligations
// .iter()
// .copied()
// .any(is_important_failed_query);
// if contains_failed {
// self.trait_errors.insert(expr_id);
// }
// lift_failed_obligations(&mut expr.obligations)
// }
let (_, hashes): (Vec<ObligationIdx>, _) = predicates.into_iter().unzip();

if let Some(expr_id) = root_expr {
self.trait_errors.push((expr_id, hashes));
}
// else {
// todo!("what should I do here?");
// }

// let Some(err_hir_id) =
// hier_hir::find_most_enclosing_node(&self.tcx, self.body_id, *span)
// else {
// log::error!("reported error doesn't have an associated span ...");
// continue;
// };

// let parent_ids_of_error = self
// .exprs_to_hir_id
// .iter()
// .filter(|(_, expr_hir_id)| {
// self.tcx.is_parent_of(**expr_hir_id, err_hir_id)
// })
// .collect::<Vec<_>>();

// let Some((expr_id, _hir_id)) =
// parent_ids_of_error.iter().copied().find(|(_, this_id)| {
// // Find child-most expression that contains the error.
// parent_ids_of_error
// .iter()
// .all(|(_, that_id)| self.tcx.is_parent_of(**that_id, **this_id))
// })
// else {
// log::error!(
// "failed to find most enclosing hir id for {:?}",
// parent_ids_of_error
// );
// continue;
// };

// // Mark the found Expr as containing an error.
// self.trait_errors.insert(*expr_id);

// // Sort the Expr obligations according to the reported order.
// let expr_obligations = &mut self.exprs[*expr_id].obligations;
// let num_errs = predicates.len();
// expr_obligations.sort_by_key(|&obl_idx| {
// let obl = &self.raw_obligations[obl_idx];
// let obl_hash = obl.hash;
// let obl_is_certain = obl.result.is_certain();
// predicates
// .iter()
// .position(|&h| h == obl_hash)
// .unwrap_or_else(|| {
// if obl_is_certain {
// // push successful obligations down
// num_errs + 1
// } else {
// num_errs
// }
// })
// })
}
}

// 1. build the method call table (see ambiguous / )
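After this hunk, relate_trait_bound pairs each reported trait error with the first expression whose obligation set contains one of the reported predicate hashes, and records the hashes in the order the compiler reported them. Below is a minimal, self-contained sketch of that association strategy; the function name and stand-in types (usize indices, u64 hashes) are illustrative only and not part of the crate's API.

// Simplified stand-ins for the crate's ObligationIdx, ObligationHash,
// Expr, and ExprIdx types; this is an illustration, not the real API.
struct Obligation {
  hash: u64,
}

struct Expr {
  obligations: Vec<usize>, // indices into `raw_obligations`
}

fn relate_trait_bound_sketch(
  raw_obligations: &[Obligation],
  exprs: &[Expr],
  reported_trait_errors: &[Vec<u64>], // reported predicate hashes, per error
) -> Vec<(usize, Vec<u64>)> {
  let mut trait_errors = Vec::new();
  for predicates in reported_trait_errors {
    // Keep only reported hashes that match a computed obligation,
    // preserving the order in which the compiler reported them.
    let matched: Vec<(usize, u64)> = predicates
      .iter()
      .filter_map(|&p| {
        raw_obligations
          .iter()
          .position(|obl| obl.hash == p)
          .map(|obl_id| (obl_id, p))
      })
      .collect();

    // First come, first served: the first expression containing any
    // matched obligation becomes the error's root expression.
    let root_expr = exprs.iter().position(|expr| {
      matched
        .iter()
        .any(|(obl_id, _)| expr.obligations.contains(obl_id))
    });

    if let Some(expr_id) = root_expr {
      let hashes = matched.into_iter().map(|(_, h)| h).collect();
      trait_errors.push((expr_id, hashes));
    }
  }
  trait_errors
}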
24 changes: 13 additions & 11 deletions crates/argus/src/ext.rs
@@ -87,7 +87,7 @@ pub trait StableHash<'__ctx, 'tcx>:
{
fn stable_hash(
self,
infcx: &InferCtxt<'tcx>,
infcx: &TyCtxt<'tcx>,
ctx: &mut StableHashingContext<'__ctx>,
) -> Hash64;
}
@@ -107,6 +107,8 @@ pub trait TyCtxtExt<'tcx> {

/// Test whether `a` is a parent node of `b`.
fn is_parent_of(&self, a: HirId, b: HirId) -> bool;

fn predicate_hash(&self, p: &Predicate<'tcx>) -> Hash64;
}

pub trait TypeckResultsExt<'tcx> {
@@ -207,16 +209,12 @@
{
fn stable_hash(
self,
infcx: &InferCtxt<'tcx>,
tcx: &TyCtxt<'tcx>,
ctx: &mut StableHashingContext<'__ctx>,
) -> Hash64 {
let mut h = StableHasher::new();
let sans_regions = infcx.tcx.erase_regions(self);
let mut freshener = rustc_infer::infer::TypeFreshener::new(infcx);
// let mut eraser = ty_eraser::TyVarEraserVisitor { infcx };
let this = sans_regions.fold_with(&mut freshener);
// erase infer vars
this.hash_stable(ctx, &mut h);
let sans_regions = tcx.erase_regions(self);
sans_regions.hash_stable(ctx, &mut h);
h.finish()
}
}
@@ -332,6 +330,10 @@ impl<'tcx> TyCtxtExt<'tcx> for TyCtxt<'tcx> {
fn is_parent_of(&self, a: HirId, b: HirId) -> bool {
a == b || self.hir().parent_iter(b).find(|&(id, _)| id == a).is_some()
}

fn predicate_hash(&self, p: &Predicate<'tcx>) -> Hash64 {
self.with_stable_hashing_context(|mut hcx| p.stable_hash(self, &mut hcx))
}
}

impl<'tcx> TypeckResultsExt<'tcx> for TypeckResults<'tcx> {
@@ -445,9 +447,9 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
}

fn predicate_hash(&self, p: &Predicate<'tcx>) -> Hash64 {
self
.tcx
.with_stable_hashing_context(|mut hcx| p.stable_hash(self, &mut hcx))
let mut freshener = rustc_infer::infer::TypeFreshener::new(self);
let p = p.fold_with(&mut freshener);
self.tcx.predicate_hash(&p)
}

fn bless_fulfilled<'a>(
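With this change, predicate hashing is layered: TyCtxt::predicate_hash erases regions and stable-hashes the result, while InferCtxt::predicate_hash first freshens inference variables and then delegates, so obligations that differ only in inference-variable numbering hash alike. A rough usage sketch under that reading; the `infcx`, `pred`, and `raw_obligation` bindings are hypothetical and only the calls shown in the diff above are assumed to exist.

// Hypothetical bindings: `infcx: &InferCtxt<'tcx>`, a reported predicate
// `pred`, and a computed `raw_obligation` whose `hash` field was produced
// the same way.
let reported_hash: Hash64 = infcx.predicate_hash(&pred);
// Matching a reported error against computed obligations by hash:
let is_reported = raw_obligation.hash == reported_hash;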
4 changes: 2 additions & 2 deletions crates/argus/src/types.rs
@@ -151,7 +151,7 @@ pub struct ObligationsInBody {
/// Concrete trait errors: cases where the compiler can say with
/// certainty that a specific trait bound was required but not
/// satisfied.
pub trait_errors: IndexSet<ExprIdx>,
pub trait_errors: Vec<(ExprIdx, Vec<ObligationHash>)>,

#[cfg_attr(feature = "testing", ts(type = "Obligation[]"))]
pub obligations: IndexVec<ObligationIdx, Obligation>,
@@ -168,7 +168,7 @@ impl ObligationsInBody {
id: Option<(&InferCtxt, DefId)>,
range: CharRange,
ambiguity_errors: IndexSet<ExprIdx>,
trait_errors: IndexSet<ExprIdx>,
trait_errors: Vec<(ExprIdx, Vec<ObligationHash>)>,
obligations: IndexVec<ObligationIdx, Obligation>,
exprs: IndexVec<ExprIdx, Expr>,
method_lookups: IndexVec<MethodLookupIdx, MethodLookup>,
(fifth changed file; filename not shown in this view)
Expand Up @@ -9,5 +9,5 @@ table! {

#[derive(Insertable)]
struct User {
name: i32,
name: Text,
}
