From 7f25d7a67ec75f1db53b01516b69188abc18a1c3 Mon Sep 17 00:00:00 2001 From: Gavin Gray Date: Mon, 26 Feb 2024 22:07:05 +0100 Subject: [PATCH] Analyze const/static bodies, and those from macros. Intern proof tree information. --- crates/argus/src/analysis/mod.rs | 2 +- crates/argus/src/analysis/transform.rs | 39 +++-- crates/argus/src/ext.rs | 14 +- crates/argus/src/find_bodies.rs | 70 +++++++++ crates/argus/src/lib.rs | 2 + crates/argus/src/proof_tree/interners.rs | 13 +- crates/argus/src/proof_tree/mod.rs | 50 +++---- crates/argus/src/proof_tree/serialize.rs | 16 +- crates/argus_cli/src/plugin.rs | 37 ++--- .../diesel/src/bad_insertable_field.rs | 2 +- .../panoptes/src/TreeView/BottomUp.tsx | 141 +++++++++++------- ide/packages/panoptes/src/TreeView/Node.tsx | 5 +- .../panoptes/src/TreeView/TopDown.tsx | 2 +- .../panoptes/src/TreeView/TreeInfo.ts | 12 +- .../panoptes/src/print/private/path.tsx | 6 +- 15 files changed, 265 insertions(+), 146 deletions(-) create mode 100644 crates/argus/src/find_bodies.rs diff --git a/crates/argus/src/analysis/mod.rs b/crates/argus/src/analysis/mod.rs index 68e3cb1..36d0dab 100644 --- a/crates/argus/src/analysis/mod.rs +++ b/crates/argus/src/analysis/mod.rs @@ -29,7 +29,7 @@ pub fn obligations<'tcx>( body_id: BodyId, ) -> Result { rustc_middle::ty::print::with_no_trimmed_paths! 
{{ - log::debug!("Typeck'ing body {body_id:?}"); + log::info!("Typeck'ing body {body_id:?}"); let typeck_results = tcx.inspect_typeck(body_id, entry::process_obligation); diff --git a/crates/argus/src/analysis/transform.rs b/crates/argus/src/analysis/transform.rs index 56b65ef..c3f2bc0 100644 --- a/crates/argus/src/analysis/transform.rs +++ b/crates/argus/src/analysis/transform.rs @@ -182,6 +182,11 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { ); }; + log::info!( + "Building table for method call: {}", + self.tcx.hir().node_to_string(call_expr.hir_id) + ); + if let Some((idx, error_recvr, error_call)) = self .relate_method_call( call_expr, @@ -192,12 +197,14 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { &obligations, ) { + log::info!("built table!"); ambiguous_call = error_recvr || error_call; MethodCall { data: idx, error_recvr, } } else { + log::info!("Failed to build table..."); Misc } } @@ -330,22 +337,24 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { let (necessary_queries, trait_candidates): (Vec<_>, Vec<_>) = obligations .iter() .filter_map(|&idx| { - let Some(fdata) = self.obligations[idx] + let fdata = self.obligations[idx] .full_data - .map(|fdidx| self.full_data.get(fdidx)) - else { - return None; - }; + .map(|fdidx| self.full_data.get(fdidx))?; - let is_necessary = fdata.obligation.predicate.is_trait_predicate() && - fdata - .infcx - .obligation_necessity(&fdata.obligation) - .is_necessary(fdata.result) + if fdata.obligation.predicate.is_trait_predicate() { + log::info!( + "Predicate is a trait predicate {:?}", + fdata.obligation.predicate + ); + } + + let is_necessary = + // Bounds for extension method calls are always trait predicates. + fdata.obligation.predicate.is_trait_predicate() && // TODO: Obligations for method calls are registered // under 'misc,' this of course can change. Find a better // way to gather the attempted traits! 
- && matches!( + matches!( fdata.obligation.cause.code(), ObligationCauseCode::MiscObligation ); @@ -376,12 +385,16 @@ impl<'a, 'tcx: 'a> ObligationsBuilder<'a, 'tcx> { } } - let (full_query_idx, query) = + let Some((full_query_idx, query)) = necessary_queries.first().and_then(|&idx| { self.obligations[idx] .full_data .map(|fdidx| (fdidx, self.full_data.get(fdidx))) - })?; + }) + else { + log::error!("necessary queries empty! {:?}", necessary_queries); + return None; + }; let infcx = &query.infcx; let o = &query.obligation; diff --git a/crates/argus/src/ext.rs b/crates/argus/src/ext.rs index f2cdbbd..7f149b1 100644 --- a/crates/argus/src/ext.rs +++ b/crates/argus/src/ext.rs @@ -370,9 +370,19 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> { .any(|(_lang_item, def_id)| p.is_trait_pred_rhs(def_id)) }; - if !p.is_trait_predicate() { + let is_writeable = || { + use rustc_type_ir::ClauseKind::*; + matches!( + p.kind().skip_binder(), + ty::PredicateKind::Clause( + Trait(..) | RegionOutlives(..) | TypeOutlives(..) | Projection(..) + ) + ) + }; + + if !is_writeable() { ForProfessionals - } else if is_rhs_lang_item() { + } else if p.is_trait_predicate() && is_rhs_lang_item() { OnError } else { Yes diff --git a/crates/argus/src/find_bodies.rs b/crates/argus/src/find_bodies.rs new file mode 100644 index 0000000..d8c11a7 --- /dev/null +++ b/crates/argus/src/find_bodies.rs @@ -0,0 +1,70 @@ +//! This is a copy of the `BodyFinder` from `rustc_utils` but it +//! does *not* skip const/static items. Funny enough, these items +//! often have important trait constraints evaluated (think derive macros). 
+use rustc_hir::{intravisit::Visitor, BodyId}; +use rustc_middle::{hir::nested_filter::OnlyBodies, ty::TyCtxt}; +use rustc_span::Span; +use rustc_utils::{block_timer, SpanExt}; + +struct BodyFinder<'tcx> { + tcx: TyCtxt<'tcx>, + bodies: Vec<(Span, BodyId)>, +} + +impl<'tcx> Visitor<'tcx> for BodyFinder<'tcx> { + type NestedFilter = OnlyBodies; + + fn nested_visit_map(&mut self) -> Self::Map { + self.tcx.hir() + } + + fn visit_nested_body(&mut self, id: BodyId) { + let hir = self.nested_visit_map(); + + // // const/static items are considered to have bodies, so we want to exclude + // // them from our search for functions + // if !hir + // .body_owner_kind(hir.body_owner_def_id(id)) + // .is_fn_or_closure() + // { + // return; + // } + + let body = hir.body(id); + self.visit_body(body); + + let hir = self.tcx.hir(); + let span = hir.span_with_body(hir.body_owner(id)); + log::trace!( + "Searching body for {:?} with span {span:?} (local {:?})", + self + .tcx + .def_path_debug_str(hir.body_owner_def_id(id).to_def_id()), + span.as_local(body.value.span) + ); + + self.bodies.push((span, id)); + } +} + +/// Finds all bodies in the current crate +pub fn find_bodies(tcx: TyCtxt) -> Vec<(Span, BodyId)> { + block_timer!("find_bodies"); + let mut finder = BodyFinder { + tcx, + bodies: Vec::new(), + }; + tcx.hir().visit_all_item_likes_in_crate(&mut finder); + finder.bodies +} + +/// Finds all the bodies that enclose the given span, from innermost to outermost +pub fn find_enclosing_bodies( + tcx: TyCtxt, + sp: Span, +) -> impl Iterator { + let mut bodies = find_bodies(tcx); + bodies.retain(|(other, _)| other.contains(sp)); + bodies.sort_by_key(|(span, _)| span.size()); + bodies.into_iter().map(|(_, id)| id) +} diff --git a/crates/argus/src/lib.rs b/crates/argus/src/lib.rs index 31472eb..d401e19 100644 --- a/crates/argus/src/lib.rs +++ b/crates/argus/src/lib.rs @@ -37,6 +37,8 @@ extern crate rustc_type_ir; pub mod analysis; pub mod emitter; mod ext; +// TODO: remove when 
upstreamed to rustc-plugin +pub mod find_bodies; mod proof_tree; mod rustc; mod serialize; diff --git a/crates/argus/src/proof_tree/interners.rs b/crates/argus/src/proof_tree/interners.rs index de2a332..f70e877 100644 --- a/crates/argus/src/proof_tree/interners.rs +++ b/crates/argus/src/proof_tree/interners.rs @@ -96,7 +96,7 @@ impl Interners { let result_idx = self.intern_result(goal.result()); let goal = goal.goal(); let goal_idx = self.intern_goal(infcx, &goal, result_idx); - Node::Goal(goal_idx, result_idx) + Node::Goal(goal_idx) } pub fn mk_candidate_node<'tcx>( @@ -164,6 +164,7 @@ impl Interners { necessity, num_vars, is_lhs_ty_var, + result: result_idx, #[cfg(debug_assertions)] debug_comparison: format!("{:?}", goal.predicate.kind().skip_binder()), @@ -193,10 +194,8 @@ impl Interners { return i; } - let tcx = infcx.tcx; - // First, try to get an impl header from the def_id ty - if let Some(header) = tcx.get_impl_header(def_id) { + if let Some(header) = infcx.tcx.get_impl_header(def_id) { return self.candidates.insert( CanKey::Impl(def_id), CandidateData::new_impl_header(infcx, &header), @@ -204,10 +203,12 @@ impl Interners { } // Second, try to get the span of the impl or just default to a fallback. - let string = tcx + let string = infcx + .tcx .span_of_impl(def_id) .map(|sp| { - tcx + infcx + .tcx .sess .source_map() .span_to_snippet(sp) diff --git a/crates/argus/src/proof_tree/mod.rs b/crates/argus/src/proof_tree/mod.rs index 4e73edb..808f839 100644 --- a/crates/argus/src/proof_tree/mod.rs +++ b/crates/argus/src/proof_tree/mod.rs @@ -32,21 +32,34 @@ crate::define_idx! { ResultIdx } -// FIXME: Nodes shouldn't be PartialEq, or Eq. They are currently -// so we can "detect cycles" by doing a raw comparison of the nodes. -// Of course, this isn't robust and should be removed ASAP. -// -// Same goes for Candidates and Goals. 
-#[derive(Serialize, Debug, Clone, PartialEq, Eq)] +#[derive(Serialize, Copy, Clone, Debug, PartialEq, Eq)] #[cfg_attr(feature = "testing", derive(TS))] #[cfg_attr(feature = "testing", ts(export))] pub enum Node { - Result(ResultIdx), + Goal(GoalIdx), Candidate(CandidateIdx), - Goal(GoalIdx, ResultIdx), + Result(ResultIdx), +} + +#[derive(Serialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "testing", derive(TS))] +#[cfg_attr(feature = "testing", ts(export))] +pub struct GoalData { + #[cfg_attr(feature = "testing", ts(type = "GoalPredicate"))] + value: serde_json::Value, + + necessity: ObligationNecessity, + num_vars: usize, + is_lhs_ty_var: bool, + result: ResultIdx, + + #[cfg(debug_assertions)] + #[cfg_attr(feature = "testing", ts(type = "string | undefined"))] + debug_comparison: String, } -#[derive(Serialize, Clone, Debug, PartialEq, Eq)] +#[derive(Serialize, Clone, Debug)] #[cfg_attr(feature = "testing", derive(TS))] #[cfg_attr(feature = "testing", ts(export))] pub enum CandidateData { @@ -58,7 +71,7 @@ pub enum CandidateData { Any(String), } -#[derive(Serialize, Clone, Debug, PartialEq, Eq)] +#[derive(Serialize, Clone, Debug)] #[cfg_attr(feature = "testing", derive(TS))] #[cfg_attr(feature = "testing", ts(export))] pub struct ResultData( @@ -67,23 +80,6 @@ pub struct ResultData( EvaluationResult, ); -#[derive(Serialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "testing", derive(TS))] -#[cfg_attr(feature = "testing", ts(export))] -pub struct GoalData { - #[cfg_attr(feature = "testing", ts(type = "GoalPredicate"))] - value: serde_json::Value, - - necessity: ObligationNecessity, - num_vars: usize, - is_lhs_ty_var: bool, - - #[cfg(debug_assertions)] - #[cfg_attr(feature = "testing", ts(type = "string | undefined"))] - debug_comparison: String, -} - #[derive(Serialize, Debug, Clone)] #[serde(rename_all = "camelCase")] #[cfg_attr(feature = "testing", derive(TS))] diff --git 
a/crates/argus/src/proof_tree/serialize.rs b/crates/argus/src/proof_tree/serialize.rs index 5d35317..5bf4492 100644 --- a/crates/argus/src/proof_tree/serialize.rs +++ b/crates/argus/src/proof_tree/serialize.rs @@ -79,18 +79,12 @@ impl SerializedTreeVisitor { return; } - let Node::Goal(from_idx, result) = &self.nodes[from] else { - return; - }; - let to_root = self.topology.path_to_root(from); - if to_root.iter_exclusive().any(|idx| { - let Node::Goal(here_idx, hresult) = &self.nodes[*idx] else { - return false; - }; - - here_idx == from_idx && hresult == result - }) { + let from_node = self.nodes[from]; + if to_root + .iter_exclusive() + .any(|middle| self.nodes[*middle] == from_node) + { self.cycle = Some(to_root.into()); } } diff --git a/crates/argus_cli/src/plugin.rs b/crates/argus_cli/src/plugin.rs index 56995d3..bdd7a6c 100644 --- a/crates/argus_cli/src/plugin.rs +++ b/crates/argus_cli/src/plugin.rs @@ -7,12 +7,13 @@ use std::{ }; use argus_lib::{ + analysis, emitter::SilentEmitter, + find_bodies::{find_bodies, find_enclosing_bodies}, types::{ObligationHash, ToTarget}, }; use clap::{Parser, Subcommand}; use fluid_let::fluid_set; -use log::{debug, info}; use rustc_errors::DiagCtxt; use rustc_hir::BodyId; use rustc_interface::interface::Result as RustcResult; @@ -22,7 +23,6 @@ use rustc_span::{FileName, RealFileName}; use rustc_utils::{ source_map::{ filename::Filename, - find_bodies::{find_bodies, find_enclosing_bodies}, range::{CharPos, CharRange}, }, timer::elapsed, @@ -188,7 +188,7 @@ impl RustcPlugin for ArgusPlugin { }; let v = run( - argus_lib::analysis::tree, + analysis::tree, Some(PathBuf::from(&file)), compute_target, &compiler_args, @@ -198,7 +198,7 @@ impl RustcPlugin for ArgusPlugin { Obligations { file, .. 
} => { let nothing = || None::<(ObligationHash, CharRange)>; let v = run( - argus_lib::analysis::obligations, + analysis::obligations, file.map(PathBuf::from), nothing, &compiler_args, @@ -224,7 +224,7 @@ fn run( rustc_start: Instant::now(), }; - info!("Starting rustc analysis..."); + log::info!("Starting rustc analysis..."); #[allow(unused_must_use)] let _ = run_with_callbacks(args, &mut callbacks); @@ -300,13 +300,16 @@ impl Option> get_file_of_body(tcx, body) { if target_file.map(|f| f.ends_with(&p)).unwrap_or(true) { - debug!("analyzing {:?}", body); + log::info!("analyzing {:?}", body); match analysis.analyze(tcx, body) { Ok(v) => Some(v), - Err(_) => None, + Err(e) => { + log::error!("Error analyzing body {:?} {:?}", body, e); + None + } } } else { - debug!("Skipping file {:?} due to target {:?}", p, self.file); + log::debug!("Skipping file {:?} due to target {:?}", p, self.file); None } } else { @@ -316,26 +319,18 @@ impl Option> self.result = match (self.compute_target.take().unwrap())() { Some(target) => { - log::debug!("Getting target"); let target = target.to_target(tcx).expect("Couldn't compute target"); - log::debug!("Got target"); let body_span = target.span.clone(); - - debug!("target: {target:?}"); - - fluid_set!(argus_lib::analysis::OBLIGATION_TARGET, target); + fluid_set!(analysis::OBLIGATION_TARGET, target); find_enclosing_bodies(tcx, body_span) .filter_map(|b| inner((body_span, b))) .collect::>() } - None => { - debug!("no target"); - find_bodies(tcx) - .into_iter() - .filter_map(inner) - .collect::>() - } + None => find_bodies(tcx) + .into_iter() + .filter_map(inner) + .collect::>(), }; }); diff --git a/crates/argus_cli/tests/workspaces/diesel/src/bad_insertable_field.rs b/crates/argus_cli/tests/workspaces/diesel/src/bad_insertable_field.rs index 0518b90..2314ee4 100644 --- a/crates/argus_cli/tests/workspaces/diesel/src/bad_insertable_field.rs +++ b/crates/argus_cli/tests/workspaces/diesel/src/bad_insertable_field.rs @@ -1,4 +1,4 @@ -use 
diesel::prelude::*; +use diesel::{prelude::*, sql_types::Text}; table! { users(id) { diff --git a/ide/packages/panoptes/src/TreeView/BottomUp.tsx b/ide/packages/panoptes/src/TreeView/BottomUp.tsx index 4e57fa8..e74fe23 100644 --- a/ide/packages/panoptes/src/TreeView/BottomUp.tsx +++ b/ide/packages/panoptes/src/TreeView/BottomUp.tsx @@ -6,41 +6,75 @@ import { TreeContext } from "./Context"; import { CollapsibleElement, DirRecursive } from "./Directory"; import { TreeInfo, TreeView } from "./TreeInfo"; +type TreeViewWithRoot = TreeView & { root: ProofNodeIdx }; + class TopologyBuilder { private topo: TreeTopology; - constructor() { + constructor(readonly root: ProofNodeIdx, readonly tree: TreeInfo) { this.topo = { children: {}, parent: {} }; } + public toView(): TreeViewWithRoot { + return { topology: this.topo, root: this.root }; + } + get topology() { return this.topo; } - public add(parent: ProofNodeIdx, child: ProofNodeIdx) { - if (!this.topo.children[parent]) { - this.topo.children[parent] = []; + add(from: ProofNodeIdx, to: ProofNodeIdx) { + if (this.topo.children[from] === undefined) { + this.topo.children[from] = []; } - this.topo.children[parent]!.push(child); - this.topo.parent[child] = parent; + this.topo.children[from].push(to); + this.topo.parent[to] = from; } -} -/** - * - * @param inputArray array to generate windows across - * @param size the size of window to generate - * @returns an array of windows of size `size` across `inputArray`. Final window may be smaller than `size`. - */ -function toWindows(arr: T[], size: number): T[][] { - let windowed = []; - for (let idx = 0; idx < arr.length; idx += size) { - windowed.push(_.slice(arr, idx, idx + size)); - } - return windowed; -} + /** + * + * @param root the root node from where this path should start. + * @param path a path to be added uniquely to the tree. 
+ */ + public addPathFromRoot(path: ProofNodeIdx[]) { + console.debug( + "Adding path", + _.keys(this.topo.children).length, + _.keys(this.topo.parent).length, + path + ); + const thisRoot = _.head(path); + if ( + thisRoot === undefined || + !_.isEqual(this.tree.node(thisRoot), this.tree.node(this.root)) + ) { + console.error("Path does not start from the root", { + a: thisRoot, + b: this.root, + c: this.tree.node(thisRoot!), + d: this.tree.node(this.root), + }); + throw new Error("Path does not start from the root"); + } -function toPairs(inputArray: T[]): [T, T][] { - return toWindows(inputArray, 2) as any; + let previous = this.root; + _.forEach(_.tail(path), node => { + // We want to add a node from `previous` to `node` only if an + // equivalent connection does not already exist. Equivalent is + // defined by the `Node` the `ProofNodeIdx` points to. + const currKids = this.topo.children[previous] ?? []; + const myNode = this.tree.node(node); + const hasEquivalent = _.find( + currKids, + kid => this.tree.node(kid) === myNode + ); + if (hasEquivalent === undefined) { + this.add(previous, node); + previous = node; + } else { + previous = hasEquivalent; + } + }); + } } /** @@ -52,37 +86,40 @@ function toPairs(inputArray: T[]): [T, T][] { function invertViewWithRoots( leaves: ProofNodeIdx[], tree: TreeInfo -): Array { - const groupedLeaves = _.groupBy(leaves, leaf => { - const node = tree.node(leaf); - if ("Goal" in node) { - console.debug("Grouping by goalidx", node.Goal[0]); - return node.Goal[0]; - } else { - throw new Error("Leaves should only be goals"); - } - }); - - console.debug("groupedLeaves", groupedLeaves); - - const groups: ProofNodeIdx[][] = _.values(groupedLeaves); +): Array { + const groups: ProofNodeIdx[][] = _.values( + _.groupBy(leaves, leaf => { + const node = tree.node(leaf); + if ("Goal" in node) { + return node.Goal; + } else { + throw new Error("Leaves must be goals"); + } + }) + ); return _.map(groups, group => { - const builder = new 
TopologyBuilder(); - const root = group[0]; - const parents = _.map(leaves, leaf => tree.parent(leaf)); - const pathsToRoot = _.map( - _.compact(parents), - parent => tree.pathToRoot(parent).path + // Each element of the group is equivalent, so just take the first + const builder = new TopologyBuilder(group[0], tree); + + // Get the paths to the root from all leaves + const pathsToRoot = _.map(group, parent => tree.pathToRoot(parent).path); + + console.debug( + "Paths to root", + _.sortBy( + _.map(pathsToRoot, p => _.map(p, n => tree.node(n))), + l => l.length + ) ); + _.forEach(pathsToRoot, path => { - builder.add(root, path[0]); - _.forEach(toPairs(path), ([a, b]) => { - builder.add(a, b); - }); + // No need to take the tail, `addPathFromRoot` checks that the + // roots are equal and then skips the first element. + builder.addPathFromRoot(path); }); - return { topology: builder.topology, root }; + return builder.toView(); }); } @@ -107,8 +144,8 @@ const BottomUp = () => { const [argusRecommended, others] = _.partition(invertedViews, view => { const node = tree.node(view.root); if ("Goal" in node) { - const goal = tree.goal(node.Goal[0]); - const result = tree.result(node.Goal[1]); + const goal = tree.goal(node.Goal); + const result = tree.result(goal.result); return result === "no" || result === "maybe-overflow" || !goal.isLhsTyVar; } else { // Leaves should only be goals... 
@@ -116,11 +153,7 @@ const BottomUp = () => { } }); - const LeafElement = ({ - leaf, - }: { - leaf: TreeView & { root: ProofNodeIdx }; - }) => ( + const LeafElement = ({ leaf }: { leaf: TreeViewWithRoot }) => ( { if ("Result" in node) { return ; } else if ("Goal" in node) { + const goal = treeInfo.goal(node.Goal); return ( <> - - + + ); } else if ("Candidate" in node) { diff --git a/ide/packages/panoptes/src/TreeView/TopDown.tsx b/ide/packages/panoptes/src/TreeView/TopDown.tsx index cd36e42..5ee6b7c 100644 --- a/ide/packages/panoptes/src/TreeView/TopDown.tsx +++ b/ide/packages/panoptes/src/TreeView/TopDown.tsx @@ -102,7 +102,7 @@ const TopDown = () => { }, k => { const node = tree.node(k); - "Goal" in node && tree.goal(node.Goal[0]).isLhsTyVar ? 1 : 0; + "Goal" in node && tree.goal(node.Goal).isLhsTyVar ? 1 : 0; } ); }; diff --git a/ide/packages/panoptes/src/TreeView/TreeInfo.ts b/ide/packages/panoptes/src/TreeView/TreeInfo.ts index e324cc2..f2dc51a 100644 --- a/ide/packages/panoptes/src/TreeView/TreeInfo.ts +++ b/ide/packages/panoptes/src/TreeView/TreeInfo.ts @@ -96,7 +96,7 @@ export class TreeInfo { const node = tree.nodes[n]; if ("Goal" in node) { - if (tree.goals[node.Goal[0]].necessity === "Yes") { + if (tree.goals[node.Goal].necessity === "Yes") { return "keep"; } else { return "remove-tree"; @@ -150,12 +150,16 @@ export class TreeInfo { return this.tree.results[n]; } + public resultOfGoal(n: GoalIdx): EvaluationResult { + return this.result(this.goal(n).result); + } + public nodeResult(n: ProofNodeIdx): EvaluationResult | undefined { const node = this.node(n); if ("Result" in node) { return this.result(node.Result); } else if ("Goal" in node) { - return this.result(node.Goal[1]); + return this.resultOfGoal(node.Goal); } else { return undefined; } @@ -225,7 +229,7 @@ export class TreeInfo { const numInferVars = _.map(pathToRoot.path, idx => { const node = this.tree.nodes[idx]; if ("Goal" in node) { - return this.goal(node.Goal[0]).numVars; + return 
this.goal(node.Goal).numVars; } else { return 0; } @@ -251,7 +255,7 @@ export class TreeInfo { const niv = _.reduce( this.children(n), (sum, k) => sum + this.inferVars(k), - "Goal" in node ? this.goal(node.Goal[0]).numVars : 0 + "Goal" in node ? this.goal(node.Goal).numVars : 0 ); this.numInferVars.set(n, niv); return niv; diff --git a/ide/packages/panoptes/src/print/private/path.tsx b/ide/packages/panoptes/src/print/private/path.tsx index a28963c..31f3266 100644 --- a/ide/packages/panoptes/src/print/private/path.tsx +++ b/ide/packages/panoptes/src/print/private/path.tsx @@ -137,8 +137,7 @@ export const PrintImplFor = ({ path, ty }: { path?: DefinedPath; ty: any }) => { const p = path === undefined ? null : ( <> - - for{" "} + for{" "} ); @@ -162,7 +161,8 @@ export const PrintImplAs = ({ path, ty }: { path?: DefinedPath; ty: any }) => { return ( <> - {p} + + {p} ); };