diff --git a/Cargo.lock b/Cargo.lock
index 7e833003..3fa18ef1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -28,9 +28,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.91"
+version = "1.0.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
+checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
 
 [[package]]
 name = "anymap2"
@@ -294,9 +294,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
 
 [[package]]
 name = "cpufeatures"
-version = "0.2.14"
+version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0"
+checksum = "0ca741a962e1b0bff6d724a1a0958b686406e853bb14061f218562e1896f95e6"
 dependencies = [
 "libc",
 ]
@@ -335,7 +335,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
 dependencies = [
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
 
 [[package]]
@@ -359,7 +359,7 @@ dependencies = [
 "proc-macro2",
 "quote",
 "rustc_version",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
 
 [[package]]
@@ -427,9 +427,9 @@ dependencies = [
 
 [[package]]
 name = "fastrand"
-version = "2.1.1"
+version = "2.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
+checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4"
 
 [[package]]
 name = "fixedbitset"
@@ -515,7 +515,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
 
 [[package]]
@@ -778,9 +778,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
 
 [[package]]
 name = "hashbrown"
-version = "0.15.0"
+version = "0.15.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
+checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3"
 
 [[package]]
 name = "heck"
@@ -854,7 +854,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
 dependencies = [
 "equivalent",
- "hashbrown 0.15.0",
+ "hashbrown 0.15.1",
 ]
 
 [[package]]
@@ -921,9 +921,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
 
 [[package]]
 name = "libc"
-version = "0.2.161"
+version = "0.2.162"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
+checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398"
 
 [[package]]
 name = "linux-raw-sys"
@@ -1239,7 +1239,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
 
 [[package]]
@@ -1459,9 +1459,9 @@ dependencies = [
 
 [[package]]
 name = "regex-automata"
-version = "0.4.8"
+version = "0.4.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
+checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
 "aho-corasick",
 "memchr",
@@ -1497,9 +1497,9 @@ dependencies = [
 
 [[package]]
 name = "rustix"
-version = "0.38.38"
+version = "0.38.40"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa260229e6538e52293eeb577aabd09945a09d6d9cc0fc550ed7529056c2e32a"
+checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0"
 dependencies = [
 "bitflags 2.6.0",
 "errno",
@@ -1579,9 +1579,9 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
 
 [[package]]
 name = "serde"
-version = "1.0.213"
+version = "1.0.215"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ea7893ff5e2466df8d720bb615088341b295f849602c6956047f8f80f0e9bc1"
+checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
 dependencies = [
 "serde_derive",
 ]
@@ -1599,13 +1599,13 @@ dependencies = [
 
 [[package]]
 name = "serde_derive"
-version = "1.0.213"
+version = "1.0.215"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e85ad2009c50b58e87caa8cd6dac16bdf511bbfb7af6c33df902396aa480fa5"
+checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
 
 [[package]]
@@ -1744,9 +1744,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.85"
+version = "2.0.87"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -1767,9 +1767,9 @@ dependencies = [
 
 [[package]]
 name = "tempfile"
-version = "3.13.0"
+version = "3.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b"
+checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c"
 dependencies = [
 "cfg-if",
 "fastrand",
@@ -1812,29 +1812,29 @@ checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c"
 
 [[package]]
 name = "thiserror"
-version = "1.0.65"
+version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
 dependencies = [
 "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.65"
+version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
 
 [[package]]
 name = "tokio"
-version = "1.41.0"
+version = "1.41.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb"
+checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33"
 dependencies = [
 "backtrace",
 "pin-project-lite",
@@ -1879,7 +1879,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
 
 [[package]]
@@ -2075,7 +2075,7 @@ dependencies = [
 "once_cell",
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 "wasm-bindgen-shared",
 ]
 
@@ -2109,7 +2109,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 "wasm-bindgen-backend",
 "wasm-bindgen-shared",
 ]
@@ -2245,9 +2245,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
 
 [[package]]
 name = "xml-rs"
-version = "0.8.22"
+version = "0.8.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af4e2e2f7cba5a093896c1e150fbfe177d1883e7448200efb81d40b9d339ef26"
+checksum = "af310deaae937e48a26602b730250b4949e125f468f11e6990be3e5304ddd96f"
 
 [[package]]
 name = "yew"
@@ -2320,5 +2320,5 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.85",
+ "syn 2.0.87",
 ]
diff --git a/aris/src/expr.rs b/aris/src/expr.rs
index 972d8306..808cfdd8 100644
--- a/aris/src/expr.rs
+++ b/aris/src/expr.rs
@@ -868,20 +868,20 @@ impl Expr {
     }
 
     /// Helper function to collect unique expressions, flattening for a given operator
-    fn collect_unique_exprs_idempotence(expr: &Expr, op: Op, unique_exprs: &mut HashSet<Expr>) {
+    fn collect_unique_exprs_idempotence(expr: &Expr, op: Op, unique_exprs: &mut BTreeSet<Expr>) {
         match expr {
             Expr::Assoc { op: expr_op, exprs } if *expr_op == op => {
+                // Flatten nested expressions for associative operations
                 for sub_expr in exprs {
-                    // Recursively collect unique expressions for nested associative structures
                     Expr::collect_unique_exprs_idempotence(sub_expr, op, unique_exprs);
                 }
             }
             _ => {
-                unique_exprs.insert(expr.clone());
+                let normalized_expr = expr.clone().normalize_idempotence();
+                unique_exprs.insert(normalized_expr);
            }
         }
     }
-
     /// Reduce an expression over idempotence, that is:
     /// A & A -> A
     /// A | A -> A
@@ -889,15 +889,15 @@ pub fn normalize_idempotence(self) -> Expr {
         match self {
             Expr::Assoc { op, exprs } => {
-                let mut unique_exprs = HashSet::new();
+                let mut unique_exprs = BTreeSet::new();
 
                 // Recursively collect unique expressions by flattening nested structures
                 for expr in exprs {
-                    let normalized_expr = expr.normalize_idempotence(); // Apply idempotence normalization at each level
+                    let normalized_expr = expr.normalize_idempotence();
                     Expr::collect_unique_exprs_idempotence(&normalized_expr, op, &mut unique_exprs);
                 }
 
-                // Recursively normalize each unique expression
+                // If there's only one unique expression, return it; otherwise, reassemble the expression
                 let unique_exprs: Vec<Expr> = unique_exprs.into_iter().collect();
 
                 if unique_exprs.len() == 1 {
@@ -910,6 +910,135 @@
         }
     }
 
+    fn collect_unique_exprs_absorption(expr: &Expr, op: Op, unique_exprs: &mut BTreeSet<Expr>) {
+        match expr {
+            Expr::Assoc { op: expr_op, exprs } if *expr_op == op => {
+                // Recursively handle nested associative expressions
+                for sub_expr in exprs {
+                    Self::collect_unique_exprs_absorption(sub_expr, op, unique_exprs);
+                }
+            }
+            Expr::Assoc { op: inner_op, exprs } if (op == Op::Or && *inner_op == Op::And) || (op == Op::And && *inner_op == Op::Or) => {
+                if exprs.iter().any(|e| unique_exprs.contains(e)) {
+                    if let Some(e) = exprs.iter().find(|e| unique_exprs.contains(e)) {
+                        unique_exprs.insert(e.clone());
+                    }
+                } else {
+                    unique_exprs.insert(expr.clone());
+                }
+            }
+            _ => {
+                unique_exprs.insert(expr.clone());
+            }
+        }
+    }
+
+    pub fn normalize_absorption(self) -> Expr {
+        if let Expr::Assoc { op, exprs } = self {
+            let mut unique_exprs = BTreeSet::new();
+            for expr in exprs {
+                let normalized_expr = expr.normalize_absorption();
+                Self::collect_unique_exprs_absorption(&normalized_expr, op, &mut unique_exprs);
+            }
+            let unique_exprs: Vec<Expr> = unique_exprs.into_iter().collect();
+            if unique_exprs.len() == 1 {
+                unique_exprs.into_iter().next().unwrap()
+            } else {
+                Expr::Assoc { op, exprs: unique_exprs }
+            }
+        } else {
+            self
+        }
+    }
+
+    pub fn normalize_reduction(self) -> Expr {
+        match self {
+            // Handle Associative operations (AND/OR)
+            Expr::Assoc { op, exprs } => {
+                let mut reduced_exprs = BTreeSet::new();
+
+                // Recurse through the expressions inside the Assoc operation
+                for expr in exprs {
+                    let normalized_expr = expr.normalize_reduction();
+                    Self::collect_reduction_exprs(&normalized_expr, op, &mut reduced_exprs);
+                }
+
+                if op == Op::And && reduced_exprs.len() == 2 {
+                    let mut expr_iter = reduced_exprs.clone().into_iter();
+                    let first = expr_iter.next().unwrap();
+                    let second = expr_iter.next().unwrap();
+
+                    // Check if the second expression is a quantifier
+                    if let Expr::Quant { kind, name, body } = second {
+                        return Expr::Assoc { op: Op::Or, exprs: vec![first, Expr::Quant { kind, name, body }] };
+                    }
+                }
+                Expr::Assoc { op, exprs: reduced_exprs.into_iter().collect() }
+            }
+            // Recursively normalize quantifier body
+            Expr::Quant { kind, name, body } => {
+                let normalized_body = body.normalize_reduction();
+                Expr::Quant { kind, name, body: Box::new(normalized_body) }
+            }
+            // If the expression is a negation, attempt to simplify the negated quantifier
+            Expr::Not { operand } => {
+                match *operand {
+                    // If the operand is a quantifier, recursively normalize its body
+                    Expr::Quant { kind, name, body } => {
+                        let normalized_body = body.normalize_reduction();
+                        Expr::Quant { kind, name, body: Box::new(normalized_body) }
+                    }
+                    _ => Expr::Not { operand: Box::new(operand.normalize_reduction()) },
+                }
+            }
+            _ => self,
+        }
+    }
+
+    fn collect_reduction_exprs(expr: &Expr, op: Op, reduced_exprs: &mut BTreeSet<Expr>) {
+        match expr {
+            // If the expression is an associative operation with the same operator, recurse into its subexpressions
+            Expr::Assoc { op: expr_op, exprs } if *expr_op == op => {
+                for sub_expr in exprs {
+                    Self::collect_reduction_exprs(sub_expr, op, reduced_exprs);
+                }
+            }
+            // If the expression involves an AND/OR between a quantifier and another expression, handle simplifications
+            Expr::Assoc { op: inner_op, exprs } => match (op, inner_op) {
+                (Op::And, Op::Or) | (Op::Or, Op::And) => {
+                    let mut other_terms = vec![];
+                    let mut negated_match = None;
+                    for sub_expr in exprs {
+                        match sub_expr {
+                            // If the subexpression involves a negated quantifier, simplify it
+                            Expr::Not { operand } if reduced_exprs.contains(operand) => {
+                                negated_match = Some(operand.as_ref().clone());
+                            }
+                            _ => other_terms.push(sub_expr.clone()),
+                        }
+                    }
+                    if negated_match.is_some() {
+                        reduced_exprs.extend(other_terms);
+                    } else {
+                        reduced_exprs.insert(expr.clone());
+                    }
+                }
+                _ => {
+                    reduced_exprs.insert(expr.clone());
+                }
+            },
+            // Simplify quantifier body
+            Expr::Quant { kind, name, body } => {
+                let normalized_body = body.clone().normalize_reduction();
+                let simplified_expr = Expr::Quant { kind: *kind, name: name.clone(), body: Box::new(normalized_body) };
+                reduced_exprs.insert(simplified_expr);
+            }
+            _ => {
+                reduced_exprs.insert(expr.clone());
+            }
+        }
+    }
+
     /// Helper function for complement quantified expressions
     fn is_universal_tautology(&self) -> bool {
         match self {
diff --git a/aris/src/proofs/proof_tests.rs b/aris/src/proofs/proof_tests.rs
index 31832caa..bb9f13fa 100644
--- a/aris/src/proofs/proof_tests.rs
+++ b/aris/src/proofs/proof_tests.rs
@@ -731,6 +731,10 @@ pub fn test_idempotence<P: Proof>() -> (P, Vec<PjRef<P>>, Vec<PjRef<P>>) {
     let r7 = prf.add_premise(p("A & A & B"));
     let r8 = prf.add_premise(p("(¬A | (¬B | (¬C | (¬A | ¬B)))) & D"));
     let r9 = prf.add_premise(p("W | (¬X & (¬Y & (¬Z & (¬X & ¬Y))))"));
+
+    let r27 = prf.add_premise(p("(P | Q) & (P | R) & (P | Q)"));
+    let r29 = prf.add_premise(p("P ∧ R ∧ (P ∨ Q)"));
+
     let r10 = prf.add_step(Justification(p("A"), RuleM::Idempotence, vec![i(r1)], vec![]));
     let r11 = prf.add_step(Justification(p("A"), RuleM::Idempotence, vec![i(r2)], vec![]));
     let r12 = prf.add_step(Justification(p("A"), RuleM::Idempotence, vec![i(r3)], vec![]));
@@ -749,7 +753,9 @@ pub fn test_idempotence<P: Proof>() -> (P, Vec<PjRef<P>>, Vec<PjRef<P>>) {
     let r25 = prf.add_step(Justification(p("(¬A | ¬B | ¬C) & D"), RuleM::Idempotence, vec![i(r8)], vec![]));
     let r26 = prf.add_step(Justification(p("W | (¬X & ¬Y & ¬Z)"), RuleM::Idempotence, vec![i(r9)], vec![]));
 
-    (prf, vec![i(r10), i(r11), i(r12), i(r13), i(r14), i(r18), i(r19), i(r20), i(r24), i(r25), i(r26)], vec![i(r15), i(r16), i(r17), i(r21), i(r22), i(r23)])
+    let r28 = prf.add_step(Justification(p("(P | Q) & (P | R)"), RuleM::Idempotence, vec![i(r27)], vec![]));
+    let r30 = prf.add_step(Justification(p("P ∧ R ∧ Q"), RuleM::Idempotence, vec![i(r29)], vec![]));
+    (prf, vec![i(r10), i(r11), i(r12), i(r13), i(r14), i(r18), i(r19), i(r20), i(r24), i(r25), i(r26), i(r28)], vec![i(r15), i(r16), i(r17), i(r21), i(r22), i(r23), i(r30)])
 }
 
 pub fn test_doublenegation<P: Proof>() -> (P, Vec<PjRef<P>>, Vec<PjRef<P>>) {
@@ -956,11 +962,15 @@ pub fn test_reduction<P: Proof>() -> (P, Vec<PjRef<P>>, Vec<PjRef<P>>) {
     let p6 = prf.add_premise(p("B & (C | (~C & ~A))"));
     let p7 = prf.add_premise(p("A | (~A & (~~A | B))"));
     let p8 = prf.add_premise(p("D | (~A & (~~A | B))"));
+    let p9 = prf.add_premise(p("P & M & (~P | Q)"));
+    let p10 = prf.add_premise(p("P | M | (~P & Q)"));
+    // let p11 = prf.add_premise(p("~P & (P | Q)"));
 
     let r1 = prf.add_step(Justification(p("A & B"), RuleM::Reduction, vec![i(p1.clone())], vec![]));
     let r2 = prf.add_step(Justification(p("~A & B"), RuleM::Reduction, vec![i(p2.clone())], vec![]));
     let r3 = prf.add_step(Justification(p("A | B"), RuleM::Reduction, vec![i(p3.clone())], vec![]));
     let r4 = prf.add_step(Justification(p("~B | A"), RuleM::Reduction, vec![i(p4.clone())], vec![]));
+    // let r18 = prf.add_step(Justification(p("¬P ∧ Q"), RuleM::Reduction, vec![i(p11.clone())], vec![]));
 
     let r5 = prf.add_step(Justification(p("(forall A (A & B)) | C"), RuleM::Reduction, vec![i(p5)], vec![]));
     let r6 = prf.add_step(Justification(p("A"), RuleM::Reduction, vec![i(p1)], vec![]));
@@ -973,7 +983,12 @@
     let r12 = prf.add_step(Justification(p("A | (~A & B)"), RuleM::Reduction, vec![i(p7)], vec![]));
     let r13 = prf.add_step(Justification(p("D | (~A & B)"), RuleM::Reduction, vec![i(p8)], vec![]));
 
-    (prf, vec![i(r1), i(r2), i(r3), i(r4), i(r5), i(r10), i(r12), i(r13)], vec![i(r6), i(r7), i(r8), i(r9), i(r11)])
+    let r14 = prf.add_step(Justification(p("P & M & Q"), RuleM::Reduction, vec![i(p9.clone())], vec![]));
+    let r15 = prf.add_step(Justification(p("(P & M) | Q"), RuleM::Reduction, vec![i(p9)], vec![]));
+    let r16 = prf.add_step(Justification(p("P | M | Q"), RuleM::Reduction, vec![i(p10.clone())], vec![]));
+    let r17 = prf.add_step(Justification(p("(P | M) & Q"), RuleM::Reduction, vec![i(p10)], vec![]));
+
+    (prf, vec![i(r1), i(r2), i(r3), i(r4), i(r5), i(r10), i(r12), i(r13), i(r14), i(r16)], vec![i(r6), i(r7), i(r8), i(r9), i(r11), i(r15), i(r17)])
 }
 
 pub fn test_adjacency<P: Proof>() -> (P, Vec<PjRef<P>>, Vec<PjRef<P>>) {
diff --git a/aris/src/rewrite_rules.rs b/aris/src/rewrite_rules.rs
index 1f7f68d4..7fec83ff 100644
--- a/aris/src/rewrite_rules.rs
+++ b/aris/src/rewrite_rules.rs
@@ -33,11 +33,11 @@ impl RewriteRule {
         reduce_pattern(e, &self.reductions)
     }
 
-    /// Reduce an expression with the rewrite rule's reductions, yielding a set
-    /// of possible reductions
-    pub fn reduce_set(&self, e: Expr) -> HashSet<Expr> {
-        reduce_pattern_set(e, &self.reductions)
-    }
+    // /// Reduce an expression with the rewrite rule's reductions, yielding a set
+    // /// of possible reductions
+    // pub fn reduce_set(&self, e: Expr) -> HashSet<Expr> {
+    //     reduce_pattern_set(e, &self.reductions)
+    // }
 }
 
 /// Permute all binary and associative operations in an expression, resulting in a list of
@@ -127,13 +127,13 @@ fn reduce_pattern(e: Expr, patterns: &[(Expr, Expr)]) -> Expr {
     e.transform(&|expr| reduce_transform_func(expr, &patterns))
 }
 
-/// Like `reduce_pattern()`, but creates a set of possible reductions. This set
-/// will contain all levels of reduction (up to full normalization), and on all
-/// sub-nodes of the expression.
-fn reduce_pattern_set(e: Expr, patterns: &[(Expr, Expr)]) -> HashSet<Expr> {
-    let patterns = freevarsify_pattern(&e, patterns);
-    e.transform_set(&|expr| reduce_transform_func(expr, &patterns))
-}
+// /// Like `reduce_pattern()`, but creates a set of possible reductions. This set
+// /// will contain all levels of reduction (up to full normalization), and on all
+// /// sub-nodes of the expression.
+// fn reduce_pattern_set(e: Expr, patterns: &[(Expr, Expr)]) -> HashSet<Expr> {
+//     let patterns = freevarsify_pattern(&e, patterns);
+//     e.transform_set(&|expr| reduce_transform_func(expr, &patterns))
+// }
 
 /// Helper function for `reduce_pattern()` and `reduce_pattern_set()`; try to
 /// reduce `expr` using `patterns`. The returned `bool` in the tuple indicates
diff --git a/aris/src/rules.rs b/aris/src/rules.rs
index b1e84c41..771d9b9d 100644
--- a/aris/src/rules.rs
+++ b/aris/src/rules.rs
@@ -1019,7 +1019,10 @@ fn check_by_normalize_first_expr<P: Proof, F>(p: &P, deps: Vec<PjRef<P>>, conclu
 where
     F: Fn(Expr) -> Expr,
 {
-    let premise = p.lookup_expr_or_die(&deps[0])?;
+    let mut premise = p.lookup_expr_or_die(&deps[0])?;
+    if commutative {
+        premise = premise.sort_commutative_ops(restriction);
+    }
     let mut p = normalize_fn(premise.clone());
     let mut q = normalize_fn(conclusion);
     if commutative {
@@ -1029,7 +1032,7 @@ where
 
     if p == q {
         Ok(())
     } else {
-        Err(ProofCheckError::Other(format!("{p} and {q} are not equal.")))
+        Err(ProofCheckError::Other(format!("{p} and {q} are not equal. {:?}", premise)))
     }
 }
@@ -1059,24 +1062,24 @@ fn check_by_rewrite_rule_confl<P: Proof>(p: &P, deps: Vec<PjRef<P>>, conclusion:
     check_by_normalize_first_expr(p, deps, conclusion, commutative, |e| rule.reduce(e), restriction)
 }
 
-fn check_by_rewrite_rule_non_confl<P: Proof>(p: &P, deps: Vec<PjRef<P>>, conclusion: Expr, commutative: bool, rule: &RewriteRule, restriction: &str) -> Result<(), ProofCheckError<PjRef<P>, P::SubproofReference>> {
-    let premise = p.lookup_expr_or_die(&deps[0])?;
-    let premise_set = rule.reduce_set(premise.clone());
-    let conclusion_set = rule.reduce_set(conclusion.clone());
-    let (premise_set, conclusion_set) = if commutative {
-        let sort_ops = |set: HashSet<Expr>| set.into_iter().map(|expr| expr.sort_commutative_ops(restriction)).collect();
-        (sort_ops(premise_set), sort_ops(conclusion_set))
-    } else {
-        (premise_set, conclusion_set)
-    };
-    // The premise and conclusion are equal if the set intersection is nonempty
-    let is_eq = premise_set.intersection(&conclusion_set).next().is_some();
-    if is_eq {
-        Ok(())
-    } else {
-        Err(ProofCheckError::Other(format!("{premise} and {conclusion} are not equal.")))
-    }
-}
+// fn check_by_rewrite_rule_non_confl<P: Proof>(p: &P, deps: Vec<PjRef<P>>, conclusion: Expr, commutative: bool, rule: &RewriteRule, restriction: &str) -> Result<(), ProofCheckError<PjRef<P>, P::SubproofReference>> {
+//     let premise = p.lookup_expr_or_die(&deps[0])?;
+//     let premise_set = rule.reduce_set(premise.clone());
+//     let conclusion_set = rule.reduce_set(conclusion.clone());
+//     let (premise_set, conclusion_set) = if commutative {
+//         let sort_ops = |set: HashSet<Expr>| set.into_iter().map(|expr| expr.sort_commutative_ops(restriction)).collect();
+//         (sort_ops(premise_set), sort_ops(conclusion_set))
+//     } else {
+//         (premise_set, conclusion_set)
+//     };
+//     // The premise and conclusion are equal if the set intersection is nonempty
+//     let is_eq = premise_set.intersection(&conclusion_set).next().is_some();
+//     if is_eq {
+//         Ok(())
+//     } else {
+//         Err(ProofCheckError::Other(format!("{premise} and {conclusion} are not equal.")))
+//     }
+// }
 
 impl RuleT for BooleanEquivalence {
     fn get_name(&self) -> String {
@@ -1123,8 +1126,8 @@ impl RuleT for BooleanEquivalence {
             Identity => check_by_rewrite_rule_confl(p, deps, conclusion, false, &equivs::IDENTITY, "none"),
             Annihilation => check_by_rewrite_rule_confl(p, deps, conclusion, false, &equivs::ANNIHILATION, "none"),
             Inverse => check_by_rewrite_rule_confl(p, deps, conclusion, false, &equivs::INVERSE, "none"),
-            Absorption => check_by_rewrite_rule_confl(p, deps, conclusion, false, &equivs::ABSORPTION, "none"),
-            Reduction => check_by_rewrite_rule_non_confl(p, deps, conclusion, true, &equivs::REDUCTION, "none"),
+            Absorption => check_by_normalize_first_expr(p, deps, conclusion, true, |e| e.normalize_absorption(), "none"),
+            Reduction => check_by_normalize_first_expr(p, deps, conclusion, true, |e| e.normalize_reduction(), "none"),
             Adjacency => check_by_rewrite_rule_confl(p, deps, conclusion, false, &equivs::ADJACENCY, "none"),
         }
     }
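// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): the new absorption checker
// flattens an associative AND/OR node into an ordered BTreeSet and lets an
// operand absorb any dual-operator term that contains it, so A | (A & B) -> A.
// Using a BTreeSet rather than a HashSet keeps operand order deterministic,
// which is what allows the checker to compare normalized forms directly.
// The `Expr`/`Op` types and helper names below are simplified stand-ins
// written only for this standalone example; they are not the aris API.
// ---------------------------------------------------------------------------
use std::collections::BTreeSet;

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Op { And, Or }

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Expr {
    Var(String),
    Assoc { op: Op, exprs: Vec<Expr> },
}

impl Expr {
    fn var(s: &str) -> Expr { Expr::Var(s.to_string()) }

    // Flatten same-operator children; absorb dual-operator children that
    // contain an operand we have already collected.
    fn collect_absorption(expr: &Expr, op: &Op, uniq: &mut BTreeSet<Expr>) {
        match expr {
            Expr::Assoc { op: inner, exprs } if inner == op => {
                for e in exprs {
                    Self::collect_absorption(e, op, uniq);
                }
            }
            Expr::Assoc { op: inner, exprs } if inner != op => {
                if let Some(e) = exprs.iter().find(|e| uniq.contains(*e)) {
                    uniq.insert(e.clone()); // A | (A & B) collapses to A
                } else {
                    uniq.insert(expr.clone());
                }
            }
            _ => {
                uniq.insert(expr.clone());
            }
        }
    }

    fn normalize_absorption(self) -> Expr {
        if let Expr::Assoc { op, exprs } = self {
            let mut uniq = BTreeSet::new();
            for e in exprs {
                Self::collect_absorption(&e.normalize_absorption(), &op, &mut uniq);
            }
            let uniq: Vec<Expr> = uniq.into_iter().collect();
            if uniq.len() == 1 {
                uniq.into_iter().next().unwrap()
            } else {
                Expr::Assoc { op, exprs: uniq }
            }
        } else {
            self
        }
    }
}

fn main() {
    // A | (A & B)  -->  A
    let e = Expr::Assoc {
        op: Op::Or,
        exprs: vec![
            Expr::var("A"),
            Expr::Assoc { op: Op::And, exprs: vec![Expr::var("A"), Expr::var("B")] },
        ],
    };
    assert_eq!(e.normalize_absorption(), Expr::var("A"));
    println!("absorption sketch ok");
}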