Skip to content

Commit

Permalink
more tests and tweaks
Browse files Browse the repository at this point in the history
  • Loading branch information
sweeneyde committed Mar 19, 2024
1 parent 290ce41 commit cf95256
Show file tree
Hide file tree
Showing 5 changed files with 140 additions and 25 deletions.
Empty file.
6 changes: 3 additions & 3 deletions monoid_homology/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from .crs import CompleteRewritingSystem, CRS
from .from_table import (
op_from_index,
all_gens_crs,
all_ops,
op_from_id,
find_best_gens_crs,
best_gens_crs_from_index,
)
from .knuth_bendix import kb_normalize
35 changes: 18 additions & 17 deletions monoid_homology/from_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,12 @@
Manipulating finite semigroups by their multiplication table.
"""

from itertools import chain, combinations
from itertools import combinations
from functools import cache
from pathlib import Path
import string

from .knuth_bendix import normalize
from .knuth_bendix import kb_normalize
from .crs import CRS

DATA_DIR = Path(__file__).parent.parent / "finite_semigroup_data"
Expand All @@ -23,7 +23,10 @@ def all_op_strings(num_elts):
def string_to_op(s):
    """Decode a ';'-separated table string into a multiplication table
    (a list of rows, each row a list of ints)."""
    rows = s.rstrip().split(";")
    return [[int(digit) for digit in row] for row in rows]

def op_from_index(num_elts, index):
def all_ops(num_elts):
    """Iterate over the multiplication tables of all semigroups of order
    *num_elts*, in the stored (smallsemi) order.  Yields each table lazily."""
    for table_string in all_op_strings(num_elts):
        yield string_to_op(table_string)

def op_from_id(num_elts, index):
    """Return the multiplication table of the semigroup of order *num_elts*
    with 1-based index *index* (the GAP smallsemi numbering).

    Raises ValueError for any index < 1: the original check only caught 0,
    so a negative index silently selected the wrong table through Python's
    negative indexing.
    """
    if index < 1:
        raise ValueError("GAP smallsemi package uses 1-based indexing")
    return string_to_op(all_op_strings(num_elts)[index - 1])
Expand Down Expand Up @@ -87,9 +90,15 @@ def all_gens_crs(op):
pairs = relation_str_pairs(op, rep, alphabet, gens)
return CRS(alphabet, pairs)

def nonempty_powerset(iterable):
    """Yield every non-empty subset of *iterable* as a tuple,
    smallest subsets first (the empty tuple is excluded)."""
    elements = list(iterable)
    for size in range(1, len(elements) + 1):
        yield from combinations(elements, size)
def crs_from_gens(op, gens):
    """Build a normalized rewriting system presenting *op* on the generating
    set *gens*, or return None when *gens* does not generate the semigroup.

    Generators are written with the first len(gens) symbols of SYMBOLS.
    """
    rep = representation_by_generators(op, gens)
    if rep is None:
        # gens is not a generating set for this table.
        return None
    alphabet = SYMBOLS[:len(gens)]
    relations = relation_str_pairs(op, rep, alphabet, gens)
    # NOTE(review): assumes Knuth-Bendix completion terminates on these
    # presentations -- TODO confirm.
    relations = kb_normalize(relations)
    return CRS(alphabet, relations)

def find_best_gens_crs(op, maxdim, verbose=False):
"""
Expand All @@ -100,14 +109,10 @@ def find_best_gens_crs(op, maxdim, verbose=False):

cost_best_crs, best_crs = None, None
for num_gens in range(1, n + 1):
alphabet = SYMBOLS[:num_gens]
for gens in combinations(range(n), num_gens):
rep = representation_by_generators(op, gens)
if rep is None:
crs = crs_from_gens(op, gens)
if crs is None:
continue
pairs = relation_str_pairs(op, rep, alphabet, gens)
pairs = normalize(pairs) # this should terminate?
crs = CRS(alphabet, pairs)
lengths = crs.essential_counts(maxdim + 1)
if verbose:
print(gens, lengths)
Expand All @@ -116,9 +121,5 @@ def find_best_gens_crs(op, maxdim, verbose=False):
if best_crs is None or cost < cost_best_crs:
cost_best_crs, best_crs = cost, crs
if verbose:
print("Best:", best_crs.essential_counts(maxdim + 1))
print("Best:", gens, best_crs.essential_counts(maxdim + 1))
return best_crs

def best_gens_crs_from_index(num_elts, index, maxdim, verbose=False):
    """Look up the table with the given 1-based smallsemi index and search
    for its cheapest generating-set rewriting system (see find_best_gens_crs).

    Fix: this wrapper still called op_from_index, which was renamed to
    op_from_id in this same change set, so the call would raise NameError.
    """
    op = op_from_id(num_elts, index)
    return find_best_gens_crs(op, maxdim, verbose=verbose)
4 changes: 2 additions & 2 deletions monoid_homology/knuth_bendix.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
Example:
>>> normalize([('xxx', ''), ('yyy', ''), ('xyxyxy', '')])
>>> kb_normalize([('xxx', ''), ('yyy', ''), ('xyxyxy', '')])
[('xxx', ''), ('yyy', ''), ('yyxx', 'xyxy'), ('yxyx', 'xxyy')]
The output list is normalized so that applying
Expand Down Expand Up @@ -45,7 +45,7 @@ def shortlex_ordered(a: str, b: str):
else:
return (b, a)

def normalize(rules, iteration_limit=20, verbose=False):
def kb_normalize(rules, iteration_limit=20, verbose=False):
# copy to verify at the end
rules0 = [(left, right) for left, right in rules]

Expand Down
120 changes: 117 additions & 3 deletions test/test_monoid_homology.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,19 @@
from monoid_homology import CRS
from itertools import combinations

from monoid_homology import (
CRS,
all_ops,
op_from_id,
find_best_gens_crs,
kb_normalize,
)

from monoid_homology.from_table import (
string_to_op,
representation_by_generators,
all_gens_crs,
crs_from_gens,
)

def test_crs_homology():
# Uses Sympy because it's hard to set up CI for SAGE.
Expand Down Expand Up @@ -31,7 +46,6 @@ def test_crs_homology():
assert rect33.sympy_rational_homology_ranks(3) == [1, 0, 4, 0]



def test_crs_essentials():
rect22 = CRS("xy", [("xx", "x"), ("yy", "y"), ("xyx", "x"), ("yxy", "y")])
rect22.compute_essentials(3)
Expand All @@ -56,4 +70,104 @@ def test_crs_essentials():
2: {("x", "y"), ("y", "x")},
3: {("x", "y", "x"), ("y", "x", "y")},
}.items():
assert set(Z.essentials[dim]) == ess_set
assert set(Z.essentials[dim]) == ess_set

def test_all_ops():
    """all_ops enumerates every semigroup table of a given order, and each
    table it yields is associative.

    Fixes: the function was named all_ops, so (a) pytest never collected it
    and (b) it shadowed the imported all_ops, making any call recurse forever.
    Also, all_ops returns an iterator (it is built with map), so its output
    must be materialized with list() before comparing against literal lists.
    """
    assert list(all_ops(1)) == [
        [[0]]
    ]
    assert list(all_ops(2)) == [
        [[0, 0], [0, 0]],
        [[0, 1], [1, 0]],
        [[0, 0], [0, 1]],
        [[0, 0], [1, 1]]
    ]
    assert list(all_ops(3)) == [
        [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
        [[0, 0, 2], [0, 0, 2], [2, 2, 0]],
        [[0, 1, 1], [1, 0, 0], [1, 0, 0]],
        [[0, 0, 0], [0, 0, 0], [0, 0, 1]],
        [[0, 0, 0], [0, 0, 0], [0, 0, 2]],
        [[0, 0, 0], [0, 0, 0], [0, 1, 2]],
        [[0, 0, 0], [0, 0, 0], [2, 2, 2]],
        [[0, 0, 0], [0, 0, 1], [0, 1, 2]],
        [[0, 0, 2], [0, 0, 2], [2, 2, 2]],
        [[0, 1, 0], [1, 0, 1], [0, 1, 2]],
        [[0, 1, 2], [1, 0, 2], [2, 2, 2]],
        [[0, 0, 0], [0, 1, 0], [0, 0, 2]],
        [[0, 0, 0], [0, 1, 0], [2, 2, 2]],
        [[0, 0, 0], [0, 1, 1], [0, 1, 2]],
        [[0, 0, 0], [0, 1, 1], [0, 2, 2]],
        [[0, 0, 0], [0, 1, 2], [2, 2, 2]],
        [[0, 0, 0], [1, 1, 1], [2, 2, 2]],
        [[0, 1, 2], [1, 2, 0], [2, 0, 1]]
    ]
    for n in (1, 2, 3):
        for op in all_ops(n):
            for i in range(n):
                for j in range(n):
                    for k in range(n):
                        # associativity: (i*j)*k == i*(j*k)
                        assert op[op[i][j]][k] == op[i][op[j][k]]

def test_op_from_id():
    """The first order-2 table in the smallsemi numbering is the null
    semigroup (every product is 0)."""
    null_semigroup = [[0, 0], [0, 0]]
    assert op_from_id(2, 1) == null_semigroup

def test_all_gens_crs():
    """all_gens_crs presents a semigroup on all of its elements.

    Fixes: the function was named all_gens_crs, so pytest never collected it
    and it shadowed the imported all_gens_crs (any call would recurse forever);
    rect22.essentials was read without calling compute_essentials first (every
    other test computes before reading); and the last assertion repeated
    essentials[1] where the pair-tuples show essentials[2] was intended.
    """
    # Z/3Z written additively.
    z3 = all_gens_crs([[(i+j)%3 for i in range(3)] for j in range(3)])
    z3.compute_essentials(3)
    assert z3.alphabet == "ABC"
    assert set(z3.relations) == {("AA", "A"), ("AB", "B"), ("AC", "C"),
                                 ("BA", "B"), ("BB", "C"), ("BC", "A"),
                                 ("CA", "C"), ("CB", "A"), ("CC", "B")}
    assert set(z3.essentials[0]) == {()}
    assert set(z3.essentials[1]) == {("A",), ("B",), ("C",)}
    assert set(z3.essentials[2]) == {(i, j) for i in "ABC" for j in "ABC"}

    # The 2x2 rectangular band, presented on all four elements.
    rect22 = all_gens_crs([[(i&2) | (j&1) for j in range(4)] for i in range(4)])
    rect22.compute_essentials(2)
    assert rect22.alphabet == "ABCD"
    assert set(rect22.essentials[0]) == {()}
    assert set(rect22.essentials[1]) == {(i,) for i in "ABCD"}
    assert set(rect22.essentials[2]) == {(i, j) for i in "ABCD" for j in "ABCD"}

def test_crs_from_gens():
    """Spot-check the rewriting systems crs_from_gens builds for Z/3Z on
    several generating sets, including a non-generating one."""
    z3_table = [[(i+j)%3 for i in range(3)] for j in range(3)]

    # 0 doesn't generate Z/3Z.
    assert crs_from_gens(z3_table, [0]) is None

    # A single generator of order 3: one rule A^4 -> A.
    one_gen = crs_from_gens(z3_table, [1])
    assert one_gen.alphabet == "A"
    assert set(one_gen.rules) == {("AAAA", "A")}
    one_gen.compute_essentials(3)
    assert set(one_gen.essentials[1]) == {("A",)}
    assert set(one_gen.essentials[2]) == {("A", "AAA")}
    assert set(one_gen.essentials[3]) == {("A", "AAA", "A")}

    # Identity plus a generator.
    two_gen = crs_from_gens(z3_table, [0, 1])
    assert two_gen.alphabet == "AB"
    assert set(two_gen.rules) == {("BBB", "A"), ("AB", "B"), ("BA", "B"), ("AA", "A")}
    two_gen.compute_essentials(3)
    assert set(two_gen.essentials[1]) == {("A",), ("B",)}
    assert set(two_gen.essentials[2]) == {("A", "A"), ("A", "B"), ("B", "BB"), ("B", "A")}
    assert set(two_gen.essentials[3]) == {
        ("A", "A", "A"), ("A", "A", "B"),
        ("A", "B", "BB"), ("A", "B", "A"),
        ("B", "BB", "B"), ("B", "BB", "A"),
        ("B", "A", "B"), ("B", "A", "A"),
    }

def test_all_gen_sets_give_same_ranks():
    """Rational homology ranks are an invariant of the semigroup itself:
    every generating set's rewriting system must report the same ranks,
    and so must the one chosen by find_best_gens_crs."""
    expected = {
        (3, 10): [1, 0, 0, 0],
        (4, 123): [1, 0, 1, 0],
        (5, 917): [1, 0, 1, 1],
        (5, 1142): [1, 0, 1, 0],
        (6, 8713): [1, 0, 1, 2],
        (6, 15870): [1, 0, 0, 1],
    }
    for (order, idx), ranks in expected.items():
        table = op_from_id(order, idx)
        for size in range(1, order + 1):
            for gens in combinations(range(order), size):
                crs = crs_from_gens(table, gens)
                if crs is None:
                    continue  # gens does not generate; nothing to check
                assert crs.sympy_rational_homology_ranks(3) == ranks
        assert find_best_gens_crs(table, 3).sympy_rational_homology_ranks(3) == ranks

0 comments on commit cf95256

Please sign in to comment.