From a724a56ec0328eed4c2fe366dbf36b952fd1cb1b Mon Sep 17 00:00:00 2001 From: root Date: Thu, 16 Nov 2023 20:17:10 -0800 Subject: [PATCH] Revert "Update spacc tests" This reverts commit f28f02a7a86049e5b71d20d498f87f5e35b63445. --- sam/sim/test/primitives/test_spacc.py | 421 +++----------------------- 1 file changed, 37 insertions(+), 384 deletions(-) diff --git a/sam/sim/test/primitives/test_spacc.py b/sam/sim/test/primitives/test_spacc.py index 57a71814..39b08960 100644 --- a/sam/sim/test/primitives/test_spacc.py +++ b/sam/sim/test/primitives/test_spacc.py @@ -1,50 +1,47 @@ import copy import pytest -import random -import numpy as np from sam.sim.src.accumulator import SparseAccumulator1, SparseAccumulator2 -from sam.sim.src.rd_scanner import CompressedCrdRdScan -from sam.sim.src.array import Array -from sam.sim.src.wr_scanner import ValsWrScan, CompressWrScan from sam.sim.src.base import remove_emptystr -from sam.sim.test.test import TIMEOUT, gen_n_comp_arrs, gen_val_arr, get_point_list, \ - convert_point_tuple, convert_ndarr_point_tuple, convert_point_tuple_ndarr, check_point_tuple, remove_zeros +from sam.sim.test.test import TIMEOUT -arrs_dict1 = {'ocrd_in': [0, 2, 'S0', 2, 'S1', 'D'], - 'icrd_in': [0, 2, 3, 'S0', 0, 2, 3, 'S1', 0, 2, 3, 'S2', 'D'], - 'val_in': [50, 5, 10, 'S0', 40, 4, 8, 'S1', -40, 33, 36, 'S2', 'D'], +arrs_dict1 = {'ocrd_in': [0, 0, 0, 2, 2, 2, 2, 2, 2, 'D'], + 'icrd_in': [0, 2, 3, 0, 2, 3, 0, 2, 3, 'D'], + 'val_in': [50, 5, 10, 40, 4, 8, -40, 33, 36, 'D'], + 'ocrd_gold': [0, 2, 'S0', 'D'], 'icrd_gold': [0, 2, 3, 'S0', 0, 2, 3, 'S1', 'D'], - 'val_gold': [90, 9, 18, 'S0', -40, 33, 36, 'S1', 'D']} + 'val_gold': [50, 5, 10, 'S0', 0, 37, 44, 'S1', 'D']} -# New sparse accumulator @pytest.mark.parametrize("arrs", [arrs_dict1]) -def test_SparseAccumulator1new_direct(arrs, debug_sim): +def test_spacc1_direct(arrs, debug_sim): icrd = copy.deepcopy(arrs['icrd_in']) ocrd = copy.deepcopy(arrs['ocrd_in']) val = copy.deepcopy(arrs['val_in']) + gold_ocrd = copy.deepcopy(arrs['ocrd_gold']) gold_icrd = copy.deepcopy(arrs['icrd_gold']) gold_val = copy.deepcopy(arrs['val_gold']) - sa = SparseAccumulator1(valtype=int, val_stkn=True, debug=debug_sim) + sa = SparseAccumulator1(val_stkn=True, debug=debug_sim) done = False time = 0 + out_ocrd = [] out_icrd = [] out_val = [] while not done and time < TIMEOUT: if len(icrd) > 0: - sa.set_in_crd0(icrd.pop(0)) + sa.set_inner_crdpt(icrd.pop(0)) if len(ocrd) > 0: - sa.set_in_crd1(ocrd.pop(0)) + sa.set_outer_crdpt(ocrd.pop(0)) if len(val) > 0: sa.set_val(val.pop(0)) sa.update() - out_icrd.append(sa.out_crd0()) + out_ocrd.append(sa.out_outer_crd()) + out_icrd.append(sa.out_inner_crd()) out_val.append(sa.out_val()) print("Timestep", time, "\t Done:", sa.out_done()) @@ -52,139 +49,37 @@ def test_SparseAccumulator1new_direct(arrs, debug_sim): done = sa.out_done() time += 1 + out_ocrd = remove_emptystr(out_ocrd) out_icrd = remove_emptystr(out_icrd) out_val = remove_emptystr(out_val) if debug_sim: + print("Outer Crd: ", out_ocrd) print("Inner Crd: ", out_icrd) print("Vals: ", out_val) + assert (out_ocrd == gold_ocrd) assert (out_icrd == gold_icrd) assert (out_val == gold_val) -@pytest.mark.parametrize("dim", [2 ** x for x in range(2, 11, 2)]) -def test_SparseAccumulator1new_rand(dim, debug_sim, max_val=1000, fill=0): - in_mat_crds1, in_mat_segs1 = gen_n_comp_arrs(2, dim) - in_mat_vals1 = gen_val_arr(len(in_mat_crds1[-1]), max_val, -max_val) - - in1_tup = convert_point_tuple(get_point_list(in_mat_crds1, in_mat_segs1, in_mat_vals1)) - - nd1 = 
convert_point_tuple_ndarr(in1_tup, dim) - gold_nd = np.sum(nd1, 0) - gold_tup = convert_ndarr_point_tuple(gold_nd) - - rdscan_B1 = CompressedCrdRdScan(crd_arr=in_mat_crds1[0], seg_arr=in_mat_segs1[0], debug=debug_sim) - rdscan_B2 = CompressedCrdRdScan(crd_arr=in_mat_crds1[1], seg_arr=in_mat_segs1[1], debug=debug_sim) - - val_B = Array(init_arr=in_mat_vals1, debug=debug_sim) - sa = SparseAccumulator1(valtype=int, val_stkn=True, debug=debug_sim) - - vals_X = ValsWrScan(size=dim * dim, fill=fill, debug=debug_sim) - wrscan_X1 = CompressWrScan(seg_size=2, size=dim, fill=fill, debug=debug_sim) - - done = False - time = 0 - in_ref_B = [0, 'D'] - out_rdscan_B1 = [] - out_rdscan_B2 = [] - out_val_B = [] - while not done and time < TIMEOUT: - if len(in_ref_B) > 0: - rdscan_B1.set_in_ref(in_ref_B.pop(0)) - - rdscan_B2.set_in_ref(rdscan_B1.out_ref()) - val_B.set_load(rdscan_B2.out_ref()) +arrs_dict1 = {'crd1_in': [0, 2, 3, 0, 0, 2, 2, 2, 3, 3, 3, 3, 3, 0, 'D'], + 'crd0_in': [0, 2, 3, 0, 2, 0, 0, 3, 0, 2, 3, 0, 2, 0, 'D'], + 'val_in': [50, 5, 10, 40, 4, 8, -40, 33, 36, 1, 2, 3, 4, 5, 'D'], + 'crd1_gold': [0, 2, 3, 'S0', 'D'], + 'crd0_gold': [0, 2, 'S0', 0, 2, 3, 'S0', 0, 2, 3, 'S1', 'D'], + 'val_gold': [95.0, 4.0, 'S0', -32.0, 5.0, 33.0, 'S0', 39.0, 5.0, 12.0, 'S1', 'D']} - # Inject random empty strings - out_rdscan_B1.append(rdscan_B1.out_crd()) - out_rdscan_B2.append(rdscan_B2.out_crd()) - out_val_B.append(val_B.out_load()) - if random.random() < 0.2: - out_rdscan_B1.append("") - if random.random() < 0.2: - out_rdscan_B2.append("") - if random.random() < 0.2: - out_val_B.append("") - - sa.set_in_crd1(out_rdscan_B1.pop(0)) - sa.set_in_crd0(out_rdscan_B2.pop(0)) - sa.set_val(out_val_B.pop(0)) - vals_X.set_input(sa.out_val()) - wrscan_X1.set_input(sa.out_crd0()) - - rdscan_B1.update() - rdscan_B2.update() - val_B.update() - sa.update() - vals_X.update() - wrscan_X1.update() - - print("Timestep", time, "\t Done --", - "\tRdScan B1:", rdscan_B1.out_done(), - "\tSparseAccumulator1New:", sa.out_done(), - "\tArr:", val_B.out_done(), - "\tWrScan:", vals_X.out_done(), - "\tWrScan X1:", wrscan_X1.out_done(), - ) - - done = wrscan_X1.out_done() - time += 1 - - wrscan_X1.autosize() - vals_X.autosize() - - out_crds = [wrscan_X1.get_arr()] - out_segs = [wrscan_X1.get_seg_arr()] - out_val = vals_X.get_arr() - - if debug_sim: - print("Input", in_mat_segs1, in_mat_crds1, in_mat_vals1) - print("X seg", out_segs) - print("X crd", out_crds) - print("X val", out_val) - print("Gold np", gold_nd) - print("Gold Tuple", gold_tup) +arrs_dict2 = {'crd1_in': [0, 0, 0, 1, 1, 1, 1, 0, 0, 'D'], + 'crd0_in': [0, 2, 3, 0, 2, 3, 4, 2, 3, 'D'], + 'val_in': [50, 5, 10, 40, 4, 8, -40, 33, 36, 'D'], + 'crd1_gold': [0, 1, 'S0', 'D'], + 'crd0_gold': [0, 2, 3, 'S0', 0, 2, 3, 4, 'S1', 'D'], + 'val_gold': [50, 38, 46, 'S0', 40, 4, 8, -40, 'S1', 'D']} - if not out_val: - assert out_val == gold_tup - else: - out_tup = convert_point_tuple(get_point_list(out_crds, out_segs, out_val)) - out_tup = remove_zeros(out_tup) - assert (check_point_tuple(out_tup, gold_tup)) - -arrs_dict1 = {'crd2_in': [0, 1, 'S0', 'D'], - 'crd1_in': [0, 2, 'S0', 2, 'S1', 'D'], - 'crd0_in': [0, 2, 3, 'S0', 0, 2, 3, 'S1', 0, 2, 3, 'S2', 'D'], - 'val_in': [50, 5, 10, 'S0', 40, 4, 8, 'S1', -40, 33, 36, 'S2', 'D'], - 'crd1_gold': [0, 2, 'S0', 'D'], - 'crd0_gold': [0, 2, 3, 'S0', 0, 2, 3, 'S1', 'D'], - 'val_gold': [50, 5, 10, 'S0', 0, 37, 44, 'S1', 'D']} - -# [[0, 1], [0, 4], [0, 2, 3, 4, 5]] [[0], [0, 1, 2, 3], [0, 2, 3, 1, 3]] [-60, 85, 314, 241, -887] -arrs_dict2 = {'crd2_in': 
[0, 'S0', 'D'], - 'crd1_in': [0, 1, 2, 3, 'S1', 'D'], - 'crd0_in': [0, 2, 'S0', 3, 'S0', 1, 'S0', 3, 'S2', 'D'], - 'val_in': [-60, 85, 'S0', 314, 'S0', 241, 'S0', -887, 'S2', 'D'], - 'crd1_gold': [0, 1, 2, 3, 'S0', 'D'], - 'crd0_gold': [0, 2, 'S0', 3, 'S0', 1, 'S0', 3, 'S1', 'D'], - 'val_gold': [-60, 85, 'S0', 314, 'S0', 241, 'S0', -887, 'S1', 'D']} - -# [[0, 1], [0, 3], [0, 4, 6, 8]] [[1], [0, 1, 2], [0, 1, 2, 3, 1, 3, 0, 2]] [637, 210, -847, 358, 162, 687, 95, -91] -arrs_dict3 = {'crd2_in': [1, 'S0', 'D'], - 'crd1_in': [0, 1, 2, 'S1', 'D'], - 'crd0_in': [0, 1, 2, 3, 'S0', 1, 3, 'S0', 0, 2, 'S2', 'D'], - 'val_in': [637, 210, -847, 358, 'S0', 162, 687, 'S0', 95, -91, 'S2', 'D'], - 'crd1_gold': [0, 1, 2, 'S0', 'D'], - 'crd0_gold': [0, 1, 2, 3, 'S0', 1, 3, 'S0', 0, 2, 'S1', 'D'], - 'val_gold': [637, 210, -847, 358, 'S0', 162, 687, 'S0', 95, -91, 'S1', 'D']} - - -# New sparse accumulator -@pytest.mark.parametrize("arrs", [arrs_dict1, arrs_dict2, arrs_dict3]) -def test_SparseAccumulator2new_stream_direct(arrs, debug_sim): - crd2 = copy.deepcopy(arrs['crd2_in']) +@pytest.mark.parametrize("arrs", [arrs_dict1, arrs_dict2]) +def test_spacc2_direct(arrs, debug_sim): crd1 = copy.deepcopy(arrs['crd1_in']) crd0 = copy.deepcopy(arrs['crd0_in']) val = copy.deepcopy(arrs['val_in']) @@ -193,7 +88,7 @@ def test_SparseAccumulator2new_stream_direct(arrs, debug_sim): gold_crd0 = copy.deepcopy(arrs['crd0_gold']) gold_val = copy.deepcopy(arrs['val_gold']) - sa = SparseAccumulator2(valtype=int, val_stkn=True, debug=debug_sim) + sa = SparseAccumulator2(val_stkn=True, debug=debug_sim) done = False time = 0 @@ -201,19 +96,17 @@ def test_SparseAccumulator2new_stream_direct(arrs, debug_sim): out_crd0 = [] out_val = [] while not done and time < TIMEOUT: - if len(crd2) > 0: - sa.set_in_crd2(crd2.pop(0)) - if len(crd1) > 0: - sa.set_in_crd1(crd1.pop(0)) if len(crd0) > 0: - sa.set_in_crd0(crd0.pop(0)) + sa.set_crd_inner(crd0.pop(0)) + if len(crd1) > 0: + sa.set_crd_outer(crd1.pop(0)) if len(val) > 0: sa.set_val(val.pop(0)) sa.update() - out_crd1.append(sa.out_crd1()) - out_crd0.append(sa.out_crd0()) + out_crd1.append(sa.out_crd_outer()) + out_crd0.append(sa.out_crd_inner()) out_val.append(sa.out_val()) print("Timestep", time, "\t Done:", sa.out_done()) @@ -233,243 +126,3 @@ def test_SparseAccumulator2new_stream_direct(arrs, debug_sim): assert (out_crd1 == gold_crd1) assert (out_crd0 == gold_crd0) assert (out_val == gold_val) - - -# Segs: [[0, 11], [0, 8, 18, 25, 31, 36, 44, 50, 61, 68, 78, 86], [0, 10, 18, 26, 35, 45, 53, 61, 69, 76, 84, 93, 99, 107, 114, 124, 134, 145, 154, 160, 168, 176, 188, 198, 208, 214, 223, 232, 242, 251, 261, 271, 280, 288, 294, 301, 308, 315, 323, 328, 339, 348, 357, 367, 378, 385, 391, 399, 408, 417, 425, 435, 440, 446, 453, 463, 467, 477, 489, 494, 502, 508, 516, 523, 531, 537, 541, 550, 557, 569, 576, 586, 593, 600, 608, 614, 623, 631, 640, 647, 656, 665, 675, 681, 689, 699, 703]] -# Crds: [[0, 2, 3, 4, 5, 8, 9, 10, 11, 14, 15], [0, 1, 2, 5, 6, 7, 9, 14, 3, 4, 5, 6, 7, 8, 10, 11, 13, 15, 4, 7, 8, 11, 12, 13, 14, 1, 5, 8, 10, 13, 14, 0, 3, 4, 7, 15, 1, 7, 9, 10, 12, 13, 14, 15, 1, 4, 8, 10, 11, 12, 1, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 0, 2, 4, 5, 8, 14, 15, 0, 1, 3, 4, 6, 8, 9, 10, 11, 13, 1, 2, 3, 6, 7, 8, 10, 12], [1, 2, 3, 8, 9, 10, 11, 13, 14, 15, 2, 3, 5, 8, 9, 10, 11, 14, 1, 4, 6, 7, 9, 10, 11, 12, 0, 1, 4, 9, 10, 12, 13, 14, 15, 0, 1, 2, 3, 4, 5, 6, 7, 9, 13, 0, 1, 2, 6, 7, 9, 10, 15, 0, 1, 2, 8, 9, 12, 13, 15, 0, 1, 4, 6, 8, 9, 13, 15, 2, 4, 5, 7, 10, 13, 15, 2, 5, 7, 10, 11, 13, 14, 
15, 0, 1, 2, 3, 9, 10, 12, 13, 15, 0, 5, 6, 11, 13, 15, 0, 5, 6, 9, 11, 12, 13, 14, 2, 3, 5, 6, 8, 10, 15, 0, 1, 2, 3, 5, 9, 11, 12, 13, 15, 1, 2, 4, 6, 8, 9, 10, 11, 14, 15, 0, 2, 3, 4, 7, 8, 9, 10, 11, 13, 15, 0, 3, 4, 5, 9, 10, 12, 14, 15, 1, 5, 6, 9, 12, 14, 0, 1, 4, 7, 10, 11, 12, 14, 1, 2, 6, 8, 10, 12, 13, 15, 0, 1, 2, 3, 4, 5, 7, 9, 11, 13, 14, 15, 0, 2, 3, 4, 6, 9, 10, 11, 13, 15, 1, 3, 4, 5, 6, 7, 8, 11, 13, 14, 0, 3, 6, 8, 11, 12, 0, 2, 5, 6, 7, 8, 9, 14, 15, 1, 3, 6, 9, 11, 12, 13, 14, 15, 0, 3, 4, 5, 6, 9, 10, 12, 13, 14, 2, 4, 5, 6, 7, 9, 10, 13, 15, 0, 4, 6, 7, 8, 9, 12, 13, 14, 15, 0, 1, 2, 3, 5, 7, 8, 10, 13, 15, 2, 3, 4, 5, 6, 10, 12, 13, 15, 2, 3, 6, 7, 8, 12, 14, 15, 0, 4, 5, 10, 12, 15, 4, 5, 6, 7, 9, 10, 15, 0, 1, 5, 8, 10, 12, 14, 1, 5, 8, 9, 11, 12, 15, 0, 1, 4, 5, 6, 7, 8, 12, 0, 2, 5, 9, 11, 0, 2, 3, 4, 5, 6, 9, 10, 12, 13, 14, 1, 2, 4, 7, 10, 12, 13, 14, 15, 0, 3, 4, 5, 7, 11, 13, 14, 15, 2, 3, 4, 5, 6, 7, 8, 11, 12, 13, 0, 1, 2, 3, 4, 5, 6, 7, 9, 12, 15, 0, 1, 3, 4, 7, 8, 15, 1, 4, 6, 9, 10, 11, 0, 2, 3, 4, 7, 9, 12, 14, 0, 2, 4, 5, 8, 10, 12, 14, 15, 0, 3, 4, 6, 9, 10, 11, 12, 13, 1, 2, 6, 8, 9, 10, 12, 13, 4, 5, 6, 8, 9, 10, 11, 12, 14, 15, 1, 2, 3, 10, 15, 2, 5, 7, 9, 13, 15, 0, 2, 5, 6, 10, 11, 13, 0, 3, 4, 5, 6, 7, 10, 13, 14, 15, 2, 8, 10, 15, 0, 1, 2, 4, 6, 9, 10, 11, 12, 15, 0, 1, 2, 3, 5, 6, 7, 9, 10, 13, 14, 15, 0, 1, 7, 11, 12, 1, 2, 3, 4, 9, 11, 12, 14, 0, 2, 5, 6, 8, 10, 1, 3, 5, 7, 9, 12, 13, 14, 2, 4, 5, 7, 9, 11, 14, 0, 4, 5, 6, 7, 8, 11, 14, 0, 3, 4, 5, 7, 12, 5, 9, 12, 13, 0, 3, 4, 6, 7, 8, 9, 11, 13, 2, 3, 5, 6, 7, 9, 14, 0, 1, 2, 3, 5, 6, 7, 8, 9, 12, 13, 15, 2, 5, 6, 8, 12, 13, 15, 0, 2, 4, 5, 6, 7, 8, 12, 14, 15, 1, 2, 3, 5, 9, 12, 15, 0, 1, 5, 8, 12, 13, 14, 1, 3, 6, 8, 9, 11, 12, 13, 3, 4, 5, 9, 11, 13, 1, 2, 4, 6, 7, 8, 9, 12, 15, 1, 4, 5, 7, 12, 13, 14, 15, 0, 2, 3, 5, 7, 8, 9, 11, 12, 3, 6, 7, 9, 11, 13, 15, 1, 2, 3, 6, 8, 9, 11, 13, 14, 0, 2, 3, 4, 5, 7, 8, 11, 14, 0, 1, 2, 4, 5, 9, 12, 13, 14, 15, 5, 7, 11, 12, 13, 15, 0, 2, 3, 4, 7, 8, 10, 13, 1, 2, 3, 4, 5, 6, 7, 9, 12, 13, 4, 8, 11, 14]] -# Vals: [-275, 130, -863, 147, 180, 534, 563, 41, -124, -359, -481, -40, 158, 413, -110, 421, -441, 978, 443, 295, 743, 955, -57, 765, -901, -143, -471, -476, -820, -405, 7, -413, 566, 477, -92, -791, -672, -733, 958, -773, 265, -42, -895, 139, -262, 337, 602, -5, 424, 478, 489, 614, -744, 854, -604, 393, -693, 769, 605, -357, -970, -304, -87, 363, -919, 166, 179, -313, 151, -734, 992, -158, -603, -453, 260, -731, -135, -945, 885, -649, -917, -371, -483, -874, -761, 918, -704, 401, 985, 5, 902, -946, -218, -504, 857, 943, 273, 622, 692, 364, 61, -109, 540, -733, -826, 43, -303, 185, -938, -269, -951, -117, -279, -388, -57, 288, 562, -34, 562, -856, 481, 307, -584, 153, 600, -527, 50, 374, 887, 705, 742, -482, -52, -280, 14, -582, 387, -910, -11, -823, -974, 765, 569, -106, 754, 27, -678, 297, -248, 642, -383, -984, 119, 276, -635, -843, 558, -833, 831, 910, 148, -222, -878, -881, 576, -3, -772, 660, 718, -266, 24, -96, 125, -94, 144, -490, 14, 713, 765, -527, -925, 109, 542, -567, -368, 951, -610, -594, -482, -740, 995, 993, -487, -178, -51, 91, -517, 102, 68, -291, -558, 201, -465, -673, 112, 484, 327, 931, -988, -786, -942, 935, 961, -891, -143, 255, -823, 628, 775, -290, -163, -183, 354, -342, -465, 137, 902, 870, -466, -324, 207, 896, -308, 637, 717, -537, 395, -746, 351, 523, -960, -458, 505, 869, -580, -502, 664, 171, 320, 877, 497, 461, -226, 130, -743, 98, 826, 427, 92, -223, -822, 952, -565, 920, 569, 539, 372, -248, 592, 405, -832, 
-692, 673, 22, 978, -327, 935, -10, 191, -558, -430, -874, -147, -133, -850, 268, 380, 828, -519, 414, 289, -11, -407, -844, -54, 660, -9, 537, -25, -614, -985, 238, -52, -437, 763, 792, 212, -892, -527, 658, 944, 416, -910, -82, -262, 634, -380, 672, -667, 662, 331, 928, -893, 141, 715, -590, -762, 124, -116, -736, -652, 812, -919, 952, 5, -801, -189, 315, 778, 762, 269, -594, -215, 492, 650, 938, 203, 443, 546, -976, 404, 917, 377, 729, 194, -759, -12, -253, 395, -152, -142, 136, -768, 525, -20, -761, 171, -793, 174, 845, 377, 354, -424, 941, 213, -732, 836, 191, 314, -275, -390, 520, 595, -466, -639, -888, 775, 672, 299, 762, 735, -329, -566, 137, 423, -774, -548, 307, -297, -332, -497, -633, -242, -297, -648, 239, -828, -2, -577, -643, 236, 849, -792, -513, -521, -470, -212, 148, -897, 497, 627, 834, -720, -914, 245, 550, 99, -410, 550, 537, 784, -716, 646, 213, -387, 914, 976, 388, 534, -543, 588, -453, -54, -819, -792, 885, 89, -834, 832, -427, -906, -630, -79, -354, 605, 651, -96, 261, 96, -726, -832, 550, -327, 519, -561, -928, -47, -352, 186, -103, 3, -563, 247, -416, 289, -54, 815, -216, 859, 909, -540, 38, -759, 541, -132, -466, 685, -354, -746, -956, -826, 431, -343, 67, 823, -732, 946, -611, 592, 541, 198, -28, 518, 433, 348, 264, -620, 935, 700, 594, -144, -14, 15, -101, -730, 706, -200, 713, 951, 348, -62, -935, 419, 511, -358, -734, 561, -329, 685, 948, 554, 256, -615, -666, -473, 295, -688, 508, 917, -269, 363, 484, 775, 807, 408, -519, 388, -205, 443, -959, 344, 107, -600, -403, 254, 778, 518, 320, 415, -976, 339, -36, -723, -980, 846, 66, -405, 956, 932, 747, -111, -804, -495, -821, 538, 366, -866, 614, -349, -145, 426, 256, 662, 533, -549, -381, -668, 531, 954, -343, -645, 823, -611, -382, -846, 474, 503, -673, 841, -549, 263, 966, 632, 574, 719, -219, -396, -98, 374, 700, -567, 84, 951, 294, -260, 334, -532, -828, -867, -399, 575, -9, 342, 913, 736, -882, -993, 232, -627, -655, -451, 606, 571, 289, -690, -161, 864, 608, -695, 806, -620, -844, 719, 353, 834, -915, 371, -151, -239, 355, 379, 405, -48, -565, 592, -531, -850, -800, 424, -384, -663, -982, 225, 294, -557, -97, 883, -674, -42, -436, -100, 548, 466, -54, -337, -222, -11, 112, 893, 372, -950, 418, 80, -24, 391, 659, 741, 25, -321, 367, -488, 106, 361, 41, 301, 650, -672, 105, -715, 401, -989, 804, 858] -arrs_dict1 = { "segs": [[0, 11], [0, 8, 18, 25, 31, 36, 44, 50, 61, 68, 78, 86], [0, 10, 18, 26, 35, 45, 53, 61, 69, 76, - 84, 93, 99, 107, 114, 124, 134, 145, - 154, 160, 168, 176, 188, 198, 208, - 214, 223, 232, 242, 251, 261, 271, - 280, 288, 294, 301, 308, 315, 323, 328, 339, 348, 357, 367, 378, 385, 391, 399, 408, 417, 425, 435, 440, 446, 453, 463, 467, 477, 489, 494, 502, 508, 516, 523, 531, 537, 541, 550, 557, 569, 576, 586, 593, 600, 608, 614, 623, 631, 640, 647, 656, 665, 675, 681, 689, 699, 703]], - "crds": [[0, 2, 3, 4, 5, 8, 9, 10, 11, 14, 15], [0, 1, 2, 5, 6, 7, 9, 14, 3, 4, 5, 6, 7, 8, 10, 11, 13, - 15, 4, 7, 8, 11, 12, 13, 14, 1, 5, 8, 10, 13, 14, 0, 3, 4, 7, 15, 1, 7, 9, 10, 12, 13, 14, 15, 1, 4, 8, 10, 11, 12, 1, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 0, 2, 4, 5, 8, 14, 15, 0, 1, 3, 4, 6, 8, 9, 10, 11, 13, 1, 2, 3, 6, 7, 8, 10, 12], [1, 2, 3, 8, 9, 10, 11, 13, 14, 15, 2, 3, 5, 8, 9, 10, 11, 14, 1, 4, 6, 7, 9, 10, 11, 12, 0, 1, 4, 9, 10, 12, 13, 14, 15, 0, 1, 2, 3, 4, 5, 6, 7, 9, 13, 0, 1, 2, 6, 7, 9, 10, 15, 0, 1, 2, 8, 9, 12, 13, 15, 0, 1, 4, 6, 8, 9, 13, 15, 2, 4, 5, 7, 10, 13, 15, 2, 5, 7, 10, 11, 13, 14, 15, 0, 1, 2, 3, 9, 10, 12, 13, 15, 0, 5, 6, 11, 13, 15, 0, 5, 6, 9, 11, 12, 13, 14, 2, 3, 
5, 6, 8, 10, 15, 0, 1, 2, 3, 5, 9, 11, 12, 13, 15, 1, 2, 4, 6, 8, 9, 10, 11, 14, 15, 0, 2, 3, 4, 7, 8, 9, 10, 11, 13, 15, 0, 3, 4, 5, 9, 10, 12, 14, 15, 1, 5, 6, 9, 12, 14, 0, 1, 4, 7, 10, 11, 12, 14, 1, 2, 6, 8, 10, 12, 13, 15, 0, 1, 2, 3, 4, 5, 7, 9, 11, 13, 14, 15, 0, 2, 3, 4, 6, 9, 10, 11, 13, 15, 1, 3, 4, 5, 6, 7, 8, 11, 13, 14, 0, 3, 6, 8, 11, 12, 0, 2, 5, 6, 7, 8, 9, 14, 15, 1, 3, 6, 9, 11, 12, 13, 14, 15, 0, 3, 4, 5, 6, 9, 10, 12, 13, 14, 2, 4, 5, 6, 7, 9, 10, 13, 15, 0, 4, 6, 7, 8, 9, 12, 13, 14, 15, 0, 1, 2, 3, 5, 7, 8, 10, 13, 15, 2, 3, 4, 5, 6, 10, 12, 13, 15, 2, 3, 6, 7, 8, 12, 14, 15, 0, 4, 5, 10, 12, 15, 4, 5, 6, 7, 9, 10, 15, 0, 1, 5, 8, 10, 12, 14, 1, 5, 8, 9, 11, 12, 15, 0, 1, 4, 5, 6, 7, 8, 12, 0, 2, 5, 9, 11, 0, 2, 3, 4, 5, 6, 9, 10, 12, 13, 14, 1, 2, 4, 7, 10, 12, 13, 14, 15, 0, 3, 4, 5, 7, 11, 13, 14, 15, 2, 3, 4, 5, 6, 7, 8, 11, 12, 13, 0, 1, 2, 3, 4, 5, 6, 7, 9, 12, 15, 0, 1, 3, 4, 7, 8, 15, 1, 4, 6, 9, 10, 11, 0, 2, 3, 4, 7, 9, 12, 14, 0, 2, 4, 5, 8, 10, 12, 14, 15, 0, 3, 4, 6, 9, 10, 11, 12, 13, 1, 2, 6, 8, 9, 10, 12, 13, 4, 5, 6, 8, 9, 10, 11, 12, 14, 15, 1, 2, 3, 10, 15, 2, 5, 7, 9, 13, 15, 0, 2, 5, 6, 10, 11, 13, 0, 3, 4, 5, 6, 7, 10, 13, 14, 15, 2, 8, 10, 15, 0, 1, 2, 4, 6, 9, 10, 11, 12, 15, 0, 1, 2, 3, 5, 6, 7, 9, 10, 13, 14, 15, 0, 1, 7, 11, 12, 1, 2, 3, 4, 9, 11, 12, 14, 0, 2, 5, 6, 8, 10, 1, 3, 5, 7, 9, 12, 13, 14, 2, 4, 5, 7, 9, 11, 14, 0, 4, 5, 6, 7, 8, 11, 14, 0, 3, 4, 5, 7, 12, 5, 9, 12, 13, 0, 3, 4, 6, 7, 8, 9, 11, 13, 2, 3, 5, 6, 7, 9, 14, 0, 1, 2, 3, 5, 6, 7, 8, 9, 12, 13, 15, 2, 5, 6, 8, 12, 13, 15, 0, 2, 4, 5, 6, 7, 8, 12, 14, 15, 1, 2, 3, 5, 9, 12, 15, 0, 1, 5, 8, 12, 13, 14, 1, 3, 6, 8, 9, 11, 12, 13, 3, 4, 5, 9, 11, 13, 1, 2, 4, 6, 7, 8, 9, 12, 15, 1, 4, 5, 7, 12, 13, 14, 15, 0, 2, 3, 5, 7, 8, 9, 11, 12, 3, 6, 7, 9, 11, 13, 15, 1, 2, 3, 6, 8, 9, 11, 13, 14, 0, 2, 3, 4, 5, 7, 8, 11, 14, 0, 1, 2, 4, 5, 9, 12, 13, 14, 15, 5, 7, 11, 12, 13, 15, 0, 2, 3, 4, 7, 8, 10, 13, 1, 2, 3, 4, 5, 6, 7, 9, 12, 13, 4, 8, 11, 14]], - "vals": [-275, 130, -863, 147, 180, 534, 563, 41, -124, -359, -481, -40, 158, 413, -110, 421, -441, 978, - 443, 295, 743, 955, -57, 765, -901, -143, -471, -476, -820, -405, 7, -413, 566, 477, -92, -791, -672, -733, 958, -773, 265, -42, -895, 139, -262, 337, 602, -5, 424, 478, 489, 614, -744, 854, -604, 393, -693, 769, 605, -357, -970, -304, -87, 363, -919, 166, 179, -313, 151, -734, 992, -158, -603, -453, 260, -731, -135, -945, 885, -649, -917, -371, -483, -874, -761, 918, -704, 401, 985, 5, 902, -946, -218, -504, 857, 943, 273, 622, 692, 364, 61, -109, 540, -733, -826, 43, -303, 185, -938, -269, -951, -117, -279, -388, -57, 288, 562, -34, 562, -856, 481, 307, -584, 153, 600, -527, 50, 374, 887, 705, 742, -482, -52, -280, 14, -582, 387, -910, -11, -823, -974, 765, 569, -106, 754, 27, -678, 297, -248, 642, -383, -984, 119, 276, -635, -843, 558, -833, 831, 910, 148, -222, -878, -881, 576, -3, -772, 660, 718, -266, 24, -96, 125, -94, 144, -490, 14, 713, 765, -527, -925, 109, 542, -567, -368, 951, -610, -594, -482, -740, 995, 993, -487, -178, -51, 91, -517, 102, 68, -291, -558, 201, -465, -673, 112, 484, 327, 931, -988, -786, -942, 935, 961, -891, -143, 255, -823, 628, 775, -290, -163, -183, 354, -342, -465, 137, 902, 870, -466, -324, 207, 896, -308, 637, 717, -537, 395, -746, 351, 523, -960, -458, 505, 869, -580, -502, 664, 171, 320, 877, 497, 461, -226, 130, -743, 98, 826, 427, 92, -223, -822, 952, -565, 920, 569, 539, 372, -248, 592, 405, -832, -692, 673, 22, 978, -327, 935, -10, 191, -558, -430, -874, -147, -133, -850, 268, 380, 
828, -519, 414, 289, -11, -407, -844, -54, 660, -9, 537, -25, -614, -985, 238, -52, -437, 763, 792, 212, -892, -527, 658, 944, 416, -910, -82, -262, 634, -380, 672, -667, 662, 331, 928, -893, 141, 715, -590, -762, 124, -116, -736, -652, 812, -919, 952, 5, -801, -189, 315, 778, 762, 269, -594, -215, 492, 650, 938, 203, 443, 546, -976, 404, 917, 377, 729, 194, -759, -12, -253, 395, -152, -142, 136, -768, 525, -20, -761, 171, -793, 174, 845, 377, 354, -424, 941, 213, -732, 836, 191, 314, -275, -390, 520, 595, -466, -639, -888, 775, 672, 299, 762, 735, -329, -566, 137, 423, -774, -548, 307, -297, -332, -497, -633, -242, -297, -648, 239, -828, -2, -577, -643, 236, 849, -792, -513, -521, -470, -212, 148, -897, 497, 627, 834, -720, -914, 245, 550, 99, -410, 550, 537, 784, -716, 646, 213, -387, 914, 976, 388, 534, -543, 588, -453, -54, -819, -792, 885, 89, -834, 832, -427, -906, -630, -79, -354, 605, 651, -96, 261, 96, -726, -832, 550, -327, 519, -561, -928, -47, -352, 186, -103, 3, -563, 247, -416, 289, -54, 815, -216, 859, 909, -540, 38, -759, 541, -132, -466, 685, -354, -746, -956, -826, 431, -343, 67, 823, -732, 946, -611, 592, 541, 198, -28, 518, 433, 348, 264, -620, 935, 700, 594, -144, -14, 15, -101, -730, 706, -200, 713, 951, 348, -62, -935, 419, 511, -358, -734, 561, -329, 685, 948, 554, 256, -615, -666, -473, 295, -688, 508, 917, -269, 363, 484, 775, 807, 408, -519, 388, -205, 443, -959, 344, 107, -600, -403, 254, 778, 518, 320, 415, -976, 339, -36, -723, -980, 846, 66, -405, 956, 932, 747, -111, -804, -495, -821, 538, 366, -866, 614, -349, -145, 426, 256, 662, 533, -549, -381, -668, 531, 954, -343, -645, 823, -611, -382, -846, 474, 503, -673, 841, -549, 263, 966, 632, 574, 719, -219, -396, -98, 374, 700, -567, 84, 951, 294, -260, 334, -532, -828, -867, -399, 575, -9, 342, 913, 736, -882, -993, 232, -627, -655, -451, 606, 571, 289, -690, -161, 864, 608, -695, 806, -620, -844, 719, 353, 834, -915, 371, -151, -239, 355, 379, 405, -48, -565, 592, -531, -850, -800, 424, -384, -663, -982, 225, 294, -557, -97, 883, -674, -42, -436, -100, 548, 466, -54, -337, -222, -11, 112, 893, 372, -950, 418, 80, -24, 391, 659, 741, 25, -321, 367, -488, 106, 361, 41, 301, 650, -672, 105, -715, 401, -989, 804, 858], - "dim": 16 -} - -# Segs: [[0, 8], [0, 10, 21, 30, 40, 52, 61, 68, 76], [0, 8, 18, 28, 35, 48, 57, 62, 73, 83, 94, 99, 111, 121, 129, 137, 144, 152, 159, 166, 176, 184, 191, 197, 204, 214, 221, 233, 240, 246, 255, 264, 268, 275, 282, 291, 300, 308, 313, 320, 328, 338, 346, 356, 365, 376, 383, 391, 401, 407, 419, 426, 430, 439, 448, 457, 462, 468, 473, 480, 486, 494, 501, 508, 517, 526, 533, 543, 551, 559, 567, 577, 586, 592, 598, 608, 614]] -# Crds: [[0, 1, 2, 4, 7, 10, 12, 13], [0, 2, 5, 7, 8, 10, 11, 13, 14, 15, 0, 1, 2, 3, 4, 5, 7, 10, 12, 14, 15, 0, 1, 2, 3, 4, 5, 9, 14, 15, 0, 1, 2, 3, 6, 8, 9, 10, 11, 14, 0, 1, 2, 3, 4, 6, 7, 8, 11, 12, 14, 15, 0, 2, 3, 4, 7, 8, 11, 13, 14, 5, 6, 9, 10, 11, 14, 15, 2, 3, 5, 6, 10, 11, 12, 15], [0, 1, 3, 5, 7, 8, 11, 12, 0, 1, 2, 3, 6, 7, 9, 11, 14, 15, 0, 1, 2, 4, 6, 7, 11, 13, 14, 15, 2, 3, 4, 6, 10, 14, 15, 0, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 14, 15, 1, 2, 4, 5, 6, 9, 10, 11, 15, 4, 5, 6, 8, 13, 0, 1, 2, 4, 6, 9, 10, 11, 13, 14, 15, 1, 4, 6, 7, 8, 9, 12, 13, 14, 15, 1, 3, 4, 6, 8, 9, 10, 11, 12, 13, 15, 4, 6, 8, 9, 15, 0, 1, 2, 5, 6, 7, 8, 11, 12, 13, 14, 15, 1, 2, 5, 6, 8, 9, 11, 12, 13, 15, 3, 5, 6, 9, 10, 11, 12, 15, 0, 1, 2, 6, 7, 12, 14, 15, 2, 6, 7, 9, 10, 11, 15, 0, 4, 6, 7, 8, 10, 11, 13, 0, 5, 6, 8, 13, 14, 15, 3, 7, 9, 10, 11, 13, 15, 0, 3, 4, 6, 
7, 8, 9, 11, 14, 15, 1, 2, 3, 4, 5, 9, 10, 14, 2, 3, 5, 8, 10, 12, 15, 0, 2, 8, 9, 11, 12, 0, 1, 3, 4, 10, 13, 15, 0, 1, 4, 5, 6, 7, 8, 11, 12, 13, 1, 2, 3, 6, 10, 11, 13, 1, 2, 3, 4, 5, 8, 9, 11, 12, 13, 14, 15, 2, 4, 5, 6, 9, 13, 14, 0, 5, 6, 9, 11, 15, 0, 1, 2, 4, 6, 8, 12, 13, 15, 0, 1, 2, 4, 5, 6, 8, 9, 11, 4, 6, 8, 13, 1, 2, 3, 6, 10, 13, 15, 0, 1, 2, 8, 13, 14, 15, 0, 2, 3, 4, 7, 9, 10, 12, 15, 0, 2, 4, 5, 7, 8, 10, 11, 15, 2, 4, 5, 7, 8, 10, 12, 15, 2, 8, 10, 13, 14, 0, 6, 9, 10, 12, 14, 15, 3, 4, 6, 7, 9, 10, 11, 14, 0, 3, 6, 7, 10, 11, 12, 13, 14, 15, 0, 2, 7, 8, 9, 11, 14, 15, 0, 1, 3, 5, 7, 8, 9, 12, 14, 15, 0, 3, 4, 5, 7, 8, 10, 11, 12, 0, 1, 2, 5, 6, 8, 9, 10, 11, 12, 14, 1, 2, 4, 5, 7, 8, 15, 1, 5, 8, 9, 10, 12, 13, 14, 0, 1, 2, 7, 8, 9, 10, 11, 12, 15, 0, 1, 6, 7, 10, 15, 0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 13, 15, 0, 1, 2, 3, 9, 11, 14, 0, 4, 8, 10, 0, 1, 2, 4, 8, 11, 12, 14, 15, 0, 1, 2, 5, 8, 9, 12, 13, 14, 1, 3, 4, 5, 8, 9, 11, 12, 15, 1, 5, 10, 11, 14, 0, 5, 6, 9, 14, 15, 1, 2, 13, 14, 15, 4, 5, 6, 7, 8, 11, 12, 0, 2, 6, 10, 12, 15, 1, 3, 5, 6, 8, 9, 12, 15, 0, 3, 4, 7, 11, 12, 15, 1, 2, 4, 7, 9, 10, 11, 0, 3, 5, 7, 8, 9, 11, 12, 14, 0, 2, 3, 5, 6, 10, 11, 13, 14, 0, 2, 4, 7, 10, 12, 13, 0, 3, 4, 5, 6, 7, 9, 10, 13, 14, 1, 4, 6, 7, 8, 9, 11, 15, 0, 1, 2, 4, 7, 8, 13, 14, 0, 2, 4, 5, 10, 11, 12, 13, 0, 2, 3, 6, 8, 9, 10, 11, 12, 15, 0, 1, 2, 4, 5, 6, 9, 11, 12, 0, 1, 5, 13, 14, 15, 0, 2, 3, 7, 11, 12, 0, 1, 2, 3, 6, 8, 9, 10, 12, 13, 1, 5, 7, 9, 14, 15]] -# Vals: [-250, 363, -637, 105, -364, -874, 861, -490, -222, 321, -667, -537, -151, -746, -36, 71, -32, -998, 438, 604, 436, -383, 27, -107, -106, -109, -707, 226, 953, 707, -210, -155, 377, -780, 102, -186, -798, 171, -467, 982, -397, 387, -395, 456, -864, -836, -271, -27, -99, -159, -815, 885, -662, -14, -951, 462, 617, -442, 433, 395, 82, 819, -938, 877, -57, -349, -993, -812, -541, -269, -284, -60, -242, -744, 62, -829, -976, 403, 621, -87, -548, 731, -643, -137, -264, -38, 613, 643, 882, 410, -124, -688, 86, 365, 632, 657, -955, 279, -80, -409, 357, 181, 527, 537, 340, -683, -434, 203, 68, 480, 452, 981, 890, 700, -559, 792, 241, -528, 275, 45, 211, -765, 511, 931, -560, -897, 836, -816, -881, 44, 726, -858, -322, 847, 247, -336, -276, -835, 732, 961, -30, -570, 924, -605, -399, 80, 952, 747, 546, 25, -743, -687, -981, 454, 579, -634, 924, 156, -708, 716, -447, -234, -370, -746, -864, -87, 633, -176, 839, -868, -26, 32, 896, 593, 298, -668, -225, -213, 516, -300, -98, 417, -549, 168, 978, -902, -79, -192, 789, 488, 188, 814, -1000, 205, 186, 494, -319, 613, -460, -645, -723, -692, -333, -953, -211, -291, -957, 797, 607, -30, 611, 931, -725, -809, 583, 672, 262, 620, 711, 774, 563, 157, 395, 60, -456, -130, 268, 999, 705, 989, -812, 223, 501, 920, -495, -371, 816, 671, -183, 205, 910, -553, 465, -316, 712, 410, -804, -744, -369, -665, 619, -617, 256, -823, 640, 75, -758, -710, -697, -659, 990, -858, -235, 476, -236, 248, -47, 259, 271, -163, -889, -216, 692, -418, -14, 614, -829, -670, 381, -385, 928, -797, 670, 246, 395, 474, 917, -717, 700, -214, 162, -517, 389, 437, 278, -45, 219, -805, 617, 533, -772, 255, -25, 978, -46, -873, 262, -978, -13, -289, 812, -988, 626, -442, 223, -272, 933, 180, 182, 35, -176, -770, -86, 547, 590, -551, -627, -463, -361, 267, 859, -147, 979, -437, 735, -112, -348, -395, -745, -328, -974, -602, -820, -4, -600, -273, 458, 634, 591, 975, -116, -432, -689, -916, 523, -227, -203, 719, 527, 992, -475, -103, 28, -229, 680, -62, -83, -105, -394, 606, 15, 919, -546, 803, 5, -52, -817, 647, -436, 
858, -51, -987, -923, 835, -2, 88, 873, -444, -509, -185, -988, -951, -863, -523, -969, -373, -817, -629, -999, 467, 148, -168, 512, -446, -386, -582, 810, 326, 505, -707, -339, 155, -27, -104, -603, -207, -740, -138, -107, 998, -252, 160, 689, -585, -140, 396, 453, 42, -325, 782, -638, 973, -343, 942, 968, 979, 879, -745, 245, 917, -532, -278, -194, 912, -299, 248, -849, -565, -100, -779, -658, -110, -257, -39, -766, 885, -704, -4, 533, 77, -27, -704, 497, -934, 299, 528, 149, 583, -697, -107, -48, 299, 759, 377, -798, -825, -893, -299, -178, 183, 39, -367, -862, -660, -157, -93, -89, 36, 563, 547, 392, -977, -864, 376, 4, 19, -416, 485, -637, 156, -20, -714, 116, -383, -182, -771, -851, -973, -282, 716, -243, -899, -267, 792, 668, -921, 10, -612, 96, 354, -23, 415, 233, 934, -193, 619, 5, 281, -660, -299, 50, 105, -469, -141, 347, 440, 758, -620, 620, -460, 673, 16, -905, -468, 565, -416, -3, -248, 57, 870, -895, 890, 847, -127, -490, -881, -901, 455, 235, 260, -703, -255, -541, -238, -215, 886, 429, -350, -357, 828, 646, 448, -860, -894, -934, -314, 922, -640, 506, 117, 525, -606, -701, -582, 356, 172, -218, -780, -540, 272, -768, 397, -41, -795, -619, -568, 268, 691, 669, -452, -851, -218, -432, -767, -300, 178, 769, -191, -272, 349, 573, -291, 429, -236] -@pytest.mark.parametrize("arrs", [arrs_dict1]) -def test_SparseAccumulator2new_tensor_direct(arrs, debug_sim, fill=0): - in_mat_crds1 = arrs["crds"] - in_mat_segs1 = arrs["segs"] - in_mat_vals1 = arrs["vals"] - dim = arrs["dim"] - - in1_tup = convert_point_tuple(get_point_list(in_mat_crds1, in_mat_segs1, in_mat_vals1)) - - nd1 = convert_point_tuple_ndarr(in1_tup, dim) - gold_nd = np.sum(nd1, 0) - gold_tup = convert_ndarr_point_tuple(gold_nd) - - rdscan_B1 = CompressedCrdRdScan(crd_arr=in_mat_crds1[0], seg_arr=in_mat_segs1[0], debug=debug_sim) - rdscan_B2 = CompressedCrdRdScan(crd_arr=in_mat_crds1[1], seg_arr=in_mat_segs1[1], debug=debug_sim) - rdscan_B3 = CompressedCrdRdScan(crd_arr=in_mat_crds1[2], seg_arr=in_mat_segs1[2], debug=debug_sim) - - val_B = Array(init_arr=in_mat_vals1, debug=debug_sim) - sa = SparseAccumulator2(valtype=int, val_stkn=True, debug=debug_sim) - - vals_X = ValsWrScan(size=dim * dim, fill=fill, debug=debug_sim) - wrscan_X1 = CompressWrScan(seg_size=2, size=dim, fill=fill, debug=debug_sim) - wrscan_X2 = CompressWrScan(seg_size=dim + 1, size=dim * dim, fill=fill, debug=debug_sim) - - done = False - time = 0 - in_ref_B = [0, 'D'] - out_rdscan_B1 = [] - out_rdscan_B2 = [] - out_rdscan_B3 = [] - out_val_B = [] - while not done and time < TIMEOUT: - if len(in_ref_B) > 0: - rdscan_B1.set_in_ref(in_ref_B.pop(0)) - - rdscan_B2.set_in_ref(rdscan_B1.out_ref()) - rdscan_B3.set_in_ref(rdscan_B2.out_ref()) - val_B.set_load(rdscan_B3.out_ref()) - - # Inject random empty strings - out_rdscan_B1.append(rdscan_B1.out_crd()) - out_rdscan_B2.append(rdscan_B2.out_crd()) - out_rdscan_B3.append(rdscan_B3.out_crd()) - out_val_B.append(val_B.out_load()) - - # Inject random delay - if random.random() < 0.2: - out_rdscan_B1.append("") - if random.random() < 0.2: - out_rdscan_B2.append("") - if random.random() < 0.2: - out_val_B.append("") - - sa.set_in_crd2(out_rdscan_B1.pop(0)) - sa.set_in_crd1(out_rdscan_B2.pop(0)) - sa.set_in_crd0(out_rdscan_B3.pop(0)) - sa.set_val(out_val_B.pop(0)) - - vals_X.set_input(sa.out_val()) - wrscan_X1.set_input(sa.out_crd1()) - wrscan_X2.set_input(sa.out_crd0()) - - rdscan_B1.update() - rdscan_B2.update() - rdscan_B3.update() - val_B.update() - sa.update() - vals_X.update() - wrscan_X1.update() - 
wrscan_X2.update() - - if time % 100 == 0: - print("Timestep", time, "\t Done --", - "\tRdScan B1:", rdscan_B1.out_done(), - "\tRdScan B2:", rdscan_B2.out_done(), - "\tRdScan B3:", rdscan_B3.out_done(), - "\tSparseAccumulator1New:", sa.out_done(), - "\tArr:", val_B.out_done(), - "\tWrScan:", vals_X.out_done(), - "\tWrScan X1:", wrscan_X1.out_done(), - "\tWrScan X2:", wrscan_X2.out_done(), - ) - - done = wrscan_X2.out_done() and wrscan_X1.out_done() and vals_X.out_done() - time += 1 - - wrscan_X1.autosize() - wrscan_X2.autosize() - vals_X.autosize() - - out_crds = [wrscan_X1.get_arr(), wrscan_X2.get_arr()] - out_segs = [wrscan_X1.get_seg_arr(), wrscan_X2.get_seg_arr()] - out_val = vals_X.get_arr() - - if debug_sim: - print("Input", in_mat_segs1, in_mat_crds1, in_mat_vals1) - print(nd1) - print("X seg", out_segs) - print("X crd", out_crds) - print("X val", out_val) - print("Gold np", gold_nd) - print("Gold Tuple", gold_tup) - - if not out_val: - assert out_val == gold_tup - else: - out_tup = convert_point_tuple(get_point_list(out_crds, out_segs, out_val)) - out_tup = remove_zeros(out_tup) - print("\nSegs: " + str(in_mat_segs1) + "\nCrds: " + str(in_mat_crds1) + \ - "\nVals: " + str(in_mat_vals1)) - assert check_point_tuple(out_tup, gold_tup) - -@pytest.mark.parametrize("dim", [2 ** x for x in range(1, 5, 1)]) -def test_SparseAccumulator2new_tensor_rand(dim, debug_sim, max_val=1000, fill=0): - in_mat_crds1, in_mat_segs1 = gen_n_comp_arrs(3, dim) - in_mat_vals1 = gen_val_arr(len(in_mat_crds1[-1]), max_val, -max_val) - - in1_tup = convert_point_tuple(get_point_list(in_mat_crds1, in_mat_segs1, in_mat_vals1)) - - nd1 = convert_point_tuple_ndarr(in1_tup, dim) - gold_nd = np.sum(nd1, 0) - gold_tup = convert_ndarr_point_tuple(gold_nd) - - rdscan_B1 = CompressedCrdRdScan(crd_arr=in_mat_crds1[0], seg_arr=in_mat_segs1[0], debug=debug_sim) - rdscan_B2 = CompressedCrdRdScan(crd_arr=in_mat_crds1[1], seg_arr=in_mat_segs1[1], debug=debug_sim) - rdscan_B3 = CompressedCrdRdScan(crd_arr=in_mat_crds1[2], seg_arr=in_mat_segs1[2], debug=debug_sim) - - val_B = Array(init_arr=in_mat_vals1, debug=debug_sim) - sa = SparseAccumulator2(valtype=int, val_stkn=True, debug=debug_sim) - - vals_X = ValsWrScan(size=dim * dim, fill=fill, debug=debug_sim) - wrscan_X1 = CompressWrScan(seg_size=2, size=dim, fill=fill, debug=debug_sim) - wrscan_X2 = CompressWrScan(seg_size=dim + 1, size=dim * dim, fill=fill, debug=debug_sim) - - done = False - time = 0 - in_ref_B = [0, 'D'] - out_rdscan_B1 = [] - out_rdscan_B2 = [] - out_rdscan_B3 = [] - out_val_B = [] - while not done and time < TIMEOUT: - if len(in_ref_B) > 0: - rdscan_B1.set_in_ref(in_ref_B.pop(0)) - - rdscan_B2.set_in_ref(rdscan_B1.out_ref()) - rdscan_B3.set_in_ref(rdscan_B2.out_ref()) - val_B.set_load(rdscan_B3.out_ref()) - - # Inject random empty strings - out_rdscan_B1.append(rdscan_B1.out_crd()) - out_rdscan_B2.append(rdscan_B2.out_crd()) - out_rdscan_B3.append(rdscan_B3.out_crd()) - out_val_B.append(val_B.out_load()) - - # Inject random delay - if random.random() < 0.2: - out_rdscan_B1.append("") - if random.random() < 0.2: - out_rdscan_B2.append("") - if random.random() < 0.2: - out_val_B.append("") - - sa.set_in_crd2(out_rdscan_B1.pop(0)) - sa.set_in_crd1(out_rdscan_B2.pop(0)) - sa.set_in_crd0(out_rdscan_B3.pop(0)) - sa.set_val(out_val_B.pop(0)) - - vals_X.set_input(sa.out_val()) - wrscan_X1.set_input(sa.out_crd1()) - wrscan_X2.set_input(sa.out_crd0()) - - rdscan_B1.update() - rdscan_B2.update() - rdscan_B3.update() - val_B.update() - sa.update() - vals_X.update() - 
wrscan_X1.update() - wrscan_X2.update() - - if time % 100 == 0: - print("Timestep", time, "\t Done --", - "\tRdScan B1:", rdscan_B1.out_done(), - "\tRdScan B2:", rdscan_B2.out_done(), - "\tRdScan B3:", rdscan_B3.out_done(), - "\tSparseAccumulator1New:", sa.out_done(), - "\tArr:", val_B.out_done(), - "\tWrScan:", vals_X.out_done(), - "\tWrScan X1:", wrscan_X1.out_done(), - "\tWrScan X2:", wrscan_X2.out_done(), - ) - - done = wrscan_X2.out_done() and wrscan_X1.out_done() and vals_X.out_done() - time += 1 - - wrscan_X1.autosize() - wrscan_X2.autosize() - vals_X.autosize() - - out_crds = [wrscan_X1.get_arr(), wrscan_X2.get_arr()] - out_segs = [wrscan_X1.get_seg_arr(), wrscan_X2.get_seg_arr()] - out_val = vals_X.get_arr() - - if debug_sim: - print("Input", in_mat_segs1, in_mat_crds1, in_mat_vals1) - print(nd1) - print("X seg", out_segs) - print("X crd", out_crds) - print("X val", out_val) - print("Gold np", gold_nd) - print("Gold Tuple", gold_tup) - - if not out_val: - assert out_val == gold_tup - else: - out_tup = convert_point_tuple(get_point_list(out_crds, out_segs, out_val)) - print("\nSegs: " + str(in_mat_segs1) + "\nCrds: " + str(in_mat_crds1) + \ - "\nVals: " + str(in_mat_vals1)) - assert check_point_tuple(out_tup, gold_tup) \ No newline at end of file
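
For reference (not part of the patch): the gold streams in the restored test_spacc1_direct encode a sum over all values that share the same (outer, inner) coordinate pair. The minimal pure-Python sketch below reproduces those gold values from arrs_dict1; the helper name ref_spacc1 is hypothetical and only models the accumulation, not the cycle-level SparseAccumulator1 port interface used by the test.

from collections import defaultdict

def ref_spacc1(ocrd, icrd, val):
    """Sum val[i] into buckets keyed by (ocrd[i], icrd[i]); 'D' ends the stream."""
    acc = defaultdict(int)
    for o, i, v in zip(ocrd, icrd, val):
        if o == 'D':
            break
        acc[(o, i)] += v
    return dict(acc)

# Input streams taken verbatim from arrs_dict1 in the restored test:
ocrd_in = [0, 0, 0, 2, 2, 2, 2, 2, 2, 'D']
icrd_in = [0, 2, 3, 0, 2, 3, 0, 2, 3, 'D']
val_in = [50, 5, 10, 40, 4, 8, -40, 33, 36, 'D']

# Matches the dense part of icrd_gold/val_gold:
#   outer 0 -> {0: 50, 2: 5, 3: 10}; outer 2 -> {0: 0, 2: 37, 3: 44}
assert ref_spacc1(ocrd_in, icrd_in, val_in) == {
    (0, 0): 50, (0, 2): 5, (0, 3): 10,
    (2, 0): 0, (2, 2): 37, (2, 3): 44,
}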