collect_tmax_comparison.py
import json
import os

import numpy as np
from joblib import Parallel, delayed

# `nodal_performance` used below is assumed to be provided by this package
from lcs import *

data_dir = "Data/zkc_infer_vs_tmax/"
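# Each file in `data_dir` is expected to be named "{c}_{t}_{r}.json" and to
# contain an adjacency matrix under "A" and a stack of sampled matrices under
# "samples"; see collect_parameters() and get_matrices() below.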

def collect_parameters(dir):
    """Build value-to-index maps for the parameters encoded in the file names
    of a results directory, and return the network size n."""
    clist = set()
    tlist = set()
    rlist = set()

    checked_matrix_size = False

    for f in os.listdir(dir):
        # read the adjacency matrix once to determine the number of nodes
        if not checked_matrix_size:
            fname = os.path.join(dir, f)
            with open(fname, "r") as file:
                data = json.loads(file.read())
            n = np.array(data["A"]).shape[0]
            checked_matrix_size = True

        # file names encode the parameters as "{c}_{t}_{r}.json"
        d = f.split(".json")[0].split("_")

        c = int(d[0])
        t = float(d[1])
        r = float(d[2])

        clist.add(c)
        tlist.add(t)
        rlist.add(r)

    # map each parameter value to its position in the sorted list of values
    c_dict = {c: i for i, c in enumerate(sorted(clist))}
    t_dict = {t: i for i, t in enumerate(sorted(tlist))}
    r_dict = {r: i for i, r in enumerate(sorted(rlist))}

    return c_dict, t_dict, r_dict, n
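# For example (hypothetical file names), a directory containing
# "0_10.0_0.25.json" and "1_50.0_0.25.json" would yield
# c_dict == {0: 0, 1: 1}, t_dict == {10.0: 0, 50.0: 1}, r_dict == {0.25: 0},
# with n taken from the shape of "A" in the first file read.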

def get_matrices(f, dir, c_dict, t_dict, r_dict):
    """Load one results file and return its parameter indices, the adjacency
    matrix A, and the mean of the stored samples."""
    fname = os.path.join(dir, f)

    # recover the parameter indices from the file name
    d = f.split(".json")[0].split("_")
    c = int(d[0])
    t = float(d[1])
    r = float(d[2])
    i = c_dict[c]
    j = t_dict[t]
    k = r_dict[r]

    with open(fname, "r") as file:
        data = json.loads(file.read())

    A = np.array(data["A"], dtype=float)
    samples = np.array(data["samples"], dtype=float)

    print((i, j, k), flush=True)

    return i, j, k, A, samples.mean(axis=0)

# get number of available cores
n_processes = len(os.sched_getaffinity(0))

c_dict, t_dict, r_dict, n = collect_parameters(data_dir)

n_c = len(c_dict)
n_t = len(t_dict)
n_r = len(r_dict)

Qsamples = np.zeros((n_c, n_t, n_r, n, n))

# load every results file in parallel
arglist = []
for f in os.listdir(data_dir):
    arglist.append((f, data_dir, c_dict, t_dict, r_dict))

data = Parallel(n_jobs=n_processes)(delayed(get_matrices)(*arg) for arg in arglist)
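# `data` is a list of (i, j, k, A, Q) tuples, one per input file, where Q is
# the mean of that file's samples.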
node_performance_simple = np.zeros([n, n_t, n_r])
node_performance_complex = np.zeros([n, n_t, n_r])
for i, j, k, A, Q in data:
    if i == 0:
        node_performance_simple[:, j, k] = nodal_performance(Q, A)
    if i == 1:
        node_performance_complex[:, j, k] = nodal_performance(Q, A)
# bundle the results into a single JSON file
data = {}
data["tmax"] = list(t_dict)
data["A"] = A.tolist()
data["node-performance-simple"] = node_performance_simple.tolist()
data["node-performance-complex"] = node_performance_complex.tolist()
datastring = json.dumps(data)

with open("Data/zkc_tmax_comparison.json", "w") as output_file:
    output_file.write(datastring)
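# A minimal sketch (not part of the original script) of how the written file
# could be consumed downstream; averaging over nodes and realizations here is
# an illustrative assumption:
#
#     with open("Data/zkc_tmax_comparison.json") as f:
#         results = json.load(f)
#     perf_simple = np.array(results["node-performance-simple"])    # (n, n_t, n_r)
#     perf_complex = np.array(results["node-performance-complex"])  # (n, n_t, n_r)
#     mean_simple = perf_simple.mean(axis=(0, 2))    # one value per tmax
#     mean_complex = perf_complex.mean(axis=(0, 2))  # one value per tmax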