forked from ion599/phi
-
Notifications
You must be signed in to change notification settings - Fork 1
/
x_matrix.py
123 lines (117 loc) · 5.86 KB
/
x_matrix.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
import numpy as np
import pickle
from scipy.sparse import csr_matrix, lil_matrix
from lib.console_progress import ConsoleProgress
import scipy.io as sio
import csv
import math
# Maps a condensed x-vector index -> (origin TAZ, destination TAZ) OD pair.
# Filled in by XMatrix.generate_routing_matrix and pickled to od_back_map.pickle.
od_back_map = {}
def sigmoid(x):
    """Return the logistic sigmoid 1 / (1 + exp(-x)) of a scalar.

    Numerically stable for any float: the original single-expression form
    raises OverflowError for x below about -710 because exp(-x) overflows.
    We branch on the sign so exp() is only ever called on a non-positive
    argument, which underflows gracefully to 0.0 instead of overflowing.
    """
    x = float(x)
    if x >= 0:
        return 1.0 / (1.0 + math.exp(-x))
    # Equivalent form for negative x: exp(x) <= 1, so no overflow.
    z = math.exp(x)
    return z / (1.0 + z)
class XMatrix:
    """Build (or load from disk) the OD-pair-to-sensor routing matrices.

    Attributes produced:
        X: (N_SENSORS x N_TAZ*N_TAZ) sparse matrix; entry (s, i*N_TAZ + j) is
           the weight with which sensor s observes routes of OD pair (i, j).
        x: dense length-N_ROUTES_CONDENSED vector of per-route weights for
           routes between condensed TAZs (softmax-of-travel-time weights when
           use_travel_times, otherwise an indicator on the first route).
        U: (N_TAZ_CONDENSED*(N_TAZ_CONDENSED-1) x N_ROUTES_CONDENSED) sparse
           0/1 matrix assigning each condensed route to its condensed OD row.
    """

    data_prefix = ""            # directory holding X_matrix.mat, travel_times.csv, pickles
    N_TAZ = 321                 # number of traffic analysis zones (TAZs)
    N_TAZ_CONDENSED = 150       # TAZs retained by the condensed map
    N_ROUTES = 280691           # total routes over all OD pairs (progress-bar total)
    N_ROUTES_CONDENSED = 60394  # routes between condensed TAZ pairs (length of x)
    N_SENSORS = 1033            # number of sensors (rows of X)
    FIRST_ROUTE = 0             # route index given unit weight when travel times are unused

    def __init__(self, compute=True, phi=None, condensed_map=None, generate_phi=None, use_travel_times=False):
        """Either compute X/x/U from freshly generated phi data, or load them.

        Args:
            compute: if True, regenerate the matrices and save X_matrix.mat;
                if False, load X, x, U from data_prefix + '/X_matrix.mat'.
            phi: object receiving the generated data via phi.set_data(data);
                stored on self.phi either way.
            condensed_map: dict mapping a TAZ id to its condensed index;
                required when compute is True.
            generate_phi: provider of phi_generation_sql(experiment_id);
                required when compute is True.
            use_travel_times: weight routes by travel-time softmax instead of
                selecting only the first route.
        """
        if compute:
            # Load the data.
            data_progress = ConsoleProgress(1, message="Loading phi")
            self.condensed_map = condensed_map
            # TODO: replace with experiment id
            data = generate_phi.phi_generation_sql(1)
            phi.set_data(data)
            data_progress.finish()
            self.generate_routing_matrix(data, use_travel_times)
            sio.savemat(self.__class__.data_prefix + '/X_matrix.mat', {
                'X': self.X,
                'x': self.x,
                'U': self.U
            })
        else:
            x_load_progress = ConsoleProgress(1, message="Loading X matrix from file")
            loaded_data = sio.loadmat(self.__class__.data_prefix + '/X_matrix.mat')
            self.X = loaded_data['X']
            self.U = loaded_data['U']
            self.x = loaded_data['x']
            x_load_progress.finish()
        self.phi = phi

    def generate_od_travel_time_pairs(self):
        """Read travel_times.csv into an N_TAZ x N_TAZ grid of route-time dicts.

        Returns:
            list-of-lists where cell [i][j] maps route_id -> travel time for
            OD pair (i, j). CSV columns are: origin, destination, route, time.
        """
        gen_tt = ConsoleProgress(self.N_TAZ, message="Loading travel times")
        od_pair_matrix = [[{} for _ in range(self.N_TAZ)] for _ in range(self.N_TAZ)]
        with open(self.__class__.data_prefix + '/travel_times.csv') as fopen:
            reader = csv.reader(fopen, delimiter=',')
            # Skip the header row.
            next(reader, None)
            for row in reader:
                # Time is stored as float(int(...)) to match the original
                # behavior of truncating fractional seconds before storing.
                od_pair_matrix[int(row[0])][int(row[1])][int(row[2])] = float(int(row[3]))
                gen_tt.update_progress(int(row[0]))
        gen_tt.finish()
        return od_pair_matrix

    def generate_routing_matrix(self, data, use_travel_times):
        """
        Given the route index associated with each OD pair, generate a routing matrix.

        Populates self.X, self.x, self.U and pickles od_back_map. `data` is
        indexed as data[i][j] -> list of routes, each route being a list of
        sensor ids it passes.
        """
        self.X = lil_matrix((self.N_SENSORS, self.N_TAZ * self.N_TAZ))
        self.x = np.zeros(self.N_ROUTES_CONDENSED)
        x_ind = 0
        if use_travel_times:
            od_pair_travel_times = self.generate_od_travel_time_pairs()
        x_gen_progress = ConsoleProgress(self.N_ROUTES, message="Generating X and U matrices")
        self.U = lil_matrix((self.N_TAZ_CONDENSED * (self.N_TAZ_CONDENSED - 1), self.N_ROUTES_CONDENSED))
        # For efficiency, the if statement is surrounding these loops so it
        # doesn't check every iteration.
        if use_travel_times:
            for i in range(self.N_TAZ):
                for j in range(self.N_TAZ):
                    if data[i].get(j):
                        if data[i][j]:
                            travel_times = od_pair_travel_times[i][j]
                            # BUGFIX: materialize the values once — under
                            # Python 3, np.mean/np.std on a dict_values view
                            # produce a 0-d object array and fail.
                            tt_values = list(travel_times.values())
                            mean_tt = np.mean(tt_values)
                            std_tt = np.std(tt_values)
                            if std_tt == 0:
                                std_tt = 1
                            # Standardize, squash with sigmoid (shorter time ->
                            # larger weight), then normalize to sum to 1.
                            travel_times = {rt: (float(tt - mean_tt) / std_tt) for rt, tt in travel_times.items()}
                            travel_times = {rt: sigmoid(-tt) for rt, tt in travel_times.items()}
                            normalizer = float(sum(travel_times.values()))
                            travel_times = {rt: float(tt) / normalizer for rt, tt in travel_times.items()}
                            for route, sensors in enumerate(data[i][j]):
                                tt = travel_times[route]
                                for s in sensors:
                                    self.X[s, i * self.N_TAZ + j] += tt
                                if i in self.condensed_map and j in self.condensed_map:
                                    od_back_map[x_ind] = (i, j)
                                    i_ind = self.condensed_map[i]
                                    j_ind = self.condensed_map[j]
                                    self.x[x_ind] = tt
                                    # Row for OD (i_ind, j_ind) among the
                                    # N_TAZ_CONDENSED*(N_TAZ_CONDENSED-1)
                                    # ordered pairs (diagonal excluded, hence
                                    # the -1 shift when j_ind > i_ind).
                                    row_index = i_ind * (self.N_TAZ_CONDENSED - 1) + j_ind
                                    if j_ind > i_ind:
                                        row_index -= 1
                                    self.U[row_index, x_ind] = 1
                                    x_ind = x_ind + 1
                                x_gen_progress.increment_progress()
        else:
            for i in range(self.N_TAZ):
                for j in range(self.N_TAZ):
                    if data[i].get(j):
                        if data[i][j]:
                            for route, sensors in enumerate(data[i][j]):
                                if route == self.FIRST_ROUTE:
                                    for s in sensors:
                                        self.X[s, i * self.N_TAZ + j] = 1
                                    # NOTE(review): per the original statement
                                    # order, only the first route's slot gets
                                    # weight 1; other routes leave x at 0.
                                    self.x[x_ind] = 1
                                if i in self.condensed_map and j in self.condensed_map:
                                    od_back_map[x_ind] = (i, j)
                                    i_ind = self.condensed_map[i]
                                    j_ind = self.condensed_map[j]
                                    row_index = i_ind * (self.N_TAZ_CONDENSED - 1) + j_ind
                                    if j_ind > i_ind:
                                        row_index -= 1
                                    self.U[row_index, x_ind] = 1
                                    x_ind += 1
                                x_gen_progress.increment_progress()
        # BUGFIX: the original passed a bare open(...) to pickle.dump and never
        # closed the handle; use a context manager so the file is flushed/closed.
        with open(self.__class__.data_prefix + '/od_back_map.pickle', 'wb') as pickle_file:
            pickle.dump(od_back_map, pickle_file)
        x_gen_progress.finish()