forked from marcin-osowski/igc_lib
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmain_tracemap.py
175 lines (135 loc) · 6.57 KB
/
main_tracemap.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
from dateutil.rrule import rrule, DAILY
from injectable import load_injection_container, inject
from DailyCumulativeTrackBuilder import *
from FirestoreService import FirestoreService
from ServerCredentials import ServerCredentials
# Cut-over hour (Europe/Paris) used by main(): runs starting before
# SWITCH_HOUR process the previous day; runs at/after it process the current day.
SWITCH_HOUR = 17
# Number of past days re-processed by main_catchup().
PAST_DAYS_TO_CATCHUP = 15
# Populate the `injectable` dependency-injection container at import time so
# injected services (e.g. FirestoreService) can be resolved.
# NOTE(review): module-level side effect — runs on import.
load_injection_container()
def main_catchup(request):
    '''
    Catch up on the trace map for the past PAST_DAYS_TO_CATCHUP days by
    re-running the build for each of those days.

    Args:
        request: incoming request (e.g. from a Google Cloud Function trigger).
            Its contents are ignored; a synthetic request is built per day.
    '''
    # Get current time in the right time-zone
    tz = pytz.timezone('Europe/Paris')
    end_date = datetime.now(tz)
    start_date = end_date - timedelta(days=PAST_DAYS_TO_CATCHUP)

    # Minimal stand-in for the request object main() expects; built once
    # instead of once per loop iteration (original rebuilt the class each day).
    Request = type('Request', (object,), {})

    for dt in rrule(DAILY, dtstart=start_date, until=end_date):
        targetDate = dt.strftime("%Y_%m_%d")
        print(f"[CumulativeTrackBuilder] Catching up for targetDate={targetDate}")
        # Use a distinct local name: the original shadowed the `request` parameter.
        day_request = Request()
        day_request.args = {"targetDate": targetDate}
        main(day_request)
def main_catchup_year(request):
    '''
    Catch up on the trace map from January 1st of the current year to the
    present date by re-running the build for each day.

    Args:
        request: incoming request; ignored — a synthetic request is built
            per day instead.
    '''
    # Get current time in the right time-zone
    tz = pytz.timezone('Europe/Paris')
    end_date = datetime.now(tz)
    # NOTE(review): the start is anchored in UTC while the end is
    # Europe/Paris; both are tz-aware so rrule accepts them, but confirm the
    # mixed zones are intentional.
    start_date = datetime(year=end_date.year, month=1, day=1, hour=0, tzinfo=pytz.utc)

    # Built once instead of once per loop iteration (original rebuilt the
    # class each day).
    Request = type('Request', (object,), {})

    for dt in rrule(DAILY, dtstart=start_date, until=end_date):
        targetDate = dt.strftime("%Y_%m_%d")
        print(f"[CumulativeTrackBuilder] Catching up for targetDate={targetDate}")
        # Distinct local name: the original shadowed the `request` parameter.
        day_request = Request()
        day_request.args = {"targetDate": targetDate}
        main(day_request)
def main_alternative_source(request):
    '''
    Create the cumulated track files from .igc files located in a GCP bucket.
    The source bucket is netcoupe-igc-source; inside it, the 'source' request
    argument names the folder in which to look for .igc files.

    Args:
        request: request whose args may contain 'source'. When 'source' is
            absent the function does nothing (original code would otherwise
            reference an undefined name below).
    '''
    # Guard clause: nothing to do without a source folder.
    if 'source' not in request.args:
        return
    sourceFolder = request.args.get('source')

    tz = pytz.timezone('Europe/Paris')
    script_start_time = datetime.now(tz)
    target_date = date(script_start_time.year, script_start_time.month, script_start_time.day)

    # NOTE(review): this constructor call's argument order differs from the one
    # used in main() — confirm against DailyCumulativeTrackBuilder's signature.
    cumulativeTrackBuilder = DailyCumulativeTrackBuilder(None, target_date, fileList=None, isOutToLocalFiles=False)
    cumulativeTrackBuilder.run_alternative_source(sourceFolder)
'''
Main entry point for Google function
'''
def main(request):
    '''
    Build the daily cumulative trace map for a target date.

    Args:
        request: request whose args may contain:
            targetDate=yyyy_mm_dd: the day for which to do the processing.
            When absent, the day is derived from the current time in
            Europe/Paris: before SWITCH_HOUR the previous day is processed
            (so flights submitted after midnight are consolidated into the
            right day), otherwise the current day.

    Returns:
        The builder's JSON metadata when the track was rebuilt, otherwise an
        "up to date" message string.
    '''
    # Get current time in the right time-zone
    tz = pytz.timezone('Europe/Paris')
    script_start_time = datetime.now(tz)
    cumulative_track_builder = None

    # ----------------------------- Parse request parameters -----------------------------
    # ----- Cumulative Track -----
    target_date = None
    if 'targetDate' in request.args:
        target_date = request.args.get('targetDate')
        target_date = datetime.strptime(target_date, '%Y_%m_%d').date()

    # No target date: find which date to process from the switch-hour rule.
    if target_date is None:
        target_date = date(script_start_time.year, script_start_time.month, script_start_time.day)
        if script_start_time.hour < SWITCH_HOUR:
            target_date = target_date - timedelta(days=1)  # Before 17:00, catchup on previous day

    # Log Start of the process
    print(f"##### Launching processing for: Tracemap target_date={target_date}")

    # ----------------------------- Begin processing -----------------------------
    # Get FTP server credentials (destination for the .geojson output) from
    # environment variables; raises KeyError if any is missing.
    ftp_server_name = os.environ['FTP_SERVER_NAME'].strip()
    ftp_login = os.environ['FTP_LOGIN'].strip()
    ftp_password = os.environ['FTP_PASSWORD'].strip()

    # ---------------------------------------------------- Cumulative Track ----------------------------------------------
    is_update_needed = False
    return_message = "[DailyCumulativeTrackBuidler] Track up to date. No updated needeed !"

    storageService = StorageService(target_date)
    firestoreService = FirestoreService(target_date)

    # --- Run condition ---
    # Rebuild only when the file list for the target date has changed since the
    # last processed run (hashes differ) and the new list is non-empty.
    print(f"[DailyCumulativeTrackBuidler] Finding out if running is needed for: target_date={target_date}")
    current_hash_for_date, current_files_list = storageService.GetFileListHashForDay()
    last_processed_hash = firestoreService.GetProcessedFilesHashForDay()
    print(
        f"New files list / Processed files list: current_hash_for_date / last_processed_hash = {current_hash_for_date} / {last_processed_hash}")

    # Start processing if needed
    if current_hash_for_date != last_processed_hash:
        print(f"[DailyCumulativeTrackBuidler] Track needs updating ! ...")
        is_update_needed = bool(current_files_list)  # skip the rebuild when the new list is empty
    else:
        print(return_message)

    if is_update_needed:
        # --- Start the process
        ftp_client_credentials = ServerCredentials(ftp_server_name, ftp_login, ftp_password)
        cumulative_track_builder = DailyCumulativeTrackBuilder(target_date, ftp_client_credentials, current_files_list)

        # Run !
        cumulative_track_builder.run()
        json_metadata = cumulative_track_builder.JsonMetaData()
        return_message = json_metadata

        # --- Update Firestore progress DB
        firestoreService.UpdateProcessedFilesHasForDay(current_files_list)  # Update firestore with hash of processed files

    if cumulative_track_builder:
        firestoreService.updateFilesStatisticsForDay(
            cumulative_track_builder.runStatistics)  # Update firestore with statistics

    return return_message
if __name__ == "__main__":
    # Local debugging entry point: run the Cloud Function handler directly.
    # NOTE(review): passing None relies on the debug HACK inside main() that
    # builds a synthetic request — otherwise `request.args` raises
    # AttributeError. Confirm before running locally.
    try:
        res = main(None)  # fixed: original called `go(None)`, which is undefined (NameError)
        print(res)
    except SystemExit as e:
        # `e` is always bound inside an except clause; the check is kept for
        # parity with the original, written idiomatically.
        if e is not None:
            print(e)