function [ ] = fn_parse_autorate_log( log_FQN, plot_FQN, x_range_sec, selected_reflector_subset )
% This program is free software; you can redistribute it and/or modify
% it under the terms of the GNU General Public License version 2 as
% published by the Free Software Foundation.
%
% Copyright (C) 2022 Sebastian Moeller
% HOWTO:
% you need to install octave (https://octave.org)
% then navigate to the directory containing fn_parse_autorate_log.m and either:
% run 'octave --gui' in a terminal and open the file and run it (recommended if you want/need to edit values)
% or run 'octave ./fn_parse_autorate_log.m' from the terminal
% the following will work on the console without requiring interaction
% octave -qf --eval 'fn_parse_autorate_log("./SCRATCH/cake-autorate.log.20221001_1724_RRUL_fast.com.log", "./output.tif", [10, 500], {"1.1.1.1"})'
% symbolically: octave -qf --eval 'fn_parse_autorate_log("path/to/the/log.file", "path/to/the/output/plot.format", [starttime endtime], {selected_reflector_subset})'
% supported formats for the optional second argument: pdf, png, tif.
% the optional third argument is the range to plot in seconds after log file start
% the fourth argument is a list of reflector IDs, only samples from any of the listed reflectors will be plotted, use [] to select all reflectors
% by default the code will open a file selection dialog which should be used to select a CAKE-autorate log file.
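% for completeness, a call from within an interactive Octave session could look like this
% (the log path, plot path, and reflector IPs below are placeholders, adjust them to your setup):
% fn_parse_autorate_log('./SCRATCH/cake-autorate.primary.log', './SCRATCH/cake-autorate.primary.png', [], {'1.1.1.1', '1.0.0.1'});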
% TODO:
% add CDF plots for RTTs/OWDs per reflector for low and high achieved rate states
% the goal is to show low versus high load delay CDFs, but we do not really know about the relative load so
% this will only be a heuristic, albeit a useful one, hopefully.
% - add OWD/RTT plots for both load directions
% - report 95 and 99%-iles of lowest load conditions as output to help select delay thresholds
%gts = available_graphics_toolkits()
%qt_available = any(strcmp(gts, 'qt'))
%available_graphics_toolkits
%graphics_toolkit("gnuplot");
%if ~(isoctave)
dbstop if error;
%endif
timestamps.(mfilename).start = tic;
fq_mfilename = mfilename('fullpath');
mfilepath = fileparts(fq_mfilename);
disp(['INFO: ', mfilepath]);
% for debugging: anything other than '' or 'load_existing' will force the file to be reparsed
parse_command_string = ''; % load_existing or reload
% specific configuration options for different plot types
CDF.LowLoad_threshold_percent = 20; % max load% for low load condition
CDF.HighLoad_threshold_percent = 80; % min load% for high load condition
CDF.calc_range_ms = [0, 1000]; % what range to calculate the CDFs over? We can always reduce the plotted range later, see cumulative_range_percent
CDF.step_size_ms = 0.005; % we will see this as quantisation in the plots...
CDF.cumulative_range_percent = [0.001, 97.5]; % which range to show for CDFs (taken from the fastest/slowest reflector respectively)
% PDFs are mostly like CDFs except for the step_size
PDF = CDF;
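% (struct assignment copies all CDF settings; only the fields below are overridden)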
PDF.step_size_ms = 0.2; % these histograms need a coarser step size than the CDFs, or the PDFs look odd
PDF.cumulative_range_percent = [0.001, 90.0]; % which range to show for PDFs (taken from the fastest/slowest reflector respectively)
% add all defined plots that should be created and saved
plot_list = {'rawCDFs', 'deltaCDFs', 'timecourse'}; % 'rawCDFs', 'deltaCDFs', 'rawPDFs', 'deltaPDFs', 'timecourse' % PDFs are currently broken
try
figure_visibility_string = 'on';
if ~exist('log_FQN', 'var') || isempty(log_FQN)
log_FQN = [];
% for debugging
%log_FQN = "./SCRATCH/cake-autorate_2022-10-29_23_29_45.log";
else
disp(['INFO: Processing log file: ', log_FQN]);
figure_visibility_string = 'off';
endif
figure_opts.figure_visibility_string = figure_visibility_string;
if ~exist('plot_FQN', 'var') || isempty(plot_FQN)
plot_FQN = [];
else
disp(['INFO: Trying to save plot as: ', plot_FQN]);
[plot_path, plot_name, plot_ext] = fileparts(plot_FQN);
endif
% load the data file
[autorate_log, log_FQN] = fn_parse_autorate_logfile(log_FQN, parse_command_string);
% dissect the fully qualified name
[log_dir, log_name, log_ext ] = fileparts(log_FQN);
% find the relevant number of samples and whether we have LOAD records to begin with
if isfield(autorate_log, 'LOAD') && isfield(autorate_log.LOAD, 'LISTS') && isfield(autorate_log.LOAD.LISTS, 'RECORD_TYPE') && ~isempty(autorate_log.LOAD.LISTS.RECORD_TYPE)
n_LOAD_samples = length(autorate_log.LOAD.LISTS.RECORD_TYPE);
else
n_LOAD_samples = 0;
endif
% check whether we successfully loaded some data, otherwise bail out:
if ~isfield(autorate_log, 'DATA') || ~isfield(autorate_log.DATA, 'LISTS') || ~isfield(autorate_log.DATA.LISTS, 'RECORD_TYPE') || isempty(autorate_log.DATA.LISTS.RECORD_TYPE)
if (n_LOAD_samples == 0)
disp('WARNING: No valid data found, nothing to plot? Exiting...');
return
endif
n_DATA_samples = 0;
autorate_log.DATA.LISTS = [];
else
n_DATA_samples = length(autorate_log.DATA.LISTS.RECORD_TYPE);
endif
% find the smallest and largest (or first and last) DATA or LOAD timestamps
if (n_DATA_samples > 0)
first_sample_timestamp = autorate_log.DATA.LISTS.PROC_TIME_US(1);
last_sample_timestamp = autorate_log.DATA.LISTS.PROC_TIME_US(end);
else
first_sample_timestamp = 60*60*24*365*1000; % make this larger than any realistic unix epoch in seconds is going to be...
last_sample_timestamp = 0; % we take the maximum, so will override this
endif
if (n_LOAD_samples > 0)
first_sample_timestamp = min([first_sample_timestamp, autorate_log.LOAD.LISTS.PROC_TIME_US(1)]);
last_sample_timestamp = max([last_sample_timestamp, autorate_log.LOAD.LISTS.PROC_TIME_US(end-1)]);
endif
% select the sample range to display:
% 0 denotes the start, the second value the maximum time to display
% if the end index is too large we clip to max timestamp
% [] denotes all samples...
% can be passed via argument, default to the full range
if ~exist('x_range_sec', 'var') || isempty(x_range_sec)
% use this to change the values if not calling this as a function
% select the time range to display in seconds since first sample
% with different time axis for DATA and LOAD, simple indices are not appropriate anymore
x_range_sec = [];
%x_range_sec = [900 1000];
else
%x_range_sec = [];
if ~isempty(x_range_sec);
disp(['INFO: requested x_range_sec: ', num2str(x_range_sec)]);
endif
endif
% clean up the time range somewhat
[x_range_sec, do_return] = fn_sanitize_x_range_sec(x_range_sec, first_sample_timestamp, last_sample_timestamp);
if (do_return)
return
endif
% now, get the data range indices for the selected record types
if (n_DATA_samples > 0)
x_range.DATA = fn_get_range_indices_from_range_timestamps((x_range_sec + first_sample_timestamp), autorate_log.DATA.LISTS.PROC_TIME_US);
[x_range.DATA, do_return] = fn_sanitize_x_range(x_range.DATA, n_DATA_samples);
endif
if (n_LOAD_samples > 0)
x_range.LOAD = fn_get_range_indices_from_range_timestamps((x_range_sec + first_sample_timestamp), autorate_log.LOAD.LISTS.PROC_TIME_US);
[x_range.LOAD, do_return] = fn_sanitize_x_range(x_range.LOAD, n_LOAD_samples);
if (n_DATA_samples == 0)
x_range.DATA = x_range.LOAD; % needed for plot naming...
endif
endif
if (do_return)
return
endif
% allow restricting the plot to a subset of the reflectors.
if ~exist('selected_reflector_subset', 'var') || isempty(selected_reflector_subset)
selected_reflector_subset = {}; % default to all
%selected_reflector_subset = {"1.1.1.1"};
%selected_reflector_subset = {"1.1.1.1", "1.0.0.1"};
else
% take from input argument
%selected_reflector_subset = [];
disp(['INFO: requested selected_reflector_subset: ', strjoin(selected_reflector_subset, ', ')]);
endif
reflector_string = '';
if ~isempty(selected_reflector_subset)
reflector_string = '.R';
for i_reflector = 1 : length(selected_reflector_subset)
reflector_string = [reflector_string, '_', selected_reflector_subset{i_reflector}];
endfor
reflector_string(end+1) = '.';
endif
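% the resulting reflector_string (e.g. '.R_1.1.1.1.') is appended to the per-plot file names below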
% new reflectors get initialised with a very high baseline prior (which quickly gets adjusted to a better estimate),
% resulting in very high baseline values that cause poor autoscaling of the delay y-axis
% this parameter controls the minimum delay sample sequence number to use, allowing us to
% ignore the early initialisation phase with small sequence numbers
% this issue only affects delay data, so this setting is ignored for the rates
% set to 0 to show all delay samples
min_sequence_number = 1;
align_rate_and_delay_zeros = 1; % so that delay and rate 0s are aligned
output_format_extension = '.png'; % '.pdf', '.png', '.tif', '.ps', ...
line_width = 1.0;
figure_opts.line_width = line_width;
figure_opts.output_format_extension = output_format_extension;
% a few outliers will make the delay plots unreadable; if this is not empty [],
% use this factor on max(ADJ_DELAY_THR) to scale the delay axis
% this is done before align_rate_and_delay_zeros is applied.
scale_delay_axis_by_ADJ_DELAY_THR_factor = 2.0;
% if the following is set make sure we also scale to the actual data
% we calculate both y-axis scales and take the maximum if both are requested
scale_delay_axis_by_OWD_DELTA_QUANTILE_factor = 5.0; % ignore if empty []
OWD_DELTA_QUANTILE_pct = 99.0; % what upper quantile to use for scaling, 100 is max value
% set up the plots
rates.DATA.fields_to_plot_list = {'CAKE_DL_RATE_KBPS', 'CAKE_UL_RATE_KBPS', 'DL_ACHIEVED_RATE_KBPS', 'UL_ACHIEVED_RATE_KBPS'};
rates.DATA.color_list = {[241,182,218]/254, [184,225,134]/254, [208,28,139]/254, [77,172,38]/254};
rates.DATA.linestyle_list = {'-', '-', '-', '-'};
rates.DATA.sign_list = {1, -1, 1, -1}; % define the sign of a given data series, allows flipping a set into the negative range
rates.DATA.scale_factor = 1/1000; % conversion factor from Kbps to Mbps
% based on LOAD records, replace the older 'DL_ACHIEVED_RATE_KBPS', 'UL_ACHIEVED_RATE_KBPS' fields from DATA
% this allows plotting data from sleep epochs, at the cost of some x_value trickery.
rates.LOAD.scale_factor = 1/1000; % conversion factor from Kbps to Mbps
rates.LOAD.fields_to_plot_list = {};
rates.LOAD.color_list = {};
rates.LOAD.linestyle_list = {};
rates.LOAD.sign_list = {};
if (n_LOAD_samples > 0)
% % these two should only be shown during sleep periods?
% % otherwise LOAD and higher resolution DATA plots will 'overlap'
% if isfield(autorate_log.DATA.LISTS, 'CAKE_DL_RATE_KBPS')
% rates.LOAD.fields_to_plot_list{end+1} = 'CAKE_DL_RATE_KBPS';
% rates.LOAD.color_list{end+1} = [241,182,218]/254;
% rates.LOAD.linestyle_list{end+1} = '-';
% rates.LOAD.sign_list{end+1} = 1;
% endif
% if isfield(autorate_log.DATA.LISTS, 'CAKE_UL_RATE_KBPS')
% rates.LOAD.fields_to_plot_list{end+1} = 'CAKE_UL_RATE_KBPS';
% rates.LOAD.color_list{end+1} = [184,225,134]/254;
% rates.LOAD.linestyle_list{end+1} = '-';
% rates.LOAD.sign_list{end+1} = -1;
% endif
% these can be replaced...
if isfield(autorate_log.LOAD.LISTS, 'DL_ACHIEVED_RATE_KBPS')
rates.LOAD.fields_to_plot_list{end+1} = 'DL_ACHIEVED_RATE_KBPS';
rates.LOAD.color_list{end+1} = [208,28,139]/254;
rates.LOAD.linestyle_list{end+1} = '-';
rates.LOAD.sign_list{end+1} = 1;
rate_DATA_idx = find(ismember(rates.DATA.fields_to_plot_list, {'DL_ACHIEVED_RATE_KBPS'}));
rates.DATA.fields_to_plot_list(rate_DATA_idx) = [];
rates.DATA.color_list(rate_DATA_idx) = [];
rates.DATA.linestyle_list(rate_DATA_idx) = [];
rates.DATA.sign_list(rate_DATA_idx) = [];
endif
if isfield(autorate_log.LOAD.LISTS, 'UL_ACHIEVED_RATE_KBPS')
rates.LOAD.fields_to_plot_list{end+1} = 'UL_ACHIEVED_RATE_KBPS';
rates.LOAD.color_list{end+1} = [77,172,38]/254;
rates.LOAD.linestyle_list{end+1} = '-';
rates.LOAD.sign_list{end+1} = -1;
rate_DATA_idx = find(ismember(rates.DATA.fields_to_plot_list, {'UL_ACHIEVED_RATE_KBPS'}));
rates.DATA.fields_to_plot_list(rate_DATA_idx) = [];
rates.DATA.color_list(rate_DATA_idx) = [];
rates.DATA.linestyle_list(rate_DATA_idx) = [];
rates.DATA.sign_list(rate_DATA_idx) = [];
endif
endif
% create the latency data collection and configuration
delays.DATA.scale_factor = 1/1000; % conversion factor from µs to ms
delays.DATA.fields_to_plot_list = {};
delays.DATA.color_list = {};
delays.DATA.linestyle_list = {};
delays.DATA.sign_list = {};
% colors from https://colorbrewer2.org/#type=diverging&scheme=BrBG&n=8
% re-order the following to assign depth order in plot...
if isfield(autorate_log.DATA.LISTS, 'DL_OWD_BASELINE')
delays.DATA.fields_to_plot_list{end+1} = 'DL_OWD_BASELINE';
delays.DATA.color_list{end+1} = [246, 232, 195]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = 1;
endif
if isfield(autorate_log.DATA.LISTS, 'UL_OWD_BASELINE')
delays.DATA.fields_to_plot_list{end+1} = 'UL_OWD_BASELINE';
delays.DATA.color_list{end+1} = [199, 234, 229]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = -1;
endif
if isfield(autorate_log.DATA.LISTS, 'DL_OWD_US')
delays.DATA.fields_to_plot_list{end+1} = 'DL_OWD_US';
delays.DATA.color_list{end+1} = [223, 194, 125]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = 1;
endif
if isfield(autorate_log.DATA.LISTS, 'UL_OWD_US')
delays.DATA.fields_to_plot_list{end+1} = 'UL_OWD_US';
delays.DATA.color_list{end+1} = [128, 205, 193]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = -1;
endif
if isfield(autorate_log.DATA.LISTS, 'DL_OWD_DELTA_US')
delays.DATA.fields_to_plot_list{end+1} = 'DL_OWD_DELTA_US';
delays.DATA.color_list{end+1} = [191, 129, 45]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = 1;
endif
if isfield(autorate_log.DATA.LISTS, 'UL_OWD_DELTA_US')
delays.DATA.fields_to_plot_list{end+1} = 'UL_OWD_DELTA_US';
delays.DATA.color_list{end+1} = [53, 151, 143]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = -1;
endif
if isfield(autorate_log.DATA.LISTS, 'DL_AVG_OWD_DELTA_US')
delays.DATA.fields_to_plot_list{end+1} = 'DL_AVG_OWD_DELTA_US';
delays.DATA.color_list{end+1} = [0.33, 0 , 0]; %[191, 129, 45]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = 1;
endif
if isfield(autorate_log.DATA.LISTS, 'UL_AVG_OWD_DELTA_US')
delays.DATA.fields_to_plot_list{end+1} = 'UL_AVG_OWD_DELTA_US';
delays.DATA.color_list{end+1} = [0.33, 0 , 0]; %[53, 151, 143]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = -1;
endif
% to allow old (single ADJ_DELAY_THR) and new log files
if isfield(autorate_log.DATA.LISTS, 'DL_ADJ_AVG_OWD_DELTA_THR_US')
delays.DATA.fields_to_plot_list{end +1} = 'DL_ADJ_AVG_OWD_DELTA_THR_US';
delays.DATA.color_list{end+1} = [0.5, 0.0, 0.0];
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = 1;
endif
% to allow old (single ADJ_DELAY_THR) and new log files
if isfield(autorate_log.DATA.LISTS, 'UL_ADJ_AVG_OWD_DELTA_THR_US')
delays.DATA.fields_to_plot_list{end+1} = 'UL_ADJ_AVG_OWD_DELTA_THR_US';
delays.DATA.color_list{end+1} = [0.5, 0.0, 0.0];
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = -1;
endif
% to allow old (single ADJ_DELAY_THR) and new log files
if isfield(autorate_log.DATA.LISTS, 'ADJ_DELAY_THR') || isfield(autorate_log.DATA.LISTS, 'DL_ADJ_DELAY_THR')
if isfield(autorate_log.DATA.LISTS, 'DL_ADJ_DELAY_THR')
delays.DATA.fields_to_plot_list{end +1} = 'DL_ADJ_DELAY_THR';
elseif isfield(autorate_log.DATA.LISTS, 'ADJ_DELAY_THR')
delays.DATA.fields_to_plot_list{end+1} = 'ADJ_DELAY_THR';
endif
delays.DATA.color_list{end+1} = [1.0, 0.0, 0.0];
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = 1;
endif
% to allow old (single ADJ_DELAY_THR) and new log files
if isfield(autorate_log.DATA.LISTS, 'ADJ_DELAY_THR') || isfield(autorate_log.DATA.LISTS, 'UL_ADJ_DELAY_THR')
if isfield(autorate_log.DATA.LISTS, 'UL_ADJ_DELAY_THR')
delays.DATA.fields_to_plot_list{end+1} = 'UL_ADJ_DELAY_THR';
elseif isfield(autorate_log.DATA.LISTS, 'ADJ_DELAY_THR')
delays.DATA.fields_to_plot_list{end+1} = 'ADJ_DELAY_THR';
endif
delays.DATA.color_list{end+1} = [1.0, 0.0, 0.0];
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = -1;
endif
% if exist, plot the delta EWMA
if isfield(autorate_log.DATA.LISTS, 'DL_OWD_DELTA_EWMA_US')
delays.DATA.fields_to_plot_list{end+1} = 'DL_OWD_DELTA_EWMA_US';
delays.DATA.color_list{end+1} = [140, 81, 10]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = 1;
endif
if isfield(autorate_log.DATA.LISTS, 'UL_OWD_DELTA_EWMA_US')
delays.DATA.fields_to_plot_list{end+1} = 'UL_OWD_DELTA_EWMA_US';
delays.DATA.color_list{end+1} = [1, 102, 94]/254;
delays.DATA.linestyle_list{end+1} = '-';
delays.DATA.sign_list{end+1} = -1;
endif
% get x_vector data and which indices to display for each record type
x_vec.DATA = (1:1:n_DATA_samples);
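% (this index vector is only used for the emptiness test below; it is replaced by real sample times further down)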
if ~isempty(x_vec.DATA)
DATA_rates_x_idx = (x_range.DATA(1):1:x_range.DATA(2));
DATA_delays_x_idx = (x_range.DATA(1):1:x_range.DATA(2));
sequence_too_small_idx = find(autorate_log.DATA.LISTS.SEQUENCE < min_sequence_number);
if ~isempty(sequence_too_small_idx)
DATA_delays_x_idx = setdiff(DATA_delays_x_idx, sequence_too_small_idx);
endif
% allow plotting only a given reflector subset
if ~isempty(selected_reflector_subset)
cur_reflector_sample_idx = find(ismember(autorate_log.DATA.LISTS.REFLECTOR, selected_reflector_subset));
DATA_delays_x_idx = intersect(DATA_delays_x_idx, cur_reflector_sample_idx);
endif
if isempty(DATA_delays_x_idx)
disp('No valid samples found (for the current reflector subset).');
%return # we can still plot the load/rate data, for long sleep periods there might be no valid delay samples at all
endif
% use real sample times, PROC_TIME_US is seconds.NNNNNN
% to make things less odd report times in seconds since the log start
x_vec.DATA = (autorate_log.DATA.LISTS.PROC_TIME_US - first_sample_timestamp);
disp(['Selected DATA sample indices: ', num2str(x_range.DATA)]);
% use this later to set the XLim s for all time plots
x_vec_range = [x_vec.DATA(DATA_rates_x_idx(1)), x_vec.DATA(DATA_rates_x_idx(end))];
else
DATA_delays_x_idx = [];
x_vec_range = [(60*60*24*365*1000) 0]; % make sure the following MIN/MAX operation will update the fields
endif
if (n_LOAD_samples > 0)
LOAD_rates_x_idx = (x_range.LOAD(1):1:x_range.LOAD(2));
x_vec.LOAD = (autorate_log.LOAD.LISTS.PROC_TIME_US - first_sample_timestamp);
disp(['Selected LOAD sample indices: ', num2str(x_range.LOAD)]);
% XLims should fit both DATA and LOAD sample timestamps
x_vec_range(1) = min(x_vec_range(1), x_vec.LOAD(LOAD_rates_x_idx(1)));
x_vec_range(2) = max(x_vec_range(2), x_vec.LOAD(LOAD_rates_x_idx(end)));
endif
x_label_string = 'time from log file start [sec]'; % or 'autorate samples'
%TODO detect sleep periods and mark in graphs
% for plot naming
if ((x_range.DATA(1) ~= 1) ...
|| ((n_DATA_samples > 0) && (x_range.DATA(2) ~= length(autorate_log.DATA.LISTS.RECORD_TYPE))) ...
|| ((n_DATA_samples == 0) && (x_range.DATA(2) ~= length(autorate_log.LOAD.LISTS.RECORD_TYPE))))
n_range_digits = ceil(max(log10(x_range.DATA)));
range_string = ['.', 'sample_', num2str(x_range.DATA(1), ['%0', num2str(n_range_digits), 'd']), '_to_', num2str(x_range.DATA(2), ['%0', num2str(n_range_digits), 'd'])];
else
range_string = '';
endif
adjusted_ylim_delay = [];
if ~isempty(scale_delay_axis_by_ADJ_DELAY_THR_factor) && (n_DATA_samples > 0)
%ylim_delays = get(AX(2), 'YLim');
if isfield(autorate_log.DATA.LISTS, 'ADJ_DELAY_THR')
ul_max_adj_delay_thr = max(autorate_log.DATA.LISTS.ADJ_DELAY_THR(DATA_delays_x_idx));
dl_max_adj_delay_thr = max(autorate_log.DATA.LISTS.ADJ_DELAY_THR(DATA_delays_x_idx));
endif
if isfield(autorate_log.DATA.LISTS, 'UL_ADJ_DELAY_THR')
ul_max_adj_delay_thr = max(autorate_log.DATA.LISTS.UL_ADJ_DELAY_THR(DATA_delays_x_idx));
endif
if isfield(autorate_log.DATA.LISTS, 'DL_ADJ_DELAY_THR')
dl_max_adj_delay_thr = max(autorate_log.DATA.LISTS.DL_ADJ_DELAY_THR(DATA_delays_x_idx));
endif
% delays.DATA.sign_list is ordered DL*, UL*, DL*, ...
adjusted_ylim_delay(1) = (sign(delays.DATA.sign_list{2}) * ul_max_adj_delay_thr * scale_delay_axis_by_ADJ_DELAY_THR_factor);
adjusted_ylim_delay(2) = (sign(delays.DATA.sign_list{1}) * dl_max_adj_delay_thr * scale_delay_axis_by_ADJ_DELAY_THR_factor);
disp(['INFO: Adjusted y-limits based on ADJ_DELAY_THR_factor: ', num2str(adjusted_ylim_delay)]);
%set(AX(2), 'YLim', (adjusted_ylim_delay * delays.DATA.scale_factor));
endif
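% note: adjusted_ylim_delay is still in µs and, given the DL/UL ordering of sign_list noted above,
% ends up as [negative UL limit, positive DL limit]; delays.DATA.scale_factor is only applied
% when the YLim is actually set in the plotting section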
% find the 99%ile for the actual relevant delay data
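% (the quantile is computed empirically: sort the deltas and index at round(n * pct/100))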
if ~isempty(scale_delay_axis_by_OWD_DELTA_QUANTILE_factor) && (n_DATA_samples > 0)
sorted_UL_OWD_DELTA_US = sort(autorate_log.DATA.LISTS.UL_OWD_DELTA_US(DATA_delays_x_idx));
n_UL_OWD_DELTA_US_samples = length(sorted_UL_OWD_DELTA_US);
UL_OWD_DELTA_US_upper_quantile = sorted_UL_OWD_DELTA_US(round(n_UL_OWD_DELTA_US_samples * (OWD_DELTA_QUANTILE_pct / 100)));
sorted_DL_OWD_DELTA_US = sort(autorate_log.DATA.LISTS.DL_OWD_DELTA_US(DATA_delays_x_idx));
n_DL_OWD_DELTA_US_samples = length(sorted_DL_OWD_DELTA_US);
DL_OWD_DELTA_US_upper_quantile = sorted_DL_OWD_DELTA_US(round(n_DL_OWD_DELTA_US_samples * (OWD_DELTA_QUANTILE_pct / 100)));
% use this to correct the delay y-axis scaling
% delays.DATA.sign_list is ordered DL*, UL*, DL*, ...
DELAY_adjusted_ylim_delay(1) = (sign(delays.DATA.sign_list{2}) * UL_OWD_DELTA_US_upper_quantile * scale_delay_axis_by_OWD_DELTA_QUANTILE_factor);
DELAY_adjusted_ylim_delay(2) = (sign(delays.DATA.sign_list{1}) * DL_OWD_DELTA_US_upper_quantile * scale_delay_axis_by_OWD_DELTA_QUANTILE_factor);
% setting the range smaller or larger than minimum or maximum makes little sense...
sorted_UL_OWD_US = sort(autorate_log.DATA.LISTS.UL_OWD_US(DATA_delays_x_idx));
if DELAY_adjusted_ylim_delay(1) < (sign(delays.DATA.sign_list{2}) * 1.05 * sorted_UL_OWD_US(end))
DELAY_adjusted_ylim_delay(1) = (sign(delays.DATA.sign_list{2}) * 1.05 * sorted_UL_OWD_US(end));
endif
sorted_DL_OWD_US = sort(autorate_log.DATA.LISTS.DL_OWD_US(DATA_delays_x_idx));
if DELAY_adjusted_ylim_delay(2) > (sign(delays.DATA.sign_list{1}) * 1.05 * sorted_DL_OWD_US(end))
DELAY_adjusted_ylim_delay(2) = (sign(delays.DATA.sign_list{1}) * 1.05 * sorted_DL_OWD_US(end));
endif
if isempty(adjusted_ylim_delay)
adjusted_ylim_delay = DELAY_adjusted_ylim_delay;
disp(['INFO: Adjusted y-limits based on OWD_DELTA_QUANTILE_factor: ', num2str(DELAY_adjusted_ylim_delay)]);
else
adjusted_ylim_delay(1) = sign(delays.DATA.sign_list{2}) * max([abs(adjusted_ylim_delay(1)), abs(DELAY_adjusted_ylim_delay(1))]);
adjusted_ylim_delay(2) = max([adjusted_ylim_delay(2), DELAY_adjusted_ylim_delay(2)]);
disp(['INFO: Grand adjusted y-limits based on OWD_DELTA_QUANTILE_factor and ADJ_DELAY_THR_factor: ', num2str(adjusted_ylim_delay)]);
endif
endif
% for testing align_rate_and_delay_zeros
% autorate_log.DATA.LISTS.DL_OWD_US = 10*autorate_log.DATA.LISTS.DL_OWD_US;
% for testing align_rate_and_delay_zeros
% autorate_log.DATA.LISTS.UL_OWD_US = 10*autorate_log.DATA.LISTS.UL_OWD_US;
if (n_DATA_samples > 0)
% create CDFs for each reflector, for both DL_OWD_US and UL_OWD_US
% for low congestion state (low achieved rate with shaper at baseline rate)
% and for high congestion state (high achieved rate close to the shaper rate)?
% ideally we would classify by congestion condition, but the best estimate we have
% are the load conditions; since we want to look at differences in delay we should not
% classify based on delay directly, hence load it is.
sample_idx_by_load = fn_get_samples_by_load(autorate_log.DATA.LISTS, 'LOAD_PERCENT', {'UL', 'DL'}, {'UL_LOAD_PERCENT', 'DL_LOAD_PERCENT'}, CDF.LowLoad_threshold_percent, CDF.HighLoad_threshold_percent);
if ismember('rawCDFs', plot_list);
% measures for raw RTT/OWD data
[raw_CDF, CDF_x_vec, unique_reflector_list] = fn_get_XDF_by_load('CDF', 'RAW', autorate_log.DATA.LISTS.UL_OWD_US, autorate_log.DATA.LISTS.DL_OWD_US, delays.DATA.scale_factor, ...
CDF.calc_range_ms, CDF.step_size_ms, autorate_log.DATA.LISTS.REFLECTOR, sample_idx_by_load, DATA_delays_x_idx);
if isempty(plot_FQN)
cur_plot_FQN = fullfile(log_dir, [log_name, log_ext, '.rawCDFs', range_string, reflector_string, figure_opts.output_format_extension]);
else
cur_plot_FQN = fullfile(plot_path, [plot_name, '.rawCDFs', range_string, reflector_string, plot_ext]);
endif
autorate_rawCDF_fh = fn_plot_CDF_by_measure_and_load_condition('CDF', figure_opts, raw_CDF, CDF.cumulative_range_percent, 'raw delay [ms]', 'cumulative density [%]', cur_plot_FQN);
% these can be pretty large, so make this somewhat lighter
clear raw_CDF;
clear CDF_x_vec;
endif
if ismember('rawPDFs', plot_list);
% measures for raw RTT/OWD data
[raw_PDF, PDF_x_vec, unique_reflector_list] = fn_get_XDF_by_load('PDF', 'RAW', autorate_log.DATA.LISTS.UL_OWD_US, autorate_log.DATA.LISTS.DL_OWD_US, delays.DATA.scale_factor, ...
PDF.calc_range_ms, PDF.step_size_ms, autorate_log.DATA.LISTS.REFLECTOR, sample_idx_by_load, DATA_delays_x_idx);
if isempty(plot_FQN)
cur_plot_FQN = fullfile(log_dir, [log_name, log_ext, '.rawPDFs', range_string, reflector_string, figure_opts.output_format_extension]);
else
cur_plot_FQN = fullfile(plot_path, [plot_name, '.rawPDFs', range_string, reflector_string, plot_ext]);
endif
autorate_rawPDF_fh = fn_plot_CDF_by_measure_and_load_condition('PDF', figure_opts, raw_PDF, PDF.cumulative_range_percent, 'raw delay [ms]', 'probability density [%]', cur_plot_FQN);
% these can be pretty large, so make this somewhat lighter
clear raw_PDF;
clear PDF_x_vec;
endif
if ismember('deltaCDFs', plot_list);
% measures for baseline corrected delta(RTT)/delta(OWD) data
[delta_CDF, CDF_x_vec, unique_reflector_list] = fn_get_XDF_by_load('CDF', 'DELTA', autorate_log.DATA.LISTS.UL_OWD_DELTA_US, autorate_log.DATA.LISTS.DL_OWD_DELTA_US, delays.DATA.scale_factor, ...
CDF.calc_range_ms, CDF.step_size_ms, autorate_log.DATA.LISTS.REFLECTOR, sample_idx_by_load, DATA_delays_x_idx);
if isempty(plot_FQN)
cur_plot_FQN = fullfile(log_dir, [log_name, log_ext, '.deltaCDFs', range_string, reflector_string, figure_opts.output_format_extension]);
else
cur_plot_FQN = fullfile(plot_path, [plot_name, '.deltaCDFs', range_string, reflector_string, plot_ext]);
endif
autorate_deltaCDF_fh = fn_plot_CDF_by_measure_and_load_condition('CDF', figure_opts, delta_CDF, CDF.cumulative_range_percent, 'delta delay [ms]', 'cumulative density [%]', cur_plot_FQN);
fn_propose_delay_thresholds(delta_CDF, CDF.calc_range_ms);
% these can be pretty large, so make this somewhat lighter
clear delta_CDF;
clear CDF_x_vec;
endif
if ismember('deltaPDFs', plot_list);
% measures for baseline corrected delta(RTT)/delta(OWD) data
[delta_PDF, PDF_x_vec, unique_reflector_list] = fn_get_XDF_by_load('PDF', 'DELTA', autorate_log.DATA.LISTS.UL_OWD_DELTA_US, autorate_log.DATA.LISTS.DL_OWD_DELTA_US, delays.DATA.scale_factor, ...
PDF.calc_range_ms, PDF.step_size_ms, autorate_log.DATA.LISTS.REFLECTOR, sample_idx_by_load, DATA_delays_x_idx);
if isempty(plot_FQN)
cur_plot_FQN = fullfile(log_dir, [log_name, log_ext, '.deltaPDFs', range_string, reflector_string, figure_opts.output_format_extension]);
else
cur_plot_FQN = fullfile(plot_path, [plot_name, '.deltaPDFs', range_string, reflector_string, plot_ext]);
endif
autorate_deltaCDF_fh = fn_plot_CDF_by_measure_and_load_condition('PDF', figure_opts, delta_PDF, PDF.cumulative_range_percent, 'delta delay [ms]', 'probability density [%]', cur_plot_FQN);
% these can be pretty large, so make this somewhat lighter
clear delta_PDF;
clear PDF_x_vec;
endif
endif
if ismember('timecourse', plot_list);
% plot timecourses
autorate_fh = figure('Name', 'CAKE-autorate log: rate & delay timecourses', 'visible', figure_visibility_string);
[ output_rect ] = fn_set_figure_outputpos_and_size( autorate_fh, 1, 1, 27, 19, 1, 'landscape', 'centimeters' );
if (n_DATA_samples > 0)
cur_sph = subplot(2, 2, [1 2]);
%plot data on both axes
% use this as dummy to create the axis:
cur_scaled_data_rates = autorate_log.DATA.LISTS.(rates.DATA.fields_to_plot_list{1})(DATA_rates_x_idx) * rates.DATA.scale_factor;
cur_scaled_data_delays = autorate_log.DATA.LISTS.(delays.DATA.fields_to_plot_list{1})(DATA_delays_x_idx) * delays.DATA.scale_factor;
if (isempty(cur_scaled_data_rates) || isempty(cur_scaled_data_delays))
disp('WARNING: We somehow ended up without data to plot, should not happen');
return
endif
% this is a dummy plot so we get the dual axis handles...
[AX H1 H2] = plotyy(x_vec.DATA(DATA_delays_x_idx), (delays.DATA.sign_list{1} * cur_scaled_data_delays)', x_vec.DATA(DATA_rates_x_idx)', (rates.DATA.sign_list{1} * cur_scaled_data_rates)', 'plot');
%hold both axes
legend_list = {};
hold(AX(1));
for i_field = 1 : length(delays.DATA.fields_to_plot_list)
legend_list{end+1} = delays.DATA.fields_to_plot_list{i_field};
cur_scaled_data = autorate_log.DATA.LISTS.(delays.DATA.fields_to_plot_list{i_field})(DATA_delays_x_idx) * delays.DATA.scale_factor;
plot(AX(1), x_vec.DATA(DATA_delays_x_idx)', (delays.DATA.sign_list{i_field} * cur_scaled_data)', 'Color', delays.DATA.color_list{i_field}, 'Linestyle', delays.DATA.linestyle_list{i_field}, 'LineWidth', line_width);
endfor
%legend(legend_list, 'Interpreter', 'none');
hold off
xlabel(x_label_string);
ylabel('Delay [milliseconds]');
set(AX(1), 'XLim', x_vec_range);
if ~isempty(adjusted_ylim_delay)
set(AX(1), 'YLim', (adjusted_ylim_delay * delays.DATA.scale_factor));
endif
hold(AX(2));
for i_field = 1 : length(rates.DATA.fields_to_plot_list)
legend_list{end+1} = rates.DATA.fields_to_plot_list{i_field};
cur_scaled_data = autorate_log.DATA.LISTS.(rates.DATA.fields_to_plot_list{i_field})(DATA_rates_x_idx) * rates.DATA.scale_factor;
plot(AX(2), x_vec.DATA(DATA_rates_x_idx)', (rates.DATA.sign_list{i_field} * cur_scaled_data)', 'Color', rates.DATA.color_list{i_field}, 'Linestyle', rates.DATA.linestyle_list{i_field}, 'LineWidth', line_width);
endfor
if (n_LOAD_samples > 0)
for i_field = 1 : length(rates.LOAD.fields_to_plot_list)
legend_list{end+1} = rates.LOAD.fields_to_plot_list{i_field};
cur_scaled_data = autorate_log.LOAD.LISTS.(rates.LOAD.fields_to_plot_list{i_field})(LOAD_rates_x_idx) * rates.LOAD.scale_factor;
plot(AX(2), x_vec.LOAD(LOAD_rates_x_idx)', (rates.LOAD.sign_list{i_field} * cur_scaled_data)', 'Color', rates.LOAD.color_list{i_field}, 'Linestyle', rates.LOAD.linestyle_list{i_field}, 'LineWidth', line_width);
endfor
endif
%legend(legend_list, 'Interpreter', 'none');
hold off
xlabel(AX(2), x_label_string);
ylabel(AX(2), 'Rate [Mbps]');
set(AX(2), 'XLim', x_vec_range);
% make sure the zeros of both axes align
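% (this works by matching the fraction of the y-range that lies below zero on the delay axis
% to the corresponding fraction on the rate axis, widening either the upper or lower delay limit)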
if (align_rate_and_delay_zeros)
ylim_rates = get(AX(2), 'YLim');
ylim_delays = get(AX(1), 'YLim');
rate_up_ratio = abs(ylim_rates(1)) / sum(abs(ylim_rates));
rate_down_ratio = abs(ylim_rates(2)) / sum(abs(ylim_rates));
delay_up_ratio = abs(ylim_delays(1)) / sum(abs(ylim_delays));
delay_down_ratio = abs(ylim_delays(2)) / sum(abs(ylim_delays));
if (delay_up_ratio >= rate_up_ratio)
% we need to adjust the upper limit
new_lower_y_delay = ylim_delays(1);
new_upper_y_delay = (abs(ylim_delays(1)) / rate_up_ratio) - abs(ylim_delays(1));
else
% we need to adjust the lower limit
new_lower_y_delay = sign(ylim_delays(1)) * ((abs(max(ylim_delays)) / rate_down_ratio) - abs(max(ylim_delays)));
new_upper_y_delay = ylim_delays(2);
endif
set(AX(1), 'YLim', [new_lower_y_delay, new_upper_y_delay]);
endif
% TODO: look at both DATA and LOAD timestamps to deduce the start and end timestamps
title(AX(2), ['Start: ', autorate_log.DATA.LISTS.LOG_DATETIME{DATA_rates_x_idx(1)}, '; ', num2str(autorate_log.DATA.LISTS.LOG_TIMESTAMP(DATA_rates_x_idx(1))), '; sample index: ', num2str(x_range.DATA(1)); ...
'End: ', autorate_log.DATA.LISTS.LOG_DATETIME{DATA_rates_x_idx(end)}, '; ', num2str(autorate_log.DATA.LISTS.LOG_TIMESTAMP(DATA_rates_x_idx(end))), '; sample index: ', num2str(x_range.DATA(2))]);
endif
cur_sph = subplot(2, 2, 3);
% rates
hold on
legend_list = {};
if (n_DATA_samples > 0)
for i_field = 1 : length(rates.DATA.fields_to_plot_list)
legend_list{end+1} = rates.DATA.fields_to_plot_list{i_field};
cur_scaled_data = autorate_log.DATA.LISTS.(rates.DATA.fields_to_plot_list{i_field})(DATA_rates_x_idx) * rates.DATA.scale_factor;
plot(x_vec.DATA(DATA_rates_x_idx)', (rates.DATA.sign_list{i_field} * cur_scaled_data)', 'Color', rates.DATA.color_list{i_field}, 'Linestyle', rates.DATA.linestyle_list{i_field}, 'LineWidth', line_width);
endfor
endif
if (n_LOAD_samples > 0)
for i_field = 1 : length(rates.LOAD.fields_to_plot_list)
legend_list{end+1} = rates.LOAD.fields_to_plot_list{i_field};
cur_scaled_data = autorate_log.LOAD.LISTS.(rates.LOAD.fields_to_plot_list{i_field})(LOAD_rates_x_idx) * rates.LOAD.scale_factor;
plot(cur_sph, x_vec.LOAD(LOAD_rates_x_idx)', (rates.LOAD.sign_list{i_field} * cur_scaled_data)', 'Color', rates.LOAD.color_list{i_field}, 'Linestyle', rates.LOAD.linestyle_list{i_field}, 'LineWidth', line_width);
endfor
endif
if ~isempty(legend_list)
try
if strcmp(graphics_toolkit, 'gnuplot')
legend(legend_list, 'Interpreter', 'none', 'box', 'off', 'location', 'northoutside', 'FontSize', 7);
else
legend(legend_list, 'Interpreter', 'none', 'numcolumns', 2, 'box', 'off', 'location', 'northoutside', 'FontSize', 7);
endif
catch
disp('WARN: extended legend options not supported by this toolkit, falling back to a basic legend.');
legend(legend_list, 'Interpreter', 'none', 'box', 'off', 'FontSize', 7);
end_try_catch
endif
hold off
xlabel(x_label_string);
ylabel('Rate [Mbps]');
set(cur_sph, 'XLim', x_vec_range);
if (n_DATA_samples > 0)
cur_sph = subplot(2, 2, 4);
% delays
hold on
legend_list = {};
for i_field = 1 : length(delays.DATA.fields_to_plot_list)
legend_list{end+1} = delays.DATA.fields_to_plot_list{i_field};
cur_scaled_data = autorate_log.DATA.LISTS.(delays.DATA.fields_to_plot_list{i_field})(DATA_delays_x_idx) * delays.DATA.scale_factor;
plot(x_vec.DATA(DATA_delays_x_idx)', (delays.DATA.sign_list{i_field} * cur_scaled_data)', 'Color', delays.DATA.color_list{i_field}, 'Linestyle', delays.DATA.linestyle_list{i_field}, 'LineWidth', line_width);
endfor
if ~isempty(adjusted_ylim_delay)
set(cur_sph, 'YLim', (adjusted_ylim_delay * delays.DATA.scale_factor));
endif
if ~isempty(legend_list)
try
if strcmp(graphics_toolkit, 'gnuplot')
legend(legend_list, 'Interpreter', 'none', 'box', 'off', 'location', 'northoutside', 'FontSize', 7);
else
legend(legend_list, 'Interpreter', 'none', 'numcolumns', 3, 'box', 'off', 'location', 'northoutside', 'FontSize', 7);
endif
catch
legend(legend_list, 'Interpreter', 'none', 'box', 'off', 'FontSize', 7);
end_try_catch
endif
hold off
xlabel(x_label_string);
ylabel('Delay [milliseconds]');
set(cur_sph, 'XLim', x_vec_range);
endif
if isempty(plot_FQN)
cur_plot_FQN = fullfile(log_dir, [log_name, log_ext, '.timecourse', range_string, reflector_string, output_format_extension]);
else
cur_plot_FQN = fullfile(plot_path, [plot_name, '.timecourse', range_string, reflector_string, plot_ext]);
endif
disp(['INFO: Writing plot as: ', cur_plot_FQN]);
write_out_figure(autorate_fh, cur_plot_FQN, [], []);
endif
catch err
warning(err.identifier, err.message);
err
for i_stack = 1 : length(err.stack)
disp(['Stack #: ', num2str(i_stack), ':']);
disp(err.stack(i_stack));
endfor
disp('INFO: available graphics toolkits:');
disp(available_graphics_toolkits);
disp(['INFO: Selected graphics toolkit: ', graphics_toolkit]);
disp(['INFO: Octave version: ', version]);
disp('Please report any issue to https://github.com/lynxthecat/cake-autorate/issues and consider sharing the log file that revealed the problem.');
end_try_catch
% verbose exit
timestamps.(mfilename).end = toc(timestamps.(mfilename).start);
disp(['INFO: ', mfilename, ' took: ', num2str(timestamps.(mfilename).end), ' seconds.']);
return
endfunction
function [ autorate_log, log_FQN ] = fn_parse_autorate_logfile( log_FQN, command_string )
% variables
debug = 0;
delimiter_string = ";"; % what separator is used in the log file
line_increment = 100; % by what size to increment data structures on hitting the end
% enumerate all field names in HEADER that denote a string field on DATA records, otherwise default to numeric
string_field_identifier_list = {'RECORD_TYPE', 'LOG_DATETIME', 'REFLECTOR', '_LOAD_CONDITION'};
autorate_log = struct();
% global variables so we can grow these from helper functions without shuttling too much data around all the time...
global log_struct
log_struct = [];
log_struct.INFO = [];
log_struct.DEBUG = [];
log_struct.HEADER = [];
log_struct.DATA = [];
log_struct.LOAD_HEADER = [];
log_struct.LOAD = [];
log_struct.REFLECTOR_HEADER = [];
log_struct.REFLECTOR = [];
log_struct.INFO = [];
log_struct.SHAPER = [];
log_struct.metainformation = [];
log_struct.SUMMARY_HEADER = [];
log_struct.SUMMARY = [];
%TODO: merge current and old log file if they can be found...
if ~exist('log_FQN', 'var') || isempty(log_FQN)
% open a ui file picker
%[log_name, log_dir, fld_idx] = uigetfile("*.log", "Select one or more autorate log files:", "MultiSelect", "on");
[log_name, log_dir, fld_idx] = uigetfile({"*.log; *.log.old; *log.old.gz; *.log.gz; *.gz", "Known Log file extensions"}, "Select one or more autorate log files:");
log_FQN = fullfile(log_dir, log_name);
endif
if ~exist('command_string', 'var') || isempty(command_string)
command_string = 'load_existing';
endif
% dissect the fully qualified name
[log_dir, log_name, log_ext ] = fileparts(log_FQN);
% deal with gzipped log files
if strcmp(log_ext, '.GZ')
disp('INFO: Octave gunzip does not tolerate upper-case .GZ extensions, renaming to lower-case .gz');
movefile(log_FQN, fullfile(log_dir, [log_name, '.gz']));
log_FQN = fullfile(log_dir, [log_name, '.gz']);
[log_dir, log_name, log_ext ] = fileparts(log_FQN);
endif
if strcmp(log_ext, '.gz')
file_list = gunzip(log_FQN);
if (length(file_list) == 1)
orig_log_FQN = log_FQN;
log_FQN = file_list{1};
[log_dir, log_name, log_ext ] = fileparts(log_FQN);
else
error(['WARNING: Archive contains more than one file, bailing out: ', log_FQN]);
endif
endif
if exist(fullfile(log_dir, [log_name, log_ext, '.mat']), 'file') && strcmp(command_string, 'load_existing')
disp(['INFO: Found already parsed log file (', fullfile(log_dir, [log_name, log_ext, '.mat']), '), loading...']);
load(fullfile(log_dir, [log_name, log_ext, '.mat']));
return
endif
% now read the file line by line and steer each line into the correct structure.
% if the record types were not interleaved this would be easier
log_fd = fopen(log_FQN);
if log_fd == -1
error(["ERROR: Could not open: ", log_FQN]);
endif
cur_record_type = "";
% get started
disp(['INFO: Parsing log file: ', log_FQN]);
disp('INFO: might take a while...');
line_count = 0;
while (!feof(log_fd) )
% get the next line
current_line = fgetl(log_fd);
line_count = line_count + 1;
if (debug)
disp([num2str(line_count), ': ', current_line]);
endif
if (length(current_line) < 5)
disp('WARN: line shorter than 5 chars, skipping this line...');
continue
endif
cur_record_type = fn_get_record_type_4_line(current_line, delimiter_string, string_field_identifier_list);
try
fn_parse_current_line(cur_record_type, current_line, delimiter_string, line_increment);
catch
disp(['WARN: Parsing of line ', num2str(line_count), ' failed. Line content:']);
disp(current_line);
disp(['WARN: Will skip this line, but the rest of the parsing might fail, please check the raw log-file....']);
%keyboard
continue
end_try_catch
if ~(mod(line_count, 1000))
% give some feed back, however this is expensive so do so rarely
disp(['INFO: Processed line: ', num2str(line_count)]);
fflush(stdout) ;
endif
%disp(current_line)
endwhile
% clean-up
fclose(log_fd);
% shrink global data structures
fn_shrink_global_LISTS({"DEBUG", "INFO", "DATA", "SHAPER", "LOAD", "SUMMARY"});
% ready for export and saving
autorate_log = log_struct;
% save autorate_log as mat file...
disp(['INFO: Saving parsed data file as: ', fullfile(log_dir, [log_name, log_ext, '.mat'])]);
save(fullfile(log_dir, [log_name, log_ext, '.mat']), 'autorate_log', '-7');
if ~exist(fullfile(log_dir, [log_name, log_ext, '.gz']), 'file')
% compress the uncompressed log
FILELIST = gzip(fullfile(log_dir, [log_name, log_ext]));
endif
if exist(fullfile(log_dir, [log_name, log_ext]), 'file');
% delete the uncompressed log
delete(fullfile(log_dir, [log_name, log_ext]));
endif
return
endfunction
function in = isoctave()
persistent inout;
if isempty(inout),
inout = exist('OCTAVE_VERSION','builtin') ~= 0;
endif
in = inout;
return;
endfunction
function [ sanitized_name ] = sanitize_name_for_matlab( input_name )
% some characters are not really helpful inside matlab variable names, so
% replace them with something that should not cause problems
taboo_char_list = {' ', '-', '.', '=', '/', '[', ']'};
replacement_char_list = {'_', '_', '_dot_', '_eq_', '_', '_', '_'};
taboo_first_char_list = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
replacement_first_char_list = {'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine'};
sanitized_name = input_name;
% check first character to not be a number
taboo_first_char_idx = find(ismember(taboo_first_char_list, input_name(1)));
if ~isempty(taboo_first_char_idx)
sanitized_name = [replacement_first_char_list{taboo_first_char_idx}, input_name(2:end)];
endif
for i_taboo_char = 1: length(taboo_char_list)
current_taboo_string = taboo_char_list{i_taboo_char};
current_replacement_string = replacement_char_list{i_taboo_char};
current_taboo_processed = 0;
remain = sanitized_name;
tmp_string = '';
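% use strtok to split the name on the current taboo character and rejoin the pieces
% with the replacement string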
while (~current_taboo_processed)
[token, remain] = strtok(remain, current_taboo_string);
tmp_string = [tmp_string, token, current_replacement_string];
if isempty(remain)
current_taboo_processed = 1;
% we add one superfluous replacement string at the end, so
% remove that
tmp_string = tmp_string(1:end-length(current_replacement_string));
endif
endwhile
sanitized_name = tmp_string;
endfor
return
endfunction
function [ cur_record_type ] = fn_get_record_type_4_line( current_line, delimiter_string, string_field_identifier_list )
% define some information for the individual record types
global log_struct
cur_record_type = [];
% deal with CTRL-C?
%if strcmp(current_line(1:2), '')
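% dispatch on the first five characters of the line; five characters are enough to
% distinguish e.g. 'DATA;' data records from 'DATA_' header records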
switch current_line(1:5)
case {"DEBUG"}
cur_record_type = "DEBUG";
if ~isfield(log_struct.metainformation, 'DEBUG')
log_struct.metainformation.DEBUG.count = 1;
else
log_struct.metainformation.DEBUG.count = log_struct.metainformation.DEBUG.count + 1;
endif
case {"DATA_", "HEADE"}
cur_record_type = "HEADER";
if ~isfield(log_struct.metainformation, 'HEADER')
log_struct.metainformation.HEADER.count = 1;
else
log_struct.metainformation.HEADER.count = log_struct.metainformation.HEADER.count + 1;
endif
case {"DATA;"}
cur_record_type = "DATA";
if ~isfield(log_struct.metainformation, 'HEADER') || log_struct.metainformation.HEADER.count < 1
# we have not encountered a DATA_HEADER record yet and do not know how to parse DATA records, so SKIP
cur_record_type = "SKIP";
if ~isfield(log_struct.metainformation, 'SKIP_DATA')
log_struct.metainformation.SKIP_DATA.count = 1;
else
log_struct.metainformation.SKIP_DATA.count = log_struct.metainformation.SKIP_DATA.count + 1;
endif
disp(['Found DATA before DATA_HEADER record, unable to parse, skipping (N: ', num2str(log_struct.metainformation.SKIP_DATA.count), ').']);
else
# this is fine, we already found a header
if ~isfield(log_struct.metainformation, 'DATA')
log_struct.metainformation.DATA.count = 1;
else
log_struct.metainformation.DATA.count = log_struct.metainformation.DATA.count + 1;
endif