# functions.py
# Module imports
import os
import os.path
import sys
import re
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
from scipy.interpolate import griddata
from scipy.optimize import curve_fit
from functools import partial
from sklearn.metrics import r2_score
import logging
import pandas as pd
def extract_data(s, data_folder, file_keyword, file_extension, begin_data, end_data, tol=1e-3):
    """
    Function to extract data for a given sample, and apply corrections if correction files are found.
    :param s: string, sample ID used to match file names
    :param data_folder: folder containing the test files
    :param file_keyword: keyword identifying the test type in the file names
    :param file_extension: extension of the test files
    :param begin_data: string after which data begin in data files
    :param end_data: string before which data end in data files
    :param tol: (optional) tolerance for the x and y coordinates matching, default is 1e-3
    :return: data_points_list
    """
    # Gather the name of the initial test file
    sample_initial_thickness_file = [f for f in os.listdir(data_folder)
                                     if s in f and f.endswith(file_keyword + file_extension)][0]
    # Extract all data points from the first test file
    data_points_list = extract_data_from_file(os.path.join(data_folder, sample_initial_thickness_file),
                                              begin_data, end_data)
    # Apply corrections if correction files exist for this sample
    sample_correction_thickness_files = [f for f in os.listdir(data_folder)
                                         if s in f and file_keyword in f and f.endswith(file_extension)
                                         and sample_initial_thickness_file not in f]
if len(sample_correction_thickness_files) != 0:
logging.info("... Multiple test files found for this sample: launching data correction procedure")
data_points_list = correct_data_points(data_points_list, data_folder,
sample_initial_thickness_file, sample_correction_thickness_files,
begin_data, end_data, tol=tol)
return data_points_list
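# Usage sketch for extract_data (all names below are illustrative, not values
# taken from this project):
#
#     data_points = extract_data("sample01", "./data", "_thickness", ".txt",
#                                "<DATA>", "<END DATA>", tol=1e-3)
#     print(len(data_points), "measurement points extracted")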
def extract_data_from_file(file_path, begin_data, end_data):
    """
    Function to extract experimental data from the text file given as input.
    Multiple data points are stored in one file, each between <DATA> and <END DATA>.
    :param file_path: path of the file to process
    :param begin_data: string after which data begin in data files
    :param end_data: string before which data end in data files
    :return: data_points_list: list of time, positions and Fz for each point
    """
data_points_list = [] # List to store data sets as dictionaries
# Dictionary that defines initial data names (key) vs. cleaned-up data name (value) for future use in the code
data_cleanup = {"Time, s": "time",
"Position (z), mm": "pos_z",
"Position (x), mm": "pos_x",
"Position (y), mm": "pos_y",
"Fz, gf": "Fz"}
with open(file_path, 'r') as file:
file_content = file.read()
    # Use regex to find matches between begin_data and end_data
    # (re.escape protects any regex metacharacters contained in the marker strings)
    data_matches = re.finditer(re.escape(begin_data) + '(.*?)' + re.escape(end_data), file_content, re.DOTALL)
for match in data_matches:
data_block = match.group(1).strip()
# Split the data block into lines and extract column names and values
lines = data_block.split('\n')
column_names = lines[0].split('\t')
# Clean up column names
for c in range(len(column_names)):
if column_names[c] in data_cleanup:
column_names[c] = data_cleanup[column_names[c]]
data_values = [line.split('\t') for line in lines[1:]]
# Create a dictionary for each data set
data_set = {column: [] for column in column_names}
# Populate the dictionary with values
for values in data_values:
for i, column in enumerate(column_names):
data_set[column].append(float(values[i]))
# Convert to array
for k in data_set:
data_set[k] = np.array(data_set[k])
# Add the data set to the list
data_points_list.append(data_set)
return data_points_list
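# Illustrative layout of one data block in a test file (tab-separated; the exact
# column set depends on the acquisition software, the names below are the ones
# handled by data_cleanup):
#
#     <DATA>
#     Time, s    Position (z), mm    Position (x), mm    Position (y), mm    Fz, gf
#     0.00       -0.100              1.000               2.000               0.5
#     0.01       -0.105              1.000               2.000               0.7
#     <END DATA>
#
# Each block becomes one dictionary of NumPy arrays, with keys renamed by
# data_cleanup ("Time, s" -> "time", "Fz, gf" -> "Fz", etc.).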
def correct_data_points(data_points_list, data_folder, sample_initial_test_file, sample_correction_files, begin_data,
                        end_data, tol=1e-3):
    """
    Function used to correct data_points_list of the initial test data with correction data saved in correction files.
    :param data_points_list: data list as extracted by extract_data_from_file
    :param data_folder: data folder, containing initial and correction files
    :param sample_initial_test_file: test file containing data from the original test on this sample
    :param sample_correction_files: list of test files containing correction data for this sample
    :param begin_data: string after which data begin in data files
    :param end_data: string before which data end in data files
    :param tol: (optional) tolerance for the x and y coordinates matching, default is 1e-3
    :return: data_points_list, data list with corrections
    """
# Get x and y sensor position for initial data
xy_sensor_data = extract_xy_sensor(data_points_list, os.path.join(data_folder, sample_initial_test_file))
for i in range(len(sample_correction_files)):
logging.info("... Correction with data from: " + sample_correction_files[i])
        # Extract all data points from the secondary test file
data_file_path = os.path.join(data_folder, sample_correction_files[i])
data_points_list_correction = extract_data_from_file(data_file_path, begin_data, end_data)
# Get x and y sensor position for correction data
xy_sensor_corr = extract_xy_sensor(data_points_list_correction, data_file_path)
for j in range(len(data_points_list_correction)):
# Find line of corresponding point in initial data
pt_index = np.where((np.abs(xy_sensor_data[:, 0] - xy_sensor_corr[j, 0]) < tol) & # column 0 = x
(np.abs(xy_sensor_data[:, 1] - xy_sensor_corr[j, 1]) < tol)) # column 1 = y
if len(pt_index[0]) == 1:
pt_index = pt_index[0][0] # extract value from tuple result
elif len(pt_index[0]) == 0:
data_points_list.append([])
xy_sensor_data = np.append(xy_sensor_data, [xy_sensor_corr[j, :]], axis=0)
pt_index = len(data_points_list)-1
logging.warning("There is no coordinates in the initial data file (" + sample_initial_test_file +
") matching the point (x, y = " + str(xy_sensor_corr[j]) +
") extracted from the correction data file (" + sample_correction_files[i] + ").")
logging.warning("Adding a point at ID = " + str(pt_index+1) + ").")
logging.warning("If no point should be added, increase correction tolerance (current value: tol = " +
str(tol) + ").")
else:
logging.critical("There are multiple coordinates in the initial data file (" + sample_initial_test_file
+ ") matching the point (x, y = " + str(xy_sensor_corr[j]) +
") extracted from the correction data file (" + sample_correction_files[i] + ").")
logging.critical("Verify data or try to decrease correction tolerance (current value: tol = " +
str(tol) + ").")
raise ValueError
# Replace initial data with corrected data
data_points_list[pt_index] = data_points_list_correction[j]
logging.info("... point " + str(pt_index+1) + " replaced")
return data_points_list
def calculate_thickness(data_points_list):
"""
Function to calculate thickness and positions from raw data_points_list and convert them to arrays.
:param data_points_list: data list as extracted by extract_data_from_file
:return: result_thickness: dictionary containing arrays of positions and thickness for all measurement points
"""
# Initialize output dictionary
result_thickness = {'thickness': np.zeros(len(data_points_list)),
'pos_x': np.zeros(len(data_points_list)),
'pos_y': np.zeros(len(data_points_list)),
'ID': np.zeros(len(data_points_list))}
# Fill output dictionary
for i in range(len(data_points_list)):
result_thickness['thickness'][i] = -np.max(data_points_list[i]["pos_z"])
result_thickness['pos_x'][i] = np.mean(data_points_list[i]["pos_x"])
result_thickness['pos_y'][i] = np.mean(data_points_list[i]["pos_y"])
result_thickness['ID'][i] = i+1
return result_thickness
def remove_nan_values(x, y, data):
    """
    Remove points skipped in the extraction process, i.e. NaN ("not a number") values left when a test failed on a point.
    :param x: x position
    :param y: y position
    :param data: value (thickness or Young modulus) which can contain NaN values
    :return: cleaned arrays
    """
ind_to_remove = np.where(np.isnan(data))[0]
if len(ind_to_remove) != 0:
x = np.delete(x, ind_to_remove)
y = np.delete(y, ind_to_remove)
data = np.delete(data, ind_to_remove)
return x, y, data
def interpolate_data(x, y, z, nb_interp):
"""
Function to interpolate data for the thickness visualization.
:param x: x position from data
:param y: y position from data
:param z: z position from data
:param nb_interp: number of interpolation points
:return: xi, yi, zi: interpolated data
"""
# Create a regular grid
xi, yi = np.meshgrid(np.linspace(min(x), max(x), nb_interp),
np.linspace(min(y), max(y), nb_interp))
# Interpolate the values
zi = griddata((x, y), z, (xi, yi), method='cubic')
return xi, yi, zi
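# Usage sketch for interpolate_data (made-up scattered measurements):
#
#     x = np.array([0.0, 1.0, 0.0, 1.0, 0.5])
#     y = np.array([0.0, 0.0, 1.0, 1.0, 0.5])
#     z = np.array([1.0, 2.0, 2.0, 3.0, 2.0])
#     xi, yi, zi = interpolate_data(x, y, z, nb_interp=50)
#
# Note that with method='cubic', griddata returns NaN for grid nodes outside
# the convex hull of the measurement points.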
def extract_map(file_path, begin_map, end_map):
    """
    Function to extract map data from the text file given as input.
    Map data are stored between the begin_map and end_map marker lines.
    :param file_path: path of the map file
    :param begin_map: string contained in the line before the column name line (the entire line will be removed)
    :param end_map: string contained in the line after the point data (the entire line will be removed)
    :return: map_points: dictionary of arrays containing map values for each point
    """
with open(file_path, 'r') as file:
file_content = file.read()
    # Use regex to find matches between begin_map and end_map
    # (re.escape protects any regex metacharacters contained in the marker strings)
    data_matches = re.finditer(re.escape(begin_map) + '(.*?)' + re.escape(end_map), file_content, re.DOTALL)
for match in data_matches:
data_block = match.group(1).strip()
# Split the data block into lines and extract column names and values
lines = data_block.split('\n')[1:-1] # remove first and last lines (= containing start and end strings)
column_names = lines[0].split('\t')
data_values = [line.split('\t') for line in lines[1:]]
# Create a dictionary for each data set
map_points = {column: [] for column in column_names}
# Populate the dictionary with values
for values in data_values:
for i, column in enumerate(column_names):
if values[i] == "":
values[i] = 0
map_points[column].append(float(values[i]))
# Convert to array
for k in map_points:
map_points[k] = np.array(map_points[k])
return map_points
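# Illustrative .map block layout, deduced from the column names written by
# generate_map (the marker lines and the tab separators are assumptions):
#
#     <begin_map marker line>
#     PixelX    PixelY    PointID    ScanX(mm)    ScanY(mm)
#     512       384       1          0.000        0.000
#     530       384       2          -1.000       0.000
#     <end_map marker line>
#
# Empty cells are read as 0 by the loop above.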
def generate_map(data_points_list, ratio, Rot_mat, offset, data_file_path, prm):
"""
Function to generate missing map file, used to translate sensor coordinates to pixel (picture) coordinates.
:param data_points_list: data list as extracted by extract_data_from_file
:param ratio: expansion ratio between sensor and pixel coordinates
:param Rot_mat: rotation matrix between sensor and pixel coordinates
:param offset: translation value between sensor and pixel coordinates
:param data_file_path: path to data file used in warning message
:param prm: file parameters
:return: map_points: dictionary of x and y values in both sensor and pixel coordinates
"""
# Create map points dictionary results
map_points = {'PixelX': np.zeros((len(data_points_list)), dtype=int),
'PixelY': np.zeros((len(data_points_list)), dtype=int),
'PointID': np.arange(len(data_points_list))+1,
'ScanX(mm)': np.zeros((len(data_points_list))),
'ScanY(mm)': np.zeros((len(data_points_list)))}
# Extract position in sensor coordinates
xy_sensor = extract_xy_sensor(data_points_list, data_file_path)
# Store in map_points
map_points['ScanX(mm)'] = xy_sensor[:, 0]
map_points['ScanY(mm)'] = xy_sensor[:, 1]
# X-flip scan reference data
x_coordinate_adapted = -map_points['ScanX(mm)']
# Apply ratio
x_coordinate_adapted = x_coordinate_adapted * ratio
y_coordinate_adapted = map_points['ScanY(mm)'] * ratio
# Rotate data points
for i in range(len(map_points['ScanX(mm)'])):
map_points['PixelX'][i], map_points['PixelY'][i] = np.round(
np.matmul(Rot_mat, np.array([x_coordinate_adapted[i], y_coordinate_adapted[i]])), 0)
# Translate data points
map_points['PixelX'] += int(offset[0])
map_points['PixelY'] += int(offset[1])
return map_points
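# Sketch of a sensor-to-pixel calibration for generate_map, assuming a 90°
# rotation and a 10 px/mm expansion ratio (all values are illustrative):
#
#     theta = np.pi / 2
#     Rot_mat = np.array([[np.cos(theta), -np.sin(theta)],
#                         [np.sin(theta),  np.cos(theta)]])
#     map_points = generate_map(data_points_list, ratio=10.0, Rot_mat=Rot_mat,
#                               offset=(512, 384), data_file_path=path, prm=prm)
#
# The transform applied to each point is: x-flip, scale by ratio, rotate by
# Rot_mat, then translate by offset.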
def extract_xy_sensor(data_points_list, data_file_path, data_keys=('pos_x', 'pos_y')):
    """
    Function to extract the x and y sensor positions from data_points_list, with a safeguard in case a position is not
    constant over all time steps.
    :param data_points_list: data list as extracted by extract_data_from_file
    :param data_file_path: path to data file, used in warning messages
    :param data_keys: (optional) x and y position keys in data_points_list.
                      Default is ('pos_x', 'pos_y'), as defined in extract_data_from_file.
    :return: xy_sensor: array with extracted sensor position, column = [x, y]
    """
xy_sensor = np.zeros([len(data_points_list), 2])
for i in range(len(data_points_list)):
# Loop on data_keys entries
for k in data_keys:
# Get corresponding column number for xy_sensor
c = data_keys.index(k)
            if np.all(data_points_list[i][k] == data_points_list[i][k][0]):
                # Take the first time value if the recorded position is identical at all test times
                xy_sensor[i, c] = data_points_list[i][k][0]
            else:
                # Take the average if the recorded position changes during the test
                logging.warning("Position " + k + " is not constant over time for point: " + str(i) +
                                " in data: " + data_file_path + ". Taking the average value rounded at 1e-6.")
                xy_sensor[i, c] = np.round(np.average(data_points_list[i][k]), 6)
return xy_sensor
def calculate_fz(z, E, Fini, R, nu):
    """
    Function to calculate Fz with the Hertz contact equation for a spherical indenter.
    :param z: penetration, in mm (given by data)
    :param E: Young modulus, determined by data fit
    :param Fini: force at the beginning of the fit range, determined by data fit
    :param R: indenter radius (given in parameters)
    :param nu: Poisson coefficient (given in parameters)
    :return: Fz (force) value, used to fit experimental data
    """
return (4/3)*(np.sqrt(R)/(1-nu**2))*E*(np.abs(z)**(3/2)) + Fini
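# Numeric sanity check for calculate_fz (illustrative values): a 0.5 mm radius
# indenter, nu = 0.5, E = 10 (in gf/mm² at fit time), 0.1 mm penetration and no
# initial force:
#
#     fz = calculate_fz(z=-0.1, E=10.0, Fini=0.0, R=0.5, nu=0.5)
#     # fz = (4/3) * (sqrt(0.5) / 0.75) * 10 * 0.1**1.5 ≈ 0.398 gf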
def skip_indentation_point(i, result_indentation):
    """
    Skip a point if its indentation test failed.
    :param i: point ID
    :param result_indentation: initial dictionary
    :return: result_indentation: updated dictionary
    """
result_indentation['E'][i] = np.nan
result_indentation['Fz_0'][i] = np.nan
result_indentation['corr_coeff'][i] = np.nan
result_indentation['pos_z_fit_range'].append([])
result_indentation['Fz_fit_range'].append([])
result_indentation['Fz_fitted_curve'].append(np.nan)
logging.warning("Indentation test failed for data point:" + str(i + 1) + ".")
return result_indentation
def calculate_young_modulus(data_points_list, thickness, prm_indentation, prm):
    """
    Function to calculate the Young modulus from Fz curve fitting.
    :param data_points_list: data extracted from indentation experiments
    :param thickness: data extracted from thickness experiments
    :param prm_indentation: indentation parameter object, containing namely:
        * fit_start_thickness_percent : start value for the Fz fit
        * fit_range_thickness_percent : range value for the Fz fit
        * radius : sensor radius, in mm
        * nu : Poisson coefficient value
        * gravity : standard acceleration of gravity, in m/s², used to convert gf/mm² to kPa
    :param prm: file parameters
    :return: result_indentation: dictionary containing fit results for all measurement points
        * pos_x: array of x positions for each point
        * pos_y: array of y positions for each point
        * E: array of Young modulus for each point
        * Fz_0: array of Fz origins on the fitting range for each point
        * corr_coeff: array of correlation coefficients for each point
        * pos_z_fit_range: list of pos_z arrays used for the fit, one per point
        * Fz_fit_range: list of Fz arrays used for the fit, one per point
        * Fz_fitted_curve: list of fitted Fz curves, one per point
    """
# Initialization (note: pos_z_fit_range, Fz_fit_range and Fz_fitted_curve are lists as the number of points to
# consider on the Fz curve is not the same from one point to the other)
result_indentation = {'pos_x': np.zeros(len(data_points_list)),
'pos_y': np.zeros(len(data_points_list)),
'ID': np.zeros(len(data_points_list)),
'E': np.zeros(len(data_points_list)),
'Fz_0': np.zeros(len(data_points_list)),
'corr_coeff': np.zeros(len(data_points_list)),
'pos_z_fit_range': [],
'Fz_fit_range': [],
'Fz_fitted_curve': []}
for i in range(len(data_points_list)):
# Get x and y coordinates for the current point
result_indentation['pos_x'][i] = np.mean(data_points_list[i]["pos_x"])
result_indentation['pos_y'][i] = np.mean(data_points_list[i]["pos_y"])
result_indentation['ID'][i] = i+1
# Define data
pos_z = data_points_list[i]["pos_z"]
Fz = data_points_list[i]["Fz"]
# Fit range from fit_start_thickness_percent
thickness_min = thickness[i] * (1 - prm_indentation.fit_start_thickness_percent / 100)
ind_z_min = np.min(np.where(pos_z > -thickness_min))
pos_z_fit_range = pos_z[ind_z_min:]
Fz_fit_range = Fz[ind_z_min:]
        # Security check: the fit range excludes the first contact spike (Fz overshoot, with Fz below 0 for a few points)
ind_Fz_negative = np.where(Fz_fit_range < 0)[0]
if ind_Fz_negative.size > 0:
ind_Fz_min = np.max(ind_Fz_negative) + 1
if ind_Fz_min == len(Fz_fit_range):
# Skip this point: there is no positive value for Fz, so test failed
result_indentation = skip_indentation_point(i, result_indentation)
continue
# Remove negative values from fit
Fz_fit_range = Fz_fit_range[ind_Fz_min:]
pos_z_fit_range = pos_z_fit_range[ind_Fz_min:]
# Stopping point with thickness range
pos_z_max = pos_z_fit_range[0] + thickness[i] * prm_indentation.fit_range_thickness_percent / 100
if pos_z_fit_range[-1] < pos_z_max:
# Skip this point: test stopped before reaching pos_z_max, so test failed
logging.warning("Indentation test range not met: range is " +
str(round((pos_z_fit_range[-1]-pos_z_fit_range[0])/thickness[i]*100, 1)) +
"% of thickness value, whereas wanted fit_range_thickness_percent = " +
str(prm_indentation.fit_range_thickness_percent) +
"%. Skipping this point in Young modulus extraction")
result_indentation = skip_indentation_point(i, result_indentation)
continue
ind_z_max = np.max(np.where(pos_z_fit_range < pos_z_max))
pos_z_fit_range = pos_z_fit_range[:ind_z_max]
Fz_fit_range = Fz_fit_range[:ind_z_max]
# Least square fit with some parameters (R, nu) given
fitfunc = partial(calculate_fz, R=prm_indentation.radius, nu=prm_indentation.nu)
[E, Fz_0], _ = curve_fit(fitfunc, pos_z_fit_range-pos_z_fit_range[0], Fz_fit_range, method='lm')
Fz_fitted_curve = np.array([calculate_fz(z-pos_z_fit_range[0], E, Fz_0, prm_indentation.radius, prm_indentation.nu)
for z in pos_z_fit_range])
E = E*prm_indentation.gravity
corr_coeff = r2_score(Fz_fit_range, Fz_fitted_curve)
# Store results
result_indentation['E'][i] = E
result_indentation['Fz_0'][i] = Fz_0
result_indentation['corr_coeff'][i] = corr_coeff
result_indentation['pos_z_fit_range'].append(pos_z_fit_range)
result_indentation['Fz_fit_range'].append(Fz_fit_range)
result_indentation['Fz_fitted_curve'].append(Fz_fitted_curve)
return result_indentation
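# Unit note for calculate_young_modulus: Fz is recorded in gf and lengths in mm,
# so curve_fit returns E in gf/mm². Multiplying by prm_indentation.gravity
# (9.80665 m/s²) converts it to kPa, since 1 gf/mm² = 9.80665e-3 N/mm²
# = 9.80665 kPa.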
def plot_thickness_z_curve(s, i, data_point, thickness_value):
"""
Function to plot thickness curves, used for verification.
:param s: string, sample name
:param i: integer, point ID
:param data_point: indentation data results for the given point
:param thickness_value: thickness value for the given point
:return: fig object, 2D graph for data at sensor coordinates
"""
fig = plt.figure()
plt.plot([data_point['time'][0], data_point['time'][-1]], [thickness_value, thickness_value], '--b')
plt.plot(data_point['time'], -data_point['pos_z'], 'k-')
plt.ylabel("-(pos_z) [mm]")
plt.xlabel("time [s]")
plt.legend(["thickness value [mm] = " + str(round(thickness_value, 3))])
plt.title(s + " - point ID " + str(i+1))
return fig
def plot_interpolated_surface(s, result_dict, data_key, data_unit, nb_interp):
    """
    Function to plot an interpolated data surface on a 2D graph.
    :param s: string of the sample ID
    :param result_dict: dictionary containing results extracted from the test, namely:
        * pos_x: x position extracted from data
        * pos_y: y position extracted from data
        * the data_key entry: data extracted
    :param data_key: key of the data to plot in result_dict
    :param data_unit: string, unit of the data, used in the color bar label
    :param nb_interp: number of interpolation points
    :return: fig object, 2D graph for data at sensor coordinates
    """
# Prepare data
x_clean, y_clean, data_clean = remove_nan_values(result_dict['pos_x'], result_dict['pos_y'], result_dict[data_key])
x_interp, y_interp, data_interp = interpolate_data(x_clean, y_clean, data_clean, nb_interp)
# Create graph
fig = plt.figure()
ax = fig.add_subplot(111)
pcm = ax.pcolormesh(x_interp, y_interp, data_interp, shading='nearest', cmap="jet",
vmin=np.min(data_clean), vmax=np.max(data_clean))
fig.colorbar(pcm, label=data_key+" "+data_unit)
plt.scatter(x_clean, y_clean, c=data_clean, ec='k', cmap="jet", vmin=np.min(data_clean), vmax=np.max(data_clean))
plt.xlabel("x [mm]")
plt.ylabel("y [mm]")
plt.title(s + " - sensor coordinates")
return fig
def plot_thickness_3d(s, result_thickness, nb_interp):
    """
    Function to plot the thickness interpolation surface on a 3D graph.
    :param s: ID of the sample treated
    :param result_thickness: dictionary containing results extracted from the thickness test, namely:
        * thickness: thickness extracted from data
        * pos_x: x position extracted from data
        * pos_y: y position extracted from data
    :param nb_interp: number of interpolation points
    :return: fig object, 3D graph for thickness at sensor coordinates
    """
# Prepare data
x_clean, y_clean, thickness_clean = remove_nan_values(result_thickness['pos_x'], result_thickness['pos_y'],
result_thickness['thickness'])
x_interp, y_interp, thickness_interp = interpolate_data(x_clean, y_clean, thickness_clean, nb_interp)
# Create graph
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
surf = ax.plot_surface(x_interp, y_interp, thickness_interp, cmap='jet', vmin=np.min(thickness_clean),
vmax=np.max(thickness_clean))
fig.colorbar(surf, ax=ax, label='thickness [mm]')
ax.set_xlabel('x [mm]')
ax.set_ylabel('y [mm]')
ax.set_zlabel('thickness [mm]')
plt.title(s + " - sensor coordinates")
return fig
def plot_on_picture(s, data, prm, nb_interp, colorbar_label="", contour_plot=False):
    """
    Function to plot a 2D interpolated data surface on the photo taken during experiments.
    :param s: ID of the sample treated
    :param data: array of values to plot, sorted by point ID (may contain NaN for failed points)
    :param prm: general parameters
    :param nb_interp: number of interpolation points
    :param colorbar_label: string, label of the color bar. Used if contour_plot=False
    :param contour_plot: (optional) boolean that is True for a contour plot, False for a regular surface plot
    :return: fig object, data plotted on the pixel picture
    """
# Find picture file: contains sample name and picture keyword, and ends with appropriate extension
image_file = [f for f in os.listdir(prm.data_folder) if s in f and prm.picture_keyword in f and
f.endswith(prm.picture_extension)]
    if len(image_file) == 0:
        logging.critical("Image missing for this sample (" + s + "). Check the image file names in the folder: " +
                         str(os.listdir(prm.data_folder)))
        raise FileNotFoundError("No image file found for sample " + s + ".")
    elif len(image_file) > 1:
        logging.warning("There is more than one image file with the same ID (" + s + "). Using image: " + image_file[0])
image_file = image_file[0]
# Load image
img = np.asarray(Image.open(os.path.join(prm.data_folder, image_file)))
# Extract map
map_points = extract_map(os.path.join(prm.data_folder, s + "_map.map"),
prm.begin_map, prm.end_map)
# Sort map by point ID (security check)
vect_px = np.array((map_points['PixelX'], map_points['PixelY'], map_points['PointID']))
vect_px_sorted_by_point_id = vect_px[:, vect_px[2, :].argsort()]
    # Remove failed test points (NaN values)
x_clean, y_clean, data_clean = remove_nan_values(vect_px_sorted_by_point_id[0, :], vect_px_sorted_by_point_id[1, :],
data)
# Interpolate data on pixel map
xpx, ypx, zpx = interpolate_data(x_clean, y_clean, data_clean, nb_interp)
# 2D graph with pixel image coordinates
fig = plt.figure()
ax = fig.add_subplot(111)
plt.imshow(img)
if not contour_plot:
pcm = ax.pcolormesh(xpx, ypx, zpx, shading='nearest', cmap="jet", alpha=prm.alpha, vmin=np.min(data_clean),
vmax=np.max(data_clean))
fig.colorbar(pcm, label=colorbar_label)
# Scatter plot of measurement points
plt.scatter(vect_px_sorted_by_point_id[0, :], vect_px_sorted_by_point_id[1, :], s=2., c="k")
else:
        # Surface plot with more transparency
ax.pcolormesh(xpx, ypx, zpx, shading='nearest', cmap="jet", alpha=prm.alpha/2, vmin=np.min(data_clean),
vmax=np.max(data_clean))
# Contour plot with inline labels
CS = ax.contour(xpx, ypx, zpx, cmap="jet", vmin=np.min(data_clean), vmax=np.max(data_clean))
ax.clabel(CS, inline=True, inline_spacing=-2, fontsize=8)
if prm.cropping_frame != 0:
# Perform cropping if wanted, with (0,0) at upper left corner
plt.xlim(np.min(xpx)-prm.cropping_frame, np.max(xpx)+prm.cropping_frame)
plt.ylim(np.max(ypx)+prm.cropping_frame, np.min(ypx)-prm.cropping_frame)
plt.title(s)
plt.axis('off')
return fig
def plot_comparison_on_picture(s, data_surface, data_contour, prm, nb_interp, colorbar_label=""):
    """
    Function to plot two interpolated data sets on the photo taken during experiments: data_surface as a surface plot
    and data_contour as a contour plot.
    :param s: ID of the sample treated
    :param data_surface: array of values to plot as a surface, sorted by point ID
    :param data_contour: array of values to plot as contours, sorted by point ID
    :param prm: general parameters
    :param nb_interp: number of interpolation points
    :param colorbar_label: string, label of the color bar for the surface plot
    :return: fig object, data plotted on the pixel picture
    """
# Find picture file: contains sample name and picture keyword, and ends with appropriate extension
image_file = [f for f in os.listdir(prm.data_folder) if s in f and prm.picture_keyword in f and
f.endswith(prm.picture_extension)]
    if len(image_file) == 0:
        logging.critical("Image missing for this sample (" + s + "). Check the image file names in the folder: " +
                         str(os.listdir(prm.data_folder)))
        raise FileNotFoundError("No image file found for sample " + s + ".")
    elif len(image_file) > 1:
        logging.warning("There is more than one image file with the same ID (" + s + "). Using image: " + image_file[0])
image_file = image_file[0]
# Load image
img = np.asarray(Image.open(os.path.join(prm.data_folder, image_file)))
# Extract map
map_points = extract_map(os.path.join(prm.data_folder, s + "_map.map"),
prm.begin_map, prm.end_map)
# Sort map by point ID (security check)
vect_px = np.array((map_points['PixelX'], map_points['PixelY'], map_points['PointID']))
vect_px_sorted_by_point_id = vect_px[:, vect_px[2, :].argsort()]
# 2D graph with pixel image coordinates
fig = plt.figure()
ax = fig.add_subplot(111)
plt.imshow(img)
# Plot data_surface with surface plot
x_clean, y_clean, data_surface_clean = remove_nan_values(vect_px_sorted_by_point_id[0, :],
vect_px_sorted_by_point_id[1, :], data_surface)
xpx, ypx, zpx = interpolate_data(x_clean, y_clean, data_surface_clean, nb_interp)
pcm = ax.pcolormesh(xpx, ypx, zpx, shading='nearest', cmap="jet", alpha=prm.alpha, vmin=np.min(data_surface_clean),
vmax=np.max(data_surface_clean))
fig.colorbar(pcm, label=colorbar_label)
# Plot data_contour with contour plot
x_clean, y_clean, data_contour_clean = remove_nan_values(vect_px_sorted_by_point_id[0, :],
vect_px_sorted_by_point_id[1, :], data_contour)
xpx, ypx, zpx = interpolate_data(x_clean, y_clean, data_contour_clean, nb_interp)
CS = ax.contour(xpx, ypx, zpx, cmap="jet", vmin=np.min(data_contour_clean), vmax=np.max(data_contour_clean))
ax.clabel(CS, inline=True, inline_spacing=-2, fontsize=8)
if prm.cropping_frame != 0:
# Perform cropping if wanted, with (0,0) at upper left corner
plt.xlim(np.min(xpx)-prm.cropping_frame, np.max(xpx)+prm.cropping_frame)
plt.ylim(np.max(ypx)+prm.cropping_frame, np.min(ypx)-prm.cropping_frame)
plt.title(s)
plt.axis('off')
return fig
def plot_fz_curve_fit(s, i, data_point, result_indentation, fit_range_thickness_percent):
"""
Function to plot Fz evolution with z position of the sensor and the curve fit used to extract the Young modulus.
:param s: string, sample name
:param i: integer, point ID
:param data_point: indentation data results for the current point
:param result_indentation: dictionary containing fit results for all measurement points
:param fit_range_thickness_percent: thickness range used for the fit, used in the legend
:return: fig object, Fz graph for the given point
"""
fig = plt.figure()
plt.plot(data_point['pos_z'], data_point['Fz'], "--k", linewidth=0.5)
if len(result_indentation['pos_z_fit_range'][i]) != 0:
# Plot fitting range and curve, if test has not failed
plt.plot(result_indentation['pos_z_fit_range'][i], result_indentation['Fz_fit_range'][i], "-k", linewidth=0.5)
plt.plot(result_indentation['pos_z_fit_range'][i], result_indentation['Fz_fitted_curve'][i], '-b')
plt.plot([result_indentation['pos_z_fit_range'][i][0], result_indentation['pos_z_fit_range'][i][0]],
[np.min(data_point["Fz"]), np.max(data_point["Fz"])],
"-k", linewidth=0.5)
plt.plot([result_indentation['pos_z_fit_range'][i][-1], result_indentation['pos_z_fit_range'][i][-1]],
[np.min(data_point["Fz"]), np.max(data_point["Fz"])],
"-k", linewidth=0.5)
plt.xlabel("pos_z [mm]")
plt.ylabel("Fz [gF]")
plt.title(s + " - point ID " + str(i+1))
plt.legend(["original data", "data used for the fit search (" + str(fit_range_thickness_percent) +
" % of point thickness)", "curve fit for E=" + str(round(result_indentation['E'][i], 2)) +
" kPa (R² = " + str(round(result_indentation['corr_coeff'][i], 2)) + ")"])
return fig
def highlight_bad_correlation(s, threshold, column):
    """
    Styling function to highlight bad correlation coefficients.
    :param s: pandas Series, one row of the styled DataFrame
    :param threshold: correlation coefficient value at or below which the row is highlighted in red
    :param column: name of the correlation coefficient column
    :return: list of CSS styles, one per entry of the row
    """
    is_min = pd.Series(data=False, index=s.index)
    is_min[column] = s.loc[column] <= threshold
    return ['color: red' if is_min.any() else '' for v in is_min]
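# Usage sketch for highlight_bad_correlation with a pandas Styler (the DataFrame
# layout and the 0.8 threshold are assumptions):
#
#     df = pd.DataFrame({'ID': result_indentation['ID'],
#                        'E': result_indentation['E'],
#                        'corr_coeff': result_indentation['corr_coeff']})
#     styled = df.style.apply(highlight_bad_correlation, threshold=0.8,
#                             column='corr_coeff', axis=1)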