Commit 2a55a71: more plots
raphaelchang committed Nov 17, 2019 (1 parent: d7454bd)
Showing 15 changed files with 263 additions and 81 deletions.
3 changes: 2 additions & 1 deletion launch/matching_eval.launch
@@ -1,7 +1,7 @@
<launch>
<arg name="bag_file" default="" />
<arg name="detector_type" default="ORB" />
<arg name="detector_params" default="{nfeatures: 100}" />
<arg name="detector_params" default="{nfeatures: 50}" />
<arg name="descriptor_type" default="ORB" />
<arg name="descriptor_params" default="{}" />
<arg name="camera_file" default="$(find omni_slam_eval)/launch/default_camera.yaml" />
@@ -24,6 +24,7 @@
matcher_max_dist: $(arg matcher_thresh)
feature_overlap_threshold: 0.5
feature_distance_threshold: 10
vignette_expansion: 0.01
</rosparam>
</node>
<rosparam command="load" file="$(arg camera_file)" ns="omni_slam_matching_eval_node" />
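The detector_params argument is a YAML dictionary string of detector settings. The eval node itself is not shown in this diff, but the new default above maps naturally onto OpenCV's detector factories; a minimal Python sketch of that mapping (only the ORB value mirrors the launch default, everything else is illustrative):

    import yaml
    import cv2

    detector_type = 'ORB'
    detector_params = yaml.safe_load('{nfeatures: 50}')  # -> {'nfeatures': 50}

    # ORB's nfeatures caps the number of keypoints returned, which is what the
    # launch default lowers from 100 to 50.
    detector = cv2.ORB_create(**detector_params)
    print(detector.getMaxFeatures())  # 50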
5 changes: 3 additions & 2 deletions launch/tracking_eval.launch
@@ -1,8 +1,8 @@
<launch>
<arg name="bag_file" default="" />
<arg name="camera_file" default="$(find omni_slam_eval)/launch/default_camera.yaml" />
<arg name="results_file" default="$(eval ''.join(arg('bag_file').split('.')[:-1]) + '.' + ''.join(arg('camera_file').split('/')[-1].split('.')[:-1]) + '.tracking.hdf5')" />
<arg name="rate" default="1" />
<arg name="camera_file" default="$(find omni_slam_eval)/launch/default_camera.yaml" />
<arg name="results_file" default="$(eval ''.join(arg('bag_file').split('.')[:-1]) + '.' + ''.join(arg('camera_file').split('/')[-1].split('.')[:-1]) + '.' + str(arg('rate')) + 'x.tracking.hdf5')" />
<node pkg="omni_slam_eval" type="omni_slam_tracking_eval_node" name="omni_slam_tracking_eval_node" required="true" output="screen">
<param name="bag_file" value="$(arg bag_file)" />
<param name="results_file" value="$(arg results_file)" />
@@ -24,6 +24,7 @@
min_features_per_region: 10
max_features_per_region: 999999
keyframe_interval: 1
vignette_expansion: 0.05
</rosparam>
</node>
<rosparam command="load" file="$(arg camera_file)" ns="omni_slam_tracking_eval_node" />
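The results_file default now folds the rate multiplier into the output name. A worked Python equivalent of the $(eval ...) expression above, using hypothetical paths:

    # Hypothetical inputs; only the naming logic mirrors the launch file.
    bag_file = '/data/runs/yaw.bag'
    camera_file = '/path/to/omni_slam_eval/launch/fov250.yaml'
    rate = 2

    results_file = (''.join(bag_file.split('.')[:-1]) + '.'
                    + ''.join(camera_file.split('/')[-1].split('.')[:-1])
                    + '.' + str(rate) + 'x.tracking.hdf5')
    print(results_file)  # /data/runs/yaw.fov250.2x.tracking.hdf5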
66 changes: 53 additions & 13 deletions scripts/matching_eval_plot.py
@@ -29,7 +29,7 @@
df = pd.DataFrame(stats)
stats = df.groupby(0).mean().to_records()
stats = stats.view(np.float64).reshape(len(stats), -1)
framediff, nmatch, prec, rec = stats.T
framediff, nmatch, prec, rec, rep = stats.T

fig = plt.figure()
fig.suptitle('Matching - detector={}, descriptor={}, chi={}, alpha={}, fx={}, fy={}, cx={}, cy={}'.format(attrs["detector_type"], attrs["descriptor_type"], attrs["chi"][0], attrs["alpha"][0], attrs["fx"][0], attrs["fy"][0], attrs["cx"][0], attrs["cy"][0]))
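The per-frame-difference averages now unpack a fifth column, rep, alongside match count, precision, and recall (presumably a repeatability measure written by the eval node; that reading is an assumption). A self-contained, slightly more explicit version of the averaging step with toy numbers:

    import numpy as np
    import pandas as pd

    # Columns: frame difference, matches, precision, recall, rep (toy values).
    stats = np.array([[1, 120, 0.90, 0.80, 0.70],
                      [1, 100, 0.85, 0.75, 0.65],
                      [2,  80, 0.70, 0.60, 0.55]])
    df = pd.DataFrame(stats)
    avg = df.groupby(0).mean().reset_index().to_numpy()
    framediff, nmatch, prec, rec, rep = avg.T
    print(framediff, rep)  # [1. 2.] [0.675 0.55]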
@@ -88,10 +88,10 @@
# match_dict = [[] for i in range(10)]
# for row in good_radial_distances:
# r = int(min(row[0], 0.499999) / 0.05)
# match_dict[r].append((row[1], 0))
# match_dict[r].append((row[2], 0))
# for row in bad_radial_distances:
# r = int(min(row[0], 0.499999) / 0.05)
# match_dict[r].append((row[1], 1))
# match_dict[r].append((row[2], 1))
# si_bins = [0] * 10
# for r in range(len(match_dict)):
# match_dict[r] = sorted(match_dict[r], key=lambda x: x[0])
@@ -129,14 +129,28 @@
valid_good = [False for i in range(num_bins)]
valid_bad = [False for i in range(num_bins)]
valid = [False for i in range(num_bins)]
# for row in good_radial_distances:
# r = int(min(row[0], 0.499999) / (0.5 / num_bins))
# X_good[r] = np.append(X_good[r], row[2])
# labels_good[r] = np.append(labels_good[r], 0)
# valid_good[r] = True
# for row in bad_radial_distances:
# r = int(min(row[0], 0.499999) / (0.5 / num_bins))
# X_bad[r] = np.append(X_bad[r], row[2])
# labels_bad[r] = np.append(labels_bad[r], 1)
# valid_bad[r] = True
for row in good_radial_distances:
r = int(min(row[0], 0.499999) / (0.5 / num_bins))
X_good[r] = np.append(X_good[r], row[1])
if np.isnan(row[1]):
continue
r = int(min(row[1], np.pi) / (np.pi / num_bins))
X_good[r] = np.append(X_good[r], row[2])
labels_good[r] = np.append(labels_good[r], 0)
valid_good[r] = True
for row in bad_radial_distances:
r = int(min(row[0], 0.499999) / (0.5 / num_bins))
X_bad[r] = np.append(X_bad[r], row[1])
for row in bad_radial_distances:
if np.isnan(row[1]):
continue
r = int(min(row[1], np.pi) / (np.pi / num_bins))
X_bad[r] = np.append(X_bad[r], row[2])
labels_bad[r] = np.append(labels_bad[r], 1)
valid_bad[r] = True
for i in range(num_bins):
@@ -150,9 +164,10 @@
if len(X_bad[i]) > num_samples:
idx_bad = np.random.choice(np.arange(len(X_bad[i])), num_samples, replace=False)
sr[i] = sklearn.metrics.silhouette_score(np.concatenate((X_good[i][idx_good], X_bad[i][idx_bad])).reshape(-1, 1), np.concatenate((labels_good[i][idx_good], labels_bad[i][idx_bad])), metric = 'l1')
ax4.plot([i * 0.5 / num_bins + 0.5 / num_bins / 2 for i in range(0, len(sr)) if valid[i]], [sr[i] for i in range(0, len(sr)) if valid[i]])
# ax4.plot([i * 0.5 / num_bins + 0.5 / num_bins / 2 for i in range(0, len(sr)) if valid[i]], [sr[i] for i in range(0, len(sr)) if valid[i]])
ax4.plot([i * np.pi / num_bins + np.pi / num_bins / 2 for i in range(0, len(sr)) if valid[i]], [sr[i] for i in range(0, len(sr)) if valid[i]])

df = pd.DataFrame({'Delta radial distance': ['{}-{}'.format(r * 0.05, (r + 1) * 0.05) for r in (np.minimum(np.hstack((good_radial_distances[:, 0], bad_radial_distances[:, 0])), 0.499999) / 0.05).astype(int)], 'Descriptor distance': np.hstack((good_radial_distances[:, 1], bad_radial_distances[:, 1])), 'Match': ['Good' for i in range(len(good_radial_distances))] + ['Bad' for i in range(len(bad_radial_distances))]})
df = pd.DataFrame({'Delta radial distance': ['{}-{}'.format(r * 0.05, (r + 1) * 0.05) for r in (np.minimum(np.hstack((good_radial_distances[:, 0], bad_radial_distances[:, 0])), 0.499999) / 0.05).astype(int)], 'Descriptor distance': np.hstack((good_radial_distances[:, 2], bad_radial_distances[:, 2])), 'Match': ['Good' for i in range(len(good_radial_distances))] + ['Bad' for i in range(len(bad_radial_distances))]})
sns.violinplot(x="Delta radial distance", y="Descriptor distance", hue="Match", data=df, split=True, ax=ax5, palette="Set2", inner="quart")
handles, labels = ax5.get_legend_handles_labels()
ax5.legend(handles=handles[0:], labels=labels[0:], fontsize='small')
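Each angular bin above gets a silhouette coefficient over the 1-D descriptor distances of good (label 0) versus bad (label 1) matches, with each class subsampled to at most num_samples. A standalone toy example of the same scikit-learn call, on synthetic distances:

    import numpy as np
    import sklearn.metrics

    rng = np.random.default_rng(0)
    good = rng.normal(40.0, 8.0, 500)   # synthetic descriptor distances, good matches
    bad = rng.normal(70.0, 8.0, 500)    # synthetic descriptor distances, bad matches

    X = np.concatenate((good, bad)).reshape(-1, 1)
    labels = np.concatenate((np.zeros(len(good)), np.ones(len(bad))))
    s = sklearn.metrics.silhouette_score(X, labels, metric='l1')
    print(s)  # near +1 when the two populations separate cleanly, near 0 when they overlap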
@@ -181,6 +196,7 @@
nmatch = dict()
prec = dict()
rec = dict()
rep = dict()
detdesclist = []
for filename in os.listdir(os.path.dirname(args.results_path)):
bagname = os.path.splitext(os.path.basename(args.results_path))[0]
@@ -207,10 +223,11 @@
df = pd.DataFrame(stats)
statsavg = df.groupby(0).mean().to_records()
statsavg = statsavg.view(np.float64).reshape(len(statsavg), -1)
framediff[detdesc], nmatch[detdesc], prec[detdesc], rec[detdesc] = statsavg.T
framediff[detdesc], nmatch[detdesc], prec[detdesc], rec[detdesc], rep[detdesc] = statsavg.T

if len(detdesclist) > 0:
fig = plt.figure()
sns.set()

detdesclist = sorted(detdesclist)
legendlist = []
@@ -293,7 +310,7 @@

df = pd.DataFrame()
for d, detdesc in enumerate(detdesclist):
matches = np.hstack((good_radial_distances[detdesc][:, 1], bad_radial_distances[detdesc][:, 1]))
matches = np.hstack((good_radial_distances[detdesc][:, 2], bad_radial_distances[detdesc][:, 2]))
matches /= matches.max()
df = df.append(pd.DataFrame({legendtitle: [legendlist[d] for i in range(len(matches))], 'Normalized descriptor distance': matches, 'Match': ['Good' for i in range(len(good_radial_distances[detdesc]))] + ['Bad' for i in range(len(bad_radial_distances[detdesc]))]}))
sns.violinplot(x=legendtitle, y="Normalized descriptor distance", hue="Match", data=df, split=True, ax=ax5, palette="Set2", inner="quart")
@@ -363,10 +380,33 @@
idx_good = np.random.choice(np.arange(len(idx_good)), num_samples, replace=False)
if len(idx_bad) > num_samples:
idx_bad = np.random.choice(np.arange(len(idx_bad)), num_samples, replace=False)
s = sklearn.metrics.silhouette_score(np.concatenate((good_radial_distances[detdesc][idx_good, 1], bad_radial_distances[detdesc][idx_bad, 1])).reshape(-1, 1), np.concatenate((labels_good[idx_good], labels_bad[idx_bad])), metric = 'l1')
s = sklearn.metrics.silhouette_score(np.concatenate((good_radial_distances[detdesc][idx_good, 2], bad_radial_distances[detdesc][idx_bad, 2])).reshape(-1, 1), np.concatenate((labels_good[idx_good], labels_bad[idx_bad])), metric = 'l1')
df = df.append(pd.DataFrame({'Detector+Descriptor': ['{}+{}'.format(detdesc[0], detdesc[1])], 'Silhouette coefficient': [s], 'FOV': [detdesc[2]]}))
sns.catplot(x='FOV', y='Silhouette coefficient', hue='Detector+Descriptor', data=df, ax=ax6, palette="muted", kind='bar')

df_pr = pd.DataFrame()
frames = [1, 2, 5, 10, 15, 20, 30, 45, 60, 90, 120, 180]
for detdesc in detdesclist:
pr = precrec[detdesc][precrec[detdesc][:, 0] == 0]
for i in frames:
# if i >= int(detdesc[2]) and int(detdesc[2]) < 180:
# continue
y, x = np.absolute(pr[pr[:, 1] == i][:, 2:].T)
df_pr = df_pr.append(pd.DataFrame({'Detector+Descriptor': '{}+{}'.format(detdesc[0], detdesc[1]), 'FOV': detdesc[2], 'Precision': y, 'Recall': x, 'Baseline': i}))
sns.relplot(y='Precision', x='Recall', hue='Baseline', col='FOV', row='Detector+Descriptor', kind='line', data=df_pr, estimator=None, facet_kws={'margin_titles': True}, legend='full', palette=sns.cubehelix_palette(rot=-0.4, n_colors=len(frames)))

df_auc = pd.DataFrame()
for detdesc in detdesclist:
pr = precrec[detdesc][precrec[detdesc][:, 0] == 0]
for i in range(1, int(pr[:, 1].max(axis=0))):
# if i > int(detdesc[2]) and i < 360 - int(detdesc[2]) and int(detdesc[2]) < 180:
# df_auc = df_auc.append(pd.DataFrame({'Detector+Descriptor': ['{}+{}'.format(detdesc[0], detdesc[1])], 'FOV': [int(detdesc[2])], 'AUC': [0], 'Baseline': [i]}))
# continue
y, x = np.absolute(pr[pr[:, 1] == i][:, 2:].T)
auc = np.trapz(np.flip(y), np.flip(x))
df_auc = df_auc.append(pd.DataFrame({'Detector+Descriptor': ['{}+{}'.format(detdesc[0], detdesc[1])], 'FOV': [int(detdesc[2])], 'AUC': [auc], 'Baseline': [i]}))
ax_auc = sns.relplot(y='AUC', x='Baseline', hue='FOV', col='Detector+Descriptor', col_wrap=3, kind='line', data=df_auc, estimator=None, legend='full', palette=sns.color_palette('muted', n_colors=df_auc.FOV.unique().shape[0]))

plt.show()

else:
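The new AUC block integrates precision over recall per baseline with np.trapz; both arrays are flipped first so the integration variable (recall) is increasing, matching how the eval appears to store the pairs. A toy version of that integration on a made-up curve:

    import numpy as np

    recall    = np.array([0.9, 0.7, 0.5, 0.3, 0.1])    # decreasing, as stored
    precision = np.array([0.55, 0.7, 0.8, 0.9, 0.95])
    auc = np.trapz(np.flip(precision), np.flip(recall))
    print(auc)  # 0.63 for this made-up curve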
20 changes: 16 additions & 4 deletions scripts/misc/ang_res_plot.py
@@ -4,12 +4,14 @@
import math

fig, ax = plt.subplots()
fig2, ax2 = plt.subplots()

params = [(0.289, 0.3, 0.6666667), (0.5, 0, 0), (0.36, 0.66, 0.006), (0.213, -0.2, 0.59)]
params = [(205.824, -0.055, 0.577), (287, 0, 0.647), (250.88, -0.179, 0.591), (348.16, -0.271, 0.555), (898.048, 0.0, 0.0)]

for f, c, a in params:
camera_model = DoubleSphereModel(f, c, a)
camera_model = DoubleSphereModel(f / 1024., c, a)
dt = []
dt_vert = []
last_ang = 0
first = True
for i in np.linspace(0.5, 1.0, 1024):
@@ -19,10 +21,20 @@
last_ang = ang
first = False
continue
ray_vert = camera_model.unproj(np.array([i, 0.5 - 0.5 / 1024.]))
ang_vert = np.arctan2(ray_vert[0, 2], np.sqrt(ray_vert[0, 0] ** 2 + ray_vert[0, 1] ** 2))
dt.append(ang - last_ang)
dt_vert.append(ang_vert)
last_ang = ang

ax.plot(np.arange(0, len(dt)), np.array(dt) * 180 / math.pi, label='{} FOV'.format(camera_model.calc_fov() * 180 / math.pi))
ax.plot(np.arange(0, len(dt)), (np.array(dt) * 180 / math.pi), label='{} FOV'.format(camera_model.calc_fov() * 180 / math.pi))
ax.plot(np.arange(0, len(dt_vert)), (np.array(dt_vert) * 180 / math.pi), label='{} FOV'.format(camera_model.calc_fov() * 180 / math.pi))
ax2.plot(np.arange(0, len(dt_vert)), (np.array(dt) * 180 / math.pi) / (np.array(dt_vert) * 180 / math.pi), label='{} FOV'.format(camera_model.calc_fov() * 180 / math.pi))
ax.set_xlabel('Distance from image center (pixels)')
ax.set_ylabel('Angular resolution (degrees / pixel)')
ax2.set_xlabel('Distance from image center (pixels)')
ax2.set_ylabel('Local aspect ratio')

plt.legend(loc='best')
ax.legend(loc='best')
ax2.legend(loc='best')
plt.show()
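DoubleSphereModel itself is not part of this diff. As a reference for reading the script, here is a minimal stand-in following the published double sphere camera model (Usenko et al., 2018), assuming coordinates normalized by the image width (hence f / 1024.) and a principal point at (0.5, 0.5); the repository's actual class may differ:

    import numpy as np

    class DoubleSphereModel(object):
        """Minimal sketch, not the repository's implementation."""

        def __init__(self, f, xi, alpha):
            # f: focal length normalized by image width;
            # xi (the script's 'c' / chi) and alpha: double sphere parameters.
            self.f = f
            self.xi = xi
            self.alpha = alpha

        def unproj(self, pix):
            # Unproject a normalized pixel (u, v) to a unit-length bearing ray.
            mx = (pix[0] - 0.5) / self.f
            my = (pix[1] - 0.5) / self.f
            r2 = mx * mx + my * my
            mz = (1. - self.alpha ** 2 * r2) / (
                self.alpha * np.sqrt(1. - (2. * self.alpha - 1.) * r2) + 1. - self.alpha)
            scale = (mz * self.xi + np.sqrt(mz ** 2 + (1. - self.xi ** 2) * r2)) / (mz ** 2 + r2)
            ray = scale * np.array([mx, my, mz]) - np.array([0., 0., self.xi])
            return ray.reshape(1, 3)  # the script indexes the result as ray[0, k]

        def calc_fov(self):
            # Twice the angle between the optical axis and the ray through the image edge.
            edge = self.unproj(np.array([1.0, 0.5]))
            return 2. * np.arctan2(np.sqrt(edge[0, 0] ** 2 + edge[0, 1] ** 2), edge[0, 2])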
20 changes: 14 additions & 6 deletions scripts/run_matching_eval_set.py
@@ -11,15 +11,23 @@
parser.add_argument("--rate", type=int, help='frame rate multiplier')
args = parser.parse_args()

d_list = [('SIFT','SIFT'), ('SURF','SURF'), ('ORB','ORB'), ('BRISK','BRISK'), ('AKAZE', 'AKAZE'), ('KAZE', 'KAZE'), ('SIFT','FREAK'), ('SIFT','DAISY'), ('SIFT','LUCID'), ('SIFT','LATCH'), ('SIFT','VGG'), ('SIFT','BOOST')]
# d_list = [('SIFT','SIFT'), ('SURF','SURF'), ('ORB','ORB'), ('BRISK','BRISK'), ('AKAZE', 'AKAZE'), ('KAZE', 'KAZE'), ('SIFT','FREAK'), ('SIFT','DAISY'), ('SIFT','LUCID'), ('SIFT','LATCH'), ('SIFT','VGG'), ('SIFT','BOOST')]
d_list = [('SIFT','SIFT'), ('SURF','SURF'), ('ORB','ORB'), ('BRISK','BRISK'), ('AKAZE', 'AKAZE'), ('SIFT','FREAK'), ('SIFT','DAISY'), ('SIFT','LATCH'), ('SIFT','BOOST')]
det_param_map = dict()
# det_param_map['SIFT'] = '{nfeatures: 5000}'
# det_param_map['SURF'] = '{hessianThreshold: 500}'
# det_param_map['ORB'] = '{nfeatures: 100}'
# det_param_map['BRISK'] = '{thresh: 35}'
# det_param_map['AGAST'] = '{threshold: 25}'
# det_param_map['AKAZE'] = '{threshold: 0.0005}'
# det_param_map['KAZE'] = '{threshold: 0.0005}'
det_param_map['SIFT'] = '{nfeatures: 5000}'
det_param_map['SURF'] = '{hessianThreshold: 500}'
det_param_map['ORB'] = '{nfeatures: 100}'
det_param_map['BRISK'] = '{thresh: 35}'
det_param_map['SURF'] = '{hessianThreshold: 2000}'
det_param_map['ORB'] = '{nfeatures: 50}'
det_param_map['BRISK'] = '{thresh: 50}'
det_param_map['AGAST'] = '{threshold: 25}'
det_param_map['AKAZE'] = '{threshold: 0.0005}'
det_param_map['KAZE'] = '{threshold: 0.0005}'
det_param_map['AKAZE'] = '{threshold: 0.001}'
det_param_map['KAZE'] = '{threshold: 0.001}'

parent = roslaunch.parent.ROSLaunchParent("", [], is_core=True)
parent.start()
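The grid above pairs each detector/descriptor combination with a parameter string; the rest of the script (outside this hunk) presumably launches matching_eval.launch once per pair. A sketch of how the grid expands into roslaunch-style argument lists; the real invocation may differ:

    # Sketch only; the actual launch call lives in the part of the script not shown.
    for det, desc in d_list:
        launch_args = ['detector_type:=' + det,
                       'descriptor_type:=' + desc,
                       'detector_params:=' + det_param_map.get(det, '{}')]
        print(launch_args)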
22 changes: 17 additions & 5 deletions scripts/run_tracking_eval_set.py
@@ -6,17 +6,24 @@

parser = argparse.ArgumentParser(description='Run tracking evaluation set')
parser.add_argument('working_dir', help='working directory')
parser.add_argument('--motion', type=str, help='motion type for motion set evaluation')
parser.add_argument("--rate", type=int, help='frame rate multiplier')
args = parser.parse_args()

parent = roslaunch.parent.ROSLaunchParent("", [], is_core=True)
parent.start()

if os.path.isdir(args.working_dir):
print ''
print '==========================================='
print 'Full motion+FOV dataset tracking evaluation'
print '==========================================='
if args.motion is None:
print ''
print '==========================================='
print 'Full motion+FOV dataset tracking evaluation'
print '==========================================='
else:
print ''
print '==========================================='
print '{} motion dataset tracking evaluation'.format(args.motion)
print '==========================================='
fovs = []
for yaml in os.listdir(args.working_dir):
if not os.path.isdir(os.path.join(args.working_dir, yaml)) and yaml.endswith('.yaml'):
@@ -25,9 +32,14 @@
fovs.sort(key=int)
for motion in os.listdir(args.working_dir):
if os.path.isdir(os.path.join(args.working_dir, motion)):
if args.motion is not None and motion != args.motion:
continue
bag_dir = os.path.join(args.working_dir, motion)
for fov in fovs:
printstr = "Motion type {}, FOV {}".format(motion, fov)
if args.motion is None:
printstr = "Motion type {}, FOV {}".format(motion, fov)
else:
printstr = "FOV {}".format(fov)
print ''
print '-' * len(printstr)
print printstr
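With the new --motion argument, a single motion subdirectory can be evaluated on its own, for example (hypothetical path and motion name) python run_tracking_eval_set.py /data/tracking_set --motion yaw --rate 2, while omitting --motion keeps the original full motion+FOV sweep. When --motion is set, subdirectories whose names differ from the requested motion are skipped and the per-FOV banner drops the motion prefix.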