From 2a55a7194faa82109c88a371191179ee5cf3acbd Mon Sep 17 00:00:00 2001
From: raphaelchang <raphaelchang1227@gmail.com>
Date: Sun, 17 Nov 2019 01:41:55 -0500
Subject: [PATCH] more plots

---
 launch/matching_eval.launch      |  3 +-
 launch/tracking_eval.launch      |  5 +-
 scripts/matching_eval_plot.py    | 66 ++++++++++++++++++++------
 scripts/misc/ang_res_plot.py     | 20 ++++++--
 scripts/run_matching_eval_set.py | 20 +++++---
 scripts/run_tracking_eval_set.py | 22 +++++++--
 scripts/tracking_eval_plot.py    | 79 +++++++++++++++++++++++---------
 src/camera/double_sphere.h       | 16 +++++--
 src/feature/detector.cc          |  6 ++-
 src/module/matching_module.cc    | 70 ++++++++++++++++++++++------
 src/module/matching_module.h     |  4 +-
 src/odometry/five_point.cc       |  4 +-
 src/odometry/pnp.cc              |  2 +-
 src/ros/eval_base.cc             | 25 ++++++++--
 src/ros/eval_base.h              |  2 +
 15 files changed, 263 insertions(+), 81 deletions(-)

diff --git a/launch/matching_eval.launch b/launch/matching_eval.launch
index f8c24ca..ed1efbf 100644
--- a/launch/matching_eval.launch
+++ b/launch/matching_eval.launch
@@ -1,7 +1,7 @@
 <launch>
     <arg name="bag_file" default="" />
     <arg name="detector_type" default="ORB" />
-    <arg name="detector_params" default="{nfeatures: 100}" />
+    <arg name="detector_params" default="{nfeatures: 50}" />
     <arg name="descriptor_type" default="ORB" />
     <arg name="descriptor_params" default="{}" />
     <arg name="camera_file" default="$(find omni_slam_eval)/launch/default_camera.yaml" />
@@ -24,6 +24,7 @@
             matcher_max_dist: $(arg matcher_thresh)
             feature_overlap_threshold: 0.5
             feature_distance_threshold: 10
+            vignette_expansion: 0.01
         </rosparam>
     </node>
     <rosparam command="load" file="$(arg camera_file)" ns="omni_slam_matching_eval_node" />
diff --git a/launch/tracking_eval.launch b/launch/tracking_eval.launch
index fa73646..0a4a0da 100644
--- a/launch/tracking_eval.launch
+++ b/launch/tracking_eval.launch
@@ -1,8 +1,8 @@
 <launch>
     <arg name="bag_file" default="" />
-    <arg name="camera_file" default="$(find omni_slam_eval)/launch/default_camera.yaml" />
-    <arg name="results_file" default="$(eval ''.join(arg('bag_file').split('.')[:-1]) + '.' + ''.join(arg('camera_file').split('/')[-1].split('.')[:-1]) + '.tracking.hdf5')" />
     <arg name="rate" default="1" />
+    <arg name="camera_file" default="$(find omni_slam_eval)/launch/default_camera.yaml" />
+    <arg name="results_file" default="$(eval ''.join(arg('bag_file').split('.')[:-1]) + '.' + ''.join(arg('camera_file').split('/')[-1].split('.')[:-1]) + '.' + str(arg('rate')) + 'x.tracking.hdf5')" />
     <node pkg="omni_slam_eval" type="omni_slam_tracking_eval_node" name="omni_slam_tracking_eval_node" required="true" output="screen">
         <param name="bag_file" value="$(arg bag_file)" />
         <param name="results_file" value="$(arg results_file)" />
@@ -24,6 +24,7 @@
             min_features_per_region: 10
             max_features_per_region: 999999
             keyframe_interval: 1
+            vignette_expansion: 0.05
         </rosparam>
     </node>
     <rosparam command="load" file="$(arg camera_file)" ns="omni_slam_tracking_eval_node" />
diff --git a/scripts/matching_eval_plot.py b/scripts/matching_eval_plot.py
index 42c8063..61e82b5 100644
--- a/scripts/matching_eval_plot.py
+++ b/scripts/matching_eval_plot.py
@@ -29,7 +29,7 @@
     df = pd.DataFrame(stats)
     stats = df.groupby(0).mean().to_records()
     stats = stats.view(np.float64).reshape(len(stats), -1)
-    framediff, nmatch, prec, rec = stats.T
+    framediff, nmatch, prec, rec, rep = stats.T
 
     fig = plt.figure()
     fig.suptitle('Matching - detector={}, descriptor={}, chi={}, alpha={}, fx={}, fy={}, cx={}, cy={}'.format(attrs["detector_type"], attrs["descriptor_type"], attrs["chi"][0], attrs["alpha"][0], attrs["fx"][0], attrs["fy"][0], attrs["cx"][0], attrs["cy"][0]))
@@ -88,10 +88,10 @@
     # match_dict = [[] for i in range(10)]
     # for row in good_radial_distances:
         # r = int(min(row[0], 0.499999) / 0.05)
-        # match_dict[r].append((row[1], 0))
+        # match_dict[r].append((row[2], 0))
     # for row in bad_radial_distances:
         # r = int(min(row[0], 0.499999) / 0.05)
-        # match_dict[r].append((row[1], 1))
+        # match_dict[r].append((row[2], 1))
     # si_bins = [0] * 10
     # for r in range(len(match_dict)):
         # match_dict[r] = sorted(match_dict[r], key=lambda x: x[0])
@@ -129,14 +129,28 @@
     valid_good = [False for i in range(num_bins)]
     valid_bad = [False for i in range(num_bins)]
     valid = [False for i in range(num_bins)]
+    # for row in good_radial_distances:
+        # r = int(min(row[0], 0.499999) / (0.5 / num_bins))
+        # X_good[r] = np.append(X_good[r], row[2])
+        # labels_good[r] = np.append(labels_good[r], 0)
+        # valid_good[r] = True
+    # for row in bad_radial_distances:
+        # r = int(min(row[0], 0.499999) / (0.5 / num_bins))
+        # X_bad[r] = np.append(X_bad[r], row[2])
+        # labels_bad[r] = np.append(labels_bad[r], 1)
+        # valid_bad[r] = True
     for row in good_radial_distances:
-        r = int(min(row[0], 0.499999) / (0.5 / num_bins))
-        X_good[r] = np.append(X_good[r], row[1])
+        if np.isnan(row[1]):
+            continue
+        r = int(min(row[1], np.pi) / (np.pi / num_bins))
+        X_good[r] = np.append(X_good[r], row[2])
         labels_good[r] = np.append(labels_good[r], 0)
         valid_good[r] = True
-    for row in bad_radial_distances:
-        r = int(min(row[0], 0.499999) / (0.5 / num_bins))
-        X_bad[r] = np.append(X_bad[r], row[1])
+    for row in bad_radial_distances:
+        if np.isnan(row[1]):
+            continue
+        r = int(min(row[1], np.pi) / (np.pi / num_bins))
+        X_bad[r] = np.append(X_bad[r], row[2])
         labels_bad[r] = np.append(labels_bad[r], 1)
         valid_bad[r] = True
     for i in range(num_bins):
@@ -150,9 +164,10 @@
         if len(X_bad[i]) > num_samples:
             idx_bad = np.random.choice(np.arange(len(X_bad[i])), num_samples, replace=False)
         sr[i] = sklearn.metrics.silhouette_score(np.concatenate((X_good[i][idx_good], X_bad[i][idx_bad])).reshape(-1, 1), np.concatenate((labels_good[i][idx_good], labels_bad[i][idx_bad])), metric = 'l1')
-    ax4.plot([i * 0.5 / num_bins + 0.5 / num_bins / 2 for i in range(0, len(sr)) if valid[i]], [sr[i] for i in range(0, len(sr)) if valid[i]])
+    # ax4.plot([i * 0.5 / num_bins + 0.5 / num_bins / 2 for i in range(0, len(sr)) if valid[i]], [sr[i] for i in range(0, len(sr)) if valid[i]])
+    ax4.plot([i * np.pi / num_bins + np.pi / num_bins / 2 for i in range(0, len(sr)) if valid[i]], [sr[i] for i in range(0, len(sr)) if valid[i]])
 
-    df = pd.DataFrame({'Delta radial distance': ['{}-{}'.format(r * 0.05, (r + 1) * 0.05) for r in (np.minimum(np.hstack((good_radial_distances[:, 0], bad_radial_distances[:, 0])), 0.499999) / 0.05).astype(int)], 'Descriptor distance': np.hstack((good_radial_distances[:, 1], bad_radial_distances[:, 1])), 'Match': ['Good' for i in range(len(good_radial_distances))] + ['Bad' for i in range(len(bad_radial_distances))]})
+    df = pd.DataFrame({'Delta radial distance': ['{}-{}'.format(r * 0.05, (r + 1) * 0.05) for r in (np.minimum(np.hstack((good_radial_distances[:, 0], bad_radial_distances[:, 0])), 0.499999) / 0.05).astype(int)], 'Descriptor distance': np.hstack((good_radial_distances[:, 2], bad_radial_distances[:, 2])), 'Match': ['Good' for i in range(len(good_radial_distances))] + ['Bad' for i in range(len(bad_radial_distances))]})
     sns.violinplot(x="Delta radial distance", y="Descriptor distance", hue="Match", data=df, split=True, ax=ax5, palette="Set2", inner="quart")
     handles, labels = ax5.get_legend_handles_labels()
     ax5.legend(handles=handles[0:], labels=labels[0:], fontsize='small')
@@ -181,6 +196,7 @@
     nmatch = dict()
     prec = dict()
     rec = dict()
+    rep = dict()
     detdesclist = []
     for filename in os.listdir(os.path.dirname(args.results_path)):
         bagname = os.path.splitext(os.path.basename(args.results_path))[0]
@@ -207,10 +223,11 @@
                 df = pd.DataFrame(stats)
                 statsavg = df.groupby(0).mean().to_records()
                 statsavg = statsavg.view(np.float64).reshape(len(statsavg), -1)
-                framediff[detdesc], nmatch[detdesc], prec[detdesc], rec[detdesc] = statsavg.T
+                framediff[detdesc], nmatch[detdesc], prec[detdesc], rec[detdesc], rep[detdesc] = statsavg.T
 
     if len(detdesclist) > 0:
         fig = plt.figure()
+        sns.set()
 
         detdesclist = sorted(detdesclist)
         legendlist = []
@@ -293,7 +310,7 @@
 
         df = pd.DataFrame()
         for d, detdesc in enumerate(detdesclist):
-            matches = np.hstack((good_radial_distances[detdesc][:, 1], bad_radial_distances[detdesc][:, 1]))
+            matches = np.hstack((good_radial_distances[detdesc][:, 2], bad_radial_distances[detdesc][:, 2]))
             matches /= matches.max()
             df = df.append(pd.DataFrame({legendtitle: [legendlist[d] for i in range(len(matches))], 'Normalized descriptor distance': matches, 'Match': ['Good' for i in range(len(good_radial_distances[detdesc]))] + ['Bad' for i in range(len(bad_radial_distances[detdesc]))]}))
         sns.violinplot(x=legendtitle, y="Normalized descriptor distance", hue="Match", data=df, split=True, ax=ax5, palette="Set2", inner="quart")
@@ -363,10 +380,33 @@
                     idx_good = np.random.choice(np.arange(len(idx_good)), num_samples, replace=False)
                 if len(idx_bad) > num_samples:
                     idx_bad = np.random.choice(np.arange(len(idx_bad)), num_samples, replace=False)
-                s = sklearn.metrics.silhouette_score(np.concatenate((good_radial_distances[detdesc][idx_good, 1], bad_radial_distances[detdesc][idx_bad, 1])).reshape(-1, 1), np.concatenate((labels_good[idx_good], labels_bad[idx_bad])), metric = 'l1')
+                s = sklearn.metrics.silhouette_score(np.concatenate((good_radial_distances[detdesc][idx_good, 2], bad_radial_distances[detdesc][idx_bad, 2])).reshape(-1, 1), np.concatenate((labels_good[idx_good], labels_bad[idx_bad])), metric = 'l1')
                 df = df.append(pd.DataFrame({'Detector+Descriptor': ['{}+{}'.format(detdesc[0], detdesc[1])], 'Silhouette coefficient': [s], 'FOV': [detdesc[2]]}))
             sns.catplot(x='FOV', y='Silhouette coefficient', hue='Detector+Descriptor', data=df, ax=ax6, palette="muted", kind='bar')
 
+        df_pr = pd.DataFrame()
+        frames = [1, 2, 5, 10, 15, 20, 30, 45, 60, 90, 120, 180]
+        for detdesc in detdesclist:
+            pr = precrec[detdesc][precrec[detdesc][:, 0] == 0]
+            for i in frames:
+                # if i >= int(detdesc[2]) and int(detdesc[2]) < 180:
+                    # continue
+                y, x = np.absolute(pr[pr[:, 1] == i][:, 2:].T)
+                df_pr = df_pr.append(pd.DataFrame({'Detector+Descriptor': '{}+{}'.format(detdesc[0], detdesc[1]), 'FOV': detdesc[2], 'Precision': y, 'Recall': x, 'Baseline': i}))
+        sns.relplot(y='Precision', x='Recall', hue='Baseline', col='FOV', row='Detector+Descriptor', kind='line', data=df_pr, estimator=None, facet_kws={'margin_titles': True}, legend='full', palette=sns.cubehelix_palette(rot=-0.4, n_colors=len(frames)))
+
+        df_auc = pd.DataFrame()
+        for detdesc in detdesclist:
+            pr = precrec[detdesc][precrec[detdesc][:, 0] == 0]
+            for i in range(1, int(pr[:, 1].max(axis=0))):
+                # if i > int(detdesc[2]) and i < 360 - int(detdesc[2]) and int(detdesc[2]) < 180:
+                    # df_auc = df_auc.append(pd.DataFrame({'Detector+Descriptor': ['{}+{}'.format(detdesc[0], detdesc[1])], 'FOV': [int(detdesc[2])], 'AUC': [0], 'Baseline': [i]}))
+                    # continue
+                y, x = np.absolute(pr[pr[:, 1] == i][:, 2:].T)
+                auc = np.trapz(np.flip(y), np.flip(x))
+                df_auc = df_auc.append(pd.DataFrame({'Detector+Descriptor': ['{}+{}'.format(detdesc[0], detdesc[1])], 'FOV': [int(detdesc[2])], 'AUC': [auc], 'Baseline': [i]}))
+        ax_auc = sns.relplot(y='AUC', x='Baseline', hue='FOV', col='Detector+Descriptor', col_wrap=3, kind='line', data=df_auc, estimator=None, legend='full', palette=sns.color_palette('muted', n_colors=df_auc.FOV.unique().shape[0]))
+
         plt.show()
 
     else:
diff --git a/scripts/misc/ang_res_plot.py b/scripts/misc/ang_res_plot.py
index 476074e..5cbaec7 100644
--- a/scripts/misc/ang_res_plot.py
+++ b/scripts/misc/ang_res_plot.py
@@ -4,12 +4,14 @@
 import math
 
 fig, ax = plt.subplots()
+fig2, ax2 = plt.subplots()
 
-params = [(0.289, 0.3, 0.6666667), (0.5, 0, 0), (0.36, 0.66, 0.006), (0.213, -0.2, 0.59)]
+params = [(205.824, -0.055, 0.577), (287, 0, 0.647), (250.88, -0.179, 0.591), (348.16, -0.271, 0.555), (898.048, 0.0, 0.0)]
 
 for f, c, a in params:
-    camera_model = DoubleSphereModel(f, c, a)
+    camera_model = DoubleSphereModel(f / 1024., c, a)
     dt = []
+    dt_vert = []
     last_ang = 0
     first = True
     for i in np.linspace(0.5, 1.0, 1024):
@@ -19,10 +21,20 @@
             last_ang = ang
             first = False
             continue
+        ray_vert = camera_model.unproj(np.array([i, 0.5 - 0.5 / 1024.]))
+        ang_vert = np.arctan2(ray_vert[0, 2], np.sqrt(ray_vert[0, 0] ** 2 + ray_vert[0, 1] ** 2))
         dt.append(ang - last_ang)
+        dt_vert.append(ang_vert)
         last_ang = ang
 
-    ax.plot(np.arange(0, len(dt)), np.array(dt) * 180 / math.pi, label='{} FOV'.format(camera_model.calc_fov() * 180 / math.pi))
+    ax.plot(np.arange(0, len(dt)), (np.array(dt) * 180 / math.pi), label='{} FOV'.format(camera_model.calc_fov() * 180 / math.pi))
+    ax.plot(np.arange(0, len(dt_vert)), (np.array(dt_vert) * 180 / math.pi), label='{} FOV (vertical)'.format(camera_model.calc_fov() * 180 / math.pi))
+    ax2.plot(np.arange(0, len(dt_vert)), (np.array(dt) * 180 / math.pi) / (np.array(dt_vert) * 180 / math.pi), label='{} FOV'.format(camera_model.calc_fov() * 180 / math.pi))
+    ax.set_xlabel('Distance from image center (pixels)')
+    ax.set_ylabel('Angular resolution (degrees / pixel)')
+    ax2.set_xlabel('Distance from image center (pixels)')
+    ax2.set_ylabel('Local aspect ratio')
 
-plt.legend(loc='best')
+ax.legend(loc='best')
+ax2.legend(loc='best')
 plt.show()
diff --git a/scripts/run_matching_eval_set.py b/scripts/run_matching_eval_set.py
index 2fdbf07..0d746a0 100644
--- a/scripts/run_matching_eval_set.py
+++ b/scripts/run_matching_eval_set.py
@@ -11,15 +11,23 @@
 parser.add_argument("--rate", type=int, help='frame rate multiplier')
 args = parser.parse_args()
 
-d_list = [('SIFT','SIFT'), ('SURF','SURF'), ('ORB','ORB'), ('BRISK','BRISK'), ('AKAZE', 'AKAZE'), ('KAZE', 'KAZE'), ('SIFT','FREAK'), ('SIFT','DAISY'), ('SIFT','LUCID'), ('SIFT','LATCH'), ('SIFT','VGG'), ('SIFT','BOOST')]
+# d_list = [('SIFT','SIFT'), ('SURF','SURF'), ('ORB','ORB'), ('BRISK','BRISK'), ('AKAZE', 'AKAZE'), ('KAZE', 'KAZE'), ('SIFT','FREAK'), ('SIFT','DAISY'), ('SIFT','LUCID'), ('SIFT','LATCH'), ('SIFT','VGG'), ('SIFT','BOOST')]
+d_list = [('SIFT','SIFT'), ('SURF','SURF'), ('ORB','ORB'), ('BRISK','BRISK'), ('AKAZE', 'AKAZE'), ('SIFT','FREAK'), ('SIFT','DAISY'), ('SIFT','LATCH'), ('SIFT','BOOST')]
 det_param_map = dict()
+# det_param_map['SIFT'] = '{nfeatures: 5000}'
+# det_param_map['SURF'] = '{hessianThreshold: 500}'
+# det_param_map['ORB'] = '{nfeatures: 100}'
+# det_param_map['BRISK'] = '{thresh: 35}'
+# det_param_map['AGAST'] = '{threshold: 25}'
+# det_param_map['AKAZE'] = '{threshold: 0.0005}'
+# det_param_map['KAZE'] = '{threshold: 0.0005}'
 det_param_map['SIFT'] = '{nfeatures: 5000}'
-det_param_map['SURF'] = '{hessianThreshold: 500}'
-det_param_map['ORB'] = '{nfeatures: 100}'
-det_param_map['BRISK'] = '{thresh: 35}'
+det_param_map['SURF'] = '{hessianThreshold: 2000}'
+det_param_map['ORB'] = '{nfeatures: 50}'
+det_param_map['BRISK'] = '{thresh: 50}'
 det_param_map['AGAST'] = '{threshold: 25}'
-det_param_map['AKAZE'] = '{threshold: 0.0005}'
-det_param_map['KAZE'] = '{threshold: 0.0005}'
+det_param_map['AKAZE'] = '{threshold: 0.001}'
+det_param_map['KAZE'] = '{threshold: 0.001}'
 
 parent = roslaunch.parent.ROSLaunchParent("", [], is_core=True)
 parent.start()
diff --git a/scripts/run_tracking_eval_set.py b/scripts/run_tracking_eval_set.py
index 39a35fe..f75b73d 100644
--- a/scripts/run_tracking_eval_set.py
+++ b/scripts/run_tracking_eval_set.py
@@ -6,6 +6,7 @@
 
 parser = argparse.ArgumentParser(description='Run tracking evaluation set')
 parser.add_argument('working_dir', help='working directory')
+parser.add_argument('--motion', type=str, help='motion type for motion set evaluation')
 parser.add_argument("--rate", type=int, help='frame rate multiplier')
 args = parser.parse_args()
 
@@ -13,10 +14,16 @@
 parent.start()
 
 if os.path.isdir(args.working_dir):
-    print ''
-    print '==========================================='
-    print 'Full motion+FOV dataset tracking evaluation'
-    print '==========================================='
+    if args.motion is None:
+        print ''
+        print '==========================================='
+        print 'Full motion+FOV dataset tracking evaluation'
+        print '==========================================='
+    else:
+        print ''
+        print '==========================================='
+        print '{} motion dataset tracking evaluation'.format(args.motion)
+        print '==========================================='
     fovs = []
     for yaml in os.listdir(args.working_dir):
         if not os.path.isdir(os.path.join(args.working_dir, yaml)) and yaml.endswith('.yaml'):
@@ -25,9 +32,14 @@
     fovs.sort(key=int)
     for motion in os.listdir(args.working_dir):
         if os.path.isdir(os.path.join(args.working_dir, motion)):
+            if args.motion is not None and motion != args.motion:
+                continue
             bag_dir = os.path.join(args.working_dir, motion)
             for fov in fovs:
-                printstr = "Motion type {}, FOV {}".format(motion, fov)
+                if args.motion is None:
+                    printstr = "Motion type {}, FOV {}".format(motion, fov)
+                else:
+                    printstr = "FOV {}".format(fov)
                 print ''
                 print '-' * len(printstr)
                 print printstr
diff --git a/scripts/tracking_eval_plot.py b/scripts/tracking_eval_plot.py
index caa9173..11a14ae 100644
--- a/scripts/tracking_eval_plot.py
+++ b/scripts/tracking_eval_plot.py
@@ -32,7 +32,7 @@
 
     df = pandas.DataFrame()
     re_filt = radial_errors[np.where((radial_errors[:, 1] < 50) & (radial_errors[:, 2] > 0))]
-    df = df.append(pandas.DataFrame({'Radial distance': np.round(re_filt[:, 0] / 0.005) * 0.005, 'Relative endpoint error': re_filt[:, 2]}))
+    df = df.append(pandas.DataFrame({'Radial distance': np.round(re_filt[:, 0] / 0.005) * 0.005, 'Relative endpoint error (pixels)': re_filt[:, 2]}))
     # binned_errors = [[0] for i in range(30)]
     # for r, aee, ree, ae, rbe in radial_errors:
         # if aee < 50:
@@ -45,7 +45,7 @@
     # ax1.set_ylim([0, 10])
     # ax1.violinplot(binned_errors[:21])
     # ax1.plot([i for i in range(1, 22)], [np.array(binned_errors[i]).mean() for i in range(21)], '-o', markersize=4, c='black')
-    sns.relplot(x="Radial distance", y="Relative endpoint error", kind="line", data=df, ci="sd", ax=ax1)
+    sns.relplot(x="Radial distance", y="Relative endpoint error (pixels)", kind="line", data=df, ci="sd", ax=ax1)
 
     ax2.set_title('Pixel errors over track lifetime')
     ax2.set_ylim([0, 0.1])
@@ -122,10 +122,11 @@
 elif os.path.isdir(args.results_path):
     fig1 = plt.figure(1)
     fig2 = plt.figure(2)
-    # fig3 = plt.figure(3)
+    fig3 = plt.figure(3)
     fig1.suptitle('KLT Tracking Accuracy for Various FOVs and Motions')
     fig2.suptitle('KLT Tracking Lifetime Distributions for Various FOVs and Motions')
-    # fig3.suptitle('KLT Tracking Error Distribution Over Radial Distance for Various FOVs and Motions')
+    fig3.suptitle('KLT Tracking Track Counts for Varying Rates')
+    sns.set()
     motion_count = 0
     fov_dict = dict()
     last_fov_num = 0
@@ -141,7 +142,7 @@
             motion_exists = False
             for fovstr in fovs:
                 for filename in os.listdir(bag_dir):
-                    if filename.endswith(fovstr + '.tracking.hdf5'):
+                    if filename.split('.')[1] == fovstr and filename.endswith('.tracking.hdf5'):
                         results_file = os.path.join(bag_dir, filename)
                         with h5py.File(results_file, 'r') as f:
                             attrs = dict(f['attributes'].attrs.items())
@@ -166,32 +167,50 @@
     df_ee = pandas.DataFrame()
     df_ae = pandas.DataFrame()
     df_be = pandas.DataFrame()
+    df_inlier = pandas.DataFrame()
+    df_length = pandas.DataFrame()
     for motion in os.listdir(args.results_path):
         if os.path.isdir(os.path.join(args.results_path, motion)):
             df_lifetime = pandas.DataFrame()
+            df_rate = pandas.DataFrame()
+            df_rate_errors = pandas.DataFrame()
             bag_dir = os.path.join(args.results_path, motion)
             motion_exists = False
+            num_fovs = 0
             for fovstr in fovs:
-                failures = np.empty(shape=(0,2))
-                radial_errors = np.empty(shape=(0,5))
+                failures = dict()
+                successes = dict()
+                radial_errors_dict = dict()
                 track_lengths = np.empty(shape=(1,0))
+                length_errors = np.empty(shape=(0,5))
                 file_exists = False
                 for filename in os.listdir(bag_dir):
-                    if filename.endswith(fovstr + '.tracking.hdf5'):
+                    if filename.split('.')[1] == fovstr and filename.endswith('.tracking.hdf5'):
                         results_file = os.path.join(bag_dir, filename)
                         with h5py.File(results_file, 'r') as f:
-                            failures = np.vstack((failures, f['failures'][:]))
-                            radial_errors = np.vstack((radial_errors, f['radial_errors'][:]))
-                            tl = f['track_lengths'][:]
-                            track_lengths = np.hstack((track_lengths, tl[:, tl[0, :] > 0]))
                             attrs = dict(f['attributes'].attrs.items())
+                            rate = int(attrs['rate'])
+                            if rate not in successes:
+                                successes[rate] = np.empty(shape=(0,2))
+                            if rate not in failures:
+                                failures[rate] = np.empty(shape=(0,2))
+                            if rate not in radial_errors_dict:
+                                radial_errors_dict[rate] = np.empty(shape=(0,5))
+                            successes[rate] = np.vstack((successes[rate], f['successes'][:]))
+                            failures[rate] = np.vstack((failures[rate], f['failures'][:]))
+                            radial_errors_dict[rate] = np.vstack((radial_errors_dict[rate], f['radial_errors'][:]))
+                            if rate == 1:
+                                tl = f['track_lengths'][:]
+                                track_lengths = np.hstack((track_lengths, tl[:, tl[0, :] > 0]))
+                                length_errors = np.vstack((length_errors, f['length_errors'][:]))
                             file_exists = True
                             motion_exists = True
                             # fov = int(round(attrs['fov']))
                 fov = int(fovstr)
                 if file_exists:
+                    radial_errors = radial_errors_dict[1]
                     ax = fig1.add_subplot(num_rows, num_cols, motion_inx * num_cols + fov_dict[fov] + 1)
-                    ax.hist([[r for r, aee, ree, ae, rbe in radial_errors if aee <= 5], [r for r, aee, ree, ae, rbe in radial_errors if 5 < aee <= 20], [r for r, aee, ree, ae, rbe in radial_errors if 20 < aee <= 50], [r for r, _ in failures]], bins=[i * 0.05 for i in range(11)], alpha=0.5, label=['<5', '5-20', '20-50', 'Failures'], stacked=False)
+                    ax.hist([[r for r, aee, ree, ae, rbe in radial_errors if aee <= 5], [r for r, aee, ree, ae, rbe in radial_errors if 5 < aee <= 20], [r for r, aee, ree, ae, rbe in radial_errors if 20 < aee <= 50], [r for r, _ in failures[1]]], bins=[i * 0.05 for i in range(11)], alpha=0.5, label=['<5', '5-20', '20-50', 'Failures'], stacked=False)
                     ax.legend(loc='best', title='Pixel error', fontsize='x-small')
 
                     if motion_inx == 0:
@@ -208,26 +227,42 @@
                     re_filt_ee = radial_errors[np.where((radial_errors[:, 1] < 50) & (radial_errors[:, 2] > 0))]
                     re_filt_ae = radial_errors[np.where(radial_errors[:, 1] < 50)]
                     re_filt_be = radial_errors[np.where((radial_errors[:, 1] < 50) & (radial_errors[:, 4] > 0))]
-                    df_ee = df_ee.append(pandas.DataFrame({'Radial distance': np.round(re_filt_ee[:, 0] / 0.005) * 0.005, 'FOV': [fovstr for i in range(len(re_filt_ee))], 'Motion': [motion for i in range(len(re_filt_ee))], 'Relative endpoint error': re_filt_ee[:, 2]}))
-                    df_ae = df_ae.append(pandas.DataFrame({'Radial distance': np.round(re_filt_ae[:, 0] / 0.005) * 0.005, 'FOV': [fovstr for i in range(len(re_filt_ae))], 'Motion': [motion for i in range(len(re_filt_ae))], 'Angular error': re_filt_ae[:, 3]}))
-                    df_be = df_be.append(pandas.DataFrame({'Radial distance': np.round(re_filt_be[:, 0] / 0.005) * 0.005, 'FOV': [fovstr for i in range(len(re_filt_be))], 'Motion': [motion for i in range(len(re_filt_be))], 'Relative bearing error': re_filt_be[:, 4]}))
+                    df_ee = df_ee.append(pandas.DataFrame({'Radial distance': np.round(re_filt_ee[:, 0] / 0.005) * 0.005, 'FOV': [fovstr for i in range(len(re_filt_ee))], 'Motion': [motion for i in range(len(re_filt_ee))], 'Relative endpoint error (pixels)': re_filt_ee[:, 2]}))
+                    df_ae = df_ae.append(pandas.DataFrame({'Radial distance': np.round(re_filt_ae[:, 0] / 0.005) * 0.005, 'FOV': [fovstr for i in range(len(re_filt_ae))], 'Motion': [motion for i in range(len(re_filt_ae))], 'Angular error (radians)': re_filt_ae[:, 3]}))
+                    df_be = df_be.append(pandas.DataFrame({'Radial distance': np.round(re_filt_be[:, 0] / 0.005) * 0.005, 'FOV': [fovstr for i in range(len(re_filt_be))], 'Motion': [motion for i in range(len(re_filt_be))], 'Relative bearing error (radians)': re_filt_be[:, 4]}))
+                    # df_inlier = df_inlier.append(pandas.DataFrame({'Inliers': [float(len(successes[1][successes[1][:, 1] == i + 1])) / (len(successes[1][successes[1][:, 1] == i + 1]) + len(failures[1][failures[1][:, 1] == i + 1])) for i in range(frame_num)], 'FOV': [fov for i in range(frame_num)]}))
+                    for rate, _ in failures.iteritems():
+                        frame_num = max(int(failures[rate].max(axis=0)[1]), int(successes[rate].max(axis=0)[1]))
+                        df_rate = df_rate.append(pandas.DataFrame({'Rate': [rate for i in range(frame_num)], 'Inliers': [float(len(successes[rate][successes[rate][:, 1] == i + 1])) / (len(successes[rate][successes[rate][:, 1] == i + 1]) + len(failures[rate][failures[rate][:, 1] == i + 1])) for i in range(frame_num)], 'FOV': [fov for i in range(frame_num)]}))
+                        df_rate_errors = df_rate_errors.append(pandas.DataFrame({'Rate': [rate for i in range(len(radial_errors_dict[rate]))], 'Endpoint error': [aee for _, aee, _, _, _ in radial_errors_dict[rate]], 'FOV': [fov for i in range(len(radial_errors_dict[rate]))]}))
+                    df_length = df_length.append(pandas.DataFrame({'Track length (frames)': length_errors[:, 0], 'Endpoint error': length_errors[:, 3], 'Bearing error': length_errors[:, 4], 'FOV': fovstr, 'Motion': motion}))
+                    num_fovs += 1
                 else:
                     print '[WARNING] No results files found in directory {} for FOV {}'.format(os.path.join(bag_dir), fovstr)
             if motion_exists:
                 ax2 = fig2.add_subplot(num_rows, 1, motion_inx + 1)
                 sns.violinplot(x='FOV', y='Track lifetime (frames)', data=df_lifetime, ax=ax2, palette="muted", inner='quart')
                 ax2.set_title('Motion {}'.format(motion))
+                ax_rate = fig3.add_subplot(motion_count, 2, 2 * motion_inx + 1)
+                ax_rate_errors = fig3.add_subplot(motion_count, 2, 2 * motion_inx + 2)
+                ax_rate.set_title('Motion {}'.format(motion))
+                sns.lineplot(x="Rate", y="Inliers", hue="FOV", data=df_rate, ax=ax_rate, marker='o', ci='sd', legend='full', palette=sns.color_palette('muted', n_colors=num_fovs))
+                sns.lineplot(x="Rate", y="Endpoint error", hue="FOV", data=df_rate_errors, ax=ax_rate_errors, marker='o', ci='sd', legend='full', palette=sns.color_palette('muted', n_colors=num_fovs))
+                ax_rate.set_xlabel('Degrees per frame')
+                ax_rate.set_ylabel('Inlier rate')
                 motion_inx += 1
 
     # ax_ee = fig3.add_subplot(1, 3, 1)
-    sns.relplot(x="Radial distance", y="Relative endpoint error", kind="line", data=df_ee, ci="sd", row='Motion', hue='FOV', legend="full", hue_order=fovs, palette=sns.color_palette("muted", n_colors=len(fovs)))
-    # ax_ee.set_title('Relative endpoint error')
+    sns.relplot(x="Radial distance", y="Relative endpoint error (pixels)", kind="line", data=df_ee, ci="sd", row='Motion', hue='FOV', legend="full", hue_order=fovs, palette=sns.color_palette("muted", n_colors=len(fovs)))
+    # ax_ee.set_title('Relative endpoint error (pixels)')
     # ax_ae = fig3.add_subplot(1, 3, 2)
-    sns.relplot(x="Radial distance", y="Angular error", kind="line", data=df_ae, ci="sd", row='Motion', hue='FOV', legend="full", hue_order=fovs, palette=sns.color_palette("muted", n_colors=len(fovs)))
-    # ax_ae.set_title('Angular error')
+    sns.relplot(x="Radial distance", y="Angular error (radians)", kind="line", data=df_ae, ci="sd", row='Motion', hue='FOV', legend="full", hue_order=fovs, palette=sns.color_palette("muted", n_colors=len(fovs)))
+    # ax_ae.set_title('Angular error (radians)')
     # ax_be = fig3.add_subplot(1, 3, 3)
-    sns.relplot(x="Radial distance", y="Relative bearing error", kind="line", data=df_be, ci="sd", row='Motion', hue='FOV', legend="full", hue_order=fovs, palette=sns.color_palette("muted", n_colors=len(fovs)))
-    # ax_be.set_title('Relative bearing error')
+    sns.relplot(x="Radial distance", y="Relative bearing error (radians)", kind="line", data=df_be, ci="sd", row='Motion', hue='FOV', legend="full", hue_order=fovs, palette=sns.color_palette("muted", n_colors=len(fovs)))
+    # ax_be.set_title('Relative bearing error (radians)')
+
+    sns.relplot(x='Track length (frames)', y='Endpoint error', kind='line', data=df_length, ci='sd', row='Motion', hue='FOV', legend='full', hue_order=fovs, palette=sns.color_palette('muted', n_colors=len(fovs)))
 
     plt.show()
 
diff --git a/src/camera/double_sphere.h b/src/camera/double_sphere.h
index 204538d..2784c39 100644
--- a/src/camera/double_sphere.h
+++ b/src/camera/double_sphere.h
@@ -14,14 +14,15 @@ class DoubleSphere : public CameraModel<T>
     template <typename> friend class DoubleSphere;
 
 public:
-    DoubleSphere(const T fx, const T fy, const T cx, const T cy, const T chi, const T alpha)
+    DoubleSphere(const T fx, const T fy, const T cx, const T cy, const T chi, const T alpha, const T vignette = T(0.))
         : CameraModel<T>("DoubleSphere"),
         fx_(fx),
         fy_(fy),
         cx_(cx),
         cy_(cy),
         chi_(chi),
-        alpha_(alpha)
+        alpha_(alpha),
+        vignette_(vignette)
     {
         cameraMat_ << fx_, 0., cx_, 0., fy_, cy_, 0., 0., 1.;
         fov_ = GetFOV();
@@ -122,8 +123,14 @@ class DoubleSphere : public CameraModel<T>
         T r2;
         if (alpha_ > 0.5)
         {
-            //r2 = std::min(mx * mx, 1. / (2. * alpha_ - 1.));
-            r2 = 1. / (2. * alpha_ - 1.);
+            if (vignette_ > 0.)
+            {
+                r2 = std::min(mx * mx * vignette_ * vignette_, 1. / (2. * alpha_ - 1.));
+            }
+            else
+            {
+                r2 = 1. / (2. * alpha_ - 1.);
+            }
         }
         else
         {
@@ -148,6 +155,7 @@ class DoubleSphere : public CameraModel<T>
     T alpha_;
     T fov_;
     T sinTheta_;
+    T vignette_;
     Matrix<T, 3, 3> cameraMat_;
 };
 
diff --git a/src/feature/detector.cc b/src/feature/detector.cc
index 52c4c09..c591ef3 100644
--- a/src/feature/detector.cc
+++ b/src/feature/detector.cc
@@ -448,15 +448,17 @@ int Detector::DetectInRadialRegion(data::Frame &frame, std::vector<data::Landmar
 {
     bool compressed = frame.IsCompressed();
     cv::Mat mask = cv::Mat::zeros(frame.GetImage().size(), CV_8U);
+    double start_r2 = start_r * start_r;
+    double end_r2 = end_r * end_r;
     for (int i = 0; i < mask.rows; i++)
     {
         for (int j = 0; j < mask.cols; j++)
         {
             double x = j - mask.cols / 2. + 0.5;
             double y = i - mask.rows / 2. + 0.5;
-            double r = sqrt(x * x + y * y);
+            double r2 = x * x + y * y;
             double t = util::MathUtil::FastAtan2(y, x);
-            if (r >= start_r && r < end_r && t >= start_t && t < end_t)
+            if (r2 >= start_r2 && r2 < end_r2 && t >= start_t && t < end_t)
             {
                 mask.at<unsigned char>(i, j) = 255;
             }
diff --git a/src/module/matching_module.cc b/src/module/matching_module.cc
index 9594e00..b36bdc9 100644
--- a/src/module/matching_module.cc
+++ b/src/module/matching_module.cc
@@ -83,6 +83,16 @@ void MatchingModule::Update(std::unique_ptr<data::Frame> &frame)
             double x_prev = prevFeat.GetKeypoint().pt.x - frames_.back()->GetImage().cols / 2. + 0.5;
             double y_prev = prevFeat.GetKeypoint().pt.y - frames_.back()->GetImage().rows / 2. + 0.5;
             double r_prev = sqrt(x_prev * x_prev + y_prev * y_prev) / imsize;
+            Vector2d prevPix;
+            prevPix << prevFeat.GetKeypoint().pt.x, prevFeat.GetKeypoint().pt.y;
+            Vector3d ray;
+            frames_.back()->GetCameraModel().UnprojectToBearing(curPix, ray);
+            Vector3d rayPrev;
+            frames_.back()->GetCameraModel().UnprojectToBearing(prevPix, rayPrev);
+            Vector3d z;
+            z << 0, 0, 1;
+            double bearingAngleCur = acos(ray.normalized().dot(z));
+            double bearingAnglePrev = acos(rayPrev.normalized().dot(z));
             Vector2d prevFeatPix;
             if (frames_.back()->GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(frames_.back()->GetInversePose(), prevFeat.GetWorldPoint())), prevFeatPix))
             {
@@ -102,7 +112,7 @@ void MatchingModule::Update(std::unique_ptr<data::Frame> &frame)
                     #pragma omp critical
                     {
                         numGoodMatches[{prevFeat.GetFrame().GetID(), curFeat.GetFrame().GetID()}]++;
-                        stats_.goodRadialDistances.emplace_back(vector<double>{fabs(r - r_prev), descDist});
+                        stats_.goodRadialDistances.emplace_back(vector<double>{fabs(r - r_prev), fabs(bearingAngleCur - bearingAnglePrev), descDist});
                         goodDists[{prevFeat.GetFrame().GetID(), curFeat.GetFrame().GetID()}].push(descDist);
                         if (frameIdToNum_[prevFeat.GetFrame().GetID()] == 0)
                         {
@@ -119,7 +129,7 @@ void MatchingModule::Update(std::unique_ptr<data::Frame> &frame)
                         {
                             visualization_.AddBadMatch(curFeat.GetKeypoint(), kpt);
                         }
-                        stats_.badRadialDistances.emplace_back(vector<double>{fabs(r - r_prev), descDist});
+                        stats_.badRadialDistances.emplace_back(vector<double>{fabs(r - r_prev), fabs(bearingAngleCur - bearingAnglePrev), descDist});
                     }
                 }
             }
@@ -132,14 +142,33 @@ void MatchingModule::Update(std::unique_ptr<data::Frame> &frame)
                     {
                         visualization_.AddBadMatch(curFeat.GetKeypoint());
                     }
-                    stats_.badRadialDistances.emplace_back(vector<double>{fabs(r - r_prev), descDist});
+                    stats_.badRadialDistances.emplace_back(vector<double>{fabs(r - r_prev), fabs(bearingAngleCur - bearingAnglePrev), descDist});
                 }
             }
         }
     }
-    map<pair<int, int>, set<data::Landmark*>> goodCorrPrev;
-    map<pair<int, int>, set<data::Landmark*>> goodCorrCur;
-    map<int, int> featuresInId;
+    unordered_map<int, Vector2d> projMap;
+    unordered_map<int, int> featuresInId;
+    #pragma omp parallel for
+    for (auto it = landmarks_.begin(); it < landmarks_.end(); it++)
+    {
+        data::Landmark &prevLandmark = *it;
+        const data::Feature &prevFeat = prevLandmark.GetObservations()[0];
+        #pragma omp critical
+        {
+            featuresInId[prevFeat.GetFrame().GetID()]++;
+        }
+        Vector2d prevFeatPix;
+        if (frames_.back()->GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(frames_.back()->GetInversePose(), prevLandmark.GetGroundTruth())), prevFeatPix))
+        {
+            #pragma omp critical
+            {
+                projMap[prevLandmark.GetID()] = prevFeatPix;
+            }
+        }
+    }
+    map<pair<int, int>, unordered_set<data::Landmark*>> goodCorrPrev;
+    map<pair<int, int>, unordered_set<data::Landmark*>> goodCorrCur;
     #pragma omp parallel for collapse(2)
     for (auto it1 = curLandmarks.begin(); it1 < curLandmarks.end(); it1++)
     {
@@ -149,13 +178,9 @@ void MatchingModule::Update(std::unique_ptr<data::Frame> &frame)
             data::Landmark &prevLandmark = *it2;
             const data::Feature &curFeat = curLandmark.GetObservations()[0];
             const data::Feature &prevFeat = prevLandmark.GetObservations()[0];
-            #pragma omp critical
-            {
-                featuresInId[prevFeat.GetFrame().GetID()]++;
-            }
-            Vector2d prevFeatPix;
-            if (frames_.back()->GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(frames_.back()->GetInversePose(), prevLandmark.GetGroundTruth())), prevFeatPix))
+            if (projMap.find(prevLandmark.GetID()) != projMap.end())
             {
+                Vector2d prevFeatPix = projMap.at(prevLandmark.GetID());
                 cv::KeyPoint kpt = prevFeat.GetKeypoint();
                 kpt.pt = cv::Point2f(prevFeatPix(0), prevFeatPix(1));
                 Vector2d curPix;
@@ -176,14 +201,29 @@ void MatchingModule::Update(std::unique_ptr<data::Frame> &frame)
             }
         }
     }
+    map<pair<int, int>, int> maxCorr;
+    #pragma omp parallel for
+    for (auto it2 = landmarks_.begin(); it2 < landmarks_.end(); it2++)
+    {
+        data::Landmark &prevLandmark = *it2;
+        const data::Feature &prevFeat = prevLandmark.GetObservations()[0];
+        Vector2d prevFeatPix;
+        if (frames_.back()->GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(frames_.back()->GetInversePose(), prevLandmark.GetGroundTruth())), prevFeatPix))
+        {
+            #pragma omp critical
+            {
+                maxCorr[{prevFeat.GetFrame().GetID(), frames_.back()->GetID()}]++;
+            }
+        }
+    }
     map<pair<int, int>, int> numCorr;
     map<pair<int, int>, int> numNeg;
     map<pair<int, int>, vector<pair<double, double>>> rocCurves;
     map<pair<int, int>, vector<pair<double, double>>> prCurves;
     for (auto &corrPrevPair : goodCorrPrev)
     {
-        set<data::Landmark*> &corrPrev = corrPrevPair.second;
-        set<data::Landmark*> &corrCur = goodCorrCur[corrPrevPair.first];
+        unordered_set<data::Landmark*> &corrPrev = corrPrevPair.second;
+        unordered_set<data::Landmark*> &corrCur = goodCorrCur[corrPrevPair.first];
         numCorr[corrPrevPair.first] = min(corrPrev.size(), corrCur.size());
         numNeg[corrPrevPair.first] = featuresInId[corrPrevPair.first.first] * curLandmarks.size() - numCorr[corrPrevPair.first];
         priority_queue<double> &goodDist = goodDists[corrPrevPair.first];
@@ -225,7 +265,7 @@ void MatchingModule::Update(std::unique_ptr<data::Frame> &frame)
     for (auto &statPair : numGoodMatches)
     {
         int frameDiff = frameIdToNum_[statPair.first.second] - frameIdToNum_[statPair.first.first];
-        stats_.frameMatchStats.emplace_back(vector<double>{(double)frameDiff, (double)statPair.second, (double)statPair.second / numMatches[statPair.first], (double)statPair.second / numCorr[statPair.first]});
+        stats_.frameMatchStats.emplace_back(vector<double>{(double)frameDiff, (double)statPair.second, (double)statPair.second / numMatches[statPair.first], (double)statPair.second / numCorr[statPair.first], (double)numCorr[statPair.first] / maxCorr[statPair.first]});
         for (pair<double, double> &roc : rocCurves[statPair.first])
         {
             stats_.rocCurves.emplace_back(vector<double>{(double)frameIdToNum_[statPair.first.first], (double)frameIdToNum_[statPair.first.second], roc.first, roc.second});
diff --git a/src/module/matching_module.h b/src/module/matching_module.h
index 5735ca3..0d390f6 100644
--- a/src/module/matching_module.h
+++ b/src/module/matching_module.h
@@ -27,6 +27,8 @@ class MatchingModule
         std::vector<std::vector<double>> radialOverlapsErrors;
         std::vector<std::vector<double>> goodRadialDistances;
         std::vector<std::vector<double>> badRadialDistances;
+        std::vector<std::vector<double>> goodDeltaBearings;
+        std::vector<std::vector<double>> badDeltaBearings;
     };
 
     MatchingModule(std::unique_ptr<feature::Detector> &detector, std::unique_ptr<feature::Matcher> &matcher, double overlap_thresh = 0.5, double dist_thresh = 10.);
@@ -63,7 +65,7 @@ class MatchingModule
     double distThresh_;
 
     int frameNum_{0};
-    std::map<int, int> frameIdToNum_;
+    std::unordered_map<int, int> frameIdToNum_;
 
     Visualization visualization_;
     Stats stats_;
diff --git a/src/odometry/five_point.cc b/src/odometry/five_point.cc
index 429e3ea..4bc4610 100644
--- a/src/odometry/five_point.cc
+++ b/src/odometry/five_point.cc
@@ -149,7 +149,7 @@ int FivePoint::ERANSAC(const std::vector<Vector3d> &x1, const std::vector<Vector
         std::random_device rd;
         std::mt19937 eng(rd());
         std::uniform_int_distribution<> distr(0, x1.size() - 1);
-        std::set<int> randset;
+        std::unordered_set<int> randset;
         while (randset.size() < 5)
         {
             randset.insert(distr(eng));
@@ -328,7 +328,7 @@ int FivePoint::TranslationRANSAC(const std::vector<Vector3d> &xs, const std::vec
     std::random_device rd;
     std::mt19937 eng(rd());
     std::uniform_int_distribution<> distr(0, xs.size() - 1);
-    std::set<int> randset;
+    std::unordered_set<int> randset;
     while (randset.size() < std::min(transIterations_, (int)xs.size()))
     {
         randset.insert(distr(eng));
diff --git a/src/odometry/pnp.cc b/src/odometry/pnp.cc
index 88b3de7..372963e 100644
--- a/src/odometry/pnp.cc
+++ b/src/odometry/pnp.cc
@@ -97,7 +97,7 @@ int PNP::RANSAC(const std::vector<Vector3d> &xs, const std::vector<Vector3d> &ys
         std::random_device rd;
         std::mt19937 eng(rd());
         std::uniform_int_distribution<> distr(0, xs.size() - 1);
-        std::set<int> randset;
+        std::unordered_set<int> randset;
         while (randset.size() < 4)
         {
             randset.insert(distr(eng));
diff --git a/src/ros/eval_base.cc b/src/ros/eval_base.cc
index 51d13e4..8e450e6 100644
--- a/src/ros/eval_base.cc
+++ b/src/ros/eval_base.cc
@@ -27,10 +27,12 @@ EvalBase<false>::EvalBase(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &
     nhp_.param("image_topic", imageTopic_, std::string("/camera/image_raw"));
     nhp_.param("depth_image_topic", depthImageTopic_, std::string("/depth_camera/image_raw"));
     nhp_.param("pose_topic", poseTopic_, std::string("/pose"));
+    nhp_.param("vignette", vignette_, 0.0);
+    nhp_.param("vignette_expansion", vignetteExpansion_, 0.01);
 
     if (cameraModel == "double_sphere")
     {
-        cameraModel_.reset(new camera::DoubleSphere<>(cameraParams_["fx"], cameraParams_["fy"], cameraParams_["cx"], cameraParams_["cy"], cameraParams_["chi"], cameraParams_["alpha"]));
+        cameraModel_.reset(new camera::DoubleSphere<>(cameraParams_["fx"], cameraParams_["fy"], cameraParams_["cx"], cameraParams_["cy"], cameraParams_["chi"], cameraParams_["alpha"], vignette_));
     }
     else if (cameraModel == "perspective")
     {
@@ -64,11 +66,13 @@ EvalBase<true>::EvalBase(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &n
     Quaterniond q(stereoR[3], stereoR[0], stereoR[1], stereoR[2]);
     Vector3d t(stereoT[0], stereoT[1], stereoT[2]);
     stereoPose_ = util::TFUtil::QuaternionTranslationToPoseMatrix(q, t);
+    nhp_.param("vignette", vignette_, 0.0);
+    nhp_.param("vignette_expansion", vignetteExpansion_, 0.01);
 
     if (cameraModel == "double_sphere")
     {
-        cameraModel_.reset(new camera::DoubleSphere<>(cameraParams_["fx"], cameraParams_["fy"], cameraParams_["cx"], cameraParams_["cy"], cameraParams_["chi"], cameraParams_["alpha"]));
-        stereoCameraModel_.reset(new camera::DoubleSphere<>(stereoCameraParams["fx"], stereoCameraParams["fy"], stereoCameraParams["cx"], stereoCameraParams["cy"], stereoCameraParams["chi"], stereoCameraParams["alpha"]));
+        cameraModel_.reset(new camera::DoubleSphere<>(cameraParams_["fx"], cameraParams_["fy"], cameraParams_["cx"], cameraParams_["cy"], cameraParams_["chi"], cameraParams_["alpha"], vignette_));
+        stereoCameraModel_.reset(new camera::DoubleSphere<>(stereoCameraParams["fx"], stereoCameraParams["fy"], stereoCameraParams["cx"], stereoCameraParams["cy"], stereoCameraParams["chi"], stereoCameraParams["alpha"], vignette_));
     }
     else if (cameraModel == "perspective")
     {
@@ -129,6 +133,13 @@ void EvalBase<Stereo>::FrameCallback(const sensor_msgs::ImageConstPtr &image, co
     Quaterniond q(pose->pose.orientation.w, pose->pose.orientation.x, pose->pose.orientation.y, pose->pose.orientation.z);
     Vector3d t(pose->pose.position.x, pose->pose.position.y, pose->pose.position.z);
     Matrix<double, 3, 4> posemat = util::TFUtil::QuaternionTranslationToPoseMatrix(q, t);
+    if (vignette_ > 0)
+    {
+        cv::Mat mask = cv::Mat::zeros(cvImage->image.size(), CV_32FC3);
+        cv::circle(mask, cv::Point2f(cvImage->image.cols / 2 - 0.5, cvImage->image.rows / 2 - 0.5), std::max(cvImage->image.rows, cvImage->image.cols) * (vignette_ + vignetteExpansion_) / 2, cv::Scalar::all(1), -1);
+        cv::GaussianBlur(mask, mask, cv::Size(51, 51), 0);
+        cv::multiply(cvImage->image, mask, cvImage->image, 1, CV_8UC3);
+    }
     cv::Mat monoImg;
     cv::cvtColor(cvImage->image, monoImg, CV_BGR2GRAY);
     cv::Mat depthFloatImg;
@@ -184,6 +195,14 @@ void EvalBase<Stereo>::FrameCallback(const sensor_msgs::ImageConstPtr &image, co
     Quaterniond q(pose->pose.orientation.w, pose->pose.orientation.x, pose->pose.orientation.y, pose->pose.orientation.z);
     Vector3d t(pose->pose.position.x, pose->pose.position.y, pose->pose.position.z);
     Matrix<double, 3, 4> posemat = util::TFUtil::QuaternionTranslationToPoseMatrix(q, t);
+    if (vignette_ > 0)
+    {
+        cv::Mat mask = cv::Mat::zeros(cvImage->image.size(), CV_32FC3);
+        cv::circle(mask, cv::Point2f(cvImage->image.cols / 2 - 0.5, cvImage->image.rows / 2 - 0.5), std::max(cvImage->image.rows, cvImage->image.cols) * (vignette_ + vignetteExpansion_) / 2, cv::Scalar::all(1), -1);
+        cv::GaussianBlur(mask, mask, cv::Size(51, 51), 0);
+        cv::multiply(cvImage->image, mask, cvImage->image, 1, CV_8UC3);
+        cv::multiply(cvStereoImage->image, mask, cvStereoImage->image, 1, CV_8UC3);
+    }
     cv::Mat monoImg;
     cv::cvtColor(cvImage->image, monoImg, CV_BGR2GRAY);
     cv::Mat monoImg2;
diff --git a/src/ros/eval_base.h b/src/ros/eval_base.h
index 62524f9..99c7b31 100644
--- a/src/ros/eval_base.h
+++ b/src/ros/eval_base.h
@@ -68,6 +68,8 @@ class EvalBase
     Matrix<double, 3, 4> stereoPose_;
     std::unique_ptr<camera::CameraModel<>> cameraModel_;
     std::unique_ptr<camera::CameraModel<>> stereoCameraModel_;
+    double vignette_{0.0};
+    double vignetteExpansion_{0.0};
 
     bool first_{true};
 };