diff --git a/launch/matching_eval.launch b/launch/matching_eval.launch
index f830eed..f8c24ca 100644
--- a/launch/matching_eval.launch
+++ b/launch/matching_eval.launch
@@ -1,22 +1,21 @@
-
-
-
+
+
+
-
-
-
+
+
+
-
         detector_type: '$(arg detector_type)'
         detector_parameters: $(arg detector_params)
@@ -27,4 +26,5 @@
         feature_distance_threshold: 10
+
diff --git a/launch/odometry_eval.launch b/launch/odometry_eval.launch
index b47f918..13e8a75 100644
--- a/launch/odometry_eval.launch
+++ b/launch/odometry_eval.launch
@@ -1,14 +1,14 @@
-
+
-
-
-
+
+
+
@@ -18,7 +18,6 @@
-
         detector_type: 'GFTT'
         detector_parameters: {maxCorners: 2000, qualityLevel: 0.001, minDistance: 5, blockSize: 5}
@@ -42,5 +41,6 @@
         bundle_adjustment_num_threads: 20
+
diff --git a/launch/reconstruction_eval.launch b/launch/reconstruction_eval.launch
index 89dcb63..95c20bf 100644
--- a/launch/reconstruction_eval.launch
+++ b/launch/reconstruction_eval.launch
@@ -1,19 +1,18 @@
-
+
-
+
-
         detector_type: 'GFTT'
         detector_parameters: {maxCorners: 5000, qualityLevel: 0.001, minDistance: 5, blockSize: 5}
@@ -35,4 +34,5 @@
         bundle_adjustment_num_threads: 20
+
diff --git a/launch/slam_eval.launch b/launch/slam_eval.launch
index 5f92a9d..8b47e89 100644
--- a/launch/slam_eval.launch
+++ b/launch/slam_eval.launch
@@ -1,15 +1,15 @@
-
+
-
-
-
-
+
+
+
+
@@ -21,7 +21,6 @@
-
         detector_type: 'GFTT'
         detector_parameters: {maxCorners: 5000, qualityLevel: 0.001, minDistance: 5, blockSize: 5}
@@ -52,6 +51,7 @@
         stereo_matcher_epipolar_threshold: 0.008
+
diff --git a/launch/slam_eval_kitti.launch b/launch/slam_eval_kitti.launch
index 3b3422d..ce1933f 100644
--- a/launch/slam_eval_kitti.launch
+++ b/launch/slam_eval_kitti.launch
@@ -1,7 +1,7 @@
-
+
@@ -21,7 +21,6 @@
-
         detector_type: 'GFTT'
         detector_parameters: {maxCorners: 5000, qualityLevel: 0.001, minDistance: 5, blockSize: 5}
@@ -52,6 +51,7 @@
         stereo_matcher_epipolar_threshold: 0.008
+
diff --git a/launch/slam_eval_t265.launch b/launch/slam_eval_t265.launch
index 34c9ab2..9c03fc1 100644
--- a/launch/slam_eval_t265.launch
+++ b/launch/slam_eval_t265.launch
@@ -1,7 +1,7 @@
-
+
@@ -21,7 +21,6 @@
-
         detector_type: 'GFTT'
         detector_parameters: {maxCorners: 5000, qualityLevel: 0.001, minDistance: 5, blockSize: 5}
@@ -52,6 +51,7 @@
         stereo_matcher_epipolar_threshold: 0.008
+
diff --git a/launch/slam_eval_tum.launch b/launch/slam_eval_tum.launch
index 25b3e8a..01f28ba 100644
--- a/launch/slam_eval_tum.launch
+++ b/launch/slam_eval_tum.launch
@@ -1,7 +1,7 @@
-
+
@@ -21,7 +21,6 @@
-
         detector_type: 'ORB'
         detector_parameters: {nfeatures: 1000}
@@ -53,6 +52,7 @@
         stereo_matcher_epipolar_threshold: 0.008
+
diff --git a/launch/stereo_eval.launch b/launch/stereo_eval.launch
index cb5f968..8d43d8a 100644
--- a/launch/stereo_eval.launch
+++ b/launch/stereo_eval.launch
@@ -1,21 +1,18 @@
-
-
-
+
+
-
-
-
-
+
+
+
+
-        camera_model: '$(arg camera_model)'
-        camera_parameters: $(arg camera_params)
         detector_type: 'GFTT'
         detector_parameters: {maxCorners: 1000, qualityLevel: 0.001, minDistance: 5, blockSize: 5}
         stereo_matcher_window_size: 256
@@ -26,6 +23,7 @@
         stereo_tf_r: [0.0, 0.0, 0.0, 1.0]
+
diff --git a/launch/tracking_eval.launch b/launch/tracking_eval.launch
index 2a3f5e5..fa73646 100644
--- a/launch/tracking_eval.launch
+++ b/launch/tracking_eval.launch
@@ -1,17 +1,16 @@
-
+
-
-
-
+
+
+
-
         detector_type: 'GFTT'
         detector_parameters: {maxCorners: 100, qualityLevel: 0.01, minDistance: 5, blockSize: 5}
@@ -27,4 +26,5 @@
         keyframe_interval: 1
+
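Note: the XML elements inside the launch-file hunks above were lost in extraction, so only bare +/- markers and the surviving rosparam lines remain. The visible pattern, together with the script changes below, is that the inline camera_model/camera_parameters rosparams are removed and each launch file instead takes a calibration file, passed by the scripts as camera_file:=. As a rough illustrative sketch only (the field names fx, fy, cx, cy, chi, alpha are taken from the removed camera_params format string; the actual YAML layout consumed by the launch files is not visible in this diff), such a per-FOV file could be read like this:

# Hypothetical sketch, not part of the change: read a per-FOV calibration YAML
# and rebuild the old inline camera_params string from it. The real file layout
# read by the launch files may differ.
import yaml

def camera_params_from_file(path):
    with open(path, 'r') as f:
        calib = yaml.safe_load(f)
    return '{{fx: {fx}, fy: {fy}, cx: {cx}, cy: {cy}, chi: {chi}, alpha: {alpha}}}'.format(**calib)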
diff --git a/scripts/run_matching_eval_set.py b/scripts/run_matching_eval_set.py
index 11a56ff..2fdbf07 100644
--- a/scripts/run_matching_eval_set.py
+++ b/scripts/run_matching_eval_set.py
@@ -6,6 +6,8 @@
 parser = argparse.ArgumentParser(description='Run matching evaluation set')
 parser.add_argument('working_dir', help='working directory for full motion+fov set evaluation or bag file for single detector+descriptor set evaluation')
+parser.add_argument('--motion', type=str, help='motion type for motion set evaluation')
+parser.add_argument('--camera_file', type=str, help='camera calibration file for single evaluation')
 parser.add_argument("--rate", type=int, help='frame rate multiplier')
 args = parser.parse_args()
@@ -23,45 +25,56 @@
 parent.start()
 if os.path.isdir(args.working_dir):
-    print ''
-    print '==========================================='
-    print 'Full motion+FOV dataset matching evaluation'
-    print '==========================================='
+    if args.motion is None:
+        print ''
+        print '==========================================='
+        print 'Full motion+FOV dataset matching evaluation'
+        print '==========================================='
+    else:
+        print ''
+        print '==========================================='
+        print '{} motion dataset matching evaluation'.format(args.motion)
+        print '==========================================='
+    fovs = []
+    for yaml in os.listdir(args.working_dir):
+        if not os.path.isdir(os.path.join(args.working_dir, yaml)) and yaml.endswith('.yaml'):
+            fov = os.path.splitext(os.path.basename(yaml))[0]
+            fovs.append(fov)
+    fovs.sort(key=int)
     for motion in os.listdir(args.working_dir):
         if os.path.isdir(os.path.join(args.working_dir, motion)):
+            if args.motion is not None and motion != args.motion:
+                continue
             bag_dir = os.path.join(args.working_dir, motion)
-            for fov in os.listdir(bag_dir):
-                if os.path.isdir(os.path.join(bag_dir, fov)):
-                    chi, alpha, fx, fy, cx, cy = parse('chi{:f}_alpha{:f}_fx{:f}_fy{:f}_cx{:f}_cy{:f}', fov)
-                    printstr = "Motion type {}, chi={}, alpha={}, fx={}, fy={}, cx={}, cy={}".format(motion, chi, alpha, fx, fy, cx, cy)
-                    print ''
-                    print '-' * len(printstr)
-                    print printstr
-                    print '-' * len(printstr)
-                    fov_dir = os.path.join(bag_dir, fov)
-                    for filename in os.listdir(fov_dir):
-                        if filename.endswith('.bag') and not filename.endswith('.orig.bag'):
-                            bag_file = os.path.abspath(os.path.join(fov_dir, filename))
-                            for det, desc in d_list:
-                                printstr = "Detector+Descriptor {}+{}".format(det, desc)
-                                print ''
-                                print '-' * len(printstr)
-                                print printstr
-                                print '-' * len(printstr)
-                                print ''
-                                sys.argv = ['roslaunch', 'omni_slam_eval', 'matching_eval.launch', 'bag_file:={}'.format(bag_file), 'results_file:={}.{}_{}.matching.hdf5'.format(bag_file, det, desc), 'camera_params:={{fx: {}, fy: {}, cx: {}, cy: {}, chi: {}, alpha: {}}}'.format(fx, fy, cx, cy, chi, alpha), 'detector_type:={}'.format(det), 'descriptor_type:={}'.format(desc), 'detector_params:={}'.format(det_param_map[det]), 'rate:={}'.format(args.rate)]
-                                reload(roslaunch)
-                                roslaunch.main()
+            for fov in fovs:
+                if args.motion is None:
+                    printstr = "Motion type {}, FOV {}".format(motion, fov)
+                else:
+                    printstr = "FOV {}".format(fov)
+                print ''
+                print '-' * len(printstr)
+                print printstr
+                print '-' * len(printstr)
+                fov_file = os.path.join(args.working_dir, fov + '.yaml')
+                for filename in os.listdir(bag_dir):
+                    if filename.endswith('.bag') and not filename.endswith('.orig.bag'):
+                        bag_file = os.path.abspath(os.path.join(bag_dir, filename))
+                        for det, desc in d_list:
+                            printstr = "Detector+Descriptor {}+{}".format(det, desc)
+                            print ''
+                            print '-' * len(printstr)
+                            print printstr
+                            print '-' * len(printstr)
+                            print ''
+                            sys.argv = ['roslaunch', 'omni_slam_eval', 'matching_eval.launch', 'bag_file:={}'.format(bag_file), 'camera_file:={}'.format(fov_file), 'detector_type:={}'.format(det), 'descriptor_type:={}'.format(desc), 'detector_params:={}'.format(det_param_map[det]), 'rate:={}'.format(args.rate)]
+                            reload(roslaunch)
+                            roslaunch.main()
 else:
     print ''
     print '=================================================='
     print 'Single run detector+descriptor matching evaluation'
     print '=================================================='
     bag_dir = os.path.abspath(args.working_dir)
-    par_dir = os.path.basename(os.path.dirname(bag_dir))
-    parsed = parse('chi{:f}_alpha{:f}_fx{:f}_fy{:f}_cx{:f}_cy{:f}', par_dir)
-    if parsed is not None:
-        chi, alpha, fx, fy, cx, cy = parsed
     for det, desc in d_list:
         printstr = "Detector+Descriptor {}+{}".format(det, desc)
         print ''
@@ -69,9 +82,10 @@
         print printstr
         print '-' * len(printstr)
         print ''
-        sys.argv = ['roslaunch', 'omni_slam_eval', 'matching_eval.launch', 'bag_file:={}'.format(bag_dir), 'results_file:={}.{}_{}.matching.hdf5'.format(bag_dir, det, desc), 'detector_type:={}'.format(det), 'descriptor_type:={}'.format(desc), 'detector_params:={}'.format(det_param_map[det]), 'rate:={}'.format(args.rate)]
-        if parsed is not None:
-            sys.argv.append('camera_params:={{fx: {}, fy: {}, cx: {}, cy: {}, chi: {}, alpha: {}}}'.format(fx, fy, cx, cy, chi, alpha))
+        sys.argv = ['roslaunch', 'omni_slam_eval', 'matching_eval.launch', 'bag_file:={}'.format(bag_dir), 'detector_type:={}'.format(det), 'descriptor_type:={}'.format(desc), 'detector_params:={}'.format(det_param_map[det]), 'rate:={}'.format(args.rate)]
+        if args.camera_file is not None:
+            cam_file = os.path.abspath(args.camera_file)
+            sys.argv.append('camera_file:={}'.format(cam_file))
         reload(roslaunch)
         roslaunch.main()
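Note: the dataset layout assumed by the new traversal above is a working directory containing one calibration file per FOV, named <fov>.yaml, next to one subdirectory per motion type holding the bag files; every (motion, FOV, bag) combination is launched once per detector/descriptor pair. A condensed, self-contained restatement of that walk, for readability only (the function name is illustrative, not part of the change):

# Condensed restatement of the dataset walk above.
# Assumed layout: working_dir/<fov>.yaml plus working_dir/<motion>/<bag>.bag;
# '.orig.bag' files are skipped, as in the script.
import os

def iter_eval_runs(working_dir, motion_filter=None):
    fovs = sorted([os.path.splitext(f)[0] for f in os.listdir(working_dir)
                   if f.endswith('.yaml') and not os.path.isdir(os.path.join(working_dir, f))], key=int)
    for motion in os.listdir(working_dir):
        bag_dir = os.path.join(working_dir, motion)
        if not os.path.isdir(bag_dir):
            continue
        if motion_filter is not None and motion != motion_filter:
            continue
        for fov in fovs:
            fov_file = os.path.join(working_dir, fov + '.yaml')
            for filename in os.listdir(bag_dir):
                if filename.endswith('.bag') and not filename.endswith('.orig.bag'):
                    yield motion, fov_file, os.path.abspath(os.path.join(bag_dir, filename))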
diff --git a/scripts/run_tracking_eval_set.py b/scripts/run_tracking_eval_set.py
index c7a6bdb..39a35fe 100644
--- a/scripts/run_tracking_eval_set.py
+++ b/scripts/run_tracking_eval_set.py
@@ -17,25 +17,29 @@
 print '==========================================='
 print 'Full motion+FOV dataset tracking evaluation'
 print '==========================================='
+fovs = []
+for yaml in os.listdir(args.working_dir):
+    if not os.path.isdir(os.path.join(args.working_dir, yaml)) and yaml.endswith('.yaml'):
+        fov = os.path.splitext(os.path.basename(yaml))[0]
+        fovs.append(fov)
+fovs.sort(key=int)
 for motion in os.listdir(args.working_dir):
     if os.path.isdir(os.path.join(args.working_dir, motion)):
         bag_dir = os.path.join(args.working_dir, motion)
-        for fov in os.listdir(bag_dir):
-            if os.path.isdir(os.path.join(bag_dir, fov)):
-                chi, alpha, fx, fy, cx, cy = parse('chi{:f}_alpha{:f}_fx{:f}_fy{:f}_cx{:f}_cy{:f}', fov)
-                printstr = "Motion type {}, chi={}, alpha={}, fx={}, fy={}, cx={}, cy={}".format(motion, chi, alpha, fx, fy, cx, cy)
-                print ''
-                print '-' * len(printstr)
-                print printstr
-                print '-' * len(printstr)
-                print ''
-                fov_dir = os.path.join(bag_dir, fov)
-                for filename in os.listdir(fov_dir):
-                    if filename.endswith('.bag') and not filename.endswith('.orig.bag'):
-                        bag_file = os.path.abspath(os.path.join(fov_dir, filename))
-                        sys.argv = ['roslaunch', 'omni_slam_eval', 'tracking_eval.launch', 'bag_file:={}'.format(bag_file), 'results_file:={}.tracking.hdf5'.format(bag_file), 'camera_params:={{fx: {}, fy: {}, cx: {}, cy: {}, chi: {}, alpha: {}}}'.format(fx, fy, cx, cy, chi, alpha), 'rate:={}'.format(args.rate)]
-                        reload(roslaunch)
-                        roslaunch.main()
+        for fov in fovs:
+            printstr = "Motion type {}, FOV {}".format(motion, fov)
+            print ''
+            print '-' * len(printstr)
+            print printstr
+            print '-' * len(printstr)
+            print ''
+            fov_file = os.path.join(args.working_dir, fov + '.yaml')
+            for filename in os.listdir(bag_dir):
+                if filename.endswith('.bag') and not filename.endswith('.orig.bag'):
+                    bag_file = os.path.abspath(os.path.join(bag_dir, filename))
+                    sys.argv = ['roslaunch', 'omni_slam_eval', 'tracking_eval.launch', 'bag_file:={}'.format(bag_file), 'camera_file:={}'.format(fov_file), 'rate:={}'.format(args.rate)]
+                    reload(roslaunch)
+                    roslaunch.main()
 print ''
 print '==================='
 print 'Evaluation complete'
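Note: the results_file argument is no longer passed on the roslaunch command line. The plot script below matches files ending in '<fov>.tracking.hdf5', which suggests the default results name is now derived inside the launch file from the bag and camera file names. A hypothetical helper illustrating that naming convention (the real default lives in tracking_eval.launch, which is not legible in this diff, so the exact separator and prefix are assumptions):

# Hypothetical sketch of the results naming the plot script expects:
# a per-bag file ending with '<fov>.tracking.hdf5' in the motion directory.
import os

def expected_results_file(bag_file, fov_file):
    fov = os.path.splitext(os.path.basename(fov_file))[0]
    return '{}.{}.tracking.hdf5'.format(bag_file, fov)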
diff --git a/scripts/tracking_eval_plot.py b/scripts/tracking_eval_plot.py
index 0090bda..4c53fc8 100644
--- a/scripts/tracking_eval_plot.py
+++ b/scripts/tracking_eval_plot.py
@@ -107,22 +107,31 @@
     motion_count = 0
     fov_dict = dict()
     last_fov_num = 0
+    fovs = []
+    for yaml in os.listdir(args.results_path):
+        if not os.path.isdir(os.path.join(args.results_path, yaml)) and yaml.endswith('.yaml'):
+            fov = os.path.splitext(os.path.basename(yaml))[0]
+            fovs.append(fov)
+    fovs.sort(key=int)
     for motion in os.listdir(args.results_path):
         if os.path.isdir(os.path.join(args.results_path, motion)):
             bag_dir = os.path.join(args.results_path, motion)
-            motion_count += 1
-            for fov in os.listdir(bag_dir):
-                if os.path.isdir(os.path.join(bag_dir, fov)):
-                    for filename in os.listdir(os.path.join(bag_dir, fov)):
-                        if filename.endswith('.tracking.hdf5'):
-                            results_file = os.path.join(bag_dir, fov, filename)
-                            with h5py.File(results_file, 'r') as f:
-                                attrs = dict(f['attributes'].attrs.items())
-                            fov = int(round(attrs['fov']))
-                            if fov not in fov_dict.keys():
-                                fov_dict[fov] = last_fov_num
-                                last_fov_num += 1
-                            break
+            motion_exists = False
+            for fovstr in fovs:
+                for filename in os.listdir(bag_dir):
+                    if filename.endswith(fovstr + '.tracking.hdf5'):
+                        results_file = os.path.join(bag_dir, filename)
+                        with h5py.File(results_file, 'r') as f:
+                            attrs = dict(f['attributes'].attrs.items())
+                        # fov = int(round(attrs['fov']))
+                        fov = int(fovstr)
+                        if fov not in fov_dict.keys():
+                            fov_dict[fov] = last_fov_num
+                            last_fov_num += 1
+                        motion_exists = True
+                        break
+            if motion_exists:
+                motion_count += 1
     last_fov_num = 0
     for fov in sorted(fov_dict.keys()):
@@ -136,46 +145,48 @@
         if os.path.isdir(os.path.join(args.results_path, motion)):
             df = pandas.DataFrame()
             bag_dir = os.path.join(args.results_path, motion)
-            for fov in os.listdir(bag_dir):
-                if os.path.isdir(os.path.join(bag_dir, fov)):
-                    chi, alpha, fx, fy, cx, cy = parse('chi{:f}_alpha{:f}_fx{:f}_fy{:f}_cx{:f}_cy{:f}', fov)
-                    failures = np.empty(shape=(1,0))
-                    radial_errors = np.empty(shape=(0,2))
-                    track_lengths = np.empty(shape=(1,0))
-                    file_exists = False
-                    for filename in os.listdir(os.path.join(bag_dir, fov)):
-                        if filename.endswith('.tracking.hdf5'):
-                            results_file = os.path.join(bag_dir, fov, filename)
-                            with h5py.File(results_file, 'r') as f:
-                                failures = np.hstack((failures, f['failures'][:]))
-                                radial_errors = np.vstack((radial_errors, f['radial_errors'][:]))
-                                tl = f['track_lengths'][:]
-                                track_lengths = np.hstack((track_lengths, tl[:, tl[0, :] > 0]))
-                                attrs = dict(f['attributes'].attrs.items())
-                                file_exists = True
-                    fov = int(round(attrs['fov']))
-                    if file_exists:
-                        ax = fig1.add_subplot(num_rows, num_cols, motion_inx * num_cols + fov_dict[fov] + 1)
-                        ax.hist([[r for r, e in radial_errors if e <= 5], [r for r, e in radial_errors if 5 < e <= 20], [r for r, e in radial_errors if 20 < e <= 50], [r for r in failures]], bins=[i * 0.05 for i in range(11)], alpha=0.5, label=['<5', '5-20', '20-50', 'Failures'], stacked=False)
-                        ax.legend(loc='best', title='Pixel error', fontsize='x-small')
+            motion_exists = False
+            for fovstr in fovs:
+                failures = np.empty(shape=(1,0))
+                radial_errors = np.empty(shape=(0,2))
+                track_lengths = np.empty(shape=(1,0))
+                file_exists = False
+                for filename in os.listdir(bag_dir):
+                    if filename.endswith(fovstr + '.tracking.hdf5'):
+                        results_file = os.path.join(bag_dir, filename)
+                        with h5py.File(results_file, 'r') as f:
+                            failures = np.hstack((failures, f['failures'][:]))
+                            radial_errors = np.vstack((radial_errors, f['radial_errors'][:]))
+                            tl = f['track_lengths'][:]
+                            track_lengths = np.hstack((track_lengths, tl[:, tl[0, :] > 0]))
+                            attrs = dict(f['attributes'].attrs.items())
+                            file_exists = True
+                            motion_exists = True
+                # fov = int(round(attrs['fov']))
+                fov = int(fovstr)
+                if file_exists:
+                    ax = fig1.add_subplot(num_rows, num_cols, motion_inx * num_cols + fov_dict[fov] + 1)
+                    ax.hist([[r for r, e in radial_errors if e <= 5], [r for r, e in radial_errors if 5 < e <= 20], [r for r, e in radial_errors if 20 < e <= 50], [r for r in failures]], bins=[i * 0.05 for i in range(11)], alpha=0.5, label=['<5', '5-20', '20-50', 'Failures'], stacked=False)
+                    ax.legend(loc='best', title='Pixel error', fontsize='x-small')
-                        if motion_inx == 0:
-                            ax.set_title('FOV {} degrees'.format(fov))
-                        elif motion_inx == num_rows - 1:
-                            ax.set_xlabel('Radial distance')
-                        if fov_dict[fov] == 0:
-                            ax.set_ylabel(motion, size='large')
-                        elif fov_dict[fov] == num_cols - 1:
-                            ax.set_ylabel('Number of tracks')
-                            ax.yaxis.set_label_position("right")
+                    if motion_inx == 0:
+                        ax.set_title('FOV {} degrees'.format(fov))
+                    elif motion_inx == num_rows - 1:
+                        ax.set_xlabel('Radial distance')
+                    if fov_dict[fov] == 0:
+                        ax.set_ylabel(motion, size='large')
+                    elif fov_dict[fov] == num_cols - 1:
+                        ax.set_ylabel('Number of tracks')
+                        ax.yaxis.set_label_position("right")
-                        df = df.append(pandas.DataFrame({'FOV': [fov for i in range(len(track_lengths[0]))], 'Track lifetime (frames)': track_lengths[0, :]}))
-                    else:
-                        print '[WARNING] No results files found in directory {}'.format(os.path.join(bag_dir, fov))
-            ax2 = fig2.add_subplot(num_rows, 1, motion_inx + 1)
-            sns.violinplot(x='FOV', y='Track lifetime (frames)', data=df, ax=ax2, palette="muted", inner='quart')
-            ax2.set_title('Motion {}'.format(motion))
-            motion_inx += 1
+                    df = df.append(pandas.DataFrame({'FOV': [fov for i in range(len(track_lengths[0]))], 'Track lifetime (frames)': track_lengths[0, :]}))
+                else:
+                    print '[WARNING] No results files found in directory {} for FOV {}'.format(os.path.join(bag_dir), fovstr)
+            if motion_exists:
+                ax2 = fig2.add_subplot(num_rows, 1, motion_inx + 1)
+                sns.violinplot(x='FOV', y='Track lifetime (frames)', data=df, ax=ax2, palette="muted", inner='quart')
+                ax2.set_title('Motion {}'.format(motion))
+                motion_inx += 1
     plt.show()
 else:
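Note: the plot script now keys each result on the FOV parsed from the file-name suffix rather than on the 'fov' attribute stored in the HDF5 file (the attribute read is left commented out). A minimal restatement of that per-motion grouping, separated from the plotting code for clarity (the helper name is illustrative only):

# Minimal restatement of the file matching above: group result files in one
# motion directory by the FOV string taken from the calibration file names.
import os
from collections import defaultdict

def group_results_by_fov(bag_dir, fovs):
    groups = defaultdict(list)
    for filename in os.listdir(bag_dir):
        for fovstr in fovs:
            if filename.endswith(fovstr + '.tracking.hdf5'):
                groups[int(fovstr)].append(os.path.join(bag_dir, filename))
                break
    return groups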
diff --git a/src/ros/tracking_eval.cc b/src/ros/tracking_eval.cc
index e144d29..319a16b 100644
--- a/src/ros/tracking_eval.cc
+++ b/src/ros/tracking_eval.cc
@@ -102,6 +102,8 @@ void TrackingEval::ProcessFrame(unique_ptr &&frame)
 {
     trackingModule_->Update(frame);
     trackingModule_->Redetect();
+
+    visualized_ = false;
 }
 template