diff --git a/.gitignore b/.gitignore
old mode 100644
new mode 100755
diff --git a/NUSCENES-GUIDE.md b/NUSCENES-GUIDE.md
old mode 100644
new mode 100755
diff --git a/README.md b/README.md
old mode 100644
new mode 100755
diff --git a/builder/__init__.py b/builder/__init__.py
old mode 100644
new mode 100755
diff --git a/builder/__pycache__/__init__.cpython-37.pyc b/builder/__pycache__/__init__.cpython-37.pyc
new file mode 100755
index 0000000..d698592
Binary files /dev/null and b/builder/__pycache__/__init__.cpython-37.pyc differ
diff --git a/builder/__pycache__/data_builder.cpython-37.pyc b/builder/__pycache__/data_builder.cpython-37.pyc
new file mode 100755
index 0000000..9a32e44
Binary files /dev/null and b/builder/__pycache__/data_builder.cpython-37.pyc differ
diff --git a/builder/__pycache__/loss_builder.cpython-37.pyc b/builder/__pycache__/loss_builder.cpython-37.pyc
new file mode 100755
index 0000000..674cd4a
Binary files /dev/null and b/builder/__pycache__/loss_builder.cpython-37.pyc differ
diff --git a/builder/__pycache__/model_builder.cpython-37.pyc b/builder/__pycache__/model_builder.cpython-37.pyc
new file mode 100755
index 0000000..e03743e
Binary files /dev/null and b/builder/__pycache__/model_builder.cpython-37.pyc differ
diff --git a/builder/data_builder.py b/builder/data_builder.py
old mode 100644
new mode 100755
index 127437b..ee388f2
--- a/builder/data_builder.py
+++ b/builder/data_builder.py
@@ -30,7 +30,8 @@ def build(dataset_config,
                                 return_ref=train_ref, label_mapping=label_mapping, nusc=nusc)
     val_pt_dataset = SemKITTI(data_path, imageset=val_imageset,
                               return_ref=val_ref, label_mapping=label_mapping, nusc=nusc)
-
+    #import pdb
+    #pdb.set_trace()
     train_dataset = get_model_class(dataset_config['dataset_type'])(
         train_pt_dataset,
         grid_size=grid_size,
diff --git a/builder/loss_builder.py b/builder/loss_builder.py
old mode 100644
new mode 100755
diff --git a/builder/model_builder.py b/builder/model_builder.py
old mode 100644
new mode 100755
diff --git a/config/__init__.py b/config/__init__.py
old mode 100644
new mode 100755
diff --git a/config/__pycache__/__init__.cpython-37.pyc b/config/__pycache__/__init__.cpython-37.pyc
old mode 100644
new mode 100755
index 5ce9ba9..0bfaea0
Binary files a/config/__pycache__/__init__.cpython-37.pyc and b/config/__pycache__/__init__.cpython-37.pyc differ
diff --git a/config/__pycache__/config.cpython-37.pyc b/config/__pycache__/config.cpython-37.pyc
old mode 100644
new mode 100755
index 867ceb4..9ffeca1
Binary files a/config/__pycache__/config.cpython-37.pyc and b/config/__pycache__/config.cpython-37.pyc differ
diff --git a/config/config.py b/config/config.py
old mode 100644
new mode 100755
diff --git a/config/label_mapping/.semantic-kitti.yaml.swp b/config/label_mapping/.semantic-kitti.yaml.swp
new file mode 100755
index 0000000..3f769de
Binary files /dev/null and b/config/label_mapping/.semantic-kitti.yaml.swp differ
diff --git a/config/label_mapping/nuscenes.yaml b/config/label_mapping/nuscenes.yaml
old mode 100644
new mode 100755
diff --git a/config/label_mapping/semantic-kitti-multiscan.yaml b/config/label_mapping/semantic-kitti-multiscan.yaml
old mode 100644
new mode 100755
diff --git a/config/label_mapping/semantic-kitti.yaml b/config/label_mapping/semantic-kitti.yaml
old mode 100644
new mode 100755
index 6281065..f3c4c9b
--- a/config/label_mapping/semantic-kitti.yaml
+++ b/config/label_mapping/semantic-kitti.yaml
@@ -185,18 +185,19 @@ learning_ignore: # Ignore classes
   19: False     # "traffic-sign"
 split: # sequence numbers
   train:
-    - 0
-    - 1
-    - 2
+#    - 0
+#    - 1
+#    - 2
     - 3
     - 4
-    - 5
-    - 6
-    - 7
+#    - 5
+#    - 6
+#    - 7
     - 9
     - 10
   valid:
-    - 8
+#    - 8
+    - 22
   test:
     - 11
     - 12
diff --git a/config/nuScenes.yaml b/config/nuScenes.yaml
old mode 100644
new mode 100755
diff --git a/config/semantickitti.yaml b/config/semantickitti.yaml
old mode 100644
new mode 100755
index da71e5c..f7e0f2a
--- a/config/semantickitti.yaml
+++ b/config/semantickitti.yaml
@@ -41,7 +41,7 @@ dataset_params:
 ###################
 ## Data_loader options
 train_data_loader:
-  data_path: "/data/dataset/semantic_kitti/data_semkitti/dataset/sequences/"
+  data_path: "/lustre/home/acct-stu/stu010/sequences/"
   imageset: "train"
   return_ref: True
   batch_size: 2
@@ -49,7 +49,7 @@ train_data_loader:
   num_workers: 4
 
 val_data_loader:
-  data_path: "/data/dataset/semantic_kitti/data_semkitti/dataset/sequences/"
+  data_path: "/lustre/home/acct-stu/stu010/sequences/"
   imageset: "val"
   return_ref: True
   batch_size: 1
@@ -62,7 +62,7 @@ val_data_loader:
 train_params:
   model_load_path: "./model_load_dir/model_load.pt"
   model_save_path: "./model_save_dir/model_save.pt"
-  checkpoint_every_n_steps: 4599
+  checkpoint_every_n_steps: 1932
   max_num_epochs: 40
-  eval_every_n_steps: 4599
+  eval_every_n_steps: 1932
   learning_rate: 0.001
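
(Aside, not part of the diff: the new checkpoint/eval interval above appears to be one epoch over the reduced train split configured in config/label_mapping/semantic-kitti.yaml at batch_size 2. The per-sequence scan counts in this sketch are an assumption about standard SemanticKITTI sizes, not values taken from this diff.)

# Hypothetical sanity check for checkpoint_every_n_steps / eval_every_n_steps.
scans_per_seq = {3: 801, 4: 271, 9: 1591, 10: 1201}   # assumed scan counts for sequences 03/04/09/10
batch_size = 2                                        # train_data_loader.batch_size above
steps_per_epoch = sum(scans_per_seq.values()) // batch_size
print(steps_per_epoch)                                # 3864 // 2 = 1932
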
diff --git a/dataloader/__init__.py b/dataloader/__init__.py
old mode 100644
new mode 100755
diff --git a/dataloader/__pycache__/__init__.cpython-37.pyc b/dataloader/__pycache__/__init__.cpython-37.pyc
new file mode 100755
index 0000000..7901899
Binary files /dev/null and b/dataloader/__pycache__/__init__.cpython-37.pyc differ
diff --git a/dataloader/__pycache__/dataset_nuscenes.cpython-37.pyc b/dataloader/__pycache__/dataset_nuscenes.cpython-37.pyc
new file mode 100755
index 0000000..07e073a
Binary files /dev/null and b/dataloader/__pycache__/dataset_nuscenes.cpython-37.pyc differ
diff --git a/dataloader/__pycache__/dataset_nuscenes.nb_process_label-137.py37m.1.nbc b/dataloader/__pycache__/dataset_nuscenes.nb_process_label-137.py37m.1.nbc
new file mode 100755
index 0000000..20c0d7d
Binary files /dev/null and b/dataloader/__pycache__/dataset_nuscenes.nb_process_label-137.py37m.1.nbc differ
diff --git a/dataloader/__pycache__/dataset_nuscenes.nb_process_label-137.py37m.2.nbc b/dataloader/__pycache__/dataset_nuscenes.nb_process_label-137.py37m.2.nbc
new file mode 100755
index 0000000..5d35946
Binary files /dev/null and b/dataloader/__pycache__/dataset_nuscenes.nb_process_label-137.py37m.2.nbc differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-379.py37m.1.nbc b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-379.py37m.1.nbc
new file mode 100755
index 0000000..0090930
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-379.py37m.1.nbc differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-379.py37m.2.nbc b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-379.py37m.2.nbc
new file mode 100755
index 0000000..004d55e
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-379.py37m.2.nbc differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-391.py37m.1.nbc b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-391.py37m.1.nbc
new file mode 100755
index 0000000..87f0ba0
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-391.py37m.1.nbc differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-391.py37m.nbi b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-391.py37m.nbi
new file mode 100755
index 0000000..153bd5c
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-391.py37m.nbi differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-392.py37m.1.nbc b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-392.py37m.1.nbc
new file mode 100755
index 0000000..6f1b147
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-392.py37m.1.nbc differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-392.py37m.nbi b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-392.py37m.nbi
new file mode 100755
index 0000000..1fdaf67
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-392.py37m.nbi differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-405.py37m.1.nbc b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-405.py37m.1.nbc
new file mode 100755
index 0000000..aeca317
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-405.py37m.1.nbc differ
diff --git a/dataloader/__pycache__/dataset_semantickitti.nb_process_label-405.py37m.nbi b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-405.py37m.nbi
new file mode 100755
index 0000000..9e133d6
Binary files /dev/null and b/dataloader/__pycache__/dataset_semantickitti.nb_process_label-405.py37m.nbi differ
diff --git a/dataloader/dataset_nuscenes.py b/dataloader/dataset_nuscenes.py
old mode 100644
new mode 100755
diff --git a/dataloader/dataset_semantickitti.py b/dataloader/dataset_semantickitti.py
old mode 100644
new mode 100755
index 75be1e2..53337a5
--- a/dataloader/dataset_semantickitti.py
+++ b/dataloader/dataset_semantickitti.py
@@ -16,6 +16,13 @@
 
 REGISTERED_DATASET_CLASSES = {}
 
+# triple
+def triple_func(x):
+    return x ** (1/3)
+
+# ln
+def ln_func(x):
+    return np.log(x)
 
 def register_dataset(cls, name=None):
     global REGISTERED_DATASET_CLASSES
@@ -244,7 +251,26 @@ def __getitem__(self, index):
         intervals = crop_range / (cur_grid_size - 1)
 
         if (intervals == 0).any(): print("Zero interval!")
-        grid_ind = (np.floor((np.clip(xyz_pol, min_bound, max_bound) - min_bound) / intervals)).astype(np.int)
+        # fixed
+        # grid_ind = (np.floor((np.clip(xyz_pol, min_bound, max_bound) - min_bound) / intervals)).astype(np.int)
+
+        # triple (quadratic interval)
+        #x_clip = np.clip(xyz_pol, min_bound, max_bound)
+        #a = (cur_grid_size[0] - 1) / (triple_func(max_bound[0] - min_bound[0]))
+        #tmp_y = a * triple_func(x_clip[:, 0] - min_bound[0])
+        #grid_ind1 = (np.floor(tmp_y)).astype(np.int)
+        #grid_ind2 = (np.floor((x_clip[:, 1:] - min_bound[1:]) / ((max_bound[1:] - min_bound[1:]) / (cur_grid_size[1:] - 1)))).astype(np.int)
+        #grid_ind = np.concatenate((grid_ind1.reshape((grid_ind1.shape[0], 1)), grid_ind2), axis=1)
+        ##################
+
+        #exponential (exponential interval)
+        x_clip = np.clip(xyz_pol, min_bound, max_bound)
+        a = (cur_grid_size[0] - 1) / (ln_func(max_bound[0] + 1 - min_bound[0]))
+        tmp_y = a * ln_func(x_clip[:, 0] + 1 - min_bound[0])
+        grid_ind1 = (np.floor(tmp_y)).astype(np.int)
+        grid_ind2 = (np.floor((x_clip[:, 1:] - min_bound[1:]) / ((max_bound[1:] - min_bound[1:]) / (cur_grid_size[1:] - 1)))).astype(np.int)
+        grid_ind = np.concatenate((grid_ind1.reshape((grid_ind1.shape[0], 1)), grid_ind2), axis=1)
+        ######################
 
         voxel_position = np.zeros(self.grid_size, dtype=np.float32)
         dim_array = np.ones(len(self.grid_size) + 1, int)
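
(Aside, not part of the diff: a minimal standalone sketch of the radial binning the hunk above switches to, contrasting the original uniform spacing with the new logarithmic spacing. demo_radial_bins and the 0-50 m radial range are illustrative assumptions; the 480 radial bins match the [480 360 32] grid size printed in the training log.)

import numpy as np

def demo_radial_bins(rho, rho_min, rho_max, n_bins):
    # Maps radii to radial grid indices two ways, mirroring the hunk above (sketch only).
    rho = np.clip(rho, rho_min, rho_max)
    # original: uniform interval along the radius
    uni_idx = np.floor((rho - rho_min) / ((rho_max - rho_min) / (n_bins - 1))).astype(np.int64)
    # modified: logarithmic interval -> finer bins near the sensor, coarser bins far away
    a = (n_bins - 1) / np.log(rho_max + 1 - rho_min)
    log_idx = np.floor(a * np.log(rho + 1 - rho_min)).astype(np.int64)
    return uni_idx, log_idx

uni_idx, log_idx = demo_radial_bins(np.array([1.0, 5.0, 20.0, 45.0]), 0.0, 50.0, 480)
print(uni_idx)  # [  9  47 191 431] -> uniform spacing
print(log_idx)  # [ 84 218 370 466] -> log spacing allocates more bins to small radii
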
diff --git a/dataloader/pc_dataset.py b/dataloader/pc_dataset.py
old mode 100644
new mode 100755
diff --git a/demo_folder.py b/demo_folder.py
old mode 100644
new mode 100755
diff --git a/img/leaderboard.png b/img/leaderboard.png
old mode 100644
new mode 100755
diff --git a/img/leaderboard2.png b/img/leaderboard2.png
old mode 100644
new mode 100755
diff --git a/img/pipeline.png b/img/pipeline.png
old mode 100644
new mode 100755
diff --git a/logs_dir/cylinder_asym_networks_logs_tee.txt b/logs_dir/cylinder_asym_networks_logs_tee.txt
new file mode 100755
index 0000000..032bb8e
--- /dev/null
+++ b/logs_dir/cylinder_asym_networks_logs_tee.txt
@@ -0,0 +1,6 @@
+train_cylinder_asym.py
+Namespace(config_path='config/semantickitti.yaml')
+[480 360 32]
+  0%|          | 0/9565 [00:00<?, ?it/s]
+                full_cell_num = np.sum(np.logical_and((unique_cell[:, 0] >= left_y), (unique_cell[:, 0] < right_y)))
+                full_cell_list[i] += full_cell_num
+
+        elif func_type == "triple":
+            x_clip = np.clip(xyz_pol, min_bound, max_bound)
+            a = (cur_grid_size[0] - 1) / (triple_func(max_bound[0] - min_bound[0]))
+            tmp_y = a * triple_func(x_clip[:, 0] - min_bound[0])
+            grid_ind1 = (np.floor(tmp_y)).astype(np.int)
+            grid_ind2 = (np.floor((x_clip[:, 1:] - min_bound[1:]) / ((max_bound[1:] - min_bound[1:]) / (
+                cur_grid_size[1:] - 1)))).astype(np.int)
+            grid_ind = np.concatenate((grid_ind1.reshape((grid_ind1.shape[0], 1)), grid_ind2), axis=1)
+
+            for i in range(5):
+                left_x = i * 10
+                right_x = (i + 1) * 10
+                left_y = np.floor(a * triple_func(left_x))
+                right_y = np.floor(a * triple_func(right_x))
+                unique_cell = np.unique(grid_ind, axis=0)
+                full_cell_num = np.sum(np.logical_and((unique_cell[:, 0] >= left_y), (unique_cell[:, 0] < right_y)))
+                full_cell_list[i] += full_cell_num
+
+        elif func_type == "ln":
+            x_clip = np.clip(xyz_pol, min_bound, max_bound)
+            a = (cur_grid_size[0] - 1) / (ln_func(max_bound[0] + 1 - min_bound[0]))
+            tmp_y = a * ln_func(x_clip[:, 0] + 1 - min_bound[0])
+            grid_ind1 = (np.floor(tmp_y)).astype(np.int)
+            grid_ind2 = (np.floor((x_clip[:, 1:] - min_bound[1:]) / ((max_bound[1:] - min_bound[1:]) / (
+                cur_grid_size[1:] - 1)))).astype(np.int)
+            grid_ind = np.concatenate((grid_ind1.reshape((grid_ind1.shape[0], 1)), grid_ind2), axis=1)
+            for i in range(5):
+                left_x = i * 10
+                right_x = (i + 1) * 10
+                left_y = np.floor(a * ln_func(left_x))
+                right_y = np.floor(a * ln_func(right_x))
+                unique_cell = np.unique(grid_ind, axis=0)
+                full_cell_num = np.sum(np.logical_and((unique_cell[:, 0] >= left_y), (unique_cell[:, 0] < right_y)))
+                full_cell_list[i] += full_cell_num
+
+        elif func_type == "original":
+            intervals = crop_range / (cur_grid_size - 1)
+            if (intervals == 0).any(): print("Zero interval!")
+            x_clip = np.clip(xyz_pol, min_bound, max_bound)
+            grid_ind = (np.floor((x_clip - min_bound) / intervals)).astype(np.int)
+
+            for i in range(5):
+                left_x = i * 10
+                right_x = (i + 1) * 10
+                left_y = np.floor(left_x / intervals[0])
+                right_y = np.floor(right_x / intervals[0])
+                unique_cell = np.unique(grid_ind, axis=0)
+                full_cell_num = np.sum(
+                    np.logical_and((unique_cell[:, 0] >= left_y), (unique_cell[:, 0] < right_y)))
+                full_cell_list[i] += full_cell_num
+
+
+    for j in range(5):
+        left_x = j * 10
+        right_x = (j + 1) * 10
+        if func_type == "quadratic":
+            a = (cur_grid_size[0] - 1) / (quadratic_func(max_bound[0] - min_bound[0]))
+            left_y = np.floor(a * quadratic_func(left_x))
+            right_y = np.floor(a * quadratic_func(right_x))
+            total_cell_list[j] = (right_y - left_y) * dim2 * dim3 * total_file_num
+
+        elif func_type == "triple":
+            a = (cur_grid_size[0] - 1) / (triple_func(max_bound[0] - min_bound[0]))
+            left_y = np.floor(a * triple_func(left_x))
+            right_y = np.floor(a * triple_func(right_x))
+            total_cell_list[j] = (right_y - left_y) * dim2 * dim3 * total_file_num
+
+        elif func_type == "ln":
+            a = (cur_grid_size[0] - 1) / (ln_func(max_bound[0] + 1 - min_bound[0]))
+            left_y = np.floor(a * ln_func(left_x + 1))
+            right_y = np.floor(a * ln_func(right_x + 1))
+            total_cell_list[j] = (right_y - left_y) * dim2 * dim3 * total_file_num
+
+        elif func_type == "original":
+            intervals = crop_range / (cur_grid_size - 1)
+            left_y = np.floor((np.clip(left_x, min_bound[0], max_bound[0]) - min_bound[0]) / intervals[0])
+            right_y = np.floor((np.clip(right_x, min_bound[0], max_bound[0]) - min_bound[0]) / intervals[0])
+            total_cell_list[j] = (right_y - left_y) * dim2 * dim3 * total_file_num
+
+        res.append(full_cell_list[j] / total_cell_list[j])
+    print("res:", res)
+
+    with open(func_type + '.txt', 'w') as file:
+        for i in res:
+            file.write(str(i) + '\n')
+
+if __name__ == '__main__':
+    func_type = ["quadratic", "triple", "ln", "original"]
+    cal_cell_num(func_type[2])
diff --git a/train.sh b/train.sh
old mode 100644
new mode 100755
index 8f1c962..d4dd1d6
--- a/train.sh
+++ b/train.sh
@@ -1,5 +1,5 @@
 name=cylinder_asym_networks
 gpuid=0
 
-CUDA_VISIBLE_DEVICES=${gpuid} python -u train_cylinder_asym.py \
-2>&1 | tee logs_dir/${name}_logs_tee.txt
\ No newline at end of file
+CUDA_VISIBLE_DEVICES=${gpuid} python3 -u train_cylinder_asym.py \
+2>&1 | tee logs_dir/${name}_logs_tee.txt
diff --git a/train_cylinder_asym.py b/train_cylinder_asym.py
old mode 100644
new mode 100755
index aa32d89..ecd7e4d
--- a/train_cylinder_asym.py
+++ b/train_cylinder_asym.py
@@ -87,7 +87,6 @@ def main(args):
             with torch.no_grad():
                 for i_iter_val, (_, val_vox_label, val_grid, val_pt_labs, val_pt_fea) in enumerate(
                         val_dataset_loader):
-
                     val_pt_fea_ten = [torch.from_numpy(i).type(torch.FloatTensor).to(pytorch_device) for i in
                                       val_pt_fea]
                     val_grid_ten = [torch.from_numpy(i).to(pytorch_device) for i in val_grid]
diff --git a/train_cylinder_asym_nuscenes.py b/train_cylinder_asym_nuscenes.py
old mode 100644
new mode 100755
diff --git a/train_nusc.sh b/train_nusc.sh
old mode 100644
new mode 100755
diff --git a/utils/__init__.py b/utils/__init__.py
old mode 100644
new mode 100755
diff --git a/utils/__pycache__/__init__.cpython-37.pyc b/utils/__pycache__/__init__.cpython-37.pyc
new file mode 100755
index 0000000..7a3c1b1
Binary files /dev/null and b/utils/__pycache__/__init__.cpython-37.pyc differ
diff --git a/utils/__pycache__/load_save_util.cpython-37.pyc b/utils/__pycache__/load_save_util.cpython-37.pyc
new file mode 100755
index 0000000..a696c47
Binary files /dev/null and b/utils/__pycache__/load_save_util.cpython-37.pyc differ
diff --git a/utils/__pycache__/lovasz_losses.cpython-37.pyc b/utils/__pycache__/lovasz_losses.cpython-37.pyc
new file mode 100755
index 0000000..3d34d00
Binary files /dev/null and b/utils/__pycache__/lovasz_losses.cpython-37.pyc differ
diff --git a/utils/__pycache__/metric_util.cpython-37.pyc b/utils/__pycache__/metric_util.cpython-37.pyc
new file mode 100755
index 0000000..4a02b11
Binary files /dev/null and b/utils/__pycache__/metric_util.cpython-37.pyc differ
diff --git a/utils/load_save_util.py b/utils/load_save_util.py
old mode 100644
new mode 100755
diff --git a/utils/log_util.py b/utils/log_util.py
old mode 100644
new mode 100755
diff --git a/utils/lovasz_losses.py b/utils/lovasz_losses.py
old mode 100644
new mode 100755
diff --git a/utils/metric_util.py b/utils/metric_util.py
old mode 100644
new mode 100755
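
(Aside, not part of the diff: the per-band statistics script added above accumulates, for each 10 m radial band, the number of occupied voxels and divides by the number of voxels the band contains. Below is a condensed single-scan sketch of that ratio; band_occupancy and its argument names are illustrative, not the script's own.)

import numpy as np

def band_occupancy(grid_ind, band_edges, n_angular, n_height):
    # grid_ind   : (N, 3) integer voxel indices (radial, angular, height) of one scan's points.
    # band_edges : radial-index boundaries of the bands, i.e. where the 10 m distance marks
    #              land under the chosen binning function.
    occupied = np.unique(grid_ind, axis=0)            # count each occupied voxel once
    ratios = []
    for lo, hi in zip(band_edges[:-1], band_edges[1:]):
        in_band = np.sum((occupied[:, 0] >= lo) & (occupied[:, 0] < hi))
        total = (hi - lo) * n_angular * n_height      # all voxels the band contains
        ratios.append(in_band / total)
    return ratios

# e.g. with random indices on the [480, 360, 32] grid and evenly spaced band edges:
rng = np.random.default_rng(0)
demo_ind = np.stack([rng.integers(0, 480, 10000),
                     rng.integers(0, 360, 10000),
                     rng.integers(0, 32, 10000)], axis=1)
print(band_occupancy(demo_ind, [0, 96, 192, 288, 384, 480], 360, 32))
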