diff --git a/create_submission.py b/create_submission.py
index be56167..07531be 100644
--- a/create_submission.py
+++ b/create_submission.py
@@ -8,7 +8,7 @@
 if kaggle:
     os.system('pip install /kaggle/input/segmentation-models/pretrainedmodels-0.7.4/ > /dev/null')
     os.system('pip install /kaggle/input/segmentation-models/segmentation_models.pytorch/ > /dev/null')
-    package_path = 'kaggle/input/sources' # add unet script dataset
+    package_path = '/kaggle/input/sources' # add unet script dataset
     import sys
     sys.path.append(package_path)
     from datasets.steel_dataset import TestDataset
@@ -82,9 +82,9 @@ def create_submission(n_splits, model_name, batch_size, num_workers, mean, std,
 
     n_splits = [1] # [0, 1, 2, 3, 4]
     if kaggle:
-        sample_submission_path = 'kaggel/input/severstal-steel-defect-detection/sample_submission.csv'
-        test_data_folder = "kaggle/input/severstal-steel-defect-detection/test_images"
-        model_path = 'kaggle/input/checkpoints'
+        sample_submission_path = '/kaggle/input/severstal-steel-defect-detection/sample_submission.csv'
+        test_data_folder = "/kaggle/input/severstal-steel-defect-detection/test_images"
+        model_path = '/kaggle/input/checkpoints'
     else:
         sample_submission_path = 'datasets/Steel_data/sample_submission.csv'
         test_data_folder = 'datasets/Steel_data/test_images'
diff --git a/datasets/steel_dataset.py b/datasets/steel_dataset.py
index 0fdfc10..ece6f98 100644
--- a/datasets/steel_dataset.py
+++ b/datasets/steel_dataset.py
@@ -18,7 +18,6 @@
 from utils.data_augmentation import data_augmentation
 from utils.rle_parse import mask2rle, make_mask
 from utils.visualize import image_with_mask_torch
-import pickle
 
 warnings.filterwarnings("ignore")
 
diff --git a/uploads.sh b/uploads.sh
index 9e7d34c..b093847 100644
--- a/uploads.sh
+++ b/uploads.sh
@@ -10,6 +10,7 @@ if [ ! -d "kaggle" ]; then
     mkdir -p kaggle/sources
     mkdir -p kaggle/sources/models
     mkdir -p kaggle/sources/datasets
+    mkdir -p kaggle/sources/utils
     mkdir -p kaggle/checkpoints
     mkdir -p kaggle/submission
     mkdir -p kaggle/segmentation_models
@@ -28,6 +29,9 @@ cp models/model.py kaggle/sources/models
 cp datasets/steel_dataset.py kaggle/sources/datasets
 cp solver.py kaggle/sources
 cp classify_segment.py kaggle/sources
+cp utils/data_augmentation.py kaggle/sources/utils
+cp utils/rle_parse.py kaggle/sources/utils
+cp utils/visualize.py kaggle/sources/utils
 
 # Copy the weight files
 cp checkpoints/$model_name/*_best.pth kaggle/checkpoints
diff --git a/utils/data_augmentation.py b/utils/data_augmentation.py
index 9d10346..09f14cf 100644
--- a/utils/data_augmentation.py
+++ b/utils/data_augmentation.py
@@ -1,11 +1,6 @@
-import numpy as np
 import cv2
-import random
-import glob
 from matplotlib import pyplot as plt
-from PIL import Image
 import pandas as pd
-from tqdm import tqdm
 import sys
 import os
 from copy import deepcopy
diff --git a/utils/rle_parse.py b/utils/rle_parse.py
index 4bb2568..9f35d1e 100644
--- a/utils/rle_parse.py
+++ b/utils/rle_parse.py
@@ -1,5 +1,4 @@
 import numpy as np
-import pandas as pd
 
 
 # https://www.kaggle.com/paulorzp/rle-functions-run-lenght-encode-decode
diff --git a/utils/visualize.py b/utils/visualize.py
index be764cf..a0621a2 100644
--- a/utils/visualize.py
+++ b/utils/visualize.py
@@ -1,7 +1,5 @@
 # Visualization operations
-import cv2
 import torch
-from PIL import Image
 import numpy as np
 