depth_loader.py
import os

import torch

from datasets import PBR, Make3d, ImageFiles, NYU2

# all dataset paths below are resolved under the current user's home directory
home = os.path.expanduser("~")


# PBR training loader for the JPEG renderings with the 'mlt' postfix.
def pbrmlt_train_loader(opt):
    img_dir = '%s/data/datasets/depth_dataset/PBR/images-jpg' % home
    gt_dir = '%s/data/datasets/depth_dataset/PBR/depth' % home
    train_split_file = '%s/data/datasets/depth_dataset/PBR/train.txt' % home
    train_loader = torch.utils.data.DataLoader(
        PBR(img_dir, gt_dir, train_split_file,
            crop=0.9, flip=True, rotate=None, size=opt.imageSize, img_format='jpg', postfix='mlt',
            mean=opt.mean, std=opt.std, training=True),
        batch_size=opt.batchSize, shuffle=True, num_workers=4, pin_memory=True)
    return train_loader


# PBR validation loader for the 'mlt' renderings; also returns the ground-truth
# depth directory so callers can evaluate against it.
def pbrmlt_val_loader(opt):
    img_dir = '%s/data/datasets/depth_dataset/PBR/images-jpg' % home
    gt_dir = '%s/data/datasets/depth_dataset/PBR/depth' % home
    val_split_file = '%s/data/datasets/depth_dataset/PBR/test.txt' % home
    val_loader = torch.utils.data.DataLoader(
        PBR(img_dir, gt_dir, val_split_file,
            crop=None, flip=False, rotate=None, size=opt.imageSize, img_format='jpg', postfix='mlt',
            mean=opt.mean, std=opt.std, training=False),
        batch_size=opt.batchSize, shuffle=False, num_workers=4, pin_memory=True)
    return val_loader, gt_dir


# PBR training loader for the 'color' renderings.
def pbr_train_loader(opt):
    img_dir = '%s/data/datasets/depth_dataset/PBR/images' % home
    gt_dir = '%s/data/datasets/depth_dataset/PBR/depth' % home
    train_split_file = '%s/data/datasets/depth_dataset/PBR/train.txt' % home
    train_loader = torch.utils.data.DataLoader(
        PBR(img_dir, gt_dir, train_split_file,
            crop=0.9, flip=True, rotate=None, size=opt.imageSize, img_format='jpg', postfix='color',
            mean=opt.mean, std=opt.std, training=True),
        batch_size=opt.batchSize, shuffle=True, num_workers=4, pin_memory=True)
    return train_loader


# PBR validation loader for the 'color' renderings; returns the loader and the
# ground-truth depth directory.
def pbr_val_loader(opt):
    img_dir = '%s/data/datasets/depth_dataset/PBR/images' % home
    gt_dir = '%s/data/datasets/depth_dataset/PBR/depth' % home
    val_split_file = '%s/data/datasets/depth_dataset/PBR/test.txt' % home
    val_loader = torch.utils.data.DataLoader(
        PBR(img_dir, gt_dir, val_split_file,
            crop=None, flip=False, rotate=None, size=opt.imageSize, img_format='jpg', postfix='color',
            mean=opt.mean, std=opt.std, training=False),
        batch_size=opt.batchSize, shuffle=False, num_workers=4, pin_memory=True)
    return val_loader, gt_dir


# NYU Depth v2 training loader; the train/test split is read from split.npz.
def nyu2_train_loader(opt):
    img_dir = '%s/data/datasets/depth_dataset/NYU2/data/image' % home
    gt_dir = '%s/data/datasets/depth_dataset/NYU2/data/depth' % home
    split_file = '%s/data/datasets/depth_dataset/NYU2/data/split.npz' % home
    train_loader = torch.utils.data.DataLoader(
        NYU2(img_dir, gt_dir, split_file,
             crop=0.9, flip=True, rotate=None, size=opt.imageSize,
             mean=opt.mean, std=opt.std, training=True),
        batch_size=opt.batchSize, shuffle=True, num_workers=4, pin_memory=True)
    return train_loader


# NYU Depth v2 validation loader; returns the loader and the ground-truth
# depth directory.
def nyu2_val_loader(opt):
    img_dir = '%s/data/datasets/depth_dataset/NYU2/data/image' % home
    gt_dir = '%s/data/datasets/depth_dataset/NYU2/data/depth' % home
    split_file = '%s/data/datasets/depth_dataset/NYU2/data/split.npz' % home
    val_loader = torch.utils.data.DataLoader(
        NYU2(img_dir, gt_dir, split_file,
             crop=None, flip=False, rotate=None, size=opt.imageSize,
             mean=opt.mean, std=opt.std, training=False),
        batch_size=opt.batchSize, shuffle=False, num_workers=4, pin_memory=True)
    return val_loader, gt_dir


# Make3D training loader (Train400Img / Train400Depth pairs).
def make3d_train_loader(opt):
    train_img_dir = '%s/data/datasets/depth_dataset/make3d/Train400Img' % home
    train_gt_dir = '%s/data/datasets/depth_dataset/make3d/Train400Depth' % home
    train_loader = torch.utils.data.DataLoader(
        Make3d(train_img_dir, train_gt_dir,
               crop=0.9, flip=True, rotate=None, size=opt.imageSize,
               mean=opt.mean, std=opt.std, training=True),
        batch_size=opt.batchSize, shuffle=True, num_workers=4, pin_memory=True)
    return train_loader


# Make3D validation loader; test images are loaded via ImageFiles (no paired
# ground truth inside the loader), and the depth directory is returned
# separately. Note that, unlike the other validation loaders, this one keeps
# shuffle=True.
def make3d_val_loader(opt):
    val_img_dir = '%s/data/datasets/depth_dataset/make3d/Test134' % home
    val_gt_dir = '%s/data/datasets/depth_dataset/make3d/depth' % home
    val_loader = torch.utils.data.DataLoader(
        ImageFiles(val_img_dir, crop=None, flip=False,
                   mean=opt.mean, std=opt.std),
        # Make3d(val_img_dir, val_gt_dir,
        #        crop=0.9, flip=True, rotate=None, size=opt.imageSize,
        #        mean=opt.mean, std=opt.std, training=False),
        batch_size=opt.batchSize, shuffle=True, num_workers=4, pin_memory=True)
    return val_loader, val_gt_dir
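

# A minimal usage sketch: the loaders above only read opt.imageSize,
# opt.batchSize, opt.mean and opt.std, so any object exposing those attributes
# works. The SimpleNamespace and the values below are illustrative placeholders,
# not the project's real option parser or defaults.
if __name__ == '__main__':
    from types import SimpleNamespace

    opt = SimpleNamespace(
        imageSize=256,                   # assumed crop/resize target
        batchSize=8,                     # assumed batch size
        mean=[0.485, 0.456, 0.406],      # assumed ImageNet normalization stats
        std=[0.229, 0.224, 0.225],
    )

    loader = nyu2_train_loader(opt)
    for i, batch in enumerate(loader):
        # inspect only the first batch; the exact structure of `batch` is
        # whatever the NYU2 dataset yields (typically image/depth tensors)
        print(i, [t.shape for t in batch if torch.is_tensor(t)])
        break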