forked from acannistra/planet-snowcover
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy paths3-new-train.toml
55 lines (40 loc) · 1.31 KB
/
s3-new-train.toml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
# RoboSat.pink Configuration

[dataset]
# AWS profile used to access the S3 buckets below.
aws_profile = "esip"
# The slippy-map dataset's imagery bucket.
image_bucket = "planet-snowcover-imagery"
# Regex selecting each slippy-map directory, for buckets with many.
# Literal (single-quoted) string so the backslash needs no escaping.
imagery_directory_regex = '2018042\d_.*_tiled'
mask_bucket = "planet-snowcover-snow/ASO_3M_SD_USCAJW_20180423"
# $ anchors the end of the string, so only directories match (no .tif files).
mask_directory_regex = 'ASO_3M_SD_USCAJW_20180423-MASK_02-COG$'
# Fraction of tiles used for training (remainder held out for validation).
train_percent = 0.7

[[classes]]
name = "snow"
color = "white"

# Indicate which dataset sub-directory and bands to take as input.
# You can also add several [[channels]] blocks to compose your input
# tensor; their order is meaningful.
[[channels]]
bands = [1, 2, 3, 4]
mean = [0.485, 0.456, 0.406, 1.0]
std = [0.229, 0.224, 0.225, 1.0]

[model]
# Model name.
name = "albunet"
# Encoder model name.
encoder = "resnet50"
# Use, or not, ImageNet weights pretraining.
pretrained = true
# Loss function name.
loss = "lovasz"
# Batch size for training.
batch_size = 7
# Tile side size in pixels.
tile_size = 512
# Total number of epochs to train for.
epochs = 10
# Learning rate for the optimizer.
lr = 0.000025
# Data augmentation: flip or rotate probability.
data_augmentation = 0.75
# Weight decay (L2 penalty) for the optimizer.
decay = 0.0