config.py
import os

# dir paths used in several places
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
LABELS_DIR = os.path.join(ROOT_DIR, 'labels')
DATA_DIR = os.path.join(ROOT_DIR, 'data')
SAVED_MODELS_DIR = os.path.join(ROOT_DIR, 'saved_models')
PRETRAINED_DIR = os.path.join(SAVED_MODELS_DIR, 'pretrained')
EVALUATIONS_DIR = os.path.join(ROOT_DIR, 'evaluation')
LOGS_DIR = os.path.join(ROOT_DIR, 'logs')
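
# Usage sketch: the directories above are only derived paths, so a hypothetical
# helper like ensure_dirs_exist() could create them on a fresh checkout before
# anything is written into them; nothing else in this file depends on it.
def ensure_dirs_exist():
    """Create every configured directory if it does not exist yet."""
    for path in (LABELS_DIR, DATA_DIR, SAVED_MODELS_DIR,
                 PRETRAINED_DIR, EVALUATIONS_DIR, LOGS_DIR):
        os.makedirs(path, exist_ok=True)
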
# seeds for the five runs
SEEDS = {
    "1": 3,
    "2": 4,
    "3": 7,
    "4": 13,
    "5": 21,
}
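
# Usage sketch: the hypothetical set_seed() helper maps a run id ("1" to "5")
# to its fixed seed. It only seeds Python's stdlib RNG; any framework RNGs
# actually used for training (e.g. NumPy, PyTorch) would need seeding as well.
def set_seed(run_id):
    """Seed the stdlib RNG for the given run id."""
    import random  # local import keeps the config module dependency-free
    random.seed(SEEDS[run_id])
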
# FSL configuration
FSL_CONFIG = {
    'training': {
        'n_way': 10,  # number of labels for each task/episode
        'k_shot': 3,  # number of support items per label
        'n_query': 3,  # number of query items per label
        'n_task': 50,  # number of tasks (episodes) per epoch
        'n_valid_labels': 5,  # number of labels to be used for validation
        'n_valid_tasks': 20,  # number of tasks during validation
        'epochs': 200,  # total number of epochs
        'lr': 1e-5,  # learning rate while training from scratch
        'finetuning_lr': 1e-6,  # learning rate while fine-tuning a pre-trained model
        # number of consecutive epochs with a worse validation metric before early stopping is activated
        'early_stopping_patience': 20
    }
}
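
# Episode-sampling sketch showing one way the training settings above could be
# used: each task draws n_way labels, then k_shot support and n_query query
# items per label. sample_episode() and its label_to_items argument (a mapping
# from label to a list of items) are hypothetical and not defined elsewhere here.
def sample_episode(label_to_items, cfg=None):
    """Return (support, query) lists of (item, label) pairs for one episode."""
    import random  # local import keeps the config module dependency-free
    cfg = cfg or FSL_CONFIG['training']
    support, query = [], []
    for label in random.sample(sorted(label_to_items), cfg['n_way']):
        items = random.sample(label_to_items[label], cfg['k_shot'] + cfg['n_query'])
        support += [(item, label) for item in items[:cfg['k_shot']]]
        query += [(item, label) for item in items[cfg['k_shot']:]]
    return support, query
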
DATASETS = [
    'magnatagatune',
    'fma',
    'lyra',
    'makam',
    'hindustani',
    'carnatic'
]

MODELS_CONFIG = {
    'vgg_ish': {
        'input_length_in_secs': 3.69,
        'final_layer': 'dense2',
        'penultimate_layer': 'dense1'
    },
}

# audio and mel-spectrogram attributes
SPECTROGRAMS_ATTRIBUTES = {
    'audio_sr': 16000,
    'n_fft': 512,
    'hop_length': 256,
    'f_min': 0.0,
    'f_max': 8000.0,
    'n_mels': 128,
}
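
# Feature-extraction sketch showing how these attributes could map onto a
# log-mel spectrogram computation. It assumes librosa as the audio library,
# which this file does not prescribe; compute_melspectrogram() is a
# hypothetical helper, not part of the rest of the codebase.
def compute_melspectrogram(audio_path, attrs=SPECTROGRAMS_ATTRIBUTES):
    """Load audio at the configured rate and return a log-scaled mel spectrogram."""
    import librosa  # local import keeps the config importable without librosa
    y, sr = librosa.load(audio_path, sr=attrs['audio_sr'])
    mel = librosa.feature.melspectrogram(
        y=y,
        sr=sr,
        n_fft=attrs['n_fft'],
        hop_length=attrs['hop_length'],
        fmin=attrs['f_min'],
        fmax=attrs['f_max'],
        n_mels=attrs['n_mels'],
    )
    return librosa.power_to_db(mel)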