From 86b8e5b3ecb75abe2ba9a4dd008b10b8b20b8817 Mon Sep 17 00:00:00 2001 From: Aayush Garg Date: Wed, 11 May 2022 13:32:14 +0530 Subject: [PATCH] Update Readme --- README.md | 23 +++++++++++++++++++++++ pl-hydra/configs/train.yaml | 4 +++- 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7716f3e..b2f23d9 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,30 @@ This repo provides different pytorch implementation for training a deep learning ``` ## Single-GPU implementation +``` +usage: train_simple.py [-h] --run_name RUN_NAME [--random_seed RANDOM_SEED] + [-et EPOCHS_PER_TEST] [-ep EPOCHS] [-bs BATCH_SIZE] + [-w NUM_WORKERS] [--learning_rate LEARNING_RATE] + [--weight_decay WEIGHT_DECAY] [--momentum MOMENTUM] + [--gamma GAMMA] +optional arguments: + -h, --help show this help message and exit + --run_name RUN_NAME + --random_seed RANDOM_SEED + -et EPOCHS_PER_TEST, --epochs_per_test EPOCHS_PER_TEST + Number of epochs per test/val + -ep EPOCHS, --epochs EPOCHS + Total number of training epochs to perform. + -bs BATCH_SIZE, --batch_size BATCH_SIZE + -w NUM_WORKERS, --num_workers NUM_WORKERS + --learning_rate LEARNING_RATE + The initial learning rate for SGD. + --weight_decay WEIGHT_DECAY + Weight decay if we apply some. + --momentum MOMENTUM Momentum value in SGD. + --gamma GAMMA gamma value for MultiStepLR. +``` ## Multi-GPU implementation ## Pytorch-lightning implementation diff --git a/pl-hydra/configs/train.yaml b/pl-hydra/configs/train.yaml index 1632f5c..d9f03d7 100644 --- a/pl-hydra/configs/train.yaml +++ b/pl-hydra/configs/train.yaml @@ -68,4 +68,6 @@ train: True test: True # seed for random number generators in pytorch, numpy and python.random -seed: 100 \ No newline at end of file +seed: 100 + +