diff options
author | Jordan Gong <jordan.gong@protonmail.com> | 2021-01-21 23:44:34 +0800 |
---|---|---|
committer | Jordan Gong <jordan.gong@protonmail.com> | 2021-01-21 23:44:34 +0800 |
commit | 0b76205ecef02dd62ef2fbc8e12d9389b7cf7868 (patch) | |
tree | 71927f5efe2dc3228f49326a89e2536785aa2eb4 /config.py | |
parent | 8572f5c8292e5798912ad54764c9d3a99afb49ec (diff) | |
parent | 04c9d3210ff659bbe00dedb2d193a748e7a97b54 (diff) |
Merge branch 'master' into python3.8
# Conflicts:
# utils/configuration.py
Diffstat (limited to 'config.py')
-rw-r--r-- | config.py | 16 |
1 file changed, 13 insertions, 3 deletions
@@ -33,7 +33,7 @@ config: Configuration = {
     # Batch size (pr, k)
     # `pr` denotes number of persons
     # `k` denotes number of sequences per person
-    'batch_size': (8, 16),
+    'batch_size': (4, 8),
     # Number of workers of Dataloader
     'num_workers': 4,
     # Faster data transfer from RAM to GPU if enabled
@@ -69,13 +69,23 @@ config: Configuration = {
         'triplet_margin': 0.2,
     },
     'optimizer': {
+        # Global parameters
         # Initial learning rate of Adam Optimizer
         'lr': 1e-4,
         # Coefficients used for computing running averages of
         # gradient and its square
-        'betas': (0.9, 0.999),
+        # 'betas': (0.9, 0.999),
+        # Term added to the denominator
+        # 'eps': 1e-8,
         # Weight decay (L2 penalty)
-        'weight_decay': 0.001,
+        # 'weight_decay': 0,
+        # Use AMSGrad or not
+        # 'amsgrad': False,
+
+        # Local parameters (override global ones)
+        'auto_encoder': {
+            'weight_decay': 0.001
+        },
     },
     'scheduler': {
         # Period of learning rate decay