summary refs log tree commit diff
path: root/config.py
diff options
context:
space:
mode:
author Jordan Gong <jordan.gong@protonmail.com> 2021-01-21 23:47:52 +0800
committer Jordan Gong <jordan.gong@protonmail.com> 2021-01-21 23:47:52 +0800
commit c0c8299354bd41bfd668ff0fb3edb5997f590c5d (patch)
tree 25502d76a218a71ad5c2d51001876b08d734ee47 /config.py
parent 42847b721a99350e1eed423dce99574c584d97ef (diff)
parent d750dd9dafe3cda3b1331ad2bfecb53c8c2b1267 (diff)
Merge branch 'python3.8' into python3.7
# Conflicts:
#	utils/configuration.py
Diffstat (limited to 'config.py')
-rw-r--r-- config.py 16
1 file changed, 13 insertions(+), 3 deletions(-)
diff --git a/config.py b/config.py
index f8e3711..9d73a3a 100644
--- a/config.py
+++ b/config.py
@@ -31,7 +31,7 @@ config = {
# Batch size (pr, k)
# `pr` denotes number of persons
# `k` denotes number of sequences per person
- 'batch_size': (8, 16),
+ 'batch_size': (4, 8),
# Number of workers of Dataloader
'num_workers': 4,
# Faster data transfer from RAM to GPU if enabled
@@ -67,13 +67,23 @@ config = {
'triplet_margin': 0.2,
},
'optimizer': {
+ # Global parameters
# Initial learning rate of Adam Optimizer
'lr': 1e-4,
# Coefficients used for computing running averages of
# gradient and its square
- 'betas': (0.9, 0.999),
+ # 'betas': (0.9, 0.999),
+ # Term added to the denominator
+ # 'eps': 1e-8,
# Weight decay (L2 penalty)
- 'weight_decay': 0.001,
+ # 'weight_decay': 0,
+ # Use AMSGrad or not
+ # 'amsgrad': False,
+
+ # Local parameters (override global ones)
+ 'auto_encoder': {
+ 'weight_decay': 0.001
+ },
},
'scheduler': {
# Period of learning rate decay