summaryrefslogtreecommitdiff
path: root/config.py
diff options
context:
space:
mode:
authorJordan Gong <jordan.gong@protonmail.com>2021-02-26 20:21:17 +0800
committerJordan Gong <jordan.gong@protonmail.com>2021-02-26 20:21:17 +0800
commit5f75d7ef65f6dcd0e72df320c58b6bd141937b5f (patch)
tree5649ab28c25dd34d35da815cdbc0e146569373d6 /config.py
parent850c525772969823eef6083e8018ac43a1e87e4c (diff)
parent9001f7e13d8985b220bd218d8de716bc586dbdcf (diff)
Merge branch 'master' into data_parallel
# Conflicts:
#   models/model.py
Diffstat (limited to 'config.py')
-rw-r--r--config.py24
1 file changed, 8 insertions, 16 deletions
diff --git a/config.py b/config.py
index 424bf5b..03f2f0d 100644
--- a/config.py
+++ b/config.py
@@ -37,7 +37,7 @@ config: Configuration = {
# Batch size (pr, k)
# `pr` denotes number of persons
# `k` denotes number of sequences per person
- 'batch_size': (4, 8),
+ 'batch_size': (4, 6),
# Number of workers of Dataloader
'num_workers': 4,
# Faster data transfer from RAM to GPU if enabled
@@ -49,22 +49,14 @@ config: Configuration = {
# Auto-encoder feature channels coefficient
'ae_feature_channels': 64,
# Appearance, canonical and pose feature dimensions
- 'f_a_c_p_dims': (128, 128, 64),
+ 'f_a_c_p_dims': (192, 192, 96),
# Use 1x1 convolution in dimensionality reduction
'hpm_use_1x1conv': False,
# HPM pyramid scales, of which sum is number of parts
'hpm_scales': (1, 2, 4),
# Global pooling method
'hpm_use_avg_pool': True,
- 'hpm_use_max_pool': False,
- # FConv feature channels coefficient
- 'fpfe_feature_channels': 32,
- # FConv blocks kernel sizes
- 'fpfe_kernel_sizes': ((5, 3), (3, 3), (3, 3)),
- # FConv blocks paddings
- 'fpfe_paddings': ((2, 1), (1, 1), (1, 1)),
- # FConv blocks halving
- 'fpfe_halving': (0, 2, 3),
+ 'hpm_use_max_pool': True,
# Attention squeeze ratio
'tfa_squeeze_ratio': 4,
# Number of parts after Part Net
@@ -72,7 +64,7 @@ config: Configuration = {
# Embedding dimension for each part
'embedding_dims': 256,
# Triplet loss margins for HPM and PartNet
- 'triplet_margins': (0.2, 0.2),
+ 'triplet_margins': (1.5, 1.5),
},
'optimizer': {
# Global parameters
@@ -91,15 +83,15 @@ config: Configuration = {
# 'amsgrad': False,
# Local parameters (override global ones)
- 'auto_encoder': {
- 'weight_decay': 0.001
- },
+ # 'auto_encoder': {
+ # 'weight_decay': 0.001
+ # },
},
'scheduler': {
# Period of learning rate decay
'step_size': 500,
# Multiplicative factor of decay
- 'gamma': 0.9,
+ 'gamma': 1,
}
},
# Model metadata