diff options
author | Jordan Gong <jordan.gong@protonmail.com> | 2021-02-26 20:23:26 +0800 |
---|---|---|
committer | Jordan Gong <jordan.gong@protonmail.com> | 2021-02-26 20:23:57 +0800 |
commit | 8745ffeb51b29cfe7d3eee2d845910086ba8b046 (patch) | |
tree | 4eccf715f0a17c0d54c75d38b54e1d682dc26b99 /config.py | |
parent | 9b1828be1db7fd1be8731a7cec66162de9145285 (diff) | |
parent | 5f75d7ef65f6dcd0e72df320c58b6bd141937b5f (diff) |
Merge branch 'data_parallel' into data_parallel_py3.8
# Conflicts:
# models/part_net.py
# models/rgb_part_net.py
Diffstat (limited to 'config.py')
-rw-r--r-- | config.py | 24 |
1 file changed, 8 insertions, 16 deletions
@@ -37,7 +37,7 @@ config: Configuration = { # Batch size (pr, k) # `pr` denotes number of persons # `k` denotes number of sequences per person - 'batch_size': (4, 8), + 'batch_size': (4, 6), # Number of workers of Dataloader 'num_workers': 4, # Faster data transfer from RAM to GPU if enabled @@ -49,22 +49,14 @@ config: Configuration = { # Auto-encoder feature channels coefficient 'ae_feature_channels': 64, # Appearance, canonical and pose feature dimensions - 'f_a_c_p_dims': (128, 128, 64), + 'f_a_c_p_dims': (192, 192, 96), # Use 1x1 convolution in dimensionality reduction 'hpm_use_1x1conv': False, # HPM pyramid scales, of which sum is number of parts 'hpm_scales': (1, 2, 4), # Global pooling method 'hpm_use_avg_pool': True, - 'hpm_use_max_pool': False, - # FConv feature channels coefficient - 'fpfe_feature_channels': 32, - # FConv blocks kernel sizes - 'fpfe_kernel_sizes': ((5, 3), (3, 3), (3, 3)), - # FConv blocks paddings - 'fpfe_paddings': ((2, 1), (1, 1), (1, 1)), - # FConv blocks halving - 'fpfe_halving': (0, 2, 3), + 'hpm_use_max_pool': True, # Attention squeeze ratio 'tfa_squeeze_ratio': 4, # Number of parts after Part Net @@ -72,7 +64,7 @@ config: Configuration = { # Embedding dimension for each part 'embedding_dims': 256, # Triplet loss margins for HPM and PartNet - 'triplet_margins': (0.2, 0.2), + 'triplet_margins': (1.5, 1.5), }, 'optimizer': { # Global parameters @@ -91,15 +83,15 @@ config: Configuration = { # 'amsgrad': False, # Local parameters (override global ones) - 'auto_encoder': { - 'weight_decay': 0.001 - }, + # 'auto_encoder': { + # 'weight_decay': 0.001 + # }, }, 'scheduler': { # Period of learning rate decay 'step_size': 500, # Multiplicative factor of decay - 'gamma': 0.9, + 'gamma': 1, } }, # Model metadata |