author     Jordan Gong <jordan.gong@protonmail.com>  2021-03-12 13:59:18 +0800
committer  Jordan Gong <jordan.gong@protonmail.com>  2021-03-12 13:59:18 +0800
commit     2a7d3c04eab1f3c2e5306d1597399582229a87e5 (patch)
tree       060bbd3d0b9d1f3823219225097fb4d74eb311fe /config.py
parent     39fb3e19601aaccd572ea023b117543b9d791b56 (diff)
parent     d63b267dd15388dd323d9b8672cdb9461b96c885 (diff)
Merge branch 'python3.8' into python3.7
# Conflicts:
#	utils/configuration.py
Diffstat (limited to 'config.py')
-rw-r--r--  config.py  21
1 file changed, 12 insertions(+), 9 deletions(-)
diff --git a/config.py b/config.py
index 4f1b787..f68ba2e 100644
--- a/config.py
+++ b/config.py
@@ -48,19 +48,17 @@ config = {
'ae_feature_channels': 64,
# Appearance, canonical and pose feature dimensions
'f_a_c_p_dims': (192, 192, 96),
- # Use 1x1 convolution in dimensionality reduction
- 'hpm_use_1x1conv': False,
# HPM pyramid scales, of which sum is number of parts
'hpm_scales': (1, 2, 4, 8),
# Global pooling method
'hpm_use_avg_pool': True,
'hpm_use_max_pool': True,
- # Attention squeeze ratio
- 'tfa_squeeze_ratio': 4,
# Number of parts after Part Net
'tfa_num_parts': 16,
- # Embedding dimension for each part
- 'embedding_dims': 256,
+ # Attention squeeze ratio
+ 'tfa_squeeze_ratio': 4,
+ # Embedding dimensions for each part
+ 'embedding_dims': (256, 256),
# Batch Hard or Batch All
'triplet_is_hard': True,
# Use non-zero mean or sum
@@ -89,9 +87,14 @@ config = {
},
'scheduler': {
# Step start to decay
- 'start_step': 15_000,
+ 'start_step': 500,
# Multiplicative factor of decay in the end
- 'final_gamma': 0.001,
+ 'final_gamma': 0.01,
+
+ # Local parameters (override global ones)
+ 'hpm': {
+ 'final_gamma': 0.001
+ }
}
},
# Model metadata
@@ -105,6 +108,6 @@ config = {
# Restoration iteration (multiple models, e.g. nm, bg and cl)
'restore_iters': (0, 0, 0),
# Total iteration for training (multiple models)
- 'total_iters': (25_000, 25_000, 25_000),
+ 'total_iters': (30_000, 40_000, 60_000),
},
}
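
Below is a minimal, hypothetical sketch (not code from this repository) of one way the updated scheduler block could be consumed. It assumes the trainer lets a sub-module's local entry (here 'hpm') override the global 'start_step' and 'final_gamma', and builds one learning-rate lambda per optimizer parameter group with PyTorch's LambdaLR. The make_lr_lambda helper, the module names, and the stand-in Linear layers are illustrative assumptions, not the project's actual trainer code.

import torch
from torch.optim.lr_scheduler import LambdaLR

# Values copied from the diff above; 'hpm' is the local override block
sched_cfg = {
    'start_step': 500,
    'final_gamma': 0.01,
    'hpm': {'final_gamma': 0.001},
}
total_iters = 30_000  # first entry of 'total_iters' in the diff (assumed here)

def make_lr_lambda(cfg, module_key, total_steps):
    # Local keys (if present) shadow the global scheduler values
    merged = {k: v for k, v in cfg.items() if not isinstance(v, dict)}
    merged.update(cfg.get(module_key, {}))
    start, final_gamma = merged['start_step'], merged['final_gamma']
    # Per-step factor so the LR multiplier reaches final_gamma at total_steps
    gamma = final_gamma ** (1 / (total_steps - start))
    return lambda step: gamma ** max(0, step - start)

# Stand-in sub-modules; the real model's HPM and PartNet branches would go here
model = torch.nn.ModuleDict({'hpm': torch.nn.Linear(8, 8),
                             'tfa': torch.nn.Linear(8, 8)})
optimizer = torch.optim.Adam([
    {'params': model['hpm'].parameters()},
    {'params': model['tfa'].parameters()},
], lr=1e-4)
scheduler = LambdaLR(optimizer, lr_lambda=[
    make_lr_lambda(sched_cfg, 'hpm', total_iters),  # local final_gamma = 0.001
    make_lr_lambda(sched_cfg, 'tfa', total_iters),  # global final_gamma = 0.01
])

Under these assumptions, both groups hold their initial LR for the first 500 steps, then decay exponentially so the 'hpm' group ends at 0.001 of its initial LR while the other group ends at 0.01.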