diff options
author    Jordan Gong <jordan.gong@protonmail.com>  2021-02-28 22:13:43 +0800
committer Jordan Gong <jordan.gong@protonmail.com>  2021-02-28 22:13:43 +0800
commit    c96a6c88fa63d62ec62807abf957c9a8df307b43 (patch)
tree      d20b3ade49d7b16a253a23766988890b5ce59a68
parent    bbd89fda7f6bf30c9ce4a3b576c0087858b407b3 (diff)
Modify default parameters
1. Change ReLU to Leaky ReLU in decoder
2. Add 8-scale-pyramid in HPM
 config.py        | 2 +-
 models/layers.py | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/config.py b/config.py
--- a/config.py
+++ b/config.py
@@ -53,7 +53,7 @@ config: Configuration = {
     # Use 1x1 convolution in dimensionality reduction
     'hpm_use_1x1conv': False,
     # HPM pyramid scales, of which sum is number of parts
-    'hpm_scales': (1, 2, 4),
+    'hpm_scales': (1, 2, 4, 8),
     # Global pooling method
     'hpm_use_avg_pool': True,
     'hpm_use_max_pool': True,
diff --git a/models/layers.py b/models/layers.py
index ef53a95..f1d72b6 100644
--- a/models/layers.py
+++ b/models/layers.py
@@ -80,7 +80,9 @@ class DCGANConvTranspose2d(BasicConvTranspose2d):
         if self.is_last_layer:
             return self.trans_conv(x)
         else:
-            return super().forward(x)
+            x = self.trans_conv(x)
+            x = self.bn(x)
+            return F.leaky_relu(x, 0.2, inplace=True)


 class BasicLinear(nn.Module):