summaryrefslogtreecommitdiff
path: root/models/hpm.py
diff options
context:
space:
mode:
authorJordan Gong <jordan.gong@protonmail.com>2021-02-19 22:39:49 +0800
committerJordan Gong <jordan.gong@protonmail.com>2021-02-19 22:39:49 +0800
commitd12dd6b04a4e7c2b1ee43ab6f36f25d0c35ca364 (patch)
tree71b5209ce4b5cfb1d09b89fe133028bbfa481dc9 /models/hpm.py
parent4aa9044122878a8e2b887a8b170c036983431559 (diff)
New branch with auto-encoder only
Diffstat (limited to 'models/hpm.py')
-rw-r--r--models/hpm.py55
1 file changed, 0 insertions, 55 deletions
diff --git a/models/hpm.py b/models/hpm.py
deleted file mode 100644
index 9879cfb..0000000
--- a/models/hpm.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import torch
-import torch.nn as nn
-
-from models.layers import HorizontalPyramidPooling
-
-
class HorizontalPyramidMatching(nn.Module):
    """Horizontal Pyramid Matching (HPM) head.

    Splits a feature map into horizontal strips at several pyramid scales
    and pools each strip with its own ``HorizontalPyramidPooling`` block,
    producing one flattened feature vector per strip.

    Args:
        in_channels: number of channels of the incoming feature map.
        out_channels: channels produced by each pooling block.
        use_1x1conv: forwarded to each pooling block.
        scales: strip counts per pyramid level, e.g. (1, 2, 4).
        use_avg_pool: forwarded to each pooling block.
        use_max_pool: forwarded to each pooling block.
        **kwargs: extra options forwarded to ``HorizontalPyramidPooling``.
    """

    def __init__(
            self,
            in_channels: int,
            out_channels: int = 128,
            use_1x1conv: bool = False,
            scales: tuple[int, ...] = (1, 2, 4),
            use_avg_pool: bool = True,
            use_max_pool: bool = False,
            **kwargs
    ):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.use_1x1conv = use_1x1conv
        self.scales = scales
        self.use_avg_pool = use_avg_pool
        self.use_max_pool = use_max_pool

        # One ModuleList per scale; level with scale k holds k pooling blocks.
        self.pyramids = nn.ModuleList([
            self._make_pyramid(scale, **kwargs) for scale in self.scales
        ])

    def _make_pyramid(self, scale: int, **kwargs):
        """Build the ``scale`` pooling blocks of one pyramid level."""
        return nn.ModuleList([
            HorizontalPyramidPooling(self.in_channels,
                                     self.out_channels,
                                     use_1x1conv=self.use_1x1conv,
                                     use_avg_pool=self.use_avg_pool,
                                     use_max_pool=self.use_max_pool,
                                     **kwargs)
            for _ in range(scale)
        ])

    def forward(self, x):
        """Pool every horizontal strip at every scale.

        Args:
            x: feature map of shape (n, c, h, w). Assumes h is divisible
                by each scale — TODO confirm against callers; a remainder
                of rows at the bottom would be silently dropped.

        Returns:
            Tensor of shape (total_strips, n, features) stacking the
            flattened output of each strip across all scales.
        """
        n, _, h, _ = x.size()
        feature = []
        for scale, pyramid in zip(self.scales, self.pyramids):
            h_per_hpp = h // scale
            for hpp_index, hpp in enumerate(pyramid):
                # Contiguous slicing yields a zero-copy view, unlike the
                # torch.arange fancy-indexing it replaces, which allocated
                # an index tensor and gather-copied the strip every call.
                start = hpp_index * h_per_hpp
                x_slice = x[:, :, start:start + h_per_hpp, :]
                feature.append(hpp(x_slice).view(n, -1))
        return torch.stack(feature)