author     Jordan Gong <jordan.gong@protonmail.com>  2020-12-23 19:21:29 +0800
committer  Jordan Gong <jordan.gong@protonmail.com>  2020-12-23 19:21:29 +0800
commit     fa745aa38edce88f5d92e7f7242a111c3d892161 (patch)
tree       2decfb5ef1327b8def2bf5b2f5a06593cc3b8f46 /models/layers.py
parent     96f345d25237c7e616ea5f524a2fc2d340ed8aff (diff)
Remove redundant Leaky ReLU in FocalConv2d
Diffstat (limited to 'models/layers.py')
-rw-r--r--  models/layers.py  3
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/models/layers.py b/models/layers.py
index e737df2..a0e35f0 100644
--- a/models/layers.py
+++ b/models/layers.py
@@ -2,7 +2,6 @@ from typing import Union, Tuple
 
 import torch
 import torch.nn as nn
-import torch.nn.functional as F
 
 
 class FocalConv2d(nn.Module):
@@ -24,4 +23,4 @@ class FocalConv2d(nn.Module):
         split_size = h // 2 ** self.halving
         z = x.split(split_size, dim=2)
         z = torch.cat([self.conv(_) for _ in z], dim=2)
-        return F.leaky_relu(z, inplace=True)
+        return z
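
For context, a minimal sketch of FocalConv2d as it stands after this commit. Only the forward body shown in the hunks, self.halving, and self.conv are confirmed by the diff; the constructor signature and the h = x.size(2) line are assumptions inferred from usage. The module splits the input feature map into 2**halving horizontal strips, applies one shared convolution to each strip, and concatenates the results; with the leaky ReLU removed, any activation is left to the caller.

from typing import Tuple, Union

import torch
import torch.nn as nn


class FocalConv2d(nn.Module):
    """Focal convolution: split the input into 2**halving horizontal
    strips and apply a shared Conv2d to each strip.

    Sketch reconstructed from the diff; everything outside the forward
    body shown in the hunks is an assumption.
    """

    def __init__(self, in_channels: int, out_channels: int,
                 kernel_size: Union[int, Tuple[int, int]],
                 halving: int, **kwargs):
        super().__init__()
        self.halving = halving  # number of binary splits along height
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, **kwargs)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        h = x.size(2)  # assumed: height of an (N, C, H, W) input
        # Each strip spans h // 2**halving rows of the feature map
        split_size = h // 2 ** self.halving
        z = x.split(split_size, dim=2)
        z = torch.cat([self.conv(_) for _ in z], dim=2)
        # Leaky ReLU removed by this commit; activation is the caller's job
        return z

Returning the raw convolution output avoids applying a nonlinearity twice when the surrounding block already ends in one, which is presumably why the commit message calls the leaky ReLU redundant.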