author     Jordan Gong <jordan.gong@protonmail.com>    2021-02-28 23:12:58 +0800
committer  Jordan Gong <jordan.gong@protonmail.com>    2021-02-28 23:12:58 +0800
commit     0d2b643d7e04eba872e8b1fc9b04478a026bb3b0 (patch)
tree       a53d8aa69e050397d3443a602a42ed700a8e8bb5 /utils
parent     10b1299eb46343e119bc413ba8774dcb1a68595e (diff)
parent     e04f54d0bfc8fc711e53561065d772dae1926b64 (diff)
Merge branch 'python3.8' into python3.7
# Conflicts:
#	utils/configuration.py
Diffstat (limited to 'utils')
-rw-r--r--  utils/triplet_loss.py  46
1 file changed, 31 insertions(+), 15 deletions(-)
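This merge carries the new `is_mean` switch (mean vs. sum reduction over per-part triplet losses) and the flattened pairwise-distance return value from the python3.8 branch into python3.7. A minimal usage sketch, not part of the commit; the (p, n, c) embedding layout and per-sample integer labels are assumptions inferred from forward():

import torch
from utils.triplet_loss import BatchTripletLoss

# Hypothetical shapes: p parts, n samples per batch, c channels per part.
loss_fn = BatchTripletLoss(is_hard=True, is_mean=False, margin=0.2)  # sum reduction
x = torch.randn(16, 32, 256)    # (p, n, c) part-wise embeddings (assumed layout)
y = torch.randint(0, 8, (32,))  # class label for each of the n samples (assumed)
loss_metric, flat_dist, non_zero_counts = loss_fn(x, y)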
diff --git a/utils/triplet_loss.py b/utils/triplet_loss.py
index 22ac2ab..77c7234 100644
--- a/utils/triplet_loss.py
+++ b/utils/triplet_loss.py
@@ -9,15 +9,19 @@ class BatchTripletLoss(nn.Module):
     def __init__(
             self,
             is_hard: bool = True,
+            is_mean: bool = True,
             margin: Optional[float] = 0.2,
     ):
         super().__init__()
         self.is_hard = is_hard
+        self.is_mean = is_mean
         self.margin = margin
 
     def forward(self, x, y):
         p, n, c = x.size()
         dist = self._batch_distance(x)
+        flat_dist = dist.tril(-1)
+        flat_dist = flat_dist[flat_dist != 0].view(p, -1)
 
         if self.is_hard:
             positive_negative_dist = self._hard_distance(dist, y, p, n)
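The two new flat_dist lines keep only the strict lower triangle of the (p, n, n) distance tensor, i.e. each unordered sample pair exactly once per part. An illustrative sketch with toy shapes, not part of the commit:

import torch

p, n = 2, 4
dist = torch.rand(p, n, n)
dist = dist + dist.transpose(1, 2)   # symmetric pairwise distances
dist = dist * (1 - torch.eye(n))     # zero diagonal (self-distances)
flat_dist = dist.tril(-1)            # strict lower triangle only
flat_dist = flat_dist[flat_dist != 0].view(p, -1)  # assumes no off-diagonal distance is exactly zero
print(flat_dist.shape)               # torch.Size([2, 6]) -> n * (n - 1) / 2 pairs per part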
@@ -25,12 +29,20 @@ class BatchTripletLoss(nn.Module):
             positive_negative_dist = self._all_distance(dist, y, p, n)
 
         if self.margin:
-            all_loss = F.relu(self.margin + positive_negative_dist).view(p, -1)
-        else:
-            all_loss = F.softplus(positive_negative_dist).view(p, -1)
-        non_zero_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
-
-        return non_zero_mean, dist.mean((1, 2)), non_zero_counts
+            losses = F.relu(self.margin + positive_negative_dist).view(p, -1)
+            non_zero_counts = (losses != 0).sum(1).float()
+            if self.is_mean:
+                loss_metric = self._none_zero_mean(losses, non_zero_counts)
+            else:  # is_sum
+                loss_metric = losses.sum(1)
+            return loss_metric, flat_dist, non_zero_counts
+        else:  # Soft margin
+            losses = F.softplus(positive_negative_dist).view(p, -1)
+            if self.is_mean:
+                loss_metric = losses.mean(1)
+            else:  # is_sum
+                loss_metric = losses.sum(1)
+            return loss_metric, flat_dist, None
 
     @staticmethod
     def _batch_distance(x):
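For reference, the two loss branches above on a toy positive-negative distance tensor (illustrative values only, not part of the commit):

import torch
import torch.nn.functional as F

p = 2
positive_negative_dist = torch.tensor([[-0.5, 0.1, 0.3],
                                       [-0.4, -0.2, 0.6]])
margin = 0.2

# Hard margin: hinge at zero, plus a per-part count of still-active triplets.
losses = F.relu(margin + positive_negative_dist).view(p, -1)
non_zero_counts = (losses != 0).sum(1).float()

# Soft margin: softplus never reaches zero, so no active-triplet count is returned.
soft_losses = F.softplus(positive_negative_dist).view(p, -1)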
@@ -65,13 +77,11 @@ class BatchTripletLoss(nn.Module):
         return positive_negative_dist
 
     @staticmethod
-    def _none_zero_parted_mean(all_loss):
+    def _none_zero_mean(losses, non_zero_counts):
         # Non-zero parted mean
-        non_zero_counts = (all_loss != 0).sum(1).float()
-        non_zero_mean = all_loss.sum(1) / non_zero_counts
+        non_zero_mean = losses.sum(1) / non_zero_counts
         non_zero_mean[non_zero_counts == 0] = 0
-
-        return non_zero_mean, non_zero_counts
+        return non_zero_mean


 class JointBatchTripletLoss(BatchTripletLoss):
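The renamed helper now takes the pre-computed counts; the guard on its last line keeps parts with no margin-violating triplet from producing NaNs. A toy trace (illustrative values, not part of the commit):

import torch

losses = torch.tensor([[0.3, 0.0, 0.6],    # part 0: two active triplets
                       [0.0, 0.0, 0.0]])   # part 1: no triplet violates the margin
non_zero_counts = (losses != 0).sum(1).float()    # tensor([2., 0.])
non_zero_mean = losses.sum(1) / non_zero_counts   # tensor([0.4500, nan])
non_zero_mean[non_zero_counts == 0] = 0           # tensor([0.4500, 0.])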
@@ -79,9 +89,10 @@ class JointBatchTripletLoss(BatchTripletLoss):
             self,
             hpm_num_parts: int,
             is_hard: bool = True,
+            is_mean: bool = True,
             margins: Tuple[float, float] = (0.2, 0.2)
     ):
-        super().__init__(is_hard)
+        super().__init__(is_hard, is_mean)
         self.hpm_num_parts = hpm_num_parts
         self.margin_hpm, self.margin_pn = margins
@@ -100,7 +111,12 @@ class JointBatchTripletLoss(BatchTripletLoss):
         pn_part_loss = F.relu(
             self.margin_pn + positive_negative_dist[self.hpm_num_parts:]
         )
-        all_loss = torch.cat((hpm_part_loss, pn_part_loss)).view(p, -1)
-        non_zero_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
+        losses = torch.cat((hpm_part_loss, pn_part_loss)).view(p, -1)
+
+        non_zero_counts = (losses != 0).sum(1).float()
+        if self.is_mean:
+            loss_metric = self._none_zero_mean(losses, non_zero_counts)
+        else:  # is_sum
+            loss_metric = losses.sum(1)
 
-        return non_zero_mean, dist.mean((1, 2)), non_zero_counts
+        return loss_metric, dist, non_zero_counts
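The joint variant applies margin_hpm to the first hpm_num_parts parts and margin_pn to the rest, then reduces exactly as above. A minimal construction sketch, not part of the commit; the part count and margin values are placeholders:

from utils.triplet_loss import JointBatchTripletLoss

joint_loss = JointBatchTripletLoss(
    hpm_num_parts=16,     # hypothetical number of HPM parts
    is_hard=True,
    is_mean=True,         # non-zero mean over active triplets per part
    margins=(0.2, 0.3),   # (margin_hpm, margin_pn)
)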