From 46391257ff50848efa1aa251ab3f15dc8b7a2d2c Mon Sep 17 00:00:00 2001
From: Jordan Gong
Date: Sat, 27 Feb 2021 22:14:21 +0800
Subject: Implement Batch Hard triplet loss and soft margin

---
 utils/triplet_loss.py | 84 ++++++++++++++++++++++++++++++++++-----------------
 1 file changed, 56 insertions(+), 28 deletions(-)

(limited to 'utils/triplet_loss.py')

diff --git a/utils/triplet_loss.py b/utils/triplet_loss.py
index 0df2188..c3e5802 100644
--- a/utils/triplet_loss.py
+++ b/utils/triplet_loss.py
@@ -1,32 +1,36 @@
+from typing import Optional
+
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
 
 
-class BatchAllTripletLoss(nn.Module):
-    def __init__(self, margin: float = 0.2):
+class BatchTripletLoss(nn.Module):
+    def __init__(
+            self,
+            is_hard: bool = True,
+            margin: Optional[float] = 0.2,
+    ):
         super().__init__()
+        self.is_hard = is_hard
         self.margin = margin
 
     def forward(self, x, y):
         p, n, c = x.size()
         dist = self._batch_distance(x)
-        positive_negative_dist = self._hard_distance(dist, y, p, n)
-        all_loss = F.relu(self.margin + positive_negative_dist).view(p, -1)
-        parted_loss_mean = self._none_zero_parted_mean(all_loss)
 
-        return parted_loss_mean
+        if self.is_hard:
+            positive_negative_dist = self._hard_distance(dist, y, p, n)
+        else:  # is_all
+            positive_negative_dist = self._all_distance(dist, y, p, n)
 
-    @staticmethod
-    def _hard_distance(dist, y, p, n):
-        hard_positive_mask = y.unsqueeze(1) == y.unsqueeze(2)
-        hard_negative_mask = y.unsqueeze(1) != y.unsqueeze(2)
-        all_hard_positive = dist[hard_positive_mask].view(p, n, -1, 1)
-        all_hard_negative = dist[hard_negative_mask].view(p, n, 1, -1)
-        positive_negative_dist = all_hard_positive - all_hard_negative
+        if self.margin:
+            all_loss = F.relu(self.margin + positive_negative_dist).view(p, -1)
+        else:
+            all_loss = F.softplus(positive_negative_dist).view(p, -1)
+        non_zero_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
 
-        return positive_negative_dist
+        return non_zero_mean, dist.mean((1, 2)), non_zero_counts
 
     @staticmethod
     def _batch_distance(x):
@@ -38,41 +42,65 @@ class BatchAllTripletLoss(nn.Module):
         dist = torch.sqrt(
             F.relu(x1_squared_sum - 2 * x1_times_x2_sum + x2_squared_sum)
         )
-
         return dist
 
+    @staticmethod
+    def _hard_distance(dist, y, p, n):
+        positive_mask = y.unsqueeze(1) == y.unsqueeze(2)
+        negative_mask = y.unsqueeze(1) != y.unsqueeze(2)
+        hard_positive = dist[positive_mask].view(p, n, -1).max(-1).values
+        hard_negative = dist[negative_mask].view(p, n, -1).min(-1).values
+        positive_negative_dist = hard_positive - hard_negative
+
+        return positive_negative_dist
+
+    @staticmethod
+    def _all_distance(dist, y, p, n):
+        positive_mask = y.unsqueeze(1) == y.unsqueeze(2)
+        negative_mask = y.unsqueeze(1) != y.unsqueeze(2)
+        all_positive = dist[positive_mask].view(p, n, -1, 1)
+        all_negative = dist[negative_mask].view(p, n, 1, -1)
+        positive_negative_dist = all_positive - all_negative
+
+        return positive_negative_dist
+
     @staticmethod
     def _none_zero_parted_mean(all_loss):
         # Non-zero parted mean
-        non_zero_counts = (all_loss != 0).sum(1)
-        parted_loss_mean = all_loss.sum(1) / non_zero_counts
-        parted_loss_mean[non_zero_counts == 0] = 0
+        non_zero_counts = (all_loss != 0).sum(1).float()
+        non_zero_mean = all_loss.sum(1) / non_zero_counts
+        non_zero_mean[non_zero_counts == 0] = 0
 
-        return parted_loss_mean
+        return non_zero_mean, non_zero_counts
 
 
-class JointBatchAllTripletLoss(BatchAllTripletLoss):
+class JointBatchTripletLoss(BatchTripletLoss):
     def __init__(
             self,
             hpm_num_parts: int,
+            is_hard: bool = True,
             margins: tuple[float, float] = (0.2, 0.2)
     ):
-        super().__init__()
+        super().__init__(is_hard)
         self.hpm_num_parts = hpm_num_parts
         self.margin_hpm, self.margin_pn = margins
 
     def forward(self, x, y):
         p, n, c = x.size()
         dist = self._batch_distance(x)
-        positive_negative_dist = self._hard_distance(dist, y, p, n)
+
+        if self.is_hard:
+            positive_negative_dist = self._hard_distance(dist, y, p, n)
+        else:  # is_all
+            positive_negative_dist = self._all_distance(dist, y, p, n)
+
         hpm_part_loss = F.relu(
             self.margin_hpm + positive_negative_dist[:self.hpm_num_parts]
-        ).view(self.hpm_num_parts, -1)
+        )
         pn_part_loss = F.relu(
             self.margin_pn + positive_negative_dist[self.hpm_num_parts:]
-        ).view(p - self.hpm_num_parts, -1)
+        )
         all_loss = torch.cat((hpm_part_loss, pn_part_loss)).view(p, -1)
-        parted_loss_mean = self._none_zero_parted_mean(all_loss)
+        non_zero_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
 
-        return parted_loss_mean
+        return non_zero_mean, dist.mean((1, 2)), non_zero_counts
--
cgit v1.2.3
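For orientation between patches: a minimal usage sketch of the class this commit introduces. The (p, n, c) layout (p horizontal parts, n samples from a P x K-sampled batch, c channels) and the per-part label tensor are inferred from the masking code above; every name and shape here is illustrative, not part of the repository.

import torch

from utils.triplet_loss import BatchTripletLoss

p, n, c = 16, 4 * 8, 256  # assumed: 16 parts, P=4 identities x K=8 samples
x = torch.randn(p, n, c)  # part-wise embeddings
# Balanced labels, replicated per part -> shape (p, n); _hard_distance
# relies on every sample having the same number of positives.
y = torch.arange(4).repeat_interleave(8).repeat(p, 1)

batch_hard = BatchTripletLoss(is_hard=True, margin=0.2)
loss, mean_dist, non_zero = batch_hard(x, y)  # each shaped (p,)

soft = BatchTripletLoss(margin=None)  # margin=None selects the softplus branch
soft_loss, _, _ = soft(x, y)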
From b837336695213e3e660992fcd01c5a52c654ea4f Mon Sep 17 00:00:00 2001
From: Jordan Gong
Date: Sun, 28 Feb 2021 22:14:27 +0800
Subject: Log n-ile embedding distance and norm

---
 utils/triplet_loss.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

(limited to 'utils/triplet_loss.py')

diff --git a/utils/triplet_loss.py b/utils/triplet_loss.py
index c3e5802..52d676e 100644
--- a/utils/triplet_loss.py
+++ b/utils/triplet_loss.py
@@ -18,6 +18,8 @@ class BatchTripletLoss(nn.Module):
     def forward(self, x, y):
         p, n, c = x.size()
         dist = self._batch_distance(x)
+        flat_dist = dist.tril(-1)
+        flat_dist = flat_dist[flat_dist != 0].view(p, -1)
 
         if self.is_hard:
             positive_negative_dist = self._hard_distance(dist, y, p, n)
@@ -26,11 +28,12 @@ class BatchTripletLoss(nn.Module):
 
         if self.margin:
             all_loss = F.relu(self.margin + positive_negative_dist).view(p, -1)
-        else:
+            loss_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
+            return loss_mean, flat_dist, non_zero_counts
+        else:  # Soft margin
             all_loss = F.softplus(positive_negative_dist).view(p, -1)
-        non_zero_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
-
-        return non_zero_mean, dist.mean((1, 2)), non_zero_counts
+            loss_mean = all_loss.mean(1)
+            return loss_mean, flat_dist, None
 
     @staticmethod
     def _batch_distance(x):
@@ -103,4 +106,4 @@ class JointBatchTripletLoss(BatchTripletLoss):
         all_loss = torch.cat((hpm_part_loss, pn_part_loss)).view(p, -1)
         non_zero_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
 
-        return non_zero_mean, dist.mean((1, 2)), non_zero_counts
+        return non_zero_mean, dist, non_zero_counts
--
cgit v1.2.3
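The flat_dist returned as of this patch carries each part's pairwise embedding distances (the strict lower triangle of dist), presumably feeding the n-ile logging named in the commit subject. A sketch of such a consumer follows; the quantile points and the helper itself are assumptions, not code from this repository.

import torch

def dist_quantiles(flat_dist: torch.Tensor) -> torch.Tensor:
    # Per-part 0/25/50/75/100th percentiles of pairwise distance,
    # flat_dist being the (p, num_pairs) tensor forward() returns.
    q = torch.tensor([0., .25, .5, .75, 1.], device=flat_dist.device)
    return torch.quantile(flat_dist, q, dim=1)  # -> shape (5, p)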
From fed5e6a9b35fda8306147e9ce772dfbf3142a061 Mon Sep 17 00:00:00 2001
From: Jordan Gong
Date: Sun, 28 Feb 2021 23:11:05 +0800
Subject: Implement sum of loss, the default in [1]
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

[1] A. Hermans, L. Beyer, and B. Leibe, “In defense of the triplet loss
    for person re-identification,” arXiv preprint arXiv:1703.07737, 2017.
---
 utils/triplet_loss.py | 43 ++++++++++++++++++++++++++++---------------
 1 file changed, 28 insertions(+), 15 deletions(-)

(limited to 'utils/triplet_loss.py')

diff --git a/utils/triplet_loss.py b/utils/triplet_loss.py
index 52d676e..db0cf0f 100644
--- a/utils/triplet_loss.py
+++ b/utils/triplet_loss.py
@@ -9,10 +9,12 @@ class BatchTripletLoss(nn.Module):
     def __init__(
             self,
             is_hard: bool = True,
+            is_mean: bool = True,
             margin: Optional[float] = 0.2,
     ):
         super().__init__()
         self.is_hard = is_hard
+        self.is_mean = is_mean
         self.margin = margin
 
     def forward(self, x, y):
@@ -27,13 +29,20 @@ class BatchTripletLoss(nn.Module):
             positive_negative_dist = self._all_distance(dist, y, p, n)
 
         if self.margin:
-            all_loss = F.relu(self.margin + positive_negative_dist).view(p, -1)
-            loss_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
-            return loss_mean, flat_dist, non_zero_counts
+            losses = F.relu(self.margin + positive_negative_dist).view(p, -1)
+            non_zero_counts = (losses != 0).sum(1).float()
+            if self.is_mean:
+                loss_metric = self._none_zero_mean(losses, non_zero_counts)
+            else:  # is_sum
+                loss_metric = losses.sum(1)
+            return loss_metric, flat_dist, non_zero_counts
         else:  # Soft margin
-            all_loss = F.softplus(positive_negative_dist).view(p, -1)
-            loss_mean = all_loss.mean(1)
-            return loss_mean, flat_dist, None
+            losses = F.softplus(positive_negative_dist).view(p, -1)
+            if self.is_mean:
+                loss_metric = losses.mean(1)
+            else:  # is_sum
+                loss_metric = losses.sum(1)
+            return loss_metric, flat_dist, None
 
     @staticmethod
     def _batch_distance(x):
@@ -68,13 +77,11 @@ class BatchTripletLoss(nn.Module):
         return positive_negative_dist
 
     @staticmethod
-    def _none_zero_parted_mean(all_loss):
+    def _none_zero_mean(losses, non_zero_counts):
         # Non-zero parted mean
-        non_zero_counts = (all_loss != 0).sum(1).float()
-        non_zero_mean = all_loss.sum(1) / non_zero_counts
+        non_zero_mean = losses.sum(1) / non_zero_counts
         non_zero_mean[non_zero_counts == 0] = 0
-
-        return non_zero_mean, non_zero_counts
+        return non_zero_mean
 
 
 class JointBatchTripletLoss(BatchTripletLoss):
@@ -82,9 +89,10 @@ class JointBatchTripletLoss(BatchTripletLoss):
             self,
             hpm_num_parts: int,
             is_hard: bool = True,
+            is_mean: bool = True,
             margins: tuple[float, float] = (0.2, 0.2)
     ):
-        super().__init__(is_hard)
+        super().__init__(is_hard, is_mean)
         self.hpm_num_parts = hpm_num_parts
         self.margin_hpm, self.margin_pn = margins
 
@@ -103,7 +111,12 @@ class JointBatchTripletLoss(BatchTripletLoss):
         pn_part_loss = F.relu(
             self.margin_pn + positive_negative_dist[self.hpm_num_parts:]
         )
-        all_loss = torch.cat((hpm_part_loss, pn_part_loss)).view(p, -1)
-        non_zero_mean, non_zero_counts = self._none_zero_parted_mean(all_loss)
+        losses = torch.cat((hpm_part_loss, pn_part_loss)).view(p, -1)
+
+        non_zero_counts = (losses != 0).sum(1).float()
+        if self.is_mean:
+            loss_metric = self._none_zero_mean(losses, non_zero_counts)
+        else:  # is_sum
+            loss_metric = losses.sum(1)
 
-        return non_zero_mean, dist, non_zero_counts
+        return loss_metric, dist, non_zero_counts
--
cgit v1.2.3

From 4bdc37bbd86a83647bbbda7bd1367c08e6c6f6d4 Mon Sep 17 00:00:00 2001
From: Jordan Gong
Date: Mon, 1 Mar 2021 11:20:34 +0800
Subject: Remove identical samples in Batch All case

---
 utils/triplet_loss.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

(limited to 'utils/triplet_loss.py')

diff --git a/utils/triplet_loss.py b/utils/triplet_loss.py
index db0cf0f..6822cf6 100644
--- a/utils/triplet_loss.py
+++ b/utils/triplet_loss.py
@@ -68,7 +68,10 @@ class BatchTripletLoss(nn.Module):
     @staticmethod
     def _all_distance(dist, y, p, n):
-        positive_mask = y.unsqueeze(1) == y.unsqueeze(2)
+        # Mask out identical samples (the diagonal of the equality matrix)
+        positive_mask = torch.eye(
+            n, dtype=torch.bool, device=y.device
+        ) ^ (y.unsqueeze(1) == y.unsqueeze(2))
         negative_mask = y.unsqueeze(1) != y.unsqueeze(2)
         all_positive = dist[positive_mask].view(p, n, -1, 1)
         all_negative = dist[negative_mask].view(p, n, 1, -1)
--
cgit v1.2.3
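The following patch swaps the tril-and-filter flat distance for an index-based gather. The commit message does not say why, but a likely reason can be read off the code: filtering zeros out of the lower triangle also drops genuinely zero distances (e.g. between duplicated samples), so the element count per part is no longer fixed and the trailing .view(p, -1) can fail, whereas torch.tril_indices always selects exactly n * (n - 1) / 2 entries. A toy illustration (values are made up):

import torch

p, n = 2, 4
dist = torch.rand(p, n, n)
dist[0, 2, 1] = 0.  # a legitimate zero off-diagonal distance

old = dist.tril(-1)
old = old[old != 0]  # 11 elements; no longer reshapable to (p, 6)

idx = torch.tril_indices(n, n, offset=-1)
new = dist[:, idx[0], idx[1]]  # always (p, 6), zeros kept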
From 7318a09451852e3f7d5f68180964f03bd0b0f616 Mon Sep 17 00:00:00 2001
From: Jordan Gong
Date: Mon, 1 Mar 2021 14:04:02 +0800
Subject: Change flat distance calculation method

---
 utils/triplet_loss.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

(limited to 'utils/triplet_loss.py')

diff --git a/utils/triplet_loss.py b/utils/triplet_loss.py
index 6822cf6..e05b69d 100644
--- a/utils/triplet_loss.py
+++ b/utils/triplet_loss.py
@@ -20,8 +20,8 @@ class BatchTripletLoss(nn.Module):
     def forward(self, x, y):
         p, n, c = x.size()
         dist = self._batch_distance(x)
-        flat_dist = dist.tril(-1)
-        flat_dist = flat_dist[flat_dist != 0].view(p, -1)
+        flat_dist_mask = torch.tril_indices(n, n, offset=-1, device=dist.device)
+        flat_dist = dist[:, flat_dist_mask[0], flat_dist_mask[1]]
 
         if self.is_hard:
             positive_negative_dist = self._hard_distance(dist, y, p, n)
@@ -102,6 +102,8 @@ class JointBatchTripletLoss(BatchTripletLoss):
     def forward(self, x, y):
         p, n, c = x.size()
         dist = self._batch_distance(x)
+        flat_dist_mask = torch.tril_indices(n, n, offset=-1, device=dist.device)
+        flat_dist = dist[:, flat_dist_mask[0], flat_dist_mask[1]]
 
         if self.is_hard:
             positive_negative_dist = self._hard_distance(dist, y, p, n)
@@ -122,4 +124,4 @@ class JointBatchTripletLoss(BatchTripletLoss):
         else:  # is_sum
             loss_metric = losses.sum(1)
 
-        return loss_metric, dist, non_zero_counts
+        return loss_metric, flat_dist, non_zero_counts
--
cgit v1.2.3
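After the full series, both classes return (loss_metric, flat_dist, non_zero_counts), with non_zero_counts None on the soft-margin path. A closing sketch of how the accumulated options combine; shapes and names are assumptions, as before.

import torch

from utils.triplet_loss import BatchTripletLoss, JointBatchTripletLoss

p, n, c = 16, 4 * 8, 256
x = torch.randn(p, n, c)
y = torch.arange(4).repeat_interleave(8).repeat(p, 1)

# Batch All + soft margin + sum over triplets, the default setup in [1].
loss_fn = BatchTripletLoss(is_hard=False, is_mean=False, margin=None)
loss, flat_dist, counts = loss_fn(x, y)  # counts is None without a margin

# Joint variant: the first hpm_num_parts parts use the HPM margin,
# the remaining parts the PN margin.
joint_fn = JointBatchTripletLoss(hpm_num_parts=6, margins=(0.2, 0.3))
joint_loss, joint_flat_dist, joint_counts = joint_fn(x, y)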