-
Notifications
You must be signed in to change notification settings - Fork 31
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Simplified graph sampling * Einstein summation for QAConv * Hard triplet loss * Adaptive epoch and learning rate scheduling * Automatic mixed precision training
- Loading branch information
Shengcai Liao
committed
Sep 16, 2021
1 parent
7d4cb8a
commit 9bdea33
Showing
10 changed files
with
330 additions
and
883 deletions.
There are no files selected for viewing
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,58 @@ | ||
"""Class for the hard triplet loss | ||
Shengcai Liao and Ling Shao, "Graph Sampling Based Deep Metric Learning for Generalizable Person Re-Identification." In arXiv preprint, arXiv:2104.01546, 2021. | ||
Author: | ||
Shengcai Liao | ||
[email protected] | ||
Version: | ||
V1.0 | ||
April 1, 2021 | ||
""" | ||
|
||
import torch | ||
from torch.nn import Module | ||
from torch import nn | ||
|
||
|
||
class TripletLoss(Module):
    """Batch-hard triplet loss driven by a learned pairwise matcher.

    For every anchor in the batch, the hardest (lowest-scoring) positive and
    the hardest (highest-scoring) negative are mined from the matcher's
    similarity matrix, and a margin ranking hinge is applied per sample.

    Reference:
        Shengcai Liao and Ling Shao, "Graph Sampling Based Deep Metric
        Learning for Generalizable Person Re-Identification."
        arXiv preprint arXiv:2104.01546, 2021.
    """

    def __init__(self, matcher, margin=16):
        """
        Inputs:
            matcher: a class for matching pairs of images
            margin: margin parameter for the triplet loss
        """
        super(TripletLoss, self).__init__()
        self.matcher = matcher
        self.margin = margin
        # reduction='none' keeps one hinge value per anchor so callers can
        # weight or inspect individual samples.
        self.ranking_loss = nn.MarginRankingLoss(margin=margin, reduction='none')

    def reset_running_stats(self):
        # Delegate to the matcher (e.g. its normalization running statistics).
        self.matcher.reset_running_stats()

    def reset_parameters(self):
        # Delegate learnable-parameter reinitialization to the matcher.
        self.matcher.reset_parameters()

    def _check_input_dim(self, input):
        # Feature maps are expected as [batch, channels, height, width].
        if input.dim() != 4:
            raise ValueError('expected 4D input (got {}D input)'.format(input.dim()))

    def forward(self, feature, target):
        """Compute the per-sample hard triplet loss.

        Inputs:
            feature: [b, c, h, w] feature maps fed to the matcher.
            target: [b] integer identity labels.
        Returns:
            (loss, acc): per-sample hinge losses of shape [b], and a float
            indicator of shape [b] marking anchors whose hardest positive
            scores at least as high as their hardest negative.
        """
        self._check_input_dim(feature)
        self.matcher.make_kernel(feature)

        score = self.matcher(feature)  # [b, b] pairwise similarity

        labels = target.unsqueeze(1)
        same_id = (labels == labels.t()).float()  # 1 where identities match

        big = 1e15  # large constant used to mask out invalid candidates
        diag = torch.eye(score.size(0), device=score.device)
        # Hardest positive: lowest-scoring same-identity pair, with the
        # self-match on the diagonal pushed out of reach.
        min_pos = (score * same_id + (1 - same_id + diag) * big).min(dim=1)[0]
        # Hardest negative: highest-scoring different-identity pair.
        max_neg = (score * (1 - same_id) - same_id * big).max(dim=1)[0]

        # Hinge: min_pos should exceed max_neg by at least the margin.
        loss = self.ranking_loss(min_pos, max_neg, torch.ones_like(target))

        with torch.no_grad():
            acc = (min_pos >= max_neg).float()

        return loss, acc
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -6,14 +6,13 @@ | |
Shengcai Liao | ||
[email protected] | ||
Version: | ||
V1.2 | ||
Mar. 31, 2021 | ||
V1.3 | ||
July 1, 2021 | ||
""" | ||
|
||
import torch | ||
from torch import nn | ||
from torch.nn import Module | ||
from torch.nn import functional as F | ||
|
||
|
||
class QAConv(Module): | ||
|
@@ -29,41 +28,41 @@ def __init__(self, num_features, height, width): | |
self.height = height | ||
self.width = width | ||
self.bn = nn.BatchNorm1d(1) | ||
self.fc = nn.Linear(self.height * self.width * 2, 1) | ||
self.fc = nn.Linear(self.height * self.width, 1) | ||
self.logit_bn = nn.BatchNorm1d(1) | ||
self.kernel = None | ||
self.reset_parameters() | ||
|
||
def reset_running_stats(self): | ||
self.bn.reset_running_stats() | ||
self.logit_bn.reset_running_stats() | ||
|
||
def reset_parameters(self): | ||
self.bn.reset_parameters() | ||
self.fc.reset_parameters() | ||
self.logit_bn.reset_parameters() | ||
with torch.no_grad(): | ||
self.fc.weight.fill_(1. / (self.height * self.width)) | ||
|
||
def _check_input_dim(self, input): | ||
if input.dim() != 4: | ||
raise ValueError('expected 4D input (got {}D input)'.format(input.dim())) | ||
|
||
def make_kernel(self, features): # probe features | ||
kernel = features.permute([0, 2, 3, 1]) # [p, h, w, d] | ||
kernel = kernel.reshape(-1, self.num_features, 1, 1) # [phw, d, 1, 1] | ||
self.kernel = kernel | ||
self.kernel = features | ||
|
||
def forward(self, features): # gallery features | ||
self._check_input_dim(features) | ||
|
||
hw = self.height * self.width | ||
batch_size = features.size(0) | ||
|
||
score = F.conv2d(features, self.kernel) # [g, phw, h, w] | ||
score = torch.einsum('g c h w, p c y x -> g p y x h w', features, self.kernel) | ||
score = score.view(batch_size, -1, hw, hw) | ||
score = torch.cat((score.max(dim=2)[0], score.max(dim=3)[0]), dim=-1) | ||
|
||
score = score.view(-1, 1, 2 * hw) | ||
score = self.bn(score).view(-1, 2 * hw) | ||
score = score.view(-1, 1, hw) | ||
score = self.bn(score).view(-1, hw) | ||
score = self.fc(score) | ||
score = score.view(-1, 2).sum(dim=-1, keepdim=True) | ||
score = self.logit_bn(score) | ||
score = score.view(batch_size, -1).t() # [p, g] | ||
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -5,8 +5,8 @@ | |
Shengcai Liao | ||
[email protected] | ||
Version: | ||
V1.1 | ||
Feb. 7, 2021 | ||
V1.2 | ||
July 4, 2021 | ||
""" | ||
|
||
from __future__ import absolute_import | ||
|
@@ -58,9 +58,8 @@ def __init__(self, depth, ibn_type=None, final_layer='layer3', neck=128, pretrai | |
out_planes = fea_dims[final_layer] | ||
|
||
if neck > 0: | ||
self.neck_conv = nn.Conv2d(out_planes, neck, kernel_size=3, padding=1, bias=False) | ||
self.neck_conv = nn.Conv2d(out_planes, neck, kernel_size=3, padding=1) | ||
out_planes = neck | ||
self.neck_bn = nn.BatchNorm2d(out_planes) | ||
|
||
self.num_features = out_planes | ||
|
||
|
@@ -73,7 +72,6 @@ def forward(self, inputs): | |
|
||
if self.neck > 0: | ||
x = self.neck_conv(x) | ||
x = self.neck_bn(x) | ||
|
||
x = F.normalize(x) | ||
|
||
|
This file was deleted.
Oops, something went wrong.
Oops, something went wrong.