
Commit

Dayan-Guan#1 add DMLayer in model.init, still need to add in model.forward
Shanru-Lin committed Aug 26, 2023
1 parent bcd6858 commit 8a27c93
Showing 2 changed files with 31 additions and 1 deletion.
30 changes: 29 additions & 1 deletion models/model.py
@@ -251,7 +251,8 @@ def get_other_params(self):
         return chain(self.encoder.get_module_params(), self.classifier.parameters())
 
 class USRN(BaseModel):
-    def __init__(self, num_classes, conf, sup_loss=None, ignore_index=None, testing=False, pretrained=True):
+    def __init__(self, num_classes, conf, sup_loss=None, ignore_index=None, testing=False, pretrained=True,
+                 num_features=512, nb_prototype=80):
         super(USRN, self).__init__()
         assert int(conf['supervised']) + int(conf['semi']) == 1, 'one mode only'
         if conf['supervised']:
@@ -324,6 +325,12 @@ def __init__(self, num_classes, conf, sup_loss=None, ignore_index=None, testing=
         self.total_loss = 0
         self.curr_losses = {}
 
+        # {
+        self.DMlayer = Distanceminimi_Layer_learned(in_features=(num_features // 16), out_features=nb_prototype, dist='cos')
+        self.DMBN = nn.BatchNorm2d(nb_prototype)  # batch norm over the nb_prototype channels
+        self.get_uncer = nn.Conv2d(nb_prototype, 1, 1)  # 1x1 conv: nb_prototype channels -> 1
+        # }
+
     def forward(self, x_l=None, target_l=None, target_l_subcls=None, x_ul=None, target_ul=None,
                 curr_iter=None, epoch=None, gpu=None, gt_l=None, ul1=None, br1=None, ul2=None, br2=None, flip=None):
 
@@ -446,3 +453,24 @@ def get_other_params(self):
         return chain(self.encoder.get_module_params(), self.classifier.parameters(),
                      self.classifier_SubCls.parameters())
 
+# {
+class Distanceminimi_Layer_learned(nn.Module):
+    def __init__(self, in_features=0, out_features=0, dist='lin'):
+        super(Distanceminimi_Layer_learned, self).__init__()
+        self.in_features = in_features
+        self.out_features = out_features
+        self.dist = dist  # distance type; forward currently always uses cosine similarity
+        # one learnable prototype vector per output channel
+        self.omega = nn.Parameter(torch.Tensor(1, out_features, in_features, 1, 1))
+
+        self.reset_parameters()
+
+    def reset_parameters(self):
+        nn.init.normal_(self.omega, mean=0, std=1)  # alternative scale: std=1/self.out_features
+
+    def forward(self, x):
+        x = x.unsqueeze(1)  # (B, C, H, W) -> (B, 1, C, H, W) to broadcast against omega
+        out = F.cosine_similarity(x, self.omega, dim=2, eps=1e-30)  # (B, out_features, H, W)
+
+        return out, self.omega
+# }
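
A quick shape sanity-check for the new layer (illustrative only; the sizes assume the constructor defaults num_features=512 and nb_prototype=80 introduced above, and that models/model.py already imports torch, torch.nn as nn, and torch.nn.functional as F):

    import torch

    layer = Distanceminimi_Layer_learned(in_features=512 // 16, out_features=80, dist='cos')
    feat = torch.randn(2, 32, 64, 64)   # (B, C, H, W) with C = in_features = 512 // 16
    sim, omega = layer(feat)
    print(sim.shape)    # torch.Size([2, 80, 64, 64]) -- one cosine-similarity map per prototype
    print(omega.shape)  # torch.Size([1, 80, 32, 1, 1])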
2 changes: 2 additions & 0 deletions trainer.py
@@ -15,6 +15,8 @@
 import torch.distributed as dist
 import os
 
+import torch.nn as nn
+
 class Test(BaseTrainer):
     def __init__(self, model, resume, config, iter_per_epoch, val_loader=None, train_logger=None, gpu=None, test=False):
         super(Test, self).__init__(model, resume, config, iter_per_epoch, train_logger, gpu=gpu, test=test)
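The commit message notes that the forward pass still needs wiring. A minimal sketch of how the three new modules might be chained inside USRN.forward (hypothetical, not part of this commit), given an encoder feature map feat of shape (B, num_features // 16, H, W):

    sim, _ = self.DMlayer(feat)        # (B, nb_prototype, H, W) cosine similarities to prototypes
    sim = self.DMBN(sim)               # normalize the per-prototype responses
    uncertainty = self.get_uncer(sim)  # (B, 1, H, W) map via the 1x1 conv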
