def __init__(self, nr_cate=12, net_arch='alexnet', init_weights=True):
    _BaseReg_Net.__init__(self, nr_cate=nr_cate, net_arch=net_arch, init_weights=init_weights)
    self.nr_cate = nr_cate
    self.reg_n_D = 4
    # Note: for a quaternion q=(a,b,c,d), we always ensure a>0, so that cos(theta/2)>0 --> theta in [0,pi].
    # Thus only b,c,d need sign prediction.
    dim_need_sign = 3
    _signs = list(product(*([(-1, 1)] * dim_need_sign)))  # [(-1, -1, -1), (-1, -1, 1), ..., (1, 1, 1)], with len=8
    self.signs = [(1,) + x for x in _signs]               # [(1, -1, -1, -1), (1, -1, -1, 1), ..., (1, 1, 1, 1)], with len=8
    self.signs2label = odict(zip(self.signs, range(len(self.signs))))
    self.label2signs = Variable(torch.FloatTensor(self.signs)).cuda()  # make it a Variable

    #-- Head architecture
    # Note: for quaternion, there's only one regression head (instead of 3, one per Euler angle (a,e,t)).
    # Thus nr_fc8=996 (see design.py).
    # The squared components of a unit quaternion sum to 1, so they can be treated as a
    # probability distribution (hence 'sqrdprob' and the softmax below).
    self.head_sqrdprob_quat = self.head_seq(self.top_size, self.reg_n_D, nr_cate=nr_cate, nr_fc8=996, init_weights=init_weights)
    # Each of the 3 imaginary quaternion components can be + or -, so there are 2**3 = 8 possible sign categories.
    self.head_signcate_quat = self.head_seq(self.top_size, len(self.signs), nr_cate=nr_cate, nr_fc8=996, init_weights=init_weights)

    # for abs branch
    self.maskout = Maskout(nr_cate=nr_cate)
    self.softmax = nn.Softmax(dim=1).cuda()
    # for sgc branch
    self.maskout_sgc = Maskout(nr_cate=nr_cate)  # a separate layer to mask out the sign classification only

    # loss modules
    self.loss_handler_abs_quat = Cos_Proximity_Loss_Handler()  # alternative: Neg_Dot_Loss_Handler()
    self.loss_handler_sgc_quat = Cross_Entropy_Loss_Handler()

    self.targets = ['abs_quat', 'sgc_quat']
    self.gt_targets = ['quat']
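# A minimal standalone sketch (not part of the class above) of the sign-category encoding
# the two heads rely on: the abs branch regresses |q| component-wise (in the network,
# roughly softmax(...).sqrt()), and the sgc branch classifies which of the 2**3 = 8 sign
# patterns (b,c,d) carries. The helper names encode_quat_sign_label / decode_quat are
# hypothetical, for illustration only.
from itertools import product

import torch

_signs = [(1,) + s for s in product(*([(-1, 1)] * 3))]   # 8 patterns; a is always +
signs2label = {s: i for i, s in enumerate(_signs)}       # e.g. (1, -1, -1, -1) -> 0
label2signs = torch.tensor(_signs, dtype=torch.float32)  # (8, 4) label -> signs lookup

def encode_quat_sign_label(q):
    """Map a quaternion with q[0] > 0 to its sign-category label."""
    key = (1,) + tuple(1 if v >= 0 else -1 for v in q[1:])
    return signs2label[key]

def decode_quat(abs_q, sign_label):
    """Recombine the abs-branch output with the predicted sign category."""
    return abs_q * label2signs[sign_label]

q = torch.tensor([0.8, -0.5, 0.2, -0.3])
label = encode_quat_sign_label(q.tolist())
assert torch.allclose(decode_quat(q.abs(), label), q)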
def __init__(self, nr_cate=12, net_arch='alexnet', pretrained=True, nr_fc8=1024):
    _naiReg_Net.__init__(self, nr_cate=nr_cate, net_arch=net_arch, pretrained=pretrained)
    self.nr_cate = nr_cate

    #-- Head architecture
    # 2 regression targets for each of {a,e,t}: (log(x^2), log(y^2))
    self.head_fc8_a, self.head_xx_yy_a, self.head_sign_x_y_a = self.head_seq(self.top_size, nr_cate=nr_cate, nr_fc8=nr_fc8)
    self.head_fc8_e, self.head_xx_yy_e, self.head_sign_x_y_e = self.head_seq(self.top_size, nr_cate=nr_cate, nr_fc8=nr_fc8)
    self.head_fc8_t, self.head_xx_yy_t, self.head_sign_x_y_t = self.head_seq(self.top_size, nr_cate=nr_cate, nr_fc8=nr_fc8)

    # 4 sign categories for each of {a,e,t}.
    # Given (x->cos(theta), y->sin(theta)):
    #   { quadrant_label : (sign(x), sign(y)) } = { 0:++, 1:-+, 2:--, 3:+- }
    #
    #       1   |   0
    #      -,+  |  +,+
    #   --------|--------
    #      -,-  |  +,-
    #       2   |   3

    # for maskout of a,e,t
    self.maskout = Maskout(nr_cate=nr_cate)
    self.logsoftmax = nn.LogSoftmax(dim=1).cuda()

    # loss modules
    self.loss_handler_ccss = Neg_Dot_Loss_Handler()
    self.loss_handler_sign = Cross_Entropy_Loss_Handler()

    self.targets = ['ccss_a', 'ccss_e', 'ccss_t', 'sign_a', 'sign_e', 'sign_t']
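# A minimal standalone sketch (not part of the class above) of this target encoding:
# each angle theta is split into sign-free magnitudes (log(cos^2), log(sin^2)) and a
# quadrant label following { 0:++, 1:-+, 2:--, 3:+- }. The helper names are hypothetical;
# the sketch assumes cos(theta) and sin(theta) are both nonzero (log(0) is undefined).
import math

def encode_angle(theta):
    x, y = math.cos(theta), math.sin(theta)
    ccss = (math.log(x * x), math.log(y * y))            # magnitude targets, signs stripped
    quadrant = {(1, 1): 0, (-1, 1): 1, (-1, -1): 2, (1, -1): 3}[
        (1 if x >= 0 else -1, 1 if y >= 0 else -1)]
    return ccss, quadrant

def decode_angle(ccss, quadrant):
    sx, sy = [(1, 1), (-1, 1), (-1, -1), (1, -1)][quadrant]
    x = sx * math.sqrt(math.exp(ccss[0]))                # |cos| recovered from log(cos^2)
    y = sy * math.sqrt(math.exp(ccss[1]))
    return math.atan2(y, x)

theta = 2.3
ccss, quad = encode_angle(theta)
assert abs(decode_angle(ccss, quad) - theta) < 1e-9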
def __init__(self, nr_cate=12, net_arch='alexnet', pretrained=True):
    _naiReg_Net.__init__(self, nr_cate=nr_cate, net_arch=net_arch, pretrained=pretrained)
    self.nr_cate = nr_cate
    self.reg_n_D = 2

    #-- Head architecture: one (cos, sin) regression head per angle in {a,e,t}
    self.head_a = self.head_seq(self.top_size, self.reg_n_D, nr_cate=nr_cate)
    self.head_e = self.head_seq(self.top_size, self.reg_n_D, nr_cate=nr_cate)
    self.head_t = self.head_seq(self.top_size, self.reg_n_D, nr_cate=nr_cate)

    # for maskout of a,e,t
    self.maskout = Maskout(nr_cate=nr_cate)

    # loss module
    self.loss_handler = Smooth_L1_Loss_Handler()
    self.targets = ['cos_a', 'sin_a', 'cos_e', 'sin_e', 'cos_t', 'sin_t']
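# A minimal standalone sketch of how the (cos, sin) targets above behave: each head
# predicts an unnormalized 2-vector per angle, the pair is supervised with smooth L1
# (as in the class above), and the angle is recovered with atan2, which is insensitive
# to the vector's norm. The name angle_from_pred is illustrative only.
import torch
import torch.nn.functional as F

def angle_from_pred(pred):
    """pred: tensor [..., 2] holding (cos, sin) estimates."""
    return torch.atan2(pred[..., 1], pred[..., 0])

target_theta = torch.tensor([0.7])
target = torch.stack([target_theta.cos(), target_theta.sin()], dim=-1)
pred = torch.tensor([[0.72, 0.66]])                      # a slightly-off prediction
loss = F.smooth_l1_loss(pred, target)                    # per-component smooth L1
print(float(loss), float(angle_from_pred(pred)[0]))      # angle close to 0.7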
def __init__(self, nr_cate=3, _Trunk=ResNet101_Trunk):
    super(Test_Net, self).__init__()
    self.truck = _Trunk()  # or _Trunk(end='pool5')
    self.nr_cate = nr_cate

    #-- Head architecture
    self.head_s2 = nn.Sequential(
        nn.Linear(2048, 84),
        nn.ReLU(inplace=True),
        # nn.Dropout(),
        nn.Linear(84, self.nr_cate * 3),  # 3 outputs per category
    )
    self.head_s1 = nn.Sequential(
        nn.Linear(2048, 84),
        nn.ReLU(inplace=True),
        # nn.Dropout(),
        nn.Linear(84, self.nr_cate * 2),  # 2 outputs per category
    )
    self.maskout = Maskout(nr_cate=nr_cate)

    init_weights_by_filling(self.head_s2)
    init_weights_by_filling(self.head_s1)
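# The custom Maskout layer used throughout these classes is defined elsewhere in the repo;
# as an assumption, a common implementation of this pattern selects, for each sample, the
# slice of the head output belonging to its ground-truth category. A minimal sketch of that
# behavior (illustrative only, not the repo's actual code):
import torch

def maskout(scores, labels, nr_cate):
    """scores: (B, nr_cate * D) head output; labels: (B,) category indices -> (B, D)."""
    B = scores.size(0)
    D = scores.size(1) // nr_cate
    scores = scores.view(B, nr_cate, D)
    return scores[torch.arange(B), labels]               # pick each sample's own category slice

scores = torch.randn(4, 3 * 3)                           # e.g. head_s2 output with nr_cate=3
labels = torch.tensor([0, 2, 1, 0])
print(maskout(scores, labels, nr_cate=3).shape)          # torch.Size([4, 3])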
def __init__(self, nr_cate=12, net_arch='alexnet', init_weights=True):
    _BaseReg_Net.__init__(self, nr_cate=nr_cate, net_arch=net_arch, init_weights=init_weights)
    self.nr_cate = nr_cate
    self.reg_n_D = 4

    #-- Head architecture
    # Note: for quaternion, there's only one regression head (instead of 3, one per Euler angle (a,e,t)).
    # Thus nr_fc8=996 (see design.py).
    self.head_quat = self.head_seq(self.top_size, self.reg_n_D, nr_cate=nr_cate, nr_fc8=996, init_weights=init_weights)

    # for masking out the slice of the specific category
    self.maskout = Maskout(nr_cate=nr_cate)

    # loss module
    self.loss_handler = Smooth_L1_Loss_Handler()
    self.targets = ['quat']
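# A minimal standalone sketch of the single-head quaternion target above: the head
# regresses a 4-vector against a unit quaternion under smooth L1. Building the
# ground-truth quaternion from axis-angle is one common choice and is only assumed here;
# quat_from_axis_angle is a hypothetical helper.
import torch
import torch.nn.functional as F

def quat_from_axis_angle(axis, theta):
    """Unit quaternion (cos(theta/2), sin(theta/2)*axis); a > 0 holds for theta in [0, pi)."""
    axis = axis / axis.norm()
    return torch.cat([torch.cos(theta / 2).view(1), torch.sin(theta / 2) * axis])

gt = quat_from_axis_angle(torch.tensor([0., 0., 1.]), torch.tensor(0.5))  # (4,), unit norm
pred = gt + 0.05 * torch.randn(4)                        # stand-in for the masked head output
loss = F.smooth_l1_loss(pred, gt)
print(float(loss), float(gt.norm()))                     # gt.norm() == 1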
def __init__(self, nr_cate=3, _Trunk=VGG16_Trunk):
    super(Test_Net, self).__init__()
    self.truck = _Trunk()  # optionally: _Trunk().copy_weights()
    self.nr_cate = nr_cate
    self.maskout = Maskout(nr_cate=nr_cate)
def __init__(self, nr_cate=3, _Trunk=AlexNet_Trunk):
    super(Test_AlexNet, self).__init__()
    self.truck = _Trunk(init_weights=True)
    self.nr_cate = nr_cate
    self.maskout = Maskout(nr_cate=nr_cate)