def __init__(self):
    super().__init__()
    classes = 2
    self.sharedNet = SharedNet().double()                 # shared BiMap/ReEig feature extractor (62x62 -> 50x50)
    self.newbatch = nn_spd.NewBatchNormSPD(50).double()   # SPD batch normalization on the 50x50 output matrices
    self.re = nn_spd.ReEig()                              # eigenvalue rectification
    self.logeig = nn_spd.LogEig()                         # log-Euclidean projection to the tangent space
    self.linear = nn.Linear(50**2, classes, bias=True)    # classifier on the flattened 50x50 log-matrix
    self.dropout1 = nn.Dropout(p=0.3)
    self.dropout2 = nn.Dropout(p=0.3)
    self.dropout3 = nn.Dropout(p=0.5)
    self.linear.weight.data.normal_(0, 0.005)             # small-variance Gaussian init for the classifier weights
def __init__(self):
    super().__init__()
    # SPD feature dimensions: 62 -> 58 -> 54 -> 50
    dim = 62
    dim1 = 58
    dim2 = 54
    dim3 = 50
    self.re = nn_spd.ReEig()                       # eigenvalue rectification
    self.bimap1 = nn_spd.BiMap(1, 1, dim, dim1)    # bilinear mapping 62x62 -> 58x58
    self.bimap2 = nn_spd.BiMap(1, 1, dim1, dim2)   # bilinear mapping 58x58 -> 54x54
    self.bimap3 = nn_spd.BiMap(1, 1, dim2, dim3)   # bilinear mapping 54x54 -> 50x50
    self.logeig = nn_spd.LogEig()                  # log-Euclidean projection to the tangent space
    self.dropout1 = nn.Dropout(p=0.3)
    self.dropout2 = nn.Dropout(p=0.3)
def __init__(self):
    super().__init__()
    # SPD feature dimensions: 62 -> 58 -> 54 -> 50
    dim = 62
    dim1 = 58
    dim2 = 54
    dim3 = 50
    classes = 2
    self.re = nn_spd.ReEig()                         # eigenvalue rectification
    self.bimap1 = nn_spd.BiMap(1, 1, dim, dim1)      # bilinear mapping 62x62 -> 58x58
    self.batchnorm1 = nn_spd.BatchNormSPD(dim1)      # SPD batch normalization on 58x58 matrices
    self.bimap2 = nn_spd.BiMap(1, 1, dim1, dim2)     # bilinear mapping 58x58 -> 54x54
    self.batchnorm2 = nn_spd.BatchNormSPD(dim2)      # SPD batch normalization on 54x54 matrices
    self.bimap3 = nn_spd.BiMap(1, 1, dim2, dim3)     # bilinear mapping 54x54 -> 50x50
    self.batchnorm3 = nn_spd.BatchNormSPD(dim3)      # SPD batch normalization on 50x50 matrices
    self.logeig = nn_spd.LogEig()                    # log-Euclidean projection to the tangent space
    self.linear = nn.Linear(dim3**2, classes, bias=True)  # classifier on the flattened 50x50 log-matrix
    self.dropout1 = nn.Dropout(p=0.3)
    self.dropout2 = nn.Dropout(p=0.3)
    self.dropout3 = nn.Dropout(p=0.5)
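# A minimal sketch of a matching forward pass for the batch-normalized model above
# (not part of the original listing). It assumes the usual SPDNet ordering
# BiMap -> BatchNormSPD -> ReEig per stage, followed by LogEig, flattening, and the
# linear classifier; the placement of the dropout layers is likewise an assumption.
def forward(self, x):
    x = self.re(self.batchnorm1(self.bimap1(x)))  # stage 1: 62x62 -> 58x58
    x = self.re(self.batchnorm2(self.bimap2(x)))  # stage 2: 58x58 -> 54x54
    x = self.re(self.batchnorm3(self.bimap3(x)))  # stage 3: 54x54 -> 50x50
    x = self.logeig(x)                            # map SPD matrices to the tangent space
    x = x.view(x.shape[0], -1)                    # flatten the 50x50 log-matrix per sample
    x = self.dropout3(x)                          # assumed dropout location before the classifier
    return self.linear(x)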