def __init__(self, in_features, out_features):
    """Build a binarized MLP with a single 100-unit hidden block.

    Args:
        in_features: size of the flattened input vector.
        out_features: number of output classes.
    """
    super(BNN_1blk_100, self).__init__()
    # Bug fix: this assignment was commented out, but self.infl_ratio is
    # read below, which raised AttributeError at construction time.
    self.infl_ratio = 1
    self.num_internal_blocks = 1
    self.output_size = out_features
    self.input_size = in_features
    self.name = 'bnn_1blk_100'
    # hidden block: binarized linear -> batch-norm -> binarized activation
    self.fc1 = BinaryLinear(self.input_size, 100 * self.infl_ratio)
    self.bn1 = nn.BatchNorm1d(100 * self.infl_ratio, eps=1e-4)
    self.htanh1 = BinaryStraightThrough()
    # output projection
    self.fc5 = BinaryLinear(100 * self.infl_ratio, self.output_size)
    # explicit dim: implicit-dim LogSoftmax is deprecated in PyTorch
    self.logsoftmax = nn.LogSoftmax(dim=1)
def __init__(self, in_features, out_features, num_units=2048):
    """Construct a BinaryConnect MLP: three equal-width hidden blocks
    (BinaryLinear -> BatchNorm1d -> ReLU) followed by a batch-normalized
    binarized output projection (no final activation).

    Args:
        in_features: size of the flattened input vector.
        out_features: number of output classes.
        num_units: width of each hidden layer (default 2048).
    """
    super(BinaryConnect, self).__init__()
    widths = [in_features, num_units, num_units, num_units]
    modules = []
    # three identical hidden blocks, built over consecutive width pairs
    for d_in, d_out in zip(widths, widths[1:]):
        modules.append(BinaryLinear(d_in, d_out))
        modules.append(nn.BatchNorm1d(d_out, eps=1e-4, momentum=0.15))
        modules.append(nn.ReLU())
    # output projection: batch-norm only, no ReLU / no LogSoftmax here
    modules.append(BinaryLinear(num_units, out_features))
    modules.append(nn.BatchNorm1d(out_features, eps=1e-4, momentum=0.15))
    self.net = nn.Sequential(*modules)
def __init__(self, in_features, out_features, num_units=4096):
    """Construct a BinaryNet MLP: dropout-regularized binarized hidden
    layers with straight-through activations, ending in LogSoftmax.

    Args:
        in_features: size of the flattened input vector.
        out_features: number of output classes.
        num_units: width of each hidden layer (default 4096).
    """
    super(BinaryNet, self).__init__()
    self.net = nn.Sequential(
        nn.Dropout(p=0.2),
        BinaryLinear(in_features, num_units),
        nn.BatchNorm1d(num_units, eps=1e-4),
        BinaryStraightThrough(),
        nn.Dropout(),
        BinaryLinear(num_units, num_units),
        nn.BatchNorm1d(num_units, eps=1e-4),
        BinaryStraightThrough(),
        nn.Dropout(),
        BinaryLinear(num_units, num_units),
        nn.BatchNorm1d(num_units, eps=1e-4),
        BinaryStraightThrough(),
        nn.Dropout(),
        # Bug fix: the output width was hard-coded to 10, silently
        # ignoring the out_features parameter.
        BinaryLinear(num_units, out_features),
        nn.BatchNorm1d(out_features, eps=1e-4),
        # explicit dim: implicit-dim LogSoftmax is deprecated in PyTorch
        nn.LogSoftmax(dim=1),
    )
def __init__(self, topology, batch_norm):
    """Build a BinaryConnect MLP from an explicit list of layer widths.

    Args:
        topology: sequence of layer sizes [in, hidden..., out]; must have
            at least two entries, all strictly positive.
        batch_norm: if true, insert BatchNorm1d after every BinaryLinear.

    Raises:
        ValueError: if topology is too short or contains a non-positive size.
    """
    super(BinaryConnect, self).__init__()
    # Validate explicitly: `assert` is stripped under `python -O`, so it
    # must not be used for input validation.
    if len(topology) < 2:
        raise ValueError("topology must contain at least an input and an output size")
    if any(dim <= 0 for dim in topology):
        raise ValueError("all topology entries must be positive")
    layers = []
    for dim_in, dim_out in zip(topology[:-1], topology[1:]):
        layers.append(BinaryLinear(dim_in, dim_out))
        if batch_norm:
            layers.append(nn.BatchNorm1d(dim_out, eps=1e-4, momentum=0.15))
        layers.append(nn.ReLU())
    layers.pop()  # drop the trailing ReLU: the output layer gets LogSoftmax instead
    # explicit dim: implicit-dim LogSoftmax is deprecated in PyTorch
    layers.append(nn.LogSoftmax(dim=1))
    self.net = nn.Sequential(*layers)