def __init__(self, i, o):
        """Linear layer carrying a private sign-based watermark signature.

        i: number of input features; o: number of output features.
        NOTE(review): relies on SignLoss, init_scale, init_bias and
        reset_parameters defined on the enclosing class (not visible here).
        """
        super().__init__()

        # Weighting factor for the sign-regularisation loss below.
        self.alpha = 0.1
        b = torch.sign(torch.rand(o) - 0.5)  # random ±1 signature bits (length o) to embed
        # Persist the signature with the module's state_dict.
        self.register_buffer('b', b)
        self.sign_loss_private = SignLoss(self.alpha, self.b)

        self.linear = nn.Linear(i, o, bias=True)
        # Alias: self.weight refers to the wrapped layer's weight tensor (shared, not copied).
        self.weight = self.linear.weight
        # Non-affine GroupNorm; assumes o is divisible by 16 — TODO confirm.
        self.gn = nn.GroupNorm(o // 16, o, affine=False)
        self.dropout = nn.Dropout(p=0.4)

        # Bottleneck MLP (o -> o//4 -> o), no biases; how it is used is not visible in this chunk.
        self.fc = nn.Sequential(
            nn.Linear(o, o // 4, bias=False),
            nn.LeakyReLU(inplace=True),
            nn.Linear(o // 4, o, bias=False),
        )


        # Private key buffers, registered empty (None) and presumably filled in later.
        self.register_buffer('key_private', None)
        self.register_buffer('skey_private', None)

        self.init_scale(True)   # unlike v1, this is True here
        self.init_bias(True)    # unlike v1, this is True here

        self.reset_parameters()
        self.requires_reset_key = False
        self.key_type = 'random'
# Example #2 (scraping artifact: separator between pasted snippets)
    def __init__(self, i, o):
        """Linear layer carrying a private sign-based watermark signature.

        i: number of input features; o: number of output features.
        NOTE(review): relies on SignLoss, init_scale, init_bias and
        reset_parameters defined on the enclosing class (not visible here).
        """
        super().__init__()

        # Weighting factor for the sign-regularisation loss below.
        self.alpha = 0.1
        b = torch.sign(torch.rand(o) - 0.5)  # random ±1 signature bits (length o) to embed
        # Persist the signature with the module's state_dict.
        self.register_buffer('b', b)
        self.sign_loss_private = SignLoss(self.alpha, self.b)

        self.linear = nn.Linear(i, o, bias=True)
        # Alias: self.weight refers to the wrapped layer's weight tensor (shared, not copied).
        self.weight = self.linear.weight
        self.in_features = i
        # Private key buffers, registered empty (None) and presumably filled in later.
        self.register_buffer('key_private', None)
        self.register_buffer('skey_private', None)

        self.init_scale(True)  # unlike v1, this is True here
        self.init_bias(True)  # unlike v1, this is True here

        self.reset_parameters()
        self.requires_reset_key = False
        self.key_type = 'random'
    def __init__(self, i, o, ks=1):
        """Conv1d layer carrying a private sign-based watermark signature.

        i: input channels; o: output channels; ks: kernel size (default 1).
        NOTE(review): relies on SignLoss, init_scale, init_bias and
        reset_parameters defined on the enclosing class (not visible here).
        """
        super().__init__()

        # Weighting factor for the sign-regularisation loss below.
        self.alpha = 0.1
        b = torch.sign(torch.rand(o) - 0.5)  # random ±1 signature bits (length o) to embed
        # Persist the signature with the module's state_dict.
        self.register_buffer('b', b)
        self.sign_loss_private = SignLoss(self.alpha, self.b)

        self.conv = nn.Conv1d(i, o, ks,  bias=False)
        # Non-affine GroupNorm; assumes o is divisible by 16 — TODO confirm.
        self.gn = nn.GroupNorm(o // 16, o, affine=False)
        # Alias: self.weight refers to the wrapped layer's weight tensor (shared, not copied).
        self.weight = self.conv.weight

        # Private key buffers, registered empty (None) and presumably filled in later.
        self.register_buffer('key_private', None)
        self.register_buffer('skey_private', None)

        self.init_scale(True)
        self.init_bias(True)

        self.reset_parameters()
        self.requires_reset_key = False
        self.key_type = 'random'
    def __init__(self, i, o, ks=1):
        """Conv1d layer carrying a private sign-based watermark signature.

        Uses two non-affine BatchNorm1d layers (bn0/bn1) instead of GroupNorm.

        i: input channels; o: output channels; ks: kernel size (default 1).
        NOTE(review): relies on SignLoss, init_scale, init_bias and
        reset_parameters defined on the enclosing class (not visible here).
        """
        super().__init__()

        # Weighting factor for the sign-regularisation loss below.
        self.alpha = 0.1
        b = torch.sign(torch.rand(o) - 0.5)  # random ±1 signature bits (length o) to embed
        # Persist the signature with the module's state_dict.
        self.register_buffer('b', b)
        self.sign_loss_private = SignLoss(self.alpha, self.b)

        self.conv = nn.Conv1d(i, o, ks,  bias=False)
        # Two separate non-affine batch norms; which branch each serves is not visible here.
        self.bn0 = nn.BatchNorm1d(o, affine=False)
        self.bn1 = nn.BatchNorm1d(o, affine=False)

        # Bottleneck MLP (o -> o//4 -> o), no biases; how it is used is not visible in this chunk.
        self.fc = nn.Sequential(
            nn.Linear(o, o // 4, bias=False),
            nn.LeakyReLU(inplace=True),
            nn.Linear(o // 4, o, bias=False),
        )

        # Alias: self.weight refers to the wrapped layer's weight tensor (shared, not copied).
        self.weight = self.conv.weight

        # Private key buffers, registered empty (None) and presumably filled in later.
        self.register_buffer('key_private', None)
        self.register_buffer('skey_private', None)

        self.init_scale(True)
        self.init_bias(True)

        self.reset_parameters()
        self.requires_reset_key = False
        self.key_type = 'random'