Example #1
0
def ResNetWong(c, **kargs):
    """Wong-style wide residual net: conv stem, four WideBlocks, FFNN head.

    c: number of output classes; **kargs is forwarded to the FFNN head.
    """
    stem = n.Conv(16, 3, padding=1, bias=False)
    # Two blocks at width 16, then widening (downsampling) blocks at 32 and 64.
    body = [
        n.WideBlock(16),
        n.WideBlock(16),
        n.WideBlock(32, True),
        n.WideBlock(64, True),
    ]
    head = n.FFNN([1000, c],
                  ibp_init=True,
                  bias=True,
                  last_lin=True,
                  last_zono=True,
                  **kargs)
    return n.Seq(stem, *body, head)
Example #2
0
def ResNetTiny(c, **kargs):
    """Tiny wide-ResNet with five non-downsampling blocks and a 500-unit head.

    (resnetWide also used by mixtrain and scaling provable adversarial defenses)
    """
    def block(width, bias=True, **kw):
        # WideBlock with no downsampling and no batch norm, IBP-friendly init.
        return n.WideBlock(width, False, bias=bias, ibp_init=True,
                           batch_norm=False, **kw)

    layers = [n.Conv(16, 3, padding=1, bias=True, ibp_init=True)]
    for width in (16, 32, 32, 32, 32):
        layers.append(block(width))
    layers.append(n.FFNN([500, c], bias=True, last_lin=True, ibp_init=True,
                         last_zono=True, **kargs))
    return n.Seq(*layers)
Example #3
0
def ResNetTiny_ManyFixed(c, **kargs):
    """ResNetTiny variant interleaving correlation-fix and decorrelation layers.

    (resnetWide also used by mixtrain and scaling provable adversarial defenses)
    """
    def block(width, bias=True, **kw):
        # WideBlock with no downsampling and no batch norm, IBP-friendly init.
        return n.WideBlock(width, False, bias=bias, ibp_init=True,
                           batch_norm=False, **kw)

    def fix(k):
        return n.CorrFix(k)

    def decorr(k):
        return n.DecorrMin(k, num_to_keep=True)

    return n.Seq(
        n.CorrMaxK(32),
        n.Conv(16, 3, padding=1, bias=True, ibp_init=True), fix(16), decorr(16),
        block(16), fix(8), decorr(8),
        block(32), fix(8), decorr(8),
        block(32), fix(4), decorr(4),
        block(32), n.Concretize(),
        block(32),
        n.FFNN([500, c], bias=True, last_lin=True, ibp_init=True,
               last_zono=True, **kargs))
Example #4
0
def ResNetLarge_LargeCombo(c, **kargs): # resnetWide also used by mixtrain and scaling provable adversarial defenses
    """Large wide-ResNet combining CorrMaxK, DecorrMin and scheduled DeepLoss
    layers.

    Args:
        c: number of output classes.
        **kargs: forwarded to the final FFNN head.
    """
    def wb(c, bias = True, **kargs):
        # WideBlock with no downsampling and no batch norm, IBP-friendly init.
        return n.WideBlock(c, False, bias=bias, ibp_init=True, batch_norm = False, **kargs)
    dl = n.DeepLoss
    cmk = n.CorrMaxK
    # NOTE(review): aliases for n.CorrMaxPool2D / n.CorrMaxPool3D were bound
    # here but never used; the dead locals have been removed.
    dec = lambda x: n.DecorrMin(x, num_to_keep = True)
    return n.Seq(n.Conv(16, 3, padding=1, bias=True, ibp_init = True), cmk(4),
                 wb(16), cmk(4), dec(4),
                 wb(32), cmk(4), dec(4),
                 wb(32), dl(S.Until(1, 0, S.Lin(0.5, 0, 50, 3))),
                 wb(32), cmk(4), dec(4),
                 wb(64), cmk(4), dec(2),
                 wb(64), dl(S.Until(24, S.Lin(0, 0.1, 20, 4), S.Lin(0.1, 0, 50))),
                 wb(64),
                 n.FFNN([1000, c], bias=True, last_lin=True, ibp_init = True, **kargs))
Example #5
0
def SkipNet18_Combo(c, **kargs):
    """Skip-connected ResNet-18 with correlation-fix / decorrelation extras
    and a scheduled DeepLoss, followed by a 512-512-c FFNN head."""
    def decorr(k):
        return n.DecorrMin(k, num_to_keep=True)

    # (layer, group-index) pairs injected into the backbone.
    extras = [
        (n.CorrFix(20), 2), (decorr(10), 2),
        (n.CorrFix(10), 3), (decorr(5), 3),
        (n.DeepLoss(S.Until(90, S.Lin(0, 0.2, 50, 40), 0)), 3),
        (n.CorrFix(5), 4), (decorr(2), 4),
    ]
    backbone = n.ResNet([2, 2, 2, 2],
                        extra=extras,
                        bias=True,
                        ibp_init=True,
                        skip_net=True)
    head = n.FFNN([512, 512, c],
                  bias=True,
                  last_lin=True,
                  last_zono=True,
                  ibp_init=True,
                  **kargs)
    return n.Seq(backbone, head)
def SemanticGenerator(latent_dim, bias=False, normal=False):
    """Build a DCGAN-style generator: FFNN projection, 7x7 unflatten, then
    transposed-conv upsampling layers.

    Args:
        latent_dim: size parameter of the latent projection.
        bias: whether conv/linear layers carry a bias term.
        normal: forwarded to ConvTranspose2D (presumably weight init — confirm).

    Fix: the Unflatten2d target previously read the global LATENT_DIM; the
    FFNN emits latent_dim * 7 * 7 * 4 features, so the reshape to
    (7, 7, latent_dim * 4) must use the parameter to stay consistent.
    """
    # (out_channels, kH, kW, stride, activation) per transposed-conv stage.
    conv_layers = [(latent_dim * 2, 5, 5, 2, 'ReLU'), (1, 4, 4, 2, 'tanh')]

    def transfer(tp):
        # Already-built modules pass through unchanged.
        if isinstance(tp, InferModule):
            return tp
        # A tuple led by a string describes a pooling layer.
        if isinstance(tp[0], str):
            return MaxPool2D(*tp[1:])
        # Otherwise: transposed convolution followed by its activation.
        return Seq(
            ConvTranspose2D(out_channels=tp[0],
                            kernel_size=tp[1],
                            stride=tp[-2] if len(tp) == 5 else 1,
                            bias=bias,
                            normal=normal,
                            padding=0), activation(tp[-1]))

    conv = [transfer(s) for s in conv_layers]
    return n.Seq(
        #input_shape=(Z_DIM,)
        n.FFNN([latent_dim * 7 * 7 * 4], batch_norm=True, bias=bias),
        # was LATENT_DIM * 4: must match the FFNN's output feature count
        Unflatten2d((7, 7, latent_dim * 4)),
        *conv)
Example #7
0
def resnet18small(c, **kargs):
    """Three-group ResNet backbone with a small 100-unit head (non-linear last layer)."""
    backbone = n.ResNet([2, 2, 2])
    head = n.FFNN([100, c], bias=True, last_lin=False, **kargs)
    return n.Seq(backbone, head)
Example #8
0
def ffnn(c, **kargs):
    """Feed-forward net: five 100-unit hidden layers and a c-way output."""
    widths = [100] * 5 + [c]
    return n.FFNN(widths, **kargs)
Example #9
0
def ResNet18(c, **kargs):
    """IBP-initialized ResNet-18 backbone with a 512-512-c zonotope head."""
    backbone = n.ResNet([2, 2, 2, 2], bias=True, ibp_init=True)
    head = n.FFNN([512, 512, c],
                  bias=True,
                  last_lin=True,
                  last_zono=True,
                  ibp_init=True,
                  **kargs)
    return n.Seq(backbone, head)
Example #10
0
def ffnnGGD(c, **kargs):
    """Six 500-unit ReLU layers with a linear c-way output layer."""
    widths = [500] * 6 + [c]
    return n.FFNN(widths, last_lin=True, activation="ReLU", **kargs)
Example #11
0
def WordLevelSST2(c, **kargs):
    """Word-level SST-2 classifier: embedding, conv, pooling, linear zonotope head."""
    return n.Seq(
        n.Embedding(1, 300),
        n.Conv(100, 5, bias=True),
        n.AvgPool2D(5),
        n.ReduceToZono(),
        n.FFNN([c], last_lin=True, last_zono=True, **kargs))
Example #12
0
def FFNN(c, **kargs):
    """Five 100-unit hidden layers with a linear zonotope output of size c."""
    widths = [100] * 5 + [c]
    return n.FFNN(widths, last_lin=True, last_zono=True, **kargs)
Example #13
0
def ffnnSIGMOID(c, **kargs):
    """Six 500-unit Sigmoid layers with a linear c-way output layer."""
    widths = [500] * 6 + [c]
    return n.FFNN(widths, last_lin=True, activation="Sigmoid", **kargs)
Example #14
0
def ffnnTANH(c, **kargs):
    """Six 500-unit Tanh layers with a linear c-way output layer."""
    widths = [500] * 6 + [c]
    return n.FFNN(widths, last_lin=True, activation="Tanh", **kargs)
Example #15
0
def resnet18(c, **kargs):
    """ResNet-18 layout ([2, 2, 2, 2] blocks) with a bias-free 512-512-c linear head."""
    trunk = n.ResNet([2, 2, 2, 2])
    classifier = n.FFNN([512, 512, c], bias=False, last_lin=True, **kargs)
    return n.Seq(trunk, classifier)
Example #16
0
def CharLevelAGSub(c, **kargs):
    """Char-level AG classifier with substitution-aware embedding and a zonotope head."""
    return n.Seq(
        n.EmbeddingWithSub(64, 64, 3),
        n.Conv(64, 10, bias=True),
        n.AvgPool2D(10),
        n.ReduceToZono(),
        n.FFNN([64, 64, c], last_lin=True, last_zono=True, **kargs))
Example #17
0
def resnet34(c, **kargs):
    """ResNet-34 layout ([3, 4, 6, 3] blocks) with a bias-free 512-512-c linear head."""
    trunk = n.ResNet([3, 4, 6, 3])
    classifier = n.FFNN([512, 512, c], bias=False, last_lin=True, **kargs)
    return n.Seq(trunk, classifier)
Example #18
0
def ffnnLin(c, **kargs):
    """Five 100-unit hidden layers with a linear c-way output layer."""
    widths = [100] * 5 + [c]
    return n.FFNN(widths, last_lin=True, **kargs)