Example #1
    def __init__(self,
                 dim=3,
                 z_dim=128,
                 c_dim=128,
                 hidden_size=256,
                 leaky=False,
                 legacy=False):
        super().__init__()
        self.z_dim = z_dim
        if z_dim != 0:
            # project the latent code z to the hidden feature size
            self.fc_z = nn.Linear(z_dim, hidden_size)

        self.fc_p = nn.Conv1d(dim, hidden_size, 1)
        self.block0 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block1 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block2 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block3 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block4 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)

        # conditional batch norm, modulated by the condition code c
        if not legacy:
            self.bn = CBatchNorm1d(c_dim, hidden_size)
        else:
            self.bn = CBatchNorm1d_legacy(c_dim, hidden_size)

        self.fc_out = nn.Conv1d(hidden_size, 1, 1)

        if not leaky:
            self.actvn = F.relu
        else:
            self.actvn = lambda x: F.leaky_relu(x, 0.2)
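All of these constructors rely on a conditional batch-norm layer (CBatchNorm1d) to inject the condition code c into the decoder. That layer is not shown on this page; the sketch below is a minimal PyTorch version in the spirit of the Occupancy Networks code, where the per-channel scale and shift are predicted from c by 1x1 convolutions. Treat it as an illustration, not the exact implementation from the original repository.

import torch.nn as nn


class CBatchNorm1dSketch(nn.Module):
    """Minimal conditional batch norm: gamma and beta are predicted from c."""

    def __init__(self, c_dim, f_dim):
        super().__init__()
        # per-channel scale and shift predicted from the condition code
        self.conv_gamma = nn.Conv1d(c_dim, f_dim, 1)
        self.conv_beta = nn.Conv1d(c_dim, f_dim, 1)
        # plain batch norm without its own affine parameters
        self.bn = nn.BatchNorm1d(f_dim, affine=False)

    def forward(self, x, c):
        # x: (batch, f_dim, n_points); c: (batch, c_dim) or (batch, c_dim, 1)
        if c.dim() == 2:
            c = c.unsqueeze(2)
        gamma = self.conv_gamma(c)
        beta = self.conv_beta(c)
        return gamma * self.bn(x) + beta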
Example #2
    def __init__(self,
                 z_dim=128,
                 c_dim=128,
                 hidden_size=256,
                 leaky=False,
                 legacy=False):
        super().__init__()
        self.z_dim = z_dim
        if z_dim != 0:
            self.fc_z = tf.keras.layers.Dense(hidden_size)

        self.fc_p = tf.keras.layers.Conv1D(hidden_size, 1)

        self.block0 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block1 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block2 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block3 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block4 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)

        if not legacy:
            self.bn = CBatchNorm1d(c_dim, hidden_size)
        else:
            self.bn = CBatchNorm1dLegacy(c_dim, hidden_size)

        self.fc_out = tf.keras.layers.Conv1D(1, 1)

        if not leaky:
            self.actvn = tf.keras.layers.ReLU()
        else:
            self.actvn = tf.keras.layers.LeakyReLU(0.2)
Example #3
    def __init__(self, dim=3, z_dim=128, c_dim=128,
                 hidden_size=256, leaky=False, legacy=False):
        super().__init__()
        self.z_dim = z_dim
        if z_dim != 0:
            self.fc_z = nn.Linear(z_dim, hidden_size)

        self.pe = positional_encoding()
        # print('hidden_size',hidden_size)
        # self.fc_p = nn.Conv1d(dim, hidden_size, 1)
        self.fc_pos = nn.Linear(60, 256)
        # self.block0 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        # self.block1 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        # self.block2 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        # self.block3 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        # self.block4 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block0 = CResnetBlockConv1d(256, hidden_size, legacy=legacy)
        self.block1 = CResnetBlockConv1d_alpa(67, hidden_size, alpa_dim=256, legacy=legacy)
        self.block2 = CResnetBlockConv1d_alpa(131, hidden_size, alpa_dim=67, legacy=legacy)
        self.block3 = CResnetBlockConv1d_alpa(259, hidden_size, alpa_dim=131, legacy=legacy)
        self.block4 = CResnetBlockConv1d_alpa(515, hidden_size, alpa_dim=259, legacy=legacy)

        if not legacy:
            self.bn = CBatchNorm1d(c_dim, hidden_size)
        else:
            self.bn = CBatchNorm1d_legacy(c_dim, hidden_size)

        self.fc_out = nn.Conv1d(hidden_size, 1, 1)

        if not leaky:
            self.actvn = F.relu
        else:
            self.actvn = lambda x: F.leaky_relu(x, 0.2)
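Example #3 swaps the raw point input for a positional encoding: fc_pos expects 60 input channels, which matches a NeRF-style encoding of 3-D coordinates with 10 frequency bands (3 dims x 10 frequencies x 2 for sin/cos = 60). The positional_encoding() helper itself is not shown here, so the function below is only a plausible reconstruction under that assumption.

import math

import torch


def positional_encoding_sketch(p, n_freqs=10):
    # p: (batch, n_points, 3) -> (batch, n_points, 3 * n_freqs * 2) = 60 channels
    feats = []
    for i in range(n_freqs):
        freq = (2.0 ** i) * math.pi
        feats.append(torch.sin(freq * p))
        feats.append(torch.cos(freq * p))
    return torch.cat(feats, dim=-1)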
Example #4
    def __init__(self,
                 dim=3,
                 z_dim=128,
                 c_dim=128,
                 hidden_size=256,
                 leaky=False,
                 legacy=False):
        super().__init__()
        self.z_dim = z_dim
        self.c_dim = c_dim
        # note: the hidden_size argument is overridden by fixed values here
        p_hidden_size = 256
        hidden_size = 128

        if z_dim != 0:
            self.fc_z = nn.Linear(z_dim, p_hidden_size)

        self.fc_p = nn.Conv1d(dim, p_hidden_size, 1)
        self.block0 = CResnetBlockConv1d(c_dim,
                                         p_hidden_size,
                                         size_out=hidden_size * 4,
                                         legacy=legacy)
        self.block1 = CResnetBlockConv1d(c_dim,
                                         hidden_size * 4 + p_hidden_size,
                                         size_out=hidden_size * 4,
                                         legacy=legacy)
        self.block2 = CResnetBlockConv1d(c_dim,
                                         hidden_size * 4 + p_hidden_size,
                                         size_out=hidden_size * 2,
                                         legacy=legacy)
        self.block3 = CResnetBlockConv1d(c_dim,
                                         hidden_size * 2 + p_hidden_size,
                                         size_out=hidden_size,
                                         legacy=legacy)

        if not legacy:
            self.bn = CBatchNorm1d(c_dim, hidden_size)
        else:
            self.bn = CBatchNorm1d_legacy(c_dim, hidden_size)

        self.fc_out = nn.Conv1d(hidden_size, 1, 1)

        if not leaky:
            self.actvn = nn.ReLU(inplace=True)
        else:
            self.actvn = lambda x: F.leaky_relu(x, 0.2, True)
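The channel sizes in Example #4 (each later block takes hidden_size * k plus p_hidden_size input channels) suggest that the point features from fc_p are concatenated onto every block's input, skip-connection style. The forward pass is not shown on this page, so the wiring below is only an assumption inferred from those sizes, not the author's code; it also assumes torch is imported and that each block is called as block(features, c).

    def forward(self, p, z, c):
        # hypothetical forward pass consistent with the channel sizes above
        net_p = self.fc_p(p.transpose(1, 2))                       # (batch, 256, T)
        if self.z_dim != 0:
            # fold the projected latent code into the point features
            net_p = net_p + self.fc_z(z).unsqueeze(2)

        net = self.block0(net_p, c)                                # (batch, 512, T)
        net = self.block1(torch.cat([net, net_p], dim=1), c)       # (batch, 512, T)
        net = self.block2(torch.cat([net, net_p], dim=1), c)       # (batch, 256, T)
        net = self.block3(torch.cat([net, net_p], dim=1), c)       # (batch, 128, T)

        out = self.fc_out(self.actvn(self.bn(net, c)))             # (batch, 1, T)
        return out.squeeze(1)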
Example #5
    def __init__(self,
                 dim=3,
                 z_dim=128,
                 c_dim=128,
                 hidden_size=256,
                 leaky=False,
                 legacy=False,
                 n_classes=1,
                 instance_loss=False):
        super().__init__()
        #print('using sigmoid')
        self.z_dim = z_dim
        if z_dim != 0:
            self.fc_z = nn.Linear(z_dim, hidden_size)

        self.fc_p = nn.Conv1d(dim, hidden_size, 1)
        self.block0 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block1 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block2 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block3 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)
        self.block4 = CResnetBlockConv1d(c_dim, hidden_size, legacy=legacy)

        if not legacy:
            self.bn = CBatchNorm1d(c_dim, hidden_size)
        else:
            self.bn = CBatchNorm1d_legacy(c_dim, hidden_size)

        self.instance_loss = instance_loss

        self.fc_out = nn.Conv1d(hidden_size, n_classes, 1)
        self.fc_vote = None

        if self.instance_loss:
            self.fc_vote = nn.Conv1d(hidden_size, 3, 1)

        if not leaky:
            self.actvn = F.relu
        else:
            self.actvn = lambda x: F.leaky_relu(x, 0.2)
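Example #5 generalizes the output head: fc_out emits n_classes logits per query point, and when instance_loss is enabled an extra fc_vote head regresses a 3-D vector per point (e.g. a vote toward an instance center). A quick shape check on dummy decoder features:

import torch
import torch.nn as nn

hidden_size, n_classes, n_points = 256, 4, 1024
feat = torch.randn(2, hidden_size, n_points)   # dummy decoder features

fc_out = nn.Conv1d(hidden_size, n_classes, 1)
fc_vote = nn.Conv1d(hidden_size, 3, 1)

print(fc_out(feat).shape)    # torch.Size([2, 4, 1024])  per-point class logits
print(fc_vote(feat).shape)   # torch.Size([2, 3, 1024])  per-point 3-D votes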
Example #6
    def __init__(self, dim=3, z_dim=0, c_dim=128, hidden_size=256, n_blocks=5):
        super().__init__()
        self.z_dim = z_dim
        if z_dim != 0:
            self.fc_z = nn.Linear(z_dim, c_dim)

        self.conv_p = nn.Conv1d(dim, hidden_size, 1)
        self.blocks = nn.ModuleList(
            [CResnetBlockConv1d(c_dim, hidden_size) for i in range(n_blocks)])

        self.bn = CBatchNorm1d(c_dim, hidden_size)
        self.conv_out = nn.Conv1d(hidden_size, 1, 1)
        self.actvn = nn.ReLU()
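Example #6 stores its residual blocks in an nn.ModuleList rather than a plain Python list; only the former registers the blocks' parameters with the parent module, so they appear in .parameters() and move with .to(device). A minimal demonstration with stand-in linear blocks (CResnetBlockConv1d is defined elsewhere in the codebase):

import torch.nn as nn

class WithModuleList(nn.Module):
    def __init__(self):
        super().__init__()
        self.blocks = nn.ModuleList([nn.Linear(8, 8) for _ in range(3)])

class WithPlainList(nn.Module):
    def __init__(self):
        super().__init__()
        self.blocks = [nn.Linear(8, 8) for _ in range(3)]  # not registered!

print(len(list(WithModuleList().parameters())))  # 6 (weight + bias per block)
print(len(list(WithPlainList().parameters())))   # 0 -- an optimizer would miss them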
Example #7
    def __init__(self, z_dim=0, c_dim=128, hidden_size=256, n_blocks=5):
        super().__init__()
        self.z_dim = z_dim
        if z_dim != 0:
            self.fc_z = tf.keras.layers.Dense(c_dim)

        self.conv_p = tf.keras.layers.Conv1D(hidden_size, 1)
        self.blocks = [
            CResnetBlockConv1d(c_dim, hidden_size) for i in range(n_blocks)
        ]  # CHECK nn.ModuleList -> List

        self.bn = CBatchNorm1d(c_dim, hidden_size)
        self.conv_net = tf.keras.layers.Conv1D(1, 1)
        self.actvn = tf.keras.layers.ReLU()
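The "# CHECK nn.ModuleList -> List" note in Example #7 concerns the same issue on the TensorFlow side: Keras layers (and tf.Module) automatically track sub-layers stored in plain Python list attributes, so a plain list is usually sufficient there. A small check, assuming the blocks are ordinary Keras layers:

import tensorflow as tf

class ListTracking(tf.keras.layers.Layer):
    def __init__(self):
        super().__init__()
        # sub-layers in a plain list are still tracked by Keras
        self.blocks = [tf.keras.layers.Dense(8) for _ in range(3)]

    def call(self, x):
        for block in self.blocks:
            x = block(x)
        return x

layer = ListTracking()
layer(tf.zeros([1, 8]))                  # build the sub-layers
print(len(layer.trainable_variables))    # 6 (kernel + bias per Dense)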