Example #1

# Constructor of a multi-scale spatial-temporal graph convolution block.
# The enclosing nn.Module subclass and its imports (numpy as np, torch,
# torch.nn as nn) are not shown in this excerpt; the helper functions it
# calls are sketched after the example.
    def __init__(self,
                 in_channels,
                 out_channels,
                 A_binary,
                 num_scales,
                 window_size,
                 disentangled_agg=True,
                 use_Ares=True,
                 residual=False,
                 dropout=0,
                 activation='relu'):

        super().__init__()
        self.num_scales = num_scales
        self.window_size = window_size
        self.use_Ares = use_Ares
        # Joint spatial-temporal adjacency over a window of `window_size` frames
        A = self.build_spatial_temporal_graph(A_binary, window_size)

        if disentangled_agg:
            # Disentangled multi-scale aggregation: one adjacency per exact
            # k-hop neighborhood, each normalized independently, then stacked
            A_scales = [k_adjacency(A, k, with_self=True) for k in range(num_scales)]
            A_scales = np.concatenate([normalize_adjacency_matrix(g) for g in A_scales])
        else:
            # Self-loops have already been included in A
            # Conventional aggregation: the k-th scale is the k-th power of
            # the normalized adjacency, so it mixes all hops up to k
            A_scales = [normalize_adjacency_matrix(A) for k in range(num_scales)]
            A_scales = [np.linalg.matrix_power(g, k) for k, g in enumerate(A_scales)]
            A_scales = np.concatenate(A_scales)

        self.A_scales = torch.Tensor(A_scales)
        self.V = len(A_binary)  # number of joints in a single frame

        if use_Ares:
            # Learnable adjacency residual, initialized close to zero so the
            # graph starts out at the precomputed A_scales
            self.A_res = nn.init.uniform_(nn.Parameter(torch.randn(self.A_scales.shape)), -1e-6, 1e-6)
        else:
            self.A_res = torch.tensor(0)

        # Fuse the num_scales aggregated feature sets back down to out_channels
        self.mlp = MLP(in_channels * num_scales, [out_channels], dropout=dropout, activation='linear')

        # Residual connection
        if not residual:
            self.residual = lambda x: 0
        elif (in_channels == out_channels):
            self.residual = lambda x: x
        else:
            self.residual = MLP(in_channels, [out_channels], activation='linear')

        self.act = activation_factory(activation)

        # Squeeze-and-excitation style channel attention:
        # global pooling -> bottleneck (1/4 channels) -> expand -> sigmoid gate
        self.global_pool = nn.AdaptiveAvgPool2d(1)
        self.conv_down = nn.Conv2d(
            out_channels, out_channels // 4, kernel_size=1, bias=False)
#        nn.init.constant_(self.conv_down.weight, 0)
        nn.init.normal_(self.conv_down.weight, 0, 0.001)
        # conv_up starts at zero so the attention branch is initially inactive
        self.conv_up = nn.Conv2d(
            out_channels // 4, out_channels, kernel_size=1, bias=False)
        nn.init.constant_(self.conv_up.weight, 0)
        self.relu = nn.ReLU()
        self.sig = nn.Sigmoid()
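
The constructor above calls build_spatial_temporal_graph, k_adjacency and normalize_adjacency_matrix, none of which are included in the excerpt. Below is a minimal sketch of what they plausibly look like, assuming a binary adjacency input and symmetric degree normalization; the actual implementations in the source repository may differ (build_spatial_temporal_graph is a method on the class, shown here as a free function for brevity).

import numpy as np

def normalize_adjacency_matrix(A):
    # Symmetric normalization D^-1/2 A D^-1/2 (assumed scheme)
    deg_inv_sqrt = np.power(A.sum(-1), -0.5)
    D_inv_sqrt = np.eye(len(A)) * deg_inv_sqrt
    return (D_inv_sqrt @ A @ D_inv_sqrt).astype(np.float32)

def k_adjacency(A, k, with_self=False):
    # Mask of nodes reachable in exactly k hops:
    # reachable within k hops minus reachable within k-1 hops
    I = np.eye(len(A), dtype=A.dtype)
    if k == 0:
        return I
    Ak = np.minimum(np.linalg.matrix_power(A + I, k), 1) \
         - np.minimum(np.linalg.matrix_power(A + I, k - 1), 1)
    if with_self:
        Ak += I
    return Ak

def build_spatial_temporal_graph(A_binary, window_size):
    # Tile the single-frame graph across a window of frames so every joint
    # connects to itself and its spatial neighbors in all window_size frames
    A_with_I = A_binary + np.eye(len(A_binary), dtype=A_binary.dtype)
    return np.tile(A_with_I, (window_size, window_size)).copy()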
Example #2

# Constructor of, apparently, the MLP block used in Example #1: a stack of
# 1x1 convolutions (per-position linear layers) with BatchNorm, activation,
# and optional dropout. The enclosing class and imports are not shown;
# a presumed forward pass and usage sketch follow the example.
    def __init__(self,
                 in_channels,
                 out_channels,
                 activation='relu',
                 dropout=0):
        super().__init__()
        # out_channels is a list of layer widths; each stage is a 1x1 conv
        # followed by BatchNorm and an activation, optionally preceded by dropout
        channels = [in_channels] + out_channels
        self.layers = nn.ModuleList()
        for i in range(1, len(channels)):
            if dropout > 0.001:
                self.layers.append(nn.Dropout(p=dropout))
            self.layers.append(
                nn.Conv2d(channels[i - 1], channels[i], kernel_size=1))
            self.layers.append(nn.BatchNorm2d(channels[i]))
            self.layers.append(activation_factory(activation))
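
Only the constructor is shown above. Below is a presumed forward pass (simply chaining the layers) together with a plausible activation_factory and a quick shape check; everything here is a sketch consistent with how these names are called in the excerpts, not the repository's actual code.

import torch
import torch.nn as nn

def activation_factory(name):
    # 'relu' and 'linear' are the only names used in these excerpts
    if name == 'relu':
        return nn.ReLU(inplace=True)
    if name == 'linear' or name is None:
        return nn.Identity()
    raise ValueError(f'Unsupported activation: {name}')

class MLP(nn.Module):
    def __init__(self, in_channels, out_channels, activation='relu', dropout=0):
        # Same body as Example #2
        super().__init__()
        channels = [in_channels] + out_channels
        self.layers = nn.ModuleList()
        for i in range(1, len(channels)):
            if dropout > 0.001:
                self.layers.append(nn.Dropout(p=dropout))
            self.layers.append(nn.Conv2d(channels[i - 1], channels[i], kernel_size=1))
            self.layers.append(nn.BatchNorm2d(channels[i]))
            self.layers.append(activation_factory(activation))

    def forward(self, x):
        # x: (N, C, T, V); each 1x1 conv acts as a per-(frame, joint) linear map
        for layer in self.layers:
            x = layer(x)
        return x

# Usage: project 3-channel joint coordinates to 64 features
mlp = MLP(3, [64], dropout=0.1)
out = mlp(torch.randn(8, 3, 50, 25))  # -> torch.Size([8, 64, 50, 25])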
Example #3

# Constructor of a multi-branch temporal convolution block: several dilated
# temporal convolutions plus a max-pooling branch and a plain 1x1 branch.
# The enclosing class and imports are not shown; a TemporalConv sketch
# follows the example.
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 dilations=[1, 2, 3, 4],
                 residual=True,
                 residual_kernel_size=1,
                 activation='relu'):

        super().__init__()
        assert out_channels % (len(dilations) + 2) == 0, \
            '# out channels should be a multiple of # branches'

        # Multiple branches of temporal convolution
        self.num_branches = len(dilations) + 2
        branch_channels = out_channels // self.num_branches

        # Temporal Convolution branches
        self.branches = nn.ModuleList([
            nn.Sequential(
                nn.Conv2d(in_channels,
                          branch_channels,
                          kernel_size=1,
                          padding=0),
                nn.BatchNorm2d(branch_channels),
                activation_factory(activation),
                TemporalConv(branch_channels,
                             branch_channels,
                             kernel_size=kernel_size,
                             stride=stride,
                             dilation=dilation),
            ) for dilation in dilations
        ])

        # Additional Max & 1x1 branch
        self.branches.append(
            nn.Sequential(
                nn.Conv2d(in_channels,
                          branch_channels,
                          kernel_size=1,
                          padding=0), nn.BatchNorm2d(branch_channels),
                activation_factory(activation),
                nn.MaxPool2d(kernel_size=(3, 1),
                             stride=(stride, 1),
                             padding=(1, 0)), nn.BatchNorm2d(branch_channels)))

        self.branches.append(
            nn.Sequential(
                nn.Conv2d(in_channels,
                          branch_channels,
                          kernel_size=1,
                          padding=0,
                          stride=(stride, 1)),
                nn.BatchNorm2d(branch_channels)))

        # Residual connection
        if not residual:
            self.residual = lambda x: 0
        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x
        else:
            self.residual = TemporalConv(in_channels,
                                         out_channels,
                                         kernel_size=residual_kernel_size,
                                         stride=stride)

        self.act = activation_factory(activation)
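
Two pieces are missing from this excerpt: the TemporalConv module that the branches and the residual path delegate to, and the block's forward pass. Below is a minimal sketch under the assumption that TemporalConv is a dilated convolution along the time axis with 'same' padding followed by BatchNorm, and that the forward pass concatenates branch outputs along channels (num_branches * branch_channels == out_channels) before adding the residual and applying the activation.

import torch
import torch.nn as nn

class TemporalConv(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, dilation=1):
        super().__init__()
        # 'Same' padding along time for the dilated (kernel_size x 1) kernel
        pad = (kernel_size + (kernel_size - 1) * (dilation - 1) - 1) // 2
        self.conv = nn.Conv2d(in_channels, out_channels,
                              kernel_size=(kernel_size, 1),
                              padding=(pad, 0),
                              stride=(stride, 1),
                              dilation=(dilation, 1))
        self.bn = nn.BatchNorm2d(out_channels)

    def forward(self, x):
        return self.bn(self.conv(x))

# Presumed forward pass of the multi-branch block in Example #3; every branch
# strides time identically, so outputs share (N, branch_channels, T', V):
#
#     def forward(self, x):
#         res = self.residual(x)
#         out = torch.cat([branch(x) for branch in self.branches], dim=1)
#         out += res
#         return self.act(out)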