Example #1
    def _build_graph_conv(self, F_h):
        # Normalize F_h to a list of hidden feature sizes.
        self.F_h = list(F_h) if isinstance(F_h, tuple) else F_h
        self.conv, self.bn = [], []
        # Layer i maps f_in -> f_out: the first layer consumes the input
        # features F_e, each later layer the previous hidden size.
        f_in_list = [self.F_e] + self.F_h[:-1]
        for i, (f_in, f_out) in enumerate(zip(f_in_list, self.F_h)):
            conv = modules.GraphConv(f_in, f_out, self.N_B + self.D)
            self.conv.append(conv)
            self.register_child(conv)

            # Batch-normalize the input of every layer except the first.
            if i != 0:
                bn = modules.BatchNorm(in_channels=f_in)
                self.register_child(bn)
            else:
                bn = None
            self.bn.append(bn)

        # Skip connection over the concatenation of all hidden features.
        self.bn_skip = modules.BatchNorm(in_channels=sum(self.F_h))
        self.linear_skip = modules.Linear_BN(sum(self.F_h), self.F_skip)

        # Projectors for the conditional variable, one per hidden layer.
        self.linear_c = []
        for i, f_out in enumerate(self.F_h):
            if self.rename:
                linear_c = nn.Dense(f_out,
                                    use_bias=False,
                                    in_units=self.N_C,
                                    prefix='cond_{}'.format(i))
            else:
                linear_c = nn.Dense(f_out, use_bias=False, in_units=self.N_C)
            self.register_child(linear_c)
            self.linear_c.append(linear_c)
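
The method above reads several attributes (F_e, N_B, D, F_skip, N_C, rename) that must already be set on the instance. Below is a minimal sketch of a hypothetical host Block that satisfies those assumptions; GraphEncoder and all sizes are made up for illustration, and modules refers to this project's own layer wrappers:

from mxnet import gluon

class GraphEncoder(gluon.Block):  # hypothetical host class
    def __init__(self, F_e, F_h, N_B, D, F_skip, N_C, rename=False):
        super(GraphEncoder, self).__init__()
        # Attributes read by _build_graph_conv.
        self.F_e, self.N_B, self.D = F_e, N_B, D
        self.F_skip, self.N_C, self.rename = F_skip, N_C, rename
        self._build_graph_conv(F_h)  # defined as in the example above

enc = GraphEncoder(F_e=16, F_h=(32, 64, 64), N_B=4, D=2, F_skip=128, N_C=8)
enc.initialize()  # initializes every registered child in one pass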
Example #2
    def __init__(self):
        super(SimplerCNN, self).__init__()
        # Dropout applied directly to the input channels.
        self.dropout2d_input = nn.Dropout2d(rate=0.3)
        # Conv stage 1: 3 -> 15 channels.
        self.conv1 = nn.Conv2d(in_channels=3,
                               out_channels=15,
                               kernel_size=3,
                               stride=3,
                               padding=2)
        self.relu1 = nn.LeakyRelu()
        # Conv stage 2: 15 -> 30 channels.
        self.conv2 = nn.Conv2d(in_channels=15,
                               out_channels=30,
                               kernel_size=3,
                               stride=3,
                               padding=3)
        self.relu2 = nn.LeakyRelu()
        self.dropout2d_conv1 = nn.Dropout2d(rate=0.5)
        # Conv stage 3: 30 -> 40 channels, default stride, no padding.
        self.conv3 = nn.Conv2d(in_channels=30, out_channels=40, kernel_size=4)
        self.relu3 = nn.LeakyRelu()
        self.flatten = nn.Flatten()
        self.dropout2d_conv2 = nn.Dropout2d(rate=0.2)
        # Classifier head: 360 -> 180 -> 10 with batch norm and dropout.
        self.linear = nn.Linear(in_dimension=360, out_dimension=180)
        self.relu4 = nn.LeakyRelu()
        self.bn1 = nn.BatchNorm()
        self.dropout3 = nn.Dropout(rate=0.3)
        self.linear2 = nn.Linear(in_dimension=180, out_dimension=10)
        self.bn2 = nn.BatchNorm()
        self.softmax = nn.Softmax()
        # Wire the layers defined above into the forward pass.
        self.set_forward()
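
Note that this nn package is not torch.nn (the rate= and in_dimension= keyword spellings differ from PyTorch). A hedged usage sketch, assuming the model is callable on an NCHW image batch once set_forward has chained the layers; load_batch is a hypothetical helper:

model = SimplerCNN()
images = load_batch()   # hypothetical: image tensor of shape (N, 3, H, W)
probs = model(images)   # softmax class probabilities, shape (N, 10)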
Example #3
    def __init__(self, input_size, hidden_size, output_size):
        super(_TwoLayerDense, self).__init__()

        self.hidden_size = hidden_size
        self.output_size = output_size
        self.input_size = input_size

        with self.name_scope():
            # Hidden layer without bias; its output is batch-normalized.
            self.input = nn.Dense(self.hidden_size,
                                  use_bias=False,
                                  in_units=self.input_size)
            self.bn_input = modules.BatchNorm(in_channels=self.hidden_size)
            # Output projection with bias.
            self.output = nn.Dense(self.output_size,
                                   use_bias=True,
                                   in_units=self.hidden_size)
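
Assuming the class's forward pass (not shown here) chains input -> bn_input -> output, a minimal Gluon usage sketch with made-up sizes:

import mxnet as mx

net = _TwoLayerDense(input_size=128, hidden_size=64, output_size=10)
net.initialize()
x = mx.nd.random.uniform(shape=(32, 128))  # batch of 32 feature vectors
y = net(x)                                 # expected shape: (32, 10)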