Example #1
 def __init__(self):
     super(Siamese, self).__init__()
     with self.init_scope():
         self.b1 = ResNet18()
         self.b2 = ResNet18()
         self.fc1 = Linear(in_size=512, out_size=10)
         self.fc2 = Linear(in_size=10, out_size=10)
Example #2
    def __init__(self,
                 n_class=None, pretrained_model=None, mean=None,
                 initialW=None, initial_bias=None):
        if n_class is None:
            if pretrained_model in self._models:
                n_class = self._models[pretrained_model]['n_class']
            else:
                n_class = 1000

        if mean is None:
            if pretrained_model in self._models:
                mean = self._models[pretrained_model]['mean']
            else:
                mean = _imagenet_mean
        self.mean = mean

        if initialW is None:
            # Employ default initializers used in the original paper.
            initialW = normal.Normal(0.01)
        if pretrained_model:
            # As a sampling process is time-consuming,
            # we employ a zero initializer for faster computation.
            initialW = constant.Zero()
        kwargs = {'initialW': initialW, 'initial_bias': initial_bias}

        super(VGG16, self).__init__()
        with self.init_scope():
            self.conv1_1 = Conv2DActiv(None, 64, 3, 1, 1, **kwargs)
            self.conv1_2 = Conv2DActiv(None, 64, 3, 1, 1, **kwargs)
            self.pool1 = _max_pooling_2d
            self.conv2_1 = Conv2DActiv(None, 128, 3, 1, 1, **kwargs)
            self.conv2_2 = Conv2DActiv(None, 128, 3, 1, 1, **kwargs)
            self.pool2 = _max_pooling_2d
            self.conv3_1 = Conv2DActiv(None, 256, 3, 1, 1, **kwargs)
            self.conv3_2 = Conv2DActiv(None, 256, 3, 1, 1, **kwargs)
            self.conv3_3 = Conv2DActiv(None, 256, 3, 1, 1, **kwargs)
            self.pool3 = _max_pooling_2d
            self.conv4_1 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
            self.conv4_2 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
            self.conv4_3 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
            self.pool4 = _max_pooling_2d
            self.conv5_1 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
            self.conv5_2 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
            self.conv5_3 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
            self.pool5 = _max_pooling_2d
            self.fc6 = Linear(None, 4096, **kwargs)
            self.fc6_relu = relu
            self.fc6_dropout = dropout
            self.fc7 = Linear(None, 4096, **kwargs)
            self.fc7_relu = relu
            self.fc7_dropout = dropout
            self.fc8 = Linear(None, n_class, **kwargs)
            self.prob = softmax

        if pretrained_model in self._models:
            path = download_model(self._models[pretrained_model]['url'])
            chainer.serializers.load_npz(path, self)
        elif pretrained_model:
            chainer.serializers.load_npz(pretrained_model, self)
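Most layers in the example above pass None as in_size (for instance Linear(None, 4096)). In Chainer this defers the input dimension until the first forward pass. A minimal standalone sketch of that behaviour (not part of the VGG16 code; the 25088-wide input is only an illustration):

 import numpy as np
 import chainer.links as L

 # Linear(None, out_size) leaves in_size undetermined; the weight matrix
 # is created on the first call, from the width of the incoming batch.
 fc = L.Linear(None, 4096)
 y = fc(np.zeros((2, 25088), dtype=np.float32))
 print(y.shape)     # (2, 4096)
 print(fc.W.shape)  # (4096, 25088)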
Example #3
 def __init__(self, input_size, vocab_size, embed_size, hidden_size):
     super(Encoder, self).__init__(xi1=Linear(input_size, embed_size),
                                   ih1=Linear(embed_size, 4 * hidden_size),
                                   hh1=Linear(hidden_size, 4 * hidden_size),
                                   xi2=EmbedID(vocab_size, embed_size),
                                   ih2=Linear(hidden_size + embed_size,
                                              4 * hidden_size),
                                   hh2=Linear(hidden_size, 4 * hidden_size))
Example #4
 def __init__(self):
     super(Triplet, self).__init__()
     with self.init_scope():
         self.b1 = ResNet18()
         # self.fc = Linear(in_size=512, out_size=10) # wrong way of making predictions
         # self.fc = Linear(in_size=512, out_size=5) # right way with 5 traits
         # self.fc = Linear(in_size=512, out_size=1)  # with collapsed traits
         # self.fc1 = Linear(in_size=512, out_size=2)
         self.fc1 = Linear(in_size=512, out_size=1)
         self.fc2 = Linear(in_size=512, out_size=1)
Example #5
 def __init__(self, x_channels=1, y_channels=3, ch=64, activation=F.relu):
     super().__init__()
     self.activation = activation
     initializer = chainer.initializers.GlorotUniform()
     with self.init_scope():
         self.block0_0 = OptimizedBlock(x_channels, ch // 2)
         self.block0_1 = OptimizedBlock(y_channels, ch // 2)
         self.block1 = Block(ch,
                             ch * 2,
                             activation=activation,
                             downsample=True)
         self.block2 = Block(ch * 2,
                             ch * 4,
                             activation=activation,
                             downsample=True)
         self.block3 = Block(ch * 4,
                             ch * 8,
                             activation=activation,
                             downsample=True)
         self.block4 = Block(ch * 8,
                             ch * 16,
                             activation=activation,
                             downsample=True)
         self.block5 = Block(ch * 16,
                             ch * 16,
                             activation=activation,
                             downsample=False)
         self.l6 = Linear(ch * 16, 1, initialW=initializer)
Example #6
 def __init__(self):
     super(DummyModel, self).__init__()
     with self.init_scope():
         self.l1 = Linear(3,
                          1,
                          initialW=numpy.array([[1, 3, 2]]),
                          nobias=True)
Example #7
    def __init__(self, num_layer, node_dim, edge_dim, gpu=0):
        super(EdgeUpdateNet, self).__init__()

        self.num_layer = num_layer
        self.edge_dim = edge_dim
        self.to_xpu = partial(to_device, gpu)

        with self.init_scope():
            self.gn = ElementLinear(node_dim)

            for layer in range(self.num_layer):
                self.add_link('eup{}'.format(layer), EdgeUpdate(edge_dim))
                self.add_link('int{}'.format(layer),
                              InteractionNetwork(node_dim))

            self.interaction1 = Linear(512)
            self.interaction2 = Linear(512)
            self.interaction3 = Linear(4)
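The three interaction layers above use the single-argument form. In Chainer, Linear(512) is shorthand for Linear(None, 512): the lone value is taken as out_size and in_size is inferred on the first call. A quick standalone check of that shorthand:

 import numpy as np
 import chainer.links as L

 # With one positional argument, Linear treats it as out_size and infers
 # the input size from the first batch it sees.
 fc = L.Linear(512)
 y = fc(np.zeros((1, 300), dtype=np.float32))
 print(y.shape)  # (1, 512)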
Example #8
 def __init__(self,
              in_size,
              out_size=None,
              initial_bias=None,
              gain=sqrt(2)):
     super().__init__()
     self.c = gain * sqrt(1 / in_size)
     with self.init_scope():
         self.linear = Linear(in_size,
                              out_size,
                              initialW=Normal(1.0),
                              initial_bias=initial_bias)
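This constructor looks like the equalized-learning-rate trick used in progressive GANs: the weights are drawn from N(0, 1) and the per-layer constant c = gain * sqrt(1 / in_size) rescales the activations at run time. The original __call__ is not shown, so the forward pass below is an assumption, a minimal sketch only:

 from math import sqrt
 import numpy as np
 import chainer
 import chainer.links as L
 from chainer.initializers import Normal

 class EqualizedLinear(chainer.Chain):
     # Hypothetical completion of Example #8; only __init__ appears above.
     def __init__(self, in_size, out_size=None, initial_bias=None,
                  gain=sqrt(2)):
         super().__init__()
         self.c = gain * sqrt(1 / in_size)
         with self.init_scope():
             self.linear = L.Linear(in_size, out_size,
                                    initialW=Normal(1.0),
                                    initial_bias=initial_bias)

     def __call__(self, x):
         # Assumed forward pass: scale the input by c before the affine map.
         return self.linear(self.c * x)

 y = EqualizedLinear(512, 256)(np.zeros((1, 512), dtype=np.float32))
 print(y.shape)  # (1, 256)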
Example #9
 def __init__(self,
              out_dim=64,
              hidden_channels=None,
              n_update_layers=None,
              n_atom_types=MAX_ATOMIC_NUM,
              n_edge_types=4,
              input_type='int',
              scale_adj=False):
     super(RelGCNSparse, self).__init__()
     if hidden_channels is None:
         hidden_channels = [16, 128, 64]
     elif isinstance(hidden_channels, int):
         if not isinstance(n_update_layers, int):
             raise ValueError(
                 'Must specify n_update_layers when hidden_channels is int')
         hidden_channels = [hidden_channels] * n_update_layers
     with self.init_scope():
         if input_type == 'int':
             self.embed = EmbedAtomID(out_size=hidden_channels[0],
                                      in_size=n_atom_types)
         elif input_type == 'float':
             self.embed = Linear(None, hidden_channels[0])
         else:
             raise ValueError(
                 "[ERROR] Unexpected value input_type={}".format(
                     input_type))
         self.rgcn_convs = chainer.ChainList(*[
             RelGCNSparseUpdate(hidden_channels[i], hidden_channels[i + 1],
                                n_edge_types)
             for i in range(len(hidden_channels) - 1)
         ])
         self.rgcn_readout = ScatterGGNNReadout(
             out_dim=out_dim,
             in_channels=hidden_channels[-1],
             nobias=True,
             activation=functions.tanh)
     # self.num_relations = num_edge_type
     self.input_type = input_type
     self.scale_adj = scale_adj
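The rgcn_convs attribute shows the ChainList pattern: a variable number of update layers registered in one container and iterated over in the forward pass. A small self-contained sketch of the same pattern with plain Linear layers (the class name StackedLinear is made up for illustration):

 import numpy as np
 import chainer
 import chainer.functions as F
 import chainer.links as L

 class StackedLinear(chainer.Chain):
     # Registers len(sizes) Linear layers and applies them in order.
     def __init__(self, sizes):
         super().__init__()
         with self.init_scope():
             self.layers = chainer.ChainList(
                 *[L.Linear(None, size) for size in sizes])

     def __call__(self, x):
         for layer in self.layers:
             x = F.relu(layer(x))
         return x

 model = StackedLinear([16, 128, 64])
 y = model(np.zeros((2, 32), dtype=np.float32))
 print(y.shape)  # (2, 64)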
Example #10
 def __init__(self, hidden_size):
     super(Attention_concat, self).__init__(
         av=Linear(2 * hidden_size, 2 * hidden_size),
         vw=Linear(2 * hidden_size, 1),
     )
     self.hidden_size = hidden_size
Example #11
 def __init__(self):
     super(Siamese, self).__init__()
     with self.init_scope():
         self.fc1 = Linear(in_size=1, out_size=1, nobias=True)
Example #12
 def __init__(self):
     super(Branch, self).__init__()
     with self.init_scope():
         self.fc1 = Linear(in_size=1, out_size=1)
Example #13
 def __init__(self):
     super(Siamese0, self).__init__()
     with self.init_scope():
         self.b1 = Branch()
         self.fc1 = Linear(in_size=2, out_size=1)
Example #14
 def __init__(self):
     super(LastLayers, self).__init__()
     with self.init_scope():
         self.fc = Linear(in_size=256, out_size=5)
Example #15
 def __init__(self):
     super(SimpleAll, self).__init__()
     with self.init_scope():
         self.fc = Linear(in_size=1, out_size=5)
         self.bn = BatchNormalization(5)
Example #16
 def reinitialize(self, link: L.Linear):
     _, in_size = link.W.shape
     link._initialize_params(in_size)
Example #17
 def update_attributes(self, link: L.Linear):
     link.out_size, link.in_size = link.W.shape
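Both helpers above unpack link.W.shape, relying on Chainer's weight layout for Linear, which is (out_size, in_size). A quick standalone check of that layout:

 import chainer.links as L

 # Linear.W is stored as (out_size, in_size), so Examples #16 and #17
 # recover out_size first and in_size second when unpacking W.shape.
 link = L.Linear(3, 5)
 print(link.W.shape)  # (5, 3)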
Example #18
 def __init__(self):
     super(Deepimpression, self).__init__()
     with self.init_scope():
         self.b1 = ResNet18()
         self.fc = Linear(in_size=256, out_size=5)
Example #19
 def __init__(self):
     super(Siamese, self).__init__()
     with self.init_scope():
         self.b1 = ResNet18()
         self.fc = Linear(in_size=256, out_size=10)
Example #20
    def __init__(self, n_class, pretrained_model='auto'):
        super(ResNet101FineTuning, self).__init__()

        with self.init_scope():
            self.base = ResNet101Layers(pretrained_model)
            self.fc6 = Linear(2048, n_class)
Example #21
 def __init__(self, n_units, n_out):
     super().__init__(l1=Linear(None, n_units),
                      l2=Linear(n_units, n_units),
                      l3=Linear(n_units, n_out))
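This last example registers its links by passing them as keyword arguments to Chain.__init__, the older registration style; the init_scope() form used in most of the examples above is equivalent. A sketch of the same three-layer network in that style (the class name MLP is assumed):

 import chainer
 import chainer.links as L

 class MLP(chainer.Chain):
     # Same layers as Example #21, registered inside init_scope().
     def __init__(self, n_units, n_out):
         super().__init__()
         with self.init_scope():
             self.l1 = L.Linear(None, n_units)
             self.l2 = L.Linear(n_units, n_units)
             self.l3 = L.Linear(n_units, n_out)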