Esempio n. 1
0
    def forward(self, x):
        """U-Net style forward pass: five encoder blocks with 2x2 pooling,
        a center block, then five decoder blocks each fed the matching
        skip connection (deepest skip first).
        """
        encoders = (self.block1, self.block2, self.block3,
                    self.block4, self.block5)
        decoders = (self.decoder_block5, self.decoder_block4,
                    self.decoder_block3, self.decoder_block2,
                    self.decoder_block1)

        # Encoder: remember each block's output as a skip connection,
        # then halve the spatial size before the next block.
        skips = []
        t = x
        for enc in encoders:
            skip = enc(t)
            skips.append(skip)
            t = rm.max_pool2d(skip, filter=2, stride=2)

        t = self.center(t)

        # Decoder: concatenate skips deepest-first before each decoder block.
        # NOTE(review): rm.concat needs matching spatial sizes; presumably the
        # center/decoder blocks upsample -- confirm against the network setup.
        for dec, skip in zip(decoders, reversed(skips)):
            t = dec(rm.concat([t, skip]))

        return self.final(t)
Esempio n. 2
0
    def forward(self, x):
        """VGG-16 style feature extractor: five conv stages (2-2-3-3-3
        ReLU convolutions), each followed by 2x2 stride-2 max pooling.
        Returns the final pooled feature map.
        """
        stages = (
            (self.conv1_1, self.conv1_2),
            (self.conv2_1, self.conv2_2),
            (self.conv3_1, self.conv3_2, self.conv3_3),
            (self.conv4_1, self.conv4_2, self.conv4_3),
            (self.conv5_1, self.conv5_2, self.conv5_3),
        )
        h = x
        for convs in stages:
            for conv in convs:
                h = rm.relu(conv(h))
            h = rm.max_pool2d(h, filter=2, stride=2)
        return h
 def forward(self, x):
     """Encode the input into VAE latent parameters (z_mean, z_log_var).

     Runs a densely-connected conv stack (self.hidden), flattens, then
     applies the fully-connected stack (self.fcnn) whose last two layers
     produce the latent mean and log-variance.
     """
     h = self.input(x)
     h = rm.max_pool2d(h, stride=1, padding=1)
     conv_layers = self.hidden._layers
     # Each block owns depth*(pair of layers) plus one trailing layer.
     for blk in range(self.blocks):
         base = blk * (self.depth * 2 + 1)
         for d in range(self.depth):
             grown = rm.relu(conv_layers[base + 2 * d](h))
             grown = conv_layers[base + 2 * d + 1](grown)
             if self.dropout:
                 grown = rm.dropout(grown)
             h = rm.concat(h, grown)  # dense connectivity
         h = conv_layers[base + self.depth * 2](h)  # block's trailing layer
         h = rm.average_pool2d(h, stride=2, padding=1)
     flat = rm.flatten(h)
     fc_layers = self.fcnn._layers
     # All but the final two FC layers are ReLU hidden layers.
     for k in range(len(fc_layers) - 2):
         flat = rm.relu(fc_layers[k](flat))
         if self.dropout:
             flat = rm.dropout(flat, dropout_ratio=0.5)
     # The last two layers are the latent mean and log-variance heads.
     return fc_layers[-2](flat), fc_layers[-1](flat)
 def forward(self, x):
     """Convolutional VAE encoder: conv stages with pooling, then FC heads.

     Returns a (z_mean, z_log_var) tuple produced by the last two layers
     of self.fcnn.
     """
     conv_layers = self.hidden._layers
     for d in range(self.depth):
         if self.batch_normal:
             # Four layers per stage when batch normalization is enabled;
             # presumably alternating norm/conv -- layout depends on the
             # network definition (TODO confirm).
             x = conv_layers[d * 4](x)
             x = rm.relu(conv_layers[d * 4 + 1](x))
             x = conv_layers[d * 4 + 2](x)
             x = rm.relu(conv_layers[d * 4 + 3](x))
         else:
             x = rm.relu(conv_layers[d * 2](x))
             x = rm.relu(conv_layers[d * 2 + 1](x))
         # Average-pool on the final stage, max-pool on all earlier ones.
         pool = rm.average_pool2d if d == self.depth - 1 else rm.max_pool2d
         x = pool(x, stride=2, padding=(1, 1))
     x = rm.flatten(x)
     fc_layers = self.fcnn._layers
     for k in range(len(fc_layers) - 2):
         x = rm.relu(fc_layers[k](x))
         if self.dropout:
             x = rm.dropout(x, dropout_ratio=0.5)
     return fc_layers[-2](x), fc_layers[-1](x)
Esempio n. 5
0
    def forward(self, x):
        """Inception-style downsampling block: a three-conv branch, a
        single-conv branch, and a strided max-pool branch, concatenated
        along the channel axis in that order.
        """
        deep = rm.relu(self.batch_norm1_reduced(self.conv1_reduced(x)))
        deep = rm.relu(self.batch_norm1_1(self.conv1_1(deep)))
        deep = rm.relu(self.batch_norm1_2(self.conv1_2(deep)))

        single = rm.relu(self.batch_norm2(self.conv2(x)))

        pooled = rm.max_pool2d(x, filter=3, stride=2)

        return rm.concat([deep, single, pooled])
Esempio n. 6
0
    def forward(self, x):
        """Stem block: three BN+ReLU convolutions, a strided max-pool,
        then three more BN+ReLU convolutions.
        """
        h = x
        for conv, bn in ((self.conv1, self.batch_norm1),
                         (self.conv2, self.batch_norm2),
                         (self.conv3, self.batch_norm3)):
            h = rm.relu(bn(conv(h)))

        h = rm.max_pool2d(h, filter=3, stride=2)

        for conv, bn in ((self.conv4, self.batch_norm4),
                         (self.conv5, self.batch_norm5),
                         (self.conv6, self.batch_norm6)):
            h = rm.relu(bn(conv(h)))
        return h
Esempio n. 7
0
    def forward(self, x):
        """Inception block: four parallel branches over the same input,
        concatenated along the channel axis.
        """
        # Branch 1: single convolution.
        branch1 = rm.relu(self.conv1(x))

        # Branch 2: channel-reducing conv followed by the main conv.
        branch2 = rm.relu(self.conv2_reduced(x))
        branch2 = rm.relu(self.conv2(branch2))

        # Branch 3: same reduce-then-conv pattern.
        branch3 = rm.relu(self.conv3_reduced(x))
        branch3 = rm.relu(self.conv3(branch3))

        # Branch 4: spatial-size-preserving max-pool, then a conv.
        branch4 = rm.max_pool2d(x, filter=3, stride=1, padding=1)
        branch4 = rm.relu(self.conv4(branch4))

        return rm.concat([branch1, branch2, branch3, branch4])
Esempio n. 8
0
    def forward(self, x):
        """Inception-style reduction: a three-conv stem followed by three
        successive two-branch splits, each merged with channel concat.
        """
        h = rm.relu(self.batch_norm1(self.conv1(x)))
        h = rm.relu(self.batch_norm2(self.conv2(h)))
        h = rm.relu(self.batch_norm3(self.conv3(h)))

        # Split 1: strided pool branch vs conv branch.
        pooled = rm.max_pool2d(h, filter=3, stride=2)
        conved = rm.relu(self.batch_norm4(self.conv4(h)))
        h = rm.concat([pooled, conved])

        # Split 2: two-conv branch vs four-conv branch.
        short = rm.relu(self.batch_norm5_1_1(self.conv5_1_1(h)))
        short = rm.relu(self.batch_norm5_1_2(self.conv5_1_2(short)))

        deep = rm.relu(self.batch_norm5_2_1(self.conv5_2_1(h)))
        deep = rm.relu(self.batch_norm5_2_2(self.conv5_2_2(deep)))
        deep = rm.relu(self.batch_norm5_2_3(self.conv5_2_3(deep)))
        deep = rm.relu(self.batch_norm5_2_4(self.conv5_2_4(deep)))
        h = rm.concat([short, deep])

        # Split 3: conv branch vs strided pool branch.
        conv_out = rm.relu(self.batch_norm6(self.conv6(h)))
        pool_out = rm.max_pool2d(h, filter=3, stride=2)
        return rm.concat([conv_out, pool_out])
Esempio n. 9
0
    def forward(self, x):
        """U-Net style segmentation forward: encoder saving skip maps,
        then a decoder that upsamples, crops to the skip's spatial size,
        concatenates, and convolves.
        """
        # ----- encoder (skips saved before each pool) -----
        skip1 = rm.relu(self.bn1_1(self.conv1_1(x)))
        t = rm.dropout(rm.max_pool2d(skip1, filter=2, stride=2), 0.5)

        skip2 = rm.relu(self.bn2_1(self.conv2_1(t)))
        t = rm.dropout(rm.max_pool2d(skip2, filter=2, stride=2), 0.5)

        t = rm.relu(self.bn3_1(self.conv3_1(t)))
        skip3 = rm.relu(self.bn3_2(self.conv3_2(t)))
        t = rm.dropout(rm.max_pool2d(skip3, filter=2, stride=2), 0.5)

        t = rm.relu(self.bn4_1(self.conv4_1(t)))
        skip4 = rm.relu(self.bn4_2(self.conv4_2(t)))
        t = rm.dropout(rm.max_pool2d(skip4, filter=2, stride=2), 0.5)

        t = rm.relu(self.bn5_1(self.conv5_1(t)))
        t = rm.relu(self.bn5_2(self.conv5_2(t)))

        # ----- decoder: upsample -> crop -> concat skip -> conv -----
        # NOTE(review): skip1 is concatenated twice (after deconv4 and
        # deconv5) -- looks deliberate but verify against the class setup.
        for deconv, skip, conv in ((self.deconv1, skip4, self.conv6),
                                   (self.deconv2, skip3, self.conv7),
                                   (self.deconv3, skip2, self.conv8),
                                   (self.deconv4, skip1, self.conv9)):
            t = deconv(t)[:, :, :skip.shape[2], :skip.shape[3]]
            t = rm.concat([skip, t])
            t = rm.relu(conv(t))

        t = self.deconv5(t)[:, :, :skip1.shape[2], :skip1.shape[3]]
        t = rm.concat([skip1, t])
        return self.conv10(t)
Esempio n. 10
0
    def forward(self, x):
        """Reduction block: strided pool branch, two-conv branch, and a
        four-conv branch, concatenated along the channel axis.
        """
        pool_branch = rm.max_pool2d(x, filter=3, stride=2)

        short_branch = rm.relu(self.batch_norm1_red(self.conv1_red(x)))
        short_branch = rm.relu(self.batch_norm1(self.conv1(short_branch)))

        deep_branch = rm.relu(self.batch_norm2_red(self.conv2_red(x)))
        for conv, bn in ((self.conv2_1, self.batch_norm2_1),
                         (self.conv2_2, self.batch_norm2_2),
                         (self.conv2_3, self.batch_norm2_3)):
            deep_branch = rm.relu(bn(conv(deep_branch)))

        return rm.concat([pool_branch, short_branch, deep_branch])
Esempio n. 11
0
 def forward(self, x, print_parameter=False):
     """Dense-block feature extractor; returns the flattened feature vector.

     Args:
         x: input batch (4-D tensor; NCHW assumed -- TODO confirm).
         print_parameter: when True, print intermediate shapes while the
             network runs, for debugging.

     Returns:
         The flattened output of the conv stack.
     """
     hidden = self.input(x)
     if print_parameter:
         print('{}'.format('-' * 20))
         print('check network')
         print(x.shape)
         print('{}'.format('-' * 20))
     if self.dropout:
         hidden = rm.dropout(hidden)
     hidden = rm.max_pool2d(hidden, stride=1, padding=1)
     if print_parameter:
         print(hidden.shape)
         print('{}'.format('-' * 20))
     layers = self.hidden._layers
     # self.blocks is either an int (block count) or a sequence describing
     # the blocks; normalize to a count.
     blocks = self.blocks if isinstance(self.blocks, int) else len(
         self.blocks)
     for i in range(blocks):
         offset = i * (self.depth * 2 + 1)
         for j in range(self.depth):
             sub = rm.leaky_relu(layers[offset + 2 * j](hidden))
             if print_parameter:
                 print('{}.{} b {}'.format(i, j, sub.shape))
             sub = layers[offset + 2 * j + 1](sub)
             if print_parameter:
                 print('{}.{} + {}'.format(i, j, sub.shape))
             if self.dropout:
                 sub = rm.dropout(sub)
             # Dense connectivity: new features are concatenated onto the
             # running feature map.
             hidden = rm.concat(hidden, sub)
             if print_parameter:
                 print('{}.{} = {}'.format(i, j, hidden.shape))
         # The last layer of each block sits at the end of its slice.
         offset = (i + 1) * (self.depth * 2 + 1) - 1
         hidden = layers[offset](hidden)
         if print_parameter:
             print('{}.{} * {}'.format(i, j, hidden.shape))
         if self.dropout:
             if print_parameter:
                 print('dropout')
             hidden = rm.dropout(hidden)
         # keep_v keeps the vertical resolution (stride 1 on that axis).
         hidden = rm.average_pool2d(hidden,
                                    padding=1,
                                    stride=(1, 2) if self.keep_v else 2)
         if print_parameter:
             print('{}.{} @ {}'.format(i, j, hidden.shape))
             print('{}'.format('-' * 20))
     x = rm.flatten(hidden)
     if print_parameter:
         # Fixed typo in debug output: "prameters" -> "parameters".
         print('  >>>  {} parameters'.format(x.shape))
     return x
Esempio n. 12
0
    def forward(self, x):
        """Inception block with split tails: four parallel branches, two
        of which fork into a pair of convs merged by concat, all finally
        concatenated along the channel axis.
        """
        b1 = rm.relu(self.batch_norm1(self.conv1(x)))

        # Branch 2: reduce, then fork into two parallel convs.
        b2 = rm.relu(self.batch_norm2_reduced(self.conv2_reduced(x)))
        b2 = rm.concat([rm.relu(self.batch_norm2_1(self.conv2_1(b2))),
                        rm.relu(self.batch_norm2_2(self.conv2_2(b2)))])

        # Branch 3: reduce, one conv, then fork into two parallel convs.
        b3 = rm.relu(self.batch_norm3_reduced(self.conv3_reduced(x)))
        b3 = rm.relu(self.batch_norm3_1(self.conv3_1(b3)))
        b3 = rm.concat([rm.relu(self.batch_norm3_2(self.conv3_2(b3))),
                        rm.relu(self.batch_norm3_3(self.conv3_3(b3)))])

        # Branch 4: size-preserving max-pool followed by a conv.
        b4 = rm.max_pool2d(x, filter=3, stride=1, padding=1)
        b4 = rm.relu(self.batch_norm4(self.conv4(b4)))

        return rm.concat([b1, b2, b3, b4])
Esempio n. 13
0
 def forward(self, x):
     """DenseNet-style classifier forward: stem, dense blocks with
     intermediate layers, global average pool, and a final FC layer.
     """
     idx = 0  # cursor into self.base
     t = self.base[idx](x)
     idx += 1
     t = rm.relu(self.base[idx](t))
     idx += 1
     t = rm.max_pool2d(t, filter=3, stride=2, padding=1)
     # All blocks except the last are followed by one extra layer
     # (a transition, presumably -- depends on how self.base was built).
     for n_layers in self.layer_per_block[:-1]:
         for _ in range(n_layers):
             # Dense connectivity: append the new features to the map.
             t = rm.concat(t, self.base[idx](t))
             idx += 1
         t = self.base[idx](t)
         idx += 1
     # Final block has no trailing layer.
     for _ in range(self.layer_per_block[-1]):
         t = rm.concat(t, self.base[idx](t))
         idx += 1
     t = rm.average_pool2d(t, filter=7, stride=1)
     return self.fc(rm.flatten(t))