Example #1
 def test_dropout(self):
     inputs = lib.Input((5, 5))
     net = lib.layers.Dropout(0.5)
     net.eval()
     outputs = net(inputs)
     self.assertListEqual(list(lib.engine.int_shape(outputs)), [1, 5, 5])
     with self.cached_session() as sess:
         sess.run(tf.global_variables_initializer())
         print(sess.run(outputs))
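
These snippets are test methods, apparently taken from a tf.test.TestCase suite (they use self.cached_session() and the assert* helpers). To run one in isolation it needs a small harness roughly like the following (a minimal sketch, assuming TensorFlow 1.x; the import path of the framework and the test class name are assumptions):

import tensorflow as tf
import lib  # the framework under test; actual import path assumed


class DropoutTest(tf.test.TestCase):  # illustrative class name
    def test_dropout(self):
        inputs = lib.Input((5, 5))
        net = lib.layers.Dropout(0.5)
        net.eval()  # inference mode: dropout becomes a pass-through
        outputs = net(inputs)
        self.assertListEqual(list(lib.engine.int_shape(outputs)), [1, 5, 5])


if __name__ == '__main__':
    tf.test.main()
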
Example #2
 def test_rnn(self):
     inputs = lib.Input(batch_input_shape=(2, 10, 28))
     rnn = lib.layers.RNN(10)
     outputs = rnn(inputs)
     node = getattr(outputs, '_anchor')[0]
     self.assertEqual(node.layer.name, rnn.name)
     self.assertListEqual(list(lib.engine.int_shape(outputs)), [2, 10])
     # writer = tf.summary.FileWriter('D:/GeekGank/workspace/graph/model_graph', tf.get_default_graph())
     # writer.close()
     with self.cached_session() as sess:
         sess.run(tf.global_variables_initializer())
         print(sess.run(outputs))
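
The asserted [2, 10] means the RNN layer returns only its final time step, collapsing the time axis to (batch, units). For comparison, the same behaviour with stock tf.keras (a sketch, assuming TensorFlow 1.x; this is not the library's own API):

import tensorflow as tf

x = tf.keras.Input(shape=(10, 28), batch_size=2)
y = tf.keras.layers.SimpleRNN(10)(x)  # last step only -> (batch, units)
print(y.get_shape().as_list())        # [2, 10]
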
Example #3
 def test_mobilenet_v2_forward(self):
     start = time.time()
     inputs = lib.Input((224, 224, 3))
     with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
         model = lib.research.MobileNetV2(3,
                                          num_classes=1001,
                                          multiplier=0.25)
     outputs = model(inputs)
     print(time.time() - start)
     self.assertListEqual(outputs.get_shape().as_list(), [1, 1001])
     writer = tf.summary.FileWriter(
         "D:/GeekGank/workspace/graph/model_graph", tf.get_default_graph())
     writer.close()
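     # The dumped graph can then be inspected with TensorBoard, e.g.:
     #   tensorboard --logdir D:/GeekGank/workspace/graph/model_graph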
Example #4
 def test_batch_normal(self):
     inputs = lib.Input(input_shape=(7, 7, 3), batch_size=2)
     outputs = lib.layers.Conv2D(3, 3, padding='VALID')(inputs)
     bn = lib.layers.BatchNorm()
     # bn.train()
     outputs = bn(outputs)
     writer = tf.summary.FileWriter('D:/GeekGank/workspace/graph/model_graph', tf.get_default_graph())
     writer.close()
     self.assertEqual(bn.trainable, True)
     self.assertListEqual(list(lib.engine.int_shape(outputs)), [2, 5, 5, 3])
     with self.cached_session() as sess:
         sess.run(tf.global_variables_initializer())
         print(sess.run(outputs))
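
The expected [2, 5, 5, 3] is plain VALID-padding convolution arithmetic on the 7 x 7 input; a quick sanity check in pure Python:

in_size, kernel, stride = 7, 3, 1
out_size = (in_size - kernel) // stride + 1  # VALID padding formula
print(out_size)  # 5, hence the [2, 5, 5, 3] assertion above
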
Example #5
 def test_network_weights(self):
     inputs = lib.Input((224, 224, 3), batch_size=2)
     with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
         net = lib.research.ResNet_V1_50_beta(num_classes=1001,
                                              global_pool=True)
     # net = lib.research.MobileNetV2(3, 10)
     net.train()
     _ = net(inputs)
     weights = set()
     for w in net.weights:
         if w in weights:
             print('repeat', w)
         else:
             weights.add(w)
             print(w)
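
The loop above only reports duplicates on stdout; a stricter variant that fails the test outright could replace it at the end of test_network_weights (a sketch; it relies only on net.weights holding hashable variable objects, which the set usage above already assumes):

# A set collapses any variable that was collected twice.
self.assertEqual(len(set(net.weights)), len(net.weights))
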
Example #6
def create_model(input_shape):
    with tf.name_scope('model') as scope:
        model_inputs = lib.Input(batch_input_shape=input_shape)
        net = lib.research.MobileNetV2(3, base_only=True)(model_inputs)
        classes = lib.layers.Conv2D(out_channels=10, kernel_size=(1, 13))(net)
        pattern = lib.layers.Dense(num_units=24)(lib.layers.flatten(classes))
        pattern = lib.layers.reshape(pattern, (-1, 1, 1, 24))
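        # Tile the 1x1x24 pattern across the width of `net` (axis 2) so it can be
        # concatenated with `classes` along the channel axis below.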
        pattern = lib.layers.tile(pattern,
                                  [1, 1, lib.engine.int_shape(net)[2], 1])
        net = lib.layers.concat(classes, pattern, axis=3)
        net = lib.layers.Conv2D(out_channels=10, kernel_size=1)(net)
        model_outputs = lib.layers.squeeze(net, axis=1)
    return lib.engine.Network(inputs=model_inputs,
                              outputs=model_outputs,
                              name=scope)
Example #7
    def test_graph_model_estimator_style_fit(self):
        (train_images,
         train_labels), (test_images,
                         test_labels) = keras.datasets.mnist.load_data()
        # x_train = np.random.random((1000, 20))
        # y_train = np.random.randint(2, size=(1000, 1))
        # x_test = np.random.random((100, 20))
        # y_test = np.random.randint(2, size=(100, 1))
        train_images = train_images / 255.0
        test_images = test_images / 255.0

        class MNIST(lib.training.Model):
            def __init__(self, *args, **kwargs):
                super(MNIST, self).__init__(*args, **kwargs)
                self.flatten = lib.layers.Flatten()
                self.fc1 = lib.layers.Dense(units=128, activation='relu')
                self.fc2 = lib.layers.Dense(units=10)

            def forward(self, inputs):
                x = self.flatten(inputs)
                x = self.fc1(x)
                x = self.fc2(x)
                return x

        def model_fn(_model, x, y):
            loss = lib.training.SparseCategoricalCrossEntropy(
                from_logits=True)(y, _model.outputs[0])
            metric = lib.training.SparseCategoricalAccuracy()(
                y, _model.outputs[0])
            return lib.training.EstimatorSpec(outputs=_model.outputs,
                                              loss=loss,
                                              metrics=[metric])

        inputs = lib.Input(input_shape=(28, 28))
        outputs = MNIST()(inputs)
        model = lib.training.Model(inputs=inputs,
                                   outputs=outputs,
                                   name='mnist/')
        model.train()
        model.compile(model_fn=model_fn,
                      optimizer=tf.train.AdamOptimizer(),
                      checkpoint_dir='./test_ckpt',
                      session_cfg=dict(per_process_gpu_memory_fraction=0.4))
        model.fit(train_images,
                  train_labels,
                  test_images,
                  test_labels,
                  epochs=10)
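
For reference, the same MNIST classifier and training loop written against stock tf.keras instead of the estimator-style compile/fit above (a comparison sketch, assuming TensorFlow 1.14+ where tf.keras.losses.SparseCategoricalCrossentropy is available; it is not this library's API):

import tensorflow as tf
from tensorflow import keras

(train_images, train_labels), (test_images, test_labels) = \
    keras.datasets.mnist.load_data()
train_images, test_images = train_images / 255.0, test_images / 255.0

model = keras.Sequential([
    keras.layers.Flatten(input_shape=(28, 28)),
    keras.layers.Dense(128, activation='relu'),
    keras.layers.Dense(10),  # logits, matching from_logits=True below
])
model.compile(optimizer='adam',
              loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
model.fit(train_images, train_labels,
          validation_data=(test_images, test_labels),
          epochs=10)
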
Example #8
 def test_resnet(self):
     start = time.time()
     inputs = lib.Input((224, 224, 3))
     with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
         model = lib.research.ResNet_V1_101_beta((224, 224, 3),
                                                 is_training=False,
                                                 global_pool=True,
                                                 num_classes=1001)
     outputs = model(inputs)
     print(time.time() - start)
     node = getattr(outputs, '_anchor')[0]
     self.assertEqual(node.layer.name, 'resnet_v1_101/')
     self.assertListEqual(outputs.get_shape().as_list(), [1, 1001])
     writer = tf.summary.FileWriter(
         "D:/GeekGank/workspace/graph/model_graph", tf.get_default_graph())
     writer.close()
Example #9
 def test_residual_block(self):
     start = time.time()
     inputs = lib.Input((54, 54, 64), batch_size=2)
     net = lib.research.Bottleneck(128 * 4,
                                   128,
                                   stride=2,
                                   downsample=lib.layers.Conv2D(
                                       128 * 4,
                                       kernel_size=1,
                                       strides=2,
                                       use_bias=False))
     outputs = net(inputs)
     print(time.time() - start)
     print(outputs)
     self.assertListEqual(outputs.get_shape().as_list(),
                          [2, 27, 27, 128 * 4])
     writer = tf.summary.FileWriter(
         "D:/GeekGank/workspace/graph/model_graph", tf.get_default_graph())
     writer.close()
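
The asserted shape follows from the stride-2 downsampling (54 halves to 27, exact because 54 is even) and the 128 * 4 = 512 channels requested in the constructor; a quick check in plain Python:

in_size, stride, expansion = 54, 2, 4
print(in_size // stride, 128 * expansion)  # 27 512, matching [2, 27, 27, 512]
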
Example #10
def _ResNetV1Beta(blocks,
                  num_classes=None,
                  is_training=None,
                  global_pool=False,
                  base_only=False,
                  root_block=None,
                  output_stride=None,
                  extract_blocks=None,
                  input_shape=None,
                  input_tensor=None,
                  name=None):
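    # When extract_blocks is requested, the network has to be built immediately so
    # the ExtractHook can record the intermediate endpoints; that is why a concrete
    # input (input_shape or input_tensor) is required. Without extract_blocks the
    # unbuilt ResNetV1Beta layer is returned and the caller applies it later.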
    if extract_blocks is not None:
        if input_shape is None:
            if input_tensor is None:
                raise ValueError("You must provide either `input_shape` or"
                                 " a traceable `input_tensor` to build the graph")
        with lib.hooks.ExtractHook(extract_blocks, prefix=name) as hook:
            network = ResNetV1Beta(blocks=blocks,
                                   num_classes=num_classes,
                                   is_training=is_training,
                                   global_pool=global_pool,
                                   base_only=base_only,
                                   output_stride=output_stride,
                                   name=name)
            inputs = lib.Input(input_tensor=input_tensor,
                               input_shape=input_shape)
            _ = network(inputs)
            outputs = list(hook.get_endpoints().values())
            return lib.Network(inputs=inputs, outputs=outputs, name=name + '/')
    else:
        return ResNetV1Beta(blocks=blocks,
                            num_classes=num_classes,
                            is_training=is_training,
                            global_pool=global_pool,
                            base_only=base_only,
                            root_block=root_block,
                            output_stride=output_stride,
                            name=name)
Example #11
 def test_lambda(self):
     inputs = [lib.Input((2,)), lib.Input((2,))]
     outputs = lib.layers.Lambda(tf.add, name='add')(*inputs)
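     # Lambda wraps a plain TensorFlow op; tf.add is elementwise, so the output
     # keeps the [1, 2] shape of its two inputs.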
     self.assertListEqual(list(lib.engine.int_shape(outputs)), [1, 2])
     with self.cached_session() as sess:
         print(sess.run(outputs))
Example #12
 def test_convolution(self):
     inputs = lib.Input(input_shape=(7, 7, 3), batch_size=2)
     outputs = lib.layers.Conv2D(3, 3, strides=2)(inputs)
     print(outputs)
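     # With stride 2 on a 7x7 input the spatial size is 4 under SAME padding
     # (ceil(7 / 2)) or 3 under VALID padding ((7 - 3) // 2 + 1), so the printed
     # shape is [2, 4, 4, 3] or [2, 3, 3, 3] depending on the layer's default.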