Example #1
def train_input_fn():
    train_files = inputs.get_tfrecord_paths(FLAGS.train_path)
    return input_fn.train_input_fn(
        tfrecord_fpaths=train_files,
        batch_size=FLAGS.train_batch_size,
        shuffle_buffer_size=FLAGS.shuffle_buffer_size,
    )
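The wrapped input_fn.train_input_fn module is not shown on this page. A minimal sketch of a TFRecord-backed tf.data pipeline it might resemble, written against the TF 1.x API used throughout these examples; the feature spec and the names train_input_fn_sketch, _parse, 'feature', and 'label' are assumptions for illustration only:

import tensorflow as tf

def train_input_fn_sketch(tfrecord_fpaths, batch_size, shuffle_buffer_size):
    """Builds a shuffled, batched tf.data pipeline over TFRecord files."""
    def _parse(serialized):
        # Hypothetical record schema; the real layout is not given here.
        spec = {
            'feature': tf.FixedLenFeature([4], tf.float32),
            'label': tf.FixedLenFeature([], tf.int64),
        }
        parsed = tf.parse_single_example(serialized, spec)
        return {'feature': parsed['feature']}, parsed['label']

    dataset = tf.data.TFRecordDataset(tfrecord_fpaths)
    dataset = dataset.shuffle(shuffle_buffer_size)
    dataset = dataset.map(_parse)
    dataset = dataset.batch(batch_size)
    return dataset.repeat()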
Example #2
    def test_train_input_fn_single(self):
        features, labels = train_input_fn(
            ['test_data/input_fn_test.tfrecord'],
            1,  # num_features
            1,  # buffer_size
            1,  # batch_size
            1,  # num_epochs
        )

        session = tf.Session()

        result_features, result_labels = session.run([features, labels])

        length = result_features[LENGTH]
        self.assertEqual(8, length)

        feature = result_features[FEATURE]
        self.assertListEqual([1., 0., 1., 2., 1., 2., 2., 1.],
                             list(feature.flatten()))

        self.assertListEqual([0, 0, 0, 1, 0, 2, 0, 3, 0, 4, 0, 5, 0, 6],
                             list(result_labels.indices.flatten()))

        self.assertListEqual([1, 0, 1, 2, 1, 2, 1], list(result_labels.values))

        self.assertListEqual([1, 7], list(result_labels.dense_shape))
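The label assertions above describe a tf.SparseTensor: indices holds (row, column) pairs, values the label entries, and dense_shape the [batch, max_length] bounds. For reference, an equivalent sparse tensor built by hand with the same values the test expects:

import tensorflow as tf

# One example (row 0) with seven labels in columns 0..6, dense shape [1, 7].
labels = tf.SparseTensor(
    indices=[[0, col] for col in range(7)],
    values=[1, 0, 1, 2, 1, 2, 1],
    dense_shape=[1, 7],
)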
Example #3
def main(argv):
    param_namespace = parse(argv[1:])

    params = create_params(param_namespace)

    estimator = create_estimator(params)

    tf.logging.set_verbosity(tf.logging.INFO)

    estimator.train(input_fn=lambda: train_input_fn(
        param_namespace.filenames, param_namespace.num_features,
        param_namespace.buffer_size, param_namespace.batch_size,
        param_namespace.num_epochs),
                    steps=param_namespace.steps)

    predictions = estimator.predict(input_fn=eval_input_fn)

    for key, value in enumerate(predictions):
        print('------------------key:', key)
        for row in value[SOFTMAX]:
            row = list(row)
            print('max index: %1d' % row.index(max(row)))
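Estimator.train expects a zero-argument callable, which is why train_input_fn is wrapped in a lambda above; functools.partial is an equivalent spelling of the same binding:

from functools import partial

# Bind the hyper-parameters up front so the Estimator gets a no-argument input_fn.
estimator.train(
    input_fn=partial(train_input_fn,
                     param_namespace.filenames, param_namespace.num_features,
                     param_namespace.buffer_size, param_namespace.batch_size,
                     param_namespace.num_epochs),
    steps=param_namespace.steps)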
Example #4
    def test_train_input_fn_batch(self):
        features, labels = train_input_fn(
            ['test_data/input_fn_test.tfrecord'],
            1,  # num_features
            5,  # buffer_size
            2,  # batch_size
            1,  # num_epochs
        )

        session = tf.Session()

        result_features, result_labels = session.run([features, labels])

        length = result_features[LENGTH]
        self.assertEqual(2, len(length))

        feature = result_features[FEATURE]
        self.assertEqual(2, len(feature))
        self.assertEqual(max(length), len(feature[0]))
        self.assertEqual(max(length), len(feature[1]))

        self.assertEqual(len(result_labels.values), len(result_labels.indices))
        self.assertEqual(2, len(result_labels.dense_shape))
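This batched test implies the pipeline pads variable-length feature sequences up to the longest example in each batch (hence len(feature[i]) == max(length)). A minimal sketch of that padding behaviour with tf.data; this is an illustration of the technique, not code taken from the project:

import tensorflow as tf

# Variable-length 1-D sequences padded with zeros to the batch maximum.
sequences = tf.data.Dataset.from_generator(
    lambda: iter([[1., 0., 1.], [2., 1.], [2., 2., 1., 3.]]),
    output_types=tf.float32,
    output_shapes=tf.TensorShape([None]),
)
batched = sequences.padded_batch(batch_size=2, padded_shapes=[None])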
Example #5
        #layers['prob'] = tf.nn.softmax(layers['fc1000'])
    with tf.name_scope('fc256'):
        layers['fc256'] = fully_connected(
            layers['fc1000'],
            tf.get_variable('weight_256', shape=[1000, 256],
                            initializer=tf.truncated_normal_initializer(stddev=0.1)),
            tf.get_variable('biases_256', [256],
                            initializer=tf.constant_initializer(0.0)))
    return layers['fc256']

def resnet50(input):

    # Pretrained weights and biases loaded from a NumPy archive.
    net = np.load('./resnet50.npy', encoding='latin1', allow_pickle=True).item()
    weights = {}
    for name in net.keys():
        weights[name] = {}
        for i in net[name].keys():
            # Only the final fc1000 layer stays trainable; every other
            # pretrained ResNet-50 weight is frozen.
            trainable = (name == 'fc1000')
            weights[name][i] = tf.Variable(
                tf.constant(net[name][i]), dtype='float32',
                name=name + '_' + i, trainable=trainable)

    return model(input, weights)

if __name__ == "__main__":
    from input_fn import train_input_fn
    x = train_input_fn('../dataset/train.csv')
    y0, y1 = next(x)
    y0 = y0.astype(np.float32)
    tmp = resnet50(y0)
    init = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init)
        z = sess.run(tmp)
        print(z)
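Since everything except fc1000 is created with trainable=False (while the fc256 head adds its own trainable variables via tf.get_variable), a quick sanity check of what an optimizer would actually update can be done with the TF 1.x collection API already used above:

import tensorflow as tf

# Only fc1000_* and the fc256 weight/bias variables should be listed here;
# the rest of the pretrained ResNet-50 weights are frozen.
for var in tf.trainable_variables():
    print(var.name, var.shape)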
Example #6
                coco_imgs.append((img_name, word_anno))

    return coco_imgs


if __name__ == '__main__':
    # python train.py --model_dir ./save/
    params = Params('./params.json')

    tf.reset_default_graph()
    tf.logging.set_verbosity(tf.logging.INFO)

    # Define the model
    config = tf.estimator.RunConfig(tf_random_seed=230,
                                    model_dir=args.model_dir,
                                    save_summary_steps=50,
                                    save_checkpoints_steps=10000)
    estimator = tf.estimator.Estimator(model_fn, params=params, config=config)

    # Read real image data
    coco_imgs = read_coco(args.coco_path)

    # Train the model
    # Evaluate the model on the test set
    train_spec = tf.estimator.TrainSpec(
        input_fn=lambda: train_input_fn(params, coco_imgs))
    eval_spec = tf.estimator.EvalSpec(input_fn=lambda: val_input_fn(params),
                                      throttle_secs=100)

    # tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
    estimator.train(input_fn=lambda: train_input_fn(params, coco_imgs))
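Note that args here is expected to come from an argument parser defined elsewhere in the file (see the --model_dir usage comment), and that tf.estimator.train_and_evaluate is commented out, so eval_spec is built but never used. To run periodic evaluation alongside training, swap the bare train() call for the spec pair:

# Uses the train_spec/eval_spec constructed above.
tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)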