Example #1
import os
from pickle import Pickler, Unpickler


def run(args):
    # TODO: save the results of processing data for faster inference load
    if os.path.exists(args.data_reader_path):
        # Fast path: restore a previously pickled DataReader from disk
        print('Loading data reader...')
        with open(args.data_reader_path, 'rb') as f:
            data_reader = Unpickler(f).load()
            print('Loaded')

        vocab = data_reader.get_vocab()
    else:
        # Slow path: build the reader from the raw training data
        print('Creating data reader...')
        data_reader = DataReader(args.train_dir)

        vocab = data_reader.get_vocab()

        # Save the data reader so later runs can take the fast path
        with open(args.data_reader_path, 'wb') as f:
            Pickler(f).dump(data_reader)

    print('Init model...')
    model = WordModel(args, vocab)

    if args.inference:
        # Generate a sequence seeded from the primer text
        model.generate(primer=args.primer)
    else:
        # Train until the model reports reaching max_steps
        global_step = 0
        while global_step < args.max_steps:
            inputs, targets = data_reader.get_train_batch(
                args.batch_size, args.seq_len)
            global_step = model.train_step(inputs, targets)
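
run() reads several attributes off args; a minimal argparse driver along these lines would supply all of them (the flag names match the attributes used above, but every default is an illustrative assumption, not a value from the original project):

import argparse


def parse_args():
    # Each flag mirrors an attribute accessed on `args` in run();
    # the defaults below are placeholders only.
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_reader_path', default='save/data_reader.pkl')
    parser.add_argument('--train_dir', default='data/train')
    parser.add_argument('--inference', action='store_true')
    parser.add_argument('--primer', default=' ')
    parser.add_argument('--max_steps', type=int, default=100000)
    parser.add_argument('--batch_size', type=int, default=32)
    parser.add_argument('--seq_len', type=int, default=50)
    return parser.parse_args()


if __name__ == '__main__':
    run(parse_args())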
Example #2
import os
from pickle import Unpickler


def get_vocab():
    if os.path.exists(DATA_READER_PATH):
        # Fast path: restore the pickled DataReader and read its vocab
        print('Loading vocab...')
        with open(DATA_READER_PATH, 'rb') as f:
            data_reader = Unpickler(f).load()
            vocab = data_reader.get_vocab()

            print('Loaded!')
    else:
        # Slow path: rebuild the reader from the raw data directory
        assert os.path.exists(DATA_DIR), 'DATA_DIR not found'
        print('Creating data reader...')
        data_reader = DataReader(DATA_DIR)

        vocab = data_reader.get_vocab()

    return vocab
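
Unlike Example #1, this variant never writes the freshly built DataReader back to DATA_READER_PATH, so the slow branch repeats on every call. A minimal sketch of the missing save step, mirroring the Pickler usage in Example #1 (save_data_reader is a hypothetical helper name, not from the original project):

from pickle import Pickler


def save_data_reader(data_reader, path=DATA_READER_PATH):
    # Persist the reader so the next get_vocab() call can take
    # the fast load-from-disk branch instead of rebuilding.
    with open(path, 'wb') as f:
        Pickler(f).dump(data_reader)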
Example #3
import os
import shutil
import sys
from pickle import Unpickler

import tensorflow as tf


def main(args):
    if os.path.exists(args.data_reader_path):
        print('Loading data reader...')
        with open(args.data_reader_path, 'rb') as f:
            data_reader = Unpickler(f).load()
            print('Loaded')

        vocab = data_reader.get_vocab()
    else:
        print("Couldn't load vocab")
        sys.exit(1)

    print('Init model...')
    model = WordModel(args, vocab)

    # Version the export directory so a serving system can pick up
    # the newest model; clear any stale export at the same version.
    export_dir = os.path.join(args.export_dir, str(args.version))
    print('Exporting trained model to', export_dir)
    if os.path.isdir(export_dir):
        shutil.rmtree(export_dir)
    builder = tf.saved_model.builder.SavedModelBuilder(export_dir)

    # Wrap the graph tensors in TensorInfo protos for the signature
    inputs_tensor_info = tf.saved_model.utils.build_tensor_info(model.inputs)
    keep_prob_tensor_info = tf.saved_model.utils.build_tensor_info(
        model.keep_prob)
    outputs_tensor_info = tf.saved_model.utils.build_tensor_info(model.gen_seq)

    # A PREDICT signature mapping named inputs/outputs to the tensors above
    prediction_signature = (
        tf.saved_model.signature_def_utils.build_signature_def(
            inputs={
                'inputs': inputs_tensor_info,
                'keep_prob': keep_prob_tensor_info
            },
            outputs={'outputs': outputs_tensor_info},
            method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)
    )

    # Initialize lookup tables (e.g. the vocab table) when the model loads
    legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')

    builder.add_meta_graph_and_variables(
        model.sess, [tf.saved_model.tag_constants.SERVING],
        signature_def_map={'prediction': prediction_signature},
        legacy_init_op=legacy_init_op)

    builder.save()
    print('Done exporting!')
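
To sanity-check the export, the SavedModel can be restored with the matching TF 1.x loader API and queried through the 'prediction' signature defined above; a minimal sketch (load_and_generate and the shape of input_ids are assumptions about this project):

import tensorflow as tf


def load_and_generate(export_dir, input_ids):
    # Restore the exported graph under the SERVING tag (TF 1.x loader API)
    with tf.Session(graph=tf.Graph()) as sess:
        meta_graph_def = tf.saved_model.loader.load(
            sess, [tf.saved_model.tag_constants.SERVING], export_dir)
        # Look up the tensors recorded in the 'prediction' signature
        signature = meta_graph_def.signature_def['prediction']
        inputs = sess.graph.get_tensor_by_name(
            signature.inputs['inputs'].name)
        keep_prob = sess.graph.get_tensor_by_name(
            signature.inputs['keep_prob'].name)
        outputs = sess.graph.get_tensor_by_name(
            signature.outputs['outputs'].name)
        # No dropout at inference time, so keep_prob is 1.0
        return sess.run(outputs,
                        feed_dict={inputs: input_ids, keep_prob: 1.0})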