# Example #1 (0)
    parser.add_argument('-m', '--model', help='LSTM type', required=True, type=str, **share_param)
    parser.add_argument('-e', '--epoch', help='LSTM type', required=True, type=int, **share_param)
    parser.add_argument('-v', '--version', help='', default=None, type=int, **share_param)
    return parser.parse_args()


if __name__ == '__main__':
    # Suppress TensorFlow's C++ log output below ERROR level.
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

    # Parse CLI options (model name, epoch count, optional checkpoint version).
    _parser = argparse.ArgumentParser(description='This script is ...', formatter_class=argparse.RawTextHelpFormatter)
    args = get_options(_parser)

    # Resolve checkpoint directory and hyperparameters:
    # - with --version, reload the parameters saved with that checkpoint;
    # - otherwise, read fresh hyperparameters from the toml file and let
    #   checkpoint_version allocate a new checkpoint directory for them.
    if args.version is not None:
        _checkpoint_dir, _parameter = \
            checkpoint_version('./checkpoint/%s' % args.model, version=args.version)
    else:
        # Fix: close the hyperparameter file deterministically — the original
        # passed a bare open() to toml.load and leaked the file handle.
        with open('./hyperparameters/%s.toml' % args.model) as _hp_file:
            _parameter = toml.load(_hp_file)
        _checkpoint_dir, _ = checkpoint_version('./checkpoint/%s' % args.model, _parameter)

    # data: PTB corpus split into train / validation / test token sequences
    # (presumably token-id lists — TODO confirm against ptb_raw_data).
    raw_train, raw_validation, raw_test, vocab = ptb_raw_data("./simple-examples/data")

    # Wrap each split in a mini-batch feeder, keyed by the keyword-argument
    # name that LanguageModel.train expects.
    iterators = dict()
    for raw_data, key in zip([raw_train, raw_validation, raw_test], ["batcher_train", "batcher_valid", "batcher_test"]):
        iterators[key] = BatchFeeder(batch_size=_parameter['batch_size'],
                                     num_steps=_parameter['config']['num_steps'],
                                     sequence=raw_data)

    # Build the model from the loaded hyperparameters and run training.
    model = LanguageModel(max_max_epoch=args.epoch, checkpoint_dir=_checkpoint_dir, **_parameter)
    model.train(verbose=True, **iterators)