Example #1
import tensorflow as tf

# Vocab, PGN, train_model, get_params and CKPT_DIR are project-local;
# their import paths are not shown in the original snippet.


def train(params):
    vocab = Vocab(params["vocab_path"], params["vocab_size"])

    # Build the model
    print("Building the model ...")
    # model = Seq2Seq(params)
    model = PGN(params)

    # print("Creating the batcher ...")
    # dataset = batcher(params["train_seg_x_dir"], params["train_seg_y_dir"], vocab, params)
    # print('dataset is ', dataset)

    # Get the checkpoint manager
    print("Creating the checkpoint manager")
    checkpoint = tf.train.Checkpoint(Seq2Seq=model)
    checkpoint_manager = tf.train.CheckpointManager(checkpoint, CKPT_DIR, max_to_keep=5)
    checkpoint.restore(checkpoint_manager.latest_checkpoint)
    if checkpoint_manager.latest_checkpoint:
        print("Restored from {}".format(checkpoint_manager.latest_checkpoint))
    else:
        print("Initializing from scratch.")
    # Train the model
    print("Starting the training ...")
    train_model(model, vocab, params, checkpoint_manager)


if __name__ == '__main__':
    # Get the parameters
    params = get_params()
    # Train the model
    train(params)
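
These snippets all revolve around a project-level get_params() helper that returns the run configuration, here consumed as a dict. A minimal sketch of such a helper, assuming argparse and only the two keys this example actually reads (vocab_path, vocab_size; the defaults are placeholders), might look like this:

import argparse

def get_params():
    # Sketch only: the real helper would define many more options.
    parser = argparse.ArgumentParser()
    parser.add_argument("--vocab_path", default="data/vocab.txt",
                        help="path to the vocabulary file")
    parser.add_argument("--vocab_size", type=int, default=50000,
                        help="maximum number of words to keep")
    # vars() turns the Namespace into the dict indexed as params[...]
    return vars(parser.parse_args())
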
Example #2
import numpy as np
import tensorflow as tf
from flask import Flask, jsonify, request

from utils.params import get_params
import model.FastText.FastText_params as FastText_params
import model.TextCNN.TextCNN_params as TextCNN_params

# data_loader and convert are project-local helpers; their import
# paths are not shown in the original snippet.

app = Flask(__name__)
app.config['JSON_AS_ASCII'] = False

err_result = {"errCode": "", "errMsg": "", "status": False}

FastText_model = tf.keras.models.load_model(
    './results/FastText/2020-03-27-15-01')
# FastText_model = tf.keras.models.load_model('./results/FastText/model.h5')
TextCNN_model = tf.keras.models.load_model(
    './results/TextCNN/2020-03-27-21-41')

_, _, _, _, vocab, mlb = data_loader(get_params())
labels = np.array(mlb.classes_)

fastText_params = FastText_params.get_params()
textCNN_params = TextCNN_params.get_params()


@app.route("/FastText_service/", methods=['GET', 'POST'])
def FastText_service():
    try:
        text_list = request.json
        predict_data = convert(text_list, fastText_params)
    except Exception as e:
        return jsonify({**err_result, "errMsg": str(e)})
    else:
        preds = FastText_model.predict(predict_data)
        # The original snippet is truncated here. The completion below is
        # assumed: it keeps the label names whose scores pass a threshold
        # and returns them as JSON.
        results = [labels[p > 0.5].tolist() for p in preds]
        return jsonify(results)
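
A client could exercise the route like this (a sketch using the requests library; the host, port and payload are assumptions, since the snippet does not show how the app is launched):

import requests

# POST a JSON list of raw texts to the service; Flask's default
# host and port are assumed here.
resp = requests.post("http://127.0.0.1:5000/FastText_service/",
                     json=["first sample text", "second sample text"])
print(resp.json())
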
Example #3
import time

import numpy as np

# normalize, get_params, stack_features and train_codebook are
# project-local helpers; their import paths are not shown in the
# original snippet. The fragment below is the tail of a function whose
# signature is also not shown, so the one used here is assumed.
def compute_descriptor(assignments, descriptor):
    # Build vector of repetitions (one count per codebook assignment)
    for a in assignments:
        descriptor[a] += 1

    # L2 normalize
    descriptor = normalize(descriptor)

    return descriptor



if __name__ == "__main__":

    params = get_params()

    # Change to training set
    params['split'] = 'train'

    print("Stacking features together...")
    # Save features for training set
    t = time.time()
    X, pca, scaler = stack_features(params)
    print("Done. Time elapsed:", time.time() - t)
    print("Number of training features", np.shape(X))

    print("Training codebook...")
    t = time.time()
    train_codebook(params, X)
    print("Done. Time elapsed:", time.time() - t)
Example #4
# get_params, create_exp_dirs and Agent are project-local; their import
# paths are not shown in the original snippet.
def main():
    args = get_params()
    args = create_exp_dirs(args)
    agent = Agent(args)
    agent.run()
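
create_exp_dirs is likewise not shown; a common pattern, sketched here with assumed attribute names, is to derive a timestamped output directory from the parsed arguments (the timestamped model folders in Example #2 follow the same naming scheme):

import os
import time

def create_exp_dirs(args):
    # Sketch only: attach a timestamped experiment directory to args.
    run_id = time.strftime("%Y-%m-%d-%H-%M")
    args.exp_dir = os.path.join("results", run_id)
    os.makedirs(args.exp_dir, exist_ok=True)
    return args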