def predict(self, dataset, save_dir):
        self.scaler = pickle_load(save_dir + self.save_scaler_filename)

        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True

        with tf.Session(config=config) as sess:
            self.saver.restore(sess, save_dir + self.save_filename)

            xs = dataset2arrays(dataset, return_labels=False)
            # scale inputs; transform() returns a new array rather than
            # modifying in place, so the result must be assigned back
            xs = self.scaler.transform(np.reshape(xs, (-1, 1))).reshape(xs.shape)
            batches_xs = batches(xs, self.conf["batch_size"])
            batches_p = []

            for x in batches_xs:
                feed_dict = {
                    self.tf_x: x,
                    self.is_training: False
                }
                p_val = sess.run(self.p, feed_dict=feed_dict)
                p_val = p_val[:, 0]  # keep the first output column (the score thresholded below)
                batches_p.append(p_val)

        # attach the probability and a hard 0/1 prediction to each example
        for example, p in zip(dataset, unbatch(batches_p)):
            example["p"] = p
            example["prediction"] = int(p >= 0.5)
Example #2
def main(exp, dataset, traindev_model):
    exp_name, dataset_name, use_traindev_model = exp, dataset, traindev_model
    del exp, dataset, traindev_model

    # Clean
    exp_dir = cfg.EXPERIMENTS_DIR + exp_name + "/"
    backup_files([__file__], exp_dir)

    # Data
    dataset = pickle_load(cfg.DATASETS[dataset_name])
    conf = json_load(exp_dir + "conf.json")

    # Model
    trained_on = "traindev" if use_traindev_model else "train"
    ModelClass = get_class(exp_dir + "used_model.py", conf["class"])
    model = ModelClass(conf, trained_on=trained_on)  # type: Model
    model.predict(dataset, exp_dir)
    # smooth_p(dataset)

    # Store
    pickle_dump(dataset, exp_dir + dataset_name + "_predictions.pkl")
    submission_dump(
        dataset,
        exp_dir + "{}Model_{}Data_submission.csv".format(trained_on, dataset_name))
Example #3
def main(conf, exp, short_run):
    conf_file, exp_name = conf, exp
    del conf, exp

    # Clean
    hypertuner_exp_dir = cfg.EXPERIMENTS_DIR + exp_name + "/"
    make_sure_path_exists(hypertuner_exp_dir)
    clean_folder(hypertuner_exp_dir)
    backup_files([__file__], hypertuner_exp_dir)

    hypertuner_conf = json_load(conf_file)
    json_dump(hypertuner_conf, hypertuner_exp_dir + "conf.json")
    trainset = pickle_load(
        cfg.DATASETS["train_short" if short_run else "train"])
    devset = pickle_load(cfg.DATASETS["dev"])

    # Model
    module = importlib.import_module(hypertuner_conf["package"])
    shutil.copy(module.__file__, hypertuner_exp_dir + "used_model.py")
    ModelClass = getattr(module, hypertuner_conf["class"])

    csv_dump([["variant", "min_loss"]], hypertuner_exp_dir + "variants.csv")

    # keep sampling and training new hyperparameter variants until interrupted
    i = 0
    while True:
        i += 1
        exp_dir = cfg.EXPERIMENTS_DIR + "{}_{:03d}".format(exp_name, i) + "/"
        make_sure_path_exists(exp_dir)
        clean_folder(exp_dir)

        conf = pick_variant(hypertuner_conf)
        json_dump(conf, exp_dir + "conf.json")

        shutil.copy(module.__file__, exp_dir + "used_model.py")
        model = ModelClass(conf, trained_on="train")  # type: Model
        min_loss = model.train_and_save(trainset, devset, exp_dir)

        csv_dump([[i, min_loss]],
                 hypertuner_exp_dir + "variants.csv",
                 append=True)

        del model
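
# pick_variant() is what turns the hypertuner conf into a concrete training
# conf each iteration. A plausible sketch, assuming list-valued entries in the
# conf enumerate the search choices for a hyperparameter (not confirmed):
import random

def pick_variant(hypertuner_conf):
    variant = {}
    for key, value in hypertuner_conf.items():
        # sample one option from list-valued entries; copy everything else through
        variant[key] = random.choice(value) if isinstance(value, list) else value
    return variant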
Example #4

def main(conf, exp, short_run):
    conf_file, exp_name = conf, exp
    del conf, exp

    # Clean
    exp_dir = prepare_exp_dir(exp_name, clean_dir=True)

    conf = json_load(conf_file)
    json_dump(conf, exp_dir + "conf.json")
    trainset = pickle_load(
        cfg.DATASETS["train_short" if short_run else "train"])
    devset = pickle_load(cfg.DATASETS["dev"])

    # Model
    module = importlib.import_module(conf["package"])
    shutil.copy(module.__file__, exp_dir + "used_model.py")
    ModelClass = getattr(module, conf["class"])

    model = ModelClass(conf, trained_on="train")  # type: Model
    model.train_and_save(trainset, devset, exp_dir)
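
# prepare_exp_dir() is not shown in this listing either; a sketch consistent
# with the inline steps of the previous example (create the experiment folder,
# optionally emptying it first):
def prepare_exp_dir(exp_name, clean_dir=False):
    exp_dir = cfg.EXPERIMENTS_DIR + exp_name + "/"
    make_sure_path_exists(exp_dir)
    if clean_dir:
        clean_folder(exp_dir)
    return exp_dir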