Code example #1
def main(scenarios_file, device, epochs, patience, verbose, hyper_params_file):
    hps = get_hyperparameter_options(hyper_params_file)[0]
    batch_size = hps['batch_size']
    limit_vectors = hps['limit_vectors']

    dlf = MVSDataLoaderFactory(batch_size=batch_size, limit_vectors=limit_vectors)
    scenarios = load_scenarios(scenarios_file)
    print('training on', device)

    # dummy model, used only to save the specs
    dmodel = HS_Model(vector_size=vector_size, device=device, patience=patience, **hps)
    save_config(dmodel, hps, results_config_file)

    for scenario in scenarios:
        reset_all_seeds(RANDOM_SEED)
        print('\nTraining scenario:', scenario)
        print('Hyperparameters:', hps)

        train_loader, dev_loader, test_loader = dlf.data_loaders_from_scenario(scenario)
        model = HS_Model(vector_size=vector_size, device=device, patience=patience,
            save_best=True, scenario=scenario, model_path=model_path, **hps)
        
        model.train(train_loader, dev_loader, epochs=epochs, verbose=verbose)
        best_model = load_model(model_path, scenario)
        save_summary(results_file, scenario, model, best_model, train_loader, dev_loader, test_loader, verbose=1)
        save_history(history_path, scenario, model)
        print('Finished training scenario:', scenario)
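
The `save_summary` call above takes the results file, the scenario, both models, and all three loaders. A minimal sketch of a compatible helper, assuming each model exposes an `evaluate(loader)` method returning a score and that results are appended as CSV rows (the column names and `evaluate` are assumptions, not part of the original code):

import csv
import os

def save_summary(results_file, scenario, model, best_model,
                 train_loader, dev_loader, test_loader, verbose=0):
    # Evaluate the last and the best model on each split; append one CSV row per model.
    write_header = not os.path.exists(results_file)
    with open(results_file, 'a', newline='') as f:
        writer = csv.writer(f)
        if write_header:
            writer.writerow(['scenario', 'model', 'train', 'dev', 'test'])
        for name, m in (('last', model), ('best', best_model)):
            # m.evaluate(loader) is a hypothetical scoring method
            row = [scenario, name] + [m.evaluate(dl) for dl in
                                      (train_loader, dev_loader, test_loader)]
            if verbose:
                print(row)
            writer.writerow(row)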
Code example #2
def start_summary():
    path = utils.initialize_summary()
    latlong = utils.read_latlong()
    place_list = list(latlong.keys())
    for place in place_list:
        df = utils.load_year(place)
        summary = utils.make_summary(df)
        utils.save_summary(summary, place)
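
The `utils` module here is project-specific. A minimal sketch of a `save_summary(summary, place)` helper of this shape, assuming the summary is a pandas DataFrame written to one CSV per place (the `summaries/` directory is illustrative):

import os
import pandas as pd

def save_summary(summary: pd.DataFrame, place: str, out_dir: str = 'summaries') -> str:
    # Write the per-place summary to <out_dir>/<place>.csv and return the path.
    os.makedirs(out_dir, exist_ok=True)
    path = os.path.join(out_dir, f'{place}.csv')
    summary.to_csv(path, index=False)
    return path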
Code example #3
File: nasari.py Project: Bonny94ITA/TLN-Radicioni
def main():
    # path = "./asset/Donald-Trump-vs-Barack-Obama-on-Nuclear-Weapons-in-East-Asia.txt"
    path = "./asset/People-Arent-Upgrading-Smartphones-as-Quickly-and-That-Is-Bad-for-Apple.txt"
    # path = "./asset/The-Last-Man-on-the-Moon--Eugene-Cernan-gives-a-compelling-account.txt"
    path_synsets = "./asset/synsets.txt"
    path_nasari = "./asset/dd-nasari.txt"

    # Read the synset file produced by the titleSynset.py script
    synsets = utils.read_file_synset(path_synsets)
    # Dictionary of synsets with each word as key and its BabelNet synset id as value
    word_to_synset = utils.word_to_synset_dict(synsets)

    # Read the NASARI file
    nasari = utils.read_file_nasari(path_nasari)

    # Read the file to be tested
    text = utils.read_file(path)

    # Identify 10 keywords in the file
    keywords = utils.get_key_words(text)
    # print(keywords)

    # Split the text into title and paragraphs
    dictionary = utils.paragraph(text)
    # Clean the title, merging proper nouns into a single token and removing stop words
    dictionary = utils.clean_title(dictionary)
    # print(dictionary)

    # Determine the context
    context = get_context(dictionary["Titolo"], word_to_synset, nasari)
    # print(context)
    # context = []

    # Determine the importance/rank of the paragraphs
    rank_p = rank_paragraphs(dictionary, context, keywords)
    rank_p2 = copy.deepcopy(rank_p)

    print("\n\n\nORIGINAL\n\n\n" + utils.generate_summary(rank_p))

    # Create the summary with the trivial method
    summary = summarize_trivial(
        rank_p2, ratio=0.3
    )  # The ratio can be changed depending on the desired summary percentage
    print("\n\n\nSUMMARY TRIVIAL\n\n\n" + utils.generate_summary(summary))

    # Create the summary with the efficient method
    summary = summarize(
        rank_p, ratio=0.3
    )  # The ratio can be changed depending on the desired summary percentage
    print("\n\n\nSUMMARY\n\n\n" + utils.generate_summary(summary))

    # Save the summaries
    utils.save_summary(summary)
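
`utils.save_summary(summary)` presumably persists the final summary. A minimal sketch (placed inside `utils`), under the assumption that it renders the ranked paragraphs with `generate_summary` and writes the text to disk; the output path is illustrative:

def save_summary(summary, path='./asset/summary.txt'):
    # Render the ranked paragraphs to plain text and write them to a file.
    with open(path, 'w', encoding='utf-8') as f:
        f.write(generate_summary(summary))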
Code example #4
File: paac.py Project: gikr/pytorch_paac_T-lab
    def _save_progress(self, dir, summaries=None, is_best=False):
        last_chkpt_path = join_path(dir, self.CHECKPOINT_LAST)
        state = {
            'last_step': self.global_step,
            'network_state_dict': self.network.state_dict(),
            'optimizer_state_dict': self.optimizer.state_dict()
        }
        th.save(state, last_chkpt_path)
        logging.info('The state of the agent is saved at step #%d' %
                     self.global_step)

        if (summaries is not None) and len(summaries) > 0:
            summaries_path = join_path(dir, self.SUMMARY_FILE)
            utils.save_summary(summaries, summaries_path)

        if is_best:
            best_chkpt_path = join_path(dir, self.CHECKPOINT_BEST)
            shutil.copyfile(last_chkpt_path, best_chkpt_path)
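
Here `utils.save_summary(summaries, summaries_path)` persists the accumulated training summaries alongside the checkpoint. A minimal sketch, assuming summaries are appended to a pickle file (the on-disk format is an assumption, not taken from the project):

import os
import pickle

def save_summary(summaries, path):
    # Append the new summaries to any previously saved ones and rewrite the pickle file.
    old = []
    if os.path.exists(path):
        with open(path, 'rb') as f:
            old = pickle.load(f)
    with open(path, 'wb') as f:
        pickle.dump(old + list(summaries), f)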
Code example #5
    # Reduce the learning rate when the monitored metric in the 'metrics' MetricList plateaus
    lr_scheduler = ReduceLROnPlateau(
        optimizer, mode="max", patience=config["lr_patience"], verbose=True
    )
    model_checkpoint = engine.ModelCheckpoint(model_path, mode="max")
    early_stopping = engine.EarlyStopping(mode="max", patience=config["stop_patience"])

    # Train the model
    print()
    train = engine.Trainer(
        net,
        optimizer,
        criterion,
        metrics,
        config["epochs"],
        start_epoch=start_epoch,
        lr_scheduler=lr_scheduler,
        early_stop=early_stopping,
        model_checkpoint=model_checkpoint,
        device=config["device"],
    )
    net, checkpoint = train.fit(train_loader, val_loader, output_fn=sigmoid_threshold)

    # Save a summary file containing the args, losses, and metrics
    config_path = os.path.join(checkpoint_dir, os.path.basename(args.config))
    utils.save_config(config_path, config)
    summary_path = os.path.join(checkpoint_dir, "summary.json")
    utils.save_summary(
        summary_path, vars(args), config, checkpoint["losses"], checkpoint["metrics"]
    )
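
Since the summary goes to `summary.json`, a compatible `save_summary` is likely a straightforward JSON dump. A minimal sketch (the key names are illustrative):

import json

def save_summary(path, args, config, losses, metrics):
    # Bundle the run's arguments, config, losses, and metrics into one JSON file.
    summary = {'args': args, 'config': config, 'losses': losses, 'metrics': metrics}
    with open(path, 'w') as f:
        json.dump(summary, f, indent=4)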
Code example #6
os.makedirs(os.path.join(model_path, 'history'), exist_ok=True)
# os.makedirs(os.path.join(model_path, model_name + '-Checkpoint'), exist_ok = True)  # Create folder if not present
# checkpoint = ModelCheckpoint(os.path.join(model_path, model_name + '-Checkpoint', model_name) + '-Checkpoint-{epoch:03d}.h5')
# early_stop = EarlyStopping(monitor = 'val_loss', patience = 3, verbose = 1, min_delta = 1e-4)
reduce_lr = ReduceLROnPlateau(monitor='loss',
                              factor=0.5,
                              patience=3,
                              verbose=1,
                              min_delta=1e-4)
kappa_metrics = Metrics()
callbacks_list = [logger, reduce_lr, kappa_metrics]
# callbacks_list = [logger, reduce_lr]

for i in range(EPOCHS):
    print("Epoch:", i)
    for folder in train_folders:
        print("Folder:", folder)
        load_and_train(folder)

    if i == EPOCHS // 4:
        print("Setting full model to trainable")
        model.trainable = True

# model.save(os.path.join(model_path, model_name) + '.h5')
# model = load_model(os.path.join(model_path, model_name) + '_best.h5', custom_objects = {'kappa_loss': kappa_loss, 'ordinal_loss': ordinal_loss, 'cauchy_loss': cauchy_loss, 'correntropy_loss': correntropy_loss})

#####
save_summary(model_name,
             best_kappa=best_kappa,
             epoch=best_kappa_epoch,
             filename='models/performance.csv')
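
This variant logs the best kappa score per model into a shared CSV. A minimal sketch of a compatible helper, assuming one appended row per call (the column names are illustrative):

import csv
import os

def save_summary(model_name, best_kappa, epoch, filename='models/performance.csv'):
    # Append one (model, best kappa, epoch) row, writing a header on first use.
    write_header = not os.path.exists(filename)
    os.makedirs(os.path.dirname(filename) or '.', exist_ok=True)
    with open(filename, 'a', newline='') as f:
        writer = csv.writer(f)
        if write_header:
            writer.writerow(['model_name', 'best_kappa', 'epoch'])
        writer.writerow([model_name, best_kappa, epoch])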