Example #1
from typing import Dict, List, Tuple

import torch
from torch.utils.data import DataLoader


def finetune_and_evaluate_pretrained_model(
    model: torch.nn.Module,
    criterion: torch.nn.Module,
    optimizer: torch.optim.Optimizer,
    device: torch.device,
    callbacks: list,
    dataloaders: Dict[str, DataLoader],
    epochs: int,
    metrics: List[str],
) -> Tuple[float, dict]:
    trainer = Trainer(model=model, criterion=criterion, optimizer=optimizer, device=device)

    best_model = trainer.train_dataloader(
        train_loader=dataloaders["train"],
        epochs=epochs,
        callbacks=callbacks,
        verbose=0,
        val_loader=dataloaders["val"],
    )

    results = evaluate_model(
        model=best_model,
        dataloader=dataloaders["test"],
        metrics=metrics,
        device=device,
        criterion=criterion,
    )

    return results
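A minimal usage sketch for the helper above, assuming the project's Trainer and evaluate_model are importable; the toy model, loaders, and the "accuracy" metric name below are stand-ins, not a documented API:

import torch
from torch.utils.data import DataLoader, TensorDataset

# Toy stand-ins; the real project supplies its own pretrained model and loaders.
X, y = torch.randn(64, 8), torch.randint(0, 2, (64,))
loader = DataLoader(TensorDataset(X, y), batch_size=16)
model = torch.nn.Linear(8, 2)

results = finetune_and_evaluate_pretrained_model(
    model=model,
    criterion=torch.nn.CrossEntropyLoss(),
    optimizer=torch.optim.SGD(model.parameters(), lr=1e-3),
    device=torch.device("cpu"),
    callbacks=[],
    dataloaders={"train": loader, "val": loader, "test": loader},
    epochs=1,
    metrics=["accuracy"],
)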
Example #2
def init_seed_episodes(D: ExperienceReplay, trainer: Trainer, metrics: dict):
    # Collect experience until both the seed-episode count and the minimum
    # replay size for a batch are reached (cfg is a module-level config).
    epoch = 0
    while epoch < cfg.seed_episodes or D.len < cfg.batch_size:
        trainer.collect_interval(metrics, D, epoch)
        metrics['o_loss'].append(None)
        metrics['r_loss'].append(None)
        metrics['kl_loss'].append(None)
        metrics['t_scores'].append(None)
        epoch += 1
    return epoch
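Presumably the None placeholders keep each metric list index-aligned across epochs: seed episodes produce no model losses or test scores, so a None is appended where later training epochs will append real values.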
Example #3
def evolve_with_supernet_cifar10():
    def pp_close():
        pdf_pages.close()
        print("Program terminated,closed pdf")

    atexit.register(pp_close)

    train_dataset, val_dataset, test_dataset = data_loader.load_cifar10()
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
    metrics = [tf.keras.metrics.SparseCategoricalAccuracy()]
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.001, momentum=0.9)
    input_layer = tf.keras.layers.Input(shape=(32, 32, 3))
    output_model = tf.keras.Sequential()
    output_model.add(tf.keras.layers.Flatten())
    output_model.add(tf.keras.layers.Dense(256))
    output_model.add(tf.keras.layers.Dense(10))
    pdf_pages = PdfPages(root_path + "/summary/" + "cifar10_conv_summary.pdf")
    trainer = Trainer(train_dataset, val_dataset, test_dataset, input_layer,
                      output_model, optimizer, loss_fn, metrics, 1, root_path)
    evolution = Evolution(nnlayers,
                          trainer,
                          pdfpages=pdf_pages,
                          root_path=root_path)

    evolution.evolve()
    atexit.unregister(pp_close)  # the PDF is closed here, so drop the atexit hook to avoid a double close
    pdf_pages.close()
Example #4
    def test_image_transform(self):
        # Tests both batched loading and the image_transform option.
        # First, with image_transform=None:
        bird_dataset = BirdsDataset(self.dataset_preprocess_path,
                                    split="train")
        dataloader = DataLoader(bird_dataset, batch_size=4)
        batch = next(iter(dataloader))
        self.assertEqual(batch.images[0].shape[0], 4)
        self.assertEqual(batch.images[0].shape[1], 3)
        self.assertEqual(batch.images[0].shape[2], 299)
        self.assertEqual(batch.images[0].shape[3], 299)

        # Then, with a non-None image_transform:
        transform = Trainer.compose_image_transforms(299)
        bird_dataset = BirdsDataset(self.dataset_preprocess_path,
                                    split="train",
                                    image_transform=transform)
        dataloader = DataLoader(bird_dataset, batch_size=2)
        batch = next(iter(dataloader))
        self.assertEqual(batch.images[0].shape[0], 2)
        self.assertEqual(batch.images[0].shape[1], 3)
        self.assertEqual(batch.images[0].shape[2], 299)
        self.assertEqual(batch.images[0].shape[3], 299)
        self.assertLessEqual(batch.images[0].max(), 1)
        self.assertGreaterEqual(batch.images[0].min(), -1)
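Trainer.compose_image_transforms is not shown on this page; a torchvision composition consistent with the shape and value-range assertions above might look like the following sketch (the body is an assumption, not the project's actual code):

from torchvision import transforms

def compose_image_transforms(image_size):
    # Resize to (image_size, image_size), convert to a [0, 1] tensor, then
    # rescale to [-1, 1], matching the max <= 1 / min >= -1 assertions.
    return transforms.Compose([
        transforms.Resize((image_size, image_size)),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
    ])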
Example #5
def train_multitask(argv):
    logging.info(f"Starting multitask training with {FLAGS.source} datset.")
    if not os.path.exists(FLAGS.save_path):
        os.makedirs(FLAGS.save_path)
    get_path = partial(os.path.join, FLAGS.save_path)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    FLAGS.flags_into_string()
    FLAGS.append_flags_into_file(get_path("flagfile.txt"))

    logging.info(f"Setting seed...")
    torch_utils.set_seed(FLAGS.seed)

    logging.info(f"Loading data...")
    loaders = dataloaders.get_multitask_loaders(
        source_path=FLAGS.source, batch_size=FLAGS.batch_size
    )
    model = GatedGraphNeuralNetwork(
        n_edge=1,
        in_dim=75,
        n_conv=FLAGS.n_conv,
        fc_dims=[FLAGS.fc_dims, loaders["train"].dataset.num_tasks],
    )
    model = model.to(device)

    criterion = loss.MaskedBCEWithLogitsLoss(ignore_index=-1)
    optimizer = torch.optim.Adam(model.parameters(), lr=10 ** (FLAGS.lr))
    trainer = Trainer(
        model=model, criterion=criterion, optimizer=optimizer, device=device
    )

    cbs = [EarlyStopping(patience=20, mode="min")]

    logging.info(f"Begin training!")
    best_model = trainer.train_dataloader(
        train_loader=loaders["train"],
        epochs=FLAGS.epochs,
        callbacks=cbs,
        verbose=1,
        save_dir=FLAGS.save_path,
        val_loader=loaders["val"],
    )
    summary = trainer.summary
    torch.save(best_model, get_path("best_model.pth"))
    with open(get_path("summary.json"), "w") as f:
        json.dump(summary, f)
    logging.info(f"""Summary is saved at {get_path("summary.json")}""")
Example #6
def train_all_models(filename):
    sentences = loadGIKRYGrammasFromFile(filename)
    predictors = [
        'pos',
        'gender',
        'case',
        'nounType',
        'fullForm',
        'aspect',
        'aspectual',
        'tense',
        'verbForm',
        'trans',
        'categoryOfAdjective',
        'syntaxType',
        'typeOfAnother',
        'typeOfAdposition',
        'structureOfAdposition',
        'categoryOfNumeral',
        'formOfNumeral',
    ]

    for predictor in predictors:
        print("start to train %s predictor" % predictor)
        trainer = Trainer(verbose=True)
        features_builder = FeaturesBuilder(predictor)
        for sentence in sentences:
            (features,
             results) = features_builder.make_features_and_results(sentence)
            trainer.append(features, results)
        print("trainer %s appended. Start to train" % predictor)
        trainer.train(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), '..',
                         '..', 'model', 'gikry', "crf_%s.model" % predictor))
Example #7
def train_all_models(filename):
    sentences = loadUDGrammasFromFile(filename)
    predictors = [
        'pos',
        'mood',
        'voice',
        'nameType',
        'poss',
        'reflex',
        'degree',
        'number',
        'case',
        'gender',
        'verbForm',
    ]
    for predictor in predictors:
        trainer = Trainer(verbose=True)
        features_builder = FeaturesBuilder(predictor)
        for sentence in sentences:
            (features,
             results) = features_builder.make_features_and_results(sentence)
            trainer.append(features, results)
        print("trainer %s appended. Start to train" % predictor)
        trainer.train(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), '..',
                         '..', 'model', 'ud', 'crf_%s.model' % predictor))
Example #8
def evolve_with_supernet():
    train_dataset, val_dataset = data_loader.load_iris()
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
    metrics = [tf.keras.metrics.SparseCategoricalAccuracy()]
    optimizer = "SGD"
    input_layer = tf.keras.layers.Input(shape=(4,))
    output_layer = tf.keras.layers.Dense(3)
    pdf_pages = PdfPages(root_path + "/summary/" + "summary.pdf")
    # load_iris() returns no test split, so the validation set doubles as the test set
    trainer = Trainer(train_dataset, val_dataset, val_dataset, input_layer,
                      output_layer, optimizer, loss_fn, metrics, 1, root_path)
    evolution = Evolution(nnlayers, trainer, pdfpages=pdf_pages)

    evolution.evolve()
    pdf_pages.close()
Example #9
def evolve_with_supernet_mnist():
    train_dataset, val_dataset, test_dataset = data_loader.load_mnist()
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
    metrics = [tf.keras.metrics.SparseCategoricalAccuracy()]
    optimizer = "SGD"
    input_layer = tf.keras.layers.Input(shape=(28, 28, 1))
    output_model = tf.keras.Sequential()
    output_model.add(tf.keras.layers.Flatten())
    output_model.add(tf.keras.layers.Dense(256))
    output_model.add(tf.keras.layers.Dense(10))
    pdf_pages = PdfPages(root_path + "/summary/" + "conv_summary.pdf")
    trainer = Trainer(train_dataset, val_dataset, test_dataset, input_layer,
                      output_model, optimizer, loss_fn, metrics, 1, root_path)
    evolution = Evolution(nnlayers,
                          trainer,
                          pdfpages=pdf_pages,
                          root_path=root_path)

    evolution.evolve()
    pdf_pages.close()
Example #10
# Autoloader
import sys
import os
from pathlib import Path
path = Path(__file__).resolve()
sys.path.append(str(path.parents[0]))

# Import system
from src.training.trainer import Trainer
from database.sqlite import Database

print('Initialize database module')
database = Database()
print('Initialize trainer module')
trainer = Trainer(database=database)

while True:
    os.system('cls' if os.name == 'nt' else 'clear')
    print("""
     ____   ___  _____ _   _ ____    ____   ___ _____ 
    |  _ \ / _ \|  ___| | | / ___|  | __ ) / _ \_   _|
    | | | | | | | |_  | | | \___ \  |  _ \| | | || |  
    | |_| | |_| |  _| | |_| |___) | | |_) | |_| || |  
    |____/ \___/|_|    \___/|____/  |____/ \___/ |_|  
        By Lucas Sievers
    """)
    print('SELECT ONE OF THE OPTIONS')
    print('1  - Trainer')
    print('-1 - Exit')
    option = int(input("Select what you want: "))
    if option == 1:
        # the original snippet is truncated here; presumably this branch starts the trainer
        pass
    elif option == -1:
        break
Example #11
def train_all_models(filename):
    # read the lines from the file in OC format and convert them to grammemes
    sentences = loadOCGrammasFromFile(filename)

    # create a trainer and load the grammemes (sentences) into it

    predictors = [
        'pos',
        'gender',
        'animacy',
        'number',
        'case',
        'aspect',
        'mood',
        'person',
        'poss',
        'reflex',
        'tense',
        'verbForm',
        'voice',
        'degree',
        'nameType',
        'trans',
        'invl',
        'additional'  # additional tags are trained one at a time
    ]
    for predictor in predictors:
        print("start to train %s" % predictor)
        trainer = Trainer(verbose=True)
        features_builder = FeaturesBuilder(predictor)
        for sentence in sentences:
            (features, results) = features_builder.make_features_and_results(sentence)
            trainer.append(features, results)
        print("trainer %s appended. Start to train" % predictor)
        trainer.train(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', 'model', 'oc', "crf_%s.model" % predictor)
        )

    # individual tags train quickly but perform poorly and run slowly; we do not train them separately
    # additionalTags = [
    #     'Infr',
    #     'Slng',
    #     'Arch',
    #     'Litr',
    #     'Erro',
    #     'Dist',
    #     'Ques',
    #     'Dmns',
    #     'Prnt',
    #     'V-be',
    #     'V-en',
    #     'V-ie',
    #     'V-bi',
    #     'V-ey',
    #     'V-oy',
    #     'Coun',
    #     'Af-p',
    #     'Anph',
    #     'Subx',
    #     'Vpre',
    #     'Prdx',
    #     'Coll',
    #     'Adjx',
    #     'Qual',
    #     'Apro',
    #     'Anum',
    #     'Poss',
    #     'ms-f',
    #     'Ms-f',
    #     'Impe',
    #     'Impx',
    #     'Mult',
    #     'Abbr',
    #     'Fixd',
    # ]

    # for additionalTag in additionalTags:
    #     print("start to train additional tag %s" % additionalTag)
    #     trainer = Trainer()
    #     features_builder = FeaturesBuilder('additional', additionalTag)
    #     for sentence in sentences:
    #         (features, results) = features_builder.make_features_and_results(sentence)
    #         trainer.append(features, results)
    #     print("trainer additiona - %s appended. Start to train" % additionalTag)
    #     trainer.train(
    #         os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', 'model', 'oc', "crf_additional_%s.model" % additionalTag)
    #     )
Example #12
def init_trainer(env, worldmodel, planner):
    trainer = Trainer(env, worldmodel, planner)
    return trainer
Example #13
def init_trainer(env, worldmodel, planner, policy):
    trainer = Trainer(env, worldmodel, planner, policy)
    return trainer