Example #1
# Imports implied by the snippet; `settings`, `Actor`, `GUI`, `Learner` and
# `Sess` are project-local modules, so these import paths are assumptions:
import threading

import tensorflow as tf

import settings
import Actor
import GUI
from Learner import Learner
from Session import Sess  # assumed: a context manager wrapping tf.Session


# The enclosing function header is cut off in the source; a bare main() is
# assumed so the original indentation parses. `options` and `meta` are also
# cut off and assumed to be the usual tf.RunOptions / tf.RunMetadata pair.
def main():
    options = tf.RunOptions()
    meta = tf.RunMetadata()
    # device_count is rebuilt around its surviving '"GPU": 1' tail; the CPU
    # entry is an assumption matching the per-actor "/device:CPU:i" pins below.
    config = tf.ConfigProto(device_count={"CPU": settings.NB_ACTORS,
                                          "GPU": 1},
                            inter_op_parallelism_threads=10)
    with Sess(options, meta, config=config) as sess:

        workers = []
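        # One Actor per virtual CPU device; each presumably wraps its own
        # environment instance and shares the single session.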
        for i in range(settings.NB_ACTORS):
            with tf.device("/device:CPU:" + str(i)):
                workers.append(Actor.Actor(sess, i + 1))

        print("Initializing learner...")
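        # The learner is pinned to the GPU and sized from the first worker's
        # environment (get_env_features presumably returns state/action shapes).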
        with tf.device("/device:GPU:0"):
            learner = Learner(sess, *workers[0].get_env_features())
        print("Learner initialized!\n")
        if settings.LOAD:
            learner.load()

        threads = []
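        # One thread per actor, plus one for the learner appended below; all
        # of them share the graph and session built above.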
        for i in range(settings.NB_ACTORS):
            thread = threading.Thread(target=workers[i].run)
            threads.append(thread)

        threads.append(threading.Thread(target=learner.run))

        if settings.GUI:
            GUI_thread = threading.Thread(target=GUI.main)
            GUI_thread.start()

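        # Initialize every variable once the full graph (actors + learner) has
        # been built, before the actor/learner threads start.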
        sess.run(tf.global_variables_initializer())

        # The loop body is truncated in the source; starting every thread and
        # then joining them (so the session stays open) is the natural ending.
        for t in threads:
            t.start()
        for t in threads:
            t.join()
Example #2
# Imports implied by the snippet; `seed_everything`, `get_transforms`,
# `dsbDataset` and `Learner` are project-local helpers whose import paths
# are not shown:
import os

import pandas as pd
from torch.utils.data import DataLoader
from segmentation_models_pytorch import Unet


# Tail of a truncated configuration block; its earlier attributes (seed,
# df_path, fold, input_size, etc., referenced below) are cut off. A bare
# `class config:` namespace is assumed so the surviving lines still parse.
class config:
    thr_conf = 0.8
    alpha = 0.2


if __name__ == '__main__':
    seed_everything(config.seed)

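    # U-Net with an ImageNet-pretrained ResNet-50 encoder, scSE attention in
    # the decoder, and raw 2-class logits (activation=None).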
    model = Unet(encoder_name='resnet50', encoder_weights='imagenet', decoder_use_batchnorm=True,
                 decoder_attention_type='scse', classes=2, activation=None)

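    # Cross-validation split: rows whose fold matches config.fold are held out
    # for validation; the rest are used for training.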
    df = pd.read_csv(config.df_path)
    train_df = df[df.fold != config.fold].reset_index(drop=True)
    valid_df = df[df.fold == config.fold].reset_index(drop=True)
    transforms = get_transforms(config.input_size, need=('train', 'val'))

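    # Validation runs with batch_size=1 and return_id=True, presumably so each
    # prediction can be matched back to its image id.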
    train_dataset = dsbDataset(config.data_dir, config.scr_dir, config.mask_dir, train_df,
                               tfms=transforms['train'], return_id=False)
    valid_dataset = dsbDataset(config.data_dir, config.scr_dir, config.mask_dir, valid_df,
                               tfms=transforms['val'], return_id=True)
    train_loader = DataLoader(dataset=train_dataset, batch_size=config.batch_size, num_workers=config.num_workers,
                              shuffle=True)
    valid_loader = DataLoader(dataset=valid_dataset, batch_size=1, num_workers=config.num_workers,
                              shuffle=False)

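    # Resume from the best checkpoint in config.log_dir when one exists,
    # then train for config.n_epochs.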
    # Instance renamed from `Learner` to `learner` so it no longer shadows
    # the Learner class itself.
    learner = Learner(model, train_loader, valid_loader, config)
    pretrained_path = os.path.join(config.log_dir, 'best_model.pth')
    if os.path.isfile(pretrained_path):
        learner.load(pretrained_path)
        learner.log(f"Checkpoint Loaded: {pretrained_path}")
    learner.fit(config.n_epochs)