Example #1
def train_detector(model, datasets, cfg, logger=None):
    if logger is None:
        logger = log.get_root_logger(cfg.log_level)

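    # build one distributed dataloader per training dataset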
    data_loaders = [
        build_dataloader(dataset,
                         cfg.data.imgs_per_gpu,
                         cfg.data.workers_per_gpu,
                         dist=True) for dataset in datasets
    ]

    # put model on gpus
    model = DistributedDataParallel(model.cuda())
    # build runner
    runner = Runner(model, batch_processor, cfg.optimizer, cfg.work_dir,
                    cfg.log_level)
    # register hooks
    runner.register_training_hooks(cfg.lr_config, cfg.optimizer_config,
                                   cfg.checkpoint_config, cfg.log_config)

    if cfg.resume_from:
        runner.resume(cfg.resume_from)
    elif cfg.load_from:
        runner.load_checkpoint(cfg.load_from)

    runner.run(data_loaders, cfg.workflow, cfg.total_epochs)
Example #2
	def test_null_runner(self):

		# a Runner that does not define a lot raises an exception when run
		runner = Runner()
		with self.assertRaises(RunnerException):
			runner.run()

		# a null runner can be run once the lot is specified
		class MyRunner(Runner):
			lot = 'my_lot'

		runner = MyRunner()
		runner.run()
Example #3
        #total_params = sum(p.numel() for p in model.parameters())
        #print(total_params)
        cbfs += [AvgStatsCallback(metrics=[])]
    else:
        loss_func = AdjointDiceLoss(0)
        cbfs += [
            lossScheduler(),
            AvgStatsCallback(metrics=[accuracy_large, accuracy_small])
        ]
        model = UNetAdjoint(3, 2, compression_factor=1)
    end = time.time()
    print("Loaded model", end - start)

    if is_sgd:
        opt = optim.SGD(model.parameters(), lr)
    else:
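        # Adam-style update assembled from stepper functions and running gradient statistics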
        opt = StatefulOptimizer(
            model.parameters(), [weight_decay, adam_step],
            stats=[AverageGrad(), AverageSqGrad(),
                   StepCount()],
            lr=0.001,
            wd=1e-2,
            beta1=0.9,
            beta2=0.99,
            eps=1e-6)

    learn = Learn(model, opt, loss_func, data)

    run = Runner(learn, cbs=cbfs)
    run.fit(epoch)
Example #4
import argparse
import json

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config_name', type=str, required=True,
                        help='Configuration name in experiments.conf')
    parser.add_argument('--model_identifier', type=str, required=True,
                        help='Model identifier to load')
    parser.add_argument('--gpu_id', type=int, default=None,
                        help='GPU id; CPU by default')
    parser.add_argument('--seg_len', type=int, default=512)
    parser.add_argument('--jsonlines_path', type=str, default=None,
                        help='Path to custom input from file; input from console by default')
    parser.add_argument('--output_path', type=str, default=None,
                        help='Path to save output')
    args = parser.parse_args()

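    # set up the runner, model, and data processor from the chosen configuration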
    runner = Runner(args.config_name, args.gpu_id)
    model = runner.initialize_model(args.model_identifier)
    data_processor = CorefDataProcessor(runner.config)

    if args.jsonlines_path:
        # Input from file
        with open(args.jsonlines_path, 'r') as f:
            lines = f.readlines()
        docs = [json.loads(line) for line in lines]
        tensor_examples, stored_info = data_processor.get_tensor_examples_from_custom_input(docs)
        predicted_clusters, _, _ = runner.predict(model, tensor_examples)

        if args.output_path:
            with open(args.output_path, 'w') as f:
                for i, doc in enumerate(docs):
                    doc['predicted_clusters'] = predicted_clusters[i]
                    f.write(json.dumps(doc) + '\n')  # write one JSON document per line
Example #5
def test_run_in_ci():
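    # smoke test: the private CI entry point should run without raising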
    r = Runner("docker-compose")
    r._run_in_ci()
Example #6

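# tuned hyperparameters for each component recommender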
user_cf_param = {"knn": 140, "shrink": 0}

item_cf_param = {"knn": 310, "shrink": 0}

cbf_param = {
    "album_knn": 45,
    "album_shrink": 8,
    "artist_knn": 25,
    "artist_shrink": 0,
    "album_weight": 0.85
}

slim_param = {"epochs": 40, "topK": 200}

svd_param = {"n_factors": 2000, "knn": 100}

ALS_param = {"n_factors": 300, "reg": 0.15, "iterations": 30}

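# assemble the weighted hybrid from all component recommenders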
recommender = HybridRecommender(weights_seq=weights_seq,
                                weights_long=weights_long,
                                weights_short=weights_short,
                                user_cf_param=user_cf_param,
                                item_cf_param=item_cf_param,
                                cbf_param=cbf_param,
                                slim_param=slim_param,
                                svd_param=svd_param,
                                ALS_param=ALS_param)
Runner.run(is_test=True, recommender=recommender, split_type=None)
Example #7
from run import Runner

Runner().extract_data_dubizzle()
Example #8
from run import Runner

Runner().update_db()