    'seed': 0,
    'steps_per_epoch': 500,
    'target_pdg_iscc_list': [ (12, 1), (14, 1), (16, 1), (0, 0) ],
    'test_size': 200000,
    # Args
    'outdir': 'prod4/02_model_selection/02_layers_post',
})

# Scan over the depth of the post-LSTM dense block: 0 to 9 layers of 128 units
# each. E.g. the entry for layers_post = 2 is
# {'model': {'kwargs': {'layers_post': [128, 128]}}}.
search_space = []

for layers_post in range(10):
    search_space.append({
        'model': {
            'kwargs': {
                'layers_post': [ 128, ] * layers_post,
            }
        },
    })

parse_concurrency_cmdargs(config)

setup_logging(
    logging.DEBUG, os.path.join(ROOT_OUTDIR, config['outdir'], "train.log")
)

# Train one model per search-space entry, with trial bookkeeping kept in trials.db.
speval(
    lambda x: create_and_train_model(**config, extra_kwargs=x),
    search_space,
    os.path.join(ROOT_OUTDIR, config['outdir'], "trials.db"),
    timeout=24 * 60 * 60
)
    },
    'regularizer': {
        'name': 'l1',
        'kwargs': { 'l': 0.0001 },
    },
    'schedule': {
        'name': 'standard',
        'kwargs': {
            'monitor': 'val_loss',
            'factor': 0.5,
            'patience': 5,
            'cooldown': 0
        },
    },
    'seed': 0,
    'steps_per_epoch': 500,
    'target_pdg_iscc_list': [ (12, 1), (14, 1) ],
    'test_size': 200000,
    # Args
    'outdir': 'prod4/01_initial_studies/02_adding_numu',
})

parse_concurrency_cmdargs(config)

setup_logging(
    logging.DEBUG, os.path.join(ROOT_OUTDIR, config['outdir'], "train.log")
)

# Train a single model with this configuration.
create_and_train_model(**config)