'kwargs': { 'lstm3d_spec': [ (w, 'forward'), ] * N, } } } for N in range(2, 6)]
# NOTE(review): the forward sweep above runs N = 2..5 while the backward and
# bidirectional sweeps below run N = 1..5 -- confirm the N=1 forward case is
# covered elsewhere (e.g. as a baseline outside this view) or deliberately skipped.

# Backward-direction variants: N = 1..5 identical (w, 'backward') layers.
search_space += [{
    'model': {
        'kwargs': {
            'lstm3d_spec': [ (w, 'backward'), ] * N,
        }
    }
} for N in range(1, 6)]

# Bidirectional variants: N = 1..5 identical (w, 'bidirectional') layers.
search_space += [{
    'model': {
        'kwargs': {
            'lstm3d_spec': [ (w, 'bidirectional'), ] * N,
        }
    }
} for N in range(1, 6)]

# Launch the hyperparameter search: each trial passes one search-space entry
# as extra_kwargs on top of the base config. Trial results are persisted to
# trials.db under the configured output directory; overall timeout is
# 10 * 60 * 60 seconds (10 hours).
speval(
    lambda x: create_and_train_model(**config, extra_kwargs=x),
    search_space,
    os.path.join(ROOT_OUTDIR, config['outdir'], "trials.db"),
    timeout=10 * 60 * 60
)
    'kwargs': { 'l': 0.001 },
},
# Learning-rate schedule. The kwargs (monitor/factor/patience/cooldown) look
# like a reduce-LR-on-plateau policy keyed on validation loss -- presumably
# what the project's 'standard' schedule implements; verify against its code.
'schedule': {
    'name': 'standard',
    'kwargs': {
        'monitor': 'val_loss',
        'factor': 0.5,
        'patience': 5,
        'cooldown': 0
    },
},
'seed': 0,                  # RNG seed for reproducibility
'steps_per_epoch': 250,
'test_size': 0.2,           # held-out fraction -- TODO confirm train/test vs train/val split
'weights': None,
# Args:
'vars_mod_png2d': None,
'vars_mod_png3d': None,
'vars_mod_slice': None,
'outdir': 'dune/numu/01_rnne_v1/',
})

# Override/augment concurrency settings from the command line (project helper).
parse_concurrency_cmdargs(config)

# Log training output to <ROOT_OUTDIR>/<outdir>/train.log.
logger = setup_logging(
    log_file=os.path.join(ROOT_OUTDIR, config['outdir'], "train.log"))

# Single training run with the assembled configuration (no hyperparameter sweep here).
create_and_train_model(**config)