# NOTE(review): this chunk begins mid-statement — the line below closes an
# argparser.add_argument(...) call whose opening lies before this view.
help='Number of tails (1 or 2)')
# Parse known CLI args; unrecognized flags are collected in `unknown` rather
# than raising (parse_known_args semantics).
args, unknown = argparser.parse_known_args()
# Guard: only the two supported comparison metrics are accepted.
# (NOTE(review): `assert` is stripped under `python -O`; an explicit raise
# would be more robust — left unchanged here.)
assert args.metric in ['loss', 'loglik'
    ], 'Metric must be one of ["loss", "loglik"].'
# Pooling across experiments implies ablation-style comparison.
if args.pool:
    args.ablation = True
# Accumulators used later when pooling results across experiments.
ablations = None
basenames_to_pool = None
exps_outdirs = []
# One iteration per experiment config file supplied on the command line.
for path in args.config_paths:
    p = Config(path)
    # Restrict the experiment's model list to those selected via --models.
    models = filter_models(p.model_list, args.models)
    # Keep only CDR models ('DTSR' is the legacy name for CDR — TODO confirm).
    cdr_models = [
        x for x in models
        if (x.startswith('CDR') or x.startswith('DTSR'))
    ]
    partitions = get_partition_list(args.partition)
    # e.g. ['train', 'dev'] -> 'train-dev'; used to name per-partition files.
    partition_str = '-'.join(partitions)
    # Choose the per-model results file matching the requested metric.
    if args.metric == 'loss':
        file_name = 'losses_mse_%s.txt' % partition_str
    else:
        file_name = 'loglik_%s.txt' % partition_str
    # Two-step (LM-based) evaluation writes to a distinct file prefix.
    if args.twostep:
        file_name = 'LM_2STEP_' + file_name
    # NOTE(review): body of this conditional continues past this chunk.
    if args.ablation:
# NOTE(review): this chunk begins mid-statement — the string below is the
# help text closing an argparser.add_argument(...) call opened before this view.
'Fit ablated models to data convolved using the ablated model. Otherwise fits ablated models to data convolved using the full model.'
    )
# --force: retrain from scratch instead of skipping already-fitted models.
argparser.add_argument(
    '-f',
    '--force',
    action='store_true',
    help=
    'Refit and overwrite any previously trained models. Otherwise, previously trained models are skipped.'
    )
# Parse known CLI args; unrecognized flags land in `unknown` (no error).
args, unknown = argparser.parse_known_args()
# One iteration per experiment config file supplied on the command line.
for path in args.config_paths:
    p = Config(path)
    # Restrict to the selected CDR models (cdr_only=True pre-filters).
    models = filter_models(p.model_list, args.models, cdr_only=True)
    # Re-filter by name prefix ('DTSR' is the legacy name for CDR —
    # TODO confirm; appears redundant with cdr_only=True above).
    models = [
        x for x in models
        if (x.startswith('CDR') or x.startswith('DTSR'))
    ]
    partitions = get_partition_list(args.partition)
    # e.g. ['train', 'dev'] -> 'train-dev'; used to name per-partition files.
    partition_str = '-'.join(partitions)
    for m in models:
        # Each model's artifacts live in <outdir>/<model_name>.
        dir_path = p.outdir + '/' + m
        if args.ablated_models:
            # Use convolved predictors produced by the ablated model itself.
            data_path = dir_path + '/X_conv_' + partition_str + '.csv'
        else:
            # Use convolved predictors from the full (unablated) model:
            # the '!' separator presumably marks ablated predictors in the
            # model name, so splitting on it recovers the base model name
            # — TODO confirm naming convention.
            data_path = p.outdir + '/' + m.split(
                '!')[0] + '/X_conv_' + partition_str + '.csv'