def init(config, _run):
    """Build a run namespace from ``config``.

    Validates hyperparameters, seeds all RNGs, resolves the activation /
    device / partition settings, and ensures the artifact directory exists.

    Args:
        config: dict of hyperparameters and paths (accessed via dot notation
            afterwards).
        _run: experiment-run object (sacred-style), stashed on the namespace.
            -- assumed; TODO confirm against caller.

    Returns:
        SimpleNamespace with all resolved settings.
    """
    args = SimpleNamespace(**config)
    assertions.validate_hypers(args)
    mlh.seed_all(args.seed)

    args.data_path = assertions.validate_dataset_path(args)

    # Map the activation *name* to a module instance via substring match.
    # 'relu' must be tested before 'elu' because 'elu' is a substring of
    # 'relu'; any unrecognized name falls back to ReLU.
    if args.activation is not None:
        if 'relu' in args.activation:
            args.activation = torch.nn.ReLU()
        elif 'elu' in args.activation:
            args.activation = torch.nn.ELU()
        else:
            args.activation = torch.nn.ReLU()

    args._run = _run

    # Fix: also create missing parent directories (matches the
    # mkdir(parents=True) usage elsewhere in this file). Previously a nested
    # artifact_dir with a missing parent raised FileNotFoundError.
    Path(args.artifact_dir).mkdir(parents=True, exist_ok=True)

    args.loss_name = args.loss

    # Use CUDA only when both requested and actually available; normalize
    # args.cuda to a plain bool reflecting the final decision.
    if args.cuda and torch.cuda.is_available():
        args.device = torch.device('cuda')
        args.cuda = True
    else:
        args.device = torch.device('cpu')
        args.cuda = False

    args.partition_scheduler = updates.get_partition_scheduler(args)
    args.partition = util.get_partition(args)

    args.data_path = Path(args.data_path)

    return args
def init(config):
    """Resolve ``config`` into a dot-accessible run namespace.

    Args:
        config: dict of paths and hyperparameters.

    Returns:
        SimpleNamespace carrying the validated settings plus the loaded
        ``data`` and ``model`` attributes.
    """
    # Dot access to all paths, hyperparameters, etc.
    settings = SimpleNamespace(**config)
    assertions.validate_hypers(settings)

    # Detect the compute device, then seed every RNG.
    settings = mlh.detect_cuda(settings)
    mlh.seed_all(settings.seed)

    # Attach the dataset and the model built from the resolved settings.
    settings.data = data_handler.get_dataset(settings)
    settings.model = model_handler.get_model(settings)

    return settings
def init(config, _run):
    """Turn ``config`` into a run namespace.

    Validates hyperparameters, seeds all RNGs, resolves the activation /
    device / partition settings, and — when checkpointing or recording —
    creates a unique artifact directory with the raw config dumped as JSON.

    Args:
        config: dict of hyperparameters and paths.
        _run: experiment-run object, stashed on the namespace.
            -- assumed; TODO confirm against caller.

    Returns:
        SimpleNamespace with all resolved settings.
    """
    opts = SimpleNamespace(**config)
    assertions.validate_hypers(opts)
    mlh.seed_all(opts.seed)
    opts.data_path = assertions.validate_dataset_path(opts)

    # Resolve the activation name to a module instance via substring match.
    # 'relu' is tested before 'elu' ('elu' is a substring of 'relu');
    # unrecognized names fall back to ReLU.
    if opts.activation is not None:
        act_name = opts.activation
        if 'relu' in act_name:
            opts.activation = torch.nn.ReLU()
        elif 'elu' in act_name:
            opts.activation = torch.nn.ELU()
        else:
            opts.activation = torch.nn.ReLU()

    opts._run = _run
    opts.model_dir = opts.artifact_dir

    # Give the run its own UUID-named directory and save the config JSON
    # there for grepability — only when checkpointing or recording.
    if opts.checkpoint or opts.record:
        run_dir = Path(opts.model_dir) / str(uuid.uuid4())
        run_dir.mkdir(parents=True)
        opts.unique_directory = run_dir
        with open(opts.unique_directory / 'args.json', 'w') as outfile:
            json.dump(dict(config), outfile, indent=4)

    opts.loss_name = opts.loss

    # Use CUDA only when both requested and actually available; normalize
    # opts.cuda to a plain bool reflecting the final decision.
    if opts.cuda and torch.cuda.is_available():
        opts.device, opts.cuda = torch.device('cuda'), True
    else:
        opts.device, opts.cuda = torch.device('cpu'), False

    opts.partition_scheduler = updates.get_partition_scheduler(opts)
    opts.partition = util.get_partition(opts)

    # per_batch is forced off when per_sample is also set; otherwise the
    # value passes through unchanged.
    if opts.per_batch and opts.per_sample:
        opts.per_batch = False

    opts.data_path = Path(opts.data_path)
    return opts