def _parse_args():
    """Parse and post-process CLI arguments for the generation script.

    Validates the requested model against ``ALL_MODELS``, resolves a
    pretrained-model path when none is given, normalizes the output
    directory, and shrinks work sizes when ``--debug`` is set.

    Returns:
        argparse.Namespace: the fully validated arguments.

    Raises:
        ValueError: if the model name is unknown, or no pretrained
            weights are available and ``--model_path`` was not supplied.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('model')
    parser.add_argument('--model_path')
    parser.add_argument('-o', '--output-dir', required=True)
    parser.add_argument('-p', '--protein', default='5ht1b')
    parser.add_argument('-d', '--debug', action='store_true')
    parser.add_argument('-n', '--n-molecules', type=int, default=250)
    parser.add_argument('-m', '--mode', default='minimize')
    parser.add_argument('-r', '--random-samples', type=int, default=100)
    parser.add_argument('-f', '--fine-tune-epochs', type=int, default=5)
    parser.add_argument('--dataset', default='default')
    parser.add_argument('--n-cpu', type=int, default=4)
    args = parser.parse_args()

    # Global side effects first: logging verbosity, Keras CPU count,
    # and silencing RDKit's noisy default logger.
    setup_and_get_logger(args.debug)
    scripting.set_keras_cores(args.n_cpu)
    disable_rdkit_logging()

    if args.model not in ALL_MODELS:
        message = f'No model named {args.model}'
        logger.error(message)
        raise ValueError(message)

    if args.model_path is None:
        model_spec = ALL_MODELS[args.model]
        if 'pretrained' not in model_spec:
            raise ValueError(f'No pretrained {args.model} model delivered. '
                             'Provide the path to pretrained model.')
        # Fall back to the pretrained weights shipped with the package.
        args.model_path = model_spec['pretrained']

    args.output_dir = os.path.realpath(args.output_dir)

    if args.debug:
        # Debug mode: minimal workload for a quick end-to-end run.
        args.n_molecules = 2
        args.fine_tune_epochs = 0
        args.random_samples = 2
        logger.debug(
            'Arguments updated due to debug mode on, current args: %s',
            str(args))

    return args
'mse', # loss function to use with property prediction error for regression tasks 'logit_prop_pred_loss': 'binary_crossentropy', # loss function to use with property prediction for logistic tasks 'prop_pred_loss_weight': 0.5, 'prop_pred_dropout': 0.0, 'prop_batchnorm': True, # print output parameters "verbose_print": 0, 'MAX_LEN': 200, 'RAND_SEED': 0, 'PADDING': 'right' } logger = setup_and_get_logger(name=__name__) def _log_loaded_params(params): logger.info('CVAE overwritten hyper-parameters:') for key, value in params.items(): logger.info('{:25s} - {:12}'.format(key, str(value))) def load_params(param_file=None, verbose=True): if param_file is None: return deepcopy(DEFAULT_PARAMETERS) loaded_parameters = json.loads(open(param_file).read(), object_pairs_hook=OrderedDict)
import argparse
import os
from datetime import datetime

from docking_baselines.datasets.loaders import Dataset
from docking_baselines.models.models import ALL_MODELS
from docking_baselines.utils.scripting import set_keras_cores
from docking_benchmark.data.directories import PRETRAINED_MODELS
from docking_benchmark.utils.logging import setup_and_get_logger

# Module-level logger with debug enabled (first positional argument).
logger = setup_and_get_logger(True, __name__)


def _parse_args():
    """Parse CLI arguments for GVAE training.

    Pins the Keras CPU core count and, when ``--save-path`` is omitted,
    derives a timestamped default under ``PRETRAINED_MODELS``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('dataset')
    parser.add_argument('-b', '--batch-size', type=int, default=64)
    parser.add_argument('-d', '--debug', action='store_true')
    parser.add_argument('-e', '--epochs', type=int, default=50)
    parser.add_argument('-m', '--mode', default='generator')
    parser.add_argument('--n-cpu', type=int, default=4)
    parser.add_argument('-s', '--save-path')
    arguments = parser.parse_args()

    set_keras_cores(arguments.n_cpu)

    if arguments.save_path is None:
        # Default save path: gvae-<dataset>-<timestamp> inside the
        # pretrained-models directory, unique per run.
        arguments.save_path = os.path.join(
            PRETRAINED_MODELS,
            'gvae-' + arguments.dataset + '-' + datetime.today().strftime('%Y-%m-%d-%H:%M:%S')
        )