import json
import logging
import os
import sys
from datetime import datetime

import rdkit
from rdkit import RDLogger  # noqa: F401 -- makes rdkit.RDLogger accessible below
import torch
from torch.utils.tensorboard import SummaryWriter  # some setups use `from tensorboardX import SummaryWriter`

# NOTE: `LOG` is the project's own logging helper; its import path is
# repo-specific and is not shown in the original source.


def initialize(args):
    """Set up logging, seeding, the TensorBoard writer, and checkpoint dirs for training."""
    current_time = '{:%Y-%m-%d-%H-%M-%S}'.format(datetime.now())  # was undefined here in the original; mirrors the evaluation initializer
    sys.setrecursionlimit(10000)
    # silence RDKit warnings
    lg = rdkit.RDLogger.logger()
    lg.setLevel(rdkit.RDLogger.CRITICAL)
    torch.manual_seed(args.seed)
    torch.cuda.set_device(args.device)
    arg_info = '_%s_LR_%f_HS_%d_RS_%d_AR_%.2f_AI_%d_%s' % (
        args.task_tag, args.lr, args.hidden_size, args.rand_size,
        args.anneal_rate, args.anneal_interval,
        'share_embedding' if args.share_embedding else 'not_share_embedding')
    LOG.init(file_name=current_time + '_' + arg_info)
    logger = logging.getLogger('logger')
    logger.info(args)

    # create the folder for TensorBoard logs
    if not os.path.isdir(args.tensorboard_save_dir):
        os.makedirs(args.tensorboard_save_dir)

    # set up the TensorBoard writer
    train_tb_log_dir = os.path.join(args.tensorboard_save_dir,
                                    current_time + '_' + arg_info + '_train')
    tb_suffix = '_' + arg_info
    train_writer = SummaryWriter(log_dir=train_tb_log_dir, filename_suffix=tb_suffix)

    # create the folder for model checkpoints
    if args.model_save_dir is not None:
        # args.model_save_dir = os.path.join(args.model_save_dir, current_time + '_' + arg_info)
        args.model_save_dir = os.path.join(args.model_save_dir, f'{args.task_tag}')
        if not os.path.isdir(args.model_save_dir):
            os.makedirs(args.model_save_dir)
        # save the model config alongside the checkpoints so they can be reloaded later
        with open(os.path.join(args.model_save_dir, 'model_config.json'), 'w') as f:
            json.dump(
                {
                    'vocab': args.vocab,
                    'hidden_size': args.hidden_size,
                    'rand_size': args.rand_size,
                    'share_embedding': args.share_embedding,
                    'use_molatt': args.use_molatt,
                    'depthT': args.depthT,
                    'depthG': args.depthG
                }, f)
    return logger, train_writer
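# Usage sketch (not part of the original script): a minimal argparse driver that
# supplies every field the training `initialize` reads. The option names match
# the attributes accessed above; every default value is an illustrative assumption.
def _example_train_args():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--task_tag', default='demo')
    parser.add_argument('--lr', type=float, default=1e-3)
    parser.add_argument('--hidden_size', type=int, default=300)
    parser.add_argument('--rand_size', type=int, default=16)
    parser.add_argument('--anneal_rate', type=float, default=0.9)
    parser.add_argument('--anneal_interval', type=int, default=1000)
    parser.add_argument('--share_embedding', action='store_true')
    parser.add_argument('--use_molatt', action='store_true')
    parser.add_argument('--depthT', type=int, default=6)
    parser.add_argument('--depthG', type=int, default=3)
    parser.add_argument('--vocab', default='vocab.txt')
    parser.add_argument('--seed', type=int, default=42)
    parser.add_argument('--device', type=int, default=0)
    parser.add_argument('--tensorboard_save_dir', default='tb_logs')
    parser.add_argument('--model_save_dir', default='checkpoints')
    return parser.parse_args([])  # empty list -> use the defaults above

# logger, train_writer = initialize(_example_train_args())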
# Evaluation-script variant of initialize (the training and evaluation
# initializers live in separate entry scripts; the same imports apply).
def initialize(args):
    """Set up logging and seeding for evaluation; no TensorBoard writer is needed."""
    current_time = '{:%Y-%m-%d-%H-%M-%S}'.format(datetime.now())
    sys.setrecursionlimit(10000)
    # silence RDKit warnings
    lg = rdkit.RDLogger.logger()
    lg.setLevel(rdkit.RDLogger.CRITICAL)
    arg_info = '%s_%s_HS_%d_RS_%d' % (
        args.task_tag, args.metric_type, args.hidden_size, args.rand_size)
    LOG.init(file_name=current_time + '_Evaluation' + '_' + arg_info)
    logger = logging.getLogger('logger')
    logger.info(args)
    torch.cuda.set_device(args.device)
    torch.manual_seed(args.seed)
    return logger
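# Sketch (an assumption, not in the original sources): at evaluation time the
# `model_config.json` written by the training initializer can be read back to
# restore the model hyperparameters. The directory layout and key names match
# what the training-side `initialize` writes above.
import json
import os

def load_model_config(model_save_dir):
    """Load the hyperparameter dict saved next to the model checkpoints."""
    with open(os.path.join(model_save_dir, 'model_config.json')) as f:
        return json.load(f)

# config = load_model_config('checkpoints/demo')  # e.g. args.model_save_dir/task_tag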