def netHO(trial):
    """Optuna objective for the SMILES grammar-VAE.

    Samples a hyperparameter set, trains a fresh ``smilesGVAE`` on the
    module-level generators, and returns ``1 - jaccScore`` on the held-out
    set ``XTE`` (so Optuna minimizes Jaccard loss).
    """
    hyper = {
        'LATENT': trial.suggest_categorical('LATENT', [40, 70, 100]),
        'nC': trial.suggest_int('nC', 1, 3),
        'nD': trial.suggest_int('nD', 1, 3),
        'gruf': trial.suggest_categorical('gruf', [64, 128, 256, 501]),
        'beta': trial.suggest_categorical('beta', [0.6, 1.0, 2.0]),
        # Single-option categoricals: fixed settings, but declared through the
        # trial so they are recorded alongside the tuned parameters.
        'EPOCH': trial.suggest_categorical('EPOCH', [30]),
        'ngpu': trial.suggest_categorical('ngpu', [4]),
        'opt': trial.suggest_categorical('opt', ['adam']),
    }
    weight_file = f'{pfix}{trial.number}'
    model = smilesGVAE(**hyper)
    model.doFitG(genr, vgenr, weight_file)
    sc = model.evaluate(XTE)
    print(f'Score: {sc}')
    # Objective actually returned is the Jaccard loss, not the score above.
    sc = 1.0 - model.jaccScore(XTE)
    return sc
# NOTE(review): this chunk begins mid-expression — the opening of the
# hyperparameter dict (presumably `params = { ... ,`) and its earlier keys
# are outside the visible source. Tokens below are unchanged.
    'nD': 3, 'beta': 1.0, 'gruf': 501,
    'ngpu': gpus,       # `gpus` is defined elsewhere in the file
    'opt': 'SGD',
    'wFile': None }     # no weight file given — presumably trains from scratch; confirm
# Short learning-rate sweep: 2 epochs scanning LRs from 2e-4 up to 1e-2.
EPO = 2
lr_finder = LRFinder(min_lr=2.0e-4, max_lr=1.0e-2,
                     # assumes k = sample count and batch = batch size — TODO confirm
                     steps_per_epoch=np.ceil(k / batch),
                     epochs=EPO)
sgv = smilesGVAE(**params)
print('Training autoencoder.')
# Train whichever model is active: `mgm` when it exists (looks like a
# multi-GPU wrapper — verify), otherwise the plain autoencoder `aen`.
# fit_generator is the legacy Keras API (removed in TF 2.x's Model.fit era).
if sgv.mgm is None:
    sgv.aen.fit_generator(generator=genr, validation_data=vgenr,
                          use_multiprocessing=False,
                          callbacks=[lr_finder], epochs=EPO)
else:
    sgv.mgm.fit_generator(generator=genr, validation_data=vgenr,
                          use_multiprocessing=False,
                          callbacks=[lr_finder], epochs=EPO)
# Run the hyperparameter search (one trial here) and dump per-trial
# parameters plus the objective value to CSV.
study.optimize(netHO, n_trials=1)
df = study.trials_dataframe()
# NOTE(review): `df['params']` requires the legacy Optuna MultiIndex column
# layout; newer Optuna flattens columns to 'params_<name>' — confirm the
# installed version supports this access.
df2 = df['params'].copy()
df2['bxe'] = df['value']
df2.to_csv(f'data/{pfix}.csv')
# Rebuild the best-performing network and reload its saved weights.
bp = study.best_params
bt = study.best_trial.number
bcn = smilesGVAE(**bp)
fn = f'data/{pfix}{bt}.hdf5'
bcn.loadWeights(fn)
xp = bcn.aen.predict(XTE)
bcn.evaluate(XTE)
# Grab the first file from the best trial's log directory (`fn` is reused
# here, clobbering the weights path above).
logdir = f'logs/{pfix}{bt}/'
fn = getFileList(logdir)
fn = fn[0]