def runner(models, learn_options, GP_likelihoods=None, orders=None, WD_kernel_degrees=None, where='local', cluster_user='******', cluster='RR1-N13-09-H44', test=False, exp_name=None, **kwargs):
    """Dispatch a model-comparison run either locally or onto a cluster.

    NOTE(review): this definition is shadowed by a later redefinition of
    `runner` in this file; at import time only the later one is bound.

    Parameters
    ----------
    models, learn_options : model set and learn-options set forwarded to run_models
    GP_likelihoods, orders, WD_kernel_degrees : per-model hyperparameter sweeps
    where : 'local' runs in-process; 'cluster' only stages a job directory
    cluster_user, cluster : cluster submission identity/target
    test : forwarded to run_models
    exp_name : experiment name used when pickling results
    **kwargs : passed through to run_models / cluster_job.create

    Returns
    -------
    local:   (results, all_learn_options, all_metrics, gene_names)
    cluster: (tempdir, clust_filename, user)
    """
    if where == 'cluster':
        import cluster_job

        # create random cluster directory, dump learn options, and create cluster file
        tempdir, user, clust_filename = cluster_job.create(cluster_user, models, orders, WD_kernel_degrees, GP_likelihoods, exp_name=exp_name, learn_options=learn_options, **kwargs)

        # raw_input("Submit job to HPC and press any key when it's finished: ")
        # util.plot_cluster_results(directory=tempdir)

        #stdout = tempdir + r"/stdout"
        #stderr = tempdir + r"/stderr"
        #if not os.path.exists(stdout): os.makedirs(stdout)
        #if not os.path.exists(stderr): os.makedirs(stderr)

        return tempdir, clust_filename, user  #, stdout, stderr

    if where == 'local':
        run_results, merged_learn_options = run_models(models, orders=orders, GP_likelihoods=GP_likelihoods, learn_options_set=learn_options, WD_kernel_degrees=WD_kernel_degrees, test=test, **kwargs)
        metrics_by_model, gene_list = azimuth.util.get_all_metrics(run_results, learn_options)
        azimuth.util.plot_all_metrics(metrics_by_model, gene_list, merged_learn_options, save=True)

        # for non-local (i.e. cluster), the comparable code is in cli_run_model.py
        pickle_runner_results(exp_name, run_results, merged_learn_options)

        return run_results, merged_learn_options, metrics_by_model, gene_list
def runner(models, learn_options, GP_likelihoods=None, orders=None, WD_kernel_degrees=None, where='local', cluster_user='******', cluster='RR1-N13-09-H44', test=False, exp_name=None, **kwargs):
    """Run model comparison locally (train, score, plot, pickle) or stage a cluster job.

    NOTE(review): this redefines `runner` declared earlier in the file and is
    the binding that actually takes effect at import time.

    Parameters
    ----------
    models, learn_options : model set and learn-options set forwarded to run_models
    GP_likelihoods, orders, WD_kernel_degrees : per-model hyperparameter sweeps
    where : 'local' runs in-process; 'cluster' only stages a job directory
    cluster_user, cluster : cluster submission identity/target
    test : forwarded to run_models
    exp_name : experiment name for the results pickle; defaults to the first
        key of `results` when None
    **kwargs : passed through to run_models / cluster_job.create

    Returns
    -------
    local:   (results, all_learn_options, all_metrics, gene_names)
    cluster: (tempdir, clust_filename, user)
    """
    if where == 'local':
        results, all_learn_options = run_models(models, orders=orders, GP_likelihoods=GP_likelihoods, learn_options_set=learn_options, WD_kernel_degrees=WD_kernel_degrees, test=test, **kwargs)
        all_metrics, gene_names = util.get_all_metrics(results, learn_options)
        util.plot_all_metrics(all_metrics, gene_names, all_learn_options, save=True)

        # for non-local (i.e. cluster), the comparable code is in cli_run_model.py

        # results go to <package root>/../results; build the path portably
        # instead of concatenating with hard-coded '/' separators
        abspath = os.path.abspath(__file__)
        dname = os.path.join(os.path.dirname(abspath), "..", "results")
        if not os.path.exists(dname):
            os.makedirs(dname)
            print("Created directory: %s" % str(dname))
        if exp_name is None:
            # next(iter(...)) works on both Python 2 and 3;
            # results.keys()[0] raises TypeError on Python 3
            exp_name = next(iter(results))
        myfile = os.path.join(dname, exp_name + '.pickle')
        with open(myfile, 'wb') as f:
            print("writing results to %s" % myfile)
            # protocol -1 = highest available pickle protocol
            pickle.dump((results, all_learn_options), f, -1)

        return results, all_learn_options, all_metrics, gene_names

    elif where == 'cluster':
        import cluster_job

        # create random cluster directory, dump learn options, and create cluster file
        tempdir, user, clust_filename = cluster_job.create(cluster_user, models, orders, WD_kernel_degrees, GP_likelihoods, exp_name=exp_name, learn_options=learn_options, **kwargs)

        # raw_input("Submit job to HPC and press any key when it's finished: ")
        # util.plot_cluster_results(directory=tempdir)

        #stdout = tempdir + r"/stdout"
        #stderr = tempdir + r"/stderr"
        #if not os.path.exists(stdout): os.makedirs(stdout)
        #if not os.path.exists(stderr): os.makedirs(stderr)

        return tempdir, clust_filename, user  #, stdout, stderr