Example #1
def benchmarking(self, optd):
    if optd['submit_cluster']:
        # Pickle dictionary so it can be opened by the job to get the parameters
        ample_util.save_amoptd(optd)
        script = benchmark_util.cluster_script(optd)
        workers_util.run_scripts(
            job_scripts=[script],
            monitor=monitor,
            nproc=optd['nproc'],
            job_time=43200,
            job_name='benchmark',
            submit_cluster=optd['submit_cluster'],
            submit_qtype=optd['submit_qtype'],
            submit_queue=optd['submit_queue'],
            submit_pe_lsf=optd['submit_pe_lsf'],
            submit_pe_sge=optd['submit_pe_sge'],
            submit_array=optd['submit_array'],
            submit_max_array=optd['submit_max_array'],
        )
        # queue finished so unpickle results
        optd.update(ample_util.read_amoptd(optd['results_path']))
    else:
        benchmark_util.analyse(optd)
        ample_util.save_amoptd(optd)
    return
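The save_amoptd/read_amoptd helpers above persist the options dictionary as a pickle so the queued job can reload the parameters and the caller can pick up the results afterwards. A minimal standard-library sketch of that round trip (the file name and dictionary keys here are illustrative placeholders, not AMPLE's actual defaults):

import pickle

# Illustrative stand-in for the save_amoptd / read_amoptd round trip:
# dump the options dictionary before submission, reload it afterwards.
optd = {'nproc': 1, 'results_path': 'resultsd.pkl'}  # placeholder contents

with open(optd['results_path'], 'wb') as fh:
    pickle.dump(optd, fh)

with open(optd['results_path'], 'rb') as fh:
    optd.update(pickle.load(fh))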
Example #2
    def benchmarking(self, optd):
        if optd['submit_qtype'] != 'local':
            # Pickle dictionary so it can be opened by the job to get the parameters
            ample_util.save_amoptd(optd)
            script = benchmark_util.cluster_script(optd)
            with TaskFactory(
                    optd['submit_qtype'],
                    script,
                    cwd=optd['work_dir'],
                    environment=optd['submit_pe'],
                    run_time=43200,
                    name='benchmark',
                    nprocesses=optd['nproc'],
                    max_array_size=optd['submit_max_array'],
                    queue=optd['submit_queue'],
                    shell="/bin/bash",
            ) as task:
                task.run()
                task.wait(interval=5, monitor_f=monitor)

            # queue finished so unpickle results
            optd.update(ample_util.read_amoptd(optd['results_path']))
        else:
            benchmark_util.analyse(optd)
            ample_util.save_amoptd(optd)
        return
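This variant drives submission through pyjob's TaskFactory context manager rather than AMPLE's workers_util. A minimal standalone sketch of the same call pattern (the 'local' queue type and the run.sh script are assumptions for illustration; only the run()/wait() calls shown above are relied on):

from pyjob import TaskFactory

# Sketch only: run one pre-written script and block until it finishes.
# 'local' and 'run.sh' are illustrative; the call pattern (context manager,
# run(), wait(interval=...)) mirrors the example above.
script = 'run.sh'

with TaskFactory('local', script, name='benchmark') as task:
    task.run()
    task.wait(interval=5)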
Example #3
def test_benchmark(self):
    # Load a previously pickled AMPLE results dictionary (pickle files must
    # be opened in binary mode)
    pklfile = "/home/jmht/ample-dev1/examples/toxd-example/ROSETTA_MR_0/resultsd.pkl"
    with open(pklfile, 'rb') as f:
        d = pickle.load(f)
    # Point the analysis at a scratch benchmark directory and run it
    bd = "/home/jmht/ample-dev1/python/foo"
    if not os.path.isdir(bd):
        os.mkdir(bd)
    d['benchmark_dir'] = bd
    benchmark_util.analyse(d)
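The test above depends on absolute paths on the original author's machine. A hedged, more portable variant of the same check, writing into a throw-away directory (the plain benchmark_util import and the run_benchmark_analysis helper are assumptions for illustration; the pickle path still has to point at a real AMPLE results dictionary):

import pickle
import tempfile

import benchmark_util  # assumption: importable the same way as in the test above

def run_benchmark_analysis(pklfile):
    # Sketch: re-run benchmark_util.analyse() on a pickled results dictionary,
    # using a throw-away directory instead of a hard-coded one.
    with open(pklfile, 'rb') as f:  # pickle files must be opened in binary mode
        d = pickle.load(f)
    d['benchmark_dir'] = tempfile.mkdtemp(prefix='ample_benchmark_')
    benchmark_util.analyse(d)
    return d['benchmark_dir']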