def test_render_bad_input_file(self):
    print '\n'
    print_message(
        '---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
    render_target = os.path.join(os.getcwd(), 'tests', 'DOES_NOT_EXIST')
    render_output = os.path.join(os.getcwd(), 'tests', 'render_output.txt')
    self.assertFalse(render({}, render_target, render_output))
def test_render_bad_output_file(self):
    print '\n'
    print_message(
        '---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
    render_target = os.path.join(os.getcwd(), 'tests', 'test_render_target.txt')
    render_output = '/usr/local/NO_PERMISSIONS'
    self.assertFalse(render({}, render_target, render_output))
def test_render(self):
    print '\n'
    print_message(
        '---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
    render_target = os.path.join(os.getcwd(), 'tests', 'test_render_target.txt')
    render_reference = os.path.join(os.getcwd(), 'tests', 'test_render_reference.txt')
    render_output = os.path.join(os.getcwd(), 'tests', 'render_output.txt')

    # load the expected output for comparison
    reference = ''
    with open(render_reference, 'r') as fp:
        for line in fp.readlines():
            reference += line

    vals = {'a': 'a', 'b': 'b', 'd': 'd', 'e': 'e'}
    self.assertTrue(render(vals, render_target, render_output))

    with open(render_output, 'r') as fp:
        self.assertTrue(fp.readline() in reference)
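# The three tests above only assume that render(variables, input_path, output_path)
# returns False when the template cannot be read or the output cannot be written,
# and True once the rendered text is on disk. Below is a minimal sketch that
# matches that contract, assuming a Jinja2-style template; the project's real
# helper may use a different substitution syntax, so the name render_sketch and
# the Jinja2 dependency are illustrative only.
from jinja2 import Environment


def render_sketch(variables, input_path, output_path):
    """Hypothetical stand-in for render(): fill input_path with variables."""
    try:
        with open(input_path, 'r') as template_file:
            template = Environment().from_string(template_file.read())
        with open(output_path, 'w') as output_file:
            output_file.write(template.render(**variables))
    except (IOError, OSError):
        # missing template or unwritable output path
        return False
    return True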
def _submit_cmd_to_manager(self, config, cmd):
    """
    Take the job's main cmd, generate a batch script, and submit the
    script to the resource manager controller.

    Parameters:
        cmd (list): the command to submit, as a list of argument strings
        config (dict): the global configuration object
    Returns:
        job_id (int): the job_id from the resource manager
    """
    # setup for the run script
    scripts_path = os.path.join(
        config['global']['project_path'], 'output', 'scripts')
    if self._run_type is not None:
        run_name = '{type}_{run_type}_{start:04d}_{end:04d}_{case}'.format(
            type=self.job_type,
            run_type=self._run_type,
            start=self.start_year,
            end=self.end_year,
            case=self.short_name)
    elif isinstance(self, Diag):
        run_name = '{type}_{start:04d}_{end:04d}_{case}_vs_{comp}'.format(
            type=self.job_type,
            start=self.start_year,
            end=self.end_year,
            case=self.short_name,
            comp=self._short_comp_name)
    else:
        run_name = '{type}_{start:04d}_{end:04d}_{case}'.format(
            type=self.job_type,
            start=self.start_year,
            end=self.end_year,
            case=self.short_name)
    run_script = os.path.join(scripts_path, run_name)
    self._console_output_path = '{}.out'.format(run_script)
    if os.path.exists(run_script):
        os.remove(run_script)

    # pick whichever resource manager is available: Slurm first, then PBS
    try:
        manager = Slurm()
        manager_prefix = '#SBATCH'
        self._manager_args['slurm'].append(
            '-o {}'.format(self._console_output_path))
    except:
        try:
            manager = PBS()
            manager_prefix = '#PBS'
            self._manager_args['pbs'].append(
                '-o {}'.format(self._console_output_path))
            self._manager_args['pbs'].append(
                '-e {}'.format(self._console_output_path.replace('.out', '.err')))
        except:
            raise Exception("No resource manager found")

    # generate the run script using the manager arguments and command
    command = ' '.join(cmd)
    script_prefix = ''
    if isinstance(manager, Slurm):
        margs = self._manager_args['slurm']
    else:
        margs = self._manager_args['pbs']
    for item in margs:
        script_prefix += '{prefix} {value}\n'.format(
            prefix=manager_prefix, value=item)

    with open(run_script, 'w') as batchfile:
        batchfile.write('#!/bin/bash\n')
        batchfile.write(script_prefix)

    template_input_path = os.path.join(
        config['global']['resource_path'], 'env_loader.bash')
    variables = {
        'user_env_path': os.environ['CONDA_PREFIX'],
        'cmd': command
    }
    render(
        variables=variables,
        input_path=template_input_path,
        output_path=run_script)
    # with open(run_script, 'w+') as batchfile:
    #     batchfile.write(command)

    # if this is a dry run, set the status and exit
    if self._dryrun:
        msg = '{}: dryrun is set, completing without running'.format(
            self.msg_prefix())
        logging.info(msg)
        self.status = JobStatus.COMPLETED
        return False
    else:
        if not self.prevalidate():
            return False
        if self.postvalidate(config):
            self.status = JobStatus.COMPLETED
            return True

    # submit the run script to the resource controller
    self._job_id = manager.batch(run_script)
    self._has_been_executed = True
    return self._job_id
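# For illustration only (the arguments and paths below are hypothetical): if
# Slurm is available and self._manager_args['slurm'] holds ['-n 1', '-t 0-02:00'],
# the header assembled for the run script by the loop above would be
#
#   #!/bin/bash
#   #SBATCH -n 1
#   #SBATCH -t 0-02:00
#   #SBATCH -o .../output/scripts/<run_name>.out
#
# with the command itself supplied through the rendered env_loader.bash template.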
    template_input_path = os.path.join(
        config['global']['resource_path'],
        'amwg_template_vs_obs.csh')
else:
    template_input_path = os.path.join(
        config['global']['resource_path'],
        'amwg_template_vs_model.csh')
    variables['cntl_casename'] = self.comparison
    variables['cntl_short_name'] = self._short_comp_name
    variables['cntl_path_history'] = input_path + os.sep
    variables['cntl_path_climo'] = input_path + os.sep

# get environment path to use as NCARG_ROOT
variables['NCARG_ROOT'] = os.environ['CONDA_PREFIX']
render(
    variables=variables,
    input_path=template_input_path,
    output_path=csh_template_out)

if not dryrun:
    self._dryrun = False
    if not self.prevalidate():
        return False
    if self.postvalidate(config):
        self.status = JobStatus.COMPLETED
        return True
else:
    self._dryrun = True
    return

self._change_input_file_names()

# create the run command and submit it
def execute(self, dryrun=False):
    """
    Perform the actual work
    """
    # First check if the job has already been completed
    if self.postvalidate():
        self.status = JobStatus.COMPLETED
        message = 'AMWG job already computed, skipping'
        self.event_list.push(message=message)
        logging.info(message)
        return 0

    # Create directory of regridded climos
    regrid_path = os.path.join(
        os.sep.join(self.config['test_path_diag'].split(os.sep)[:-2]),
        'climo_regrid')
    file_list = get_climo_output_files(
        input_path=regrid_path,
        start_year=self.start_year,
        end_year=self.end_year)
    if not file_list:
        print """ERROR: AMWG: {start:04d}-{end:04d} could not find input climatologies at {path}
did you add ncclimo to this year_set?""".format(
            start=self.start_year,
            end=self.end_year,
            path=regrid_path)
        self.status = JobStatus.FAILED
        return 0
    if not os.path.exists(self.config['test_path_climo']):
        print 'creating temp directory for amwg'
        os.makedirs(self.config['test_path_climo'])
    create_symlink_dir(
        src_dir=regrid_path,
        src_list=file_list,
        dst=self.config['test_path_climo'])

    # Rename the files to the format amwg expects
    for item in os.listdir(self.config['test_path_climo']):
        search = re.search(r'\_\d\d\d\d\d\d\_', item)
        if not search:
            continue
        index = search.start()
        os.rename(
            os.path.join(self.config['test_path_climo'], item),
            os.path.join(self.config['test_path_climo'],
                         item[:index] + '_climo.nc'))

    # render the csh script into the output directory
    self.output_path = self.config['output_path']
    template_out = os.path.join(self.output_path, 'amwg.csh')
    render(
        variables=self.config,
        input_path=self.config.get('template_path'),
        output_path=template_out)

    expected_name = '{type}_{start:04d}-{end:04d}'.format(
        start=self.config.get('start_year'),
        end=self.config.get('end_year'),
        type=self.type)

    # Copy the rendered run script into the scripts directory
    run_script_template_out = os.path.join(
        self.config.get('run_scripts_path'), expected_name)
    copyfile(src=template_out, dst=run_script_template_out)

    # setup sbatch script
    run_script = os.path.join(
        self.config.get('run_scripts_path'), expected_name)
    if os.path.exists(run_script):
        os.remove(run_script)

    self.slurm_args['output_file'] = '-o {output_file}'.format(
        output_file=run_script + '.out')
    cmd = '\ncsh {template}'.format(template=template_out)
    slurm_args_str = [
        '#SBATCH {value}'.format(value=v)
        for k, v in self.slurm_args.items()
    ]
    slurm_prefix = '\n'.join(slurm_args_str)
    with open(run_script, 'w') as batchfile:
        batchfile.write('#!/bin/bash\n')
        batchfile.write(slurm_prefix)
        batchfile.write(cmd)

    if dryrun:
        self.status = JobStatus.COMPLETED
        return 0

    slurm = Slurm()
    print 'submitting to queue {type}: {start:04d}-{end:04d}'.format(
        type=self.type, start=self.start_year, end=self.end_year)
    self.job_id = slurm.batch(run_script, '--oversubscribe')
    status = slurm.showjob(self.job_id)
    self.status = StatusMap[status.get('JobState')]
    message = '{type} id: {id} changed state to {state}'.format(
        type=self.type,
        id=self.job_id,
        state=self.status)
    logging.info(message)
    self.event_list.push(message=message)
    return self.job_id
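# The rename loop above trims each file name at the first _YYYYMM_ style date
# stamp so the result ends in _climo.nc, the layout AMWG expects. Applied to a
# hypothetical file name:
#
#   mycase_000101_001012_climo.nc  ->  mycase_climo.nc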
def execute(self, dryrun=False):
    # Check if the output already exists
    if self.postvalidate():
        self.status = JobStatus.COMPLETED
        message = 'ACME diags already computed, skipping'
        self.event_list.push(message=message)
        logging.info(message)
        return 0

    # render the parameters file
    self.output_path = self.config['output_path']
    template_out = os.path.join(self.output_path, 'params.py')
    variables = {
        'sets': self.config['sets'],
        'backend': self.config['backend'],
        'reference_data_path': self.config['reference_data_path'],
        'test_data_path': self.config['regrided_climo_path'],
        'test_name': self.config['test_name'],
        'seasons': self.config['seasons'],
        'results_dir': self.config['results_dir']
    }
    render(
        variables=variables,
        input_path=self.config.get('template_path'),
        output_path=template_out)

    run_name = '{type}_{start:04d}_{end:04d}'.format(
        start=self.config.get('start_year'),
        end=self.config.get('end_year'),
        type=self.type)
    template_copy = os.path.join(
        self.config.get('run_scripts_path'), run_name)
    copyfile(src=template_out, dst=template_copy)

    # Create directory of regridded climos
    file_list = get_climo_output_files(
        input_path=self.config['regrid_base_path'],
        start_year=self.start_year,
        end_year=self.end_year)
    create_symlink_dir(
        src_dir=self.config['regrid_base_path'],
        src_list=file_list,
        dst=self.config['regrided_climo_path'])

    # setup sbatch script
    run_script = os.path.join(
        self.config.get('run_scripts_path'), run_name)
    if os.path.exists(run_script):
        os.remove(run_script)

    self.slurm_args['output_file'] = '-o {output_file}'.format(
        output_file=run_script + '.out')
    cmd = 'acme_diags_driver.py -p {template}'.format(
        template=template_out)
    slurm_args_str = [
        '#SBATCH {value}\n'.format(value=v)
        for k, v in self.slurm_args.items()
    ]
    slurm_prefix = ''.join(slurm_args_str)
    with open(run_script, 'w') as batchfile:
        batchfile.write('#!/bin/bash\n')
        batchfile.write(slurm_prefix)
        batchfile.write(cmd)

    slurm = Slurm()
    print 'submitting to queue {type}: {start:04d}-{end:04d}'.format(
        type=self.type, start=self.start_year, end=self.end_year)
    self.job_id = slurm.batch(run_script, '--oversubscribe')
    status = slurm.showjob(self.job_id)
    self.status = StatusMap[status.get('JobState')]
    message = '{type} id: {id} changed state to {state}'.format(
        type=self.type,
        id=self.job_id,
        state=self.status)
    logging.info(message)
    self.event_list.push(message=message)
    return self.job_id
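# For reference, the params.py rendered above is handed to acme_diags_driver.py
# with -p; it is assumed to boil down to module-level assignments built from the
# variables dict (all values below are hypothetical):
#
#   sets = ['5']
#   backend = 'vcs'
#   reference_data_path = '/path/to/obs'
#   test_data_path = '/path/to/regridded/climos'
#   test_name = '20180215.v1.case'
#   seasons = ['ANN']
#   results_dir = '/path/to/results'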
if self.comparison == 'obs':
    template_input_path = os.path.join(
        config['global']['resource_path'],
        'e3sm_diags_template_vs_obs.py')
    variables['reference_data_path'] = config['diags']['e3sm_diags']['reference_data_path']
else:
    template_input_path = os.path.join(
        config['global']['resource_path'],
        'e3sm_diags_template_vs_model.py')
    input_path, _ = os.path.split(self._input_file_paths[0])
    variables['reference_data_path'] = input_path
    variables['ref_name'] = self.comparison
    variables['reference_name'] = config['simulations'][self.comparison]['short_name']

render(
    variables=variables,
    input_path=template_input_path,
    output_path=param_template_out)

if not dryrun:
    self._dryrun = False
    if not self.prevalidate():
        return False
    if self.postvalidate(config):
        self.status = JobStatus.COMPLETED
        return True
else:
    self._dryrun = True
    return

# create the run command and submit it
variables = {'parameter_file_path': param_template_out}
def execute(self, dryrun=False):
    """
    Perform the actual work
    """
    # First check if the job has already been completed
    if self.postvalidate():
        self.status = JobStatus.COMPLETED
        message = 'Coupled_diag job already computed, skipping'
        self.event_list.push(message=message)
        return 0

    # create symlinks to the input data
    setup_status = self.setup_input_directory()
    if not setup_status:
        return -1
    elif setup_status == 2:
        return False

    set_string = '{start:04d}_{end:04d}'.format(
        start=self.config.get('start_year'),
        end=self.config.get('end_year'))

    # Setup output directory
    if not os.path.exists(self.config['output_path']):
        os.makedirs(self.config['output_path'])

    # render run template
    template_out = os.path.join(self.output_path, 'run_aprime.bash')
    variables = {
        'output_base_dir': self.output_path,
        'test_casename': self.config['experiment'],
        'test_archive_dir': self.config['input_path'],
        'test_atm_res': self.config['test_atm_res'],
        'test_mpas_mesh_name': self.config['test_mpas_mesh_name'],
        'begin_yr': self.start_year,
        'end_yr': self.end_year
    }
    render(
        variables=variables,
        input_path=self.config['template_path'],
        output_path=template_out)

    # copy the template into the run_scripts directory
    run_name = '{type}_{start:04d}_{end:04d}'.format(
        start=self.start_year,
        end=self.end_year,
        type=self.type)
    template_copy = os.path.join(
        self.config.get('run_scripts_path'), run_name)
    copyfile(src=template_out, dst=template_copy)

    # create the slurm run script
    cmd = 'sh {run_aprime}'.format(run_aprime=template_out)
    run_script = os.path.join(
        self.config.get('run_scripts_path'), run_name)
    if os.path.exists(run_script):
        os.remove(run_script)

    self.slurm_args['out_file'] = '-o {out}'.format(out=run_script + '.out')
    self.slurm_args['working_dir'] = '--workdir {dir}'.format(
        dir=self.config.get('aprime_code_path'))
    slurm_args = [
        '#SBATCH {}'.format(self.slurm_args[s]) for s in self.slurm_args
    ]
    slurm_prefix = '\n'.join(slurm_args) + '\n'
    with open(run_script, 'w') as batchfile:
        batchfile.write('#!/bin/bash\n')
        batchfile.write(slurm_prefix)
        batchfile.write('export OMP_NUM_THREADS=2\n')
        batchfile.write(cmd)

    slurm = Slurm()
    print 'submitting to queue {type}: {start:04d}-{end:04d}'.format(
        type=self.type, start=self.start_year, end=self.end_year)
    self.job_id = slurm.batch(run_script)
    status = slurm.showjob(self.job_id)
    self.status = StatusMap[status.get('JobState')]
    message = "## {job} id: {id} changed status to {status}".format(
        job=self.type,
        id=self.job_id,
        status=self.status)
    logging.info(message)
    return self.job_id
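# For illustration only (paths below are hypothetical): together with any other
# preconfigured slurm_args, the batch script written above is a thin wrapper
# that reduces to
#
#   #!/bin/bash
#   #SBATCH -o <run_scripts_path>/<run_name>.out
#   #SBATCH --workdir <aprime_code_path>
#   export OMP_NUM_THREADS=2
#   sh <output_path>/run_aprime.bash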