def run_task(self, fw_spec): assert isinstance(self["split"], basestring), self["split"] assert isinstance(fw_spec[self["split"]], list) if isinstance(self["task"]["inputs"], list): assert self["split"] in self["task"]["inputs"] else: assert self["split"] == self["task"]["inputs"] split_field = fw_spec[self["split"]] lensplit = len(split_field) assert lensplit != 0, ("input to split is empty:", self["split"]) nchunks = self.get("number of chunks") if not nchunks: nchunks = lensplit chunklen = lensplit // nchunks if lensplit % nchunks > 0: chunklen = chunklen + 1 chunks = [ split_field[i:i + chunklen] for i in range(0, lensplit, chunklen) ] fireworks = [] for index, chunk in enumerate(chunks): spec = fw_spec.copy() spec[self["split"]] = chunk task = load_object(self["task"]) task["chunk_number"] = index name = f"{self._fw_name} {index}" fireworks.append(Firework(task, spec=spec, name=name)) return FWAction(detours=fireworks)
def exactTasks(self, points):
    """Build a workflow to execute an exactTask for each point."""
    # De-serialise the exact task from dict
    et = load_object(self.exactTask)

    tl = [Firework(ParameterFitting(surrogateModelId=self._id))]
    dep = {}

    e = six.next(six.itervalues(points))
    for i in xrange(len(e)):
        p = {}
        for k in points:
            p[k] = points[k][i]

        for m in self.substituteModels:
            p.update(m.callModel(p))

        t = et
        t['point'] = p

        fw = Firework(t)
        tl.append(fw)
        dep[fw] = tl[0]

    return (tl, dep, tl[0])
def test_modifyincar(self):
    # create an INCAR
    incar = self.ref_incar
    incar.write_file(os.path.join(module_dir, "INCAR"))

    # modify and test
    ft = ModifyIncar({
        "incar_update": {"ISMEAR": 1000},
        "incar_multiply": {"ENCUT": 1.5},
        "incar_dictmod": {"_inc": {"ISPIN": -1}},
    })
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})

    incar_mod = Incar.from_file("INCAR")
    self.assertEqual(incar_mod['ISMEAR'], 1000)
    self.assertEqual(incar_mod['ENCUT'], 780)
    self.assertEqual(incar_mod['ISPIN'], 1)
def test_ioset_explicit(self):
    ft = WriteVaspFromIOSet(
        dict(structure=self.struct_si,
             vasp_input_set=MPRelaxSet(self.struct_si, force_gamma=True)))
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})
    self._verify_files()
def test_serialization(self):
    p = CommonAdapter(
        q_type="PBS",
        q_name="hello",
        template_file=os.path.join(os.path.dirname(__file__), "mypbs.txt"),
        hello="world",
        queue="random",
    )
    p_new = load_object(p.to_dict())
    # Make sure the original and deserialized version both work properly.
    for a in [p, p_new]:
        script = a.get_script_str("here")
        lines = script.split("\n")
        self.assertIn("# world", lines)
        self.assertIn("#PBS -q random", lines)

    p = CommonAdapter(q_type="PBS", q_name="hello", hello="world", queue="random")
    # this uses the default template, which does not have $${hello}
    self.assertNotEqual("# world", p.get_script_str("here").split("\n")[-1])
    self.assertNotIn("_fw_template_file", p.to_dict())
def test_pmgobjects(self):
    mpvis = MPRelaxSet(self.struct_si, force_gamma=True)
    ft = WriteVaspFromPMGObjects({
        "incar": mpvis.incar,
        "poscar": mpvis.poscar,
        "kpoints": mpvis.kpoints,
        "potcar": mpvis.potcar,
    })
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})
    self._verify_files()
def test_serialization_details(self):
    # This detects a weird bug found in early version of serializers
    pbs = CommonAdapter('PBS')
    self.assertTrue(isinstance(pbs, CommonAdapter))
    self.assertTrue(isinstance(self.get_data(pbs.to_dict()), CommonAdapter))
    self.assertTrue(isinstance(load_object(pbs.to_dict()), CommonAdapter))
    self.assertTrue(isinstance(self.get_data(pbs.to_dict()), CommonAdapter))  # repeated test on purpose!
def initialisationFw(self):
    obj = load_object(self.initialisationStrategy)
    return Firework(
        Initialisation(
            surrogateModelId=self._id,
            points=obj.newPoints()
        )
    )
def test_potcar_spec(self):
    ft = WriteVaspFromIOSet(
        structure=self.struct_si,
        vasp_input_set="MPRelaxSet",
        potcar_spec=True,
    )
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})
    self._verify_files(potcar_spec=True)
def run_task(self, fw_spec):
    assert isinstance(self['split'], (basestring, list)), self['split']
    split_list = self['split']
    if isinstance(split_list, basestring):
        split_list = [split_list]

    reflen = 0
    for split in split_list:
        assert isinstance(fw_spec[split], list)
        split_field = fw_spec[split]
        lensplit = len(split_field)

        # update reflen on first iteration
        if reflen == 0:
            assert lensplit != 0, ('input to split is empty:', split)
            reflen = lensplit
            nchunks = self.get('number of chunks')
            if not nchunks:
                nchunks = lensplit
            chunklen = lensplit // nchunks
            if lensplit % nchunks > 0:
                chunklen = chunklen + 1
            chunks = [{split: split_field[i:i + chunklen]}
                      for i in range(0, lensplit, chunklen)]
        else:
            assert lensplit == reflen, ('input lists not of equal length:', split)
            for i in range(0, lensplit, chunklen):
                chunks[i // chunklen].update({split: split_field[i:i + chunklen]})

    fireworks = []
    chunk_index_spec = self.get('chunk index spec')

    # allow for multiple tasks
    task_list = self['task']
    if not isinstance(task_list, list):
        task_list = [task_list]

    for index, chunk in enumerate(chunks):
        spec = fw_spec.copy()
        for split in split_list:
            spec[split] = chunk[split]
        tasks = []
        for task_entry in task_list:
            task = load_object(task_entry)
            task['chunk_number'] = index
            tasks.append(task)
        if chunk_index_spec and isinstance(chunk_index_spec, basestring):
            spec[chunk_index_spec] = index
        name = self._fw_name + ' ' + str(index)
        fireworks.append(Firework(tasks, spec=spec, name=name))
    return FWAction(detours=fireworks)
def from_dict(cls, m_dict):
    """
    Note: The QueueAdapter is loaded based on its fw_name alone.
    See the docs for to_dict() for more details.
    """
    qa_dict = {'_fw_name': m_dict['qa_name']}
    # load the QueueAdapter object dynamically
    qa = load_object(qa_dict)
    return QueueParams(qa, m_dict['params'], m_dict['logging_dir'])
def test_implicit_serialization(self):
    self.assertEqual(
        load_object({"a": {"p1": {"p2": 3}},
                     "_fw_name": "TestSerializer Export Name"}),
        self.obj_4,
        'Implicit import fails!')
def test_ioset_params(self):
    ft = WriteVaspFromIOSet(
        dict(structure=self.struct_si,
             vasp_input_set="MPRelaxSet",
             vasp_input_params={"user_incar_settings": {"ISMEAR": 1000}}))
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})

    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["ISMEAR"], 1000)  # make sure override works
    incar['ISMEAR'] = -5  # switch back to default
    incar.write_file("INCAR")
    self._verify_files(skip_kpoints=True)
def run_task(self, fw_spec):
    self.run_function(fw_spec)
    self.increment_counter(fw_spec)
    if not self.eval_condition(fw_spec):
        firework = Firework(
            tasks=[load_object(task) for task in fw_spec['_tasks']],
            spec=fw_spec,
            name=self._fw_name)
        return FWAction(detours=firework, exit=False)
    else:
        return FWAction(exit=True)
def test_modify_kpoints(self):
    # create a KPOINTS
    kpoints = self.ref_kpoints
    kpoints.write_file("KPOINTS")

    # modify and test
    ft = ModifyKpoints(kpoints_update={"kpts": [[3, 4, 5]]})
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})

    kpoints_mod = Kpoints.from_file("KPOINTS")
    self.assertEqual(kpoints_mod.kpts, [[3, 4, 5]])
def test_modify_potcar(self):
    Potcar(["Si"]).write_file("POTCAR")
    potcar = Potcar.from_file("POTCAR")
    self.assertFalse("alt" in potcar[0].header)

    # modify/test
    ft = ModifyPotcar(potcar_symbols={"Si": "O"})
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})

    new_potcar = Potcar.from_file("POTCAR")
    self.assertEqual(len(new_potcar), 1)
    self.assertEqual(new_potcar[0].symbol, 'O')
def test_ioset_explicit(self):
    for fn in ["POSCAR.gz", "POTCAR.gz", "INCAR.gz"]:
        shutil.copy2(os.path.join(self.vasp_dir, fn), ".")
    ft = WriteLobsterinputfromIO(
        poscar_path="POSCAR.gz",
        potcar_path="POTCAR.gz",
        incar_path="INCAR.gz",
        option="standard",
    )
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})
    self.assertEqual(Lobsterin.from_file("lobsterin"), self.ref_lobsterin)
def test_ioset_settings(self):
    for fn in ["POSCAR.gz", "POTCAR.gz", "INCAR.gz"]:
        shutil.copy2(os.path.join(self.vasp_dir, fn), ".")
    # user supplied lobsterin inputs
    ft = WriteLobsterinputfromIO(
        poscar_path="POSCAR.gz",
        potcar_path="POTCAR.gz",
        incar_path="INCAR.gz",
        option="standard",
        user_lobsterin_settings={"COHPEndEnergy": 10.0},
    )
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})
    self.assertEqual(Lobsterin.from_file("lobsterin"), self.ref_lobsterin2)
def test_modify_incar(self):
    # create an INCAR
    incar = self.ref_incar
    incar.write_file("INCAR")

    # modify and test
    ft = ModifyIncar(
        {"incar_update": {"ISMEAR": 1000},
         "incar_multiply": {"ENCUT": 1.5},
         "incar_dictmod": {"_inc": {"ISPIN": -1}}})
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})

    incar_mod = Incar.from_file("INCAR")
    self.assertEqual(incar_mod['ISMEAR'], 1000)
    self.assertEqual(incar_mod['ENCUT'], 780)
    self.assertEqual(incar_mod['ISPIN'], 1)
def test_modify_incar(self):
    # create an INCAR
    incar = self.ref_incar
    incar.write_file("INCAR")

    # modify and test
    ft = ModifyIncar(
        incar_update={"ISMEAR": 1000},
        incar_multiply={"ENCUT": 1.5},
        incar_dictmod={"_inc": {"ISPIN": -1}},
    )
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})

    incar_mod = Incar.from_file("INCAR")
    self.assertEqual(incar_mod["ISMEAR"], 1000)
    self.assertEqual(incar_mod["ENCUT"], 780)
    self.assertEqual(incar_mod["ISPIN"], 1)
def _steal_launches(self, thief_fw):
    stolen = False
    if thief_fw.state == 'READY' and '_dupefinder' in thief_fw.spec:
        m_dupefinder = load_object(thief_fw.spec['_dupefinder'])
        # get the query that will limit the number of results to check as duplicates
        m_query = m_dupefinder.query(thief_fw.spec)
        m_query['launches'] = {'$ne': []}

        # iterate through all potential duplicates in the DB
        for potential_match in self.fireworks.find(m_query):
            spec1 = dict(thief_fw.to_dict()['spec'])  # defensive copy
            spec2 = dict(potential_match['spec'])  # defensive copy
            if m_dupefinder.verify(spec1, spec2):  # verify the match
                # steal the launches
                victim_fw = self.get_fw_by_id(potential_match['fw_id'])
                thief_launches = [l.launch_id for l in thief_fw.launches]
                valuable_launches = [l for l in victim_fw.launches
                                     if l.launch_id not in thief_launches]
                for launch in valuable_launches:
                    thief_fw.launches.append(launch)
                    stolen = True
    return stolen
def workflow(self, model):
    et = load_object({'_fw_name': '{{modena.Strategy.InitialDataPoints}}'})

    points = self.newPoints()
    e = six.next(six.itervalues(points))
    p = {k: [0] * len(points[k]) for k in points}
    for i in xrange(len(e)):
        for k in points:
            p[k][i] = points[k][i]

    t = et
    t['point'] = p
    t['indices'] = indices
    t['modelId'] = self._id
    fw = Firework(t)

    wf = Workflow2([fw], name='initialising to dataset')
    wf.addAfterAll(model.parameterFittingStrategy().workflow(model))
    return wf
def parameterFittingFwAction(self, model, **kwargs):
    obj = load_object(self.parameterFittingStrategy)
    return obj.newPointsFWAction(model, **kwargs)
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.', reserve=False, strm_lvl='INFO'):
    """
    Submit a single job to the queue.

    :param launchpad: (LaunchPad)
    :param fworker: (FWorker)
    :param qadapter: (QueueAdapterBase)
    :param launcher_dir: (str) The directory where to submit the job
    :param reserve: (bool) Whether to queue in reservation mode
    :param strm_lvl: (str) level at which to stream log messages
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir, stream_level=strm_lvl)

    # get the queue adapter
    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode

    # make sure launch_dir exists:
    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    if launchpad.run_exists(fworker):
        try:
            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            os.chdir(launcher_dir)
            oldlaunch_dir = None

            if '--offline' in qadapter['rocket_launch'] and not reserve:
                raise ValueError("Must use reservation mode (-r option) of qlaunch when using "
                                 "offline mode (--offline option) of rlaunch!!")
            elif reserve:
                l_logger.debug('finding a FW to reserve...')
                fw, launch_id = launchpad._reserve_fw(fworker, launcher_dir)
                if not fw:
                    l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                    return False
                l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                # set job name to the FW name
                job_name = get_slug(fw.name)
                job_name = job_name[0:20] if len(job_name) > 20 else job_name
                qadapter.update({'job_name': job_name})  # set the job name to FW name

                if '_queueadapter' in fw.spec:
                    l_logger.debug('updating queue params using FireWork spec..')
                    qadapter.update(fw.spec['_queueadapter'])

                # update the exe to include the FW_id
                if 'singleshot' not in qadapter.get('rocket_launch', ''):
                    raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')
                qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                if '--offline' in qadapter['rocket_launch']:
                    # handle _launch_dir parameter early...
                    if '_launch_dir' in fw.spec:
                        os.chdir(fw.spec['_launch_dir'])
                        oldlaunch_dir = launcher_dir
                        launcher_dir = os.path.abspath(os.getcwd())
                        launchpad._change_launch_dir(launch_id, launcher_dir)

                    # write FW.json
                    fw.to_file("FW.json")
                    # write Launchid
                    with open('FW_offline.json', 'w') as f:
                        f.write('{"launch_id":%s}' % launch_id)
                    launchpad.add_offline_run(launch_id, fw.fw_id, fw.name)

            # write and submit the queue script using the queue adapter
            l_logger.debug('writing queue script')
            with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                queue_script = qadapter.get_script_str(launcher_dir)
                f.write(queue_script)
            l_logger.info('submitting queue script')
            reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
            if not reservation_id:
                raise RuntimeError('queue script could not be submitted, check queue adapter and queue server status!')
            elif reserve:
                launchpad.set_reservation_id(launch_id, reservation_id)
            return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            return False

        finally:
            if oldlaunch_dir:
                os.chdir(oldlaunch_dir)  # this only matters in --offline mode with _launch_dir!

    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return False
def test_ioset_implicit(self):
    ft = WriteVaspFromIOSet(
        dict(structure=self.struct_si, vasp_input_set="MPRelaxSet"))
    ft = load_object(ft.to_dict())  # simulate database insertion
    ft.run_task({})
    self._verify_files(skip_kpoints=True)
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.', reserve=False, strm_lvl='INFO'):
    """
    Submit a single job to the queue.

    :param launchpad: (LaunchPad)
    :param fworker: (FWorker)
    :param qadapter: (QueueAdapterBase)
    :param launcher_dir: (str) The directory where to submit the job
    :param reserve: (bool) Whether to queue in reservation mode
    :param strm_lvl: (str) level at which to stream log messages
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir, stream_level=strm_lvl)

    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode

    fw, launch_id = None, None  # only needed in reservation mode
    oldlaunch_dir = None  # only needed in --offline mode with _launch_dir option

    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    if '--offline' in qadapter['rocket_launch'] and not reserve:
        raise ValueError("Must use reservation mode (-r option) of qlaunch when using offline option of rlaunch!!")

    if reserve and 'singleshot' not in qadapter.get('rocket_launch', ''):
        raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')

    if launchpad.run_exists(fworker):
        try:
            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            with cd(launcher_dir):
                if reserve:
                    l_logger.debug('finding a FW to reserve...')
                    fw, launch_id = launchpad.reserve_fw(fworker, launcher_dir)
                    if not fw:
                        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                        return False
                    l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                    # update qadapter job_name based on FW name
                    job_name = get_slug(fw.name)[0:20]
                    qadapter.update({'job_name': job_name})

                    if '_queueadapter' in fw.spec:
                        l_logger.debug('updating queue params using Firework spec..')
                        qadapter.update(fw.spec['_queueadapter'])

                    # reservation mode includes --fw_id in rocket launch
                    qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                    if '--offline' in qadapter['rocket_launch']:
                        # handle _launch_dir parameter now b/c we can't call
                        # launchpad.change_launch_dir() later on in offline mode
                        if '_launch_dir' in fw.spec:
                            os.chdir(fw.spec['_launch_dir'])
                            oldlaunch_dir = launcher_dir
                            launcher_dir = os.path.abspath(os.getcwd())
                            launchpad.change_launch_dir(launch_id, launcher_dir)

                        setup_offline_job(launchpad, fw, launch_id)

                l_logger.debug('writing queue script')
                with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                    queue_script = qadapter.get_script_str(launcher_dir)
                    f.write(queue_script)

                l_logger.info('submitting queue script')
                reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
                if not reservation_id:
                    if reserve:
                        l_logger.info('Un-reserving FW with fw_id, launch_id: {}, {}'.format(fw.fw_id, launch_id))
                        launchpad.cancel_reservation(launch_id)
                    raise RuntimeError('queue script could not be submitted, check queue script/queue adapter/queue server status!')
                elif reserve:
                    launchpad.set_reservation_id(launch_id, reservation_id)
            return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            return False

        finally:
            if oldlaunch_dir:
                os.chdir(oldlaunch_dir)  # this only matters in --offline mode with _launch_dir!

    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return False
def outOfBoundsFwAction(self, model, caller, **kwargs):
    obj = load_object(self.outOfBoundsStrategy)
    return obj.newPointsFWAction(model, caller, **kwargs)
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.', reserve=False,
                           strm_lvl='INFO', create_launcher_dir=False):
    """
    Submit a single job to the queue.

    :param launchpad: (LaunchPad)
    :param fworker: (FWorker)
    :param qadapter: (QueueAdapterBase)
    :param launcher_dir: (str) The directory where to submit the job
    :param reserve: (bool) Whether to queue in reservation mode
    :param strm_lvl: (str) level at which to stream log messages
    :param create_launcher_dir: (bool) Whether to create a subfolder launcher+timestamp, if needed
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir, stream_level=strm_lvl)

    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode
    fw, launch_id = None, None  # only needed in reservation mode

    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    if '--offline' in qadapter['rocket_launch'] and not reserve:
        raise ValueError("Must use reservation mode (-r option) of qlaunch when using offline option of rlaunch!!")

    if reserve and 'singleshot' not in qadapter.get('rocket_launch', ''):
        raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')

    if launchpad.run_exists(fworker):
        try:
            if reserve:
                l_logger.debug('finding a FW to reserve...')
                fw, launch_id = launchpad.reserve_fw(fworker, launcher_dir)
                if not fw:
                    l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                    return False
                l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                # update qadapter job_name based on FW name
                job_name = get_slug(fw.name)[0:QUEUE_JOBNAME_MAXLEN]
                qadapter.update({'job_name': job_name})

                if '_queueadapter' in fw.spec:
                    l_logger.debug('updating queue params using Firework spec..')
                    qadapter.update(fw.spec['_queueadapter'])

                # reservation mode includes --fw_id in rocket launch
                qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                # update launcher_dir if _launch_dir is selected in reserved fw
                if '_launch_dir' in fw.spec:
                    fw_launch_dir = os.path.expandvars(fw.spec['_launch_dir'])

                    if not os.path.isabs(fw_launch_dir):
                        fw_launch_dir = os.path.join(launcher_dir, fw_launch_dir)

                    launcher_dir = fw_launch_dir
                    try:
                        os.makedirs(launcher_dir)
                    except OSError as exception:
                        if exception.errno != errno.EEXIST:
                            raise

                    launchpad.change_launch_dir(launch_id, launcher_dir)
                elif create_launcher_dir:
                    # create launcher_dir
                    launcher_dir = create_datestamp_dir(launcher_dir, l_logger, prefix='launcher_')
                    launchpad.change_launch_dir(launch_id, launcher_dir)

            elif create_launcher_dir:
                # create launcher_dir
                launcher_dir = create_datestamp_dir(launcher_dir, l_logger, prefix='launcher_')

            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            with cd(launcher_dir):
                if '--offline' in qadapter['rocket_launch']:
                    setup_offline_job(launchpad, fw, launch_id)

                l_logger.debug('writing queue script')
                with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                    queue_script = qadapter.get_script_str(launcher_dir)
                    f.write(queue_script)

                l_logger.info('submitting queue script')
                reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
                if not reservation_id:
                    if reserve:
                        l_logger.info('Un-reserving FW with fw_id, launch_id: {}, {}'.format(fw.fw_id, launch_id))
                        launchpad.cancel_reservation(launch_id)
                    raise RuntimeError('queue script could not be submitted, check queue script/queue adapter/queue server status!')
                elif reserve:
                    launchpad.set_reservation_id(launch_id, reservation_id)
            return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            return False

    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return False
def test_explicit_serialization(self):
    self.assertEqual(load_object(self.s_dict), self.s_obj)
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.', reserve=False,
                           strm_lvl='INFO', create_launcher_dir=False, fill_mode=False, fw_id=None):
    """
    Submit a single job to the queue.

    Args:
        launchpad (LaunchPad)
        fworker (FWorker)
        qadapter (QueueAdapterBase)
        launcher_dir (str): The directory where to submit the job
        reserve (bool): Whether to queue in reservation mode
        strm_lvl (str): level at which to stream log messages
        create_launcher_dir (bool): Whether to create a subfolder launcher+timestamp, if needed
        fill_mode (bool): whether to submit jobs even when there is nothing to run
            (only in non-reservation mode)
        fw_id (int): specific fw_id to reserve (reservation mode only)
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir, stream_level=strm_lvl)

    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode
    fw, launch_id = None, None  # only needed in reservation mode

    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    if '--offline' in qadapter['rocket_launch'] and not reserve:
        raise ValueError("Must use reservation mode (-r option) of qlaunch "
                         "when using offline option of rlaunch!!")

    if reserve and 'singleshot' not in qadapter.get('rocket_launch', ''):
        raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')

    if fill_mode and reserve:
        raise ValueError("Fill_mode cannot be used in conjunction with reserve mode!")

    if fw_id and not reserve:
        raise ValueError("qlaunch for specific fireworks may only be used in reservation mode.")

    if fill_mode or launchpad.run_exists(fworker):
        launch_id = None
        try:
            if reserve:
                if fw_id:
                    l_logger.debug('finding a FW to reserve...')
                fw, launch_id = launchpad.reserve_fw(fworker, launcher_dir, fw_id=fw_id)
                if not fw:
                    l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                    return False
                l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                # update qadapter job_name based on FW name
                job_name = get_slug(fw.name)[0:QUEUE_JOBNAME_MAXLEN]
                qadapter.update({'job_name': job_name})

                if '_queueadapter' in fw.spec:
                    l_logger.debug('updating queue params using Firework spec..')
                    qadapter.update(fw.spec['_queueadapter'])

                # reservation mode includes --fw_id in rocket launch
                qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                # update launcher_dir if _launch_dir is selected in reserved fw
                if '_launch_dir' in fw.spec:
                    fw_launch_dir = os.path.expandvars(fw.spec['_launch_dir'])

                    if not os.path.isabs(fw_launch_dir):
                        fw_launch_dir = os.path.join(launcher_dir, fw_launch_dir)

                    launcher_dir = fw_launch_dir
                    makedirs_p(launcher_dir)
                    launchpad.change_launch_dir(launch_id, launcher_dir)
                elif create_launcher_dir:
                    # create launcher_dir
                    launcher_dir = create_datestamp_dir(launcher_dir, l_logger, prefix='launcher_')
                    launchpad.change_launch_dir(launch_id, launcher_dir)

            elif create_launcher_dir:
                # create launcher_dir
                launcher_dir = create_datestamp_dir(launcher_dir, l_logger, prefix='launcher_')

            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            with cd(launcher_dir):
                if '--offline' in qadapter['rocket_launch']:
                    setup_offline_job(launchpad, fw, launch_id)

                l_logger.debug('writing queue script')
                with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                    queue_script = qadapter.get_script_str(launcher_dir)
                    f.write(queue_script)

                l_logger.info('submitting queue script')
                reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
                if not reservation_id:
                    raise RuntimeError('queue script could not be submitted, check queue '
                                       'script/queue adapter/queue server status!')
                elif reserve:
                    launchpad.set_reservation_id(launch_id, reservation_id)
            return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            if reserve and launch_id is not None:
                try:
                    l_logger.info('Un-reserving FW with fw_id, launch_id: {}, {}'.format(
                        fw.fw_id, launch_id))
                    launchpad.cancel_reservation(launch_id)
                    launchpad.forget_offline(launch_id)
                except:
                    log_exception(l_logger, 'Error unreserving FW with fw_id {}'.format(fw.fw_id))
            return False

    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return None  # note: this is a hack (rather than False) to indicate a soft failure to rapidfire()