def run_task(self, fw_spec): prev_dir = get_loc(fw_spec['prev_vasp_dir']) if '$ALL' in self.files: self.files = os.listdir(prev_dir) for file in self.files: prev_filename = last_relax(os.path.join(prev_dir, file)) dest_file = 'POSCAR' if file == 'CONTCAR' and self.use_contcar else file if prev_filename.endswith('.gz'): dest_file += '.gz' print 'COPYING', prev_filename, dest_file if self.missing_CHGCAR_OK and 'CHGCAR' in dest_file and not os.path.exists( zpath(prev_filename)): print 'Skipping missing CHGCAR' else: shutil.copy2(prev_filename, dest_file) if '.gz' in dest_file: # unzip dest file f = gzip.open(dest_file, 'rb') file_content = f.read() with open(dest_file[0:-3], 'wb') as f_out: f_out.writelines(file_content) f.close() os.remove(dest_file) return FWAction(stored_data={'copied_files': self.files})
def run_task(self, fw_spec): prev_dir = get_loc(fw_spec['prev_vasp_dir']) if '$ALL' in self.files: self.files = os.listdir(prev_dir) for file in self.files: prev_filename = last_relax(os.path.join(prev_dir, file)) dest_file = 'POSCAR' if file == 'CONTCAR' and self.use_contcar else file if prev_filename.endswith('.gz'): dest_file += '.gz' print 'COPYING', prev_filename, dest_file if self.missing_CHGCAR_OK and 'CHGCAR' in dest_file and not os.path.exists(zpath(prev_filename)): print 'Skipping missing CHGCAR' else: shutil.copy2(prev_filename, dest_file) if '.gz' in dest_file: # unzip dest file f = gzip.open(dest_file, 'rb') file_content = f.read() with open(dest_file[0:-3], 'wb') as f_out: f_out.writelines(file_content) f.close() os.remove(dest_file) return FWAction(stored_data={'copied_files': self.files})
def detect(self, dir_name):
    """Return {'OUTPUTS_DONT_EXIST'} unless all expected VASP output files
    are present and the first one (OUTCAR) is non-empty; else an empty set.
    """
    names = [last_relax(os.path.join(dir_name, x))
             for x in ['OUTCAR', 'OSZICAR', 'vasprun.xml', 'CHGCAR', 'vasp.out']]
    # generator expression avoids materializing a throwaway list in all();
    # NOTE(review): only names[0] (OUTCAR) is size-checked, matching the
    # original behavior — presumably an empty OUTCAR marks a failed run
    if all(os.path.exists(file_name) for file_name in names) and os.stat(names[0]).st_size > 0:
        return set()
    return set(["OUTPUTS_DONT_EXIST"])
def detect(self, dir_name):
    """Return {'INPUTS_DONT_EXIST'} unless every VASP input file exists and
    is non-empty; else an empty set."""
    names = [last_relax(os.path.join(dir_name, x))
             for x in ['POSCAR', 'INCAR', 'KPOINTS', 'POTCAR']]
    # single pass per file: the exists() check short-circuits, so os.stat is
    # never called on a missing path (the original's separate second pass
    # could raise if a file vanished between the two passes)
    if all(os.path.exists(n) and os.stat(n).st_size > 0 for n in names):
        return set()
    return set(["INPUTS_DONT_EXIST"])
def run_task(self, fw_spec): prev_dir = get_loc(fw_spec['prev_vasp_dir']) if '$ALL' in self.files: self.files = os.listdir(prev_dir) for file in self.files: prev_filename = last_relax(os.path.join(prev_dir, file)) dest_file = 'POSCAR' if file == 'CONTCAR' and self.use_contcar else file print 'COPYING', prev_filename, dest_file shutil.copy2(prev_filename, dest_file) return FWAction(stored_data={'copied_files': self.files})
def run_task(self, fw_spec): prev_dir = get_loc(fw_spec['prev_vasp_dir']) if '$ALL' in self.files: self.files = os.listdir(prev_dir) for file in self.files: prev_filename = last_relax(os.path.join(prev_dir, file)) dest_file = 'POSCAR' if file == 'CONTCAR' and self.use_contcar else file print 'COPYING', prev_filename, dest_file shutil.copy2(prev_filename, dest_file) return FWAction(stored_data={'copied_files': self.files})
def detect(self, dir_name):
    """Scan each configured file for the target strings and return the set
    of signal names whose strings matched (or, when invert_search is set,
    whose strings did NOT match)."""
    found = set()
    for fname in self.filename_list:
        path = os.path.join(dir_name, fname)
        # skip absent files only when the ignore flag is set
        if self.ignore_nonexistent_file and not os.path.exists(path):
            continue
        target_file = last_relax(path)
        # find the strings that match in the file
        hits = string_list_in_file(self.signames_targetstrings.values(),
                                   target_file, ignore_case=self.ignore_case)
        if self.invert_search:
            # report the target strings that were NOT found
            hits = [t for t in self.targetstrings_signames.keys() if t not in hits]
        # map the matched strings back to their signal names
        for hit in hits:
            found.add(self.targetstrings_signames[hit])
    return found
def detect(self, dir_name):
    """Search the configured files for target strings and return the set of
    signal names whose strings matched (inverted when invert_search)."""
    signals = set()
    targets = self.signames_targetstrings.values()
    for name in self.filename_list:
        candidate = os.path.join(dir_name, name)
        skip = self.ignore_nonexistent_file and not os.path.exists(candidate)
        if skip:
            continue
        scanned = last_relax(candidate)
        # which target strings appear in this file?
        matched = string_list_in_file(targets, scanned,
                                      ignore_case=self.ignore_case)
        if self.invert_search:
            # inverted mode: signal on the strings that are absent
            matched = [s for s in self.targetstrings_signames.keys()
                       if s not in matched]
        # translate matched strings into their signal names
        signals.update(self.targetstrings_signames[m] for m in matched)
    return signals
def detect(self, dir_name):
    """Return {'NO_RELAX2'} when last_relax does not resolve vasprun.xml to
    a relax2 file; otherwise return an empty set."""
    resolved = last_relax(os.path.join(dir_name, 'vasprun.xml'))
    if 'relax2' in resolved:
        return set()
    return set(["NO_RELAX2"])
def run_task(self, fw_spec):
    """Assimilate the previous VASP run into the tasks DB via MPVaspDrone.

    On a successful parse, the analysis/output/vasp summaries are handed to
    the next Firework through update_spec.  If the run was unconverged, a
    detour workflow (VASP re-run with the unconverged handler, followed by a
    fresh DB insertion) is returned instead.  Anything else raises
    ValueError so this Firework fizzles.
    """
    # A fizzled parent means this is a post-mortem insertion: the previous
    # dir comes from the fizzled launch and nothing is passed downstream.
    if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec:
        prev_dir = get_loc(fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
        update_spec = {}  # add this later when creating new FW
        fizzled_parent = True
        parse_dos = False
    else:
        prev_dir = get_loc(fw_spec['prev_vasp_dir'])
        update_spec = {'prev_vasp_dir': prev_dir,
                       'prev_task_type': fw_spec['prev_task_type'],
                       'run_tags': fw_spec['run_tags'],
                       'parameters': fw_spec.get('parameters')}
        fizzled_parent = False
        # only Uniform runs have a DOS worth parsing
        parse_dos = 'Uniform' in fw_spec['prev_task_type']
    # propagate run_tags into the DB document, falling back to the fizzled
    # parent's spec when this FW carries none of its own
    if 'run_tags' in fw_spec:
        self.additional_fields['run_tags'] = fw_spec['run_tags']
    else:
        self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][0]['spec']['run_tags']
    # optionally relocate the run dir to the garden filesystem first
    if MOVE_TO_GARDEN_DEV:
        prev_dir = move_to_garden(prev_dir, prod=False)
    elif MOVE_TO_GARDEN_PROD:
        prev_dir = move_to_garden(prev_dir, prod=True)
    # get the directory containing the db file
    db_dir = os.environ['DB_LOC']
    db_path = os.path.join(db_dir, 'tasks_db.json')
    # mirror drone logging to stdout so it shows up in the launcher output
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('MPVaspDrone')
    logger.setLevel(logging.INFO)
    sh = logging.StreamHandler(stream=sys.stdout)
    sh.setLevel(getattr(logging, 'INFO'))
    logger.addHandler(sh)
    with open(db_path) as f:
        db_creds = json.load(f)
    drone = MPVaspDrone(host=db_creds['host'], port=db_creds['port'],
                        database=db_creds['database'],
                        user=db_creds['admin_user'],
                        password=db_creds['admin_password'],
                        collection=db_creds['collection'], parse_dos=parse_dos,
                        additional_fields=self.additional_fields,
                        update_duplicates=self.update_duplicates)
    t_id, d = drone.assimilate(prev_dir,
                               launches_coll=LaunchPad.auto_load().launches)
    # prefer the post-run ("final") SNL/group when the drone produced one
    mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
    snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d['snlgroup_id']
    update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})
    print 'ENTERED task id:', t_id
    stored_data = {'task_id': t_id}
    if d['state'] == 'successful':
        # hand the parsed results to the next Firework
        update_spec['analysis'] = d['analysis']
        update_spec['output'] = d['output']
        update_spec['vasp'] = {'incar': d['calculations'][-1]['input']['incar'],
                               'kpoints': d['calculations'][-1]['input']['kpoints']}
        update_spec["task_id"] = t_id
        return FWAction(stored_data=stored_data, update_spec=update_spec)
    # not successful - first test to see if UnconvergedHandler is needed
    if not fizzled_parent:
        unconverged_tag = 'unconverged_handler--{}'.format(fw_spec['prev_task_type'])
        output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
        ueh = UnconvergedErrorHandler(output_filename=output_dir)
        # TODO: make this a little more flexible
        # detour only once: the tag marks runs that already went through this
        if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
            print 'Unconverged run! Creating dynamic FW...'
            spec = {'prev_vasp_dir': prev_dir,
                    'prev_task_type': fw_spec['task_type'],
                    'mpsnl': mpsnl,
                    'snlgroup_id': snlgroup_id,
                    'task_type': fw_spec['prev_task_type'],
                    'run_tags': list(fw_spec['run_tags']),
                    'parameters': fw_spec.get('parameters'),
                    '_dupefinder': DupeFinderVasp().to_dict(),
                    '_priority': fw_spec['_priority']}
            # Pass elastic tensor spec
            if 'deformation_matrix' in fw_spec.keys():
                spec['deformation_matrix'] = fw_spec['deformation_matrix']
                spec['original_task_id'] = fw_spec['original_task_id']
            snl = StructureNL.from_dict(spec['mpsnl'])
            spec['run_tags'].append(unconverged_tag)
            spec['_queueadapter'] = QA_VASP
            fws = []
            connections = {}
            f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula
            # FW -2: re-run VASP with the unconverged handler enabled
            fws.append(Firework([VaspCopyTask({'files': ['INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CONTCAR'],
                                               'use_CONTCAR': False}),
                                 SetupUnconvergedHandlerTask(),
                                 get_custodian_task(spec)],
                                spec, name=get_slug(f + '--' + spec['task_type']),
                                fw_id=-2))
            spec = {'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True,
                    '_priority': fw_spec['_priority'],
                    '_queueadapter': QA_DB,
                    'run_tags': list(fw_spec['run_tags'])}
            if 'deformation_matrix' in fw_spec.keys():
                spec['deformation_matrix'] = fw_spec['deformation_matrix']
                spec['original_task_id'] = fw_spec['original_task_id']
            spec['run_tags'].append(unconverged_tag)
            # FW -1: insert the re-run's results into the DB
            fws.append(Firework([VaspToDBTask()], spec,
                                name=get_slug(f + '--' + spec['task_type']),
                                fw_id=-1))
            connections[-2] = -1
            wf = Workflow(fws, connections)
            return FWAction(detours=wf)
    # not successful and not due to convergence problem - FIZZLE
    raise ValueError("DB insertion successful, but don't know how to \
fix this Firework! Can't continue with workflow...")
def run_task(self, fw_spec):
    """Assimilate the previous VASP run into the tasks DB via MPVaspDrone.

    On a successful parse the analysis/output summaries are handed to the
    next FireWork through update_spec.  If the run was unconverged, a detour
    workflow (VASP re-run with the unconverged handler, then a fresh DB
    insertion) is returned instead.  Anything else raises ValueError so this
    FireWork fizzles.
    """
    # A fizzled parent means this is a post-mortem insertion: the previous
    # dir comes from the fizzled launch and nothing is passed downstream.
    if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec:
        prev_dir = get_loc(fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
        update_spec = {}  # add this later when creating new FW
        fizzled_parent = True
        parse_dos = False
    else:
        prev_dir = get_loc(fw_spec['prev_vasp_dir'])
        update_spec = {'prev_vasp_dir': prev_dir,
                       'prev_task_type': fw_spec['prev_task_type'],
                       'run_tags': fw_spec['run_tags'],
                       'parameters': fw_spec.get('parameters')}
        fizzled_parent = False
        # only Uniform runs have a DOS worth parsing
        parse_dos = 'Uniform' in fw_spec['prev_task_type']
    # propagate run_tags into the DB document, falling back to the fizzled
    # parent's spec when this FW carries none of its own
    if 'run_tags' in fw_spec:
        self.additional_fields['run_tags'] = fw_spec['run_tags']
    else:
        self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][0]['spec']['run_tags']
    # optionally relocate the run dir to the garden filesystem first
    if MOVE_TO_GARDEN_DEV:
        prev_dir = move_to_garden(prev_dir, prod=False)
    elif MOVE_TO_GARDEN_PROD:
        prev_dir = move_to_garden(prev_dir, prod=True)
    # get the directory containing the db file
    db_dir = os.environ['DB_LOC']
    db_path = os.path.join(db_dir, 'tasks_db.json')
    # mirror drone logging to stdout so it shows up in the launcher output
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('MPVaspDrone')
    logger.setLevel(logging.INFO)
    sh = logging.StreamHandler(stream=sys.stdout)
    sh.setLevel(getattr(logging, 'INFO'))
    logger.addHandler(sh)
    with open(db_path) as f:
        db_creds = json.load(f)
    drone = MPVaspDrone(
        host=db_creds['host'], port=db_creds['port'],
        database=db_creds['database'], user=db_creds['admin_user'],
        password=db_creds['admin_password'],
        collection=db_creds['collection'], parse_dos=parse_dos,
        additional_fields=self.additional_fields,
        update_duplicates=self.update_duplicates)
    t_id, d = drone.assimilate(prev_dir,
                               launches_coll=LaunchPad.auto_load().launches)
    # prefer the post-run ("final") SNL/group when the drone produced one
    mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
    snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d['snlgroup_id']
    update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})
    print 'ENTERED task id:', t_id
    stored_data = {'task_id': t_id}
    if d['state'] == 'successful':
        # hand the parsed results to the next FireWork
        update_spec['analysis'] = d['analysis']
        update_spec['output'] = d['output']
        return FWAction(stored_data=stored_data, update_spec=update_spec)
    # not successful - first test to see if UnconvergedHandler is needed
    if not fizzled_parent:
        unconverged_tag = 'unconverged_handler--{}'.format(fw_spec['prev_task_type'])
        output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
        ueh = UnconvergedErrorHandler(output_filename=output_dir)
        # detour only once: the tag marks runs that already went through this
        if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
            print 'Unconverged run! Creating dynamic FW...'
            spec = {'prev_vasp_dir': prev_dir,
                    'prev_task_type': fw_spec['task_type'],
                    'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id,
                    'task_type': fw_spec['prev_task_type'],
                    'run_tags': list(fw_spec['run_tags']),
                    'parameters': fw_spec.get('parameters'),
                    '_dupefinder': DupeFinderVasp().to_dict(),
                    '_priority': fw_spec['_priority']}
            snl = StructureNL.from_dict(spec['mpsnl'])
            spec['run_tags'].append(unconverged_tag)
            spec['_queueadapter'] = QA_VASP
            fws = []
            connections = {}
            f = Composition.from_formula(
                snl.structure.composition.reduced_formula).alphabetical_formula
            # FW -2: re-run VASP with the unconverged handler enabled
            fws.append(FireWork(
                [VaspCopyTask({'files': ['INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CONTCAR'],
                               'use_CONTCAR': False}),
                 SetupUnconvergedHandlerTask(),
                 get_custodian_task(spec)],
                spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-2))
            spec = {'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True,
                    '_priority': fw_spec['_priority'],
                    '_queueadapter': QA_DB,
                    'run_tags': list(fw_spec['run_tags'])}
            spec['run_tags'].append(unconverged_tag)
            # FW -1: insert the re-run's results into the DB
            fws.append(
                FireWork([VaspToDBTask()], spec,
                         name=get_slug(f + '--' + spec['task_type']), fw_id=-1))
            connections[-2] = -1
            wf = Workflow(fws, connections)
            return FWAction(detours=wf)
    # not successful and not due to convergence problem - FIZZLE
    raise ValueError("DB insertion successful, but don't know how to fix this FireWork! Can't continue with workflow...")
def detect(self, dir_name):
    """Empty set when the resolved vasprun.xml path mentions 'relax2';
    otherwise the single signal {'NO_RELAX2'}."""
    vasprun_path = os.path.join(dir_name, 'vasprun.xml')
    has_relax2 = 'relax2' in last_relax(vasprun_path)
    return set() if has_relax2 else set(["NO_RELAX2"])
def detect(self, dir_name):
    """Return {'OUTPUTS_DONT_EXIST'} unless every expected VASP output file
    is present and the first one (OUTCAR) is non-empty; else an empty set.
    """
    names = [last_relax(os.path.join(dir_name, x))
             for x in ['OUTCAR', 'OSZICAR', 'vasprun.xml', 'CHGCAR', 'vasp.out']]
    # generator expression avoids a throwaway list inside all();
    # NOTE(review): only names[0] (OUTCAR) is size-checked, matching the
    # original behavior — presumably an empty OUTCAR marks a failed run
    if all(os.path.exists(file_name) for file_name in names) and os.stat(names[0]).st_size > 0:
        return set()
    return set(["OUTPUTS_DONT_EXIST"])
def detect(self, dir_name):
    """Return {'INPUTS_DONT_EXIST'} unless all four VASP input files exist
    and are non-empty; else an empty set."""
    names = [last_relax(os.path.join(dir_name, x))
             for x in ['POSCAR', 'INCAR', 'KPOINTS', 'POTCAR']]
    # one short-circuited pass per file: os.stat is never reached for a
    # missing path, unlike the original's separate exists/size passes
    # (which could raise if a file disappeared between them)
    if all(os.path.exists(n) and os.stat(n).st_size > 0 for n in names):
        return set()
    return set(["INPUTS_DONT_EXIST"])