Example #1
File: mdtasks.py Project: nwinner/mpmorph
    def run_task(self, fw_spec):
        default_list = [
            "INCAR", "POSCAR", "CONTCAR", "OUTCAR", "POTCAR", "vasprun.xml",
            "XDATCAR", "OSZICAR", "DOSCAR"
        ]
        files = self.get("files", default_list)
        calc_home = self["calc_home"]
        run_name = self["run_name"]
        target_dir = os.path.join(calc_home, run_name)
        if not os.path.exists(target_dir):
            os.mkdir(target_dir)
        for f in files:
            try:
                shutil.copy2(f, target_dir)
            except Exception:
                # skip files that are missing or unreadable
                pass
        return FWAction()
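For orientation, each run_task in these examples belongs to a FiretaskBase subclass that gets wrapped in a Firework and submitted to a LaunchPad. Below is a minimal sketch of that wiring using standard FireWorks calls; CopyOutputsTask is a hypothetical stand-in for the unnamed class that owns the method in Example #1, and the launchpad configuration is assumed to be the usual my_launchpad.yaml.

from fireworks import FiretaskBase, Firework, FWAction, LaunchPad, Workflow
from fireworks.utilities.fw_utilities import explicit_serialize


@explicit_serialize
class CopyOutputsTask(FiretaskBase):
    """Hypothetical stand-in for the class that defines run_task in Example #1."""
    required_params = ["calc_home", "run_name"]
    optional_params = ["files"]

    def run_task(self, fw_spec):
        ...  # body as in Example #1
        return FWAction()


copy_fw = Firework(
    [CopyOutputsTask(calc_home="/path/to/archive", run_name="run_0")],
    name="copy_outputs",
)
lp = LaunchPad.auto_load()       # reads the standard my_launchpad.yaml
lp.add_wf(Workflow([copy_fw]))   # execute later with rlaunch / qlaunch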
Example #2
    def run_task(self, fw_spec):

        #create structure from CONTCAR
        struct = Poscar.from_file('CONTCAR').structure

        #repeat layers of bulk to create supercell
        struct.make_supercell([1, 1, self.get("num_layers", 2)])

        #add vacuum to create slab
        struct = add_vacuum(struct, self.get("vacuum", 15))

        #add selective dynamics
        selective_dynamics = []
        """
        min_bulk = self.get("surf_layers_to_relax",3)/(self.get("atomic_thickness")*self.get("num_layers",2)) * max([site.z for site in struct.sites])
        
        max_bulk = (self.get("atomic_thickness")*self.get("num_layers",2) - self.get("surf_layers_to_relax",3))/(self.get("atomic_thickness")*self.get("num_layers",2)) * max([site.z for site in struct.sites])
                     
        for site in struct.sites:
            if site.z > min_bulk and site.z <= max_bulk:
                selective_dynamics.append([False, False, False])
            else:
                selective_dynamics.append([True, True, True])
        struct.add_site_property("selective_dynamics", selective_dynamics)
        """
        # create optimize and static fireworks using the newly created slab
        slab_optimize_fw = OptimizeFW(
            struct,
            name=name + '_slab_optimization' + time,
            vasp_cmd=">>vasp_cmd<<",
            db_file=">>db_file<<",
            parents=[bulk_optimize])

        # wrap the optimization FW in a Workflow so its INCAR can be modified
        slab_optimize = Workflow([slab_optimize_fw])
        optimize_incar_settings = {"ISIF": 2}
        optimize_update = {"incar_update": optimize_incar_settings}
        slab_optimize = add_modify_incar(slab_optimize,
                                         modify_incar_params=optimize_update,
                                         fw_name_constraint='optimization')

        slab_static = StaticFW(struct,
                               name=name + '_slab_static_' + time,
                               parents=[slab_optimize_fw],
                               prev_calc_loc=True,
                               vasp_cmd=">>vasp_cmd<<",
                               db_file=">>db_file<<")
        return FWAction(additions=[slab_optimize, slab_static])
Example #3
    def run_task(self, fw_spec):
        # get the database connection
        db_file = env_chk(self["db_file"], fw_spec)
        mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
        mmdb.collection = mmdb.db["approx_neb"]

        wf_uuid = self["approx_neb_wf_uuid"]
        fields_to_pull = self["fields_to_pull"]

        # pulls desired fields from approx_neb collection and stores in pulled_fields
        pulled_doc = mmdb.collection.find_one({"wf_uuid": wf_uuid})
        pulled_fields = dict()
        for key in fields_to_pull.keys():
            pulled_fields[key] = get(pulled_doc, fields_to_pull[key])

        # update fw_spec with pulled fields (labeled according to the keys of fields_to_pull)
        return FWAction(update_spec=pulled_fields)
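The snippet does not show the shape of fields_to_pull or where its get helper comes from; it maps fw_spec keys to paths inside the approx_neb document. A small illustration under the assumption that get does dotted-path lookups in the style of pydash.get (both the document and the mapping below are invented):

from pydash import get  # assumption: the task's get() behaves like pydash.get

# Invented document and mapping, for illustration only.
pulled_doc = {
    "wf_uuid": "some-uuid",
    "host": {"structure": {"lattice": {}, "sites": []}},
    "end_points": [{"index": 0}, {"index": 1}],
}
fields_to_pull = {
    "host_structure": "host.structure",  # fw_spec key -> path in the document
    "end_points": "end_points",
}

pulled_fields = {key: get(pulled_doc, path) for key, path in fields_to_pull.items()}
# pulled_fields == {"host_structure": {"lattice": {}, "sites": []},
#                   "end_points": [{"index": 0}, {"index": 1}]}
# FWAction(update_spec=pulled_fields) then merges these keys into the child fw_spec.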
Example #4
    def run_task(self, fw_spec):
        lammps_input = self["lammps_input"]
        diffusion_params = self.get("diffusion_params", {})

        # get the directory that contains the LAMMPS dir to parse
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"], fw_spec["calc_locs"])["path"]

        # parse the directory
        logger.info("PARSING DIRECTORY: {}".format(calc_dir))
        d = {}
        d["dir_name"] = os.path.abspath(os.getcwd())
        d["last_updated"] = datetime.today()
        d["input"] = lammps_input.as_dict()
        log_file = lammps_input.config_dict["log"]
        if isinstance(lammps_input.config_dict["dump"], list):
            dump_file = lammps_input.config_dict["dump"][0].split()[4]
        else:
            dump_file = lammps_input.config_dict["dump"].split()[4]
        is_forcefield = hasattr(lammps_input.lammps_data, "bonds_data")
        lammpsrun = LammpsRun(lammps_input.data_filename, dump_file, log_file,
                              is_forcefield=is_forcefield)
        d["natoms"] = lammpsrun.natoms
        d["nmols"] = lammpsrun.nmols
        d["box_lengths"] = lammpsrun.box_lengths
        d["mol_masses"] = lammpsrun.mol_masses
        d["mol_config"] = lammpsrun.mol_config
        if diffusion_params:
            diffusion_analyzer = lammpsrun.get_diffusion_analyzer(**diffusion_params)
            d["analysis"] = {"diffusion": diffusion_analyzer.get_summary_dict()}
        db_file = env_chk(self.get('db_file'), fw_spec)

        # db insertion
        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            mmdb = LammpsCalcDb.from_db_file(db_file)
            # insert the task document
            t_id = mmdb.insert(d)
            logger.info("Finished parsing with task_id: {}".format(t_id))
        return FWAction(stored_data={"task_id": d.get("task_id", None)})
Example #5
    def run_task(self, fw_spec):

        # get the directory that contains the LAMMPS run to parse
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"],
                                    fw_spec["calc_locs"])["path"]

        # parse the directory
        logger.info(f"PARSING DIRECTORY: {calc_dir}")

        drone = LammpsDrone(
            additional_fields=self.get("additional_fields"),
            diffusion_params=self.get("diffusion_params", None),
        )

        task_doc = drone.assimilate(
            calc_dir,
            input_filename=self["input_filename"],
            log_filename=self.get("log_filename", "log.lammps"),
            is_forcefield=self.get("is_forcefield", False),
            data_filename=self.get("data_filename", None),
            dump_files=self.get("dump_filenames", None),
        )

        # Check for additional keys to set based on the fw_spec
        if self.get("fw_spec_field"):
            task_doc.update(fw_spec[self.get("fw_spec_field")])

        db_file = env_chk(self.get("db_file"), fw_spec)

        # db insertion
        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
        else:
            mmdb = LammpsCalcDb.from_db_file(db_file)
            # insert the task document
            t_id = mmdb.insert(task_doc)
            logger.info(f"Finished parsing with task_id: {t_id}")

        return FWAction(stored_data={"task_id": task_doc.get("task_id", None)})
Example #6
File: ftasks.py Project: kcbhamu/dfttk
    def run_task(self, fw_spec):
        # unrelaxed cell
        cell = Structure.from_file('POSCAR')
        cell.to(filename='str.out', fmt='mcsqs')

        # relaxed cell
        cell = Structure.from_file('CONTCAR')
        cell.to(filename='str_relax.out', fmt='mcsqs')

        # check the symmetry
        out = subprocess.run(['checkrelax', '-1'], stdout=subprocess.PIPE)
        relaxation = float(out.stdout)

        # if we relaxed too much, add a volume relax and inflection detection WF as a detour
        if relaxation > self['tolerance']:
            from dfttk.fworks import OptimizeFW, InflectionDetectionFW
            from fireworks import Workflow
            from dfttk.input_sets import RelaxSet
            from dfttk.utils import add_modify_incar_by_FWname, add_modify_kpoints_by_FWname

            fws = []
            vis = RelaxSet(self.get('structure'), volume_relax=True)
            vol_relax_fw = OptimizeFW(
                self.get('structure'), symmetry_tolerance=None,
                job_type='normal', name='Volume relax',  # record_path=True,
                vasp_input_set=vis, modify_incar={'ISIF': 7},
                vasp_cmd=self.get('vasp_cmd'), db_file=self.get('db_file'),
                metadata=self.get('metadata'), run_isif2=self.get('run_isif2'),
                pass_isif4=self.get('pass_isif4'))
            fws.append(vol_relax_fw)

            modify_incar_params = self.get('modify_incar_params')
            modify_kpoints_params = self.get('modify_kpoints_params')

            # we have to add the calc locs for this calculation by hand
            # because the detour action seems to disable spec mods
            fws.append(InflectionDetectionFW(self.get('structure'), parents=[vol_relax_fw],
                                             run_isif2=self.get('run_isif2'), pass_isif4=self.get('pass_isif4'),
                                             metadata=self.get('metadata'), db_file=self.get('db_file'),
                                             spec={'calc_locs': extend_calc_locs(self.get('name', 'Full relax'), fw_spec)}))
            infdet_wf = Workflow(fws)
            add_modify_incar_by_FWname(infdet_wf, modify_incar_params=modify_incar_params)
            add_modify_kpoints_by_FWname(infdet_wf, modify_kpoints_params=modify_kpoints_params)
            return FWAction(detours=[infdet_wf])
Example #7
    def run_task(self, fw_spec):
        """Run the job and handle any dynamic firework submissions."""
        from jobflow import SETTINGS, initialize_logger
        from jobflow.core.job import Job

        job: Job = self.get("job")
        store = self.get("store")

        if store is None:
            store = SETTINGS.JOB_STORE
        store.connect()

        if hasattr(self, "fw_id"):
            job.metadata.update({"fw_id": self.fw_id})

        initialize_logger()
        response = job.run(store=store)

        detours = None
        additions = None
        if response.replace is not None:
            # create a workflow from the replacement flow; be sure to use the original store
            detours = [flow_to_workflow(response.replace, self.get("store"))]

        if response.addition is not None:
            additions = [
                flow_to_workflow(response.addition, self.get("store"))
            ]

        if response.detour is not None:
            detour_wf = flow_to_workflow(response.detour, self.get("store"))
            if detours is not None:
                detours.append(detour_wf)
            else:
                detours = [detour_wf]

        fwa = FWAction(
            stored_data=response.stored_data,
            detours=detours,
            additions=additions,
            defuse_workflow=response.stop_jobflow,
            defuse_children=response.stop_children,
        )
        return fwa
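The flow_to_workflow calls above come from jobflow's FireWorks manager. As a minimal, self-contained illustration of what they produce, a toy @job function (standing in for a real calculation) converts into a FireWorks Workflow with one Firework per Job:

from jobflow import Flow, job
from jobflow.managers.fireworks import flow_to_workflow


@job
def add(a, b):
    """Toy jobflow job used only for illustration."""
    return a + b


flow = Flow([add(1, 2)], name="toy_flow")
wf = flow_to_workflow(flow)  # a fireworks Workflow with one Firework per Job
# A LaunchPad would then accept it like any other workflow:
# from fireworks import LaunchPad; LaunchPad.auto_load().add_wf(wf)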
Example #8
    def run_task(self, fw_spec):
        vrun, _ = get_vasprun_outcar(
            self.get("calc_dir", "."), parse_dos=False, parse_eigen=True)
        epsilon_static = vrun.epsilon_static
        epsilon_dict = {
            "mode": self["mode"],
            "displacement": self["displacement"],
            "epsilon": epsilon_static
        }
        key = 'raman_epsilon->{}_{}'.format(
            str(self["mode"]),
            str(self["displacement"]).replace("-", "m").replace(".", "d"))
        return FWAction(mod_spec=[{'_set': {key: epsilon_dict}}])
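The mod_spec entry above uses FireWorks' DictMod language, where '->' addresses nested keys, so each (mode, displacement) epsilon lands under raman_epsilon in the children's spec. A small standalone check of that behavior with made-up values, using the apply_mod helper that FireWorks itself uses when applying spec modifications:

from fireworks.utilities.dict_mods import apply_mod

child_spec = {"raman_epsilon": {}}

# Same shape as the mod built above for mode=1, displacement=-0.005;
# "-" and "." in the displacement are rewritten to "m" and "d" so the value
# can serve as a plain dictionary/Mongo key.
mod = {"_set": {"raman_epsilon->1_m0d005": {"mode": 1,
                                            "displacement": -0.005,
                                            "epsilon": [[0.0] * 3 for _ in range(3)]}}}

apply_mod(mod, child_spec)
print(child_spec["raman_epsilon"]["1_m0d005"]["displacement"])  # -0.005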
Example #9
    def run_task(self, fw_spec):
        # get the directory that contains the VASP dir to parse
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"], fw_spec["calc_locs"])["path"]

        # parse the VASP directory
        logger.info("PARSING DIRECTORY: {}".format(calc_dir))

        drone = VaspDrone(additional_fields=self.get("additional_fields"),
                          parse_dos=self.get("parse_dos", False), compress_dos=1,
                          bandstructure_mode=self.get("bandstructure_mode", False), compress_bs=1)

        # assimilate (i.e., parse)
        task_doc = drone.assimilate(calc_dir)

        # Check for additional keys to set based on the fw_spec
        if self.get("fw_spec_field"):
            task_doc.update(fw_spec[self.get("fw_spec_field")])

        # get the database connection
        db_file = env_chk(self.get('db_file'), fw_spec)

        # db insertion or taskdoc dump
        if not db_file:
            with open("task.json", "w") as f:
                f.write(json.dumps(task_doc, default=DATETIME_HANDLER))
        else:
            mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
            t_id = mmdb.insert_task(task_doc,
                                    parse_dos=self.get("parse_dos", False),
                                    parse_bs=bool(self.get("bandstructure_mode", False)))
            logger.info("Finished parsing with task_id: {}".format(t_id))

        if self.get("defuse_unsuccessful", True):
            defuse_children = (task_doc["state"] != "successful")
        else:
            defuse_children = False

        return FWAction(stored_data={"task_id": task_doc.get("task_id", None)},
                        defuse_children=defuse_children)
Example #10
    def run_task(self, fw_spec):
        logging.debug(fw_spec)
        calc_ids = fw_spec["temp"]["calc_ids"]
        descmatrix = read_descmatrix(fw_spec)
        logging.info("DESCRIPTOR matrix attributes")
        logging.info(descmatrix.shape)
        logging.info(np.sum(descmatrix))
        fps_ranking = cluskit.cluster._rank_fps(descmatrix, K=None, greedy=False)

        reordered_calc_ids = np.array(calc_ids)[fps_ranking]
        reordered_descmatrix = descmatrix[fps_ranking]
        update_spec = fw_spec
        update_spec["temp"]["fps_ranking"] = fps_ranking.tolist()
        update_spec["temp"]["calc_ids"] = reordered_calc_ids.tolist()
        update_spec["temp"]["descmatrix"] = write_descmatrix(reordered_descmatrix)
        update_spec.pop("_category")
        update_spec.pop("name")

        return FWAction(update_spec=update_spec)
Example #11
    def run_task(self, fw_spec):
        x = fw_spec['_x']
        fin_len = x[0]
        fin_angle = x[1]
        fin_type = x[2]
        cost = (14.1 * fin_len**1.847 + 12.0 + fin_angle * 100.0) / 1000.0
        drag = fin_angle**0.653 * float(fin_len)**1.2
        failure_prob = 0.5 - fin_len / 290 + (fin_angle**2.0) / 16200
        if fin_type == "shark fin":
            cost = cost * 1.05
            drag = drag * 1.15
            failure_prob = failure_prob * 0.75
        elif fin_type == "dolphin fin":
            cost = cost * 1.6
            drag = drag * 0.84
            failure_prob = failure_prob * 1.75
        return FWAction(update_spec={
            '_y': [cost, drag, failure_prob],
            '_x': x
        })
Example #12
    def run_task(self, fw_spec):
        pressure_volume = fw_spec.get('pressure_volume', [])

        # get volume
        osw = list(os.walk("."))[0]
        files = []
        for file_name in osw[2]:
            if "CONTCAR" in file_name:
                files.append(file_name)
        _poscar = Poscar.from_file(filename=files[-1], check_for_POTCAR=True, read_velocities=True)
        volume = _poscar.structure.volume

        # get pressure
        search_keys = ['external']
        outcar_data = md_data.get_MD_data("./OUTCAR.gz", search_keys=search_keys)

        _data = np.transpose(outcar_data)[0]
        pressure = np.mean(_data[int(0.5 * (len(_data) - 1)):])

        pressure_volume.append((volume, pressure))
        return FWAction(mod_spec=[{'_push_all': {'pressure_volume': pressure_volume}}])
Example #13
    def run_task(self, fw_spec):
        # if a molecule is being passed through fw_spec
        if fw_spec.get("prev_calc_molecule"):
            start_mol = fw_spec.get("prev_calc_molecule")
        # if a molecule is passed in as an optional parameter
        elif self.get("molecule"):
            start_mol = self.get("molecule")
        else:
            raise KeyError(
                "No molecule present, add as an optional param or check fw_spec"
            )

        babe_mol = BabelMolAdaptor(start_mol).openbabel_mol
        babe_mol.SetTorsion(self["atom_indexes"][0], self["atom_indexes"][1],
                            self["atom_indexes"][2], self["atom_indexes"][3],
                            (self["angle"] * np.pi / 180.))
        rotated_mol = BabelMolAdaptor(babe_mol).pymatgen_mol

        # update the fw_spec with the rotated geometry
        update_spec = {"prev_calc_molecule": rotated_mol}

        return FWAction(update_spec=update_spec)
Example #14
File: mdtasks.py Project: nwinner/mpmorph
    def run_task(self, fw_spec):

        spawn_type = self.get('spawn_type')
        spawn_number = self.get('spawn_number')

        wall_time = self.get('wall_time', 19200)
        vasp_cmd = self.get('vasp_cmd', ">>vasp_cmd<<")
        num_checkpoints = self.get('num_checkpoints', 1)

        fws = []
        for i in range(spawn_number):
            t = []
            t.append(
                ProductionSpawnTask(wall_time=wall_time,
                                    vasp_cmd=vasp_cmd,
                                    db_file=None,
                                    spawn_count=0,
                                    production=num_checkpoints))
            fws.append(Firework(t, name="Multispawn_{}_FW".format(i + 1)))

        return FWAction(detours=fws)
Example #15
File: tasks.py Project: FilipchukB/P1
    def run_task(self, fw_spec):
        if '_add_launchpad_and_fw_id' not in fw_spec:
            raise SkipTest("Couldn't load launchpad")

        timeout = 20
        while not self.launchpad.workflows.find_one({
                'locked': {
                    "$exists": True
                },
                'nodes': self.fw_id
        }) and timeout > 0:
            time.sleep(1)
            timeout -= 1

        if timeout == 0:
            raise SkipTest("The WF wasn't locked")

        if fw_spec.get('fizzle', False):
            raise ValueError('Testing; this error is normal.')

        return FWAction(update_spec={"WaitWFLockTask": 1})
Example #16
    def run_task(self, fw_spec):
        logging.basicConfig(level=logging.INFO)
        qchem_logger = logging.getLogger('QChemDrone')
        qchem_logger.setLevel(logging.INFO)
        sh = logging.StreamHandler(stream=sys.stdout)
        # sh.setLevel(getattr(logging, 'INFO'))
        qchem_logger.addHandler(sh)

        cur_dir = os.getcwd()
        src_qchem_dir = fw_spec['src_qchem_dir']
        for filename in glob.glob(os.path.join(src_qchem_dir, '*')):
            if os.path.isfile(filename):
                shutil.copy(filename, cur_dir)

        if os.path.exists("custodian.json") or os.path.exists(
                "custodian.json.gz"):
            with zopen(zpath("custodian.json"), 'rt') as f:
                custodian_out = json.load(f)
        else:
            custodian_out = []

        all_errors = set()
        for run in custodian_out:
            for correction in run['corrections']:
                all_errors.update(correction['errors'])

        prev_qchem_dir = os.getcwd()
        if MOVE_TO_EG_GARDEN:
            prev_qchem_dir = move_to_eg_garden(prev_qchem_dir)

        stored_data = {'error_list': list(all_errors)}
        update_spec = {'prev_qchem_dir': prev_qchem_dir,
                       'prev_task_type': fw_spec['task_type']}
        propagate_keys = ['egsnl', 'snlgroup_id', 'inchi_root',
                          'mixed_basis', 'mixed_aux_basis', 'mol']
        for k in propagate_keys:
            if k in fw_spec:
                update_spec[k] = fw_spec[k]

        return FWAction(stored_data=stored_data, update_spec=update_spec)
Example #17
    def run_task(self, fw_spec):
        rescale_type = self.get('rescale_type', 'BirchMurnaghan_EOS')

        if rescale_type == 'BirchMurnaghan_EOS':
            pv_pairs = np.array(fw_spec["pressure_volume"])
            pv_pairs = np.flip(pv_pairs, axis=1)
            pv_pairs = np.flip(pv_pairs[pv_pairs[:, 1].argsort()], axis=0)

            try:
                params = fit_BirchMurnaghanPV_EOS(pv_pairs)
                equil_volume = params[0]
            except Exception:
                warnings.warn(
                    "Could not converge Birch-Murnaghan EOS fit, trying linear regression"
                )
                rescale_type = 'linear_regression'

        pvs = fw_spec["pressure_volume"]
        p = [item[1] for item in pvs]
        v = [item[0] for item in pvs]
        if rescale_type == 'linear_regression':
            slope, intercept, r_value, p_value, std_err = stats.linregress(
                v, p)
            if slope >= 0:
                ## In future try building a hull with composition and volume. then getting composition volume
                raise ValueError(
                    "P and V should be inversely related. Try using larger NSW in the volume variation"
                )
            equil_volume = -intercept / slope

        frac_change = equil_volume / sorted(v)[int(np.floor(len(v) / 2))]
        if frac_change > 2 or frac_change < 0.5:
            # If volume is greater than 2x or 0.5x, use the lowest pressure volume.
            equil_volume = v[np.argmin(p)]

        poscar = Poscar.from_file("./POSCAR")
        poscar.structure.scale_lattice(equil_volume)
        poscar.write_file("./POSCAR")

        return FWAction()
Example #18
File: exchange.py Project: samblau/atomate
    def run_task(self, fw_spec):

        db_file = env_chk(self["db_file"], fw_spec)
        wf_uuid = self["wf_uuid"]
        mc_settings = self.get("mc_settings", {})

        # Get Heisenberg models from db
        mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
        mmdb.collection = mmdb.db["exchange"]

        # Get documents
        docs = list(
            mmdb.collection.find({"wf_meta.wf_uuid": wf_uuid},
                                 ["heisenberg_model", "nn_cutoff"]))

        hmodels = [
            HeisenbergModel.from_dict(d["heisenberg_model"]) for d in docs
        ]
        cutoffs = [hmodel.cutoff for hmodel in hmodels]
        ordered_hmodels = [
            h for _, h in sorted(zip(cutoffs, hmodels), reverse=False)
        ]
        # Take the model with smallest NN cutoff
        hmodel = ordered_hmodels[0]

        # Get a converged Heisenberg model if one was found
        # if fw_spec["converged_heisenberg_model"]:
        #     hmodel = HeisenbergModel.from_dict(fw_spec["converged_heisenberg_model"])

        vc = VampireCaller(hm=hmodel, **mc_settings)
        vampire_output = vc.output

        # Update FW spec
        update_spec = {"vampire_output": vampire_output}

        # Write to file
        dumpfn(vampire_output.as_dict(), "vampire_output.json")

        return FWAction(update_spec=update_spec)
Example #19
    def run_task(self, fw_spec):

        wd = os.getcwd()
        Vasp2TraceCaller(wd)

        try:
            raw_struct = Structure.from_file(wd + "/POSCAR")
            formula = raw_struct.composition.formula
            structure = raw_struct.as_dict()

        except FileNotFoundError:
            formula = None
            structure = None

        data = Vasp2TraceOutput(wd + "/trace.txt")

        return FWAction(
            update_spec={
                "vasp2trace_out": data.as_dict(),
                "structure": structure,
                "formula": formula,
            })
Example #20
    def run_task(self, fw_spec):
        mol = fw_spec["molecule"]
        mol_name = fw_spec["mol_name"]
        charge = fw_spec["charge"]
        spin_multiplicity = fw_spec["spin_multiplicity"]

        gaus_lines = gaussian.GaussianInput(
            mol,
            charge=charge,
            spin_multiplicity=spin_multiplicity,
            title='created by gaussian_geo_task from' + ' ' + mol_name,
            functional="b3lyp",
            basis_set="aug-cc-pvdz",
            route_parameters={
                'opt': "(calcfc,tight)",
                'int': "ultrafine",
                "\n# SCF": "tight"
            },
            input_parameters=None,
            link0_parameters={
                "%mem": "256MW",
                "%NProcShared": 4,
                "%LindaWorker": "localhost",
                "%chk": mol_name + ".chk"
            },
            dieze_tag="#",
            gen_basis=None)

        gaus_lines.write_file('mol_geo.gau', cart_coords=True)

        with open('mol_geo.gau') as f, open("mol_geo.out", 'w') as fo:
            subprocess.call(shlex.split("g09launch"), stdin=f, stdout=fo)

        prev_gaussian_geo = shlex.os.path.join(shlex.os.getcwd(),
                                               'mol_geo.out')
        update_spec = {'prev_gaussian_geo': prev_gaussian_geo}

        return FWAction(update_spec=update_spec)
Example #21
    def run_task(self, fw_spec):
        filename = fw_spec['prev_gaussian_geo']
        gaus_geo = gaussian.GaussianOutput(filename)
        mol_opt = gaus_geo.final_structure
        mol_name = fw_spec["mol_name"]
        charge = fw_spec["charge"]
        spin_multiplicity = fw_spec["spin_multiplicity"]

        gaus_freq_charge = gaussian.GaussianInput(
            mol_opt,
            charge=charge,
            spin_multiplicity=spin_multiplicity,
            title='created by gaussian_frq_task from' + ' ' + mol_name,
            functional="b3lyp",
            basis_set="aug-cc-pvdz  freq",
            route_parameters={
                "SCF": "tight",
                "pop": "MK iop(6/33=2,6/41=10,6/42=10,7/33=1)"
            },
            input_parameters=None,
            link0_parameters={
                "%mem": "30GB",
                "%NProcShared": 4,
                "%LindaWorker": "localhost",
                "%chk": mol_name + ".chk"
            },
            dieze_tag="#",
            gen_basis=None)
        gaus_freq_charge.write_file('mol_freq.gau', cart_coords=True)

        with open('mol_freq.gau') as f, open("mol_freq.out", 'w') as fo:
            subprocess.call(shlex.split("g09launch"), stdin=f, stdout=fo)

        prev_gaussian_freq = shlex.os.path.join(shlex.os.getcwd(),
                                                'mol_freq.out')
        update_spec = {'prev_gaussian_freq': prev_gaussian_freq}

        return FWAction(update_spec=update_spec)
Example #22
    def run_task(self, fw_spec):
        inserted_structure = fw_spec.get("optimal_structure")
        working_ion = fw_spec.get("working_ion")
        vasptodb_kwargs = fw_spec.get("vasptodb_kwargs")
        staticfw_kwargs = fw_spec.get("staticfw_kwargs", {})

        fw1 = StaticFW(
            inserted_structure,
            vasptodb_kwargs=vasptodb_kwargs,
            db_file=DB_FILE,
            **staticfw_kwargs,
        )
        n_ion = int(
            inserted_structure.composition.element_composition[working_ion])
        fw2 = Firework(
            [AnalyzeChgcar(), GetInsertionCalcs()],
            name=f"Charge Density Analysis-{n_ion}",
            parents=fw1,
        )
        wf = Workflow([fw1, fw2], name=f"Obtain inserted sites-{n_ion}")
        wf = get_powerup_wf(wf, fw_spec)
        update_wf_keys(wf, fw_spec)
        return FWAction(additions=[wf])
Example #23
    def run_task(self, fw_spec):

        wd = os.getcwd()

        try:
            raw_struct = Structure.from_file(wd + "/POSCAR")
            formula = raw_struct.composition.formula
            structure = raw_struct.as_dict()

        except Exception:
            formula = None
            structure = None

        cmd = env_chk(self["pyzfs_cmd"], fw_spec)
        logger.info("Running command: {}".format(cmd))
        return_code = subprocess.call([cmd], shell=True)
        logger.info("Command {} finished running with returncode: {}".format(
            cmd, return_code))

        return FWAction(update_spec={
            "structure": structure,
            "formula": formula,
        })
Example #24
    def run_task(self, fw_spec):

        v2t = self.get("vasp2trace_out") or fw_spec["vasp2trace_out"]

        v2t = jsanitize(v2t)

        d = {
            "formula": fw_spec["formula"],
            "structure": fw_spec["structure"],
            "vasp2trace": v2t,
        }

        # store the results
        db_file = env_chk(self.get("db_file"), fw_spec)
        if not db_file:
            with open("vasp2trace.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            db.collection = db.db["vasp2trace"]
            db.collection.insert_one(d)
            logger.info("Vasp2trace calculation complete.")
        return FWAction()
Example #25
    def run_task(self, fw_spec):
        workflow_id = fw_spec.get("workflow", {"_id": -1}).get("_id", -1)
        n_initial_configurations = self["n_initial_configurations"]
        n_configurations = self["n_configurations"]
        shape = self["shape"]
        nanocluster_size = self["nanocluster_size"]
        compositions = self["compositions"]
        elements = self["elements"]
        generate_pure_nanoclusters = self["generate_pure_nanoclusters"]
        bondlength_dct = self["bondlength_dct"]

        db = get_external_database(fw_spec["extdb_connect"])
        simulations = db['simulations']

        # generate clusters
        nanoclusters, calc_ids = self.generate(
            n_initial_configurations,
            n_configurations,
            shape,
            nanocluster_size,
            compositions,
            elements,
            generate_pure_nanoclusters=generate_pure_nanoclusters,
            bondlength_dct=bondlength_dct,
            db=db,
            workflow_id=workflow_id)

        # upload all simulations at once
        simulations.insert_many(nanoclusters)

        # fireworks
        update_spec = fw_spec
        update_spec["calc_ids"] = calc_ids

        update_spec.pop("_category")
        update_spec.pop("name")
        return FWAction(update_spec=update_spec)
Example #26
    def run_task(self, fw_spec):
        calc_dir = os.getcwd()
        if "calc_dir" in self:
            calc_dir = self["calc_dir"]
        elif self.get("calc_loc"):
            calc_dir = get_calc_loc(self["calc_loc"], fw_spec["calc_locs"])["path"]

        logger.info("PARSING DIRECTORY: {}".format(calc_dir))

        db_file = env_chk(self.get('db_file'), fw_spec)

        cluster_dict = None
        tags = Tags.from_file(filename="feff.inp")
        if "RECIPROCAL" not in tags:
            cluster_dict = Atoms.cluster_from_file("feff.inp").as_dict()
        doc = {"input_parameters": tags.as_dict(),
               "cluster": cluster_dict,
               "structure": self["structure"].as_dict(),
               "absorbing_atom": self["absorbing_atom"],
               "spectrum_type": self["spectrum_type"],
               "spectrum": np.loadtxt(os.path.join(calc_dir, self["output_file"])).tolist(),
               "edge": self.get("edge", None),
               "metadata": self.get("metadata", None),
               "dir_name": os.path.abspath(os.getcwd()),
               "last_updated": datetime.utcnow()}

        if not db_file:
            with open("feff_task.json", "w") as f:
                f.write(json.dumps(doc, default=DATETIME_HANDLER))

        else:
            db = FeffCalcDb.from_db_file(db_file, admin=True)
            db.insert(doc)

        logger.info("Finished parsing the spectrum")

        return FWAction(stored_data={"task_id": doc.get("task_id", None)})
Example #27
    def run_task(self, fw_spec):
        v, _ = get_vasprun_outcar(self.get("calc_dir", "."),
                                  parse_dos=False,
                                  parse_eigen=False)
        stress = v.ionic_steps[-1]['stress']
        defo = self['deformation']
        d_ind = np.nonzero(defo - np.eye(3))
        delta = Decimal((defo - np.eye(3))[d_ind][0])
        # Shorthand is d_X_V, X is voigt index, V is value
        dtype = "_".join(
            ["d",
             str(reverse_voigt_map[d_ind][0]), "{:.0e}".format(delta)])
        strain = Strain.from_deformation(defo)
        defo_dict = {
            'deformation_matrix': defo,
            'strain': strain.tolist(),
            'stress': stress
        }

        return FWAction(mod_spec=[{
            '_set': {
                'deformation_tasks->{}'.format(dtype): defo_dict
            }
        }])
Example #28
    def run_task(self, fw_spec):

        username = self["username"]
        password = self["password"]
        parameters = self["parameters"]
        extdb_connect = self["extdb_connect"]
        name = self["name"]
        workflow_type = self["workflow_type"]

        creation_time = str(datetime.datetime.now(tz=None))

        extdb_connect["username"] = username
        extdb_connect["password"] = password
        extdb_connect["host"] = extdb_connect.get("host",
            "nanolayers.dyndns.org:27017")

        extdb_connect["db_name"] = extdb_connect.get("db_name",
            "testdb")        
        extdb_connect["authsource"] = extdb_connect.get("authsource",
            extdb_connect["db_name"])

        workflow = update_workflows_collection(username, password, 
            creation_time, parameters = parameters,
            name = name, workflow_type = workflow_type, extdb_connect = extdb_connect)

        update_spec = fw_spec
        update_spec["temp"] = {}
        update_spec["simulations"] = {}
        update_spec["workflow"] = workflow
        update_spec["machine_learning"] = {}
        update_spec["extdb_connect"] = extdb_connect
        update_spec["temp"]["calc_analysis_ids_dict"] = {}
        update_spec["analysis_ids"] = []
        update_spec.pop("_category")
        update_spec.pop("name")
        return FWAction(update_spec=update_spec)
Example #29
    def run_task(self, fw_spec):
        # Edison setting
        # vasp_cmd = ['aprun', '-n', str(fw_spec["_queueadapter"]["mppwidth"]), fw_spec["_fw_env"]['vasp_cmd']]
        # Vesta setting
        cobalt_partname = os.environ['COBALT_PARTNAME']
        vasp_cmd = [
            'runjob', '-n',
            str(fw_spec["_queueadapter"]["nnodes"]), '--block',
            cobalt_partname, '-p', '1', ":", fw_spec["_fw_env"]['vasp_cmd']
        ]
        job = VaspJob(vasp_cmd=vasp_cmd, auto_gamma=False, auto_npar=False)
        if self["handlers"] == "all":
            hnames = [
                "VaspErrorHandler", "MeshSymmetryErrorHandler",
                "UnconvergedErrorHandler", "NonConvergingErrorHandler",
                "PotimErrorHandler", "WalltimeHandler"
            ]
        else:
            hnames = self["handlers"]
        handlers = [load_class("custodian.vasp.handlers", n)() for n in hnames]
        c = Custodian(handlers, [job], **self.get("custodian_params", {}))
        output = c.run()

        chgcar_dir = os.getcwd()

        MyDB.db_access().connect()
        collection = MyDB.db_access().collection(fw_spec['collection'])
        collection.update(
            {
                "mp-id": fw_spec["mp-id"],
                "pair_index": fw_spec["pair_index"]
            }, {"$set": {
                "chgcar_dir": chgcar_dir
            }})
        MyDB.db_access().close()
        return FWAction(stored_data=output)
Example #30
    def run_task(self, fw_spec):

        pyzfs_out = loadfn("pyzfs_out.json")
        pyzfs_out = jsanitize(pyzfs_out)

        additional_fields = self.get("additional_fields", {})
        d = additional_fields.copy()
        d["formula"] = fw_spec["formula"]
        d["structure"] = fw_spec["structure"]
        d["pyzfs_out"] = pyzfs_out
        d["dir_name"] = os.getcwd()
        # store the results
        db_file = env_chk(self.get("db_file"), fw_spec)
        if not db_file:
            with open("pyzfs_todb.json", "w") as f:
                f.write(json.dumps(d, default=DATETIME_HANDLER, indent=4))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            print(self.get("collection_name", db.collection.name))
            db.collection = db.db[self.get("collection_name",
                                           db.collection.name)]
            t_id = db.insert(d)
            logger.info("Pyzfs calculation complete.")
        return FWAction()
Example #31
    def test_recursive_deserialize(self):
        my_dict = {
            'update_spec': {},
            'mod_spec': [],
            'stored_data': {},
            'exit': False,
            'detours': [],
            'additions': [{
                'updated_on': '2014-10-14T00:56:27.758673',
                'fw_id': -2,
                'spec': {'_tasks': [{'use_shell': True,
                                     '_fw_name': 'ScriptTask',
                                     'script': ['echo "1"']}]},
                'created_on': '2014-10-14T00:56:27.758669',
                'name': 'Unnamed FW'
            }],
            'defuse_children': False
        }
        FWAction.from_dict(my_dict)