Example #1
    def run_task(self, fw_spec):
        if fw_spec.get("prev_calc_molecule"):
            molecule = fw_spec.get("prev_calc_molecule")
        else:
            molecule = self.get("molecule")
        if molecule is None:
            raise ValueError(
                "No molecule passed and no prev_calc_molecule found in spec! Exiting..."
            )

        compress_at_end = False

        cube = self.get("cube_file")

        if cube[-3:] == ".gz":
            compress_at_end = True
            decompress_file(cube)
            cube = cube[:-3]

        input_script = ["molecule " + cube]
        input_script += ["load " + cube]
        input_script += ["auto"]
        input_script += ["CPREPORT cpreport.json"]
        input_script += ["YT JSON yt.json"]
        input_script += ["end"]
        input_script += [""]
        input_script = "\n".join(input_script)

        Critic2Caller(input_script)

        if compress_at_end:
            compress_file(cube)
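The decompress, run, recompress dance in this task is generic enough to factor out. A minimal sketch of it as a context manager (a hypothetical helper, grounded in the in-place semantics of monty's compress_file/decompress_file that the tests below demonstrate):

from contextlib import contextmanager

from monty.shutil import compress_file, decompress_file

@contextmanager
def gunzipped(path):
    # Yield a decompressed filename, restoring the .gz file on exit.
    # Relies on monty replacing the file in place rather than keeping
    # both the compressed and decompressed copies.
    if path.endswith(".gz"):
        decompress_file(path)
        try:
            yield path[:-3]
        finally:
            compress_file(path[:-3])
    else:
        yield path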
Example #2
    def run_task(self, fw_spec):
        checkpoint_dirs = fw_spec.get('checkpoint_dirs')
        if not checkpoint_dirs:
            raise ValueError("No checkpoint_dirs found in spec! Exiting...")
        write_dir = self.get('write_dir', False)

        if write_dir:
            os.makedirs(write_dir, exist_ok=True)
        else:
            write_dir = os.getcwd()

        # write each MD run (composed of n checkpoints) to a JSON file and gzip it
        logger.info("LOGGER: Assimilating checkpoint data...")
        ionic_steps = []
        for directory in checkpoint_dirs:
            ionic_steps.extend(
                Vasprun(os.path.join(directory, "vasprun.xml.gz")).ionic_steps)
        dumpfn(ionic_steps, os.path.join(write_dir, 'ionic_steps.json'))
        compress_file(os.path.join(write_dir, 'ionic_steps.json'))

        # get composition info
        s = ionic_steps[0]['structure']
        composition = {
            'composition': s.composition.to_data_dict,
            'density': float(s.density),
        }

        # write composition info to JSON and gzip it
        dumpfn(composition, os.path.join(write_dir, 'composition.json'))
        compress_file(os.path.join(write_dir, 'composition.json'))
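The dumpfn-then-compress_file pairing appears twice here and again in the later vasprun analysis task. A hypothetical helper that captures the pattern (dump_and_gzip is not in the original source):

from monty.serialization import dumpfn
from monty.shutil import compress_file

def dump_and_gzip(obj, path):
    # Serialize obj to JSON at path, then gzip it in place
    # (compress_file replaces path with path + ".gz").
    dumpfn(obj, path)
    compress_file(path)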
Example #3
 def test_compress_and_decompress_file(self):
     fname = os.path.join(test_dir, "tempfile")
     for fmt in ["gz", "bz2"]:
         compress_file(fname, fmt)
         self.assertTrue(os.path.exists(fname + "." + fmt))
         self.assertFalse(os.path.exists(fname))
         decompress_file(fname + "." + fmt)
         self.assertTrue(os.path.exists(fname))
         self.assertFalse(os.path.exists(fname + "." + fmt))
     with open(fname) as f:
         txt = f.read()
         self.assertEqual(txt, "hello world")
     self.assertRaises(ValueError, compress_file, "whatever", "badformat")
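The final assertions read back "hello world" even though the test never writes it, so the file must come from a fixture. A plausible setUp/tearDown pair, assuming test_dir is a module-level constant as in the snippet:

def setUp(self):
    # Create the plain-text file the round-trip test operates on.
    with open(os.path.join(test_dir, "tempfile"), "w") as f:
        f.write("hello world")

def tearDown(self):
    # Remove whichever variant of the file survived the test.
    for suffix in ["", ".gz", ".bz2"]:
        path = os.path.join(test_dir, "tempfile" + suffix)
        if os.path.exists(path):
            os.remove(path)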
Example #4
    def run_task(self, fw_spec):
        filepath = self.get('vasprun',
                            os.path.join(os.getcwd(), 'vasprun.xml.gz'))
        write_dir = self.get('write_dir', os.path.join(os.getcwd(),
                                                       'analysis'))
        filename = self.get('filename', 'ionic_steps.json')

        os.makedirs(write_dir, exist_ok=True)

        ionic_steps = Vasprun(filepath).ionic_steps
        dumpfn(ionic_steps, os.path.join(write_dir, filename))
        compress_file(os.path.join(write_dir, filename))

        s = ionic_steps[0]['structure']
        composition = {
            'composition': s.composition.to_data_dict,
            'density': float(s.density),
        }

        dumpfn(composition, os.path.join(write_dir, 'composition.json'))
        compress_file(os.path.join(write_dir, 'composition.json'))
Example #5
 def postprocess(self):
     """
     will gzip relevant files (won't gzip custodian.json and other output files from the cluster)
     """
     if self.gzipped:
         # gzip every LOBSTER input and output file that is present
         for file in LOBSTEROUTPUT_FILES + LOBSTERINPUT_FILES:
             if os.path.exists(file):
                 compress_file(file, compression="gz")
         if self.backup and os.path.exists("lobsterin.orig"):
             compress_file("lobsterin.orig", compression="gz")
         for file in self.add_files_to_gzip:
             compress_file(file, compression="gz")
Example #6
    def run_task(self, fw_spec):

        # use real defaults: the `self.get(key) or default` idiom would
        # silently discard explicit falsy settings such as get_rdf=False
        get_rdf = self.get('get_rdf', True)
        get_diffusion = self.get('get_diffusion', True)
        get_viscosity = self.get('get_viscosity', True)
        get_vdos = self.get('get_vdos', True)
        get_run_data = self.get('get_run_data', True)
        time_step = self.get('time_step', 2)
        checkpoint_dirs = fw_spec.get('checkpoint_dirs', False)

        calc_dir = get_calc_loc(True, fw_spec["calc_locs"])["path"]
        calc_loc = os.path.join(calc_dir, 'XDATCAR.gz')

        ionic_step_skip = self.get('ionic_step_skip', 1)
        ionic_step_offset = self.get('ionic_step_offset', 0)

        analysis_spec = self.get('analysis_spec', {})

        logger.info("Reading in ionic_steps...")

        decompress_file("ionic_steps.json.gz")
        ionic_steps = loadfn("ionic_steps.json")
        structures = [s.structure for s in ionic_steps]
        compress_file("ionic_steps.json")

        db_dict = {'density': float(structures[0].density)}
        db_dict.update(structures[0].composition.to_data_dict)

        if get_rdf:
            logger.info("LOGGER: Calculating radial distribution functions...")
            rdf = RadialDistributionFunction(structures=structures)
            rdf_dat = rdf.get_radial_distribution_functions(nproc=4)
            db_dict.update({'rdf': rdf.get_rdf_db_dict()})
            del rdf
            del rdf_dat

        # vdos_diff is referenced later by the diffusion block, so define it
        # up front to avoid a NameError when the VDOS calculation is skipped
        vdos_diff = None
        if get_vdos:
            logger.info("LOGGER: Calculating vibrational density of states...")
            vdos = VDOS(structures)
            vdos_dat = vdos.calc_vdos_spectrum(time_step=time_step *
                                               ionic_step_skip)
            vdos_diff = vdos.calc_diffusion_coefficient(time_step=time_step *
                                                        ionic_step_skip)
            db_dict.update({'vdos': vdos_dat})
            del vdos
            del vdos_dat

        if get_diffusion:
            logger.info("LOGGER: Calculating the diffusion coefficients...")
            diffusion = Diffusion(structures,
                                  t_step=time_step,
                                  l_lim=50,
                                  skip_first=250,
                                  block_l=1000,
                                  ci=0.95)
            D = {'msd': {}, 'vdos': {}}
            for s in structures[0].types_of_specie:
                D['msd'][s.symbol] = diffusion.getD(s.symbol)
            if vdos_diff:
                D['vdos'] = vdos_diff
            db_dict.update({'diffusion': D})
            del D

        if get_viscosity:
            logger.info("LOGGER: Calculating the viscosity...")
            viscosities = []
            if checkpoint_dirs:
                for directory in checkpoint_dirs:
                    visc = Viscosity(directory).calc_viscosity()
                    viscosities.append(visc['viscosity'])
            # np.mean/np.std of an empty list is nan, so only record
            # viscosity data when checkpoint runs were actually available
            if viscosities:
                viscosity_dat = {
                    'viscosity': np.mean(viscosities),
                    'StdDev': np.std(viscosities)
                }
                db_dict.update({'viscosity': viscosity_dat})

        if get_run_data:
            data = MD_Data()
            if checkpoint_dirs:
                logger.info("LOGGER: Assimilating run stats...")
                for directory in checkpoint_dirs:
                    data.parse_md_data(directory)
            else:
                logger.info("LOGGER: Getting run stats...")
                data.parse_md_data(calc_dir)
            md_stats = data.get_md_stats()
            db_dict.update({'md_data': md_stats})

        if analysis_spec:
            logger.info("LOGGER: Adding user-specified data...")
            db_dict.update(analysis_spec)

        logger.info("LOGGER: Pushing data to database collection...")
        db_file = env_chk(">>db_file<<", fw_spec)
        db = VaspCalcDb.from_db_file(db_file, admin=True)
        db.collection = db.db["md_data"]
        db.collection.insert_one(db_dict)

        return FWAction()
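A note on the option handling in this task: `self.get(key) or default` is a common but subtly wrong idiom, because `or` discards explicit falsy settings, which is why `self.get(key, default)` appears above instead. The pitfall, demonstrated with a plain dict standing in for the task's key/value store:

opts = {"get_rdf": False, "time_step": 0}

# `or` replaces every falsy stored value with the default:
opts.get("get_rdf") or True   # -> True: the explicit False is lost
opts.get("time_step") or 2    # -> 2: the explicit 0 is lost

# a default argument only kicks in when the key is truly absent:
opts.get("get_rdf", True)     # -> False
opts.get("time_step", 2)      # -> 0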
Example #7
    def run_task(self, fw_spec):
        if fw_spec.get("prev_calc_molecule"):
            molecule = fw_spec.get("prev_calc_molecule")
        else:
            molecule = self.get("molecule")
        if molecule is None:
            raise ValueError(
                "No molecule passed and no prev_calc_molecule found in spec! Exiting..."
            )

        cube = self.get("cube_file")

        compress_at_end = False

        if cube.endswith(".gz"):
            compress_at_end = True
            decompress_file(cube)
            cube = cube[:-3]

        input_script = ["molecule "+cube]
        input_script += ["load "+cube]
        input_script += ["auto"]
        input_script += ["CPREPORT CP.json"]
        input_script += ["YT JSON YT.json"]
        input_script += ["end"]
        input_script += [""]
        input_script = "\n".join(input_script)

        with open('input_script.cri', 'w') as f:
            f.write(input_script)
        args = ["critic2", "input_script.cri"]

        rs = subprocess.Popen(args,
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              close_fds=True)

        stdout, stderr = rs.communicate()
        stdout = stdout.decode()

        if stderr:
            stderr = stderr.decode()
            warnings.warn(stderr)
            with open('stdout.cri', 'w') as f:
                f.write(stdout)
            with open('stderr.cri', 'w') as f:
                f.write(stderr)

        if rs.returncode != 0:
            raise RuntimeError("critic2 exited with return code {}.".format(rs.returncode))

        CP = loadfn("CP.json")
        bohr_to_ang = 0.529177249  # Bohr radius in angstroms

        species = {}
        for specie in CP["structure"]["species"]:
            # strip critic2's underscore padding from one-letter element names
            if specie["name"][1] == "_":
                species[specie["id"]] = specie["name"][0]
            else:
                species[specie["id"]] = specie["name"]

        atoms = []
        centering_vector = CP["structure"]["molecule_centering_vector"]
        for ii, atom in enumerate(CP["structure"]["nonequivalent_atoms"]):
            specie = species[atom["species"]]
            atoms.append(specie)
            tmp = atom["cartesian_coordinates"]
            coords = []
            for jj, val in enumerate(tmp):
                coords.append((val + centering_vector[jj]) * bohr_to_ang)
            if str(molecule[ii].specie) != specie:
                raise RuntimeError("Atom ordering different!")
            if molecule[ii].distance_from_point(coords) > 1e-5:
                raise RuntimeError("Atom position " + str(ii) + " inconsistent!")

        assert CP["critical_points"]["number_of_nonequivalent_cps"] == CP["critical_points"]["number_of_cell_cps"]

        bond_dict = {}
        for cp in CP["critical_points"]["nonequivalent_cps"]:
            # a (3, -1) critical point (rank 3, signature -1) is a bond CP
            if cp["rank"] == 3 and cp["signature"] == -1:
                bond_dict[cp["id"]] = {"field": cp["field"]}

        for cp in CP["critical_points"]["cell_cps"]:
            if cp["id"] in bond_dict:
                # Check if any bonds include fictitious atoms
                bad_bond = False
                for entry in cp["attractors"]:
                    if int(entry["cell_id"])-1 >= len(atoms):
                        bad_bond = True
                # If so, remove them from the bond_dict
                if bad_bond:
                    bond_dict.pop(cp["id"])
                else:
                    bond_dict[cp["id"]]["atom_ids"] = [
                        entry["cell_id"] for entry in cp["attractors"]]
                    bond_dict[cp["id"]]["atoms"] = [
                        atoms[int(entry["cell_id"]) - 1]
                        for entry in cp["attractors"]]
                    bond_dict[cp["id"]]["distance"] = (
                        cp["attractors"][0]["distance"] * bohr_to_ang +
                        cp["attractors"][1]["distance"] * bohr_to_ang)
        dumpfn(bond_dict, "bonding.json")

        bonds = []
        for cpid in bond_dict:
            # identify and throw out fictitious bonds
            # NOTE: this should be re-examined and refined in the future
            if bond_dict[cpid]["field"] > 0.02 and bond_dict[cpid]["distance"] < 2.5:
                bonds.append([int(entry) - 1 for entry in bond_dict[cpid]["atom_ids"]])

        YT = loadfn("YT.json")
        charges = []
        for site in YT["integration"]["attractors"]:
            charges.append(site["atomic_number"] - site["integrals"][0])

        processed_dict = {"bonds": bonds, "charges": charges}
        dumpfn(processed_dict, "processed_critic2.json")

        if compress_at_end:
            compress_file(cube)
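The Popen/communicate pair in this task predates subprocess.run. An equivalent invocation on Python 3.7+, shown as an alternative sketch rather than a drop-in from the original source:

import subprocess
import warnings

result = subprocess.run(
    ["critic2", "input_script.cri"],
    capture_output=True,  # collect stdout and stderr
    text=True,            # decode bytes to str automatically
)
if result.stderr:
    warnings.warn(result.stderr)
if result.returncode != 0:
    raise RuntimeError(
        "critic2 exited with return code {}.".format(result.returncode))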