def setUpClass(cls):
    """Create the shared project and the Lammps jobs used by the interactive tests."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, "lammps"))

    def _lammps_job(file_name, job_name):
        # All jobs live in the same test project; only the HDF5 file / job names vary.
        return Lammps(
            project=ProjectHDFio(project=cls.project, file_name=file_name),
            job_name=job_name,
        )

    cls.job = _lammps_job("lammps", "lammps")
    cls.job.server.run_mode.interactive = True
    # Two Fe atoms on the cube diagonal of a 2 Angstrom cell.
    cls.job.structure = Atoms(
        symbols="Fe2",
        positions=np.outer(np.arange(2), np.ones(3)),
        cell=2 * np.eye(3),
    )
    cls.minimize_job = _lammps_job("lammps", "minimize_lammps")
    cls.minimize_control_job = _lammps_job("lammps", "minimize_control_lammps")
def setUpClass(cls):
    """Set up the test project and the four Lammps jobs shared by the water/dump tests."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, 'lammps'))
    # Every job uses its own HDF5 file named exactly like the job itself.
    for attr, name in (
        ('job', 'lammps'),
        ('job_water', 'lammps_water'),
        ('job_water_dump', 'lammps_water_dump'),
        ('job_dump', 'lammps_dump_static'),
    ):
        setattr(
            cls,
            attr,
            Lammps(
                project=ProjectHDFio(project=cls.project, file_name=name),
                job_name=name,
            ),
        )
def _get_item_helper(self, item, convert_to_object=True): """ Internal helper function to get item from project Args: item (str, int): key convert_to_object (bool): convert the object to an pyiron object or only access the HDF5 file - default=True accessing only the HDF5 file is about an order of magnitude faster, but only provides limited functionality. Compare the GenericJob object to JobCore object. Returns: Project, GenericJob, JobCore, dict, list, float: basically any kind of item inside the project. """ if item == "..": return self.parent_group if item in self.list_nodes(): if self._inspect_mode or not convert_to_object: return self.inspect(item) return self.load(item) if item in self.list_files(extension="h5"): file_name = posixpath.join(self.path, "{}.h5".format(item)) return ProjectHDFio(project=self, file_name=file_name) if item in self.list_files(): file_name = posixpath.join(self.path, "{}".format(item)) with open(file_name) as f: return f.readlines() if item in self.list_dirs(): with self.open(item) as new_item: return new_item.copy() raise ValueError("Unknown item: {}".format(item))
def __init__(self, db, job_id=None, db_entry=None, user=None):
    """
    Load a job from its database entry and open its HDF5 file read-only.

    Args:
        db: database interface providing ``get_item_by_id``
        job_id (int, optional): database id; used when ``db_entry`` is absent
        db_entry (dict, optional): pre-fetched database row for the job
        user (str, optional): user name forwarded to the Project object

    Raises:
        ValueError: if no database entry exists for ``job_id``
    """
    if not db_entry:
        db_entry = db.get_item_by_id(job_id)
    if db_entry is None:
        raise ValueError("job ID {0} does not exist!".format(job_id))
    job_name = db_entry["job"]
    h5_path = None
    sub_job = db_entry["subjob"]
    if sub_job is not None:
        if len(sub_job.strip()) > 0:
            # Everything but the last element is the group path inside the file.
            h5_path = '/'.join(sub_job.split('/')[:-1])
            # Index 1 (not -1): appears to assume sub_job starts with '/' and
            # names the file after the top-level job of the path — TODO confirm.
            hdf5_file = sub_job.split('/')[1] + '.h5'
    # NOTE(review): hdf5_file is only bound inside the branch above; if
    # "subjob" is None or empty this raises NameError below — confirm that
    # callers always supply a non-empty subjob entry.
    gp = GenericPath(root_path=db_entry["projectpath"], project_path=db_entry["project"])
    hdf_project = ProjectHDFio(project=Project(path=gp, user=user), file_name=hdf5_file, h5_path=h5_path, mode="r")
    super(JobPath, self).__init__(hdf_project, job_name)
    # Mirror the database metadata on the instance.
    self.__name__ = db_entry["hamilton"]
    self.__version__ = db_entry["hamversion"]
    if 'id' in db_entry:
        self._job_id = db_entry["id"]
        self._status = db_entry["status"]
        self._master_id = db_entry["masterid"]
        self._parent_id = db_entry["parentid"]
def create_job(self, job_type, job_name):
    """
    Create a new job of the given type in this project.

    Supported job types:
        'ExampleJob'     - example job just generating random numbers
        'SerialMaster'   - series of jobs run in serial
        'ParallelMaster' - series of jobs run in parallel
        'ScriptJob'      - Python script or jupyter notebook job container
        'ListMaster'     - list of jobs

    Args:
        job_type (str): one of the supported job type names above
        job_name (str): name of the job; '.' characters are replaced by '_'

    Returns:
        GenericJob: job object of the requested type
    """
    safe_name = job_name.replace(".", "_")
    job = JobType(
        job_type,
        project=ProjectHDFio(project=self.copy(), file_name=safe_name),
        job_name=safe_name,
        job_class_dict=self.job_type.job_class_dict,
    )
    if self.user is not None:
        job.user = self.user
    return job
def create_job(self, job_type, job_name):
    """
    Create a new job of the given type in this project.

    Supported job types include:
        'StructureContainer', 'StructurePipeline', 'AtomisticExampleJob',
        'ExampleJob' (random-number example), 'Lammps', 'KMC', 'Sphinx',
        'Vasp', 'GenericMaster', 'SerialMaster' (jobs run in serial),
        'AtomisticSerialMaster', 'ParallelMaster' (jobs run in parallel),
        'KmcMaster', 'ThermoLambdaMaster', 'RandomSeedMaster', 'MeamFit',
        'Murnaghan', 'MinimizeMurnaghan', 'ElasticMatrix',
        'ConvergenceVolume', 'ConvergenceEncutParallel',
        'ConvergenceKpointParallel', 'PhonopyMaster',
        'DefectFormationEnergy', 'LammpsASE', 'PipelineMaster',
        'TransformationPath', 'ThermoIntEamQh', 'ThermoIntDftEam',
        'ScriptJob' (Python script / notebook container), 'ListMaster'

    Args:
        job_type (str): one of the supported job type names above
        job_name (str): name of the job

    Returns:
        GenericJob: job object of the requested type
    """
    job = JobType(
        job_type,
        project=ProjectHDFio(project=self.copy(), file_name=job_name),
        job_name=job_name,
        job_class_dict=self.job_type.job_class_dict,
    )
    if self.user is not None:
        job.user = self.user
    return job
def setUpClass(cls):
    """Prepare the QuickFF test project with a pristine and a complete job."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, "test_quickff"))
    cls.job = cls.project.create_job("QuickFF", "trial")
    complete_hdf = ProjectHDFio(project=cls.project, file_name="quickff_complete")
    cls.job_complete = QuickFF(project=complete_hdf, job_name="quickff_complete")
def setUpClass(cls):
    """Create the project and all Lammps fixture jobs shared by this test case."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, "lammps"))

    def _lammps_job(file_name, job_name):
        # One factory for all fixtures; only the HDF5 file / job names differ.
        return Lammps(
            project=ProjectHDFio(project=cls.project, file_name=file_name),
            job_name=job_name,
        )

    cls.job = _lammps_job("lammps", "lammps")
    cls.job_water = _lammps_job("lammps_water", "lammps_water")
    cls.job_water_dump = _lammps_job("lammps_water_dump", "lammps_water_dump")
    cls.job_dump = _lammps_job("lammps_dump_static", "lammps_dump_static")
    cls.job_vcsgc_input = _lammps_job("lammps_vcsgc_input", "lammps_vcsgc_input")
    # The two minimize jobs share the "lammps" HDF5 file on purpose.
    cls.minimize_job = _lammps_job("lammps", "minimize_lammps")
    cls.minimize_control_job = _lammps_job("lammps", "minimize_control_lammps")
def test_hdf(self):
    """Round-trip GenericParameters through an HDF5 group and verify every value."""
    pr = Project(self.file_location)
    h5_file = os.path.join(self.file_location, 'genericpara.h5')
    hdf = ProjectHDFio(project=pr, file_name=h5_file, h5_path="/test", mode="a")
    hdf.create_group('test')
    # Write, then read back into a fresh object.
    self.generic_parameters_str.to_hdf(hdf=hdf, group_name='input')
    reloaded = GenericParameters(table_name='str')
    reloaded.from_hdf(hdf=hdf, group_name='input')
    self.assertEqual(reloaded.get("par___1"), 1)
    self.assertEqual(reloaded.get("par_2"), 'all')
    self.assertEqual(reloaded.get("count"), 0)
    self.assertTrue(reloaded.get("write_restart"))
    self.assertFalse(reloaded.get("read_restart"))
    self.assertEqual(reloaded.get("dict"), {"a": 1, "b": 2})
    self.assertEqual(reloaded.get("list"), [1, "s"])
    # Clean up the temporary HDF5 file.
    os.remove(h5_file)
def setUpClass(cls):
    """Set up the project and the four Lammps jobs used by these tests."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, "lammps"))

    def _lammps_job(name):
        # File name and job name are identical for every fixture here.
        return Lammps(
            project=ProjectHDFio(project=cls.project, file_name=name),
            job_name=name,
        )

    cls.job = _lammps_job("lammps")
    cls.job_water = _lammps_job("lammps_water")
    cls.job_water_dump = _lammps_job("lammps_water_dump")
    cls.job_dump = _lammps_job("lammps_dump_static")
def setUpClass(cls):
    """Create the test project and a generic atomistic job with an Al fcc structure."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, "test_job"))
    job_hdf = ProjectHDFio(project=cls.project, file_name="test_job")
    cls.job = AtomisticGenericJob(project=job_hdf, job_name="test_job")
    # 4x4x4 fcc Al supercell (a = 4) as the test structure.
    al_unit_cell = CrystalStructure(
        element="Al", bravais_basis="fcc", lattice_constants=4
    )
    cls.job.structure = al_unit_cell.repeat(4)
def setUp(self):
    """Start every test from a clean 'trial' job and a fresh 'quickff_complete' job."""
    # Remove leftovers from previous runs before recreating the jobs.
    if self.project.load('trial') is not None:
        self.project.remove_job('trial')
    self.job = self.project.create_job("QuickFF", 'trial')
    if self.project.load('quickff_complete') is not None:
        self.project.remove_job("quickff_complete")
    complete_hdf = ProjectHDFio(project=self.project, file_name="quickff_complete")
    self.job_complete = QuickFF(project=complete_hdf, job_name="quickff_complete")
def create_hdf(path, job_name):
    """
    Build a ProjectHDFio handle for storing project-level data, e.g. aggregated results.

    Args:
        path (str): absolute path of the project directory
        job_name (str): name of the HDF5 container

    Returns:
        ProjectHDFio: handle rooted at the group named after the container
    """
    project = Project(path)
    root_group = '/' + job_name
    return ProjectHDFio(project=project, file_name=job_name, h5_path=root_group)
def setUpClass(cls):
    """Create the VASP test project, a trial job and a complete job with a POSCAR structure."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, "test_vasp"))
    cls.job = cls.project.create_job("Vasp", "trial")
    complete_hdf = ProjectHDFio(project=cls.project, file_name="vasp_complete")
    cls.job_complete = Vasp(project=complete_hdf, job_name="vasp_complete")
    # Structure of the reference run, read from the static test files.
    poscar_path = posixpath.join(
        cls.execution_path, "../static/vasp_test_files/full_job_sample/POSCAR"
    )
    cls.job_complete.structure = read_atoms(poscar_path, species_from_potcar=True)
def setUp(self):
    """Start every test from a clean 'trial' job and a fresh 'yaff_complete' job."""
    # Remove leftovers from previous runs before recreating the jobs.
    if self.project.load('trial') is not None:
        self.project.remove_job("trial")
    self.job = self.project.create_job("Yaff", "trial")
    if self.project.load('yaff_complete') is not None:
        self.project.remove_job("yaff_complete")
    complete_hdf = ProjectHDFio(project=self.project, file_name="yaff_complete")
    self.job_complete = Yaff(project=complete_hdf, job_name="yaff_complete")
def __init__(self, job_path):
    """
    Open a job directly from the path of its HDF5 file.

    Args:
        job_path (str): path of the form ``/some/path/job.h5/optional/group``;
            backslashes are normalized so Windows paths are accepted.

    Raises:
        ValueError: if ``job_path`` does not contain exactly one ``.h5`` marker.
    """
    # Normalize Windows separators, then split into (file stem, path inside file).
    job_path_lst = job_path.replace('\\', '/').split('.h5')
    if len(job_path_lst) != 2:
        # Fix: the original raised a bare ValueError with no message; include
        # the offending path so the failure is diagnosable.
        raise ValueError(
            "job_path must contain exactly one '.h5' marker: {}".format(job_path)
        )
    sub_job = job_path_lst[1]
    h5_path = None
    # str.split always yields strings, so the original `sub_job is not None`
    # guard was dead code; only the empty-string check is meaningful.
    if sub_job.strip():
        # Everything but the last element is the group path inside the file.
        h5_path = '/'.join(sub_job.split('/')[:-1])
    hdf_project = ProjectHDFio(
        project=Project(os.path.dirname(job_path_lst[0])),
        file_name=job_path_lst[0].split('/')[-1] + '.h5',
        h5_path=h5_path,
        mode="r",
    )
    super(JobPathBase, self).__init__(
        project=hdf_project,
        job_name=job_path_lst[1].split('/')[-1],
    )
def setUpClass(cls):
    """Create the interactive Lammps job with a two-atom Fe structure."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, 'lammps'))
    cls.job = Lammps(
        project=ProjectHDFio(project=cls.project, file_name='lammps'),
        job_name='lammps',
    )
    cls.job.server.run_mode.interactive = True
    # Two Fe atoms on the cube diagonal of a 2 Angstrom cell.
    fe_dimer = Atoms(
        symbols='Fe2',
        positions=np.outer(np.arange(2), np.ones(3)),
        cell=2 * np.eye(3),
    )
    cls.job.structure = fe_dimer