예제 #1
0
    def test_references(self):
        """Validate reference handling in StructureNL construction."""
        # A junk reference string must be rejected.
        with self.assertRaises(ValueError):
            StructureNL(self.s, self.hulk, references=self.junk)

        # A well-formed BibTeX reference is accepted.
        StructureNL(self.s, self.hulk, references=self.pmg)

        # Unicode in the reference is accepted.
        StructureNL(self.s, self.hulk, references=self.unicode_title)

        # Several references joined by newlines are accepted.
        multi = '\n'.join([self.matproj, self.pmg])
        StructureNL(self.s, self.hulk, references=multi)

        # An over-long reference string must be rejected.
        with self.assertRaises(ValueError):
            StructureNL(self.s, self.hulk, references=self.superlong)
예제 #2
0
    def test_historynodes(self):
        """Check that history nodes round-trip and are validated."""
        snl = StructureNL(self.s, self.hulk, history=[self.valid_node])
        first = snl.history[0]
        self.assertEqual(first.name, "DB 1")
        self.assertEqual(first.url, "www.db1URLgoeshere.com")
        self.assertEqual(first.description, {"db1_id": 12424})

        snl = StructureNL(self.s, self.hulk,
                          history=[self.valid_node, self.valid_node2])
        second = snl.history[1]
        self.assertEqual(second.name, "DB 2")
        self.assertEqual(second.url, "www.db2URLgoeshere.com")
        self.assertEqual(second.description, {"db2_id": 12424})

        # A malformed node must be rejected.
        with self.assertRaises(Exception):
            StructureNL(self.s, self.hulk, history=[self.invalid_node])

        # An excessively long history must be rejected.
        with self.assertRaises(ValueError):
            StructureNL(self.s, self.hulk, history=[self.valid_node] * 1000)
예제 #3
0
 def run_task(self, fw_spec):
     """Fan out one elasticity sub-workflow per deformed structure.

     Reads the relaxed structure produced by the previous relaxation run,
     generates a set of deformed structures, and for each deformation
     builds a three-firework chain (add SNL -> optimize deformed
     structure -> VASP db insertion), returned as workflow additions.
     """
     # Read structure from previous relaxation
     relaxed_struct = fw_spec['output']['crystal']
     # Generate deformed structures
     d_struct_set = DeformedStructureSet(relaxed_struct, ns=0.06)
     wf=[]
     for i, d_struct in enumerate(d_struct_set.def_structs):
         fws=[]
         connections={}
         f = Composition(d_struct.formula).alphabetical_formula
         snl = StructureNL(d_struct, 'Joseph Montoya <*****@*****.**>', 
                           projects=["Elasticity"])
         # Firework 1 (fw_id -1000+i*10): register the deformed structure SNL.
         tasks = [AddSNLTask()]
         snl_priority = fw_spec.get('priority', 1)
         spec = {'task_type': 'Add Deformed Struct to SNL database', 
                 'snl': snl.as_dict(), 
                 '_queueadapter': QA_DB, 
                 '_priority': snl_priority}
         if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL):
             # Pass-through: reuse the existing MPSNL rather than re-adding it.
             spec['force_mpsnl'] = snl.as_dict()
             spec['force_snlgroup_id'] = fw_spec['snlgroup_id']
             del spec['snl']
         fws.append(Firework(tasks, spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-1000+i*10))
         connections[-1000+i*10] = [-999+i*10]
         # Firework 2 (fw_id -999+i*10): VASP optimization of the deformed cell.
         spec = snl_to_wf._snl_to_spec(snl, 
                                       parameters={'exact_structure':True})
         spec = update_spec_force_convergence(spec)
         spec['deformation_matrix'] = d_struct_set.deformations[i].tolist()
         spec['original_task_id'] = fw_spec["task_id"]
         spec['_priority'] = fw_spec['_priority']*2
         #Turn off dupefinder for deformed structure
         del spec['_dupefinder']
         spec['task_type'] = "Optimize deformed structure"
         fws.append(Firework([VaspWriterTask(), SetupElastConstTask(),
                              get_custodian_task(spec)], 
                             spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-999+i*10))
         
         # Firework 3 (fw_id -998+i*10): parse the run into the VASP database.
         priority = fw_spec['_priority']*3
         spec = {'task_type': 'VASP db insertion', 
                 '_priority': priority,
                 '_allow_fizzled_parents': True, 
                 '_queueadapter': QA_DB, 
                 'elastic_constant':"deformed_structure", 
                 'clean_task_doc':True,
                 'deformation_matrix':d_struct_set.deformations[i].tolist(), 
                 'original_task_id':fw_spec["task_id"]}
         fws.append(Firework([VaspToDBTask()], 
                             spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-998+i*10))
         connections[-999+i*10] = [-998+i*10]
         wf.append(Workflow(fws, connections))
     return FWAction(additions=wf)
예제 #4
0
 def run_task(self, fw_spec):
     """Fan out one elasticity sub-workflow per deformed structure.

     Variant that additionally runs AddElasticDataToDBTask during the
     final db-insertion firework of each chain.
     """
     # Read structure from previous relaxation
     relaxed_struct = fw_spec['output']['crystal']
     # Generate deformed structures
     d_struct_set = DeformedStructureSet(relaxed_struct, ns=0.06)
     wf=[]
     for i, d_struct in enumerate(d_struct_set.def_structs):
         fws=[]
         connections={}
         f = Composition(d_struct.formula).alphabetical_formula
         snl = StructureNL(d_struct, 'Joseph Montoya <*****@*****.**>', 
                           projects=["Elasticity"])
         # Firework 1 (fw_id -1000+i*10): register the deformed structure SNL.
         tasks = [AddSNLTask()]
         snl_priority = fw_spec.get('priority', 1)
         spec = {'task_type': 'Add Deformed Struct to SNL database', 
                 'snl': snl.as_dict(), 
                 '_queueadapter': QA_DB, 
                 '_priority': snl_priority}
         if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL):
             # Pass-through: reuse the existing MPSNL rather than re-adding it.
             spec['force_mpsnl'] = snl.as_dict()
             spec['force_snlgroup_id'] = fw_spec['snlgroup_id']
             del spec['snl']
         fws.append(Firework(tasks, spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-1000+i*10))
         connections[-1000+i*10] = [-999+i*10]
         # Firework 2 (fw_id -999+i*10): VASP optimization of the deformed cell.
         spec = snl_to_wf._snl_to_spec(snl, 
                                       parameters={'exact_structure':True})
         spec = update_spec_force_convergence(spec)
         spec['deformation_matrix'] = d_struct_set.deformations[i].tolist()
         spec['original_task_id'] = fw_spec["task_id"]
         spec['_priority'] = fw_spec['_priority']*2
         #Turn off dupefinder for deformed structure
         del spec['_dupefinder']
         spec['task_type'] = "Optimize deformed structure"
         fws.append(Firework([VaspWriterTask(), SetupElastConstTask(),
                              get_custodian_task(spec)], 
                             spec, 
                             name=get_slug(f + '--' + spec['task_type']), 
                             fw_id=-999+i*10))
         
         # Firework 3 (fw_id -998+i*10): db insertion plus elastic-data parse.
         priority = fw_spec['_priority']*3
         spec = {'task_type': 'VASP db insertion', 
                 '_priority': priority,
                 '_allow_fizzled_parents': True, 
                 '_queueadapter': QA_DB, 
                 'elastic_constant':"deformed_structure", 
                 'clean_task_doc':True,
                 'deformation_matrix':d_struct_set.deformations[i].tolist(), 
                 'original_task_id':fw_spec["task_id"]}
         fws.append(Firework([VaspToDBTask(), AddElasticDataToDBTask()], spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-998+i*10))
         connections[-999+i*10] = [-998+i*10]
         wf.append(Workflow(fws, connections))
     return FWAction(additions=wf)
def structure_to_mock_job(structure):
    """Build a mock submission job document from a pymatgen structure."""
    # An SNL requires at least one author; placeholder names suffice for mocks.
    authors = [{"name": "Saurabh Bajaj", "email": "*****@*****.**"},
               {"name": "Anubhav Jain", "email": "*****@*****.**"}]
    snl = StructureNL(structure, authors)
    job = snl.as_dict()
    # Only derive metadata when the SNL dict did not already carry it.
    if 'is_valid' not in job:
        job.update(get_meta_from_structure(snl.structure))
    job.update(snl.structure.get_sorted_structure().as_dict())
    return job
예제 #6
0
    def test_to_from_dict(self):
        """SNLs must survive an as_dict/from_dict round trip."""
        # Plain values in 'data' and in the history nodes.
        original = StructureNL(self.s, self.hulk, ['test_project'], self.pmg,
                               ['remark1'], {"_my_data": "string"},
                               [self.valid_node, self.valid_node2])
        self.assertEqual(original, StructureNL.from_dict(original.as_dict()))

        # Pymatgen objects embedded in 'data' and in a history node.
        complicated_node = {
            "name": "complicated node",
            "url": "www.complicatednodegoeshere.com",
            "description": {"structure": self.s2},
        }
        original = StructureNL(self.s, self.hulk, ['test_project'], self.pmg,
                               ['remark1'],
                               {"_my_data": {"structure": self.s2}},
                               [complicated_node, self.valid_node])
        roundtrip = StructureNL.from_dict(original.as_dict())
        self.assertEqual(
            original, roundtrip,
            'to/from dict is broken when object embedding is '
            'used! Apparently MontyEncoding is broken...')

        # Molecule-based SNLs round-trip as well.
        molnl = StructureNL(self.mol, self.hulk, references=self.pmg)
        self.assertEqual(molnl, StructureNL.from_dict(molnl.as_dict()))
예제 #7
0
 def test_data(self):
     """Structure-valued data survives storage; unprefixed keys fail."""
     snl = StructureNL(self.s, self.hulk, data={"_structure": self.s2})
     self.assertEqual(snl.data["_structure"], self.s2,
                      'Data storage is broken')
     # Keys lacking a leading-underscore namespace must be rejected.
     with self.assertRaises(ValueError):
         StructureNL(self.s, self.hulk, data={"bad_key": 1})
예제 #8
0
 def to_snl(self,
            authors,
            projects=None,
            references='',
            remarks=None,
            data=None,
            created_at=None):
     """Convert this TransformedStructure into a StructureNL.

     The transformation history becomes SNL history nodes; any
     other_parameters are discarded with a warning.
     """
     if self.other_parameters:
         warn('Data in TransformedStructure.other_parameters discarded '
              'during type conversion to SNL')
     history = []
     for step in self.history:
         # '_snl' metadata, when present, overrides the default provenance.
         meta = step.pop('_snl', {})
         history.append({
             'name': meta.pop('name', 'pymatgen'),
             'url': meta.pop('url',
                             'http://pypi.python.org/pypi/pymatgen'),
             'description': step,
         })
     from pymatgen.matproj.snl import StructureNL
     return StructureNL(self.final_structure, authors, projects, references,
                        remarks, data, history, created_at)
예제 #9
0
파일: snl_tasks.py 프로젝트: matk86/MPWorks
    def run_task(self, fw_spec):
        """Insert the spec's SNL into the SNL database and pass the ids on."""
        adapter = SNLMongoAdapter.auto_load()
        snl = StructureNL.from_dict(fw_spec['snl'])
        mpsnl, snlgroup_id, spec_group = adapter.add_snl(snl)
        # Tag downstream runs with the species group, when one was assigned.
        mod_spec = None
        if spec_group:
            mod_spec = [{"_push": {
                "run_tags": "species_group={}".format(spec_group)}}]
        return FWAction(update_spec={'mpsnl': mpsnl.to_dict,
                                     'snlgroup_id': snlgroup_id},
                        mod_spec=mod_spec)
예제 #10
0
 def test_remarks(self):
     """A bare string remark is wrapped in a list; invalid remarks raise."""
     snl = StructureNL(self.s, self.hulk, remarks="string format")
     self.assertEqual(snl.remarks[0], "string format")
     with self.assertRaises(ValueError):
         StructureNL(self.s, self.hulk, remarks=self.remark_fail)
예제 #11
0
 def get_snls(self, species):
     """Return all stored SNLs containing exactly the given species list."""
     query = {'nspecies': len(species), 'species': {'$all': species}}
     return [StructureNL.from_dict(doc['snl'])
             for doc in self.structures.find(query)]
예제 #12
0
    def test_authors(self):
        """Author parsing handles single authors and author lists."""
        snl = StructureNL(self.s, self.hulk, references=self.pmg)
        self.assertEqual(snl.authors[0].name, "Hulk")
        self.assertEqual(snl.authors[0].email, "*****@*****.**")

        snl = StructureNL(self.s, self.america, references=self.pmg)
        self.assertEqual(snl.authors[0].name, "Captain America")
        self.assertEqual(snl.authors[0].email, "*****@*****.**")

        snl = StructureNL(self.s, self.thor, references=self.pmg)
        self.assertEqual(snl.authors[0].name, "Thor")
        self.assertEqual(snl.authors[0].email, "*****@*****.**")

        # A two-author list keeps both entries in order.
        snl = StructureNL(self.s, self.duo, references=self.pmg)
        expected = [("Iron Man", "*****@*****.**"),
                    ("Black Widow", "*****@*****.**")]
        for author, (name, email) in zip(snl.authors, expected):
            self.assertEqual(author.name, name)
            self.assertEqual(author.email, email)
        StructureNL(self.s, self.hulk, references=self.pmg)
예제 #13
0
    def from_snl(snl, snl_id, pointgroup):
        """Return an EGStructureNL copy of *snl* tagged with EG metadata."""
        # Dict round-trip makes an independent copy, leaving *snl* untouched.
        duplicate = StructureNL.from_dict(snl.as_dict())
        eg = duplicate.data.setdefault('_electrolytegenome', {})
        eg['snl_id'] = snl_id
        eg['pointgroup'] = pointgroup
        return EGStructureNL.from_dict(duplicate.as_dict())
예제 #14
0
    def submit_new_workflow(self):
        """Claim one SUBMITTED job, validate it, and launch its workflow.

        Returns the job's submission_id when a job was claimed (even when
        the job was rejected or errored); returns None otherwise.

        NOTE(review): this method uses Python 2 ``print`` statements.
        """
        # finds a submitted job, creates a workflow, and submits it to FireWorks
        job = self.jobs.find_and_modify({'state': 'SUBMITTED'},
                                        {'$set': {
                                            'state': 'WAITING'
                                        }})
        if job:
            submission_id = job['submission_id']
            try:
                # Jobs already assigned an snl_id are full MP SNLs.
                if 'snl_id' in job:
                    snl = MPStructureNL.from_dict(job)
                else:
                    snl = StructureNL.from_dict(job)
                # Reject oversized, invalid, POTCAR-less, or disordered structures.
                if len(snl.structure.sites) > SubmissionProcessor.MAX_SITES:
                    self.sma.update_state(submission_id, 'REJECTED',
                                          'too many sites', {})
                    print 'REJECTED WORKFLOW FOR {} - too many sites ({})'.format(
                        snl.structure.formula, len(snl.structure.sites))
                elif not job['is_valid']:
                    self.sma.update_state(
                        submission_id, 'REJECTED',
                        'invalid structure (atoms too close)', {})
                    print 'REJECTED WORKFLOW FOR {} - invalid structure'.format(
                        snl.structure.formula)
                elif len(set(NO_POTCARS) & set(job['elements'])) > 0:
                    self.sma.update_state(submission_id, 'REJECTED',
                                          'invalid structure (no POTCAR)', {})
                    print 'REJECTED WORKFLOW FOR {} - invalid element (No POTCAR)'.format(
                        snl.structure.formula)
                elif not job['is_ordered']:
                    self.sma.update_state(submission_id, 'REJECTED',
                                          'invalid structure (disordered)', {})
                    print 'REJECTED WORKFLOW FOR {} - invalid structure'.format(
                        snl.structure.formula)
                else:
                    # Record the submission id on the SNL before building the workflow.
                    snl.data['_materialsproject'] = snl.data.get(
                        '_materialsproject', {})
                    snl.data['_materialsproject'][
                        'submission_id'] = submission_id

                    # create a workflow
                    if "Elasticity" in snl.projects:
                        wf = snl_to_wf_elastic(snl, job['parameters'])
                    else:
                        wf = snl_to_wf(snl, job['parameters'])
                    self.launchpad.add_wf(wf)
                    print 'ADDED WORKFLOW FOR {}'.format(snl.structure.formula)
            except:
                # Any failure marks the submission ERROR; the traceback is logged.
                self.jobs.find_and_modify({'submission_id': submission_id},
                                          {'$set': {
                                              'state': 'ERROR'
                                          }})
                traceback.print_exc()

            return submission_id
예제 #15
0
    def submit_structures(self,
                          structures,
                          authors,
                          projects=None,
                          references='',
                          remarks=None,
                          data=None,
                          histories=None,
                          created_at=None):
        """
        Submits a list of structures to the Materials Project as SNL files.
        The argument list mirrors the arguments for the StructureNL object,
        except that a list of structures with the same metadata is used as an
        input.

        .. note::

            As of now, this MP REST feature is open only to a select group of
            users. Opening up submissions to all users is being planned for
            the future.

        Args:
            structures:
                A list of Structure objects
            authors:
                *List* of {"name":'', "email":''} dicts,
                *list* of Strings as 'John Doe <*****@*****.**>',
                or a single String with commas separating authors
            projects:
                List of Strings ['Project A', 'Project B']. This applies to
                all structures.
            references:
                A String in BibTeX format. Again, this applies to all
                structures.
            remarks:
                List of Strings ['Remark A', 'Remark B']
            data:
                A list of free form dict. Namespaced at the root level with an
                underscore, e.g. {"_materialsproject":<custom data>}. The
                length of data should be the same as the list of structures
                if not None.
            histories:
                List of list of dicts - [[{'name':'', 'url':'',
                'description':{}}], ...] The length of histories should be the
                same as the list of structures if not None.
            created_at:
                A datetime object

        Returns:
            A list of inserted submission ids.
        """
        snl_list = StructureNL.from_structures(structures, authors, projects,
                                               references, remarks, data,
                                               histories, created_at)
        # Bug fix: the result of submit_snl was previously discarded, so the
        # documented "list of inserted submission ids" never reached callers.
        return self.submit_snl(snl_list)
예제 #16
0
파일: snl_tasks.py 프로젝트: cmgtam/MPWorks
    def run_task(self, fw_spec):
        """Load the spec's SNL, add it to the SNL database, and pass ids on."""
        # Database adapter for the SNL collection.
        adapter = SNLMongoAdapter.auto_load()

        # Rehydrate the SNL carried in the firework spec.
        snl = StructureNL.from_dict(fw_spec['snl'])

        # Insert it, obtaining the canonical MPSNL and its dedup group id.
        mpsnl, snlgroup_id = adapter.add_snl(snl)

        return FWAction(
            update_spec={'mpsnl': mpsnl.to_dict, 'snlgroup_id': snlgroup_id})
예제 #17
0
    def run_task(self, fw_spec):
        """Add the spec's SNL to the SNL db, or pass a forced MPSNL through.

        NOTE(review): contains a Python 2 ``print`` statement.
        """
        # pass-through option for when we start with an mpsnl and don't actually want to add
        if 'force_mpsnl' in fw_spec and 'force_snlgroup_id' in fw_spec:
            print 'USING FORCED MPSNL'
            return FWAction(update_spec={'mpsnl': fw_spec['force_mpsnl'], 'snlgroup_id': fw_spec['force_snlgroup_id']})

        sma = SNLMongoAdapter.auto_load()
        snl = StructureNL.from_dict(fw_spec['snl'])
        # Insert and capture the canonical MPSNL plus its dedup group id.
        mpsnl, snlgroup_id = sma.add_snl(snl)

        return FWAction(update_spec={'mpsnl': mpsnl.to_dict, 'snlgroup_id': snlgroup_id})
예제 #18
0
    def test_to_from_dict(self):
        """SNLs must survive a to_dict/from_dict round trip."""
        # Plain values in 'data' and in the history nodes.
        original = StructureNL(self.s, self.hulk, ['test_project'], self.pmg,
                               ['remark1'], {"_my_data": "string"},
                               [self.valid_node, self.valid_node2])
        self.assertEqual(original, StructureNL.from_dict(original.to_dict))

        # Pymatgen objects embedded in 'data' and in a history node.
        complicated_node = {
            "name": "complicated node",
            "url": "www.complicatednodegoeshere.com",
            "description": {"structure": self.s2},
        }
        original = StructureNL(self.s, self.hulk, ['test_project'], self.pmg,
                               ['remark1'],
                               {"_my_data": {"structure": self.s2}},
                               [complicated_node, self.valid_node])
        roundtrip = StructureNL.from_dict(original.to_dict)
        self.assertEqual(
            original, roundtrip,
            'to/from dict is broken when object embedding is '
            'used! Apparently PMGJSONEncoding is broken...')

        # Molecule-based SNLs round-trip as well.
        molnl = StructureNL(self.mol, self.hulk, references=self.pmg)
        self.assertEqual(molnl, StructureNL.from_dict(molnl.to_dict))
예제 #19
0
파일: mpsnl.py 프로젝트: cmgtam/MPWorks
    def from_snl(snl, snl_id, sg_num, sg_symbol, hall, xtal_system, lattice_type):
        """Build an MPStructureNL from *snl* plus Materials Project metadata."""
        # Dict round-trip makes an independent copy of the incoming SNL.
        duplicate = StructureNL.from_dict(snl.to_dict)
        mp = duplicate.data.setdefault('_materialsproject', {})
        mp['snl_id'] = snl_id
        mp['sg_num'] = sg_num
        mp['sg_symbol'] = sg_symbol
        mp['hall'] = hall
        mp['xtal_system'] = xtal_system
        mp['lattice_type'] = lattice_type
        return MPStructureNL.from_dict(duplicate.to_dict)
예제 #20
0
    def test_eq(self):
        """__eq__ compares all fields, including the creation timestamp."""
        created_at = datetime.datetime.now()
        common = (['test_project'], self.pmg, ['remark1'],
                  {"_my_data": self.s2}, [self.valid_node, self.valid_node2])
        a = StructureNL(self.s, self.hulk, *(common + (created_at,)))
        b = StructureNL(self.s, self.hulk, *(common + (created_at,)))
        self.assertEqual(a, b, "__eq__() method is broken! false negative")

        # Omitting the shared creation date must break equality.
        c = StructureNL(self.s, self.hulk, *common)
        self.assertNotEqual(a, c, "__eq__() method is broken! false positive")

        # A different structure must break equality.
        d = StructureNL(self.s2, self.hulk, *(common + (created_at,)))
        self.assertNotEqual(a, d, "__eq__() method is broken! false positive")
예제 #21
0
    def test_from_structures(self):
        """from_structures builds one SNL per structure with shared metadata."""
        lattice = [[5, 0, 0], [0, 5, 0], [0, 0, 5]]
        fe = Structure(lattice, ["Fe"], [[0, 0, 0]])
        mn = Structure(lattice, ["Mn"], [[0, 0, 0]])
        remarks = ["unittest"]
        authors = "Test User <*****@*****.**>"
        snl_list = StructureNL.from_structures([fe, mn], authors,
                                               remarks=remarks)

        self.assertEqual(len(snl_list), 2)
        for snl in snl_list:
            self.assertEqual(snl.remarks, remarks)
            self.assertEqual(snl.authors, [Author.parse_author(authors)])
예제 #22
0
파일: rest.py 프로젝트: isayev/pymatgen
    def submit_structures(self, structures, authors, projects=None,
                             references='', remarks=None, data=None,
                             histories=None, created_at=None):
        """
        Submits a list of structures to the Materials Project as SNL files.
        The argument list mirrors the arguments for the StructureNL object,
        except that a list of structures with the same metadata is used as an
        input.

        .. note::

            As of now, this MP REST feature is open only to a select group of
            users. Opening up submissions to all users is being planned for
            the future.

        Args:
            structures:
                A list of Structure objects
            authors:
                *List* of {"name":'', "email":''} dicts,
                *list* of Strings as 'John Doe <*****@*****.**>',
                or a single String with commas separating authors
            projects:
                List of Strings ['Project A', 'Project B']. This applies to
                all structures.
            references:
                A String in BibTeX format. Again, this applies to all
                structures.
            remarks:
                List of Strings ['Remark A', 'Remark B']
            data:
                A list of free form dict. Namespaced at the root level with an
                underscore, e.g. {"_materialsproject":<custom data>}. The
                length of data should be the same as the list of structures
                if not None.
            histories:
                List of list of dicts - [[{'name':'', 'url':'',
                'description':{}}], ...] The length of histories should be the
                same as the list of structures if not None.
            created_at:
                A datetime object

        Returns:
            A list of inserted submission ids.
        """
        snl_list = StructureNL.from_structures(structures,
                    authors, projects, references, remarks, data,
                    histories, created_at)

        # Bug fix: the result of submit_snl was previously discarded, so the
        # documented "list of inserted submission ids" never reached callers.
        return self.submit_snl(snl_list)
예제 #23
0
    def test_from_structures(self):
        """Each structure yields its own SNL sharing authors and remarks."""
        cube = [[5, 0, 0], [0, 5, 0], [0, 0, 5]]
        structures = [Structure(cube, [sp], [[0, 0, 0]])
                      for sp in ("Fe", "Mn")]
        remarks = ["unittest"]
        authors = "Test User <*****@*****.**>"
        snls = StructureNL.from_structures(structures, authors,
                                           remarks=remarks)

        self.assertEqual(len(snls), 2)
        expected_authors = [Author.parse_author(authors)]
        for snl in snls:
            self.assertEqual(snl.remarks, remarks)
            self.assertEqual(snl.authors, expected_authors)
예제 #24
0
파일: snl_tasks.py 프로젝트: matk86/MPWorks
    def run_task(self, fw_spec):
        """Insert the spec's SNL into the SNL db and forward the new ids."""
        adapter = SNLMongoAdapter.auto_load()
        mpsnl, snlgroup_id, spec_group = adapter.add_snl(
            StructureNL.from_dict(fw_spec['snl']))

        # When a species group was assigned, tag downstream runs with it.
        if spec_group:
            mod_spec = [{
                "_push": {
                    "run_tags": "species_group={}".format(spec_group)
                }
            }]
        else:
            mod_spec = None

        return FWAction(update_spec={
            'mpsnl': mpsnl.to_dict,
            'snlgroup_id': snlgroup_id
        }, mod_spec=mod_spec)
예제 #25
0
    def submit_new_workflow(self):
        """Claim one SUBMITTED job, validate it, and launch its workflow.

        Variant that routes Elasticity submissions to the phonon workflow.
        Returns the submission_id when a job was claimed.

        NOTE(review): this method uses Python 2 ``print`` statements.
        """
        # finds a submitted job, creates a workflow, and submits it to FireWorks
        job = self.jobs.find_and_modify({'state': 'SUBMITTED'}, {'$set': {'state': 'WAITING'}})
        if job:
            submission_id = job['submission_id']
            try:
                # Jobs already assigned an snl_id are full MP SNLs.
                if 'snl_id' in job:
                    snl = MPStructureNL.from_dict(job)
                else:
                    snl = StructureNL.from_dict(job)
                # Reject oversized, invalid, POTCAR-less, or disordered structures.
                if len(snl.structure.sites) > SubmissionProcessor.MAX_SITES:
                    self.sma.update_state(submission_id, 'REJECTED', 'too many sites', {})
                    print 'REJECTED WORKFLOW FOR {} - too many sites ({})'.format(
                        snl.structure.formula, len(snl.structure.sites))
                elif not job['is_valid']:
                    self.sma.update_state(submission_id, 'REJECTED',
                                          'invalid structure (atoms too close)', {})
                    print 'REJECTED WORKFLOW FOR {} - invalid structure'.format(
                        snl.structure.formula)
                elif len(set(NO_POTCARS) & set(job['elements'])) > 0:
                    self.sma.update_state(submission_id, 'REJECTED',
                                          'invalid structure (no POTCAR)', {})
                    print 'REJECTED WORKFLOW FOR {} - invalid element (No POTCAR)'.format(
                        snl.structure.formula)
                elif not job['is_ordered']:
                    self.sma.update_state(submission_id, 'REJECTED',
                                          'invalid structure (disordered)', {})
                    print 'REJECTED WORKFLOW FOR {} - invalid structure'.format(
                        snl.structure.formula)
                else:
                    # Record the submission id on the SNL before building the workflow.
                    snl.data['_materialsproject'] = snl.data.get('_materialsproject', {})
                    snl.data['_materialsproject']['submission_id'] = submission_id

                    # create a workflow
                    if "Elasticity" in snl.projects:
                        from mpworks.workflows.snl_to_wf_phonon import snl_to_wf_phonon
                        wf=snl_to_wf_phonon(snl, job['parameters'])
                    else:
                        wf = snl_to_wf(snl, job['parameters'])
                    self.launchpad.add_wf(wf)
                    print 'ADDED WORKFLOW FOR {}'.format(snl.structure.formula)
            except:
                # Any failure marks the submission ERROR; the traceback is logged.
                self.jobs.find_and_modify({'submission_id': submission_id},
                                          {'$set': {'state': 'ERROR'}})
                traceback.print_exc()

            return submission_id
예제 #26
0
 def test_snl(self):
     """SNL conversions preserve history and authorship."""
     self.trans.set_parameter('author', 'will')
     # Converting with leftover other_parameters must warn exactly once.
     with warnings.catch_warnings(record=True) as caught:
         warnings.simplefilter("always")
         snl = self.trans.to_snl([('will', '*****@*****.**')])
         self.assertEqual(len(caught), 1,
                          'Warning not raised on type conversion '
                          'with other_parameters')
     ts = TransformedStructure.from_snl(snl)
     self.assertEqual(ts.history[-1]['@class'], 'SubstitutionTransformation')

     # History entries survive SNL -> TransformedStructure -> SNL.
     node = ('testname', 'testURL', {'test': 'testing'})
     snl = StructureNL(ts.final_structure, [('will', '*****@*****.**')],
                       history=[node])
     snl = TransformedStructure.from_snl(snl).to_snl(
         [('notwill', '*****@*****.**')])
     self.assertEqual(snl.history, [node])
     self.assertEqual(snl.authors, [('notwill', '*****@*****.**')])
예제 #27
0
파일: mpsnl.py 프로젝트: matk86/MPWorks
    def from_snl(snl, snl_id, sg_num, sg_symbol, hall, xtal_system, lattice_type, pointgroup):
        """Copy *snl* into an MPStructureNL carrying spacegroup metadata."""
        # Dict round-trip makes an independent copy of the incoming SNL.
        duplicate = StructureNL.from_dict(snl.to_dict)
        mp = duplicate.data.setdefault('_materialsproject', {})
        mp['snl_id'] = snl_id
        mp['spacegroup'] = {
            'symbol': sg_symbol,
            'number': sg_num,
            'point_group': pointgroup,
            'crystal_system': xtal_system,
            'hall': hall,
            'lattice_type': lattice_type,
        }
        return MPStructureNL.from_dict(duplicate.to_dict)
예제 #28
0
파일: mpsnl.py 프로젝트: ctoher/MPWorks
    def from_snl(snl, snl_id, sg_num, sg_symbol, hall, xtal_system, lattice_type, pointgroup):
        """Return an MPStructureNL copy of *snl* annotated with spacegroup data."""
        # Round-trip through a dict so the caller's SNL is not mutated.
        duplicate = StructureNL.from_dict(snl.as_dict())
        mp_data = duplicate.data.setdefault("_materialsproject", {})
        mp_data["snl_id"] = snl_id
        mp_data["spacegroup"] = {
            "symbol": sg_symbol,
            "number": sg_num,
            "point_group": pointgroup,
            "crystal_system": xtal_system,
            "hall": hall,
            "lattice_type": lattice_type,
        }
        return MPStructureNL.from_dict(duplicate.as_dict())
예제 #29
0
파일: mpsnl.py 프로젝트: mrunal3092/MPWorks
    def from_snl(snl, snl_id, sg_num, sg_symbol, hall, xtal_system, lattice_type, pointgroup):
        """Copy *snl* and attach spacegroup metadata, returning an MPStructureNL."""
        clone = StructureNL.from_dict(snl.as_dict())
        if '_materialsproject' not in clone.data:
            clone.data['_materialsproject'] = {}
        spacegroup = dict(symbol=sg_symbol, number=sg_num,
                          point_group=pointgroup, crystal_system=xtal_system,
                          hall=hall, lattice_type=lattice_type)
        clone.data['_materialsproject']['snl_id'] = snl_id
        clone.data['_materialsproject']['spacegroup'] = spacegroup
        return MPStructureNL.from_dict(clone.as_dict())
예제 #30
0
    def submit_new_workflow(self):
        """Claim one SUBMITTED job, validate it, and submit its workflow.

        Atomically flips a job's state from SUBMITTED to WAITING, rejects it
        (via self.sma.update_state) when it has too many sites or an invalid
        structure, and otherwise builds an EG workflow and adds it to the
        LaunchPad. Any unexpected failure marks the job ERROR and prints the
        traceback. Returns the claimed job's submission_id, or None when no
        job was claimed.
        """
        # finds a submitted job, creates a workflow, and submits it to FireWorks
        job = self.jobs.find_and_modify({'state': 'SUBMITTED'},
                                        {'$set': {
                                            'state': 'WAITING'
                                        }})
        if job:
            submission_id = job['submission_id']
            # noinspection PyBroadException
            try:
                # Jobs that already carry an snl_id deserialize as EG SNLs.
                if 'snl_id' in job:
                    snl = EGStructureNL.from_dict(job)
                else:
                    snl = StructureNL.from_dict(job)
                if len(snl.structure.sites) > SubmissionProcessorEG.MAX_SITES:
                    self.sma.update_state(submission_id, 'REJECTED',
                                          'too many sites', {})
                    print('REJECTED WORKFLOW FOR {} - too many sites ' \
                          '({})'.format(snl.structure.formula,
                                        len(snl.structure.sites)))
                elif not job['is_valid']:
                    self.sma.update_state(
                        submission_id, 'REJECTED',
                        'invalid structure (atoms too close)', {})
                    print('REJECTED WORKFLOW FOR {} - invalid ' \
                          'structure'.format(snl.structure.formula))
                else:
                    snl.data['_electrolytegenome'] = \
                        snl.data.get('_electrolytegenome', {})
                    snl.data['_electrolytegenome']['submission_id'] \
                        = submission_id

                    # create a workflow
                    wf = snl_to_eg_wf(snl, job['parameters'])
                    self.launchpad.add_wf(wf)
                    print('ADDED WORKFLOW FOR {}'.format(
                        snl.structure.formula))
            except:
                # Best-effort processing loop: mark the job errored and keep going.
                self.jobs.find_and_modify({'submission_id': submission_id},
                                          {'$set': {
                                              'state': 'ERROR'
                                          }})
                traceback.print_exc()

            return submission_id
예제 #31
0
    def run_task(self, fw_spec):
        """Add the spec's SNL to the SNL database and forward mpsnl/snlgroup_id.

        When both 'force_mpsnl' and 'force_snlgroup_id' are present in the
        spec, the database insertion is skipped entirely and the forced
        values are passed through unchanged.
        """
        forced = 'force_mpsnl' in fw_spec and 'force_snlgroup_id' in fw_spec
        if forced:
            print('USING FORCED MPSNL')
            pass_through = {'mpsnl': fw_spec['force_mpsnl'],
                            'snlgroup_id': fw_spec['force_snlgroup_id']}
            return FWAction(update_spec=pass_through)

        adapter = SNLMongoAdapter.auto_load()
        mpsnl, snlgroup_id = adapter.add_snl(
            StructureNL.from_dict(fw_spec['snl']))

        return FWAction(update_spec={'mpsnl': mpsnl.to_dict,
                                     'snlgroup_id': snlgroup_id})
예제 #32
0
    def submit_new_workflow(self):
        """Claim one submitted job, build its workflow, and hand it to FireWorks.

        Returns the claimed job's submission_id (None when the queue is
        empty). On any failure the job is flagged 'error' and the traceback
        is printed so the processing loop can continue.
        """
        # Atomically claim a job by flipping its state from submitted to waiting.
        job = self.jobs.find_and_modify({'state': 'submitted'},
                                        {'$set': {'state': 'waiting'}})
        if not job:
            return None

        submission_id = job['submission_id']
        try:
            snl = StructureNL.from_dict(job)
            mp_data = snl.data.get('_materialsproject', {})
            snl.data['_materialsproject'] = mp_data
            mp_data['submission_id'] = submission_id

            # Build the workflow and queue it on the LaunchPad.
            self.launchpad.add_wf(snl_to_wf(snl))
            print('ADDED WORKFLOW FOR {}'.format(snl.structure.formula))
        except:
            self.jobs.find_and_modify({'submission_id': submission_id},
                                      {'$set': {'state': 'error'}})
            traceback.print_exc()

        return submission_id
예제 #33
0
    def run_task(self, fw_spec):
        """Register the spec's SNL in the EG SNL database and compute its InChI.

        Passes the stored egsnl, its snlgroup_id, and the molecule's InChI
        string downstream via the update_spec.
        """
        adapter = EGSNLMongoAdapter.auto_load()

        # The spec may carry either a serialized dict or an SNL object.
        raw_snl = fw_spec['snl']
        snl = StructureNL.from_dict(raw_snl) if isinstance(raw_snl, dict) else raw_snl
        egsnl, snlgroup_id = adapter.add_snl(snl)

        # Derive the InChI string for the registered molecule via OpenBabel.
        pybel_molecule = BabelMolAdaptor(egsnl.structure).pybel_mol
        inchi_root = pybel_molecule.write(str("inchi")).strip()

        return FWAction(update_spec={
            'egsnl': egsnl.as_dict(),
            'snlgroup_id': snlgroup_id,
            'inchi_root': inchi_root,
        })
def job_is_submittable(job):
    """Return True when *job* passes the pre-submission screens, else False.

    Mirrors the rejection rules in
    mpworks.processors.process_submissions.SubmissionProcessor#submit_new_workflow,
    printing the reason for any rejection.
    """
    snl = StructureNL.from_dict(job)
    # mpworks.processors.process_submissions.SubmissionProcessor#submit_new_workflow
    max_sites = 200  # SubmissionProcessor.MAX_SITES above
    # from mpworks.workflows.wf_utils import NO_POTCARS
    no_potcars = ['Po', 'At', 'Rn', 'Fr', 'Ra', 'Am', 'Cm', 'Bk', 'Cf', 'Es', 'Fm', 'Md', 'No', 'Lr']

    formula = snl.structure.formula
    site_count = len(snl.structure.sites)

    if site_count > max_sites:
        print('REJECTED WORKFLOW FOR {} - too many sites ({})'.format(
            formula, site_count))
        return False
    if not job['is_valid']:
        print('REJECTED WORKFLOW FOR {} - invalid structure (atoms too close)'.format(
            formula))
        return False
    if set(no_potcars) & set(job['elements']):
        print('REJECTED WORKFLOW FOR {} - invalid element (No POTCAR)'.format(
            formula))
        return False
    if not job['is_ordered']:
        print('REJECTED WORKFLOW FOR {} - invalid structure (disordered)'.format(
            formula))
        return False
    return True
예제 #35
0
def icsd_dict_to_snl(icsd_dict):
    """Convert a raw ICSD document into a StructureNL.

    Returns None when the document has no transformed structure ('tstruct').
    All non-excluded fields are copied into the SNL's '_icsd' data section,
    with datetimes serialized to '%Y-%m-%d %H:%M:%S' strings.
    """
    if 'tstruct' not in icsd_dict:
        return None

    struct = Structure.from_dict(icsd_dict['tstruct'])
    references = _get_icsd_reference(icsd_dict)

    # Fields that are redundant with the structure or purely internal.
    # NOTE(review): 'compostion' looks like a typo of 'composition' but may
    # match legacy documents -- kept as-is.
    excluded_data = [
        '_id', 'a_len', 'b_len', 'c_len', 'alpha', 'beta', 'gamma',
        'compostion', 'composition', 'created_at', 'crystal_id', 'idnum',
        'journal', 'tstruct', 'updated_at', 'username'
    ]

    icsd_section = {}
    for key, value in icsd_dict.iteritems():
        if key in excluded_data:
            continue
        if isinstance(value, datetime.datetime):
            value = value.strftime(format='%Y-%m-%d %H:%M:%S')
        icsd_section[key] = value
    data = {'_icsd': icsd_section}

    # Provenance: the ICSD entry itself, then the pymatgen conversion step.
    history = [{
        'name': 'Inorganic Crystal Structure Database (ICSD)',
        'url': 'http://icsd.fiz-karlsruhe.de/',
        'description': {
            'icsd_id': icsd_section['icsd_id']
        }
    }, {
        'name': 'pymatgen',
        'url': 'https://pypi.python.org/pypi/pymatgen',
        'description': {
            'comment': 'converted to explicit structure'
        }
    }]

    authors = 'William Davidson Richards <*****@*****.**>, Shyue Ping Ong <*****@*****.**>, Stephen Dacek <*****@*****.**>, Anubhav Jain <*****@*****.**>'

    # No projects or remarks are recorded for ICSD imports.
    projects = None
    remarks = None

    return StructureNL(struct, authors, projects, references, remarks, data,
                       history)
예제 #36
0
    fws = []
    connections = {}

    # add the root FW (GGA+U)
    spec = _snl_to_spec(snl, enforce_gga=False)
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(Firework(tasks, spec, fw_id=1))

    # add GGA insertion to DB
    spec = {'task_type': 'VASP db insertion', '_priority': 2,
            '_category': 'VASP', '_queueadapter': QA_VASP}
    fws.append(Firework([VaspToDBTask()], spec, fw_id=2))
    connections[1] = 2
    mpvis = MPVaspInputSet()

    spec['vaspinputset_name'] = mpvis.__class__.__name__

    return Workflow(fws, connections, name=Composition(snl.structure.composition.reduced_formula).alphabetical_formula)
"""

if __name__ == '__main__':
    # Smoke test: build duplicate-check workflows for two known structures.
    si_structure = CifParser('test_wfs/Si.cif').get_structures()[0]
    feo_structure = CifParser('test_wfs/FeO.cif').get_structures()[0]

    author = "Anubhav Jain <*****@*****.**>"
    si_snl = StructureNL(si_structure, author)
    feo_snl = StructureNL(feo_structure, author)

    snl_to_wf(si_snl).to_file('test_wfs/wf_si_dupes.json', indent=4)
    snl_to_wf(feo_snl).to_file('test_wfs/wf_feo_dupes.json', indent=4)
예제 #37
0
 def get_snls(self, species):
     """Return StructureNLs for stored structures containing exactly the
     given species (matched on both species count and membership)."""
     query = {'nspecies': len(species), 'species': {'$all': species}}
     return [StructureNL.from_dict(doc['snl'])
             for doc in self.structures.find(query)]
예제 #38
0
    def process_fw(self, old_task, d):
        # AJ - this whole section is different
        sma = SNLMongoAdapter.auto_load()

        d["old_engine"] = old_task.get("engine")
        if "fw_id" in old_task:
            d["old_fw_id"] = old_task["fw_id"]

        d["fw_id"] = None
        d["task_type"] = "GGA+U optimize structure (2x)" if old_task["is_hubbard"] else "GGA optimize structure (2x)"
        d["submission_id"] = None
        d["vaspinputset_name"] = None

        snl_d = sma.snl.find_one({"about._materialsproject.deprecated.mps_ids": old_task["mps_id"]})
        if old_task.get("mps_id", -1) > 0 and snl_d:
            # grab the SNL from the SNL db
            del snl_d["_id"]
            d["snl"] = snl_d
            d["snlgroup_id"] = sma.snlgroups.find_one({"all_snl_ids": d["snl"]["snl_id"]}, {"snlgroup_id": 1})[
                "snlgroup_id"
            ]

        elif "mps" in old_task and old_task["mps"]:
            snl = mps_dict_to_snl(old_task["mps"])
            mpsnl, snlgroup_id = sma.add_snl(snl)
            d["snl"] = mpsnl.as_dict()
            d["snlgroup_id"] = snlgroup_id
        else:
            s = Structure.from_dict(old_task["input"]["crystal"])
            snl = StructureNL(s, "Anubhav Jain <*****@*****.**>", remarks=["origin unknown"])
            mpsnl, snlgroup_id = sma.add_snl(snl)
            d["snl"] = mpsnl.as_dict()
            d["snlgroup_id"] = snlgroup_id

        if "optimize structure" in d["task_type"] and "output" in d:
            # create a new SNL based on optimized structure
            new_s = Structure.from_dict(d["output"]["crystal"])
            old_snl = StructureNL.from_dict(d["snl"])
            history = old_snl.history
            history.append(
                {
                    "name": "Materials Project structure optimization",
                    "url": "http://www.materialsproject.org",
                    "description": {"task_type": d["task_type"], "fw_id": d["fw_id"], "task_id": d["task_id"]},
                }
            )
            new_snl = StructureNL(
                new_s, old_snl.authors, old_snl.projects, old_snl.references, old_snl.remarks, old_snl.data, history
            )

            # add snl
            mpsnl, snlgroup_id = sma.add_snl(new_snl, snlgroup_guess=d["snlgroup_id"])

            d["snl_final"] = mpsnl.as_dict()
            d["snlgroup_id_final"] = snlgroup_id
            d["snlgroup_changed"] = d["snlgroup_id"] != d["snlgroup_id_final"]

        # custom processing for detecting errors
        dir_name = old_task["dir_name"]
        new_style = os.path.exists(os.path.join(dir_name, "FW.json"))
        vasp_signals = {}
        critical_errors = [
            "INPUTS_DONT_EXIST",
            "OUTPUTS_DONT_EXIST",
            "INCOHERENT_POTCARS",
            "VASP_HASNT_STARTED",
            "VASP_HASNT_COMPLETED",
            "CHARGE_UNCONVERGED",
            "NETWORK_QUIESCED",
            "HARD_KILLED",
            "WALLTIME_EXCEEDED",
            "ATOMS_TOO_CLOSE",
            "DISK_SPACE_EXCEEDED",
        ]

        last_relax_dir = dir_name

        if not new_style:
            # get the last relaxation dir
            # the order is relax2, current dir, then relax1. This is because
            # after completing relax1, the job happens in the current dir.
            # Finally, it gets moved to relax2.
            # There are some weird cases where both the current dir and relax2
            # contain data. The relax2 is good, but the current dir is bad.
            if is_valid_vasp_dir(os.path.join(dir_name, "relax2")):
                last_relax_dir = os.path.join(dir_name, "relax2")
            elif is_valid_vasp_dir(dir_name):
                pass
            elif is_valid_vasp_dir(os.path.join(dir_name, "relax1")):
                last_relax_dir = os.path.join(dir_name, "relax1")

        vasp_signals["last_relax_dir"] = last_relax_dir
        ## see what error signals are present

        print "getting signals for dir :{}".format(last_relax_dir)

        sl = SignalDetectorList()
        sl.append(VASPInputsExistSignal())
        sl.append(VASPOutputsExistSignal())
        sl.append(VASPOutSignal())
        sl.append(HitAMemberSignal())
        sl.append(SegFaultSignal())
        sl.append(VASPStartedCompletedSignal())

        signals = sl.detect_all(last_relax_dir)

        signals = signals.union(WallTimeSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(WallTimeSignal().detect(root_dir))

        signals = signals.union(DiskSpaceExceededSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(DiskSpaceExceededSignal().detect(root_dir))

        signals = list(signals)

        critical_signals = [val for val in signals if val in critical_errors]

        vasp_signals["signals"] = signals
        vasp_signals["critical_signals"] = critical_signals

        vasp_signals["num_signals"] = len(signals)
        vasp_signals["num_critical"] = len(critical_signals)

        if len(critical_signals) > 0 and d["state"] == "successful":
            d["state"] = "error"

        d["analysis"] = d.get("analysis", {})
        d["analysis"]["errors_MP"] = vasp_signals

        d["run_tags"] = ["PBE"]
        d["run_tags"].extend(d["pseudo_potential"]["labels"])
        d["run_tags"].extend([e + "=" + str(d["hubbards"].get(e, 0)) for e in d["elements"]])
예제 #39
0
def submit_tests(names=None, params=None):
    """Submit a batch of known test compounds to the submission database.

    Args:
        names: optional collection of compound names; when falsy, every
            compound in the built-in table is submitted.
        params: optional dict of extra submission parameters merged into each
            submission's parameters.
    """
    sma = SubmissionMongoAdapter.auto_load()

    # note: TiO2 is duplicated twice purposely, duplicate check should catch this
    compounds = {
        "Si": 149,
        "Al": 134,
        "ZnO": 2133,
        "FeO": 18905,
        "LiCoO2": 601860,
        "LiFePO4": 585433,
        "GaAs": 2534,
        "Ge": 32,
        "PbTe": 19717,
        "YbO": 1216,
        "SiC": 567551,
        "Fe3C": 510623,
        "SiO2": 547211,
        "Na2O": 2352,
        "InSb (unstable)": 10148,
        "Sb2O5": 1705,
        "N2O5": 554368,
        "BaTiO3": 5020,
        "Rb2O": 1394,
        "TiO2": 554278,
        "TiO2 (2)": 554278,
        'BaNbTePO8': 560794,
        "AgCl": 22922,
        "AgCl (2)": 570858,
        "SiO2 (2)": 555211,
        "Mg2SiO4": 2895,
        "CO2": 20066,
        "PbSO4": 22298,
        "SrTiO3": 5532,
        "FeAl": 2658,
        "AlFeCo2": 10884,
        "NaCoO2": 554427,
        "ReO3": 547271,
        "LaH2": 24153,
        "SiH3I": 28538,
        "LiBH4": 30209,
        "H8S5N2": 28143,
        "LiOH": 23856,
        "SrO2": 2697,
        "Mn": 35,
        "Hg4Pt": 2312,
        "PdF4": 13868,
        "Gd2WO6": 651333,
        'MnO2': 19395,
        'VO2': 504800
    }

    mpr = MPRester()

    # .items() (rather than .iteritems()) works on both Python 2 and 3.
    for name, sid in compounds.items():
        if not names or name in names:
            sid = mpr.get_materials_id_from_task_id("mp-{}".format(sid))
            s = mpr.get_structure_by_material_id(sid, final=False)

            snl = StructureNL(s, 'Anubhav Jain <*****@*****.**>')

            # Only Si gets a default priority; everything else has None.
            parameters = {'priority': 10} if name == 'Si' else None
            if params:
                # BUG FIX: previously `parameters.update(params)` raised
                # AttributeError for every compound except 'Si', because
                # `parameters` was None. Start from an empty dict instead;
                # when `params` is falsy, `parameters` stays None as before.
                parameters = dict(parameters or {})
                parameters.update(params)
            sma.submit_snl(snl, '*****@*****.**', parameters=parameters)
예제 #40
0
    def run_task(self, fw_spec):
        """Parse the previous VASP run with MPVaspDrone and insert it into the tasks DB.

        Locates the previous run directory (handling the fizzled-parent case),
        optionally moves it to the garden, assimilates it, and passes the
        resulting mpsnl/snlgroup_id downstream. On an unconverged (but not
        fizzled) run, returns a detour workflow that re-runs with the
        unconverged handler; any other failure raises ValueError so the
        FireWork fizzles.
        """
        if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec:
            # Parent fizzled: recover its launch dir; spec updates come later.
            prev_dir = get_loc(fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
            update_spec = {}  # add this later when creating new FW
            fizzled_parent = True
            parse_dos = False
        else:
            prev_dir = get_loc(fw_spec['prev_vasp_dir'])
            update_spec = {'prev_vasp_dir': prev_dir,
                           'prev_task_type': fw_spec['prev_task_type'],
                           'run_tags': fw_spec['run_tags'], 'parameters': fw_spec.get('parameters')}
            fizzled_parent = False
            parse_dos = 'Uniform' in fw_spec['prev_task_type']
        if 'run_tags' in fw_spec:
            self.additional_fields['run_tags'] = fw_spec['run_tags']
        else:
            self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][0]['spec']['run_tags']

        if MOVE_TO_GARDEN_DEV:
            prev_dir = move_to_garden(prev_dir, prod=False)

        elif MOVE_TO_GARDEN_PROD:
            prev_dir = move_to_garden(prev_dir, prod=True)

        # get the directory containing the db file
        db_dir = os.environ['DB_LOC']
        db_path = os.path.join(db_dir, 'tasks_db.json')

        logging.basicConfig(level=logging.INFO)
        logger = logging.getLogger('MPVaspDrone')
        logger.setLevel(logging.INFO)
        sh = logging.StreamHandler(stream=sys.stdout)
        sh.setLevel(getattr(logging, 'INFO'))
        logger.addHandler(sh)

        with open(db_path) as f:
            db_creds = json.load(f)
            drone = MPVaspDrone(
                host=db_creds['host'], port=db_creds['port'],
                database=db_creds['database'], user=db_creds['admin_user'],
                password=db_creds['admin_password'],
                collection=db_creds['collection'], parse_dos=parse_dos,
                additional_fields=self.additional_fields,
                update_duplicates=self.update_duplicates)
            t_id, d = drone.assimilate(prev_dir, launches_coll=LaunchPad.auto_load().launches)

        # Prefer the post-optimization SNL/group when the drone produced one.
        mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
        snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d['snlgroup_id']
        update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})

        print 'ENTERED task id:', t_id
        stored_data = {'task_id': t_id}
        if d['state'] == 'successful':
            update_spec['analysis'] = d['analysis']
            update_spec['output'] = d['output']
            return FWAction(stored_data=stored_data, update_spec=update_spec)

        # not successful - first test to see if UnconvergedHandler is needed
        if not fizzled_parent:
            unconverged_tag = 'unconverged_handler--{}'.format(fw_spec['prev_task_type'])
            output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
            ueh = UnconvergedErrorHandler(output_filename=output_dir)
            # Only detour once per task type: the tag guards against loops.
            if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
                print 'Unconverged run! Creating dynamic FW...'

                spec = {'prev_vasp_dir': prev_dir,
                        'prev_task_type': fw_spec['task_type'],
                        'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id,
                        'task_type': fw_spec['prev_task_type'],
                        'run_tags': list(fw_spec['run_tags']),
                        'parameters': fw_spec.get('parameters'),
                        '_dupefinder': DupeFinderVasp().to_dict(),
                        '_priority': fw_spec['_priority']}

                snl = StructureNL.from_dict(spec['mpsnl'])
                spec['run_tags'].append(unconverged_tag)
                spec['_queueadapter'] = QA_VASP

                fws = []
                connections = {}

                f = Composition.from_formula(
                    snl.structure.composition.reduced_formula).alphabetical_formula

                fws.append(FireWork(
                    [VaspCopyTask({'files': ['INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CONTCAR'],
                                   'use_CONTCAR': False}), SetupUnconvergedHandlerTask(),
                     get_custodian_task(spec)], spec, name=get_slug(f + '--' + spec['task_type']),
                    fw_id=-2))

                spec = {'task_type': 'VASP db insertion', '_allow_fizzled_parents': True,
                        '_priority': fw_spec['_priority'], '_queueadapter': QA_DB,
                        'run_tags': list(fw_spec['run_tags'])}
                spec['run_tags'].append(unconverged_tag)
                fws.append(
                    FireWork([VaspToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-1))
                connections[-2] = -1

                wf = Workflow(fws, connections)

                return FWAction(detours=wf)

        # not successful and not due to convergence problem - FIZZLE
        raise ValueError("DB insertion successful, but don't know how to fix this FireWork! Can't continue with workflow...")
예제 #41
0
    def run_task(self, fw_spec):
        """If the computed band gap meets gap_cutoff, add follow-on runs.

        Builds a chain of static, uniform, and band-structure runs for the
        same functional (GGA or GGA+U), each followed by a VASP db insertion,
        and returns them as workflow additions. Returns an empty FWAction when
        the gap is below the cutoff.
        """
        from mpworks.workflows.snl_to_wf import _get_metadata, \
            _get_custodian_task
        # TODO: only add the workflow if the gap is > 1.0 eV
        # TODO: add stored data?

        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'

            snl = StructureNL.from_dict(fw_spec['mpsnl'])

            fws = []
            connections = {}

            # run GGA static
            spec = fw_spec  # pass all the items from the current spec to the new
            #  one
            # NOTE(review): spec aliases fw_spec, so this update also mutates
            # the incoming spec -- confirm intended.
            spec.update({'task_type': '{} static'.format(type_name),
                         '_dupefinder': DupeFinderVasp().to_dict()})
            spec.update(_get_metadata(snl))
            fws.append(
                FireWork(
                    [VaspCopyTask({'extension': '.relax2'}), SetupStaticRunTask(),
                     _get_custodian_task(spec)], spec, name=spec['task_type'], fw_id=-10))

            # insert into DB - GGA static
            spec = {'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True}
            spec.update(_get_metadata(snl))
            fws.append(
                FireWork([VaspToDBTask()], spec, name=spec['task_type'], fw_id=-9))
            connections[-10] = -9

            # run GGA Uniform
            spec = {'task_type': '{} Uniform'.format(type_name),
                    '_dupefinder': DupeFinderVasp().to_dict()}
            spec.update(_get_metadata(snl))
            fws.append(FireWork(
                [VaspCopyTask(), SetupNonSCFTask({'mode': 'uniform'}),
                 _get_custodian_task(spec)], spec, name=spec['task_type'], fw_id=-8))
            connections[-9] = -8

            # insert into DB - GGA Uniform
            spec = {'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True}
            spec.update(_get_metadata(snl))
            fws.append(
                FireWork([VaspToDBTask({'parse_uniform': True})], spec, name=spec['task_type'],
                         fw_id=-7))
            connections[-8] = -7

            # run GGA Band structure
            spec = {'task_type': '{} band structure'.format(type_name),
                    '_dupefinder': DupeFinderVasp().to_dict()}
            spec.update(_get_metadata(snl))
            fws.append(FireWork([VaspCopyTask(), SetupNonSCFTask({'mode': 'line'}),
                                 _get_custodian_task(spec)], spec, name=spec['task_type'],
                                fw_id=-6))
            connections[-7] = -6

            # insert into DB - GGA Band structure
            spec = {'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True}
            spec.update(_get_metadata(snl))
            fws.append(FireWork([VaspToDBTask({})], spec, name=spec['task_type'], fw_id=-5))
            connections[-6] = -5

            wf = Workflow(fws, connections)

            return FWAction(additions=wf)
        return FWAction()
예제 #42
0
    def run_task(self, fw_spec):
        """If the computed band gap meets gap_cutoff, add follow-on runs.

        Builds static, uniform, and band-structure runs for the same
        functional (GGA or GGA+U), each followed by a VASP db insertion, wired
        together via negative placeholder fw_ids, and returns them as workflow
        additions. Returns an empty FWAction when the gap is below the cutoff.
        """
        # Give Mongo time to settle before reading the analysis results.
        print "sleeping 10s for Mongo"
        time.sleep(10)
        print "done sleeping"
        print "the gap is {}, the cutoff is {}".format(fw_spec["analysis"]["bandgap"], self.gap_cutoff)

        if fw_spec["analysis"]["bandgap"] >= self.gap_cutoff:
            print "Adding more runs..."
            type_name = "GGA+U" if "GGA+U" in fw_spec["prev_task_type"] else "GGA"

            snl = StructureNL.from_dict(fw_spec["mpsnl"])
            f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

            fws = []
            connections = {}

            priority = fw_spec["_priority"]

            # run GGA static
            spec = fw_spec  # pass all the items from the current spec to the new
            #  one
            # NOTE(review): spec aliases fw_spec, so this update also mutates
            # the incoming spec -- confirm intended.
            spec.update(
                {
                    "task_type": "{} static".format(type_name),
                    "_queueadapter": QA_VASP,
                    "_dupefinder": DupeFinderVasp().to_dict(),
                    "_priority": priority,
                }
            )
            fws.append(
                Firework(
                    [VaspCopyTask({"use_CONTCAR": True}), SetupStaticRunTask(), get_custodian_task(spec)],
                    spec,
                    name=get_slug(f + "--" + spec["task_type"]),
                    fw_id=-10,
                )
            )

            # insert into DB - GGA static
            spec = {
                "task_type": "VASP db insertion",
                "_queueadapter": QA_DB,
                "_allow_fizzled_parents": True,
                "_priority": priority,
                "_dupefinder": DupeFinderDB().to_dict(),
            }
            fws.append(Firework([VaspToDBTask()], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-9))
            connections[-10] = -9

            # run GGA Uniform
            spec = {
                "task_type": "{} Uniform".format(type_name),
                "_queueadapter": QA_VASP,
                "_dupefinder": DupeFinderVasp().to_dict(),
                "_priority": priority,
            }
            fws.append(
                Firework(
                    [
                        VaspCopyTask({"use_CONTCAR": False}),
                        SetupNonSCFTask({"mode": "uniform"}),
                        get_custodian_task(spec),
                    ],
                    spec,
                    name=get_slug(f + "--" + spec["task_type"]),
                    fw_id=-8,
                )
            )
            connections[-9] = -8

            # insert into DB - GGA Uniform
            spec = {
                "task_type": "VASP db insertion",
                "_queueadapter": QA_DB,
                "_allow_fizzled_parents": True,
                "_priority": priority,
                "_dupefinder": DupeFinderDB().to_dict(),
            }
            fws.append(
                Firework(
                    [VaspToDBTask({"parse_uniform": True})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-7
                )
            )
            connections[-8] = -7

            # run GGA Band structure
            spec = {
                "task_type": "{} band structure".format(type_name),
                "_queueadapter": QA_VASP,
                "_dupefinder": DupeFinderVasp().to_dict(),
                "_priority": priority,
            }
            fws.append(
                Firework(
                    [VaspCopyTask({"use_CONTCAR": False}), SetupNonSCFTask({"mode": "line"}), get_custodian_task(spec)],
                    spec,
                    name=get_slug(f + "--" + spec["task_type"]),
                    fw_id=-6,
                )
            )
            connections[-7] = -6

            # insert into DB - GGA Band structure
            spec = {
                "task_type": "VASP db insertion",
                "_queueadapter": QA_DB,
                "_allow_fizzled_parents": True,
                "_priority": priority,
                "_dupefinder": DupeFinderDB().to_dict(),
            }
            fws.append(Firework([VaspToDBTask({})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-5))
            connections[-6] = -5

            wf = Workflow(fws, connections)

            print "Done adding more runs..."

            return FWAction(additions=wf)
        return FWAction()
예제 #43
0
    def run_task(self, fw_spec):
        """If the computed band gap meets gap_cutoff, add follow-on runs.

        Builds static, uniform, and band-structure runs for the same
        functional (GGA or GGA+U), each followed by a VASP db insertion, wired
        together via negative placeholder fw_ids, and returns them as workflow
        additions. Returns an empty FWAction when the gap is below the cutoff.
        """
        # Give Mongo time to settle before reading the analysis results.
        print 'sleeping 10s for Mongo'
        time.sleep(10)
        print 'done sleeping'
        print 'the gap is {}, the cutoff is {}'.format(
            fw_spec['analysis']['bandgap'], self.gap_cutoff)

        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            print 'Adding more runs...'
            type_name = 'GGA+U' if 'GGA+U' in fw_spec[
                'prev_task_type'] else 'GGA'

            snl = StructureNL.from_dict(fw_spec['mpsnl'])
            f = Composition(
                snl.structure.composition.reduced_formula).alphabetical_formula

            fws = []
            connections = {}

            priority = fw_spec['_priority']

            # run GGA static
            spec = fw_spec  # pass all the items from the current spec to the new
            #  one
            # NOTE(review): spec aliases fw_spec, so this update also mutates
            # the incoming spec -- confirm intended.
            spec.update({
                'task_type': '{} static'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority
            })
            fws.append(
                Firework([
                    VaspCopyTask({'use_CONTCAR': True}),
                    SetupStaticRunTask(),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-10))

            # insert into DB - GGA static
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()
            }
            fws.append(
                Firework([VaspToDBTask()],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-9))
            connections[-10] = -9

            # run GGA Uniform
            spec = {
                'task_type': '{} Uniform'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority
            }
            fws.append(
                Firework([
                    VaspCopyTask({'use_CONTCAR': False}),
                    SetupNonSCFTask({'mode': 'uniform'}),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-8))
            connections[-9] = -8

            # insert into DB - GGA Uniform
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()
            }
            fws.append(
                Firework([VaspToDBTask({'parse_uniform': True})],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-7))
            connections[-8] = -7

            # run GGA Band structure
            spec = {
                'task_type': '{} band structure'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority
            }
            fws.append(
                Firework([
                    VaspCopyTask({'use_CONTCAR': False}),
                    SetupNonSCFTask({'mode': 'line'}),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-6))
            connections[-7] = -6

            # insert into DB - GGA Band structure
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()
            }
            fws.append(
                Firework([VaspToDBTask({})],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-5))
            connections[-6] = -5

            wf = Workflow(fws, connections)

            print 'Done adding more runs...'

            return FWAction(additions=wf)
        return FWAction()
예제 #44
0
    def process_fw(self, dir_name, d):
        """Apply Materials Project post-processing to task document ``d`` in place.

        Reads ``FW.json`` from ``dir_name`` to attach FireWorks provenance
        (fw_id, SNL, snlgroup_id, task type), creates and registers a new SNL
        for completed structure optimizations, then runs error-signal
        detection over the VASP run directories and stores the result under
        ``d['analysis']['errors_MP']``.  May flip ``d['state']`` from
        "successful" to "error" if critical signals are found.

        Args:
            dir_name: path to the VASP run directory being assimilated.
            d: task document dict produced by the drone; mutated in place.
        """
        d["task_id_deprecated"] = int(
            d["task_id"].split('-')[-1])  # useful for WC and AJ

        # update the run fields to give species group in root, if exists
        for r in d['run_tags']:
            if "species_group=" in r:
                d["species_group"] = int(r.split("=")[-1])
                break

        # custom Materials Project post-processing for FireWorks
        # (zopen/zpath allow the FW.json to be gzipped)
        with zopen(zpath(os.path.join(dir_name, 'FW.json'))) as f:
            fw_dict = json.load(f)
            d['fw_id'] = fw_dict['fw_id']
            d['snl'] = fw_dict['spec']['mpsnl']
            d['snlgroup_id'] = fw_dict['spec']['snlgroup_id']
            d['vaspinputset_name'] = fw_dict['spec'].get('vaspinputset_name')
            d['task_type'] = fw_dict['spec']['task_type']
            # Process data for deformed structures
            if 'deformed' in d['task_type']:
                d['deformation_matrix'] = fw_dict['spec']['deformation_matrix']
                d['original_task_id'] = fw_dict['spec']['original_task_id']
            if not self.update_duplicates:
                if 'optimize structure' in d['task_type'] and 'output' in d:
                    # create a new SNL based on optimized structure
                    new_s = Structure.from_dict(d['output']['crystal'])
                    old_snl = StructureNL.from_dict(d['snl'])
                    # append a provenance node recording this optimization
                    # NOTE(review): this mutates old_snl.history directly
                    history = old_snl.history
                    history.append({
                        'name': 'Materials Project structure optimization',
                        'url': 'http://www.materialsproject.org',
                        'description': {
                            'task_type': d['task_type'],
                            'fw_id': d['fw_id'],
                            'task_id': d['task_id']
                        }
                    })
                    new_snl = StructureNL(new_s, old_snl.authors,
                                          old_snl.projects, old_snl.references,
                                          old_snl.remarks, old_snl.data,
                                          history)

                    # enter new SNL into SNL db
                    # get the SNL mongo adapter
                    sma = SNLMongoAdapter.auto_load()

                    # add snl; seed grouping with the pre-optimization group id
                    mpsnl, snlgroup_id, spec_group = sma.add_snl(
                        new_snl, snlgroup_guess=d['snlgroup_id'])
                    d['snl_final'] = mpsnl.as_dict()
                    d['snlgroup_id_final'] = snlgroup_id
                    d['snlgroup_changed'] = (d['snlgroup_id'] !=
                                             d['snlgroup_id_final'])
                else:
                    # not an optimization (or no output): final SNL == input SNL
                    d['snl_final'] = d['snl']
                    d['snlgroup_id_final'] = d['snlgroup_id']
                    d['snlgroup_changed'] = False

        # custom processing for detecting errors
        # "new style" runs have FW.json directly in dir_name (no relax1/relax2)
        new_style = os.path.exists(zpath(os.path.join(dir_name, 'FW.json')))
        vasp_signals = {}
        critical_errors = [
            "INPUTS_DONT_EXIST", "OUTPUTS_DONT_EXIST", "INCOHERENT_POTCARS",
            "VASP_HASNT_STARTED", "VASP_HASNT_COMPLETED", "CHARGE_UNCONVERGED",
            "NETWORK_QUIESCED", "HARD_KILLED", "WALLTIME_EXCEEDED",
            "ATOMS_TOO_CLOSE", "DISK_SPACE_EXCEEDED", "NO_RELAX2",
            "POSITIVE_ENERGY"
        ]

        last_relax_dir = dir_name

        if not new_style:
            # get the last relaxation dir
            # the order is relax2, current dir, then relax1. This is because
            # after completing relax1, the job happens in the current dir.
            # Finally, it gets moved to relax2.
            # There are some weird cases where both the current dir and relax2
            # contain data. The relax2 is good, but the current dir is bad.
            if is_valid_vasp_dir(os.path.join(dir_name, "relax2")):
                last_relax_dir = os.path.join(dir_name, "relax2")
            elif is_valid_vasp_dir(dir_name):
                pass
            elif is_valid_vasp_dir(os.path.join(dir_name, "relax1")):
                last_relax_dir = os.path.join(dir_name, "relax1")

        vasp_signals['last_relax_dir'] = last_relax_dir
        ## see what error signals are present

        print "getting signals for dir :{}".format(last_relax_dir)

        sl = SignalDetectorList()
        sl.append(VASPInputsExistSignal())
        sl.append(VASPOutputsExistSignal())
        sl.append(VASPOutSignal())
        sl.append(HitAMemberSignal())
        sl.append(SegFaultSignal())
        sl.append(VASPStartedCompletedSignal())

        # only successful optimizations are expected to produce a relax2 dir
        if d['state'] == 'successful' and 'optimize structure' in d[
                'task_type']:
            sl.append(Relax2ExistsSignal())

        signals = sl.detect_all(last_relax_dir)

        # walltime/disk-space markers may live one level up for old-style runs
        signals = signals.union(WallTimeSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(WallTimeSignal().detect(root_dir))

        signals = signals.union(DiskSpaceExceededSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(DiskSpaceExceededSignal().detect(root_dir))

        # NOTE(review): final_energy may be missing -> None; "None > 0" is
        # False under Python 2 but raises TypeError under Python 3
        if d.get('output', {}).get('final_energy', None) > 0:
            signals.add('POSITIVE_ENERGY')

        signals = list(signals)

        critical_signals = [val for val in signals if val in critical_errors]

        vasp_signals['signals'] = signals
        vasp_signals['critical_signals'] = critical_signals

        vasp_signals['num_signals'] = len(signals)
        vasp_signals['num_critical'] = len(critical_signals)

        # any critical signal demotes a "successful" run to "error"
        if len(critical_signals) > 0 and d['state'] == "successful":
            d["state"] = "error"

        d['analysis'] = d.get('analysis', {})
        d['analysis']['errors_MP'] = vasp_signals
예제 #45
0
    def run_task(self, fw_spec):
        """Append v2 static / uniform / band-structure follow-up FireWorks.

        Chooses k-point densities from the computed band gap
        (``fw_spec['analysis']['bandgap']`` vs. ``self.gap_cutoff``), applies
        metallic smearing (ISMEAR=1) below ``self.metal_cutoff``, then builds
        a chain of FireWorks: static -> DB insert -> uniform -> DB insert ->
        band structure -> DB insert, with an optional Boltztrap branch hanging
        off the uniform DB-insert step.

        Args:
            fw_spec: spec of the current Firework; must contain 'analysis',
                'prev_task_type', 'mpsnl' and '_priority'.

        Returns:
            FWAction whose ``additions`` is the new Workflow.
        """
        print 'sleeping 10s for Mongo'
        time.sleep(10)
        print 'done sleeping'
        print 'the gap is {}, the cutoff is {}'.format(fw_spec['analysis']['bandgap'], self.gap_cutoff)
        # wide-gap materials get coarser meshes; small-gap/metallic get denser
        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            static_dens = 90
            uniform_dens = 1000
            line_dens = 20
        else:
            static_dens = 450
            uniform_dens = 1500
            line_dens = 30

        # metals need Methfessel-Paxton smearing (ISMEAR=1)
        if fw_spec['analysis']['bandgap'] <= self.metal_cutoff:
            user_incar_settings = {"ISMEAR": 1, "SIGMA": 0.2}
        else:
            user_incar_settings = {}

        print 'Adding more runs...'

        type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'

        snl = StructureNL.from_dict(fw_spec['mpsnl'])
        f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

        fws = []
        connections = {}

        priority = fw_spec['_priority']
        trackers = [Tracker('FW_job.out'), Tracker('FW_job.error'), Tracker('vasp.out'), Tracker('OUTCAR'), Tracker('OSZICAR')]
        trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

        # run GGA static
        # NOTE(review): spec aliases fw_spec, so this update() also mutates
        # the incoming fw_spec dict
        spec = fw_spec  # pass all the items from the current spec to the new
        spec.update({'task_type': '{} static v2'.format(type_name), '_queueadapter': QA_VASP_SMALL,
                     '_dupefinder': DupeFinderVasp().to_dict(), '_priority': priority, '_trackers': trackers})
        fws.append(
            Firework(
                [VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True}), SetupStaticRunTask({"kpoints_density": static_dens, 'user_incar_settings': user_incar_settings}),
                 get_custodian_task(spec)], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-10))

        # insert into DB - GGA static (DB steps run at double priority)
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority*2, "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
        fws.append(
            Firework([VaspToDBTask()], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-9))
        connections[-10] = -9

        # run GGA Uniform
        spec = {'task_type': '{} Uniform v2'.format(type_name), '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(), '_priority': priority, '_trackers': trackers}
        fws.append(Firework(
            [VaspCopyTask({'use_CONTCAR': False}), SetupNonSCFTask({'mode': 'uniform', "kpoints_density": uniform_dens}),
             get_custodian_task(spec)], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-8))
        connections[-9] = -8

        # insert into DB - GGA Uniform
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority*2, "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
        fws.append(
            Firework([VaspToDBTask({'parse_uniform': True})], spec, name=get_slug(f+'--'+spec['task_type']),
                     fw_id=-7))
        connections[-8] = -7

        # run GGA Band structure
        spec = {'task_type': '{} band structure v2'.format(type_name), '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(), '_priority': priority, '_trackers': trackers}
        fws.append(Firework([VaspCopyTask({'use_CONTCAR': False}), SetupNonSCFTask({'mode': 'line', "kpoints_line_density": line_dens}),
                             get_custodian_task(spec)], spec, name=get_slug(f+'--'+spec['task_type']),
                            fw_id=-6))
        # stored as a list so the Boltztrap branch below can be appended
        connections[-7] = [-6]

        # insert into DB - GGA Band structure
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority*2, "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
        fws.append(Firework([VaspToDBTask({})], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-5))
        connections[-6] = -5


        if fw_spec.get('parameters') and fw_spec['parameters'].get('boltztrap'):
            # run Boltztrap as a sibling of the band-structure run,
            # downstream of the uniform DB insertion (-7)
            spec = {'task_type': '{} Boltztrap'.format(type_name), '_queueadapter': QA_DB,
                    '_dupefinder': DupeFinderDB().to_dict(), '_priority': priority}
            fws.append(Firework(
                [BoltztrapRunTask()], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-4))
            connections[-7].append(-4)

        wf = Workflow(fws, connections)

        print 'Done adding more runs...'

        return FWAction(additions=wf)
예제 #46
0
    def run_task(self, fw_spec):
        """Assimilate a finished (or fizzled) VASP run into the tasks DB.

        Locates the previous run directory (from the fizzled parent launch or
        ``fw_spec['prev_vasp_dir']``), optionally relocates it to the
        "garden", then drives MPVaspDrone to insert the run into MongoDB.
        On success, passes analysis/output data downstream via update_spec.
        On failure caused by electronic non-convergence, creates a detour
        workflow that re-runs the step with the unconverged handler; any
        other failure raises ValueError to FIZZLE the Firework.

        Args:
            fw_spec: spec of the current Firework.

        Returns:
            FWAction with update_spec (success) or detours (unconverged).

        Raises:
            ValueError: run failed for a reason this task cannot repair.
        """
        if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec:
            # parent fizzled: recover its launch dir instead of prev_vasp_dir
            prev_dir = get_loc(
                fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
            update_spec = {}  # add this later when creating new FW
            fizzled_parent = True
            parse_dos = False
        else:
            prev_dir = get_loc(fw_spec['prev_vasp_dir'])
            update_spec = {
                'prev_vasp_dir': prev_dir,
                'prev_task_type': fw_spec['prev_task_type'],
                'run_tags': fw_spec['run_tags'],
                'parameters': fw_spec.get('parameters')
            }
            fizzled_parent = False
            # DOS is only parsed for Uniform (dense k-mesh) runs
            parse_dos = 'Uniform' in fw_spec['prev_task_type']
        if 'run_tags' in fw_spec:
            self.additional_fields['run_tags'] = fw_spec['run_tags']
        else:
            # fizzled parent: take run_tags from the parent's spec
            self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][
                0]['spec']['run_tags']

        if MOVE_TO_GARDEN_DEV:
            prev_dir = move_to_garden(prev_dir, prod=False)

        elif MOVE_TO_GARDEN_PROD:
            prev_dir = move_to_garden(prev_dir, prod=True)

        # get the directory containing the db file
        db_dir = os.environ['DB_LOC']
        db_path = os.path.join(db_dir, 'tasks_db.json')

        logging.basicConfig(level=logging.INFO)
        logger = logging.getLogger('MPVaspDrone')
        logger.setLevel(logging.INFO)
        sh = logging.StreamHandler(stream=sys.stdout)
        sh.setLevel(getattr(logging, 'INFO'))
        logger.addHandler(sh)
        with open(db_path) as f:
            db_creds = json.load(f)
            drone = MPVaspDrone(host=db_creds['host'],
                                port=db_creds['port'],
                                database=db_creds['database'],
                                user=db_creds['admin_user'],
                                password=db_creds['admin_password'],
                                collection=db_creds['collection'],
                                parse_dos=parse_dos,
                                additional_fields=self.additional_fields,
                                update_duplicates=self.update_duplicates)
            t_id, d = drone.assimilate(
                prev_dir, launches_coll=LaunchPad.auto_load().launches)

        # prefer the post-optimization SNL/group if the drone produced one
        mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
        snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d[
            'snlgroup_id']
        update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})

        print 'ENTERED task id:', t_id
        stored_data = {'task_id': t_id}
        if d['state'] == 'successful':
            update_spec['analysis'] = d['analysis']
            update_spec['output'] = d['output']
            update_spec['vasp'] = {
                'incar': d['calculations'][-1]['input']['incar'],
                'kpoints': d['calculations'][-1]['input']['kpoints']
            }
            update_spec["task_id"] = t_id
            return FWAction(stored_data=stored_data, update_spec=update_spec)

        # not successful - first test to see if UnconvergedHandler is needed
        if not fizzled_parent:
            # tag prevents an infinite retry loop: only detour once per type
            unconverged_tag = 'unconverged_handler--{}'.format(
                fw_spec['prev_task_type'])
            output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
            ueh = UnconvergedErrorHandler(output_filename=output_dir)
            # TODO: make this a little more flexible
            if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
                print 'Unconverged run! Creating dynamic FW...'

                spec = {
                    'prev_vasp_dir': prev_dir,
                    'prev_task_type': fw_spec['task_type'],
                    'mpsnl': mpsnl,
                    'snlgroup_id': snlgroup_id,
                    'task_type': fw_spec['prev_task_type'],
                    'run_tags': list(fw_spec['run_tags']),
                    'parameters': fw_spec.get('parameters'),
                    '_dupefinder': DupeFinderVasp().to_dict(),
                    '_priority': fw_spec['_priority']
                }
                # Pass elastic tensor spec
                if 'deformation_matrix' in fw_spec.keys():
                    spec['deformation_matrix'] = fw_spec['deformation_matrix']
                    spec['original_task_id'] = fw_spec['original_task_id']
                snl = StructureNL.from_dict(spec['mpsnl'])
                spec['run_tags'].append(unconverged_tag)
                spec['_queueadapter'] = QA_VASP

                fws = []
                connections = {}

                f = Composition(snl.structure.composition.reduced_formula
                                ).alphabetical_formula

                # re-run VASP with the unconverged handler applied
                fws.append(
                    Firework([
                        VaspCopyTask({
                            'files': [
                                'INCAR', 'KPOINTS', 'POSCAR', 'POTCAR',
                                'CONTCAR'
                            ],
                            'use_CONTCAR':
                            False
                        }),
                        SetupUnconvergedHandlerTask(),
                        get_custodian_task(spec)
                    ],
                             spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-2))

                # follow the re-run with a fresh DB insertion
                spec = {
                    'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True,
                    '_priority': fw_spec['_priority'],
                    '_queueadapter': QA_DB,
                    'run_tags': list(fw_spec['run_tags'])
                }
                if 'deformation_matrix' in fw_spec.keys():
                    spec['deformation_matrix'] = fw_spec['deformation_matrix']
                    spec['original_task_id'] = fw_spec['original_task_id']
                spec['run_tags'].append(unconverged_tag)
                fws.append(
                    Firework([VaspToDBTask()],
                             spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-1))
                connections[-2] = -1

                wf = Workflow(fws, connections)

                return FWAction(detours=wf)

        # not successful and not due to convergence problem - FIZZLE
        # NOTE(review): the line continuation embeds the indentation into the
        # message string itself
        raise ValueError("DB insertion successful, but don't know how to \
                         fix this Firework! Can't continue with workflow...")
예제 #47
0
    def run_task(self, fw_spec):
        """Add GGA static/uniform/band-structure follow-ups for gapped materials.

        Older (v1) variant: only fires when the computed band gap meets
        ``self.gap_cutoff``; metals get no follow-up runs.  Builds a linear
        chain: static -> DB insert -> uniform -> DB insert -> band structure
        -> DB insert, using the legacy ``FireWork`` class name.

        Args:
            fw_spec: spec of the current Firework; must contain 'analysis',
                'prev_task_type', 'mpsnl' and '_priority'.

        Returns:
            FWAction with the new Workflow as ``additions``, or an empty
            FWAction when the gap is below the cutoff.
        """
        print 'sleeping 10s for Mongo'
        time.sleep(10)
        print 'done sleeping'
        print 'the gap is {}, the cutoff is {}'.format(fw_spec['analysis']['bandgap'], self.gap_cutoff)

        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            print 'Adding more runs...'
            type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'

            snl = StructureNL.from_dict(fw_spec['mpsnl'])
            f = Composition.from_formula(snl.structure.composition.reduced_formula).alphabetical_formula

            fws = []
            connections = {}

            priority = fw_spec['_priority']

            # run GGA static
            # NOTE(review): spec aliases fw_spec, so update() mutates fw_spec
            spec = fw_spec  # pass all the items from the current spec to the new
            #  one
            spec.update({'task_type': '{} static'.format(type_name), '_queueadapter': QA_VASP,
                         '_dupefinder': DupeFinderVasp().to_dict(), '_priority': priority})
            fws.append(
                FireWork(
                    [VaspCopyTask({'use_CONTCAR': True}), SetupStaticRunTask(),
                     get_custodian_task(spec)], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-10))

            # insert into DB - GGA static
            spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                    '_allow_fizzled_parents': True, '_priority': priority}
            fws.append(
                FireWork([VaspToDBTask()], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-9))
            connections[-10] = -9

            # run GGA Uniform
            spec = {'task_type': '{} Uniform'.format(type_name), '_queueadapter': QA_VASP,
                    '_dupefinder': DupeFinderVasp().to_dict(), '_priority': priority}
            fws.append(FireWork(
                [VaspCopyTask({'use_CONTCAR': False}), SetupNonSCFTask({'mode': 'uniform'}),
                 get_custodian_task(spec)], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-8))
            connections[-9] = -8

            # insert into DB - GGA Uniform
            spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                    '_allow_fizzled_parents': True, '_priority': priority}
            fws.append(
                FireWork([VaspToDBTask({'parse_uniform': True})], spec, name=get_slug(f+'--'+spec['task_type']),
                         fw_id=-7))
            connections[-8] = -7

            # run GGA Band structure
            spec = {'task_type': '{} band structure'.format(type_name), '_queueadapter': QA_VASP,
                    '_dupefinder': DupeFinderVasp().to_dict(), '_priority': priority}
            fws.append(FireWork([VaspCopyTask({'use_CONTCAR': False}), SetupNonSCFTask({'mode': 'line'}),
                                 get_custodian_task(spec)], spec, name=get_slug(f+'--'+spec['task_type']),
                                fw_id=-6))
            connections[-7] = -6

            # insert into DB - GGA Band structure
            spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                    '_allow_fizzled_parents': True, '_priority': priority}
            fws.append(FireWork([VaspToDBTask({})], spec, name=get_slug(f+'--'+spec['task_type']), fw_id=-5))
            connections[-6] = -5

            wf = Workflow(fws, connections)

            print 'Done adding more runs...'

            return FWAction(additions=wf)
        return FWAction()
예제 #48
0
    def process_fw(self, dir_name, d):
        """Apply Materials Project post-processing to task document ``d`` in place.

        Duplicate variant of ``process_fw`` (different formatting): attaches
        FireWorks provenance from ``FW.json``, registers a new SNL for
        completed structure optimizations, and runs error-signal detection,
        storing results under ``d['analysis']['errors_MP']``.  May flip
        ``d['state']`` from "successful" to "error" on critical signals.

        Args:
            dir_name: path to the VASP run directory being assimilated.
            d: task document dict produced by the drone; mutated in place.
        """
        d["task_id_deprecated"] = int(d["task_id"].split('-')[-1])  # useful for WC and AJ

        # update the run fields to give species group in root, if exists
        for r in d['run_tags']:
            if "species_group=" in r:
                d["species_group"] = int(r.split("=")[-1])
                break

        # custom Materials Project post-processing for FireWorks
        # (zopen/zpath allow the FW.json to be gzipped)
        with zopen(zpath(os.path.join(dir_name, 'FW.json'))) as f:
            fw_dict = json.load(f)
            d['fw_id'] = fw_dict['fw_id']
            d['snl'] = fw_dict['spec']['mpsnl']
            d['snlgroup_id'] = fw_dict['spec']['snlgroup_id']
            d['vaspinputset_name'] = fw_dict['spec'].get('vaspinputset_name')
            d['task_type'] = fw_dict['spec']['task_type']
            # Process data for deformed structures
            if 'deformed' in d['task_type']:
                d['deformation_matrix'] = fw_dict['spec']['deformation_matrix']
                d['original_task_id'] = fw_dict['spec']['original_task_id']
            if not self.update_duplicates:
                if 'optimize structure' in d['task_type'] and 'output' in d:
                    # create a new SNL based on optimized structure
                    new_s = Structure.from_dict(d['output']['crystal'])
                    old_snl = StructureNL.from_dict(d['snl'])
                    # append a provenance node recording this optimization
                    history = old_snl.history
                    history.append(
                        {'name': 'Materials Project structure optimization',
                         'url': 'http://www.materialsproject.org',
                         'description': {'task_type': d['task_type'],
                                         'fw_id': d['fw_id'],
                                         'task_id': d['task_id']}})
                    new_snl = StructureNL(new_s, old_snl.authors, old_snl.projects,
                                          old_snl.references, old_snl.remarks,
                                          old_snl.data, history)

                    # enter new SNL into SNL db
                    # get the SNL mongo adapter
                    sma = SNLMongoAdapter.auto_load()

                    # add snl; seed grouping with the pre-optimization group id
                    mpsnl, snlgroup_id, spec_group = sma.add_snl(new_snl, snlgroup_guess=d['snlgroup_id'])
                    d['snl_final'] = mpsnl.as_dict()
                    d['snlgroup_id_final'] = snlgroup_id
                    d['snlgroup_changed'] = (d['snlgroup_id'] !=
                                             d['snlgroup_id_final'])
                else:
                    # not an optimization (or no output): final SNL == input SNL
                    d['snl_final'] = d['snl']
                    d['snlgroup_id_final'] = d['snlgroup_id']
                    d['snlgroup_changed'] = False

        # custom processing for detecting errors
        # "new style" runs have FW.json directly in dir_name (no relax1/relax2)
        new_style = os.path.exists(zpath(os.path.join(dir_name, 'FW.json')))
        vasp_signals = {}
        critical_errors = ["INPUTS_DONT_EXIST",
                           "OUTPUTS_DONT_EXIST", "INCOHERENT_POTCARS",
                           "VASP_HASNT_STARTED", "VASP_HASNT_COMPLETED",
                           "CHARGE_UNCONVERGED", "NETWORK_QUIESCED",
                           "HARD_KILLED", "WALLTIME_EXCEEDED",
                           "ATOMS_TOO_CLOSE", "DISK_SPACE_EXCEEDED", "NO_RELAX2", "POSITIVE_ENERGY"]

        last_relax_dir = dir_name

        if not new_style:
            # get the last relaxation dir
            # the order is relax2, current dir, then relax1. This is because
            # after completing relax1, the job happens in the current dir.
            # Finally, it gets moved to relax2.
            # There are some weird cases where both the current dir and relax2
            # contain data. The relax2 is good, but the current dir is bad.
            if is_valid_vasp_dir(os.path.join(dir_name, "relax2")):
                last_relax_dir = os.path.join(dir_name, "relax2")
            elif is_valid_vasp_dir(dir_name):
                pass
            elif is_valid_vasp_dir(os.path.join(dir_name, "relax1")):
                last_relax_dir = os.path.join(dir_name, "relax1")

        vasp_signals['last_relax_dir'] = last_relax_dir
        ## see what error signals are present

        print "getting signals for dir :{}".format(last_relax_dir)

        sl = SignalDetectorList()
        sl.append(VASPInputsExistSignal())
        sl.append(VASPOutputsExistSignal())
        sl.append(VASPOutSignal())
        sl.append(HitAMemberSignal())
        sl.append(SegFaultSignal())
        sl.append(VASPStartedCompletedSignal())

        # only successful optimizations are expected to produce a relax2 dir
        if d['state'] == 'successful' and 'optimize structure' in d['task_type']:
            sl.append(Relax2ExistsSignal())

        signals = sl.detect_all(last_relax_dir)

        # walltime/disk-space markers may live one level up for old-style runs
        signals = signals.union(WallTimeSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(WallTimeSignal().detect(root_dir))

        signals = signals.union(DiskSpaceExceededSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(DiskSpaceExceededSignal().detect(root_dir))

        # NOTE(review): final_energy may be missing -> None; "None > 0" is
        # False under Python 2 but raises TypeError under Python 3
        if d.get('output',{}).get('final_energy', None) > 0:
            signals.add('POSITIVE_ENERGY')

        signals = list(signals)

        critical_signals = [val for val in signals if val in critical_errors]

        vasp_signals['signals'] = signals
        vasp_signals['critical_signals'] = critical_signals

        vasp_signals['num_signals'] = len(signals)
        vasp_signals['num_critical'] = len(critical_signals)

        # any critical signal demotes a "successful" run to "error"
        if len(critical_signals) > 0 and d['state'] == "successful":
            d["state"] = "error"

        d['analysis'] = d.get('analysis', {})
        d['analysis']['errors_MP'] = vasp_signals
예제 #49
0
def equilibrium_constant_fws(mission, solvent, solvent_method, use_vdw_surface,
                             qm_method, reaction_id,
                             dupefinder=None, priority=1, parent_fwid=None,
                             additional_user_tags=None,
                             depend_on_parent=False):
    """Build the FireWorks needed to compute an equilibrium constant.

    For every reactant and product of the reaction document identified by
    ``reaction_id``, this creates an "Add to SNL database" FireWork, the
    single-point-energy (SPE) FireWorks, and the counterpoise (BSSE)
    correction FireWorks, all chained via ``links_dict``.

    Args:
        mission: mission tag propagated to the child FireWorks.
        solvent: implicit solvent name for the SPE calculations.
        solvent_method: solvation model to use.
        use_vdw_surface: whether to use the van der Waals surface.
        qm_method: "energy//solvent//geometry" spec; the energy part may be
            "sp_method||bsse_method" to request a separate BSSE method.
        reaction_id: key into the reactions collection.
        dupefinder: optional duplicate finder passed to the SPE FireWorks.
        priority: starting priority; doubled per species so started
            reactions run to completion.
        parent_fwid: int or list of ints of parent FireWork ids, if any.
        additional_user_tags: extra user tags for the child FireWorks.
        depend_on_parent: if True, link every created FireWork under each
            id in ``parent_fwid``.

    Returns:
        (fws, links_dict): list of Firework objects and the
        parent -> children mapping suitable for Workflow construction.

    Raises:
        ValueError: if parent_fwid is neither an int nor a list.
    """
    def _merge_links(dst, key, children):
        # Union-merge ``children`` into dst[key]; either side may be a
        # scalar fw_id or a list of fw_ids.
        current = dst.get(key, [])
        dst[key] = list(
            (set(current) if isinstance(current, list) else {current}) |
            (set(children) if isinstance(children, list) else {children}))

    energy_method, sol_qm_method, geom_method = qm_method.split("//")
    if '||' in energy_method:
        # a distinct BSSE method was requested alongside the SP method
        sp_qm_method, bsse_qm_method = energy_method.split("||")
        qm_method = "//".join([sp_qm_method, sol_qm_method, geom_method])
    else:
        bsse_qm_method = energy_method

    coll = get_reactions_collection()
    reaction_doc = coll.find_one(filter={"reaction_id": reaction_id})
    reactant_snls = [StructureNL.from_dict(s)
                     for s in reaction_doc["reactant_snls"]]
    product_snls = [StructureNL.from_dict(s)
                    for s in reaction_doc["product_snls"]]
    reactant_nicknames = reaction_doc['reactant_nicknames']
    product_nicknames = reaction_doc['product_nicknames']
    reactant_charges = reaction_doc['reactant_charges']
    product_charges = reaction_doc['product_charges']
    reactant_spin_multiplicities = reaction_doc['reactant_spin_multiplicities']
    product_spin_multiplicities = reaction_doc['product_spin_multiplicities']
    reactant_fragments = reaction_doc['reactant_fragments']
    product_fragments = reaction_doc['product_fragments']

    fwid_base = 1
    if parent_fwid:
        if not isinstance(parent_fwid, (int, list)):
            raise ValueError("Parent FireWork ID must be integer or list")
        parent_fwid = parent_fwid if isinstance(parent_fwid, list) \
            else [parent_fwid]
        fwid_base = max(parent_fwid) + 1

    current_fwid = fwid_base
    fws = []
    links_dict = dict()

    for snl, nick_name, charge, spin, fragments in \
            zip(reactant_snls + product_snls,
                reactant_nicknames + product_nicknames,
                reactant_charges + product_charges,
                reactant_spin_multiplicities + product_spin_multiplicities,
                reactant_fragments + product_fragments):
        mol = snl.structure
        mol.set_charge_and_spin(charge, spin)

        snl_tasks = [AddEGSNLTask()]
        snl_spec = {'task_type': 'Add to SNL database',
                    'snl': snl.as_dict(),
                    '_priority': priority}
        priority *= 2  # once we start a job, keep going!

        snl_fw = Firework(snl_tasks, snl_spec,
                          name=get_slug(nick_name + ' -- Add to SNL database'),
                          fw_id=current_fwid)
        fws.append(snl_fw)

        sp_fws, sp_links_dict = single_point_energy_fws(
            mol, name=nick_name, mission=mission, solvent=solvent,
            solvent_method=solvent_method,
            use_vdW_surface=use_vdw_surface, qm_method=qm_method,
            pop_method=None, dupefinder=dupefinder,
            priority=priority, parent_fwid=snl_fw.fw_id,
            additional_user_tags=additional_user_tags,
            depend_on_parent_fw=True, large=True)
        fws.extend(sp_fws)
        # Find the leaves of the SPE sub-workflow: children that never
        # appear as parents. The BSSE FireWorks hang off those leaves.
        sp_children = set()
        sp_parents = set()
        for k, v2 in sp_links_dict.items():
            _merge_links(links_dict, k, v2)
            if isinstance(k, list):
                sp_parents |= set(k)
            else:
                sp_parents.add(k)
            if isinstance(v2, list):
                sp_children |= set(v2)
            else:
                sp_children.add(v2)
        sp_last_fwids = list(sp_children - sp_parents)

        bsse_fws, bsse_links_dict = counterpoise_correction_generation_fw(
            molname=nick_name, charge=charge, spin_multiplicity=spin,
            qm_method=bsse_qm_method, fragments=fragments,
            mission=mission, priority=priority, parent_fwid=sp_last_fwids,
            additional_user_tags=additional_user_tags, large=True)
        fws.extend(bsse_fws)
        for k, v2 in bsse_links_dict.items():
            _merge_links(links_dict, k, v2)
        current_fwid = max([fw.fw_id for fw in bsse_fws]) + 1

    # Guard against depend_on_parent=True with no parents given; the
    # original iterated over None and raised TypeError.
    if depend_on_parent and parent_fwid:
        all_fwids = [fw.fw_id for fw in fws]
        for p_fwid in parent_fwid:
            links_dict[p_fwid] = all_fwids

    return fws, links_dict
예제 #50
0
    def run_task(self, fw_spec):
        """Append the static/uniform/line band-structure follow-up workflow.

        Reads the band gap computed by the previous task from
        fw_spec["analysis"]["bandgap"], picks k-point densities and INCAR
        smearing settings accordingly, then returns an FWAction whose
        `additions` is a Workflow chaining:

            GGA(+U) static -> DB insert -> uniform -> DB insert ->
            line band structure -> DB insert [-> Boltztrap, if requested]

        Negative fw_ids (-10 .. -4) are placeholders that FireWorks
        reassigns on insertion; `connections` wires them parent -> child.
        """
        # crude wait so the previous step's DB write is visible to queries
        print "sleeping 10s for Mongo"
        time.sleep(10)
        print "done sleeping"
        print "the gap is {}, the cutoff is {}".format(fw_spec["analysis"]["bandgap"], self.gap_cutoff)
        # gapped systems (gap >= cutoff) get sparser k-point meshes
        if fw_spec["analysis"]["bandgap"] >= self.gap_cutoff:
            static_dens = 90
            uniform_dens = 1000
            line_dens = 20
        else:
            static_dens = 450
            uniform_dens = 1500
            line_dens = 30

        # near-metallic systems get ISMEAR=1 smearing with SIGMA=0.2
        if fw_spec["analysis"]["bandgap"] <= self.metal_cutoff:
            user_incar_settings = {"ISMEAR": 1, "SIGMA": 0.2}
        else:
            user_incar_settings = {}

        print "Adding more runs..."

        # carry the GGA+U label forward from the previous task type
        type_name = "GGA+U" if "GGA+U" in fw_spec["prev_task_type"] else "GGA"

        snl = StructureNL.from_dict(fw_spec["mpsnl"])
        # f is the human-readable formula used in FireWork names
        f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

        fws = []
        connections = {}

        priority = fw_spec["_priority"]
        trackers = [
            Tracker("FW_job.out"),
            Tracker("FW_job.error"),
            Tracker("vasp.out"),
            Tracker("OUTCAR"),
            Tracker("OSZICAR"),
        ]
        trackers_db = [Tracker("FW_job.out"), Tracker("FW_job.error")]

        # run GGA static
        spec = fw_spec  # pass all the items from the current spec to the new
        # NOTE(review): this aliases fw_spec rather than copying it, so the
        # update() below also mutates fw_spec -- confirm this is intended
        # before reusing fw_spec later in this method.
        spec.update(
            {
                "task_type": "{} static v2".format(type_name),
                "_queueadapter": QA_VASP_SMALL,
                "_dupefinder": DupeFinderVasp().to_dict(),
                "_priority": priority,
                "_trackers": trackers,
            }
        )
        fws.append(
            Firework(
                [
                    VaspCopyTask({"use_CONTCAR": True, "skip_CHGCAR": True}),
                    SetupStaticRunTask({"kpoints_density": static_dens, "user_incar_settings": user_incar_settings}),
                    get_custodian_task(spec),
                ],
                spec,
                name=get_slug(f + "--" + spec["task_type"]),
                fw_id=-10,
            )
        )

        # insert into DB - GGA static
        spec = {
            "task_type": "VASP db insertion",
            "_queueadapter": QA_DB,
            "_allow_fizzled_parents": True,
            "_priority": priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            "_trackers": trackers_db,
        }
        fws.append(Firework([VaspToDBTask()], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-9))
        connections[-10] = -9

        # run GGA Uniform
        spec = {
            "task_type": "{} Uniform v2".format(type_name),
            "_queueadapter": QA_VASP,
            "_dupefinder": DupeFinderVasp().to_dict(),
            "_priority": priority,
            "_trackers": trackers,
        }
        fws.append(
            Firework(
                [
                    VaspCopyTask({"use_CONTCAR": False}),
                    SetupNonSCFTask({"mode": "uniform", "kpoints_density": uniform_dens}),
                    get_custodian_task(spec),
                ],
                spec,
                name=get_slug(f + "--" + spec["task_type"]),
                fw_id=-8,
            )
        )
        connections[-9] = -8

        # insert into DB - GGA Uniform
        spec = {
            "task_type": "VASP db insertion",
            "_queueadapter": QA_DB,
            "_allow_fizzled_parents": True,
            "_priority": priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            "_trackers": trackers_db,
        }
        fws.append(
            Firework(
                [VaspToDBTask({"parse_uniform": True})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-7
            )
        )
        connections[-8] = -7

        # run GGA Band structure
        spec = {
            "task_type": "{} band structure v2".format(type_name),
            "_queueadapter": QA_VASP,
            "_dupefinder": DupeFinderVasp().to_dict(),
            "_priority": priority,
            "_trackers": trackers,
        }
        fws.append(
            Firework(
                [
                    VaspCopyTask({"use_CONTCAR": False}),
                    SetupNonSCFTask({"mode": "line", "kpoints_line_density": line_dens}),
                    get_custodian_task(spec),
                ],
                spec,
                name=get_slug(f + "--" + spec["task_type"]),
                fw_id=-6,
            )
        )
        # stored as a list so the optional Boltztrap child can be appended
        connections[-7] = [-6]

        # insert into DB - GGA Band structure
        spec = {
            "task_type": "VASP db insertion",
            "_queueadapter": QA_DB,
            "_allow_fizzled_parents": True,
            "_priority": priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            "_trackers": trackers_db,
        }
        fws.append(Firework([VaspToDBTask({})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-5))
        connections[-6] = -5

        if fw_spec.get("parameters") and fw_spec["parameters"].get("boltztrap"):
            # run Boltztrap
            # local import avoids a circular/unneeded dependency when
            # Boltztrap was not requested
            from mpworks.firetasks.boltztrap_tasks import BoltztrapRunTask

            spec = {
                "task_type": "{} Boltztrap".format(type_name),
                "_queueadapter": QA_DB,
                "_dupefinder": DupeFinderDB().to_dict(),
                "_priority": priority,
            }
            fws.append(Firework([BoltztrapRunTask()], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-4))
            # Boltztrap runs off the uniform-run DB insertion (-7)
            connections[-7].append(-4)

        wf = Workflow(fws, connections)

        print "Done adding more runs..."

        return FWAction(additions=wf)
예제 #51
0
    def process_fw(self, old_task, d):
        """Augment the migrated task document `d` with SNL and error info.

        Three stages, all mutating `d` in place:
          1. Attach an SNL + snlgroup_id, sourced (in priority order) from
             the SNL DB via the legacy mps_id, from the embedded 'mps' dict,
             or freshly created from the input crystal.
          2. For completed structure optimizations, register a new SNL for
             the relaxed structure and record whether the group changed.
          3. Scan the run directory for VASP error signals and flip the
             state to "error" if any critical signal is present.
        """
        # AJ - this whole section is different
        sma = SNLMongoAdapter.auto_load()

        d['old_engine'] = old_task.get('engine')
        if 'fw_id' in old_task:
            d['old_fw_id'] = old_task['fw_id']

        d['fw_id'] = None
        d['task_type'] = 'GGA+U optimize structure (2x)' if old_task[
            'is_hubbard'] else 'GGA optimize structure (2x)'
        d['submission_id'] = None
        d['vaspinputset_name'] = None

        # NOTE(review): this query runs even when old_task has no 'mps_id'
        # (KeyError risk / wasted lookup); the guard is only applied below.
        snl_d = sma.snl.find_one({'about._materialsproject.deprecated.mps_ids': old_task['mps_id']})
        if old_task.get('mps_id', -1) > 0 and snl_d:
            # grab the SNL from the SNL db
            del snl_d['_id']
            d['snl'] = snl_d
            d['snlgroup_id'] = sma.snlgroups.find_one({'all_snl_ids': d['snl']['snl_id']}, {'snlgroup_id': 1})['snlgroup_id']

        elif 'mps' in old_task and old_task['mps']:
            # legacy 'mps' dict embedded in the task -- convert and register
            snl = mps_dict_to_snl(old_task['mps'])
            mpsnl, snlgroup_id = sma.add_snl(snl)
            # .to_dict without parentheses -- presumably a property in this
            # pymatgen version; confirm before upgrading pymatgen
            d['snl'] = mpsnl.to_dict
            d['snlgroup_id'] = snlgroup_id
        else:
            # no provenance available: build an SNL from the input structure
            s = Structure.from_dict(old_task['input']['crystal'])
            snl = StructureNL(s, 'Anubhav Jain <*****@*****.**>', remarks=['origin unknown'])
            mpsnl, snlgroup_id = sma.add_snl(snl)
            d['snl'] = mpsnl.to_dict
            d['snlgroup_id'] = snlgroup_id


        if 'optimize structure' in d['task_type'] and 'output' in d:
            # create a new SNL based on optimized structure
            new_s = Structure.from_dict(d['output']['crystal'])
            old_snl = StructureNL.from_dict(d['snl'])
            history = old_snl.history
            history.append(
                {'name': 'Materials Project structure optimization',
                 'url': 'http://www.materialsproject.org',
                 'description': {'task_type': d['task_type'],
                                 'fw_id': d['fw_id'],
                                 'task_id': d['task_id']}})
            new_snl = StructureNL(new_s, old_snl.authors, old_snl.projects,
                                  old_snl.references, old_snl.remarks,
                                  old_snl.data, history)

            # add snl
            mpsnl, snlgroup_id = sma.add_snl(new_snl, snlgroup_guess=d['snlgroup_id'])

            d['snl_final'] = mpsnl.to_dict
            d['snlgroup_id_final'] = snlgroup_id
            # flag if relaxation moved the structure into a different group
            d['snlgroup_changed'] = (d['snlgroup_id'] !=
                                     d['snlgroup_id_final'])

        # custom processing for detecting errors
        dir_name = old_task['dir_name']
        # presence of FW.json marks a "new style" (FireWorks-managed) run dir
        new_style = os.path.exists(os.path.join(dir_name, 'FW.json'))
        vasp_signals = {}
        # only these signals will flip a "successful" state to "error"
        critical_errors = ["INPUTS_DONT_EXIST",
                           "OUTPUTS_DONT_EXIST", "INCOHERENT_POTCARS",
                           "VASP_HASNT_STARTED", "VASP_HASNT_COMPLETED",
                           "CHARGE_UNCONVERGED", "NETWORK_QUIESCED",
                           "HARD_KILLED", "WALLTIME_EXCEEDED",
                           "ATOMS_TOO_CLOSE", "DISK_SPACE_EXCEEDED"]

        last_relax_dir = dir_name

        if not new_style:
            # get the last relaxation dir
            # the order is relax2, current dir, then relax1. This is because
            # after completing relax1, the job happens in the current dir.
            # Finally, it gets moved to relax2.
            # There are some weird cases where both the current dir and relax2
            # contain data. The relax2 is good, but the current dir is bad.
            if is_valid_vasp_dir(os.path.join(dir_name, "relax2")):
                last_relax_dir = os.path.join(dir_name, "relax2")
            elif is_valid_vasp_dir(dir_name):
                pass
            elif is_valid_vasp_dir(os.path.join(dir_name, "relax1")):
                last_relax_dir = os.path.join(dir_name, "relax1")

        vasp_signals['last_relax_dir'] = last_relax_dir
        ## see what error signals are present

        print "getting signals for dir :{}".format(last_relax_dir)

        sl = SignalDetectorList()
        sl.append(VASPInputsExistSignal())
        sl.append(VASPOutputsExistSignal())
        sl.append(VASPOutSignal())
        sl.append(HitAMemberSignal())
        sl.append(SegFaultSignal())
        sl.append(VASPStartedCompletedSignal())

        signals = sl.detect_all(last_relax_dir)

        # walltime / disk-space markers may sit in the task dir or, for old
        # style runs, one directory up
        signals = signals.union(WallTimeSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(WallTimeSignal().detect(root_dir))

        signals = signals.union(DiskSpaceExceededSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(DiskSpaceExceededSignal().detect(root_dir))

        signals = list(signals)

        critical_signals = [val for val in signals if val in critical_errors]

        vasp_signals['signals'] = signals
        vasp_signals['critical_signals'] = critical_signals

        vasp_signals['num_signals'] = len(signals)
        vasp_signals['num_critical'] = len(critical_signals)

        # a critical signal invalidates a nominally successful run
        if len(critical_signals) > 0 and d['state'] == "successful":
            d["state"] = "error"

        d['analysis'] = d.get('analysis', {})
        d['analysis']['errors_MP'] = vasp_signals

        # searchable run tags: functional, pseudopotential labels, and
        # per-element Hubbard U values (0 when no U applied)
        d['run_tags'] = ['PBE']
        d['run_tags'].extend(d['pseudo_potential']['labels'])
        d['run_tags'].extend([e+"="+str(d['hubbards'].get(e, 0)) for e in d['elements']])
예제 #52
0
    def run_task(self, fw_spec):
        """Append the static/uniform/line band-structure follow-up workflow.

        Reads the band gap computed by the previous task from
        fw_spec['analysis']['bandgap'], picks k-point densities and INCAR
        smearing settings accordingly, then returns an FWAction whose
        `additions` is a Workflow chaining:

            GGA(+U) static -> DB insert -> uniform -> DB insert ->
            line band structure -> DB insert [-> Boltztrap, if requested]

        Negative fw_ids (-10 .. -4) are placeholders that FireWorks
        reassigns on insertion; `connections` wires them parent -> child.
        """
        # crude wait so the previous step's DB write is visible to queries
        print 'sleeping 10s for Mongo'
        time.sleep(10)
        print 'done sleeping'
        print 'the gap is {}, the cutoff is {}'.format(
            fw_spec['analysis']['bandgap'], self.gap_cutoff)
        # gapped systems (gap >= cutoff) get sparser k-point meshes
        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            static_dens = 90
            uniform_dens = 1000
            line_dens = 20
        else:
            static_dens = 450
            uniform_dens = 1500
            line_dens = 30

        # near-metallic systems get ISMEAR=1 smearing with SIGMA=0.2
        if fw_spec['analysis']['bandgap'] <= self.metal_cutoff:
            user_incar_settings = {"ISMEAR": 1, "SIGMA": 0.2}
        else:
            user_incar_settings = {}

        print 'Adding more runs...'

        # carry the GGA+U label forward from the previous task type
        type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'

        snl = StructureNL.from_dict(fw_spec['mpsnl'])
        # f is the human-readable formula used in FireWork names
        f = Composition(
            snl.structure.composition.reduced_formula).alphabetical_formula

        fws = []
        connections = {}

        priority = fw_spec['_priority']
        trackers = [
            Tracker('FW_job.out'),
            Tracker('FW_job.error'),
            Tracker('vasp.out'),
            Tracker('OUTCAR'),
            Tracker('OSZICAR')
        ]
        trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

        # run GGA static
        spec = fw_spec  # pass all the items from the current spec to the new
        # NOTE(review): this aliases fw_spec rather than copying it, so the
        # update() below also mutates fw_spec -- confirm this is intended
        # before reusing fw_spec later in this method.
        spec.update({
            'task_type': '{} static v2'.format(type_name),
            '_queueadapter': QA_VASP_SMALL,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority,
            '_trackers': trackers
        })
        fws.append(
            Firework([
                VaspCopyTask({
                    'use_CONTCAR': True,
                    'skip_CHGCAR': True
                }),
                SetupStaticRunTask({
                    "kpoints_density": static_dens,
                    'user_incar_settings': user_incar_settings
                }),
                get_custodian_task(spec)
            ],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-10))

        # insert into DB - GGA static
        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            '_trackers': trackers_db
        }
        fws.append(
            Firework([VaspToDBTask()],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-9))
        connections[-10] = -9

        # run GGA Uniform
        spec = {
            'task_type': '{} Uniform v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority,
            '_trackers': trackers
        }
        fws.append(
            Firework([
                VaspCopyTask({'use_CONTCAR': False}),
                SetupNonSCFTask({
                    'mode': 'uniform',
                    "kpoints_density": uniform_dens
                }),
                get_custodian_task(spec)
            ],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-8))
        connections[-9] = -8

        # insert into DB - GGA Uniform
        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            '_trackers': trackers_db
        }
        fws.append(
            Firework([VaspToDBTask({'parse_uniform': True})],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-7))
        connections[-8] = -7

        # run GGA Band structure
        spec = {
            'task_type': '{} band structure v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority,
            '_trackers': trackers
        }
        fws.append(
            Firework([
                VaspCopyTask({'use_CONTCAR': False}),
                SetupNonSCFTask({
                    'mode': 'line',
                    "kpoints_line_density": line_dens
                }),
                get_custodian_task(spec)
            ],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-6))
        # stored as a list so the optional Boltztrap child can be appended
        connections[-7] = [-6]

        # insert into DB - GGA Band structure
        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            '_trackers': trackers_db
        }
        fws.append(
            Firework([VaspToDBTask({})],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-5))
        connections[-6] = -5

        if fw_spec.get('parameters') and fw_spec['parameters'].get(
                'boltztrap'):
            # run Boltztrap
            # local import avoids the dependency when Boltztrap is not used
            from mpworks.firetasks.boltztrap_tasks import BoltztrapRunTask
            spec = {
                'task_type': '{} Boltztrap'.format(type_name),
                '_queueadapter': QA_DB,
                '_dupefinder': DupeFinderDB().to_dict(),
                '_priority': priority
            }
            fws.append(
                Firework([BoltztrapRunTask()],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-4))
            # Boltztrap runs off the uniform-run DB insertion (-7)
            connections[-7].append(-4)

        wf = Workflow(fws, connections)

        print 'Done adding more runs...'

        return FWAction(additions=wf)
예제 #53
0
    def submit_reaction(self,
                        reactant_snls,
                        product_snls,
                        reactant_fragments,
                        product_fragments,
                        submitter_email,
                        parameters=None):
        """
            Submit a reaction. This task will be separated to several single
            point energy calculations, and submitted as individual molecules.

            Args:
                reactant_snls: List of tuple(snl, count, nickname).
                product_snls: List of tuple(snl, count, nickname).
                reactant_fragments: BSSE fragments definition. (BSSEFragment)
                product_fragments: BSSE fragments definition. (BSSEFragment)
                submitter_email: Email.
                parameters: dict of parameters. Expected parameters are
                    1) method: QChem theoretical method, e.g.
                    B3LYP-XDM/6-31+G*; 2) solvent: implicit solvent in energy
                    calculation, e.g. THF; ...

            Returns:
                The newly assigned reaction_id.

            Raises:
                Exception: if reactants and products are not atom-balanced.
        """
        def _element_count(snl_tuples):
            # Total per-element atom count over (snl, count, nickname) tuples.
            counts = defaultdict(int)
            for snl, n, _ in snl_tuples:
                for site in snl.structure.sites:
                    counts[site.specie.symbol] += n
            return counts

        def _collect_species(snl_tuples):
            # Per-species InChIs, nicknames, counts, charges and spin
            # multiplicities, in input order.
            inchis, nicknames, counts, charges, spins = [], [], [], [], []
            for snl, n, nick_name in snl_tuples:
                mol = snl.structure
                pbmol = BabelMolAdaptor(mol).pybel_mol
                inchis.append(pbmol.write("inchi").strip())
                nicknames.append(nick_name)
                counts.append(n)
                charges.append(mol.charge)
                spins.append(mol.spin_multiplicity)
            return inchis, nicknames, counts, charges, spins

        # reject reactions that are not atom-balanced
        if _element_count(reactant_snls) != _element_count(product_snls):
            raise Exception(
                "Number of atoms is inconsistent in reactant and product")

        (reactant_inchis, reactant_nicknames, num_reactants,
         reactant_charges, reactant_spin_multiplicities) = \
            _collect_species(reactant_snls)
        (product_inchis, product_nicknames, num_products,
         product_charges, product_spin_multiplicities) = \
            _collect_species(product_snls)
        all_inchis = reactant_inchis + product_inchis

        if parameters is None:
            # was a TypeError: the default None crashed when reaction_id was
            # recorded into parameters below
            parameters = {}

        d = dict()
        d['submitter_email'] = submitter_email
        d['parameters'] = parameters
        d['state'] = 'SUBMITTED'
        d['reaction_id'] = self._get_next_reaction_id()
        d['submitted_at'] = datetime.datetime.utcnow().isoformat()
        d["reactant_snls"] = [s[0].as_dict() for s in reactant_snls]
        d["product_snls"] = [s[0].as_dict() for s in product_snls]
        d['all_inchis'] = all_inchis
        d['reactant_inchis'] = reactant_inchis
        d['product_inchis'] = product_inchis
        d['num_reactants'] = num_reactants
        d['num_products'] = num_products
        d['reactant_nicknames'] = reactant_nicknames
        d['product_nicknames'] = product_nicknames
        d['reactant_charges'] = reactant_charges
        d['product_charges'] = product_charges
        d['reactant_spin_multiplicities'] = reactant_spin_multiplicities
        d['product_spin_multiplicities'] = product_spin_multiplicities
        d['reactant_fragments'] = [[frag.to_dict() for frag in specie]
                                   for specie in reactant_fragments]
        d['product_fragments'] = [[frag.to_dict() for frag in specie]
                                  for specie in product_fragments]
        self.reactions.insert(d)
        # seed the workflow with one SNL; reaction_id lets downstream tasks
        # recover the full reaction document from the collection
        dummy_snl = StructureNL.from_dict(d["reactant_snls"][0])
        parameters['reaction_id'] = d['reaction_id']
        self.submit_snl(dummy_snl, submitter_email, parameters)
        return d['reaction_id']
예제 #54
0
import json
import os
from fireworks.core.launchpad import LaunchPad
from mpworks.submission.submission_mongo import SubmissionMongoAdapter
from pymatgen.matproj.snl import StructureNL

__author__ = 'Anubhav Jain'
__copyright__ = 'Copyright 2014, The Materials Project'
__version__ = '0.1'
__maintainer__ = 'Anubhav Jain'
__email__ = '*****@*****.**'
__date__ = 'Jan 24, 2014'

if __name__ == "__main__":
    sma = SubmissionMongoAdapter.from_file('submission.yaml')

    module_dir = os.path.dirname(os.path.abspath(__file__))
    lp_f = os.path.join(module_dir, 'my_launchpad.yaml')
    lpdb = LaunchPad.from_file(lp_f)

    for s in os.listdir(os.path.join(module_dir, "snls")):
        if '.json' in s:
            print 'submitting', s
            with open(os.path.join(module_dir, "snls",s)) as f:
                snl = StructureNL.from_dict(json.load(f))
                sma.submit_snl(snl, '*****@*****.**', {"priority": 10})
            print 'DONE submitting', s


print 'DONE!'