Example #1
0
    def test_offline_fw_passinfo(self):
        """Run two parent FWs offline, recover them, run the child offline,
        and verify the child received both parents' results.

        Flow per job: reserve_fw -> setup_offline_job -> launch_rocket,
        then recover_offline to pull results back into the LaunchPad.
        NOTE(review): chdir/mkdir side effects assume the test harness
        restores cwd and removes the launcher_* dirs afterwards.
        """
        fw1 = Firework([AdditionTask()], {"input_array": [1,1]}, name="1")
        fw2 = Firework([AdditionTask()], {"input_array": [2,2]}, name="2")
        # Child depends on both parents, which extend its input_array.
        fw3 = Firework([AdditionTask()], {"input_array": [3]}, parents=[fw1, fw2], name="3")

        wf = Workflow([fw1, fw2, fw3])
        self.lp.add_wf(wf)

        # make dirs for launching jobs
        cur_dir = os.path.dirname(os.path.abspath(__file__))

        os.mkdir(os.path.join(cur_dir, "launcher_1"))
        os.mkdir(os.path.join(cur_dir, "launcher_2"))
        os.mkdir(os.path.join(cur_dir, "launcher_3"))

        # launch two parent jobs
        os.chdir(os.path.join(cur_dir, "launcher_1"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        setup_offline_job(self.lp, fw, launch_id)
        launch_rocket(None, self.fworker)

        os.chdir(os.path.join(cur_dir, "launcher_2"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        setup_offline_job(self.lp, fw, launch_id)
        launch_rocket(None, self.fworker)

        # recover jobs: sync every not-yet-completed offline run back into
        # the LaunchPad so the child FW becomes runnable
        for l in self.lp.offline_runs.find({"completed": False, "deprecated": False}, {"launch_id": 1}):
            fw = self.lp.recover_offline(l['launch_id'])

        # launch child job
        os.chdir(os.path.join(cur_dir, "launcher_3"))
        fw, launch_id = self.lp.reserve_fw(self.fworker, os.getcwd())
        last_fw_id = fw.fw_id
        setup_offline_job(self.lp, fw, launch_id)
        launch_rocket(None, self.fworker)

        # recover jobs
        for l in self.lp.offline_runs.find({"completed": False, "deprecated": False}, {"launch_id": 1}):
            fw = self.lp.recover_offline(l['launch_id'])

        # confirm the sum in the child job: parents contribute 1+1=2 and
        # 2+2=4, joining the child's own 3 -> {2, 3, 4}, which sums to 9
        child_fw = self.lp.get_fw_by_id(last_fw_id)
        self.assertEqual(set(child_fw.spec['input_array']), set([2, 3, 4]))
        self.assertEqual(child_fw.launches[0].action.stored_data["sum"], 9)
Example #2
0
def add_scripts(args):
    """Add a linear chain of shell-script FireWorks to the LaunchPad.

    Each entry of args.scripts becomes one ScriptTask Firework; fireworks
    are linked sequentially (0 -> 1 -> 2 -> ...). Missing names default to
    None per script, and the workflow name defaults to the first FW name.
    """
    launchpad = get_lp(args)
    if not args.names:
        args.names = [None] * len(args.scripts)
    if not args.wf_name:
        args.wf_name = args.names[0]

    fireworks = [
        Firework(ScriptTask({
            'script': script,
            'use_shell': True
        }),
                 name=args.names[i],
                 fw_id=i) for i, script in enumerate(args.scripts)
    ]
    # Sequential dependency chain: each FW is the child of its predecessor.
    chain = {i - 1: i for i in range(1, len(args.scripts))}

    launchpad.add_wf(Workflow(fireworks, chain, args.wf_name))
Example #3
0
            def add_wf(j, dest, tracker, name):
                """Add a two-Firework chained workflow; each FW runs 25
                echo-to-file ScriptTasks carrying the given tracker."""
                def _make_fw(start, stop, fw_id, fw_name):
                    # Build one FW of echo tasks covering [start, stop).
                    tasks = [
                        ScriptTask.from_str('echo "' + str(k) + '" >> ' + dest,
                                            {'store_stdout': True})
                        for k in range(start, stop)
                    ]
                    return Firework(tasks, spec={'_trackers': [tracker]},
                                    fw_id=fw_id, name=fw_name)

                first = _make_fw(j, j + 25, j + 1, name + '1')
                second = _make_fw(j + 25, j + 50, j + 2, name + '2')
                # first is the parent of second.
                self.lp.add_wf(Workflow([first, second],
                                        links_dict={first: [second]}))
Example #4
0
    def test_createfolder(self):
        """CreateFolder(change_dir=False) must create `folder_name` inside
        fw1's launch directory, which fw2 can locate via calc_locs."""
        folder_name = "test_folder"
        fw1 = Firework([
            CreateFolder(folder_name=folder_name, change_dir=False),
            PassCalcLocs(name="fw1")
        ],
                       name="fw1")  # was "fw3" (copy-paste slip); name is not queried
        fw2 = Firework([PassCalcLocs(name="fw2")], name="fw2", parents=fw1)
        wf = Workflow([fw1, fw2])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        fw2 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw2"})[0])
        calc_locs = fw2.spec["calc_locs"]

        # os.path.join instead of "/" concatenation for portability.
        self.assertTrue(
            os.path.exists(
                os.path.join(get_calc_loc("fw1", calc_locs)["path"],
                             folder_name)))
Example #5
0
    def test_category(self):
        """run_exists must honor FWorker category filtering: a matching
        category, no category, and a category list containing the match all
        see the run; a non-matching category and '__none__' do not."""
        spec = {'_category': 'dummy_category'}
        fireworks = [
            Firework(ScriptTask.from_str('echo "Task 1"'),
                     fw_id=1, name='Task 1', spec=spec),
            Firework(ScriptTask.from_str('echo "Task 2"'),
                     fw_id=2, name='Task 2', spec=spec),
        ]

        self.lp.add_wf(Workflow(fireworks))

        self.assertTrue(self.lp.run_exists(FWorker(category="dummy_category")))
        self.assertFalse(
            self.lp.run_exists(FWorker(category="other category")))
        self.assertFalse(self.lp.run_exists(FWorker(category="__none__")))
        # A worker with no category restriction may run anything.
        self.assertTrue(self.lp.run_exists(FWorker()))
        # A list of categories matches if any entry matches.
        self.assertTrue(
            self.lp.run_exists(
                FWorker(category=["dummy_category", "other category"])))
Example #6
0
    def __init__(self, vasp_input_set, spec):
        """Build an SRC (Setup/Run/Check) workflow for an MP relaxation.

        Args:
            vasp_input_set: VASP input set forwarded to
                createVaspSRCFireworks.
            spec: firework spec. If it contains 'additional_controllers',
                that key is popped (the dict is MUTATED) and its value used
                instead of the default controller set.
        """
        # Initializes fws list and links_dict
        fws = []
        links_dict = {}

        # Caller-supplied controllers win; otherwise fall back to the
        # standard walltime/memory/XML-validation trio.
        if 'additional_controllers' in spec:
            additional_controllers = spec['additional_controllers']
            spec.pop('additional_controllers')
        else:
            additional_controllers = [
                WalltimeController(),
                MemoryController(),
                VaspXMLValidatorController()
            ]

        control_procedure = ControlProcedure(
            controllers=additional_controllers)
        task_helper = MPRelaxTaskHelper()
        task_type = task_helper.task_type
        # One SRC trio (setup/run/check) for the relaxation itself.
        src_fws = createVaspSRCFireworks(vasp_input_set=vasp_input_set,
                                         task_helper=task_helper,
                                         task_type=task_type,
                                         control_procedure=control_procedure,
                                         custodian_handlers=[],
                                         max_restarts=10,
                                         src_cleaning=None,
                                         task_index=None,
                                         spec=None,
                                         setup_spec_update=None,
                                         run_spec_update=None)

        fws.extend(src_fws['fws'])
        links_dict_update(links_dict=links_dict,
                          links_update=src_fws['links_dict'])

        self.wf = Workflow(fireworks=fws,
                           links_dict=links_dict,
                           metadata={
                               'workflow_class': self.workflow_class,
                               'workflow_module': self.workflow_module
                           },
                           name='MPRelaxFWWorkflowSRC')
Example #7
0
    def test_preserve_fworker(self):
        """With _preserve_fworker on the parent, the child's spec should
        gain a '_fworker' entry once the parent has launched."""
        fw1 = Firework([ScriptTask.from_str('echo "Testing preserve FWorker"')], spec={"_preserve_fworker": True}, fw_id=1)
        fw2 = Firework([ScriptTask.from_str('echo "Testing preserve FWorker pt 2"')], spec={"target": 1}, parents=[fw1], fw_id=2)
        self.lp.add_wf(Workflow([fw1, fw2]))
        launch_rocket(self.lp, self.fworker)

        # Locate the child by its marker spec rather than a hard-coded id.
        target_fw_id = self.lp.get_fw_ids({"spec.target": 1})[0]

        modified_spec = self.lp.get_fw_by_id(target_fw_id).spec

        # (Removed a dead commented-out polling loop that was left here as
        # a no-op string literal.)
        self.assertIsNotNone(modified_spec['_fworker'])
Example #8
0
    def test_copyfilesfromcalcloc(self):
        """CopyFilesFromCalcLoc must copy POSCAR from fw1's and fw2's calc
        locations into fw3's launch dir as POSCAR_0 / POSCAR_1; fw4 then
        exposes fw3's location through calc_locs for the assertions."""
        fw1 = Firework([
            CopyVaspOutputs(calc_dir=self.plain_outdir),
            PassCalcLocs(name="fw1")
        ],
                       name="fw1")

        fw2 = Firework([
            CopyVaspOutputs(calc_dir=self.relax2_outdir),
            PassCalcLocs(name="fw2")
        ],
                       name="fw2")

        fw3 = Firework([
            CopyFilesFromCalcLoc(calc_loc="fw1",
                                 filenames=["POSCAR"],
                                 name_prepend="",
                                 name_append="_0"),
            CopyFilesFromCalcLoc(calc_loc="fw2",
                                 filenames=["POSCAR"],
                                 name_prepend="",
                                 name_append="_1"),
            PassCalcLocs(name="fw3")
        ],
                       name="fw3",
                       parents=[fw1, fw2])
        fw4 = Firework([PassCalcLocs(name="fw4")], name="fw4", parents=fw3)

        wf = Workflow([fw1, fw2, fw3, fw4])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        fw4 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw4"})[0])

        calc_locs = fw4.spec["calc_locs"]
        # Hoist the duplicate lookup; os.path.join instead of "/" concat
        # for portability.
        fw3_path = get_calc_loc("fw3", calc_locs)["path"]
        self.assertTrue(os.path.exists(os.path.join(fw3_path, "POSCAR_0")))
        self.assertTrue(os.path.exists(os.path.join(fw3_path, "POSCAR_1")))
Example #9
0
    def test_files_in_out(self):
        """Verify _files_out/_files_in pass files between chained FWs.

        fw1 writes test1 (exported as 'fwtest1'); fw2 receives it as
        'hello', gzips it, and exports 'fw2'; fw3 receives that as
        fwtest.2. Each launch_rocket runs exactly one FW in order, so the
        file assertions are checked after each step.
        NOTE(review): the os.remove cleanup is skipped if an assertion
        fails, leaving files in cwd.
        """
        # create the Workflow that passes files_in and files_out
        fw1 = Firework(
            [ScriptTask.from_str('echo "This is the first FireWork" > test1')],
            spec={"_files_out": {
                "fwtest1": "test1"
            }},
            fw_id=1,
        )
        fw2 = Firework(
            [ScriptTask.from_str("gzip hello")],
            fw_id=2,
            parents=[fw1],
            spec={
                "_files_in": {
                    "fwtest1": "hello"
                },
                "_files_out": {
                    "fw2": "hello.gz"
                }
            },
        )
        fw3 = Firework([ScriptTask.from_str("cat fwtest.2")],
                       fw_id=3,
                       parents=[fw2],
                       spec={"_files_in": {
                           "fw2": "fwtest.2"
                       }})
        wf = Workflow([fw1, fw2, fw3], {fw1: [fw2], fw2: [fw3]})

        # store workflow and launch it locally
        self.lp.add_wf(wf)
        launch_rocket(self.lp, self.fworker)
        self.assertTrue(os.path.exists("test1"))
        launch_rocket(self.lp, self.fworker)
        self.assertTrue(os.path.exists("hello.gz"))
        launch_rocket(self.lp, self.fworker)
        self.assertTrue(os.path.exists("fwtest.2"))
        for f in ["test1", "hello.gz", "fwtest.2"]:
            os.remove(f)
Example #10
0
    def test_cleanupfiles(self):
        """DeleteFilesPrevFolder must remove only glob matches.

        fw1 creates four folders, then deletes with patterns
        ["to_remove*", "dont_remove"]: both to_remove.relax* match the
        wildcard, but 'dont_remove' (no wildcard) must NOT match
        dont_remove.relax0, and shouldnt_touch matches nothing.
        """
        fw1 = Firework(
            [
                CreateFolder(folder_name="to_remove.relax0"),
                CreateFolder(folder_name="to_remove.relax1"),
                CreateFolder(folder_name="dont_remove.relax0"),
                CreateFolder(folder_name="shouldnt_touch"),
                DeleteFilesPrevFolder(files=["to_remove*", "dont_remove"]),
                PassCalcLocs(name="fw1"),
            ],
            name="fw1",
        )
        # fw2 only exists to receive fw1's calc_locs for the assertions.
        fw2 = Firework([PassCalcLocs(name="fw2")], name="fw2", parents=fw1)

        wf = Workflow([fw1, fw2])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        fw2 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw2"})[0])
        calc_locs = fw2.spec["calc_locs"]

        self.assertTrue(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"],
                    "dont_remove.relax0")))
        self.assertTrue(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"], "shouldnt_touch")))
        self.assertFalse(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"],
                    "to_remove.relax0")))
        self.assertFalse(
            os.path.exists(
                os.path.join(
                    get_calc_loc("fw1", calc_locs)["path"],
                    "to_remove.relax1")))
Example #11
0
    def test_job_info(self):
        """With _pass_job_info set, each child's spec should accumulate a
        '_job_info' list describing its completed ancestors."""
        fw1 = Firework([ScriptTask.from_str('echo "Testing job info"')], spec={"_pass_job_info": True}, fw_id=1)
        fw2 = Firework([DummyJobPassTask()], parents=[fw1], spec={"_pass_job_info": True, "target": 1}, fw_id=2)
        fw3 = Firework([DummyJobPassTask()], parents=[fw2], spec={"target":2}, fw_id=3)
        self.lp.add_wf(Workflow([fw1, fw2, fw3]))
        launch_rocket(self.lp, self.fworker)

        # After fw1 runs, fw2 should carry one _job_info entry for fw1.
        target_fw_id = self.lp.get_fw_ids({"spec.target": 1})[0]
        modified_spec = self.lp.get_fw_by_id(target_fw_id).spec

        # (Removed dead commented-out polling loops that were left here as
        # no-op string literals.)
        self.assertIsNotNone(modified_spec['_job_info'])
        self.assertIsNotNone(modified_spec['_job_info'][0]["launch_dir"])
        self.assertEqual(modified_spec['_job_info'][0]['name'], 'Unnamed FW')
        self.assertEqual(modified_spec['_job_info'][0]['fw_id'], 1)

        launch_rocket(self.lp, self.fworker)

        # After fw2 runs, fw3 should see both fw1's and fw2's entries.
        target_fw_id = self.lp.get_fw_ids({"spec.target": 2})[0]
        modified_spec = self.lp.get_fw_by_id(target_fw_id).spec

        self.assertEqual(len(modified_spec['_job_info']), 2)
Example #12
0
def basic_wf_ex():
    """Demonstrate a diamond-shaped workflow: FW 1 fans out to FWs 2 and 3,
    which both feed FW 4, then run everything with rapidfire."""
    print("--- BASIC WORKFLOW EXAMPLE ---")

    # setup
    launchpad = setup()

    # add FireWorks: one echo ScriptTask per person, fw_ids starting at 1
    commands = [
        'echo "Ingrid is the CEO."',
        'echo "Jill is a manager."',
        'echo "Jack is a manager."',
        'echo "Kip is an intern."',
    ]
    fireworks = [
        Firework(ScriptTask.from_str(cmd), fw_id=n)
        for n, cmd in enumerate(commands, start=1)
    ]

    # make workflow with the diamond dependency structure
    workflow = Workflow(fireworks, {1: [2, 3], 2: [4], 3: [4]})
    launchpad.add_wf(workflow)

    # launch Rocket
    rapidfire(launchpad, FWorker())
Example #13
0
    def test_passcalclocs(self):
        """calc_locs must accumulate in launch order down fw1 -> fw2 -> fw3,
        and get_calc_loc must resolve by name (True = most recent)."""
        fw1 = Firework([PassCalcLocs(name="fw1")], name="fw1")
        fw2 = Firework([PassCalcLocs(name="fw2")], name="fw2", parents=fw1)
        fw3 = Firework([PassCalcLocs(name="fw3")], name="fw3", parents=fw2)

        wf = Workflow([fw1, fw2, fw3])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        fw2 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw2"})[0])
        fw3 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw3"})[0])

        # Each FW sees one calc_loc per completed ancestor, oldest first.
        self.assertEqual(len(fw2.spec["calc_locs"]), 1)
        self.assertEqual(len(fw3.spec["calc_locs"]), 2)
        self.assertEqual(fw3.spec["calc_locs"][0]["name"], "fw1")
        self.assertEqual(fw3.spec["calc_locs"][1]["name"], "fw2")
        # Distinct launches must have distinct launch directories.
        self.assertNotEqual(fw3.spec["calc_locs"][0]["path"],
                            fw3.spec["calc_locs"][1]["path"])

        calc_locs = fw3.spec["calc_locs"]
        self.assertEqual(get_calc_loc("fw1", calc_locs), calc_locs[0])
        self.assertEqual(get_calc_loc("fw2", calc_locs), calc_locs[1])
        # Passing True selects the most recent calc_loc.
        self.assertEqual(get_calc_loc(True, calc_locs), calc_locs[1])
Example #14
0
    def test_parallel_fibadder(self):
        """Stress concurrent Workflow updates: four identical Fibonacci
        adder FWs share one parent, then NCORES_PARALLEL_TEST processes
        launch rockets against the same LaunchPad simultaneously.

        Fixes: the Pool was never closed/joined (worker-process leak) —
        now managed by a context manager; the four identical spec literals
        are built from one template (each FW still gets its own copy).
        """
        parent = Firework(
            ScriptTask.from_str("python -c 'print(\"test1\")'",
                                {'store_stdout': True}))
        fib_spec = {'smaller': 0, 'larger': 1, 'stop_point': 30}
        fib_fws = [
            Firework(FibonacciAdderTask(), dict(fib_spec), parents=[parent])
            for _ in range(4)
        ]
        wf = Workflow([parent] + fib_fws)
        self.lp.add_wf(wf)

        creds_array = [self.lp.to_dict()] * NCORES_PARALLEL_TEST
        # Context manager guarantees the worker processes are reaped even
        # if map() raises; map() itself blocks until all launches finish.
        with Pool(NCORES_PARALLEL_TEST) as p:
            p.map(random_launch, creds_array)
Example #15
0
    def test_init(self):
        """Workflow construction: valid links succeed, while links that
        reference an unknown fw_id (as parent or child) raise ValueError."""
        fireworks = [
            Firework([PyTask(func="print", args=[n])], fw_id=n)
            for n in range(5)
        ]

        built = Workflow(fireworks,
                         links_dict={0: [1, 2, 3], 1: [4], 2: [4]})
        self.assertIsInstance(built, Workflow)

        # Unknown parent id (100) must be rejected.
        with self.assertRaises(ValueError):
            Workflow(fireworks,
                     links_dict={0: [1, 2, 3], 1: [4], 100: [4]})

        # Unknown child id (100) must be rejected.
        with self.assertRaises(ValueError):
            Workflow(fireworks,
                     links_dict={0: [1, 2, 3], 1: [4], 2: [100]})
Example #16
0
    def __init__(self, abiinput):
        """Wrap a single Abinit input in a one-Firework workflow.

        Exposes both the Firework (self.fw) and the enclosing Workflow
        (self.wf) built from it.
        """
        self.fw = Firework(AbiFireTask(abiinput))
        self.wf = Workflow([self.fw])
Example #17
0
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object
    The workflow has two steps - a structure relaxation and a static run

    Chain: (1) GGA double relaxation under custodian -> (2) DB insertion ->
    (3) GGA static run reusing the relaxed CONTCAR -> (4) DB insertion.
    The VASP input objects are serialized into each Firework's spec.

    :param structure: structure to relax -- assumed compatible with
        MPGGAVaspInputSet (pymatgen Structure); TODO confirm
    :return: a Workflow of four linked FireWorks
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('')
    for j in jobs:  # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [
        VaspErrorHandler(),
        FrozenJobErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler()
    ]
    c_params = {
        'jobs': [j.as_dict() for j in jobs],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors': 5
    }
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POSCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(
        Firework(tasks,
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    # Single static job, reusing the handlers defined for the relaxation.
    custodiantask = VaspCustodianTaskEx({
        'jobs': [VaspJob('', auto_npar=False).as_dict()],
        'handlers': [h.as_dict() for h in handlers],
        'max_errors':
        5
    })
    fws.append(
        Firework([copytask, setuptask, custodiantask],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()],
                 spec,
                 name=get_name(structure, spec['task_type']),
                 fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
def parchg_get_for_localization_check(dir, vasp_cmd, label):
    """
    Args:
        dir: directory need to scan
        vasp_cmd: vasp run command executed by os.system, e.g. 'vasp_std'
        label: a label for these jobs

    Returns:
        Number of PARCHG jobs queued (one Firework per eligible directory).

    NOTE(review): Python 2 syntax (print statements). `dir` shadows the
    builtin, as does `input` in the copy loop, and the list comprehension
    reuses `dir` as its loop variable. Relies on module-level `fworker`,
    `launchpad`, `cd`, `get_iband`, `CommandRun` -- confirm these are
    imported. `find -name wavecar` is lowercase -- confirm case matches
    the files on disk.
    """
    # Find candidate directories via `find`, then keep defect-like dirs
    # (subst/vac/inter/...) that are not charge-0/skip dirs.
    scan = subprocess.Popen(['find', dir, '-name', 'wavecar'],
                            stdout=subprocess.PIPE)
    scan.wait()
    dir_coll = scan.stdout.read().split()
    exclude = re.compile('.*charge0|.*charge_0|.*skip')
    include = re.compile('.*subst|.*sub|.*vac|.*inter|.*as')
    work_dirs = [
        dir for dir in dir_coll
        if not exclude.match(dir) and include.match(dir)
    ]
    fws = []
    njobs = 0
    dir_end = 'parchg'
    for work_dir in work_dirs:
        with cd(work_dir):
            print work_dir
            # Skip dirs that already have PARCHG output; rebuild otherwise.
            if os.path.isdir(dir_end):
                if glob.glob(os.path.join('parchg', 'PARCHG*')) != []:
                    print work_dir, ' was already well done! Skip now!'
                    continue
                else:
                    delect = subprocess.Popen(['rm', '-rf', dir_end])
                    delect.wait()
            os.mkdir(dir_end)
            iband = get_iband(work_dir)
            # Band-decomposed partial-charge restart from the WAVECAR.
            incar = Incar.from_file('INCAR')
            incar['ICHARG'] = 0
            incar['ISTART'] = 1
            incar['LPARD'] = True
            incar['IBAND'] = iband
            incar['KPUSE'] = 1
            incar['LSEPB'] = True
            incar['LSEPK'] = True
            incar.write_file(os.path.join(dir_end, 'INCAR'))
            shutil.copyfile('IBZKPT', os.path.join(dir_end, 'KPOINTS'))
            inputs_other = ['POTCAR', 'POSCAR', 'WAVECAR']
            for input in inputs_other:
                shutil.copy(input, dir_end)
            # NOTE(review): bare except silently falls through to the
            # singular filename; narrow to IOError/KeyError if possible.
            try:
                transf = json.load(open('transformations.json'))
            except:
                transf = json.load(open('transformation.json'))
            queue = {}
            queue['job_name'] = label + '_' + transf[
                'defect_type'] + '_' + str(transf['charge'])
            queue['ntasks'] = 1
            queue['vmem'] = '40000mb'
            queue['walltime'] = '01:00:00'
            fw_name = transf['defect_type'] + '_' + str(transf['charge'])
            ftask = CommandRun()
            fw = Firework(
                [ftask],
                spec={
                    'cmd': vasp_cmd,
                    '_launch_dir': os.path.join(work_dir, dir_end),
                    '_queueadapter': queue,
                    '_fworker': fworker
                },
                name=fw_name)
            fws.append(fw)
            njobs = njobs + 1
    wf = Workflow(fws, name=label)
    launchpad.add_wf(wf)
    return njobs
Example #19
0
    def __init__(self,
                 neb_vasp_input_set,
                 spec,
                 neb_terminals,
                 relax_terminals=True,
                 n_insert=1,
                 n_nebs=3,
                 relax_vasp_input_set=None,
                 initial_neb_structures=None,
                 climbing_image=True):
        """Build an SRC workflow of n_nebs chained NEB relaxations,
        optionally preceded by relaxations of both terminal structures.

        NOTE(review): relax_vasp_input_set defaults to None but is CALLED
        when relax_terminals is True (the default) -- confirm callers
        always supply it. Also note spec is mutated (pop of
        'additional_controllers') and neb_vasp_input_set appears unused
        in this body.
        """
        user_incar_settings = {'NPAR': 4, 'ISIF': 0, 'SIGMA': 0.2, 'ISMEAR': 0}
        if n_nebs < 1:
            raise ValueError('Minimum one NEB ...')
        if relax_terminals and initial_neb_structures is not None:
            raise ValueError(
                'Cannot relax terminals and start from initial NEB structures')
        # Initializes fws list and links_dict
        fws = []
        links_dict = {}

        # Caller-supplied controllers win; spec is mutated via pop().
        if 'additional_controllers' in spec:
            additional_controllers = spec['additional_controllers']
            spec.pop('additional_controllers')
        else:
            additional_controllers = [
                WalltimeController(),
                MemoryController(),
                VaspXMLValidatorController()
            ]

        # Control procedure
        control_procedure = ControlProcedure(
            controllers=additional_controllers)

        # First NEB
        gen_neb_spec = spec.copy()
        # When relaxing terminals, their structures come from the relax
        # FWs at runtime rather than from the spec.
        if relax_terminals:
            gen_neb_spec['terminal_start'] = None
            gen_neb_spec['terminal_end'] = None
        else:
            gen_neb_spec['terminal_start'] = neb_terminals[0]
            gen_neb_spec['terminal_end'] = neb_terminals[1]

        # gen_neb_spec['structures'] = neb_terminals
        gen_neb_spec = set_short_single_core_to_spec(gen_neb_spec)

        terminal_start_relax_task_type = 'MPRelaxVasp-start'
        terminal_end_relax_task_type = 'MPRelaxVasp-end'
        terminal_start_task_type = None
        terminal_end_task_type = None
        if relax_terminals:
            terminal_start_task_type = terminal_start_relax_task_type
            terminal_end_task_type = terminal_end_relax_task_type
        gen_neb_task = GenerateNEBRelaxationTask(
            n_insert=n_insert,
            user_incar_settings=user_incar_settings,
            climbing_image=climbing_image,
            task_index='neb1',
            terminal_start_task_type=terminal_start_task_type,
            terminal_end_task_type=terminal_end_task_type)
        gen_neb_fw = Firework([gen_neb_task],
                              spec=gen_neb_spec,
                              name='gen-neb1')
        fws.append(gen_neb_fw)

        if relax_terminals:
            # Start terminal: SRC relax trio whose control FW gates gen-neb1.
            relax_task_helper = MPRelaxTaskHelper()
            vis_start = relax_vasp_input_set(
                neb_terminals[0], user_incar_settings=user_incar_settings)
            start_src_fws = createVaspSRCFireworks(
                vasp_input_set=vis_start,
                task_helper=relax_task_helper,
                task_type=terminal_start_relax_task_type,
                control_procedure=control_procedure,
                custodian_handlers=[],
                max_restarts=10,
                src_cleaning=None,
                task_index=None,
                spec=None,
                setup_spec_update=None,
                run_spec_update=None)
            fws.extend(start_src_fws['fws'])
            links_dict_update(links_dict=links_dict,
                              links_update=start_src_fws['links_dict'])
            linkupdate = {start_src_fws['control_fw'].fw_id: gen_neb_fw.fw_id}
            links_dict_update(links_dict=links_dict, links_update=linkupdate)

            # End terminal

            vis_end = relax_vasp_input_set(
                neb_terminals[1], user_incar_settings=user_incar_settings)
            end_src_fws = createVaspSRCFireworks(
                vasp_input_set=vis_end,
                task_helper=relax_task_helper,
                task_type=terminal_end_relax_task_type,
                control_procedure=control_procedure,
                custodian_handlers=[],
                max_restarts=10,
                src_cleaning=None,
                task_index=None,
                spec=None,
                setup_spec_update=None,
                run_spec_update=None)
            fws.extend(end_src_fws['fws'])
            links_dict_update(links_dict=links_dict,
                              links_update=end_src_fws['links_dict'])
            linkupdate = {end_src_fws['control_fw'].fw_id: gen_neb_fw.fw_id}
            links_dict_update(links_dict=links_dict, links_update=linkupdate)

        # Chain NEBs 2..n: each gen-neb FW depends on the previous one and,
        # when relaxing terminals, on both terminal control FWs.
        if n_nebs > 1:
            for ineb in range(2, n_nebs + 1):
                prev_gen_neb_fw = gen_neb_fw
                gen_neb_spec = spec.copy()
                gen_neb_spec['structures'] = None
                gen_neb_spec = set_short_single_core_to_spec(gen_neb_spec)
                gen_neb_task = GenerateNEBRelaxationTask(
                    n_insert=n_insert,
                    user_incar_settings=user_incar_settings,
                    climbing_image=climbing_image,
                    task_index='neb{:d}'.format(ineb),
                    prev_neb_task_type='neb{:d}'.format(ineb - 1),
                    terminal_start_task_type=terminal_start_task_type,
                    terminal_end_task_type=terminal_end_task_type)
                gen_neb_fw = Firework([gen_neb_task],
                                      spec=gen_neb_spec,
                                      name='gen-neb{:d}'.format(ineb))
                fws.append(gen_neb_fw)
                linkupdate = {prev_gen_neb_fw.fw_id: gen_neb_fw.fw_id}
                links_dict_update(links_dict=links_dict,
                                  links_update=linkupdate)
                if relax_terminals:
                    linkupdate = {
                        start_src_fws['control_fw'].fw_id: gen_neb_fw.fw_id
                    }
                    links_dict_update(links_dict=links_dict,
                                      links_update=linkupdate)
                    linkupdate = {
                        end_src_fws['control_fw'].fw_id: gen_neb_fw.fw_id
                    }
                    links_dict_update(links_dict=links_dict,
                                      links_update=linkupdate)

        # NOTE(review): both branches assign the same name; the else branch
        # presumably should drop the 'c' (non-climbing-image) -- confirm.
        if climbing_image:
            wfname = "MPcNEBRelaxFWWorkflowSRC"
        else:
            wfname = "MPcNEBRelaxFWWorkflowSRC"
        self.wf = Workflow(fireworks=fws,
                           links_dict=links_dict,
                           metadata={
                               'workflow_class': self.workflow_class,
                               'workflow_module': self.workflow_module
                           },
                           name=wfname)
Example #20
0
    def run_task(self, fw_spec):
        """Assimilate a finished VASP run into the tasks database.

        The run directory comes from ``fw_spec['prev_vasp_dir']``, or - when
        the parent Firework fizzled - from the fizzled parent's launch
        record.  The directory is parsed with ``MPVaspDrone`` and inserted.
        On a successful run the parsed analysis/output is forwarded through
        ``update_spec``; on an unconverged (but otherwise healthy) run a
        detour workflow (re-run with the unconverged handler + a new DB
        insertion) is returned instead.

        Raises:
            ValueError: if the run failed for any reason other than
                electronic convergence (fizzles this Firework).
        """
        if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec:
            # parent crashed: recover its launch dir from the fizzled-parent
            # record instead of the normal spec entry
            prev_dir = get_loc(
                fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
            update_spec = {}  # add this later when creating new FW
            fizzled_parent = True
            parse_dos = False
        else:
            prev_dir = get_loc(fw_spec['prev_vasp_dir'])
            update_spec = {
                'prev_vasp_dir': prev_dir,
                'prev_task_type': fw_spec['prev_task_type'],
                'run_tags': fw_spec['run_tags'],
                'parameters': fw_spec.get('parameters')
            }
            fizzled_parent = False
            # only Uniform runs carry a DOS worth parsing
            parse_dos = 'Uniform' in fw_spec['prev_task_type']
        # run_tags may live in this spec or, for fizzled parents, in theirs
        if 'run_tags' in fw_spec:
            self.additional_fields['run_tags'] = fw_spec['run_tags']
        else:
            self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][
                0]['spec']['run_tags']

        # optionally relocate the run dir to the "garden" archive area
        if MOVE_TO_GARDEN_DEV:
            prev_dir = move_to_garden(prev_dir, prod=False)

        elif MOVE_TO_GARDEN_PROD:
            prev_dir = move_to_garden(prev_dir, prod=True)

        # get the directory containing the db file
        db_dir = os.environ['DB_LOC']
        db_path = os.path.join(db_dir, 'tasks_db.json')

        logging.basicConfig(level=logging.INFO)
        logger = logging.getLogger('MPVaspDrone')
        logger.setLevel(logging.INFO)
        sh = logging.StreamHandler(stream=sys.stdout)
        sh.setLevel(getattr(logging, 'INFO'))
        logger.addHandler(sh)
        with open(db_path) as f:
            db_creds = json.load(f)
            drone = MPVaspDrone(host=db_creds['host'],
                                port=db_creds['port'],
                                database=db_creds['database'],
                                user=db_creds['admin_user'],
                                password=db_creds['admin_password'],
                                collection=db_creds['collection'],
                                parse_dos=parse_dos,
                                additional_fields=self.additional_fields,
                                update_duplicates=self.update_duplicates)
            t_id, d = drone.assimilate(
                prev_dir, launches_coll=LaunchPad.auto_load().launches)

        # prefer the "final" (post-dedup) SNL/group ids when present
        mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
        snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d[
            'snlgroup_id']
        update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})

        print 'ENTERED task id:', t_id
        stored_data = {'task_id': t_id}
        if d['state'] == 'successful':
            update_spec['analysis'] = d['analysis']
            update_spec['output'] = d['output']
            update_spec['vasp'] = {
                'incar': d['calculations'][-1]['input']['incar'],
                'kpoints': d['calculations'][-1]['input']['kpoints']
            }
            update_spec["task_id"] = t_id
            return FWAction(stored_data=stored_data, update_spec=update_spec)

        # not successful - first test to see if UnconvergedHandler is needed
        if not fizzled_parent:
            unconverged_tag = 'unconverged_handler--{}'.format(
                fw_spec['prev_task_type'])
            output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
            ueh = UnconvergedErrorHandler(output_filename=output_dir)
            # TODO: make this a little more flexible
            # the tag check prevents an infinite retry loop
            if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
                print 'Unconverged run! Creating dynamic FW...'

                spec = {
                    'prev_vasp_dir': prev_dir,
                    'prev_task_type': fw_spec['task_type'],
                    'mpsnl': mpsnl,
                    'snlgroup_id': snlgroup_id,
                    'task_type': fw_spec['prev_task_type'],
                    'run_tags': list(fw_spec['run_tags']),
                    'parameters': fw_spec.get('parameters'),
                    '_dupefinder': DupeFinderVasp().to_dict(),
                    '_priority': fw_spec['_priority']
                }
                # Pass elastic tensor spec
                if 'deformation_matrix' in fw_spec.keys():
                    spec['deformation_matrix'] = fw_spec['deformation_matrix']
                    spec['original_task_id'] = fw_spec['original_task_id']
                snl = StructureNL.from_dict(spec['mpsnl'])
                spec['run_tags'].append(unconverged_tag)
                spec['_queueadapter'] = QA_VASP

                fws = []
                connections = {}

                f = Composition(snl.structure.composition.reduced_formula
                                ).alphabetical_formula

                # FW -2: copy inputs, apply unconverged handler, re-run VASP
                fws.append(
                    Firework([
                        VaspCopyTask({
                            'files': [
                                'INCAR', 'KPOINTS', 'POSCAR', 'POTCAR',
                                'CONTCAR'
                            ],
                            'use_CONTCAR':
                            False
                        }),
                        SetupUnconvergedHandlerTask(),
                        get_custodian_task(spec)
                    ],
                             spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-2))

                # FW -1: DB insertion for the retried run
                spec = {
                    'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True,
                    '_priority': fw_spec['_priority'],
                    '_queueadapter': QA_DB,
                    'run_tags': list(fw_spec['run_tags'])
                }
                if 'deformation_matrix' in fw_spec.keys():
                    spec['deformation_matrix'] = fw_spec['deformation_matrix']
                    spec['original_task_id'] = fw_spec['original_task_id']
                spec['run_tags'].append(unconverged_tag)
                fws.append(
                    Firework([VaspToDBTask()],
                             spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-1))
                connections[-2] = -1

                wf = Workflow(fws, connections)

                return FWAction(detours=wf)

        # not successful and not due to convergence problem - FIZZLE
        raise ValueError("DB insertion successful, but don't know how to \
                         fix this Firework! Can't continue with workflow...")
Example #21
0
def mol_to_ipea_wf(mol, name, **kwargs):
    """Build the multistep IP/EA fireworks for *mol* and wrap them in a Workflow."""
    ipea_fireworks, ipea_links = multistep_ipea_fws(mol, name, **kwargs)
    return Workflow(ipea_fireworks, ipea_links, name)
Example #22
0
    def run_task(self, fw_spec):
        """Append the v2 follow-up chain after a successful relaxation:
        static -> DB insert -> Uniform -> DB insert -> band structure ->
        DB insert, plus an optional Boltztrap run.

        K-point densities and INCAR smearing settings are chosen from the
        relaxation's band gap (``fw_spec['analysis']['bandgap']``) compared
        against ``self.gap_cutoff`` and ``self.metal_cutoff``.

        Returns:
            FWAction with the follow-up workflow in ``additions``.
        """
        print 'sleeping 10s for Mongo'
        time.sleep(10)
        print 'done sleeping'
        print 'the gap is {}, the cutoff is {}'.format(
            fw_spec['analysis']['bandgap'], self.gap_cutoff)
        # gapped systems get sparser k-meshes than (near-)metallic ones
        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            static_dens = 90
            uniform_dens = 1000
            line_dens = 20
        else:
            static_dens = 450
            uniform_dens = 1500
            line_dens = 30

        # near-metallic gap: override the smearing (ISMEAR/SIGMA) settings
        if fw_spec['analysis']['bandgap'] <= self.metal_cutoff:
            user_incar_settings = {"ISMEAR": 1, "SIGMA": 0.2}
        else:
            user_incar_settings = {}

        print 'Adding more runs...'

        type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'

        snl = StructureNL.from_dict(fw_spec['mpsnl'])
        f = Composition(
            snl.structure.composition.reduced_formula).alphabetical_formula

        fws = []
        connections = {}

        priority = fw_spec['_priority']
        trackers = [
            Tracker('FW_job.out'),
            Tracker('FW_job.error'),
            Tracker('vasp.out'),
            Tracker('OUTCAR'),
            Tracker('OSZICAR')
        ]
        trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

        # run GGA static
        spec = fw_spec  # pass all the items from the current spec to the new
        # NOTE(review): this aliases fw_spec rather than copying it, so the
        # update() below also mutates fw_spec -- confirm this is intended
        spec.update({
            'task_type': '{} static v2'.format(type_name),
            '_queueadapter': QA_VASP_SMALL,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority,
            '_trackers': trackers
        })
        fws.append(
            Firework([
                VaspCopyTask({
                    'use_CONTCAR': True,
                    'skip_CHGCAR': True
                }),
                SetupStaticRunTask({
                    "kpoints_density": static_dens,
                    'user_incar_settings': user_incar_settings
                }),
                get_custodian_task(spec)
            ],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-10))

        # insert into DB - GGA static
        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            '_trackers': trackers_db
        }
        fws.append(
            Firework([VaspToDBTask()],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-9))
        connections[-10] = -9

        # run GGA Uniform
        spec = {
            'task_type': '{} Uniform v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority,
            '_trackers': trackers
        }
        fws.append(
            Firework([
                VaspCopyTask({'use_CONTCAR': False}),
                SetupNonSCFTask({
                    'mode': 'uniform',
                    "kpoints_density": uniform_dens
                }),
                get_custodian_task(spec)
            ],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-8))
        connections[-9] = -8

        # insert into DB - GGA Uniform
        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            '_trackers': trackers_db
        }
        fws.append(
            Firework([VaspToDBTask({'parse_uniform': True})],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-7))
        connections[-8] = -7

        # run GGA Band structure
        spec = {
            'task_type': '{} band structure v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority,
            '_trackers': trackers
        }
        fws.append(
            Firework([
                VaspCopyTask({'use_CONTCAR': False}),
                SetupNonSCFTask({
                    'mode': 'line',
                    "kpoints_line_density": line_dens
                }),
                get_custodian_task(spec)
            ],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-6))
        # list form: the Boltztrap FW may be appended to this node below
        connections[-7] = [-6]

        # insert into DB - GGA Band structure
        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(),
            '_trackers': trackers_db
        }
        fws.append(
            Firework([VaspToDBTask({})],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=-5))
        connections[-6] = -5

        if fw_spec.get('parameters') and fw_spec['parameters'].get(
                'boltztrap'):
            # run Boltztrap
            from mpworks.firetasks.boltztrap_tasks import BoltztrapRunTask
            spec = {
                'task_type': '{} Boltztrap'.format(type_name),
                '_queueadapter': QA_DB,
                '_dupefinder': DupeFinderDB().to_dict(),
                '_priority': priority
            }
            fws.append(
                Firework([BoltztrapRunTask()],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-4))
            # Boltztrap runs off the Uniform DB-insert node, in parallel
            # with the band-structure run
            connections[-7].append(-4)

        wf = Workflow(fws, connections)

        print 'Done adding more runs...'

        return FWAction(additions=wf)
Example #23
0
    def run_task(self, fw_spec):
        """If the relaxed structure's band gap is at or above
        ``self.gap_cutoff``, append the v1 follow-up chain: static ->
        DB insert -> Uniform -> DB insert -> band structure -> DB insert.

        Returns:
            FWAction with the follow-up workflow in ``additions``, or an
            empty FWAction when the gap is below the cutoff.
        """
        print 'sleeping 10s for Mongo'
        time.sleep(10)
        print 'done sleeping'
        print 'the gap is {}, the cutoff is {}'.format(
            fw_spec['analysis']['bandgap'], self.gap_cutoff)

        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            print 'Adding more runs...'
            type_name = 'GGA+U' if 'GGA+U' in fw_spec[
                'prev_task_type'] else 'GGA'

            snl = StructureNL.from_dict(fw_spec['mpsnl'])
            f = Composition(
                snl.structure.composition.reduced_formula).alphabetical_formula

            fws = []
            connections = {}

            priority = fw_spec['_priority']

            # run GGA static
            spec = fw_spec  # pass all the items from the current spec to the new
            #  one
            # NOTE(review): this aliases fw_spec rather than copying it, so
            # the update() below also mutates fw_spec -- confirm intended
            spec.update({
                'task_type': '{} static'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority
            })
            fws.append(
                Firework([
                    VaspCopyTask({'use_CONTCAR': True}),
                    SetupStaticRunTask(),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-10))

            # insert into DB - GGA static
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()
            }
            fws.append(
                Firework([VaspToDBTask()],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-9))
            connections[-10] = -9

            # run GGA Uniform
            spec = {
                'task_type': '{} Uniform'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority
            }
            fws.append(
                Firework([
                    VaspCopyTask({'use_CONTCAR': False}),
                    SetupNonSCFTask({'mode': 'uniform'}),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-8))
            connections[-9] = -8

            # insert into DB - GGA Uniform
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()
            }
            fws.append(
                Firework([VaspToDBTask({'parse_uniform': True})],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-7))
            connections[-8] = -7

            # run GGA Band structure
            spec = {
                'task_type': '{} band structure'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority
            }
            fws.append(
                Firework([
                    VaspCopyTask({'use_CONTCAR': False}),
                    SetupNonSCFTask({'mode': 'line'}),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-6))
            connections[-7] = -6

            # insert into DB - GGA Band structure
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()
            }
            fws.append(
                Firework([VaspToDBTask({})],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-5))
            connections[-6] = -5

            wf = Workflow(fws, connections)

            print 'Done adding more runs...'

            return FWAction(additions=wf)
        return FWAction()
def PBEsol_Relax_then_HSE_Gap_with_Vasp(material_id, vasp_cmd, struct,
                                        is_spin_polarized):
    """
    Add a single-material gap workflow to the launchpad:
    PBEsol relax -> PBEsol static -> PBEsol band structure ->
    HSE static (at the PBEsol k_VBM/k_CBM) -> insert gap into MongoDB.

    Queue sizes/walltimes for the PBEsol and HSE steps are chosen from the
    number of sites in the structure.

    Args:
       material_id: id of the material under consideration (from the
           Materials Project); used to name the jobs and tag the DB insert
       vasp_cmd: the vasp command passed to every VaspRun firetask,
           e.g. ['mpirun', 'vasp']
       struct: structure object; ``num_sites`` sizes the queue requests and
           ``composition.reduced_formula`` names the workflow
       is_spin_polarized: whether the VASP runs should be spin polarized
    """

    ftaska1 = Generate_VaspInputFiles_for_Relax_with_PBEsol()
    ftaska2 = VaspRun()

    ftaskb1 = Generate_VaspInputfiles_for_Static_Run()
    ftaskb2 = VaspRun()

    ftaskc1 = Generate_VaspInputfiles_for_BS()
    ftaskc2 = VaspRun()

    ftaskd1 = Generate_VaspInputFiles_for_HSE_certain_shots()
    ftaskd2 = VaspRun()

    ftaskf1 = Insert_Gap_into_monogdb()

    queue1 = {}  ### for PBE
    queue2 = {}  ### for HSE
    queue1['job_name'] = material_id + '_PBEsol'
    queue2['job_name'] = material_id + '_HSE'
    nsites = struct.num_sites
    # scale core counts and walltimes with system size; HSE always gets
    # substantially more resources than PBEsol
    if nsites <= 5:
        queue1['ntasks'] = 6
        queue1['walltime'] = '6:00:00'
        queue2['ntasks'] = 24
        queue2['walltime'] = '24:00:00'
    elif 5 < nsites <= 10:
        queue1['ntasks'] = 12
        queue1['walltime'] = '12:00:00'
        queue2['ntasks'] = 48
        queue2['walltime'] = '48:00:00'
    elif 10 < nsites <= 20:
        queue1['ntasks'] = 24
        queue1['walltime'] = '24:00:00'
        queue2['ntasks'] = 60
        queue2['walltime'] = '48:00:00'
    elif 20 < nsites <= 50:
        queue1['ntasks'] = 36
        queue1['walltime'] = '24:00:00'
        queue2['ntasks'] = 96
        queue2['walltime'] = '72:00:00'
    else:
        queue1['ntasks'] = 48
        queue1['walltime'] = '24:00:00'
        queue2['ntasks'] = 96
        queue2['walltime'] = '72:00:00'

    ##insert info to Mongodb
    queue3 = {}
    queue3['ntasks'] = 1
    queue3['walltime'] = '01:00:00'
    queue3['vmem'] = '1024mb'
    queue3['job_name'] = material_id + '_Insert'

    ## PBEsol relax
    fw1 = Firework(
        [ftaska1, ftaska2],
        spec={
            '_pass_job_info': True,
            '_preserve_fworker': True,
            '_fworker': fworker,
            'vasp_cmd': vasp_cmd,
            'workdir': './',
            'structure': struct,
            'material_id': material_id,
            'is_spin_polarized': is_spin_polarized,
            '_queueadapter': queue1
        },
        name=material_id + '_PBEsol_Relax')
    ## PBEsol static run
    fw2 = Firework(
        [ftaskb1, ftaskb2],
        parents=[fw1],
        spec={
            '_pass_job_info': True,
            'vasp_cmd': vasp_cmd,
            'workdir': './',
            'material_id': material_id,
            '_queueadapter': queue1
        },
        name=material_id + '_PBEsol_StaticRun')
    ## PBEsol band structure
    fw3 = Firework(
        [ftaskc1, ftaskc2],
        parents=[fw2],
        spec={
            '_pass_job_info': True,
            'vasp_cmd': vasp_cmd,
            'workdir': './',
            'material_id': material_id,
            '_queueadapter': queue1
        },
        name=material_id + '_PBEsol_BS')

    ## HSE static run with PBEsol k_VBM and k_CBM
    fw4 = Firework(
        [ftaskd1, ftaskd2],
        parents=[fw3],
        spec={
            '_pass_job_info': True,
            'vasp_cmd': vasp_cmd,
            'workdir': './',
            'material_id': material_id,
            'is_spin_polarized': is_spin_polarized,
            '_queueadapter': queue2
        },
        name=material_id + '_HSE_scf')

    ## insert results into mongodb
    fw5 = Firework([ftaskf1],
                   parents=[fw4],
                   spec={
                       'material_id': material_id,
                       '_queueadapter': queue3
                   },
                   name=material_id + '_Insert')

    # strictly linear chain fw1 -> fw2 -> fw3 -> fw4 -> fw5
    wf = Workflow([fw1, fw2, fw3, fw4, fw5], {
        fw1: [fw2],
        fw2: [fw3],
        fw3: [fw4],
        fw4: [fw5]
    },
                  name=material_id + '_' + struct.composition.reduced_formula +
                  '_HSE_Gap')
    launchpad.add_wf(wf)
def wavecar_get_for_localization_check(dir, vasp_cmd, label):
    """
    Scan *dir* for finished charged-defect runs and queue follow-up static
    runs (in a 'wavecar' subdirectory of each) that write WAVECAR/LOCPOT
    for a localization check.

    Args:
        dir: directory to scan recursively for POSCAR files (parameter name
            kept for backward compatibility although it shadows the builtin)
        vasp_cmd: vasp run command executed by subprocess.Popen, e.g. ['mpirun','vasp_std'] or ['srun','vasp_std']
        label: a label for these jobs

    Returns:
        the number of jobs added to the launchpad
    """
    scan = subprocess.Popen(['find', dir, '-name', 'POSCAR'],
                            stdout=subprocess.PIPE)
    scan.wait()
    # parent directory of each POSCAR found.
    # NOTE: loop variables renamed -- the original comprehensions used `dir`,
    # which (py2 comprehension scope leak) clobbered the parameter.
    dir_coll = [os.path.split(p)[0] for p in scan.stdout.read().split()]
    exclude = re.compile('.*charge0|.*charge_0|.*skip')
    include = re.compile('.*subst|.*sub|.*vac|.*inter|.*as')
    # keep charged defect dirs: leaf named 'charge*', matching a defect
    # pattern, and not explicitly neutral/skipped
    work_dirs = [
        d for d in dir_coll if not exclude.match(d)
        and 'charge' in d.split('/')[-1] and include.match(d)
    ]
    fws = []
    njobs = 0
    dir_end = 'wavecar'
    for work_dir in work_dirs:
        with cd(work_dir):
            os.mkdir(dir_end)
            incar = Incar.from_file('INCAR')
            incar['LVHAR'] = True
            incar['LWAVE'] = True
            incar['IBRION'] = -1  # static run: no ionic steps
            incar['NSW'] = 0
            incar['EDIFF'] = 0.000001
            incar.write_file(os.path.join(dir_end, 'INCAR'))
            shutil.copyfile('CONTCAR', os.path.join(dir_end, 'POSCAR'))
            for fname in ('KPOINTS', 'POTCAR'):  # was `input` (shadowed builtin)
                shutil.copy(fname, dir_end)
            # the transformation record may use either spelling; fall back
            # on a missing file (was a bare except, which also hid JSON and
            # copy errors)
            try:
                shutil.copy('transformations.json', dir_end)
                with open('transformations.json') as tf:
                    transf = json.load(tf)
            except (IOError, OSError):
                shutil.copy('transformation.json', dir_end)
                with open('transformation.json') as tf:
                    transf = json.load(tf)
            queue = {}
            queue['job_name'] = label + '_' + transf[
                'defect_type'] + '_' + str(transf['charge']) + '_wavecar'
            fw_name = transf['defect_type'] + '_' + str(transf['charge'])
            ftask = VaspRun()
            fw = Firework(
                [ftask],
                spec={
                    'vasp_cmd': vasp_cmd,
                    '_launch_dir': os.path.join(work_dir, dir_end),
                    '_queueadapter': queue,
                    '_fworker': fworker
                },
                name=fw_name)
            fws.append(fw)
            njobs = njobs + 1
    wf = Workflow(fws, name=label)
    launchpad.add_wf(wf)
    return njobs
Example #26
0
    def run_task(self, fw_spec):
        """For each deformation of the relaxed structure, add a three-step
        sub-workflow (add deformed SNL -> optimize deformed structure ->
        DB insert with elastic data) as workflow additions.

        Returns:
            FWAction with one Workflow per deformed structure in
            ``additions``.
        """
        # Read structure from previous relaxation
        relaxed_struct = fw_spec['output']['crystal']
        # Generate deformed structures
        d_struct_set = DeformedStructureSet(relaxed_struct, ns=0.06)
        wf = []
        for i, d_struct in enumerate(d_struct_set.def_structs):
            fws = []
            connections = {}
            f = Composition(d_struct.formula).alphabetical_formula
            snl = StructureNL(d_struct,
                              'Joseph Montoya <*****@*****.**>',
                              projects=["Elasticity"])
            # step 1: register the deformed structure in the SNL database
            tasks = [AddSNLTask()]
            snl_priority = fw_spec.get('priority', 1)
            spec = {
                'task_type': 'Add Deformed Struct to SNL database',
                'snl': snl.as_dict(),
                '_queueadapter': QA_DB,
                '_priority': snl_priority
            }
            if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL):
                spec['force_mpsnl'] = snl.as_dict()
                spec['force_snlgroup_id'] = fw_spec['snlgroup_id']
                del spec['snl']
            # fw_ids are offset by i*10 so the per-deformation sub-workflows
            # don't collide
            fws.append(
                Firework(tasks,
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-1000 + i * 10))
            connections[-1000 + i * 10] = [-999 + i * 10]
            # step 2: optimize the deformed structure
            spec = snl_to_wf._snl_to_spec(snl,
                                          parameters={'exact_structure': True})
            spec = update_spec_force_convergence(spec)
            spec['deformation_matrix'] = d_struct_set.deformations[i].tolist()
            spec['original_task_id'] = fw_spec["task_id"]
            spec['_priority'] = fw_spec['_priority'] * 2
            #Turn off dupefinder for deformed structure
            del spec['_dupefinder']
            spec['task_type'] = "Optimize deformed structure"
            fws.append(
                Firework([
                    VaspWriterTask(),
                    SetupElastConstTask(),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-999 + i * 10))

            # step 3: parse the run and store the elastic data
            priority = fw_spec['_priority'] * 3
            spec = {
                'task_type': 'VASP db insertion',
                '_priority': priority,
                '_allow_fizzled_parents': True,
                '_queueadapter': QA_DB,
                'elastic_constant': "deformed_structure",
                'clean_task_doc': True,
                'deformation_matrix': d_struct_set.deformations[i].tolist(),
                'original_task_id': fw_spec["task_id"]
            }
            fws.append(
                Firework(
                    [VaspToDBTask(), AddElasticDataToDBTask()],
                    spec,
                    name=get_slug(f + '--' + spec['task_type']),
                    fw_id=-998 + i * 10))
            connections[-999 + i * 10] = [-998 + i * 10]
            wf.append(Workflow(fws, connections))
        return FWAction(additions=wf)
def HSE_Gap_with_Vasp_from_icsd_data(file, vasp_cmd):
    """
    Queue a PBEsol-relax -> HSE-scf -> DB-insert workflow for each ICSD
    entry in *file* whose id appears in the ids list on disk.

    Entries that cannot be parsed (bad structure or band structure) are
    recorded in 'wrong_list.dat' and skipped.

    Args:
        file: path to a JSON file with one record per material, each holding
            'icsd_ids', 'structure' and a line-mode band structure under
            ['band_structures']['line'] (parameter name kept for backward
            compatibility although it shadows the builtin)
        vasp_cmd: the vasp command passed to the VaspRun firetasks,
            e.g. ['mpirun', 'vasp']

    Returns:
        number of materials for which a workflow was queued
    """
    fws = []
    # was `all = json.load(open(file))`: shadowed the builtin and leaked the
    # file handle
    with open(file) as src:
        entries = json.load(src)
    njobs = 0
    with open('/SCRATCH/acad/htbase/yugd/fw_workplace/hse_gaps_icsd/ids') as g:
        ids = g.read().split()
    with open('wrong_list.dat', 'a+') as f:
        for task in entries:
            icsd_id = 'icsd-' + str(task['icsd_ids'][0])
            if icsd_id not in ids:
                continue
            try:
                struct = Structure.from_dict(task['structure'])
                pbe_bs = BandStructureSymmLine.from_dict(
                    task['band_structures']['line'])
                is_spin_polarized = pbe_bs.is_spin_polarized
                k_vbm = pbe_bs.get_vbm()['kpoint']._fcoords
                k_cbm = pbe_bs.get_cbm()['kpoint']._fcoords
            except Exception:
                # record the unparseable entry and move on (was a bare
                # except; narrowed so KeyboardInterrupt still propagates)
                f.write('all[' + str(entries.index(task)) + ']')
                continue

            ##########################################################
            njobs = njobs + 1
            queue_hse = {}
            queue_pbesol = {}
            queue_hse['job_name'] = icsd_id + '_HSE'
            queue_pbesol['job_name'] = icsd_id + '_PBEsol'
            nsites = struct.num_sites
            # scale core counts and walltimes with system size; HSE always
            # gets more resources than PBEsol
            if nsites <= 5:
                queue_pbesol['ntasks'] = 6
                queue_pbesol['walltime'] = '6:00:00'
                queue_hse['ntasks'] = 12
                queue_hse['walltime'] = '24:00:00'
            elif 5 < nsites <= 10:
                queue_pbesol['ntasks'] = 12
                queue_pbesol['walltime'] = '6:00:00'
                queue_hse['ntasks'] = 24
                queue_hse['walltime'] = '48:00:00'
            elif 10 < nsites <= 20:
                queue_pbesol['ntasks'] = 12
                queue_pbesol['walltime'] = '24:00:00'
                queue_hse['ntasks'] = 48
                queue_hse['walltime'] = '48:00:00'
            elif 20 < nsites <= 50:
                queue_pbesol['ntasks'] = 24
                queue_pbesol['walltime'] = '48:00:00'
                queue_hse['ntasks'] = 60
                queue_hse['walltime'] = '72:00:00'
            else:
                queue_pbesol['ntasks'] = 36
                queue_pbesol['walltime'] = '48:00:00'
                queue_hse['ntasks'] = 96
                queue_hse['walltime'] = '72:00:00'

            queue_insert = {}
            queue_insert['ntasks'] = 1
            queue_insert['walltime'] = '01:00:00'
            queue_insert['vmem'] = '1024mb'
            queue_insert['job_name'] = icsd_id + '_Insert'
            ##########################################################

            # direct gap: VBM and CBM share a k-point, so only one shot needed
            if pbe_bs.get_band_gap()['direct']:
                ks_add = [k_vbm]
            else:
                ks_add = [k_vbm, k_cbm]
            ftask00 = Generate_VaspInputFiles_for_Relax_with_PBEsol_icsd()
            ftask01 = VaspRun()
            fw0 = Firework(
                [ftask00, ftask01],
                spec={
                    '_pass_job_info': True,
                    '_preserve_fworker': True,
                    '_fworker': fworker,
                    'vasp_cmd': vasp_cmd,
                    'workdir': './',
                    'structure': struct,
                    'icsd_id': icsd_id,
                    'is_spin_polarized': is_spin_polarized,
                    '_queueadapter': queue_pbesol
                },
                name=icsd_id + '_pbesol')

            ftask10 = Generate_VaspInputFiles_for_HSE_scf_icsd()
            ftask11 = VaspRun()
            fw1 = Firework(
                [ftask10, ftask11],
                parents=[fw0],
                spec={
                    '_pass_job_info': True,
                    '_preserve_fworker': True,
                    '_fworker': fworker,
                    'vasp_cmd': vasp_cmd,
                    'workdir': './',
                    'icsd_id': icsd_id,
                    'ks_add': ks_add,
                    'is_spin_polarized': is_spin_polarized,
                    '_queueadapter': queue_hse
                },
                name=icsd_id + '_HSE')
            finsert = Insert_Gap_into_monogdb_icsd()
            fw2 = Firework([finsert],
                           parents=[fw1],
                           spec={
                               'icsd_id': icsd_id,
                               '_queueadapter': queue_insert,
                               'pbe_bs': pbe_bs
                           },
                           name=icsd_id + '_Insert')
            fws.append(fw0)
            fws.append(fw1)
            fws.append(fw2)
    wf = Workflow(fws, name='HSE_Gap_for_icsds')
    launchpad.add_wf(wf)
    return njobs
Example #28
0
def snl_to_wf_phonon(snl, parameters=None):
    """Build a phonon workflow for *snl*.

    The workflow chains four fireworks: SNL registration (fw 0), a GGA
    structure optimization tightened for force convergence (fw 1), VASP
    DB insertion (fw 2), and deformed-structure setup (fw 3).

    Args:
        snl: the StructureNL to run; an MPStructureNL together with
            ``parameters['snlgroup_id']`` forces the duplicate group.
        parameters: optional dict; recognized keys are 'priority' and
            'snlgroup_id'.

    Returns:
        Workflow wired 0 -> 1 -> 2 -> 3, named after the alphabetical
        formula and carrying run metadata.
    """
    parameters = parameters or {}
    fws = []
    connections = {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    formula = Composition(
        snl.structure.composition.reduced_formula).alphabetical_formula

    # fw 0: register the SNL and resolve its duplicate group
    spec = {
        'task_type': 'Add to SNL database',
        'snl': snl.as_dict(),
        '_queueadapter': QA_DB,
        '_priority': snl_priority
    }
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        # caller pinned the SNL group: force it instead of re-grouping
        spec['force_mpsnl'] = snl.as_dict()
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(
        Firework([AddSNLTask()],
                 spec,
                 name=get_slug(formula + '--' + spec['task_type']),
                 fw_id=0))
    connections[0] = [1]

    # fw 1: GGA structure optimization with force-convergence settings
    spec = snl_to_wf._snl_to_spec(snl, parameters=parameters)
    spec = update_spec_force_convergence(spec)
    spec['run_tags'].append("origin")
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    spec['task_type'] = "Vasp force convergence"
    fws.append(
        Firework([VaspWriterTask(), get_custodian_task(spec)],
                 spec,
                 name=get_slug(formula + '--' + spec['task_type']),
                 fw_id=1))

    # fw 2: parse the optimization into the DB (runs even on fizzle)
    spec = {
        'task_type': 'VASP db insertion',
        '_priority': priority,
        '_allow_fizzled_parents': True,
        '_queueadapter': QA_DB
    }
    fws.append(
        Firework([VaspToDBTask()],
                 spec,
                 name=get_slug(formula + '--' + spec['task_type']),
                 fw_id=2))
    connections[1] = [2]

    # fw 3: generate the deformed structures for the phonon run
    spec = {
        'task_type': 'Setup Deformed Struct Task',
        '_priority': priority,
        '_queueadapter': QA_CONTROL
    }
    fws.append(
        Firework([SetupDeformedStructTask()],
                 spec,
                 name=get_slug(formula + '--' + spec['task_type']),
                 fw_id=3))
    connections[2] = [3]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and 'submission_id' in snl.data[
            '_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject'][
            'submission_id']

    return Workflow(fws, connections, name=formula, metadata=wf_meta)
Example #29
0
    def test_writevaspfrominterpolatedposcar(self):
        """Interpolate image 1 of 5 between the fw1 and fw2 CONTCARs and
        verify WriteVaspFromIOSetFromInterpolatedPOSCAR writes a full
        MPStaticSet input whose POSCAR matches pymatgen's interpolation.
        """
        nimages = 5
        this_image = 1
        autosort_tol = 0.5

        # fw1/fw2 stage the two endpoint structures; CONTCAR is copied
        # verbatim (contcar_to_poscar=False) so fw3 can interpolate it.
        fw1 = Firework([
            CopyVaspOutputs(calc_dir=self.static_outdir,
                            contcar_to_poscar=False,
                            additional_files=["CONTCAR"]),
            PassCalcLocs(name="fw1")
        ],
                       name="fw1")

        fw2 = Firework([
            CopyVaspOutputs(calc_dir=self.opt_outdir,
                            contcar_to_poscar=False,
                            additional_files=["CONTCAR"]),
            PassCalcLocs(name="fw2")
        ],
                       name="fw2")

        fw3 = Firework([
            WriteVaspFromIOSetFromInterpolatedPOSCAR(
                start="fw1",
                end="fw2",
                this_image=this_image,
                nimages=nimages,
                autosort_tol=autosort_tol,
                vasp_input_set="MPStaticSet"),
            PassCalcLocs(name="fw3")
        ],
                       name="fw3",
                       parents=[fw1, fw2])

        fw4 = Firework([PassCalcLocs(name="fw4")], name="fw4", parents=fw3)

        wf = Workflow([fw1, fw2, fw3, fw4])
        self.lp.add_wf(wf)
        rapidfire(self.lp)

        fw4 = self.lp.get_fw_by_id(self.lp.get_fw_ids({"name": "fw4"})[0])

        calc_locs = fw4.spec["calc_locs"]

        # Resolve the fw3 launch dir once; the original re-ran this
        # lookup for every assertion, and built paths with raw "+".
        fw3_path = get_calc_loc("fw3", calc_locs)["path"]
        print(fw3_path)

        # Check existence of structure files.
        self.assertTrue(os.path.exists(os.path.join(fw3_path, "POSCAR")))
        self.assertTrue(
            os.path.exists(
                os.path.join(fw3_path, "interpolate", "CONTCAR_0")))
        self.assertTrue(
            os.path.exists(
                os.path.join(fw3_path, "interpolate", "CONTCAR_1")))

        # Check existence of the remaining VASP input set files.
        self.assertTrue(os.path.exists(os.path.join(fw3_path, "INCAR")))
        self.assertTrue(os.path.exists(os.path.join(fw3_path, "KPOINTS")))
        self.assertTrue(os.path.exists(os.path.join(fw3_path, "POTCAR")))

        # Check interpolation against pymatgen's reference result.
        struct_start = Structure.from_file(
            os.path.join(fw3_path, "interpolate", "CONTCAR_0"))
        struct_end = Structure.from_file(
            os.path.join(fw3_path, "interpolate", "CONTCAR_1"))
        struct_inter = Structure.from_file(os.path.join(fw3_path, "POSCAR"))

        structs = struct_start.interpolate(struct_end,
                                           nimages,
                                           interpolate_lattices=True,
                                           autosort_tol=autosort_tol)

        # Check x of 1st site.
        self.assertAlmostEqual(structs[this_image][1].coords[0],
                               struct_inter[1].coords[0])
        # Check the a lattice parameter (abc[0]); the old comment said
        # "c" but abc[0] is a.
        self.assertAlmostEqual(structs[this_image].lattice.abc[0],
                               struct_inter.lattice.abc[0])
Example #30
0
def snl_to_wf(snl, parameters=None):
    """Build the standard VASP workflow for a StructureNL.

    Fireworks by fw_id: 0 adds the SNL to the SNL DB (skipped when the
    caller supplies 'snlgroup_id'), 1 runs the GGA structure
    optimization, 2 inserts the result into the DB, 3 adds the
    electronic-structure controller. For GGA+U compounds, fws 10/11/12
    repeat the run / insert / controller chain with +U enabled.

    Args:
        snl: the StructureNL (or MPStructureNL) to compute.
        parameters: optional dict; recognized keys include 'priority',
            'snlgroup_id', 'mpsnl', 'skip_bandstructure' and
            'force_gga_bandstructure'.

    Returns:
        Workflow named after the alphabetical formula, carrying
        structure-derived metadata plus 'run_version' (and
        'submission_id' when present in snl.data).
    """
    fws = []
    # parent fw_id -> list of child fw_ids; defaultdict(list) lets the
    # GGA+U branch append to connections[2] even when fw 3 was skipped
    connections = defaultdict(list)
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    # alphabetical formula, used to build human-readable FW names
    f = Composition(
        snl.structure.composition.reduced_formula).alphabetical_formula

    snl_spec = {}
    if 'snlgroup_id' in parameters:
        # caller pinned the duplicate group: take the MPStructureNL from
        # parameters (or from snl itself) and skip fw 0 entirely
        if 'mpsnl' in parameters:
            snl_spec['mpsnl'] = parameters['mpsnl']
        elif isinstance(snl, MPStructureNL):
            snl_spec['mpsnl'] = snl.as_dict()
        else:
            raise ValueError("improper use of force SNL")
        snl_spec['snlgroup_id'] = parameters['snlgroup_id']
    else:
        # add the SNL to the SNL DB and figure out duplicate group
        tasks = [AddSNLTask()]
        spec = {
            'task_type': 'Add to SNL database',
            'snl': snl.as_dict(),
            '_queueadapter': QA_DB,
            '_priority': snl_priority
        }
        fws.append(
            Firework(tasks,
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=0))
        connections[0] = [1]

    # files watched while the VASP fireworks run
    trackers = [
        Tracker('FW_job.out'),
        Tracker('FW_job.error'),
        Tracker('vasp.out'),
        Tracker('OUTCAR'),
        Tracker('OSZICAR'),
        Tracker('OUTCAR.relax1'),
        Tracker('OUTCAR.relax2')
    ]
    trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]
    # run GGA structure optimization
    spec = _snl_to_spec(snl, enforce_gga=True, parameters=parameters)
    spec.update(snl_spec)
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    spec['_trackers'] = trackers
    # note: get_custodian_task reads the spec built so far
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(
        Firework(tasks,
                 spec,
                 name=get_slug(f + '--' + spec['task_type']),
                 fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {
        'task_type': 'VASP db insertion',
        '_priority': priority * 2,
        '_allow_fizzled_parents': True,
        '_queueadapter': QA_DB,
        "_dupefinder": DupeFinderDB().to_dict(),
        '_trackers': trackers_db
    }
    fws.append(
        Firework([VaspToDBTask()],
                 spec,
                 name=get_slug(f + '--' + spec['task_type']),
                 fw_id=2))
    connections[1] = [2]

    # determine if GGA+U FW is needed
    incar = MPVaspInputSet().get_incar(snl.structure).as_dict()
    ggau_compound = ('LDAU' in incar and incar['LDAU'])

    # fw 3: GGA electronic structure, unless band structure is skipped
    # or the compound needs +U (which gets fw 12 instead), except when
    # a GGA band structure is explicitly forced
    if not parameters.get('skip_bandstructure', False) and (
            not ggau_compound
            or parameters.get('force_gga_bandstructure', False)):
        spec = {
            'task_type': 'Controller: add Electronic Structure v2',
            '_priority': priority,
            '_queueadapter': QA_CONTROL
        }
        fws.append(
            Firework([AddEStructureTask()],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=3))
        connections[2] = [3]

    if ggau_compound:
        # fw 10: GGA+U run seeded from the GGA optimization output
        spec = _snl_to_spec(snl, enforce_gga=False, parameters=parameters)
        del spec[
            'vasp']  # we are stealing all VASP params and such from previous run
        spec['_priority'] = priority
        spec['_queueadapter'] = QA_VASP
        spec['_trackers'] = trackers
        fws.append(
            Firework(
                [VaspCopyTask(),
                 SetupGGAUTask(),
                 get_custodian_task(spec)],
                spec,
                name=get_slug(f + '--' + spec['task_type']),
                fw_id=10))
        connections[2].append(10)

        # fw 11: insert the GGA+U run into the DB
        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority,
            "_dupefinder": DupeFinderDB().to_dict(),
            '_trackers': trackers_db
        }
        fws.append(
            Firework([VaspToDBTask()],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=11))
        connections[10] = [11]

        # fw 12: electronic structure on top of the GGA+U result
        if not parameters.get('skip_bandstructure', False):
            spec = {
                'task_type': 'Controller: add Electronic Structure v2',
                '_priority': priority,
                '_queueadapter': QA_CONTROL
            }
            fws.append(
                Firework([AddEStructureTask()],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=12))
            connections[11] = [12]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    # propagate the Materials Project submission id when it exists
    if '_materialsproject' in snl.data and 'submission_id' in snl.data[
            '_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject'][
            'submission_id']
    return Workflow(
        fws,
        connections,
        name=Composition(
            snl.structure.composition.reduced_formula).alphabetical_formula,
        metadata=wf_meta)