Example #1
def basic_wf_ex():
    print('--- BASIC WORKFLOW EXAMPLE ---')

    # setup
    launchpad = setup()

    # add FireWorks
    task1 = ScriptTask.from_str('echo "Ingrid is the CEO."')
    task2 = ScriptTask.from_str('echo "Jill is a manager."')
    task3 = ScriptTask.from_str('echo "Jack is a manager."')
    task4 = ScriptTask.from_str('echo "Kip is an intern."')

    fw1 = FireWork(task1, fw_id=-1)
    fw2 = FireWork(task2, fw_id=-2)
    fw3 = FireWork(task3, fw_id=-3)
    fw4 = FireWork(task4, fw_id=-4)

    # make workflow
    workflow = Workflow([fw1, fw2, fw3, fw4], {
        -1: [-2, -3],
        -2: [-4],
        -3: [-4]
    })
    launchpad.add_wf(workflow)

    # launch Rocket
    rapidfire(launchpad)
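
The snippets on this page predate the FireWork -> Firework class rename and omit their imports and the setup() helper. A minimal sketch of what the ScriptTask/rapidfire examples assume -- the import paths and the reset-on-connect behavior of setup() are assumptions, not shown in the originals:

# assumed imports for the examples on this page
from fireworks import FireWork, Workflow, LaunchPad, FWorker
from fireworks.user_objects.firetasks.script_task import ScriptTask
from fireworks.core.rocket_launcher import launch_rocket, rapidfire

def setup():
    # connect to a local MongoDB (localhost:27017 by default) and wipe it
    # so each example starts from an empty database
    launchpad = LaunchPad()
    launchpad.reset('', require_password=False)
    return launchpad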
Example #2
    def add_work(self, parameters):
        from fireworks.core.firework import FireWork
        tasks = []
        job = parameters['job']
        print('adding job {} to the work list as {}'.format(job, self.fw_id))
        if job == 'prep':
            launch_spec = {
                'task_type': 'Preparation job',
                '_category': 'cluster',
                '_queueadapter': 'qadapterdict'
            }
            task = VaspGWInputTask(parameters)
            tasks.append(task)
            task = VaspGWExecuteTask(parameters)
            tasks.append(task)
            task = VaspGWToDiagTask(parameters)
            tasks.append(task)
            task = VaspGWExecuteTask(parameters)
            tasks.append(task)
            fw = FireWork(tasks,
                          spec=launch_spec,
                          name=job,
                          created_on=now(),
                          fw_id=self.fw_id)
            self.connections[self.fw_id] = []
            self.prep_id = self.fw_id
            self.fw_id += 1
            print(self.connections)
        elif job in ['G0W0', 'GW0', 'scGW0']:
            launch_spec = {
                'task_type': 'GW job',
                '_category': 'cluster',
                '_queueadapter': 'qadapterdict'
            }
            task = VaspGWInputTask(parameters)
            tasks.append(task)
            task = VaspGWGetPrepResTask(parameters)
            tasks.append(task)
            task = VaspGWExecuteTask(parameters)
            tasks.append(task)
            if parameters['spec']['converge']:
                task = VaspGWWriteConDatTask(parameters)
                tasks.append(task)
                task = VaspGWTestConTask(parameters)
                tasks.append(task)
            fw = FireWork(tasks,
                          spec=launch_spec,
                          name=job,
                          created_on=now(),
                          fw_id=self.fw_id)
            self.connections[self.fw_id] = []
            self.connections[self.prep_id].append(self.fw_id)
            self.fw_id += 1
        else:
            fw = []
            print('unspecified job; this should have been caught earlier!')
            exit()

        self.work_list.append(fw)
Example #3
def snl_to_wf_phonon(snl, parameters=None):
    fws = []
    connections = {}
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition.from_formula(
        snl.structure.composition.reduced_formula).alphabetical_formula

    # add the SNL to the SNL DB and figure out duplicate group
    tasks = [AddSNLTask()]
    spec = {'task_type': 'Add to SNL database', 'snl': snl.to_dict, '_queueadapter': QA_DB, '_priority': snl_priority}
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        spec['force_mpsnl'] = snl.to_dict
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(FireWork(tasks, spec, name=get_slug(f + '--' + spec['task_type']), fw_id=0))
    connections[0] = [1]

    # run GGA structure optimization for force convergence
    spec = snl_to_wf._snl_to_spec(snl, parameters=parameters)
    spec = update_spec_force_convergence(spec)
    spec['run_tags'].append("origin")
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    spec['task_type'] = "Vasp force convergence"
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(FireWork(tasks, spec, name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB}
    fws.append(
        FireWork([VaspToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    spec = {'task_type': 'Setup Deformed Struct Task', '_priority': priority,
            '_queueadapter': QA_CONTROL}
    fws.append(
        FireWork([SetupDeformedStructTask()], spec,
                 name=get_slug(f + '--' + spec['task_type']), fw_id=3))
    connections[2] = [3]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections, name=Composition.from_formula(
        snl.structure.composition.reduced_formula).alphabetical_formula, metadata=wf_meta)
Example #4
 def test_org_wf(self):
     test1 = ScriptTask.from_str("python -c 'print(\"test1\")'",
                                 {'store_stdout': True})
     test2 = ScriptTask.from_str("python -c 'print(\"test2\")'",
                                 {'store_stdout': True})
     fw1 = FireWork(test1, fw_id=-1)
     fw2 = FireWork(test2, fw_id=-2)
     wf = Workflow([fw1, fw2], {-1: -2})
     self.lp.add_wf(wf)
     launch_rocket(self.lp, self.fworker)
     self.assertEqual(self.lp.get_launch_by_id(1).action.stored_data['stdout'],
                      str(six.b('test1\n')))
     launch_rocket(self.lp, self.fworker)
     self.assertEqual(self.lp.get_launch_by_id(2).action.stored_data['stdout'],
                      str(six.b('test2\n')))
Example #5
 def submit_new_workflow(self):
     # finds a submitted job, creates a workflow, and submits it to FireWorks
     job = self.jobs.find_and_modify({'state': 'SUBMITTED'},
                                     {'$set': {
                                         'state': 'WAITING'
                                     }})
     if job:
         submission_id = job['submission_id']
         try:
             firework = FireWork(
                 [StructurePredictionTask()],
                 spec={
                     'species': job['species'],
                     'threshold': job['threshold'],
                     'submission_id': submission_id
                 })
             wf = Workflow([firework],
                           metadata={'submission_id': submission_id})
             self.launchpad.add_wf(wf)
              print('ADDED WORKFLOW FOR {}'.format(job['species']))
         except:
             self.jobs.find_and_modify({'submission_id': submission_id},
                                       {'$set': {
                                           'state': 'ERROR'
                                       }})
             traceback.print_exc()
         return submission_id
Example #6
 def test_basic_fw(self):
     test1 = ScriptTask.from_str("python -c 'print(\"test1\")'",
                                 {'store_stdout': True})
     fw = FireWork(test1)
     self.lp.add_wf(fw)
     launch_rocket(self.lp, self.fworker)
     self.assertEqual(self.lp.get_launch_by_id(1).action.stored_data[
         'stdout'], str(six.b('test1\n')))
Example #7
 def test_add_fw(self):
     fw = FireWork(AdditionTask(), {'input_array': [5, 7]})
     self.lp.add_wf(fw)
     rapidfire(self.lp,
               m_dir=MODULE_DIR,
               nlaunches=1,
               sleep_time=5,
               max_loops=60)
     self.assertEqual(
         self.lp.get_launch_by_id(1).action.stored_data['sum'], 12)
Example #8
 def test_basic_fw(self):
     test1 = ScriptTask.from_str("python -c 'print \"test1\"'",
                                 {'store_stdout': True})
     fw = FireWork(test1)
     self.lp.add_wf(fw)
     launch_rocket(self.lp)
     time.sleep(3)
     self.assertEqual(
         self.lp.get_launch_by_id(1).action.stored_data['stdout'],
         'test1\n')
Example #9
    def test_fibadder(self):
        fib = FibonacciAdderTask()
        fw = FireWork(fib, {'smaller': 0, 'larger': 1, 'stop_point': 3})
        self.lp.add_wf(fw)
        rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR)

        self.assertEqual(self.lp.get_launch_by_id(1).action.stored_data['next_fibnum'], 1)
        self.assertEqual(self.lp.get_launch_by_id(2).action.stored_data['next_fibnum'], 2)
        self.assertEqual(self.lp.get_launch_by_id(3).action.stored_data, {})
        self.assertFalse(self.lp.run_exists())
Example #10
def rapid_fire_ex():
    print('--- RAPIDFIRE EXAMPLE ---')

    # setup
    launchpad = setup()

    # add FireWorks
    firetask = ScriptTask.from_str(
        'echo "howdy, your job launched successfully!"')
    fw1 = FireWork(firetask)
    launchpad.add_wf(fw1)

    # re-add multiple times
    fw2 = FireWork(firetask)
    launchpad.add_wf(fw2)
    fw3 = FireWork(firetask)
    launchpad.add_wf(fw3)

    # launch Rocket
    rapidfire(launchpad, FWorker())
Example #11
    def from_tarfile(cls, tar_filename):
        t = tarfile.open(tar_filename, 'r')
        links = None
        fws = []
        for f_name in t.getnames():
            m_file = t.extractfile(f_name)
            m_format = m_file.name.split('.')[-1]
            m_contents = m_file.read()
            if 'links' in f_name:
                links = Workflow.Links.from_format(m_contents, m_format)
            else:
                fws.append(FireWork.from_format(m_contents, m_format))

        return Workflow(fws, dict(links))
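
from_tarfile (above) expects an archive holding one serialized FireWork per member plus a links file. A rough writer counterpart under those assumptions -- the member names here are illustrative, not a fixed FireWorks format:

import io
import json
import tarfile

def wf_to_tarfile(wf, tar_filename):
    # inverse sketch of from_tarfile: one JSON member per FireWork plus a
    # 'links.json' member (the 'links' substring is what the reader keys on)
    with tarfile.open(tar_filename, 'w') as t:
        members = [('links.json', wf.links.to_dict())]
        members += [('fw_{}.json'.format(fw.fw_id), fw.to_dict()) for fw in wf.fws]
        for name, d in members:
            data = json.dumps(d, default=str).encode('utf-8')
            info = tarfile.TarInfo(name)
            info.size = len(data)
            t.addfile(info, io.BytesIO(data))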
Example #13
def basic_fw_ex():
    print('--- BASIC FIREWORK EXAMPLE ---')

    # setup
    launchpad = setup()

    # add FireWork
    firetask = ScriptTask.from_str(
        'echo "howdy, your job launched successfully!"')
    firework = FireWork(firetask)
    launchpad.add_wf(firework)

    # launch Rocket
    launch_rocket(launchpad, FWorker())
Example #14
    def run_task(self, fw_spec):
        smaller = fw_spec['smaller']
        larger = fw_spec['larger']
        stop_point = fw_spec['stop_point']

        m_sum = smaller + larger
        if m_sum < stop_point:
            print('The next Fibonacci number is: {}'.format(m_sum))
            # create a new Fibonacci Adder to add to the workflow
            new_fw = FireWork(FibonacciAdderTask(),
                              {'smaller': larger, 'larger': m_sum, 'stop_point': stop_point})
            return FWAction('CREATE', {'next_fibnum': m_sum}, {'create_fw': new_fw})
        else:
            print('We have now exceeded our limit; (the next Fibonacci number '
                  'would have been: {})'.format(m_sum))
            return FWAction('CONTINUE')
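
The positional FWAction('CREATE', ...) form above is the early FireWorks API. Later releases replaced the command strings with keyword arguments; a hedged modern equivalent of the same dynamic step (assuming the keyword-based FWAction):

    def run_task(self, fw_spec):
        m_sum = fw_spec['smaller'] + fw_spec['larger']
        if m_sum < fw_spec['stop_point']:
            print('The next Fibonacci number is: {}'.format(m_sum))
            new_fw = FireWork(FibonacciAdderTask(),
                              {'smaller': fw_spec['larger'], 'larger': m_sum,
                               'stop_point': fw_spec['stop_point']})
            # 'additions' replaces the old ('CREATE', ..., {'create_fw': ...}) form
            return FWAction(stored_data={'next_fibnum': m_sum}, additions=[new_fw])
        print('Limit exceeded (next number would have been: {})'.format(m_sum))
        return FWAction()  # an empty action replaces the old 'CONTINUE' command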
Example #15
def multiple_tasks_ex():
    print('--- MULTIPLE FIRETASKS EXAMPLE ---')

    # setup
    launchpad = setup()

    # add FireWorks
    firetask1 = ScriptTask.from_str('echo "This is TASK #1"')
    firetask2 = ScriptTask.from_str('echo "This is TASK #2"')
    firetask3 = ScriptTask.from_str('echo "This is TASK #3"')
    fw = FireWork([firetask1, firetask2, firetask3])
    launchpad.add_wf(fw)

    # launch Rocket
    rapidfire(launchpad, FWorker())
Example #16
def task_dict_to_wf(task_dict, launchpad):
    fw_id = launchpad.get_new_fw_id()
    l_id = launchpad.get_new_launch_id()

    spec = {'task_type': task_dict['task_type'], 'run_tags': task_dict['run_tags'],
            'vaspinputset_name': None, 'vasp': None, 'mpsnl': task_dict['snl'],
            'snlgroup_id': task_dict['snlgroup_id']}
    tasks = [DummyLegacyTask()]

    launch_dir = task_dict['dir_name_full']

    stored_data = {'error_list': []}
    update_spec = {'prev_vasp_dir': task_dict['dir_name'],
                   'prev_task_type': spec['task_type'],
                   'mpsnl': spec['mpsnl'], 'snlgroup_id': spec['snlgroup_id'],
                   'run_tags': spec['run_tags']}

    fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)

    if task_dict['completed_at']:
        complete_date = datetime.datetime.strptime(task_dict['completed_at'], "%Y-%m-%d %H:%M:%S")
        state_history = [{"created_on": complete_date, 'state': 'COMPLETED'}]
    else:
        state_history = []

    launches = [Launch('COMPLETED', launch_dir, fworker=None, host=None, ip=None, action=fwaction,
                       state_history=state_history, launch_id=l_id, fw_id=fw_id)]

    f = Composition.from_formula(task_dict['pretty_formula']).alphabetical_formula

    fw = FireWork(tasks, spec, name=get_slug(f + '--' + spec['task_type']),
                  launches=launches, state='COMPLETED', created_on=None,
                  fw_id=fw_id)

    wf_meta = get_meta_from_structure(Structure.from_dict(task_dict['snl']))
    wf_meta['run_version'] = 'preproduction (0)'

    wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)

    launchpad.add_wf(wf, reassign_all=False)
    launchpad._upsert_launch(launches[0])

    print('ADDED', fw_id)
    return fw_id
Example #17
    def test_backgroundtask(self):
        dest1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'hello.txt')
        self._teardown([dest1])

        try:
            test1 = ScriptTask.from_str("python -c 'print(\"testing background...\")'",
                                        {'store_stdout': True})

            bg_task1 = BackgroundTask(
                FileWriteTask({'files_to_write': [{'filename': dest1, 'contents': 'hello'}]}),
                num_launches=1, run_on_finish=True)
            fw = FireWork(test1, spec={'_background_tasks': [bg_task1]})
            self.lp.add_wf(fw)
            launch_rocket(self.lp, self.fworker)

            with open(dest1) as f:
                self.assertEqual(f.read(), 'hello')

        finally:
            self._teardown([dest1])
Example #18
def add_scripts(args):
    lp = get_lp(args)
    args.names = args.names if args.names else [None] * len(args.scripts)
    args.wf_name = args.wf_name if args.wf_name else args.names[0]
    fws = []
    links = {}
    for idx, s in enumerate(args.scripts):
        fws.append(FireWork(ScriptTask({'script': s, 'use_shell': True}),
                            name=args.names[idx], fw_id=idx))
        if idx != 0:
            links[idx - 1] = idx

    lp.add_wf(Workflow(fws, links, args.wf_name))
Example #19
    def get_fw_by_id(self, fw_id):
        """
        Given a FireWork id, give back a FireWork object

        :param fw_id: FireWork id (int)
        :return: FireWork object
        """
        fw_dict = self.fireworks.find_one({'fw_id': fw_id})

        if not fw_dict:
            raise ValueError('No FireWork exists with id: {}'.format(fw_id))

        # recreate launches from the launch collection
        launches = []
        for launch_id in fw_dict['launches']:
            launches.append(self.get_launch_by_id(launch_id).to_dict())
        fw_dict['launches'] = launches

        return FireWork.from_dict(fw_dict)
Example #21
    def get_fw_by_id(self, fw_id):
        """
        Given a FireWork id, give back a FireWork object

        :param fw_id: FireWork id (int)
        :return: FireWork object
        """
        fw_dict = self.fireworks.find_one({'fw_id': fw_id})

        if not fw_dict:
            raise ValueError('No FireWork exists with id: {}'.format(fw_id))

        # recreate launches from the launch collection
        fw_dict['launches'] = list(self.launches.find(
            {'launch_id': {"$in": fw_dict['launches']}}))

        fw_dict['archived_launches'] = list(self.launches.find(
            {'launch_id': {"$in": fw_dict['archived_launches']}}))

        return FireWork.from_dict(fw_dict)
Example #23
    def test_multi_fw_complex(self):

        dest1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'inputs.txt')
        dest2 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'temp_file.txt')
        self._teardown([dest1, dest2])
        try:
            # create the FireWork consisting of multiple tasks
            firetask1 = TemplateWriterTask({'context': {'opt1': 5.0, 'opt2': 'fast method'},
                                            'template_file': 'simple_template.txt',
                                            'output_file': dest1})
            firetask2 = FileTransferTask({'files': [{'src': dest1, 'dest': dest2}],
                                          'mode': 'copy'})
            fw = FireWork([firetask1, firetask2])

            # store workflow and launch it locally, single shot
            self.lp.add_wf(fw)
            launch_rocket(self.lp, FWorker())

            # check that both output files contain the templated text
            for d in [dest1, dest2]:
                with open(d) as f:
                    self.assertEqual(f.read(), 'option1 = 5.0\noption2 = fast method')

        finally:
            self._teardown([dest1, dest2])
Example #24
    def test_fibadder(self):
        fib = FibonacciAdderTask()
        fw = FireWork(fib, {'smaller': 0, 'larger': 1, 'stop_point': 3})
        self.lp.add_wf(fw)
        rapidfire(self.lp,
                  m_dir=MODULE_DIR,
                  nlaunches=3,
                  sleep_time=5,
                  max_loops=30)
        time.sleep(5)
        self.assertEqual(
            self.lp.get_launch_by_id(1).action.stored_data['next_fibnum'], 1)
        self.assertEqual(
            self.lp.get_launch_by_id(2).action.stored_data['next_fibnum'], 2)
        try:
            self.assertEqual(
                self.lp.get_launch_by_id(3).action.stored_data, {})
        except:
            time.sleep(30)  # TODO: this is really ugly! but needed ... ugh
            self.assertEqual(
                self.lp.get_launch_by_id(3).action.stored_data, {})

        self.assertFalse(self.lp.run_exists())
Example #25
    def test_init(self):

        fws = []
        for i in range(5):
            fw = FireWork([PyTask(func="print", args=[i])], fw_id=i)
            fws.append(fw)
        wf = Workflow(fws, links_dict={0: [1, 2, 3], 1: [4], 2: [4]})
        self.assertIsInstance(wf, Workflow)
        self.assertRaises(ValueError,
                          Workflow,
                          fws,
                          links_dict={
                              0: [1, 2, 3],
                              1: [4],
                              100: [4]
                          })
        self.assertRaises(ValueError,
                          Workflow,
                          fws,
                          links_dict={
                              0: [1, 2, 3],
                              1: [4],
                              2: [100]
                          })
Example #26
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object
    The workflow has two steps - a structure relaxation and a static run
    :param structure:
    :return:
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.to_dict
    spec['vasp']['poscar'] = poscar.to_dict
    spec['vasp']['kpoints'] = kpoints.to_dict
    spec['vasp']['potcar'] = potcar.to_dict
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('', gzipped=False)
    for j in jobs:  # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [VaspErrorHandler(), FrozenJobErrorHandler(), MeshSymmetryErrorHandler(),
                NonConvergingErrorHandler()]
    c_params = {'jobs': [j.to_dict for j in jobs],
                'handlers': [h.to_dict for h in handlers],
                'max_errors': 5}
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st FireWork - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POTCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(FireWork(tasks, spec, name=get_name(structure, spec['task_type']), fw_id=1))

    # 2nd FireWork - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        FireWork([VaspToDBTaskEx()], spec, name=get_name(structure, spec['task_type']), fw_id=2))
    connections[1] = [2]

    # 3rd FireWork - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx({'jobs': [VaspJob('', auto_npar=False).to_dict],
                                         'handlers': [h.to_dict for h in handlers],
                                         'max_errors': 5})
    fws.append(FireWork([copytask, setuptask, custodiantask], spec,
                        name=get_name(structure, spec['task_type']), fw_id=3))
    connections[2] = [3]

    # 4th FireWork - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        FireWork([VaspToDBTaskEx()], spec, name=get_name(structure, spec['task_type']), fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
Example #27
 def from_dict(cls, m_dict):
     return Workflow([FireWork.from_dict(f) for f in m_dict['fws']], Workflow.Links.from_dict(m_dict['links']))
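
Since from_dict consumes exactly what Workflow.to_dict produces ({'fws': [...], 'links': {...}}), a serialized workflow survives a round trip. A small sanity-check sketch (the ids and ScriptTasks are illustrative):

fw_a = FireWork(ScriptTask.from_str('echo "a"'), fw_id=1)
fw_b = FireWork(ScriptTask.from_str('echo "b"'), fw_id=2)
wf = Workflow([fw_a, fw_b], {1: [2]})
wf2 = Workflow.from_dict(wf.to_dict())
assert len(wf2.fws) == len(wf.fws)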
Example #28
    def run_task(self, fw_spec):
        if '_fizzled_parents' in fw_spec and 'prev_vasp_dir' not in fw_spec:
            prev_dir = get_loc(
                fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
            update_spec = {}  # add this later when creating new FW
            fizzled_parent = True
            parse_dos = False
        else:
            prev_dir = get_loc(fw_spec['prev_vasp_dir'])
            update_spec = {
                'prev_vasp_dir': prev_dir,
                'prev_task_type': fw_spec['prev_task_type'],
                'run_tags': fw_spec['run_tags'],
                'parameters': fw_spec.get('parameters')
            }
            fizzled_parent = False
            parse_dos = 'Uniform' in fw_spec['prev_task_type']
        if 'run_tags' in fw_spec:
            self.additional_fields['run_tags'] = fw_spec['run_tags']
        else:
            self.additional_fields['run_tags'] = \
                fw_spec['_fizzled_parents'][0]['spec']['run_tags']

        if MOVE_TO_GARDEN_DEV:
            prev_dir = move_to_garden(prev_dir, prod=False)

        elif MOVE_TO_GARDEN_PROD:
            prev_dir = move_to_garden(prev_dir, prod=True)

        # get the directory containing the db file
        db_dir = os.environ['DB_LOC']
        db_path = os.path.join(db_dir, 'tasks_db.json')

        logging.basicConfig(level=logging.INFO)
        logger = logging.getLogger('MPVaspDrone')
        logger.setLevel(logging.INFO)
        sh = logging.StreamHandler(stream=sys.stdout)
        sh.setLevel(logging.INFO)
        logger.addHandler(sh)

        with open(db_path) as f:
            db_creds = json.load(f)
            drone = MPVaspDrone(host=db_creds['host'],
                                port=db_creds['port'],
                                database=db_creds['database'],
                                user=db_creds['admin_user'],
                                password=db_creds['admin_password'],
                                collection=db_creds['collection'],
                                parse_dos=parse_dos,
                                additional_fields=self.additional_fields,
                                update_duplicates=self.update_duplicates)
            t_id, d = drone.assimilate(
                prev_dir, launches_coll=LaunchPad.auto_load().launches)

        mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
        snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d['snlgroup_id']
        update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})

        print('ENTERED task id:', t_id)
        stored_data = {'task_id': t_id}
        if d['state'] == 'successful':
            update_spec['analysis'] = d['analysis']
            update_spec['output'] = d['output']
            return FWAction(stored_data=stored_data, update_spec=update_spec)

        # not successful - first test to see if UnconvergedHandler is needed
        if not fizzled_parent:
            unconverged_tag = 'unconverged_handler--{}'.format(
                fw_spec['prev_task_type'])
            output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
            ueh = UnconvergedErrorHandler(output_filename=output_dir)
            if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
                print('Unconverged run! Creating dynamic FW...')

                spec = {
                    'prev_vasp_dir': prev_dir,
                    'prev_task_type': fw_spec['task_type'],
                    'mpsnl': mpsnl,
                    'snlgroup_id': snlgroup_id,
                    'task_type': fw_spec['prev_task_type'],
                    'run_tags': list(fw_spec['run_tags']),
                    'parameters': fw_spec.get('parameters'),
                    '_dupefinder': DupeFinderVasp().to_dict(),
                    '_priority': fw_spec['_priority']
                }

                snl = StructureNL.from_dict(spec['mpsnl'])
                spec['run_tags'].append(unconverged_tag)
                spec['_queueadapter'] = QA_VASP

                fws = []
                connections = {}

                f = Composition.from_formula(
                    snl.structure.composition.reduced_formula
                ).alphabetical_formula

                fws.append(
                    FireWork(
                        [VaspCopyTask({'files': ['INCAR', 'KPOINTS', 'POSCAR',
                                                 'POTCAR', 'CONTCAR'],
                                       'use_CONTCAR': False}),
                         SetupUnconvergedHandlerTask(),
                         get_custodian_task(spec)],
                        spec,
                        name=get_slug(f + '--' + spec['task_type']),
                        fw_id=-2))

                spec = {
                    'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True,
                    '_priority': fw_spec['_priority'],
                    '_queueadapter': QA_DB,
                    'run_tags': list(fw_spec['run_tags'])
                }
                spec['run_tags'].append(unconverged_tag)
                fws.append(
                    FireWork([VaspToDBTask()],
                             spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-1))
                connections[-2] = -1

                wf = Workflow(fws, connections)

                return FWAction(detours=wf)

        # not successful and not due to convergence problem - FIZZLE
        raise ValueError(
            "DB insertion successful, but don't know how to fix this FireWork! Can't continue with workflow..."
        )
Example #29
    def run_task(self, fw_spec):
        print('sleeping 10s for Mongo')
        time.sleep(10)
        print('done sleeping')
        print('the gap is {}, the cutoff is {}'.format(
            fw_spec['analysis']['bandgap'], self.gap_cutoff))

        if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
            print('Adding more runs...')

            type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'

            snl = StructureNL.from_dict(fw_spec['mpsnl'])
            f = Composition.from_formula(
                snl.structure.composition.reduced_formula).alphabetical_formula

            fws = []
            connections = {}

            priority = fw_spec['_priority']
            trackers = [
                Tracker('FW_job.out'),
                Tracker('FW_job.error'),
                Tracker('vasp.out'),
                Tracker('OUTCAR'),
                Tracker('OSZICAR')
            ]
            trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

            # run GGA static
            spec = fw_spec  # pass all the items from the current spec to the new one
            spec.update({
                'task_type': '{} static v2'.format(type_name),
                '_queueadapter': QA_VASP_SMALL,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority,
                '_trackers': trackers
            })
            fws.append(
                FireWork([
                    VaspCopyTask({
                        'use_CONTCAR': True,
                        'skip_CHGCAR': True
                    }),
                    SetupStaticRunTask(),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-10))

            # insert into DB - GGA static
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority * 2,
                "_dupefinder": DupeFinderDB().to_dict(),
                '_trackers': trackers_db
            }
            fws.append(
                FireWork([VaspToDBTask()],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-9))
            connections[-10] = -9

            # run GGA Uniform
            spec = {
                'task_type': '{} Uniform v2'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority,
                '_trackers': trackers
            }
            fws.append(
                FireWork([
                    VaspCopyTask({'use_CONTCAR': False}),
                    SetupNonSCFTask({'mode': 'uniform'}),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-8))
            connections[-9] = -8

            # insert into DB - GGA Uniform
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority * 2,
                "_dupefinder": DupeFinderDB().to_dict(),
                '_trackers': trackers_db
            }
            fws.append(
                FireWork([VaspToDBTask({'parse_uniform': True})],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-7))
            connections[-8] = -7

            # run GGA Band structure
            spec = {
                'task_type': '{} band structure v2'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority,
                '_trackers': trackers
            }
            fws.append(
                FireWork([
                    VaspCopyTask({'use_CONTCAR': False}),
                    SetupNonSCFTask({'mode': 'line'}),
                    get_custodian_task(spec)
                ],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-6))
            connections[-7] = [-6]

            # insert into DB - GGA Band structure
            spec = {
                'task_type': 'VASP db insertion',
                '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True,
                '_priority': priority * 2,
                "_dupefinder": DupeFinderDB().to_dict(),
                '_trackers': trackers_db
            }
            fws.append(
                FireWork([VaspToDBTask({})],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=-5))
            connections[-6] = -5

            if fw_spec.get('parameters') and fw_spec['parameters'].get(
                    'boltztrap'):
                # run Boltztrap
                spec = {
                    'task_type': '{} Boltztrap'.format(type_name),
                    '_queueadapter': QA_DB,
                    '_dupefinder': DupeFinderDB().to_dict(),
                    '_priority': priority
                }
                fws.append(
                    FireWork([BoltztrapRunTask()],
                             spec,
                             name=get_slug(f + '--' + spec['task_type']),
                             fw_id=-4))
                connections[-7].append(-4)

            wf = Workflow(fws, connections)

            print('Done adding more runs...')

            return FWAction(additions=wf)
        return FWAction()
Example #30
def snl_to_wf(snl, parameters=None):
    fws = []
    connections = {}
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition.from_formula(
        snl.structure.composition.reduced_formula).alphabetical_formula

    # add the SNL to the SNL DB and figure out duplicate group
    tasks = [AddSNLTask()]
    spec = {
        'task_type': 'Add to SNL database',
        'snl': snl.to_dict,
        '_queueadapter': QA_DB,
        '_priority': snl_priority
    }
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        spec['force_mpsnl'] = snl.to_dict
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(
        FireWork(tasks,
                 spec,
                 name=get_slug(f + '--' + spec['task_type']),
                 fw_id=0))
    connections[0] = [1]

    # run GGA structure optimization
    spec = _snl_to_spec(snl, enforce_gga=True)
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(
        FireWork(tasks,
                 spec,
                 name=get_slug(f + '--' + spec['task_type']),
                 fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {
        'task_type': 'VASP db insertion',
        '_priority': priority,
        '_allow_fizzled_parents': True,
        '_queueadapter': QA_DB
    }
    fws.append(
        FireWork([VaspToDBTask()],
                 spec,
                 name=get_slug(f + '--' + spec['task_type']),
                 fw_id=2))
    connections[1] = [2]

    if not parameters.get('skip_bandstructure', False):
        spec = {
            'task_type': 'Controller: add Electronic Structure v2',
            '_priority': priority,
            '_queueadapter': QA_CONTROL
        }
        fws.append(
            FireWork([AddEStructureTask()],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=3))
        connections[2] = [3]

    # determine if GGA+U FW is needed
    incar = MPVaspInputSet().get_incar(snl.structure).to_dict

    if 'LDAU' in incar and incar['LDAU']:
        spec = _snl_to_spec(snl, enforce_gga=False)
        # we are stealing all VASP params and such from previous run
        del spec['vasp']
        spec['_priority'] = priority
        spec['_queueadapter'] = QA_VASP
        fws.append(
            FireWork(
                [VaspCopyTask(),
                 SetupGGAUTask(),
                 get_custodian_task(spec)],
                spec,
                name=get_slug(f + '--' + spec['task_type']),
                fw_id=10))
        connections[2].append(10)

        spec = {
            'task_type': 'VASP db insertion',
            '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True,
            '_priority': priority
        }
        fws.append(
            FireWork([VaspToDBTask()],
                     spec,
                     name=get_slug(f + '--' + spec['task_type']),
                     fw_id=11))
        connections[10] = [11]

        if not parameters.get('skip_bandstructure', False):
            spec = {
                'task_type': 'Controller: add Electronic Structure v2',
                '_priority': priority,
                '_queueadapter': QA_CONTROL
            }
            fws.append(
                FireWork([AddEStructureTask()],
                         spec,
                         name=get_slug(f + '--' + spec['task_type']),
                         fw_id=12))
            connections[11] = [12]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']
    return Workflow(
        fws,
        connections,
        name=Composition.from_formula(
            snl.structure.composition.reduced_formula).alphabetical_formula,
        metadata=wf_meta)
Example #31
            lp.reset(args.password)

        elif args.command == 'detect_fizzled':
            # TODO: report when fixed
            print(lp.detect_fizzled(args.time, args.fix))

        elif args.command == 'detect_unreserved':
            # TODO: report when fixed
            print(lp.detect_unreserved(args.time, args.fix))

        elif args.command == 'add':
            # TODO: make this cleaner, e.g. make TAR option explicit
            # fwf = Workflow.from_FireWork(FireWork.from_file(args.wf_file))
            # lp.add_wf(fwf)
            try:
                fwf = Workflow.from_FireWork(FireWork.from_file(args.wf_file))
                lp.add_wf(fwf)
            except:
                try:
                    if '.tar' in args.wf_file:
                        fwf = Workflow.from_tarfile(args.wf_file)
                    else:
                        fwf = Workflow.from_file(args.wf_file)
                    lp.add_wf(fwf)
                except:
                    print('Error reading FireWork/Workflow file.')
                    traceback.print_exc()

        elif args.command == 'get_fw':
            fw = lp.get_fw_by_id(args.fw_id)
            fw_dict = fw.to_dict()
Example #32
    def run(self):
        """
        Run the rocket (check out a job from the database and execute it)
        """
        all_stored_data = {}  # combined stored data for *all* the Tasks
        all_update_spec = {}  # combined update_spec for *all* the Tasks
        all_mod_spec = []  # combined mod_spec for *all* the Tasks

        lp = self.launchpad
        launch_dir = os.path.abspath(os.getcwd())

        # check a FW job out of the launchpad
        if lp:
            m_fw, launch_id = lp.checkout_fw(self.fworker, launch_dir,
                                             self.fw_id)
        else:  # offline mode
            m_fw = FireWork.from_file(os.path.join(os.getcwd(), "FW.json"))

            # set the run start time
            with open('FW_offline.json', 'r+') as f:
                d = json.loads(f.read())
                d['started_on'] = datetime.utcnow().isoformat()
                f.seek(0)
                f.write(json.dumps(d))
                f.truncate()

            launch_id = None  # we don't need this in offline mode...

        if not m_fw:
            print("No FireWorks are ready to run and match query! {}".format(
                self.fworker.query))
            return False

        if '_launch_dir' in m_fw.spec:
            prev_dir = launch_dir
            os.chdir(m_fw.spec['_launch_dir'])
            launch_dir = os.path.abspath(os.getcwd())

            if lp:
                lp._change_launch_dir(launch_id, launch_dir)

            if not os.listdir(prev_dir) and REMOVE_USELESS_DIRS:
                try:
                    os.rmdir(prev_dir)
                except:
                    pass

        # write FW.json and/or FW.yaml to the directory
        if PRINT_FW_JSON:
            m_fw.to_file('FW.json', indent=4)
        if PRINT_FW_YAML:
            m_fw.to_file('FW.yaml')

        try:
            my_spec = dict(m_fw.spec)  # make a copy of spec, don't override original

            # set up heartbeat (pinging the server that we're still alive)
            ping_stop = start_ping_launch(lp, launch_id)

            # start background tasks
            btask_stops = []
            if '_background_tasks' in my_spec:
                for bt in my_spec['_background_tasks']:
                    btask_stops.append(start_background_task(bt, m_fw.spec))

            # execute the FireTasks!
            for my_task in m_fw.tasks:
                m_action = my_task.run_task(my_spec)

                # read in a FWAction from a file, in case the task is not Python and cannot return it explicitly
                if os.path.exists('FWAction.json'):
                    m_action = FWAction.from_file('FWAction.json')
                elif os.path.exists('FWAction.yaml'):
                    m_action = FWAction.from_file('FWAction.yaml')

                if not m_action:
                    m_action = FWAction()

                # update the global stored data with the data to store and update from this particular Task
                all_stored_data.update(m_action.stored_data)
                all_update_spec.update(m_action.update_spec)
                all_mod_spec.extend(m_action.mod_spec)

                # update spec for next task as well
                my_spec.update(m_action.update_spec)
                for mod in m_action.mod_spec:
                    apply_mod(mod, my_spec)

                if m_action.skip_remaining_tasks:
                    break

            # add job packing info if this is needed
            if FWData().MULTIPROCESSING and STORE_PACKING_INFO:
                all_stored_data['multiprocess_name'] = multiprocessing.current_process().name

            # perform finishing operation
            stop_backgrounds(ping_stop, btask_stops)
            for b in btask_stops:
                b.set()
            do_ping(lp, launch_id)  # one last ping, esp if there is a monitor
            # last background monitors
            if '_background_tasks' in my_spec:
                for bt in my_spec['_background_tasks']:
                    if bt.run_on_finish:
                        for task in bt.tasks:
                            task.run_task(m_fw.spec)

            m_action.stored_data = all_stored_data
            m_action.mod_spec = all_mod_spec
            m_action.update_spec = all_update_spec

            if lp:
                lp.complete_launch(launch_id, m_action, 'COMPLETED')
            else:
                with open('FW_offline.json', 'r+') as f:
                    d = json.loads(f.read())
                    d['fwaction'] = m_action.to_dict()
                    d['state'] = 'COMPLETED'
                    d['completed_on'] = datetime.utcnow().isoformat()
                    f.seek(0)
                    f.write(json.dumps(d))
                    f.truncate()

            return True

        except:
            stop_backgrounds(ping_stop, btask_stops)
            traceback.print_exc()
            try:
                m_action = FWAction(stored_data={'_message': 'runtime error during task',
                                                 '_task': my_task.to_dict(),
                                                 '_exception': traceback.format_exc()},
                                    exit=True)
            except:
                m_action = FWAction(stored_data={'_message': 'runtime error during task',
                                                 '_task': None,
                                                 '_exception': traceback.format_exc()},
                                    exit=True)
            if lp:
                lp.complete_launch(launch_id, m_action, 'FIZZLED')
            else:
                with open('FW_offline.json', 'r+') as f:
                    d = json.loads(f.read())
                    d['fwaction'] = m_action.to_dict()
                    d['state'] = 'FIZZLED'
                    f.seek(0)
                    f.write(json.dumps(d))
                    f.truncate()

            return True
Example #35
    def run(self):
        """
        Run the rocket (check out a job from the database and execute it)
        """
        all_stored_data = {}  # combined stored data for *all* the Tasks
        all_update_spec = {}  # combined update_spec for *all* the Tasks
        all_mod_spec = []  # combined mod_spec for *all* the Tasks

        lp = self.launchpad
        launch_dir = os.path.abspath(os.getcwd())

        # check a FW job out of the launchpad
        if lp:
            m_fw, launch_id = lp.checkout_fw(self.fworker, launch_dir, self.fw_id)
        else:  # offline mode
            m_fw = FireWork.from_file(os.path.join(os.getcwd(), "FW.json"))

            # set the run start time
            with open('FW_offline.json', 'r+') as f:
                d = json.loads(f.read())
                d['started_on'] = datetime.utcnow().isoformat()
                f.seek(0)
                f.write(json.dumps(d))
                f.truncate()

            launch_id = None  # we don't need this in offline mode...

        if not m_fw:
            print("No FireWorks are ready to run and match query! {}".format(self.fworker.query))
            return False

        if '_launch_dir' in m_fw.spec:
            prev_dir = launch_dir
            os.chdir(m_fw.spec['_launch_dir'])
            launch_dir = os.path.abspath(os.getcwd())

            if lp:
                lp.change_launch_dir(launch_id, launch_dir)

            if not os.listdir(prev_dir) and REMOVE_USELESS_DIRS:
                try:
                    os.rmdir(prev_dir)
                except:
                    pass

        if lp:
            message = 'RUNNING fw_id: {} in directory: {}'.\
                format(m_fw.fw_id, os.getcwd())
            lp.log_message(logging.INFO, message)

        # write FW.json and/or FW.yaml to the directory
        if PRINT_FW_JSON:
            m_fw.to_file('FW.json', indent=4)
        if PRINT_FW_YAML:
            m_fw.to_file('FW.yaml')

        try:
            my_spec = dict(m_fw.spec)  # make a copy of spec, don't override original
            my_spec["_fw_env"] = self.fworker.env

            # set up heartbeat (pinging the server that we're still alive)
            ping_stop = start_ping_launch(lp, launch_id)

            # start background tasks
            btask_stops = []
            if '_background_tasks' in my_spec:
                for bt in my_spec['_background_tasks']:
                    btask_stops.append(start_background_task(bt, m_fw.spec))

            # execute the FireTasks!
            for my_task in m_fw.tasks:
                m_action = my_task.run_task(my_spec)

                # read in a FWAction from a file, in case the task is not Python and cannot return it explicitly
                if os.path.exists('FWAction.json'):
                    m_action = FWAction.from_file('FWAction.json')
                elif os.path.exists('FWAction.yaml'):
                    m_action = FWAction.from_file('FWAction.yaml')

                if not m_action:
                    m_action = FWAction()

                # update the global stored data with the data to store and update from this particular Task
                all_stored_data.update(m_action.stored_data)
                all_update_spec.update(m_action.update_spec)
                all_mod_spec.extend(m_action.mod_spec)

                # update spec for next task as well
                my_spec.update(m_action.update_spec)
                for mod in m_action.mod_spec:
                    apply_mod(mod, my_spec)

                if m_action.skip_remaining_tasks:
                    break

            # add job packing info if this is needed
            if FWData().MULTIPROCESSING and STORE_PACKING_INFO:
                all_stored_data['multiprocess_name'] = multiprocessing.current_process().name

            # perform finishing operation
            stop_backgrounds(ping_stop, btask_stops)
            for b in btask_stops:
                b.set()
            do_ping(lp, launch_id)  # one last ping, esp if there is a monitor
            # last background monitors
            if '_background_tasks' in my_spec:
                for bt in my_spec['_background_tasks']:
                    if bt.run_on_finish:
                        for task in bt.tasks:
                            task.run_task(m_fw.spec)

            m_action.stored_data = all_stored_data
            m_action.mod_spec = all_mod_spec
            m_action.update_spec = all_update_spec

            if lp:
                lp.complete_launch(launch_id, m_action, 'COMPLETED')
            else:
                with open('FW_offline.json', 'r+') as f:
                    d = json.loads(f.read())
                    d['fwaction'] = m_action.to_dict()
                    d['state'] = 'COMPLETED'
                    d['completed_on'] = datetime.utcnow().isoformat()
                    f.seek(0)
                    f.write(json.dumps(d))
                    f.truncate()

            return True

        except:
            stop_backgrounds(ping_stop, btask_stops)
            traceback.print_exc()
            try:
                m_action = FWAction(stored_data={'_message': 'runtime error during task', '_task': my_task.to_dict(),
                                             '_exception': traceback.format_exc()}, exit=True)
            except:
                m_action = FWAction(stored_data={'_message': 'runtime error during task', '_task': None,
                                             '_exception': traceback.format_exc()}, exit=True)
            if lp:
                lp.complete_launch(launch_id, m_action, 'FIZZLED')
            else:
                with open('FW_offline.json', 'r+') as f:
                    d = json.loads(f.read())
                    d['fwaction'] = m_action.to_dict()
                    d['state'] = 'FIZZLED'
                    f.seek(0)
                    f.write(json.dumps(d))
                    f.truncate()

            return True
Example #36
 def test_add_fw(self):
     fw = FireWork(AdditionTask(), {'input_array': [5, 7]})
     self.lp.add_wf(fw)
     rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR)
     self.assertEqual(self.lp.get_launch_by_id(1).action.stored_data['sum'], 12)