def __init__(self, *args, **kwargs):
    """Load the optimization configuration from the opt database.

    Pulls the ``doctype: config`` document (written by
    ``MissionControl().configure``) out of the ``opt_label`` collection of
    the LaunchPad database and unpacks it into instance attributes.

    Args:
        *args: passed through to the FireTaskBase constructor.
        **kwargs: passed through; recognized keys read via ``self.get`` are
            ``launchpad`` (LaunchPad or its dict form) and ``opt_label``.

    Raises:
        NotConfiguredError: if no config document exists in the collection.
    """
    super(OptTask, self).__init__(*args, **kwargs)
    # Configuration attrs -----------------------------------------------
    # Accept either a LaunchPad instance or its dict serialization; store
    # a fresh LaunchPad built from the dict form either way.
    lp = self.get("launchpad", LaunchPad.auto_load())
    if isinstance(lp, LaunchPad):
        lp = lp.to_dict()
    self.lpad = LaunchPad.from_dict(lp)
    self.opt_label = self.get("opt_label", "opt_default")
    # The optimization collection lives alongside the fireworks collections.
    self.c = getattr(self.lpad.db, self.opt_label)
    self.config = self.c.find_one({"doctype": "config"})
    if self.config is None:
        raise NotConfiguredError("Please use MissionControl().configure to "
                                 "configure the optimization database "
                                 "({} - {}) before running OptTask."
                                 "".format(self.lpad.db, self.opt_label))
    # Serialized callables are stored as strings and revived here.
    self.wf_creator = deserialize(self.config["wf_creator"])
    self.x_dims = self.config["dimensions"]
    self._xdim_types = self.config["dim_types"]
    self.is_discrete_all = self.config["is_discrete_all"]
    self.is_discrete_any = self.config["is_discrete_any"]
    self.wf_creator_args = self.config["wf_creator_args"] or []
    self.wf_creator_kwargs = self.config["wf_creator_kwargs"] or {}
    self.predictor = self.config["predictor"]
    self.predictor_args = self.config["predictor_args"] or []
    self.predictor_kwargs = self.config["predictor_kwargs"] or {}
    self.maximize = self.config["maximize"]
    self.n_search_pts = self.config["n_search_pts"]
    self.n_train_pts = self.config["n_train_pts"]
    self.n_bootstraps = self.config["n_bootstraps"]
    self.acq = self.config["acq"]
    self.space_file = self.config["space_file"]
    self.onehot_categorical = self.config["onehot_categorical"]
    self.duplicate_check = self.config["duplicate_check"]
    # get_z is either a serialized callable or falsy; fall back to a
    # no-op returning an empty feature list.
    self.get_z = self.config["get_z"]
    if self.get_z:
        self.get_z = deserialize(self.config['get_z'])
    else:
        self.get_z = lambda *ars, **kws: []
    self.get_z_args = self.config["get_z_args"] or []
    self.get_z_kwargs = self.config["get_z_kwargs"] or {}
    self.z_file = self.config["z_file"]
    self.enforce_sequential = self.config["enforce_sequential"]
    self.tolerances = self.config["tolerances"]
    self.batch_size = self.config["batch_size"]
    self.timeout = self.config["timeout"]
    # Declared attrs ----------------------------------------------------
    # n_objs is determined later, from the first observed y-value.
    self.n_objs = None
    # Built-in sklearn regressors addressable by name in the config.
    plist = [RandomForestRegressor, GaussianProcessRegressor,
             ExtraTreesRegressor, GradientBoostingRegressor]
    self.builtin_predictors = {p.__name__: p for p in plist}
    # Bookkeeping for one-hot encoding of categorical dimensions.
    self._n_cats = 0
    self._encoding_info = []
    # Query formats -----------------------------------------------------
    # "completed" docs have x, z, and a real (non-reserved) y.
    self._completed = {'x': {'$exists': 1},
                       'y': {'$exists': 1, '$ne': 'reserved'},
                       'z': {'$exists': 1}}
    # The manager doc coordinates sequential execution via lock/queue.
    self._manager = {'lock': {'$exists': 1}, 'queue': {'$exists': 1}}
def get_launchpad(launchpad_file=None):
    """
    Return a LaunchPad object.

    If ``launchpad_file`` is None, auto-load one from the environment.

    Args:
        launchpad_file (file-like or str): An open file object or a path
            to the LaunchPad file.

    Returns:
        LaunchPad
    """
    if launchpad_file:
        # Fix: the old check was `isinstance(launchpad_file, file)`; the
        # `file` builtin is Python-2-only and raises NameError on Python 3.
        # Duck-type on a `read` attribute instead.
        if hasattr(launchpad_file, 'read'):  # a file object was found
            ext = launchpad_file.name.split('.')[-1]
            if ext == 'yaml':
                launchpad = LaunchPad.from_format(launchpad_file.read(),
                                                  f_format='yaml')
            else:
                # assume json
                launchpad = LaunchPad.from_format(launchpad_file.read())
        else:
            # assume launchpad_file is a path
            launchpad = LaunchPad.from_file(launchpad_file)
    else:
        launchpad = LaunchPad.auto_load()
    return launchpad
def main(sequencing_directory, library_prefix, num_libraries, raw_data_dir):
    """Create per-library directories, build one Count firework per library,
    and submit the resulting workflow.

    Args:
        sequencing_directory (str): parent directory of all libraries.
        library_prefix (str): library dir name prefix; suffixed 1..N.
        num_libraries (int): number of libraries to process.
        raw_data_dir (str): unused here; kept for interface compatibility.
    """
    # Fix: close the config file promptly and use safe_load — a bare
    # yaml.load on config data is deprecated and unsafe.
    with open("my_launchpad.yaml") as config_file:
        lpad = LaunchPad(**yaml.safe_load(config_file))
    workflow_fireworks = []
    workflow_dependencies = collections.defaultdict(list)
    library_dirs = [
        os.path.join(sequencing_directory, library_prefix + str(i + 1))
        for i in range(num_libraries)  # fix: range() replaces Py2-only xrange()
    ]
    subdirs = ['unzipped', 'trimmed', 'aligned_kallisto', 'bammed',
               'sorted', 'counted', 'pythonized']
    for library_dir in library_dirs:
        seq_functions.make_directories(library_dir, subdirs)
        name = "Count_%s" % os.path.basename(library_dir)
        fw_count = Firework(
            [CountTask(library_path=library_dir,
                       aligned_name="aligned_kallisto",
                       bammed_name="bammed",
                       counted_name="counted",
                       spikeids=['Spike1', 'Spike4', 'Spike7'])],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_count)
    lpad.add_wf(Workflow(workflow_fireworks, links_dict=workflow_dependencies))
def __init__(self):
    """Initialize empty run-tracking state with a fresh SimManager and LaunchPad."""
    # External services.
    self.simManager = SimManager()
    self.launchpad = LaunchPad()
    # Bookkeeping for fireworks submitted so far.
    self.ids, self.fws = [], []
    self.last = 0
    self.rerun = False
def lp(request):
    """Pytest fixture: a freshly-reset LaunchPad on the test database,
    dropped again at teardown."""
    launchpad = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
    launchpad.reset(password=None, require_password=False)

    def fin():
        # Teardown: remove the whole test database.
        launchpad.connection.drop_database(TESTDB_NAME)

    request.addfinalizer(fin)
    return launchpad
def lp(request):
    """Pytest fixture: a freshly-reset LaunchPad on the test database.

    The teardown finalizer is intentionally left unregistered, so the test
    database survives the run (presumably for post-mortem inspection —
    verify before re-enabling).
    """
    launchpad = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
    launchpad.reset(password=None, require_password=False)

    def fin():
        launchpad.connection.drop_database(TESTDB_NAME)

    # request.addfinalizer(fin)
    return launchpad
def main(sequencing_directory, library_prefix, num_libraries, raw_data_dir):
    """Build a STAR-align -> RSEM-count workflow for each library and submit it.

    Args:
        sequencing_directory (str): parent directory of all libraries.
        library_prefix (str): library dir name prefix; suffixed 1..N.
        num_libraries (int): number of libraries to process.
        raw_data_dir (str): unused here; kept for interface compatibility.
    """
    # Fix: close the config file promptly and use safe_load — a bare
    # yaml.load on config data is deprecated and unsafe.
    with open("my_launchpad.yaml") as config_file:
        lpad = LaunchPad(**yaml.safe_load(config_file))
    workflow_fireworks = []
    workflow_dependencies = collections.defaultdict(list)
    library_dirs = [
        os.path.join(sequencing_directory, library_prefix + str(i + 1))
        for i in range(num_libraries)  # fix: range() replaces Py2-only xrange()
    ]
    subdirs = ["aligned_star", "quant_rsem", "counted_rsem"]
    for library_dir in library_dirs:
        seq_functions.make_directories(library_dir, subdirs)
    for library_dir in library_dirs:
        # NOTE(review): make_directories is invoked a second time per
        # library here — looks like a copy/paste leftover; harmless only
        # if the call is idempotent. Kept to preserve behavior.
        seq_functions.make_directories(library_dir, subdirs)
        name = "AlignSTAR_%s" % os.path.basename(library_dir)
        fw_align = Firework(
            [Align_star_Task(library_path=library_dir,
                             trimmed_name="trimmed",
                             aligned_name="aligned_star/",
                             quant_name="quant_rsem/")],
            name=name,
            spec={"_queueadapter": {"job_name": name,
                                    "ntasks_per_node": 8,
                                    "walltime": '24:00:00'}},
        )
        workflow_fireworks.append(fw_align)
        name = "Count_%s" % os.path.basename(library_dir)
        fw_count = Firework(
            [Count_rsem_Task(library_path=library_dir,
                             aligned_name="aligned_star",
                             quant_name="quant_rsem",
                             counted_name="counted_rsem",
                             spikeids=['AM1780SpikeIn1', 'AM1780SpikeIn4',
                                       'AM1780SpikeIn7'])],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_count)
        # Counting for a library depends on its alignment finishing first.
        workflow_dependencies[fw_align].append(fw_count)
    lpad.add_wf(Workflow(workflow_fireworks, links_dict=workflow_dependencies))
def __init__(self, wfname, fxyz, fprot, reset=False):
    """Store workflow file names and connect to the default LaunchPad.

    Args:
        wfname (str): workflow name; also the stem for derived file names.
        fxyz: xyz input file reference.
        fprot: protocol file reference.
        reset (bool): when True, wipe the LaunchPad without a password prompt.
    """
    self.launchpad = LaunchPad()
    if reset:
        self.launchpad.reset('', require_password=False)
    self.wfname = wfname
    self.fxyz = fxyz
    self.fprot = fprot
    # Derived file names all share the workflow name as a common stem.
    self.ftxyz = wfname + '.txyz'
    self.fkey = wfname + '.key'
    self.fref = wfname + '.hdf5'
def populate_launchpad(software, systems, optimizer=None):
    """
    Fill the FireWorks launchpad with workflows for a set of systems.

    Args:
        software (str): key into ``pkg_info`` selecting the calculator.
        systems (iterable): system-class names; each maps to a structures
            json and a parameters json under ``base_dir/staging/``.
        optimizer: optional optimizer forwarded to ``get_ase_wflows``.
    """
    # Fix: close the config file and use safe_load (bare yaml.load is
    # deprecated/unsafe for config data).
    with open('../config/my_launchpad.yaml', 'r') as config_file:
        launch_pad = yaml.safe_load(config_file)
    # These keys are not accepted by the LaunchPad constructor.
    for key in ('ssl_ca_file', 'strm_lvl', 'user_indices', 'wf_user_indices'):
        del launch_pad[key]
    lpad = LaunchPad(**launch_pad)
    db_file = os.getcwd() + '/../config/db.json'
    for system_class in systems:
        # Fix: the inner json result used to rebind the *parameter*
        # `systems`, shadowing the iterable being looped over; use a
        # distinct name. Files are now closed via context managers, and
        # opened in text mode (json.load expects text on Python 3).
        with open('{}{}.json'.format(base_dir + '/staging/structures/',
                                     system_class)) as f:
            structures_by_id = json.load(f)
        with open('{}{}.json'.format(base_dir + '/staging/parameters/',
                                     system_class)) as f:
            parameters = json.load(f)
        # Flatten the id->value mappings into aligned lists.
        ids = []
        systems_list = []
        parameters_list = []
        for id_, system in structures_by_id.items():
            systems_list.append(system)
            parameters_list.append(parameters[id_])
            ids.append(id_)
        # pymatgen structure dicts -> ASE Atoms objects.
        systems_list = [
            AseAtomsAdaptor.get_atoms(Structure.from_dict(a))
            for a in systems_list
        ]
        # NOTE(review): `parameters_list` and `ids` are built but never
        # passed on (the whole `parameters` dict is passed and
        # identifiers=None) — possibly the original intent was
        # parameters=parameters_list, identifiers=ids. Preserved as-is.
        wf = get_ase_wflows(
            systems_list,
            parameters=parameters,
            calculator=pkg_info[software]['calculator'],
            to_db=True,
            db_file=db_file,
            optimizer=optimizer,
            calculator_module=pkg_info[software]['calculator_module'],
            identifiers=None)
        # add the workflow
        lpad.add_wf(wf)
def setUp(self):
    """Reset the test launchpad and build filtered and unfiltered builders."""
    lpad = LaunchPad(name="test_emmet")
    lpad.reset('', require_password=False)
    self.lpad = lpad
    # Builder that processes every material.
    self.nofilter = PropertyWorkflowBuilder(self.elasticity,
                                            self.materials,
                                            wf_elastic_constant,
                                            material_filter=None,
                                            lpad=self.lpad)
    self.nofilter.connect()
    # Builder restricted to low task ids only.
    self.filter = PropertyWorkflowBuilder(self.elasticity,
                                          self.materials,
                                          wf_elastic_constant,
                                          material_filter={"task_id": {"$lt": 3}},
                                          lpad=self.lpad)
    self.filter.connect()
def main(reset, launchpad_args, rocket_args):
    """Ensure the launch directory exists, optionally reset the LaunchPad,
    and run rapidfire with the given rocket arguments."""
    m_dir = rocket_args["m_dir"]
    if m_dir:
        try:
            os.makedirs(m_dir)
        except OSError:
            # Directory already exists (or cannot be created) — proceed.
            pass
    launchpad = LaunchPad(**launchpad_args)
    if reset:
        launchpad.reset("", require_password=False)
    rapidfire(launchpad=launchpad, **rocket_args)
def __init__(self, source, materials, wf_function,
             material_filter=None, lpad=None, **kwargs):
    """
    Adds workflows to a launchpad based on material inputs.

    This is primarily to be used for derivative property workflows but
    could in principle be used to generate workflows for any workflow
    that can be invoked from structure data.

    Args:
        source (Store): store of properties
        materials (Store): store of materials properties
        wf_function (string or method): method to generate a workflow
            based on structure in document with missing property; can be
            a string to be loaded or a custom method. Note that the
            builder/runner will not be serializable with custom methods.
        material_filter (dict): dict filter for getting items to process,
            e. g. {"elasticity": None}
        lpad (LaunchPad or dict): fireworks launchpad to use for adding
            workflows; can be None (auto-loaded), a LaunchPad instance,
            or a dict from which the LaunchPad will be invoked
        **kwargs (kwargs): kwargs for builder
    """
    self.source = source
    self.materials = materials
    # Will this be pickled properly for multiprocessing? could just put
    # it into the processor if that's the case
    # A string is resolved to a callable; the original string is kept so
    # the builder remains serializable.
    if isinstance(wf_function, six.string_types):
        self.wf_function = load_class(*wf_function.rsplit('.', 1))
        self._wf_function_string = wf_function
    elif callable(wf_function):
        self.wf_function = wf_function
        self._wf_function_string = None
    else:
        raise ValueError("wf_function must be callable or a string "
                         "corresponding to a loadable method")
    self.material_filter = material_filter
    # Accept None (auto-load), a serialized dict, or a ready instance.
    if lpad is None:
        self.lpad = LaunchPad.auto_load()
    elif isinstance(lpad, dict):
        self.lpad = LaunchPad.from_dict(lpad)
    else:
        self.lpad = lpad
    super().__init__(sources=[source, materials], targets=[], **kwargs)
def initiate_cluster(inputs):
    """Create one clustering firework per discovered content set and submit
    them all as a flat (dependency-free) workflow.

    Args:
        inputs: forwarded to ``multi_call`` to enumerate image folders.
    """
    # check how many image folders are there
    contents_list = multi_call(inputs)
    # Fix: close the config file promptly and use safe_load — a bare
    # yaml.load on config data is deprecated and unsafe.
    with open(join(celltkroot, "fireworks", "my_launchpad.yaml")) as config_file:
        lpad = LaunchPad(**yaml.safe_load(config_file))
    wf_fws = []
    for contents in contents_list:
        fw_name = "cluster_celltk"
        fw = Firework(
            clustercelltk(contents=contents),
            name=fw_name,
            spec={"_queueadapter": {"job_name": fw_name,
                                    "walltime": "47:00:00"}},
        )
        wf_fws.append(fw)
    # end loop over input values
    # All clustering jobs are independent: no links between fireworks.
    workflow = Workflow(wf_fws, links_dict={})
    lpad.add_wf(workflow)
def static(structure_file, functional, directory, write_chgcar, in_custodian,
           number_nodes, launchpad_file, lpad_name):
    """
    Set up a static calculation workflow and submit it to the launchpad.
    """
    from pybat.workflow.workflows import get_wf_static

    # Zero nodes means "unspecified".
    if number_nodes == 0:
        number_nodes = None
    functional = string_to_functional(functional)

    # Each functional/calculation type gets its own directory.
    directory = set_up_directory(directory, functional, "static")

    cathode = Cathode.from_file(structure_file)

    # Prefer an explicit launchpad file; otherwise load a named config.
    lpad = (LaunchPad.from_file(launchpad_file) if launchpad_file
            else _load_launchpad(lpad_name))

    workflow = get_wf_static(structure=cathode,
                             functional=functional,
                             directory=directory,
                             write_chgcar=write_chgcar,
                             in_custodian=in_custodian,
                             number_nodes=number_nodes)
    lpad.add_wf(workflow)
def test_get_lp_and_fw_id_from_task(self):
    """
    Tests the get_lp_and_fw_id_from_task.

    Requires that the auto-loaded LaunchPad exists and points at a
    database other than TESTDB_NAME; otherwise the test is skipped.
    """
    auto_lp = LaunchPad.auto_load()
    if not auto_lp or auto_lp.db.name == TESTDB_NAME:
        raise unittest.SkipTest(
            "LaunchPad lp {} is not suitable for this test. Should be available and different"
            "from {}".format(auto_lp, TESTDB_NAME))

    task = LpTask()
    # fw1 receives the launchpad through its spec and should complete.
    fw1 = Firework([task], spec={'_add_launchpad_and_fw_id': True}, fw_id=1)
    # fw2 lacks the launchpad in its spec and should fizzle.
    fw2 = Firework([task], spec={}, fw_id=2, parents=[fw1])
    self.lp.add_wf(Workflow([fw1, fw2]))

    rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)
    assert self.lp.get_fw_by_id(1).state == "COMPLETED"

    rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)
    assert self.lp.get_fw_by_id(2).state == "FIZZLED"
def __init__(self, *args, **kwargs):
    '''
    :param args: (VaspFirework objects) objects to create Workflow from.
                 No limit on the amount of VaspInputInterface objects to
                 be given. Entered as just comma separated objects passed
                 to class.
    :param deps_dict: (dict) specifies the dependency of the
                 VaspInputInterface objects given. If no dependency is
                 given, Fireworks are assumed to be sequentially dependent.
    :param name: (str) Name given to Workflow
    '''
    self.fws = []
    self.name = kwargs.get('name', 'Sequential WF')
    self.deps_dict = kwargs.get('deps_dict', {})
    self.dependency = {}
    if self.deps_dict:
        # Translate the user-supplied dependency mapping into a mapping
        # between the underlying Firework objects.
        for parent in self.deps_dict.keys():
            fw_deps = []
            for child in self.deps_dict[parent]:
                fw_deps.append(child.Firework)
            self.dependency[parent.Firework] = fw_deps
    self.deps = True if self.dependency else False
    # Fix: loop variable renamed from `id`, which shadowed the builtin.
    for idx, fw_task in enumerate(args):
        self.fws.append(fw_task.Firework)
        # With no explicit deps, chain the fireworks sequentially.
        if not self.deps and idx != 0:
            self.dependency[self.fws[idx - 1]] = [fw_task.Firework]
    self.wf = Workflow(self.fws, self.dependency, name=self.name)
    # Try to establish connection with Launchpad
    try:
        self.LaunchPad = LaunchPad.from_file(
            os.path.join(os.environ["HOME"], ".fireworks",
                         "my_launchpad.yaml"))
    except Exception:
        # Fix: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        self.LaunchPad = None
def __init__(self, vasp_task=None, name='vaspfw', handlers=None,
             handler_params=None, config_file=None):
    """
    Wrap a VASP task in a Firework, pulling custodian handler settings
    from a configuration file when one is available.

    Args:
        vasp_task: a VaspInputInterface (run under custodian) or any
            other task object (used as-is).
        name (str): Firework name.
        handlers (list): custodian error handlers.
        handler_params (dict): parameters for the handlers.
        config_file (str): explicit config path; otherwise
            ~/vasp_interface_defaults.yaml is used when it exists.
    """
    self.name = name
    self.handlers = handlers if handlers else []
    self.handler_params = handler_params if handler_params else {}

    # Resolve configuration: explicit file > home-directory default > empty.
    if config_file:
        config_dict = loadfn(config_file)
    elif os.path.exists(os.path.join(os.environ['HOME'],
                                     'vasp_interface_defaults.yaml')):
        config_dict = loadfn(os.path.join(os.environ['HOME'],
                                          'vasp_interface_defaults.yaml'))
    else:
        config_dict = {}

    if config_dict:
        self.custodian_opts = config_dict.get('CUSTODIAN_PARAMS', {})
        if self.custodian_opts.get('handlers', []):
            self.handlers.extend(self.custodian_opts.get('handlers', []))
        self.handler_params.update(
            self.custodian_opts.get('handler_params', {}))

    # A VaspInputInterface task is paired with a custodian runner;
    # anything else is wrapped unchanged.
    self.tasks = [vasp_task.input,
                  RunCustodianTask(handlers=self.handlers,
                                   handler_params=self.handler_params)] \
        if isinstance(vasp_task, VaspInputInterface) else [vasp_task]
    self.Firework = Firework(self.tasks, name=self.name)

    # Try to establish connection with Launchpad
    try:
        self.LaunchPad = LaunchPad.from_file(
            os.path.join(os.environ["HOME"], ".fireworks",
                         "my_launchpad.yaml"))
    except Exception:
        # Fix: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        self.LaunchPad = None
def load_config(config, name="base"):
    """
    Load a LaunchPad, FWorker or QueueAdapter from the configuration files.

    Args:
        config (str): Type of configuration file to load. Either
            "launchpad", "fworker" or "qadapter".
        name (str): Name of the configuration. Defaults to "base".

    Returns:
        Either a LaunchPad, FWorker or QueueAdapter, depending on the
        "config" argument; implicitly None for an unrecognized ``config``.

    Raises:
        FileNotFoundError: when the named configuration file is missing.
    """
    config_dir = os.path.join(os.path.expanduser("~"), ".workflow_config")
    try:
        if config == "launchpad":
            return LaunchPad.from_file(
                os.path.join(config_dir, "launchpad",
                             name + "_launchpad.yaml"))
        if config == "fworker":
            return FWorker.from_file(
                os.path.join(config_dir, "fworker",
                             name + "_fworker.yaml"))
        if config == "qadapter":
            # NOTE(review): qadapter files live under the "fworker"
            # subdirectory — looks intentional, but worth confirming.
            return CommonAdapter.from_file(
                os.path.join(config_dir, "fworker",
                             name + "_qadapter.yaml"))
    except FileNotFoundError as err:
        # Fix: chain the original exception so the real missing path
        # survives in the traceback.
        raise FileNotFoundError(
            "Did not find the corresponding configuration file in "
            + config_dir + ". Use 'vsc config " + config
            + "' to set up the " + name + " configuration for the "
            + config + ".") from err
def setUpClass(cls):
    """Connect to the local test MongoDB and reset it; skip the whole test
    class when the database is unreachable."""
    cls.lp = None
    try:
        cls.lp = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
        cls.lp.reset(password=None, require_password=False)
    except Exception:
        # Fix: narrowed from a bare `except:` so Ctrl-C / SystemExit
        # still propagate instead of being turned into a skip.
        raise unittest.SkipTest(
            'MongoDB is not running in localhost:27017! Skipping tests.')
def _get_launchpad(submission_script='FW_submit.script'): ''' This function assumes that you're in a directory where a "FW_submit.script" exists and contains the location of your launchpad.yaml file. It then uses this yaml file to instantiate a LaunchPad object for you. Arg: submission_script String indicating the path of the job submission script used to launch this firework. It should contain an `rlaunch` command in it. Returns: lpad A configured and authenticated `fireworks.LaunchPad` object ''' # Look for the line in the submission script that has `rlaunch` with open(submission_script, 'r') as file_handle: for line in file_handle.readlines(): if line.startswith('rlaunch'): break # The line with `rlaunch` should also have the location of the launchpad words = line.split(' ') for i, word in enumerate(words): if word == '-l': lpad_file = words[i + 1] break # Instantiate the lpad with the yaml and return it lpad = LaunchPad.from_file(lpad_file) return lpad
def optimize(structure_file, functional, directory, is_metal, in_custodian,
             number_nodes, launchpad_file, lpad_name):
    """
    Set up a geometry optimization workflow and submit it to the launchpad.
    """
    from pybat.workflow.workflows import get_wf_optimize

    cathode = Cathode.from_file(structure_file)

    # Zero nodes means "unspecified".
    if number_nodes == 0:
        number_nodes = None
    functional = string_to_functional(functional)

    # Each functional/calculation type gets its own directory.
    directory = set_up_directory(directory, functional, "optimize")

    # Prefer an explicit launchpad file; otherwise load a named config.
    lpad = (LaunchPad.from_file(launchpad_file) if launchpad_file
            else _load_launchpad(lpad_name))

    lpad.add_wf(
        get_wf_optimize(structure=cathode,
                        functional=functional,
                        directory=directory,
                        is_metal=is_metal,
                        in_custodian=in_custodian,
                        number_nodes=number_nodes))
def setUp(self):
    """Load the test LaunchPad from the yaml file next to this test module."""
    here = os.path.dirname(os.path.realpath(__file__))
    with open(here + '/tests_launchpad.yaml', 'r') as lp_file:
        parsed = dict(YAML().load(lp_file))
    self.lp = LaunchPad.from_dict(parsed)
    self.db = self.lp.db
def dimer(structure_file, dimer_indices, distance, functional, is_metal,
          in_custodian, number_nodes, launchpad_file, lpad_name):
    """
    Set up dimer calculation workflows and submit them to the launchpad.
    """
    from pybat.workflow.workflows import get_wf_dimer

    cathode = LiRichCathode.from_file(structure_file)

    # Zero nodes means "unspecified".
    if number_nodes == 0:
        number_nodes = None

    # Prefer an explicit launchpad file; otherwise load a named config.
    lpad = (LaunchPad.from_file(launchpad_file) if launchpad_file
            else _load_launchpad(lpad_name))

    lpad.add_wf(
        get_wf_dimer(structure=cathode,
                     dimer_indices=dimer_indices,
                     distance=distance,
                     functional=string_to_functional(functional),
                     is_metal=is_metal,
                     in_custodian=in_custodian,
                     number_nodes=number_nodes))
def test_get_lp_and_fw_id_from_task(self):
    """
    Tests the get_lp_and_fw_id_from_task.

    Skipped unless the auto-loaded LaunchPad exists and targets a database
    different from TESTDB_NAME, which the test depends on.
    """
    auto_lp = LaunchPad.auto_load()
    suitable = auto_lp and auto_lp.db.name != TESTDB_NAME
    if not suitable:
        raise unittest.SkipTest(
            "LaunchPad lp {} is not suitable for this test. Should be available and different"
            "from {}".format(auto_lp, TESTDB_NAME))

    task = LpTask()
    # The first firework gets the launchpad injected via its spec and is
    # expected to complete; the second does not and should fizzle.
    fw_with_lp = Firework([task], spec={'_add_launchpad_and_fw_id': True},
                          fw_id=1)
    fw_without_lp = Firework([task], spec={}, fw_id=2,
                             parents=[fw_with_lp])
    self.lp.add_wf(Workflow([fw_with_lp, fw_without_lp]))

    rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)
    assert self.lp.get_fw_by_id(1).state == "COMPLETED"

    rapidfire(self.lp, self.fworker, m_dir=MODULE_DIR, nlaunches=1)
    assert self.lp.get_fw_by_id(2).state == "FIZZLED"
def setUp(self, lpad=True):
    """
    Create scratch directory (removes the old one if there is one) and
    change to it. Also initialize launchpad when ``lpad`` is True.
    """
    if not SETTINGS.get("PMG_VASP_PSP_DIR"):
        # Fall back to the bundled test POTCARs and warn the user.
        SETTINGS["PMG_VASP_PSP_DIR"] = os.path.abspath(
            os.path.join(MODULE_DIR, "..", "vasp", "test_files"))
        print(
            'This system is not set up to run VASP jobs. '
            'Please set PMG_VASP_PSP_DIR variable in your ~/.pmgrc.yaml file.'
        )
    self.scratch_dir = os.path.join(MODULE_DIR, "scratch")
    if os.path.exists(self.scratch_dir):
        shutil.rmtree(self.scratch_dir)
    os.makedirs(self.scratch_dir)
    os.chdir(self.scratch_dir)
    if lpad:
        try:
            self.lp = LaunchPad.from_file(
                os.path.join(DB_DIR, "my_launchpad.yaml"))
            self.lp.reset("", require_password=False)
        except Exception:
            # Fix: narrowed from a bare `except:` so Ctrl-C / SystemExit
            # still propagate instead of being turned into a skip.
            raise unittest.SkipTest(
                'Cannot connect to MongoDB! Is the database server running? '
                'Are the credentials correct?')
def submit(self):
    """Submit this job's workflow to the LaunchPad if it is new.

    Returns:
        int: 1 if a workflow was submitted, 0 for a repeat job.
    """
    from fireworks import LaunchPad
    if self.new():
        # SECURITY NOTE(review): credentials are hard-coded here; they
        # should come from a config file or environment variables.
        launchpad = LaunchPad(host='suncatls2.slac.stanford.edu',
                              name='krisbrown',
                              username='******',
                              password='******')
        wflow = self.wflow()
        launchpad.add_wf(wflow)
        # folder names are unique due to timestamp to the nearest second
        time.sleep(2)
        return 1
    else:
        # Fix: Python-2-only `print 'repeat!'` statement converted to the
        # print() function (works on both Python 2 and 3).
        print('repeat!')
        return 0
def setUpClass(cls):
    """Create a FWorker and a reset LaunchPad on the test database;
    skip the whole class when MongoDB is unavailable."""
    cls.fworker = FWorker()
    try:
        # Connect to the dedicated test database and wipe it.
        cls.lp = LaunchPad(name=TESTDB_NAME, strm_lvl="ERROR")
        cls.lp.reset(password=None, require_password=False)
    except Exception:
        raise unittest.SkipTest("MongoDB is not running in localhost:27017! Skipping tests.")
def _setup_db(self, fw_spec):
    """
    Sets up a MongoDB database for storing optimization data.

    Connection parameters are resolved in priority order:
    explicit host/port/name keys -> an `lpad` dict -> the firework's own
    launchpad (when `_add_launchpad_and_fw_id` is set) -> auto-load.

    Args:
        fw_spec (dict): The spec of the Firework which contains this
            Firetask.

    Returns:
        None
    """
    # TODO: @ardunn - doesn't look like this process will work with password-protected LaunchPad. Most people have their FWS databases password-protected. - AJ
    opt_label = self['opt_label'] if 'opt_label' in self else 'opt_default'
    db_reqs = ('host', 'port', 'name')
    # Which of the three connection keys were given explicitly?
    db_defined = [req in self for req in db_reqs]
    if all(db_defined):
        host, port, name = [self[k] for k in db_reqs]
    elif any(db_defined):
        # Partial specification is ambiguous — refuse it.
        raise AttributeError("Host, port, and name must all be specified!")
    elif 'lpad' in self:
        # A launchpad dict carries the same three keys.
        lpad = self['lpad']
        host, port, name = [lpad[req] for req in db_reqs]
    elif '_add_launchpad_and_fw_id' in fw_spec:
        if fw_spec['_add_launchpad_and_fw_id']:
            try:
                host, port, name = [getattr(self.launchpad, req)
                                    for req in db_reqs]
            except AttributeError:
                # launchpad tried to get attributes of a multiprocessing proxy object.
                raise Exception("_add_launchpad_and_fw_id is currently working with parallel workflows.")
                # TODO: @ardunn - is this still an issue? - AJ
    else:
        try:
            host, port, name = [getattr(LaunchPad.auto_load(), req)
                                for req in db_reqs]
        except AttributeError:
            # auto_load did not return any launchpad object, so nothing was defined.
            raise AttributeError("The optimization database must be specified explicitly (with host, port, and "
                                 "name), with Launchpad object (lpad), by setting _add_launchpad_and_fw_id to True "
                                 "in the fw_spec, or by defining LAUNCHPAD_LOC in fw_config.py for "
                                 "LaunchPad.auto_load()")
            # TODO: @ardunn - LAUNCHPAD_LOC is typically not set through fw_config.py (that requires modifying FWS source code), it's set through a config file: https://materialsproject.github.io/fireworks/config_tutorial.html - AJ
    mongo = MongoClient(host, port)
    db = getattr(mongo, name)
    self.collection = getattr(db, opt_label)
    # TODO: @ardunn - put the below in a different function, e.g. "set_queries"(?) Or just put in root level of run_task - AJ
    # TODO: @ardunn - document what these queries are better. Also, instead of "format" maybe call it "query", e.g. self._explored_query - AJ
    x = fw_spec['_x_opt']
    # Mongo query templates used elsewhere in the task:
    # fully-explored docs have x, a non-empty yi, and z.
    self._explored_format = {'x': {'$exists': 1},
                             'yi': {'$ne': [], '$exists': 1},
                             'z': {'$exists': 1}}
    # Unexplored docs lack yi; the "noninclusive" variant also excludes
    # the current guess x.
    self._unexplored_inclusive_format = {'x': {'$exists': 1},
                                         'yi': {'$exists': 0}}
    self._unexplored_noninclusive_format = {'x': {'$ne': x, '$exists': 1},
                                            'yi': {'$exists': 0}}
    # The manager doc coordinates sequential execution via lock/queue.
    self._manager_format = {'lock': {'$exists': 1},
                            'queue': {'$exists': 1}}
def create_launchpad(username, password, server="serenity", lpadname=None):
    """
    Creates the fireworks launchpad on specific preset servers.

    Args:
        username (str) : username for the mongodb database
        password (str) : password for the mongodb database
        server (str) : server name: "serenity" (default) or "atlas"
        lpadname (str) : name of the fireworks internal database. If not
            given, the name is inferred from the username.

    Returns:
        fireworks object : Launchpad for internal fireworks use.
    """
    if server == "atlas":
        lp = LaunchPad(host="austerity-shard-00-01-hgeov.mongodb.net:27017",
                       port=27017,
                       name=username[:2] + "fireworks",
                       username=username,
                       password=password,
                       logdir=".",
                       strm_lvl="INFO",
                       ssl=True,
                       authsource="admin")
    elif server == "serenity":
        # Database name is explicit or derived from the username.
        name = lpadname if lpadname else username[:2] + "fireworks"
        lp = LaunchPad(host="nanolayers.dyndns.org:27017",
                       port=27017,
                       name=name,
                       username=username,
                       password=password,
                       logdir=".",
                       strm_lvl="INFO",
                       # ssl = True,
                       authsource=name)
    else:
        # Unknown server: fall back to a default (local) launchpad.
        lp = LaunchPad()
    return lp
def submit(self, listOfIncompleteJobStrs=[]):
    """use manageIncompleteJobs.listOfIncompleteJobStrs()

    Runs the general sanity check, and submits the workflow when the job
    is new.

    Returns:
        int: 1 if a workflow was submitted, 0 for a repeat job.
    """
    # NOTE: the mutable default is safe here because the list is only
    # read (passed to self.new), never mutated.
    from fireworks import LaunchPad
    self.generalCheck()
    if self.new(listOfIncompleteJobStrs):
        # SECURITY NOTE(review): credentials are hard-coded here; they
        # should come from a config file or environment variables.
        launchpad = LaunchPad(host='suncatls2.slac.stanford.edu',
                              name='krisbrown',
                              username='******',
                              password='******')
        wflow = self.wflow()
        launchpad.add_wf(wflow)
        # folder names are unique due to timestamp to the nearest second
        time.sleep(2)
        return 1
    else:
        # Fix: Python-2-only print statement converted to the print()
        # function (works on both Python 2 and 3).
        print('Repeat job!')
        return 0
def launch():
    """Smart rocket launcher driven by batch-system termination emails.

    Parses fw_ids from the command line (either explicitly with -f or as a
    range with -r), checks that any jobs a firework depends on have
    finished, and remotely launches the ready fireworks via fabric.
    """
    descr = "Smart rocket launcher."\
            "Uses the execution termination emails send by the batch system to "\
            "launch fireworks that depend on other fireworks i.e fireworks that "\
            "have 'cal_objs' in their spec."\
            "Takes in fw_ids of fireworks to be launched. "\
            "Range specification of fw_ids is also supported."\
            "All the jobs are launched to hipergator remotely using fabric."\
            "Note: Ensure that the submit scripts have the appropriate email settings and "\
            "that the mail daemon is fetching emails to the folder polled by this script."\
            "Note: All rocket launches take place in the home directory. If "\
            "_launch_dir spec is set for the firework the job files will be written to "\
            "that folder and jobs to the batch system will be done from there."
    parser = ArgumentParser(description=descr)
    parser.add_argument('-f', '--fw_ids',
                        help='one or more of fw_ids to run',
                        default=None, type=int, nargs='+')
    parser.add_argument('-r', '--r_fw_ids',
                        help='start and end fw_ids of the range of fw_ids to run',
                        default=None, type=int, nargs=2)
    args = parser.parse_args()
    fw_ids = args.fw_ids
    if args.r_fw_ids is not None:
        # Fix: the old `fw_ids += range(...)` raised a TypeError when -f
        # was omitted (fw_ids is None) and on Python 3 (list + range).
        fw_ids = (fw_ids or []) + list(range(args.r_fw_ids[0],
                                             args.r_fw_ids[1]))
    job_ids = None
    lp = LaunchPad.from_file(LAUNCHPAD_LOC)
    print('Firework ids: ', fw_ids)
    if fw_ids is None:
        print('No fw ids given')
        return
    for fid in fw_ids:
        m_fw = lp._get_a_fw_to_run(fw_id=fid, checkout=False)
        if m_fw is None:
            print('No firework with that id')
            return
        fw_spec = dict(m_fw.spec)
        done = []
        if fw_spec.get('cal_objs', None) is not None:
            # Collect completion status of every batch job this firework
            # depends on.
            for calparams in fw_spec['cal_objs']:
                if calparams.get('job_ids', None) is not None:
                    job_ids = calparams.get('job_ids', None)
                    print(fid, ' depends on jobs with ids : ', job_ids)
                if job_ids is not None:
                    for jid in job_ids:
                        done.append(check_done(jid))
                else:
                    print('job_ids not set')
        else:
            print('This firework doesnt depend on any other fireworks.')
            done.append(True)
        if done and all(done):
            print('Launching ', fid, ' ...')
            with settings(host_string='*****@*****.**'):
                run("ssh dev1 rlaunch singleshot -f " + str(fid))
        else:
            print("Haven't recieved execution termination confirmation for the jobs in the firework from hipergator resource manager")
        time.sleep(3)
    return
def no_connection(self):
    """Check for connection to local MongoDB.

    Returns:
        bool: False when the test launchpad loads and resets cleanly,
        True when the database is unreachable.
    """
    try:
        lp = LaunchPad.from_file(os.path.join(DB_DIR, "my_launchpad.yaml"))
        lp.reset("", require_password=False)
        return False
    except Exception:
        # Fix: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        return True
def submit(self):
    """Build the workflow for this job's kind, mark the job as queued in
    the DB, and submit the workflow to the LaunchPad.

    Raises:
        ValueError: for an unrecognized ``self.jobkind``.
    """
    from fireworks import LaunchPad
    # SECURITY NOTE(review): credentials are hard-coded here; they should
    # come from a config file or environment variables.
    launchpad = LaunchPad(host='suncatls2.slac.stanford.edu',
                          name='krisbrown',
                          username='******',
                          password='******')
    # Dispatch on the job kind.
    if self.jobkind == 'bulkrelax':
        wflow = self.submitBulkRelax()
    elif self.jobkind == 'relax':
        wflow = self.submitRelax()
    elif self.jobkind == 'vib':
        wflow = self.submitVib()
    elif self.jobkind == 'neb':
        wflow = self.submitNEB()
    elif self.jobkind == 'dos':
        wflow = self.submitDOS()
    else:
        # Fix: an unknown jobkind previously fell through to a NameError
        # on `wflow`; fail with a clear message instead.
        raise ValueError('Unknown jobkind: %r' % self.jobkind)
    # Fix: Python-2-only print statement converted to the print() function.
    print("Submitting job with ID = %d" % self.jobid)
    updateDB('status', 'jobid', self.jobid, 'queued', None, 'job')
    launchpad.add_wf(wflow)
    # Removed a dead triple-quoted block that re-checked the
    # 'initialized' status — it was commented-out code with no effect.
def initiate_cluster(ia_path, args):
    """Create one covertrack firework per image directory and submit them
    all as a flat (dependency-free) workflow.

    Args:
        ia_path (str): path to the input-arguments parent directory.
        args: parsed options; ``args.skip`` skips dirs with existing output.
    """
    # check how many image folders are there
    imgdirs = read_imgdirs_from_parentdir(ia_path)
    if args.skip:
        # Optionally drop directories whose dataframes already exist.
        imgdirs = ignore_if_df_existed(imgdirs, ia_path)
    # Fix: close the config file promptly and use safe_load — a bare
    # yaml.load on config data is deprecated and unsafe.
    with open("my_launchpad.yaml") as config_file:
        lpad = LaunchPad(**yaml.safe_load(config_file))
    wf_fws = []
    for imgdir in imgdirs:  # start loop over input val
        fw_name = "clustercovertrack"
        fw = Firework(
            clustercovertrack(input_args_path=ia_path, imgdir=imgdir,
                              args=args),
            name=fw_name,
            spec={"_queueadapter": {"job_name": fw_name,
                                    "walltime": "47:00:00"}},
        )
        wf_fws.append(fw)
    # end loop over input values
    # All jobs are independent: no links between fireworks.
    workflow = Workflow(wf_fws, links_dict={})
    lpad.add_wf(workflow)
def add_workflow(workflow):
    """
    Use Fireworks to add a generated workflow.

    :param workflow: a Workflow object (should have been generated by one
        of the workflow-generating functions above).
    :return: None
    """
    # Auto-load the launchpad from the environment and submit.
    LaunchPad.auto_load().add_wf(workflow)
def setUp(self):
    """Create a clean scratch directory, change into it, and reset the
    test LaunchPad; skip the test when MongoDB is unavailable."""
    if os.path.exists(self.scratch_dir):
        shutil.rmtree(self.scratch_dir)
    os.makedirs(self.scratch_dir)
    os.chdir(self.scratch_dir)
    try:
        self.lp = LaunchPad.from_file(
            os.path.join(db_dir, "my_launchpad.yaml"))
        self.lp.reset("", require_password=False)
    except Exception:
        # Fix: narrowed from a bare `except:` so Ctrl-C / SystemExit
        # still propagate instead of being turned into a skip.
        raise unittest.SkipTest(
            'Cannot connect to MongoDB! Is the database server running? '
            'Are the credentials correct?')
def main():
    """Reset the default LaunchPad, then build and store one single-task
    firework per flow.

    Returns:
        int: 0 on success.
    """
    # Set up the LaunchPad and wipe it.
    launchpad = LaunchPad()
    launchpad.reset('', require_password=False)

    nflows = 2
    for i in range(nflows):
        # Build the flow and persist it to disk.
        flow = build_flow("flow_" + str(i))
        flow.build_and_pickle_dump()
        # Wrap the flow in a one-task firework and store it.
        firework = Firework(FireTaskWithFlow(flow=flow))
        launchpad.add_wf(firework)

    # launch it locally
    # launch_rocket(launchpad)

    return 0
def setUp(self, lpad=True):
    """
    Create scratch directory (removes the old one if there is one) and
    change to it. Also initialize launchpad when ``lpad`` is True.
    """
    if not SETTINGS.get("PMG_VASP_PSP_DIR"):
        # Fall back to the bundled test POTCARs and warn the user.
        SETTINGS["PMG_VASP_PSP_DIR"] = os.path.abspath(
            os.path.join(MODULE_DIR, "..", "vasp", "test_files"))
        print('This system is not set up to run VASP jobs. '
              'Please set PMG_VASP_PSP_DIR variable in your ~/.pmgrc.yaml file.')
    self.scratch_dir = os.path.join(MODULE_DIR, "scratch")
    if os.path.exists(self.scratch_dir):
        shutil.rmtree(self.scratch_dir)
    os.makedirs(self.scratch_dir)
    os.chdir(self.scratch_dir)
    if lpad:
        try:
            self.lp = LaunchPad.from_file(
                os.path.join(DB_DIR, "my_launchpad.yaml"))
            self.lp.reset("", require_password=False)
        except Exception:
            # Fix: narrowed from a bare `except:` so Ctrl-C / SystemExit
            # still propagate instead of being turned into a skip.
            raise unittest.SkipTest(
                'Cannot connect to MongoDB! Is the database server running? '
                'Are the credentials correct?')
structure (Structure): input structure to be optimized and run vasp_cmd (str): command to run db_file (str): path to file containing the database credentials. Returns: Workflow """ fws = [] fws.append(StaticFW(structure=structure, vasp_cmd=vasp_cmd, db_file=db_file)) fws.append(NonSCFFW(structure=structure, vasp_cmd=vasp_cmd, db_file=db_file, parents=fws[0])) fws.append(BandGapCheckFW(structure=structure, parents=fws[1])) fws.append(InterpolateFW(structure=structure, parents=fws[2])) wfname = "{}:{}".format(structure.composition.reduced_formula, "dipole_moment") return Workflow(fws, name=wfname) if __name__ == "__main__": from pymatgen.util.testing import PymatgenTest from matmethods.vasp.workflows.presets.core import wf_ferroelectric structure = PymatgenTest.get_structure("Si") # wf = get_wf_ferroelectric(structure) my_wf = wf_ferroelectric(structure) # lp = LaunchPad() lp = LaunchPad.from_file(os.path.join("/global/homes/s/sbajaj/mm_installdir/config", "my_launchpad.yaml")) lp.reset('', require_password=False) lp.add_wf(my_wf)
workflows including multiple jobs and advanced dependencies. Please see the complex example, or the Fireworks and rocketsled documentation pages for more information: https://hackingmaterials.github.io/rocketsled/ https://materialsproject.github.io/fireworks/ """ from fireworks.utilities.fw_utilities import explicit_serialize from fireworks.core.rocket_launcher import rapidfire from fireworks import Workflow, Firework, LaunchPad, FireTaskBase, FWAction from rocketsled import OptTask, MissionControl # Setting up the FireWorks LaunchPad launchpad = LaunchPad(name='rsled') opt_label = "opt_default" db_info = {"launchpad": launchpad, "opt_label": opt_label} # We constrain our dimensions to 3 integers, each between 1 and 5 x_dim = [(1, 5), (1, 5), (1, 5)] @explicit_serialize class ObjectiveFuncTask(FireTaskBase): """ An example task which just evaluates the following simple function: f(x) = x[0] * x[1] / x[2] Replace this code with your objective function if your objective function
objective function is multi-objective. Additionally, the objective function has dimensions of differing data types (int, float, categorical), which is automatically handled by rocketsled as long as the dimensions are passed into MissionControl.configure(...). Finally, we add some arguments to the MissionControl configuration before launching. """ from fireworks.utilities.fw_utilities import explicit_serialize from fireworks.core.rocket_launcher import rapidfire from fireworks import Workflow, Firework, LaunchPad, FireTaskBase, FWAction from rocketsled import OptTask, MissionControl launchpad = LaunchPad(name='rsled') opt_label = "opt_complex" db_info = {"launchpad": launchpad, "opt_label": opt_label} x_dim = [(16, 145), (0.0, 90.0), ["industry standard", "shark fin", "dolphin fin"]] @explicit_serialize class ComplexMultiObjTask(FireTaskBase): """ An example of a complex, multiobjective task with directly competing objectives. The input vector is defined on a search space with numerical and categorical inputs. This task accepts a 3-vector of the form [int, float, str]. """
from fireworks import LaunchPad, Firework, Workflow
from fireworks.core.rocket_launcher import launch_rocket
from fireworks.examples.custom_firetasks.hello_world.hello_world_task import HelloTask

if __name__ == "__main__":
    # Connect to the FireWorks database (adjust connection settings as needed).
    lp = LaunchPad()
    # lp.reset()  # uncomment and set the appropriate parameters to reset the database

    # Build a one-Firework workflow around HelloTask and store it in the database.
    hello_fw = Firework([HelloTask()])
    hello_wf = Workflow.from_Firework(hello_fw)
    lp.add_wf(hello_wf)

    # Execute the workflow locally.
    launch_rocket(lp)
from fireworks.core.firework import FireTaskBase
from fireworks.scripts.rlaunch_run import launch_multiprocess
from fireworks.utilities.fw_utilities import explicit_serialize
from fw_tutorials.firetask.addition_task import AdditionTask
from rocketsled import OptTask, MissionControl
from rocketsled.utils import ExhaustedSpaceError

__author__ = "Alexander Dunn"
__email__ = "*****@*****.**"

# Load the test LaunchPad configuration that lives next to this file.
lp_filedir = os.path.dirname(os.path.realpath(__file__))
with open(lp_filedir + '/tests_launchpad.yaml', 'r') as lp_file:
    yaml = YAML()
    lp_dict = dict(yaml.load(lp_file))
    launchpad = LaunchPad.from_dict(lp_dict)

# Shared fixtures used by the tests below.
opt_label = "test"
db_info = {"launchpad": launchpad, "opt_label": opt_label}
test_db_name = launchpad.db
common_kwargs = {"predictor": "RandomForestRegressor", "acq": None}


@explicit_serialize
class BasicTestTask(FireTaskBase):
    """Minimal objective-function task for tests: the objective is the sum of
    every component of the input vector except the final (string) element."""

    _fw_name = "BasicTestTask"

    def run_task(self, fw_spec):
        input_vector = fw_spec['_x']
        # Sum all entries except the trailing string element.
        objective = np.sum(input_vector[:-1])
        return FWAction(update_spec={'_y': objective})
def test_dict_from_file(self):
    """Round-trip a LaunchPad through to_dict/from_dict and verify the type."""
    original = LaunchPad.from_file(self.LP_LOC)
    round_tripped = LaunchPad.from_dict(original.to_dict())
    self.assertIsInstance(round_tripped, LaunchPad)
'KPOINTS': [k for k in range(20, 30, 10)] } job_dir = 'calBulk' job_cmd = ['mpirun', '/home/km468/Software/VASP/vasp.5.3.5/vasp'] qparams= dict(nnodes='1', ppnode='16', job_name='vasp_job', pmem='1000mb', walltime='24:00:00', rocket_launch=''.join(job_cmd)) # set qadapter to None to launch via qlaunch # reserve and launch offline # qlaunch -r singleshot # lpad recover_offline qadapter = None #CommonAdapter(q_type="PBS",**qparams) cal = Calibrate(incar, poscar, potcar, kpoints, turn_knobs = turn_knobs, qadapter = qadapter, job_dir = job_dir, job_cmd=job_cmd) caltask = MPINTCalibrateTask(cal.as_dict()) #firework with launch directory set to $FW_JOB_DIR, an environment variable #spec={'_launch_dir':'$FW_JOB_DIR'}, fw_calibrate = Firework([caltask], name="fw_test") wf = Workflow([fw_calibrate], name="mpint_wf_test") lp = LaunchPad.from_file(LAUNCHPAD_LOC) print('fireworks in the database before adding the workflow: \n', lp.get_fw_ids()) lp.add_wf(wf) print('fireworks in the database: \n', lp.get_fw_ids())
from fireworks import Firework, Workflow, LaunchPad, ScriptTask
from fireworks.core.rocket_launcher import rapidfire

# Connect to the FireWorks database and clear out any previous contents.
launchpad = LaunchPad()
launchpad.reset('', require_password=False)

# Build two FireWorks; "goodbye" only runs after "hello" completes.
hello_fw = Firework(ScriptTask.from_str('echo "hello"'), name="hello")
goodbye_fw = Firework(
    ScriptTask.from_str('echo "goodbye"'),
    name="goodbye",
    parents=[hello_fw],
)
flow = Workflow([hello_fw, goodbye_fw], name="test workflow")

# Register the workflow, then execute all runnable FireWorks locally.
launchpad.add_wf(flow)
rapidfire(launchpad)
""" # Parameters: box_scale = 8.9 # edge length of MD box in Angstroms, can also be a numpy array that scales the lattice packmol_path = "~/packmol/packmol/packmol" # Revise as appropriate structure = {'H2O':20} # "structure" in this context can be a dict of number of atoms or molecules. temperature = 320 # Note one can use a pymatgen Structure object also # E.g. p = Poscar.from_file("POSCAR") # structure = p.structure copy_calcs = True # MD runs can be backed up in a desired location calc_home = '~/test_H2O_wflows' # This is the location to copy the calculations if copy_calcs=True # Since we specified a molecule, we must also give the path to xyz # file of a single sample molecule. xyz_paths = ['H2O.xyz'] name = 'H2O_df_'+str(temperature) from mpmorph.workflow.workflows import get_wf_density from fireworks import LaunchPad amorphous_maker_params = {'box_scale':box_scale, 'packmol_path':packmol_path, 'xyz_paths': xyz_paths, 'tol': 2.0} wf = get_wf_density(structure, temperature=temperature, pressure_threshold=0.5, nsteps=1000, wall_time=19200, max_rescales=5, amorphous_maker_params=amorphous_maker_params, copy_calcs=copy_calcs, calc_home=calc_home, name=name) lp = LaunchPad.auto_load() lp.add_wf(wf)
@explicit_serialize
class TaskB(FireTaskBase):
    """Independent task; publishes "param_B" into the spec for downstream tasks."""

    def run_task(self, fw_spec):
        print("This is task B")
        return FWAction(update_spec={"param_B": 20})


@explicit_serialize
class TaskC(FireTaskBase):
    """Final task; reads the values produced by TaskA and TaskB from the spec."""

    def run_task(self, fw_spec):
        print("This is task C.")
        message_a = "Task A gave me: {}".format(fw_spec["param_A"])
        message_b = "Task B gave me: {}".format(fw_spec["param_B"])
        print(message_a)
        print(message_b)


if __name__ == "__main__":
    # Connect to the FireWorks database.
    launchpad = LaunchPad()
    # launchpad.reset('', require_password=False)

    # A and B are independent roots; C waits on both of them.
    fw_A = Firework([TaskA()])
    fw_B = Firework([TaskB()])
    fw_C = Firework([TaskC()], parents=[fw_A, fw_B])

    # Assemble the Workflow from the FireWorks and their parent links.
    workflow = Workflow([fw_A, fw_B, fw_C])

    # Register the workflow and execute it locally.
    launchpad.add_wf(workflow)
    rapidfire(launchpad)
if scan: wf_name += " - SCAN" wf = Workflow(fws, name=wf_name) wf = add_additional_fields_to_taskdocs(wf, {"wf_meta": self.wf_meta}) tag = "magnetic_orderings group: >>{}<<".format(self.uuid) wf = add_tags(wf, [tag, ordered_structure_origins]) return wf if __name__ == "__main__": # for trying workflows from fireworks import LaunchPad latt = Lattice.cubic(4.17) species = ["Ni", "O"] coords = [[0.00000, 0.00000, 0.00000], [0.50000, 0.50000, 0.50000]] NiO = Structure.from_spacegroup(225, latt, species, coords) wf_deformation = get_wf_magnetic_deformation(NiO) wf_orderings = MagneticOrderingsWF(NiO).get_wf() lpad = LaunchPad.auto_load() lpad.add_wf(wf_orderings) lpad.add_wf(wf_deformation)
""" This code is described in the Introductory tutorial, https://materialsproject.github.io/fireworks/introduction.html """ from fireworks import Firework, LaunchPad, ScriptTask from fireworks.core.rocket_launcher import launch_rocket if __name__ == "__main__": # set up the LaunchPad and reset it launchpad = LaunchPad() # launchpad.reset('', require_password=False) # create the Firework consisting of a single task firetask = ScriptTask.from_str('echo "howdy, your job launched successfully!"') firework = Firework(firetask) # store workflow and launch it locally launchpad.add_wf(firework) launch_rocket(launchpad)