def setUp(self):
    lp_filedir = os.path.dirname(os.path.realpath(__file__))
    with open(lp_filedir + '/tests_launchpad.yaml', 'r') as lp_file:
        yaml = YAML()
        lp_dict = dict(yaml.load(lp_file))
        self.lp = LaunchPad.from_dict(lp_dict)
        self.db = self.lp.db
def __init__(self, *args, **kwargs):
    super(OptTask, self).__init__(*args, **kwargs)

    # Configuration attrs
    lp = self.get("launchpad", LaunchPad.auto_load())
    if isinstance(lp, LaunchPad):
        lp = lp.to_dict()
    self.lpad = LaunchPad.from_dict(lp)
    self.opt_label = self.get("opt_label", "opt_default")
    self.c = getattr(self.lpad.db, self.opt_label)
    self.config = self.c.find_one({"doctype": "config"})
    if self.config is None:
        raise NotConfiguredError("Please use MissionControl().configure to "
                                 "configure the optimization database "
                                 "({} - {}) before running OptTask."
                                 "".format(self.lpad.db, self.opt_label))
    self.wf_creator = deserialize(self.config["wf_creator"])
    self.x_dims = self.config["dimensions"]
    self._xdim_types = self.config["dim_types"]
    self.is_discrete_all = self.config["is_discrete_all"]
    self.is_discrete_any = self.config["is_discrete_any"]
    self.wf_creator_args = self.config["wf_creator_args"] or []
    self.wf_creator_kwargs = self.config["wf_creator_kwargs"] or {}
    self.predictor = self.config["predictor"]
    self.predictor_args = self.config["predictor_args"] or []
    self.predictor_kwargs = self.config["predictor_kwargs"] or {}
    self.maximize = self.config["maximize"]
    self.n_search_pts = self.config["n_search_pts"]
    self.n_train_pts = self.config["n_train_pts"]
    self.n_bootstraps = self.config["n_bootstraps"]
    self.acq = self.config["acq"]
    self.space_file = self.config["space_file"]
    self.onehot_categorical = self.config["onehot_categorical"]
    self.duplicate_check = self.config["duplicate_check"]
    self.get_z = self.config["get_z"]
    if self.get_z:
        self.get_z = deserialize(self.config['get_z'])
    else:
        self.get_z = lambda *ars, **kws: []
    self.get_z_args = self.config["get_z_args"] or []
    self.get_z_kwargs = self.config["get_z_kwargs"] or {}
    self.z_file = self.config["z_file"]
    self.enforce_sequential = self.config["enforce_sequential"]
    self.tolerances = self.config["tolerances"]
    self.batch_size = self.config["batch_size"]
    self.timeout = self.config["timeout"]

    # Declared attrs
    self.n_objs = None
    plist = [RandomForestRegressor, GaussianProcessRegressor,
             ExtraTreesRegressor, GradientBoostingRegressor]
    self.builtin_predictors = {p.__name__: p for p in plist}
    self._n_cats = 0
    self._encoding_info = []

    # Query formats
    self._completed = {'x': {'$exists': 1},
                       'y': {'$exists': 1, '$ne': 'reserved'},
                       'z': {'$exists': 1}}
    self._manager = {'lock': {'$exists': 1},
                     'queue': {'$exists': 1}}
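# A sketch (not from the source) of the MissionControl setup that writes the
# "config" document read by OptTask.__init__ above; OptTask raises
# NotConfiguredError until this has been done. The wf_creator import path and
# the search dimensions are hypothetical placeholders, and the keyword names
# besides wf_creator/dimensions are assumed to match the config keys above.
from fireworks import LaunchPad
from rocketsled import MissionControl

mc = MissionControl(launchpad=LaunchPad.auto_load(), opt_label="opt_default")
mc.configure(
    wf_creator="my_package.workflows.wf_creator",  # hypothetical import path
    dimensions=[(1.0, 10.0), (1.0, 10.0)],         # stored as config["dimensions"]
    predictor="RandomForestRegressor",             # stored as config["predictor"]
    maximize=False,                                # stored as config["maximize"]
)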
def __init__(self, source, materials, wf_function, material_filter=None,
             lpad=None, **kwargs):
    """
    Adds workflows to a launchpad based on material inputs. This is
    primarily intended for derivative property workflows, but could in
    principle be used to generate any workflow that can be invoked from
    structure data.

    Args:
        source (Store): store of properties
        materials (Store): store of materials properties
        wf_function (string or method): method to generate a workflow
            based on the structure in a document with a missing property;
            can be a string to be loaded or a custom method. Note that the
            builder/runner will not be serializable with custom methods.
        material_filter (dict): dict filter for getting items to process,
            e.g. {"elasticity": None}
        lpad (LaunchPad or dict): fireworks launchpad to use for adding
            workflows; can be None (autoloaded), a LaunchPad instance, or
            a dict from which the LaunchPad will be constructed
        **kwargs (kwargs): kwargs for builder
    """
    self.source = source
    self.materials = materials
    # Will this be pickled properly for multiprocessing? could just put
    # it into the processor if that's the case
    if isinstance(wf_function, six.string_types):
        self.wf_function = load_class(*wf_function.rsplit('.', 1))
        self._wf_function_string = wf_function
    elif callable(wf_function):
        self.wf_function = wf_function
        self._wf_function_string = None
    else:
        raise ValueError("wf_function must be callable or a string "
                         "corresponding to a loadable method")
    self.material_filter = material_filter
    if lpad is None:
        self.lpad = LaunchPad.auto_load()
    elif isinstance(lpad, dict):
        self.lpad = LaunchPad.from_dict(lpad)
    else:
        self.lpad = lpad

    super().__init__(sources=[source, materials], targets=[], **kwargs)
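# A sketch (not from the source) of constructing the builder defined above.
# The class name WorkflowBuilder is a hypothetical stand-in (the class
# statement is not shown in this snippet), and the store names and the
# wf_function import path are placeholders.
from maggma.stores import MongoStore

source = MongoStore("my_db", "elasticity")    # hypothetical property store
materials = MongoStore("my_db", "materials")  # hypothetical materials store

builder = WorkflowBuilder(                    # hypothetical class name
    source=source,
    materials=materials,
    wf_function="my_package.workflows.get_wf_elastic",  # loaded via load_class
    material_filter={"elasticity": None},  # only materials missing elasticity
    lpad=None,                             # None -> LaunchPad.auto_load()
)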
def __init__(self, pj_store, lpad, incremental=True, query=None,
             base_priority=2500, site_penalty=10, vote_weight=10, **kwargs):
    """
    Takes a propjockey collection and sets the priority of fireworks in the
    fireworks collection of a LaunchPad.

    Args:
        pj_store (Store): store corresponding to propjockey collection
        lpad (LaunchPad): fireworks launchpad
        incremental (bool): whether to only process propjockey records
            updated since the last build
        query (dict): query to filter the propjockey store
        base_priority (int): base priority to assign to fireworks
        site_penalty (int): per-site penalty to priority
        vote_weight (int): priority boost per vote
        **kwargs (kwargs): kwargs for builder
    """
    self.pj_store = pj_store
    if isinstance(lpad, dict):
        self.lpad = LaunchPad.from_dict(lpad)
    else:
        self.lpad = lpad
    self.fws_store = MongoStore.from_collection(self.lpad.fireworks)
    self.fws_store.lu_field = "_pj_lu"
    self.incremental = incremental
    self.start_date = datetime.utcnow()
    self.query = query or {}
    self.base_priority = base_priority
    self.site_penalty = site_penalty
    self.vote_weight = vote_weight

    super().__init__(sources=[self.pj_store], targets=[self.fws_store],
                     **kwargs)
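# A sketch (not from the source) of the priority calculation implied by the
# docstring above; the builder's actual process_item logic is not shown here,
# so treat this formula as an assumption.
def assumed_priority(n_votes, n_sites,
                     base_priority=2500, vote_weight=10, site_penalty=10):
    """Boost priority per propjockey vote, penalize per site in the structure."""
    return base_priority + vote_weight * n_votes - site_penalty * n_sites

# e.g. 12 votes on a 4-site structure -> 2500 + 120 - 40 = 2580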
def test_dict_from_file(self):
    lp = LaunchPad.from_file(self.LP_LOC)
    lp_dict = lp.to_dict()
    new_lp = LaunchPad.from_dict(lp_dict)
    self.assertIsInstance(new_lp, LaunchPad)
from fireworks.core.firework import FireTaskBase
from fireworks.scripts.rlaunch_run import launch_multiprocess
from fireworks.utilities.fw_utilities import explicit_serialize
from fw_tutorials.firetask.addition_task import AdditionTask

from rocketsled import OptTask, MissionControl
from rocketsled.utils import ExhaustedSpaceError

__author__ = "Alexander Dunn"
__email__ = "*****@*****.**"

lp_filedir = os.path.dirname(os.path.realpath(__file__))
with open(lp_filedir + '/tests_launchpad.yaml', 'r') as lp_file:
    yaml = YAML()
    lp_dict = dict(yaml.load(lp_file))
    launchpad = LaunchPad.from_dict(lp_dict)
opt_label = "test"
db_info = {"launchpad": launchpad, "opt_label": opt_label}
test_db_name = launchpad.db
common_kwargs = {"predictor": "RandomForestRegressor", "acq": None}


@explicit_serialize
class BasicTestTask(FireTaskBase):
    _fw_name = "BasicTestTask"

    def run_task(self, fw_spec):
        x = fw_spec['_x']
        y = np.sum(x[:-1])  # sum all except the final string element
        return FWAction(update_spec={'_y': y})
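# A sketch (not from the source) of a workflow creator that pairs
# BasicTestTask with OptTask using the db_info defined above. It follows the
# usual rocketsled pattern of one Firework that evaluates the objective and
# then runs OptTask, but this exact function is not part of the test module.
from fireworks import Firework, Workflow

def basic_wf_creator(x):
    spec = {"_x": x}
    firework = Firework([BasicTestTask(), OptTask(**db_info)], spec=spec)
    return Workflow([firework], name="rsled test workflow")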
3.840198 0.000000 0.000000
1.920099 3.325710 0.000000
0.000000 -2.217138 3.135509
Si
2
direct
0.000000 0.000000 0.000000 Si
0.750000 0.500000 0.750000 Si"""

STRUCT = Structure.from_str(POSCAR_STR, fmt='POSCAR')
TEST_DIR = os.path.join(MODULE_DIR, 'tmp_fw_test_dir')

LPAD = LaunchPad.from_dict({
    'host': 'localhost',
    'logdir': None,
    'name': 'prlworkflows_unittest',
    'password': None,
    'port': 27017,
    'ssl_ca_file': None,
    'strm_lvl': 'DEBUG',
    'user_indices': [],
    'username': None,
    'wf_user_indices': []
})

# TODO: enable debug mode by having a launchpad that does not reset
# Can this be done by still having other tests pass?
# Should we only run one test?
# Stop on failure?

@pytest.fixture
def lpad():
    """A LaunchPad object for test instances to use.

    Always gives a clean (reset) LaunchPad.
    """
    LPAD.reset(None, require_password=False, max_reset_wo_password=5)
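# A sketch (not from the source) of a test consuming the fixture above,
# assuming the fixture body (truncated here) goes on to yield the freshly
# reset LPAD; the assertion is illustrative only.
def test_launchpad_is_reset(lpad):
    # a freshly reset LaunchPad should contain no workflows
    assert lpad.get_wf_ids() == []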
def __init__(self, *args, **kwargs):
    super(OptTask, self).__init__(*args, **kwargs)

    # Configuration attrs
    lp = self.get("launchpad", LaunchPad.auto_load())
    if isinstance(lp, LaunchPad):
        lp = lp.to_dict()
    self.lpad = LaunchPad.from_dict(lp)
    self.opt_label = self.get("opt_label", "opt_default")
    self.c = getattr(self.lpad.db, self.opt_label)
    self.config = self.c.find_one({"doctype": "config"})
    if self.config is None:
        raise NotConfiguredError(
            "Please use MissionControl().configure to "
            "configure the optimization database "
            "({} - {}) before running OptTask."
            "".format(self.lpad.db, self.opt_label))
    self.wf_creator = deserialize(self.config["wf_creator"])
    self.x_dims = self.config["dimensions"]
    self._xdim_types = self.config["dim_types"]
    self.is_discrete_all = self.config["is_discrete_all"]
    self.is_discrete_any = self.config["is_discrete_any"]
    self.wf_creator_args = self.config["wf_creator_args"] or []
    self.wf_creator_kwargs = self.config["wf_creator_kwargs"] or {}
    self.predictor = self.config["predictor"]
    self.predictor_args = self.config["predictor_args"] or []
    self.predictor_kwargs = self.config["predictor_kwargs"] or {}
    self.maximize = self.config["maximize"]
    self.n_search_pts = self.config["n_search_pts"]
    self.n_train_pts = self.config["n_train_pts"]
    self.n_bootstraps = self.config["n_bootstraps"]
    self.acq = self.config["acq"]
    self.space_file = self.config["space_file"]
    self.onehot_categorical = self.config["onehot_categorical"]
    self.duplicate_check = self.config["duplicate_check"]
    self.get_z = self.config["get_z"]
    if self.get_z:
        self.get_z = deserialize(self.config["get_z"])
    else:
        self.get_z = lambda *ars, **kws: []
    self.get_z_args = self.config["get_z_args"] or []
    self.get_z_kwargs = self.config["get_z_kwargs"] or {}
    self.z_file = self.config["z_file"]
    self.enforce_sequential = self.config["enforce_sequential"]
    self.tolerances = self.config["tolerances"]
    self.batch_size = self.config["batch_size"]
    self.timeout = self.config["timeout"]

    # Declared attrs
    self.n_objs = None
    self.builtin_predictors = {p.__name__: p for p in BUILTIN_PREDICTORS}
    self._n_cats = 0
    self._encoding_info = []

    # Query formats
    self._completed = {
        "x": {"$exists": 1},
        "y": {"$exists": 1, "$ne": "reserved"},
        "z": {"$exists": 1},
    }
    self._manager = {"lock": {"$exists": 1}, "queue": {"$exists": 1}}
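# A sketch (not from the source) showing how the query formats defined above
# would be used against the opt_label collection; `task` stands for a
# hypothetical, already-constructed OptTask instance.
n_done = task.c.count_documents(task._completed)  # docs with x, y (not "reserved"), and z
manager = task.c.find_one(task._manager)          # the lock/queue manager document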