def test_index_property_metadata():
    """Index code metadata, then verify every found resource is typed as code."""
    workspace_dir = scratch_path / "index_property_metadata"
    dmf = DMF(path=workspace_dir, create=True)
    propindex.index_property_metadata(
        dmf,
        pkg=idaes.dmf,
        expr=".*IndexMePlease[0-9]",
        exclude_testdirs=False,
    )
    # every indexed resource must carry the 'code' resource type
    for found in dmf.find():
        assert found.v[found.TYPE_FIELD] == resource.ResourceTypes.code
def init(path, create, name, desc, html):
    """Initialize the current workspace used for the data management framework
    commands. Optionally, create a new workspace.
    """
    _log.info(
        f"Initialize with workspace path={path} cwd={os.path.abspath(os.curdir)}"
    )
    if create:
        _log.info("Create new workspace")
        # pre-check that there is no existing workspace config by this name
        try:
            wspath = pathlib.Path(path)
            if wspath.exists() and (
                    wspath / workspace.Workspace.WORKSPACE_CONFIG).exists():
                click.echo(
                    f"Cannot create workspace: '{path}/{workspace.Workspace.WORKSPACE_CONFIG}' already exists"
                )
                sys.exit(Code.DMF_OPER.value)
        except PermissionError:
            click.echo(
                f"Cannot create workspace: path '{path}' not accessible")
            sys.exit(Code.DMF_OPER.value)
        # prompt interactively for any metadata not given on the command line
        if not name:
            name = click.prompt("New workspace name")
        if not desc:
            desc = click.prompt("New workspace description")
        # Note: default HTML paths, and all other default values, are included
        # in the JSON schema at `idaes.dmf.workspace.CONFIG_SCHEMA`
        hpath = [html] if html else None
        try:
            d = DMF(path=path, create=True, name=name, desc=desc,
                    html_paths=hpath, add_defaults=True)
        except errors.WorkspaceError as err:
            click.echo(f"Cannot create workspace: {err}")
            sys.exit(Code.DMF_OPER.value)
        # BUG FIX: the message previously had an unbalanced quote around the
        # configuration file path ("...'{...}" with no closing quote)
        click.echo(f"Configuration in '{d.configuration_file}'")
    else:
        _log.info("Use existing workspace")
    # In either case, switch to provided config
    try:
        _ = DMF(path=path, create=False, save_path=True)  # noqa: F841
    except errors.WorkspaceConfNotFoundError:
        click.echo(f"Workspace configuration not found at path='{path}'")
        sys.exit(Code.WORKSPACE_NOT_FOUND.value)
    except errors.WorkspaceNotFoundError:
        click.echo(f"Existing workspace not found at path='{path}'")
        click.echo("Add --create flag to create a workspace.")
        sys.exit(Code.WORKSPACE_NOT_FOUND.value)
def init(path, create, name, desc, html):
    """Initialize the current workspace used for the data management framework
    commands. Optionally, create a new workspace.
    """
    _log.info(f"Initialize with workspace path={path}")
    if create:
        _log.info("Create new workspace")
        # pre-check that there is no file/dir by this name
        try:
            if pathlib.Path(path).exists():
                click.echo(
                    f"Cannot create workspace: path '{path}' already exists")
                sys.exit(Code.DMF_OPER.value)
        except PermissionError:
            click.echo(
                f"Cannot create workspace: path '{path}' not accessible")
            sys.exit(Code.DMF_OPER.value)
        # prompt interactively for any metadata not given on the command line
        if not name:
            name = click.prompt("New workspace name")
        if not desc:
            desc = click.prompt("New workspace description")
        if html is None:
            # guess html path
            # XXX: don't try to verify the guess
            errfile = pathlib.Path(errors.__file__)
            docsdir = errfile.parent.parent.parent / 'docs'
            hpath = [str(docsdir / 'build')]
        else:
            hpath = [html]
        try:
            d = DMF(path=path, create=True, name=name, desc=desc,
                    html_paths=hpath)
        except errors.WorkspaceError as err:
            click.echo(f"Cannot create workspace: {err}")
            sys.exit(Code.DMF_OPER.value)
        # BUG FIX: the message previously had an unbalanced quote around the
        # configuration file path ("...'{...}" with no closing quote)
        click.echo(f"Configuration in '{d.configuration_file}'")
    else:
        _log.info("Use existing workspace")
    # In either case, switch to provided config
    try:
        _ = DMF(path=path, create=False, save_path=True)  # noqa: F841
    except errors.WorkspaceConfNotFoundError:
        click.echo(f"Workspace configuration not found at path='{path}'")
        sys.exit(Code.WORKSPACE_NOT_FOUND.value)
    except errors.WorkspaceNotFoundError:
        click.echo(f"Existing workspace not found at path='{path}'")
        click.echo("Add --create flag to create a workspace.")
        sys.exit(Code.WORKSPACE_NOT_FOUND.value)
def ls(color, show, sort_by, reverse, prefix):
    """List resources in the current DMF workspace as a table."""
    dmf = DMF()
    if show:
        try:
            show = _split_and_validate_fields(show)
        except ValueError as err:
            click.echo(f"Bad fields for --show option: {err}")
            sys.exit(Code.INPUT_VALUE.value)
    else:
        show = ["type", "desc", "modified"]  # note: 'id' is always first
    descending = bool(reverse == "yes")
    sort_fields = sort_by if sort_by else ["id"]
    found = list(dmf.find())
    _print_resource_table(found, show, sort_fields, descending, prefix, color)
def test_create_many():
    """Creating many experiments assigns the requested name and a non-empty id."""
    workspace_dir = scratch_path / "create_many"
    dmf = DMF(path=workspace_dir, create=True)
    for i in range(100):
        expected_name = "try{i}".format(i=i)
        exp = experiment.Experiment(dmf, name=expected_name)
        assert exp.name == expected_name
        assert exp.id
def test_bad_columns_mock():
    """Setting input data with an unknown column name raises KeyError."""
    workspace_dir = scratch_path / "bad_columns_mock"
    dmf = DMF(path=workspace_dir, create=True)
    with AlamoMock() as mock:
        model = surrmod.SurrogateModel(Experiment(dmf))
        with pytest.raises(KeyError):
            model.set_input_data(model_data, ["snork"], "cost")
def test_relation_with_remove():
    """Removing a resource drops its relation from the experiment on update."""
    workspace_dir = scratch_path / "relation_with_remove"
    dmf = DMF(path=workspace_dir, create=True)
    exp = experiment.Experiment(dmf, name="1")
    total = 10
    added = []
    for _ in range(total):
        rsrc = resource.Resource({"name": "foo"})
        exp.add(rsrc)
        added.append(rsrc)
    assert len(exp.v["relations"]) == total
    # remove each resource, then refresh the experiment: the relation to the
    # removed resource should be gone
    remaining = total
    for rsrc in added:
        dmf.remove(identifier=rsrc.id)
        exp.update()
        remaining -= 1
        assert len(exp.v["relations"]) == remaining
def test_relation_in_experiment():
    """Adding a resource to an experiment records a relation on both sides."""
    workspace_dir = scratch_path / "relation_in_experiment"
    dmf = DMF(path=workspace_dir, create=True)
    exp = experiment.Experiment(dmf, name="1")
    rsrc = resource.Resource(value={"name": "foo"})
    exp.add(rsrc)
    assert len(rsrc.v["relations"]) == 1
    assert len(exp.v["relations"]) == 1
def test_mock_alamo():
    """Running the surrogate model invokes the (mocked) ALAMO with our data."""
    workspace_dir = scratch_path / "mock_alamo"
    dmf = DMF(path=workspace_dir, create=True)
    with AlamoMock() as mock:
        model = surrmod.SurrogateModel(Experiment(dmf))
        assert not mock.doalamo_called
        model.set_input_data(model_data, ["scoops", "bowl"], "cost")
        model.run()
        assert mock.doalamo_called
        # output column 'cost' is passed through to the mock as zdata
        assert tuple(mock.zdata) == tuple(model_data["cost"])
def rm(identifier, yes, multiple, list_resources):
    """Remove one or more resources matching an identifier prefix.

    Exits with a non-zero code on bad input, too many matches (without
    --multiple), or user cancellation at the confirmation prompt.
    """
    _log.info(f"remove resource '{identifier}'")
    # validate that the identifier is at least a legal (prefix of an) id
    try:
        resource.identifier_str(identifier, allow_prefix=True)
    except ValueError as errmsg:
        click.echo(f"Invalid identifier. Details: {errmsg}")
        sys.exit(Code.INPUT_VALUE.value)
    rsrc_list = list(find_by_id(identifier))
    found_multiple = len(rsrc_list) > 1
    # refuse ambiguous prefixes unless the caller opted in with --multiple
    if found_multiple and not multiple:
        click.echo(
            f"Too many ({len(rsrc_list)}) resources match prefix '{identifier}'. "
            "Add option --multiple to allow multiple matches.")
        sys.exit(Code.DMF_OPER.value)
    fields = ["type", "desc", "modified"]  # "id" is prepended by _ls_basic()
    if list_resources:
        _print_resource_table(rsrc_list, fields, ["id"], False, False, True)
    if yes != "yes":
        # confirm interactively unless --yes was given
        if found_multiple:
            s = f"these {len(rsrc_list)} resources"
        else:
            s = "this resource"
        do_remove = click.confirm(f"Remove {s}", prompt_suffix="? ",
                                  default=False)
        if not do_remove:
            click.echo("aborted")
            sys.exit(Code.CANCELED.value)
    d = DMF()
    for r in rsrc_list:
        _log.debug(f"begin remove-resource id={r.id}")
        d.remove(identifier=r.id)
        _log.debug(f"end remove-resource id={r.id}")
    # report what was done
    if found_multiple:
        s = f"{len(rsrc_list)} resources removed"
    else:
        s = "resource removed"
    click.echo(s)
def main():
    """Command-line entry point for the convergence-evaluation tool.

    Returns 0 on success, -1 if the DMF could not be initialized.
    """
    args = _parse_arguments()
    # map the -v verbosity count onto a log level
    if args.vb > 1:
        _log.setLevel(logging.DEBUG)
    elif args.vb > 0:
        _log.setLevel(logging.INFO)
    else:
        _log.setLevel(logging.WARN)
    if args.command == 'create-sample-file':
        # these options are required for this sub-command
        assert args.number_samples is not None
        assert args.sample_file is not None
        assert args.convergence_evaluation_class_str is not None
        try:
            # dynamically import the user-specified evaluation class
            conv_eval_class = cb._class_import(
                args.convergence_evaluation_class_str)
            conv_eval = conv_eval_class()
        except Exception as e:
            print('Failed to find the specified convergence_evaluation_class '
                  'with error: {}'.format(str(e)))
            raise ValueError(
                'Invalid convergence_evaluation_class specified (-e).')
        spec = conv_eval.get_specification()
        cb.write_sample_file(eval_spec=spec, filename=args.sample_file,
                             convergence_evaluation_class_str=args.
                             convergence_evaluation_class_str,
                             n_points=args.number_samples, seed=args.seed)
    else:
        # run an evaluation, optionally recording statistics via the DMF
        if args.dmfcfg is None:
            dmf = None
        else:
            try:
                dmf = DMF(args.dmfcfg)
            except DMFError as err:
                _log.error('Unable to init DMF: {}'.format(err))
                return -1
        (inputs, samples, results) = \
            cb.run_convergence_evaluation_from_sample_file(
                sample_file=args.sample_file)
        if results is not None:
            cb.save_convergence_statistics(inputs, results, dmf=dmf)
    return 0
def status(color, show, show_all):
    """Show the DMF configuration settings and current workspace status."""
    if show_all == "yes":
        show = ["all"]
    _log.debug(
        f"Get status. Show items: {' '.join(show) if show else '(basic)'}")
    # choose colored or plain terminal rendering
    t = _cterm if color else _noterm
    if not DMFConfig.configuration_exists():
        click.echo(
            f"No configuration found at '{DMFConfig.configuration_path()}'")
        sys.exit(Code.CONFIGURATION_NOT_FOUND.value)
    try:
        d = DMF()
    except errors.WorkspaceConfNotFoundError as err:
        _log.fatal(f"Cannot get status: {err}")
        click.echo(str(err))
        sys.exit(Code.WORKSPACE_NOT_FOUND.value)

    # pretty-display a key/value pair or list value
    def item(key, value=None, before="", color=t.green):
        after_key = "" if key == "" else ":"
        if value is None:
            return f"{before}{color}{key}{after_key}{t.normal}"
        elif key is None:
            return f"{before}{color}{value}{t.normal}"
        return f"{before}{color}{key}{after_key}{t.normal} {value}"

    indent_spc = " "
    # configuration settings section
    print(item("settings", color=t.blue))
    indent = indent_spc
    conf = DMFConfig()  # note: must exist due to earlier check
    for key, value in conf.c.items():
        print(item(key, value, before=indent))
    # workspace metadata section
    print(item("workspace", color=t.blue))
    indent = indent_spc
    for key, value in (
        ("location", d.root),
        ("name", d.name),
        ("description", d.description),
        ("created", d.meta[d.CONF_CREATED]),
        ("modified", d.meta[d.CONF_MODIFIED]),
    ):
        print(item(key, value, before=indent))
    _show_optional_workspace_items(d, show, indent_spc, item, t=t)
def get_workspace(path='', name=None, desc=None, create=False, errs=None,
                  **kwargs):
    """Create or load a DMF workspace.

    If the :class:`DMF` constructor, throws an exception, this catches
    it and prints the error to the provided stream (or stdout).

    See :class:`DMF` for details on arguments.

    Args:
        path (str): Path to workspace.
        name (str): Name to be used for workspace.
        desc (str): Longer description of workspace.
        create (bool): If the path to the workspace does not exist,
                       this controls whether to create it.
        errs (object): Stream for errors, stdout is used if None

    Returns:
        DMF: New instance, or None if it failed.
    """
    dmf = None
    try:
        dmf = DMF(path=path, name=name, desc=desc, create=create, **kwargs)
    except errors.DMFError as err:
        if errs is None:
            errs = sys.stdout
        msg = 'Error creating DMF workspace\n'
        # NOTE(review): isinstance(err, errors.DMFError) is always True here,
        # since err was caught as that type; the condition reduces to
        # "not create". A narrower exception type (e.g. a workspace-not-found
        # error) may have been intended — confirm before changing.
        if isinstance(err, errors.DMFError) and not create:
            msg += 'Directory not found, and "create" flag is False\n'
            msg += 'If you want to create the workspace, try again with ' \
                   'create=True\n'
        else:
            msg += '{}\n'.format(err)
        msg += '\npath: {}\nname: {}\ndesc: {}\n'.format(path, name, desc)
        errs.write(msg)
    return dmf
def test_circular():
    """Traversal over a relation cycle terminates and honors maxdepth.

    Graph: r0 -> derived -> r1 -> derived -> r2, plus a back edge
    r2 -> uses -> r0 closing the cycle.
    """
    workspace_dir = scratch_path / "circular"
    dmf = DMF(path=workspace_dir, create=True)
    rsrcs = [resource.Resource({"name": "r{}".format(i)}) for i in range(3)]
    resource.create_relation(rsrcs[0], Predicates.derived, rsrcs[1])
    resource.create_relation(rsrcs[1], Predicates.derived, rsrcs[2])
    resource.create_relation(rsrcs[2], Predicates.uses, rsrcs[0])
    for item in rsrcs:
        dmf.add(item)

    def related_aliases(**kwargs):
        # collect (sorted) the primary alias of each resource related to r0
        return sorted(
            meta["aliases"][0]
            for _, _, meta in dmf.find_related(
                rsrcs[0], meta=["aliases"], **kwargs)
        )

    # outgoing from r0: the cycle reaches every resource
    assert related_aliases() == ["r0", "r1", "r2"]
    # incoming direction reaches the same set, by symmetry of the cycle
    assert related_aliases(outgoing=False) == ["r0", "r1", "r2"]
    # reducing depth shortens output
    assert related_aliases(maxdepth=2) == ["r1", "r2"]
    assert related_aliases(maxdepth=1) == ["r1"]
def test_remove_workflow():
    """Copy an experiment, remove the original, and verify link cleanup."""
    workspace_dir = scratch_path / "remove_workflow"
    dmf = DMF(path=workspace_dir, create=True)
    # make an experiment and a new version of it, linked together
    original = experiment.Experiment(dmf, name="one", version="0.0.1")
    copied = original.copy(version="0.0.2")
    original.link(copied, predicate=Predicates.version)
    # remove the original (what happens to the link?)
    original.remove()
    # the removed experiment must be unusable from now on
    with pytest.raises(errors.BadResourceError):
        original.update()
    with pytest.raises(errors.BadResourceError):
        original.link(copied, predicate=Predicates.version)
    # the copy can still be modified; update() fixes relations in the DB
    copied.v["desc"] = "This is a copy of e1"
    copied.update()
    # the stale link is gone: no relations remain on the copy
    assert len(copied.v["relations"]) == 0
def test_find_related():
    """find_related walks relation chains in both directions.

    Graph: r0 --uses--> r1 --version--> r2 --derived--> {r3, r4}
    """
    workspace_dir = scratch_path / "find_related"
    dmf = DMF(path=workspace_dir, create=True)
    rsrcs = [resource.Resource({"name": "r{}".format(i)}) for i in range(5)]
    link = resource.create_relation  # shortcut
    # r3 <-- derived <-- r2 <-- version <-- r1
    link(rsrcs[2], Predicates.derived, rsrcs[3])
    link(rsrcs[1], Predicates.version, rsrcs[2])
    # r4 <-- derived <-- r2
    link(rsrcs[2], Predicates.derived, rsrcs[4])
    # r0 -- uses --> r1
    link(rsrcs[0], Predicates.uses, rsrcs[1])
    for item in rsrcs:
        dmf.add(item)
    # outgoing from r0 should include r1, r2, r3, r4
    found = sorted(
        meta["aliases"][0]
        for _, _, meta in dmf.find_related(rsrcs[0], meta=["aliases"])
    )
    assert found == ["r1", "r2", "r3", "r4"]
    # incoming to r4 should include r0, r1, r2
    found = sorted(
        meta["aliases"][0]
        for _, _, meta in dmf.find_related(
            rsrcs[4], meta=["aliases"], outgoing=False)
    )
    assert found == ["r0", "r1", "r2"]
def testdmf(tmpd):
    """Fixture: a fresh DMF workspace rooted at the temporary directory."""
    return DMF(path=tmpd, create=True)
def related(identifier, direction, color, unicode):
    """Print the tree of resources related to a given resource.

    The root is looked up by (a prefix of) its identifier; relations are
    followed in the requested direction and drawn with either unicode or
    ASCII connectors.
    """
    _log.info(f"related to resource id='{identifier}'")
    t = _cterm if color else _noterm
    dmf = DMF()
    try:
        resource.identifier_str(identifier, allow_prefix=True)
    except ValueError as err:
        click.echo(f"{err}")
        sys.exit(Code.INPUT_VALUE.value)
    _log.debug(f"begin: finding root resource {identifier}")
    rsrc_list = list(find_by_id(identifier, dmf=dmf))
    n = len(rsrc_list)
    if n > 1:
        click.echo(f"Too many resources matching `{identifier}`")
        # BUG FIX: was `sys.exit(Code.INPUT_VALUE)` — the enum member itself,
        # not its integer value, inconsistent with every other exit here
        sys.exit(Code.INPUT_VALUE.value)
    rsrc = rsrc_list[0]
    _log.debug(f"end: finding root resource {identifier}")
    # get related resources
    _log.debug(f"begin: finding related resources for {identifier}")
    outgoing = direction == "out"
    rr = list(
        dmf.find_related(rsrc, meta=["aliases", "type"], outgoing=outgoing))
    _log.debug(f"end: finding related resources for {identifier}")
    # stop if no relations
    if not rr:
        _log.warning(f"no resource related to {identifier}")
        click.echo(f"No relations for resource `{identifier}`")
        sys.exit(0)
    _log.info(f"got {len(rr)} related resources")
    # debugging
    if _log.isEnabledFor(logging.DEBUG):
        dbgtree = '\n'.join([' ' + str(x) for x in rr])
        _log.debug(f"related resources:\n{dbgtree}")
    # extract uuids & determine common UUID prefix length
    uuids = [item[2][resource.Resource.ID_FIELD] for item in rr]
    pfx = util.uuid_prefix_len(uuids)
    # initialize queue with depth=1 items
    q = [item for item in rr if item[0] == 1]
    # print root resource
    print(_related_item(rsrc.id, rsrc.name, rsrc.type, pfx, t, unicode))
    # set up printing style
    if unicode:
        # connector chars
        vrt, vrd, relbow, relbow2, rarr = (
            '\u2502', '\u2506', '\u2514', '\u251C', '\u2500\u2500',
        )
        # relation prefix and arrow
        relpre, relarr = (
            ['\u2500\u25C0\u2500\u2524', '\u2524'][outgoing],
            ['\u2502', '\u251C\u2500\u25B6'][outgoing],
        )
    else:
        # connector chars
        vrt, vrd, relbow, relbow2, rarr = '|', '.', '+', '+', '--'
        # relation prefix and arrow
        relpre, relarr = ['<-[', '-['][outgoing], [']-', ']->'][outgoing]
    # create map of #items at each level, so we can easily
    # know when there are more at a given level, for drawing
    n_at_level = {0: 0}
    for item in rr:
        depth = item[0]
        if depth in n_at_level:
            n_at_level[depth] += 1
        else:
            n_at_level[depth] = 1
    # print tree
    while q:
        depth, rel, meta = q.pop()
        n_at_level[depth] -= 1
        indent = ''.join([
            f" {t.blue}{vrd if n_at_level[i - 1] else ' '}{t.normal} "
            for i in range(1, depth + 1)
        ])
        print(f"{indent} {t.blue}{vrt}{t.normal}")
        rstr = f"{t.blue}{relpre}{t.yellow}{rel.predicate}{t.blue}{relarr}{t.normal}"
        if meta["aliases"]:
            item_name = meta["aliases"][0]
        else:
            item_name = meta.get("desc", "-")
        istr = _related_item(meta[resource.Resource.ID_FIELD], item_name,
                             meta["type"], pfx, t, unicode)
        # determine correct connector (whether there is another one down the stack)
        elbow = relbow if (not q or q[-1][0] != depth) else relbow2
        print(f"{indent} {t.blue}{elbow}{rarr}{t.normal}{rstr} {istr}")
        # move this node's children from rr onto the queue
        new_rr = []
        for d2, rel2, _ in rr:
            if outgoing:
                is_same = rel2.subject == rel.object
            else:
                is_same = rel2.object == rel.subject
            if d2 == depth + 1 and is_same:
                q.append((d2, rel2, _))
            else:
                new_rr.append((d2, rel2, _))
        rr = new_rr
def find(
    output_format,
    color,
    show,
    sort_by,
    prefix,
    reverse,
    by,
    created,
    filedesc,
    modified,
    name,
    datatype,
):
    """Query the DMF workspace and print matching resources.

    Supports three output formats: 'list' (table, like `ls`), 'info'
    (one detailed record per resource), and 'json' (raw resource values).
    """
    d = DMF()
    # table-specific option handling, only meaningful for 'list' output
    if output_format == "list":
        if not show:
            show = ["type", "desc", "modified"]  # note: 'id' is always first
        else:
            try:
                show = _split_and_validate_fields(show)
            except ValueError as err:
                click.echo(f"Bad fields for --show option: {err}")
                sys.exit(Code.INPUT_VALUE.value)
        reverse = bool(reverse == "yes")
        if not sort_by:
            sort_by = ["id"]
    # Build query
    query = {}
    if by:
        query["creator.name"] = by
    if created:
        try:
            query["created"] = _date_query(created)
        except ValueError as err:
            click.echo(f"bad date for 'created': {err}")
            sys.exit(Code.INPUT_VALUE.value)
    if filedesc:
        query["datafiles"] = [{"desc": filedesc}]
    if modified:
        try:
            query["modified"] = _date_query(modified)
        except ValueError as err:
            click.echo(f"bad date for 'modified': {err}")
            sys.exit(Code.INPUT_VALUE.value)
    if name:
        query["aliases"] = [name]
    if datatype:
        query["type"] = datatype
    # Execute query
    _log.info(f"find: query = '{query}'")
    _log.debug("find.begin")
    resources = list(d.find(query))
    _log.debug("find.end")
    # Print result
    if output_format == "list":
        # print resources like `ls`
        _print_resource_table(resources, show, sort_by, reverse, prefix, color)
    elif output_format == "info":
        # print resources one by one
        si = _ShowInfo("term", 32, color=color)
        for rsrc in resources:
            si.show(rsrc)
    elif output_format == "json":
        # print resources as JSON
        for r in resources:
            print(json.dumps(r.v, indent=2))
def test_run():
    """Surrogate model runs end-to-end on the sample data."""
    workspace_dir = scratch_path / "run"
    dmf = DMF(path=workspace_dir, create=True)
    model = surrmod.SurrogateModel(Experiment(dmf))
    model.set_input_data(model_data, ["scoops", "bowl"], "cost")
    model.run()
) from idaes.core.util.initialization import solve_indexed_blocks from idaes.core.util.misc import add_object_reference from idaes.core.util.model_statistics import ( degrees_of_freedom, number_unfixed_variables, ) from idaes.core.util.misc import extract_data from idaes.dmf import DMF from idaes.dmf.resource import Resource, TidyUnitData # Set up logger _log = logging.getLogger(__name__) # Set up DMF (assume in same directory as this file) _dmf = DMF(os.path.dirname(__file__)) @declare_process_block_class("HDAParameterBlock") class HDAParameterData(PhysicalParameterBlock): CONFIG = PhysicalParameterBlock.CONFIG() def _get_param(self, name: str): """Helper method to convert data stored in a DMF resource into a Pyomo rule. The intermediate form, :class:`TidyUnitData`, is constructed from the data stored in the JSON of the DMF resource. Args: name: Primary alias of DMF resource with stored data.
def find_by_id(identifier, dmf=None):
    """Look up resources by identifier, creating a DMF instance if none given."""
    active = DMF() if dmf is None else dmf
    return active.find_by_id(identifier)
def test_init_create(magics_impl):
    """dmf_init with 'create' marks the magics implementation initialized."""
    workspace_dir = scratch_path / "init_create"
    DMF(path=workspace_dir, create=True)
    magics_impl.dmf_init(str(workspace_dir), "create")
    assert magics_impl.initialized
def test_init_existing(magics_impl):
    """dmf_init succeeds on a pre-existing workspace."""
    workspace_dir = scratch_path / "init_existing"
    DMF(path=workspace_dir, create=True)
    magics_impl.dmf_init(str(workspace_dir))
def test_init():
    """An experiment can be created with a name and description."""
    workspace_dir = scratch_path / "init"
    dmf = DMF(path=workspace_dir, create=True)
    exp = experiment.Experiment(dmf, name="try1", desc="Nice try")
    assert exp.name == "try1"
    assert exp.id
def register(
    resource_type,
    url,
    info,
    copy,
    strict,
    unique,
    contained,
    derived,
    used,
    prev,
    is_subject,
    version,
):
    """Register a new resource with the DMF from a file URL.

    Creates the resource from the file, optionally checks for duplicates
    (by SHA1 and path), records any requested relations to existing
    resources, sets version metadata, and adds the resource. Prints the
    new resource id (or full info, with --info) on success; exits with a
    non-zero code on any failure.
    """
    _log.debug(f"Register object type='{resource_type}' url/path='{url.path}'")
    # process url
    if url.scheme in ("file", ""):
        path = url.path
    else:
        click.echo("Currently, URL must be a file")
        sys.exit(Code.NOT_SUPPORTED.value)
    # create the resource
    _log.debug("create resource")
    try:
        rsrc = resource.Resource.from_file(path, as_type=resource_type,
                                           strict=strict, do_copy=copy)
    except resource.Resource.InferResourceTypeError as err:
        click.echo(f"Failed to infer resource: {err}")
        sys.exit(Code.IMPORT_RESOURCE.value)
    except resource.Resource.LoadResourceError as err:
        click.echo(f"Failed to load resource: {err}")
        sys.exit(Code.IMPORT_RESOURCE.value)
    # connect to DMF
    try:
        dmf = DMF()
    except errors.WorkspaceError as err:
        click.echo(f"Failed to connect to DMF: {err}")
        sys.exit(Code.WORKSPACE_NOT_FOUND.value)
    except errors.DMFError as err:
        click.echo(f"Failed to connect to DMF: {err}")
        sys.exit(Code.DMF.value)
    # check uniqueness
    if unique:
        df = rsrc.v["datafiles"][0]  # file info for this upload
        query = {"datafiles": [{"sha1": df["sha1"]}]}
        query_result, dup_ids = dmf.find(query), []
        for dup in query_result:
            dup_df = dup.v["datafiles"][0]
            # NOTE(review): this is a substring test ('in'), not equality —
            # confirm whether a path-suffix match is intended here
            if dup_df["path"] in df["path"]:
                dup_ids.append(dup.id)
        n_dup = len(dup_ids)
        if n_dup > 0:
            click.echo(f"This file is already in {n_dup} resource(s): "
                       f"{' '.join(dup_ids)}")
            sys.exit(Code.DMF_OPER.value)
    # process relations
    _log.debug("add relations")
    rel_to_add = {  # translate into standard relation names
        resource.PR_CONTAINS: contained,
        resource.PR_DERIVED: derived,
        resource.PR_USES: used,
        resource.PR_VERSION: prev,
    }
    target_resources = {}  # keep target resources in dict, update at end
    for rel_name, rel_ids in rel_to_add.items():
        for rel_id in rel_ids:
            # fetch each target resource at most once
            if rel_id in target_resources:
                rel_subj = target_resources[rel_id]
            else:
                rel_subj = dmf.fetch_one(rel_id)
                target_resources[rel_id] = rel_subj
            if rel_subj is None:
                click.echo(f"Relation {rel_name} target not found: {rel_id}")
                sys.exit(Code.DMF_OPER.value)
            # --is-subject controls which side of the relation rsrc is on
            if is_subject == "yes":
                resource.create_relation_args(rsrc, rel_name, rel_subj)
            else:
                resource.create_relation_args(rel_subj, rel_name, rsrc)
            _log.debug(f"added relation {rsrc.id} <-- {rel_name} -- {rel_id}")
    _log.debug("update resource relations")
    for rel_rsrc in target_resources.values():
        dmf.update(rel_rsrc)
    # add metadata
    if version:
        try:
            vlist = resource.version_list(version)
        except ValueError:
            click.echo(f"Invalid version `{version}`")
            sys.exit(Code.INPUT_VALUE.value)
        else:
            rsrc.v["version_info"]["version"] = vlist
    # add the resource
    _log.debug("add resource begin")
    try:
        new_id = dmf.add(rsrc)
    except errors.DuplicateResourceError as err:
        click.echo(f"Failed to add resource: {err}")
        sys.exit(Code.DMF_OPER.value)
    _log.debug(f"added resource: {new_id}")
    if info == "yes":
        # show full record for the newly added resource
        pfxlen = len(new_id)
        si = _ShowInfo("term", pfxlen)
        for rsrc in dmf.find_by_id(new_id):
            si.show(rsrc)
    else:
        click.echo(new_id)
def test_init():
    """A surrogate model can be constructed against a fresh workspace."""
    workspace_dir = scratch_path / "init"
    dmf = DMF(path=workspace_dir, create=True)
    surrmod.SurrogateModel(Experiment(dmf))
def test_index_multiple_versions():
    """Re-indexing at new versions chains resources via 'version' relations.

    Indexes the same code at versions v1, v2 (twice), and v3, and checks
    that resources are linked pairwise 0 <--> 1 <--> 2 in version order,
    with the older resource as the relation object and the newer as the
    subject, and that re-indexing an existing version adds nothing.
    """
    tmp_dir = scratch_path / "index_multiple_versions"
    dmf = DMF(path=tmp_dir, create=True)
    v1, v2, v3 = "1.0.0", "6.6.6", "9.9.0"
    # index initial version
    propindex.index_property_metadata(
        dmf,
        pkg=idaes.dmf,
        expr=".*IndexMePlease[0-9]",
        exclude_testdirs=False,
        default_version=v1,
    )
    # index again
    propindex.index_property_metadata(
        dmf,
        pkg=idaes.dmf,
        expr=".*IndexMePlease[0-9]",
        exclude_testdirs=False,
        default_version=v2,
    )
    # check that we now have two resources, and a relation between them
    rlist = list(dmf.find({}))
    assert len(rlist) == 2
    rcodes = [r.v["codes"][0] for r in rlist]
    # order the two resources so 'first' is the older (v1) one
    # (assumes the stored version is a tuple of string components — confirm)
    if rcodes[0]["version"][:3] == ("6", "6", "6"):
        first, second = 1, 0
    else:
        first, second = 0, 1
    # Each resource has 1 relation
    assert len(rlist[first].v["relations"]) == 1
    assert len(rlist[second].v["relations"]) == 1
    first_rel = rlist[first].v["relations"][0]
    second_rel = rlist[second].v["relations"][0]
    # First resource is pointed at by second
    assert first_rel[resource.RR_ROLE] == resource.RR_OBJ
    assert first_rel[resource.RR_PRED] == resource.Predicates.version
    assert first_rel[resource.RR_ID] == rlist[second].id
    # Second resource points at first
    assert second_rel[resource.RR_ROLE] == resource.RR_SUBJ
    assert second_rel[resource.RR_PRED] == resource.Predicates.version
    assert second_rel[resource.RR_ID] == rlist[first].id
    # Add the same version
    propindex.index_property_metadata(
        dmf,
        pkg=idaes.dmf,
        expr=".*IndexMePlease[0-9]",
        exclude_testdirs=False,
        default_version=v2,
    )
    # check that we still have two resources
    rlist = list(dmf.find({}))
    assert len(rlist) == 2
    # Now add another version
    propindex.index_property_metadata(
        dmf,
        pkg=idaes.dmf,
        expr=".*IndexMePlease[0-9]",
        exclude_testdirs=False,
        default_version=v3,
    )
    # check that we now have three resources
    rlist = list(dmf.find({}))
    assert len(rlist) == 3
    # check that we have 0 <--> 1 <--> 2
    # first sort by version and save that in the 'indexes' array
    indexes = [(r.v["codes"][0]["version"], i) for i, r in enumerate(rlist)]
    indexes.sort()
    # pull out relations into 'rel' array, in version order
    rel = [rlist[indexes[i][1]].v["relations"] for i in range(3)]
    # check first resource's relations
    assert len(rel[0]) == 1
    # 0 <-- 1
    assert rel[0][0][resource.RR_ID] == rlist[indexes[1][1]].id
    assert rel[0][0][resource.RR_ROLE] == resource.RR_OBJ
    # check second resource's relations
    assert len(rel[1]) == 2
    for j in range(2):
        if rel[1][j][resource.RR_ROLE] == resource.RR_SUBJ:
            # 1 --> 0
            assert rel[1][j][resource.RR_ID] == rlist[indexes[0][1]].id
        else:
            # 1 <-- 2
            assert rel[1][j][resource.RR_ID] == rlist[indexes[2][1]].id
            assert rel[1][j][resource.RR_ROLE] == resource.RR_OBJ
    # check third resource's relations
    assert len(rel[2]) == 1
    # 2 --> 1
    assert rel[2][0][resource.RR_ID] == rlist[indexes[1][1]].id
    assert rel[2][0][resource.RR_ROLE] == resource.RR_SUBJ
MaterialBalanceType, EnergyBalanceType) from idaes.core.util.initialization import solve_indexed_blocks from idaes.core.util.misc import add_object_reference from idaes.core.util.model_statistics import ( degrees_of_freedom, number_unfixed_variables, ) from idaes.core.util.misc import extract_data from idaes.dmf import DMF from idaes.dmf.resource import Resource, TidyUnitData # Set up logger _log = logging.getLogger(__name__) # Set up DMF (use working directory, wherever that is) _dmf = DMF(".") @declare_process_block_class("HDAParameterBlock") class HDAParameterData(PhysicalParameterBlock): CONFIG = PhysicalParameterBlock.CONFIG() def _get_param(self, name: str): """Helper method to convert data stored in a DMF resource into a Pyomo rule. The intermediate form, :class:`TidyUnitData`, is constructed from the data stored in the JSON of the DMF resource. Args: name: Primary alias of DMF resource with stored data.
def test_init_extraignored(magics_impl):
    """Extra positional arguments to dmf_init are ignored (with a warning)."""
    workspace_dir = scratch_path / "init_extraignored"
    DMF(path=workspace_dir, create=True)
    magics_impl.dmf_init(str(workspace_dir), "foo")  # just generate warning
    magics_impl.dmf_init(str(workspace_dir), "foo", "bar")  # ditto