Example #1
    def initialize(cls, scenario, with_data=False):
        """Initialize the problem.

        If *with_data* is :obj:`True` (default: :obj:`False`), the set elements
        and parameter values from the original problem are also populated.
        Otherwise, the sets and parameters are left empty.
        """
        # Initialize the ixmp items
        cls.initialize_items(scenario, ITEMS)

        if not with_data:
            return

        checkout = maybe_check_out(scenario)

        # Add set elements
        scenario.add_set("i", DATA["i"])
        scenario.add_set("j", DATA["j"])

        # Add parameter values
        update_par(scenario, "a", DATA["a"])
        update_par(scenario, "b", DATA["b"])
        update_par(scenario, "d", DATA["d"])

        # TODO avoid overwriting the existing value
        scenario.change_scalar("f", *DATA["f"])

        maybe_commit(scenario, checkout, f"{cls.__name__}.initialize")
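A usage sketch (not part of the source): assuming this `initialize` classmethod is defined on a model class, called `Problem` here purely for illustration, and that `DATA` and `ITEMS` are module-level constants, it could be applied to a freshly created Scenario as follows.

# Hypothetical usage of the initialize() classmethod shown above; `Problem` is a placeholder name.
from ixmp import Platform, Scenario

mp = Platform()  # connect to the default local platform
scen = Scenario(mp, model="transport", scenario="baseline", version="new")

# Create the ixmp items and also populate them with the example data
Problem.initialize(scen, with_data=True)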
Example #2
def test_maybe_commit(caplog, test_mp):
    s = Scenario(test_mp, "maybe_commit", "maybe_commit", version="new")

    # A new Scenario is not committed, so this works
    assert utils.maybe_commit(s, True, message="foo") is True

    # *s* is already committed. No commit is performed, but the function call
    # succeeds and a message is logged.
    caplog.set_level(logging.INFO, logger="ixmp")
    assert utils.maybe_commit(s, True, message="foo") is False
    assert caplog.messages[-1].startswith("maybe_commit() didn't commit: ")
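The test above pins down the behaviour of maybe_commit. A minimal sketch consistent with it (an illustration, not the actual ixmp implementation): commit when `condition` is truthy, and turn the RuntimeError raised for an already-committed TimeSeries into a log message and a False return value.

import logging

log = logging.getLogger(__name__)


def maybe_commit(ts, condition, message):
    """Commit `ts` with `message` if `condition` is truthy; never raise."""
    if not condition:
        return False
    try:
        ts.commit(message)
    except RuntimeError as exc:
        # e.g. the TimeSeries has no pending changes / is not checked out
        log.info(f"maybe_commit() didn't commit: {exc}")
        return False
    else:
        return True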
Example #3
    def transact(self, message: str = "", condition: bool = True):
        """Context manager to wrap code in a 'transaction'.

        If `condition` is :obj:`True`, the TimeSeries (or :class:`.Scenario`) is
        checked out *before* the block begins. When the block ends, the object is
        committed with `message`. If `condition` is :obj:`False`, nothing occurs before
        or after the block.

        Example
        -------
        >>> # `ts` is currently checked in/locked
        >>> with ts.transact(message="replace 'foo' with 'bar' in set x"):
        ...     # `ts` is checked out and may be modified
        ...     ts.remove_set("x", "foo")
        ...     ts.add_set("x", "bar")
        >>> # Changes to `ts` have been committed
        """
        # TODO implement __enter__ and __exit__ to allow simpler "with ts: …"
        if condition:
            maybe_check_out(self)
        try:
            yield
        finally:
            maybe_commit(self, condition, message)
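Because the body uses `yield`, `transact` is presumably a generator-based context manager; a sketch of the surrounding declaration, which the excerpt omits (an assumption, not confirmed by the excerpt):

from contextlib import contextmanager


class TimeSeries:
    @contextmanager
    def transact(self, message: str = "", condition: bool = True):
        # Body as in the excerpt above: maybe_check_out(), yield, maybe_commit()
        yield

Per the docstring, passing condition=False makes the block a no-op with respect to check-out and commit, so the same code path can serve a read-only or dry-run mode.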
Example #4
    def initialize_items(cls, scenario, items):
        """Helper for :meth:`initialize`.

        All of the `items` are added to `scenario`. Existing items are not
        modified. Warnings are logged if the description in `items` conflicts
        with the index set(s) and/or index name(s) of existing items.

        initialize_items may perform one commit. `scenario` is in the same
        state (checked in, or checked out) after initialize_items is complete.

        Parameters
        ----------
        scenario : .Scenario
            Object to initialize.
        items : dict of (str -> dict)
            Each key is the name of an ixmp item (set, parameter, equation, or
            variable) to initialize. Each value dict **must** have the key
            'ix_type', with a value of 'set', 'par', 'equ', or 'var'; any other
            entries are keyword arguments to the corresponding method
            :meth:`.init_set` etc.

        Raises
        ------
        ValueError
            if `scenario` has a solution, i.e. :meth:`~.Scenario.has_solution`
            is :obj:`True`.

        See also
        --------
        .init_equ
        .init_par
        .init_set
        .init_var
        """
        # Don't know if the Scenario is checked out
        checkout = None

        # Lists of items in the Scenario
        existing_items = dict()

        # Lists of items initialized
        items_initialized = []

        for name, item_info in items.items():
            # Copy so that pop() below does not modify *items*
            item_info = item_info.copy()

            # Check that the item exists
            ix_type = item_info.pop('ix_type')

            if ix_type not in existing_items:
                # Store a list of items of *ix_type*
                method = getattr(scenario, f'{ix_type}_list')
                existing_items[ix_type] = method()

            # Item must be initialized if it does not exist

            if name in existing_items[ix_type]:
                # Item exists; check its index sets and names
                for key, values in item_info.items():
                    values = values or []
                    existing = getattr(scenario, key)(name)
                    if existing != values:
                        # The existing index sets or names do not match
                        log.warning(
                            f"Existing index {key.split('_')[-1]} of "
                            f"{repr(name)} {repr(existing)} do not match "
                            f"{repr(values)}"
                        )

                # Skip; can't do anything to existing items
                continue

            # Item doesn't exist and must be initialized

            # Possibly check out the Scenario
            try:
                checkout = maybe_check_out(scenario, checkout)
            except ValueError as exc:  # pragma: no cover
                # The Scenario has a solution. This indicates an inconsistent
                # situation: the Scenario lacks the item *name*, but somehow it
                # was successfully solved without it, and the solution stored.
                # Can't proceed further.
                log.error(str(exc))
                return

            # Get the appropriate method, e.g. init_set, and add the item
            log.info(f'Initialize {ix_type} {repr(name)} as {item_info}')
            getattr(scenario, f'init_{ix_type}')(name=name, **item_info)

            # Record
            items_initialized.append(name)

        maybe_commit(scenario, len(items_initialized),
                     f'{cls.__name__}.initialize_items')

        if len(items_initialized) and not checkout:
            # Scenario was originally in a checked out state; restore
            maybe_check_out(scenario)
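A sketch of an `items` argument consistent with the docstring above, using hypothetical item names and dimensions:

# Each key names an ixmp item; 'ix_type' selects among init_set/init_par/init_equ/init_var,
# and any remaining entries are passed through as keyword arguments to that method.
ITEMS = {
    "i": dict(ix_type="set"),
    "d": dict(ix_type="par", idx_sets=["i", "j"]),
    "x": dict(ix_type="var", idx_sets=["i", "j"]),
    "cost": dict(ix_type="equ", idx_sets=[]),
}

SomeModel.initialize_items(scenario, ITEMS)  # `SomeModel` and `scenario` are placeholders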
Example #5
def s_read_excel(be,
                 s,
                 path,
                 add_units=False,
                 init_items=False,
                 commit_steps=False):
    """Read data from a Microsoft Excel file at *path* into *s*.

    See also
    --------
    Scenario.read_excel
    """
    log.info(f"Read data from {path}")

    # Get item name -> ixmp type mapping as a pd.Series
    xf = pd.ExcelFile(path, engine="openpyxl")
    name_type = xf.parse("ix_type_mapping", index_col="item")["ix_type"]

    # Queue of (set name, data) to add
    sets_to_add = deque((n, None) for n in name_type.index[name_type == "set"])

    def parse_item_sheets(name):
        """Read data for item *name*, possibly across multiple sheets."""
        dfs = [xf.parse(name)]

        # Collect data from repeated sheets due to max_row limit
        for x in filter(lambda n: n.startswith(name + "("), xf.sheet_names):
            dfs.append(xf.parse(x))

        # Concatenate once and return
        return pd.concat(dfs, axis=0)

    # Add sets in two passes:
    # 1. Index sets, required to initialize other sets.
    # 2. Sets indexed by others.
    while True:
        try:
            # Get an item from the queue
            name, data = sets_to_add.popleft()
        except IndexError:
            break  # Finished

        log.info(name)

        first_pass = data is None
        if first_pass:
            # Read data
            data = parse_item_sheets(name)

        # Determine index set(s) for this set
        idx_sets = data.columns.to_list()
        if len(idx_sets) == 1:
            if idx_sets == [0]:  # pragma: no cover
                # Old-style export with uninformative '0' as a column header;
                # assume it is an index set
                log.warning(f"Add {name} with header '0' as index set")
                idx_sets = None
            elif idx_sets == [name]:
                # Set's own name as column header -> an index set
                idx_sets = None
            else:
                pass  # 1-D set indexed by another set

        if first_pass and idx_sets is not None:
            # Indexed set; append to the queue to process later
            sets_to_add.append((name, data))
            continue

        # At this point: either an index set, or second pass when all index
        # sets have been init'd and populated
        if init_items:
            try:
                maybe_init_item(s, "set", name, idx_sets, path)
            except ValueError:
                continue  # Ambiguous or conflicting; skip this set

        # Convert data as expected by add_set
        if len(data.columns) == 1:
            # Convert data frame into 1-D vector
            data = data.iloc[:, 0].values

            if idx_sets is not None:
                # Indexed set must be input as list of list of str
                data = list(map(as_str_list, data))

        try:
            s.add_set(name, data)
        except KeyError:
            raise ValueError(f"no set {repr(name)}; try init_items=True")

    maybe_commit(s, commit_steps, f"Loaded sets from {path}")

    # List of existing units for reference
    units = set(be.get_units())

    # Add equ/par/var data
    for name, ix_type in name_type[name_type != "set"].items():
        if ix_type in ("equ", "var"):
            log.info(f"Cannot read {ix_type} {repr(name)}")
            continue

        # Only parameters beyond this point

        df = parse_item_sheets(name)

        maybe_check_out(s)

        if add_units:
            # New units appearing in this parameter
            to_add = set(df["unit"].unique()) - units

            for unit in to_add:
                log.info(f"Add missing unit: {unit}")
                # FIXME cannot use the comment f'Loaded from {path}' here; too
                #       long for JDBCBackend
                be.set_unit(unit, "Loaded from file")

            # Update the reference set to avoid re-adding these units
            units |= to_add

        # NB if equ/var were imported, also need to filter 'lvl', 'mrg' here
        idx_sets = list(
            filter(lambda v: v not in ("value", "unit"), df.columns))

        if init_items:
            try:
                # Same as init_scalar if idx_sets == []
                maybe_init_item(s, ix_type, name, idx_sets, path)
            except ValueError:
                continue  # Ambiguous or conflicting; skip this parameter

        if not len(idx_sets):
            # No index sets -> scalar parameter; must supply empty 'key' column
            # for add_par()
            df["key"] = None

        s.add_par(name, df)

        # Commit after every parameter
        maybe_commit(s, commit_steps,
                     f"Loaded {ix_type} {repr(name)} from {path}")

    maybe_commit(s, not commit_steps, f"Import from {path}")
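This function backs `Scenario.read_excel`. A typical call (a sketch; the path and flag values are illustrative), assuming `s` is an existing Scenario and the file was produced by `Scenario.to_excel`:

# Read sets and parameter data back from file.
s.read_excel(
    "scenario-data.xlsx",  # hypothetical path
    add_units=True,        # register units not yet defined on the Platform
    init_items=True,       # initialize sets/parameters that do not yet exist in `s`
    commit_steps=False,    # a single commit at the end, rather than one per item
)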
Example #6
def apply_spec(
    scenario: Scenario,
    spec: Mapping[str, ScenarioInfo],
    data: Optional[Callable] = None,
    **options,
):
    """Apply `spec` to `scenario`.

    Parameters
    ----------
    spec
        A 'specification': :class:`dict` with 'require', 'remove', and 'add' keys and
        :class:`.ScenarioInfo` objects as values.
    data : callable, optional
        Function to add data to `scenario`. `data` can either manipulate the scenario
        directly, or return a :class:`dict` compatible with :func:`.add_par_data`.

    Other parameters
    ----------------
    dry_run : bool
        Don't modify `scenario`; only show what would be done. Default :obj:`False`.
        Exceptions will still be raised if the elements from ``spec['require']`` are
        missing; this serves as a check that the scenario has the required features for
        applying the spec.
    fast : bool
        Do not remove existing parameter data; increases speed on large scenarios.
    quiet : bool
        Only show log messages at level ``ERROR`` and higher. If :obj:`False` (default),
        show log messages at level ``DEBUG`` and higher.
    message : str
        Commit message.

    See also
    --------
    .add_par_data
    .strip_par_data
    .Code
    .ScenarioInfo
    """
    dry_run = options.get("dry_run", False)

    log.setLevel(logging.ERROR if options.get("quiet", False) else logging.DEBUG)

    if not dry_run:
        try:
            scenario.remove_solution()
        except ValueError:
            pass
        maybe_check_out(scenario)

    dump: Dict[str, pd.DataFrame] = {}  # Removed data

    for set_name in scenario.set_list():
        # Check whether this set is mentioned at all in the spec
        if 0 == sum(map(lambda info: len(info.set[set_name]), spec.values())):
            # Not mentioned; don't do anything
            continue

        log.info(f"Set {repr(set_name)}")

        # Base contents of the set
        base_set = scenario.set(set_name)
        # Unpack a multi-dimensional/indexed set to a list of tuples
        base = (
            list(base_set.itertuples(index=False))
            if isinstance(base_set, pd.DataFrame)
            else base_set.tolist()
        )

        log.info(f"  {len(base)} elements")
        # log.debug(', '.join(map(repr, base)))  # All elements; verbose

        # Check for required elements
        require = spec["require"].set[set_name]
        log.info(f"  Check {len(require)} required elements")

        # Raise an exception about the first missing element
        missing = list(filter(lambda e: e not in base, require))
        if len(missing):
            log.error(f"  {len(missing)} elements not found: {repr(missing)}")
            raise ValueError

        # Remove elements and associated parameter values
        remove = spec["remove"].set[set_name]
        for element in remove:
            msg = f"{repr(element)} and associated parameter elements"

            if options.get("fast", False):
                log.info(f"  Skip removing {msg} (fast=True)")
                continue

            log.info(f"  Remove {msg}")
            strip_par_data(scenario, set_name, element, dry_run=dry_run, dump=dump)

        # Add elements
        add = [] if dry_run else spec["add"].set[set_name]
        for element in add:
            scenario.add_set(
                set_name,
                element.id if isinstance(element, Code) else element,
            )

        if len(add):
            log.info(f"  Add {len(add)} element(s)")
            log.debug("  " + ellipsize(add))

        log.info("  ---")

    N_removed = sum(len(d) for d in dump.values())
    log.info(f"{N_removed} parameter elements removed")

    # Add units to the Platform before adding data
    for unit in spec["add"].set["unit"]:
        unit = unit if isinstance(unit, Code) else Code(id=unit, name=unit)
        log.info(f"Add unit {repr(unit)}")
        scenario.platform.add_unit(unit.id, comment=str(unit.name))

    # Add data
    if callable(data):
        result = data(scenario, dry_run=dry_run)
        if result:
            # `data` function returned some data; use add_par_data()
            add_par_data(scenario, result, dry_run=dry_run)

    # Finalize
    log.info("Commit results.")
    maybe_commit(
        scenario,
        condition=not dry_run,
        message=options.get("message", f"{__name__}.apply_spec()"),
    )
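A minimal sketch of the `spec` argument, with hypothetical element names, matching the structure the docstring describes: ScenarioInfo objects keyed 'require', 'remove', and 'add' (assuming ScenarioInfo is importable from message_ix_models, as in the signature above).

from message_ix_models import ScenarioInfo

spec = dict(require=ScenarioInfo(), remove=ScenarioInfo(), add=ScenarioInfo())
spec["require"].set["technology"] = ["coal_ppl"]  # must already exist in the scenario
spec["remove"].set["technology"] = ["oil_ppl"]    # removed along with its parameter data
spec["add"].set["technology"] = ["solar_pv"]      # added to the scenario
spec["add"].set["unit"] = ["GWa"]                 # added to the Platform before data

apply_spec(scenario, spec, message="Apply example spec")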