Example #1
0
def config(db_config_path=None, default_dbprops_file=None):
    """Update configuration file with new values"""
    updates = {}

    if db_config_path:
        updates['DB_CONFIG_PATH'] = os.path.abspath(
            os.path.expanduser(db_config_path))

    if default_dbprops_file:
        updates['DEFAULT_DBPROPS_FILE'] = os.path.abspath(
            os.path.expanduser(default_dbprops_file))

    # Merge into any existing configuration so unrelated keys survive
    if os.path.exists(CONFIG_PATH):
        with open(CONFIG_PATH, mode='r') as f:
            existing = json.load(f)
        existing.update(updates)
        updates = existing

    # Only touch the filesystem when there is something to write
    if updates:
        dirname = os.path.dirname(CONFIG_PATH)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        with open(CONFIG_PATH, mode='w') as f:
            logger().info('Updating configuration file: {}'.format(CONFIG_PATH))
            json.dump(updates, f)
Example #2
0
def config(db_config_path=None, default_dbprops_file=None):
    """Update configuration file with new values"""
    new_values = {}

    # Normalize each provided path and record it under its config key
    for key, path in (('DB_CONFIG_PATH', db_config_path),
                      ('DEFAULT_DBPROPS_FILE', default_dbprops_file)):
        if path:
            new_values[key] = os.path.abspath(os.path.expanduser(path))

    # Preserve keys already stored on disk; new values take precedence
    if os.path.exists(CONFIG_PATH):
        with open(CONFIG_PATH, mode='r') as f:
            merged = json.load(f)
        merged.update(new_values)
        new_values = merged

    if new_values:
        dirname = os.path.dirname(CONFIG_PATH)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        with open(CONFIG_PATH, mode='w') as f:
            logger().info('Updating configuration file: {}'.format(CONFIG_PATH))
            json.dump(new_values, f)
Example #3
0
def do_config(model_path=None, overwrite=False):
    """Write the configuration file, optionally installing the model files.

    If `model_path` is given, the default model files are copied there
    (skipping ``gdx`` outputs) and the path is stored under ``MODEL_PATH``.
    """
    config = {}

    # Ensure the directory that holds the config file exists
    config_dir = os.path.dirname(CONFIG_PATH)
    if not os.path.exists(config_dir):
        os.makedirs(config_dir)

    # Install model files and remember their location
    if model_path:
        model_path = os.path.abspath(os.path.expanduser(model_path))
        if not os.path.exists(model_path):
            logger().info('Creating model directory: {}'.format(model_path))
            os.makedirs(model_path)
        recursive_copy(DEFAULT_MODEL_PATH, model_path,
                       overwrite=overwrite, skip_ext=['gdx'])
        config['MODEL_PATH'] = model_path

    # Merge with any configuration already on disk; new values win
    if os.path.exists(CONFIG_PATH):
        with open(CONFIG_PATH, mode='r') as f:
            on_disk = json.load(f)
        on_disk.update(config)
        config = on_disk

    # Write the (possibly merged) configuration
    if config:
        with open(CONFIG_PATH, mode='w') as f:
            logger().info(
                'Updating configuration file: {}'.format(CONFIG_PATH))
            json.dump(config, f)
Example #4
0
def do_dl(tag=None, branch=None, repo_path=None, local_path='.'):
    """Download `repo_path` from the message_ix GitHub archive.

    Parameters
    ----------
    tag : str, optional
        release tag to download; mutually exclusive with `branch`. If
        neither is given, the installed ``message_ix`` version is used.
    branch : str, optional
        branch name to download instead of a release tag.
    repo_path : str
        path within the repository archive to copy.
    local_path : str, optional
        destination directory (default: current directory).

    Raises
    ------
    ValueError
        if both `tag` and `branch` are given.
    """
    if tag is not None and branch is not None:
        raise ValueError('Can only provide one of `tag` and `branch`')
    if tag is None and branch is None:
        tag = '{}'.format(message_ix.__version__)

    # Release archives are tagged 'v<version>'; branch archives use the name
    zipname = '{}.zip'.format(branch or 'v' + tag)
    url = 'https://github.com/iiasa/message_ix/archive/{}'.format(zipname)

    tmp = tempdir_name()
    os.makedirs(tmp)
    try:
        logger().info('Retrieving {}'.format(url))
        dst = os.path.join(tmp, zipname)
        urlretrieve(url, dst)

        archive = zipfile.ZipFile(dst)
        logger().info('Unzipping {} to {}'.format(dst, tmp))
        archive.extractall(tmp)

        if not os.path.exists(local_path):
            os.makedirs(local_path)

        cpfrom = '{}/message_ix-{}/{}'.format(tmp, branch or tag, repo_path)
        cpto = '{}/{}'.format(local_path, repo_path)
        logger().info('Copying {} to {}'.format(cpfrom, cpto))
        recursive_copy(cpfrom, cpto, overwrite=True)
    except Exception as e:
        # BUG FIX: previously any download/unzip/copy failure was logged with
        # a misleading "Could not delete" message; report the real failure.
        logger().info('Could not retrieve {} because {}'.format(url, e))
    finally:
        # BUG FIX: the temporary directory was only removed on success,
        # leaking it whenever an earlier step raised. Always clean up.
        try:
            shutil.rmtree(tmp)
        except Exception as e:
            logger().info("Could not delete {} because {}".format(tmp, e))
Example #5
0
    def load_scenario_data(self):
        """Completely load a scenario into cached memory"""
        if not self._cache:
            raise ValueError('Cache must be enabled to load scenario data')

        # For each ix_type: a function listing its items and the getter
        # whose call populates the cache.
        for ix_type, list_items, fetch in [
                ('set', self.set_list, self.set),
                ('par', self.par_list, self.par),
                ('var', self.var_list, self.var),
                ('equ', self.equ_list, self.equ)]:
            logger().info('Caching {} data'.format(ix_type))
            for name in list_items():
                fetch(name)
Example #6
0
def recursive_copy(src, dst, overwrite=False, skip_ext=None):
    """Copy `src` to `dst` recursively.

    Parameters
    ----------
    src : str
        source directory
    dst : str
        destination directory; subdirectories are created as needed
    overwrite : bool, optional
        overwrite existing files in `dst`; if False (default) existing
        files are skipped with a log message
    skip_ext : list of str, optional
        file extensions to skip entirely
    """
    # BUG FIX: mutable default argument (skip_ext=[]) replaced with a
    # None sentinel so the default cannot be shared/mutated across calls.
    skip_ext = [] if skip_ext is None else skip_ext
    for root, dirs, files in os.walk(src):
        # NOTE(review): os.path.splitext() keeps the leading '.', so entries
        # in skip_ext only match when given as e.g. '.gdx' — a caller above
        # passes ['gdx'], which never matches; confirm intended form.
        for f in [f for f in files if os.path.splitext(f)[1] not in skip_ext]:
            rel_path = root.replace(src, '').lstrip(os.sep)
            dst_path = os.path.join(dst, rel_path)

            if not os.path.isdir(dst_path):
                os.makedirs(dst_path)

            fromf = os.path.join(root, f)
            tof = os.path.join(dst_path, f)
            exists = os.path.exists(tof)
            if exists and not overwrite:
                logger().info('{} exists, will not overwrite'.format(tof))
            else:
                logger().info('Writing to {} (overwrite is {})'.format(
                    tof, 'ON' if overwrite else 'OFF'))
                shutil.copyfile(fromf, tof)
Example #7
0
    def save(self):
        """Write configuration keys to file.

        `config.json` is created in the first of the ixmp configuration
        directories that exists. Only non-null values are written.
        """
        # The first identifiable configuration directory wins
        _, config_dir = next(self._iter_paths())
        path = config_dir / 'config.json'

        # TODO merge with existing configuration

        # Create the containing directory if it is missing
        path.parent.mkdir(parents=True, exist_ok=True)

        logger().info('Updating configuration file: {}'.format(path))
        # Keep only keys whose value is set; stringify values for JSON
        non_null = {key: str(self.values[key])
                    for key in self._keys
                    if self.values[key] is not None}
        # str() here is for py2 compatibility
        with open(str(path), 'w') as f:
            json.dump(non_null, f)
Example #8
0
    def __init__(self, dbprops=None, dbtype=None, jvmargs=None):
        """Launch the ixmp Platform on the JVM.

        Parameters
        ----------
        dbprops : str, optional
            path to a database properties file (when `dbtype` is None) or
            the local database path (when `dbtype` is 'HSQLDB').
        dbtype : str, optional
            None (use a properties file) or 'HSQLDB' (local database).
        jvmargs : optional
            arguments forwarded to start_jvm().

        Raises
        ------
        ValueError
            if `dbtype` is neither None nor 'HSQLDB'.
        """
        start_jvm(jvmargs)
        self.dbtype = dbtype

        try:
            # if no dbtype is specified, launch Platform with properties file
            if dbtype is None:
                dbprops = default_dbprops_file() if dbprops is None \
                    else find_dbprops(dbprops)
                logger().info(
                    "launching ixmp.Platform using config file at '{}'".format(
                        dbprops))
                self._jobj = java.ixmp.Platform("Python", dbprops)
            # if dbtype is specified, launch Platform with local database
            elif dbtype == 'HSQLDB':
                dbprops = dbprops or DEFAULT_LOCAL_DB_PATH
                logger().info(
                    "launching ixmp.Platform with local {} database at '{}'".
                    format(dbtype, dbprops))
                self._jobj = java.ixmp.Platform("Python", dbprops, dbtype)
            else:
                raise ValueError('Unknown dbtype: {}'.format(dbtype))
        except TypeError:
            # BUG FIX: the concatenated message was missing the separating
            # spaces ("…Platform.Make sure…", "…ixmp.jarare included…").
            msg = ("Could not launch the JVM for the ixmp.Platform. "
                   "Make sure that all dependencies of ixmp.jar "
                   "are included in the 'ixmp/lib' folder.")
            logger().info(msg)
            raise
Example #9
0
    def set_log_level(self, level):
        """Set global logger level (for both Python and Java)

        Parameters
        ----------
        level : str
            Python logging level name ('CRITICAL', 'ERROR', 'WARNING',
            'INFO', 'DEBUG' or 'NOTSET'), see
            https://docs.python.org/3/library/logging.html#logging-levels

        Raises
        ------
        ValueError
            if `level` is not a valid Python logging level name.
        """
        # NOTE(review): 'CRITICAL' -> 'ALL' and 'NOTSET' -> 'OFF' appear
        # inverted w.r.t. log4j semantics (ALL is most verbose, OFF mutes
        # everything); kept as-is pending confirmation with the Java side.
        py_to_java = {
            'CRITICAL': 'ALL',
            'ERROR': 'ERROR',
            'WARNING': 'WARN',
            'INFO': 'INFO',
            'DEBUG': 'DEBUG',
            'NOTSET': 'OFF',
        }
        if level not in py_to_java:
            # BUG FIX: the URL fragment was '#logging-level', which does not
            # exist on the page; the correct anchor is '#logging-levels'.
            msg = ('{} not a valid Python logger level, see '
                   'https://docs.python.org/3/library/logging.html'
                   '#logging-levels')
            raise ValueError(msg.format(level))
        logger().setLevel(level)
        self._jobj.setLogLevel(py_to_java[level])
Example #10
0
    def read_excel(self, fname, add_units=False, commit_steps=False):
        """Read Excel file data and load into the scenario.

        The workbook must contain a sheet named ``ix_type_mapping`` with
        columns ``item`` and ``ix_type``; every other non-empty sheet is
        loaded as the set ('set') or parameter ('par') it is mapped to.

        Parameters
        ----------
        fname : string
            path to file
        add_units : bool
            add missing units, if any,  to the platform instance.
            default: False
        commit_steps : bool
            commit changes after every data addition.
            default: False
        """
        # dispatch table: ix_type -> method that loads one item
        funcs = {
            'set': self.add_set,
            'par': self.add_par,
        }

        logger().info('Reading data from {}'.format(fname))
        # sheet_name=None -> dict of {sheet name: DataFrame} for all sheets
        dfs = pd_read(fname, sheet_name=None)

        # get item-type mapping
        df = dfs['ix_type_mapping']
        ix_types = dict(zip(df['item'], df['ix_type']))

        # fill in necessary items first (only sets for now)
        col = 0  # special case for prefill set Series

        # A "prefill" sheet has exactly one column whose header is the
        # integer 0, i.e. a bare list of elements loaded before the rest.
        def is_prefill(x):
            return dfs[x].columns[0] == col and len(dfs[x].columns) == 1

        prefill = [x for x in dfs if is_prefill(x)]
        for name in prefill:
            data = list(dfs[name][col])
            if len(data) > 0:
                ix_type = ix_types[name]
                logger().info('Loading data for {}'.format(name))
                funcs[ix_type](name, data)
        if commit_steps:
            # commit the prefilled items, then reopen for further edits
            self.commit('Loaded initial data from {}'.format(fname))
            self.check_out()

        # fill all other pars and sets, skipping those already done
        skip_sheets = ['ix_type_mapping'] + prefill
        for sheet_name, df in dfs.items():
            if sheet_name not in skip_sheets and not df.empty:
                logger().info('Loading data for {}'.format(sheet_name))
                if add_units and 'unit' in df.columns:
                    # add missing units
                    units = set(self.platform.units())
                    missing = set(df['unit'].unique()) - units
                    for unit in missing:
                        logger().info('Adding missing unit: {}'.format(unit))
                        self.platform.add_unit(unit)
                # load data
                ix_type = ix_types[sheet_name]
                funcs[ix_type](sheet_name, df)
                if commit_steps:
                    self.commit('Loaded {} from {}'.format(sheet_name, fname))
                    self.check_out()
Example #11
0
def test_logger_deprecated():
    """Calling utils.logger() must emit a DeprecationWarning."""
    with pytest.warns(DeprecationWarning):
        utils.logger()
Example #12
0
    def clone(self, model=None, scenario=None, annotation=None,
              keep_solution=True, shift_first_model_year=None,
              platform=None):
        """Clone the current scenario and return the clone.

        If the (`model`, `scenario`) given already exist on the
        :class:`Platform`, the `version` for the cloned Scenario follows the
        last existing version. Otherwise, the `version` for the cloned Scenario
        is 1.

        .. note::
            :meth:`clone` does not set or alter default versions. This means
            that a clone to new (`model`, `scenario`) names has no default
            version, and will not be returned by
            :meth:`Platform.scenario_list` unless `default=False` is given.

        Parameters
        ----------
        model : str, optional
            New model name. If not given, use the existing model name.
        scenario : str, optional
            New scenario name. If not given, use the existing scenario name.
        annotation : str, optional
            Explanatory comment for the clone commit message to the database.
        keep_solution : bool, default True
            If :py:const:`True`, include all timeseries data and the solution
            (vars and equs) from the source scenario in the clone.
            Otherwise, only timeseries data marked as `meta=True` (see
            :meth:`TimeSeries.add_timeseries`) or prior to `first_model_year`
            (see :meth:`TimeSeries.add_timeseries`) are cloned.
        shift_first_model_year: int, optional
            If given, the values of the solution are transfered to parameters
            `historical_*`, parameter `resource_volume` is updated, and the
            `first_model_year` is shifted. The solution is then discarded,
            see :meth:`TimeSeries.remove_solution`.
        platform : :class:`Platform`, optional
            Platform to clone to (default: current platform)
        """
        # Cross-platform clones support neither dropping the solution nor
        # shifting the model horizon.
        cross_platform_err = 'Cloning across platforms is only possible {}'
        if platform is not None and not keep_solution:
            raise ValueError(
                cross_platform_err.format('with `keep_solution=True`!'))
        if platform is not None and shift_first_model_year is not None:
            raise ValueError(
                cross_platform_err.format('without shifting model horizon!'))

        if shift_first_model_year is not None:
            # Shifting the horizon implies discarding the solution
            keep_solution = False
            logger().info(
                'Shifting first model year to {} and removing solution'
                .format(shift_first_model_year))

        # Fall back to the current platform/model/scenario names
        platform = platform or self.platform
        model = model or self.model
        scenario = scenario or self.scenario

        clone_args = [platform._jobj, model, scenario, annotation,
                      keep_solution]
        if check_year(shift_first_model_year, 'shift_first_model_year'):
            clone_args.append(shift_first_model_year)

        return Scenario(platform, model, scenario, cache=self._cache,
                        version=self._jobj.clone(*clone_args))