Example #1
0
def do_config(model_path=None, overwrite=False):
    """Create or update the user configuration file at ``CONFIG_PATH``.

    Parameters
    ----------
    model_path : str, optional
        If given, expanded to an absolute path, created if missing, seeded
        with the contents of ``DEFAULT_MODEL_PATH``, and recorded in the
        config under the key ``'MODEL_PATH'``.
    overwrite : bool
        Passed through to :func:`recursive_copy`; whether to overwrite
        files that already exist in *model_path*.
    """
    config = {}

    # make directory for config if it doesn't exist; exist_ok avoids the
    # race between an exists() check and the creation
    os.makedirs(os.path.dirname(CONFIG_PATH), exist_ok=True)

    # update default path to model directory
    if model_path:
        model_path = os.path.abspath(os.path.expanduser(model_path))
        if not os.path.exists(model_path):
            logger().info('Creating model directory: {}'.format(model_path))
        os.makedirs(model_path, exist_ok=True)
        recursive_copy(DEFAULT_MODEL_PATH, model_path, overwrite=overwrite)
        config['MODEL_PATH'] = model_path

    # merge into an existing config, new values taking precedence
    if os.path.exists(CONFIG_PATH):
        with open(CONFIG_PATH, mode='r') as f:
            data = json.load(f)
        data.update(config)
        config = data

    # write new config (skip the write when there is nothing to record)
    if config:
        logger().info('Updating configuration file: {}'.format(CONFIG_PATH))
        with open(CONFIG_PATH, mode='w') as f:
            json.dump(config, f)
Example #2
0
def do_dl(tag=None, branch=None, repo_path=None, local_path='.'):
    """Download *repo_path* from the message_ix GitHub archive.

    Exactly one of *tag* or *branch* may be given; with neither, the tag
    matching the installed ``message_ix.__version__`` is used. The archive
    is fetched into a temporary directory, unzipped, and *repo_path* is
    copied (overwriting) into *local_path*.

    Raises
    ------
    ValueError
        If both *tag* and *branch* are provided.
    """
    if tag is not None and branch is not None:
        raise ValueError('Can only provide one of `tag` and `branch`')
    if tag is None and branch is None:
        tag = '{}'.format(message_ix.__version__)

    # tags are prefixed 'v' on GitHub; branches are used verbatim
    zipname = '{}.zip'.format(branch or 'v' + tag)
    url = 'https://github.com/iiasa/message_ix/archive/{}'.format(zipname)

    tmp = tempdir_name()
    os.makedirs(tmp)
    try:
        logger().info('Retrieving {}'.format(url))
        dst = os.path.join(tmp, zipname)
        urlretrieve(url, dst)

        # context manager ensures the archive handle is closed
        with zipfile.ZipFile(dst) as archive:
            logger().info('Unzipping {} to {}'.format(dst, tmp))
            archive.extractall(tmp)

        os.makedirs(local_path, exist_ok=True)

        cpfrom = '{}/message_ix-{}/{}'.format(tmp, branch or tag, repo_path)
        cpto = '{}/{}'.format(local_path, repo_path)
        logger().info('Copying {} to {}'.format(cpfrom, cpto))
        recursive_copy(cpfrom, cpto, overwrite=True)
    finally:
        # Cleanup is best-effort; download/copy errors above now propagate
        # instead of being swallowed and misreported as a delete failure
        # (the original except wrapped the whole body).
        try:
            shutil.rmtree(tmp)
        except Exception as e:
            logger().info("Could not delete {} because {}".format(tmp, e))
Example #3
0
def recursive_copy(src, dst, overwrite=False):
    """Copy the file tree rooted at *src* into *dst* recursively.

    Directories are created under *dst* as needed (only for directories
    that contain files, matching the previous behavior). Existing files
    are skipped unless *overwrite* is True.

    Parameters
    ----------
    src : str
        Source directory.
    dst : str
        Destination directory.
    overwrite : bool
        Replace files that already exist at the destination.
    """
    for root, _dirs, files in os.walk(src):
        # Bug fix: the original used root.replace(src, ''), which replaces
        # *every* occurrence of src in the path, not just the leading one;
        # relpath computes the proper prefix-relative path.
        rel_path = os.path.relpath(root, src)
        dst_path = dst if rel_path == os.curdir \
            else os.path.join(dst, rel_path)

        # only create the destination dir when it has files to receive
        # (preserves the original per-file makedirs behavior)
        if files and not os.path.isdir(dst_path):
            os.makedirs(dst_path)

        for f in files:
            fromf = os.path.join(root, f)
            tof = os.path.join(dst_path, f)
            if os.path.exists(tof) and not overwrite:
                logger().info('{} exists, will not overwrite'.format(tof))
            else:
                logger().info('Writing to {} (overwrite is {})'.format(
                    tof, 'ON' if overwrite else 'OFF'))
                shutil.copyfile(fromf, tof)
Example #4
0
def do_config(model_path=None, overwrite=False):
    """Create or update the user configuration file at ``CONFIG_PATH``.

    Parameters
    ----------
    model_path : str, optional
        If given, expanded to an absolute path, created if missing, seeded
        with the contents of ``DEFAULT_MODEL_PATH``, and recorded in the
        config under the key ``'MODEL_PATH'``.
    overwrite : bool
        Passed through to :func:`recursive_copy`; whether to overwrite
        files that already exist in *model_path*.
    """
    config = {}

    if model_path:
        model_path = os.path.abspath(os.path.expanduser(model_path))
        if not os.path.exists(model_path):
            logger().info('Creating model directory: {}'.format(model_path))
        # exist_ok avoids the race between the exists() check and creation
        os.makedirs(model_path, exist_ok=True)
        recursive_copy(DEFAULT_MODEL_PATH, model_path, overwrite=overwrite)
        config['MODEL_PATH'] = model_path

    # merge into an existing config, new values taking precedence
    if os.path.exists(CONFIG_PATH):
        with open(CONFIG_PATH, mode='r') as f:
            data = json.load(f)
        data.update(config)
        config = data

    # write only when there is something to record
    if config:
        logger().info('Updating configuration file: {}'.format(CONFIG_PATH))
        with open(CONFIG_PATH, mode='w') as f:
            json.dump(config, f)
Example #5
0
    def read_excel(self, fname, add_units=False, commit_steps=False):
        """Read Excel file data and load into the scenario.

        Parameters
        ----------
        fname : string
            path to file
        add_units : bool
            add missing units, if any,  to the platform instance.
            default: False
        commit_steps : bool
            commit changes after every data addition.
            default: False
        """
        # dispatch table: sheet ix_type -> loader method
        funcs = {
            'set': self.add_set,
            'par': self.add_par,
        }

        logger().info('Reading data from {}'.format(fname))
        # sheet_name=None loads every sheet, keyed by sheet name
        dfs = pd_read(fname, sheet_name=None)

        # get item-type mapping: the 'ix_type_mapping' sheet pairs each
        # item name with its ix_type ('set' or 'par')
        df = dfs['ix_type_mapping']
        ix_types = dict(zip(df['item'], df['ix_type']))

        # fill in necessary items first (only sets for now)
        col = 0  # special case for prefill set Series

        def is_prefill(x):
            # a prefill sheet is a single unnamed column (header == 0)
            return dfs[x].columns[0] == col and len(dfs[x].columns) == 1

        prefill = [x for x in dfs if is_prefill(x)]
        for name in prefill:
            data = list(dfs[name][col])
            if len(data) > 0:
                ix_type = ix_types[name]
                logger().info('Loading data for {}'.format(name))
                funcs[ix_type](name, data)
        # commit + check_out keeps the scenario editable for the next step
        if commit_steps:
            self.commit('Loaded initial data from {}'.format(fname))
            self.check_out()

        # fill all other pars and sets, skipping those already done
        skip_sheets = ['ix_type_mapping'] + prefill
        for sheet_name, df in dfs.items():
            if sheet_name not in skip_sheets and not df.empty:
                logger().info('Loading data for {}'.format(sheet_name))
                if add_units and 'unit' in df.columns:
                    # add missing units: register any unit in the sheet
                    # that the platform does not yet know about
                    units = set(self.platform.units())
                    missing = set(df['unit'].unique()) - units
                    for unit in missing:
                        logger().info('Adding missing unit: {}'.format(unit))
                        self.platform.add_unit(unit)
                # load data via the ix_type-appropriate method
                ix_type = ix_types[sheet_name]
                funcs[ix_type](sheet_name, df)
                if commit_steps:
                    self.commit('Loaded {} from {}'.format(sheet_name, fname))
                    self.check_out()