Example #1
def main(output_folder, narrow_folder):
    """Read in a folder of irregular wide-format files and write as narrow csvs
    """
    config = read_packaged_file("config.yaml", "otoole.preprocess")

    for parameter, details in config.items():
        logger.info("Looking for %s", parameter)

        filepath = os.path.join(output_folder, parameter + ".csv")

        try:
            df = pd.read_csv(filepath)
        except pd.errors.EmptyDataError:
            # Build an empty frame with the expected columns so the
            # downstream checks still receive a valid dataframe
            logger.error("No data found in file for %s", parameter)
            expected_columns = details["indices"]
            default_columns = expected_columns + ["VALUE"]
            df = pd.DataFrame(columns=default_columns)

        entity_type = details["type"]

        if entity_type == "param":
            narrow = check_parameter(df, details, parameter)
            if not narrow.empty:
                narrow_checked = check_datatypes(narrow, config, parameter)
            else:
                narrow_checked = narrow
        elif entity_type == "set":
            narrow = check_set(df, details, parameter)
            if not narrow.empty:
                narrow_checked = check_set_datatype(narrow, config, parameter)
            else:
                narrow_checked = narrow
        else:
            # Guard against config entries that are neither "param" nor "set";
            # without this, narrow_checked could be stale or undefined
            logger.warning("Skipping %s: unknown type %s", parameter, entity_type)
            continue

        write_out_dataframe(narrow_folder, parameter, narrow_checked)
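A minimal driver for the function above could look like the sketch below; the folder names and the logging setup are illustrative assumptions rather than part of the original module.

import logging

logging.basicConfig(level=logging.INFO)

if __name__ == "__main__":
    # Hypothetical layout: wide-format CSVs live in "data_wide" and the
    # narrow-format CSVs are written to "data_narrow"
    main("data_wide", "data_narrow")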
Example #2
def read_config(path_to_user_config: Optional[str] = None) -> Dict:
    """Reads the config file holding expected OSeMOSYS set and parameter dimensions

    Arguments
    ---------
    path_to_user_config : str, optional, default=None
        Optional path to a user defined configuration file

    Returns
    -------
    dict
    """
    if path_to_user_config:
        config = read_packaged_file(path_to_user_config, None)
    else:
        config = read_packaged_file("config.yaml", "otoole.preprocess")
    return config
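Judging from the other examples, each entry of the returned dictionary carries a "type" key and, for parameters, "indices" and a "default" value; a usage sketch under that assumption:

config = read_config()  # no argument: falls back to the packaged otoole.preprocess config.yaml

for name, details in config.items():
    if details["type"] == "param":
        # "indices" and "default" are the keys used elsewhere in these examples
        print(name, details["indices"], details["default"])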
Example #3
def validate_model(args):
    """Validates the model file ``args.filepath`` in the format given by ``args.format``
    """
    file_format = args.format

    if args.config:
        config = read_packaged_file(args.config)
        validate(file_format, args.filepath, config)
    else:
        validate(file_format, args.filepath)
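validate_model only needs an object exposing format, filepath and config attributes, such as an argparse.Namespace; the file name and format string below are placeholders:

from argparse import Namespace

args = Namespace(format="datafile", filepath="simplicity.txt", config=None)
validate_model(args)  # without a config, validate() is called with only format and filepath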
Example #4
    def __init__(self, datapackage: str, datafilepath: str, sql: bool = False):

        self.datapackage = datapackage
        self.datafilepath = datafilepath
        self.sql = sql
        # Load the datapackage, its default values and the packaged
        # OSeMOSYS configuration once, up front
        self.package = self._get_package()
        self.default_values = self._get_default_values()
        self.config = read_packaged_file("config.yaml", "otoole.preprocess")
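The class owning this constructor is not shown here; instantiating it might look like the sketch below, where SomeConverter is a stand-in for the real class name and the paths are placeholders.

converter = SomeConverter(                       # SomeConverter: hypothetical stand-in name
    datapackage="simplicity/datapackage.json",   # placeholder datapackage path
    datafilepath="simplicity.txt",               # placeholder datafile path
    sql=False,
)
print(converter.default_values)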
Example #5
def write_default_values(filepath):
    """Writes ``data/default_values.csv`` listing the default value of each parameter
    """
    config = read_packaged_file("config.yaml", "otoole.preprocess")

    default_values_path = os.path.join(filepath, "data", "default_values.csv")
    with open(default_values_path, "w") as csv_file:
        csv_file.write("name,default_value\n")

        for name, contents in config.items():
            if contents["type"] == "param":
                csv_file.write("{},{}\n".format(name, contents["default"]))
Example #6
def convert_file_to_package(path_to_datafile: str, path_to_datapackage: str):
    """Converts an OSeMOSYS datafile to a Tabular Data Package

    Arguments
    ---------
    path_to_datafile: str
        Path to the OSeMOSYS datafile to convert
    path_to_datapackage: str
        Path to the folder in which to write the new Tabular Data Package

    """
    config = read_packaged_file("config.yaml", "otoole.preprocess")
    amply_datafile = read_in_datafile(path_to_datafile, config)
    dict_of_dataframes = convert_amply_to_dataframe(amply_datafile, config)
    if not os.path.exists(path_to_datapackage):
        os.mkdir(path_to_datapackage)
    for name, df in dict_of_dataframes.items():
        write_out_dataframe(path_to_datapackage, name, df)
    datapackage = read_packaged_file("datapackage.json", "otoole.preprocess")
    filepath = os.path.join(path_to_datapackage, "datapackage.json")
    with open(filepath, "w") as destination:
        destination.writelines(datapackage)
    write_default_values(path_to_datapackage)
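A typical invocation, with placeholder paths; the target folder is created if it does not already exist:

# Reads the amply-formatted datafile and writes one CSV per set and parameter,
# plus datapackage.json and default_values.csv, into the target folder
convert_file_to_package("simplicity.txt", "simplicity_datapackage")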
Example #7
def generate_package(path_to_package):
    """Creates a datapackage in folder ``path_to_package``

    Foreign keys added to each parameter resource take the form::

        [{'fields': 'REGION', 'reference': {'resource': 'REGION', 'fields': 'VALUE'}}]
    """

    package = Package(base_path=path_to_package)

    package.infer("data/*.csv")

    package.descriptor["licenses"] = [{
        "name":
        "CC-BY-4.0",
        "path":
        "https://creativecommons.org/licenses/by/4.0/",
        "title":
        "Creative Commons Attribution 4.0",
    }]

    package.descriptor["title"] = "The OSeMOSYS Simplicity Example Model"

    package.descriptor["name"] = "osemosys_model_simplicity"

    package.descriptor["contributors"] = [{
        "title": "Will Usher",
        "email": "*****@*****.**",
        "path": "http://www.kth.se/wusher",
        "role": "author",
    }]

    package.commit()

    config = read_packaged_file("config.yaml", "otoole.preprocess")

    new_resources = []
    for resource in package.resources:

        descriptor = resource.descriptor

        name = resource.name
        if config[name]["type"] == "param":

            indices = config[name]["indices"]
            logger.debug("Indices of %s are %s", name, indices)

            foreign_keys = []
            for index in indices:
                key = {
                    "fields": index,
                    "reference": {
                        "resource": index,
                        "fields": "VALUE"
                    },
                }
                foreign_keys.append(key)

            descriptor["schema"]["foreignKeys"] = foreign_keys
            descriptor["schema"]["primaryKey"] = indices
            descriptor["schema"]["missingValues"] = [""]

        new_resources.append(descriptor)

    package.descriptor["resources"] = new_resources
    package.commit()

    filepath = os.path.join(path_to_package, "datapackage.json")
    package.save(filepath)
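The function expects the CSV resources to already sit under data/ inside the package folder; a sketch with a placeholder path:

# Infers resource schemas from data/*.csv, then adds licence and metadata,
# primary and foreign keys, and writes datapackage.json into the folder
generate_package("simplicity")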
Example #8
def read_validation_config():
    """Reads the validation rules packaged with otoole in ``validate.yaml``
    """
    return read_packaged_file("validate.yaml", "otoole")