Example 1
    def to_exposures(self, dataset, dump_dir=SYSTEM_DIR):
        """Downloads hdf5 files belonging to the given datasets reads them into Exposures and
        concatenates them into a single climada.Exposures object.

        Parameters
        ----------
        dataset : DatasetInfo
            Dataset to download and read into climada.Exposures objects.
        dump_dir : str, optional
            Directory where the files should be downloaded. Default: SYSTEM_DIR (as configured
            in climada.conf, e.g. ~/climada/data).
            If the directory is SYSTEM_DIR, the final target directory is organized as
            dump_dir > exposures_type > dataset name > version.

        Returns
        -------
        climada.entity.exposures.Exposures
            The combined exposures object
        """
        target_dir = self._organize_path(dataset, dump_dir) \
                     if dump_dir == SYSTEM_DIR else dump_dir
        # download every hdf5 file in the dataset and read each one into an Exposures object
        exposures_list = [
            Exposures.from_hdf5(self._download_file(target_dir, dsf))
            for dsf in dataset.files
            if dsf.file_format == 'hdf5'
        ]
        if not exposures_list:
            raise ValueError("no hdf5 files found in dataset")
        if len(exposures_list) == 1:
            return exposures_list[0]
        # combine all Exposures objects into a single one and validate it
        exposures_concat = Exposures()
        exposures_concat = exposures_concat.concat(exposures_list)
        exposures_concat.check()
        return exposures_concat
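
The method above belongs to CLIMADA's data API client (climada.util.api_client.Client). A minimal usage sketch, assuming that client interface; the dataset name is the one used in CLIMADA's tutorials and may not be available on every API instance:

from climada.util.api_client import Client

client = Client()
# look up the dataset's metadata by name (illustrative name, availability not guaranteed)
dataset = client.get_dataset_info(name='LitPop_150arcsec_CHE', status='test_dataset')
# download its hdf5 files and combine them into one Exposures object
exposures = client.to_exposures(dataset)
print(exposures.gdf.head())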
Example 2
# imports needed to run this snippet standalone (module paths per CLIMADA's layout)
import time

from tables.exceptions import HDF5ExtError

from climada.entity import Exposures
from climada.util.constants import EXP_DEMO_H5


def exp_dem(x_exp=1, exp=None):
    while not exp:
        try:
            exp = Exposures.from_hdf5(EXP_DEMO_H5)
        except HDF5ExtError:
            # possibly raised by pd.HDFStore when the file is locked by another process due to multiprocessing
            time.sleep(0.1)
    exp_tmp = exp.copy(deep=True)
    exp_tmp.gdf.value *= x_exp
    return exp_tmp
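
A minimal usage sketch of the helper above; EXP_DEMO_H5 ships with CLIMADA's demo data, and the scaling factor is arbitrary:

# read the demo exposures and scale every value by a factor of 3
exp_scaled = exp_dem(x_exp=3)
print(exp_scaled.gdf.value.sum())

# pass an already loaded Exposures object to skip the (retried) file read
exp_base = exp_dem()
exp_doubled = exp_dem(x_exp=2, exp=exp_base)

The while/try/sleep loop exists because pd.HDFStore raises HDF5ExtError when another process holds a lock on the same file, which can happen when tests read EXP_DEMO_H5 concurrently under multiprocessing; sleeping 0.1 s and retrying lets the reads proceed one after another.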