Example #1
    def test_exposure_string_is_provided___file_content_is_loaded(self, data):
        columns = ['first', 'second']

        exposure_str = _unicode(pd.DataFrame(columns=columns, data=data).to_csv(index=False))

        res_str = olf.get_exposure(source_exposure=exposure_str).to_csv(index=False)

        self.assertEqual(exposure_str, res_str)
Example #2
def safe_decode(s, encoding=None):
    '''Similar to the bytes `decode` method, but returns unicode.

    Decodes `s` using the given `encoding`, or one determined from the system.

    The return type depends on the Python version: `unicode` in 2.x, `str` in 3.x.

    .. versionadded:: 1.1.3

    '''
    if isinstance(s, _unicode):
        return s
    else:
        encoding = force_encoding(encoding)
        try:
            # In Python 3, str(b'm') returns the string "b'm'" rather than "m";
            # decoding with an explicit encoding avoids that.
            return _unicode(s, encoding, 'replace')
        except Exception:
            # For numbers and other non-decodable objects.
            return _unicode(s)
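
A minimal usage sketch of `safe_decode` follows; the byte string, encoding and expected results here are illustrative assumptions, not part of the original example.

# Illustrative sketch (assumed inputs): decode UTF-8 bytes to unicode text.
raw = b'caf\xc3\xa9'
assert safe_decode(raw, encoding='utf-8') == u'café'

# Non-bytes input (e.g. a number) falls back to _unicode(s) and is stringified.
assert safe_decode(42) == u'42'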
Example #3
    def test_file_is_provided___file_content_is_loaded(self, data):
        columns = ['first', 'second']

        exposure_str = _unicode(pd.DataFrame(columns=columns, data=data).to_csv(index=False))

        with NamedTemporaryFile('w') as f:
            f.writelines(exposure_str)
            f.flush()

            res_str = olf.get_exposure(source_exposure_fp=f.name).to_csv(index=False)

            self.assertEqual(exposure_str, res_str)
Example #4
def safe_encode(u, encoding=None):
    '''Similar to the unicode `encode` method, but returns bytes.

    Encodes `u` using the given `encoding`, or one determined from the system.

    The return type is always `bytes`, which in Python 2.x is also `str`.

    .. versionadded:: 1.1.3

    '''
    if isinstance(u, bytes):
        return u
    else:
        encoding = force_encoding(encoding)
        try:
            if isinstance(u, _str_base):
                # In Python 2.x `bytes` does not accept an encoding argument.
                return bytes(u)
            else:
                return _unicode(u).encode(encoding, 'replace')
        except Exception:
            return _unicode(u).encode(encoding, 'replace')
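
Similarly, a minimal usage sketch of `safe_encode`, again with assumed inputs and expected results.

# Illustrative sketch (assumed inputs): encode unicode text to bytes.
assert safe_encode(u'café', encoding='utf-8') == b'caf\xc3\xa9'

# bytes input is returned unchanged.
assert safe_encode(b'abc') == b'abc'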
Example #5
    def test_file_is_provided___file_content_is_loaded(self, data):
        columns = ['first', 'second']

        exposure_str = _unicode(
            pd.DataFrame(columns=columns, data=data).to_csv(index=False))

        f = NamedTemporaryFile('w', delete=False)

        try:
            f.writelines(exposure_str)
            f.close()

            res_str = olf.get_exposure(source_exposure_fp=f.name).to_csv(
                index=False)

            self.assertEqual(exposure_str, res_str)
        finally:
            os.remove(f.name)
Example #6
    def generate_oasis_files(
        self,
        target_dir,
        exposure_fp,
        exposure_profile=None,
        exposure_profile_fp=None,
        keys_fp=None,
        lookup_config=None,
        lookup_config_fp=None,
        keys_data_fp=None,
        model_version_fp=None,
        lookup_package_fp=None,
        supported_oed_coverage_types=None,
        accounts_fp=None,
        accounts_profile=None,
        accounts_profile_fp=None,
        fm_aggregation_profile=None,
        fm_aggregation_profile_fp=None,
        ri_info_fp=None,
        ri_scope_fp=None,
        oasis_files_prefixes=None
    ):
        # Check whether the invocation indicates a deterministic or model
        # analysis/run - the CLI supports deterministic analyses via the
        # command `oasislmf exposure run`, which requires a pre-existing
        # input files directory, usually the same as the analysis/output
        # directory
        deterministic = not (keys_fp or (lookup_config or lookup_config_fp) or (keys_data_fp and model_version_fp and lookup_package_fp))

        # Prepare the target directory and copy the source files, profiles and
        # model version file into it
        target_dir = prepare_input_files_directory(
            target_dir,
            exposure_fp,
            exposure_profile_fp=exposure_profile_fp,
            keys_fp=keys_fp,
            lookup_config_fp=lookup_config_fp,
            model_version_fp=model_version_fp,
            accounts_fp=accounts_fp,
            accounts_profile_fp=accounts_profile_fp,
            fm_aggregation_profile_fp=fm_aggregation_profile_fp,
            ri_info_fp=ri_info_fp,
            ri_scope_fp=ri_scope_fp
        )

        # Get the profiles defining the exposure and accounts files, ID related
        # terms in these files, and FM aggregation hierarchy
        exposure_profile = exposure_profile or (get_json(src_fp=exposure_profile_fp) if exposure_profile_fp else self.exposure_profile)
        accounts_profile = accounts_profile or (get_json(src_fp=accounts_profile_fp) if accounts_profile_fp else self.accounts_profile)
        id_terms = unified_id_terms(profiles=(exposure_profile, accounts_profile,))
        loc_id = id_terms['locid']
        acc_id = id_terms['accid']
        portfolio_num = id_terms['portid']
        fm_aggregation_profile = (
            fm_aggregation_profile or
            ({int(k): v for k, v in viewitems(get_json(src_fp=fm_aggregation_profile_fp))} if fm_aggregation_profile_fp else {}) or
            self.fm_aggregation_profile
        )

        # If a pre-generated keys file path has not been provided, then it is
        # assumed that some model lookup assets have been provided, so that
        # the lookup can be instantiated and called to generate the keys file.
        # Otherwise, if neither a model keys file path nor lookup assets were
        # provided, a "deterministic" keys file is generated.
        _keys_fp = _keys_errors_fp = None
        if not keys_fp:
            _keys_fp = os.path.join(target_dir, 'keys.csv')
            _keys_errors_fp = os.path.join(target_dir, 'keys-errors.csv')

            cov_types = supported_oed_coverage_types or self.supported_oed_coverage_types

            if deterministic:
                loc_numbers = (loc_num[loc_id] for _, loc_num in get_dataframe(
                    src_fp=exposure_fp,
                    col_dtypes={loc_id: 'str', acc_id: 'str', portfolio_num: 'str'},
                    empty_data_error_msg='No exposure found in the source exposure (loc.) file'
                )[[loc_id]].iterrows())
                keys = [
                    {loc_id: loc_num, 'peril_id': 1, 'coverage_type': cov_type, 'area_peril_id': i + 1, 'vulnerability_id': i + 1}
                    for i, (loc_num, cov_type) in enumerate(product(loc_numbers, cov_types))
                ]
                _, _ = olf.write_oasis_keys_file(keys, _keys_fp)
            else:
                lookup_config = get_json(src_fp=lookup_config_fp) if lookup_config_fp else lookup_config
                if lookup_config:
                    lookup_config['keys_data_path'] = os.path.abspath(os.path.dirname(lookup_config_fp))

                _, lookup = olf.create(
                    lookup_config=lookup_config,
                    model_keys_data_path=keys_data_fp,
                    model_version_file_path=model_version_fp,
                    lookup_package_path=lookup_package_fp
                )
                f1, n1, f2, n2 = olf.save_results(
                    lookup,
                    loc_id_col=loc_id,
                    successes_fp=_keys_fp,
                    errors_fp=_keys_errors_fp,
                    source_exposure_fp=exposure_fp
                )
        else:
            _keys_fp = os.path.join(target_dir, os.path.basename(keys_fp))

        # Get the GUL input items and exposure dataframes
        gul_inputs_df, exposure_df = get_gul_input_items(
            exposure_fp, _keys_fp, exposure_profile=exposure_profile
        )

        # Write the GUL input files
        files_prefixes = oasis_files_prefixes or self.oasis_files_prefixes
        gul_input_files = write_gul_input_files(
            gul_inputs_df,
            target_dir,
            oasis_files_prefixes=files_prefixes['gul']
        )

        # If no source accounts file path has been provided, assume that IL
        # input files, and therefore also RI input files, are not needed
        if not accounts_fp:
            return gul_input_files

        # Get the IL input items
        il_inputs_df, _ = get_il_input_items(
            exposure_df,
            gul_inputs_df,
            accounts_fp=accounts_fp,
            exposure_profile=exposure_profile,
            accounts_profile=accounts_profile,
            fm_aggregation_profile=fm_aggregation_profile
        )

        # Write the IL/FM input files
        il_input_files = write_il_input_files(
            il_inputs_df,
            target_dir,
            oasis_files_prefixes=files_prefixes['il']
        )

        # Combine the GUL and IL input file paths into a single dict (for convenience)
        oasis_files = {k: v for k, v in chain(gul_input_files.items(), il_input_files.items())}

        # If no RI input file paths (info. and scope) have been provided then
        # no RI input files are needed; just return the GUL and IL Oasis files
        if not (ri_info_fp or ri_scope_fp):
            return oasis_files

        # Write the RI input files, and write the returned RI layer info. as a
        # file, which can be reused by the model runner (in the model execution
        # stage) to set the number of RI iterations
        ri_layers = write_ri_input_files(
            exposure_fp,
            accounts_fp,
            oasis_files['items'],
            oasis_files['coverages'],
            oasis_files['gulsummaryxref'],
            oasis_files['fm_xref'],
            oasis_files['fmsummaryxref'],
            ri_info_fp,
            ri_scope_fp,
            target_dir
        )
        with io_open(os.path.join(target_dir, 'ri_layers.json'), 'w', encoding='utf-8') as f:
            f.write(_unicode(json.dumps(ri_layers, ensure_ascii=False, indent=4)))
            oasis_files['ri_layers'] = os.path.abspath(f.name)
            for layer, layer_info in viewitems(ri_layers):
                oasis_files['RI_{}'.format(layer)] = layer_info['directory']

        return oasis_files
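
A hedged sketch of how `generate_oasis_files` might be invoked; `manager` stands in for an instance of the containing class, and all file paths are placeholder assumptions.

# Hypothetical invocation; `manager` and the file paths are assumptions.
oasis_files = manager.generate_oasis_files(
    target_dir='/tmp/oasis-files',
    exposure_fp='location.csv',
    lookup_config_fp='lookup_config.json',
    accounts_fp='account.csv'
)
print(oasis_files['items'])   # path to the generated Oasis items file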