# Example 1
class OasisLookupFactoryWriteJsonFiles(TestCase):
    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(successes=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS),
                               size=5),
           nonsuccesses=keys_data(from_statuses=sampled_from(
               [KEYS_STATUS_FAIL, KEYS_STATUS_NOMATCH]),
                                  size=5))
    def test_records_are_given___records_are_written_to_json_keys_files_correctly(
            self, successes, nonsuccesses):
        """Keys records written as JSON round-trip back unchanged, and the
        counts reported by ``write_json_keys_file`` equal the number of
        input records — for both success and non-success records."""
        with TemporaryDirectory() as tmp_dir:
            success_path = os.path.join(tmp_dir, 'keys.json')
            error_path = os.path.join(tmp_dir, 'keys-errors.json')

            _, n_successes = OasisLookupFactory.write_json_keys_file(
                successes, success_path)
            _, n_nonsuccesses = OasisLookupFactory.write_json_keys_file(
                nonsuccesses, error_path)

            # Read each file back independently and compare to the inputs.
            with io.open(success_path, 'r', encoding='utf-8') as success_file:
                round_tripped_successes = json.load(success_file)
            with io.open(error_path, 'r', encoding='utf-8') as error_file:
                round_tripped_nonsuccesses = json.load(error_file)

            self.assertEqual(n_successes, len(successes))
            self.assertEqual(round_tripped_successes, successes)

            self.assertEqual(n_nonsuccesses, len(nonsuccesses))
            self.assertEqual(round_tripped_nonsuccesses, nonsuccesses)
# Example 2
class OasisLookupFactoryWriteOasisKeysFiles(TestCase):
    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(successes=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS),
                               size=5),
           nonsuccesses=keys_data(from_statuses=sampled_from(
               [KEYS_STATUS_FAIL, KEYS_STATUS_NOMATCH]),
                                  size=5))
    def test_records_are_given___records_are_written_to_oasis_keys_files_correctly(
            self, successes, nonsuccesses):
        """Records written to the Oasis keys/keys-errors CSV files can be
        read back, mapped from CSV column headers to record field names,
        and every input record is matched by id in the written output."""
        # Maps the CSV column headers emitted by the writers back to the
        # field names used in the generated keys records.
        oasis_keys_file_to_record_metadict = {
            'LocID': 'id',
            'PerilID': 'peril_id',
            'CoverageTypeID': 'coverage_type',
            'AreaPerilID': 'area_peril_id',
            'VulnerabilityID': 'vulnerability_id'
        }
        oasis_keys_errors_file_to_record_metadict = {
            'LocID': 'id',
            'PerilID': 'peril_id',
            'CoverageTypeID': 'coverage_type',
            'Message': 'message'
        }

        with TemporaryDirectory() as d:
            keys_file_path = os.path.join(d, 'keys.csv')
            keys_errors_file_path = os.path.join(d, 'keys-errors.csv')

            _, successes_count = OasisLookupFactory.write_oasis_keys_file(
                successes, keys_file_path)
            _, nonsuccesses_count = OasisLookupFactory.write_oasis_keys_errors_file(
                nonsuccesses, keys_errors_file_path)

            with io.open(keys_file_path, 'r', encoding='utf-8') as f1, io.open(
                    keys_errors_file_path, 'r', encoding='utf-8') as f2:
                written_successes = [
                    {oasis_keys_file_to_record_metadict[k]: r[k] for k in r}
                    for r in pd.read_csv(f1).T.to_dict().values()
                ]
                written_nonsuccesses = [
                    {oasis_keys_errors_file_to_record_metadict[k]: r[k]
                     for k in r}
                    for r in pd.read_csv(f2).T.to_dict().values()
                ]

            # BUG FIX: the original filter lambdas returned a generator
            # expression, which is always truthy, so the filter was a no-op.
            # A record "matches" when some written row shares its id.
            success_matches = [
                r for r in successes
                if any(r['id'] == ws['id'] for ws in written_successes)
            ]
            nonsuccess_matches = [
                r for r in nonsuccesses
                if any(r['id'] == ws['id'] for ws in written_nonsuccesses)
            ]

            self.assertEqual(successes_count, len(successes))
            self.assertEqual(success_matches, successes)

            self.assertEqual(nonsuccesses_count, len(nonsuccesses))
            self.assertEqual(nonsuccess_matches, nonsuccesses)
# Example 3
class OasisExposuresManagerGenerateOasisFiles(FileGenerationTestCase):
    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(
        keys=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS), min_size=10),
        exposures=canonical_exposure_data(10, min_value=1)
    )
    def test_paths_are_stored_in_the_model___model_paths_are_used(self, keys, exposures):
        """Paths stored on the model's files pipeline are used by
        ``generate_oasis_files`` without being passed as keyword args."""
        canonical_profile = {
            'profile_element': {'ProfileElementName': 'profile_element', 'FieldName': 'TIV', 'CoverageTypeID': 1}
        }

        with NamedTemporaryFile('w') as keys_file, \
                NamedTemporaryFile('w') as exposures_file, \
                TemporaryDirectory() as output_dir:
            write_input_files(keys, keys_file.name, exposures, exposures_file.name)

            model = fake_model(resources={'canonical_exposures_profile': canonical_profile})
            # Set every required path on the pipeline ahead of the call.
            pipeline = model.resources['oasis_files_pipeline']
            pipeline.keys_file_path = keys_file.name
            pipeline.canonical_exposures_file_path = exposures_file.name
            pipeline.items_file_path = os.path.join(output_dir, self.items_filename)
            pipeline.coverages_file_path = os.path.join(output_dir, self.coverages_filename)
            pipeline.gulsummaryxref_file_path = os.path.join(output_dir, self.gulsummaryxref_filename)

            OasisExposuresManager().generate_oasis_files(oasis_model=model)

            self.check_items_file(keys, output_dir)
            self.check_coverages_file(exposures, output_dir)
            self.check_gul_file(exposures, output_dir)

    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(
        keys=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS), min_size=10),
        exposures=canonical_exposure_data(10, min_value=1)
    )
    def test_paths_are_stored_in_the_kwargs___kwarg_paths_are_used(self, keys, exposures):
        """Paths passed directly as keyword arguments override any need
        for pipeline-stored configuration on the model."""
        canonical_profile = {
            'profile_element': {'ProfileElementName': 'profile_element', 'FieldName': 'TIV', 'CoverageTypeID': 1}
        }

        with NamedTemporaryFile('w') as keys_file, \
                NamedTemporaryFile('w') as exposures_file, \
                TemporaryDirectory() as output_dir:
            write_input_files(keys, keys_file.name, exposures, exposures_file.name)

            OasisExposuresManager().generate_oasis_files(
                oasis_model=fake_model(),
                canonical_exposures_profile=canonical_profile,
                keys_file_path=keys_file.name,
                canonical_exposures_file_path=exposures_file.name,
                items_file_path=os.path.join(output_dir, self.items_filename),
                coverages_file_path=os.path.join(output_dir, self.coverages_filename),
                gulsummaryxref_file_path=os.path.join(output_dir, self.gulsummaryxref_filename)
            )

            self.check_items_file(keys, output_dir)
            self.check_coverages_file(exposures, output_dir)
            self.check_gul_file(exposures, output_dir)
# Example 4
class OasisLookupFactoryWriteKeys(TestCase):
    def create_fake_lookup(self):
        """Install a mock lookup instance on the test case and return it."""
        self.lookup_instance = Mock()
        return self.lookup_instance

    def test_no_model_exposures_are_provided___oasis_exception_is_raised(self):
        lookup = self.create_fake_lookup()
        with self.assertRaises(OasisException):
            list(OasisLookupFactory.get_keys(lookup))

    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(data=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS), size=10))
    def test_produced_keys_are_passed_to_write_oasis_keys_file(self, data):
        """``save_keys`` forwards its arguments to ``get_keys`` and hands
        the resulting records to ``write_oasis_keys_file``."""
        exposures_json = json.dumps(data)

        with TemporaryDirectory() as tmp_dir,\
             patch('oasislmf.keys.lookup.OasisLookupFactory.get_keys', Mock(return_value=(rec for rec in data))) as get_keys_mock,\
             patch('oasislmf.keys.lookup.OasisLookupFactory.write_oasis_keys_file') as write_keys_mock:

            keys_file_path = os.path.join(tmp_dir, 'piwind-keys.csv')
            OasisLookupFactory.save_keys(lookup=self.create_fake_lookup(),
                                         keys_file_path=keys_file_path,
                                         model_exposures=exposures_json)

            get_keys_mock.assert_called_once_with(
                lookup=self.lookup_instance,
                model_exposures=exposures_json,
                model_exposures_file_path=None,
                success_only=True)
            write_keys_mock.assert_called_once_with(data,
                                                    keys_file_path,
                                                    id_col='id')
# Example 5
class OasisExposureManagerLoadMasterDataframe(TestCase):
    """Tests for ``OasisExposuresManager.load_master_data_frame``."""

    def _check_result_rows(self, result, keys, exposures):
        """Assert every row of ``result`` carries sequential item, coverage
        and group ids, the source exposure TIV, the keys-record area-peril
        and vulnerability ids, and the fixed summary/summaryset ids.

        NOTE(review): assumes rows align one-to-one with ``keys`` and
        ``exposures`` (all generated TIVs positive, min_value=1) — confirm
        against the data strategies if that precondition changes.
        """
        for i in range(len(result)):
            # TIV stays a float; every other column is coerced to int so
            # comparisons against the generated source data are exact.
            row = {
                k: (int(v) if k != 'tiv' else v)
                for k, v in result.iloc[i].to_dict().items()
            }
            self.assertEqual(i + 1, row['item_id'])
            self.assertEqual(i + 1, row['coverage_id'])
            self.assertEqual(exposures[i][1], row['tiv'])
            self.assertEqual(keys[i]['area_peril_id'], row['areaperil_id'])
            self.assertEqual(keys[i]['vulnerability_id'],
                             row['vulnerability_id'])
            self.assertEqual(i + 1, row['group_id'])
            self.assertEqual(1, row['summary_id'])
            self.assertEqual(1, row['summaryset_id'])

    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(profile_element_name=text(alphabet=string.ascii_letters,
                                     min_size=1),
           keys=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS), size=10),
           exposures=canonical_exposure_data(10, min_value=1))
    def test_row_in_keys_data_is_missing_from_exposure_data___oasis_exception_is_raised(
            self, profile_element_name, keys, exposures):
        """Removing an exposure row referenced by the keys data must make
        ``load_master_data_frame`` raise an ``OasisException``."""
        # Find an exposure referenced by the keys data and drop it.
        matching_exposures = [
            e for e in exposures if e[0] in {k['id'] for k in keys}
        ]
        exposures.pop(exposures.index(matching_exposures[0]))
        profile = {
            profile_element_name: {
                'ProfileElementName': profile_element_name,
                'FieldName': 'TIV',
                'CoverageTypeID': 1
            }
        }

        with NamedTemporaryFile('w') as keys_file, NamedTemporaryFile(
                'w') as exposures_file:
            write_input_files(keys,
                              keys_file.name,
                              exposures,
                              exposures_file.name,
                              profile_element_name=profile_element_name)

            with self.assertRaises(OasisException):
                OasisExposuresManager().load_master_data_frame(
                    exposures_file.name, keys_file.name, profile)

    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(profile_element_name=text(alphabet=string.ascii_letters,
                                     min_size=1),
           keys=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS), size=10),
           exposures=canonical_exposure_data(num_rows=10, min_value=1))
    def test_each_row_has_a_single_row_per_element_with_each_row_having_a_positive_value_for_the_profile_element___each_row_is_present(
            self, profile_element_name, keys, exposures):
        """With one positive-TIV row per element, every input row appears
        exactly once in the master dataframe, in order."""
        profile = {
            profile_element_name: {
                'ProfileElementName': profile_element_name,
                'FieldName': 'TIV',
                'CoverageTypeID': 1
            }
        }

        # `type(v) is int` (not `== int`, E721) keeps the original
        # exact-type semantics: bools and other int subclasses excluded.
        keys_values_tuples = [
            tuple(v for v in k.values() if type(v) is int) for k in keys
        ]
        expected = [
            (i + 1, key_values, exposure[1])
            for i, (key_values, exposure) in enumerate(
                zip(keys_values_tuples, exposures))
        ]

        with NamedTemporaryFile('w') as keys_file, NamedTemporaryFile(
                'w') as exposures_file:
            write_input_files(keys,
                              keys_file.name,
                              exposures,
                              exposures_file.name,
                              profile_element_name=profile_element_name)

            result = OasisExposuresManager().load_master_data_frame(
                exposures_file.name,
                keys_file.name,
                profile,
            )
        self.assertEqual(len(expected), len(result))
        self._check_result_rows(result, keys, exposures)

    @settings(suppress_health_check=[HealthCheck.too_slow])
    @given(profile_element_name=text(alphabet=string.ascii_letters,
                                     min_size=1),
           keys=keys_data(from_statuses=just(KEYS_STATUS_SUCCESS), size=10),
           exposures=canonical_exposure_data(num_rows=10, min_value=1))
    def test_each_row_has_a_single_row_per_element_with_each_row_having_any_value_for_the_profile_element___rows_with_profile_elements_gt_0_are_present(
            self, profile_element_name, keys, exposures):
        """Only rows whose profile element (TIV) is positive are written to
        the master dataframe; ids are renumbered consecutively."""
        profile = {
            profile_element_name: {
                'ProfileElementName': profile_element_name,
                'FieldName': 'TIV',
                'CoverageTypeID': 1
            }
        }

        # `type(v) is int` (not `== int`, E721) keeps the original
        # exact-type semantics: bools and other int subclasses excluded.
        keys_values_tuples = [
            tuple(v for v in k.values() if type(v) is int) for k in keys
        ]
        expected = []
        row_id = 0
        for zipped_keys, zipped_exposure in zip(keys_values_tuples, exposures):
            if zipped_exposure[1] > 0:
                row_id += 1
                expected.append((
                    row_id,
                    zipped_keys,
                    zipped_exposure[1],
                ))

        with NamedTemporaryFile('w') as keys_file, NamedTemporaryFile(
                'w') as exposures_file:
            write_input_files(keys,
                              keys_file.name,
                              exposures,
                              exposures_file.name,
                              profile_element_name=profile_element_name)

            result = OasisExposuresManager().load_master_data_frame(
                exposures_file.name,
                keys_file.name,
                profile,
            )

        self.assertEqual(len(expected), len(result))
        self._check_result_rows(result, keys, exposures)