def test_records_are_given___records_are_written_to_oasis_keys_files_correctly(
        self, successes, nonsuccesses):
    """Success and failure records are written to the Oasis keys and
    keys-errors CSV files with the expected headers, row counts and IDs.
    """
    # Map CSV column headers back to the keys used in the in-memory records.
    oasis_keys_file_to_record_metadict = {
        'LocID': 'id',
        'PerilID': 'peril_id',
        'CoverageTypeID': 'coverage_type',
        'AreaPerilID': 'area_peril_id',
        'VulnerabilityID': 'vulnerability_id'
    }
    oasis_keys_errors_file_to_record_metadict = {
        'LocID': 'id',
        'PerilID': 'peril_id',
        'CoverageTypeID': 'coverage_type',
        'Message': 'message'
    }

    with TemporaryDirectory() as d:
        keys_file_path = os.path.join(d, 'keys.csv')
        keys_errors_file_path = os.path.join(d, 'keys-errors.csv')

        _, successes_count = OasisLookupFactory.write_oasis_keys_file(
            successes, keys_file_path)
        _, nonsuccesses_count = OasisLookupFactory.write_oasis_keys_errors_file(
            nonsuccesses, keys_errors_file_path)

        with io.open(keys_file_path, 'r', encoding='utf-8') as f1, io.open(
                keys_errors_file_path, 'r', encoding='utf-8') as f2:
            # Re-read the CSVs and translate headers back to record keys.
            written_successes = [
                dict((oasis_keys_file_to_record_metadict[k], r[k]) for k in r)
                for r in pd.read_csv(f1).T.to_dict().values()
            ]
            written_nonsuccesses = [
                dict((oasis_keys_errors_file_to_record_metadict[k], r[k]) for k in r)
                for r in pd.read_csv(f2).T.to_dict().values()
            ]

        # BUG FIX: the original passed a generator expression as the filter
        # predicate's return value; a generator object is always truthy, so
        # every record "matched" and these assertions could never fail.
        # Use any() so the ID comparison is actually evaluated.
        success_matches = [
            r for r in successes
            if any(r['id'] == ws['id'] for ws in written_successes)
        ]
        nonsuccess_matches = [
            r for r in nonsuccesses
            if any(r['id'] == ws['id'] for ws in written_nonsuccesses)
        ]

        self.assertEqual(successes_count, len(successes))
        self.assertEqual(success_matches, successes)

        self.assertEqual(nonsuccesses_count, len(nonsuccesses))
        self.assertEqual(nonsuccess_matches, nonsuccesses)
def action(self, args):
    """Generates a new a model testing dockerfile from the supplied template

    :param args: The arguments from the command line
    :type args: Namespace
    """
    # The template dockerfile shipped in the package's _data directory.
    template_path = os.path.join(
        os.path.dirname(__file__), os.path.pardir, '_data',
        'Dockerfile.model_api_tester')

    # Fall back to ModelVersion.csv in the model data directory when no
    # explicit version file was given.
    version_file_path = args.model_version_file or os.path.join(
        args.model_data_directory, 'ModelVersion.csv')
    model_info = OasisLookupFactory.get_model_info(version_file_path)

    output_path = os.path.join(
        args.model_data_directory,
        'Dockerfile.{}_{}_model_api_tester'.format(
            model_info['supplier_id'].lower(),
            model_info['model_id'].lower()),
    )

    # Substitute each %NAME% placeholder in the template with its value.
    placeholders = ['%{}%'.format(name) for name in self.var_names]
    values = [
        __version__,
        args.api_server_url,
        model_info['supplier_id'],
        model_info['model_id'],
        model_info['model_version'],
        args.model_data_directory,
    ]
    replace_in_file(template_path, output_path, placeholders, values)

    self.logger.info('File created at {}.'.format(output_path))
    return 0
def test_produced_keys_are_passed_to_write_oasis_keys_file(self, data):
    """save_keys forwards the keys from get_keys to write_oasis_keys_file."""
    with TemporaryDirectory() as d:
        with patch('oasislmf.keys.lookup.OasisLookupFactory.get_keys',
                   Mock(return_value=(r for r in data))) as get_keys_mock:
            with patch('oasislmf.keys.lookup.OasisLookupFactory.write_oasis_keys_file') as write_oasis_keys_file_mock:
                keys_file_path = os.path.join(d, 'piwind-keys.csv')

                OasisLookupFactory.save_keys(
                    lookup=self.create_fake_lookup(),
                    keys_file_path=keys_file_path,
                    model_exposures=json.dumps(data))

                get_keys_mock.assert_called_once_with(
                    lookup=self.lookup_instance,
                    model_exposures=json.dumps(data),
                    model_exposures_file_path=None,
                    success_only=True)
                write_oasis_keys_file_mock.assert_called_once_with(
                    data, keys_file_path, id_col='id')
def test_exposures_string_is_provided___file_content_is_loaded(self, data):
    """A CSV string of exposures is parsed into the expected rows."""
    rows = [('first', 'second')] + data
    buf = StringIO()
    csv.writer(buf).writerows(rows)

    loaded = OasisLookupFactory.get_model_exposures(
        model_exposures=buf.getvalue())

    # Rebuild a (header-tuple, row-tuple, ...) list from the result.
    actual = [tuple(loaded)]
    for i in range(len(loaded)):
        actual.append(tuple(loaded.iloc[i]))

    self.assertEqual(actual, rows)
def test_entries_are_dictionaries_success_only_is_false___all_entries_are_included(
        self, data):
    """With success_only=False, every lookup result is returned."""
    with patch('oasislmf.keys.lookup.OasisLookupFactory.get_model_exposures'):
        self.create_fake_lookup(return_value=data)

        results = OasisLookupFactory.get_keys(
            lookup=self.lookup_instance,
            model_exposures_file_path='path',
            success_only=False)

        self.assertEqual(list(results), data)
def test_entries_are_dictionaries_success_only_is_true___only_successes_are_included(
        self, data):
    """By default (success_only=True) only 'success' records are returned."""
    with patch('oasislmf.keys.lookup.OasisLookupFactory.get_model_exposures'):
        self.create_fake_lookup(return_value=data)

        results = OasisLookupFactory.get_keys(
            lookup=self.lookup_instance,
            model_exposures_file_path='path')

        expected = [entry for entry in data if entry['status'] == 'success']
        self.assertEqual(list(results), expected)
def test_file_is_provided___file_content_is_loaded(self, data):
    """Exposure rows written to a CSV file are loaded back correctly."""
    rows = [('first', 'second')] + data

    with NamedTemporaryFile('w') as f:
        csv.writer(f).writerows(rows)
        f.flush()  # ensure the rows hit disk before re-reading

        loaded = OasisLookupFactory.get_model_exposures(
            model_exposures_file_path=f.name)

        # Rebuild a (header-tuple, row-tuple, ...) list from the result.
        actual = [tuple(loaded)]
        for i in range(len(loaded)):
            actual.append(tuple(loaded.iloc[i]))

        self.assertEqual(actual, rows)
def test_model_exposures_are_provided___exposures_are_passed_to_get_model_exposures_result_is_passed_to_lookup_process_locations(
        self, exposures, result):
    """get_keys forwards the exposures to get_model_exposures and hands the
    parsed result to the lookup's process_locations."""
    with patch('oasislmf.keys.lookup.OasisLookupFactory.get_model_exposures',
               Mock(return_value=result)):
        # Consume the generator so the mocked calls actually happen.
        list(OasisLookupFactory.get_keys(
            self.create_fake_lookup(), model_exposures=exposures))

        OasisLookupFactory.get_model_exposures.assert_called_once_with(
            model_exposures=exposures, model_exposures_file_path=None)
        self.lookup_instance.process_locations.assert_called_once_with(result)
def test_records_are_given___records_are_written_to_json_keys_files_correctly(
        self, successes, nonsuccesses):
    """Success and failure records round-trip through the JSON keys files."""
    with TemporaryDirectory() as d:
        keys_file_path = os.path.join(d, 'keys.json')
        keys_errors_file_path = os.path.join(d, 'keys-errors.json')

        _, successes_count = OasisLookupFactory.write_json_keys_file(
            successes, keys_file_path)
        _, nonsuccesses_count = OasisLookupFactory.write_json_keys_file(
            nonsuccesses, keys_errors_file_path)

        with io.open(keys_file_path, 'r', encoding='utf-8') as f1:
            written_successes = json.load(f1)
        with io.open(keys_errors_file_path, 'r', encoding='utf-8') as f2:
            written_nonsuccesses = json.load(f2)

        self.assertEqual(successes_count, len(successes))
        self.assertEqual(written_successes, successes)

        self.assertEqual(nonsuccesses_count, len(nonsuccesses))
        self.assertEqual(written_nonsuccesses, nonsuccesses)
def test_keys_path_is_supplied___correct_instance_is_created_with_correct_model_info_and_keys_path(
        self, supplier, model, version):
    """The factory builds a lookup of the model-specific class, with model
    info and keys data directory taken from the supplied files."""
    with TemporaryDirectory() as d:
        keys_path = os.path.join(d, 'keys')
        os.mkdir(keys_path)

        # Write the model version CSV and the lookup module the factory
        # will load the lookup class from.
        version_path = os.path.join(d, 'version.csv')
        self.write_version_file(supplier, model, version, version_path)

        module_path = os.path.join(d, '{}_lookup.py'.format(model))
        self.write_py_module(model, module_path)

        _, instance = OasisLookupFactory.create(
            model_keys_data_path=keys_path,
            model_version_file_path=version_path,
            lookup_package_path=module_path,
        )

        self.assertEqual(type(instance).__name__, '{}KeysLookup'.format(model))
        self.assertEqual(instance.supplier, supplier)
        self.assertEqual(instance.model_name, model)
        self.assertEqual(instance.model_version, version)
        self.assertEqual(instance.keys_data_directory, keys_path)
def test_no_model_exposures_are_provided___oasis_exception_is_raised(self):
    """get_keys without any exposure source raises OasisException."""
    with self.assertRaises(OasisException):
        # get_keys is a generator; consume it so the exception is raised.
        results = OasisLookupFactory.get_keys(self.create_fake_lookup())
        list(results)
def test_no_file_or_exposures_are_provided___oasis_exception_is_raised(
        self):
    """get_model_exposures with neither a string nor a file path raises
    OasisException."""
    with self.assertRaises(OasisException):
        OasisLookupFactory.get_model_exposures()
# NOTE(review): this chunk references `copy` and `os` which are not imported
# here — presumably imported earlier in the script; confirm against the full file.
from shapely.geometry import Point, Polygon, MultiPolygon, box

from oasislmf.utils.data import get_dataframe
from oasislmf.utils.peril import PerilAreasIndex, DEFAULT_RTREE_INDEX_PROPS
from oasislmf.keys.lookup import OasisLookupFactory as olf

# Create a copy of the default Rtree properties dict and set
# leaf capacity and fill factor to create as small an index as possible.
# `props` is not used in this chunk — presumably consumed further down
# when building the peril areas index; verify against the rest of the script.
props = copy.deepcopy(DEFAULT_RTREE_INDEX_PROPS)
props['leaf_capacity'] = 1000
props['fill_factor'] = 0.9

# Create a MEEQ lookup (combines peril and vulnerability lookup) using the
# Oasis lookup factory; `info` is the model info, `meeq` the combined lookup.
info, meeq = olf.create(lookup_config_fp='lookup_meeq.json')
print('\nmodel info: {}'.format(info))
print('\ncombined lookup (peril + vuln): {}'.format(meeq))

# Get the config dict from the lookup (loaded in from the file path you
# provided to create the lookup)
config = meeq.config
#print('lookup config: {}'.format(config))

# Check that keys data path look correct
keys_data_path = config.get('keys_data_path')
print('\nkeys data path: {}'.format(keys_data_path), end='')
print('; exists: {}'.format(os.path.exists(keys_data_path)))

# Check that the peril and vulnerability configs look OK
print('\nperil config: {}'.format(config.get('peril')))
# NOTE(review): this chunk references `copy` and `os` which are not imported
# here — presumably imported earlier in the script; confirm against the full file.
from shapely.geometry import Point, Polygon, MultiPolygon, box

from oasislmf.utils.data import get_dataframe
from oasislmf.utils.peril import PerilAreasIndex, DEFAULT_RTREE_INDEX_PROPS
from oasislmf.keys.lookup import OasisLookupFactory as olf

# Create a copy of the default Rtree properties dict and set
# leaf capacity and fill factor to create as small an index as possible.
# `props` is not used in this chunk — presumably consumed further down
# when building the peril areas index; verify against the rest of the script.
props = copy.deepcopy(DEFAULT_RTREE_INDEX_PROPS)
props['leaf_capacity'] = 1000
props['fill_factor'] = 0.9

# Create an EUWS lookup (combines peril and vulnerability lookup) using the
# Oasis lookup factory; `info` is the model info, `euws` the combined lookup.
info, euws = olf.create(lookup_config_fp='lookup.json')
print('\nEUWS model info: {}'.format(info))
print('\nCombined EUWS lookup (peril + vuln): {}'.format(euws))

# Get the config dict from the lookup (loaded in from the file path you
# provided to create the lookup)
config = euws.config
#print('lookup config: {}'.format(config))

# Check that keys data path look correct
keys_data_path = config.get('keys_data_path')
print('\nkeys data path: {}'.format(keys_data_path), end='')
print('; exists: {}'.format(os.path.exists(keys_data_path)))

# Check that the peril and vulnerability configs look OK
print('\nperil config: {}'.format(config.get('peril')))