def test_sorted_impacted_cities(self):
    """Test getting impacted cities sorted by mmi then population.

    The table produced by the event is dumped to a scratch file for
    manual inspection, then compared against the on-disk fixture.
    """
    shake_event = ShakeEvent(SHAKE_ID, data_is_local_flag=True)
    table = shake_event.sorted_impacted_cities()

    # Dump the raw table to a scratch file so failures can be inspected.
    file_path = unique_filename(
        prefix='test_sorted_impacted_cities',
        suffix='.txt',
        dir=temp_dir('test'))
    # Use open() in a with-block: the file() builtin is deprecated and
    # the old code leaked the handle if an exception fired mid-test.
    with open(file_path, 'w') as cities_file:
        cities_file.write(str(table))

    # Put each entry on its own line so assertEqual diffs are readable.
    table = str(table).replace(', \'', ',\n\'')
    table += '\n'
    fixture_path = os.path.join(
        data_dir(), 'tests', 'test_sorted_impacted_cities.txt')
    with open(fixture_path) as cities_file:
        expected_string = cities_file.read()
    expected_string = expected_string.replace(', \'', ',\n\'')

    # maxDiff is the unittest attribute that disables diff truncation;
    # the previous max_diff spelling had no effect.
    self.maxDiff = None
    message = 'Expectation:\n%s, Got\n%s' % (expected_string, table)
    self.assertEqual(expected_string, table, message)
def test_sorted_impacted_cities(self):
    """Test getting impacted cities sorted by mmi then population.

    The table produced by the event is dumped to a scratch file for
    manual inspection, then compared against the on-disk fixture.
    """
    working_dir = shakemap_extract_dir()
    shake_event = ShakeEvent(
        working_dir=working_dir,
        event_id=SHAKE_ID,
        data_is_local_flag=True)
    table = shake_event.sorted_impacted_cities()

    # Dump the raw table to a scratch file so failures can be inspected.
    file_path = unique_filename(
        prefix='test_sorted_impacted_cities',
        suffix='.txt',
        dir=temp_dir('test'))
    # Use open() in a with-block: the file() builtin is deprecated and
    # the old code leaked the handle if an exception fired mid-test.
    with open(file_path, 'w') as cities_file:
        cities_file.write(str(table))

    # Put each entry on its own line so assertEqual diffs are readable.
    table = str(table).replace(', \'', ',\n\'')
    table += '\n'
    fixture_path = os.path.join(
        data_dir(), 'tests', 'test_sorted_impacted_cities.txt')
    with open(fixture_path) as cities_file:
        expected_string = cities_file.read()
    expected_string = expected_string.replace(', \'', ',\n\'')

    # maxDiff is the unittest attribute that disables diff truncation;
    # the previous max_diff spelling had no effect.
    self.maxDiff = None
    message = 'Expectation:\n%s, Got\n%s' % (expected_string, table)
    self.assertEqual(expected_string, table, message)
def test_local_cities(self):
    """Test that we can retrieve the cities local to the event.

    Attribute dumps from the in-memory cities layer are compared against
    the on-disk fixture with a unified diff which must be empty.
    """
    working_dir = shakemap_extract_dir()
    shake_event = ShakeEvent(
        working_dir=working_dir,
        event_id=SHAKE_ID,
        data_is_local_flag=True)
    # Get the memory layer of local cities.
    cities_layer = shake_event.local_cities_memory_layer()
    provider = cities_layer.dataProvider()
    expected_feature_count = 2
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(provider.featureCount(), expected_feature_count)

    strings = []
    request = QgsFeatureRequest()
    for feature in cities_layer.getFeatures(request):
        # Fetch map of attributes for each feature.
        attributes = cities_layer.dataProvider().attributeIndexes()
        for attribute_key in attributes:
            strings.append("%d: %s\n" % (
                attribute_key, feature[attribute_key]))
        strings.append('------------------\n')
    LOGGER.debug('Mem table:\n %s' % strings)

    # Dump the dumped attributes to a scratch file for inspection.
    file_path = unique_filename(
        prefix='test_local_cities',
        suffix='.txt',
        dir=temp_dir('test'))
    # Use open() in a with-block: the file() builtin is deprecated and
    # the old code leaked the handle if an exception fired mid-test.
    with open(file_path, 'w') as cities_file:
        cities_file.writelines(strings)

    fixture_path = os.path.join(
        data_dir(), 'tests', 'test_local_cities.txt')
    with open(fixture_path) as cities_file:
        expected_string = cities_file.readlines()

    # Join the unified diff directly instead of concatenating line by
    # line in a loop.
    diff_string = ''.join(difflib.unified_diff(expected_string, strings))
    message = ('Diff is not zero length:\n'
               'Control file: %s\n'
               'Test file: %s\n'
               'Diff:\n%s' % (fixture_path, file_path, diff_string))
    self.assertEqual(diff_string, '', message)
def test_sorted_impacted_cities(self):
    """Test getting impacted cities sorted by mmi then population.

    The table produced by the event is dumped to a scratch file for
    manual inspection, then compared against the on-disk fixture.
    """
    working_dir = shakemap_extract_dir()
    shake_event = ShakeEvent(
        working_dir=working_dir,
        event_id=SHAKE_ID,
        data_is_local_flag=True)
    table = shake_event.sorted_impacted_cities()

    # Dump the raw table to a scratch file so failures can be inspected.
    file_path = unique_filename(
        prefix="test_sorted_impacted_cities",
        suffix=".txt",
        dir=temp_dir("test"))
    # Use open() in a with-block: the file() builtin is deprecated and
    # the old code leaked the handle if an exception fired mid-test.
    with open(file_path, "w") as cities_file:
        cities_file.write(str(table))

    # Put each entry on its own line so assertEqual diffs are readable.
    table = str(table).replace(", '", ",\n'")
    table += "\n"
    fixture_path = os.path.join(
        data_dir(), "tests", "test_sorted_impacted_cities.txt")
    with open(fixture_path) as cities_file:
        expected_string = cities_file.read()
    expected_string = expected_string.replace(", '", ",\n'")

    # maxDiff is the unittest attribute that disables diff truncation;
    # the previous max_diff spelling had no effect.
    self.maxDiff = None
    message = "Expectation:\n%s, Got\n%s" % (expected_string, table)
    self.assertEqual(expected_string, table, message)
def process_event(event_id=None, locale='en'):
    """Launcher that actually runs the event processing.

    :param event_id: The event id to process. If None the latest event
        will be downloaded and processed.
    :type event_id: str

    :param locale: The locale that will be used. Default to en.
    :type locale: str
    """
    population_path = os.path.join(
        data_dir(),
        'exposure',
        'IDN_mosaic',
        'popmap10_all.tif')

    # Use cached data where available.
    # Whether we should always regenerate the products.
    force_flag = False
    if 'INASAFE_FORCE' in os.environ:
        force_string = os.environ['INASAFE_FORCE']
        if str(force_string).capitalize() == 'Y':
            force_flag = True

    # We always want to generate en products too so we manipulate the
    # locale list and loop through them:
    locale_list = [locale]
    if 'en' not in locale_list:
        locale_list.append('en')

    # Now generate the products
    for locale in locale_list:
        # Shared keyword arguments for both attempts; the population
        # raster is only passed along when it exists on disk. This
        # replaces four near-identical ShakeEvent(...) call sites.
        event_kwargs = {
            'event_id': event_id,
            'locale': locale,
            'force_flag': force_flag}
        if os.path.exists(population_path):
            event_kwargs['population_raster_path'] = population_path
        # Extract the event
        # noinspection PyBroadException
        try:
            shake_event = ShakeEvent(**event_kwargs)
        except (BadZipfile, URLError):
            # Retry with force flag true.
            event_kwargs['force_flag'] = True
            shake_event = ShakeEvent(**event_kwargs)
        except Exception:  # pylint: disable=broad-except
            # Top-level boundary: log and bail out, but unlike the old
            # bare ``except:`` do not swallow SystemExit or
            # KeyboardInterrupt.
            LOGGER.exception('An error occurred setting up the shake event.')
            return

        LOGGER.info('Event Id: %s', shake_event)
        LOGGER.info('-------------------------------------------')
        shake_event.render_map(force_flag)
def process_event(working_dir=None, event_id=None, locale='en'):
    """Launcher that actually runs the event processing.

    :param working_dir: Directory that shake data is extracted to and
        products are written in.
    :type working_dir: str

    :param event_id: The event id to process. If None the latest event
        will be downloaded and processed.
    :type event_id: str

    :param locale: The locale that will be used. Default to en.
    :type locale: str
    """
    population_path = os.path.join(data_dir(), 'exposure', 'population.tif')

    # Use cached data where available.
    # Whether we should always regenerate the products.
    force_flag = False
    if 'INASAFE_FORCE' in os.environ:
        force_string = os.environ['INASAFE_FORCE']
        if str(force_string).capitalize() == 'Y':
            force_flag = True

    # We always want to generate en products too so we manipulate the
    # locale list and loop through them:
    locale_list = [locale]
    if 'en' not in locale_list:
        locale_list.append('en')

    # Now generate the products
    for locale in locale_list:
        # Shared keyword arguments for both attempts; the population
        # raster is only passed along when it exists on disk. This
        # replaces four near-identical ShakeEvent(...) call sites.
        event_kwargs = {
            'working_dir': working_dir,
            'event_id': event_id,
            'locale': locale,
            'force_flag': force_flag}
        if os.path.exists(population_path):
            event_kwargs['population_raster_path'] = population_path
        # Extract the event
        # noinspection PyBroadException
        try:
            shake_event = ShakeEvent(**event_kwargs)
        except (BadZipfile, URLError):
            # Retry with force flag true.
            event_kwargs['force_flag'] = True
            shake_event = ShakeEvent(**event_kwargs)
        except EmptyShakeDirectoryError as ex:
            # No shake data to work on - not an error worth a traceback.
            LOGGER.info(ex)
            return
        except Exception:  # pylint: disable=broad-except
            # Top-level boundary: log anything unexpected and bail out.
            LOGGER.exception('An error occurred setting up the shake event.')
            return

        LOGGER.info('Event Id: %s', shake_event)
        LOGGER.info('-------------------------------------------')
        shake_event.render_map(force_flag)
def process_event(working_directory, locale_option='en'):
    """Process a flood map event.

    :param working_directory: Directory that flood data is read from and
        products are written in.
    :type working_directory: str

    :param locale_option: Locale to generate products for. 'en' products
        are always generated in addition. Default 'en'.
    :type locale_option: str
    """
    population_path = os.environ['INASAFE_FLOOD_POPULATION_PATH']

    # Ensure a settings file exists, writing defaults when it is missing.
    settings_file = os.path.join(
        data_dir(), 'settings', 'flood-settings.json')
    if not os.path.exists(settings_file):
        default_settings = {
            'duration': 6,
            'level': 'rw'
        }
        with open(settings_file, 'w+') as f:
            f.write(json.dumps(default_settings))

    with open(settings_file) as f:
        settings = json.loads(f.read())

    duration = settings['duration']
    level = settings['level']

    # Validate settings explicitly instead of the old raise/except
    # control flow (which raised a bare Exception only to catch it with
    # ``except Exception as e`` and never use ``e``).
    allowed_duration = [1, 3, 6]
    try:
        duration = int(duration)
    except (TypeError, ValueError):
        duration = None
    if duration not in allowed_duration:
        sys.exit(
            'Valid duration are: %s' % (
                ','.join([str(i) for i in allowed_duration]),
            )
        )

    allowed_level = ['subdistrict', 'village', 'rw']
    if level not in allowed_level:
        sys.exit(
            'Valid level are: %s' % ','.join(allowed_level)
        )

    # We always want to generate en products too so we manipulate the
    # locale list and loop through them:
    locale_list = [locale_option]
    if 'en' not in locale_list:
        locale_list.append('en')

    for locale in locale_list:
        LOGGER.info('Creating Flood Event for locale %s.' % locale)
        now = datetime.utcnow()
        event = FloodEvent(
            working_dir=working_directory,
            locale=locale,
            population_raster_path=population_path,
            duration=duration,
            level=level,
            year=now.year,
            month=now.month,
            day=now.day,
            hour=now.hour)
        event.calculate_impact()
        event.generate_report()
        ret = push_flood_event_to_rest(flood_event=event)
        LOGGER.info('Is Push successful? %s.' % bool(ret))
def process_event(working_dir=None, event_id=None, locale='en'):
    """Launcher that actually runs the event processing.

    :param event_id: The event id to process. If None the latest event
        will be downloaded and processed.
    :type event_id: str

    :param locale: The locale that will be used. Default to en.
    :type locale: str
    """
    population_path = os.path.join(
        data_dir(), 'exposure', 'population.tif')

    # Use cached data where available.
    # Regenerate products only when the INASAFE_FORCE env var asks us to.
    force_flag = str(
        os.environ.get('INASAFE_FORCE', '')).capitalize() == 'Y'

    # English products are always generated, so fold 'en' into the set of
    # locales we iterate over.
    locale_list = [locale]
    if 'en' not in locale_list:
        locale_list.append('en')

    # Now generate the products
    for locale in locale_list:
        # Extract the event
        # noinspection PyBroadException
        try:
            shake_events = create_shake_events(
                event_id=event_id,
                force_flag=force_flag,
                locale=locale,
                population_path=population_path,
                working_dir=working_dir)
        except (BadZipfile, URLError):
            # Retry once more, this time forcing regeneration.
            shake_events = create_shake_events(
                event_id=event_id,
                force_flag=True,
                locale=locale,
                population_path=population_path,
                working_dir=working_dir)
        except EmptyShakeDirectoryError as ex:
            LOGGER.info(ex)
            return
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception('An error occurred setting up the shake event.')
            return

        LOGGER.info('Event Id: %s', [s.event_id for s in shake_events])
        LOGGER.info('-------------------------------------------')
        for event in shake_events:
            event.render_map(force_flag)
            # push the shakemap to realtime server
            push_shake_event_to_rest(event)