def _get_met_root_dir(fires_manager):
    """Return the configured met root directory, failing fast if unset.

    Note: ArlFinder will raise an exception if met_root_dir is undefined
    or is not a valid directory.

    TODO: specify domain instead of met_root_dir, and somehow configure (not
    in the code, since this is open source), per domain, the root dir, arl
    file name pattern, etc.
    """
    root_dir = Config().get('findmetdata', 'met_root_dir')
    if not root_dir:
        raise BlueSkyConfigurationError("Config setting 'met_root_dir' "
            "required by findmetdata module")

    logging.debug("Met root dir: %s", root_dir)
    return root_dir
def __init__(self, dest_dir, **kwargs):
    """Resolves SMOKE-ready output pathnames and write options.

    args:
     - dest_dir -- directory to contain the ptinv/ptday/pthour files

    Each filename/option kwarg, if not provided, falls back to the
    corresponding 'extrafiles' > 'smokeready' config setting.
    """
    ptinv = (kwargs.get('ptinv_filename')
        or Config().get('extrafiles', 'smokeready', 'ptinv_filename'))
    self._ptinv_pathname = os.path.join(dest_dir, ptinv)

    ptday = (kwargs.get('ptday_filename')
        or Config().get('extrafiles', 'smokeready', 'ptday_filename'))
    self._ptday_pathname = os.path.join(dest_dir, ptday)

    pthour = (kwargs.get('pthour_filename') or Config().get(
        'extrafiles', 'smokeready', 'pthour_filename'))
    self._pthour_pathname = os.path.join(dest_dir, pthour)

    separate_smolder = (kwargs.get('separate_smolder') or Config().get(
        'extrafiles', 'smokeready', 'separate_smolder'))
    self._separate_smolder = separate_smolder

    write_ptinv_totals = (kwargs.get('write_ptinv_totals') or Config().get(
        'extrafiles', 'smokeready', 'write_ptinv_totals'))
    self._write_ptinv_totals = write_ptinv_totals

    write_ptday_file = (kwargs.get('write_ptday_file') or Config().get(
        'extrafiles', 'smokeready', 'write_ptday_file'))
    self._write_ptday_file = write_ptday_file

    # Pull the file year out of the dynamically set timestamp
    # NOTE(review): assumes the pthour filename embeds a timestamp
    # immediately after the first '-' (e.g. 'pthour-YYYYMMDD...'), and
    # raises ValueError/IndexError otherwise -- TODO confirm
    self.file_year = int(pthour.split('-')[1][:4])
def message_file_name(self):
    """Lazily locate the hysplit MESSAGE output file.

    Looks for 'MESSAGE' first, then 'MESSAGE.001', in the configured
    dispersion working dir. Returns None if neither exists yet; in that
    case the lookup runs again on the next call.
    """
    if self._message_file_name is None:
        working_dir = Config().get('dispersion', 'working_dir')
        for candidate in ('MESSAGE', 'MESSAGE.001'):
            pathname = os.path.join(working_dir, candidate)
            if os.path.exists(pathname):
                self._message_file_name = pathname
                break

    return self._message_file_name
def __init__(self, fires_manager):
    """Looks up and instantiates the configured plumerise model.

    args:
     - fires_manager -- bluesky.models.fires.FiresManager object

    Raises BlueSkyConfigurationError if the configured model has no
    corresponding generator method ('_<model>') on this class.
    """
    model = Config().get('plumerise', 'model').lower()
    fires_manager.processed(__name__, __version__,
        plumerise_version=plumerise_version, model=model)

    # fixed typo in log message: 'compution' -> 'computation'
    logging.debug('Generating %s plumerise computation function', model)
    generator = getattr(self, '_{}'.format(model), None)
    if not generator:
        raise BlueSkyConfigurationError(
            INVALID_PLUMERISE_MODEL_MSG.format(model))

    config = Config().get('plumerise', model)
    self._compute_func = generator(config)

    # record the model's working dir as plumerise output info, if defined
    if config.get('working_dir'):
        fires_manager.plumerise = {
            'output': {
                'directory': config['working_dir']
            }
        }
def test_invalid_config(self, reset_config):
    """Each invalid 'filter' > 'area' config raises FilterError and leaves
    the fires untouched -- unless 'skip_failures' is set, in which case
    filtering is a no-op."""
    scenarios = (
        # empty config
        ({}, fires.FireActivityFilter.MISSING_FILTER_CONFIG_MSG),
        # either min nor max is specified
        ({'foo': 'bar'}, fires.FireActivityFilter.SPECIFY_MIN_OR_MAX_MSG),
        ## Invalid min/max
        # both negative
        ({'min': -20, 'max': -2},
         fires.FireActivityFilter.INVALID_MIN_MAX_MUST_BE_POS_MSG),
        # min is negative
        ({'min': -20, 'max': 2},
         fires.FireActivityFilter.INVALID_MIN_MAX_MUST_BE_POS_MSG),
        ({'min': -20},
         fires.FireActivityFilter.INVALID_MIN_MAX_MUST_BE_POS_MSG),
        # max is negative
        ({'min': 20, 'max': -2},
         fires.FireActivityFilter.INVALID_MIN_MAX_MUST_BE_POS_MSG),
        ({'max': -2},
         fires.FireActivityFilter.INVALID_MIN_MAX_MUST_BE_POS_MSG),
        # min > max
        ({'min': 20, 'max': 2},
         fires.FireActivityFilter.INVALID_MIN_MUST_BE_LTE_MAX_MSG),
    )
    for config, err_msg in scenarios:
        Config().set(config, 'filter', 'area')

        # don't skip failures: expect the specific error, fires unchanged
        Config().set(False, 'filter', 'skip_failures')
        with raises(fires.FireActivityFilter.FilterError) as e_info:
            self.fm.filter_fires()
        assert self.fm.num_fires == 11
        assert self.init_fires == sorted(self.fm.fires, key=lambda e: int(e.id))
        assert e_info.value.args[0] == err_msg

        # skip failures: no exception, fires still unchanged
        Config().set(True, 'filter', 'skip_failures')
        self.fm.filter_fires()
        assert self.fm.num_fires == 11
        assert self.init_fires == sorted(self.fm.fires, key=lambda e: int(e.id))
def _set_output_info(self):
    """Record geojson and kml output pathnames from the trajectories
    output info.

    Raises RuntimeError if trajectories output info is missing or
    incomplete.
    """
    trajectories = self._fires_manager.trajectories
    output_info = trajectories.get('output') if trajectories else None
    if (not output_info or not output_info.get('geojson_file_name')
            or not output_info.get('directory')):
        raise RuntimeError("No trajectories GeoJSON file to convert to KML")

    directory = output_info['directory']
    self._geojson_file_name = os.path.join(
        directory, output_info['geojson_file_name'])
    self._kml_file_name = os.path.join(directory,
        Config().get('visualization', 'trajectories', 'hysplit',
            'kml_file_name'))
def _validate_input(fires_manager): ecoregion_lookup = None # instantiate only if necessary for fire in fires_manager.fires: with fires_manager.fire_failure_handler(fire): active_areas = fire.active_areas if not active_areas: raise ValueError(VALIDATION_ERROR_MSGS['NO_ACTIVITY']) for aa in active_areas: locations = aa.locations if not locations: raise ValueError(VALIDATION_ERROR_MSGS["NO_LOCATIONS"]) for loc in locations: if not loc.get('fuelbeds'): raise ValueError(VALIDATION_ERROR_MSGS["NO_FUELBEDS"]) # only 'area' is required from location if not loc.get('area'): raise ValueError( VALIDATION_ERROR_MSGS["AREA_UNDEFINED"]) if not loc.get('ecoregion'): # import EcoregionLookup here so that, if fires do have # ecoregion defined, consumption can be run without mapscript # and other dependencies installed try: latlng = LatLng(loc) if not ecoregion_lookup: from bluesky.ecoregion.lookup import EcoregionLookup implemenation = Config().get( 'consumption', 'ecoregion_lookup_implemenation') ecoregion_lookup = EcoregionLookup( implemenation) loc['ecoregion'] = ecoregion_lookup.lookup( latlng.latitude, latlng.longitude) if not loc['ecoregion']: logging.warning( "Failed to look up ecoregion for " "{}, {}".format(latlng.latitude, latlng.longitude)) _use_default_ecoregion(fires_manager, loc) except exceptions.MissingDependencyError as e: _use_default_ecoregion(fires_manager, loc, e) for fb in loc['fuelbeds']: if not fb.get('fccs_id') or not fb.get('pct'): raise ValueError( "Each fuelbed must define 'fccs_id' and 'pct'")
def run(args):
    """Runs consumption + emissions over each matching scenario input file
    and checks the results against expected output.

    args:
     - args -- parsed command line args (data_dir, scenario_id,
       emissions_model, include_emissions_details)

    Returns True if every scenario's output matched expectations.
    Exits the process if no scenario files match.
    """
    # NOTE(review): the '[0-9]' fallback glob matches only single-digit
    # scenario ids (scen_0.csv .. scen_9.csv) -- confirm that's intended
    pattern = '{}/data/{}/scen_{}.csv'.format(
        os.path.abspath(os.path.dirname(__file__)), args.data_dir,
        args.scenario_id or '[0-9]')
    input_filenames = glob.glob(pattern)
    if not input_filenames:
        # fixed typo in log message: 'scnarios' -> 'scenarios'
        logging.error("No matching scenarios")
        sys.exit(1)
    # fixed typo in log message: 'Scanarios' -> 'Scenarios'
    logging.info("Scenarios: {}".format(', '.join(
        [os.path.basename(n) for n in input_filenames])))

    success = True
    for input_filename in input_filenames:
        fires_manager = load_scenario(input_filename)
        Config().set(args.emissions_model, 'emissions', 'model')
        Config().set(args.include_emissions_details, 'emissions',
            'include_emissions_details')
        fires_manager.modules = ['consumption', 'emissions']
        fires_manager.run()

        actual = fires_manager.dump()
        expected_partials, expected_totals = load_output(input_filename, args)
        success = success and check(actual, expected_partials, expected_totals)

    return success
def run_input(module, input_file):
    """Runs bsp for one module over input_file and compares the dumped
    output against the expected-output regression file.

    Returns True on pass (or on an expected failure -- i.e. an exception
    when no expected-output file exists), False otherwise.
    """
    output_file = input_file.replace('input/', 'output/').replace(
        '.json', '-EXPECTED-OUTPUT.json')
    config_file = input_file.replace('input/', 'config/').replace(
        '.json', '-CONFIG.json')
    with open(config_file) as f:
        # guard against a config file with no top-level 'config' key;
        # `.get('config')` would return None and break the membership
        # test below with a TypeError
        config = json.loads(f.read()).get('config') or {}

    logging.debug('Running bsp on %s', input_file)
    try:
        if 'skip_failed_fires' not in config:
            # regression test output data was generated when
            # skip_failed_fires defaulted to false
            config['skip_failed_fires'] = False
        Config().set(config)
        fires_manager = models.fires.FiresManager()
        fires_manager.loads(input_file=input_file)
        fires_manager.modules = [module]
        fires_manager.run()
    except exceptions.BlueSkyModuleError as e:
        # The error was added to fires_manager's meta data, and will be
        # included in the output data
        pass
    except Exception as e:
        # if output file doesn't exist, it means this expection was expected
        # TODO: confirm that this is valid logic
        if os.path.isfile(output_file):
            logging.error('FAILED - %s - %s', input_file, str(e))
            return False
        else:
            logging.debug('Caught expected exception')
            return True

    try:
        logging.debug('Loading expected output file %s', output_file)
        with open(output_file, 'r') as f:
            expected = json.loads(f.read())
    except FileNotFoundError as e:
        logging.error('FAILED - %s - missing output file', input_file)
        return False

    # dumps and loads actual to convert datetimest, etc.
    actual = json.loads(
        json.dumps(fires_manager.dump(), cls=models.fires.FireEncoder))

    success = check(expected, actual)
    # plain if/else instead of a conditional expression evaluated only
    # for its logging side effect
    if success:
        logging.info('PASSED - %s', input_file)
    else:
        logging.error('FAILED - %s', input_file)
    return success
def test_successful_filter_min_and_max(self, reset_config):
    """Successively tighter min/max area filters reduce the fire set,
    ending with an empty set (and filtering an empty set is a no-op)."""
    # both min and max
    Config().set({'min': 52, 'max': 77.0}, 'filter', 'area')
    expected = [
        fires.Fire({'id': '3', 'activity': [
            {'active_areas':[{'specified_points': [{'area': 55}]}]}
        ]}),
        fires.Fire({'id': '4', 'activity': [
            {'active_areas':[{'perimeter': {'area': 65}}]}
        ]}),
        fires.Fire({'id': '6', 'activity': [
            {'active_areas':[{'specified_points': [{'area': 75}],
                'perimeter': {'area': 90}}]}]}),
        fires.Fire({'id': '10', 'activity': [
            {'active_areas':[{'perimeter': {'area': 65}}]}
        ]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 4
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # both min and max (equal bounds -- exact-area match only)
    Config().set({'min': 65, 'max': 65.0}, 'filter', 'area')
    expected = [
        fires.Fire({'id': '4', 'activity': [
            {'active_areas':[{'perimeter': {'area': 65}}]}
        ]}),
        fires.Fire({'id': '10', 'activity': [
            {'active_areas':[{'perimeter': {'area': 65}}]}
        ]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 2
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # filter out the rest
    Config().set({'min': 76, 'max': 77.0}, 'filter', 'area')
    self.fm.filter_fires()
    assert self.fm.num_fires == 0
    assert [] == sorted(self.fm.fires, key=lambda e: int(e.id))

    # call again with no fires
    self.fm.filter_fires()
    assert self.fm.num_fires == 0
    assert [] == sorted(self.fm.fires, key=lambda e: int(e.id))
def run(fires_manager):
    """runs the export module

    Args:
     - fires_manager -- bluesky.models.fires.FiresManager object

    Raises BlueSkyConfigurationError if any configured mode has no
    registered exporter.
    """
    modes = [m.lower() for m in Config.get('export', 'modes')]
    fires_manager.processed(__name__, __version__, modes=modes)

    extra_exports = Config.get('export', 'extra_exports')
    exporters = []
    for mode in modes:
        exporter_klass = EXPORTERS.get(mode)
        if not exporter_klass:
            # report the invalid mode itself; exporter_klass is None
            # here, so formatting it produced "Invalid exporter - None"
            raise BlueSkyConfigurationError(
                "Invalid exporter - {}".format(mode))
        exporters.append(exporter_klass(extra_exports))

    # Note: export modules update fires_manager with export info, since that
    # info needs to be in the fires_manager before it's dumped to json
    for exporter in exporters:
        exporter.export(fires_manager)
def test_with_dispersion_and_configured_time_window_no_fires(
        self, reset_config):
    """With no fires, the met time windows come from the dispersion
    start/num_hours window plus the explicitly configured findmetdata
    time window."""
    fm = FiresManager()
    Config.set({
        "dispersion": {
            "start": "2014-05-29T19:00:00Z",
            "num_hours": 12
        },
        "findmetdata": {
            "time_window": {
                "first_hour": "2016-01-04T04:00:00Z",
                "last_hour": "2016-01-05T13:00:00Z"
            }
        }
    })
    # first window: dispersion start + 12 hours; second: configured window
    expected = [{
        'start': datetime.datetime(2014, 5, 29, 19, 0, 0),
        'end': datetime.datetime(2014, 5, 30, 7, 0, 0),
    }, {
        'start': datetime.datetime(2016, 1, 4, 4, 0, 0),
        'end': datetime.datetime(2016, 1, 5, 13, 0, 0),
    }]
    assert expected == findmetdata._get_time_windows(fm)
def test_with_configured_time_window_no_fires(self, reset_config):
    """A configured findmetdata time window is returned as the only
    window when there are no fires to derive windows from."""
    fm = FiresManager()
    time_window_config = {
        "findmetdata": {
            "time_window": {
                "first_hour": "2016-01-04T04:00:00Z",
                "last_hour": "2016-01-05T13:00:00Z"
            }
        }
    }
    Config().set(time_window_config)
    assert findmetdata._get_time_windows(fm) == [{
        'start': datetime.datetime(2016, 1, 4, 4, 0, 0),
        'end': datetime.datetime(2016, 1, 5, 13, 0, 0),
    }]
def __init__(self, fires_manager):
    """Constructor

    args:
     - fires_manager -- FiresManager object whose fires are to be merged

    Raises FilterError if no filter fields are configured and failures
    aren't being skipped.
    """
    super(FireActivityFilter, self).__init__(fires_manager)
    self._filter_config = Config.get('filter')
    # every key under 'filter' except 'skip_failures' names a field
    # to filter on
    self._filter_fields = set(self._filter_config.keys()) - set(
        ['skip_failures'])
    if not self._filter_fields:
        # NOTE(review): _skip_failures is presumably set by the base
        # class constructor -- confirm
        if not self._skip_failures:
            raise self.FilterError(self.NO_FILTERS_MSG)
        # else, just log and return
        logging.warning(self.NO_FILTERS_MSG)
def test_remove_all_but_first_aa_by_end(self, reset_config):
    """A time filter 'end' bound removes all active areas except the one
    starting before the bound, leaving a single fire/location."""
    Config().set({"end": "2019-01-02T07:00:00"},'filter', 'time')
    expected = [
        fires.Fire({'id': '1', 'activity': [
            {'active_areas': [
                {'start': '2019-01-01T17:00:00','end': "2019-01-02T17:00:00","utc_offset": "-07:00",
                 'specified_points':[{'lat': 40.0, 'lng': -80.0, "area": 90.0}]},
            ]}
        ]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 1
    assert self.fm.num_locations == 1
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))
def test_grid(self, reset_config):
    """Grid params (center, extent, spacing in degrees) are derived from
    the configured hysplit grid boundary and spacing."""
    Config.set(
        {
            "spacing": 6.0,
            "boundary": {
                "ne": {
                    "lat": 45.25,
                    "lng": -106.5
                },
                "sw": {
                    "lat": 27.75,
                    "lng": -131.5
                }
            }
        }, "dispersion", "hysplit", "grid")
    # center/extent follow directly from the boundary midpoints/spans;
    # spacing values are converted to degrees by get_grid_params
    expected = {
        'center_latitude': 36.5,
        'center_longitude': -119.0,
        'height_latitude': 17.5,
        'spacing_latitude': 0.06705008458605,
        'spacing_longitude': 0.06705008458605,
        'width_longitude': 25.0
    }
    assert expected == hysplit_utils.get_grid_params()
def run(fires_manager):
    """Split each of the fire's activity windows to be able to process
    points separately.

    Args:
     - fires_manager -- bluesky.models.fires.FiresManager object
    """
    # fixed copy-paste error: this is the splitactivity module, but the
    # original message said "Running merge module"
    logging.info("Running splitactivity module")
    fires_manager.processed(__name__, __version__)

    record_original_activity = Config.get(
        'splitactivity', 'record_original_activity')

    for fire in fires_manager.fires:
        with fires_manager.fire_failure_handler(fire):
            _split(fire, record_original_activity)
def estimate(self, loc):
    """Estimates fuelbed composition based on lat/lng or polygon

    args:
     - loc -- location dict with either 'polygon' or 'lat'/'lng'
       (and optionally 'area')

    Mutates loc in place: may set loc['area'] (polygon case) and sets
    fuelbed info via self._truncate. Raises ValueError on insufficient
    location data and RuntimeError on lookup failure or when fuelbed
    percentages deviate from 100% beyond the configured threshold.
    """
    if not loc:
        raise ValueError(
            "Insufficient data for looking up fuelbed information")

    elif loc.get('polygon'):
        geo_data = {"type": "Polygon", "coordinates": [loc['polygon']]}
        logging.debug("Converted polygon to geojson: %s", geo_data)
        fuelbed_info = self.lookup.look_up(geo_data)
        # If loc['area'] is defined, then we want to keep it. We're dealing
        # with a perimeter which may not be all burning. If it isn't
        # defined, then set loc['area'] to fuelbed_info['area']
        if not loc.get('area') and fuelbed_info and fuelbed_info.get(
                'area'):
            # fuelbed_info['area'] is in m^2
            loc['area'] = fuelbed_info['area'] * ACRES_PER_SQUARE_METER

    elif loc.get('lat') and loc.get('lng'):
        geo_data = {
            "type": "Point",
            "coordinates": [loc['lng'], loc['lat']]
        }
        logging.debug("Converted lat,lng to geojson: %s", geo_data)
        fuelbed_info = self.lookup.look_up(geo_data)

    else:
        raise ValueError(
            "Insufficient data for looking up fuelbed information")

    if not fuelbed_info or not fuelbed_info.get('fuelbeds'):
        # TODO: option to ignore failures ?
        raise RuntimeError("Failed to lookup fuelbed information")
    elif Config().get('fuelbeds', 'total_pct_threshold') < abs(
            100.0 - sum([d['percent']
            for d in fuelbed_info['fuelbeds'].values()])):
        # the looked-up percentages are too far from summing to 100%
        raise RuntimeError(
            "Fuelbed percentages don't add up to 100% - {fuelbeds}".format(
                fuelbeds=fuelbed_info['fuelbeds']))

    fuelbeds = [{
        'fccs_id': f,
        'pct': d['percent']
    } for f, d in fuelbed_info['fuelbeds'].items()]

    loc.update(**self._truncate(fuelbeds))
def _ingest_activity_emissions(self, activity, src):
    """Copies summary emissions values for known species from src into
    the activity object, if 'ingestion' > 'keep_emissions' is set."""
    if Config.get('ingestion', 'keep_emissions'):
        logging.debug("Ingesting emissions")
        emissions = {"summary": {}}
        for e in self.EMISSIONS_SPECIES:
            # a species entry is either a single string or a list of
            # alias strings ('lower' attribute distinguishes the two)
            keys = [e] if hasattr(e, "lower") else e
            # pair each key with its lowercase form, preserving order;
            # note the inner comprehension's 'e' shadows the loop variable
            keys = [i for j in [(e, e.lower()) for e in keys] for i in j]
            # TODO: look for value in src['emissions']['summary'][k],
            #   for any k in keys, first
            v = self._get_numeric_val(src, *keys)
            if v is not None:
                # record under the canonical (first) key
                emissions["summary"][keys[0]] = v

        if emissions["summary"]:
            logging.debug("Recording emissions in activity object")
            activity["emissions"] = emissions
def test_successful_filter_min(self, reset_config):
    """A min-only area filter keeps fires whose area meets the minimum."""
    # added the reset_config fixture for consistency with the other
    # filter tests, so the 'filter' config set here doesn't leak into
    # subsequent tests
    Config().set({'min': 47}, 'filter', 'area')
    expected = [
        fires.Fire({'id': '2', 'activity': [
            {'active_areas':[{'specified_points': [{'area': 55}, {'area': 40}]}]}
        ]}),
        fires.Fire({'id': '3', 'activity': [
            {'active_areas':[{'specified_points': [{'area': 55}]}]}
        ]}),
        fires.Fire({'id': '4', 'activity': [
            {'active_areas':[{'perimeter': {'area': 65}}]}
        ]}),
        fires.Fire({'id': '5', 'activity': [
            {'active_areas':[{'specified_points': [{'area': 85}]}]}
        ]}),
        fires.Fire({'id': '6', 'activity': [
            {'active_areas':[{'specified_points': [{'area': 75}],
                'perimeter': {'area': 90}}]}]}),
        fires.Fire({'id': '7', 'activity': [
            {'active_areas':[{'specified_points': [{'area': 50}]}]}
        ]}),
        fires.Fire({'id': '10', 'activity': [
            {'active_areas':[{'perimeter': {'area': 65}}]}
        ]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 7
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))
def _get_configured_time_windows(fires_manager):
    """Return {'start', 'end'} built from the configured findmetdata
    time window, or the unset (falsy) config value if none is configured.

    Raises BlueSkyConfigurationError if either bound isn't a round hour.
    """
    time_window = Config.get('findmetdata', 'time_window')
    if not time_window:
        return time_window

    logging.debug("Met time window specified in the config")
    parsed = parse_datetimes(time_window, 'first_hour', 'last_hour')
    # met data finder doesn't require round hours, but ....
    if (not is_round_hour(parsed['first_hour'])
            or not is_round_hour(parsed['last_hour'])):
        raise BlueSkyConfigurationError(
            "Met first and last hours must be round hours")
    return {
        'start': parsed['first_hour'],
        'end': parsed['last_hour']
        # TODO: round this up to the next hour?
    }
def test_different_event_ids(self, reset_config):
    """Fires with the same id but different event ids must not be merged:
    a ValueError is raised unless skip_failures is set, and either way
    both fires remain."""
    # test in both skip and no-skip modes
    for s in (True, False):
        fm = fires.FiresManager()
        Config().set(s, 'merge', 'skip_failures')
        f = fires.Fire({
            'id': '1',
            "event_of": {
                "id": "ABC"
            },
            # activity just used for assertion, below
            "activity": [{
                "active_areas": [{
                    'specified_points': [{
                        'area': 123
                    }]
                }]
            }]
        })
        f2 = fires.Fire({
            'id': '1',
            "event_of": {
                "id": "SDF"
            },
            # activity just used for assertion, below
            "activity": [{
                "active_areas": [{
                    'specified_points': [{
                        'area': 456
                    }]
                }]
            }]
        })
        fm.fires = [f, f2]
        if not s:
            with raises(ValueError) as e_info:
                fm.merge_fires()
            assert fm.num_fires == 2
            # NOTE(review): '> 0' assumes the mismatch message never
            # appears at index 0 of the error text (presumably it's
            # prefixed) -- confirm
            assert e_info.value.args[0].index(
                fires.FiresMerger.EVENT_MISMATCH_MSG) > 0
        else:
            fm.merge_fires()
            assert fm.num_fires == 2
            assert [f2, f] == fm.fires
def today(self, today):
    """Setter for today.

    Normalizes the new value via the getter (wildcard replacement /
    datetime conversion) and records it in Config. Raises TypeError if
    today was previously set manually to a different normalized value.
    """
    # calls the property getter, so this is the normalized current value
    previous_today = self.today
    self._processed_today = False
    self._today = today
    # HACK (sort of): we need to call self.today to trigger replacement
    # of wildcards and then conversion to datetime object (that's a
    # hack), but we need to access it anyway to set in Config
    new_today = self.today

    # now that today is sure to be a datetime object, make sure that,
    # if previously manually set, the two values are the same
    if self._manually_set_today and previous_today != new_today:
        raise TypeError(self.TODAY_IS_IMMUTABLE_MSG)

    Config().set_today(new_today)
    self._manually_set_today = True
def _validate_input(fires_manager): ecoregion_lookup = None # instantiate only if necessary for fire in fires_manager.fires: with fires_manager.fire_failure_handler(fire): if not fire.get('activity'): raise ValueError( "Missing activity data required for computing consumption") for a in fire.activity: for k in ('fuelbeds', 'location'): if not a.get(k): raise ValueError("Missing activity '{}' data required " "for computing consumption".format(k)) # only 'area' is required from location if not a['location'].get('area'): raise ValueError("Fire activity location data must " "define area for computing consumption") if not a['location'].get('ecoregion'): # import EcoregionLookup here so that, if fires do have # ecoregion defined, consumption can be run without mapscript # and other dependencies installed try: latlng = LatLng(a['location']) if not ecoregion_lookup: from bluesky.ecoregion.lookup import EcoregionLookup implemenation = Config.get( 'consumption', 'ecoregion_lookup_implemenation') ecoregion_lookup = EcoregionLookup(implemenation) a['location']['ecoregion'] = ecoregion_lookup.lookup( latlng.latitude, latlng.longitude) if not a['location']['ecoregion']: logging.warning("Failed to look up ecoregion for " "{}, {}".format( latlng.latitude, latlng.longitude)) _use_default_ecoregion(fires_manager, a) except exceptions.MissingDependencyError as e: _use_default_ecoregion(fires_manager, a, e) for fb in a['fuelbeds']: if not fb.get('fccs_id') or not fb.get('pct'): raise ValueError( "Each fuelbed must define 'fccs_id' and 'pct'")
def test_user_defined_grid(self, reset_config):
    """USER_DEFINED_GRID settings are passed through to the grid params
    unchanged."""
    Config().set(True, "dispersion", "hysplit" , "USER_DEFINED_GRID")
    Config().set(36.5, "dispersion", "hysplit" , "CENTER_LATITUDE")
    Config().set(-119.0, "dispersion", "hysplit", "CENTER_LONGITUDE")
    Config().set(25.0, "dispersion", "hysplit" , "WIDTH_LONGITUDE")
    Config().set(17.5, "dispersion", "hysplit" , "HEIGHT_LATITUDE")
    Config().set(0.05, "dispersion", "hysplit" , "SPACING_LONGITUDE")
    Config().set(0.05, "dispersion", "hysplit" , "SPACING_LATITUDE")
    expected = {
        'center_latitude': 36.5,
        'center_longitude': -119.0,
        'height_latitude': 17.5,
        'spacing_latitude': 0.05,
        'spacing_longitude': 0.05,
        'width_longitude': 25.0
    }
    assert expected == hysplit_utils.get_grid_params()
def dump(self):
    """Returns the full output data structure: the accumulated meta data
    plus fires, today, run_id, counts, bluesky version, and the resolved
    run config."""
    # Don't include 'modules' in the output. The modules to be run may have
    # been specified on the command line or in the input json. Either way,
    # 'processing' contains a record of what modules were run (though it may
    # be fewer modules than what were requested, which would be the case
    # if there was a failure). We don't want to include 'modules' in the
    # output because that breaks the ability to pipe the results into
    # another run of bsp.
    # TODO: keep track of whether modules were specified in the input
    #   json or on the command line, and add them to the output if they
    #   were in the input
    return dict(self._meta, fires=self.fires, today=self.today,
        run_id=self.run_id, counts=self.counts, bluesky_version=__version__,
        run_config=Config.get())
def _fill_missing_fires(self, fires_manager):
    """Fill-in (persist) that do not extend to the end of the
    emissions period

    Each fire's single activity window is copied forward one day at a
    time until the configured 'growth' > 'forecast_end' hour is reached.
    Returns the number of activity objects created. Raises ValueError
    (per fire, via the failure handler) if a fire has more than one
    activity object or active area.
    """
    n_created = 0
    # NOTE(review): presumably a datetime comparable with the UTC start
    # computed below -- confirm
    last_hour = Config().get('growth', 'forecast_end')

    # This is to ensure we are not double counting any fires.
    # Future fires that already exist (have an event_id) will
    # not have past data persisted overtop of the existing data
    # fire_events = {}
    # for fire in fires_manager.fires:
    #     event = fire["event_of"]["id"]
    #     start = fire["activity"][0]["active_areas"][0]["start"]
    #     if event not in fire_events:
    #         fire_events[event] = [start]
    #     else:
    #         fire_events[event].append(start)

    for fire in fires_manager.fires:
        with fires_manager.fire_failure_handler(fire):
            aa = fire["activity"]
            # event = fire["event_of"]["id"]
            if len(aa) != 1:
                raise ValueError("Each fire must have only 1 activity object when running persistence")
            if len(aa[0]["active_areas"]) != 1:
                raise ValueError("Each fire must have only 1 active area when running persistence")
            # next day's window, local time
            start = aa[0]["active_areas"][0]["start"] + timedelta(days=1)
            end = aa[0]["active_areas"][0]["end"] + timedelta(days=1)
            # shift by the utc offset to compare against the UTC forecast end
            start_utc = start - timedelta(hours=int(aa[0]["active_areas"][0]["utc_offset"]))

            while start_utc < last_hour:
                # if start in fire_events[event]:
                #     break
                n_created += 1
                # deep copy so each persisted day is an independent object
                new_aa = copy.deepcopy(aa[0])
                new_aa["active_areas"][0]["start"] = start
                new_aa["active_areas"][0]["end"] = end
                fire["activity"].append(new_aa)
                start += timedelta(days=1)
                end += timedelta(days=1)
                start_utc += timedelta(days=1)

    # self._remove_unused_fires(fires_manager,first_hour,last_hour)
    return n_created
def test_invalid_keys(self, reset_config):
    """Fires using invalid (old-structure) keys must not be merged:
    a ValueError is raised unless skip_failures is set, and either way
    both fires remain."""
    # test in both skip and no-skip modes
    for s in (True, False):
        # i.e. top-level location is old structure
        fm = fires.FiresManager()
        Config().set(s, 'merge', 'skip_failures')
        f = fires.Fire({'id': '1', 'location': {'area': 132}})
        f2 = fires.Fire({'id': '1', 'location': {'area': 132}})
        fm.fires = [f, f2]
        if not s:
            with raises(ValueError) as e_info:
                fm.merge_fires()
            assert fm.num_fires == 2
            # NOTE(review): '> 0' assumes the message never appears at
            # index 0 of the error text -- confirm
            assert e_info.value.args[0].index(
                fires.FiresMerger.INVALID_KEYS_MSG) > 0
        else:
            fm.merge_fires()
            assert fm.num_fires == 2
            assert [f, f2] == sorted(fm.fires, key=lambda e: int(e.id))
def test_with_details_PM_only(self, reset_config):
    """Consume emissions with details enabled and only PM species
    configured: the fire missing fuelbeds records an error, the other
    gets emissions plus emissions_details."""
    Config.set("consume", 'emissions', "model")
    Config.set(True, 'emissions', "include_emissions_details")
    Config.set(['PM2.5', 'PM10'], 'emissions', "species")
    emissions.Consume(fire_failure_manager).run(self.fires)

    assert self.fires[0]['error'] == (
        'Missing fuelbed data required for computing emissions')

    assert 'emissions_details' in self.fires[1]['activity'][0]['fuelbeds'][
        0]
    self._check_emissions(
        self.EXPECTED_FIRE1_EMISSIONS_PM_ONLY,
        self.fires[1]['activity'][0]['fuelbeds'][0]['emissions'])
def test_with_details_PM_only(self, reset_config):
    """Prichard-O'Neill emissions with details enabled and the configured
    species list: the fire missing fuelbeds records an error, the other
    gets emissions plus emissions_details."""
    Config.set("prichard-oneill", 'emissions', "model")
    Config.set(True, 'emissions', "include_emissions_details")
    Config.set(self.SPECIES, 'emissions', "species")
    emissions.PrichardOneill(fire_failure_manager).run(self.fires)

    assert self.fires[0]['error'] == (
        'Missing fuelbed data required for computing emissions')

    assert 'emissions_details' in self.fires[1]['activity'][0]['fuelbeds'][
        0]
    self._check_emissions(
        self.EXPECTED_FIRE1_EMISSIONS,
        self.fires[1]['activity'][0]['fuelbeds'][0]['emissions'])