def test_different_fire_and_fuel_type(self, reset_config):
    """Merging fires that share an id but differ in 'type' or 'fuel_type'
    must either raise (skip_failures off) or leave the fires unmerged
    (skip_failures on).
    """
    # test in both skip and no-skip modes
    for s in (True, False):
        fm = fires.FiresManager()
        Config().set(s, 'merge', 'skip_failures')
        f = fires.Fire({
            'id': '1',
            "type": "rx",
            "fuel_type": "natural",
            # activity just used for assertion, below
            "activity": [{
                "active_areas": [{
                    'specified_points': [{
                        'area': 123
                    }]
                }]
            }]
        })
        f2 = fires.Fire({
            'id': '1',
            "type": "wf",
            "fuel_type": "natural",
            # activity just used for assertion, below
            "activity": [{
                "active_areas": [{
                    'specified_points': [{
                        'area': 456
                    }]
                }]
            }]
        })
        fm.fires = [f, f2]
        assert fm.num_fires == 2
        if not s:
            # no-skip mode: fire 'type' mismatch raises; fires untouched
            with raises(ValueError) as e_info:
                fm.merge_fires()
            assert fm.num_fires == 2
            assert e_info.value.args[0].index(
                fires.FiresMerger.FIRE_TYPE_MISMATCH_MSG) > 0
        else:
            # skip mode: merge is a no-op for the mismatched pair
            fm.merge_fires()
            assert fm.num_fires == 2
            assert [f2, f] == fm.fires
        # align fire types, then introduce a fuel_type mismatch instead
        f2.type = f.type
        f2.fuel_type = "activity"
        fm.fires = [f, f2]
        assert fm.num_fires == 2
        if not s:
            with raises(ValueError) as e_info:
                fm.merge_fires()
            assert fm.num_fires == 2
            assert e_info.value.args[0].index(
                fires.FiresMerger.FUEL_TYPE_MISMATCH_MSG) > 0
        else:
            fm.merge_fires()
            assert fm.num_fires == 2
            assert [f2, f] == fm.fires
def run(self, fires_manager, start, num_hours, output_dir, working_dir=None):
    """Runs hysplit

    args:
     - fires_manager - FiresManager object
     - start - model run start hour (datetime; must fall exactly on the hour)
     - num_hours - number of hours in model run (must be an int)
     - output_dir - directory to contain output (assumed to already exist)

    kwargs:
     - working_dir -- working directory to write input files and output
       files (before they're copied over to final output directory); if
       not specified, a temp directory is created

    Raises ValueError for a non-whole-hour start or non-int num_hours.
    Returns the dict produced by self._run(), augmented with counts,
    output metadata, and any accumulated warnings.
    """
    logging.info("Running %s", self.__class__.__name__)
    self._warnings = []
    if start.minute or start.second or start.microsecond:
        raise ValueError("Dispersion start time must be on the hour.")
    if type(num_hours) != int:
        # NOTE(review): exact type check (not isinstance) deliberately(?)
        # rejects bools and int subclasses — confirm before changing
        raise ValueError("Dispersion num_hours must be an integer.")
    self._model_start = start
    self._num_hours = num_hours
    self._run_output_dir = output_dir # already created
    # normalize to an absolute path (None stays None via short-circuit)
    self._working_dir = working_dir and os.path.abspath(working_dir)
    # osutils.create_working_dir will create working dir if necessary
    counts = {'fires': len(fires_manager.fires)}
    self._set_fire_data(fires_manager.fires)
    counts['locations'] = len(self._fires)
    # TODO: only merge fires if hysplit, or make it configurable ???
    self._fires = firemerge.FireMerger().merge(self._fires)
    # TODO: should we pop 'end' from each fire object, since it's
    #   only used in _merge_fires logic?
    counts['distinct_locations'] = len(self._fires)
    pm_config = Config().get('dispersion', 'plume_merge')
    if pm_config:
        # TODO: make sure pm_config boundary includes all of disperion
        #   boundary, and raise BlueSkyConfigurationError if not?
        self._fires = firemerge.PlumeMerger(pm_config).merge(self._fires)
        counts['plumes'] = len(self._fires)
        notes = "Plumes to be modeled by dispersion"
        fires_manager.log_status('Good', 'dispersion', 'Continue',
            number_of_locations=counts['plumes'], notes=notes)
    # run the model inside a (possibly temporary) working directory
    with osutils.create_working_dir(working_dir=self._working_dir) as wdir:
        r = self._run(wdir)
    r["counts"] = counts
    r["output"].update({
        "directory": self._run_output_dir,
        "start_time": self._model_start.isoformat(),
        "num_hours": self._num_hours
    })
    if self._working_dir:
        r["output"]["working_dir"] = self._working_dir
    if self._warnings:
        r["warnings"] = self._warnings
    return r
def _is_hysplit(self):
    """Return True only when this is the dispersion module and the
    configured dispersion model is 'hysplit'."""
    if self.m != 'dispersion':
        return False
    return Config().get('dispersion', 'model') == 'hysplit'
def vis_hysplit_config(*keys):
    """Convenience accessor for settings nested under
    'visualization' > 'hysplit'."""
    lookup_path = ('visualization', 'hysplit') + keys
    return Config().get(*lookup_path)
def _log_config(self):
    """Log every dispersion setting for the active model at DEBUG level,
    in sorted key order."""
    # TODO: bail if logging level is less than DEBUG (to avoid list and
    #   set operations)
    model_settings = Config().get('dispersion', self._model)
    for name in sorted(model_settings):
        logging.debug('Dispersion config setting - %s = %s',
            name, model_settings[name])
def setup(self):
    # Select the 'sev' plumerise model and make fire failures fatal
    # *before* instantiating FiresManager, which reads the global config.
    Config().set('sev', 'plumerise', 'model')
    Config().set(False, 'skip_failed_fires')
    self.fm = FiresManager()
def setup(self):
    # Build an Estimator with truncation disabled by clearing both
    # truncation thresholds; the fccs lookup itself is mocked out.
    lookup = mock.Mock()
    Config().set(None, "fuelbeds", "truncation_percentage_threshold")
    Config().set(None, "fuelbeds", "truncation_count_threshold")
    self.estimator_no_truncation = fuelbeds.Estimator(lookup)
def test_compute_grid(self, reset_config):
    """get_grid_params with compute_grid enabled: both spacing settings
    are required, exactly one fire must be given, and the computed grid
    dimensions scale with the configured grid_length."""
    fires_one = [{'latitude': 40.0, 'longitude': -118.5}]
    fires_two = [
        {'latitude': 40.0, 'longitude': -118.5},
        {'latitude': 45.0, 'longitude': -117.5}
    ]

    ## Missing spacing (neither, longitude-only, latitude-only)
    Config().set(True, "dispersion", "hysplit", "compute_grid")
    with raises(BlueSkyConfigurationError) as e_info:
        hysplit_utils.get_grid_params(fires=fires_one)
    assert e_info.value.args[0] == ("Config settings 'spacing_latitude' "
        "and 'spacing_longitude' required to compute hysplit grid")

    Config().reset()
    Config().set(True, "dispersion", "hysplit", "compute_grid")
    Config().set(0.05, 'dispersion', 'hysplit', 'spacing_longitude')
    with raises(BlueSkyConfigurationError) as e_info:
        hysplit_utils.get_grid_params(fires=fires_one)
    assert e_info.value.args[0] == ("Config settings 'spacing_latitude' "
        "and 'spacing_longitude' required to compute hysplit grid")

    Config().reset()
    Config().set(True, "dispersion", "hysplit", "compute_grid")
    Config().set(0.05, 'dispersion', 'hysplit', 'spacing_latitude')
    with raises(BlueSkyConfigurationError) as e_info:
        hysplit_utils.get_grid_params(fires=fires_one)
    assert e_info.value.args[0] == ("Config settings 'spacing_latitude' "
        "and 'spacing_longitude' required to compute hysplit grid")

    ## no fires or too many fires
    Config().reset()
    Config().set(True, "dispersion", "hysplit", "compute_grid")
    Config().set(0.05, 'dispersion', 'hysplit', 'spacing_latitude')
    Config().set(0.05, 'dispersion', 'hysplit', 'spacing_longitude')
    with raises(ValueError) as e_info:
        hysplit_utils.get_grid_params()
    assert e_info.value.args[0] == 'Option to compute grid only supported for runs with one fire'
    with raises(ValueError) as e_info:
        hysplit_utils.get_grid_params(fires=fires_two)
    assert e_info.value.args[0] == 'Option to compute grid only supported for runs with one fire'

    # valid single-fire case: grid centered on the fire
    expected = {
        'center_latitude': 40.0,
        'center_longitude': -118.5,
        'height_latitude': 18.01801801801802,
        'spacing_latitude': 0.05,
        'spacing_longitude': 0.05,
        'width_longitude': 23.453239118438354
    }
    assert expected == hysplit_utils.get_grid_params(fires=fires_one)

    # custom grid length (default is 2000); halving grid_length halves
    # both the height and width of the computed grid
    Config().set(1000, 'dispersion', 'hysplit', 'grid_length')
    expected = {
        'center_latitude': 40.0,
        'center_longitude': -118.5,
        'height_latitude': 9.00900900900901,
        'spacing_latitude': 0.05,
        'spacing_longitude': 0.05,
        'width_longitude': 11.726619559219177
    }
    assert expected == hysplit_utils.get_grid_params(fires=fires_one)
def _initialize_today(self):
    # Default 'today' to the current UTC date; a later manual set is
    # still allowed (tracked via _manually_set_today).
    self._manually_set_today = False
    self._processed_today = True
    self._today = datetimeutils.today_utc()
    # keep the config singleton's '{today}' wildcards in sync
    Config().set_today(self._today)
def __init__(self, fire_failure_handler):
    """Store the per-fire failure handler and cache the emissions
    config values this instance needs."""
    self.fire_failure_handler = fire_failure_handler
    cfg = Config()
    self.include_emissions_details = cfg.get(
        'emissions', 'include_emissions_details')
    self.species = cfg.get('emissions', 'species')
def __init__(self, fire_failure_handler):
    """Initialize the base class, then build the UBC BSF FEPS emitter
    from the model-specific emissions config."""
    super().__init__(fire_failure_handler)
    model_name = Config().get('emissions', 'model').lower()
    model_config = Config().get('emissions', model_name)
    self.emitter = UbcBsfFEPSEmissions(**model_config)
def setup(self):
    # Writer configured to emit 'foo.csv' under /tmp/; the underlying
    # emissions writer is replaced with a mock so no real file is written.
    Config().set('foo.csv', 'extrafiles', 'emissionscsv', 'filename')
    self.writer = emissionscsv.EmissionsCsvWriter('/tmp/')
    self.writer.emissions_writer = MockEmissionsWriter()
def run_id(self, run_id):
    # Resolve any datetime wildcards (e.g. '{today}') embedded in the
    # run id, then propagate the resolved value to the config singleton
    # so '{run_id}' wildcards in config strings resolve consistently.
    logging.debug('filling in run_id wildcards')
    self._run_id = datetimeutils.fill_in_datetime_strings(run_id,
        today=self.today)
    Config().set_run_id(self._run_id)
def today(self, today):
    # Accept a string (possibly with wildcards) or datetime: first fill
    # in any datetime wildcards, then normalize to a datetime object,
    # and finally sync the config singleton's '{today}' wildcards.
    today = datetimeutils.fill_in_datetime_strings(today)
    today = datetimeutils.to_datetime(today)
    self._today = today
    Config().set_today(self._today)
def test_merge_configs(self, reset_config):
    """Exercises Config().merge()/set() layering semantics: keys are
    lower-cased, '{run_id}'/'{today}' wildcards are (re)filled whenever
    today/run_id change, and successive merges layer over DEFAULTS
    without mutating them."""
    Config().merge({"foo": {
        "A": "{run_id}-{today}",
        "b": 222,
        "c": 333,
        "d": 444
    }})
    # 'A' is normalized to 'a'; '{today}' resolves to the fixture date
    EXPECTED_RAW = dict(DEFAULTS, **{"foo": {
        "a": "{run_id}-{today}",
        "b": 222,
        "c": 333,
        "d": 444
    }})
    EXPECTED = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-20160420",
            "b": 222,
            "c": 333,
            "d": 444
        }
    })
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == None
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # setting today re-resolves the '{today}' wildcard from the raw config
    Config().set_today(datetime.datetime(2019, 2, 4))
    EXPECTED_RAW = dict(DEFAULTS, **{"foo": {
        "a": "{run_id}-{today}",
        "b": 222,
        "c": 333,
        "d": 444
    }})
    EXPECTED = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-20190204",
            "b": 222,
            "c": 333,
            "d": 444
        }
    })
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == datetime.datetime(2019, 2, 4)
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # second merge layers over the first ('{today-1}' resolves to the
    # day before the configured today)
    Config().merge({
        "foo": {
            "B": "{today-1}-{run_id}",
            "c": 3333,
            "d": 4444,
            "bb": "bb"
        },
        "BAR": {
            "b": "b"
        },
        "b": "b"
    })
    EXPECTED_RAW = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-{today}",
            "b": "{today-1}-{run_id}",
            "c": 3333,
            "d": 4444,
            "bb": "bb"
        },
        "bar": {
            "b": "b"
        },
        "b": "b"
    })
    EXPECTED = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-20190204",
            "b": "20190203-{run_id}",
            "c": 3333,
            "d": 4444,
            "bb": "bb"
        },
        "bar": {
            "b": "b"
        },
        "b": "b"
    })
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == datetime.datetime(2019, 2, 4)
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # third merge layers again
    Config().merge({
        "foo": {
            "c": 33333,
            "d": 44444,
            "cc": "cc"
        },
        "baz": {
            "c": "c"
        },
        "c": "c"
    })
    EXPECTED_RAW = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-{today}",
            "b": "{today-1}-{run_id}",
            "c": 33333,
            "d": 44444,
            "bb": "bb",
            "cc": "cc"
        },
        "bar": {
            "b": "b"
        },
        "baz": {
            "c": "c"
        },
        "b": "b",
        "c": "c"
    })
    EXPECTED = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-20190204",
            "b": "20190203-{run_id}",
            "c": 33333,
            "d": 44444,
            "bb": "bb",
            "cc": "cc"
        },
        "bar": {
            "b": "b"
        },
        "baz": {
            "c": "c"
        },
        "b": "b",
        "c": "c"
    })
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == datetime.datetime(2019, 2, 4)
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # individual Config().set calls (value first, then key path)
    Config().set("444444", 'foo', 'd')
    Config().set("dd", 'foo', 'dd')
    Config().set("d", 'boo', 'd')
    Config().set("d", 'd')
    Config().set(True, 'dbt')
    Config().set(False, 'dbf')
    Config().set(23, 'di')
    Config().set(123.23, 'df')
    Config().set('23', 'dci')
    Config().set('123.23', 'dcf')
    EXPECTED_RAW = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-{today}",
            "b": "{today-1}-{run_id}",
            "c": 33333,
            "bb": "bb",
            "cc": "cc",
            "dd": "dd",
            "d": "444444" # because it was set on command line
        },
        "bar": {
            "b": "b"
        },
        "baz": {
            "c": "c"
        },
        "boo": {
            "d": "d"
        },
        "b": "b",
        "c": "c",
        "d": "d",
        "dbt": True,
        "dbf": False,
        "di": 23,
        "df": 123.23,
        "dci": "23",
        "dcf": "123.23"
    })
    EXPECTED = dict(DEFAULTS, **{
        "foo": {
            "a": "{run_id}-20190204",
            "b": "20190203-{run_id}",
            "c": 33333,
            "bb": "bb",
            "cc": "cc",
            "dd": "dd",
            "d": "444444" # because it was set on command line
        },
        "bar": {
            "b": "b"
        },
        "baz": {
            "c": "c"
        },
        "boo": {
            "d": "d"
        },
        "b": "b",
        "c": "c",
        "d": "d",
        "dbt": True,
        "dbf": False,
        "di": 23,
        "df": 123.23,
        "dci": "23",
        "dcf": "123.23"
    })
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == datetime.datetime(2019, 2, 4)
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED
    # DEFAULTS must not have been mutated by any of the above
    assert self._ORIGINAL_DEFAULTS == DEFAULTS
def _initialize_run_id(self): self._maually_set_run_id = False # default to guid, but manual set will still be allowed self._run_id = str(uuid.uuid4()) Config().set_run_id(self._run_id)
def test_setting_config_run_id_today(self, reset_config):
    """Verifies that '{run_id}' and '{today[-N]:fmt}' wildcards embedded
    in config values are resolved against the current run_id/today, and
    re-resolved whenever either is (re)set; top-level keys are
    lower-cased."""
    # setting
    Config().set({"FOO": "{run_id}_{today-2:%Y%m%d}_bar", "bar": "baz"})
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == None
    EXPECTED_RAW = dict(DEFAULTS, foo="{run_id}_{today-2:%Y%m%d}_bar",
        bar="baz")
    EXPECTED = dict(DEFAULTS, foo="{run_id}_20160418_bar", bar="baz")
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # set today
    Config().set_today(datetime.datetime(2019, 1, 5, 10, 12, 1))
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == datetime.datetime(
        2019, 1, 5, 10, 12, 1)
    EXPECTED_RAW = dict(DEFAULTS, foo="{run_id}_{today-2:%Y%m%d}_bar",
        bar="baz")
    EXPECTED = dict(DEFAULTS, foo="{run_id}_20190103_bar", bar="baz")
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # set again; datetime wildcard should be filled in
    Config().set({"fOO": "{run_id}_{today:%Y%m%d%H}_bar", "bar": "sdfsdf"})
    assert Config()._data._RUN_ID == None
    assert Config()._data._TODAY == datetime.datetime(
        2019, 1, 5, 10, 12, 1)
    EXPECTED_RAW = dict(DEFAULTS, foo="{run_id}_{today:%Y%m%d%H}_bar",
        bar="sdfsdf")
    EXPECTED = dict(DEFAULTS, foo="{run_id}_2019010510_bar", bar="sdfsdf")
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # set run_id
    Config().set_run_id("abc123")
    assert Config()._data._RUN_ID == "abc123"
    assert Config()._data._TODAY == datetime.datetime(
        2019, 1, 5, 10, 12, 1)
    EXPECTED_RAW = dict(DEFAULTS, foo="{run_id}_{today:%Y%m%d%H}_bar",
        bar="sdfsdf")
    EXPECTED = dict(DEFAULTS, foo="abc123_2019010510_bar", bar="sdfsdf")
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # set again; datetime and run_id wildcards should be filled in
    Config().set({"foo": "FOO_{run_id}_{today:%Y%m%d%H}_bar", "bar": "zz"})
    assert Config()._data._RUN_ID == "abc123"
    assert Config()._data._TODAY == datetime.datetime(
        2019, 1, 5, 10, 12, 1)
    EXPECTED_RAW = dict(DEFAULTS, foo="FOO_{run_id}_{today:%Y%m%d%H}_bar",
        bar="zz")
    EXPECTED = dict(DEFAULTS, foo="FOO_abc123_2019010510_bar", bar="zz")
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED

    # set in individual values
    Config().set(100, "bar")
    Config().set(200, "BAAAR")
    Config().set("sdfsdf{run_id}", "baz", "a")
    Config().set("{run_id}", "BAZ", "b")
    assert Config()._data._RUN_ID == "abc123"
    assert Config()._data._TODAY == datetime.datetime(
        2019, 1, 5, 10, 12, 1)
    EXPECTED_RAW = dict(DEFAULTS, foo="FOO_{run_id}_{today:%Y%m%d%H}_bar",
        bar=100, baaar=200, baz={
            "a": "sdfsdf{run_id}",
            "b": "{run_id}"
        })
    EXPECTED = dict(DEFAULTS, foo="FOO_abc123_2019010510_bar",
        bar=100, baaar=200, baz={
            "a": "sdfsdfabc123",
            "b": "abc123"
        })
    assert Config()._data._RAW_CONFIG == EXPECTED_RAW
    assert Config()._data._CONFIG == EXPECTED
    assert Config().get() == EXPECTED
    # DEFAULTS must not have been mutated by any of the above
    assert self._ORIGINAL_DEFAULTS == DEFAULTS
def skip_failed_fires(self):
    """Whether failed fires should be skipped, coerced to a true boolean."""
    return bool(Config().get('skip_failed_fires'))
def setup(self):
    # Build an Estimator with truncation enabled: keep fuelbeds covering
    # at least 75% of the area, capped at 2 fuelbeds. Lookup is mocked.
    lookup = mock.Mock()
    Config().set(75.0, "fuelbeds", "truncation_percentage_threshold")
    Config().set(2, "fuelbeds", "truncation_count_threshold")
    self.estimator_w_options = fuelbeds.Estimator(lookup)
def config(self, *keys):
    """Convenience accessor for settings nested under
    'export' > <the current export mode>."""
    lookup_path = ('export', self._export_mode) + keys
    return Config().get(*lookup_path)
def disp_config(*keys):
    """Shortcut for reading settings under the 'dispersion' section."""
    lookup_path = ('dispersion',) + keys
    return Config().get(*lookup_path)
def _run_parallel(self, working_dir):
    """Run HYSPLIT over tranches of fires in parallel worker threads,
    then combine the per-tranche NetCDF outputs with NCO tools (ncea to
    sum PM2.5 fields, ncks to append the TFLAG time field).
    """
    # captured by the worker threads to call back into this instance
    runner = self

    class T(threading.Thread):
        # one worker per fire tranche; any exception is stashed on
        # self.exc so the main thread can re-raise it after join()
        def __init__(self, fires, config, working_dir, tranche_num):
            super(T, self).__init__()
            self.fires = fires
            self.config = config
            self.working_dir = working_dir
            if not os.path.exists(working_dir):
                os.makedirs(working_dir)
            self.tranche_num = tranche_num
            self.exc = None

        def run(self):
            # We need to set config to what was loaded in the main thread.
            # Otherwise, we'll just be using defaults
            Config().set(self.config)
            try:
                runner._run_process(self.fires, self.working_dir,
                    self.tranche_num)
            except Exception as e:
                self.exc = e

    fire_tranches = hysplit_utils.create_fire_tranches(self._fire_sets,
        self._num_processes, self._model_start, self._num_hours,
        self._grid_params)
    threads = []
    main_thread_config = Config().get()
    for nproc in range(len(fire_tranches)):
        fires = fire_tranches[nproc]
        # Note: no need to set _context.basedir; it will be set to workdir
        logging.info("Starting thread to run HYSPLIT on %d fires." % (
            len(fires)))
        # each tranche runs in its own numbered subdirectory
        t = T(fires, main_thread_config,
            os.path.join(working_dir, str(nproc)), nproc)
        t.start()
        threads.append(t)

    # If there were any exceptions, raise one of them after joining all threads
    exc = None
    for t in threads:
        t.join()
        if t.exc:
            exc = t.exc
    # TODO: just raise exception here, possibly before all threads have been joined?
    if exc:
        raise exc

    # 'ttl' is sum of values; see http://nco.sourceforge.net/nco.html#Operation-Types
    # sum together all the PM2.5 fields then append the TFLAG field from
    # one of the individual runs (they're all the same)
    # using run 0 as it should always be present regardless of how many
    # processes were used....
    # prevents ncea from adding all the TFLAGs together and mucking up the
    # date
    output_file = os.path.join(working_dir, self._output_file_name)
    #ncea_args = ["-y", "ttl", "-O"]
    ncea_args = ["-O","-v","PM25","-y","ttl"]
    ncea_args.extend(["%d/%s" % (i, self._output_file_name)
        for i in range(self._num_processes)])
    ncea_args.append(output_file)
    io.SubprocessExecutor().execute(self.BINARIES['NCEA'], *ncea_args,
        cwd=working_dir)

    # append TFLAG from run 0's output into the combined file
    ncks_args = ["-A","-v","TFLAG"]
    ncks_args.append("0/%s" % (self._output_file_name))
    ncks_args.append(output_file)
    io.SubprocessExecutor().execute(self.BINARIES['NCKS'], *ncks_args,
        cwd=working_dir)
    self._archive_file(output_file)
def disp_hysplit_config(*keys):
    """Shortcut for reading settings under 'dispersion' > 'hysplit'."""
    lookup_path = ('dispersion', 'hysplit') + keys
    return Config().get(*lookup_path)
"_apply_settings", "FuelLoadingsManager", "FuelConsumptionForEmissions", "CONSUME_FIELDS", "CONSUME_VERSION_STR" ] CONSUME_VERSION_STR = '.'.join([ str(v) for v in [ consume.version.MAJOR_VERSION, consume.version.MINOR_VERSION, consume.version.PYPI_BUILD_REVISION ] ]) SETTINGS = Config().get('consumption', 'consume_settings') # User can configure output_units SETTINGS['all']['output_units'] = { # The default in the consume package is 'tons_ac'. When we tried # setting it to 'tons' here, it still ended up being 'tons_ac' in # the consumption results. So, just set it to 'tons_ac' to avoid # confusion. # (We ultimately want tons, and so we end up multiplying by # acreage to get it. It would be nice if setting output_units to # tons worked.) # Note that setting output_units='tons' does behave as expected # when computing emissions. 'default': "tons_ac" } def _apply_settings(fc, location, burn_type):
def config(self, *keys, **kwargs):
    """Convenience accessor for settings nested under
    'dispersion' > <the active model>; kwargs pass through to
    Config().get."""
    lookup_path = ('dispersion', self._model) + keys
    return Config().get(*lookup_path, **kwargs)
def test_both_whitelist_or_blacklist_are_specified(self, reset_config):
    """Country filtering: specifying both whitelist and blacklist is an
    error (raised or skipped per 'filter' > 'skip_failures'); then a
    series of whitelist/blacklist configurations progressively narrows
    the fire set down to zero."""
    # both lists set -> FilterError when skip_failures is off
    Config().set(False, 'filter', 'skip_failures')
    Config().set(["ZZ"], 'filter', 'country', 'blacklist')
    Config().set(["YY"], 'filter', 'country', 'whitelist')
    with raises(fires.FireActivityFilter.FilterError) as e_info:
        self.fm.filter_fires()
    assert self.fm.num_fires == 14
    assert e_info.value.args[0] == fires.FireActivityFilter.SPECIFY_WHITELIST_OR_BLACKLIST_MSG
    # with skip_failures on, the bad config is ignored and nothing is filtered
    Config().set(True, 'filter', 'skip_failures')
    self.fm.filter_fires()
    assert self.fm.num_fires == 14
    assert self.init_fires == sorted(self.fm.fires, key=lambda e: int(e.id))

    # blacklist only ('ZZ' matches nothing with a country, but fires
    # lacking a valid country are dropped)
    Config().set(False, 'filter', 'skip_failures')
    Config().set(["ZZ"], 'filter', 'country', 'blacklist')
    Config().set(None, 'filter', 'country', 'whitelist')
    self.fm.filter_fires()
    expected = [
        fires.Fire({'id': '04', 'name': 'n4', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '05', 'name': 'n5', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "USA"}]}]}),
        fires.Fire({'id': '06', 'name': 'n6', 'bar1': 1 , 'baz':'baz1',
            "activity": [{"active_areas": [{'country': ''}]}]}),
        fires.Fire({'id': '07', 'name': 'n7', 'bar2':'a2', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '08', 'name': 'n8', 'bar2':'adfsdf', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '09', 'name': 'n9', 'bar2': 2 , 'baz':'baz2',
            "activity": [{"active_areas": [{'country': 'Unknown'}]}]}),
        fires.Fire({'id': '10', 'name': 'n10', "barj": "jj", "baz": 99,
            "activity": [{"active_areas": [{"country": "USA"}]}]}),
        fires.Fire({'id': '11', 'name': 'n11', "barj": "jj", "baz": 99,
            "activity": [{"active_areas": [{"country": "BZ"}]}]}),
        fires.Fire({'id': '12', 'name': 'n3', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '14', 'name': 'n3.5', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
    ]
    assert self.fm.num_fires == 10
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # whitelist only: keep USA/CA/UK/BZ fires
    Config().set(["USA", "CA", "UK", "BZ"], 'filter', 'country', 'whitelist')
    Config().set(None, 'filter', 'country', 'blacklist')
    self.fm.filter_fires()
    expected = [
        fires.Fire({'id': '04', 'name': 'n4', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '05', 'name': 'n5', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "USA"}]}]}),
        fires.Fire({'id': '07', 'name': 'n7', 'bar2':'a2', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '08', 'name': 'n8', 'bar2':'adfsdf', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '10', 'name': 'n10', "barj": "jj", "baz": 99,
            "activity": [{"active_areas": [{"country": "USA"}]}]}),
        fires.Fire({'id': '11', 'name': 'n11', "barj": "jj", "baz": 99,
            "activity": [{"active_areas": [{"country": "BZ"}]}]}),
        fires.Fire({'id': '12', 'name': 'n3', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '14', 'name': 'n3.5', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
    ]
    assert self.fm.num_fires == 8
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # blacklist USA
    Config().set(["USA"], 'filter', 'country', 'blacklist')
    Config().set(None, 'filter', 'country', 'whitelist')
    self.fm.filter_fires()
    expected = [
        fires.Fire({'id': '04', 'name': 'n4', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '07', 'name': 'n7', 'bar2':'a2', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '08', 'name': 'n8', 'bar2':'adfsdf', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '11', 'name': 'n11', "barj": "jj", "baz": 99,
            "activity": [{"active_areas": [{"country": "BZ"}]}]}),
        fires.Fire({'id': '12', 'name': 'n3', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '14', 'name': 'n3.5', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
    ]
    assert self.fm.num_fires == 6
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # whitelist USA/CA/UK (drops the BZ fire)
    Config().set(["USA", "CA", "UK"], 'filter', 'country', 'whitelist')
    Config().set(None, 'filter', 'country', 'blacklist')
    self.fm.filter_fires()
    expected = [
        fires.Fire({'id': '04', 'name': 'n4', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '07', 'name': 'n7', 'bar2':'a2', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '08', 'name': 'n8', 'bar2':'adfsdf', 'baz':'baz2',
            "activity": [{"active_areas": [{'country': "CA"}]}]}),
        fires.Fire({'id': '12', 'name': 'n3', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '14', 'name': 'n3.5', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
    ]
    assert self.fm.num_fires == 5
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # blacklist USA/CA (only UK fires remain)
    Config().set(["USA", "CA"], 'filter', 'country', 'blacklist')
    Config().set(None, 'filter', 'country', 'whitelist')
    self.fm.filter_fires()
    expected = [
        fires.Fire({'id': '04', 'name': 'n4', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '12', 'name': 'n3', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
        fires.Fire({'id': '14', 'name': 'n3.5', 'bar1':'a1', 'baz':'baz1',
            "activity": [{"active_areas": [{'country': "UK"}]}]}),
    ]
    assert self.fm.num_fires == 3
    assert expected == self.fm.fires

    # blacklist UK/CA squeezes out everything
    Config().set(["UK", "CA"], 'filter', 'country', 'blacklist')
    Config().set(None, 'filter', 'country', 'whitelist')
    self.fm.filter_fires()
    assert self.fm.num_fires == 0
    assert [] == self.fm.fires
    # call again with no fires
    self.fm.filter_fires()
    assert self.fm.num_fires == 0
    assert [] == self.fm.fires
# stdlib
import logging  # was missing: logging.debug is called in run(), below
import random
from collections import defaultdict
from functools import reduce

# third-party
import fccsmap
from fccsmap.lookup import FccsLookUp

# local
from bluesky.config import Config

__all__ = ['run']

__version__ = "0.1.0"

# Per-region FccsLookUp instances, keyed by state code; anything other
# than 'AK' falls back to the lazily-created non-Alaska lookup.
# TODO: set is_alaska based on lat & lng instead from 'state'
FCCS_LOOKUPS = defaultdict(
    lambda: FccsLookUp(is_alaska=False, **Config().get('fuelbeds')),
    AK=FccsLookUp(is_alaska=True, **Config().get('fuelbeds')))

def run(fires_manager):
    """Runs fuelbeds module

    Args:
     - fires_manager -- bluesky.models.fires.FiresManager object

    Records this module (and the fccsmap version used) in the fires
    manager's processing history.
    """
    fires_manager.processed(__name__, __version__,
        fccsmap_version=fccsmap.__version__)
    logging.debug('Using FCCS version %s',
        Config().get('fuelbeds', 'fccs_version'))
def test_successful_filtering(self, reset_config):
    """Location-boundary filtering: repeatedly shrink the configured
    lat/lng bounding box and check which fires survive each pass.

    TODO: split this method into separate test cases
    """
    # squeeze sw lat
    Config().set({"ne": {"lat": 88.12, "lng": 40},
        "sw": {"lat": -5.75,"lng": -131.5}},
        'filter', 'location', 'boundary')
    expected = [
        fires.Fire({'id': '1', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -80.0}]}]}]}),
        fires.Fire({'id': '2', 'activity': [{'active_areas': [{'specified_points':[{'lat': 45.0, 'lng': -81.0}, {'lat': 55.0, 'lng': -79.0}]}]}]}),
        fires.Fire({'id': '3', 'activity': [{'active_areas': [{'specified_points':[{'lat': 60.0, 'lng': -62.0}]}]}]}),
        fires.Fire({'id': '4', 'activity': [{'active_areas': [{'perimeter': {'polygon': [[-61, 71], [-61, 69], [-59, 69], [-59, 71], [-61, 71]]}}]}]}),
        fires.Fire({'id': '5', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '6', 'activity': [{'active_areas': [{'specified_points':[{'lat': 61.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '7', 'activity': [{'active_areas': [{'perimeter': {'polygon': [[-51,61], [-49, 61], [-49, 59], [-51, 59], [-51, 61]]}}]}]}),
        fires.Fire({'id': '8', 'activity': [{'active_areas': [{'specified_points':[{'lat': 70.0, 'lng': -120.0}]}]}]}),
        fires.Fire({'id': '10', 'activity': [{'active_areas': [{'specified_points': [{'lat': 40.0, 'lng': -80.0}]}]}]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 9
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # squeeze sw lng (drops fire 8 at lng -120.0)
    Config().set({"ne": {"lat": 88.12, "lng": 40},
        "sw": {"lat": -5.75,"lng": -110.5}},
        'filter', 'location', 'boundary')
    expected = [
        fires.Fire({'id': '1', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -80.0}]}]}]}),
        fires.Fire({'id': '2', 'activity': [{'active_areas': [{'specified_points':[{'lat': 45.0, 'lng': -81.0}, {'lat': 55.0, 'lng': -79.0}]}]}]}),
        fires.Fire({'id': '3', 'activity': [{'active_areas': [{'specified_points':[{'lat': 60.0, 'lng': -62.0}]}]}]}),
        fires.Fire({'id': '4', 'activity': [{'active_areas': [{'perimeter': {'polygon': [[-61, 71], [-61, 69], [-59, 69], [-59, 71], [-61, 71]]}}]}]}),
        fires.Fire({'id': '5', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '6', 'activity': [{'active_areas': [{'specified_points':[{'lat': 61.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '7', 'activity': [{'active_areas': [{'perimeter': {'polygon': [[-51,61], [-49, 61], [-49, 59], [-51, 59], [-51, 61]]}}]}]}),
        fires.Fire({'id': '10', 'activity': [{'active_areas': [{'specified_points': [{'lat': 40.0, 'lng': -80.0}]}]}]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 8
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # squeeze ne lat (drops the perimeter fire 4 around lat ~70)
    Config().set({"ne": {"lat": 66.12, "lng": 40},
        "sw": {"lat": -5.75,"lng": -110.5}},
        'filter', 'location', 'boundary')
    expected = [
        fires.Fire({'id': '1', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -80.0}]}]}]}),
        fires.Fire({'id': '2', 'activity': [{'active_areas': [{'specified_points':[{'lat': 45.0, 'lng': -81.0}, {'lat': 55.0, 'lng': -79.0}]}]}]}),
        fires.Fire({'id': '3', 'activity': [{'active_areas': [{'specified_points':[{'lat': 60.0, 'lng': -62.0}]}]}]}),
        fires.Fire({'id': '5', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '6', 'activity': [{'active_areas': [{'specified_points':[{'lat': 61.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '7', 'activity': [{'active_areas': [{'perimeter': {'polygon': [[-51,61], [-49, 61], [-49, 59], [-51, 59], [-51, 61]]}}]}]}),
        fires.Fire({'id': '10', 'activity': [{'active_areas': [{'specified_points': [{'lat': 40.0, 'lng': -80.0}]}]}]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 7
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # squeeze ne lng (drops the perimeter fire 7 around lng ~-50)
    Config().set({"ne": {"lat": 66.12, "lng": -55},
        "sw": {"lat": -5.75,"lng": -110.5}},
        'filter', 'location', 'boundary')
    expected = [
        fires.Fire({'id': '1', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -80.0}]}]}]}),
        fires.Fire({'id': '2', 'activity': [{'active_areas': [{'specified_points':[{'lat': 45.0, 'lng': -81.0}, {'lat': 55.0, 'lng': -79.0}]}]}]}),
        fires.Fire({'id': '3', 'activity': [{'active_areas': [{'specified_points':[{'lat': 60.0, 'lng': -62.0}]}]}]}),
        fires.Fire({'id': '5', 'activity': [{'active_areas': [{'specified_points':[{'lat': 40.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '6', 'activity': [{'active_areas': [{'specified_points':[{'lat': 61.0, 'lng': -60.0}]}]}]}),
        fires.Fire({'id': '10', 'activity': [{'active_areas': [{'specified_points': [{'lat': 40.0, 'lng': -80.0}]}]}]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 6
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # squeeze ne lng (tight box around fire 3 only)
    Config().set({"ne": {"lat": 63.12, "lng": -61},
        "sw": {"lat": 58.75,"lng": -62}},
        'filter', 'location', 'boundary')
    expected = [
        fires.Fire({'id': '3', 'activity': [{'active_areas': [{'specified_points':[{'lat': 60.0, 'lng': -62.0}]}]}]})
    ]
    self.fm.filter_fires()
    assert self.fm.num_fires == 1
    assert expected == sorted(self.fm.fires, key=lambda e: int(e.id))

    # squeeze out last fire
    Config().set({"ne": {"lat": 63.12, "lng": -61},
        "sw": {"lat": 60.75,"lng": -62}},
        'filter', 'location', 'boundary')
    self.fm.filter_fires()
    assert self.fm.num_fires == 0
    assert [] == sorted(self.fm.fires, key=lambda e: int(e.id))

    # call again with no fires
    self.fm.filter_fires()
    assert self.fm.num_fires == 0
    assert [] == sorted(self.fm.fires, key=lambda e: int(e.id))
def run(self):
    # Propagate the parent's config into this thread (the config
    # singleton otherwise starts from defaults here), then poll
    # progress every 5 seconds until self.terminate is set.
    Config().set(self.config)
    while not self.terminate:
        self.check_progress()
        time.sleep(5)
def setup(self):
    # Select the 'sev' plumerise model *before* instantiating
    # FiresManager, which reads the global config.
    Config().set('sev', 'plumerise', 'model')
    self.fm = FiresManager()