def read_yaml_config(fname):
    """Read YAML config file"""
    # Read config
    with open(fname, "r") as fid:
        config = ordered_load(fid)

    return config
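# Illustrative sketch (not part of the original module): ordered_load()
# accepts any file-like object and returns an OrderedDict, so section and
# key order from the YAML file is preserved. The helper name and the YAML
# content below are hypothetical.
def _example_read_ordered_config():
    from io import StringIO
    yaml_text = "config:\n  item_1: 1\n  item_2: 2\n"
    config = ordered_load(StringIO(yaml_text))
    # Keys come back in file order because ordered_load builds OrderedDicts
    assert list(config["config"]) == ["item_1", "item_2"]
    return config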
def test_ordered_load(self):
    fid = StringIO(self.yaml_config)
    res = utils.ordered_load(fid)
    fid.close()
    self.assertEqual(list(res.keys())[0], "config")
    keys = list(res["config"].keys())
    self.assertEqual(keys[0], "item_1")
    self.assertEqual(res["config"][keys[0]], 1)
    self.assertEqual(keys[1], "item_2")
    self.assertEqual(res["config"][keys[1]], 2)
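# Illustrative stand-in (not the original constant): the test above reads
# ``self.yaml_config`` from its test class. A value consistent with the
# assertions is a two-item mapping under a top-level "config" key.
EXAMPLE_YAML_CONFIG = """config:
  item_1: 1
  item_2: 2
"""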
def invoke(self, context):
    """Invoke."""
    # Set locking status, default to False
    self.use_lock = context.get("use_lock", False)
    self.logger.debug("Locking is used in compositor: %s",
                      str(self.use_lock))
    if self.use_lock:
        self.logger.debug("Compositor acquires lock of previous "
                          "worker: %s", str(context["prev_lock"]))
        utils.acquire_lock(context["prev_lock"])

    instruments = context.get("instruments", None)
    if instruments is None:
        utils.release_locks([context["lock"], context["prev_lock"]],
                            log=self.logger.error,
                            log_msg="No instruments configured!")
        return

    readers = context.get("readers", None)

    with open(context["product_list"], "r") as fid:
        product_config = ordered_load(fid)

    msg = deepcopy(context['content'])
    for key, val in context.items():
        if key.startswith('ignore_') and val is True:
            msg.data.pop(key[7:], None)

    # Rename the instrument in the message if an alias is given for it
    instrument_aliases = context.get("instrument_aliases", {})
    if instrument_aliases:
        orig_sensor = msg.data['sensor']
        if isinstance(orig_sensor, list):
            orig_sensor = orig_sensor[0]
        sensor = instrument_aliases.get(orig_sensor, orig_sensor)
        if sensor != orig_sensor:
            msg.data['sensor'] = sensor
            self.logger.info(
                "Adjusted message instrument name from %s to %s",
                orig_sensor, sensor)

    global_data = self.create_scene_from_message(msg, instruments,
                                                 readers=readers)
    if global_data is None:
        utils.release_locks([context["lock"], context["prev_lock"]],
                            log=self.logger.info,
                            log_msg="Unable to create Scene, " +
                            "skipping data")
        return

    monitor_topic = context.get("monitor_topic", None)
    if monitor_topic is not None:
        nameservers = context.get("nameservers", None)
        port = context.get("port", 0)
        service = context.get("service", None)
        monitor_metadata = utils.get_monitor_metadata(msg.data,
                                                      status="start",
                                                      service=service)
        utils.send_message(monitor_topic, "monitor", monitor_metadata,
                           nameservers=nameservers, port=port)

    # TODO: add usage of external calibration coefficients
    # use_extern_calib = product_config["common"].get("use_extern_calib",
    #                                                 "False")

    process_by_area = product_config["common"].get("process_by_area",
                                                   True)

    # Set lock if locking is used
    if self.use_lock:
        self.logger.debug("Compositor acquires own lock %s",
                          str(context["lock"]))
        utils.acquire_lock(context["lock"])

    for area_id in product_config["product_list"]:
        extra_metadata = {}
        # Check if the data was collected for specific area
        if "collection_area_id" in msg.data:
            if area_id != msg.data["collection_area_id"]:
                utils.release_locks([context["lock"]],
                                    log=self.logger.debug,
                                    log_msg="Collection not for this " +
                                    "area, skipping")
                continue

        # Load and unload composites for this area
        composites = self.load_composites(global_data, product_config,
                                          area_id)

        extra_metadata['products'] = composites
        extra_metadata['area_id'] = area_id

        context["output_queue"].put({'scene': global_data,
                                     'extra_metadata': extra_metadata})
        if process_by_area:
            context["output_queue"].put(None)

    # Add "terminator" to the queue to trigger computations for
    # this global scene, if not already done
    if not process_by_area:
        context["output_queue"].put(None)

    if utils.release_locks([context["lock"]]):
        self.logger.debug("Compositor releases own lock %s",
                          str(context["lock"]))
        # Wait 1 second to ensure next worker has time to acquire the
        # lock
        time.sleep(1)

    del global_data
    global_data = None

    # Wait until the lock has been released downstream
    if self.use_lock:
        utils.acquire_lock(context["lock"])
        utils.release_locks([context["lock"]])

    if monitor_topic is not None:
        monitor_metadata = utils.get_monitor_metadata(msg.data,
                                                      status="completed",
                                                      service=service)
        utils.send_message(monitor_topic, "monitor", monitor_metadata,
                           nameservers=nameservers, port=port)

    # After all the items have been processed, release the lock for
    # the previous step
    utils.release_locks([context["prev_lock"]], log=self.logger.debug,
                        log_msg="Compositor releases lock of previous "
                        "worker")
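# Illustrative sketch (not from the original source): the context keys that
# invoke() above reads, with made-up placeholder values. "content" is the
# incoming message whose .data dict is inspected; any key named
# "ignore_<field>" set to True drops <field> from the message data before
# processing.
EXAMPLE_COMPOSITOR_CONTEXT = {
    "content": None,               # incoming message; msg.data is used
    "product_list": "/path/to/product_list.yaml",   # hypothetical path
    "output_queue": None,          # queue shared with the next worker
    "instruments": ["seviri"],     # required, otherwise invoke() returns early
    "readers": None,               # optional reader name(s) for Scene creation
    "instrument_aliases": {},      # optional sensor-name remapping
    "use_lock": False,             # enable the lock/prev_lock handshake
    "lock": None,                  # this worker's lock (used when use_lock)
    "prev_lock": None,             # previous worker's lock
    "monitor_topic": None,         # optional monitoring topic
    "nameservers": None,           # monitoring publisher settings
    "port": 0,
    "service": None,
}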
def _process(self, context):
    """Process a context."""
    glbl = context["content"]["scene"]
    extra_metadata = context["content"]["extra_metadata"]

    with open(context["product_list"], "r") as fid:
        product_config = ordered_load(fid)

    # Handle config options
    kwargs = {}

    kwargs['mask_area'] = context.get('mask_area', True)
    self.logger.debug("Setting area masking to %s",
                      str(kwargs['mask_area']))

    kwargs['nprocs'] = context.get('nprocs', 1)
    self.logger.debug("Using %d CPUs for resampling.", kwargs['nprocs'])

    kwargs['resampler'] = context.get('resampler', "nearest")
    self.logger.debug("Using resampling method: '%s'.",
                      kwargs['resampler'])

    try:
        kwargs['cache_dir'] = context['cache_dir']
        self.logger.debug("Setting projection cache dir to %s",
                          kwargs['cache_dir'])
    except (AttributeError, KeyError):
        pass

    prod_list = product_config["product_list"]

    # Overpass for coverage calculations
    scn_metadata = glbl.attrs
    if product_config['common'].get('coverage_check', True) and Pass:
        overpass = Pass(scn_metadata['platform_name'],
                        scn_metadata['start_time'],
                        scn_metadata['end_time'],
                        instrument=scn_metadata['sensor'][0])
    else:
        overpass = None

    # Get the area ID from metadata dict
    area_id = extra_metadata['area_id']

    # Check for area coverage
    if overpass is not None:
        min_coverage = prod_list[area_id].get("min_coverage", 0.0)
        if not utils.covers(overpass, area_id, min_coverage,
                            self.logger):
            return

    kwargs['radius_of_influence'] = None
    try:
        area_config = product_config["product_list"][area_id]
        kwargs['radius_of_influence'] = \
            area_config.get("srch_radius", context["radius"])
    except (AttributeError, KeyError):
        kwargs['radius_of_influence'] = 10000.

    if kwargs['radius_of_influence'] is None:
        self.logger.debug("Using default search radius.")
    else:
        self.logger.debug("Using search radius %d meters.",
                          int(kwargs['radius_of_influence']))

    # Set lock if locking is used
    if self.use_lock:
        self.logger.debug("Resampler acquires own lock %s",
                          str(context["lock"]))
        utils.acquire_lock(context["lock"])

    if area_id == "satproj":
        self.logger.info("Using satellite projection")
        lcl = glbl
    else:
        metadata = glbl.attrs
        self.logger.info("Resampling time slot %s to area %s",
                         metadata["start_time"], area_id)
        lcl = glbl.resample(area_id, **kwargs)

    # Add area ID to the scene attributes so everything needed
    # in filename composing is in the same dictionary
    lcl.attrs["area_id"] = area_id

    metadata = extra_metadata.copy()
    metadata["product_config"] = product_config
    metadata["products"] = prod_list[area_id]['products']

    self.logger.debug("Inserting lcl (area: %s, start_time: %s) "
                      "to writer's queue", area_id,
                      str(scn_metadata["start_time"]))
    context["output_queue"].put({'scene': lcl,
                                 'extra_metadata': metadata})

    if utils.release_locks([context["lock"]]):
        self.logger.debug("Resampler releases own lock %s",
                          str(context["lock"]))
        # Wait 1 second to ensure next worker has time to acquire the
        # lock
        time.sleep(1)

    # Wait until the lock has been released downstream
    if self.use_lock:
        utils.acquire_lock(context["lock"])
        utils.release_locks([context["lock"]])

    del lcl
    lcl = None
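# Illustrative sketch (not shipped with the package): the parts of the
# product-list YAML that _process() above reads, written as an inline string
# so it could be parsed with ordered_load(). The area and product names are
# made up; "satproj" is the special area_id that skips resampling.
EXAMPLE_RESAMPLER_PRODUCT_LIST = """
common:
  coverage_check: True      # compute swath coverage via trollsched's Pass
product_list:
  euro4:                    # any area_id the resampler can resolve
    min_coverage: 10.0      # minimum coverage accepted by utils.covers()
    srch_radius: 20000      # used as radius_of_influence for resampling
    products:
      overview: {}          # per-product settings consumed by later steps
"""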
class TestUtils(unittest.TestCase):

    config = ordered_load(StringIO(CONFIG_MINIMAL))
    info = {'time': dt.datetime(2016, 11, 7, 12, 0),
            'platform_name': 'Meteosat-10',
            'area_id': 'EPSG4326',
            'productname': 'dummy',
            'sensor': ['seviri']}

    def test_create_fnames(self):
        # Absolute minimum config
        fnames, prod_name = utils.create_fnames(self.info, self.config,
                                                "image_compositor_name")
        self.assertEqual(fnames[0],
                         "20161107_1200_Meteosat-10_EPSG4326_dummy.png")
        self.assertEqual(prod_name, 'dummy')

        # Add output_dir
        self.config["common"]["output_dir"] = "/tmp/{time:%Y/%m/%d}"
        fnames, prod_name = utils.create_fnames(self.info, self.config,
                                                "image_compositor_name")
        self.assertEqual(fnames[0],
                         "/tmp/2016/11/07/" +
                         "20161107_1200_Meteosat-10_EPSG4326_dummy.png")

        # Add filename pattern
        self.config["common"]["fname_pattern"] = \
            "{time:%Y_%m_%d_%H_%M}_asd.{format}"
        fnames, prod_name = utils.create_fnames(self.info, self.config,
                                                "image_compositor_name")
        self.assertEqual(fnames[0],
                         "/tmp/2016/11/07/" + "2016_11_07_12_00_asd.png")

        # Add file formats
        self.config["common"]["formats"] = [OrderedDict([('format', 'png')]),
                                            OrderedDict([('format', 'tif')])]
        fnames, prod_name = utils.create_fnames(self.info, self.config,
                                                "image_compositor_name")
        self.assertEqual(fnames[0],
                         "/tmp/2016/11/07/" + "2016_11_07_12_00_asd.png")
        self.assertEqual(fnames[1],
                         "/tmp/2016/11/07/" + "2016_11_07_12_00_asd.tif")

        # Change filename pattern to one where "time" is changed to
        # "satellite_time"
        self.config["common"]["fname_pattern"] = \
            "{satellite_time:%Y_%m_%d_%H_%M}_asd.{format}"
        fnames, prod_name = utils.create_fnames(self.info, self.config,
                                                "image_compositor_name")
        self.assertEqual(fnames[0],
                         "/tmp/2016/11/07/" + "2016_11_07_12_00_asd.png")

        # Change metadata so that the time name doesn't match pattern
        self.info['start_time'] = self.info['time']
        del self.info['time']
        fnames, prod_name = utils.create_fnames(self.info, self.config,
                                                "image_compositor_name")
        self.assertEqual(fnames[0],
                         "/tmp/2016/11/07/" + "2016_11_07_12_00_asd.png")

    def test_get_data_time_from_message_data(self):
        msg = {'time': 'foo'}
        res = utils._get_data_time_from_message_data(msg)
        self.assertEqual(res, 'foo')
        msg = {'nominal_time': 'foo'}
        res = utils._get_data_time_from_message_data(msg)
        self.assertEqual(res, 'foo')
        msg = {'start_time': 'foo'}
        res = utils._get_data_time_from_message_data(msg)
        self.assertEqual(res, 'foo')
        msg = {}
        res = utils._get_data_time_from_message_data(msg)
        self.assertIsNone(res)

    def test_get_orbit_number_from_message_data(self):
        msg = {"orbit_number": 42}
        res = utils._get_orbit_number_from_message_data(msg)
        self.assertEqual(res, 42)
        msg = {}
        res = utils._get_orbit_number_from_message_data(msg)
        self.assertIsNone(res)

    def test_get_monitor_metadata(self):
        msg = Message('/topic', 'monitor', self.info)
        res = utils.get_monitor_metadata(msg, status='foo')
        self.assertEqual(res['message_time'], msg.time)
        self.assertEqual(res['data_time'], self.info["start_time"])
        self.assertEqual(res['platform_name'], self.info["platform_name"])
        self.assertEqual(res['sensor'], self.info["sensor"])
        self.assertIsNone(res['orbit_number'])
        self.assertEqual(res['status'], 'foo')

    def test_select_dict_items(self):
        info = {"a": "a_value",
                "b": "b_value",
                "c": [{"c_a": "c1_a_value", "c_b": "c1_b_value"},
                      {"c_a": "c2_a_value", "c_b": "c2_b_value"}]}
        selection = {'b_new': 'b'}
        res = utils.select_dict_items(info, selection)
        self.assertEqual(res, {'b_new': 'b_value'})
        selection = {'b_new': 'b', 'b2': 'a'}
        res = utils.select_dict_items(info, selection)
        self.assertEqual(res, {'b_new': 'b_value', 'b2': 'a_value'})
        selection = {'cXa': '/c/*/c_a', 'a': 'a'}
        res = utils.select_dict_items(info, selection)
        self.assertEqual(res, {'cXa': ['c1_a_value', 'c2_a_value'],
                               'a': 'a_value'})
        selection = [1, 2, 3]
        res = utils.select_dict_items(info, selection)
        self.assertEqual(res, {1: 1, 2: 2, 3: 3})
        selection = {'a': '/a'}
        res = utils.select_dict_items(info, selection)
        self.assertEqual(res, {'a': 'a_value'})

    def test_get_format_settings(self):
        from trollflow_sat.tests.utils import PRODUCT_LIST_TWO_AREAS
        res = utils.get_format_settings(PRODUCT_LIST_TWO_AREAS, 'overview',
                                        'area2')
        self.assertTrue('writer' in res[0])
        self.assertTrue('format' in res[0])
        self.assertTrue('fill_value' in res[0])

    @patch('trollflow_sat.utils.astronomy.sun_zenith_angle')
    def test_bad_sunzen_range(self, sun_zenith_angle):
        from trollflow_sat.tests.utils import PRODUCT_LIST_TWO_AREAS
        prod_conf = PRODUCT_LIST_TWO_AREAS.copy()

        # No sunzen limits configured
        res = utils.bad_sunzen_range(prod_conf, 'area2', 'overview',
                                     'start_time')
        self.assertFalse(res)

        # Limit given for night product
        prod_conf = PRODUCT_LIST_TWO_AREAS.copy()
        prod = prod_conf['product_list']['area2']['products']['overview']
        prod['sunzen_night_minimum'] = 90.
        prod['sunzen_lon'] = 0.
        prod['sunzen_lat'] = 0.
        sun_zenith_angle.return_value = 85.
        res = utils.bad_sunzen_range(prod_conf, 'area2', 'overview',
                                     'start_time')
        self.assertTrue(sun_zenith_angle.called)
        self.assertTrue(res)
        sun_zenith_angle.return_value = 95.
        res = utils.bad_sunzen_range(prod_conf, 'area2', 'overview',
                                     'start_time')
        self.assertFalse(res)

        # Limit given for day product
        del prod['sunzen_night_minimum']
        prod['sunzen_day_maximum'] = 90.
        sun_zenith_angle.return_value = 95.
        res = utils.bad_sunzen_range(prod_conf, 'area2', 'overview',
                                     'start_time')
        self.assertTrue(res)
        sun_zenith_angle.return_value = 85.
        res = utils.bad_sunzen_range(prod_conf, 'area2', 'overview',
                                     'start_time')
        self.assertFalse(res)

        # No limits given
        del prod['sunzen_day_maximum']
        res = utils.bad_sunzen_range(prod_conf, 'area2', 'overview',
                                     'start_time')
        self.assertFalse(res)

    @patch('trollflow_sat.utils.Publish')
    def test_send_message(self, Publish):
        pub = Mock()
        Publish.return_value.__enter__.return_value = pub
        utils.send_message('topic', 'file', {}, nameservers=None)
        self.assertTrue(pub.send.called)
        pub = Mock()
        Publish.return_value.__enter__.return_value = pub
        utils.send_message('topic', 'file', {}, nameservers='foo')
        self.assertTrue(pub.send.called)

    @patch('trollflow_sat.utils.release_lock')
    def test_release_locks(self, release_lock):
        release_lock.return_value = 1
        log = Mock()
        lock = None
        res = utils.release_locks(lock, log=None, log_msg=None)
        self.assertTrue(release_lock.called)
        res = utils.release_locks(lock, log=log, log_msg=None)
        self.assertFalse(log.called)
        res = utils.release_locks(lock, log=log, log_msg='message')
        self.assertTrue(log.called)

    @patch('trollflow_sat.utils.trollflow_acquire_lock')
    def test_acquire_lock(self, trollflow_acquire_lock):
        trollflow_acquire_lock.return_value = 'foo'
        res = utils.acquire_lock(None)
        self.assertEqual(res, 'foo')
        self.assertTrue(trollflow_acquire_lock.called)

    @patch('trollflow_sat.utils.get_area_def')
    def test_covers(self, get_area_def):
        overpass = Mock()
        logger = Mock()
        res = utils.covers(overpass, 'area1', 0, logger)
        self.assertTrue(res)
        self.assertFalse(logger.info.called)
        overpass.area_coverage.return_value = 0
        res = utils.covers(overpass, 'area1', 10, logger)
        self.assertFalse(res)
        self.assertTrue(logger.info.called)
        overpass.area_coverage.return_value = 100
        res = utils.covers(overpass, 'area1', 10, logger)
        self.assertTrue(res)
        self.assertTrue(logger.info.called)
        overpass.area_coverage.side_effect = AttributeError
        res = utils.covers(overpass, 'area1', 10, logger)
        self.assertTrue(logger.warning.called)

    def test_add_overviews(self):
        r_open = Mock()
        rasterio = Mock(RasterioIOError=BaseException, open=r_open)
        import sys
        sys.modules['rasterio'] = rasterio
        from trollflow_sat.utils import add_overviews
        logger = Mock()
        fnames = ['a', 'b', 'c']
        overviews = None
        add_overviews(fnames, overviews, logger=logger)
        r_open.has_calls([call('a', 'r+'), call('b', 'r+'), call('c', 'r+')])
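# Conventional unittest entry point, assuming the test classes above live in
# a standalone test module:
if __name__ == '__main__':
    unittest.main()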