Code example #1
    def test_pass_instrument_interface(self):

        tstart = datetime(2018, 10, 16, 2, 48, 29)
        tend = datetime(2018, 10, 16, 3, 2, 38)

        instruments = set(('viirs', 'avhrr', 'modis'))
        overp = Pass('NOAA-20',
                     tstart,
                     tend,
                     orb=self.n20orb,
                     instrument=instruments)
        self.assertEqual(overp.instrument, 'avhrr')

        instruments = set(('viirs', 'modis'))
        overp = Pass('NOAA-20',
                     tstart,
                     tend,
                     orb=self.n20orb,
                     instrument=instruments)
        self.assertEqual(overp.instrument, 'viirs')

        instruments = set(('amsu-a', 'mhs'))
        self.assertRaises(TypeError,
                          Pass,
                          'NOAA-20',
                          tstart,
                          tend,
                          orb=self.n20orb,
                          instrument=instruments)
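
The examples on this page all exercise the same small interface: a Pass is built from a platform name, start and end times, an instrument name, and either an Orbital object (orb=) or a pair of TLE lines (tle1=/tle2=), and is then queried with methods such as area_coverage(). A minimal sketch of that pattern, assuming Pass comes from trollsched.satpass and that a local areas.yaml defines the area ID used below:

    # Minimal sketch of the Pass interface used throughout these examples.
    # The import paths and the areas.yaml file/area ID are assumptions; the
    # TLE lines are the FENGYUN 3D set reused in examples #2, #11 and #14.
    from datetime import datetime, timedelta

    from pyresample import load_area
    from trollsched.satpass import Pass

    tle1 = '1 43010U 17072A   18363.54078832 -.00000045  00000-0 -79715-6 0  9999'
    tle2 = '2 43010  98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158'

    tstart = datetime(2019, 1, 5, 1, 1, 45)
    tend = tstart + timedelta(seconds=60 * 15.5)

    # Build the overpass directly from the two TLE lines instead of the
    # orb= keyword used in example #1.
    overpass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi2',
                    tle1=tle1, tle2=tle2)

    # Fraction (0..1) of the assumed 'euron1' area covered by the swath.
    euron1 = load_area('areas.yaml', 'euron1')
    print(overpass.area_coverage(euron1))
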
Code example #2
    def test_generate_metno_xml(self):
        import xml.etree.ElementTree as ET
        root = ET.Element("acquisition-schedule")

        orig = (
            '<acquisition-schedule><pass satellite="FENGYUN 3D" aos="20190105010145" los="20190105011715" '
            'orbit="5907" max-elevation="52.943" asimuth-at-max-elevation="107.385" asimuth-at-aos="18.555" '
            'pass-direction="D" satellite-lon-at-aos="76.204" satellite-lat-at-aos="80.739" '
            'tle-epoch="20181229125844.110848" /></acquisition-schedule>')

        tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S")
        tend = tstart + timedelta(seconds=60 * 15.5)

        tle1 = '1 43010U 17072A   18363.54078832 -.00000045  00000-0 -79715-6 0  9999'
        tle2 = '2 43010  98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158'

        mypass = Pass('FENGYUN 3D',
                      tstart,
                      tend,
                      instrument='mersi2',
                      tle1=tle1,
                      tle2=tle2)

        coords = (10.72, 59.942, 0.1)
        mypass.generate_metno_xml(coords, root)

        self.assertEqual(ET.tostring(root).decode("utf-8"), orig)
Code example #3
    def find_sectors(self):
        """Identify sectors with at least some coverage by the provided scene.

        Returns
        -------
        list
            area_id of each sector with some coverage.
        """
        data = self.message.data
        overpass = Pass(
            data["platform_name"],
            self.scene.start_time,
            self.scene.end_time,
            instrument="viirs",
        )
        logger.debug(f"Created overpass {overpass}")
        logger.debug(f"args: {data['platform_name']} :: "
                     "{self.scene.start_time} :: {self.scene.end_time}")
        sectors = []
        coverage_threshold = float(
            tutil.get_env_var("COVERAGE_THRESHOLD", 0.1))
        for sector_def in parse_area_file(AREA_DEF):
            logger.debug("Checking coverage for %s", sector_def.area_id)
            coverage = overpass.area_coverage(sector_def)
            logger.debug("{} coverage: {}".format(sector_def.area_id,
                                                  coverage))
            if coverage > coverage_threshold:
                sectors.append(sector_def)
        return sectors
Code example #4
def _granule_covers_region(granule_metadata, region):
    granule_pass = Pass(_get_platform_name(granule_metadata),
                        granule_metadata["start_time"],
                        granule_metadata["end_time"],
                        instrument=_get_sensor(granule_metadata))
    coverage = granule_pass.area_coverage(region)
    if coverage > 0:
        coverage_str = f"is overlapping region {region.description:s} by fraction {coverage:.5f}"
        _log_overlap_message(granule_metadata, coverage_str)
        return True
    return False
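
A hedged usage sketch for the helper above. The metadata keys are assumed to be what the project-internal _get_platform_name() and _get_sensor() helpers read, and the area file and ID passed to pyresample are illustrative:

    # Hypothetical granule metadata and region; _granule_covers_region() is
    # the function shown above.
    from datetime import datetime

    from pyresample import load_area

    granule_metadata = {
        "platform_name": "NOAA-20",   # assumed key read by _get_platform_name()
        "sensor": "viirs",            # assumed key read by _get_sensor()
        "start_time": datetime(2018, 10, 16, 2, 48, 29),
        "end_time": datetime(2018, 10, 16, 3, 2, 38),
    }
    region = load_area('areas.yaml', 'euron1')   # assumed area definition
    if _granule_covers_region(granule_metadata, region):
        print("granule overlaps the region")
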
Code example #5
    def _predict(self, granule_metadata, step):
        gr_time = granule_metadata["start_time"]
        while True:
            gr_time += step
            gr_pass = Pass(_get_platform_name(granule_metadata),
                           gr_time,
                           gr_time + self.granule_duration,
                           instrument=_get_sensor(granule_metadata))
            if not gr_pass.area_coverage(self.region) > 0:
                break
            self.planned_granule_times.add(gr_time)
Code example #6
    def test_arctic_is_not_antarctic(self):

        tstart = datetime(2021, 2, 3, 16, 28, 3)
        tend = datetime(2021, 2, 3, 16, 31, 3)

        overp = Pass('Metop-B',
                     tstart,
                     tend,
                     orb=self.mborb,
                     instrument='avhrr')

        cov_south = overp.area_coverage(self.antarctica)
        cov_north = overp.area_coverage(self.arctica)

        assert cov_north == 0
        assert cov_south != 0
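
The self.antarctica and self.arctica fixtures are pyresample area definitions. A sketch of how such areas could be built programmatically; the projection parameters and extents below are illustrative and not the values used by the actual test:

    # Illustrative polar areas built with pyresample; only the lat_0 sign
    # differs between the two Lambert azimuthal equal-area definitions.
    from pyresample import create_area_def

    extent = (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)
    antarctica = create_area_def(
        "antarctica", {"proj": "laea", "lat_0": -90, "lon_0": 0},
        width=425, height=425, area_extent=extent)
    arctica = create_area_def(
        "arctica", {"proj": "laea", "lat_0": 90, "lon_0": 0},
        width=425, height=425, area_extent=extent)
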
Code example #7
File: __init__.py Project: CMTdt/trollflow2
def check_sunlight_coverage(job):
    """Remove products with too low daytime coverage.

    This plugins looks for a parameter called `min_sunlight_coverage` in the
    product list, expressed in % (so between 0 and 100). If the sunlit fraction
    is less than configured, the affected products will be discarded.
    """
    if get_twilight_poly is None:
        LOG.error("Trollsched import failed, sunlight coverage calculation not possible")
        LOG.info("Keeping all products")
        return

    scn_mda = job['scene'].attrs.copy()
    scn_mda.update(job['input_mda'])
    platform_name = scn_mda['platform_name']
    start_time = scn_mda['start_time']
    end_time = scn_mda['end_time']
    sensor = scn_mda['sensor']

    if isinstance(sensor, (list, tuple, set)):
        sensor = list(sensor)[0]
        LOG.warning("Possibly many sensors given, taking only one for "
                    "coverage calculations: %s", sensor)

    product_list = job['product_list']
    areas = list(product_list['product_list']['areas'].keys())

    for area in areas:
        products = list(product_list['product_list']['areas'][area]['products'].keys())
        for product in products:
            try:
                if isinstance(product, tuple):
                    prod = job['resampled_scenes'][area][product[0]]
                else:
                    prod = job['resampled_scenes'][area][product]
            except KeyError:
                LOG.warning("No dataset %s for this scene and area %s", product, area)
                continue
            else:
                area_def = prod.attrs['area']
            prod_path = "/product_list/areas/%s/products/%s" % (area, product)
            config = get_config_value(product_list, prod_path, "sunlight_coverage")
            if config is None:
                continue
            min_day = config.get('min')
            use_pass = config.get('check_pass', False)
            if use_pass:
                overpass = Pass(platform_name, start_time, end_time, instrument=sensor)
            else:
                overpass = None
            if min_day is None:
                continue
            coverage = _get_sunlight_coverage(area_def, start_time, overpass)
            product_list['product_list']['areas'][area]['area_sunlight_coverage_percent'] = coverage * 100
            if coverage < (min_day / 100.0):
                LOG.info("Not enough sunlight coverage in "
                         "product '%s', removed.", product)
                dpath.util.delete(product_list, prod_path)
Code example #8
    def invoke(self, context):
        """Invoke"""
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in coverage checker: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Scene loader acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        scene = context["content"]
        overpass = Pass(scene.info["platform_name"],
                        scene.info['start_time'],
                        scene.info['end_time'],
                        instrument=scene.info["sensor"][0])
        areas = []
        for area_name in scene.info["areas"]:
            self.logger.info("Checking coverage of %s", area_name)

            try:
                min_coverage = context["min_coverage"][area_name]
            except KeyError:
                self.logger.warning("No minimum coverage given, "
                                    "assuming 0 % coverage needed")
                areas.append(area_name)
                continue

            if utils.covers(overpass, area_name, min_coverage, self.logger):
                areas.append(area_name)
            else:
                self.logger.info("Area coverage too low, skipping %s",
                                 area_name)
                continue

        if len(areas) > 0:
            scene.info["areas"] = areas
            context["output_queue"].put(scene)
        else:
            self.logger.info("No areas with enough coverage")

        if utils.release_locks([context["lock"]]):
            self.logger.debug("Scene loader releases own lock %s",
                              str(context["lock"]))
            time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Scene loader releases lock of " +
                            "previous worker")
Code example #9
def granule_inside_area(start_time,
                        end_time,
                        platform_name,
                        instrument,
                        area_def,
                        thr_area_coverage,
                        tle_file=None):
    """Check if a satellite data granule is over area interest, using the start and
    end times from the filename

    """

    try:
        metop = Orbital(platform_name, tle_file)
    except KeyError:
        LOG.exception(
            'Failed getting orbital data for {0}'.format(platform_name))
        LOG.critical(
            'Cannot determine orbit! Probably TLE file problems...\n' +
            'Granule will be assumed to be inside the area of interest regardless')
        return True

    tle1 = metop.tle.line1
    tle2 = metop.tle.line2

    mypass = Pass(platform_name,
                  start_time,
                  end_time,
                  instrument=instrument,
                  tle1=tle1,
                  tle2=tle2)
    acov = mypass.area_coverage(area_def)
    LOG.debug("Granule coverage of area %s: %f", area_def.area_id, acov)

    is_inside = (acov > thr_area_coverage)

    if is_inside:
        from pyresample.boundary import AreaDefBoundary
        from trollsched.drawing import save_fig
        area_boundary = AreaDefBoundary(area_def, frequency=100)
        area_boundary = area_boundary.contour_poly
        save_fig(mypass, poly=area_boundary, directory='/tmp')

    return is_inside
Code example #10
    def test_swath_boundary(self):

        tstart = datetime(2018, 10, 16, 2, 48, 29)
        tend = datetime(2018, 10, 16, 3, 2, 38)

        overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument='viirs')
        overp_boundary = SwathBoundary(overp)

        cont = overp_boundary.contour()

        assertNumpyArraysEqual(cont[0], LONS1)
        assertNumpyArraysEqual(cont[1], LATS1)

        tstart = datetime(2018, 10, 16, 4, 29, 4)
        tend = datetime(2018, 10, 16, 4, 30, 29, 400000)

        overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument='viirs')
        overp_boundary = SwathBoundary(overp, frequency=200)

        cont = overp_boundary.contour()

        assertNumpyArraysEqual(cont[0], LONS2)
        assertNumpyArraysEqual(cont[1], LATS2)

        # NOAA-19 AVHRR:
        tstart = datetime.strptime('20181016 04:00:00', '%Y%m%d %H:%M:%S')
        tend = datetime.strptime('20181016 04:01:00', '%Y%m%d %H:%M:%S')

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr')
        overp_boundary = SwathBoundary(overp, frequency=500)

        cont = overp_boundary.contour()

        assertNumpyArraysEqual(cont[0], LONS3)
        assertNumpyArraysEqual(cont[1], LATS3)

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr/3')
        overp_boundary = SwathBoundary(overp, frequency=500)

        cont = overp_boundary.contour()

        assertNumpyArraysEqual(cont[0], LONS3)
        assertNumpyArraysEqual(cont[1], LATS3)
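
SwathBoundary(pass).contour() returns two numpy arrays, the longitudes and latitudes of the swath outline, which the test compares against precomputed fixtures. A small sketch that visualises the same boundary, reusing the last overp object from the test above and assuming SwathBoundary is importable from trollsched.boundary and matplotlib is installed:

    # Plot the swath outline of the NOAA-19 AVHRR pass created above.
    import matplotlib.pyplot as plt
    from trollsched.boundary import SwathBoundary   # assumed import path

    lons, lats = SwathBoundary(overp, frequency=500).contour()
    plt.plot(lons, lats)
    plt.xlabel("Longitude [deg]")
    plt.ylabel("Latitude [deg]")
    plt.title("NOAA-19 AVHRR swath outline")
    plt.show()
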
Code example #11
    def test_meos_pass_list(self):
        orig = (
            "  1 20190105 FENGYUN 3D  5907 52.943  01:01:45 n/a   01:17:15 15:30  18.6 107.4 -- "
            "Undefined(Scheduling not done 1546650105 ) a3d0df0cd289244e2f39f613f229a5cc D"
        )

        tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S")
        tend = tstart + timedelta(seconds=60 * 15.5)

        tle1 = '1 43010U 17072A   18363.54078832 -.00000045  00000-0 -79715-6 0  9999'
        tle2 = '2 43010  98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158'

        mypass = Pass('FENGYUN 3D',
                      tstart,
                      tend,
                      instrument='mersi2',
                      tle1=tle1,
                      tle2=tle2)
        coords = (10.72, 59.942, 0.1)
        meos_format_str = mypass.print_meos(coords, line_no=1)
        self.assertEqual(meos_format_str, orig)

        mypass = Pass('FENGYUN 3D',
                      tstart,
                      tend,
                      instrument='mersi-2',
                      tle1=tle1,
                      tle2=tle2)
        coords = (10.72, 59.942, 0.1)
        meos_format_str = mypass.print_meos(coords, line_no=1)
        self.assertEqual(meos_format_str, orig)
Code example #12
def process_xmlrequest(filename, plotdir, output_file, excluded_satellites):

    tree = ET.parse(filename)
    root = tree.getroot()

    for child in root:
        if child.tag == 'pass':
            LOG.debug("Pass: %s", str(child.attrib))
            platform_name = SATELLITE_NAMES.get(child.attrib['satellite'],
                                                child.attrib['satellite'])
            instrument = INSTRUMENT.get(platform_name)
            if not instrument:
                LOG.error('Instrument unknown! Platform = %s', platform_name)
                continue

            if platform_name in excluded_satellites:
                LOG.debug('Platform name excluded: %s', platform_name)
                continue
            try:
                overpass = Pass(platform_name,
                                datetime.strptime(child.attrib['start-time'],
                                                  '%Y-%m-%d-%H:%M:%S'),
                                datetime.strptime(child.attrib['end-time'],
                                                  '%Y-%m-%d-%H:%M:%S'),
                                instrument=instrument)
            except KeyError as err:
                LOG.warning('Failed on satellite %s: %s', platform_name,
                            str(err))
                continue

            save_fig(overpass, directory=plotdir)
            child.set('img', overpass.fig)
            child.set('rec', 'True')
            LOG.debug("Plot saved - plotdir = %s, platform_name = %s", plotdir,
                      platform_name)

    tree.write(output_file, encoding='utf-8', xml_declaration=True)

    with open(output_file) as fpt:
        lines = fpt.readlines()
        lines.insert(
            1, "<?xml-stylesheet type='text/xsl' href='reqreader.xsl'?>")

    with open(output_file, 'w') as fpt:
        fpt.writelines(lines)
Code example #13
    def create_scene_from_mda(self, mda):
        """Read the metadata *mda* and return a corresponding MPOP scene.
        """
        time_slot = (mda.get('start_time') or mda.get('nominal_time')
                     or mda.get('end_time'))

        # orbit is not given for GEO satellites, use None

        if 'orbit_number' not in mda:
            mda['orbit_number'] = None

        platform = mda["platform_name"]

        LOGGER.info("platform %s time %s", str(platform), str(time_slot))

        if isinstance(mda['sensor'], (list, tuple, set)):
            sensor = list(mda['sensor'])[0]
        else:
            sensor = mda['sensor']

        # Create satellite scene
        global_data = GF.create_scene(satname=str(platform),
                                      satnumber='',
                                      instrument=str(sensor),
                                      time_slot=time_slot,
                                      orbit=mda['orbit_number'],
                                      variant=mda.get('variant', ''))
        LOGGER.debug("Creating scene for satellite %s and time %s",
                     str(platform), str(time_slot))
        if mda['orbit_number'] is not None or mda.get('orbit_type') == "polar":
            global_data.overpass = Pass(platform,
                                        mda['start_time'],
                                        mda['end_time'],
                                        instrument=sensor)

        # Update missing information to global_data.info{}
        # TODO: this should be fixed in mpop.
        global_data.info.update(mda)
        global_data.info['time'] = time_slot

        return global_data
Code example #14
    def test_swath_coverage(self):

        # NOAA-19 AVHRR:
        tstart = datetime.strptime('20181016 03:54:13', '%Y%m%d %H:%M:%S')
        tend = datetime.strptime('20181016 03:55:13', '%Y%m%d %H:%M:%S')

        overp = Pass('NOAA-19',
                     tstart,
                     tend,
                     orb=self.n19orb,
                     instrument='avhrr')

        cov = overp.area_coverage(self.euron1)
        self.assertEqual(cov, 0)

        overp = Pass('NOAA-19',
                     tstart,
                     tend,
                     orb=self.n19orb,
                     instrument='avhrr',
                     frequency=80)

        cov = overp.area_coverage(self.euron1)
        self.assertEqual(cov, 0)

        tstart = datetime.strptime('20181016 04:00:00', '%Y%m%d %H:%M:%S')
        tend = datetime.strptime('20181016 04:01:00', '%Y%m%d %H:%M:%S')

        overp = Pass('NOAA-19',
                     tstart,
                     tend,
                     orb=self.n19orb,
                     instrument='avhrr')

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        overp = Pass('NOAA-19',
                     tstart,
                     tend,
                     orb=self.n19orb,
                     instrument='avhrr',
                     frequency=100)

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        overp = Pass('NOAA-19',
                     tstart,
                     tend,
                     orb=self.n19orb,
                     instrument='avhrr/3',
                     frequency=133)

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        overp = Pass('NOAA-19',
                     tstart,
                     tend,
                     orb=self.n19orb,
                     instrument='avhrr',
                     frequency=300)

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        # ASCAT and AVHRR on Metop-B:
        tstart = datetime.strptime("2019-01-02T10:19:39", "%Y-%m-%dT%H:%M:%S")
        tend = tstart + timedelta(seconds=180)
        tle1 = '1 38771U 12049A   19002.35527803  .00000000  00000+0  21253-4 0 00017'
        tle2 = '2 38771  98.7284  63.8171 0002025  96.0390 346.4075 14.21477776326431'

        mypass = Pass('Metop-B',
                      tstart,
                      tend,
                      instrument='ascat',
                      tle1=tle1,
                      tle2=tle2)
        cov = mypass.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.322812, 5)

        mypass = Pass('Metop-B',
                      tstart,
                      tend,
                      instrument='avhrr',
                      tle1=tle1,
                      tle2=tle2)
        cov = mypass.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.357324, 5)

        tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S")
        tend = tstart + timedelta(seconds=60 * 15.5)

        tle1 = '1 43010U 17072A   18363.54078832 -.00000045  00000-0 -79715-6 0  9999'
        tle2 = '2 43010  98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158'

        mypass = Pass('FENGYUN 3D',
                      tstart,
                      tend,
                      instrument='mersi2',
                      tle1=tle1,
                      tle2=tle2)
        cov = mypass.area_coverage(self.euron1)

        self.assertAlmostEqual(cov, 0.786836, 5)
Code example #15
    def collect(self, granule_metadata):
        """ 
            Parameters:

                granule_metadata : metadata 

        """

        # Check if input data is being waited for

        platform = granule_metadata['platform_name']

        start_time = granule_metadata['start_time']
        end_time = granule_metadata['end_time']

        for ptime in self.planned_granule_times:
            if abs(start_time - ptime) < timedelta(seconds=3):
                self.granule_times.add(ptime)
                self.granules.append(granule_metadata)
                LOG.info("Added %s (%s) granule to area %s",
                         platform,
                         str(start_time),
                         self.region.area_id)
                # If last granule return swath and cleanup
                if self.granule_times == self.planned_granule_times:
                    LOG.info("Collection finished for area: " +
                             str(self.region.area_id))
                    return self.finish()
                else:
                    return

        # Get corners from input data

        if self.granule_duration is None:
            self.granule_duration = end_time - start_time
            LOG.debug("Estimated granule duration to " +
                      str(self.granule_duration))

        granule_pass = Pass(platform, start_time, end_time,
                            instrument=granule_metadata["sensor"])

        # If file is within region, make pass prediction to know what to wait
        # for
        if granule_pass.area_coverage(self.region) > 0:
            self.granule_times.add(start_time)
            self.granules.append(granule_metadata)

            # Computation of the predicted granules within the region

            if not self.planned_granule_times:
                self.planned_granule_times.add(start_time)
                LOG.info("Added %s (%s) granule to area %s",
                         platform,
                         str(start_time),
                         self.region.area_id)
                LOG.debug(
                    "Predicting granules covering " + self.region.area_id)
                gr_time = start_time
                while True:
                    gr_time += self.granule_duration
                    gr_pass = Pass(platform, gr_time,
                                   gr_time + self.granule_duration,
                                   instrument=granule_metadata["sensor"])
                    if not gr_pass.area_coverage(self.region) > 0:
                        break
                    self.planned_granule_times.add(gr_time)

                gr_time = start_time
                while True:
                    gr_time -= self.granule_duration
                    gr_pass = Pass(platform, gr_time,
                                   gr_time + self.granule_duration,
                                   instrument=granule_metadata["sensor"])
                    if not gr_pass.area_coverage(self.region) > 0:
                        break
                    self.planned_granule_times.add(gr_time)

                LOG.info(
                    "Planned granules: " + str(sorted(self.planned_granule_times)))
                self.timeout = (max(self.planned_granule_times)
                                + self.granule_duration
                                + self.timeliness)
                LOG.info("Planned timeout: " + self.timeout.isoformat())
        else:
            try:
                LOG.debug("Granule %s is not overlapping %s",
                          granule_metadata["uri"], self.region.name)
            except KeyError:
                try:
                    LOG.debug("Granule with start and end times = " +
                              str(granule_metadata["start_time"]) + " " +
                              str(granule_metadata["end_time"]) +
                              "is not overlapping " + str(self.region.name))
                except KeyError:
                    LOG.debug("Failed printing debug info...")
                    LOG.debug("Keys in granule_metadata = " +
                              str(granule_metadata.keys()))

        # If last granule return swath and cleanup
        if (self.granule_times and
                (self.granule_times == self.planned_granule_times)):
            LOG.debug("Collection finished for area: " +
                      str(self.region.area_id))
            return self.finish()
Code example #16
    def _process(self, context):
        """Process a context."""

        glbl = context["content"]["scene"]
        extra_metadata = context["content"]["extra_metadata"]

        with open(context["product_list"], "r") as fid:
            product_config = ordered_load(fid)

        # Handle config options
        kwargs = {}

        kwargs['mask_area'] = context.get('mask_area', True)
        self.logger.debug("Setting area masking to %s",
                          str(kwargs['mask_area']))

        kwargs['nprocs'] = context.get('nprocs', 1)
        self.logger.debug("Using %d CPUs for resampling.", kwargs['nprocs'])

        kwargs['resampler'] = context.get('resampler', "nearest")
        self.logger.debug("Using resampling method: '%s'.",
                          kwargs['resampler'])

        try:
            kwargs['cache_dir'] = context['cache_dir']
            self.logger.debug("Setting projection cache dir to %s",
                              kwargs['cache_dir'])
        except (AttributeError, KeyError):
            pass

        prod_list = product_config["product_list"]

        # Overpass for coverage calculations
        scn_metadata = glbl.attrs
        if product_config['common'].get('coverage_check', True) and Pass:
            overpass = Pass(scn_metadata['platform_name'],
                            scn_metadata['start_time'],
                            scn_metadata['end_time'],
                            instrument=scn_metadata['sensor'][0])
        else:
            overpass = None

        # Get the area ID from metadata dict
        area_id = extra_metadata['area_id']

        # Check for area coverage
        if overpass is not None:
            min_coverage = prod_list[area_id].get("min_coverage", 0.0)
            if not utils.covers(overpass, area_id, min_coverage, self.logger):
                return

        kwargs['radius_of_influence'] = None
        try:
            area_config = product_config["product_list"][area_id]
            kwargs['radius_of_influence'] = \
                area_config.get("srch_radius", context["radius"])
        except (AttributeError, KeyError):
            kwargs['radius_of_influence'] = 10000.

        if kwargs['radius_of_influence'] is None:
            self.logger.debug("Using default search radius.")
        else:
            self.logger.debug("Using search radius %d meters.",
                              int(kwargs['radius_of_influence']))
        # Set lock if locking is used
        if self.use_lock:
            self.logger.debug("Resampler acquires own lock %s",
                              str(context["lock"]))
            utils.acquire_lock(context["lock"])

        if area_id == "satproj":
            self.logger.info("Using satellite projection")
            lcl = glbl
        else:
            metadata = glbl.attrs
            self.logger.info("Resampling time slot %s to area %s",
                             metadata["start_time"], area_id)
            lcl = glbl.resample(area_id, **kwargs)

        # Add area ID to the scene attributes so everything needed
        # in filename composing is in the same dictionary
        lcl.attrs["area_id"] = area_id

        metadata = extra_metadata.copy()
        metadata["product_config"] = product_config
        metadata["products"] = prod_list[area_id]['products']

        self.logger.debug(
            "Inserting lcl (area: %s, start_time: %s) "
            "to writer's queue", area_id, str(scn_metadata["start_time"]))
        context["output_queue"].put({'scene': lcl, 'extra_metadata': metadata})

        if utils.release_locks([context["lock"]]):
            self.logger.debug("Resampler releases own lock %s",
                              str(context["lock"]))
            # Wait 1 second to ensure next worker has time to acquire the
            # lock
            time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        del lcl
        lcl = None
Code example #17
File: metop.py Project: loreclem/radarraster
    #orb = Orbital("Metop-"+satname)
    #dtobj = datetime(int(sat_pos_time.strftime('%Y')),
    #             int(sat_pos_time.strftime('%m')),
    #             int(sat_pos_time.strftime('%d')),
    #             int(sat_pos_time.strftime('%H')),
    #             int(sat_pos_time.strftime('%M')),
    #             0)
    #print("---")
    #print(orb.get_lonlatalt(dtobj))
    #print("---")

    #lonlat = orb.get_lonlatalt(dtobj)

    #if lonlat[0] >= -10. and lonlat[0] <= 20. and lonlat[1] >= 40 and lonlat[1] <= 60:

    granule_pass = Pass("Metop-"+satname, glbl.start_time, glbl.end_time, instrument=glbl['natural_color'].sensor)
    if granule_pass.area_coverage(europe) > 0:
       print("Region over Switzerland, making CCS4 domain...")
       #local_data = glbl.resample("ccs4large")
       local_data = glbl.resample(europe)
       local_data.save_dataset('night_fog', outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg")
       local_data.save_dataset('natural_color', outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg")
       cw.add_coastlines_to_file(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg", europe, resolution='l', level=1, outline=(255, 255, 255))
       cw.add_coastlines_to_file(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg", europe, resolution='l', level=1, outline=(255, 255, 255))
       cw.add_borders_to_file(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg", europe, outline=(255, 255, 255),resolution='i')
       cw.add_borders_to_file(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg", europe, outline=(255, 255, 255),resolution='i')
       if os.path.getsize(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg") < 170000:
           os.remove(outDir+"METOP-"+satname+"_fog-europe_"+st+".jpg")
       if os.path.getsize(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg") < 170000:
           os.remove(outDir+"METOP-"+satname+"_overview-europe_"+st+".jpg")
Code example #18
    def test_swath_coverage(self):

        # NOAA-19 AVHRR:
        tstart = datetime.strptime('20181016 03:54:13', '%Y%m%d %H:%M:%S')
        tend = datetime.strptime('20181016 03:55:13', '%Y%m%d %H:%M:%S')

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr')

        cov = overp.area_coverage(self.euron1)
        self.assertEqual(cov, 0)

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=80)

        cov = overp.area_coverage(self.euron1)
        self.assertEqual(cov, 0)

        tstart = datetime.strptime('20181016 04:00:00', '%Y%m%d %H:%M:%S')
        tend = datetime.strptime('20181016 04:01:00', '%Y%m%d %H:%M:%S')

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr')

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=100)

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr/3', frequency=133)

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=300)

        cov = overp.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.103526, 5)

        # ASCAT and AVHRR on Metop-B:
        tstart = datetime.strptime("2019-01-02T10:19:39", "%Y-%m-%dT%H:%M:%S")
        tend = tstart + timedelta(seconds=180)
        tle1 = '1 38771U 12049A   19002.35527803  .00000000  00000+0  21253-4 0 00017'
        tle2 = '2 38771  98.7284  63.8171 0002025  96.0390 346.4075 14.21477776326431'

        mypass = Pass('Metop-B', tstart, tend, instrument='ascat', tle1=tle1, tle2=tle2)
        cov = mypass.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.322812, 5)

        mypass = Pass('Metop-B', tstart, tend, instrument='avhrr', tle1=tle1, tle2=tle2)
        cov = mypass.area_coverage(self.euron1)
        self.assertAlmostEqual(cov, 0.357324, 5)

        tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S")
        tend = tstart + timedelta(seconds=60*15.5)

        tle1 = '1 43010U 17072A   18363.54078832 -.00000045  00000-0 -79715-6 0  9999'
        tle2 = '2 43010  98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158'

        mypass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi2', tle1=tle1, tle2=tle2)
        cov = mypass.area_coverage(self.euron1)

        self.assertAlmostEqual(cov, 0.786836, 5)
Code example #19
File: __init__.py Project: pytroll/trollflow2
def check_sunlight_coverage(job):
    """Remove products with too low/high sunlight coverage.

    This plugin looks for a dictionary called `sunlight_coverage` in
    the product list, with members `min` and/or `max` that define the
    minimum and/or maximum allowed sunlight coverage within the scene.
    The limits are expressed in % (so between 0 and 100).  If the
    sunlit fraction is outside the set limits, the affected products
    will be discarded.  It is also possible to define `check_pass:
    True` in this dictionary to check the sunlit fraction within the
    overpass of a polar-orbiting satellite.
    """
    if get_twilight_poly is None:
        LOG.error(
            "Trollsched import failed, sunlight coverage calculation not possible"
        )
        LOG.info("Keeping all products")
        return

    scn_mda = _get_scene_metadata(job)
    scn_mda.update(job['input_mda'])
    platform_name = scn_mda['platform_name']
    start_time = scn_mda['start_time']
    end_time = scn_mda['end_time']
    sensor = scn_mda['sensor']

    if isinstance(sensor, (list, tuple, set)):
        sensor = list(sensor)
        if len(sensor) > 1:
            LOG.warning(
                "Multiple sensors given, taking only one for "
                "coverage calculations: %s", sensor[0])
        sensor = sensor[0]

    product_list = job['product_list']
    areas = list(product_list['product_list']['areas'].keys())

    for area in areas:
        products = list(
            product_list['product_list']['areas'][area]['products'].keys())
        try:
            area_def = get_area_def(area)
        except AreaNotFound:
            area_def = None
        coverage = {True: None, False: None}
        overpass = None
        for product in products:
            prod_path = "/product_list/areas/%s/products/%s" % (area, product)
            config = get_config_value(product_list, prod_path,
                                      "sunlight_coverage")
            if config is None:
                continue
            min_day = config.get('min')
            max_day = config.get('max')
            check_pass = config.get('check_pass', False)

            if min_day is None and max_day is None:
                LOG.debug("Sunlight coverage not configured for %s / %s",
                          product, area)
                continue

            if area_def is None:
                area_def = _get_product_area_def(job, area, product)
                if area_def is None:
                    continue

            if check_pass and overpass is None:
                overpass = Pass(platform_name,
                                start_time,
                                end_time,
                                instrument=sensor)

            if coverage[check_pass] is None:
                coverage[check_pass] = _get_sunlight_coverage(
                    area_def, start_time, overpass)
            area_conf = product_list['product_list']['areas'][area]
            area_conf[
                'area_sunlight_coverage_percent'] = coverage[check_pass] * 100
            if min_day is not None and coverage[check_pass] < (min_day /
                                                               100.0):
                LOG.info("Not enough sunlight coverage for "
                         f"product '{product!s}', removed. Needs at least "
                         f"{min_day:.1f}%, got {coverage[check_pass]:.1%}.")
                dpath.util.delete(product_list, prod_path)
            if max_day is not None and coverage[check_pass] > (max_day /
                                                               100.0):
                LOG.info("Too much sunlight coverage for "
                         f"product '{product!s}', removed. Needs at most "
                         f"{max_day:.1f}%, got {coverage[check_pass]:.1%}.")
                dpath.util.delete(product_list, prod_path)
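
The configuration that get_config_value() looks up lives under /product_list/areas/&lt;area&gt;/products/&lt;product&gt;. A sketch of the structure this plugin expects, written as the equivalent Python dictionary; the area name, product name and limit values are made up for illustration:

    # Illustrative product_list for the sunlight_coverage check.
    product_list = {
        "product_list": {
            "areas": {
                "euron1": {
                    "products": {
                        "overview": {
                            "sunlight_coverage": {
                                "min": 10,           # percent
                                "max": 90,           # percent
                                "check_pass": True,  # evaluate within the satellite overpass
                            },
                        },
                    },
                },
            },
        },
    }
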
Code example #20
File: __init__.py Project: pytroll/trollflow2
def get_scene_coverage(platform_name, start_time, end_time, sensor, area_id):
    """Get scene area coverage in percentages."""
    overpass = Pass(platform_name, start_time, end_time, instrument=sensor)
    area_def = get_area_def(area_id)

    return 100 * overpass.area_coverage(area_def)
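
A short usage sketch; the platform, sensor and times are taken from the other examples on this page, and 'euron1' stands in for whatever area ID the local area configuration resolves via get_area_def():

    # Coverage of an assumed 'euron1' area by a FENGYUN 3D / MERSI-2 pass,
    # expressed in percent by get_scene_coverage() above.
    from datetime import datetime, timedelta

    start_time = datetime(2019, 1, 5, 1, 1, 45)
    end_time = start_time + timedelta(minutes=15)
    coverage_pct = get_scene_coverage('FENGYUN 3D', start_time, end_time,
                                      'mersi2', 'euron1')
    print(f"Scene covers {coverage_pct:.1f}% of euron1")
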
Code example #21
    def collect(self, granule_metadata):
        """ 
            Parameters:

                granule_metadata : metadata

        """

        # Check if input data is being waited for

        if "tle_platform_name" in granule_metadata:
            platform = granule_metadata['tle_platform_name']
        else:
            platform = granule_metadata['platform_name']

        start_time = granule_metadata['start_time']
        if ("end_time" not in granule_metadata
                and self.granule_duration is not None):
            granule_metadata["end_time"] = (granule_metadata["start_time"] +
                                            self.granule_duration)

        end_time = granule_metadata['end_time']

        if start_time > end_time:
            old_end_time = end_time
            end_date = start_time.date()
            if end_time.time() < start_time.time():
                end_date += timedelta(days=1)
            end_time = datetime.combine(end_date, end_time.time())
            LOG.debug('Adjusted end time from %s to %s.', old_end_time,
                      end_time)

        granule_metadata['end_time'] = end_time

        LOG.debug("Adding area ID to metadata: %s", str(self.region.area_id))
        granule_metadata['collection_area_id'] = self.region.area_id

        self.last_file_added = False
        for ptime in self.planned_granule_times:
            if abs(start_time - ptime) < timedelta(seconds=3) and \
               ptime not in self.granule_times:
                self.granule_times.add(ptime)
                self.granules.append(granule_metadata)
                self.last_file_added = True
                LOG.info("Added %s (%s) granule to area %s", platform,
                         str(start_time), self.region.area_id)
                # If last granule return swath and cleanup
                # if self.granule_times == self.planned_granule_times:
                if self.is_swath_complete():
                    LOG.info("Collection finished for area: %s",
                             str(self.region.area_id))
                    return self.finish()
                else:
                    try:
                        new_timeout = (max(self.planned_granule_times -
                                           self.granule_times) +
                                       self.granule_duration + self.timeliness)
                    except ValueError:
                        LOG.error("Calculation of new timeout failed, "
                                  "keeping previous timeout.")
                        LOG.error("Planned: %s", self.planned_granule_times)
                        LOG.error("Received: %s", self.granule_times)
                        return

                    if new_timeout < self.timeout:
                        self.timeout = new_timeout
                        LOG.info("Adjusted timeout: %s",
                                 self.timeout.isoformat())

                    return

        # Get corners from input data

        if self.granule_duration is None:
            self.granule_duration = end_time - start_time
            LOG.debug("Estimated granule duration to %s",
                      str(self.granule_duration))

        LOG.info("Platform name %s and sensor %s: Start and end times = %s %s",
                 str(platform), str(granule_metadata["sensor"]),
                 start_time.strftime('%Y%m%d %H:%M:%S'),
                 end_time.strftime('%Y%m%d %H:%M:%S'))

        self.sensor = granule_metadata["sensor"]
        if isinstance(self.sensor, list):
            self.sensor = self.sensor[0]
        granule_pass = Pass(platform,
                            start_time,
                            end_time,
                            instrument=self.sensor)

        # If file is within region, make pass prediction to know what to wait
        # for
        if granule_pass.area_coverage(self.region) > 0:
            self.granule_times.add(start_time)
            self.granules.append(granule_metadata)
            self.last_file_added = True

            # Computation of the predicted granules within the region

            if not self.planned_granule_times:
                self.planned_granule_times.add(start_time)
                LOG.info("Added %s (%s) granule to area %s", platform,
                         str(start_time), self.region.area_id)
                LOG.debug("Predicting granules covering %s",
                          self.region.area_id)
                gr_time = start_time
                while True:
                    gr_time += self.granule_duration
                    gr_pass = Pass(platform,
                                   gr_time,
                                   gr_time + self.granule_duration,
                                   instrument=self.sensor)
                    if not gr_pass.area_coverage(self.region) > 0:
                        break
                    self.planned_granule_times.add(gr_time)

                gr_time = start_time
                while True:
                    gr_time -= self.granule_duration
                    gr_pass = Pass(platform,
                                   gr_time,
                                   gr_time + self.granule_duration,
                                   instrument=self.sensor)
                    if not gr_pass.area_coverage(self.region) > 0:
                        break
                    self.planned_granule_times.add(gr_time)

                LOG.info("Planned granules for %s: %s", self.region.name,
                         str(sorted(self.planned_granule_times)))
                self.timeout = (max(self.planned_granule_times) +
                                self.granule_duration + self.timeliness)
                LOG.info("Planned timeout for %s: %s", self.region.name,
                         self.timeout.isoformat())

        else:
            try:
                LOG.debug("Granule %s is not overlapping %s",
                          granule_metadata["uri"], self.region.name)
            except KeyError:
                try:
                    LOG.debug(
                        "Granule with start and end times = %s  %s  "
                        "is not overlapping %s",
                        str(granule_metadata["start_time"]),
                        str(granule_metadata["end_time"]),
                        str(self.region.name))
                except KeyError:
                    LOG.debug("Failed printing debug info...")
                    LOG.debug("Keys in granule_metadata = %s",
                              str(granule_metadata.keys()))

        # If last granule return swath and cleanup
        if self.is_swath_complete():
            LOG.debug("Collection finished for area: %s",
                      str(self.region.area_id))
            return self.finish()
Code example #22
    def invoke(self, context):
        """Invoke"""
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in resampler: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Compositor acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        glbl = context["content"]
        with open(context["product_list"], "r") as fid:
            product_config = yaml.safe_load(fid)

        # Handle config options
        kwargs = {}

        kwargs['precompute'] = context.get('precompute', False)
        kwargs['mask_area'] = context.get('mask_area', True)
        self.logger.debug("Setting precompute to %s and masking to %s",
                          str(kwargs['precompute']), str(kwargs['mask_area']))

        kwargs['nprocs'] = context.get('nprocs', 1)
        self.logger.debug("Using %d CPUs for resampling.", kwargs['nprocs'])

        kwargs['resampler'] = context.get('proj_method', "nearest")
        self.logger.debug("Using resampling method: '%s'.",
                          kwargs['resampler'])

        try:
            kwargs['cache_dir'] = context['cache_dir']
            self.logger.debug("Setting projection cache dir to %s",
                              kwargs['cache_dir'])
        except (AttributeError, KeyError):
            pass

        prod_list = product_config["product_list"]

        # Overpass for coverage calculations
        try:
            metadata = glbl.attrs
        except AttributeError:
            metadata = glbl.info
        if product_config['common'].get('coverage_check', True):
            overpass = Pass(metadata['platform_name'],
                            metadata['start_time'],
                            metadata['end_time'],
                            instrument=metadata['sensor'][0])
        else:
            overpass = None

        for area_id in prod_list:
            # Check for area coverage
            if overpass is not None:
                min_coverage = prod_list[area_id].get("min_coverage", 0.0)
                if not utils.covers(overpass, area_id, min_coverage,
                                    self.logger):
                    continue

            kwargs['radius_of_influence'] = None
            try:
                area_config = product_config["product_list"][area_id]
                kwargs['radius_of_influence'] = \
                    area_config.get("srch_radius", context["radius"])
            except (AttributeError, KeyError):
                kwargs['radius_of_influence'] = 10000.

            if kwargs['radius_of_influence'] is None:
                self.logger.debug("Using default search radius.")
            else:
                self.logger.debug("Using search radius %d meters.",
                                  int(kwargs['radius_of_influence']))
            # Set lock if locking is used
            if self.use_lock:
                self.logger.debug("Resampler acquires own lock %s",
                                  str(context["lock"]))
                utils.acquire_lock(context["lock"])
            # if area_id not in glbl.info["areas"]:
            #     utils.release_locks([context["lock"]])
            #     continue

            if area_id == "satproj":
                self.logger.info("Using satellite projection")
                lcl = glbl
            else:
                try:
                    metadata = glbl.attrs
                except AttributeError:
                    metadata = glbl.info
                self.logger.info("Resampling time slot %s to area %s",
                                 metadata["start_time"], area_id)
                lcl = glbl.resample(area_id, **kwargs)
            try:
                metadata = lcl.attrs
            except AttributeError:
                metadata = lcl.info
            metadata["product_config"] = product_config
            metadata["area_id"] = area_id
            metadata["products"] = prod_list[area_id]['products']

            self.logger.debug(
                "Inserting lcl (area: %s, start_time: %s) "
                "to writer's queue", area_id, str(metadata["start_time"]))
            context["output_queue"].put(lcl)
            del lcl
            lcl = None

            if utils.release_locks([context["lock"]]):
                self.logger.debug("Resampler releases own lock %s",
                                  str(context["lock"]))
                # Wait 1 second to ensure next worker has time to acquire the
                # lock
                time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Resampler releses lock of previous " +
                            "worker: %s" % str(context["prev_lock"]))