Example 1
    def requires(self):
        container = acquisitions(self.level1, self.acq_parser_hint)
        acqs = container.get_acquisitions(group=self.group,
                                          granule=self.granule)

        # NBAR & SBT acquisitions
        nbar_acqs = [a for a in acqs if a.band_type == BandType.REFLECTIVE]
        sbt_acqs = [a for a in acqs if a.band_type == BandType.THERMAL]

        tasks = {}
        for coefficient in self.workflow.atmos_coefficients:
            if coefficient in Workflow.NBAR.atmos_coefficients:
                band_acqs = nbar_acqs
            else:
                band_acqs = sbt_acqs

            for acq in band_acqs:
                key = (acq.band_name, coefficient)
                kwargs = {
                    'level1': self.level1,
                    'work_root': self.work_root,
                    'granule': self.granule,
                    'group': self.group,
                    'band_name': acq.band_name,
                    'coefficient': coefficient,
                    'workflow': self.workflow,
                    'vertices': self.vertices,
                    'method': self.method
                }
                tasks[key] = InterpolateCoefficient(**kwargs)
        return tasks
Example 2
    def run(self):
        container = acquisitions(self.level1, self.acq_parser_hint)
        acqs, group = container.get_highest_resolution(granule=self.granule)

        # output filename format
        output_fmt = pjoin(POINT_FMT, ALBEDO_FMT,
                           ''.join([POINT_ALBEDO_FMT, '.tp5']))

        # input filenames
        ancillary_fname = self.input()['ancillary'].path
        sat_sol_fname = self.input()[group]['sat_sol'].path
        lon_lat_fname = self.input()[group]['lon_lat'].path

        with self.output().temporary_path() as out_fname:
            tp5_data = _format_tp5(acqs, sat_sol_fname, lon_lat_fname,
                                   ancillary_fname, out_fname, self.workflow)

            # keep this as an indented block, that way the target will remain
            # atomic and be moved upon closing
            for key in tp5_data:
                point, albedo = key
                tp5_fname = output_fmt.format(p=point, a=albedo.value)
                target = pjoin(dirname(out_fname), self.base_dir, tp5_fname)
                with luigi.LocalTarget(target).open('w') as src:
                    src.writelines(tp5_data[key])
Example 3
 def test_missing_utm_file(self):
     """
     Test assertion is raised on missing utm file
     """
     geobox = acquisitions(LS7_SCENE1).get_all_acquisitions()[0].gridded_geo_box()
     with self.assertRaises(AssertionError):
         calc_land_sea_mask(geobox, ancillary_path=LAND_SEA_RASTERS)
Example 4
    def requires(self):
        with open(self.level1_list) as src:
            level1_list = [level1.strip() for level1 in src.readlines()]

        for level1 in level1_list:
            container = acquisitions(level1)
            outdir = pjoin(self.outdir, "{}.wagl".format(container.label))
            for granule in container.granules:
                kwargs = {
                    "level1": level1,
                    "granule": granule,
                    "workflow": self.workflow,
                    "vertices": self.vertices,
                    "pixel_quality": self.pixel_quality,
                    "method": self.method,
                    "modtran_exe": self.modtran_exe,
                    "outdir": outdir,
                    "land_sea_path": self.land_sea_path,
                    "aerosol": self.aerosol,
                    "brdf": self.brdf,
                    "ozone_path": self.ozone_path,
                    "water_vapour": self.water_vapour,
                    "dem_path": self.dem_path,
                    "ecmwf_path": self.ecmwf_path,
                    "invariant_height_fname": self.invariant_height_fname,
                    "dsm_fname": self.dsm_fname,
                    "tle_path": self.tle_path,
                    "rori": self.rori,
                    "compression": self.compression,
                    "filter_opts": self.filter_opts,
                    "buffer_distance": self.buffer_distance,
                    "h5_driver": self.h5_driver,
                }
                yield DataStandardisation(**kwargs)
Example 5
def _can_process(l1t_path, granule):
    _LOG.debug('Checking L1T: %r', l1t_path)
    acqs = acquisitions(l1t_path).get_all_acquisitions(granule)
    landsat_path = int(acqs[0].path)
    landsat_row = int(acqs[0].row)

    # TODO
    # the path/row exclusion logic is not long-term viable and the prototype
    # for S2 will follow a similar exclusion logic, but use MGRS tiles instead.
    # A geometry exclusion is probably better suited in going forward with
    # multi-sensor/platform support

    # Is it an Australian scene? That's all we support at the moment.
    # (numbers specified by Lan-Wei.)
    msg = 'Not an Australian {} ({}): {}'
    if not (87 <= landsat_path <= 116):
        msg = msg.format('path', landsat_path, basename(l1t_path))
        _LOG.info(msg)
        return False, msg
    if not (67 <= landsat_row <= 91):
        msg = msg.format('row', landsat_row, basename(l1t_path))
        _LOG.info(msg)
        return False, msg

    # Do we have a reference dir available to compute GQA?
    ref_dir, msg = get_acq_reference_directory(acqs[0])
    if not ref_dir:
        return ref_dir, msg

    return True, None
Example 6
    def filter_granule_worker(out_stream):
        count = 0
        for level1_dataset in _level1_dataset_path_iter(Path(level1_root), *find_options):
            try:
                container = acquisitions(str(level1_dataset))
            except Exception as e:
                logging.warning('encountered unexpected error for %s: %s', str(level1_dataset), e)
                logging.exception(e)
                continue

            granule_md = get_archive_metadata(level1_dataset)

            for granule, sensing_date in granule_md.items():
                tile_id = granule.split('_')[-2]
                if tile_id not in tile_ids:
                    logging.debug('granule %s with MGRS tile ID %s outside AOI', granule, tile_id)
                    continue

                ymd = sensing_date.strftime('%Y-%m-%d')
                package = Package(
                    level1=str(level1_dataset),
                    workdir='',
                    granule=granule,
                    pkgdir=join(pkgdir, ymd)
                )
                if package.output().exists():
                    logging.debug('granule %s already processed', granule)
                    continue

                logging.debug('level1 dataset %s needs to be processed', level1_dataset)
                print(level1_dataset, file=out_stream)
                count += len(granule_md.keys())  # To handle multigranule files
                break
        return out_stream, count
Example 7
def get_acquisition(l1t_path, granule):
    # Get the acquisitions, metadata, and filter by wavelength
    acqs = acquisitions(l1t_path).get_all_acquisitions(granule=granule)

    # TODO include MGRS id logic
    # TODO improve path/row or MGRS id decision logic

    # check if the path/row is identified as a reef scene
    path = acqs[0].path
    row = acqs[0].row
    df = pandas.read_csv(REEF_PR)
    reef_scene = ((df.Path == path) & (df.Row == row)).any()

    # Get the wavelengths to filter the acquisitions
    # TODO parse min/max as args not config
    if reef_scene:
        min_lambda = CONFIG.getfloat('work', 'reef_min_lambda')
        max_lambda = CONFIG.getfloat('work', 'reef_max_lambda')
    else:
        min_lambda = CONFIG.getfloat('work', 'min_lambda')
        max_lambda = CONFIG.getfloat('work', 'max_lambda')

    # only accept a single wavelength (for now...)
    acq = [
        acq for acq in acqs
        if (acq.band_type == BandType.REFLECTIVE
            and min_lambda < acq.wavelength[1] <= max_lambda)
    ]

    return acq[0]
Example 8
    def run(self):
        container = acquisitions(self.level1, self.acq_parser_hint)
        # out_path = container.get_root(self.work_root, granule=self.granule)
        acqs = container.get_all_acquisitions(self.granule)
        atmospheric_inputs_fname = self.input().path
        base_dir = pjoin(self.work_root, self.base_dir)
        albedos = [Albedos(a) for a in self.albedos]

        prepare_modtran(acqs, self.point, albedos, base_dir)

        with self.output().temporary_path() as out_fname:
            nvertices = self.vertices[0] * self.vertices[1]
            _run_modtran(
                acqs,
                self.modtran_exe,
                base_dir,
                self.point,
                albedos,
                self.workflow,
                nvertices,
                atmospheric_inputs_fname,
                out_fname,
                self.compression,
                self.filter_opts,
            )
Example 9
    def run(self):
        container = acquisitions(self.level1, self.acq_parser_hint)
        grn = container.get_granule(granule=self.granule, container=True)
        sbt_path = None

        nbar_paths = {
            "aerosol_dict": self.aerosol,
            "water_vapour_dict": self.water_vapour,
            "ozone_path": self.ozone_path,
            "dem_path": self.dem_path,
            "brdf_dict": self.brdf,
        }

        if self.workflow == Workflow.STANDARD or self.workflow == Workflow.SBT:
            sbt_path = self.ecmwf_path

        with self.output().temporary_path() as out_fname:
            _collect_ancillary(
                grn,
                self.input().path,
                nbar_paths,
                sbt_path,
                self.invariant_height_fname,
                self.vertices,
                out_fname,
                self.compression,
                self.filter_opts,
            )
Example 10
 def test_granules_s2a_scene1(self):
     container = acquisitions(S2A_SCENE1)
     self.assertEqual(len(container.granules), 1)
     self.assertEqual(
         container.granules[0],
         "S2A_OPER_MSI_L1C_TL_SGS__20171207T032513_A012840_T55JEJ_N02.06",
     )
Example 11
 def test_granules_s2b_scene1(self):
     container = acquisitions(S2B_SCENE1)
     self.assertEqual(len(container.granules), 1)
     self.assertEqual(
         container.granules[0],
         "S2B_OPER_MSI_L1C_TL_SGS__20170719T012130_A001915_T56JKT_N02.05",
     )
Example 12
    def requires(self):
        container = acquisitions(self.level1, self.acq_parser_hint)
        acqs = container.get_acquisitions(group=self.group, granule=self.granule)

        # NBAR & SBT acquisitions
        nbar_acqs = [a for a in acqs if a.band_type == BandType.REFLECTIVE]
        sbt_acqs = [a for a in acqs if a.band_type == BandType.THERMAL]

        tasks = {}
        for coefficient in self.workflow.atmos_coefficients:
            if coefficient in Workflow.NBAR.atmos_coefficients:
                band_acqs = nbar_acqs
            else:
                band_acqs = sbt_acqs

            for acq in band_acqs:
                key = (acq.band_name, coefficient)
                kwargs = {
                    "level1": self.level1,
                    "work_root": self.work_root,
                    "granule": self.granule,
                    "group": self.group,
                    "band_name": acq.band_name,
                    "coefficient": coefficient,
                    "workflow": self.workflow,
                    "vertices": self.vertices,
                    "method": self.method,
                }
                tasks[key] = InterpolateCoefficient(**kwargs)
        return tasks
Example 13
    def requires(self):
        with open(self.level1_list) as src:
            level1_list = [level1.strip() for level1 in src.readlines()]

        for level1 in level1_list:
            container = acquisitions(level1)
            outdir = pjoin(self.outdir, '{}.wagl'.format(container.label))
            for granule in container.granules:
                kwargs = {'level1': level1,
                          'granule': granule,
                          'workflow': self.workflow,
                          'vertices': self.vertices,
                          'pixel_quality': self.pixel_quality,
                          'method': self.method,
                          'modtran_exe': self.modtran_exe,
                          'outdir': outdir,
                          'land_sea_path': self.land_sea_path,
                          'aerosol': self.aerosol,
                          'brdf': self.brdf,
                          'ozone_path': self.ozone_path,
                          'water_vapour': self.water_vapour,
                          'dem_path': self.dem_path,
                          'ecmwf_path': self.ecmwf_path,
                          'invariant_height_fname': self.invariant_height_fname,
                          'dsm_fname': self.dsm_fname,
                          'tle_path': self.tle_path,
                          'rori': self.rori,
                          'compression': self.compression,
                          'filter_opts': self.filter_opts,
                          'buffer_distance': self.buffer_distance,
                          'h5_driver': self.h5_driver}
                yield DataStandardisation(**kwargs)
Example 14
    def run(self):
        container = acquisitions(self.level1, self.acq_parser_hint)
        acqs = container.get_acquisitions(self.group, self.granule)

        # inputs
        inputs = self.input()
        interpolation_fname = inputs['interpolation'].path
        slp_asp_fname = inputs['slp_asp'].path
        incident_fname = inputs['incident'].path
        exiting_fname = inputs['exiting'].path
        relative_slope_fname = inputs['rel_slope'].path
        shadow_fname = inputs['shadow'].path
        sat_sol_fname = inputs['sat_sol'].path
        ancillary_fname = inputs['ancillary'].path

        # get the acquisition we wish to process
        acq = [acq for acq in acqs if acq.band_name == self.band_name][0]

        with self.output().temporary_path() as out_fname:
            _calculate_reflectance(acq, acqs, interpolation_fname,
                                   sat_sol_fname, slp_asp_fname,
                                   relative_slope_fname, incident_fname,
                                   exiting_fname, shadow_fname,
                                   ancillary_fname, self.rori, out_fname,
                                   self.compression, self.filter_opts)
Example 15
    def requires(self):
        band_acqs = []
        container = acquisitions(self.level1, self.acq_parser_hint)
        acqs = container.get_acquisitions(group=self.group, granule=self.granule)

        # NBAR acquisitions
        if self.workflow == Workflow.STANDARD or self.workflow == Workflow.NBAR:
            band_acqs.extend([a for a in acqs if a.band_type == BandType.REFLECTIVE])

        # SBT acquisitions
        if self.workflow == Workflow.STANDARD or self.workflow == Workflow.SBT:
            band_acqs.extend([a for a in acqs if a.band_type == BandType.THERMAL])

        tasks = []
        for acq in band_acqs:
            kwargs = {
                "level1": self.level1,
                "work_root": self.work_root,
                "granule": self.granule,
                "group": self.group,
                "band_name": acq.band_name,
                "workflow": self.workflow,
                "vertices": self.vertices,
                "method": self.method,
            }
            if acq.band_type == BandType.THERMAL:
                tasks.append(SurfaceTemperature(**kwargs))
            else:
                kwargs["dsm_fname"] = self.dsm_fname
                kwargs["buffer_distance"] = self.buffer_distance
                tasks.append(SurfaceReflectance(**kwargs))

        return tasks
Example 16
def prepare_dataset(path, acq_parser_hint=None, granule=None):
    """
    Returns a dictionary of image paths, granule id and metadata file location for the granules
    contained within the input file
    """

    acq_container = acquisitions(path, acq_parser_hint)
    tasks = []

    if granule is None:
        granules = acq_container.granules
    else:
        granules = [granule]

    for granule_id in granules:
        image_dict = OrderedDict([
            ('B01', {}), ('B02', {}), ('B03', {}), ('B04', {}), ('B05', {}),
            ('B06', {}), ('B07', {}), ('B08', {}), ('B8A', {}), ('B09', {}),
            ('B10', {}), ('B11', {}), ('B12', {})
        ])

        for group_id in acq_container.groups:
            acqs = acq_container.get_acquisitions(granule=granule_id,
                                                  group=group_id,
                                                  only_supported_bands=False)

            for acq in acqs:
                key = Path(acq.uri).stem[-3:]
                if key in image_dict:
                    image_dict[key] = {'path': acq.uri, 'layer': '1'}

        tasks.append(tuple([image_dict, granule_id, acq.granule_xml]))

    return tasks
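
As a minimal usage sketch, the returned list can be iterated directly; the level-1 path below is a placeholder and the unpacking simply mirrors the tuples built above (an illustration, not code from the source):

level1_path = '/data/S2A_MSIL1C_example.zip'  # hypothetical input path

for image_dict, granule_id, granule_xml in prepare_dataset(level1_path):
    # image_dict maps band IDs (B01..B12, B8A) to {'path': ..., 'layer': ...}
    print(granule_id, sorted(image_dict), granule_xml)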
Example 17
 def test_land_sea(self):
     """
     Check calc_land_sea_mask for similar result
     """
     precomputed_mean = 0.8835457063711911
     geobox = acquisitions(LS8_SCENE1).get_all_acquisitions()[0].gridded_geo_box()
     land_sea_mask = get_land_sea_mask(geobox, ancillary_path=LAND_SEA_RASTERS)
     self.assertAlmostEqual(land_sea_mask.mean(), precomputed_mean, places=2)
Example 18
    def run(self):
        acqs = (acquisitions(self.level1,
                             self.acq_parser_hint).get_acquisitions(
                                 self.group, self.granule))

        with self.output().temporary_path() as out_fname:
            _get_dsm(acqs[0], self.dsm_fname, self.buffer_distance, out_fname,
                     self.compression, self.filter_opts)
Example 19
    def run(self):
        acq = (acquisitions(self.level1,
                            self.acq_parser_hint).get_acquisitions(
                                self.group, self.granule))[0]

        with self.output().temporary_path() as out_fname:
            _create_lon_lat_grids(acq, out_fname, self.compression,
                                  self.filter_opts)
Example 20
 def output(self):
     out_dirs = [self.reflectance_dir, self.shadow_dir, self.interpolation_dir]
     container = acquisitions(self.level1, self.acq_parser_hint)
     for granule in container.granules:
         for group in container.supported_groups:
             pth = container.get_root(self.work_root, group, granule)
             for out_dir in out_dirs:
                 yield luigi.LocalTarget(pjoin(pth, out_dir))
Example 21
 def test_load_acquisitions_s2a_scene1(self):
     container = acquisitions(S2A_SCENE1)
     self.assertEqual(len(container.get_all_acquisitions()), 11)
     self.assertEqual(len(container.get_acquisitions(group='RES-GROUP-0')),
                      4)
     self.assertEqual(len(container.get_acquisitions(group='RES-GROUP-1')),
                      6)
     self.assertEqual(len(container.get_acquisitions(group='RES-GROUP-2')),
                      1)
Example 22
    def run(self):
        acqs = (acquisitions(self.level1,
                             self.acq_parser_hint).get_acquisitions(
                                 self.group, self.granule))
        dsm_fname = self.input().path

        with self.output().temporary_path() as out_fname:
            _slope_aspect_arrays(acqs[0], dsm_fname, self.buffer_distance,
                                 out_fname, self.compression, self.filter_opts)
Example 23
    def run(self):
        acqs = (acquisitions(self.level1,
                             self.acq_parser_hint).get_acquisitions(
                                 self.group, self.granule))

        with self.output().temporary_path() as out_fname:
            _calculate_angles(acqs[0],
                              self.input().path, out_fname, self.compression,
                              self.filter_opts, self.tle_path)
Example 24
    def run(self):
        container = acquisitions(self.level1, self.acq_parser_hint)
        acqs = container.get_acquisitions(self.group, self.granule)
        acq = [acq for acq in acqs if acq.band_name == self.band_name][0]

        with self.output().temporary_path() as out_fname:
            interpolation_fname = self.input()['interpolation'].path
            ancillary_fname = self.input()['ancillary'].path
            _surface_brightness_temperature(acq, acqs, interpolation_fname,
                                            ancillary_fname, out_fname,
                                            self.compression, self.filter_opts)
Example 25
    def requires(self):
        with open(self.level1_list) as src:
            level1_list = [level1.strip() for level1 in src.readlines()]

        for level1 in level1_list:
            work_root = pjoin(self.workdir, '{}.ARD'.format(basename(level1)))
            container = acquisitions(level1, self.acq_parser_hint)
            for granule in container.granules:
                work_dir = container.get_root(work_root, granule=granule)
                # TODO; pkgdir for landsat data
                pkgdir = pjoin(self.pkgdir, basename(dirname(level1)))
                yield Package(level1, work_dir, granule, pkgdir)
Example 26
    def requires(self):
        with open(self.level1_list) as src:
            level1_list = [level1.strip() for level1 in src.readlines()]

        for level1 in level1_list:
            work_root = pjoin(self.workdir, '{}.ARD'.format(basename(level1)))
            container = acquisitions(level1, self.acq_parser_hint)
            for granule in container.granules:
                work_dir = container.get_root(work_root, granule=granule)
                acq = container.get_acquisitions(None, granule, False)[0]
                ymd = acq.acquisition_datetime.strftime('%Y-%m-%d')
                pkgdir = pjoin(self.pkgdir, ymd)
                yield Package(level1, work_dir, granule, pkgdir)
Example 27
    def run(self):
        acqs = (acquisitions(self.level1,
                             self.acq_parser_hint).get_acquisitions(
                                 self.group, self.granule))

        # input filenames
        dsm_fname = self.input()['dsm'].path
        sat_sol_fname = self.input()['sat_sol'].path

        with self.output().temporary_path() as out_fname:
            _calculate_cast_shadow(acqs[0], dsm_fname, self.buffer_distance,
                                   sat_sol_fname, out_fname, self.compression,
                                   self.filter_opts, False)
Example 28
 def tasks(level1_list):
     # TODO check with Lan-Wei regarding multi-granule vs single-granule
     #      gqa operation.
     #      The demos below submit all granules as single-granule gqa operations
     #      (same as wagl)
     for level1 in level1_list:
         container = acquisitions(level1, self.acq_parser_hint)
         for granule in container.granules:
             # TODO enable updating dataset in-place
             # if update_source:
             #     yield UpdateSource(level1, work_root)
             # else:
             yield GQATask(level1, granule, self.workdir)
Example 29
def can_pq(level1, acq_parser_hint=None):
    """
    A simple test to check if we can process a scene through the
    pq pipeline.

    :param level1:
        An `str` containing the file path name to the directory
        containing the level-1 data.

    :return:
        True if the scene can be processed through PQ, else False.
    """
    supported = ["LANDSAT_5", "LANDSAT_7", "LANDSAT_8"]
    acq = acquisitions(level1, acq_parser_hint).get_acquisitions()[0]
    return acq.platform_id in supported
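
A short usage sketch of can_pq; the level-1 directory name below is a placeholder and the surrounding print calls are purely illustrative:

level1 = '/data/LS8/LC80900842013284LGN00'  # hypothetical level-1 directory

if can_pq(level1):
    print('Platform supported; the scene can go through the PQ pipeline.')
else:
    print('Platform not supported by the PQ pipeline.')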
Example 30
    def requires(self):
        with open(self.level1_list) as src:
            level1_list = [level1.strip() for level1 in src.readlines()]

        for level1 in level1_list:
            work_name = "{}-wagl".format(basename(level1))
            container = acquisitions(level1, self.acq_parser_hint)
            for granule in container.granules:
                # as each granule is independent, include the granule as the work root
                work_root = pjoin(self.outdir, work_name, granule)
                if "group" in self.task.get_param_names():
                    for group in container.supported_groups:
                        yield self.task(level1, work_root, granule, group)
                else:
                    yield self.task(level1, work_root, granule)