Example #1
 def test_parse_olci_l1b(self):
     fmt = os.path.join(
         "{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_"
         "{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_"
         "{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_"
         "{timeliness:2s}_{collection:3s}.SEN3",
         "{dataset_name}_radiance.nc")
     # made up:
     filename = os.path.join(
         "S3A_OL_1_EFR____20180916T090539_"
         "20180916T090839_20180916T090539_0001_"
         "001_001_0001_CEN_M_"
         "AA_AAA.SEN3", "Oa21_radiance.nc")
     data = {
         'mission_id': 'S3A',
         'datatype_id': 'EFR',
         'start_time': dt.datetime(2018, 9, 16, 9, 5, 39),
         'end_time': dt.datetime(2018, 9, 16, 9, 8, 39),
         'creation_time': dt.datetime(2018, 9, 16, 9, 5, 39),
         'duration': 1,
         'cycle': 1,
         'relative_orbit': 1,
         'frame': 1,
         'centre': 'CEN',
         'platform_mode': 'M',
         'timeliness': 'AA',
         'collection': 'AAA',
         'dataset_name': 'Oa21',
     }
     p = Parser(fmt)
     result = p.parse(filename)
     self.assertDictEqual(result, data)
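
The `_<6s` spec above is what lets parse() recover 'EFR' from the 'EFR____' on disk: the pattern declares '_' as the fill character, and trollsift strips it again when parsing. A minimal sketch with the module-level helpers (assuming they mirror the Parser methods used in these tests):

from trollsift import compose, parse

fmt = "{datatype_id:_<6s}.SEN3"              # pad with '_' to width 6
print(compose(fmt, {"datatype_id": "EFR"}))  # 'EFR___.SEN3'
print(parse(fmt, "EFR___.SEN3"))             # {'datatype_id': 'EFR'}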
Example #2
 def test_parse_olci_l1b(self):
     fmt = os.path.join(
         "{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_"
         "{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_"
         "{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_"
         "{timeliness:2s}_{collection:3s}.SEN3",
         "{dataset_name}_radiance.nc")
     # made up:
     filename = os.path.join(
         "S3A_OL_1_EFR____20180916T090539_"
         "20180916T090839_20180916T090539_0001_"
         "001_001_0001_CEN_M_"
         "AA_AAA.SEN3",
         "Oa21_radiance.nc")
     data = {'mission_id': 'S3A',
             'datatype_id': 'EFR',
             'start_time': dt.datetime(2018, 9, 16, 9, 5, 39),
             'end_time': dt.datetime(2018, 9, 16, 9, 8, 39),
             'creation_time': dt.datetime(2018, 9, 16, 9, 5, 39),
             'duration': 1,
             'cycle': 1,
             'relative_orbit': 1,
             'frame': 1,
             'centre': 'CEN',
             'platform_mode': 'M',
             'timeliness': 'AA',
             'collection': 'AAA',
             'dataset_name': 'Oa21',
             }
     p = Parser(fmt)
     result = p.parse(filename)
     self.assertDictEqual(result, data)
Example #3
 def test_parse_duplicate_fields(self):
     """Test parsing a pattern that has duplicate fields."""
     fmt = '{version_number:1s}/filename_with_version_number_{version_number:1s}.tif'
     filename = '1/filename_with_version_number_1.tif'
     p = Parser(fmt)
     result = p.parse(filename)
     self.assertEqual(result['version_number'], '1')
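
Composing works with duplicate fields too: the pattern is str.format-compatible, so one value fills every occurrence. A small sketch (assuming the module-level compose helper):

from trollsift import compose

fmt = '{version_number:1s}/filename_with_version_number_{version_number:1s}.tif'
print(compose(fmt, {'version_number': '2'}))
# -> '2/filename_with_version_number_2.tif'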
Example #5
 def test_parse_viirs_sdr(self):
     fmt = 'SVI01_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'
     filename = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
     data = {'platform_shortname': 'npp',
             'start_time': dt.datetime(2012, 2, 25, 18, 1, 24, 500000), 'orbit': 1708,
             'end_time': dt.datetime(1900, 1, 1, 18, 2, 48, 700000),
             'source': 'noaa_ops',
             'creation_time': dt.datetime(2012, 2, 26, 0, 21, 30, 255476)}
     p = Parser(fmt)
     result = p.parse(filename)
     self.assertDictEqual(result, data)
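
Note the end_time value: '%H%M%S%f' carries no date directives, so the parsed datetime keeps the strptime default date of 1900-01-01. Callers typically re-attach the date from start_time; a hedged sketch of such a helper (hypothetical, not part of the test):

import datetime as dt

def combine_end_time(start_time, end_time):
    """Give the time-only end_time the date of start_time (sketch)."""
    end = end_time.replace(year=start_time.year, month=start_time.month,
                           day=start_time.day)
    if end < start_time:  # the granule crossed midnight
        end += dt.timedelta(days=1)
    return end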
Example #6
 def setUp(self):
     self.fmt = "/somedir/{directory}/hrpt_{platform:4s}{platnum:2s}" +\
         "_{time:%Y%m%d_%H%M}_{orbit:05d}.l1b"
     self.string = "/somedir/otherdir/hrpt_noaa16_20140210_1004_69022.l1b"
     self.data = {
         'directory': 'otherdir',
         'platform': 'noaa',
         'platnum': '16',
         'time': dt.datetime(2014, 2, 10, 10, 4),
         'orbit': 69022
     }
     self.p = Parser(self.fmt)
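
The adjacent fields {platform:4s}{platnum:2s} can only split 'noaa16' because both widths are fixed; with free-size fields the boundary would be ambiguous. A quick sketch (assuming the module-level parse helper):

from trollsift import parse

print(parse("hrpt_{platform:4s}{platnum:2s}.l1b", "hrpt_noaa16.l1b"))
# -> {'platform': 'noaa', 'platnum': '16'}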
Example #7
 def test_parse_iasi_l2(self):
     fmt = "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}_C_EUMS_{processing_time:%Y%m%d%H%M%S}_IASI_PW3_02_{platform_id}_{start_time:%Y%m%d-%H%M%S}Z_{end_time:%Y%m%d.%H%M%S}Z.hdf"
     filename = "W_XX-EUMETSAT-kan,iasi,metopb+kan_C_EUMS_20170920103559_IASI_PW3_02_M01_20170920-102217Z_20170920.102912Z.hdf"
     data = {'reception_location': 'kan',
             'instrument': 'iasi',
             'long_platform_id': 'metopb',
             'processing_location': 'kan',
             'processing_time': dt.datetime(2017, 9, 20, 10, 35, 59),
             'platform_id': 'M01',
             'start_time': dt.datetime(2017, 9, 20, 10, 22, 17),
             'end_time': dt.datetime(2017, 9, 20, 10, 29, 12)}
     p = Parser(fmt)
     result = p.parse(filename)
     self.assertDictEqual(result, data)
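
Fields without a format spec, such as {platform_id} above, match any run of characters up to the next literal, so the commas, '+' and '_' in the name act as delimiters. A shortened sketch (the field names are hypothetical abbreviations):

from trollsift import parse

print(parse("W_XX-EUMETSAT-{loc},{instr},{plat}+{proc}_C",
            "W_XX-EUMETSAT-kan,iasi,metopb+kan_C"))
# -> {'loc': 'kan', 'instr': 'iasi', 'plat': 'metopb', 'proc': 'kan'}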
Example #8
class TestParser(unittest.TestCase):
    def setUp(self):
        self.fmt = "/somedir/{directory}/hrpt_{platform:4s}{platnum:2s}" +\
            "_{time:%Y%m%d_%H%M}_{orbit:05d}.l1b"
        self.string = "/somedir/otherdir/hrpt_noaa16_20140210_1004_69022.l1b"
        self.data = {
            'directory': 'otherdir',
            'platform': 'noaa',
            'platnum': '16',
            'time': dt.datetime(2014, 2, 10, 10, 4),
            'orbit': 69022
        }
        self.p = Parser(self.fmt)

    def test_parse(self):
        # Run
        result = self.p.parse(self.string)
        # Assert
        self.assertDictEqual(result, self.data)

    def test_compose(self):
        # Run
        result = self.p.compose(self.data)
        # Assert
        self.assertEqual(result, self.string)

    def test_validate(self):
        # These cases are True
        self.assertTrue(
            self.p.validate(
                "/somedir/avhrr/2014/hrpt_noaa19_20140212_1412_12345.l1b"))
        # These cases are False
        self.assertFalse(
            self.p.validate(
                "/somedir/bla/bla/hrpt_noaa19_20140212__1412_00000.l1b"))

    def assertDictEqual(self, a, b):
        for key in a:
            self.assertTrue(key in b)
            self.assertEqual(a[key], b[key])

        self.assertEqual(len(a), len(b))

    def assertItemsEqual(self, a, b):
        for i in range(len(a)):
            if isinstance(a[i], dict):
                self.assertDictEqual(a[i], b[i])
            else:
                self.assertEqual(a[i], b[i])
        self.assertEqual(len(a), len(b))
Example #9
 def test_parse_viirs_sdr(self):
     fmt = 'SVI01_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'
     filename = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
     data = {
         'platform_shortname': 'npp',
         'start_time': dt.datetime(2012, 2, 25, 18, 1, 24, 500000),
         'orbit': 1708,
         'end_time': dt.datetime(1900, 1, 1, 18, 2, 48, 700000),
         'source': 'noaa_ops',
         'creation_time': dt.datetime(2012, 2, 26, 0, 21, 30, 255476)
     }
     p = Parser(fmt)
     result = p.parse(filename)
     self.assertDictEqual(result, data)
Example #10
 def test_parse_iasi_l2(self):
     fmt = "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}_C_EUMS_{processing_time:%Y%m%d%H%M%S}_IASI_PW3_02_{platform_id}_{start_time:%Y%m%d-%H%M%S}Z_{end_time:%Y%m%d.%H%M%S}Z.hdf"
     filename = "W_XX-EUMETSAT-kan,iasi,metopb+kan_C_EUMS_20170920103559_IASI_PW3_02_M01_20170920-102217Z_20170920.102912Z.hdf"
     data = {
         'reception_location': 'kan',
         'instrument': 'iasi',
         'long_platform_id': 'metopb',
         'processing_location': 'kan',
         'processing_time': dt.datetime(2017, 9, 20, 10, 35, 59),
         'platform_id': 'M01',
         'start_time': dt.datetime(2017, 9, 20, 10, 22, 17),
         'end_time': dt.datetime(2017, 9, 20, 10, 29, 12)
     }
     p = Parser(fmt)
     result = p.parse(filename)
     self.assertDictEqual(result, data)
Example #11
 def setUp(self):
     self.fmt = "/somedir/{directory}/hrpt_{platform:4s}{platnum:2s}" +\
         "_{time:%Y%m%d_%H%M}_{orbit:05d}.l1b"
     self.string = "/somedir/otherdir/hrpt_noaa16_20140210_1004_69022.l1b"
     self.data = {'directory': 'otherdir', 'platform': 'noaa',
                  'platnum': '16',
                  'time': dt.datetime(2014, 2, 10, 10, 4), 'orbit': 69022}
     self.p = Parser(self.fmt)
Example #12
    def parse(self, filename):
        """Parse the given filename.

        Returns:
            File format, filename info
        """
        for fmt in self.formats:
            parser = Parser(fmt['pattern'])
            try:
                info = parser.parse(filename, full_match=fmt['full_match'])
                break
            except ValueError:
                continue
        else:
            raise ValueError('Filename {} doesn\'t match any of the supported '
                             'formats {}.'.format(filename, self.format_names))
        return fmt['name'], self._postproc(fmt['name'], info)
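
For this dispatcher to work, self.formats presumably holds entries pairing a name with a trollsift pattern and a full_match flag, along the lines of this sketch (names and patterns are hypothetical):

formats = [
    {'name': 'hrpt',
     'pattern': 'hrpt_{platform:4s}{platnum:2s}_{time:%Y%m%d_%H%M}_{orbit:05d}.l1b',
     'full_match': True},
    {'name': 'viirs_sdr',
     'pattern': 'SVI01_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_{tail}.h5',
     'full_match': False},
]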
Example #13
class TestParser(unittest.TestCase):

    def setUp(self):
        self.fmt = "/somedir/{directory}/hrpt_{platform:4s}{platnum:2s}" +\
            "_{time:%Y%m%d_%H%M}_{orbit:05d}.l1b"
        self.string = "/somedir/otherdir/hrpt_noaa16_20140210_1004_69022.l1b"
        self.data = {'directory': 'otherdir', 'platform': 'noaa',
                     'platnum': '16',
                     'time': dt.datetime(2014, 2, 10, 10, 4), 'orbit': 69022}
        self.p = Parser(self.fmt)

    def test_parse(self):
        # Run
        result = self.p.parse(self.string)
        # Assert
        self.assertDictEqual(result, self.data)

    def test_compose(self):
        # Run
        result = self.p.compose(self.data)
        # Assert
        self.assertEqual(result, self.string)

    def test_validate(self):
        # These cases are True
        self.assertTrue(
            self.p.validate("/somedir/avhrr/2014/hrpt_noaa19_20140212_1412_12345.l1b"))
        # These cases are False
        self.assertFalse(
            self.p.validate("/somedir/bla/bla/hrpt_noaa19_20140212__1412_00000.l1b"))

    def assertDictEqual(self, a, b):
        for key in a:
            self.assertTrue(key in b)
            self.assertEqual(a[key], b[key])

        self.assertEqual(len(a), len(b))

    def assertItemsEqual(self, a, b):
        for i in range(len(a)):
            if isinstance(a[i], dict):
                self.assertDictEqual(a[i], b[i])
            else:
                self.assertEqual(a[i], b[i])
        self.assertEqual(len(a), len(b))
Example #14
def process_all_scans_in_dname(dname, out_path, ok_dates=None, rotate=False):
    """Make level 1c files for all files in directory dname."""
    parser = Parser(HRIT_FILE_PATTERN)
    fl_ = glob(os.path.join(dname, globify(HRIT_FILE_PATTERN)))
    dates = [parser.parse(os.path.basename(p))['start_time'] for p in fl_]
    unique_dates = np.unique(dates).tolist()
    for uqdate in unique_dates:
        date_formatted = uqdate.strftime("%Y%m%d%H%M")
        if ok_dates is not None and date_formatted not in ok_dates:
            print("Skipping date {date}".format(date=date_formatted))
            continue
        # Every hour only:
        # if uqdate.minute != 0:
        #    continue
        tslot_files = [
            f for f in fl_
            if parser.parse(os.path.basename(f))['start_time'] == uqdate
        ]
        try:
            process_one_scan(tslot_files, out_path, rotate=rotate)
        except Exception:
            # keep going with the remaining time slots even if one scan fails
            pass
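
The loop above re-parses every filename once per unique date. A hedged refactoring sketch that groups the files in a single pass (hypothetical helper, same trollsift calls):

import os
from collections import defaultdict

def group_by_start_time(paths, parser):
    """Map start_time -> list of files, parsing each basename once."""
    groups = defaultdict(list)
    for path in paths:
        groups[parser.parse(os.path.basename(path))['start_time']].append(path)
    return groups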
Example #15
    def load(self, satscene, filename=None, *args, **kwargs):
        conf = ConfigParser()
        conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
        options = dict(conf.items(satscene.instrument_name + "-level2",
                                  raw=True))
        options["resolution"] = 1000
        options["geofile"] = os.path.join(options["dir"], options["geofile"])
        options.update(kwargs)

        fparser = Parser(options.get("filename"))
        gparser = Parser(options.get("geofile"))

        if filename is not None:
            datasets = {}
            if not isinstance(filename, (list, set, tuple)):
                filename = [filename]

            for fname in filename:
                if fnmatch(os.path.basename(fname), fparser.globify()):
                    metadata = fparser.parse(os.path.basename(fname))
                    datasets.setdefault(
                        metadata["start_time"], []).append(fname)
                elif fnmatch(os.path.basename(fname), gparser.globify()):
                    metadata = gparser.parse(os.path.basename(fname))
                    datasets.setdefault(
                        metadata["start_time"], []).append(fname)

            scenes = []
            for start_time, dataset in datasets.items():
                newscn = copy.deepcopy(satscene)
                newscn.time_slot = start_time
                self.load_dataset(newscn, filename=dataset, *args, **kwargs)
                scenes.append(newscn)

            if not scenes:
                logger.debug("Looking for files")
                self.load_dataset(satscene, *args, **kwargs)
            else:
                entire_scene = assemble_segments(
                    sorted(scenes, key=lambda x: x.time_slot))
                satscene.channels = entire_scene.channels
                satscene.area = entire_scene.area
                satscene.orbit = int(entire_scene.orbit)
                satscene.info["orbit_number"] = int(entire_scene.orbit)
        else:
            self.load_dataset(satscene, *args, **kwargs)
Example #16
    def load_dataset(self, satscene, filename=None, *args, **kwargs):
        """Read data from file and load it into *satscene*.
        """
        del args
        conf = ConfigParser()
        conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
        options = dict(conf.items(satscene.instrument_name + "-level2",
                                  raw=True))
        options["resolution"] = 1000
        options["geofile"] = os.path.join(options["dir"], options["geofile"])
        options.update(kwargs)

        fparser = Parser(options.get("filename"))
        gparser = Parser(options.get("geofile"))

        if isinstance(filename, (list, set, tuple)):
            # we got the entire dataset.
            for fname in filename:
                if fnmatch(os.path.basename(fname), fparser.globify()):
                    metadata = fparser.parse(os.path.basename(fname))
                    resolution = self.res[metadata["resolution"]]
                    self.datafiles[resolution] = fname
                elif fnmatch(os.path.basename(fname), gparser.globify()):
                    self.geofile = fname
        elif ((filename is not None) and
              fnmatch(os.path.basename(options["filename"]), fparser.globify())):
            # read just one file
            logger.debug("Reading from file: " + str(options["filename"]))
            filename = options["filename"]
            resolution = self.res[os.path.basename(filename)[5]]
            self.datafiles[resolution] = filename
        if not self.datafiles:
            # find files according to config
            logger.debug(
                "Didn't get any valid file as input, looking in defined places")
            resolution = int(options["resolution"]) or 1000

            for res in [250, 500, 1000]:
                datafile = globify(os.path.join(options['dir'],
                                                options["filename"]),
                                   {'resolution': self.inv_res[res],
                                    'start_time': satscene.time_slot})
                try:
                    self.datafiles[res] = check_filename(datafile)
                except IOError:
                    self.datafiles[res] = None
                    logger.warning("Can't find file for resolution %s with template: %s",
                                   str(res), datafile)

            try:
                self.geofile = check_filename(globify(options["geofile"],
                                                      {'start_time': satscene.time_slot}))
            except IOError:
                self.geofile = None
                logger.warning("Can't find geofile with template: %s",
                               options['geofile'])

        resolution = options["resolution"]
        cores = options.get("cores", max(multiprocessing.cpu_count() // 4, 1))

        datadict = {
            1000: ['EV_250_Aggr1km_RefSB',
                   'EV_500_Aggr1km_RefSB',
                   'EV_1KM_RefSB',
                   'EV_1KM_Emissive'],
            500: ['EV_250_Aggr500_RefSB',
                  'EV_500_RefSB'],
            250: ['EV_250_RefSB']}

        loaded_bands = []

        # process by dataset, reflective and emissive datasets separately

        resolutions = [250, 500, 1000]

        for res in resolutions:
            if res < resolution:
                continue
            logger.debug("Working on resolution %d", res)
            self.filename = self.datafiles[res]

            logger.debug("Using " + str(cores) + " cores for interpolation")

            try:
                self.data = SD(str(self.filename))
            except HDF4Error as err:
                logger.warning("Could not load data from " + str(self.filename)
                               + ": " + str(err))
                continue

            datasets = datadict[res]
            for dataset in datasets:
                subdata = self.data.select(dataset)
                band_names = subdata.attributes()["band_names"].split(",")
                if len(satscene.channels_to_load & set(band_names)) > 0:
                    # get the relative indices of the desired channels
                    indices = [i for i, band in enumerate(band_names)
                               if band in satscene.channels_to_load]
                    uncertainty = self.data.select(dataset + "_Uncert_Indexes")
                    if dataset.endswith('Emissive'):
                        array = calibrate_tb(
                            subdata, uncertainty, indices, band_names)
                    else:
                        array = calibrate_refl(subdata, uncertainty, indices)
                    for (i, idx) in enumerate(indices):
                        if band_names[idx] in loaded_bands:
                            continue
                        satscene[band_names[idx]] = array[i]
                        # fix the resolution to match the loaded data.
                        satscene[band_names[idx]].resolution = res
                        loaded_bands.append(band_names[idx])

        # Get the orbit number
        if not satscene.orbit:
            mda = self.data.attributes()["CoreMetadata.0"]
            orbit_idx = mda.index("ORBITNUMBER")
            satscene.orbit = int(mda[orbit_idx + 111:orbit_idx + 116])

        # Get the geolocation
        # if resolution != 1000:
        #    logger.warning("Cannot load geolocation at this resolution (yet).")
        #    return

        for band_name in loaded_bands:
            lon, lat = self.get_lonlat(
                satscene[band_name].resolution, satscene.time_slot, cores)
            area = geometry.SwathDefinition(lons=lon, lats=lat)
            satscene[band_name].area = area

        # Trimming out dead sensor lines (detectors) on aqua:
        # (in addition channel 21 is noisy)
        if satscene.satname == "aqua":
            for band in ["6", "27", "36"]:
                if not satscene[band].is_loaded() or satscene[band].data.mask.all():
                    continue
                width = satscene[band].data.shape[1]
                height = satscene[band].data.shape[0]
                indices = satscene[band].data.mask.sum(1) < width
                if indices.sum() == height:
                    continue
                satscene[band] = satscene[band].data[indices, :]
                satscene[band].area = geometry.SwathDefinition(
                    lons=satscene[band].area.lons[indices, :],
                    lats=satscene[band].area.lats[indices, :])

        # Trimming out dead sensor lines (detectors) on terra:
        # (in addition channels 27, 30, 34, 35, and 36 are noisy)
        if satscene.satname == "terra":
            for band in ["29"]:
                if not satscene[band].is_loaded() or satscene[band].data.mask.all():
                    continue
                width = satscene[band].data.shape[1]
                height = satscene[band].data.shape[0]
                indices = satscene[band].data.mask.sum(1) < width
                if indices.sum() == height:
                    continue
                satscene[band] = satscene[band].data[indices, :]
                satscene[band].area = geometry.SwathDefinition(
                    lons=satscene[band].area.lons[indices, :],
                    lats=satscene[band].area.lats[indices, :])

        for band_name in loaded_bands:
            band_uid = hashlib.sha1(satscene[band_name].data.mask).hexdigest()
            satscene[band_name].area.area_id = ("swath_" + satscene.fullname + "_"
                                                + str(satscene.time_slot) + "_"
                                                +
                                                str(satscene[
                                                    band_name].shape) + "_"
                                                + str(band_uid))
            satscene[band_name].area_id = satscene[band_name].area.area_id
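
The orbit_idx + 111:orbit_idx + 116 slice above leans on fixed offsets inside the ODL-formatted CoreMetadata.0 string. A hedged, offset-free alternative (assuming the usual OBJECT = ORBITNUMBER ... VALUE = n layout of ECS metadata):

import re

def orbit_from_coremetadata(mda):
    """Extract the orbit number without magic offsets (sketch)."""
    match = re.search(r'ORBITNUMBER.*?VALUE\s*=\s*(\d+)', mda, re.DOTALL)
    return int(match.group(1)) if match else None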
Example #17
    'IR_120': 'ch_tb12',
    'IR_134': 'ch_tb133',
    'IR_097': 'ch_tb97',
    'WV_062': 'ch_tb67',
    'WV_073': 'ch_tb73'
}
PLATFORM_SHORTNAMES = {
    "MSG1": "Meteosat-8",
    "MSG2": "Meteosat-9",
    "MSG3": "Meteosat-10",
    "MSG4": "Meteosat-11"
}

# H-000-MSG3__-MSG3________-IR_120___-000003___-201410051115-__:
hrit_file_pattern = '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<12s}-{channel:_<8s}_-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'
p__ = Parser(hrit_file_pattern)
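
# A hedged sketch of what p__ returns for the sample header name in the
# comment above (assuming trollsift strips the '_' fill characters on parse):
#
#   p__.parse('H-000-MSG3__-MSG3________-IR_120___-000003___-201410051115-__')
#   -> {'rate': 'H', 'hrit_format': 'MSG3', 'platform_shortname': 'MSG3',
#       'channel': 'IR_120', 'segment': '000003',
#       'start_time': datetime.datetime(2014, 10, 5, 11, 15)}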


def make_azidiff_angle(sata, suna, fill):
    """ Calculate azimuth difference angle """
    #np.ma.mod(np.ma.abs(sunaz - sataz), 180) same as?
    daz = sata - suna
    daz[daz < 0] = -1 * daz[daz < 0]
    daz[daz > 360] = daz[daz > 360] - 360
    daz[daz > 180] = 360 - daz[daz > 180]
    # fix nodata
    #daz[suna == fill] = fill
    #daz[sata == fill] = fill
    return daz
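
On the question in the comment above: np.ma.mod(np.ma.abs(...), 180) is not equivalent, because a plain mod-180 wraps a 190-degree difference to 10 instead of folding it to 170. A vectorized form that does match the logic above (a sketch, assuming angles in degrees):

import numpy as np

def make_azidiff_angle_folded(sata, suna):
    """Fold the absolute azimuth difference into [0, 180] (sketch)."""
    daz = np.abs(sata - suna) % 360
    return np.where(daz > 180, 360.0 - daz, daz)

# np.mod(190.0, 180) == 10.0, while the folded difference is 170.0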

Example #18
    def load_dataset(self, satscene, filename=None, *args, **kwargs):
        """Read data from file and load it into *satscene*.
        """
        del args
        conf = ConfigParser()
        conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
        options = dict(conf.items(satscene.instrument_name + "-level2",
                                  raw=True))
        options["resolution"] = 1000
        options["geofile"] = os.path.join(options["dir"], options["geofile"])
        options.update(kwargs)

        fparser = Parser(options["filename"])
        gparser = Parser(options["geofile"])

        if isinstance(filename, (list, set, tuple)):
            # we got the entire dataset.
            for fname in filename:
                if fnmatch(os.path.basename(fname), fparser.globify()):
                    metadata = fparser.parse(os.path.basename(fname))
                    resolution = self.res[metadata["resolution"]]
                    self.datafiles[resolution] = fname
                elif fnmatch(os.path.basename(fname), gparser.globify()):
                    self.geofile = fname
        elif ((filename is not None) and
              fnmatch(os.path.basename(options["filename"]), fparser.globify())):
            # read just one file
            logger.debug("Reading from file: " + str(options["filename"]))
            filename = options["filename"]
            resolution = self.res[os.path.basename(filename)[5]]
            self.datafiles[resolution] = filename
        else:
            # find files according to config
            resolution = int(options["resolution"]) or 1000

            for res in [250, 500, 1000]:
                datafile = os.path.join(options['dir'],
                                        options["filename" + str(res)])
                try:
                    self.datafiles[res] = get_filename(datafile,
                                                       satscene.time_slot)
                except IOError:
                    self.datafiles[res] = None
                    logger.warning("Can't find file for resolution %s with template: %s",
                                   str(res), datafile)

            try:
                self.geofile = get_filename(options["geofile"],
                                            satscene.time_slot)
            except IOError:
                self.geofile = None
                logger.warning("Can't find geofile with template: %s",
                               options['geofile'])

        resolution = options["resolution"]
        cores = options.get("cores", max(multiprocessing.cpu_count() // 4, 1))

        datadict = {
            1000: ['EV_250_Aggr1km_RefSB',
                   'EV_500_Aggr1km_RefSB',
                   'EV_1KM_RefSB',
                   'EV_1KM_Emissive'],
            500: ['EV_250_Aggr500_RefSB',
                  'EV_500_RefSB'],
            250: ['EV_250_RefSB']}

        loaded_bands = []

        # process by dataset, reflective and emissive datasets separately

        resolutions = [250, 500, 1000]

        for res in resolutions:
            if res < resolution:
                continue
            logger.debug("Working on resolution %d", res)
            self.filename = self.datafiles[res]

            logger.debug("Using " + str(cores) + " cores for interpolation")

            try:
                self.data = SD(str(self.filename))
            except HDF4Error as err:
                logger.warning("Could not load data from " + str(self.filename)
                               + ": " + str(err))
                continue

            datasets = datadict[res]
            for dataset in datasets:
                subdata = self.data.select(dataset)
                band_names = subdata.attributes()["band_names"].split(",")
                if len(satscene.channels_to_load & set(band_names)) > 0:
                    # get the relative indices of the desired channels
                    indices = [i for i, band in enumerate(band_names)
                               if band in satscene.channels_to_load]
                    uncertainty = self.data.select(dataset + "_Uncert_Indexes")
                    if dataset.endswith('Emissive'):
                        array = calibrate_tb(
                            subdata, uncertainty, indices, band_names)
                    else:
                        array = calibrate_refl(subdata, uncertainty, indices)
                    for (i, idx) in enumerate(indices):
                        if band_names[idx] in loaded_bands:
                            continue
                        satscene[band_names[idx]] = array[i]
                        # fix the resolution to match the loaded data.
                        satscene[band_names[idx]].resolution = res
                        loaded_bands.append(band_names[idx])

        # Get the orbit number
        if not satscene.orbit:
            mda = self.data.attributes()["CoreMetadata.0"]
            orbit_idx = mda.index("ORBITNUMBER")
            satscene.orbit = int(mda[orbit_idx + 111:orbit_idx + 116])

        # Get the geolocation
        # if resolution != 1000:
        #    logger.warning("Cannot load geolocation at this resolution (yet).")
        #    return

        for band_name in loaded_bands:
            lon, lat = self.get_lonlat(satscene[band_name].resolution, satscene.time_slot, cores)
            area = geometry.SwathDefinition(lons=lon, lats=lat)
            satscene[band_name].area = area

        # Trimming out dead sensor lines (detectors) on aqua:
        # (in addition channel 21 is noisy)
        if satscene.satname == "aqua":
            for band in ["6", "27", "36"]:
                if not satscene[band].is_loaded() or satscene[band].data.mask.all():
                    continue
                width = satscene[band].data.shape[1]
                height = satscene[band].data.shape[0]
                indices = satscene[band].data.mask.sum(1) < width
                if indices.sum() == height:
                    continue
                satscene[band] = satscene[band].data[indices, :]
                satscene[band].area = geometry.SwathDefinition(
                    lons=satscene[band].area.lons[indices, :],
                    lats=satscene[band].area.lats[indices, :])

        # Trimming out dead sensor lines (detectors) on terra:
        # (in addition channels 27, 30, 34, 35, and 36 are noisy)
        if satscene.satname == "terra":
            for band in ["29"]:
                if not satscene[band].is_loaded() or satscene[band].data.mask.all():
                    continue
                width = satscene[band].data.shape[1]
                height = satscene[band].data.shape[0]
                indices = satscene[band].data.mask.sum(1) < width
                if indices.sum() == height:
                    continue
                satscene[band] = satscene[band].data[indices, :]
                satscene[band].area = geometry.SwathDefinition(
                    lons=satscene[band].area.lons[indices, :],
                    lats=satscene[band].area.lats[indices, :])

        for band_name in loaded_bands:
            band_uid = hashlib.sha1(satscene[band_name].data.mask).hexdigest()
            satscene[band_name].area.area_id = ("swath_" + satscene.fullname + "_"
                                                + str(satscene.time_slot) + "_"
                                                +
                                                str(satscene[
                                                    band_name].shape) + "_"
                                                + str(band_uid))
            satscene[band_name].area_id = satscene[band_name].area.area_id
Example #19
    def __call__(self, message):

        urlobj = urlparse(message.data['uri'])

        if 'start_time' in message.data:
            start_time = message.data['start_time']
        else:
            raise InconsistentMessage("No start time in message!")

        if message.data['instruments'] == self.instrument:
            path, fname = os.path.split(urlobj.path)
            LOG.debug("path " + str(path) + " filename = " + str(fname))
            instrument = str(message.data['instruments'])
            LOG.debug("Instrument %r supported!", instrument)
            platform_name = METOPS.get(message.data['satellite'],
                                       message.data['satellite'])
            filepath = os.path.join(path, fname)
        else:
            LOG.debug("Scene is not supported")
            raise SceneNotSupported("platform and instrument: " +
                                    str(message.data['platform_name']) + " " +
                                    str(message.data['instruments']))

        if 'end_time' in message.data:
            end_time = message.data['end_time']
        else:
            LOG.warning("No end time in message!")
            end_time = start_time + timedelta(seconds=self.passlength_seconds)
            LOG.info("End time set to: %s", str(end_time))

        # Check that the input file really exists:
        if not os.path.exists(filepath):
            raise IOError(
                "File {} does not exist. Don't do anything...".format(filepath))

        LOG.info("Sat and Instrument: %s %s", platform_name, instrument)

        tle_dirs = self.tle_dirs
        if not isinstance(tle_dirs, list):
            tle_dirs = [tle_dirs]
        tle_files = []
        for tledir in tle_dirs:
            tle_files = tle_files + glob(
                os.path.join(tledir, globify(self.tlefilename)))

        tlep = Parser(self.tlefilename)

        time_thr = timedelta(days=5)
        utcnow = datetime.utcnow()
        valid_tle_file = None
        for tlefile in tle_files:
            fname = os.path.basename(tlefile)
            res = tlep.parse(fname)
            dtobj = res['time']

            delta_t = abs(utcnow - dtobj)
            if delta_t < time_thr:
                time_thr = delta_t
                valid_tle_file = tlefile

        if not valid_tle_file:
            raise NoValidTles("Failed finding a valid tle file!")
        else:
            LOG.debug("Valid TLE file: %s", valid_tle_file)

        if not isinstance(self.areaids, list):
            self.areaids = [self.areaids]
        inside = False

        for areaid in self.areaids:
            area_def = load_area(self.area_def_file, areaid)
            inside = self.granule_inside_area(start_time, end_time,
                                              platform_name, area_def,
                                              valid_tle_file)
            if inside:
                return True

        return False
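
The TLE search above boils down to "the file whose parsed time lies closest to now, within five days". A reusable sketch (hypothetical helper, same Parser calls):

import os
from datetime import datetime, timedelta

def closest_recent_file(files, parser, max_age=timedelta(days=5)):
    """Return the file whose parsed 'time' is nearest to utcnow (sketch)."""
    best, best_delta = None, max_age
    for path in files:
        delta = abs(datetime.utcnow() - parser.parse(os.path.basename(path))['time'])
        if delta < best_delta:
            best, best_delta = path, delta
    return best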
Example #20
def process_one_scan(tslot_files, out_path, rotate=True, engine='h5netcdf'):
    """Make level 1c files in PPS-format."""
    for fname in tslot_files:
        if not os.path.isfile(fname):
            raise FileNotFoundError('No such file: {}'.format(fname))

    tic = time.time()
    parser = Parser(HRIT_FILE_PATTERN)
    platform_shortname = parser.parse(os.path.basename(
        tslot_files[0]))['platform_shortname']
    start_time = parser.parse(os.path.basename(tslot_files[0]))['start_time']

    # Load and calibrate data using inter-calibration coefficients from
    # Meirink et al
    coefs = get_calibration_for_time(platform=platform_shortname,
                                     time=start_time)
    scn_ = Scene(reader='seviri_l1b_hrit',
                 filenames=tslot_files,
                 reader_kwargs={
                     'calib_mode': CALIB_MODE,
                     'ext_calib_coefs': coefs
                 })
    if scn_.attrs['sensor'] != {'seviri'}:
        raise ValueError('Not SEVIRI data')
    scn_.load(BANDNAMES)

    # By default pixel (0,0) is S-E. Rotate bands so that (0,0) is N-W.
    if rotate:
        for band in BANDNAMES:
            rotate_band(scn_, band)
    scn_.attrs['image_rotated'] = rotate

    # Find lat/lon data
    lons, lats = get_lonlats(scn_['IR_108'])

    # Compute angles
    suna, sunz = get_solar_angles(scn_, lons=lons, lats=lats)
    sata, satz = get_satellite_angles(scn_['IR_108'], lons=lons, lats=lats)
    azidiff = make_azidiff_angle(sata, suna)

    # Update coordinates
    update_coords(scn_)

    # Add ancillary datasets to the scene
    add_ancillary_datasets(scn_,
                           lons=lons,
                           lats=lats,
                           sunz=sunz,
                           satz=satz,
                           azidiff=azidiff)
    add_proj_satpos(scn_)

    # Set attributes. This changes SEVIRI band names to PPS band names.
    set_attrs(scn_)

    # Write datasets to netcdf
    filename = compose_filename(scene=scn_,
                                out_path=out_path,
                                instrument='seviri',
                                band=scn_['IR_108'])
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_),
                       engine=engine,
                       encoding=get_encoding_seviri(scn_),
                       unlimited_dims=['time'],
                       include_lonlats=False,
                       pretty=True,
                       flatten_attrs=True,
                       exclude_attrs=['raw_metadata'])
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))  # About 40 seconds
    return filename
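
A hedged usage sketch (the paths and the HRIT segment glob are made up): collect one repeat cycle's segments and produce the level 1c file without rotating the bands.

from glob import glob

segments = sorted(glob('/data/hrit/H-000-MSG4__-MSG4________-*-201410051115-__'))
process_one_scan(segments, '/data/lvl1c', rotate=False)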