Example no. 1
0
    def test_night_mask_sls(self):
        """The SLSTR detector's night mask must match the saved reference array."""
        zip_path = glob.glob("../../data/test_data/S3A*.zip")[0]
        target_path = "../../data/test_data/sls_nightmask.npy"
        temp_dir = "../../data/temp/"

        expected = np.load(target_path)

        # Unpack the product archive and run the full detection pipeline.
        detector = SLSDetector(utils.extract_zip(zip_path, temp_dir))
        detector.run_detector()

        self.assertEqual(True, (expected == detector.night_mask).all())
Example no. 2
0
    def test_vza_interpolation(self):
        """The interpolated viewing-zenith-angle grid must match the saved reference."""
        zip_path = glob.glob("../../data/test_data/S3A*.zip")[0]
        target_path = "../../data/test_data/sls_vza.npy"
        temp_dir = "../../data/temp/"

        expected = np.load(target_path)

        # Unpack the product archive and run the full detection pipeline.
        detector = SLSDetector(utils.extract_zip(zip_path, temp_dir))
        detector.run_detector()

        self.assertEqual(True, (expected == detector.vza).all())
Example no. 3
0
def main():
    """Run hotspot detection on a single product file and write results to CSV.

    Command-line arguments:
        sys.argv[1] -- path to the product file to process
        sys.argv[2] -- sensor name; 'sls' selects the SLSTR detector,
                       anything else selects the ATX detector
    """
    file_to_process = sys.argv[1]
    sensor = sys.argv[2]

    if sensor != 'sls':
        product = epr.Product(file_to_process)
        HotspotDetector = ATXDetector(product)
    else:
        product = utils.extract_zip(file_to_process, fp.slstr_extract_temp)
        HotspotDetector = SLSDetector(product)

    # Both detectors export the same coordinate columns, so the key list
    # does not need to be duplicated per branch.
    keys = ['latitude', 'longitude']

    HotspotDetector.run_detector()
    df = HotspotDetector.to_dataframe(keys=keys)
    df.to_csv(utils.build_outpath(sensor, file_to_process, 'hotspots'))
Example no. 4
0
    def test_extract_zip(self):
        """extract_zip must return exactly the expected SLSTR dataset keys."""
        expected_keys = [
            "S5_radiance_an",
            "S6_radiance_an",
            "geodetic_an",
            "geometry_tn",
            "cartesian_an",
            "cartesian_tx",
            "indices_an",
            "flags_an",
            "time_an",
        ]
        # Only the key set matters for the comparison; values are all None.
        target = dict.fromkeys(expected_keys)

        zip_path = glob.glob("../../data/test_data/S3A*.zip")[0]
        temp_dir = "../../data/temp/"

        result = utils.extract_zip(zip_path, temp_dir)
        self.assertEqual(target.keys(), result.keys())
Example no. 5
0
    def test_run_sls(self):
        """End-to-end SLSTR run must reproduce the saved reference dataframe."""
        # setup: reference output and input archive
        expected = pd.read_csv(glob.glob("../../data/test_data/S3A*.csv")[0])
        zip_path = glob.glob("../../data/test_data/S3A*.zip")[0]
        temp_dir = "../../data/temp/"

        detector = SLSDetector(utils.extract_zip(zip_path, temp_dir))
        detector.run_detector()
        columns = ['latitude', 'longitude', 'sza', 'vza', 'swir_16', 'swir_22']
        actual = detector.to_dataframe(keys=columns)

        # TODO determine why floating point errors are causing issues in testing here
        # Truncate to int so the comparison ignores the float noise.
        expected = expected.astype(int)
        actual = actual.astype(int)

        # compare
        self.assertEqual(True, expected.equals(actual))
Example no. 6
0
def main():
    """Detect persistent hotspots (flares) for one product and export results.

    Command-line arguments:
        sys.argv[1] -- path to the product file to process
        sys.argv[2] -- sensor name; 'sls' selects the SLSTR detector,
                       anything else selects the ATX detector

    Writes two CSVs via utils.build_outpath: the aggregated flare
    detections and the cloud-cover sampling at persistent hotspot sites.
    """
    file_to_process = sys.argv[1]
    sensor = sys.argv[2]

    def _persistent_path(filename):
        # All persistent flare-location files live in the same directory.
        return os.path.join(fp.output_l3, 'all_sensors', filename)

    # The sampling configuration is identical for every sensor, so it is
    # defined once rather than duplicated in each branch.
    sampling_keys = ['latitude', 'longitude', 'local_cloudiness']
    sampling_aggregator = {
        'local_cloudiness': np.mean,
        'year': 'first',
        'month': 'first',
        'day': 'first',
        'hhmm': 'first'
    }

    if sensor != 'sls':
        product = epr.Product(file_to_process)
        HotspotDetector = ATXDetector(product)

        # ATX products carry MWIR channels in addition to SWIR 1.6um.
        flare_keys = [
            'latitude', 'longitude', 'local_cloudiness', 'swir_16', 'frp',
            'pixel_size', 'mwir', 'background_mwir'
        ]

        flare_aggregator = {
            'frp': np.sum,
            'swir_16': np.mean,
            'mwir': np.mean,
            'background_mwir': np.mean,
            'pixel_size': np.sum,
            'latitude': np.mean,
            'longitude': np.mean,
            'local_cloudiness': np.mean,
            'year': 'first',
            'month': 'first',
            'day': 'first',
            'hhmm': 'first'
        }

        persistent_df = pd.read_csv(
            _persistent_path('all_flare_locations_ats.csv'))
    else:
        product = utils.extract_zip(file_to_process, fp.slstr_extract_temp)
        HotspotDetector = SLSDetector(product)

        # SLSTR products carry both SWIR 1.6um and 2.2um channels.
        flare_keys = [
            'latitude', 'longitude', 'local_cloudiness', 'swir_16', 'swir_22',
            'frp', 'pixel_size'
        ]

        flare_aggregator = {
            'frp': np.sum,
            'swir_16': np.mean,
            'swir_22': np.mean,
            'pixel_size': np.sum,
            'latitude': np.mean,
            'longitude': np.mean,
            'local_cloudiness': np.mean,
            'year': 'first',
            'month': 'first',
            'day': 'first',
            'hhmm': 'first'
        }

        # merge persistent dataframes for SLSTR: hotspots known from either
        # the ATX record or the SLS record count as persistent.
        atx_persistent_df = pd.read_csv(
            _persistent_path('all_flare_locations_atx.csv'))
        sls_persistent_df = pd.read_csv(
            _persistent_path('all_flare_locations_sls.csv'))
        persistent_df = merge_hotspot_dataframes(atx_persistent_df,
                                                 sls_persistent_df)

    # find persistent hotspots (i.e. flares)
    HotspotDetector.run_detector(flares_or_sampling=True)
    flare_df = HotspotDetector.to_dataframe(keys=flare_keys,
                                            joining_df=persistent_df)
    aggregated_flare_df = aggregate(flare_df, flare_aggregator)
    aggregated_flare_df.to_csv(
        utils.build_outpath(sensor, file_to_process, 'flares'))

    # get sampling associated with persistent hotspots
    sampling_df = HotspotDetector.to_dataframe(keys=sampling_keys,
                                               joining_df=persistent_df)
    aggregated_sampling_df = aggregate(sampling_df, sampling_aggregator)
    aggregated_sampling_df.to_csv(
        utils.build_outpath(sensor, file_to_process, 'samples'))