Code Example #1
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_sun_earth_distance(self):
        path_to_data = glob.glob("../../data/test_data/*.N1")[0]
        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)

        target = 0.9877038273760421
        result = HotspotDetector._compute_sun_earth_distance()
        self.assertAlmostEqual(target, result)
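A note on the target: 0.9877 AU is a plausible Sun-Earth distance for mid-February or late November. The detector's _compute_sun_earth_distance is not shown in this listing; a minimal sketch of the standard day-of-year eccentricity approximation (an assumption, not the project's confirmed formula):

import numpy as np

def sun_earth_distance(day_of_year):
    # simple eccentricity approximation in AU; perihelion taken as day 4
    # (assumed formula -- the ATXDetector implementation may differ)
    return 1.0 - 0.01672 * np.cos(np.radians(0.9856 * (day_of_year - 4)))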
Code Example #2
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_compute_frp(self):
        path_to_data = glob.glob("../../data/test_data/*.N1")[0]
        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)
        HotspotDetector.run_detector(flares_or_sampling=True)

        path_to_target = "../../data/test_data/atx_frp.npy"
        target = np.load(path_to_target)
        result = HotspotDetector.frp
        self.assertEqual(True, (target == result).all())
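The assertEqual(True, (target == result).all()) idiom works, but on failure it only reports "False != True". numpy's own test helper pinpoints the mismatching elements and is a drop-in alternative:

np.testing.assert_array_equal(target, result)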
Code Example #3
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_detect_hotspots_atx(self):
        path_to_data = glob.glob("../../data/test_data/*.N1")[0]
        path_to_target = "../../data/test_data/atx_detect_hotspots.npy"

        target = np.load(path_to_target)

        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)
        HotspotDetector.run_detector()

        self.assertEqual(True, (target == HotspotDetector.hotspots).all())
Code Example #4
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_radiance_from_BT(self):

        path_to_data = glob.glob("../../data/test_data/*.N1")[0]
        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)

        brightness_temp = 1500
        wavelength = 1.6
        result = HotspotDetector._rad_from_BT(wavelength, brightness_temp)
        target = 28200.577465487077
        self.assertAlmostEqual(target, result)
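_rad_from_BT presumably inverts brightness temperature through the Planck function. A minimal sketch using CODATA constants (the project's own constants and unit conventions are not shown here, so this agrees with the 28200.577 target only to within a fraction of a percent):

import numpy as np

def rad_from_bt(wavelength_um, bt_kelvin):
    # Planck spectral radiance, W m-2 sr-1 um-1 (assumed units)
    h = 6.62607015e-34  # Planck constant, J s
    c = 2.99792458e8    # speed of light, m s-1
    k = 1.380649e-23    # Boltzmann constant, J K-1
    wl = wavelength_um * 1e-6  # micrometres -> metres
    rad_per_m = 2 * h * c ** 2 / (wl ** 5 * (np.exp(h * c / (wl * k * bt_kelvin)) - 1))
    return rad_per_m * 1e-6  # per metre -> per micrometre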
Code Example #5
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_radiance_from_reflectance(self):

        path_to_target = "../../data/test_data/atx_radiance_from_reflectance.npy"
        target = np.load(path_to_target)

        path_to_data = glob.glob("../../data/test_data/*.N1")[0]
        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)
        reflectance = product.get_band('reflec_nadir_1600').read_as_array()
        result = HotspotDetector._rad_from_ref(reflectance)

        self.assertEqual(True, (target == result).all())
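ATSR reflectance bands such as reflec_nadir_1600 are stored as percentages, and the conversion back to radiance typically divides by 100 and scales by the band's solar irradiance. A hedged sketch of that form (the irradiance value is a placeholder, and the project may also fold in the sun-earth distance or a solar-zenith cosine):

import numpy as np

def rad_from_ref(reflectance_pct, solar_irradiance=254.0, sun_earth_distance=1.0):
    # reflectance (%) -> radiance; solar_irradiance is an assumed
    # placeholder for the 1.6 um channel, not the project's constant
    return reflectance_pct / 100.0 * solar_irradiance / (np.pi * sun_earth_distance ** 2)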
Code Example #6
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_night_mask_atx(self):
        path_to_data = glob.glob("../../data/test_data/*.N1")[0]
        path_to_target = "../../data/test_data/atx_nightmask.npy"
        target = np.load(path_to_target)

        target_mean = np.mean(target)

        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)
        HotspotDetector.run_detector()

        self.assertAlmostEqual(target_mean,
                               np.mean(HotspotDetector.night_mask))
Code Example #7
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_get_arcmin_int(self):

        coords = np.array([
            -150.53434, -100.13425, -50.20493, 0.34982, 50.43562, 100.12343,
            150.56443
        ])
        target = np.array([-15032, -10008, -5012, 21, 5026, 10007, 15034])

        path_to_data = glob.glob("../../data/test_data/*.N1")[0]
        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)

        result = HotspotDetector._find_arcmin_gridcell(coords)
        self.assertEqual(True, (target == result).all())
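The targets encode each coordinate as whole degrees times 100 plus the rounded arcminute remainder, with the sign restored afterwards (-150.53434 -> 150 deg 32 arcmin -> -15032). A re-implementation consistent with all seven target values above (the project's actual _find_arcmin_gridcell may be written differently):

import numpy as np

def find_arcmin_gridcell(coords):
    # whole degrees * 100 + rounded arcminutes, sign restored at the end
    sign = np.sign(coords)
    degrees = np.floor(np.abs(coords))
    arcmins = np.rint((np.abs(coords) - degrees) * 60)
    return (sign * (degrees * 100 + arcmins)).astype(int)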
Code Example #8
File: test_ggf.py Project: dnf0/kcl-globalgasflaring
    def test_run_atx(self):
        target = pd.read_csv(glob.glob("../../data/test_data/ATS*.csv")[0])
        path_to_data = glob.glob("../../data/test_data/*.N1")[0]

        product = epr.Product(path_to_data)
        HotspotDetector = ATXDetector(product)
        HotspotDetector.run_detector()
        result = HotspotDetector.to_dataframe(keys=['latitude', 'longitude'])

        # TODO determine why floating point errors are causing issues in testing here
        target = target.astype(int)
        result = result.astype(int)
        are_equal = target.equals(result)

        self.assertEqual(True, are_equal)
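The TODO about floating-point errors could be resolved without the lossy int cast: recent pandas versions ship a tolerance-aware frame comparison, e.g.

pd.testing.assert_frame_equal(target, result, check_exact=False, rtol=1e-5)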
Code Example #9
File: hotspots.py Project: dnf0/kcl-globalgasflaring
def main():
    file_to_process = sys.argv[1]
    sensor = sys.argv[2]

    if sensor != 'sls':
        product = epr.Product(file_to_process)
        HotspotDetector = ATXDetector(product)
        keys = ['latitude', 'longitude']
    else:
        product = utils.extract_zip(file_to_process, fp.slstr_extract_temp)
        HotspotDetector = SLSDetector(product)
        keys = ['latitude', 'longitude']

    HotspotDetector.run_detector()
    df = HotspotDetector.to_dataframe(keys=keys)
    df.to_csv(utils.build_outpath(sensor, file_to_process, 'hotspots'))
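main() takes the input file as the first argument and the sensor name as the second; note that any value other than 'sls' selects the ATXDetector branch, and both branches happen to request the same keys. An illustrative invocation, with made-up file names:

python hotspots.py /path/to/ATS_TOA_scene.N1 atx
python hotspots.py /path/to/S3A_SL_1_RBT_scene.zip sls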
Code Example #10
File: auto_generate_ASAR.py Project: whigg/PySOL
def create_asar_image(iPath, oPath_4326, oPath_3413, fileName):
    #plt.close("all")
    product = epr.Product(os.path.join(iPath, fileName))

    band = product.get_band('proc_data')

    sc_w = product.get_scene_width()
    sc_h = product.get_scene_height()

    raw_counts = band.read_as_array(
        sc_w, sc_h)  #, xoffset=100, yoffset=6500, xstep=2, ystep=2)
    lat = product.get_band('latitude').read_as_array(sc_w, sc_h)
    lon = product.get_band('longitude').read_as_array(sc_w, sc_h)
    incident_angle = product.get_band('incident_angle').read_as_array(
        sc_w, sc_h)

    raw_counts_trmmd = raw_counts
    # Trimming the array by removing zero values from rows and cols
    msk = []
    for m in range(raw_counts_trmmd.shape[0]):
        if raw_counts_trmmd[m, :].sum() == 0:
            msk.append(m)
    raw_counts_trmmd = numpy.delete(raw_counts_trmmd, msk, axis=0)
    lat = numpy.delete(lat, msk, axis=0)
    lon = numpy.delete(lon, msk, axis=0)
    incident_angle = numpy.delete(incident_angle, msk, axis=0)

    msk = []
    for n in range(raw_counts_trmmd.shape[1]):
        if raw_counts_trmmd[:, n].sum() == 0:
            msk.append(n)
    raw_counts_trmmd = numpy.delete(raw_counts_trmmd, msk, axis=1)
    lat = numpy.delete(lat, msk, axis=1)
    lon = numpy.delete(lon, msk, axis=1)
    incident_angle = numpy.delete(incident_angle, msk, axis=1)
    raw_counts = raw_counts_trmmd

    # Adding Sigma_0
    calibration_constant = product.get_dataset(
        'MAIN_PROCESSING_PARAMS_ADS').read_record(0).get_field(
            'calibration_factors.1.ext_cal_fact').get_elems()
    # sigma0 = 10*log10( raw_counts**2*sin(incident_angle*pi/180)/calibration_constant )
    sigma0 = raw_counts**2 * sin(
        incident_angle * pi / 180) / calibration_constant

    print "    start filter"
    from scipy.signal import wiener
    sigma0w = wiener(sigma0, mysize=(7, 7), noise=None)
    # sigma0w = sigma0

    pol = product.get_sph().get_field('MDS1_TX_RX_POLAR').get_elem()
    if pol == 'H/H':
        ph = (2.20495, -14.3561e-2, 11.28e-4)
        sigma0_hh_ref = exp(
            (ph[0] + incident_angle * ph[1] + incident_angle**2 * ph[2]) *
            log(10))
        roughness = sigma0w / sigma0_hh_ref
    elif pol == 'V/V':
        pv = (2.29373, -15.393e-2, 15.1762e-4)
        sigma0_vv_ref = exp(
            (pv[0] + incident_angle * pv[1] + incident_angle**2 * pv[2]) *
            log(10))
        roughness = sigma0w / sigma0_vv_ref

    # masking the arrays
    raw_counts = ma.masked_where(raw_counts == 0, raw_counts)
    roughness = ma.masked_where(raw_counts == 0, roughness)
    sigma0 = ma.masked_where(raw_counts == 0, sigma0)
    sigma0w = ma.masked_where(raw_counts == 0, sigma0w)
    default_fill_value = double(ma.default_fill_value(raw_counts))

    # probe pyresample's kd-tree backend (return values are not used here)
    pr.kd_tree.which_kdtree()
    pr.get_capabilities()

    scale = 1  # subsampling step; 1 keeps the full swath resolution

    ln = lon[::scale, ::scale]
    lt = lat[::scale, ::scale]
    data = roughness[::scale, ::scale]

    for proj in ['EPSG:4326', 'EPSG:3413']:
        print "    start projection %s" % proj
        if proj == 'EPSG:4326':
            oPath = oPath_4326
            area_def = swath_area_def(
                name='Temporal SWATH EPSG Projection 4326',
                proj='eqc',
                lonlim=(lon.min(), lon.max()),
                latlim=(lat.min(), lat.max()),
                ellps="WGS84",
                res=150)
        elif proj == 'EPSG:3413':
            oPath = oPath_3413
            area_def = swath_area_def(
                name='Temporal SWATH EPSG Projection 3413',
                proj='stere',
                lonlim=(lon.min(), lon.max()),
                latlim=(lat.min(), lat.max()),
                ellps="WGS84",
                res=150,
                lat_ts=70,
                lat_0=90,
                lon_0=-45)

        swath_def = pr.geometry.SwathDefinition(lons=ln, lats=lt)
        # result = pr.kd_tree.resample_nearest(swath_def, data.ravel(), area_def, radius_of_influence=30000, nprocs=2)
        # result = pr.kd_tree.resample_nearest(swath_def, data, area_def, radius_of_influence=50000, epsilon=0.5, fill_value=default_fill_value)
        result = pr.kd_tree.resample_nearest(swath_def,
                                             data.ravel(),
                                             area_def,
                                             radius_of_influence=300,
                                             epsilon=0.5,
                                             nprocs=2,
                                             fill_value=None)

        oFileName = os.path.join(oPath, fileName + '.png')
        gray()
        imsave(oFileName, result, vmin=0, vmax=2)

        create_KML_asar(area_def.area_extent,
                        os.path.join(oPath, fileName + '.kml'))
    close()
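The commented-out sigma0 expression in this example is the same calibration in decibels; if the dB form is wanted, it is a one-liner on top of the linear sigma0 computed here:

sigma0_db = 10.0 * numpy.log10(sigma0)  # linear backscatter -> dB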
Code Example #11
def read_atsr(path_to_ats_data):
    return epr.Product(path_to_ats_data)
Code Example #12
def main():
    file_to_process = sys.argv[1]
    sensor = sys.argv[2]

    if sensor != 'sls':
        product = epr.Product(file_to_process)
        HotspotDetector = ATXDetector(product)

        flare_keys = [
            'latitude', 'longitude', 'local_cloudiness', 'swir_16', 'frp',
            'pixel_size', 'mwir', 'background_mwir'
        ]

        flare_aggregator = {
            'frp': np.sum,
            'swir_16': np.mean,
            'mwir': np.mean,
            'background_mwir': np.mean,
            'pixel_size': np.sum,
            'latitude': np.mean,
            'longitude': np.mean,
            'local_cloudiness': np.mean,
            'year': 'first',
            'month': 'first',
            'day': 'first',
            'hhmm': 'first'
        }

        sampling_keys = ['latitude', 'longitude', 'local_cloudiness']

        sampling_aggregator = {
            'local_cloudiness': np.mean,
            'year': 'first',
            'month': 'first',
            'day': 'first',
            'hhmm': 'first'
        }

        atx_persistent_fp = os.path.join(fp.output_l3, 'all_sensors',
                                         'all_flare_locations_ats.csv')
        persistent_df = pd.read_csv(atx_persistent_fp)

    else:
        product = utils.extract_zip(file_to_process, fp.slstr_extract_temp)
        HotspotDetector = SLSDetector(product)

        flare_keys = [
            'latitude', 'longitude', 'local_cloudiness', 'swir_16', 'swir_22',
            'frp', 'pixel_size'
        ]

        flare_aggregator = {
            'frp': np.sum,
            'swir_16': np.mean,
            'swir_22': np.mean,
            'pixel_size': np.sum,
            'latitude': np.mean,
            'longitude': np.mean,
            'local_cloudiness': np.mean,
            'year': 'first',
            'month': 'first',
            'day': 'first',
            'hhmm': 'first'
        }

        sampling_keys = [
            'latitude',
            'longitude',
            'local_cloudiness',
        ]

        sampling_aggregator = {
            'local_cloudiness': np.mean,
            'year': 'first',
            'month': 'first',
            'day': 'first',
            'hhmm': 'first'
        }

        # merge persistent dataframes for SLSTR
        atx_persistent_fp = os.path.join(fp.output_l3, 'all_sensors',
                                         'all_flare_locations_atx.csv')
        atx_persistent_df = pd.read_csv(atx_persistent_fp)

        sls_persistent_fp = os.path.join(fp.output_l3, 'all_sensors',
                                         'all_flare_locations_sls.csv')
        sls_persistent_df = pd.read_csv(sls_persistent_fp)

        persistent_df = merge_hotspot_dataframes(atx_persistent_df,
                                                 sls_persistent_df)
    # find persistent hotspots (i.e. flares)
    HotspotDetector.run_detector(flares_or_sampling=True)
    flare_df = HotspotDetector.to_dataframe(keys=flare_keys,
                                            joining_df=persistent_df)
    aggregated_flare_df = aggregate(flare_df, flare_aggregator)
    aggregated_flare_df.to_csv(
        utils.build_outpath(sensor, file_to_process, 'flares'))

    # get sampling associated with persistent hotspots
    sampling_df = HotspotDetector.to_dataframe(keys=sampling_keys,
                                               joining_df=persistent_df)
    aggregated_sampling_df = aggregate(sampling_df, sampling_aggregator)
    aggregated_sampling_df.to_csv(
        utils.build_outpath(sensor, file_to_process, 'samples'))
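aggregate() is not part of this listing; given the aggregator dicts above it is presumably a thin wrapper over a pandas groupby. A plausible sketch, where the grouping key is an assumption (the project likely groups on a flare grid-cell id):

def aggregate(df, aggregator, group_key='grid_cell'):
    # hypothetical: collapse per-pixel detections to one row per flare cell
    return df.groupby(group_key, as_index=False).agg(aggregator)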
Code Example #13
File: readASAR.py Project: whigg/PySOL
def readASAR(iPath,
             fileName,
             pxlRes=800.0,
             image_size_skip=40 * 1e6,
             area_bbox=45):

    logger.info(os.path.join(iPath, fileName))
    try:
        product = epr.Product(os.path.join(iPath, fileName))
    except Exception:
        logger.error('unable to read file')
        return False

    try:
        band = product.get_band('proc_data')
    except epr.EPRValueError:
        logger.error(
            'unable to get band "proc_data": epr_get_band_id: band not found')
        return False

    sc_w = double(product.get_scene_width())
    sc_h = double(product.get_scene_height())

    # Skipping if image too large
    logger.debug('sc_w*sc_h = %s MPs', str(round(sc_w * sc_h / 1e6)))
    if sc_w * sc_h > image_size_skip:
        logger.debug("ASAR Image too large, skipping...")
        return False

    # Get lat/lon from geolocation grid
    dataset = product.get_dataset('GEOLOCATION_GRID_ADS')
    # read the tie-point latitudes/longitudes record by record
    # (list comprehensions keep the Python 2 behaviour and also work on
    # Python 3, where map() returns a lazy iterator)
    fltp_lats = [dataset.read_record(x).get_field('first_line_tie_points.lats').get_elems()
                 for x in range(dataset.get_num_records())]
    lltp_lats = [dataset.read_record(x).get_field('last_line_tie_points.lats').get_elems()
                 for x in range(dataset.get_num_records())]
    fltp_lons = [dataset.read_record(x).get_field('first_line_tie_points.longs').get_elems()
                 for x in range(dataset.get_num_records())]
    lltp_lons = [dataset.read_record(x).get_field('last_line_tie_points.longs').get_elems()
                 for x in range(dataset.get_num_records())]

    fltp_lats = asarray(double(fltp_lats)) / 1e6
    lltp_lats = asarray(double(lltp_lats)) / 1e6
    fltp_lons = asarray(double(fltp_lons)) / 1e6
    lltp_lons = asarray(double(lltp_lons)) / 1e6

    lats = row_stack((fltp_lats, lltp_lats[-1, :]))
    lons = row_stack((fltp_lons, lltp_lons[-1, :]))

    # Skipping if no area overlap
    if mean(lats[:]) <= area_bbox:
        logger.debug("No area overlap. Skipping...")
        return False

    lats = fliplr(lats)
    lons = fliplr(lons)

    # Find scale to reduce image to the specified resolution
    arrShape = asarray([sc_w, sc_h])
    _lats = asarray([lats[0, 0], lats[-1, -1], lats[0, -1], lats[-1, 0]])
    _lons = asarray([lons[0, 0], lons[-1, -1], lons[0, -1], lons[-1, 0]])
    imageRes = round(
        mean(
            asarray(
                distancelib.getPixelResolution(_lats, _lons, arrShape, 'km')) *
            1e3))
    scale = pxlRes / imageRes

    extMax = (0., 0., arrShape[0] - 1, arrShape[1] - 1)
    ext = (0., 0., arrShape[0] - 1, arrShape[1] - 1)

    # Format extent/spacing
    ext, spa = format_extent_spacing(extent=ext, spacing=scale, extmax=extMax)

    # Read data with stepping=spacing
    try:
        raw_counts = band.read_as_array(sc_w, sc_h, xstep=spa[0], ystep=spa[1])
        incident_angle = product.get_band('incident_angle').read_as_array(
            sc_w, sc_h, xstep=spa[0], ystep=spa[1])
    except epr.EPRValueError:
        logger.error("EPRValueError")
        return False

    lats_2 = imresize(lats, raw_counts.shape)
    lons_2 = imresize(lons, raw_counts.shape)

    #     if lats.max() <= 35:
    #         logger.debug("skipping no area overlap")
    #         return False

    # Trimming the array by removing zero values from rows and cols
    msk = []
    for m in range(raw_counts.shape[0]):
        if raw_counts[m, :].sum() == 0:
            msk.append(m)
    raw_counts = delete(raw_counts, msk, axis=0)
    lats_2 = delete(lats_2, msk, axis=0)
    lons_2 = delete(lons_2, msk, axis=0)
    incident_angle = delete(incident_angle, msk, axis=0)
    polarization = product.get_sph().get_field('MDS1_TX_RX_POLAR').get_elem()

    msk = []
    for n in range(raw_counts.shape[1]):
        if raw_counts[:, n].sum() == 0:
            msk.append(n)
    raw_counts = delete(raw_counts, msk, axis=1)
    lats_2 = delete(lats_2, msk, axis=1)
    lons_2 = delete(lons_2, msk, axis=1)
    incident_angle = delete(incident_angle, msk, axis=1)

    # Adding Sigma_0
    calibration_constant = product.get_dataset(
        'MAIN_PROCESSING_PARAMS_ADS').read_record(0).get_field(
            'calibration_factors.1.ext_cal_fact').get_elems()
    # sigma0 = 10*log10( raw_counts**2*sin(incident_angle*pi/180)/calibration_constant )
    sigma0 = raw_counts**2 * sin(
        incident_angle * pi / 180) / calibration_constant

    return sigma0, lats_2, lons_2, incident_angle, polarization, lats, lons
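Since readASAR returns False on any failure and a seven-element tuple on success, callers should check the result before unpacking. An illustrative call, with made-up paths:

result = readASAR('/data/asar', 'ASA_IMP_scene.N1')
if result is not False:
    sigma0, lats_2, lons_2, incident_angle, polarization, lats, lons = result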