Example no. 1
0
    # Deduplicate (lon, lat) rows. np.unique always returns the optional
    # outputs in the fixed order (unique, index, inverse, counts) regardless
    # of the keyword-argument order used here, so this unpacking is correct.
    uniq, indices, inverse, count = np.unique(ar=lonlat,
                                              axis=0,
                                              return_index=True,
                                              return_counts=True,
                                              return_inverse=True)

    print(uniq.shape)

    # rows whose second column is below lat_x
    # (presumably latitude, given the array name 'lonlat' — confirm)
    loc = np.where(uniq[:, 1] < lat_x)

    print(uniq[loc])

    print(uniq[loc].shape)

    coords = uniq[loc].tolist()

    # build an OGR point geometry (WKT) for each remaining coordinate pair
    geoms = [
        Vector.get_osgeo_geom('POINT ({} {})'.format(str(coord[0]),
                                                     str(coord[1])))
        for coord in coords
    ]

    # keep only points intersecting geom2 (defined outside this view)
    outlist = []
    for geom in geoms:

        if geom.Intersects(geom2):
            outlist.append(geom)

    print(len(outlist))
Example no. 2
0
def get_path(args):
    """
    Method to extract per-beam ground-track geometries from a GEDI file.

    args is a 4-tuple of:
        filename: GEDI HDF5 filename
        temp_dir: scratch directory the file is copied to before reading
        boundary_wkt: WKT representation of boundary geometry
        spatial_resolution: bin resolution (degrees) used when resampling chunks

    :return: (list of (wkt-or-None, attribute-dict) tuples, None) on success;
             (file basename, error string) if the file cannot be opened or
             no beam yields usable geolocation data
    """

    pt_limit = 15  # chunks with this many points or fewer are skipped
    verbose = False

    filename, temp_dir, boundary_wkt, spatial_resolution = args

    if verbose:
        Opt.cprint('Working on - {} '.format(Handler(filename).basename))

    # work on a local copy so the original file is left untouched
    Handler(filename).copy_file(temp_dir)

    temp_filename = temp_dir + Handler(filename).basename

    # GEDI filenames carry the acquisition date as yyyyddd in the
    # third underscore-separated token
    date_str = Handler(temp_filename).basename.split('_')[2]

    year = int(date_str[0:4])
    julian_day = int(date_str[4:7])

    bounds_geom = ogr.CreateGeometryFromWkt(boundary_wkt)

    file_keys = []
    try:
        fs = h5py.File(temp_filename, 'r')
        fs.visit(file_keys.append)
    except Exception as e:
        # fix: remove the temporary copy on failure instead of leaking it
        Handler(temp_filename).file_delete()
        return Handler(temp_filename).basename, ' '.join(e.args)

    beam_ids = list(set(list(key.split('/')[0].strip() for key in file_keys if 'BEAM' in key)))

    feat_list = []
    err = 'No Keys found'

    try:
        for beam in beam_ids:
            # beam names encode the id in binary, e.g. 'BEAM0101' -> 5
            beam_id = int(beam.replace('BEAM', ''), 2)

            if verbose:
                Opt.cprint('\nBEAM - {}'.format(beam_id), newline='  ')

            try:
                lat_arr = np.array(fs['{}/geolocation/latitude_bin0'.format(beam)])
                lon_arr = np.array(fs['{}/geolocation/longitude_bin0'.format(beam)])
            except Exception as e:
                err = ' '.join(e.args)
                continue

            # make an array of lat lon
            pt_arr = np.vstack([lon_arr, lat_arr]).T

            # remove rows where either coordinate is NaN
            nan_loc_pre = np.where(np.apply_along_axis(lambda x: (not (np.isnan(x[0])) and (not np.isnan(x[1]))),
                                                       1, pt_arr))
            pt_arr = pt_arr[nan_loc_pre]
            groups = group_nearby(pt_arr)

            # find start and end of valid strips
            chunks = list(pt_arr[elem[0]:(elem[1] + 1), :] for elem in groups)

            main_geom = ogr.Geometry(ogr.wkbMultiLineString)

            any_geom = False

            # build a line geometry per strip and collect boundary intersections
            for chunk in chunks:

                if chunk.shape[0] <= pt_limit:
                    if verbose:
                        Opt.cprint('chunk too short size={},'.format(chunk.shape[0]), newline='  ')
                    continue

                if verbose:
                    Opt.cprint('chunk size={},'.format(str(chunk.shape[0])), newline=' ')

                try:
                    resampled_chunk = resample_chunk(chunk, spatial_resolution)
                except Exception as e:
                    if verbose:
                        Opt.cprint('invalid chunk({})'.format(e.args[0]), newline='  ')
                    continue

                # fix: the GeoJSON type name is 'LineString' (RFC 7946),
                # not 'Linestring'
                part_geom_json = json.dumps({'type': 'LineString', 'coordinates': resampled_chunk.tolist()})
                part_geom = Vector.get_osgeo_geom(part_geom_json, 'json')

                if part_geom.Intersects(bounds_geom):
                    any_geom = True

                    part_geom_intersection = part_geom.Intersection(bounds_geom)

                    # add to main geometry
                    main_geom.AddGeometryDirectly(part_geom_intersection)

            attributes = {'BEAM': beam_id,
                          'FILE': Handler(temp_filename).basename,
                          'YEAR': year,
                          'JDAY': julian_day}

            if any_geom:
                wkt = main_geom.ExportToWkt()
                main_geom = None
            else:
                wkt = None

            feat_list.append((wkt, attributes))
    finally:
        # fix: always release the HDF5 handle and remove the temporary copy,
        # even if an unexpected error escapes the per-beam handling above
        fs.close()
        Handler(temp_filename).file_delete()

    if len(feat_list) == 0:
        return Handler(filename).basename, err
    else:
        return feat_list, None
Example no. 3
0
                    [-113.466796875, 68.13885164925574],
                    [-125.947265625, 70.4073476760681],
                    [-140.09765625, 70.31873847853124],
                    [-156.708984375, 71.63599288330609],
                    [-167.431640625, 69.3493386397765],
                    [-165.146484375, 66.72254132270653],
                    [-168.662109375, 65.47650756256367],
                    [-165.673828125, 63.31268278043484],
                    [-168.837890625, 60.326947742998414],
                    [-166.552734375, 56.218923189166624]]

    # Earlier (disabled) alternatives for defining the boundary in Earth Engine:
    # boundary = ee.Geometry.Polygon(bound_coords)
    # boundary = ee.FeatureCollection('users/masseyr44/shapefiles/NAboreal_10kmbuffer').first().geometry()

    # build an OGR polygon geometry from the boundary coordinate list above
    boundary_geom = Vector.get_osgeo_geom(Vector.wkt_from_coords(
        bound_coords, 'polygon'),
                                          geom_type='wkt')

    # values to copy from fusion table/feature collection
    feat_properties = ['site', 'year', 'decid_frac']
    KEYS = ee.List(feat_properties)

    # properties to retrieve from scene
    scene_properties = [
        'CLOUD_COVER', 'GEOMETRIC_RMSE_MODEL', 'LANDSAT_ID',
        'SOLAR_ZENITH_ANGLE'
    ]
    PROPERTY_LIST = ee.List(scene_properties)

    # Bands to retrieve
    bands = ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'pixel_qa', 'radsat_qa']
Example no. 4
0
    # read sample rows as a list of dicts (one dict per CSV row)
    samp_data = Handler(infilename).read_from_csv(return_dicts=True)

    wkt_list = list()
    attr_list = list()

    # WGS84 geographic coordinates (proj4 string)
    spref_str = '+proj=longlat +datum=WGS84'
    latlon = list()
    count = 0
    for row in samp_data:
        print('Reading elem: {}'.format(str(count + 1)))

        # copy the attributes of interest (attr is defined outside this view)
        elem = dict()
        for header in list(attr):
            elem[header] = row[header]

        samp_geom = Vector.get_osgeo_geom(
            Vector.wkt_from_coords([row['Lon'], row['Lat']]))

        latlon.append([row['Lon'], row['Lat']])

        # NOTE(review): CSV fields are typically read as strings; comparing
        # elem['Lat'] to 52.0 raises TypeError on Python 3 unless
        # read_from_csv converts values to numbers — confirm.
        if elem['Lat'] < 52.0 and samp_geom.Intersects(bounds_geom):
            wkt_list.append(Vector.wkt_from_coords([row['Lon'], row['Lat']]))

            attr_list.append(elem)

        count += 1

    # Deduplicate coordinate pairs. np.unique returns the optional outputs
    # in the fixed order (unique, index, inverse, counts) regardless of the
    # keyword-argument order used here, so this unpacking is correct.
    uniq, indices, inverse, count = np.unique(ar=latlon,
                                              axis=0,
                                              return_index=True,
                                              return_counts=True,
                                              return_inverse=True)
Example no. 5
0
    # output filenames for the binned samples and the train/validation splits
    outfile = outdir + "all_samp_postbin_v{}.csv".format(version)
    outshpfile = outdir + "all_samp_postbin_v{}.shp".format(version)

    trn_outfile = outdir + "all_samp_post_v{}_trn_samp.shp".format(version)
    val_outfile = outdir + "all_samp_post_v{}_val_samp.shp".format(version)

    # boundary shapefile of the boreal study area (10 km buffer, per filename)
    boreal_bounds = "D:/shared/Dropbox/projects/NAU/landsat_deciduous/data/STUDY_AREA/boreal/" \
                    "NABoreal_simple_10km_buffer_geo.shp"

    # year ranges used to bin the samples
    year_bins = [(1984, 1997), (1998, 2002), (2003, 2007), (2008, 2012),
                 (2013, 2018)]

    # script-----------------------------------------------------------------------------------------------

    boreal_vec = Vector(boreal_bounds)
    boreal_geom = Vector.get_osgeo_geom(boreal_vec.wktlist[0])

    # one list of samples per year bin (raw and reduced)
    year_samp = list(list() for _ in range(len(year_bins)))
    year_samp_reduced = list(list() for _ in range(len(year_bins)))

    # get data and names
    file_data = Handler(infile).read_from_csv(return_dicts=True)
    header = list(file_data[0])

    print('\nTotal samples: {}'.format(str(len(file_data))))

    boreal_samp_count = 0

    # bin all samples based on sample years using year_bins
    for elem in file_data:
        for i, years in enumerate(year_bins):