# Build a 0.00027-degree (printed as "30m") pixel grid over the lon/lat limits,
# then assemble point WKT strings (from pos_arr) with 'cover'/'cover_error'
# attributes (from cover_arr / cover_err_arr) into a Vector via
# Vector.vector_from_string(..., out_epsg=4326, vector_type='point').
# NOTE(review): this chunk's newlines were mangled during extraction, and the
# trailing Vector.vector_from_string(...) call is truncated mid-argument-list
# here — restore the original line breaks / remaining arguments from version
# control before editing this section.
xpixel = 0.00027 ypixel = 0.00027 x_extent = lon_limits[1] - lon_limits[0] y_extent = lat_limits[1] - lat_limits[0] xsize = np.ceil(x_extent / xpixel) ysize = np.ceil(y_extent / ypixel) print('x extent {} : y extent {}'.format(str(x_extent), str(y_extent))) print('Num of 30m pixels in x {} | Num of 30m pixels in y {}'.format( str(xsize), str(ysize))) wkt_list = list( Vector.wkt_from_coords(coords, geom_type='point') for coords in pos_arr.tolist()) attrib = {'cover': 'float', 'cover_error': 'float'} attr_list = list({ 'cover': cover[0], 'cover_error': cover[1] } for cover in zip(cover_arr, cover_err_arr)) vector = Vector.vector_from_string(wkt_list, geom_string_type='wkt', out_epsg=4326, vector_type='point', attributes=attr_list, attribute_types=attrib,
def read_gee_extract_data(filename):
    """
    Read GEE-extracted sample data into a site dictionary with sample
    dicts keyed by '<julian_day>_<year>' per site-year.

    Rows are dropped when any value is the string 'None', when
    saturated_bands()/unclear_value() flag the QA bands, or when
    GEOMETRIC_RMSE_MODEL exceeds 15.0.

    :param filename: Input CSV file name (read via Handler.read_from_csv)
    :return: dict keyed by '<site>_<year>', each value holding 'geom' (point
             WKT), 'decid_frac', 'site', 'site_year', and a 'data' dict of
             per-scene band/metadata dicts
    """
    lines = Handler(filename).read_from_csv(return_dicts=True)
    site_dict = dict()

    for line in lines:
        # Drop rows with missing values: the extraction writes literal 'None'
        # strings for empty cells.
        include = not any(isinstance(val, str) and val == 'None'
                          for val in line.values())

        # Drop rows with band saturation, poor geometric registration,
        # or unclear pixel QA.
        if saturated_bands(line['radsat_qa']) \
                or line['GEOMETRIC_RMSE_MODEL'] > 15.0 \
                or unclear_value(line['pixel_qa']):
            include = False

        if not include:
            continue

        site_year = str(line['site']) + '_' + str(line['year'])

        # First sample for this site-year: initialize its entry.
        if site_year not in site_dict:
            geom_wkt = Vector.wkt_from_coords((line['longitude'],
                                               line['latitude']))
            site_dict[site_year] = {'geom': geom_wkt,
                                    'decid_frac': line['decid_frac'],
                                    'data': dict(),
                                    'site_year': line['year'],
                                    'site': line['site']}

        # Scene acquisition date and sensor parsed from the Landsat scene ID.
        sensor_dict = extract_date(line['LANDSAT_ID'])
        temp_dict = {'img_jday': sensor_dict['date'].timetuple().tm_yday,
                     'img_year': sensor_dict['date'].timetuple().tm_year,
                     'sensor': sensor_dict['sensor']}

        # Surface-reflectance bands B1-B7 plus topographic layers, where present.
        bands = ['B' + str(ii + 1) for ii in range(7)] + \
                ['slope', 'elevation', 'aspect']
        band_dict = {band: line[band] for band in bands if band in line}

        # Apply sensor-specific correction and 0.0001 reflectance scaling.
        temp_dict['bands'] = correct_landsat_sr(band_dict,
                                                sensor_dict['sensor'],
                                                scale=0.0001)

        site_dict[site_year]['data'].update(
            {'{}_{}'.format(str(temp_dict['img_jday']),
                            str(temp_dict['img_year'])): temp_dict})

    return site_dict
[-95.185546875, 67.57571741708057], [-113.466796875, 68.13885164925574], [-125.947265625, 70.4073476760681], [-140.09765625, 70.31873847853124], [-156.708984375, 71.63599288330609], [-167.431640625, 69.3493386397765], [-165.146484375, 66.72254132270653], [-168.662109375, 65.47650756256367], [-165.673828125, 63.31268278043484], [-168.837890625, 60.326947742998414], [-166.552734375, 56.218923189166624]] # boundary = ee.Geometry.Polygon(bound_coords) # boundary = ee.FeatureCollection('users/masseyr44/shapefiles/NAboreal_10kmbuffer').first().geometry() boundary_geom = Vector.get_osgeo_geom(Vector.wkt_from_coords( bound_coords, 'polygon'), geom_type='wkt') # values to copy from fusion table/feature collection feat_properties = ['site', 'year', 'decid_frac'] KEYS = ee.List(feat_properties) # properties to retrieve from scene scene_properties = [ 'CLOUD_COVER', 'GEOMETRIC_RMSE_MODEL', 'LANDSAT_ID', 'SOLAR_ZENITH_ANGLE' ] PROPERTY_LIST = ee.List(scene_properties) # Bands to retrieve bands = ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'pixel_qa', 'radsat_qa']
# Filter the raw samples: keep only points with Lat < 52.0 that intersect
# bounds_geom, collecting their point WKT strings and attribute records,
# then find the unique sample locations.
wkt_list = list()
attr_list = list()
spref_str = '+proj=longlat +datum=WGS84'
latlon = list()

count = 0
for row in samp_data:
    count += 1
    print('Reading elem: {}'.format(str(count)))

    # Copy the attribute columns for this sample.
    elem = {header: row[header] for header in attr}

    point = [row['Lon'], row['Lat']]
    samp_geom = Vector.get_osgeo_geom(Vector.wkt_from_coords(point))
    latlon.append(point)

    # Keep only samples below 52 degrees latitude that fall inside the bounds.
    if elem['Lat'] < 52.0 and samp_geom.Intersects(bounds_geom):
        wkt_list.append(Vector.wkt_from_coords(point))
        attr_list.append(elem)

# Unique locations plus first-occurrence indices, inverse mapping, and
# per-location counts (np.unique returns them in exactly this order).
uniq, indices, inverse, count = np.unique(ar=latlon,
                                          axis=0,
                                          return_index=True,
                                          return_counts=True,
                                          return_inverse=True)
# For each year bin, group samples by site id and reduce each group that falls
# inside boreal_geom to a single record: mean decid_frac and mean (int) year.
# NOTE(review): 'site_ids[0:100]' processes only the first 100 site ids per
# year bin — confirm this cap is intentional and not leftover debugging.
# NOTE(review): this chunk's newlines were mangled during extraction, and the
# trailing year_samp_reduced[i].append({...}) call is truncated mid-dict here —
# restore the original formatting / remaining lines from version control
# before editing.
# take mean of all samples of the same site that fall in the same year bin for i, samp_list in enumerate(year_samp): print('year: {}'.format(str(year_bins[i]))) samp_count = 0 site_ids = list(set(list(attr_dict['site'] for attr_dict in samp_list))) for site_id in site_ids[0:100]: same_site_samp_list = list(samp for samp in samp_list if samp['site'] == site_id) lat = same_site_samp_list[0]['Latitude'] lon = same_site_samp_list[0]['Longitude'] samp_wkt = Vector.wkt_from_coords([lon, lat]) samp_geom = Vector.get_osgeo_geom(samp_wkt) if boreal_geom.Intersects(samp_geom): decid_frac = np.mean( list(site_samp['decid_frac'] for site_samp in same_site_samp_list)) year = int( np.mean( list(site_samp['year'] for site_samp in same_site_samp_list))) # remove spaces in site names, # and add year to site name eg: 'site1' + '2007' = 'site1_2007' year_samp_reduced[i].append({