def _map_coverage(self, el):
    """Return the fraction of an S2 UTM tile covered by a record's footprint.

    The tile name is the 6th underscore-separated field of the L1C title with
    its leading letter dropped; the footprint WKT is intersected with that
    tile's geometry and the geodesic area ratio is returned.
    """
    tile_name = el['S2_L1C_rec']['title'].split('_')[5][1:]
    tile_geom = self.S2_tiles[self.S2_tiles.Name == tile_name].iloc[0]['geometry']
    footprint = wkt.loads(el['S2_L1C_rec']['footprint'])
    covered = area(geometry.mapping(footprint.intersection(tile_geom)))
    return covered / area(geometry.mapping(tile_geom))
def remove_excess_polygon(polygons_dict, region):
    """ The function removes polygons that cover a given area only with the
    overlap area with the adjacent polygon."""
    start_len = len(polygons_dict)
    # Geodesic area of the full target region, used as the reference value.
    poly_region_default_area = area(
        geojson.Feature(geometry=region, properties={}).geometry)
    idx = 0
    iteration_range = start_len
    while idx < iteration_range:
        intersection_polygon_area = 0
        poly_list = []
        # Build the union of every polygon EXCEPT the candidate at idx.
        poly_copy = copy.deepcopy(polygons_dict)
        del poly_copy[idx]
        for el in poly_copy:
            el_poly = shapely.geometry.asShape(el['geometry'])
            poly_list.append(el_poly)
        union_poly = cascaded_union(poly_list)
        intersection_polygon = union_poly.intersection(region)
        # NOTE(review): `not (A and B)` is true when at least one geometry is
        # non-empty; if the intent was "both must be non-empty", this should
        # read `not (A or B)` -- confirm against callers.
        if not (intersection_polygon.is_empty and union_poly.is_empty):
            intersection_polygon_area = area(
                geojson.Feature(geometry=intersection_polygon,
                                properties={}).geometry)
        else:
            break
        # If the remaining polygons still cover the region (areas equal to two
        # decimal places), the candidate polygon is redundant: drop it.
        if float("{0:.2f}".format(poly_region_default_area)) == float(
                "{0:.2f}".format(intersection_polygon_area)):
            del polygons_dict[idx]
            iteration_range -= 1
        else:
            idx += 1
    # Return the reduced list only when something was actually removed;
    # otherwise signal "no change" with None.
    if len(polygons_dict) > 0 and (len(polygons_dict) != start_len):
        return polygons_dict
    else:
        return None
def get_polygons_from_ft(table_id, name_attr, geometry_attr, user_settings=None):
    """Read a table of features and return, per row, the outer boundary of its
    (largest) polygon as a list of {lon, lat} dicts.

    For MultiPolygons and GeometryCollections, only the component with the
    largest geodesic area is kept.
    """
    # finds only the first polygon with outer boundary
    rows = read_table(table_id=table_id, user_settings=user_settings)
    polygons = []
    for row in rows:
        polygon = dict(name=row[name_attr], geometry=[])
        max_polygon = []  # per-component areas used to pick the largest piece
        feature = row[geometry_attr]
        # Some rows store a full Feature; unwrap to the bare geometry.
        if 'type' not in feature:
            feature = feature['geometry']
        if feature["type"] == "Polygon":
            outer_boundary = feature["coordinates"][0]
            for vertex in outer_boundary:
                polygon['geometry'].append(dict(lon=vertex[0], lat=vertex[1]))
        elif feature["type"] == "MultiPolygon":
            # Keep only the largest polygon of the multipolygon.
            for boundary in feature["coordinates"]:
                max_polygon.append(
                    area({
                        "type": "Polygon",
                        "coordinates": boundary
                    }))
            index = np.argmax(np.array(max_polygon))
            for vertex in feature["coordinates"][index][0]:
                polygon['geometry'].append(dict(lon=vertex[0], lat=vertex[1]))
        elif feature["type"] == "GeometryCollection":
            # Pick the largest polygonal member; non-polygons score 0.
            geometries = feature['geometries']
            for geometry in geometries:
                if geometry["type"] in ["Polygon", "MultiPolygon"]:
                    max_polygon.append(area(geometry))
                else:
                    max_polygon.append(0)
            index = np.argmax(np.array(max_polygon))
            max_polygon = []
            feature = geometries[index]
            if feature["type"] == "Polygon":
                outer_boundary = feature["coordinates"][0]
                for vertex in outer_boundary:
                    polygon['geometry'].append(
                        dict(lon=vertex[0], lat=vertex[1]))
            elif feature["type"] == "MultiPolygon":
                for boundary in feature["coordinates"]:
                    max_polygon.append(
                        area({
                            "type": "Polygon",
                            "coordinates": boundary
                        }))
                index = np.argmax(np.array(max_polygon))
                for vertex in feature["coordinates"][index][0]:
                    polygon['geometry'].append(
                        dict(lon=vertex[0], lat=vertex[1]))
        # Rows with no polygonal geometry are silently skipped.
        if len(polygon['geometry']) > 0:
            polygons.append(polygon)
    return polygons
def ll_area(lat, res=0.25):
    '''
    input:
        lat: 2-D numpy array of lat/lon grid-cell centre latitudes
        res: grid resolution / cell edge length, in degrees
    return:
        array of cell areas, in km2.
    TODO: For coastal cells the land area inside the cell is smaller than the
    full cell area, so the true mean emission rate should be computed from the
    land area. Eventually this should just return a precomputed numpy array,
    since the MEIC grid and the land/sea mask are fixed and need not be
    recomputed on every call.
    '''
    # Arbitrary start longitude -- geodesic cell area depends on latitude only.
    startlon = 0
    return_area = np.zeros_like(lat)
    isize, jsize = return_area.shape
    for i in range(isize):
        for j in range(jsize):
            obj = {
                'type': 'Polygon',
                'coordinates': [
                    # the four corner lon/lats of the grid cell
                    [[startlon, lat[i, j] - res / 2.0],
                     [startlon, lat[i, j] + res / 2.0],
                     [startlon + res, lat[i, j] + res / 2.0],
                     [startlon + res, lat[i, j] - res / 2.0]]
                ]
            }
            # area() yields m^2; convert to km^2.
            return_area[i, j] = area(obj) / 1000.0 / 1000.0
    return return_area
def calculate_fragment_values(pop_raster, poly_layer=None, add_columns=False):
    """Calculates population sum per isochrone.

    Args:
        pop_raster: population raster readable by ``zonal_stats``.
        poly_layer: GeoDataFrame with 'counter', 'cap_int' and 'geometry'.
        add_columns: when truthy, add capacity/density derived columns.

    Returns:
        GeoDataFrame with per-polygon population, area (km^2) and geometry.
    """
    pop_data = []
    for poly_idx, poly_cap, poly_geom in zip(poly_layer['counter'],
                                             poly_layer['cap_int'],
                                             poly_layer['geometry']):
        feature = gpd.GeoSeries([poly_geom]).to_json()
        pop_stats = zonal_stats(
            feature, pop_raster, stats=['sum'])  # calculate population for given area
        poly_area = area(mapping(poly_geom)) / 1e+6  # in kilometer
        pop_data.append(
            [poly_idx, poly_cap, pop_stats[0]['sum'], poly_area, poly_geom])
    df = pd.DataFrame(
        pop_data, columns=['counter', 'cap_int', 'pop', 'area', 'geometry'])
    # Polygons with no raster coverage yield None -> treat as zero population.
    # (assignment instead of the deprecated inplace fillna pattern)
    df['pop'] = df['pop'].fillna(0)
    # `is True` replaced with truthiness; callers passing True keep working.
    if add_columns:
        df['cap_pop'] = (df['cap_int'] / df['pop']) * 100000
        df['pop_area'] = df['pop'] / df['area']  # population density
        df['cap_dens'] = df['cap_int'] / df['pop_area']
        # divisions by zero produce +/-inf; normalise them to NaN
        df = df.replace([np.inf, -np.inf], np.nan)
    result_geodf = gpd.GeoDataFrame(df, geometry='geometry')
    return result_geodf
async def building_area(self, session):
    """Sum the footprint areas of all features tagged as buildings."""
    collection = await self.fetch(session)
    return sum(
        area(feat['geometry'])
        for feat in collection['features']
        if 'building' in feat['properties'])
def load_catalog(self, start_date, end_date):
    """Load Sentinel-1/-2 catalogue records for [start_date, end_date).

    Returns:
        (S1_df, S2_L1C_df, S2_L2A_df). Both S2 frames are indexed by
        'level1cpdiidentifier' so L1C and L2A records can be matched, and
        S2_L2A_df gains a 'coverage' column (fraction of its UTM tile covered
        by the record's footprint).
    """
    S1_df = async_load_parquets(self.catalog.loc[(self.catalog['date']>=start_date) & (self.catalog['date']<end_date) & (self.catalog['platform']=='Sentinel-1'),'f'].values.tolist(), self.CONFIG['N_workers'])
    S2_L1C_df = async_load_parquets(self.catalog.loc[(self.catalog['date']>=start_date) & (self.catalog['date']<end_date) & (self.catalog['product']=='S2MSI1C'),'f'].values.tolist(), self.CONFIG['N_workers'])
    S2_L2A_df = async_load_parquets(self.catalog.loc[(self.catalog['date']>=start_date) & (self.catalog['date']<end_date) & (self.catalog['product']=='S2MSI2A'),'f'].values.tolist(), self.CONFIG['N_workers'])
    # filter only desired polarisation for S1
    S1_df = S1_df[S1_df['polarisationmode']=='VV VH']
    # Normalise timestamp columns to pandas datetimes.
    S2_L1C_df['beginposition'] = pd.to_datetime(S2_L1C_df['beginposition'])
    S2_L2A_df['beginposition'] = pd.to_datetime(S2_L2A_df['beginposition'])
    S1_df['beginposition'] = pd.to_datetime(S1_df['beginposition'])
    S1_df['endposition'] = pd.to_datetime(S1_df['endposition'])
    # only retain S2 records where there is both L1C and L2A
    S2_L2A_df = S2_L2A_df[S2_L2A_df['level1cpdiidentifier'].isin(S2_L1C_df['level1cpdiidentifier'])]
    S2_L2A_df = S2_L2A_df.set_index('level1cpdiidentifier') # looks like it's just used to match them.
    S2_L1C_df = S2_L1C_df.set_index('level1cpdiidentifier')
    #print ('len S2', len(S2_L2A_df))
    # get S2_L2A_df geometry intersection
    # UTM tile name: 6th '_' field of the title, leading letter dropped.
    S2_L2A_df['utm_tile'] = S2_L2A_df['title'].str.split('_').str[5].str[1:]
    # Attach the tile geometry for each record via the tile name.
    S2_L2A_df = pd.merge(S2_L2A_df.reset_index(), self.S2_tiles[['Name','geometry']], how='left',left_on='utm_tile',right_on='Name')
    S2_L2A_df = S2_L2A_df.set_index('level1cpdiidentifier')
    S2_L2A_df['intersection_geom'] = S2_L2A_df.apply(lambda row: row['geometry'].intersection(wkt.loads(row['footprint'])), axis=1)
    # get coverage: geodesic area ratio of (tile ∩ footprint) to the tile
    S2_L2A_df['coverage'] = S2_L2A_df.apply(lambda row: area(geometry.mapping(row['intersection_geom']))/area(geometry.mapping(row['geometry'])), axis=1)
    #S2_L2A_df['coverage'] = S2_L2A_df.apply(lambda row: row['intersection_geom'].area/wkt.loads(row['footprint']).area, axis=1)
    #print (S2_L2A_df.loc[:,['title','coverage']])
    return S1_df, S2_L1C_df, S2_L2A_df
def test_area_illinois_with_string(self): """ Computer the area of illinois with string input """ # Go the top of the file f.seek(0) illinois = f.read() self.assertEqual(round(area(illinois), 2), round(illinois_area, 2))
def test_liststring_area(self):
    """ Compute the area of a line string """
    line_string = {'type': 'LineString', 'coordinates': [[0, 0], [1, 1]]}
    # A LineString encloses nothing, so its area is zero.
    self.assertEqual(area(line_string), 0)
def test_area_illinois_with_string(self): """ Computer the area of illinois with string input """ # Go the top of the file f.seek(0) illinois = f.read() self.assertEqual(round(area(illinois), 2), round(illinois_area, 2))
async def predict_tile(self, session, tile):
    """Fetch ML building predictions for one tile and aggregate their area.

    Returns a dict with the tile quadkey, centre and total predicted building
    area, or None (implicitly) when the fetch fails -- errors are only logged.
    """
    quadkey = get_tile_quadkey(tile)
    bounds = list(mercantile.bounds(tile))
    polygon_wkt = bbox_to_polygon_wkt(bounds)
    tile_url = f'{self.endpoint}?searchAreaWkt={polygon_wkt}&outputFormat=geojson'
    try:
        res = await session.get(tile_url)
        if res.status != 200:
            logging.warning(f'Unable to fetch tile {tile_url}')
            # Raising here routes the failure through the except branch below.
            raise Exception(f'Unable to fetch tile {tile_url}')
        data = await res.json()
        # FIXME: validate data
        building_area = 0
        for feature in data['features']:
            # Clip each predicted footprint to the tile before measuring it.
            geometry = clip_polygon(tile, feature['geometry'])
            building_area = building_area + area(geometry)
        return {
            'quadkey': quadkey,
            'centroid': get_tile_center(tile),
            'predictions': {
                'ml_prediction': building_area,
                'url': tile_url,
            }
        }
    except Exception as e:
        logging.error(str(e))
def area_calc(f):
    """Return the geodesic area of a GeoJSON feature's geometry."""
    geom = f['geometry']
    # Rebuild a bare geometry dict so only type/coordinates are passed on.
    return area({'type': geom['type'], 'coordinates': geom['coordinates']})
def testPPI(self):
    """Transform a polar scan into a cartesian image over area "ang_240" via
    the PPI transform and save the result to cartesian_ppi.h5."""
    volume = _raveio.open(self.VOLUMENAME).object
    transformer = _rave.transform()
    transformer.method = _rave.NEAREST
    # Target cartesian area definition (size, scale, extent, projection).
    a = area.area("ang_240")
    param = _rave.cartesianparam()
    param.nodata = 255.0
    param.undetect = 0.0
    param.quantity = "DBZH"
    param.setData(numpy.zeros((a.ysize, a.xsize), numpy.uint8))
    cartesian = _rave.cartesian()
    cartesian.xscale = a.xscale
    cartesian.yscale = a.yscale
    cartesian.areaextent = a.extent
    cartesian.date = "20100101"
    cartesian.time = "090000"
    cartesian.source = volume.source
    # NOTE(review): product is CAPPI although the transform below is a PPI --
    # confirm this is intentional.
    cartesian.product = _rave.Rave_ProductType_CAPPI
    cartesian.objectType = _rave.Rave_ObjectType_IMAGE
    cartesian.areaextent = a.extent  # duplicate assignment; harmless
    cartesian.projection = _rave.projection(a.Id, a.name, pcs.pcs(a.pcs).tostring())
    cartesian.addParameter(param)
    cartesian.defaultParameter = "DBZH"
    scan = volume.getScan(0)
    transformer.ppi(scan, cartesian)
    rio = _raveio.new()
    rio.object = cartesian
    rio.save("cartesian_ppi.h5")
def load(self, fname):
    """Parse a map definition file and populate realms, areas, cities and
    countries; a temporary GLUT window is created for the duration of the load."""
    glutInit()
    window = glutCreateWindow('dumm')
    plist = DaPa.parse(fname)
    self.map_image = plist['init'].get('map_image')
    for i in plist.keys():
        # '#' and 'init' are bookkeeping sections, not realms.
        if i == '#':
            continue
        if i == 'init':
            continue
        else:
            tr = map_realm()
            tr.name = i
            tr.pos = plist[i].get('pos', tr.pos)
            dlist = DaPa.parse(plist[i].get('data'))
            tr.load_prov_map(dlist['init'].get('provinces_map'))
            tr.paths = self.load_paths(dlist['init'].get('paths'))
            tr.polygones = self.load_polygones(tr.paths)
            self.map_realms[tr.name] = tr
            if 'areas' in dlist.keys():
                # NOTE(review): the inner loops rebind `i` (the realm key);
                # it is not read again afterwards, but this shadowing is fragile.
                for key, i in dlist['areas'].items():
                    ta = area()
                    ta.name = key
                    ta.color = tuple(i.get('color', ta.color))
                    ta.select_texture = i.get('pic', ta.select_texture)
                    ta.center = i.get('center', ta.center)
                    ta.path = i.get('path', ta.path)
                    ta.ways = i.get('ways', ta.ways)
                    ta.realm = tr.name
                    ta.region = i.get('region', ta.region)
                    #ta.id = int(i)
                    tr.areas_colors[ta.color] = ta
                    Game.areas_id[ta.name] = ta
            if 'cities' in dlist.keys():
                for key, i in dlist['cities'].items():
                    pos = i.get('pos')
                    if pos == None:
                        continue
                    c = Game.create_city(pos)
                    c.name = key
                    if 'buildings' in i.keys():
                        # each item is (building_name, (arg0, arg1))
                        for j in i['buildings'].items():
                            c.build(j[0], j[1][0], j[1][1])
            if 'countries' in dlist.keys():
                for key, i in dlist['countries'].items():
                    c = Game.create_country(key)
                    c.icon = i.get('icon', c.icon)
                    prov = i.get('provinces', [])
                    for i in prov:
                        c.get_area(i)
            # Grow the overall map bounds to enclose this realm.
            if self.width < tr.width + tr.pos[0]:
                self.width = tr.width + tr.pos[0]
            if self.height < tr.height + tr.pos[1]:
                self.height = tr.height + tr.pos[1]
    glutDestroyWindow(window)
def test_geometry_collection_area(self):
    """ Compute the area of a geometry collection """
    collection = {'type': 'GeometryCollection', 'geometries': [world, illinois]}
    # A collection's area is the sum of its members' areas.
    self.assertEqual(area(collection), illinois_area + world_area)
def test_w_avg_sf(): """ Function that finds average scale factor over a region, weighting each scale factor in proportion to its land's area. This test averages four grid boxes with lon bounds (-175, 170) and lat bounds (-86, -82). These correspond to the boxes at [1, 1], [2, 1], [2, 2], and [1, 2] (counterclock-wise) """ # get lon/lat arrays lon = np.load(LON_LAT_DIR + '/lon.npy') lat = np.load(LON_LAT_DIR + '/lat.npy') # define some test lon and lat bounds lon_bounds = (-175., -170.) lat_bounds = (-86., -82.) # make a faux scale factor array sfs_arr = np.zeros((1, 1, 72, 46)) # fill in values sfs_arr[0, 0, 1, 1] = 1 sfs_arr[0, 0, 2, 1] = 2 sfs_arr[0, 0, 2, 2] = 3 sfs_arr[0, 0, 1, 2] = 4 # manually compute the weighted area average box_ar_1 = area(computation.subgrid_rect_obj(-177.5, -88)) box_ar_2 = area(computation.subgrid_rect_obj(-172.5, -88)) box_ar_3 = area(computation.subgrid_rect_obj(-172.5, -84)) box_ar_4 = area(computation.subgrid_rect_obj(-177.5, -84)) areas = [box_ar_1, box_ar_2, box_ar_3, box_ar_4] box_vals = [1, 2, 3, 4] # find the weighted average weighted_avg = sum([areas[i] * box_vals[i] / sum(areas) for i in range(4)]) # compute using the function weighted_avg_comp = computation.w_avg_sf(sfs_arr=sfs_arr, lon=lon, lat=lat, lon_bounds=lon_bounds, lat_bounds=lat_bounds, month=0) assert weighted_avg == weighted_avg_comp
def tmp_bakcup_func(session):
    """Run mysqldump backups for every intranet host listed in tmpbak.ini and
    return a serialized result dict with 'code', 'msg' and 'errmsg' lists."""
    result = {'code': 0, 'msg': [], 'errmsg': []}
    path = '/tools/config/tmpbak.ini'
    configname = "/tools/config/nsconfig/config.ini"
    if not os.path.isfile(path):
        result['errmsg'].append('%s配置文件不存在' % path)
    if not os.path.isfile(configname):
        result['errmsg'].append('%s配置文件不存在' % configname)
    try:
        # Load backup credentials and destination path from config.ini.
        g = ConfigLoad(configname)
        lists = g.ProfileHandleNew('bakmysql')
        strUser = lists['user']
        strPass = lists['passwd']
        filepath = lists['path']
        timepick = time.strftime('%Y%m%d')
        gamearea = area.area()
        gamearea.init()
        file = open(path, 'r')
        file_object = file.readlines()
        regular = re.compile('^' + share.regex_str.regex_IntranetIP)
        # Locate the mysqldump binary.
        shell = ''' whereis mysqldump |awk '{print $2}' '''
        cet, ceterr = get_output(shell)
        if ceterr != "":
            result['code'] = -100
            result['errmsg'].append('%s : %s' % (sys.exc_info()[0], sys.exc_info()[1]))
        removal = cet.strip('\n')
        # Read /tools/config/tmpbak.ini line by line; for each entry matching an
        # intranet IP, run a mysqldump backup for that host/database.
        for i in file_object:
            if regular.findall(i) != []:
                info = i.split()
                # skip account databases
                if "account" in info[1]:
                    continue
                a = info[0].split('.')
                merge = '%s/%s' % (filepath, info[2])
                if not os.path.isfile(merge):
                    # Ensure the per-group target directory exists.
                    shell = '''mkdir -p %s/%s ''' % (filepath, info[2])
                    ret, reterr = get_output(shell)
                gamearea.addgws(
                    info[0], strUser, strPass, info[1], '',
                    '%s/%s/%s_' % (filepath, info[2], info[1] + '_' + a[3]) +
                    timepick + '.gz')
                # NOTE(review): dumpdb is assumed to run per matched entry here;
                # confirm it was not meant to run once after the loop.
                errmsg = gamearea.dumpdb(removal, session)
                if errmsg != []:
                    result['code'] = -100
                    for x in range(len(errmsg)):
                        result['errmsg'].append(errmsg[x])
    except:
        result['code'] = -100
        result['errmsg'].append('%s : %s' % (sys.exc_info()[0], sys.exc_info()[1]))
    if result['errmsg'] != []:
        result['msg'].append('部分大区备份已完成')
    else:
        result['msg'].append('所有大区备份已完成')
    return obj2str(result)
def test01(self):
    """rect_area(3, 4) must return the rectangle area 12."""
    height = 3
    width = 4
    correct_answer = 12
    answer = aaa.area().rect_area(height, width)
    # BUG FIX: the original only printed 'Test passed'/'Test failed', so the
    # test could never actually fail; assert through the test framework.
    self.assertEqual(answer, correct_answer)
def selectData(selectData):
    """Return a human-readable size (m²) of the polygon drawn by a map selection.

    Args:
        selectData: plotly-style selection dict with a 'points' list of
            {'lon': ..., 'lat': ...} entries, or None when nothing is selected.

    Returns:
        'Selected area:<n> m²' string, or '' when there is no selection.
    """
    # Idiom fix: identity comparison with None instead of `== None`.
    if selectData is None:
        return ''
    # Build the polygon ring from the selected points (comprehension replaces
    # the manual index loop).
    coor = [[p['lon'], p['lat']] for p in selectData['points']]
    obja = {'type': 'Polygon', 'coordinates': [coor]}
    x = ar.area(obja)
    return 'Selected area:' + f"{int(x):,d}" + ' m²'
def test_w_avg_flux(): """ Essentially the same test as test_w_avg_sf. Some inputs are different. """ # get lon/lat arrays lon = np.load(LON_LAT_DIR + '/lon.npy') lat = np.load(LON_LAT_DIR + '/lat.npy') # define some test lon and lat bounds lon_bounds = (-175., -170.) lat_bounds = (-86., -82.) # make a faux scale factor array sfs_arr = np.zeros((1, 72, 46)) # fill in values sfs_arr[0, 1, 1] = 1 sfs_arr[0, 2, 1] = 2 sfs_arr[0, 2, 2] = 3 sfs_arr[0, 1, 2] = 4 # manually compute the weighted area average box_ar_1 = area(computation.subgrid_rect_obj(-177.5, -88)) box_ar_2 = area(computation.subgrid_rect_obj(-172.5, -88)) box_ar_3 = area(computation.subgrid_rect_obj(-172.5, -84)) box_ar_4 = area(computation.subgrid_rect_obj(-177.5, -84)) areas = [box_ar_1, box_ar_2, box_ar_3, box_ar_4] box_vals = [1, 2, 3, 4] # find the weighted average weighted_avg = sum([areas[i] * box_vals[i] / sum(areas) for i in range(4)]) # compute using the function weighted_avg_comp = computation.w_avg_flux(flux_arr=sfs_arr, ocean_idxs=np.array([]), lon=lon, lat=lat, lon_bounds=lon_bounds, lat_bounds=lat_bounds, month=0) assert weighted_avg == weighted_avg_comp
def check_cross_polygon(polygons_dict, region):
    """ Checking for the complete occurrence of a given area exclusively in
    the area of intersection of polygons and when the condition is met,
    returns the polygon with the lowest index"""
    result_poly_name = ''
    start_len = len(polygons_dict)
    poly_names = []
    # Geodesic area of the target region, used as the reference value.
    poly_region_default_area = area(
        geojson.Feature(geometry=region, properties={}).geometry)
    # Compare every ordered pair of polygons (including self-pairs, which are
    # filtered out by the `< control_area` check below).
    for main_el in polygons_dict:
        for child_el in polygons_dict:
            intersection_region_area = 0
            main_poly = shapely.geometry.asShape(main_el['geometry'])
            child_poly = shapely.geometry.asShape(child_el['geometry'])
            intersection_polygon = main_poly.intersection(child_poly)
            control_area = area(
                geojson.Feature(geometry=child_poly, properties={}).geometry)
            # Proper (partial) overlap only: intersection strictly smaller
            # than the child polygon itself.
            if not intersection_polygon.is_empty and area(
                    geojson.Feature(geometry=intersection_polygon,
                                    properties={}).geometry) < control_area:
                intersection_region = region.intersection(intersection_polygon)
                if not intersection_region.is_empty:
                    intersection_region_area = area(
                        geojson.Feature(geometry=intersection_region,
                                        properties={}).geometry)
                # Region fully inside the overlap (areas equal to 2 d.p.):
                # remember both polygon names.
                if float("{0:.2f}".format(intersection_region_area)) == float(
                        "{0:.2f}".format(poly_region_default_area)):
                    poly_names.append(main_el["properties"]["Name"])
                    poly_names.append(child_el["properties"]["Name"])
    if poly_names:
        # Keep only the alphabetically-first matching polygon.
        result_poly_name = sorted(set(poly_names))[0]
        idx = 0
        iteration_range = len(polygons_dict)
        while idx < iteration_range:
            if polygons_dict[idx]["properties"]["Name"] != result_poly_name:
                del polygons_dict[idx]
                iteration_range -= 1
            else:
                idx += 1
    # None signals "no change was made".
    if len(polygons_dict) != start_len:
        return polygons_dict
    else:
        return None
def is_AreaConstraint(self):
    """Configure this constraint object as an area constraint on self.curve.

    Despite the `is_` name, this is a mutator: it builds the area computation
    for the curve and wires it (plus its contractor) into this object.
    """
    area_constraint = area(self.curve)
    self.type = 'area'
    self.computeUpdate = area_constraint
    self.object = self.computeUpdate
    # Both the pass-through and the target value mirror self.value.
    self.pass_value = self.value
    self.given = self.value
    self.has_contractor = True
    self.contractor = area_constraint.contractor
    #self.y_axis = y_axis
    return
def get_area_from_row(r):
    """Geodesic area (m²) of the grid cell whose corner lon/lats live in row r."""
    ring = [
        [r[co.LON_00], r[co.LAT_00]],
        [r[co.LON_10], r[co.LAT_10]],
        [r[co.LON_11], r[co.LAT_11]],
        [r[co.LON_01], r[co.LAT_01]],
        [r[co.LON_00], r[co.LAT_00]],  # repeat first corner to close the ring
    ]
    return area.area({'type': 'Polygon', 'coordinates': [ring]})
def get_nghd_area(obj):
    """ Returns the area of the neighborhood in square miles. """
    if 'sqmiles' in obj['properties']:
        return obj['properties']['sqmiles']
    # TODO add other ways that people store neighborhood areas here.
    else:
        nghd_area_sqm = area.area(obj['geometry'])
        # debug output (parenthesized so it works on Python 2 and 3)
        print(obj)
        print(nghd_area_sqm)
        return nghd_area_sqm / 2589988.11  # 1 sq mile = this many sq meters.
        # BUG FIX: removed the unreachable trailing `return None`.
def calcHomelessStats(hcounts, tractShapes, tractPopulations):
    """Join LA County homeless counts with census-tract populations and areas,
    returning a DataFrame with density, rate and 'conflict' statistics."""
    df = tractPopulations
    print("Calculating areas of each census tract... ", end='', flush=True)
    SQ_METERS_TO_SQ_MILES = 3.86102e-7
    # Geodesic area of each tract shape, converted to square miles.
    areas = {
        f['properties']['TRACTCE']: area(f['geometry']) * SQ_METERS_TO_SQ_MILES
        for f in tractShapes['features']
    }
    print("DONE.")
    dfAreas = pd.DataFrame.from_dict(areas, orient='index', columns=['area'])
    # print("Census tract shape duplicates")
    # printDups(dfAreas.index)
    # must use state code too, county codes are only unique within a state
    df = df[(df.STATEA == 6) & (df.COUNTYA == 37)]
    df = df[['TRACTA', 'H7V001']]
    df = df.set_index('TRACTA')
    df.columns = ['population']
    print("Found %i LA County census tract population blocks." % len(df.index))
    # print(df)
    df = df.merge(dfAreas, left_index=True, right_index=True, how='inner')
    df['density'] = df.population / df.area
    # print(df)
    # print("Census tract population duplicates")
    # printDups(df.index)
    hh = hcounts
    df = df.rename_axis('TRACTA')
    hh = hh.reset_index(level=['Year'])
    # print(hh)
    df = hh.join(df, how='inner')
    #df = df.merge(hh, left_on=['TRACTA'], right_on=['TRACTA'], how='inner')
    # Unsheltered homeless per square mile / per resident, and a combined
    # geometric-mean "conflict" score of resident and homeless density.
    df['hdense'] = df.Unsheltered / df.area
    df['hrate'] = df.Unsheltered / df.population
    df['hconflict'] = (df.density * df.hdense)**(1 / 2)
    # Yearly summary printed for inspection only; not returned.
    x = hh.groupby(['Year']).agg({
        'Unsheltered': ['count', 'sum'],
        'Sheltered': 'sum',
        'Total': 'sum'
    })
    print(x)
    return df
def validate_query_area(cls, v, values):
    """Reject AOIs larger than 200 000 km² when a date range is requested,
    since such queries take too long."""
    aoi_km2 = area(v.geometry.dict()) / (1000 * 1000)
    if aoi_km2 > config.TIMELAPSE_MAX_AREA and values.get("date_range"):
        raise ValueError(
            "AOI cannot exceed 200 000 km^2, when queried with a date range. "
            "To query with this AOI please query with a single date")
    return v
def calculate_column_change(pop_raster, poly_layer):
    """Calculates flood impact per isochrone for each column.

    Args:
        pop_raster: population raster readable by ``zonal_stats``.
        poly_layer: GeoDataFrame with pre/post columns 'counter_1',
            'cap_int_1', 'counter_2', 'cap_int_2' and 'geometry'.

    Returns:
        GeoDataFrame with per-polygon population, densities and their change.
    """
    pop_data = []
    poly_layer = poly_layer[[
        'counter_1', 'cap_int_1', 'counter_2', 'cap_int_2', 'geometry'
    ]]
    for poly_idx_n, poly_cap_n, poly_idx_2, poly_cap_2, poly_geom in zip(
            poly_layer['counter_1'], poly_layer['cap_int_1'],
            poly_layer['counter_2'], poly_layer['cap_int_2'],
            poly_layer['geometry']):
        feature = gpd.GeoSeries([poly_geom]).to_json()
        # drop geometries with an area size smaller than 1m²
        if area(mapping(poly_geom)) > 0.000009039:
            # calculate population for given area
            pop_stats = zonal_stats(feature, pop_raster, stats=['sum'])
            poly_area = area(mapping(poly_geom)) / 1e+6  # in kilometer
            pop_data.append([
                poly_idx_n, poly_cap_n, poly_idx_2, poly_cap_2,
                pop_stats[0]['sum'], poly_area, poly_geom
            ])
    df = pd.DataFrame(pop_data,
                      columns=[
                          'counter_1', 'cap_int_1', 'counter_2', 'cap_int_2',
                          'pop', 'area', 'geometry'
                      ])
    # BUG FIX: dropna returns a new frame; the original discarded the result,
    # so rows with missing population leaked into the density calculations.
    df = df.dropna(subset=['pop'])
    df['pop_area'] = df['pop'] / df['area']  # population density
    df['cap_pop'] = (df['cap_int_1'] / df['pop']) * 100000
    df['cap_dens'] = df['cap_int_1'] / df['pop_area']
    df['cap_dens_2'] = df['cap_int_2'] / df['pop_area']
    df['cap_dens_d'] = df['cap_dens_2'] - df['cap_dens']
    # normalise divisions by zero (inf) to NaN
    df = df.replace([np.inf, -np.inf], np.nan)
    result_geodf = gpd.GeoDataFrame(df, geometry='geometry')
    return result_geodf
def mysql_bakcup_func(session):
    """Run mysqldump backups for every intranet host listed in tmpbak.ini,
    reporting failures through log()."""
    path = '/tools/config/tmpbak.ini'
    configname = "/tools/config/nsconfig/config.ini"
    if not os.path.isfile(path):
        log('mysqlbackup', '3', '%s配置文件不存在' % path)
    if not os.path.isfile(configname):
        # BUG FIX: this branch previously reported `path` instead of the
        # actually-missing config.ini.
        log('mysqlbackup', '3', '%s配置文件不存在' % configname)
    # BUG FIX: errmsg was unbound when an exception fired before the dump
    # loop, making the final check raise NameError.
    errmsg = []
    try:
        # Load backup credentials and destination path from config.ini.
        g = ConfigLoad(configname)
        lists = g.ProfileHandleNew('bakmysql')
        strUser = lists['user']
        strPass = lists['passwd']
        filepath = lists['fullpath']
        timepick = time.strftime('%Y%m%d')
        gamearea = area.area()
        gamearea.init()
        file = open(path, 'r')
        file_object = file.readlines()
        regular = re.compile('^' + share.regex_str.regex_IntranetIP)
        # Locate the mysqldump binary.
        shell = ''' whereis mysqldump |awk '{print $2}' '''
        cet, ceterr = get_output(shell)
        if ceterr != "":
            log('mysqlbackup', '3',
                '%s : %s' % (sys.exc_info()[0], sys.exc_info()[1]))
        removal = cet.strip('\n')
        # Read /tools/config/tmpbak.ini line by line; for each entry matching
        # an intranet IP, run a mysqldump backup for that host/database.
        for i in file_object:
            if regular.findall(i) != []:
                info = i.split()
                a = info[0].split('.')
                merge = '%s/%s' % (filepath, info[2])
                if not os.path.isfile(merge):
                    # Ensure the per-group target directory exists.
                    shell = '''mkdir -p %s/%s ''' % (filepath, info[2])
                    ret, reterr = get_output(shell)
                gamearea.addgws(
                    info[0], strUser, strPass, info[1], '',
                    '%s/%s/%s_' % (filepath, info[2], info[1] + '_' + a[3]) +
                    timepick + '.gz')
                errmsg = gamearea.dumpdb(removal, session)
                err = ','.join(errmsg)
                if errmsg != []:
                    log('mysqlbackup', '3', err)
    except:
        log('mysqlbackup', '3',
            '%s : %s' % (sys.exc_info()[0], sys.exc_info()[1]))
    if errmsg != []:
        log('mysqlbackup', '3', '备份不完整,部分大区已备份完成')
    else:
        # parenthesized print works on both Python 2 and 3
        print('All mysqlbackup ok')
def calArea(self, data):
    """Calculate feature area and return it

    Args:
        data (Object): Geojson object

    Returns:
        Float: area value (the configured default when the computed area is
        zero or negligibly small)
    """
    computed = area(data)
    # Guard against zero/near-zero results: fall back to the default area.
    if computed <= 0.00000001:
        return self.defaultarea
    return computed
def shp_area(polygon):
    ''' shp_area() - Return the area of a PyShp polygon

    Parameters
    ----------
    polygon : PyShp shapefile polygon

    Returns
    -------
    Polygon area : float
    '''
    # Imported locally so the module works without a hard dependency on the
    # `area` package at import time.
    from area import area
    # NOTE(review): area() expects GeoJSON-like input; presumably the PyShp
    # polygon exposes a compatible mapping -- confirm with callers.
    return area(polygon)
def reached_area(iso_layer):
    """Calculate reached area per amenity and isochrone value."""
    # One empty bucket per configured amenity type.
    area_data = {amenity: {} for amenity in SETTINGS['amenity_osm_values']}
    rows = zip(iso_layer['amenity'], iso_layer['value'], iso_layer['geometry'])
    for amenity, iso_value, iso_geom in rows:
        area_data[amenity][iso_value] = area(mapping(iso_geom))
    return area_data
def load_areas(self, fname):
    """Parse area definitions from fname and register each area on its realm's
    colour map and in self.areas_id."""
    plist = DaPa.parse(fname)
    for i in plist.keys():
        # '#' is a comment/bookkeeping section, not an area.
        if i == '#':
            continue
        ta = area()
        ta.name = i
        if 'color' in plist[i].keys():
            ta.color = (int(plist[i]['color'][0]),
                        int(plist[i]['color'][1]),
                        int(plist[i]['color'][2]))
        # Optional attributes fall back to the area's defaults.
        ta.select_texture = plist[i].get('pic', ta.select_texture)
        ta.center = plist[i].get('center', ta.center)
        ta.path = plist[i].get('path', ta.path)
        ta.ways = plist[i].get('ways', ta.ways)
        ta.realm = plist[i].get('realm', ta.realm)
        ta.region = plist[i].get('region', ta.region)
        # Areas are looked up by colour within their realm, and by name globally.
        self.map.map_realms[ta.realm].areas_colors[ta.color] = ta
        self.areas_id[i] = ta
def ll_area_new(lat, res):
    """Return the geodesic area (km²) of each lat/lon grid cell.

    Args:
        lat: 2-D numpy array of cell-centre latitudes, in degrees.
        res: grid resolution / cell edge length, in degrees.

    Returns:
        numpy array of cell areas in km², same shape as ``lat``.
    """
    from area import area
    # Arbitrary start longitude -- geodesic cell area depends on latitude only.
    startlon = 0
    return_area = np.zeros_like(lat)
    isize, jsize = return_area.shape
    # BUG FIX: the original ignored `res` and hard-coded 0.125/0.25 (a 0.25°
    # grid); the cell corners now honour the requested resolution. Behaviour
    # is unchanged for res == 0.25.
    half = res / 2.0
    for i in range(isize):
        for j in range(jsize):
            obj = {
                'type': 'Polygon',
                'coordinates': [[[startlon, lat[i, j] - half],
                                 [startlon, lat[i, j] + half],
                                 [startlon + res, lat[i, j] + half],
                                 [startlon + res, lat[i, j] - half]]]
            }
            return_area[i, j] = area(obj) / 1000.0 / 1000.0  # m² -> km²
    return return_area
def selectData(selectData):
    """Return the grouped Percentage/Type counts (as JSON records) for the map
    points inside the current selection; '' when nothing is selected."""
    filtList = []
    coor = []
    if selectData == None:
        # NOTE(review): ''.format(filtList) is always just '' -- the format
        # call is a no-op, presumably a leftover from a template string.
        return ''.format(filtList)
    else:
        # selmapdata = map_data[map_data["Id"].isin(selectData['points']['text'])]
        for i in range(len(selectData['points'])):
            filtList.append(selectData['points'][i]['pointIndex'])
            coor.append([selectData['points'][i]['lon'], selectData['points'][i]['lat']])
        obja = {'type': 'Polygon', 'coordinates': [coor]}
        # Geodesic area of the selection polygon (debug output only).
        x = ar.area(obja)
        print(x)
        selmapdata = map_data[map_data["Id"].isin(filtList)].groupby(['Percentage', 'Type']).size().reset_index()
        selmapdata.columns = ['Percentage', 'Type', 'Count']
        return selmapdata.to_json(date_format='iso', orient='records')
def __init__(self, out, screen, tile_size, generator):
    """Build the tile grid of areas (with generated resources) and scatter
    cities across the window.

    NOTE(review): the `/` divisions below are presumably Python 2 integer
    division (results feed `range()`); confirm before porting to Python 3.
    """
    #avoid our attribute management system
    self.recurse_attributes = False
    self.screen = screen
    self.window = screen.get_size()
    self.tile_size = tile_size
    # Number of whole tiles that fit in the window ...
    self.tile_dim = (self.window[0]/tile_size[0], self.window[1]/tile_size[1])
    # ... then shrink the window to an exact multiple of the tile size.
    self.window = (self.tile_dim[0] * tile_size[0], self.tile_dim[1] * tile_size[1])
    self.start_rect = Rect((0, 0), tile_size)
    self.ts = ts = tile_size
    self.hts = hts = (ts[0]/2, ts[1]/2)  # half tile size
    # One area per tile, centred in the tile, with generator-driven resources.
    self.areas = [
        [area.area(vector3(x*ts[0] + hts[0], y*ts[1] + hts[1], 0), ts,
                   potential=resource.resource(food=generator(x, y),
                                               wood=generator(x, y),
                                               gold=generator(x, y),
                                               stone=generator(x, y)))
         for y in range(self.tile_dim[1])]
        for x in range(self.tile_dim[0])]
    # Cities jittered inside their tiles and linked to the containing area.
    self.cities = [
        city.city(vector3(x + (hts[0]-5) * random.random(),
                          y + (hts[1]-5) * random.random(), 0.0),
                  out, area=self.get_area_at(x/ts[0], y/ts[1]))
        for x in range(0, self.window[0], hts[0])
        for y in range(0, self.window[1], ts[1])]
def create_area(self, pos):
    """Flood-fill the province at pos with a fresh random colour, create an
    area keyed by that colour, and return it."""
    r = 0
    g = 0
    b = 0
    # Pick an RGB key not already used by an existing area (2..254 avoids
    # the extremes reserved elsewhere -- presumably; confirm).
    while (r, g, b) in self.areas.keys():
        r = random.randint(2, 254)
        g = random.randint(2, 254)
        b = random.randint(2, 254)
    tsur = self.provinces.get_surface()
    # Python 2 print of the fill result (debug output).
    print provfill.fill_prov(self.map_to_prov(pos), (r,g,b,255), tsur)
    self.provinces.replace_surface(tsur)
    a = area((r, g, b))
    a.name = 'Area ' + str(r) + str(g) + str(b)
    self.areas[(r, g, b)] = a
    self.last_created_area = a
    self.gr.redraw_map()
    return a
def calculate_geojson_area(self, spatial):
    """ Calculates the area of the spatial feature

    Args:
        spatial: GeoJSON-like geometry accepted by ``area``.

    Returns:
        The area as computed by ``area`` (square metres for the geojson
        `area` package -- confirm which `area` is in scope here).
    """
    return area(spatial)
def area2(xc, yc, xp, yp):
    # Area of the circle whose radius is the distance from the centre
    # (xc, yc) to the point (xp, yp).
    return area.area(distance.distance(xc, yc, xp, yp))
def test2(self):
    """area(Square(9)) equals the side length squared."""
    square = Square(9)
    self.assertAlmostEqual(area(square), 81)
def test_area_world(self):
    """ Compute the area of the whole world """
    computed = area(world)
    self.assertEqual(computed, world_area)
continue marks = [] try: shapes.append(walk(( px - 1, py ), marks)) except Exception as e: remove += marks for px, py in remove: markclean(px, py) tile.save('ctiles/' + str(x) + '/' + str(y) + '.png') shapes = [ shape for shape in shapes if rhr.is_rhr(shape) ] shapes = [ douglaspeucker.simplify_poly(shape, 1.4) for shape in shapes ] shapes = [ shape for shape in shapes if len(shape) > 2 ] shapes = [ shape for shape in shapes if area.area(shape) > 150 ] for shape in shapes: orthogonalise() shapes = [ douglaspeucker.simplify_poly(shape, 1.0) for shape in shapes ] for shape in shapes: fixcorners() shapes = [ area.expand(shape, 1.5) for shape in shapes ] # Merge nodes nodes = [] ways = []
def test_geometry_collection_area(self):
    """ Compute the area of a geometry collection """
    collection = {'type': 'GeometryCollection', 'geometries': [world, illinois]}
    # A collection's area is the sum of its members' areas.
    self.assertEqual(area(collection), illinois_area + world_area)
def test_liststring_area(self):
    """ Compute the area of a line string """
    line_string = {'type': 'LineString', 'coordinates': [[0, 0], [1, 1]]}
    # A LineString encloses nothing, so its area is zero.
    self.assertEqual(area(line_string), 0)
def test_point_area(self):
    """ Compute the area of a point """
    point = {'type': 'Point', 'coordinates': [0, 0]}
    # A point has zero area by definition.
    self.assertEqual(area(point), 0)
def circle_area(xc, yc, xp, yp):
    """Area of the circle centred at (xc, yc) passing through (xp, yp)."""
    radius = distance(xc, yc, xp, yp)
    return area(radius)
def test3(self):
    """area(Rectangle(2, 7)) equals width times height."""
    rect = Rectangle(2, 7)
    self.assertAlmostEqual(area(rect), 14)
def test_area_illinois(self):
    """ Compute the area of illinois """
    computed = round(area(illinois), 2)
    self.assertEqual(computed, round(illinois_area, 2))
def test1(self):
    """area(Circle(2)) equals pi times the radius squared."""
    circle = Circle(2)
    self.assertAlmostEqual(area(circle), 4 * math.pi)
from area import area # 经度&维度位于: 经度121.42624135594815~121.48529286962003, 纬度31.271208918485904~31.221897323905157 box_str = '{ \ "type": "Polygon", \ "coordinates": [ \ [ \ [ \ 121.42624135594815, \ 31.271208918485904 \ ], \ [ \ 121.42624135594815, \ 31.221897323905157 \ ], \ [ \ 121.48529286962003, \ 31.221897323905157 \ ], \ [ \ 121.48529286962003, \ 31.271208918485904 \ ]\ ] \ ] \ }' box = area(box_str) print(box/1000000)