def stats_rename_dataset(data_path):
    """Rename *_IRRG tif files so the leading region name is replaced by the grid's city_id."""
    pg_src = pgsql.Pgsql("10.0.81.19", "9999", "postgres", "", "gscloud_web")
    irrg_files = os.listdir(data_path)
    for irrg_file in irrg_files:
        if irrg_file.endswith('.tif'):
            oldpath = os.path.join(data_path, irrg_file)
            # file name pattern: <region>_<year>_<rowcol>_<suffix>.tif
            region = irrg_file.split('_')
            year = region[1]
            row = region[2][:2]
            col = region[2][2:]
            suffix = region[3]
            title = region[0] + '_' + year + '_' + row + '_' + col
            city_sql = '''select city_id from public.aisample_grid where title like '%s';''' % (title)
            # print(city_sql)
            city_data = pg_src.getAll(city_sql)
            if len(city_data) == 0:
                print('the file %s has no cityid' % (irrg_file))
            else:
                city_id = city_data[0][0]
                if city_id is None:
                    print(irrg_file, 'has no cityid')
                else:
                    newname = str(city_id) + '_' + year + '_' + row + '_' + col + '_' + suffix
                    newpath = os.path.join(data_path, newname)
                    mv_cmd = 'mv %s %s' % (oldpath, newpath)
                    os.system(mv_cmd)
def subtask_update_imageid_sid():
    """Attach an imageid to every subtask tile covered by each image, then flag tiles inside the task region."""
    pg_src = pgsql.Pgsql("10.0.81.19", "9999", "postgres", "", "gscloud_web")
    for region in region_dict.keys():
        # region_tiles_shp = os.path.join(region_bbox_path,(region + '_subtiles.shp'))
        # region is one of the region_dict.keys()
        # print('row,col: %s, %s'%(rnum,cnum))
        images_key = region_dict[region]['images_key']
        year_list = region_dict[region]['year']
        for year in year_list:
            # subtask--tiles into pgsql
            task_title = region + '_' + str(year)
            taskid = get_taskid_by_tasktitle(task_title)
            imageids = get_imageids(images_key=images_key, year=year)
            for image in reversed(imageids):
                # find all the tiles contained in the image bbox
                imagefile = os.path.join(irrg_path, image + '_IRRG.TIF')
                imagebbox = get_image_bbox_withoutnodata(imagefile, '/tmp/%s.shp' % image)
                tile_update_imageid_sql = '''UPDATE public.mark_subtask SET imageid=%s where taskid='%s' and ST_Contains(st_geomfromtext(%s), geojson);'''
                pg_src.update(tile_update_imageid_sql, (image, taskid, imagebbox))
            task_region = get_wkt_by_tasktitle(task_title)
            subtask_update_sql = '''UPDATE public.mark_subtask SET sid=1 where taskid='%s' and ST_Contains(st_geomfromtext(%s), st_geomfromtext(geojson));'''
            pg_src.update(subtask_update_sql, (taskid, task_region))
def gen_subtask():
    """Cut each region/year ground-truth raster into tiles and insert (or update) them as subtasks."""
    pg_src = pgsql.Pgsql("10.0.81.19", "9999", "postgres", "", "gscloud_web")
    for region in region_dict.keys():
        # region_tiles_shp = os.path.join(region_bbox_path,(region + '_subtiles.shp'))
        # region is one of the region_dict.keys()
        # print('row,col: %s, %s'%(rnum,cnum))
        images_key = region_dict[region]['images_key']
        year_list = region_dict[region]['year']
        for year in year_list:
            # subtask--tiles into pgsql
            task_title = region + '_' + str(year)
            imageids = get_imageids(images_key=images_key, year=year)
            gtfile = os.path.join(gt_path, region + '_' + str(year) + '.tif')
            tile_shp = os.path.join(region_bbox_path, (region + '_' + str(year) + '_' + 'tiles.shp'))
            wgs_bbox_list, rnum, cnum, region_bbox = gen_tile_bbox(gtfile, BLOCK_SIZE, OVERLAP_SIZE)
            tile_bbox_to_shp(wgs_bbox_list, rnum, cnum, tile_shp)
            # tasktiles_shp_into_pgsql(task_title, tile_shp, imageids)
            sql = "select id from public.mark_task where title='%s' " % (task_title)
            datas = pg_src.getAll(sql)
            taskid = datas[0][0]
            # insert tiles as subtask
            with fiona.open(tile_shp, 'r') as inp:
                projection = inp.crs_wkt
                for f in inp:
                    geojson = json.dumps(f['geometry'])
                    trans_state, geom = gjson_geotrans_to_wgs84(geojson, projection)
                    if trans_state == 0:
                        wkt = geom.ExportToWkt()
                        type = f['geometry']['type']
                        row = f['properties']['row']
                        col = f['properties']['col']
                        # guid=gen_uuid()
                        # zero-pad row/col to two digits so the guid sorts naturally
                        row_s = '0' + str(row)
                        col_s = '0' + str(col)
                        guid = task_title + '_' + row_s[-2:] + '_' + col_s[-2:]
                        ctime = get_curtime()
                        insert_sql = '''INSERT INTO public.mark_subtask (guid, taskid, ctime, geojson) VALUES(%s, %s, %s, %s);'''
                        # restrict the update to this guid so existing rows are refreshed, not every row overwritten
                        update_sql = '''UPDATE public.mark_subtask SET taskid=%s, ctime=%s, geojson=%s where guid=%s;'''
                        sql = "select * from public.mark_subtask where guid='%s' " % (guid)
                        datas = pg_src.getAll(sql)
                        if len(datas) == 0:
                            pg_src.update(insert_sql, (guid, taskid, ctime, wkt))
                            print("insert subtask tile of ", guid)
                        else:
                            pg_src.update(update_sql, (taskid, ctime, wkt, guid))
                            print("update subtask tile of ", guid)
def task_update():
    """Write each task's region geometry, ground-truth file and image list into mark_task."""
    pg_src = pgsql.Pgsql("10.0.81.19", "9999", "postgres", "", "gscloud_web")
    for region in region_dict.keys():
        # region_tiles_shp = os.path.join(region_bbox_path,(region + '_subtiles.shp'))
        # region is one of the region_dict.keys()
        # print('row,col: %s, %s'%(rnum,cnum))
        images_key = region_dict[region]['images_key']
        year_list = region_dict[region]['year']
        for year in year_list:
            task_title = region + '_' + str(year)
            region_shp = os.path.join(region_shp_path, region + '_wgs.geojson')
            print(region_shp)
            with open(region_shp, "r") as f:  # open the region geojson file
                data = f.read()  # read its content
            task_geojson = json.loads(data)
            geom_json = json.dumps(task_geojson['features'][0]['geometry'])
            geom = ogr.CreateGeometryFromJson(geom_json)
            task_wkt = geom.ExportToWkt()
            gtfile = os.path.join(gt_path, region + '_' + str(year) + '.tif')
            imageids = get_imageids(images_key=images_key, year=year)
            task_update_sql = '''UPDATE public.mark_task SET geojson=%s, gtfile=%s, image=%s where title=%s;'''
            pg_src.update(task_update_sql, (task_wkt, gtfile, imageids, task_title))
def sifting_subtask_tile(task_title):
    """Flag subtask tiles contained in the task region (sid=1) and return how many there are."""
    import utils.pgsql as pgsql
    pg_src = pgsql.Pgsql("10.0.81.19", "9999", "postgres", "", "gscloud_web")
    task_search_sql = '''SELECT id, geojson FROM public.mark_task where title='%s';''' % (task_title)
    data = pg_src.getAll(task_search_sql)
    taskid = data[0][0]
    task_region = data[0][1]
    # region_geom = task_region['geometry']
    subtask_update_sql = '''UPDATE public.mark_subtask SET sid=1 where taskid='%s' and ST_Contains(st_geomfromtext(%s), st_geomfromtext(geojson));'''
    pg_src.update(subtask_update_sql, (taskid, task_region))
    subtask_search_sql = '''SELECT id FROM public.mark_subtask where taskid='%s' and ST_Contains(st_geomfromtext('%s'), st_geomfromtext(geojson));''' % (taskid, task_region)
    # subtask_search_sql = '''SELECT id FROM public.mark_subtask where taskid='%s' and ST_Intersects(st_geomfromtext('%s'), st_geomfromtext(geojson));'''%(taskid,task_region)
    data1 = pg_src.getAll(subtask_search_sql)
    num = len(data1)
    print(num)
    return num
def update_cityid_to_grid():
    """Assign a city_id to every aisample_grid tile by intersecting it with the city geometries."""
    pg_src = pgsql.Pgsql("10.0.81.19", "9999", "postgres", "", "gscloud_web")
    city_list = ['石家庄市', '张家口市', '承德市', '乌海市', '鄂尔多斯市', '杭州市', '青岛市', '济宁市',
                 '泰安市', '临沂市', '菏泽市', '武汉市', '延安市', '榆林市', '银川市',
                 '石嘴山市', '吴忠市', '中卫市']
    grid_sql = '''select title from public.aisample_grid;'''
    grid_title = pg_src.getAll(grid_sql)
    for city in city_list:
        city_sql = '''SELECT id, provid, geom FROM public.cn_city where name like '%s';''' % (city)
        city_data = pg_src.getAll(city_sql)
        city_id = city_data[0][0]
        prov_id = city_data[0][1]
        city_geom = city_data[0][2]
        update_sql = '''UPDATE public.aisample_grid a SET city_id=
            (select cityid FROM public.aisample_cn_cities b
             where ST_Intersects(st_geomfromtext(st_astext(b.the_geom)), st_geomfromtext(a.geom)) limit 1);'''
        # the statement above carries no placeholders, so no parameters are passed;
        # city_id / prov_id / city_geom are only looked up for reference here
        pg_src.update(update_sql)
        print("update grid tile of ", city)
def region_search_dem(min_lat, max_lat, min_long, max_long):
    """Return the ASTGTM2 DEM dataids whose bounding boxes overlap the given lat/long window."""
    pg_src = pgsql.Pgsql("10.0.81.35", "2345", "postgres", "", "gscloud_metadata")
    data_sql = '''SELECT id, dataid, name, "path", "row", lt_long, lt_lat, rb_long, rb_lat, the_geom
        FROM public.metadata_dem_gdem
        where rb_long>%s and lt_long<%s and rb_lat<%s and lt_lat>%s ORDER BY row DESC;''' % (
        min_long, max_long, max_lat, min_lat)
    dem_data = pg_src.getAll(data_sql)
    num = len(dem_data)
    # output bounding box into shp
    dataid_list = []
    for i in range(num):
        record = dem_data[i]
        bbox = dem_data[i][9]
        dataid = dem_data[i][1]
        if dataid.startswith('ASTGTM2'):
            dataid_list.append(dataid)
    return dataid_list
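# Hedged usage sketch for region_search_dem: the coordinate values below are purely
# illustrative, they are not taken from the original script. Note the argument order
# is (min_lat, max_lat, min_long, max_long) while the SQL compares longitudes first.
# dem_dataids = region_search_dem(30.0, 31.0, 114.0, 115.0)
# print(len(dem_dataids), 'ASTGTM2 granules cover the window')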
import os
import shapely, fiona
import datetime, time
from osgeo import ogr, osr
import json
import utils.geojsons as gjsonc
import utils.pgsql as pgsql
from utils.geotrans import GeomTrans
import uuid

# data_root = "/mnt/gscloud/LANDSAT"
shp_path = "/mnt/win/phd/samples/pd_1995"
shp_file = os.path.join(shp_path, 'PD_1995_120035.shp')
# geojson=os.path.join(shp_path, 'pd_1995_120035.geojson')
# pg_src = pgsql.Pgsql("10.0.85.20", "postgres", "", "mark")
pg_src = pgsql.Pgsql("10.0.81.19", "9999", "postgres", "", "gscloud_web")


# def parse_shp_to_geojson(shpfile):
#     return geojson


def encode_json(t):
    # f=open(geojson_file)
    # t = json.loads(f.read())
    crs_wkt = t['crs']['properties']['name']
    num_geom = len(t['features'])
    for i in range(num_geom):
        print(i)
        geom = str(t['features'][i]['geometry'])
        # reproject each feature geometry to WGS84 and truncate coordinates to 4 decimals
        geom_wgs = GeomTrans(crs_wkt, 'EPSG:4326').transform_json(geom)
        geojs = gjsonc.trunc_geojson(json.loads(geom_wgs), 4)
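# A minimal driver sketch, not part of the original script: the ordering below
# (refresh task geometry, generate subtask tiles, attach image ids, then count tiles
# per task) is an assumption, and region_dict, the pgsql connection parameters and the
# helper functions are expected to be defined elsewhere in this module.
if __name__ == '__main__':
    task_update()                  # write task geometry, gtfile and image list into mark_task
    gen_subtask()                  # cut each region/year raster into tiles and insert them as subtasks
    subtask_update_imageid_sid()   # attach image ids and flag tiles inside each task region
    for region, cfg in region_dict.items():
        for year in cfg['year']:
            # report how many subtask tiles fall inside each task region
            sifting_subtask_tile(region + '_' + str(year))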