def getAverageWidth(sel_road, polygons, buffer):
    """Estimate the area-weighted average width of a road.

    Up to 16 polygons are sampled evenly from ``polygons``. For each sample
    the width is measured by intersecting a perpendicular line (built from
    the centroids of two clip results) with the road geometry. Widths far
    from the median (outside [median/tolerance, median*tolerance]) are
    discarded before computing the area-weighted mean.

    :param sel_road: road feature exposing ``extent`` and ``WKT`` attributes
    :param polygons: list of arcpy polygon geometries sampled along the road
    :param buffer: iterable of buffer geometries used to clip each polygon
    :return: area-weighted average width; 0 if nothing usable was measured
    """
    area, widths = [], []
    num = len(polygons)
    limit = 16
    # sample at most `limit` evenly spaced 1-based polygon indices
    sample_count = limit if num > limit else num
    width_idx = [int(v) for v in np.linspace(1, num, sample_count)]
    # the road geometry is loop-invariant -- build it once instead of per line
    road_geom = arcpy.FromWKT(sel_road.WKT)
    for idx in width_idx:
        i = idx - 1
        clip_poly = []
        for buf in buffer:
            temp = getPolygon(polygons[i], buf)
            if len(temp) == 2:
                # clip split the polygon in two -- sample unusable
                break
            if len(temp) == 1:
                clip_poly.append(temp[0])
        if len(clip_poly) != 2:
            continue
        area.append(polygons[i].area)
        try:
            p1x, p1y = clip_poly[0].centroid.X, clip_poly[0].centroid.Y
            p2x, p2y = clip_poly[1].centroid.X, clip_poly[1].centroid.Y
        except Exception:
            # degenerate clip result without a usable centroid --
            # record zero width so `widths` stays aligned with `area`
            widths.append(0)
            continue
        n = 1
        per_lines = getLine(sel_road.extent, [p1x, p1y, p2x, p2y], n)
        width = []
        for line in per_lines:
            clip_line = line.intersect(road_geom, 2)
            poly_to_lines = polylineToLines(clip_line.WKT)
            if len(poly_to_lines) == 1:
                width.append(clip_line.length)
            else:
                # multi-part intersection: keep the part lying on the polygon
                for part_wkt in poly_to_lines:
                    string_to_line = arcpy.FromWKT(part_wkt)
                    if string_to_line.crosses(polygons[i]) or string_to_line.within(polygons[i]):
                        width.append(string_to_line.length)
                        break
        if width:
            widths.append(sum(width) / len(width))
        else:
            # no usable intersection (original code raised ZeroDivisionError
            # here) -- record 0 to keep `widths` aligned with `area`
            widths.append(0)
    totArea, result = 0, 0
    median = getMedian(widths)
    tolerance = 3  # reject widths outside [median/tolerance, median*tolerance]
    for w, a in zip(widths, area):
        if w >= tolerance * median or w <= median / tolerance:
            continue
        result += w * a
        totArea += a
    # guard: every sample may have been filtered out
    return result / totArea if totArea else 0
def FromWKT():
    """Build an arcpy MULTIPOLYGON geometry from ``poly_rings``.

    NOTE(review): ``poly_rings`` is a free variable -- this function takes no
    parameters, so it relies on a module-level ``poly_rings`` (a sequence of
    rings, each ring a sequence of (x, y) pairs) being defined elsewhere;
    without it the call raises NameError. TODO confirm and consider passing
    the rings in explicitly. The name also shadows ``arcpy.FromWKT`` if this
    module is star-imported.
    """
    # assemble "MULTIPOLYGON(((x y, x y, ...)),((...)))" -- one "((...))"
    # group per ring
    wkt = "MULTIPOLYGON({})".format(
        ",".join(
            "(({}))".format(
                ", ".join("{} {}".format(*xy) for xy in ring)
            ) for ring in poly_rings
        )
    )
    return arcpy.FromWKT(wkt)
def creatZxshp(outfile, values, spat_ref):
    """Create a POLYLINE shapefile named ``outfile`` and insert ``values``.

    Each item of ``values`` is indexed as (XH, TYBH, DCFH, DCQX, DCQJ,
    WKT-string); the WKT string becomes the row geometry.

    NOTE(review): relies on a module-level ``outPath`` for the output folder
    (and the commented-out fallback references ``outName``) -- confirm these
    globals exist. The DCQJ field is created but never populated (the
    assignment is commented out), and the insert cursor is never deleted.
    """
    # try:
    outshp = arcpy.CreateFeatureclass_management(outPath, outfile, "POLYLINE", "", "", "", spat_ref)
    arcpy.AddField_management(outshp, "XH", "TEXT")
    arcpy.AddField_management(outshp, "TYBH", "TEXT")
    arcpy.AddField_management(outshp, "DCFH", "TEXT")
    arcpy.AddField_management(outshp, "DCQX", "TEXT")
    arcpy.AddField_management(outshp, "DCQJ", "TEXT")
    # except:
    #     outshp = outPath+"/"+outName+".shp"
    cur = arcpy.InsertCursor(outshp)
    for value in values:
        row = cur.newRow()
        xhz = value[0]
        tybhz = value[1]
        dcfhz = value[2]
        dcqxz = str(value[3])
        dcqjz = str(value[4])  # computed but unused while row.DCQJ is commented out
        wktStings = value[5]
        # stx = value[5]
        # sty = value[6]
        # endx = value[7]
        # endy = value[8]
        plGeometry = arcpy.FromWKT(wktStings)
        row.shape = plGeometry
        row.XH = xhz
        row.TYBH = tybhz
        row.DCFH = dcfhz
        row.DCQX = dcqxz
        # row.DCQJ = dcqjz
        cur.insertRow(row)
def create_geometry(source_file, save_path):
    """Read lng/lat records from ``source_file`` and write them as point
    features to mobike_points.shp inside ``save_path``."""
    records = read_data(source_file)
    arcpy.env.workspace = save_path
    points = [
        arcpy.FromWKT('POINT (' + rec['lng'] + ' ' + rec['lat'] + ')')
        for rec in records
    ]
    arcpy.CopyFeatures_management(points, 'mobike_points.shp')
def getCover(extent, n, spacing):
    """Return ``n`` rectangular strip polygons that tile the given extent.

    For a 'type1' extent the strips advance along ``extent[5]`` in steps of
    ``spacing``; otherwise they advance along ``extent[3]``. A 0.1 offset is
    applied at every corner, exactly as in the original corner formulas.
    """
    strips = []
    along_y = (extent[0] == 'type1')
    for k in range(n):
        if along_y:
            lo = extent[5] + k * spacing - 0.1
            hi = extent[5] + (k + 1) * spacing - 0.1
            ring = [[extent[3] - 0.1, lo],
                    [extent[2] + 0.1, lo],
                    [extent[2] + 0.1, hi],
                    [extent[3] - 0.1, hi]]
        else:
            lo = extent[3] + k * spacing - 0.1
            hi = extent[3] + (k + 1) * spacing - 0.1
            ring = [[lo, extent[5] - 0.1],
                    [hi, extent[5] - 0.1],
                    [hi, extent[4] + 0.1],
                    [lo, extent[4] + 0.1]]
        # close the ring with the first corner, then convert to a geometry
        ring.append(ring[0])
        strips.append(arcpy.FromWKT(PointToPolygon(ring)))
    return strips
def PointToLine(points):
    """Build a LINESTRING geometry from a sequence of [x, y] pairs."""
    coords = ', '.join(str(p[0]) + ' ' + str(p[1]) for p in points)
    return arcpy.FromWKT('LINESTRING (' + coords + ')')
def get_item_from_queue(queue_item, spatial_reference):
    """Pop the next (feature_id, wkt) pair from the queue and return the id
    together with the WKT converted to an arcpy geometry.

    :param queue_item: queue-like object whose ``get()`` yields a
        (feature_id, geometry_wkt) tuple
    :param spatial_reference: spatial reference applied to the geometry
    :return: (feature_id, geometry) tuple
    """
    feature_id, geometry_wkt = queue_item.get()
    return feature_id, arcpy.FromWKT(geometry_wkt, spatial_reference)
def create_polyline(geo_str):
    """Convert an "x,y_x,y_..." coordinate string into a polygon geometry
    and save it as temp.shp.

    NOTE: despite the function name, the output WKT is a POLYGON, matching
    the original implementation.
    """
    arcpy.env.workspace = r"G:\xin.data\test_data"
    pairs = [token.split(",") for token in geo_str.split("_")]
    body = ",".join(xy[0] + " " + xy[1] for xy in pairs)
    wkt = "POLYGON(( " + body + "))"
    polygon = arcpy.FromWKT(wkt)
    arcpy.CopyFeatures_management(polygon, "temp.shp")
def getPolygon(main, cover):
    """Clip ``main`` by the extent of ``cover`` and split the (multi-part)
    result into a list of single-part POLYGON geometries.

    Returns [] when the clip result is empty.
    """
    try:
        poly_WKT = main.clip(cover.extent).WKT
    except:
        # NOTE(review): bare except -- presumably guards against inputs whose
        # clip() fails until the geometry is rebuilt from WKT; confirm which
        # exception this is meant to catch.
        main1 = arcpy.FromWKT(main.WKT)
        poly_WKT = main1.clip(cover.extent).WKT
    if 'EMPTY' in poly_WKT:
        return []
    # drop the leading "MULTIPOLYGON (" (14 chars) and the trailing ")"
    poly_WKT = poly_WKT[14:-1]
    polygons = []
    temp_poly = 'POLYGON '
    # character-by-character scan: a space followed by "(" marks the start of
    # the next part, at which point the accumulated part (minus the trailing
    # separator) is emitted as its own POLYGON
    for i in range(len(poly_WKT) - 1):
        temp_poly += poly_WKT[i]
        if poly_WKT[i] == ' ' and poly_WKT[i + 1] == '(':
            # trim the two separator characters accumulated before "("
            temp_poly = temp_poly[0:-2]
            polygons.append(arcpy.FromWKT(temp_poly))
            temp_poly = 'POLYGON '
            continue
        if i == len(poly_WKT) - 2:
            # end of input: close the final part (the very last source char
            # is replaced by this ")")
            temp_poly += ')'
            polygons.append(arcpy.FromWKT(temp_poly))
    return polygons
def wkt2shp(incsv_orlist, outfc, fieldout_list, typeout_list, order_list): sr = arcpy.SpatialReference(4326) #set to WGS 1984 arcpy.env.overwriteOutput = True #create empty shapefile, Id field will be automatically generated arcpy.CreateFeatureclass_management(os.path.dirname(outfc), os.path.basename(outfc), "POLYLINE", spatial_reference=sr) #add fields for fname, dtype in zip(fieldout_list[1:], typeout_list[1:]): arcpy.AddField_management(outfc, fname, dtype) print(fname, dtype) #delete default fields if len(fieldout_list) > 1: arcpy.DeleteField_management( outfc, 'Id' ) #minimum of one other field in addition to OID and Shape required #check fields added correctly in new feature class for fd in fieldout_list: print(fd, fd in [field.name for field in arcpy.ListFields(outfc)]) #search cursor on input text file if isinstance(incsv_orlist, list): cursor = incsv_orlist print('list input') else: cursor = arcpy.da.SearchCursor(incsv_orlist, ['*']) print(cursor.fields) #insert cursor on output shape file curs_out = arcpy.da.InsertCursor( outfc, fieldout_list) #if only columns are OID and Shape, could write an Id print(curs_out.fields) #cursor.next() #skip header line #not needed, arc seems to do this automatically for m, row in enumerate(cursor): if isinstance(incsv_orlist, list): temp = row else: temp = list(row) for i, r in enumerate(temp): #shapefiles don't support null values, need to adopt our own null convention if not r: if typeout_list[i].lower() == "text": temp[i] = '' else: temp[i] = -999 out_list = [arcpy.FromWKT(temp[order_list[0]], sr)] + [ temp[order_list[k]] for k in range(1, len(order_list)) ] curs_out.insertRow(out_list) return outfc
def insert_into_shp(shp, workspace, query_item):
    """Parse one crawled bus-line record and append it to shapefile ``shp``,
    then mark the source row as processed in MySQL.

    :param shp: target shapefile path
    :param workspace: arcpy workspace to use
    :param query_item: (uid, name, geo_type, json_page_text) tuple
    """
    uid = query_item[0]
    name = query_item[1]
    geo_type = query_item[2]
    page = query_item[3]
    json_page = json.loads(page)
    # `in` instead of Py2-only has_key (works on both Python 2 and 3)
    if "content" not in json_page:
        return
    content = json_page["content"]
    item_info = content[0]
    geo = item_info["geo"]
    _geo = geo.split("|")[2].strip(";")
    # build MULTILINESTRING((...),(...)) from "x,y,x,y;..." segments
    real_geo = "MULTILINESTRING("
    for segement in _geo.split(";"):
        real_geo = real_geo + "("
        los = segement.split(",")
        for i in range(0, len(los), 2):
            real_geo = real_geo + los[i] + " " + los[i + 1] + ","
        real_geo = real_geo.strip(",") + "),"
    real_geo = real_geo.strip(",") + ")"
    timetable = item_info["timetable"]
    if timetable is None:
        timetable = ""
    # ticketPrice is stored in cents
    price = int(item_info["ticketPrice"]) / 100.0
    current_city = json_page["current_city"]
    city = current_city["name"]
    if city is None:
        city = ""
    province = current_city["up_province_name"]
    if province is None:
        province = ""
    arcpy.env.workspace = workspace
    polyline = arcpy.FromWKT(real_geo)
    fields = [
        "UID", "NAME", "PROVINCE", "CITY", "GEO_TYPE", "TIMETABLE", "PRICE",
        "SHAPE@"
    ]
    values = [uid, name, province, city, geo_type, timetable, price, polyline]
    cursor = arcpy.da.InsertCursor(shp, fields)
    cursor.insertRow(values)
    del cursor
    db = MysqlHandle()
    # BUG FIX: the original referenced an undefined name ``item``; the id of
    # the row being processed is ``uid``.
    # NOTE(review): string-built SQL with crawled data -- prefer a
    # parameterized query if MysqlHandle supports one.
    sql = 'update baidu_busline_page set status=200 where uid="' + uid + '"'
    db.update(sql)
def getLine(extent, points, n):
    """Build ``n`` lines perpendicular to the segment defined by
    ``points`` = [x1, y1, x2, y2], evenly spaced along it and spanning the
    full width of ``extent`` (plus a 0.1 margin on each side)."""
    # fit y = a*x + b through the two endpoints; perpendicular slope is -1/a
    a, b = np.polyfit([points[0], points[2]], [points[1], points[3]], 1)
    perp_slope = -1 / a
    x_lo = min(points[0], points[2])
    x_hi = max(points[0], points[2])
    step = (x_hi - x_lo) / (n + 1)
    left_x = extent.XMin - 0.1
    right_x = extent.XMax + 0.1
    lines = []
    for k in range(1, n + 1):
        # anchor point on the fitted segment
        mid_x = x_lo + k * step
        mid_y = a * mid_x + b
        left_y = perp_slope * (left_x - mid_x) + mid_y
        right_y = perp_slope * (right_x - mid_x) + mid_y
        wkt = ('LINESTRING (' + str(left_x) + ' ' + str(left_y) + ', ' +
               str(right_x) + ' ' + str(right_y) + ')')
        lines.append(arcpy.FromWKT(wkt))
    return lines
def download_shp_file(request):
    """Django view: turn POSTed boundary rings into a polygon and save it as
    a per-city shapefile (unless one already exists).

    POST params: ``boundaries`` -- rings separated by "*&", points separated
    by ";", coordinates separated by ", "; ``city`` -- output file name.
    Always responds with {"status": "0"}.
    """
    boundaries = request.POST.get("boundaries")
    boundaries = boundaries.strip("*&")
    city = request.POST.get("city")
    arcpy.env.workspace = "temp"
    wkt = "POLYGON("
    for boundary in boundaries.split("*&"):
        wkt = wkt + "("
        wkt = wkt + boundary.replace(", ", " ")
        # ";" separates points within a ring -> WKT comma
        wkt = wkt.replace(";", ",")
        wkt = wkt.strip(",")
        wkt = wkt + "),"
    wkt = wkt.strip(",") + ")"
    print(wkt)
    polygons = arcpy.FromWKT(wkt)
    # only write the shapefile once per city
    if not arcpy.Exists(r'G:\xin.data\spiders_data\hbs' + "\\" + city + ".shp"):
        arcpy.CopyFeatures_management(
            polygons, r'G:\xin.data\spiders_data\hbs' + "\\" + city + ".shp")
    result = {"status": "0"}
    return HttpResponse(json.dumps(result), content_type="application/json")
def insert_into_stations(shp, workspace, query_item):
    """Append every station of one crawled bus line to shapefile ``shp``,
    then mark the source row as processed in MySQL.

    :param shp: target shapefile path
    :param workspace: arcpy workspace to use
    :param query_item: (line_uid, line_name, _, json_page_text) tuple
    """
    l_uid = query_item[0]
    name = query_item[1]
    page = query_item[3]
    json_page = json.loads(page)
    # `in` instead of Py2-only has_key (works on both Python 2 and 3)
    if "content" not in json_page:
        return
    content = json_page["content"]
    item_info = content[0]
    stations = item_info["stations"]
    current_city = json_page["current_city"]
    city = current_city["name"]
    if city is None:
        city = ""
    province = current_city["up_province_name"]
    if province is None:
        province = ""
    # loop-invariant: set the workspace once instead of per station
    arcpy.env.workspace = workspace
    for station in stations:
        station_name = station["name"]
        # geo field looks like "...|x,y;" -- take the last "|" part
        station_geo = station["geo"].strip(";").split("|")[-1].replace(
            ",", " ")
        geo_str = "POINT(%s)" % (station_geo)
        station_uid = station["uid"]
        point = arcpy.FromWKT(geo_str)
        fields = ["UID", "NAME", "PROVINCE", "CITY", "L_UID", "L_NAME",
                  "SHAPE@"]
        values = [
            station_uid, station_name, province, city, l_uid, name, point
        ]
        cursor = arcpy.da.InsertCursor(shp, fields)
        cursor.insertRow(values)
        del cursor
    db = MysqlHandle()
    # BUG FIX: the original referenced an undefined name ``item``; the page
    # row key for this line is ``l_uid``.
    # NOTE(review): string-built SQL with crawled data -- prefer a
    # parameterized query if MysqlHandle supports one.
    sql = 'update baidu_busline_page set status=200 where uid="' + l_uid + '"'
    db.update(sql)
# Script: select features of shp/jog.shp intersecting a hand-entered polygon
# and print three of their attribute fields.
import arcpy

# hard-coded WGS-style polygon used as the selection area
user_input = '''POLYGON ((93.7825988660001 31.0,97.0246144870001 34.4845636900001,101.57533366 35.40660483,105.62041737 31.5697239580001,101.277901035 27.0,98.3333180400001 28.654884226,93.7825988660001 31.0))'''
# build an in-memory feature class holding just the selection polygon
arcpy.CreateFeatureclass_management('in_memory', 'sel', 'POLYGON')
with arcpy.da.InsertCursor('in_memory/sel', ['SHAPE@']) as cursor:
    polygon = arcpy.FromWKT(user_input)
    cursor.insertRow([polygon])
arcpy.MakeFeatureLayer_management('shp/jog.shp', 'jog_lyr')
arcpy.SelectLayerByLocation_management('jog_lyr', 'intersect', 'in_memory/sel')
# arcpy.CopyFeatures_management('jog_lyr', r'C:\Users\Esri\Desktop\select.shp')
# print columns 4-6 of every selected feature
with arcpy.da.SearchCursor('jog_lyr', '*') as cursor:
    for row in cursor:
        print('{0}, {1}, {2}'.format(row[4], row[5], row[6]))
def CreateShape(self, wkt):
    """Convert a WKT string into an arcpy geometry object."""
    import arcpy
    return arcpy.FromWKT(wkt)
# -*- encoding:utf-8 -*-
# Script: build a boundary polygon (outer ring plus one interior ring) from a
# hard-coded WKT string of lon/lat pairs and save it as wuhan.shp.
import arcpy

arcpy.env.workspace = "../temp"
# hard-coded boundary; the second "(...)" group after the outer ring is a hole
wkt = "POLYGON((116.13982 29.88508,116.133298 29.865047,116.140065 29.853345,116.136074 29.841912,116.141487 29.833255,116.139141 29.827136,116.094168 29.804491,116.05429 29.765514,115.969441 29.730778,115.951961 29.7266,115.901597 29.733592,115.848585 29.751062,115.769124 29.800163,115.713168 29.842946,115.689668 29.85718,115.674007 29.857044,115.613694 29.842301,115.586868 29.840578,115.5252 29.843555,115.478962 29.856092,115.459597 29.869491,115.436349 29.905496,115.418339 29.915956,115.414394 29.928174,115.41487 29.950444,115.38586 29.975737,115.368123 30.006744,115.33548 30.036738,115.330484 30.045083,115.328576 30.078342,115.30855 30.117856,115.27418 30.161666,115.23118 30.208822,115.214144 30.218919,115.195419 30.222019,115.173966 30.221622,115.148019 30.211853,115.12642 30.215717,115.103114 30.227566,115.086667 30.242818,115.085262 30.267359,115.096426 30.298311,115.09247 30.330363,115.104584 30.354133,115.094606 30.369622,115.083706 30.363208,115.071347 30.385562,115.055222 30.399363,115.040581 30.405584,115.018059 30.411633,114.957343 30.410559,114.899734 30.41612,114.879463 30.426017,114.853994 30.448418,114.842412 30.468337,114.838641 30.484741,114.84026 30.520581,114.845446 30.546981,114.844419 30.566417,114.815363 30.595449,114.811554 30.606528,114.794198 30.619363,114.802522 30.625508,114.850438 30.631565,114.858127 30.637346,114.85142 30.643258,114.862842 30.639819,114.864011 30.642663,114.860203 30.649033,114.852272 30.647867,114.852101 30.653853,114.835283 30.675505,114.830507 30.676892,114.825561 30.669529,114.819804 30.666575,114.816514 30.668242,114.811099 30.680345,114.817529 30.692339,114.802808 30.705428,114.790826 30.727486,114.810885 30.729886,114.822966 30.736901,114.847936 30.744705,114.877126 30.76586,114.8828 30.776263,114.882513 30.798587,114.88531 30.802074,114.894284 30.802577,114.921991 30.790449,114.939121 30.802092,114.945921 30.792376,114.959368 30.782336,114.963342 30.786666,114.96319 30.795321,114.960114 30.799798,114.951639 30.802635,114.950814 30.808121,114.969669 30.813316,114.97633 30.818195,114.979415 30.813385,114.987561 30.821945,114.989159 30.814371,114.994905 30.815386,114.998204 30.830591,115.007347 30.836425,114.998341 30.851483,115.00346 30.862551,115.025286 30.869018,115.028789 30.862642,115.037593 30.861618,115.035839 30.854631,115.04188 30.850942,115.045785 30.851971,115.05348 30.862421,115.056052 30.858734,115.061764 30.862101,115.059112 30.869897,115.085775 30.888963,115.085336 30.895442,115.066106 30.900638,115.061758 30.910838,115.042152 30.915286,115.027965 30.931268,115.018792 30.92898,115.001472 30.943353,114.992939 30.961009,114.998714 30.973105,114.992991 30.974555,114.988736 30.980992,114.995281 30.990673,114.99451 31.003842,114.989741 31.003911,114.990181 30.998608,114.986967 30.998621,114.99071 31.02373,114.982844 31.033432,114.976799 31.027439,114.983139 31.012355,114.967394 30.992596,114.961789 30.993082,114.955782 31.00162,114.945048 31.003443,114.942708 31.008364,114.934727 31.001685,114.926412 31.020488,114.911053 31.011414,114.888873 31.012258,114.867955 30.998404,114.862745 30.991306,114.880109 30.979102,114.882972 30.971623,114.871909 30.963025,114.866862 30.950256,114.846928 30.956291,114.82872 30.951981,114.831069 30.957608,114.820999 30.962728,114.817705 30.973194,114.810046 30.97664,114.815296 30.982364,114.811771 30.998215,114.809814 31.000226,114.802814 30.99584,114.802259 31.000993,114.791246 31.001181,114.788892 30.991263,114.781402 30.985945,114.778763 30.971567,114.775575 30.969779,114.768857 30.969822,114.771886 30.975968,114.76959 30.986108,114.775284 30.989111,114.767173 30.990936,114.767066 30.995398,114.745638 30.992904,114.743519 30.988484,114.726931 30.983487,114.724253 30.987747,114.721543 30.986954,114.722417 30.982951,114.715469 30.984287,114.7085 30.976746,114.704021 30.964551,114.704513 30.955779,114.713819 30.948315,114.710907 30.9441,114.689074 30.95457,114.672481 30.94021,114.664655 30.938172,114.653631 30.942485,114.647617 30.933368,114.641731 30.931858,114.639359 30.934332,114.630028 30.929696,114.618815 30.932664,114.618511 30.944029,114.614296 30.944745,114.608447 30.964198,114.582758 30.975708,114.577497 30.984266,114.567496 30.985505,114.559948 30.998378,114.549434 30.997934,114.534128 31.017041,114.515958 31.020291,114.505379 31.042626,114.510063 31.051115,114.520831 31.056739,114.520952 31.069604,114.539429 31.100064,114.533888 31.113969,114.539931 31.122675,114.532992 31.128099,114.531053 31.14123,114.524885 31.149985,114.498782 31.15543,114.494142 31.163872,114.476393 31.176468,114.466775 31.174727,114.464416 31.189197,114.47743 31.190161,114.478975 31.195288,114.455993 31.202376,114.458803 31.209721,114.448437 31.213001,114.44016 31.211763,114.42829 31.218499,114.411223 31.235328,114.407847 31.255088,114.419227 31.267522,114.445372 31.313759,114.446905 31.330576,114.442649 31.344421,114.448519 31.348893,114.442387 31.353385,114.432527 31.354634,114.427524 31.362635,114.434872 31.371223,114.434845 31.384979,114.443762 31.389819,114.454063 31.389234,114.458361 31.403237,114.454526 31.414458,114.456414 31.420686,114.466922 31.416097,114.465453 31.413397,114.469619 31.410747,114.484346 31.411855,114.4868 31.415353,114.487982 31.425088,114.481745 31.427521,114.480298 31.433099,114.484099 31.45201,114.493771 31.464832,114.491864 31.472315,114.500945 31.473035,114.506337 31.464248,114.518121 31.468019,114.511651 31.49118,114.504449 31.503365,114.510851 31.52703,114.516487 31.530359,114.519307 31.542013,114.526502 31.546997,114.526771 31.564185,114.53279 31.555897,114.549104 31.554274,114.55665 31.558135,114.564285 31.574856,114.570682 31.56126,114.579807 31.55948,114.582312 31.565415,114.600178 31.566877,114.603141 31.579745,114.617239 31.58251,114.62747 31.590941,114.651282 31.585372,114.651766 31.5791,114.661541 31.575266,114.680052 31.548028,114.706204 31.549034,114.702325 31.529738,114.723111 31.531185,114.727399 31.522276,114.746222 31.531099,114.75576 31.52621,114.761336 31.532734,114.777091 31.524576,114.790754 31.530455,114.791239 31.523687,114.779621 31.517887,114.78021 31.508141,114.789299 31.502798,114.788838 31.490142,114.797717 31.484076,114.819982 31.484091,114.82887 31.477643,114.838684 31.463435,114.844162 31.464421,114.851908 31.471893,114.856639 31.482785,114.863192 31.486108,114.879829 31.484074,114.882654 31.476262,114.891734 31.47434,114.899298 31.479691,114.914412 31.481529,114.929275 31.488296,114.931868 31.48322,114.945258 31.475494,114.952793 31.489237,114.967484 31.501594,114.978556 31.506294,114.984929 31.502176,114.988015 31.485051,114.997985 31.485086,114.999473 31.477295,115.00308 31.477081,115.007129 31.49153,115.016765 31.494657,115.010039 31.502629,115.00923 31.509013,115.016027 31.5132,115.029032 31.536241,115.03494 31.534701,115.034802 31.529257,115.043322 31.519877,115.055471 31.525102,115.068594 31.518233,115.101459 31.5178,115.101209 31.527958,115.115479 31.533358,115.119238 31.540576,115.109011 31.548142,115.115812 31.559081,115.10863 31.564921,115.107938 31.573351,115.134895 31.591423,115.127197 31.604829,115.149195 31.612496,115.175791 31.611138,115.182675 31.60627,115.185719 31.587897,115.199406 31.578925,115.199109 31.57197,115.214844 31.572164,115.219599 31.561897,115.242319 31.565029,115.236272 31.542118,115.224075 31.522845,115.22649 31.510471,115.219119 31.49993,115.224538 31.476021,115.214123 31.453465,115.226173 31.43265,115.23831 31.433767,115.258035 31.42754,115.264992 31.412741,115.256552 31.400027,115.266247 31.395099,115.286959 31.405837,115.30062 31.402019,115.306333 31.390535,115.315443 31.389411,115.321471 31.39062,115.342141 31.40745,115.359597 31.404823,115.37775 31.409837,115.397032 31.398586,115.396406 31.390501,115.388797 31.381075,115.380874 31.377444,115.386836 31.36816,115.386777 31.362071,115.377385 31.355558,115.405442 31.348323,115.412118 31.343414,115.413555 31.348049,115.42653 31.356402,115.429582 31.352893,115.438153 31.355854,115.447926 31.352476,115.454686 31.322173,115.461217 31.319593,115.466916 31.326425,115.468671 31.323056,115.462004 31.288572,115.472822 31.282094,115.475243 31.27438,115.480474 31.270321,115.496113 31.272547,115.507352 31.267582,115.515038 31.27479,115.525659 31.269228,115.546605 31.235028,115.542372 31.202524,115.550979 31.19715,115.553844 31.188476,115.562147 31.179989,115.562634 31.169448,115.579972 31.160191,115.590623 31.147888,115.607905 31.1626,115.610421 31.173625,115.622233 31.181662,115.623023 31.186983,115.641341 31.189969,115.650631 31.213164,115.668037 31.217624,115.696735 31.208499,115.704971 31.209038,115.717648 31.20368,115.718586 31.188251,115.737536 31.181124,115.741155 31.160704,115.752689 31.157327,115.76592 31.141429,115.777535 31.112688,115.780448 31.111422,115.788921 31.116356,115.794354 31.12673,115.801647 31.131465,115.826943 31.129911,115.843701 31.132265,115.858299 31.143827,115.860093 31.149592,115.886448 31.152017,115.892995 31.139171,115.902002 31.134273,115.895242 31.123793,115.895955 31.111487,115.900378 31.103587,115.908972 31.097068,115.915874 31.09731,115.928161 31.091494,115.929075 31.083006,115.951301 31.070989,115.951618 31.062845,115.945969 31.051767,115.94851 31.046625,115.95869 31.048855,115.972275 31.043132,115.988653 31.046029,116.003491 31.038258,116.009739 31.039272,116.017746 31.02034,116.027375 31.019824,116.035744 31.014776,116.04488 31.021074,116.063983 31.018804,116.065558 31.000886,116.077162 30.962304,116.047004 30.964629,116.039827 30.957333,116.029978 30.959531,116.015379 30.955626,115.995908 30.940526,115.981332 30.936904,115.975923 30.925561,115.959439 30.918586,115.954485 30.909704,115.941312 30.909209,115.934947 30.893816,115.92591 30.891918,115.91952 30.894103,115.894965 30.885388,115.881032 30.873533,115.871126 30.869971,115.869591 30.861065,115.857126 30.849658,115.852721 30.840682,115.854492 30.833435,115.869031 30.821363,115.868285 30.81373,115.872309 30.806709,115.863476 30.792441,115.865825 30.788719,115.877422 30.786234,115.875672 30.781676,115.857662 30.771593,115.857064 30.762417,115.85402 30.761352,115.840658 30.762252,115.82886 30.756294,115.816104 30.765256,115.792347 30.763,115.771688 30.715526,115.772657 30.700282,115.767745 30.694612,115.768171 30.690951,115.777794 30.683605,115.780638 30.674777,115.801175 30.665835,115.804294 30.656048,115.817896 30.64497,115.820385 30.630732,115.813087 30.621712,115.824007 30.604475,115.829461 30.602055,115.833386 30.611926,115.854061 30.607463,115.862673 30.595031,115.880346 30.588913,115.884593 30.583824,115.880954 30.574867,115.888666 30.564918,115.89181 30.549389,115.904693 30.543706,115.916036 30.525154,115.926697 30.523412,115.915055 30.512749,115.903385 30.488223,115.906101 30.472615,115.899297 30.465305,115.900062 30.458558,115.922408 30.448996,115.927994 30.43481,115.951524 30.431067,115.94572 30.424197,115.93647 30.426842,115.929185 30.424607,115.917671 30.407157,115.906027 30.403756,115.890743 30.387713,115.90073 30.363726,115.9078 30.357794,115.912463 30.347674,115.91972 30.346279,115.924055 30.339604,115.92052 30.323124,115.912347 30.314241,115.935972 30.307002,115.945403 30.316165,115.948881 30.312347,115.958854 30.312169,115.965677 30.30641,115.979242 30.304494,115.994949 30.292735,115.990897 30.283856,115.997086 30.277172,115.992685 30.27178,115.994357 30.267901,116.00258 30.261562,116.016766 30.259885,116.021479 30.251972,116.019792 30.24688,116.0345 30.24611,116.035058 30.239861,116.049434 30.232436,116.054099 30.225438,116.063322 30.225327,116.071754 30.211239,116.067526 30.207052,116.072542 30.200057,116.062027 30.192428,116.061776 30.188938,116.067774 30.159359,116.076815 30.138369,116.094534 30.131912,116.090472 30.120182,116.095002 30.110338,116.087375 30.096647,116.09333 30.083696,116.08326 30.071534,116.085095 30.061653,116.098449 30.037232,116.08855 30.031404,116.090631 30.024616,116.084122 30.000224,116.086296 29.986582,116.07991 29.971681,116.132426 29.90812,116.133564 29.890932,116.13982 29.88508),(115.136881 30.223666,115.156066 30.243421,115.152526 30.248852,115.141529 30.253336,115.136949 30.249692,115.121486 30.263801,115.118886 30.251386,115.100492 30.240335,115.124195 30.223945,115.136881 30.223666))"
polygons = arcpy.FromWKT(wkt)
arcpy.CopyFeatures_management(polygons, 'wuhan.shp')
def PolylintToPolygon(WKT):
    """Wrap a bare ring coordinate list in MULTIPOLYGON syntax and return
    the resulting arcpy geometry."""
    return arcpy.FromWKT("MULTIPOLYGON (((%s)))" % WKT)
def createcurves(layer, jsonwkt, angle_th, radius_th):
    """Round off sharp single-vertex corners in ``layer`` geometries.

    In 'JSON' mode corners are replaced with true circular-arc segments
    (ESRI JSON ``curvePaths``); in 'WKT' mode arcs are approximated with
    densified straight segments, since WKT here carries no curve types.

    :param layer: feature layer whose shapes are rewritten in place
    :param jsonwkt: u'JSON' or 'WKT' -- which geometry representation to use
    :param angle_th: corners with turn angle within (angle_th, 180-angle_th)
        degrees are rounded; others are kept as-is
    :param radius_th: target fillet radius (shrunk when the adjacent
        segments are too short to fit it)

    NOTE(review): depends on module-level ``WEBMERC`` and on helpers
    ``findsinglevertex``, ``getjsonpoint``, ``getanglebetweenvectors``,
    ``getpointonvector``, ``getlineequation``, ``getlinesintersection``,
    ``readwkt``, ``writewkt`` defined elsewhere in this file.
    """
    # vertex numbers (per OID) that are eligible for rounding
    single = findsinglevertex(layer)
    if jsonwkt == u'JSON':
        # dictionary {OID: ESRI geoJSON}
        shape_as_json = {
            row[0]: json.loads(row[1])
            for row in arcpy.da.SearchCursor(layer, ['OID@', 'SHAPE@JSON'],
                                             spatial_reference=WEBMERC)
        }
        # field to omit when looking for the geometry key
        service_fields = ['hasZ', 'hasM', 'spatialReference']
        for oid in shape_as_json:
            # first - find geometry key in dictionary
            keys = [k for k in shape_as_json[oid] if k not in service_fields]
            geom_key = keys[0]
            geom = shape_as_json[oid][geom_key]
            newgeom = []
            trigger = 0  # set to 1 once any arc has been written
            pnt_no = 0   # running vertex counter matched against single[oid]
            for part in geom:
                newpart = []
                for i, pnt in enumerate(part):
                    if i in [0, len(part) - 1] or pnt_no not in single[oid]:
                        # if it's the first, last or non-single point -
                        # write it down and go further
                        newpart.append(pnt)
                    else:
                        # read 3 points and their types from ESRI geoJSON
                        pnt_prev, type_prev = getjsonpoint(part[i - 1])
                        pnt_next, type_next = getjsonpoint(part[i + 1])
                        pnt_cur, type_cur = getjsonpoint(pnt)
                        if u'curve' in [type_next, type_cur]:  # +type_prev
                            # if current or next point are curves -
                            # write it down, go further
                            newpart.append(pnt)
                            trigger = 1
                            new_geom_key = u'curvePaths'
                            old_geom_key = geom_key
                        else:
                            angle = getanglebetweenvectors(
                                pnt_prev, pnt_cur, pnt_next)
                            if angle_th < abs(angle) < 180 - angle_th:
                                # squared lengths of the two adjacent segments
                                d1 = (pnt_cur[0] - pnt_prev[0])**2 + (
                                    pnt_cur[1] - pnt_prev[1])**2
                                d2 = (pnt_cur[0] - pnt_next[0])**2 + (
                                    pnt_cur[1] - pnt_next[1])**2
                                # shrink the fillet radius if a segment is
                                # shorter than twice the requested radius
                                if d1 < 4 * radius_th**2 or d2 < 4 * radius_th**2:
                                    radius_th_w = min([d1, d2])**0.5 / 2
                                else:
                                    radius_th_w = radius_th
                                # tangent points on both segments
                                new_prev = getpointonvector(
                                    pnt_cur, pnt_prev, radius_th_w)
                                new_next = getpointonvector(
                                    pnt_cur, pnt_next, radius_th_w)
                                dx1 = pnt_cur[0] - pnt_prev[0]
                                dy1 = pnt_cur[1] - pnt_prev[1]
                                dx2 = pnt_next[0] - pnt_cur[0]
                                dy2 = pnt_next[1] - pnt_cur[1]
                                # helper points defining the perpendiculars
                                fake_prev = [
                                    new_prev[0] + dy1, new_prev[1] - dx1
                                ]
                                fake_next = [
                                    new_next[0] + dy2, new_next[1] - dx2
                                ]
                                radius_prev = getlineequation(
                                    new_prev[0], new_prev[1], fake_prev[0],
                                    fake_prev[1])
                                radius_next = getlineequation(
                                    new_next[0], new_next[1], fake_next[0],
                                    fake_next[1])
                                # arc center = intersection of the two
                                # perpendiculars to the tangent points
                                center = getlinesintersection(
                                    radius_prev[0], radius_prev[1],
                                    radius_prev[2], radius_next[0],
                                    radius_next[1], radius_next[2])
                                radius_act = (
                                    (center[0] - new_prev[0])**2 +
                                    (center[1] - new_prev[1])**2)**0.5
                                # midpoint of the arc, on the ray towards the
                                # original corner
                                arc_center = getpointonvector(
                                    center, pnt_cur, radius_act)
                                # ESRI JSON circular-arc segment
                                arc = {u'c': [new_next, arc_center]}
                                newpart.append(new_prev)
                                newpart.append(arc)
                                new_geom_key = u'curvePaths'
                                old_geom_key = geom_key
                                trigger = 1
                            else:
                                newpart.append(pnt_cur)
                    pnt_no += 1
                newgeom.append(newpart)
            if trigger == 1:
                # swap the straight-path key for curvePaths and rebuild the
                # arcpy geometry from the edited JSON
                del shape_as_json[oid][old_geom_key]
                shape_as_json[oid][new_geom_key] = newgeom
                shape_as_json[oid]['shape'] = arcpy.AsShape(
                    shape_as_json[oid], True)
        # NOTE(review): rows whose trigger stayed 0 have no 'shape' entry --
        # presumably every feature has at least one rounded corner; confirm.
        with arcpy.da.UpdateCursor(layer, ['SHAPE@', 'OID@'],
                                   spatial_reference=WEBMERC) as uc:
            for row in uc:
                row[0] = shape_as_json[row[1]]['shape']
                uc.updateRow(row)
    ####################################################################################################################
    elif jsonwkt == 'WKT':
        # dictionary {OID: parsed WKT structure from readwkt()}
        shape_as_wkt = {
            row[0]: readwkt(row[1])
            for row in arcpy.da.SearchCursor(layer, ['OID@', 'SHAPE@WKT'],
                                             spatial_reference=WEBMERC)
        }
        for oid in shape_as_wkt:
            try:
                keys = [k for k in shape_as_wkt[oid]]
                geom_key = keys[0]
                geom = shape_as_wkt[oid][geom_key]
                newgeom = []
                pnt_no = 0
                for part in geom:
                    newpart = []
                    for i, pnt in enumerate(part):
                        if i in [0, len(part) - 1
                                 ] or pnt_no not in single[oid]:
                            # if it's the first, last or non-single point -
                            # write it down and go further
                            newpart.append(pnt)
                        else:
                            # WKT points are plain [x, y] pairs - no curve
                            # types to inspect
                            pnt_prev = part[i - 1]
                            pnt_next = part[i + 1]
                            pnt_cur = pnt
                            angle = getanglebetweenvectors(
                                pnt_prev, pnt_cur, pnt_next)
                            if angle_th < abs(angle) < 180 - angle_th:
                                d1 = (pnt_cur[0] - pnt_prev[0])**2 + (
                                    pnt_cur[1] - pnt_prev[1])**2
                                d2 = (pnt_cur[0] - pnt_next[0])**2 + (
                                    pnt_cur[1] - pnt_next[1])**2
                                if d1 < 4 * radius_th**2 or d2 < 4 * radius_th**2:
                                    radius_th_w = min([d1, d2])**0.5 / 2
                                else:
                                    radius_th_w = radius_th
                                new_prev = getpointonvector(
                                    pnt_cur, pnt_prev, radius_th_w)
                                new_next = getpointonvector(
                                    pnt_cur, pnt_next, radius_th_w)
                                dx1 = pnt_cur[0] - pnt_prev[0]
                                dy1 = pnt_cur[1] - pnt_prev[1]
                                dx2 = pnt_next[0] - pnt_cur[0]
                                dy2 = pnt_next[1] - pnt_cur[1]
                                fake_prev = [
                                    new_prev[0] + dy1, new_prev[1] - dx1
                                ]
                                fake_next = [
                                    new_next[0] + dy2, new_next[1] - dx2
                                ]
                                radius_prev = getlineequation(
                                    new_prev[0], new_prev[1], fake_prev[0],
                                    fake_prev[1])
                                radius_next = getlineequation(
                                    new_next[0], new_next[1], fake_next[0],
                                    fake_next[1])
                                center = getlinesintersection(
                                    radius_prev[0], radius_prev[1],
                                    radius_prev[2], radius_next[0],
                                    radius_next[1], radius_next[2])
                                radius_act = (
                                    (center[0] - new_prev[0])**2 +
                                    (center[1] - new_prev[1])**2)**0.5
                                base_vector = [
                                    center[0] - new_prev[0],
                                    center[1] - new_prev[1]
                                ]
                                # chord step chosen so the densified polyline
                                # deviates from the true arc by <= 0.01
                                step_angle = math.degrees(2 * math.acos(
                                    (radius_act - 0.01) / radius_act))
                                step_angle *= math.copysign(1, angle)
                                quant = int(round(abs(angle / step_angle)))
                                newpart.append(new_prev)
                                if quant > 1:
                                    true_step = angle / float(quant)
                                    # rotate base_vector around the center to
                                    # generate intermediate arc points
                                    for j in range(quant):
                                        sin_angle = math.sin(-math.radians(
                                            (j + 1) * true_step))
                                        cos_angle = math.cos(-math.radians(
                                            (j + 1) * true_step))
                                        x = base_vector[0]
                                        y = base_vector[1]
                                        vect = [
                                            x * cos_angle + y * sin_angle,
                                            -x * sin_angle + y * cos_angle
                                        ]
                                        vect_x = center[0] - vect[0]
                                        vect_y = center[1] - vect[1]
                                        newpart.append([vect_x, vect_y])
                                newpart.append(new_next)
                            else:
                                newpart.append(pnt_cur)
                        pnt_no += 1
                    newgeom.append(newpart)
                shape_as_wkt[oid][geom_key] = newgeom
                wkt_string = writewkt(
                    [geom_key, shape_as_wkt[oid][geom_key]])
                shape_as_wkt[oid]['shape'] = arcpy.FromWKT(
                    wkt_string, WEBMERC)
            except Exception as err:
                # NOTE(review): a failed OID gets no 'shape' entry, which
                # raises KeyError in the update loop below -- confirm intended
                arcpy.AddMessage(u'Error details: {0}'.format(err.args))
        with arcpy.da.UpdateCursor(layer, ['SHAPE@', 'OID@'],
                                   spatial_reference=WEBMERC) as uc:
            for row in uc:
                row[0] = shape_as_wkt[row[1]]['shape']
                uc.updateRow(row)
def createcurves(layer, jsonwkt, angle_th, radius_th):
    """Round off "single" corner vertices of *layer* with fillet arcs.

    Parameters:
        layer: input feature layer (shapeType 'Polyline' or 'Polygon') whose
            geometry is rewritten in place through an UpdateCursor.
        jsonwkt: u'JSON' to write true arc segments (ESRI 'curvePaths' JSON);
            any other value approximates each corner with a densified run of
            straight segments computed from the WKT form.
        angle_th: turn-angle threshold in degrees; corners flatter than this
            (or within this of 180) are left untouched. Clamped to >= 1.
        radius_th: desired fillet radius in layer units; shrunk per corner
            when an adjacent segment is too short to host it.

    NOTE(review): assumes module-level WEBMERC spatial reference and helpers
    getjsonpoint / getanglebetweenvectors / getpointonvector / getlineequation /
    getlinesintersection; `single` presumably maps OID -> set of vertex
    numbers eligible for rounding (from find_functions.findsinglevertex) --
    confirm against that module.
    NOTE(review): shape_as_json[oid]['shape'] is only assigned when
    trigger == 1, yet the final UpdateCursor loop reads it for every row --
    looks like features with no rewritten corner would raise KeyError;
    verify against findsinglevertex's guarantees.
    """
    from find_functions import findsinglevertex
    # Clamp degenerate thresholds: never accept less than 1 degree.
    angle_th = 1 if angle_th < 1 else angle_th
    shapetype = arcpy.Describe(layer).shapeType
    single = findsinglevertex(layer)
    if jsonwkt == u'JSON':
        # dictionary {OID: ESRI geoJSON}
        shape_as_json = {
            row[0]: json.loads(row[1])
            for row in arcpy.da.SearchCursor(layer, ['OID@', 'SHAPE@JSON'],
                                             spatial_reference=WEBMERC)
        }
    else:
        # WKT mode still edits the JSON form: round-trip WKT -> geometry -> JSON.
        shape_as_json = {
            row[0]: json.loads(arcpy.FromWKT(row[1]).JSON)
            for row in arcpy.da.SearchCursor(layer, ['OID@', 'SHAPE@WKT'],
                                             spatial_reference=WEBMERC)
        }
    # service fields to omit when searching for the geometry key
    service_fields = ['hasZ', 'hasM', 'spatialReference']
    for oid in shape_as_json:
        # first - find the geometry key in the dictionary (e.g. 'paths'/'rings')
        keys = [k for k in shape_as_json[oid] if k not in service_fields]
        geom_key = keys[0]
        geom = shape_as_json[oid][geom_key]
        newgeom = []
        trigger = 0  # becomes 1 once any corner is actually rewritten
        pnt_no = 0   # running vertex number, matched against single[oid]
        for part in geom:
            # Polylines keep both endpoints fixed; polygon rings only the
            # (duplicated) closing vertex.
            fixed_points = [0, len(part) - 1] if shapetype == 'Polyline' else [len(part) - 1]
            newpart = []
            for i, pnt in enumerate(part):
                if i in fixed_points or pnt_no not in single[oid]:
                    # if it's a fixed or non-single point - write it down and go further
                    newpart.append(pnt)
                else:
                    # read 3 points and their types from ESRI geoJSON
                    if i == 0 and shapetype == 'Polygon':
                        # ring start: the previous vertex is the one before the
                        # duplicated closing point
                        pnt_prev, type_prev = getjsonpoint(part[i - 2])
                    else:
                        pnt_prev, type_prev = getjsonpoint(part[i - 1])
                    pnt_next, type_next = getjsonpoint(part[i + 1])
                    pnt_cur, type_cur = getjsonpoint(pnt)
                    # TODO
                    # check polygons with curves
                    if u'curve' in [type_next, type_cur]:  # +type_prev
                        # if the current or next point is already a curve -
                        # write it down unchanged and go further
                        newpart.append(pnt)
                        trigger = 1
                        new_geom_key = u'curvePaths'
                        old_geom_key = geom_key
                    else:
                        # calculate the turn angle and compare it with limits
                        angle = getanglebetweenvectors(pnt_prev, pnt_cur, pnt_next)
                        if angle_th < abs(angle) < 180 - angle_th:
                            # squared lengths of the two adjacent segments
                            d1 = (pnt_cur[0] - pnt_prev[0])**2 + (
                                pnt_cur[1] - pnt_prev[1])**2
                            d2 = (pnt_cur[0] - pnt_next[0])**2 + (
                                pnt_cur[1] - pnt_next[1])**2
                            # shrink the working radius if either segment is
                            # shorter than twice the requested radius
                            if d1 < 4 * radius_th**2 or d2 < 4 * radius_th**2:
                                radius_th_w = min([d1, d2])**0.5 / 2
                            else:
                                radius_th_w = radius_th
                            # tangent points on the previous and next sides
                            new_prev = getpointonvector(pnt_cur, pnt_prev, radius_th_w)
                            new_next = getpointonvector(pnt_cur, pnt_next, radius_th_w)
                            # segment direction deltas
                            dx1 = pnt_cur[0] - pnt_prev[0]
                            dy1 = pnt_cur[1] - pnt_prev[1]
                            dx2 = pnt_next[0] - pnt_cur[0]
                            dy2 = pnt_next[1] - pnt_cur[1]
                            # helper points on the perpendiculars to each side
                            fake_prev = [new_prev[0] + dy1, new_prev[1] - dx1]
                            fake_next = [new_next[0] + dy2, new_next[1] - dx2]
                            # line equations of the two perpendiculars
                            radius_prev = getlineequation(
                                new_prev[0], new_prev[1], fake_prev[0], fake_prev[1])
                            radius_next = getlineequation(
                                new_next[0], new_next[1], fake_next[0], fake_next[1])
                            # fillet circle center = intersection of perpendiculars
                            center = getlinesintersection(
                                radius_prev[0], radius_prev[1], radius_prev[2],
                                radius_next[0], radius_next[1], radius_next[2])
                            # actual fillet radius
                            radius_act = ((center[0] - new_prev[0])**2 +
                                          (center[1] - new_prev[1])**2)**0.5
                            if jsonwkt == 'JSON':
                                # TODO
                                # check polygons with curves
                                # get the midpoint of the new arc
                                arc_center = getpointonvector(center, pnt_cur, radius_act)
                                # write it as (last point of arc, point on arc)
                                arc = {u'c': [new_next, arc_center]}
                                newpart.append(new_prev)
                                newpart.append(arc)
                                # remember to rewrite the geometry key
                                new_geom_key = u'curvePaths'
                                old_geom_key = geom_key
                                trigger = 1
                            else:
                                # vector from the center to the first arc point
                                base_vector = [
                                    center[0] - new_prev[0],
                                    center[1] - new_prev[1]
                                ]
                                # angular step giving ~0.01-unit chord sag
                                step_angle = math.degrees(2 * math.acos(
                                    (radius_act - 0.01) / radius_act))
                                step_angle *= math.copysign(1, angle)
                                # number of intermediate points
                                quant = int(round(abs(angle / step_angle)))
                                newpart.append(new_prev)
                                # densify only when more than one step is needed
                                if quant > 1:
                                    true_step = angle / float(quant)
                                    for j in range(quant):
                                        # rotate base_vector by -(j+1)*true_step
                                        sin_angle = math.sin(-math.radians(
                                            (j + 1) * true_step))
                                        cos_angle = math.cos(-math.radians(
                                            (j + 1) * true_step))
                                        x = base_vector[0]
                                        y = base_vector[1]
                                        vect = [
                                            x * cos_angle + y * sin_angle,
                                            -x * sin_angle + y * cos_angle
                                        ]
                                        vect_x = center[0] - vect[0]
                                        vect_y = center[1] - vect[1]
                                        newpart.append([vect_x, vect_y])
                                newpart.append(new_next)
                                trigger = 1
                        else:
                            # turn too flat/sharp - keep the original vertex
                            newpart.append(pnt_cur)
                pnt_no += 1
            # re-close the ring: last point must equal the (new) first point
            if shapetype == 'Polygon':
                newpart[-1] = newpart[0]
            # append the rewritten part to the geometry list
            newgeom.append(newpart)
        # rewrite geometry (and the key, when arcs were introduced)
        if trigger == 1:
            if jsonwkt == 'JSON':
                del shape_as_json[oid][old_geom_key]
                shape_as_json[oid][new_geom_key] = newgeom
            else:
                shape_as_json[oid][geom_key] = newgeom
            shape_as_json[oid]['shape'] = arcpy.AsShape(shape_as_json[oid], True)
    # push the rebuilt geometries back into the layer
    with arcpy.da.UpdateCursor(layer, ['SHAPE@', 'OID@'],
                               spatial_reference=WEBMERC) as uc:
        for row in uc:
            row[0] = shape_as_json[row[1]]['shape']
            uc.updateRow(row)
"""Demo snippet: build a point geometry from WKT.

Run this code from within Arcgis Desktop.
"""
import arcpy

# Parse the WKT string into an arcpy Geometry object.
point_geometry = arcpy.FromWKT("POINT (0 0)")

# Materialize the geometry as an in-memory feature class so it appears on the map.
arcpy.CopyFeatures_management(point_geometry, r"in_memory\wkt_point")
# Convert every WKT string stored in a table into a geometry and copy the
# collected geometries to a feature class.
#
# NOTE(review): `root.baselink` (input table) and `root.shplink` (output
# feature class path) must be defined by the surrounding script -- confirm.

# Field names in the source table: the WKT text and the unique ID.
wkt_field = "wkt"
id_field = "id"

# Geometries accumulated from the table rows.
geometries = []

# Interpret all coordinates in a known EPSG code (2177).
spatial_ref = arcpy.SpatialReference(2177)

# Walk the table with the classic (pre-arcpy.da) cursor protocol.
table_cursor = arcpy.SearchCursor(root.baselink)
record = table_cursor.next()
print('lama')
while record:
    print(record.getValue(id_field))
    wkt_text = record.getValue(wkt_field)
    # This is the part that converts the WKT string to geometry using the
    # defined spatial reference.
    geom = arcpy.FromWKT(wkt_text, spatial_ref)
    geometries.append(geom)
    record = table_cursor.next()

# Copy all geometries in the list to a feature class.
arcpy.CopyFeatures_management(geometries, root.shplink)

# Clean up.
del record, geom, wkt_text, root.baselink, wkt_field, geometries, table_cursor
def mainFunction(
        wfsURL, wfsDataID, dataType, extent, lastUpdateFile,
        changesetDatasetID, targetDatasetID, wfsDownloadType, outputWorkspace,
        datasetName
):  # Get parameters from ArcGIS Desktop tool by seperating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    """Download a WFS dataset (or changeset) and load it into a workspace.

    Parameters:
        wfsURL: base WFS service URL (query parameters are appended).
        wfsDataID: WFS type name; a name containing "changeset" triggers
            incremental (changeset) behaviour.
        dataType: "layer" (spatial) or "table" (non-spatial).
        extent: optional bounding box string appended as &bbox= for layers.
        lastUpdateFile: JSON file holding the "lastUpdated" timestamp used to
            build changeset requests; also rewritten via updateChangesConfig.
        changesetDatasetID / targetDatasetID: ID fields handed to
            applyChangeset when loading a changeset.
        wfsDownloadType: "shape-zip", "csv" or a JSON output format.
        outputWorkspace / datasetName: destination of the loaded data.

    NOTE(review): relies on module-level names (enableLogging, logger,
    logMessage, sendErrorEmail, sendEmail, output, urllib2, applyChangeset,
    updateChangesConfig) defined elsewhere in this file -- confirm.

    Fixes vs. previous revision:
      * the WFS request was issued twice (a stray urllib2.urlopen call whose
        response was discarded); it is now made exactly once.
      * blank-geometry fallback used "if point / if polygon / else polyline",
        so a blank point geometry was immediately overwritten by the else
        branch; the chain is now if/elif/else (two occurrences).
      * locals no longer shadow the builtins `file` and `zip`.
    """
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Check the URL parameters
        if "?" in wfsURL:
            # If first parameter already provided
            firstParameter = "&"
        else:
            firstParameter = "?"

        # Setup the request URL
        requestURL = wfsURL + firstParameter + "SERVICE=WFS&REQUEST=GetFeature&TYPENAME=" + wfsDataID

        # If setting an extent for the data
        if ((len(extent) > 0) and (dataType.lower() == "layer")):
            # Add the bounding box to the request
            requestURL = requestURL + "&bbox=" + str(extent)

        # Set the spatial reference
        if (dataType.lower() == "layer"):
            requestURL = requestURL + "&srsName=EPSG:2193"

        # If a changeset is being requested
        if ("changeset" in wfsDataID.lower()) and (lastUpdateFile):
            # Maximum number of days between a changeset to allow
            maxDaysChange = 30

            # Get the last updated date
            with io.open(lastUpdateFile) as jsonFile:
                jsonConfig = json.load(jsonFile)
                lastUpdateDate = datetime.datetime.strptime(
                    jsonConfig["lastUpdated"], "%Y-%m-%dT%H:%M:%S")

            # Get the days between now and the last update
            lastUpdateChange = datetime.datetime.now() - lastUpdateDate
            arcpy.AddMessage("Last update was " + str(lastUpdateChange.days) +
                             " days ago...")

            # If last update date is less than the max days change variable
            if (lastUpdateChange.days <= maxDaysChange):
                # Get the current date as a string
                currentDate = datetime.datetime.now().strftime(
                    "%Y-%m-%dT%H:%M:%S")
                lastUpdateDate = lastUpdateDate.strftime("%Y-%m-%dT%H:%M:%S")
            # Last update date more than the max days change variable
            else:
                # Set current date to be the max days change variable more than the last update date
                currentDate = (lastUpdateDate + datetime.timedelta(
                    days=maxDaysChange)).strftime("%Y-%m-%dT%H:%M:%S")
                lastUpdateDate = lastUpdateDate.strftime("%Y-%m-%dT%H:%M:%S")

            # Setup the to and from dates for the changeset
            requestURL = requestURL + "&viewparams=from:" + str(
                lastUpdateDate) + ";to:" + str(currentDate)

        # Set the output request parameter
        requestURL = requestURL + "&outputformat=" + wfsDownloadType

        # -------------------- Downloading Data --------------------
        arcpy.AddMessage("WFS request made - " + requestURL)
        # Make the request exactly once (previously an extra urlopen call
        # issued the same request twice and threw away the first response).
        response = urllib2.urlopen(requestURL)

        # Shape file
        if (wfsDownloadType.lower() == "shape-zip"):
            fileType = ".zip"
        # CSV
        elif (wfsDownloadType.lower() == "csv"):
            fileType = ".csv"
        # JSON
        else:
            # Python version check
            if sys.version_info[0] >= 3:
                # Python 3.x - read json response
                JSONData = json.loads(response.read().decode('utf8'))
            else:
                # Python 2.x - read json response
                JSONData = json.loads(response.read())

        # Shape file or CSV
        if ((wfsDownloadType.lower() == "shape-zip") or (wfsDownloadType.lower() == "csv")):
            # Download the data
            arcpy.AddMessage("Downloading data...")
            fileChunk = 16 * 1024
            downloadedFile = os.path.join(
                arcpy.env.scratchFolder, "Data-" + str(uuid.uuid1()) + fileType)
            # Stream the body to disk in chunks; `with` closes the handle.
            with open(downloadedFile, 'wb') as downloadFile:
                downloadCount = 0
                while True:
                    chunk = response.read(fileChunk)
                    # If data size is small
                    if ((downloadCount == 0) and (len(chunk) < 1000)):
                        # Log error and end download
                        arcpy.AddWarning("No data returned...")
                        if (enableLogging == "true"):
                            logger.warning("No data returned...")
                        # If a changeset
                        if ("changeset" in wfsDataID.lower()) and (lastUpdateFile):
                            # Update changes config file
                            updateChangesConfig(lastUpdateFile, str(currentDate))
                        sys.exit()
                    if not chunk:
                        break
                    # Write chunk to output file
                    downloadFile.write(chunk)
                    downloadCount = downloadCount + 1
            arcpy.AddMessage("Downloaded to " + downloadedFile + "...")

            # -------------------- Extracting Data --------------------
            # Shape file
            if (wfsDownloadType.lower() == "shape-zip"):
                # Unzip the file to the scratch folder
                arcpy.AddMessage("Extracting zip file...")
                zipFile = zipfile.ZipFile(downloadedFile, mode="r")
                zipFile.extractall(arcpy.env.scratchFolder)

                # Get the extracted shape file (most recently modified)
                extractedShp = max(
                    glob.iglob(str(arcpy.env.scratchFolder) + r"\*.shp"),
                    key=os.path.getmtime)

                # Copy to feature class
                arcpy.AddMessage("Copying to " +
                                 os.path.join(outputWorkspace, datasetName) + "...")
                if (enableLogging == "true"):
                    logger.info("Copying to " +
                                os.path.join(outputWorkspace, datasetName) + "...")
                arcpy.CopyFeatures_management(
                    extractedShp, os.path.join(outputWorkspace, datasetName),
                    "", "0", "0", "0")
            # CSV
            elif (wfsDownloadType.lower() == "csv"):
                arcpy.AddMessage("Translating CSV file...")
                # Set the max size of the csv fields
                csv.field_size_limit(10000000)
                # Set CSV delimiter
                csvDelimiter = ","

                # Count number of records in CSV (minus the header row)
                with io.open(downloadedFile, 'rt', encoding='utf-8') as csvFile:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        numberRecords = sum(1 for row in csv.reader(
                            csvFile, delimiter=csvDelimiter)) - 1
                    else:
                        # Python 2.x - read in CSV data as unicode
                        def unicodeEncoder(csvData):
                            for row in csvData:
                                yield row.encode('utf-8')
                        numberRecords = sum(1 for row in csv.reader(
                            unicodeEncoder(csvFile), delimiter=csvDelimiter)) - 1
                arcpy.AddMessage(str(numberRecords) + " records to load...")
                if (enableLogging == "true"):
                    logger.info(str(numberRecords) + " records to load...")

                # If there are some records
                if (numberRecords > 0):
                    # Open the CSV file
                    with io.open(downloadedFile, 'rt', encoding='utf-8') as csvFile:
                        # Python version check
                        if sys.version_info[0] >= 3:
                            # Python 3.x
                            rows = csv.reader(csvFile, delimiter=csvDelimiter)
                        else:
                            # Python 2.x - read in CSV data as unicode
                            def unicodeEncoder(csvData):
                                for row in csvData:
                                    yield row.encode('utf-8')
                            rows = csv.reader(unicodeEncoder(csvFile),
                                              delimiter=csvDelimiter)

                        # For each row in the CSV
                        count = 0
                        fields = []
                        for row in rows:
                            # If at the header line
                            if (count == 0):
                                # Add each of the fields to an array
                                for field in row:
                                    if ((field.lower() != "geometry") and
                                            (field.lower() != "shape")):
                                        fields.append(field)
                                if (dataType.lower() == "layer"):
                                    fields.append("SHAPE@")
                            # For each row after the header
                            else:
                                values = []
                                if (dataType.lower() == "layer"):
                                    # Get the last column - geometry
                                    geometryWKT = row[len(row) - 1]
                                    # If geometry not null, convert to Esri geometry
                                    if (geometryWKT):
                                        Geometry = arcpy.FromWKT(
                                            geometryWKT, arcpy.SpatialReference(2193))
                                # If table or geometry not null
                                if ((dataType.lower() == "table") or (geometryWKT)):
                                    # If it's the first feature, create new feature class
                                    if (count == 1):
                                        # If layer
                                        if (dataType.lower() == "layer"):
                                            # Create temporary feature class to get shape type
                                            arcpy.CopyFeatures_management(
                                                Geometry, "in_memory\outputDataset")
                                            geometryType = arcpy.Describe(
                                                "in_memory\outputDataset").shapeType
                                            # Create new feature class
                                            arcpy.CreateFeatureclass_management(
                                                arcpy.env.scratchGDB,
                                                wfsDataID.replace("-", "_"),
                                                geometryType, "", "", "",
                                                arcpy.SpatialReference(2193))
                                        # If table
                                        else:
                                            # Create new table
                                            arcpy.CreateTable_management(
                                                arcpy.env.scratchGDB,
                                                wfsDataID.replace("-", "_"), "")
                                        # Add the fields
                                        for field in fields:
                                            if (field.lower() != "shape@"):
                                                arcpy.AddField_management(
                                                    os.path.join(
                                                        arcpy.env.scratchGDB,
                                                        wfsDataID.replace("-", "_")),
                                                    str(field.replace("-", "_")),
                                                    "TEXT")
                                    # Add the field values
                                    valueCount = 0
                                    for value in row:
                                        # The last column is geometry for layers
                                        if (dataType.lower() == "layer"):
                                            columnLength = len(row) - 1
                                        else:
                                            columnLength = len(row)
                                        # If it's not the geometry column
                                        if (valueCount != columnLength):
                                            values.append(value)
                                        valueCount = valueCount + 1
                                    if (dataType.lower() == "layer"):
                                        # If geometry not null, add it in
                                        if (geometryWKT):
                                            values.append(Geometry)
                                        # Blank geometry of the matching shape type
                                        # (was if/if/else - a blank point was
                                        # clobbered by the else branch)
                                        else:
                                            if (geometryType.lower() == "point"):
                                                Geometry = arcpy.PointGeometry(
                                                    arcpy.Point(None))
                                            elif (geometryType.lower() == "polygon"):
                                                Geometry = arcpy.Polygon(
                                                    arcpy.Array(None))
                                            else:
                                                Geometry = arcpy.Polyline(
                                                    arcpy.Array(None))
                                            values.append(Geometry)
                                    # Load it into existing feature class
                                    cursor = arcpy.da.InsertCursor(
                                        os.path.join(arcpy.env.scratchGDB,
                                                     wfsDataID.replace("-", "_")),
                                        fields)
                                    cursor.insertRow(values)
                                    arcpy.AddMessage("Loaded " + str(count) +
                                                     " of " + str(numberRecords) +
                                                     " records...")
                            count = count + 1
                        # Delete cursor
                        del cursor

                    # If a changeset is being requested
                    if ("changeset" in wfsDataID.lower()) and (lastUpdateFile):
                        # Apply changes to target dataset
                        applyChangeset(
                            lastUpdateFile, str(currentDate),
                            os.path.join(arcpy.env.scratchGDB,
                                         wfsDataID.replace("-", "_")),
                            outputWorkspace,
                            os.path.join(outputWorkspace, datasetName),
                            changesetDatasetID, targetDatasetID)
                    # Full dataset
                    else:
                        arcpy.AddMessage(
                            "Copying to " +
                            os.path.join(outputWorkspace, datasetName) + "...")
                        if (enableLogging == "true"):
                            logger.info(
                                "Copying to " +
                                os.path.join(outputWorkspace, datasetName) + "...")
                        # If layer
                        if (dataType.lower() == "layer"):
                            arcpy.CopyFeatures_management(
                                os.path.join(arcpy.env.scratchGDB,
                                             wfsDataID.replace("-", "_")),
                                os.path.join(outputWorkspace, datasetName),
                                "", "0", "0", "0")
                        # If table
                        else:
                            arcpy.Copy_management(
                                os.path.join(arcpy.env.scratchGDB,
                                             wfsDataID.replace("-", "_")),
                                os.path.join(outputWorkspace, datasetName))
                # No records
                else:
                    arcpy.AddWarning("No data returned...")
                    if (enableLogging == "true"):
                        logger.warning("No data returned...")
                    # If a changeset
                    if ("changeset" in wfsDataID.lower()) and (lastUpdateFile):
                        # Update changes config file
                        updateChangesConfig(lastUpdateFile, str(currentDate))
                    sys.exit()
        # JSON
        else:
            arcpy.AddMessage("Translating JSON data...")
            # Convert geometry (GeoJSON) to shape - for each feature in GeoJSON
            numberRecords = len(JSONData["features"])
            arcpy.AddMessage(str(numberRecords) + " records to load...")
            if (enableLogging == "true"):
                logger.info(str(numberRecords) + " records to load...")

            # If there are some records
            if (numberRecords > 0):
                count = 0
                fields = []
                for feature in JSONData["features"]:
                    values = []
                    if (dataType.lower() == "layer"):
                        # If geometry not null, convert to Esri geometry
                        if (feature["geometry"]):
                            Geometry = arcpy.AsShape(feature["geometry"])
                    # If it's the first feature, create new feature class
                    if (count == 0):
                        # Get the fields
                        for field in feature["properties"]:
                            fields.append(field)
                        if (dataType.lower() == "layer"):
                            fields.append("SHAPE@")
                        # If layer
                        if (dataType.lower() == "layer"):
                            # Create temporary feature class to get shape type
                            arcpy.CopyFeatures_management(
                                Geometry, "in_memory\outputDataset")
                            geometryType = arcpy.Describe(
                                "in_memory\outputDataset").shapeType
                            # Create new feature class
                            arcpy.CreateFeatureclass_management(
                                arcpy.env.scratchGDB,
                                wfsDataID.replace("-", "_"), geometryType,
                                "", "", "", arcpy.SpatialReference(2193))
                        # If table
                        else:
                            # Create new table
                            arcpy.CreateTable_management(
                                arcpy.env.scratchGDB,
                                wfsDataID.replace("-", "_"), "")
                        # Add the fields
                        for field in fields:
                            if (field.lower() != "shape@"):
                                arcpy.AddField_management(
                                    os.path.join(arcpy.env.scratchGDB,
                                                 wfsDataID.replace("-", "_")),
                                    str(field.replace("-", "_")), "TEXT")
                    # Add the field values
                    for field in fields:
                        if (field.lower() != "shape@"):
                            values.append(feature["properties"][field])
                    if (dataType.lower() == "layer"):
                        # If geometry not null, add it in
                        if (feature["geometry"]):
                            values.append(Geometry)
                        # Blank geometry of the matching shape type
                        # (was if/if/else - a blank point was clobbered
                        # by the else branch)
                        else:
                            if (geometryType.lower() == "point"):
                                Geometry = arcpy.PointGeometry(arcpy.Point(None))
                            elif (geometryType.lower() == "polygon"):
                                Geometry = arcpy.Polygon(arcpy.Array(None))
                            else:
                                Geometry = arcpy.Polyline(arcpy.Array(None))
                            values.append(Geometry)
                    # Load it into existing feature class
                    cursor = arcpy.da.InsertCursor(
                        os.path.join(arcpy.env.scratchGDB,
                                     wfsDataID.replace("-", "_")), fields)
                    cursor.insertRow(values)
                    count = count + 1
                    arcpy.AddMessage("Loaded " + str(count) + " of " +
                                     str(numberRecords) + " records...")
                # Delete cursor
                del cursor

                # If a changeset is being requested
                if ("changeset" in wfsDataID.lower()) and (lastUpdateFile):
                    # Apply changes to target dataset
                    applyChangeset(
                        lastUpdateFile, str(currentDate),
                        os.path.join(arcpy.env.scratchGDB,
                                     wfsDataID.replace("-", "_")),
                        outputWorkspace,
                        os.path.join(outputWorkspace, datasetName),
                        changesetDatasetID, targetDatasetID)
                # Full dataset
                else:
                    arcpy.AddMessage(
                        "Copying to " +
                        os.path.join(outputWorkspace, datasetName) + "...")
                    if (enableLogging == "true"):
                        logger.info(
                            "Copying to " +
                            os.path.join(outputWorkspace, datasetName) + "...")
                    # If layer
                    if (dataType.lower() == "layer"):
                        arcpy.CopyFeatures_management(
                            os.path.join(arcpy.env.scratchGDB,
                                         wfsDataID.replace("-", "_")),
                            os.path.join(outputWorkspace, datasetName),
                            "", "0", "0", "0")
                    # If table
                    else:
                        arcpy.Copy_management(
                            os.path.join(arcpy.env.scratchGDB,
                                         wfsDataID.replace("-", "_")),
                            os.path.join(outputWorkspace, datasetName))
            # --------------------------------------- End of code --------------------------------------- #
            # No records
            else:
                arcpy.AddWarning("No data returned...")
                if (enableLogging == "true"):
                    logger.warning("No data returned...")
                # If a changeset
                if ("changeset" in wfsDataID.lower()) and (lastUpdateFile):
                    # Update changes config file
                    updateChangesConfig(lastUpdateFile, str(currentDate))
                sys.exit()

        # If called from gp tool return the arcpy parameter
        # NOTE(review): `output` is presumably a module-level result set by
        # the surrounding script -- confirm.
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error and end of process
            logger.error(errorMessage)
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message from all exception arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(
                            e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error and end of process
            logger.error(errorMessage)
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
# -*- encoding:utf-8 -*-
# Build a polygon geometry from a hard-coded WKT string and save it as a
# shapefile. The WKT describes one polygon: an outer boundary ring followed
# by a second ring (a hole). Coordinates are "lon lat" pairs.
# NOTE(review): no spatial reference is passed to FromWKT, so the geometry is
# created without one -- presumably WGS84 is intended; confirm before reuse.
import arcpy

# Workspace left empty: outputs resolve relative to the current directory.
arcpy.env.workspace = ""
polygons = arcpy.FromWKT(
    "POLYGON((116.13982 29.88508,116.133298 29.865047,116.140065 29.853345,116.136074 29.841912,116.141487 29.833255,116.139141 29.827136,116.094168 29.804491,116.05429 29.765514,115.969441 29.730778,115.951961 29.7266,115.901597 29.733592,115.848585 29.751062,115.769124 29.800163,115.713168 29.842946,115.689668 29.85718,115.674007 29.857044,115.613694 29.842301,115.586868 29.840578,115.5252 29.843555,115.478962 29.856092,115.459597 29.869491,115.436349 29.905496,115.418339 29.915956,115.414394 29.928174,115.41487 29.950444,115.38586 29.975737,115.368123 30.006744,115.33548 30.036738,115.330484 30.045083,115.328576 30.078342,115.30855 30.117856,115.27418 30.161666,115.23118 30.208822,115.214144 30.218919,115.195419 30.222019,115.173966 30.221622,115.148019 30.211853,115.12642 30.215717,115.103114 30.227566,115.086667 30.242818,115.085262 30.267359,115.096426 30.298311,115.09247 30.330363,115.104584 30.354133,115.094606 30.369622,115.083706 30.363208,115.071347 30.385562,115.055222 30.399363,115.040581 30.405584,115.018059 30.411633,114.957343 30.410559,114.899734 30.41612,114.879463 30.426017,114.853994 30.448418,114.842412 30.468337,114.838641 30.484741,114.84026 30.520581,114.845446 30.546981,114.844419 30.566417,114.815363 30.595449,114.811554 30.606528,114.794198 30.619363,114.802522 30.625508,114.850438 30.631565,114.858127 30.637346,114.85142 30.643258,114.862842 30.639819,114.864011 30.642663,114.860203 30.649033,114.852272 30.647867,114.852101 30.653853,114.835283 30.675505,114.830507 30.676892,114.825561 30.669529,114.819804 30.666575,114.816514 30.668242,114.811099 30.680345,114.817529 30.692339,114.802808 30.705428,114.790826 30.727486,114.810885 30.729886,114.822966 30.736901,114.847936 30.744705,114.877126 30.76586,114.8828 30.776263,114.882513 30.798587,114.88531 30.802074,114.894284 30.802577,114.921991 30.790449,114.939121 30.802092,114.945921 30.792376,114.959368 30.782336,114.963342 30.786666,114.96319 30.795321,114.960114 30.799798,114.951639 30.802635,114.950814 30.808121,114.969669 30.813316,114.97633 30.818195,114.979415 30.813385,114.987561 30.821945,114.989159 30.814371,114.994905 30.815386,114.998204 30.830591,115.007347 30.836425,114.998341 30.851483,115.00346 30.862551,115.025286 30.869018,115.028789 30.862642,115.037593 30.861618,115.035839 30.854631,115.04188 30.850942,115.045785 30.851971,115.05348 30.862421,115.056052 30.858734,115.061764 30.862101,115.059112 30.869897,115.085775 30.888963,115.085336 30.895442,115.066106 30.900638,115.061758 30.910838,115.042152 30.915286,115.027965 30.931268,115.018792 30.92898,115.001472 30.943353,114.992939 30.961009,114.998714 30.973105,114.992991 30.974555,114.988736 30.980992,114.995281 30.990673,114.99451 31.003842,114.989741 31.003911,114.990181 30.998608,114.986967 30.998621,114.99071 31.02373,114.982844 31.033432,114.976799 31.027439,114.983139 31.012355,114.967394 30.992596,114.961789 30.993082,114.955782 31.00162,114.945048 31.003443,114.942708 31.008364,114.934727 31.001685,114.926412 31.020488,114.911053 31.011414,114.888873 31.012258,114.867955 30.998404,114.862745 30.991306,114.880109 30.979102,114.882972 30.971623,114.871909 30.963025,114.866862 30.950256,114.846928 30.956291,114.82872 30.951981,114.831069 30.957608,114.820999 30.962728,114.817705 30.973194,114.810046 30.97664,114.815296 30.982364,114.811771 30.998215,114.809814 31.000226,114.802814 30.99584,114.802259 31.000993,114.791246 31.001181,114.788892 30.991263,114.781402 30.985945,114.778763 30.971567,114.775575 30.969779,114.768857 30.969822,114.771886 30.975968,114.76959 30.986108,114.775284 30.989111,114.767173 30.990936,114.767066 30.995398,114.745638 30.992904,114.743519 30.988484,114.726931 30.983487,114.724253 30.987747,114.721543 30.986954,114.722417 30.982951,114.715469 30.984287,114.7085 30.976746,114.704021 30.964551,114.704513 30.955779,114.713819 30.948315,114.710907 30.9441,114.689074 30.95457,114.672481 30.94021,114.664655 30.938172,114.653631 30.942485,114.647617 30.933368,114.641731 30.931858,114.639359 30.934332,114.630028 30.929696,114.618815 30.932664,114.618511 30.944029,114.614296 30.944745,114.608447 30.964198,114.582758 30.975708,114.577497 30.984266,114.567496 30.985505,114.559948 30.998378,114.549434 30.997934,114.534128 31.017041,114.515958 31.020291,114.505379 31.042626,114.510063 31.051115,114.520831 31.056739,114.520952 31.069604,114.539429 31.100064,114.533888 31.113969,114.539931 31.122675,114.532992 31.128099,114.531053 31.14123,114.524885 31.149985,114.498782 31.15543,114.494142 31.163872,114.476393 31.176468,114.466775 31.174727,114.464416 31.189197,114.47743 31.190161,114.478975 31.195288,114.455993 31.202376,114.458803 31.209721,114.448437 31.213001,114.44016 31.211763,114.42829 31.218499,114.411223 31.235328,114.407847 31.255088,114.419227 31.267522,114.445372 31.313759,114.446905 31.330576,114.442649 31.344421,114.448519 31.348893,114.442387 31.353385,114.432527 31.354634,114.427524 31.362635,114.434872 31.371223,114.434845 31.384979,114.443762 31.389819,114.454063 31.389234,114.458361 31.403237,114.454526 31.414458,114.456414 31.420686,114.466922 31.416097,114.465453 31.413397,114.469619 31.410747,114.484346 31.411855,114.4868 31.415353,114.487982 31.425088,114.481745 31.427521,114.480298 31.433099,114.484099 31.45201,114.493771 31.464832,114.491864 31.472315,114.500945 31.473035,114.506337 31.464248,114.518121 31.468019,114.511651 31.49118,114.504449 31.503365,114.510851 31.52703,114.516487 31.530359,114.519307 31.542013,114.526502 31.546997,114.526771 31.564185,114.53279 31.555897,114.549104 31.554274,114.55665 31.558135,114.564285 31.574856,114.570682 31.56126,114.579807 31.55948,114.582312 31.565415,114.600178 31.566877,114.603141 31.579745,114.617239 31.58251,114.62747 31.590941,114.651282 31.585372,114.651766 31.5791,114.661541 31.575266,114.680052 31.548028,114.706204 31.549034,114.702325 31.529738,114.723111 31.531185,114.727399 31.522276,114.746222 31.531099,114.75576 31.52621,114.761336 31.532734,114.777091 31.524576,114.790754 31.530455,114.791239 31.523687,114.779621 31.517887,114.78021 31.508141,114.789299 31.502798,114.788838 31.490142,114.797717 31.484076,114.819982 31.484091,114.82887 31.477643,114.838684 31.463435,114.844162 31.464421,114.851908 31.471893,114.856639 31.482785,114.863192 31.486108,114.879829 31.484074,114.882654 31.476262,114.891734 31.47434,114.899298 31.479691,114.914412 31.481529,114.929275 31.488296,114.931868 31.48322,114.945258 31.475494,114.952793 31.489237,114.967484 31.501594,114.978556 31.506294,114.984929 31.502176,114.988015 31.485051,114.997985 31.485086,114.999473 31.477295,115.00308 31.477081,115.007129 31.49153,115.016765 31.494657,115.010039 31.502629,115.00923 31.509013,115.016027 31.5132,115.029032 31.536241,115.03494 31.534701,115.034802 31.529257,115.043322 31.519877,115.055471 31.525102,115.068594 31.518233,115.101459 31.5178,115.101209 31.527958,115.115479 31.533358,115.119238 31.540576,115.109011 31.548142,115.115812 31.559081,115.10863 31.564921,115.107938 31.573351,115.134895 31.591423,115.127197 31.604829,115.149195 31.612496,115.175791 31.611138,115.182675 31.60627,115.185719 31.587897,115.199406 31.578925,115.199109 31.57197,115.214844 31.572164,115.219599 31.561897,115.242319 31.565029,115.236272 31.542118,115.224075 31.522845,115.22649 31.510471,115.219119 31.49993,115.224538 31.476021,115.214123 31.453465,115.226173 31.43265,115.23831 31.433767,115.258035 31.42754,115.264992 31.412741,115.256552 31.400027,115.266247 31.395099,115.286959 31.405837,115.30062 31.402019,115.306333 31.390535,115.315443 31.389411,115.321471 31.39062,115.342141 31.40745,115.359597 31.404823,115.37775 31.409837,115.397032 31.398586,115.396406 31.390501,115.388797 31.381075,115.380874 31.377444,115.386836 31.36816,115.386777 31.362071,115.377385 31.355558,115.405442 31.348323,115.412118 31.343414,115.413555 31.348049,115.42653 31.356402,115.429582 31.352893,115.438153 31.355854,115.447926 31.352476,115.454686 31.322173,115.461217 31.319593,115.466916 31.326425,115.468671 31.323056,115.462004 31.288572,115.472822 31.282094,115.475243 31.27438,115.480474 31.270321,115.496113 31.272547,115.507352 31.267582,115.515038 31.27479,115.525659 31.269228,115.546605 31.235028,115.542372 31.202524,115.550979 31.19715,115.553844 31.188476,115.562147 31.179989,115.562634 31.169448,115.579972 31.160191,115.590623 31.147888,115.607905 31.1626,115.610421 31.173625,115.622233 31.181662,115.623023 31.186983,115.641341 31.189969,115.650631 31.213164,115.668037 31.217624,115.696735 31.208499,115.704971 31.209038,115.717648 31.20368,115.718586 31.188251,115.737536 31.181124,115.741155 31.160704,115.752689 31.157327,115.76592 31.141429,115.777535 31.112688,115.780448 31.111422,115.788921 31.116356,115.794354 31.12673,115.801647 31.131465,115.826943 31.129911,115.843701 31.132265,115.858299 31.143827,115.860093 31.149592,115.886448 31.152017,115.892995 31.139171,115.902002 31.134273,115.895242 31.123793,115.895955 31.111487,115.900378 31.103587,115.908972 31.097068,115.915874 31.09731,115.928161 31.091494,115.929075 31.083006,115.951301 31.070989,115.951618 31.062845,115.945969 31.051767,115.94851 31.046625,115.95869 31.048855,115.972275 31.043132,115.988653 31.046029,116.003491 31.038258,116.009739 31.039272,116.017746 31.02034,116.027375 31.019824,116.035744 31.014776,116.04488 31.021074,116.063983 31.018804,116.065558 31.000886,116.077162 30.962304,116.047004 30.964629,116.039827 30.957333,116.029978 30.959531,116.015379 30.955626,115.995908 30.940526,115.981332 30.936904,115.975923 30.925561,115.959439 30.918586,115.954485 30.909704,115.941312 30.909209,115.934947 30.893816,115.92591 30.891918,115.91952 30.894103,115.894965 30.885388,115.881032 30.873533,115.871126 30.869971,115.869591 30.861065,115.857126 30.849658,115.852721 30.840682,115.854492 30.833435,115.869031 30.821363,115.868285 30.81373,115.872309 30.806709,115.863476 30.792441,115.865825 30.788719,115.877422 30.786234,115.875672 30.781676,115.857662 30.771593,115.857064 30.762417,115.85402 30.761352,115.840658 30.762252,115.82886 30.756294,115.816104 30.765256,115.792347 30.763,115.771688 30.715526,115.772657 30.700282,115.767745 30.694612,115.768171 30.690951,115.777794 30.683605,115.780638 30.674777,115.801175 30.665835,115.804294 30.656048,115.817896 30.64497,115.820385 30.630732,115.813087 30.621712,115.824007 30.604475,115.829461 30.602055,115.833386 30.611926,115.854061 30.607463,115.862673 30.595031,115.880346 30.588913,115.884593 30.583824,115.880954 30.574867,115.888666 30.564918,115.89181 30.549389,115.904693 30.543706,115.916036 30.525154,115.926697 30.523412,115.915055 30.512749,115.903385 30.488223,115.906101 30.472615,115.899297 30.465305,115.900062 30.458558,115.922408 30.448996,115.927994 30.43481,115.951524 30.431067,115.94572 30.424197,115.93647 30.426842,115.929185 30.424607,115.917671 30.407157,115.906027 30.403756,115.890743 30.387713,115.90073 30.363726,115.9078 30.357794,115.912463 30.347674,115.91972 30.346279,115.924055 30.339604,115.92052 30.323124,115.912347 30.314241,115.935972 30.307002,115.945403 30.316165,115.948881 30.312347,115.958854 30.312169,115.965677 30.30641,115.979242 30.304494,115.994949 30.292735,115.990897 30.283856,115.997086 30.277172,115.992685 30.27178,115.994357 30.267901,116.00258 30.261562,116.016766 30.259885,116.021479 30.251972,116.019792 30.24688,116.0345 30.24611,116.035058 30.239861,116.049434 30.232436,116.054099 30.225438,116.063322 30.225327,116.071754 30.211239,116.067526 30.207052,116.072542 30.200057,116.062027 30.192428,116.061776 30.188938,116.067774 30.159359,116.076815 30.138369,116.094534 30.131912,116.090472 30.120182,116.095002 30.110338,116.087375 30.096647,116.09333 30.083696,116.08326 30.071534,116.085095 30.061653,116.098449 30.037232,116.08855 30.031404,116.090631 30.024616,116.084122 30.000224,116.086296 29.986582,116.07991 29.971681,116.132426 29.90812,116.133564 29.890932,116.13982 29.88508),(115.136881 30.223666,115.156066 30.243421,115.152526 30.248852,115.141529 30.253336,115.136949 30.249692,115.121486 30.263801,115.118886 30.251386,115.100492 30.240335,115.124195 30.223945,115.136881 30.223666))"
)
# Persist the polygon (with its hole) to a shapefile in the current directory.
arcpy.CopyFeatures_management(polygons, 'wuhan.shp')
def executer(self, env, no_nc, listeFichierXml, listeFeatureLayer, detruire):
#-------------------------------------------------------------------------------------
    """
    Identify the non-conforming elements, per class / partition / depot,
    that are contained in the Work Zones (ZT) described by the XML files.

    Parameters:
    -----------
    env              : SIB work environment (used both as environment and user profile
                       when opening the connection).
    no_nc            : Non-conformity number to process.
    listeFichierXml  : Comma-separated list of XML file names, one per work zone.
    listeFeatureLayer: List of FeatureLayers holding the selected elements matching
                       the classes/partitions/depots of the XML files.
    detruire         : True  -> delete the already-identified elements before adding
                                the new ones.
                       False -> only add the newly identified elements.

    Returns:
    --------
    listeFichierXmlSortie : List of the corrected output XML file names.

    Raises:
    -------
    Exception : invalid NC number, or the NC already has a closing/processing date.
    """
    # Open the SIB connection and keep it on the instance for the helper calls below.
    self.Sib = self.CompteSib.OuvrirConnexionSib(env, env)

    # Current SIB user, written into every inserted row.
    sUsagerSib = self.CompteSib.UsagerSib()

    # Validate the non-conformity number.
    # NOTE(security): SQL is built by string concatenation throughout this method;
    # if no_nc/catalogue/... can come from untrusted input, switch to bind variables.
    arcpy.AddMessage("- Valider le numéro de non-conformité")
    sql = "SELECT NO_NC,DATE_FERMETURE,DATE_TRAITEMENT FROM F702_NC WHERE NO_NC='" + no_nc + "'"
    arcpy.AddMessage(sql)
    resultat = self.Sib.requeteSib(sql)

    # No row returned: unknown NC number.
    if len(resultat) == 0:
        raise Exception("Numéro de non-conformité invalide : " + no_nc)

    # BUGFIX: the two checks below used to test the identical condition
    # (col 1 OR col 2), so the "date de traitement" branch was unreachable and
    # the wrong message could be raised. Each date is now checked on its own,
    # and str() avoids a TypeError when concatenating a date value.
    if resultat[0][1] is not None:
        raise Exception("La date de fermeture n'est pas vide : " + str(resultat[0][1]))
    if resultat[0][2] is not None:
        raise Exception("La date de traitement n'est pas vide : " + str(resultat[0][2]))

    # Output list and attribute names read from each feature row.
    listeFichierXmlSortie = []
    object_id = "OBJECTID"
    bdg_id = "BDG_ID"

    # Process every XML file of the list.
    for fichierXml in listeFichierXml.split(","):
        # Read the XML according to the CreerZT service profile.
        arcpy.AddMessage(" ")
        arcpy.AddMessage("- Lecture du fichier XML: %s" % fichierXml)
        oXmlCreerZT = util_XmlCreerZT.XmlCreerZT(fichierXml)

        # Depot.
        depot = oXmlCreerZT.obtenirDepot()
        arcpy.AddMessage(" Dépôt: %s" % depot)

        # Work-zone identifier.
        zt_id = oXmlCreerZT.obtenirZtId()
        arcpy.AddMessage(" ZT_ID: %s" % zt_id)

        # By default assume the ZT_ID already embeds the NC number; otherwise
        # build a new identifier and write it back into the XML.
        zt_id_nc = zt_id
        if no_nc not in zt_id:
            zt_id_nc = zt_id + "_NC_" + no_nc
            oXmlCreerZT.definirZtId(zt_id_nc)
        arcpy.AddMessage(" ZT_ID_NC: %s" % zt_id_nc)

        # Catalogue.
        catalogue = oXmlCreerZT.obtenirCatalogue()
        arcpy.AddMessage(" Catalogue: %s" % catalogue)

        # Partition.
        partition = oXmlCreerZT.obtenirPartition()
        arcpy.AddMessage(" Partition: %s" % partition)

        # Comma-separated list of class names.
        listeClasse = oXmlCreerZT.obtenirListeNomElement()
        arcpy.AddMessage(" ListeClasse: %s" % listeClasse)

        # Work-zone polygon: promote the POLYGON WKT to MULTIPOLYGON and build
        # the geometry in the active data frame's spatial reference.
        mxd = arcpy.mapping.MapDocument("CURRENT")
        polygonWKT = oXmlCreerZT.obtenirPolygone()
        arcpy.AddMessage(" PolygonWKT: %s" % polygonWKT)
        polygon = arcpy.FromWKT(
            polygonWKT.replace("POLYGON (", "MULTIPOLYGON ((") + ")",
            mxd.activeDataFrame.spatialReference)

        # (Disabled) deletion of existing F705_PR entries:
        #if detruire:
        #    sql = "DELETE FROM F705_PR WHERE NO_NC='" + no_nc + "' AND TY_PRODUIT='" + partition + "' AND IDENTIFIANT='" + zt_id_nc + "'"
        #    arcpy.AddMessage(sql)
        #    self.Sib.execute(sql)

        # Process every non-conforming class.
        for classe in listeClasse.split(","):
            # Optionally delete the existing F705_EL entries first.
            if detruire:
                sql = "DELETE FROM F705_EL WHERE NO_NC='" + no_nc + "' AND CATALOGUE='" + catalogue + "' AND DEPOT='" + depot + "' AND PARTITION='" + partition + "' AND CLASSE='" + classe + "' AND ZT_ID_DEBUT='" + zt_id_nc + "'"
                arcpy.AddMessage(sql)
                self.Sib.execute(sql)

            # Count of elements inserted for this class.
            nb_elem = 0

            # Find the FeatureLayer matching the class.
            featureLayer = self.extraireFeatureLayer(classe, listeFeatureLayer)

            # Insert one row per selected element contained in the polygon.
            if len(featureLayer) > 0:
                for row in arcpy.SearchCursor(featureLayer):
                    element = row.getValue("Shape")
                    if polygon.contains(element):
                        nb_elem = nb_elem + 1
                        sql = "P0G03_UTL.PU_HORODATEUR,'" + sUsagerSib + "',SYSDATE,SYSDATE,'" + no_nc + "','" + catalogue + "','" + depot + "','" + partition + "','" + classe + "'"
                        sql = sql + "," + str(row.getValue(object_id)) + ",'" + str(row.getValue(bdg_id)) + "','" + zt_id_nc + "',NULL,NULL"
                        arcpy.AddMessage("INSERT INTO F705_EL VALUES (" + sql + ")")
                        self.Sib.execute("INSERT INTO F705_EL VALUES (" + sql + ")")

            # No element selected: still record the non-conforming class itself.
            if nb_elem == 0:
                sql = "P0G03_UTL.PU_HORODATEUR,'" + sUsagerSib + "',SYSDATE,SYSDATE,'" + no_nc + "','" + catalogue + "','" + depot + "','" + partition + "','" + classe + "'"
                sql = sql + ",0,NULL,'" + zt_id_nc + "',NULL,NULL"
                arcpy.AddMessage("INSERT INTO F705_EL VALUES (" + sql + ")")
                self.Sib.execute("INSERT INTO F705_EL VALUES (" + sql + ")")

        # (Disabled) insertion of the non-conforming product identifier:
        #sql = "INSERT INTO F705_PR VALUES ('" + sUsagerSib + "',SYSDATE,SYSDATE,'" + no_nc + "','" + partition + "','" + zt_id_nc + "',1,0,P0G03_UTL.PU_HORODATEUR,99999,99)"
        #arcpy.AddMessage(sql)
        #self.Sib.execute(sql)

        # Rename the XML output after the new NC-bearing identifier and write it.
        fichierXmlSortie = fichierXml.replace(zt_id, zt_id_nc)
        arcpy.AddMessage("- Écriture du fichier XML %s:" % fichierXmlSortie)
        oXmlCreerZT.ecrire(fichierXmlSortie)

        # Collect the output file name.
        listeFichierXmlSortie.append(fichierXmlSortie)

    # Commit all the inserts/deletes.
    arcpy.AddMessage(" ")
    arcpy.AddMessage("- Accepter les modifications")
    sql = "COMMIT"
    arcpy.AddMessage(sql)
    self.Sib.execute(sql)

    # Normal exit: close the SIB connection and return the output list.
    arcpy.AddMessage(" ")
    self.CompteSib.FermerConnexionSib()
    return listeFichierXmlSortie
# Create the output feature class and add its attribute fields.
arcpy.management.CreateFeatureclass(gdb, fc_name, "Polygon", "", "", "", spRef)
arcpy.management.AddFields(fc_name, fields_desc)

### Cleaning the raw table and mask by target confidence level-----------------------
# Keep only records whose geometry text contains 'POLYGON' (this also admits
# 'MULTIPOLYGON'); records with invalid text such as 'EMPTY' are dropped.
# (The original "'|'.join(['POLYGON'])" was a one-element join == 'POLYGON'.)
df_clean = df[df['geometry'].str.contains('POLYGON')].copy()

# Mask the table by confidence level.
df_conf = df_clean[df_clean['confidence'] > tarConf].copy()

### TO TEST THE CODE with a small chunk of data:
#df_test = df_conf.iloc[0:100, :].copy()
#df_test.reset_index(inplace=True, drop=True)

### Main loop - Convert the CSV data to a feature class:-----------------------------
# BUGFIX: open the InsertCursor ONCE for the whole load. The original re-opened
# a cursor for every row, which is very slow and can leave schema locks behind.
with arcpy.da.InsertCursor(fc_name, fields) as cursor:
    for i, r in df_conf.iterrows():
        # Parse the WKT text into an arcpy geometry in the target spatial reference.
        geomet = arcpy.FromWKT(r['geometry'], spRef)
        # Positional access assumes the CSV column order:
        # 0=lat, 1=long, 2=area, 3=confidence, 5=plus  -- TODO confirm against the CSV header.
        lat = r[0]
        lon = r[1]  # renamed from 'long' to avoid shadowing the Python 2 builtin
        area = r[2]
        conf = r[3]
        plus = r[5]
        cursor.insertRow([lat, lon, area, conf, plus, geomet])

print('END PROCESS.')