def writeShp(geoList, outFilePath, geoType): if not (geoType in ["Point", "Ptgon", "Line"]): print "The output geometry type should be Point, Ptgon, or Line" return if geoType == "Point": geoT = shapelib.SHPT_POINT elif geoType == "Ptgon": geoT = shapelib.SHPT_POLYGON else: geoT = shapelib.SHPT_ARC outShp = shapelib.create(outFilePath, geoT) outDbf = dbflib.create(outFilePath.split('.')[0] + '.dbf') outDbf.add_field("ID", dbflib.FTInteger, 100, 0) for j in range(len(geoList)): recordDict = {"ID": j} if geoType == "Polygon": if geoList[j].geom_type == 'Polygon': vert = [list(geoList[j].exterior.coords)] for interi in geoList[j].interiors: vert.append(list(interi.coords)) #print vert else: vert = [list(geoList[j].coords)] obj = shapelib.SHPObject(geoT, -1, vert) outShp.write_object(-1, obj) outDbf.write_record(j, recordDict) print "%d records, %d fields" % (outDbf.record_count(), outDbf.field_count())
def make_shapefile(filename):
    """Write a demo polygon shapefile: one simple ring, then a ring with a hole."""
    # Create a shapefile with polygons
    outfile = shapelib.create(filename, shapelib.SHPT_POLYGON)
    # Create one very simple polygon and write it to the shapefile.  The
    # vertices should be given in clockwise order to comply with the
    # shapefile specification.
    obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1,
                             [[(10, 10), (10, 20), (20, 20), (10, 10)]])
    print obj.extents()
    print obj.vertices()
    outfile.write_object(-1, obj)
    # Create a polygon with a hole.  Note that according to the
    # shapefile specification, the vertices of the outer ring have to be
    # in clockwise order and the inner rings have to be in counter
    # clockwise order.
    #
    # There's an optional fourth parameter which when given must be a
    # list of part types, one for each part of the shape.  For polygons,
    # the part type is always shapelib.SHPP_RING, though.  The part
    # types are only relevant for SHPT_MULTIPATCH shapefiles.
    obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1,
                             [[(0, 0), (0, 40), (40, 40), (40, 0), (0, 0)],
                              [(10, 10), (20, 10), (20, 20), (10, 20), (10, 10)],
                              ])
    print obj.extents()
    print obj.vertices()
    outfile.write_object(-1, obj)
    # close the file.
    outfile.close()
def calculateShoreLength(gridFilePath, shoreFilePath, outFilePath):
    """For each grid cell, measure the shoreline length inside the cell.

    Copies the grid polygons to outFilePath and writes ID, the cell's
    existing "Impact" attribute, and the intersected shore length.

    Args:
      gridFilePath: grid polygon shapefile path
      shoreFilePath: shapefile whose first object is the shore polygon
      outFilePath: output shapefile path
    """
    gridShp = shapelib.ShapeFile(gridFilePath)
    gridDbf = dbflib.DBFFile(gridFilePath.split('.')[0] + '.dbf')
    shoreShp = shapelib.ShapeFile(shoreFilePath)
    outShp = shapelib.create(outFilePath, shapelib.SHPT_POLYGON)
    outDbf = dbflib.create(outFilePath.split('.')[0] + '.dbf')
    outDbf.add_field("ID", dbflib.FTInteger, 10, 0)
    outDbf.add_field("Impact", dbflib.FTDouble, 30, 6)
    outDbf.add_field("Length", dbflib.FTDouble, 30, 6)
    # Read the shore object once instead of three separate read_object calls
    shoreVerts = shoreShp.read_object(0).vertices()
    inter = shoreVerts[1:]  # interior rings (holes), if present
    if inter:
        shoreGeo = Polygon(shoreVerts[0], inter).boundary
    else:
        shoreGeo = Polygon(tuple(shoreVerts[0])).boundary
    for j in range(gridDbf.record_count()):
        gridObj = gridShp.read_object(j)
        gridGeo = Polygon(tuple(gridObj.vertices()[0]))
        # length of the shore boundary clipped to this grid cell
        shoreLength = gridGeo.intersection(shoreGeo).length
        recordDict = gridDbf.read_record(j)
        newDict = [j, recordDict["Impact"], shoreLength]
        outShp.write_object(-1, gridObj)
        outDbf.write_record(j, newDict)
def make_shapefile(filename):
    """Create a polygon shapefile holding a single triangular ring.

    Args:
      filename: output shapefile path
    """
    # Ring is closed by repeating the first vertex, as the spec requires
    obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1,
                             [[(10, 10), (20, 10), (20, 20), (10, 10)]])
    logging.info(obj.extents())
    logging.info(obj.vertices())
    outfile = shapelib.create(filename, shapelib.SHPT_POLYGON)
    outfile.write_object(-1, obj)
    # Close explicitly instead of relying on `del` to trigger the
    # destructor -- guarantees the file is flushed deterministically.
    outfile.close()
def AddBoomField(gridFilePath, sol, outFilePath):
    """Copy a grid shapefile, adding a 'Boomed' flag column.

    Args:
      gridFilePath: input grid polygon shapefile path
      sol: collection of record indexes that were boomed (flag = 1)
      outFilePath: output shapefile path
    """
    gridSHP = shapelib.ShapeFile(gridFilePath)
    gridDBF = dbflib.DBFFile(gridFilePath.split('.')[0] + '.dbf')
    outSHP = shapelib.create(outFilePath, shapelib.SHPT_POLYGON)
    outDBF = dbflib.create(outFilePath.split('.')[0] + '.dbf')
    # Replicate every existing field definition (type, name, width, decimals)
    for j in range(gridDBF.field_count()):
        info = gridDBF.field_info(j)  # hoisted: was called four times per field
        outDBF.add_field(info[1], info[0], info[2], info[3])
    outDBF.add_field('Boomed', dbflib.FTInteger, 5, 0)
    for j in range(gridDBF.record_count()):
        vert = gridSHP.read_object(j)
        recordDict = gridDBF.read_record(j)
        recordDict['Boomed'] = 1 if j in sol else 0
        outSHP.write_object(-1, vert)
        # Output record index tracks j directly (the old separate `jj`
        # counter was always equal to j).
        outDBF.write_record(j, recordDict)
def do(dt):
    """Generate for a given date """
    dbconn = psycopg2.connect(database='idep', host='iemdb', user='******')
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # Daily erosion results per HUC12 joined to HUC12 geometries
    cursor.execute(
        """
    SELECT ST_AsText(i.geom), i.huc_12, coalesce(avg_precip, 0), coalesce(avg_loss, 0), coalesce(avg_runoff, 0), coalesce(avg_delivery, 0) from ia_huc12 i JOIN results_by_huc12 r on (r.huc_12 = i.huc_12) WHERE valid = %s
    """, (dt, ))
    os.chdir("/tmp")
    fn = "idepv2_%s" % (dt.strftime("%Y%m%d"), )
    shp = shapelib.create(fn, shapelib.SHPT_POLYGON)
    dbf = dbflib.create(fn)
    dbf.add_field("HUC_12", dbflib.FTString, 12, 0)
    dbf.add_field("PREC_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("LOS_KGM2", dbflib.FTDouble, 8, 2)
    dbf.add_field("RUNOF_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("DELI_KGM", dbflib.FTDouble, 8, 2)
    for i, row in enumerate(cursor):
        # Parse the WKT geometry into vertex lists for shapelib
        g = wellknowntext.convert_well_known_text(row[0])
        obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1, g)
        shp.write_object(-1, obj)
        del (obj)
        dbf.write_record(
            i, dict(HUC_12=row[1], PREC_MM=row[2], LOS_KGM2=row[3],
                    RUNOF_MM=row[4], DELI_KGM=row[5]))
    # hack way to close the files
    del (shp)
    del (dbf)
    # Ship the projection file and zip everything up for download
    shutil.copyfile("/mesonet/www/apps/iemwebsite/data/gis/meta/26915.prj",
                    fn + ".prj")
    z = zipfile.ZipFile(fn + ".zip", 'w', zipfile.ZIP_DEFLATED)
    suffixes = ['shp', 'shx', 'dbf', 'prj']
    for s in suffixes:
        z.write(fn + "." + s)
    z.close()
    # CGI response: stream the zip to the client, then clean up /tmp
    sys.stdout.write("Content-type: application/octet-stream\n")
    sys.stdout.write(("Content-Disposition: attachment; filename=%s.zip\n\n"
                      "") % (fn, ))
    sys.stdout.write(file(fn + ".zip", 'r').read())
    suffixes.append('zip')
    for s in suffixes:
        os.remove(fn + "." + s)
def do(dt):
    """Generate for a given date """
    dbconn = psycopg2.connect(database='idep', host='iemdb', user='******')
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # Daily erosion results per HUC12 joined to HUC12 geometries
    cursor.execute("""
    SELECT ST_AsText(i.geom), i.huc_12, coalesce(avg_precip, 0), coalesce(avg_loss, 0), coalesce(avg_runoff, 0), coalesce(avg_delivery, 0) from ia_huc12 i JOIN results_by_huc12 r on (r.huc_12 = i.huc_12) WHERE valid = %s
    """, (dt,))
    os.chdir("/tmp")
    fn = "idepv2_%s" % (dt.strftime("%Y%m%d"),)
    shp = shapelib.create(fn, shapelib.SHPT_POLYGON)
    dbf = dbflib.create(fn)
    dbf.add_field("HUC_12", dbflib.FTString, 12, 0)
    dbf.add_field("PREC_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("LOS_KGM2", dbflib.FTDouble, 8, 2)
    dbf.add_field("RUNOF_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("DELI_KGM", dbflib.FTDouble, 8, 2)
    for i, row in enumerate(cursor):
        # Parse the WKT geometry into vertex lists for shapelib
        g = wellknowntext.convert_well_known_text(row[0])
        obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1, g)
        shp.write_object(-1, obj)
        del(obj)
        dbf.write_record(i, dict(HUC_12=row[1], PREC_MM=row[2],
                                 LOS_KGM2=row[3], RUNOF_MM=row[4],
                                 DELI_KGM=row[5]))
    # hack way to close the files
    del(shp)
    del(dbf)
    # Ship the projection file and zip everything up for download
    shutil.copyfile("/mesonet/www/apps/iemwebsite/data/gis/meta/26915.prj",
                    fn+".prj")
    z = zipfile.ZipFile(fn+".zip", 'w', zipfile.ZIP_DEFLATED)
    suffixes = ['shp', 'shx', 'dbf', 'prj']
    for s in suffixes:
        z.write(fn+"."+s)
    z.close()
    # CGI response: stream the zip to the client, then clean up /tmp
    sys.stdout.write("Content-type: application/octet-stream\n")
    sys.stdout.write(("Content-Disposition: attachment; filename=%s.zip\n\n"
                      "") % (fn,))
    sys.stdout.write(file(fn+".zip", 'r').read())
    suffixes.append('zip')
    for s in suffixes:
        os.remove(fn+"."+s)
def createGIS(s):
    """Create the daily rainfall point shapefile pair for date `s`.

    Args:
      s: a date-like object supporting strftime

    Returns:
      (shp, dbf) open shapelib/dbflib writers
    """
    # Renamed from `dir`, which shadowed the builtin dir()
    dirname = s.strftime("/wepp/data/rainfall/shape/daily/%Y/%m/")
    fname = s.strftime("%Y%m%d_rain")
    if not os.path.isdir(dirname):
        os.makedirs(dirname)
    dbf = dbflib.create(dirname + fname)
    dbf.add_field("RAINFALL", dbflib.FTDouble, 4, 2)
    shp = shapelib.create(dirname + fname, shapelib.SHPT_POINT)
    return shp, dbf
def createGIS(s):
    """Create the daily rainfall shapefile pair and return (shp, dbf)."""
    folder = s.strftime("/mnt/idep/data/rainfall/shape/daily/%Y/%m/")
    # Make sure the destination directory exists before writing
    if not os.path.isdir(folder):
        os.makedirs(folder)
    path = folder + s.strftime("%Y%m%d_rain")
    dbf = dbflib.create(path)
    dbf.add_field("RAINFALL", dbflib.FTDouble, 5, 2)
    shp = shapelib.create(path, shapelib.SHPT_POINT)
    return shp, dbf
def write_shp(self, newShape):
    """Copy each shape's outer ring into a temp shapefile, then filter
    out small polygons into newShape via clear_small_polygons.

    Args:
      newShape: path of the filtered output shapefile
    """
    tempshp = shapelib.create('temp', self.shpType)
    for b in range(self.shpNum):
        print('\t merging -> %d' % b)
        shpobj = self.shp.read_object(b)
        # Keep only the first (outer) ring when the shape has multiple parts
        if len(shpobj.vertices()) > 1:
            shpobj = shapelib.SHPObject(self.shpType, b,
                                        [shpobj.vertices()[0]])
        tempshp.write_object(b, shpobj)
    # BUG FIX: close once -- the original closed tempshp a second time
    # after the dbf copy.
    tempshp.close()
    shutil.copy(self.buildingSHPFile + '.dbf', 'temp' + '.dbf')
    self.clear_small_polygons('temp', newShape, 20.)
def exportOpticsCluster(points, RD, CD, order, threshold, shapefilename):
    """Export an OPTICS ordering as a point shapefile with cluster labels.

    Points whose reachability distance (RD) is >= threshold are labeled as
    noise (Cluster 0); each contiguous run below the threshold gets the
    next cluster id.

    Args:
      points: sequence of (x, y) coordinates, indexed by point id
      RD: reachability distances, indexed by point id
      CD: core distances, indexed by point id
      order: OPTICS output ordering of point ids
      threshold: reachability cutoff separating clusters from noise
      shapefilename: output .shp path (the .dbf shares the stem)
    """
    shapeType = shapelib.SHPT_POINT
    shapeFile = shapelib.create(shapefilename, shapeType)
    dbf = dbflib.create(shapefilename[:-3] + 'dbf')
    dbf.add_field('ID', dbflib.FTInteger, 50, 0)
    dbf.add_field('RD', dbflib.FTDouble, 50, 10)
    dbf.add_field('CD', dbflib.FTDouble, 50, 10)
    dbf.add_field('Order', dbflib.FTInteger, 50, 0)
    dbf.add_field('Cluster', dbflib.FTInteger, 50, 0)
    # Cleaned up: the original had dead re-assignments (noise=False then 0,
    # cluster=noise then 1) and an unused clusterid; `pid` replaces the
    # loop variable `id`, which shadowed the builtin.
    in_noise = False
    cluster = 1
    for i, pid in enumerate(order):
        coord = list(points[pid])
        coord.append(0)  # pad a zero z-coordinate
        obj = shapelib.SHPObject(shapeType, -1, [[tuple(coord)]])
        shapeFile.write_object(-1, obj)
        if RD[pid] >= threshold:
            if not in_noise:
                # First noise point after a cluster: bump the id so the
                # next cluster below threshold gets a fresh label.
                cluster += 1
                in_noise = True
            dbf.write_record(i, {'ID': pid, 'RD': RD[pid], 'CD': CD[pid],
                                 'Order': i, 'Cluster': 0})
            continue
        dbf.write_record(i, {'ID': pid, 'RD': RD[pid], 'CD': CD[pid],
                             'Order': i, 'Cluster': cluster})
        in_noise = False
    shapeFile.close()
    dbf.close()
def write_point_shape_out(self, pointShapeFileName, pointsList):
    """Write every point in pointsList to a point shapefile with ID/x/y.

    Args:
      pointShapeFileName: output shapefile path
      pointsList: iterable of point sequences; each point is an (x, y) pair
    """
    w2shp = shapelib.create(pointShapeFileName, shapelib.SHPT_POINT)
    w2dbf = dbflib.create(pointShapeFileName)
    w2dbf.add_field('ID', dbflib.FTInteger, 10, 0)  # record id plus x, y coordinates
    w2dbf.add_field('x', dbflib.FTDouble, 16, 2)
    w2dbf.add_field('y', dbflib.FTDouble, 16, 2)
    i = 0
    for pts in pointsList:
        for pt in pts:
            shpObj = shapelib.SHPObject(shapelib.SHPT_POINT, i, [[pt]])
            w2shp.write_object(i, shpObj)
            # One write with all three fields, instead of the original's
            # three partial write_record calls per record.
            w2dbf.write_record(i, {'ID': i, 'x': pt[0], 'y': pt[1]})
            i += 1
    w2shp.close()
    w2dbf.close()
def simulateRampCapacity(rampFilePath, outFilePath):
    """Copy ramp points to outFilePath with randomly simulated capacities.

    EBCap is drawn uniformly from [500, 5000]; VesCap is drawn from 0..5
    with the given probability weights.
    # NOTE(review): randint/choice look like numpy.random functions -- confirm
    # against the file's imports.
    """
    rampShp = shapelib.ShapeFile(rampFilePath)
    rampDbf = dbflib.DBFFile(rampFilePath.split('.')[0] + '.dbf')
    outShp = shapelib.create(outFilePath, shapelib.SHPT_POINT)
    outDbf = dbflib.create(outFilePath.split('.')[0] + '.dbf')
    outDbf.add_field("ID", dbflib.FTInteger, 10, 0)
    outDbf.add_field("EBCap", dbflib.FTInteger, 30, 0)
    outDbf.add_field("VesCap", dbflib.FTInteger, 10, 0)
    tn = rampDbf.record_count()
    # One random draw per ramp record, generated up front
    ebCap = randint(500, high=5001, size=tn)
    vesCap = choice(6, tn, p=[0.5, 0.2, 0.1, 0.1, 0.05, 0.05])
    for j in range(tn):
        rampObj = rampShp.read_object(j)
        recordDict = rampDbf.read_record(j)
        # Sequence form: values in the same order the fields were added
        newDict = [j, ebCap[j], vesCap[j]]
        outShp.write_object(-1, rampObj)
        outDbf.write_record(j, newDict)
def shpCreateFile(fileName, shptype, fields):
    """
    Create a shapefile (and a corresponding dbf file) of the give type,
    containing the given fields.
    Input: fileName - full path (excluding extension!) to the shapefile
           to create.
           shptype - shapelib object type (these are integer values,
           but you can also use the shapelib.SHPT_ value).
           fields - a dictionary of dictionaries with field names as
           keys, and each sub-dictionary containing keys of 'Type',
           'Length','Precision' and 'Data':
           'Type' must be one of the following integer values:
                    0 - strings
                    1 - integers
                    2 - doubles
                    4 - Invalid
    Output: shapefile and dbffile objects
    """
    try:
        fshp = shapelib.create(fileName, shptype)
    except IOError:
        logger.critical("Failed to create shapefile: %s.shp" % fileName)
        # BUG FIX: bare `raise` re-raises the original exception with its
        # message and traceback; `raise IOError` replaced it with an empty one.
        raise
    try:
        fdbf = dbflib.create(fileName)
    except IOError:
        logger.critical("Failed to create dbffile: %s.dbf" % fileName)
        raise
    fieldNames = fields.keys()
    for f in sorted(fieldNames):
        fieldType = fields[f]['Type']
        fieldLength = fields[f]['Length']
        # Force the precision to be zero unless the field is a double
        if fieldType == 2:
            fieldPrec = fields[f]['Precision']
        else:
            fieldPrec = 0
        fdbf.add_field(f, fieldType, fieldLength, fieldPrec)
    return fshp, fdbf
def clear_small_polygons(self, shapeFile, newShapeFile, lenLimit):
    """Copy shapes whose outline length exceeds lenLimit into newShapeFile.

    Replicates the dbf schema and records; only the first ring of each
    surviving shape is kept.  Records are re-numbered compactly.

    Args:
      shapeFile: input shapefile path (shp + dbf share the stem)
      newShapeFile: output shapefile path
      lenLimit: minimum outline length (as computed by length_of_polygon)
    """
    shp = shapelib.ShapeFile(shapeFile)
    dbf = dbflib.open(shapeFile)
    newSHP = shapelib.create(newShapeFile, self.shpType)
    newDBF = dbflib.create(newShapeFile)
    # Clone every field definition (type, name, width, decimals)
    for f in range(dbf.field_count()):
        fi = dbf.field_info(f)
        newDBF.add_field(fi[1], fi[0], fi[2], fi[3])
    bb = 0  # compact output index: only shapes passing the filter advance it
    for b in range(shp.info()[0]):  # info()[0] is the shape count
        sobj = shp.read_object(b)
        rec = dbf.read_record(b)
        if self.length_of_polygon(sobj.vertices()) > lenLimit:
            # Rebuild the object from its first ring only
            shpobj = shapelib.SHPObject(self.shpType, bb,
                                        [sobj.vertices()[0]])
            newSHP.write_object(bb, shpobj)
            newDBF.write_record(bb, rec)
            bb += 1
    shp.close()
    dbf.close()
    newSHP.close()
    newDBF.close()
sts = mx.DateTime.DateTime(2005, 3, 1) ets = mx.DateTime.DateTime(2005, 11, 1) interval = mx.DateTime.RelativeDateTime(days=+7) now = sts twp = {} rs = mydb.query( "SELECT astext(transform(the_geom,4326)) as t, model_twp from iatwp ORDER by model_twp ASC" ).dictresult() for i in range(len(rs)): twp[rs[i]["model_twp"]] = rs[i]["t"] while now < ets: print "Hello Heather, I am here ", now shp = shapelib.create("weeklysm/%ssm" % (now.strftime("%Y%m%d"),), shapelib.SHPT_POLYGON) dbf = dbflib.create("weeklysm/%ssm" % (now.strftime("%Y%m%d"),)) dbf.add_field("S0-10CM", dbflib.FTDouble, 8, 2) dbf.add_field("S10-20CM", dbflib.FTDouble, 8, 2) dbf.add_field("VSM", dbflib.FTDouble, 8, 2) rs = mydb.query( "select model_twp, avg(vsm) as v, \ avg(s10cm) as s10, avg(s20cm) as s20 from \ waterbalance_by_twp WHERE valid >= '%s' and valid < '%s' \ GROUP by model_twp ORDER by model_twp ASC" % (now.strftime("%Y-%m-%d"), (now + interval).strftime("%Y-%m-%d")) ).dictresult() for i in range(len(rs)): m = rs[i]["model_twp"]
vil | smallint | max_dbz | smallint | max_dbz_height | real | top | real | drct | smallint | sknt | smallint | valid | timestamp with time zone | """ os.chdir("/tmp") # Delete anything older than 20 minutes now = mx.DateTime.gmt() eTS = mx.DateTime.gmt() - mx.DateTime.RelativeDateTime(minutes=+20) shp = shapelib.create("current_nexattr", shapelib.SHPT_POINT) dbf = dbflib.create("current_nexattr") dbf.add_field("VALID", dbflib.FTString, 12, 0) dbf.add_field("STORM_ID", dbflib.FTString, 2, 0) dbf.add_field("NEXRAD", dbflib.FTString, 3, 0) dbf.add_field("AZIMUTH", dbflib.FTInteger, 3, 0) dbf.add_field("RANGE", dbflib.FTInteger, 3, 0) dbf.add_field("TVS", dbflib.FTString, 10, 0) dbf.add_field("MESO", dbflib.FTString, 10, 0) dbf.add_field("POSH", dbflib.FTInteger, 3, 0) dbf.add_field("POH", dbflib.FTInteger, 3, 0) dbf.add_field("MAX_SIZE", dbflib.FTDouble, 5, 2) dbf.add_field("VIL", dbflib.FTInteger, 3, 0) dbf.add_field("MAX_DBZ", dbflib.FTInteger, 3, 0) dbf.add_field("MAX_DBZ_H", dbflib.FTDouble, 5, 2)
magnitude | real | city | character varying(32) | county | character varying(32) | state | character(2) | source | character varying(32) | remark | text | wfo | character(3) | geom | geometry | typetext | geometry | """ # We set one minute into the future, so to get expiring warnings # out of the shapefile eTS = mx.DateTime.gmt() + mx.DateTime.RelativeDateTime(minutes=+1) shp = shapelib.create("lsr_24hour", shapelib.SHPT_POINT) dbf = dbflib.create("lsr_24hour") dbf.add_field("VALID", dbflib.FTString, 12, 0) dbf.add_field("MAG", dbflib.FTDouble, 8, 2) dbf.add_field("WFO", dbflib.FTString, 3, 0) dbf.add_field("TYPECODE", dbflib.FTString, 1, 0) dbf.add_field("TYPETEXT", dbflib.FTString, 40, 0) dbf.add_field("CITY", dbflib.FTString, 40, 0) dbf.add_field("COUNTY", dbflib.FTString, 40, 0) dbf.add_field("SOURCE", dbflib.FTString, 40, 0) dbf.add_field("REMARK", dbflib.FTString, 200, 0) #sql = "SELECT *, astext(geom) as tgeom from warnings WHERE issue < '%s' and \ sql = """SELECT distinct *, astext(geom) as tgeom from lsrs_%s WHERE
myGDD = gdd[j,i] myPrecip = precip[j,i] res = {'data': [], } res['data'].append({ 'gdd': "%.0f" % (myGDD,), 'precip': "%.1f" % (myPrecip,), 'latitude': "%.4f" % (lat,), 'longitude': "%.4f" % (lon,) }) print 'Content-type: text/plain\n' print json.dumps( res ) if format == 'shp': # Time to create the shapefiles fp = "iemre_%s_%s" % (ts0.strftime("%Y%m%d"), ts1.strftime("%Y%m")) shp = shapelib.create("%s.shp" % (fp,), shapelib.SHPT_POLYGON) for x in iemre.XAXIS: for y in iemre.YAXIS: obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1, [[(x,y),(x,y+iemre.DY),(x+iemre.DX,y+iemre.DY), (x+iemre.DX,y),(x,y)]]) shp.write_object(-1, obj) del(shp) dbf = dbflib.create(fp) dbf.add_field("GDD", dbflib.FTDouble, 10, 2) dbf.add_field("PREC_IN", dbflib.FTDouble, 10, 2) cnt = 0 for i in range(len(iemre.XAXIS)):
#!/usr/bin/env python # Need something to generate shapes! import wellknowntext, pg, shapelib from Scientific.IO.ArrayIO import * mydb = pg.connect("wepp") rs = mydb.query("SELECT hrap_i, transform(the_geom, 4326) as the_geom from hrap_utm ORDER by hrap_i ASC").dictresult() shp = shapelib.create("hrap_polygon", shapelib.SHPT_POLYGON) for i in range(len(rs)): s = rs[i]["the_geom"] f = wellknowntext.convert_well_known_text(s) obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1, f ) shp.write_object(-1, obj) lats = readFloatArray("/wepp/GIS/lats.dat") lons = readFloatArray("/wepp/GIS/lons.dat") shp2 = shapelib.create("hrap_point", shapelib.SHPT_POINT) for row in range(134): for col in range(173): obj = shapelib.SHPObject(shapelib.SHPT_POINT, 1, [[(lons[row][col], lats[row][col])]] ) shp2.write_object(-1, obj)
dbf = dbflib.create("coop_" + ts) dbf.add_field("SID", dbflib.FTString, 5, 0) dbf.add_field("SITE_NAME", dbflib.FTString, 40, 0) dbf.add_field("ELEV_M", dbflib.FTDouble, 10, 2) dbf.add_field("YYYYMMDD", dbflib.FTString, 8, 0) dbf.add_field("HHMM", dbflib.FTString, 4, 0) dbf.add_field("HI_T_F", dbflib.FTInteger, 10, 0) dbf.add_field("LO_T_F", dbflib.FTInteger, 10, 0) dbf.add_field("PREC", dbflib.FTDouble, 10, 2) dbf.add_field("SNOW", dbflib.FTDouble, 10, 2) dbf.add_field("SDEPTH", dbflib.FTDouble, 10, 2) dbf.add_field("PMONTH", dbflib.FTDouble, 10, 2) dbf.add_field("SMONTH", dbflib.FTDouble, 10, 2) shp = shapelib.create("coop_" + ts, shapelib.SHPT_POINT) j = 0 for sid in cob.keys(): obj = shapelib.SHPObject(shapelib.SHPT_POINT, j, [[(cob[sid]["LON"], cob[sid]["LAT"])]]) shp.write_object(-1, obj) # print id, cob[sid] if cob[sid]["TMPX"] < 0: cob[sid]["TMPX"] = -99.0 if cob[sid]["TMPN"] < 0: cob[sid]["TMPN"] = -99.0 if cob[sid]["P24I"] < 0: cob[sid]["P24I"] = -99.0 if cob[sid]["SNOW"] < 0: cob[sid]["SNOW"] = -99.0
if 1: file = "USGS_sGA.dbf" dbf = dbflib.create(file) dbf.add_field("LON", dbflib.FTDouble, 10, 4) dbf.add_field("LAT", dbflib.FTDouble, 10, 4) dbf.add_field("CODE", dbflib.FTString, 8, 0) dbf.add_field("NAME", dbflib.FTString, 40, 0) dbf.close() # dbf = dbflib.open(file, "r+b") for (i, record) in enumerate(USGS_SE + USGS_SW): dbf.write_record(i, record) dbf.close() # filename = "USGS_sGA.shp" outfile = shapelib.create(filename, shapelib.SHPT_POINT) for (i, record) in enumerate(USGS_SE + USGS_SW): obj = shapelib.SHPObject(shapelib.SHPT_POINT, i, [[(record[0], record[1])]]) outfile.write_object(-1, obj) outfile.close() if 1: file = "COAPS_sGA.dbf" dbf = dbflib.create(file) dbf.add_field("LON", dbflib.FTDouble, 10, 4) dbf.add_field("LAT", dbflib.FTDouble, 10, 4) dbf.add_field("CODE", dbflib.FTString, 8, 0) dbf.add_field("NAME", dbflib.FTString, 40, 0) dbf.close() #
from pyiem import wellknowntext import psycopg2.extras POSTGIS = psycopg2.connect(database='postgis', host='iemdb', user='******') pcursor = POSTGIS.cursor(cursor_factory=psycopg2.extras.DictCursor) pcursor.execute("SET TIME ZONE 'GMT'") # Get CGI vars form = cgi.FormContent() year = int(form["year"][0]) etn = int(form["etn"][0]) fp = "watch_%s_%s" % (year, etn) os.chdir("/tmp/") shp = shapelib.create(fp, shapelib.SHPT_POLYGON) dbf = dbflib.create(fp) dbf.add_field("SIG", dbflib.FTString, 1, 0) dbf.add_field("ETN", dbflib.FTInteger, 4, 0) sql = """select ST_astext(ST_multi(ST_union(ST_SnapToGrid(u.geom,0.0001)))) as tgeom from warnings_%s w JOIN ugcs u on (u.gid = w.gid) WHERE significance = 'A' and phenomena IN ('TO','SV') and eventid = %s and ST_isvalid(u.geom) and issue < ((select issued from watches WHERE num = %s and extract(year from issued) = %s LIMIT 1) + '60 minutes'::interval) """ % (year, etn, etn, year) pcursor.execute(sql) if pcursor.rowcount == 0:
day = mx.DateTime.DateTime(y, m, d, 12, 0) m = int(sys.argv[4]) d = int(sys.argv[5]) day1 = mx.DateTime.DateTime(y, m, d, 12, 0) fname = day.strftime("%Y%m%d") hrap_shapes = {} hrain = {} rs = mydb.query("SELECT transform(the_geom, 4326) as the_geom, hrap_i from hrap_utm").dictresult() for i in range(len(rs)): hrain[ int(rs[i]["hrap_i"]) ] = {"HRAP_I": int(rs[i]["hrap_i"]) } hrap_shapes[ int(rs[i]["hrap_i"]) ] = rs[i]["the_geom"] shp = shapelib.create(fname, shapelib.SHPT_POLYGON) dbf = dbflib.create(fname) dbf.add_field("HRAP_I", dbflib.FTInteger, 8, 0) dbf.add_field("RAINFALL", dbflib.FTDouble, 5, 2) interval = mx.DateTime.RelativeDateTime(minutes=+15) now = day d = 0 while (now <= day1): now += interval gts = now.gmtime() fp = gts.strftime("/wepp/data/rainfall/product/%Y/%Y%m%d/IA%Y%m%d_%H%M.dat")
def export_shapefile(txn, tp):
    """Export a Shapefile of Road Conditions

    Writes iaroad_cond.{shp,shx,dbf,prj} into /tmp, zips them, pushes the
    zip into LDM via pqinsert, then removes the temporary files.

    Args:
      txn: database cursor with roads_base/roads_current tables
      tp: local timestamp; +6 hours gives the UTC product time
    """
    os.chdir("/tmp")
    dbf = dbflib.create("iaroad_cond")
    dbf.add_field("SEGID", dbflib.FTInteger, 6, 0)
    dbf.add_field("MAJOR", dbflib.FTString, 10, 0)
    dbf.add_field("MINOR", dbflib.FTString, 128, 0)
    dbf.add_field("US1", dbflib.FTInteger, 4, 0)
    dbf.add_field("ST1", dbflib.FTInteger, 4, 0)
    dbf.add_field("INT1", dbflib.FTInteger, 4, 0)
    dbf.add_field("TYPE", dbflib.FTInteger, 4, 0)
    dbf.add_field("VALID", dbflib.FTString, 12, 0)
    dbf.add_field("COND_CODE", dbflib.FTInteger, 4, 0)
    dbf.add_field("COND_TXT", dbflib.FTString, 120, 0)
    dbf.add_field("BAN_TOW", dbflib.FTString, 1, 0)
    dbf.add_field("LIM_VIS", dbflib.FTString, 1, 0)
    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)
    txn.execute("""select b.*, c.*, ST_astext(b.geom) as bgeom from roads_base b, roads_current c WHERE b.segid = c.segid and valid is not null and b.geom is not null""")
    i = 0
    for row in txn:
        s = row["bgeom"]
        # WKT -> shapelib vertex lists
        f = wellknowntext.convert_well_known_text(s)
        valid = row["valid"]
        d = {}
        d["SEGID"] = row["segid"]
        d["MAJOR"] = row["major"]
        d["MINOR"] = row["minor"]
        d["US1"] = row["us1"]
        d["ST1"] = row["st1"]
        d["INT1"] = row["int1"]
        d["TYPE"] = row["type"]
        d["VALID"] = valid.strftime("%Y%m%d%H%M")
        d["COND_CODE"] = row["cond_code"]
        d["COND_TXT"] = row["raw"]
        # 'T'/'F' single-character flags from the boolean columns
        d["BAN_TOW"] = str(row["towing_prohibited"])[0]
        d["LIM_VIS"] = str(row["limited_vis"])[0]
        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f)
        shp.write_object(-1, obj)
        dbf.write_record(i, d)
        del (obj)
        i += 1
    # del triggers the writers' destructors, which close/flush the files
    del (shp)
    del (dbf)
    z = zipfile.ZipFile("iaroad_cond.zip", 'w')
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    o = open('iaroad_cond.prj', 'w')
    o.write(EPSG26915)
    o.close()
    z.write("iaroad_cond.prj")
    z.close()
    utc = tp + datetime.timedelta(hours=6)
    subprocess.call(
        ("/home/ldm/bin/pqinsert -p 'zip ac %s "
         "gis/shape/26915/ia/iaroad_cond.zip "
         "GIS/iaroad_cond_%s.zip zip' iaroad_cond.zip"
         "") % (utc.strftime("%Y%m%d%H%M"), utc.strftime("%Y%m%d%H%M")),
        shell=True)
    for suffix in ['shp', 'shx', 'dbf', 'prj', 'zip']:
        os.unlink("iaroad_cond.%s" % (suffix, ))
def export_shapefile(txn, tp):
    """Export a Shapefile of Road Conditions

    Writes iaroad_cond.{shp,shx,dbf,prj} into /tmp, zips them, pushes the
    zip into LDM via pqinsert, then removes the temporary files.

    Args:
      txn: database cursor with roads_base/roads_current tables
      tp: local timestamp; +6 hours gives the UTC product time
    """
    os.chdir("/tmp")
    dbf = dbflib.create("iaroad_cond")
    dbf.add_field("SEGID", dbflib.FTInteger, 4, 0)
    dbf.add_field("MAJOR", dbflib.FTString, 10, 0)
    dbf.add_field("MINOR", dbflib.FTString, 128, 0)
    dbf.add_field("US1", dbflib.FTInteger, 4, 0)
    dbf.add_field("ST1", dbflib.FTInteger, 4, 0)
    dbf.add_field("INT1", dbflib.FTInteger, 4, 0)
    dbf.add_field("TYPE", dbflib.FTInteger, 4, 0)
    dbf.add_field("VALID", dbflib.FTString, 12, 0)
    dbf.add_field("COND_CODE", dbflib.FTInteger, 4, 0)
    dbf.add_field("COND_TXT", dbflib.FTString, 120, 0)
    dbf.add_field("BAN_TOW", dbflib.FTString, 1, 0)
    dbf.add_field("LIM_VIS", dbflib.FTString, 1, 0)
    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)
    txn.execute("""select b.*, c.*, ST_astext(b.geom) as bgeom from roads_base b, roads_current c WHERE b.segid = c.segid and valid is not null and b.geom is not null""")
    i = 0
    for row in txn:
        s = row["bgeom"]
        # WKT -> shapelib vertex lists
        f = wellknowntext.convert_well_known_text(s)
        valid = row["valid"]
        d = {}
        d["SEGID"] = row["segid"]
        d["MAJOR"] = row["major"]
        d["MINOR"] = row["minor"]
        d["US1"] = row["us1"]
        d["ST1"] = row["st1"]
        d["INT1"] = row["int1"]
        d["TYPE"] = row["type"]
        d["VALID"] = valid.strftime("%Y%m%d%H%M")
        d["COND_CODE"] = row["cond_code"]
        d["COND_TXT"] = row["raw"]
        # 'T'/'F' single-character flags from the boolean columns
        d["BAN_TOW"] = str(row["towing_prohibited"])[0]
        d["LIM_VIS"] = str(row["limited_vis"])[0]
        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f)
        shp.write_object(-1, obj)
        dbf.write_record(i, d)
        del(obj)
        i += 1
    # del triggers the writers' destructors, which close/flush the files
    del(shp)
    del(dbf)
    z = zipfile.ZipFile("iaroad_cond.zip", 'w')
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    o = open('iaroad_cond.prj', 'w')
    o.write(EPSG26915)
    o.close()
    z.write("iaroad_cond.prj")
    z.close()
    utc = tp + datetime.timedelta(hours=6)
    subprocess.call(("/home/ldm/bin/pqinsert -p 'zip ac %s "
                     "gis/shape/26915/ia/iaroad_cond.zip "
                     "GIS/iaroad_cond_%s.zip zip' iaroad_cond.zip"
                     "") % (utc.strftime("%Y%m%d%H%M"),
                            utc.strftime("%Y%m%d%H%M")),
                    shell=True)
    for suffix in ['shp', 'shx', 'dbf', 'prj', 'zip']:
        os.unlink("iaroad_cond.%s" % (suffix,))
def real_parser(txn, raw):
    """Actually do the heavy lifting of parsing this product

    Parses the STOIA road-condition text product, updates roads_current,
    archives into the seasonal log table, and regenerates the
    iaroad_cond shapefile product.

    Args:
      txn (cursor): psycopg2 database transaction
      raw (str): the raw text that needs parsing
    """
    # Load up dictionary of Possible Road Conditions
    if len(ROADS) == 0:
        log.msg("Initializing ROADS and CONDITIONS dicts...")
        init_dicts(txn)
    tp = TextProduct(raw)
    log.msg("PROCESSING STOIA: %s" % (tp.valid,))
    # Lets start our processing by looking for the first * and then
    # processing after finding it
    lines = re.split("\n", raw[raw.find("*") :])
    for line in lines:
        # Fixed-column heuristic: skip short lines, section headers, and
        # lines with nothing in columns 30-40
        if len(line) < 40 or line[0] == "*" or line[30:40].strip() == "":
            continue
        data = line[7:]
        # Find the right most ) and chomp everything up until it
        pos = data.rfind(")")
        meat = data[: pos + 1].upper()          # road name/key
        condition = data[(pos + 1) :].upper().strip()  # condition text
        if meat.strip() == "":
            continue
        if meat not in ROADS:
            log.msg("Unknown road: %s\n" % (meat,))
            continue
        road_code = figureCondition(txn, condition)
        towingProhibited = condition.find("TOWING PROHIBITED") > -1
        limitedVis = condition.find("LIMITED VIS.") > -1
        segid = ROADS[meat]["segid"]
        txn.execute(
            """
        UPDATE roads_current SET cond_code = %s, valid = %s, towing_prohibited = %s, limited_vis = %s, raw = %s WHERE segid = %s
        """,
            (road_code, tp.valid, towingProhibited, limitedVis, condition,
             segid),
        )
    # Copy the currents table over to the log... HARD CODED
    # Winter seasons span the new year: Jul-Dec belongs to (year, year+1)
    if tp.valid.month < 7:
        logtable = "roads_%s_%s_log" % (tp.valid.year - 1, tp.valid.year)
    else:
        logtable = "roads_%s_%s_log" % (tp.valid.year, tp.valid.year + 1)
    txn.execute(
        """ INSERT into """ + logtable + """ SELECT * from roads_current WHERE valid = %s """,
        (tp.valid,),
    )
    log.msg("Copied %s rows into %s table" % (txn.rowcount, logtable))
    # Now we generate a shapefile....
    dbf = dbflib.create("iaroad_cond")
    dbf.add_field("SEGID", dbflib.FTInteger, 4, 0)
    dbf.add_field("MAJOR", dbflib.FTString, 10, 0)
    dbf.add_field("MINOR", dbflib.FTString, 128, 0)
    dbf.add_field("US1", dbflib.FTInteger, 4, 0)
    dbf.add_field("ST1", dbflib.FTInteger, 4, 0)
    dbf.add_field("INT1", dbflib.FTInteger, 4, 0)
    dbf.add_field("TYPE", dbflib.FTInteger, 4, 0)
    dbf.add_field("VALID", dbflib.FTString, 12, 0)
    dbf.add_field("COND_CODE", dbflib.FTInteger, 4, 0)
    dbf.add_field("COND_TXT", dbflib.FTString, 120, 0)
    dbf.add_field("BAN_TOW", dbflib.FTString, 1, 0)
    dbf.add_field("LIM_VIS", dbflib.FTString, 1, 0)
    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)
    txn.execute(
        """select b.*, c.*, ST_astext(b.geom) as bgeom from roads_base b, roads_current c WHERE b.segid = c.segid and valid is not null and b.geom is not null"""
    )
    i = 0
    for row in txn:
        s = row["bgeom"]
        # WKT -> shapelib vertex lists
        f = wellknowntext.convert_well_known_text(s)
        valid = row["valid"]
        d = {}
        d["SEGID"] = row["segid"]
        d["MAJOR"] = row["major"]
        d["MINOR"] = row["minor"]
        d["US1"] = row["us1"]
        d["ST1"] = row["st1"]
        d["INT1"] = row["int1"]
        d["TYPE"] = row["type"]
        d["VALID"] = valid.strftime("%Y%m%d%H%M")
        d["COND_CODE"] = row["cond_code"]
        d["COND_TXT"] = row["raw"]
        # 'T'/'F' single-character flags from the boolean columns
        d["BAN_TOW"] = str(row["towing_prohibited"])[0]
        d["LIM_VIS"] = str(row["limited_vis"])[0]
        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f)
        shp.write_object(-1, obj)
        dbf.write_record(i, d)
        del (obj)
        i += 1
    # del triggers the writers' destructors, which close/flush the files
    del (shp)
    del (dbf)
    z = zipfile.ZipFile("iaroad_cond.zip", "w")
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    o = open("iaroad_cond.prj", "w")
    o.write(EPSG26915)
    o.close()
    z.write("iaroad_cond.prj")
    z.close()
    utc = tp.valid.astimezone(pytz.timezone("UTC"))
    subprocess.call(
        (
            "/home/ldm/bin/pqinsert -p 'zip ac %s "
            "gis/shape/26915/ia/iaroad_cond.zip "
            "GIS/iaroad_cond_%s.zip zip' iaroad_cond.zip"
            ""
        )
        % (utc.strftime("%Y%m%d%H%M"), utc.strftime("%Y%m%d%H%M")),
        shell=True,
    )
    for suffix in ["shp", "shx", "dbf", "prj", "zip"]:
        os.unlink("iaroad_cond.%s" % (suffix,))
dbf = dbflib.create("coop_"+ts) dbf.add_field("SID", dbflib.FTString, 5, 0) dbf.add_field("SITE_NAME", dbflib.FTString, 64, 0) dbf.add_field("ELEV_M", dbflib.FTDouble, 10, 2) dbf.add_field("YYYYMMDD", dbflib.FTString, 8, 0) dbf.add_field("HHMM", dbflib.FTString, 4, 0) dbf.add_field("HI_T_F", dbflib.FTInteger, 10, 0) dbf.add_field("LO_T_F", dbflib.FTInteger, 10, 0) dbf.add_field("PREC", dbflib.FTDouble, 10, 2) dbf.add_field("SNOW", dbflib.FTDouble, 10, 2) dbf.add_field("SDEPTH", dbflib.FTDouble, 10, 2) dbf.add_field("PMONTH", dbflib.FTDouble, 10, 2) dbf.add_field("SMONTH", dbflib.FTDouble, 10, 2) shp = shapelib.create("coop_"+ts, shapelib.SHPT_POINT) j = 0 for sid in cob.keys(): obj = shapelib.SHPObject(shapelib.SHPT_POINT, j, [[(cob[sid]["LON"], cob[sid]["LAT"])]]) shp.write_object(-1, obj) # print id, cob[sid] if cob[sid]["TMPX"] < 0: cob[sid]["TMPX"] = -99. if cob[sid]["TMPN"] < 0: cob[sid]["TMPN"] = -99. if cob[sid]["P24I"] < 0: cob[sid]["P24I"] = -99. if cob[sid]["SNOW"] < 0:
myGDD = gdd[j, i] myPrecip = precip[j, i] res = {'data': [], } res['data'].append({ 'gdd': "%.0f" % (myGDD,), 'precip': "%.1f" % (myPrecip,), 'latitude': "%.4f" % (lat,), 'longitude': "%.4f" % (lon,) }) sys.stdout.write('Content-type: application/json\n\n') sys.stdout.write(json.dumps(res)) if format == 'shp': # Time to create the shapefiles fp = "iemre_%s_%s" % (ts0.strftime("%Y%m%d"), ts1.strftime("%Y%m")) shp = shapelib.create("%s.shp" % (fp,), shapelib.SHPT_POLYGON) for x in iemre.XAXIS: for y in iemre.YAXIS: obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1, [[(x, y), (x, y+iemre.DY), (x+iemre.DX, y+iemre.DY), (x+iemre.DX, y), (x, y)]]) shp.write_object(-1, obj) del(shp) dbf = dbflib.create(fp) dbf.add_field("GDD", dbflib.FTDouble, 10, 2) dbf.add_field("PREC_IN", dbflib.FTDouble, 10, 2) cnt = 0
lons = [0]*points lat0 = 40.000 lon0 = -97.000 latn = 44.000 lonn = -89.000 x = 173.0000 y = 134.0000 dlat = (lat0 - latn ) / y dlon = (lon0 - lonn ) / x dbf = dbflib.create("precip_points") dbf.add_field("SID", dbflib.FTString, 1, 0) dbf.add_field("SITE_NAME", dbflib.FTString, 1, 0) shp = shapelib.create("precip_points", shapelib.SHPT_POINT) for i in range(points): row = i / int(x) col = i % int(x) lat = lat0 - ( row * dlat ) lon = lon0 - ( col * dlon ) mydb.query("INSERT into precip_points(point, geom) values( \ "+ str(i) +", 'SRID=-1;POINT("+ str(lon) +" "+ str(lat) +");')") obj = shapelib.SHPObject(shapelib.SHPT_POINT, i, [[(lon, lat)]] ) shp.write_object(-1, obj) dbf.write_record(i, ("b", "b") ) del obj
conv_gml = gml_sd_import(input_schema.read()) except Exception, strerror: return "", strerror conv_gml.setGeo_name(name) #fill geometry conv_gml = gml_import(input_gml.read(),conv_gml) #generate output path output_path = os.path.join(FILES_PATH, conv_gml.getGeo_name()) #prepare outfile #SHP Point if conv_gml.getFeat_type() == '1': outfile = shapelib.create(output_path, shapelib.SHPT_POINT) for m in range(len(conv_gml.getShp_records())): vertices_up = [] temp_a = vertices_up.append for n in range (len(conv_gml.getShp_records()[m])): temp_a((conv_gml.getShp_records()[m][n])) # print vertices_up obj = shapelib.SHPObject(shapelib.SHPT_POINT, 1,vertices_up) outfile.write_object(-1, obj) #SHP Line elif conv_gml.getFeat_type() == '3': outfile = shapelib.create(output_path, shapelib.SHPT_ARC) for m in range(len(conv_gml.getShp_records())): vertices_up = [] temp_a = vertices_up.append for n in range (len(conv_gml.getShp_records()[m])):
def generate_shapefile(ts):
    """Generate and distribute a shapefile of Iowa road conditions.

    Joins roads_base/roads_current via the module-level ``pcursor``,
    writes iaroad_cond.{shp,shx,dbf}, bundles them with the EPSG:26915
    .prj into iaroad_cond.zip, hands the zip to pqinsert for LDM
    distribution, then removes the local files.

    @param ts: timezone-aware timestamp used to stamp the product name
    """
    # DBF schema: (name, field type, width); all decimals are 0.
    dbf = dbflib.create("iaroad_cond")
    for fname, ftype, width in [
            ("SEGID", dbflib.FTInteger, 4),
            ("MAJOR", dbflib.FTString, 10),
            ("MINOR", dbflib.FTString, 40),
            ("US1", dbflib.FTInteger, 4),
            ("ST1", dbflib.FTInteger, 4),
            ("INT1", dbflib.FTInteger, 4),
            ("TYPE", dbflib.FTInteger, 4),
            ("VALID", dbflib.FTString, 12),
            ("COND_CODE", dbflib.FTInteger, 4),
            ("COND_TXT", dbflib.FTString, 120),
            ("BAN_TOW", dbflib.FTString, 1),
            ("LIM_VIS", dbflib.FTString, 1)]:
        dbf.add_field(fname, ftype, width, 0)

    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)

    pcursor.execute("""select b.*, c.*, astext(b.geom) as bgeom from
        roads_base b, roads_current c WHERE b.segid = c.segid""")
    for i, row in enumerate(pcursor):
        # WKT -> vertex lists suitable for an ARC SHPObject
        f = wellknowntext.convert_well_known_text(row["bgeom"])
        d = {"SEGID": row["segid"],
             "MAJOR": row["major"],
             "MINOR": row["minor"],
             "US1": row["us1"],
             "ST1": row["st1"],
             "INT1": row["int1"],
             "TYPE": row["type"],
             "VALID": row["valid"].strftime("%Y%m%d%H%M"),
             "COND_CODE": row["cond_code"],
             "COND_TXT": row["raw"],
             # first character of the stringified flag values
             "BAN_TOW": str(row["towing_prohibited"])[0],
             "LIM_VIS": str(row["limited_vis"])[0]}
        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f)
        shp.write_object(-1, obj)
        dbf.write_record(i, d)

    # Explicitly close (flush) both files before zipping them up.
    shp.close()
    dbf.close()

    z = zipfile.ZipFile("iaroad_cond.zip", 'w')
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    shutil.copyfile("/mesonet/data/gis/meta/26915.prj", "iaroad_cond.prj")
    z.write("iaroad_cond.prj")
    z.close()

    utc = ts.astimezone(pytz.timezone("UTC"))
    stamp = utc.strftime("%Y%m%d%H%M")
    # Argument-list form: no shell, no quoting pitfalls with the
    # embedded product-name string.
    subprocess.call([
        "/home/ldm/bin/pqinsert", "-p",
        ("zip ac %s gis/shape/26915/ia/iaroad_cond.zip "
         "GIS/iaroad_cond_%s.zip zip") % (stamp, stamp),
        "iaroad_cond.zip"])

    # Clean up every local artifact we created above.
    for suffix in ['shp', 'shx', 'dbf', 'prj', 'zip']:
        os.unlink("iaroad_cond.%s" % (suffix,))
# NOTE(review): fragment -- ts, interval, monthly, fp, form, sql and
# wcursor come from code above this chunk, and the loop at the end is
# truncated; TODO confirm against the full file.
day2 = (ts + mx.DateTime.RelativeDateTime(days=interval)).strftime("%Y%m%d")
if monthly is not None:
    # Monthly mode: aggregate the daily township results over one
    # calendar month.  The interpolated values come from strftime(),
    # not from user input.
    sql = """SELECT model_twp, sum(avg_precip) as avg_precip, sum(avg_loss) as avg_loss, sum(avg_runoff) as avg_runoff from results_by_twp WHERE valid BETWEEN '%s-01' and ('%s-01'::date + '1 month'::interval) GROUP by model_twp""" % (ts.strftime("%Y-%m"), ts.strftime("%Y-%m") )
    # First and last day of the month, for the DAY_STA/DAY_END fields.
    day1 = (ts + mx.DateTime.RelativeDateTime(day=1)).strftime("%Y%m%d")
    day2 = (ts + mx.DateTime.RelativeDateTime(day=1,months=1) -
            mx.DateTime.RelativeDateTime(days=1) ).strftime("%Y%m%d")
wcursor.execute( sql )
# Output geometry depends on the CGI request: points or polygons.
if form.has_key("point"):
    shp = shapelib.create(fp, shapelib.SHPT_POINT)
else:
    shp = shapelib.create(fp, shapelib.SHPT_POLYGON)
dbf = dbflib.create(fp)
dbf.add_field("DAY_STA", dbflib.FTString, 8, 0)
dbf.add_field("DAY_END", dbflib.FTString, 8, 0)
dbf.add_field("MODL_TWP", dbflib.FTString, 10, 0)
dbf.add_field("PRECIP", dbflib.FTDouble, 8, 4)
dbf.add_field("LOSS", dbflib.FTDouble, 8, 4)
dbf.add_field("RUNOFF", dbflib.FTDouble, 8, 4)
i = 0
for row in wcursor:
    m = row['model_twp']
    loss = row['avg_loss']
    # NOTE(review): chunk ends mid-loop; body continues past this view.
    runoff = row['avg_runoff']
    magnitude | real                  |
    city      | character varying(32) |
    county    | character varying(32) |
    state     | character(2)          |
    source    | character varying(32) |
    remark    | text                  |
    wfo       | character(3)          |
    geom      | geometry              |
    typetext  | geometry              |
"""
# NOTE(review): fragment -- the docstring above is truncated at the top of
# this chunk, and mx/shapelib/dbflib are imported above this view.
# We set one minute into the future, so to get expiring warnings
# out of the shapefile
eTS = mx.DateTime.gmt() + mx.DateTime.RelativeDateTime(minutes=+1)
# Point shapefile + attribute table for the last 24 hours of Local
# Storm Reports.
shp = shapelib.create("lsr_24hour", shapelib.SHPT_POINT)
dbf = dbflib.create("lsr_24hour")
dbf.add_field("VALID", dbflib.FTString, 12, 0)
dbf.add_field("MAG", dbflib.FTDouble, 8, 2)
dbf.add_field("WFO", dbflib.FTString, 3, 0)
dbf.add_field("TYPECODE", dbflib.FTString, 1, 0)
dbf.add_field("TYPETEXT", dbflib.FTString, 40, 0)
dbf.add_field("CITY", dbflib.FTString, 40, 0)
dbf.add_field("COUNTY", dbflib.FTString, 40, 0)
dbf.add_field("SOURCE", dbflib.FTString, 40, 0)
dbf.add_field("REMARK", dbflib.FTString, 200, 0)
#sql = "SELECT *, astext(geom) as tgeom from warnings WHERE issue < '%s' and \
# Only the yearly table-partition suffix is interpolated, not user input.
sql = """SELECT distinct *, ST_astext(geom) as tgeom from lsrs_%s WHERE valid > (now() -'1 day'::interval) """ % (eTS.year, )