示例#1
0
def calculateShoreLength(gridFilePath, shoreFilePath, outFilePath):
    """Compute, for every grid cell, the length of shoreline inside that
    cell and write the cells to a new polygon shapefile.

    gridFilePath  -- grid polygon shapefile (.shp); its .dbf must contain
                     an "Impact" field, which is copied through
    shoreFilePath -- shore polygon shapefile; only its first object is used
    outFilePath   -- polygon shapefile to create, with ID/Impact/Length
                     attributes

    Requires shapelib/dbflib and shapely's Polygon to be in scope.
    """
    gridShp = shapelib.ShapeFile(gridFilePath)
    gridDbf = dbflib.DBFFile(gridFilePath.split('.')[0] + '.dbf')
    shoreShp = shapelib.ShapeFile(shoreFilePath)

    outShp = shapelib.create(outFilePath, shapelib.SHPT_POLYGON)
    outDbf = dbflib.create(outFilePath.split('.')[0] + '.dbf')
    outDbf.add_field("ID", dbflib.FTInteger, 10, 0)
    outDbf.add_field("Impact", dbflib.FTDouble, 30, 6)
    outDbf.add_field("Length", dbflib.FTDouble, 30, 6)

    # Ring 0 is the exterior; any further rings are interior holes.
    # (Read the shore object once instead of twice as before.)
    shoreVerts = shoreShp.read_object(0).vertices()
    holes = shoreVerts[1:]
    if holes:
        shoreGeo = Polygon(shoreVerts[0], holes).boundary
    else:
        shoreGeo = Polygon(tuple(shoreVerts[0])).boundary

    for j in range(gridDbf.record_count()):
        gridObj = gridShp.read_object(j)
        gridGeo = Polygon(tuple(gridObj.vertices()[0]))

        # Length of the shoreline clipped to this grid cell.
        shoreLength = gridGeo.intersection(shoreGeo).length
        recordDict = gridDbf.read_record(j)
        outShp.write_object(-1, gridObj)
        outDbf.write_record(j, [j, recordDict["Impact"], shoreLength])

    # Close everything explicitly so all records are flushed to disk
    # (the old version leaked all five handles).
    gridShp.close()
    gridDbf.close()
    shoreShp.close()
    outShp.close()
    outDbf.close()
示例#2
0
def shpCreateDBFFile(fileName, fields):
    """
    Create a dbf file, containing the given fields
    Input: fileName - full path (excluding extension!) to the shapefile
                      to create
           fields - a dictionary of dictionaries with field names as
                    keys, and each sub-dictionary containing keys of
                    'Type','Length','Precision' and 'Data'
                    'Type' must be one of the following integer values:
                    0 - strings
                    1 - integers
                    2 - doubles
                    4 - Invalid
    Output: dbffile object
    """
    try:
        fdbf = dbflib.create(fileName)
    except IOError:
        print "Failed to create dbffile: %s.dbf"%fileName
        return None
    fieldNames = fields.keys()
    for f in sorted(fieldNames):
        fieldType = fields[f]['Type']
        fieldLength = fields[f]['Length']
        # Force the precision to be zero unless the field is a double
        if fieldType == 2:
            fieldPrec = fields[f]['Precision']
        else:
            fieldPrec = 0
        fdbf.add_field(f, fieldType, fieldLength, fieldPrec)

    return fdbf
示例#3
0
def AddBoomField(gridFilePath, sol, outFilePath):
    """Copy a grid shapefile, appending an integer 'Boomed' flag field.

    gridFilePath -- source polygon shapefile (.shp path)
    sol          -- collection of record indices that were boomed; those
                    records get Boomed=1, all others Boomed=0
    outFilePath  -- destination polygon shapefile to create
    """
    gridSHP = shapelib.ShapeFile(gridFilePath)
    gridDBF = dbflib.DBFFile(gridFilePath.split('.')[0] + '.dbf')

    outSHP = shapelib.create(outFilePath, shapelib.SHPT_POLYGON)
    outDBF = dbflib.create(outFilePath.split('.')[0] + '.dbf')

    # Replicate the source attribute schema verbatim.  field_info()
    # returns (type, name, width, precision); call it once per field
    # instead of four times as before.
    for j in range(gridDBF.field_count()):
        fType, fName, fWidth, fPrec = gridDBF.field_info(j)
        outDBF.add_field(fName, fType, fWidth, fPrec)

    outDBF.add_field('Boomed', dbflib.FTInteger, 5, 0)

    for j in range(gridDBF.record_count()):
        vert = gridSHP.read_object(j)
        recordDict = gridDBF.read_record(j)
        recordDict['Boomed'] = 1 if j in sol else 0
        # The output index always equalled the input index, so the
        # separate ``jj`` counter was redundant.
        outSHP.write_object(-1, vert)
        outDBF.write_record(j, recordDict)

    # Flush and release the files (previously leaked).
    gridSHP.close()
    gridDBF.close()
    outSHP.close()
    outDBF.close()
示例#4
0
文件: BGL_8.2.2.py 项目: wgwei/BGSL
def mergeDBF(receiverFile, resultFile, mergedFile, ID):
    """Merge the attribute tables of two dbf files into one, joining
    records on the key field named by *ID*.

    receiverFile -- dbf whose records drive the merge (all are copied)
    resultFile   -- dbf supplying extra columns, matched by the ID field
    mergedFile   -- dbf file to create
    ID           -- name of the key field present in both inputs
    """
    DBFin1 = dbflib.open(receiverFile)      
    DBFin2 = dbflib.open(resultFile)
    DBFOut = dbflib.create(mergedFile)
    print DBFin1.record_count()-DBFin2.record_count(), ' points are missed.'
    # add field
    # The output schema is all of DBFin1's fields followed by all of
    # DBFin2's fields (so the ID column appears twice).
    for n in xrange(DBFin1.field_count()):  
        fi = DBFin1.field_info(n)
        DBFOut.add_field(fi[1], fi[0], fi[2], fi[3])
    for n in xrange(DBFin2.field_count()):
        fi = DBFin2.field_info(n)
        DBFOut.add_field(fi[1], fi[0], fi[2], fi[3])
    # copy attributes
    for r in xrange(DBFin1.record_count()):
        print 'merging ', r, ' record'
        for f in xrange(DBFin1.field_count()):
            v = DBFin1.read_attribute(r, f)
            DBFOut.write_attribute(r, f, v)
        # Linear search DBFin2 for the record with the same ID value;
        # O(n*m) overall.
        IDentify = DBFin1.read_record(r)[ID]
        for r2 in xrange(DBFin2.record_count()):
            if IDentify==DBFin2.read_record(r2)[ID]:
                break                
        # NOTE(review): if no record matches, r2 is left at the last
        # index and that (wrong) record's values are copied — confirm
        # whether unmatched receivers should instead be left blank.
        for f in xrange(DBFin2.field_count()):
            v = DBFin2.read_attribute(r2, f)
            DBFOut.write_attribute(r, f+DBFin1.field_count(), v)
    DBFin1.close()
    DBFin2.close()
    DBFOut.close()          
示例#5
0
def writeShp(geoList, outFilePath, geoType):
    if not (geoType in ["Point", "Ptgon", "Line"]):
        print "The output geometry type should be Point, Ptgon, or Line"
        return

    if geoType == "Point":
        geoT = shapelib.SHPT_POINT
    elif geoType == "Ptgon":
        geoT = shapelib.SHPT_POLYGON
    else:
        geoT = shapelib.SHPT_ARC

    outShp = shapelib.create(outFilePath, geoT)
    outDbf = dbflib.create(outFilePath.split('.')[0] + '.dbf')
    outDbf.add_field("ID", dbflib.FTInteger, 100, 0)

    for j in range(len(geoList)):
        recordDict = {"ID": j}
        if geoType == "Polygon":
            if geoList[j].geom_type == 'Polygon':
                vert = [list(geoList[j].exterior.coords)]
                for interi in geoList[j].interiors:
                    vert.append(list(interi.coords))
            #print vert
        else:
            vert = [list(geoList[j].coords)]
        obj = shapelib.SHPObject(geoT, -1, vert)
        outShp.write_object(-1, obj)
        outDbf.write_record(j, recordDict)

    print "%d records, %d fields" % (outDbf.record_count(),
                                     outDbf.field_count())
示例#6
0
def do(dt):
    """Generate a zipped shapefile of daily IDEPv2 results for *dt* and
    stream it to stdout as a CGI attachment.

    dt -- date/datetime of the day to export
    """
    dbconn = psycopg2.connect(database='idep', host='iemdb', user='******')
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cursor.execute(
        """
    SELECT ST_AsText(i.geom), i.huc_12, coalesce(avg_precip, 0),
    coalesce(avg_loss, 0), coalesce(avg_runoff, 0),
    coalesce(avg_delivery, 0) from ia_huc12 i JOIN results_by_huc12 r
    on (r.huc_12 = i.huc_12) WHERE valid = %s
    """, (dt, ))

    os.chdir("/tmp")
    fn = "idepv2_%s" % (dt.strftime("%Y%m%d"), )
    shp = shapelib.create(fn, shapelib.SHPT_POLYGON)

    dbf = dbflib.create(fn)
    dbf.add_field("HUC_12", dbflib.FTString, 12, 0)
    dbf.add_field("PREC_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("LOS_KGM2", dbflib.FTDouble, 8, 2)
    dbf.add_field("RUNOF_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("DELI_KGM", dbflib.FTDouble, 8, 2)

    for i, row in enumerate(cursor):
        g = wellknowntext.convert_well_known_text(row[0])
        obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1, g)
        shp.write_object(-1, obj)
        del obj

        dbf.write_record(
            i,
            dict(HUC_12=row[1],
                 PREC_MM=row[2],
                 LOS_KGM2=row[3],
                 RUNOF_MM=row[4],
                 DELI_KGM=row[5]))

    # Release the database resources (previously leaked).
    cursor.close()
    dbconn.close()

    # Deleting the wrappers is how this shapelib binding flushes and
    # closes the .shp/.shx/.dbf files.
    del shp
    del dbf

    shutil.copyfile("/mesonet/www/apps/iemwebsite/data/gis/meta/26915.prj",
                    fn + ".prj")
    z = zipfile.ZipFile(fn + ".zip", 'w', zipfile.ZIP_DEFLATED)
    suffixes = ['shp', 'shx', 'dbf', 'prj']
    for s in suffixes:
        z.write(fn + "." + s)
    z.close()

    sys.stdout.write("Content-type: application/octet-stream\n")
    sys.stdout.write(("Content-Disposition: attachment; filename=%s.zip\n\n"
                      "") % (fn, ))

    # Read the archive back in binary mode ('rb'); the old code used the
    # py2 ``file`` builtin in text mode and leaked the handle.
    payload = open(fn + ".zip", 'rb')
    sys.stdout.write(payload.read())
    payload.close()

    # Clean up all temporary artifacts, including the zip itself.
    suffixes.append('zip')
    for s in suffixes:
        os.remove(fn + "." + s)
示例#7
0
文件: idep2.py 项目: KayneWest/iem
def do(dt):
    """Generate a zipped shapefile of daily IDEPv2 results for *dt* and
    stream it to stdout as a CGI attachment.

    dt -- date/datetime of the day to export
    """
    dbconn = psycopg2.connect(database='idep', host='iemdb', user='******')
    cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cursor.execute("""
    SELECT ST_AsText(i.geom), i.huc_12, coalesce(avg_precip, 0),
    coalesce(avg_loss, 0), coalesce(avg_runoff, 0),
    coalesce(avg_delivery, 0) from ia_huc12 i JOIN results_by_huc12 r
    on (r.huc_12 = i.huc_12) WHERE valid = %s
    """, (dt,))

    os.chdir("/tmp")
    fn = "idepv2_%s" % (dt.strftime("%Y%m%d"),)
    shp = shapelib.create(fn, shapelib.SHPT_POLYGON)

    dbf = dbflib.create(fn)
    dbf.add_field("HUC_12", dbflib.FTString, 12, 0)
    dbf.add_field("PREC_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("LOS_KGM2", dbflib.FTDouble, 8, 2)
    dbf.add_field("RUNOF_MM", dbflib.FTDouble, 8, 2)
    dbf.add_field("DELI_KGM", dbflib.FTDouble, 8, 2)

    for i, row in enumerate(cursor):
        g = wellknowntext.convert_well_known_text(row[0])
        obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1, g)
        shp.write_object(-1, obj)
        del obj

        dbf.write_record(i, dict(HUC_12=row[1],
                                 PREC_MM=row[2],
                                 LOS_KGM2=row[3],
                                 RUNOF_MM=row[4],
                                 DELI_KGM=row[5]))

    # Release the database resources (previously leaked).
    cursor.close()
    dbconn.close()

    # Deleting the wrappers is how this shapelib binding flushes and
    # closes the .shp/.shx/.dbf files.
    del shp
    del dbf

    shutil.copyfile("/mesonet/www/apps/iemwebsite/data/gis/meta/26915.prj",
                    fn+".prj")
    z = zipfile.ZipFile(fn+".zip", 'w', zipfile.ZIP_DEFLATED)
    suffixes = ['shp', 'shx', 'dbf', 'prj']
    for s in suffixes:
        z.write(fn+"."+s)
    z.close()

    sys.stdout.write("Content-type: application/octet-stream\n")
    sys.stdout.write(("Content-Disposition: attachment; filename=%s.zip\n\n"
                      "") % (fn,))

    # Read the archive back in binary mode ('rb'); the old code used the
    # py2 ``file`` builtin in text mode and leaked the handle.
    payload = open(fn+".zip", 'rb')
    sys.stdout.write(payload.read())
    payload.close()

    # Clean up all temporary artifacts, including the zip itself.
    suffixes.append('zip')
    for s in suffixes:
        os.remove(fn+"."+s)
示例#8
0
def createGIS(s):
    """Create the daily rainfall point shapefile for date *s*.

    s -- date-like object with strftime(), used to build the directory
         and the file name
    Returns (shp, dbf): open shapelib and dbflib writers.
    """
    # Renamed from ``dir``, which shadowed the builtin of the same name.
    outDir = s.strftime("/wepp/data/rainfall/shape/daily/%Y/%m/")
    fname = s.strftime("%Y%m%d_rain")

    if not os.path.isdir(outDir):
        os.makedirs(outDir)
    dbf = dbflib.create(outDir + fname)
    dbf.add_field("RAINFALL", dbflib.FTDouble, 4, 2)

    shp = shapelib.create(outDir + fname, shapelib.SHPT_POINT)
    return shp, dbf
示例#9
0
def createGIS(s):
    """Create the daily rainfall shapefiles and return the writers."""
    folder = s.strftime("/mnt/idep/data/rainfall/shape/daily/%Y/%m/")
    stem = s.strftime("%Y%m%d_rain")

    # Make sure the per-month output directory exists.
    if not os.path.isdir(folder):
        os.makedirs(folder)

    target = folder + stem
    dbf = dbflib.create(target)
    dbf.add_field("RAINFALL", dbflib.FTDouble, 5, 2)
    shp = shapelib.create(target, shapelib.SHPT_POINT)
    return shp, dbf
示例#10
0
 def write_point_shape_out(self, pointShapeFileName, pointsList):
     """Write a nested list of (x, y) points to a point shapefile.

     pointShapeFileName -- output .shp/.dbf base path
     pointsList -- iterable of point sequences; points are flattened and
                   written with a running integer ID
     """
     w2shp = shapelib.create(pointShapeFileName, shapelib.SHPT_POINT)
     w2dbf = dbflib.create(pointShapeFileName)
     # Three attribute columns: record ID plus the x/y coordinates.
     w2dbf.add_field('ID', dbflib.FTInteger, 10, 0)
     w2dbf.add_field('x', dbflib.FTDouble, 16, 2)
     w2dbf.add_field('y', dbflib.FTDouble, 16, 2)
     i = 0
     for pts in pointsList:
         for pt in pts:
             shpObj = shapelib.SHPObject(shapelib.SHPT_POINT, i, [[pt]])
             w2shp.write_object(i, shpObj)
             # Write the whole row at once; the previous version issued
             # three separate partial write_record calls per record.
             w2dbf.write_record(i, {'ID': i, 'x': pt[0], 'y': pt[1]})
             i += 1
     w2shp.close()
     w2dbf.close()
示例#11
0
def simulateRampCapacity(rampFilePath, outFilePath):
    """Copy ramp points to a new shapefile with simulated capacities.

    rampFilePath -- source point shapefile (.shp path)
    outFilePath  -- point shapefile to create with ID/EBCap/VesCap fields

    EBCap  is a uniform random capacity in [500, 5000]; VesCap is a
    vessel-capacity class 0-5 drawn with fixed probabilities.  Requires
    numpy.random's randint/choice to be in scope.
    """
    rampShp = shapelib.ShapeFile(rampFilePath)
    rampDbf = dbflib.DBFFile(rampFilePath.split('.')[0] + '.dbf')

    outShp = shapelib.create(outFilePath, shapelib.SHPT_POINT)
    outDbf = dbflib.create(outFilePath.split('.')[0] + '.dbf')
    outDbf.add_field("ID", dbflib.FTInteger, 10, 0)
    outDbf.add_field("EBCap", dbflib.FTInteger, 30, 0)
    outDbf.add_field("VesCap", dbflib.FTInteger, 10, 0)

    tn = rampDbf.record_count()
    ebCap = randint(500, high=5001, size=tn)
    vesCap = choice(6, tn, p=[0.5, 0.2, 0.1, 0.1, 0.05, 0.05])

    for j in range(tn):
        # Geometry is copied through; attributes are freshly generated.
        # (The old code also read each source record but never used it.)
        rampObj = rampShp.read_object(j)
        outShp.write_object(-1, rampObj)
        outDbf.write_record(j, [j, ebCap[j], vesCap[j]])

    # Close everything so records are flushed to disk (previously leaked).
    rampShp.close()
    rampDbf.close()
    outShp.close()
    outDbf.close()
示例#12
0
def shpCreateFile(fileName, shptype, fields):
    """
    Create a shapefile (and a corresponding dbf file) of the given type,
    containing the given fields.
    Input: fileName - full path (excluding extension!) to the shapefile
           to create.
           shptype - shapelib object type (these are integer
           values, but you can also use the shapelib.SHPT_ value).
           fields - a dictionary of dictionaries with field names as
           keys, and each sub-dictionary containing keys of 'Type',
           'Length','Precision' and 'Data':
           'Type' must be one of the following integer values:
                    0 - strings
                    1 - integers
                    2 - doubles
                    4 - Invalid
    Output: shapefile and dbffile objects
    Raises: IOError if either file cannot be created
    """
    try:
        fshp = shapelib.create(fileName, shptype)
    except IOError:
        logger.critical("Failed to create shapefile: %s.shp"%fileName)
        # Bare raise re-raises the caught exception with its traceback;
        # ``raise IOError`` created a fresh, message-less one.
        raise
    try:
        fdbf = dbflib.create(fileName)
    except IOError:
        logger.critical("Failed to create dbffile: %s.dbf"%fileName)
        raise
    for f in sorted(fields.keys()):
        fieldType = fields[f]['Type']
        fieldLength = fields[f]['Length']
        # Force the precision to be zero unless the field is a double
        if fieldType == 2:
            fieldPrec = fields[f]['Precision']
        else:
            fieldPrec = 0
        fdbf.add_field(f, fieldType, fieldLength, fieldPrec)

    return fshp, fdbf
示例#13
0
 def clear_small_polygons(self, shapeFile, newShapeFile, lenLimit):
     """Copy *shapeFile* to *newShapeFile*, keeping only shapes whose
     length (per self.length_of_polygon over all rings) exceeds
     *lenLimit*.  Only the first ring of each kept shape is written.
     """
     srcSHP = shapelib.ShapeFile(shapeFile)
     srcDBF = dbflib.open(shapeFile)
     dstSHP = shapelib.create(newShapeFile, self.shpType)
     dstDBF = dbflib.create(newShapeFile)

     # Mirror the attribute schema of the source table.
     for idx in range(srcDBF.field_count()):
         info = srcDBF.field_info(idx)
         dstDBF.add_field(info[1], info[0], info[2], info[3])

     kept = 0
     for idx in range(srcSHP.info()[0]):
         shape = srcSHP.read_object(idx)
         record = srcDBF.read_record(idx)
         if self.length_of_polygon(shape.vertices()) <= lenLimit:
             continue
         dstSHP.write_object(
             kept, shapelib.SHPObject(self.shpType, kept,
                                      [shape.vertices()[0]]))
         dstDBF.write_record(kept, record)
         kept += 1

     srcSHP.close()
     srcDBF.close()
     dstSHP.close()
     dstDBF.close()
示例#14
0
 def _redrawMap(self, receiverFileName, epsilon, delta, timeHour, day):
     """Write an updated copy of the receiver dbf with a new 'DBA_D'
     column holding the recomputed receiver levels for *timeHour*.
     The copy lands in a subdirectory named after *day*.
     """
     receiverLevels = self._updateMap(epsilon, delta, timeHour)
     DBF = dbflib.open(receiverFileName)
     RECN = DBF.record_count()
     if not os.path.exists(str(day)):
         os.mkdir(str(day))
     # NOTE(review): backslash separator makes this Windows-only.
     newDBF = dbflib.create(
         str(day) + '\\' + receiverFileName + '_update_' + str(timeHour) +
         'u')
     # Copy the old fields and records verbatim.
     fieldCount = DBF.field_count()
     for f in xrange(fieldCount):
         fi = DBF.field_info(f)
         newDBF.add_field(fi[1], fi[0], fi[2], fi[3])
     for r in xrange(RECN):
         rec = DBF.read_record(r)
         newDBF.write_record(r, rec)
     # Add the new field and its per-record values.  Its index is the
     # old field count; the previous code addressed it via the leaked
     # loop variable ``f + 1``, which breaks for a field-less source.
     newDBF.add_field('DBA_D', dbflib.FTDouble, 9, 1)
     for r in xrange(RECN):
         newDBF.write_attribute(r, fieldCount, receiverLevels[r])
     DBF.close()
     newDBF.close()
示例#15
0
文件: combine.py 项目: nbackas/iem
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005

import mapscript, dbflib, sys, os
# Timestamp label (first command-line argument) naming the merged output.
ts = sys.argv[1]

outshp = mapscript.shapefileObj('dm_%s.shp' % ts,
                                mapscript.MS_SHAPEFILE_POLYGON)

# Companion attribute table: one DCAT column with the drought category.
dbf = dbflib.create("dm_%s" % ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)

counter = 0
# Drought categories D0..D4, each delivered as its own shapefile; skip
# any category without a file.
for d in range(5):
    if not os.path.isfile("Drought_Areas_US_D%s.shp" % d):
        print "No Shapefile for D %s" % (d, )
        continue
    shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' % d)

    for i in range(shp.numshapes):
        shpObj = shp.getShape(i)
        outshp.add(shpObj)
        # Tag each copied shape with its source category.
        dbf.write_record(counter, [
            d,
        ])
        del shpObj
        counter += 1

# Deleting the wrappers flushes and closes the output files.
del outshp
del dbf
示例#16
0
 county    | character varying(32)    |
 state     | character(2)             |
 source    | character varying(32)    |
 remark    | text                     |
 wfo       | character(3)             |
 geom      | geometry                 |
 typetext  | geometry                 |
"""

# We set one minute into the future, so to get expiring warnings
# out of the shapefile
eTS = mx.DateTime.gmt() + mx.DateTime.RelativeDateTime(minutes=+1)

shp = shapelib.create("lsr_24hour", shapelib.SHPT_POINT)

# Attribute schema for the local-storm-report points.
dbf = dbflib.create("lsr_24hour")
dbf.add_field("VALID", dbflib.FTString, 12, 0)
dbf.add_field("MAG", dbflib.FTDouble, 8, 2)
dbf.add_field("WFO", dbflib.FTString, 3, 0)
dbf.add_field("TYPECODE", dbflib.FTString, 1, 0)
dbf.add_field("TYPETEXT", dbflib.FTString, 40, 0)
dbf.add_field("CITY", dbflib.FTString, 40, 0)
dbf.add_field("COUNTY", dbflib.FTString, 40, 0)
dbf.add_field("SOURCE", dbflib.FTString, 40, 0)
dbf.add_field("REMARK", dbflib.FTString, 200, 0)


# Fetch the last 24 hours of reports from the per-year LSR table.
#sql = "SELECT *, astext(geom) as tgeom from warnings WHERE issue < '%s' and \
sql = """SELECT distinct *, astext(geom) as tgeom from lsrs_%s WHERE 
	valid > (now() -'1 day'::interval) """ % (eTS.year,)
pcursor.execute(sql)
           (-84.58, 30.91, '02356000', 'Flint River at Bainbridge, GA'),
           (-84.74, 31.04, '02357000', 'Spring Creek near Iron City, GA')]


COAPS_SE = [(-82.52, 31.52, '90211', 'Alma CAA Airport'),
            (-82.82, 31.52, '92783', 'Douglas'),
            (-82.80, 31.02, '94429', 'Homerville'), ]
COAPS_SW = [(-84.12, 31.52, '90140', 'Albany'),
            (-84.18, 31.79, '91500', 'Camilla'),
            (-84.77, 31.17, '92153', 'Colquitt'),
            (-84.77, 31.77, '92450', 'Cuthbert')]


if 1:
    file = "USGS_sGA.dbf"
    dbf = dbflib.create(file)
    dbf.add_field("LON", dbflib.FTDouble, 10, 4)
    dbf.add_field("LAT", dbflib.FTDouble, 10, 4)
    dbf.add_field("CODE", dbflib.FTString, 8, 0)
    dbf.add_field("NAME", dbflib.FTString, 40, 0)
    dbf.close()
    #
    dbf = dbflib.open(file, "r+b")
    for (i, record) in enumerate(USGS_SE + USGS_SW):
        dbf.write_record(i, record)
    dbf.close()
    #
    filename = "USGS_sGA.shp"
    outfile = shapelib.create(filename, shapelib.SHPT_POINT)
    for (i, record) in enumerate(USGS_SE + USGS_SW):
        obj = shapelib.SHPObject(shapelib.SHPT_POINT, i,
示例#18
0
m = int(sys.argv[4])
d = int(sys.argv[5])
day1 = mx.DateTime.DateTime(y, m, d, 12, 0)

fname = day.strftime("%Y%m%d")

hrap_shapes = {}
hrain = {}
rs = mydb.query("SELECT transform(the_geom, 4326) as the_geom, hrap_i from hrap_utm").dictresult()
for i in range(len(rs)):
	hrain[ int(rs[i]["hrap_i"]) ] = {"HRAP_I": int(rs[i]["hrap_i"]) }
	hrap_shapes[ int(rs[i]["hrap_i"]) ] = rs[i]["the_geom"]

shp = shapelib.create(fname, shapelib.SHPT_POLYGON)
dbf = dbflib.create(fname)


dbf.add_field("HRAP_I", dbflib.FTInteger, 8, 0)
dbf.add_field("RAINFALL", dbflib.FTDouble, 5, 2)


interval = mx.DateTime.RelativeDateTime(minutes=+15)
now = day

d = 0
while (now <= day1):
	now += interval
	gts = now.gmtime()
	fp = gts.strftime("/wepp/data/rainfall/product/%Y/%Y%m%d/IA%Y%m%d_%H%M.dat")
	print fp
示例#19
0
def make_dbf(file):
    """Create a new dbf file with three fields (NAME, INT, FLOAT).

    file -- base path of the dbf file to create
    Returns the open dbflib handle so the caller can write records and
    close it.  (The old version dropped the handle, so the file was
    immediately closed again by refcounting and the return was None.)
    """
    dbf = dbflib.create(file)
    dbf.add_field("NAME", dbflib.FTString, 20, 0)
    dbf.add_field("INT", dbflib.FTInteger, 10, 0)
    dbf.add_field("FLOAT", dbflib.FTDouble, 10, 4)
    return dbf
示例#20
0
文件: testdbf.py 项目: robi56/thesis
 def test_add_field(self):
     """add_field must raise for an invalid spec: string fields are
     required to have zero precision."""
     table = dbflib.create("test.dbf")
     self.assertRaises(RuntimeError, table.add_field,
                       "str", dbflib.FTString, 10, 5)
示例#21
0
文件: 24h_lsr.py 项目: nbackas/iem
 county    | character varying(32)    |
 state     | character(2)             |
 source    | character varying(32)    |
 remark    | text                     |
 wfo       | character(3)             |
 geom      | geometry                 |
 typetext  | geometry                 |
"""

# We set one minute into the future, so to get expiring warnings
# out of the shapefile
eTS = mx.DateTime.gmt() + mx.DateTime.RelativeDateTime(minutes=+1)

shp = shapelib.create("lsr_24hour", shapelib.SHPT_POINT)

# Attribute schema for the local-storm-report points.
dbf = dbflib.create("lsr_24hour")
dbf.add_field("VALID", dbflib.FTString, 12, 0)
dbf.add_field("MAG", dbflib.FTDouble, 8, 2)
dbf.add_field("WFO", dbflib.FTString, 3, 0)
dbf.add_field("TYPECODE", dbflib.FTString, 1, 0)
dbf.add_field("TYPETEXT", dbflib.FTString, 40, 0)
dbf.add_field("CITY", dbflib.FTString, 40, 0)
dbf.add_field("COUNTY", dbflib.FTString, 40, 0)
dbf.add_field("SOURCE", dbflib.FTString, 40, 0)
dbf.add_field("REMARK", dbflib.FTString, 200, 0)

# Fetch the last 24 hours of reports from the per-year LSR table.
#sql = "SELECT *, astext(geom) as tgeom from warnings WHERE issue < '%s' and \
sql = """SELECT distinct *, ST_astext(geom) as tgeom from lsrs_%s WHERE 
	valid > (now() -'1 day'::interval) """ % (eTS.year, )
pcursor.execute(sql)
示例#22
0
sts = mx.DateTime.DateTime(2005, 3, 1)
ets = mx.DateTime.DateTime(2005, 11, 1)
interval = mx.DateTime.RelativeDateTime(days=+7)

now = sts
twp = {}
rs = mydb.query(
    "SELECT astext(transform(the_geom,4326)) as t, model_twp from iatwp ORDER by model_twp ASC"
).dictresult()
for i in range(len(rs)):
    twp[rs[i]["model_twp"]] = rs[i]["t"]

while now < ets:
    print "Hello Heather, I am here ", now
    shp = shapelib.create("weeklysm/%ssm" % (now.strftime("%Y%m%d"),), shapelib.SHPT_POLYGON)
    dbf = dbflib.create("weeklysm/%ssm" % (now.strftime("%Y%m%d"),))
    dbf.add_field("S0-10CM", dbflib.FTDouble, 8, 2)
    dbf.add_field("S10-20CM", dbflib.FTDouble, 8, 2)
    dbf.add_field("VSM", dbflib.FTDouble, 8, 2)

    rs = mydb.query(
        "select model_twp, avg(vsm) as v, \
	avg(s10cm) as s10, avg(s20cm) as s20 from \
	waterbalance_by_twp  WHERE valid >= '%s' and valid < '%s' \
        GROUP by model_twp ORDER by model_twp ASC"
        % (now.strftime("%Y-%m-%d"), (now + interval).strftime("%Y-%m-%d"))
    ).dictresult()

    for i in range(len(rs)):
        m = rs[i]["model_twp"]
        f = wellknowntext.convert_well_known_text(twp[m])
示例#23
0
)

for row in icursor:
    thisStation = row["id"]
    thisPrec = row["tprec"]
    thisSnow = row["tsnow"]
    if thisStation not in cob:
        continue
    # Month-to-date precip / snow totals, rounded to hundredths.
    cob[thisStation]["PMOI"] = round(float(thisPrec), 2)
    cob[thisStation]["SMOI"] = round(float(thisSnow), 2)

# NOTE(review): local ``csv`` shadows the stdlib csv module name.
csv = open("coop.csv", "w")
csv.write("nwsli,site_name,longitude,latitude,date,time,high_f,low_f,prec_in,")
csv.write("snow_in,snow_depth_in,prec_mon_in,snow_mon_in,elevation_m\n")

# Attribute schema for the coop observation shapefile.
dbf = dbflib.create("coop_" + ts)
dbf.add_field("SID", dbflib.FTString, 5, 0)
dbf.add_field("SITE_NAME", dbflib.FTString, 40, 0)
dbf.add_field("ELEV_M", dbflib.FTDouble, 10, 2)
dbf.add_field("YYYYMMDD", dbflib.FTString, 8, 0)
dbf.add_field("HHMM", dbflib.FTString, 4, 0)
dbf.add_field("HI_T_F", dbflib.FTInteger, 10, 0)
dbf.add_field("LO_T_F", dbflib.FTInteger, 10, 0)
dbf.add_field("PREC", dbflib.FTDouble, 10, 2)
dbf.add_field("SNOW", dbflib.FTDouble, 10, 2)
dbf.add_field("SDEPTH", dbflib.FTDouble, 10, 2)
dbf.add_field("PMONTH", dbflib.FTDouble, 10, 2)
dbf.add_field("SMONTH", dbflib.FTDouble, 10, 2)

shp = shapelib.create("coop_" + ts, shapelib.SHPT_POINT)
示例#24
0
sts = mx.DateTime.DateTime(2006,3,1)
ets = mx.DateTime.DateTime(2006,11,1)
interval = mx.DateTime.RelativeDateTime(days=+1)

now = sts
ohrap = {}
rs = mydb.query("SELECT hrap_i from hrap_utm ORDER by hrap_i ASC").dictresult()
for i in range(len(rs)):
    ohrap[ int(rs[i]['hrap_i']) ] = {'rain': 0, 'hours': 0, 'mrain': 0}

hrapi = ohrap.keys()
hrapi.sort()

while (now < ets):
    print "Hello Heather, I am here ", now
    dbf = dbflib.create("dailyrain/%srain" % (now.strftime("%Y%m%d"), ) )
    dbf.add_field("RAINFALL", dbflib.FTDouble, 8, 2)
    dbf.add_field("RAINHOUR", dbflib.FTDouble, 8, 2)
    dbf.add_field("RAINPEAK", dbflib.FTDouble, 8, 2)
    
    rs = mydb.query("select hrap_i, rainfall /25.4 as rain, \
	peak_15min /25.4 * 4 as mrain, hr_cnt / 4.0 as hours from \
	daily_rainfall_%s  WHERE valid = '%s' \
        ORDER by hrap_i ASC" % (now.strftime("%Y"), \
        now.strftime("%Y-%m-%d") \
        ) ).dictresult()

    hrap = ohrap
    for i in range(len(rs)):
        #print rs[i]
        hrap[ int(rs[i]['hrap_i']) ]= {'rain': float(rs[i]['rain']), \
示例#25
0
if format == 'shp':
    # Time to create the shapefiles
    fp = "iemre_%s_%s" % (ts0.strftime("%Y%m%d"), ts1.strftime("%Y%m"))
    shp = shapelib.create("%s.shp" % (fp,), shapelib.SHPT_POLYGON)

    for x in iemre.XAXIS:
        for y in iemre.YAXIS:
            obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1,
                                     [[(x, y), (x, y+iemre.DY),
                                       (x+iemre.DX, y+iemre.DY),
                                       (x+iemre.DX, y), (x, y)]])
            shp.write_object(-1, obj)

    del(shp)
    dbf = dbflib.create(fp)
    dbf.add_field("GDD", dbflib.FTDouble, 10, 2)
    dbf.add_field("PREC_IN", dbflib.FTDouble, 10, 2)

    cnt = 0
    for i in range(len(iemre.XAXIS)):
        for j in range(len(iemre.YAXIS)):
            dbf.write_record(cnt, {'PREC_IN': precip[j, i],
                                   'GDD': gdd[j, i]})
            cnt += 1

    del(dbf)

    # Create zip file, send it back to the clients
    shutil.copyfile("/mesonet/www/apps/iemwebsite/data/gis/meta/4326.prj",
                    fp+".prj")
示例#26
0
文件: combine.py 项目: KayneWest/iem
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005

import mapscript, dbflib, sys, os
# Timestamp label (first command-line argument) naming the merged output.
ts = sys.argv[1]

outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )

# Companion attribute table: one DCAT column with the drought category.
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)

counter = 0
# Drought categories D0..D4, each delivered as its own shapefile; skip
# any category without a file.
for d in range(5):
  if not os.path.isfile("Drought_Areas_US_D%s.shp" %d):
    print "No Shapefile for D %s" % (d,)
    continue
  shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)

  for i in range( shp.numshapes ):
    shpObj = shp.getShape(i)
    outshp.add( shpObj )
    # Tag each copied shape with its source category.
    dbf.write_record(counter, [d,])
    del shpObj
    counter += 1

# Deleting the wrappers flushes and closes the output files.
del outshp
del dbf
示例#27
0
mydb = pg.connect('wepp')

# 173 x 134 grid of precipitation sample points.
# NOTE(review): ``lats``/``lons`` are initialized but never used below.
points = 23182
lats = [0]*points
lons = [0]*points

lat0 = 40.000
lon0 = -97.000
latn = 44.000
lonn = -89.000
x = 173.0000
y = 134.0000
# Grid spacing; latn > lat0 and lonn > lon0, so both deltas come out
# negative and the subtractions below actually step north/east.
dlat = (lat0 - latn ) / y
dlon = (lon0 - lonn ) / x

dbf = dbflib.create("precip_points")
dbf.add_field("SID", dbflib.FTString, 1, 0)
dbf.add_field("SITE_NAME", dbflib.FTString, 1, 0)

shp = shapelib.create("precip_points", shapelib.SHPT_POINT)

for i in range(points):
  # Row-major walk over the grid.
  row = i / int(x)
  col = i % int(x)
  lat = lat0 - ( row * dlat )
  lon = lon0 - ( col * dlon )
  # NOTE(review): SQL assembled by string concatenation; values are
  # internally generated floats here, but parameterization would be safer.
  mydb.query("INSERT into precip_points(point, geom) values( \
   "+ str(i) +", 'SRID=-1;POINT("+ str(lon) +" "+ str(lat) +");')")

  obj = shapelib.SHPObject(shapelib.SHPT_POINT, i, [[(lon, lat)]] )
  shp.write_object(-1, obj)
示例#28
0
def real_parser(txn, raw):
    """Actually do the heavy lifting of parsing this product

    Parses the STOIA road-condition text product, updates the
    roads_current table, archives the result into the seasonal log
    table, and regenerates + ships the iaroad_cond shapefile.

    Args:
      txn (cursor): psycopg2 database transaction
      raw (str): the raw text that needs parsing
    """
    # Load up dictionary of Possible Road Conditions
    if len(ROADS) == 0:
        log.msg("Initializing ROADS and CONDITIONS dicts...")
        init_dicts(txn)

    tp = TextProduct(raw)
    log.msg("PROCESSING STOIA: %s" % (tp.valid,))

    # Lets start our processing by looking for the first * and then
    # processing after finding it
    lines = re.split("\n", raw[raw.find("*") :])
    for line in lines:
        # Skip short lines, section markers, and lines with no segment text
        if len(line) < 40 or line[0] == "*" or line[30:40].strip() == "":
            continue
        data = line[7:]
        # Find the right most ) and chomp everything up until it
        pos = data.rfind(")")
        meat = data[: pos + 1].upper()
        condition = data[(pos + 1) :].upper().strip()
        if meat.strip() == "":
            continue
        if meat not in ROADS:
            log.msg("Unknown road: %s\n" % (meat,))
            continue

        # Resolve the textual condition to a code and boolean flags.
        road_code = figureCondition(txn, condition)
        towingProhibited = condition.find("TOWING PROHIBITED") > -1
        limitedVis = condition.find("LIMITED VIS.") > -1
        segid = ROADS[meat]["segid"]

        txn.execute(
            """
            UPDATE roads_current SET cond_code = %s, valid = %s,
            towing_prohibited = %s, limited_vis = %s, raw = %s
            WHERE segid = %s
            """,
            (road_code, tp.valid, towingProhibited, limitedVis, condition, segid),
        )

    # Copy the currents table over to the log... HARD CODED
    # Season spans July-June, so pick the log table by month.
    if tp.valid.month < 7:
        logtable = "roads_%s_%s_log" % (tp.valid.year - 1, tp.valid.year)
    else:
        logtable = "roads_%s_%s_log" % (tp.valid.year, tp.valid.year + 1)
    txn.execute(
        """
        INSERT into """
        + logtable
        + """
        SELECT * from roads_current WHERE valid = %s
        """,
        (tp.valid,),
    )
    log.msg("Copied %s rows into %s table" % (txn.rowcount, logtable))

    # Now we generate a shapefile....
    dbf = dbflib.create("iaroad_cond")
    dbf.add_field("SEGID", dbflib.FTInteger, 4, 0)
    dbf.add_field("MAJOR", dbflib.FTString, 10, 0)
    dbf.add_field("MINOR", dbflib.FTString, 128, 0)
    dbf.add_field("US1", dbflib.FTInteger, 4, 0)
    dbf.add_field("ST1", dbflib.FTInteger, 4, 0)
    dbf.add_field("INT1", dbflib.FTInteger, 4, 0)
    dbf.add_field("TYPE", dbflib.FTInteger, 4, 0)
    dbf.add_field("VALID", dbflib.FTString, 12, 0)
    dbf.add_field("COND_CODE", dbflib.FTInteger, 4, 0)
    dbf.add_field("COND_TXT", dbflib.FTString, 120, 0)
    dbf.add_field("BAN_TOW", dbflib.FTString, 1, 0)
    dbf.add_field("LIM_VIS", dbflib.FTString, 1, 0)

    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)

    # One arc + one attribute record per road segment with a geometry.
    txn.execute(
        """select b.*, c.*, ST_astext(b.geom) as bgeom from
         roads_base b, roads_current c WHERE b.segid = c.segid
         and valid is not null and b.geom is not null"""
    )
    i = 0
    for row in txn:
        s = row["bgeom"]
        f = wellknowntext.convert_well_known_text(s)
        valid = row["valid"]
        d = {}
        d["SEGID"] = row["segid"]
        d["MAJOR"] = row["major"]
        d["MINOR"] = row["minor"]
        d["US1"] = row["us1"]
        d["ST1"] = row["st1"]
        d["INT1"] = row["int1"]
        d["TYPE"] = row["type"]
        d["VALID"] = valid.strftime("%Y%m%d%H%M")
        d["COND_CODE"] = row["cond_code"]
        d["COND_TXT"] = row["raw"]
        # 'T'/'F' single-character flags from the boolean columns.
        d["BAN_TOW"] = str(row["towing_prohibited"])[0]
        d["LIM_VIS"] = str(row["limited_vis"])[0]

        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f)
        shp.write_object(-1, obj)
        dbf.write_record(i, d)

        del (obj)
        i += 1

    # Deleting the wrappers flushes and closes the .shp/.shx/.dbf files.
    del (shp)
    del (dbf)
    z = zipfile.ZipFile("iaroad_cond.zip", "w")
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    o = open("iaroad_cond.prj", "w")
    o.write(EPSG26915)
    o.close()
    z.write("iaroad_cond.prj")
    z.close()

    # Insert the archive into the LDM queue, stamped with UTC valid time.
    utc = tp.valid.astimezone(pytz.timezone("UTC"))
    subprocess.call(
        (
            "/home/ldm/bin/pqinsert -p 'zip ac %s "
            "gis/shape/26915/ia/iaroad_cond.zip "
            "GIS/iaroad_cond_%s.zip zip' iaroad_cond.zip"
            ""
        )
        % (utc.strftime("%Y%m%d%H%M"), utc.strftime("%Y%m%d%H%M")),
        shell=True,
    )

    # Remove all temporary artifacts.
    for suffix in ["shp", "shx", "dbf", "prj", "zip"]:
        os.unlink("iaroad_cond.%s" % (suffix,))
示例#29
0
def export_shapefile(txn, tp):
    """Export a Shapefile of Road Conditions

    Builds iaroad_cond.{shp,shx,dbf,prj} in /tmp from the roads_base /
    roads_current tables, zips the four files, hands the zip to LDM via
    pqinsert, and finally removes the temporary files.

    Args:
        txn: database cursor; execute() is called and rows are read as
            dictionaries
        tp: product timestamp (naive local time); 6 hours are added to
            approximate UTC -- NOTE(review): a fixed offset ignores DST,
            confirm against the pytz-based variant elsewhere in this file
    """
    os.chdir("/tmp")
    # Attribute table: one record per road segment
    dbf = dbflib.create("iaroad_cond")
    dbf.add_field("SEGID", dbflib.FTInteger, 4, 0)
    dbf.add_field("MAJOR", dbflib.FTString, 10, 0)
    dbf.add_field("MINOR", dbflib.FTString, 128, 0)
    dbf.add_field("US1", dbflib.FTInteger, 4, 0)
    dbf.add_field("ST1", dbflib.FTInteger, 4, 0)
    dbf.add_field("INT1", dbflib.FTInteger, 4, 0)
    dbf.add_field("TYPE", dbflib.FTInteger, 4, 0)
    dbf.add_field("VALID", dbflib.FTString, 12, 0)
    dbf.add_field("COND_CODE", dbflib.FTInteger, 4, 0)
    dbf.add_field("COND_TXT", dbflib.FTString, 120, 0)
    dbf.add_field("BAN_TOW", dbflib.FTString, 1, 0)
    dbf.add_field("LIM_VIS", dbflib.FTString, 1, 0)

    # Geometry file: polylines (SHPT_ARC) for the road segments
    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)

    txn.execute("""select b.*, c.*, ST_astext(b.geom) as bgeom from
         roads_base b, roads_current c WHERE b.segid = c.segid
         and valid is not null and b.geom is not null""")
    i = 0
    for row in txn:
        s = row["bgeom"]
        # Convert WKT geometry into shapelib vertex lists
        f = wellknowntext.convert_well_known_text(s)
        valid = row["valid"]
        d = {}
        d["SEGID"] = row["segid"]
        d["MAJOR"] = row["major"]
        d["MINOR"] = row["minor"]
        d["US1"] = row["us1"]
        d["ST1"] = row["st1"]
        d["INT1"] = row["int1"]
        d["TYPE"] = row["type"]
        d["VALID"] = valid.strftime("%Y%m%d%H%M")
        d["COND_CODE"] = row["cond_code"]
        d["COND_TXT"] = row["raw"]
        # Store only the first character of the boolean's string form
        d["BAN_TOW"] = str(row["towing_prohibited"])[0]
        d["LIM_VIS"] = str(row["limited_vis"])[0]

        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f)
        shp.write_object(-1, obj)  # -1 appends a new shape
        dbf.write_record(i, d)

        del(obj)
        i += 1

    # Deleting the handles closes the files (presumably flushed on
    # dealloc by shapelib/dbflib -- TODO confirm)
    del(shp)
    del(dbf)
    z = zipfile.ZipFile("iaroad_cond.zip", 'w')
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    o = open('iaroad_cond.prj', 'w')
    o.write(EPSG26915)
    o.close()
    z.write("iaroad_cond.prj")
    z.close()

    # NOTE(review): naive UTC shift assumes a fixed UTC-6 local offset
    utc = tp + datetime.timedelta(hours=6)
    subprocess.call(("/home/ldm/bin/pqinsert -p 'zip ac %s "
                     "gis/shape/26915/ia/iaroad_cond.zip "
                     "GIS/iaroad_cond_%s.zip zip' iaroad_cond.zip"
                     "") % (utc.strftime("%Y%m%d%H%M"),
                            utc.strftime("%Y%m%d%H%M")), shell=True)

    # Clean up the temporary files we produced above
    for suffix in ['shp', 'shx', 'dbf', 'prj', 'zip']:
        os.unlink("iaroad_cond.%s" % (suffix,))
示例#30
0
 max_dbz_height | real                     |
 top            | real                     |
 drct           | smallint                 |
 sknt           | smallint                 |
 valid          | timestamp with time zone |
"""

# Build the current_nexattr point shapefile in /tmp
os.chdir("/tmp")

# Delete anything older than 20 minutes
now = mx.DateTime.gmt()
eTS = mx.DateTime.gmt() - mx.DateTime.RelativeDateTime(minutes=+20)

# Point geometries for the storm-attribute cells
shp = shapelib.create("current_nexattr", shapelib.SHPT_POINT)

# Attribute table mirroring the columns described in the docstring above
dbf = dbflib.create("current_nexattr")
dbf.add_field("VALID", dbflib.FTString, 12, 0)
dbf.add_field("STORM_ID", dbflib.FTString, 2, 0)
dbf.add_field("NEXRAD", dbflib.FTString, 3, 0)
dbf.add_field("AZIMUTH", dbflib.FTInteger, 3, 0)
dbf.add_field("RANGE", dbflib.FTInteger, 3, 0)
dbf.add_field("TVS", dbflib.FTString, 10, 0)
dbf.add_field("MESO", dbflib.FTString, 10, 0)
dbf.add_field("POSH", dbflib.FTInteger, 3, 0)
dbf.add_field("POH", dbflib.FTInteger, 3, 0)
dbf.add_field("MAX_SIZE", dbflib.FTDouble, 5, 2)
dbf.add_field("VIL", dbflib.FTInteger, 3, 0)
dbf.add_field("MAX_DBZ", dbflib.FTInteger, 3, 0)
dbf.add_field("MAX_DBZ_H", dbflib.FTDouble, 5, 2)
dbf.add_field("TOP", dbflib.FTDouble, 5, 2)
dbf.add_field("DRCT", dbflib.FTDouble, 3, 0)
示例#31
0
    """ % (now.year, now.year))

# Merge month-to-date precip/snow totals into the observation dict
# (cob is built earlier in this script -- presumably keyed by NWSLI id,
# TODO confirm against the code above)
for row in icursor:
    thisStation = row["id"]
    thisPrec = row["tprec"]
    thisSnow = row["tsnow"]
    if thisStation not in cob:
        continue
    cob[thisStation]["PMOI"] = round(float(thisPrec), 2)
    cob[thisStation]["SMOI"] = round(float(thisSnow), 2)

# NOTE(review): the name `csv` shadows the stdlib csv module
csv = open('coop.csv', 'w')
csv.write('nwsli,site_name,longitude,latitude,date,time,high_f,low_f,prec_in,')
csv.write('snow_in,snow_depth_in,prec_mon_in,snow_mon_in,elevation_m\n')

# Attribute table for the coop point shapefile; `ts` is a timestamp
# string defined earlier in the script
dbf = dbflib.create("coop_"+ts)
dbf.add_field("SID", dbflib.FTString, 5, 0)
dbf.add_field("SITE_NAME", dbflib.FTString, 64, 0)
dbf.add_field("ELEV_M", dbflib.FTDouble, 10, 2)
dbf.add_field("YYYYMMDD", dbflib.FTString, 8, 0)
dbf.add_field("HHMM", dbflib.FTString, 4, 0)
dbf.add_field("HI_T_F", dbflib.FTInteger, 10, 0)
dbf.add_field("LO_T_F", dbflib.FTInteger, 10, 0)
dbf.add_field("PREC", dbflib.FTDouble, 10, 2)
dbf.add_field("SNOW", dbflib.FTDouble, 10, 2)
dbf.add_field("SDEPTH", dbflib.FTDouble, 10, 2)
dbf.add_field("PMONTH", dbflib.FTDouble, 10, 2)
dbf.add_field("SMONTH", dbflib.FTDouble, 10, 2)

shp = shapelib.create("coop_"+ts, shapelib.SHPT_POINT)
示例#32
0
def export_shapefile(txn, tp):
    """Export a Shapefile of Road Conditions

    Writes iaroad_cond.{shp,shx,dbf,prj} into /tmp, zips them, inserts
    the zip into LDM via pqinsert, then deletes the temporary files.

    Args:
        txn: database cursor; execute() is called and rows are read as
            dictionaries
        tp: product timestamp (naive local time); shifted by +6 hours to
            approximate UTC -- NOTE(review): fixed offset ignores DST,
            confirm intended
    """
    os.chdir("/tmp")
    # Attribute table schema: one record per road segment
    dbf = dbflib.create("iaroad_cond")
    dbf.add_field("SEGID", dbflib.FTInteger, 6, 0)
    dbf.add_field("MAJOR", dbflib.FTString, 10, 0)
    dbf.add_field("MINOR", dbflib.FTString, 128, 0)
    dbf.add_field("US1", dbflib.FTInteger, 4, 0)
    dbf.add_field("ST1", dbflib.FTInteger, 4, 0)
    dbf.add_field("INT1", dbflib.FTInteger, 4, 0)
    dbf.add_field("TYPE", dbflib.FTInteger, 4, 0)
    dbf.add_field("VALID", dbflib.FTString, 12, 0)
    dbf.add_field("COND_CODE", dbflib.FTInteger, 4, 0)
    dbf.add_field("COND_TXT", dbflib.FTString, 120, 0)
    dbf.add_field("BAN_TOW", dbflib.FTString, 1, 0)
    dbf.add_field("LIM_VIS", dbflib.FTString, 1, 0)

    # Polyline geometries for the road segments
    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)

    txn.execute("""select b.*, c.*, ST_astext(b.geom) as bgeom from
         roads_base b, roads_current c WHERE b.segid = c.segid
         and valid is not null and b.geom is not null""")
    i = 0
    for row in txn:
        s = row["bgeom"]
        # Convert WKT geometry into shapelib vertex lists
        f = wellknowntext.convert_well_known_text(s)
        valid = row["valid"]
        d = {}
        d["SEGID"] = row["segid"]
        d["MAJOR"] = row["major"]
        d["MINOR"] = row["minor"]
        d["US1"] = row["us1"]
        d["ST1"] = row["st1"]
        d["INT1"] = row["int1"]
        d["TYPE"] = row["type"]
        d["VALID"] = valid.strftime("%Y%m%d%H%M")
        d["COND_CODE"] = row["cond_code"]
        d["COND_TXT"] = row["raw"]
        # Store only the first character of the boolean's string form
        d["BAN_TOW"] = str(row["towing_prohibited"])[0]
        d["LIM_VIS"] = str(row["limited_vis"])[0]

        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f)
        shp.write_object(-1, obj)  # -1 appends a new shape
        dbf.write_record(i, d)

        del (obj)
        i += 1

    # Deleting the handles closes the files (presumably flushed on
    # dealloc by shapelib/dbflib -- TODO confirm)
    del (shp)
    del (dbf)
    z = zipfile.ZipFile("iaroad_cond.zip", 'w')
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    o = open('iaroad_cond.prj', 'w')
    o.write(EPSG26915)
    o.close()
    z.write("iaroad_cond.prj")
    z.close()

    # NOTE(review): naive UTC shift assumes a fixed UTC-6 local offset
    utc = tp + datetime.timedelta(hours=6)
    subprocess.call(
        ("/home/ldm/bin/pqinsert -p 'zip ac %s "
         "gis/shape/26915/ia/iaroad_cond.zip "
         "GIS/iaroad_cond_%s.zip zip' iaroad_cond.zip"
         "") % (utc.strftime("%Y%m%d%H%M"), utc.strftime("%Y%m%d%H%M")),
        shell=True)

    # Clean up the temporary files we produced above
    for suffix in ['shp', 'shx', 'dbf', 'prj', 'zip']:
        os.unlink("iaroad_cond.%s" % (suffix, ))
示例#33
0
 def test_add_field(self):
     """add_field must raise RuntimeError for an invalid field spec."""
     handle = dbflib.create("test.dbf")
     # String fields may not carry a nonzero decimal precision, so this
     # call is expected to fail.
     self.assertRaises(
         RuntimeError, handle.add_field, "str", dbflib.FTString, 10, 5)
示例#34
0
else:
    sts = datetime.datetime(int(sys.argv[1]), 1, 1)
    ets = datetime.datetime(int(sys.argv[1]), 12, 31)

now = sts
ohrap = {}
wcursor.execute("SELECT hrap_i from hrap_utm ORDER by hrap_i ASC")
for row in wcursor:
    ohrap[row[0]] = {'rain': 0, 'hours': 0, 'mrain': 0}

hrapi = ohrap.keys()
hrapi.sort()

while now < ets:
    dbfname = "%s_rain" % (now.strftime("%Y%m"), )
    dbf = dbflib.create(dbfname)
    dbf.add_field("RAINFALL", dbflib.FTDouble, 8, 2)
    dbf.add_field("RAINHOUR", dbflib.FTDouble, 8, 2)
    dbf.add_field("RAINPEAK", dbflib.FTDouble, 8, 2)

    wcursor.execute("""select hrap_i, rainfall /25.4 as rain,
        peak_15min /25.4 * 4 as mrain, hr_cnt / 4.0 as hours from
        monthly_rainfall_%s  WHERE valid = '%s'
        ORDER by hrap_i ASC
        """ % (now.strftime("%Y"), now.strftime("%Y-%m-%d")))

    hrap = ohrap
    for row in wcursor:
        hrap[row[0]] = {'rain': row[1],
                        'hours': row[3], 'mrain': row[2]}
示例#35
0
File: BGL_8.2.2.py  Project: wgwei/BGSL
    # NOTE(review): mutable default argument flags=['D', 'E', 'N'] is
    # shared across calls; safe only if never mutated -- TODO confirm
    def __init__(self,Cvsq, Ctsq, buildingFile, receiverFile, sourceFile, resultOutFile,\
        rZoneSize=2000.0, r2sDist=1500.0, flags=['D', 'E', 'N'], modelType='scattering'):
        ''' Set up and run the noise-propagation model.

            buildingFile, receiverFile, sourceFile and resultOutFile
            are shape file names; resultOutFile is also the new folder
            created for the outputs.
            rZoneSize is the side length of the smaller receiver region;
            r2sDist is the maximum source-to-receiver distance used to
            build the corresponding source/building zone.
            flags -> 'D', 'E', 'N' represent day, evening and night.
        '''
        self.Cvsq = Cvsq
        self.Ctsq = Ctsq
        self.i = 0   # a counter used when writing results out
        print 'initialing...'
        # common constants
        self.fr = [63, 125, 250, 500, 1000, 2000, 4000, 8000]   # octave bands (Hz)
        self.waveLength = 340.0/np.array(self.fr)   # speed of sound / frequency
        self.Aweight =  np.array([-26.2, -16.1,-8.6, -3.2, 0, 1.2, 1, -1.1])   # A-weighting per band: {63:-26.2, 125:-16.1, 250:-8.6, 500:-3.2, 1000:0, 2000:1.2, 4000:1, 8000:-1.1}
        self.sHi = 0.0   # source height
        self.rHi = 4.5   # receiver height
        self.modelType = modelType
        # preparing to write results out: copy the receiver geometry and
        # create a fresh attribute table alongside it
        print "preparing to write to file: ", resultOutFile
        if not os.path.exists(resultOutFile):
            os.mkdir(resultOutFile)
        shutil.copy(receiverFile+'.shp', resultOutFile)
        shutil.copy(receiverFile+'.shx', resultOutFile)
        self.DBFOut = dbflib.create(resultOutFile+'\\'+resultOutFile)
        self.DBFOut.add_field("GENTID", dbflib.FTInteger, 7, 0)  # add the identical ID
        self.fieldName = ['L_63', 'L_125', 'L_250', 'L_500', 'L_1000', 'L_2000', 'L_4000', 'L_8000', 'LA']
        for fg in flags:
            for f in self.fieldName: # one level field per band per day period
                self.DBFOut.add_field(f+fg, dbflib.FTDouble, 9, 1)

        # write log file named with the current timestamp
        print 'Create log'
        yr = time.gmtime()[0]
        mon = time.gmtime()[1]
        day = time.gmtime()[2]
        hour = time.gmtime()[3]
        minu = time.gmtime()[4]
        label = str(yr)+str(mon)+str(day)+str(hour)+str(minu)
        logw = open(resultOutFile+'\\'+'log_' + label +'.txt', 'w')
        logw.write(time.ctime()+'\r\n')
        logw.write('buildingFile: '+buildingFile+'\r\n'+'receiverFile: '+receiverFile+'\r\n')
        logw.write('sourceFiel: '+sourceFile+'\r\n')
        logw.write('Dimension of receiver zone: '+str(rZoneSize)+'*'+str(rZoneSize)+'\r\n')
        logw.write('Maximum distance from source to receiver: '+str(r2sDist)+'\r\n')
        logw.write('Source type: ' + str(flags)+'\r\n')
        logw.write('Model type: ' + modelType + '\r\n\r\n')
        tic = time.clock()

        # Load source/receiver/building objects, caching each as a
        # pickle under pkData so later runs skip the shapefile parse
        print 'Prepare source, receiver and buildings'
#        try:
        if not os.path.exists('pkData'):
            os.mkdir('pkData')
        if not os.path.exists('pkData\\PKLsourceOBJ.pkl'):
            sourceObjects = packSourceToPKL(sourceFile)
            sw = open('pkData\\PKLsourceOBJ.pkl', 'wb')
            pickle.dump(sourceObjects, sw, 2)  # protocol 2 for version 2.x, 3 for 3.x
            sw.close()
        else:
            sr = open('pkData\\PKLsourceOBJ.pkl', 'rb')
            sourceObjects = pickle.load(sr)
            sr.close()
        if not os.path.exists('pkData\\PKLreceiverOBJ.pkl'):
            receiverObjects = packReceiverToPKL(receiverFile, 'GID')
            rw = open('pkData\\PKLreceiverOBJ.pkl', 'wb')
            pickle.dump(receiverObjects, rw, 2)
            rw.close()
        else:
            rr = open('pkData\\PKLreceiverOBJ.pkl', 'rb')
            receiverObjects = pickle.load(rr)
            rr.close()
        if not os.path.exists('pkData\\PKLbuildingOBJ.pkl'):
            polygonObjects = packBuildingToPKL(buildingFile)
            bw = open('pkData\\PKLbuildingOBJ.pkl', 'wb')
            pickle.dump(polygonObjects, bw, 2)
            bw.close()
        else:
            br = open('pkData\\PKLbuildingOBJ.pkl', 'rb')
            polygonObjects = pickle.load(br)
            br.close()

        toc1 = time.clock()
        logw.write('Initializing takes '+str(toc1-tic)+' seconds\r\n')

        print 'calculating...'
        # If the receiver extent exceeds the thresholds, split it into
        # zones and run the model per zone; otherwise run over the whole
        # area at once.  info()[2]/info()[3] are the shapefile min/max
        # bounds (presumably (x, y, ...) tuples -- TODO confirm)
        rSHP = shapelib.open(receiverFile)
        if rSHP.info()[3][0]-rSHP.info()[2][0]>9999999999 or rSHP.info()[3][1]-rSHP.info()[2][1]>999999999:
            smObj = SmallerZones(receiverFile, rZoneSize, r2sDist)
            [pxList, pyList] = smObj._generateGridVertices()
            [rZones, sbZones] = smObj._verticesToZones(pxList, pyList)
            print 'Divid to ', len(rZones), ' zones'
            for n in xrange(len(rZones)):
                print 'Calculating zone ', n
                rz = rZones[n]
                sbz = sbZones[n]
                # shrinking region or load shape
                self.receiverObjects = shrinkReceiverZone(receiverObjects,rz)
                if len(self.receiverObjects)>0:
                    listPolygonObjects = shrinkBuildingZone(polygonObjects, sbz)
                    self.buildings = KDTreeManualPolygon(listPolygonObjects)
                    self.sourceObjects = shrinkSourceZone(sourceObjects, sbz)
                    self.runModel()
        else:
            self.buildings = KDTreeManualPolygon(polygonObjects)
            self.sourceObjects = sourceObjects
            self.receiverObjects = receiverObjects
            self.runModel()
#        except:
#            logw.write('\r\n\r\nAborted unexpectedly!!! \r\n\r\n')
        toc2 = time.clock()
        logw.write('Calculating takes '+str(toc2-tic)+' seconds\r\n')
        logw.close()
示例#36
0
# Build a "rain" dbf of Aug-Oct 2007 rainfall totals from the wepp DB
import pg, dbflib, mx.DateTime, shutil, sys, Numeric
from Scientific.IO.NetCDF import *
from Scientific.IO.ArrayIO import *
from pyIEM import iemdb
i = iemdb.iemdb()
wepp = i['wepp']

# Seed every HRAP cell with zeros so cells missing from the query below
# still get a record
ohrap = {}
rs = wepp.query("SELECT hrap_i from hrap_utm ORDER by hrap_i ASC").dictresult()
for i in range(len(rs)):
    ohrap[ int(rs[i]['hrap_i']) ] = {'rain': 0, 'hours': 0, 'mrain': 0}

hrapi = ohrap.keys()
hrapi.sort()

dbf = dbflib.create("rain"  )
dbf.add_field("RAINFALL", dbflib.FTDouble, 8, 2)
dbf.add_field("RAINHOUR", dbflib.FTDouble, 8, 2)
dbf.add_field("RAINPEAK", dbflib.FTDouble, 8, 2)

# Totals are converted from mm to inches (/25.4); peak_15min is scaled
# to an hourly rate (*4); hr_cnt is in quarter hours (/4.0)
rs = wepp.query("select hrap_i, sum(rainfall) /25.4 as rain, \
	max(peak_15min) /25.4 * 4 as mrain, sum(hr_cnt) / 4.0 as hours from \
	monthly_rainfall_2007 WHERE valid IN ('2007-08-01','2007-09-01','2007-10-01') \
        GROUP by hrap_i ORDER by hrap_i ASC" ).dictresult()

# NOTE(review): this aliases ohrap rather than copying it, so the loop
# below mutates ohrap too
hrap = ohrap
for i in range(len(rs)):
    #print rs[i]
    hrap[ int(rs[i]['hrap_i']) ]= {'rain': float(rs[i]['rain']), \
           'hours': float(rs[i]['hours']), 'mrain': float(rs[i]['mrain']) }
示例#37
0
def make_dbf(file):
    """Create a new dbf file holding a NAME, an INT and a FLOAT field."""
    dbf = dbflib.create(file)
    # (name, type, width, decimal places)
    field_specs = (
        ("NAME", dbflib.FTString, 20, 0),
        ("INT", dbflib.FTInteger, 10, 0),
        ("FLOAT", dbflib.FTDouble, 10, 4),
    )
    for fname, ftype, width, precision in field_specs:
        dbf.add_field(fname, ftype, width, precision)
示例#38
0
        for m in range(len(conv_gml.getShp_records())):
            vertices_up = []
            temp_a = vertices_up.append
            for n in range (len(conv_gml.getShp_records()[m])):
                temp_a(conv_gml.getShp_records()[m][n])
            obj = shapelib.SHPObject(shapelib.SHPT_POLYGON, 1,vertices_up)
            outfile.write_object(-1, obj)
    else:
        outfile = shapelib.create(output_path, shapelib.SHPT_POINT)
        outfile.write_object(-1, shapelib.SHPObject(shapelib.SHPT_POINT, 1, [[(-9999,-9999)]]))

    del outfile

    dbf_recc_type = {}
    dbf_float_decc = {}
    dbf = dbflib.create(output_path)

    dbf.add_field("FID", dbflib.FTInteger, int(8), int(0))
    for j in range(len(conv_gml.getRec_dbf())):
        ttype, tname, tlen, tdecc = (conv_gml.getRec_dbf())[j]
        if ttype == 'string':
            dbf.add_field(tname, dbflib.FTString, int(tlen), int(tdecc))
        elif ttype == 'integer':
            dbf.add_field(tname, dbflib.FTInteger, int(tlen), int(tdecc))
        elif ttype == 'decimal':
            dbf.add_field(tname, dbflib.FTDouble, int(tlen), int(tdecc))
        dbf_recc_type[tname] = ttype
        dbf_float_decc[tname] = tdecc

    # Records added as a dictionary...
    for k in range(len(conv_gml.getDbf_records())):
示例#39
0
 max_dbz_height | real                     |
 top            | real                     |
 drct           | smallint                 |
 sknt           | smallint                 |
 valid          | timestamp with time zone |
"""

# Build the current_nexattr point shapefile in /tmp
os.chdir("/tmp")

# Delete anything older than 20 minutes
now = mx.DateTime.gmt()
eTS = mx.DateTime.gmt() - mx.DateTime.RelativeDateTime(minutes=+20)

# Point geometries for the storm-attribute cells
shp = shapelib.create("current_nexattr", shapelib.SHPT_POINT)

# Attribute table mirroring the columns described in the docstring above
dbf = dbflib.create("current_nexattr")
dbf.add_field("VALID", dbflib.FTString, 12, 0)
dbf.add_field("STORM_ID", dbflib.FTString, 2, 0)
dbf.add_field("NEXRAD", dbflib.FTString, 3, 0)
dbf.add_field("AZIMUTH", dbflib.FTInteger, 3, 0)
dbf.add_field("RANGE", dbflib.FTInteger, 3, 0)
dbf.add_field("TVS", dbflib.FTString, 10, 0)
dbf.add_field("MESO", dbflib.FTString, 10, 0)
dbf.add_field("POSH", dbflib.FTInteger, 3, 0)
dbf.add_field("POH", dbflib.FTInteger, 3, 0)
dbf.add_field("MAX_SIZE", dbflib.FTDouble, 5, 2)
dbf.add_field("VIL", dbflib.FTInteger, 3, 0)
dbf.add_field("MAX_DBZ", dbflib.FTInteger, 3, 0)
dbf.add_field("MAX_DBZ_H", dbflib.FTDouble, 5, 2)
dbf.add_field("TOP", dbflib.FTDouble, 5, 2)
dbf.add_field("DRCT", dbflib.FTDouble, 3, 0)
示例#40
0
    sql = """SELECT model_twp, 
      sum(avg_precip) as avg_precip, 
      sum(avg_loss) as avg_loss, 
      sum(avg_runoff) as avg_runoff from results_by_twp 
      WHERE valid BETWEEN '%s-01' and ('%s-01'::date + '1 month'::interval) 
      GROUP by model_twp""" % (ts.strftime("%Y-%m"), ts.strftime("%Y-%m") )
    day1 = (ts + mx.DateTime.RelativeDateTime(day=1)).strftime("%Y%m%d")
    day2 = (ts + mx.DateTime.RelativeDateTime(day=1,months=1) - mx.DateTime.RelativeDateTime(days=1) ).strftime("%Y%m%d")

# Run the aggregation query built above
wcursor.execute( sql )

# Geometry type is chosen by the CGI "point" parameter: centroids vs
# full township polygons
if form.has_key("point"):
  shp = shapelib.create(fp, shapelib.SHPT_POINT)
else:
  shp = shapelib.create(fp, shapelib.SHPT_POLYGON)
# Attribute table: date range plus per-township water-balance values
dbf = dbflib.create(fp)
dbf.add_field("DAY_STA", dbflib.FTString, 8, 0)
dbf.add_field("DAY_END", dbflib.FTString, 8, 0)
dbf.add_field("MODL_TWP", dbflib.FTString, 10, 0)
dbf.add_field("PRECIP", dbflib.FTDouble, 8, 4)
dbf.add_field("LOSS", dbflib.FTDouble, 8, 4)
dbf.add_field("RUNOFF", dbflib.FTDouble, 8, 4)

i = 0
for row in wcursor:
  m = row['model_twp']
  loss = row['avg_loss']
  runoff = row['avg_runoff']
  precip = row['avg_precip']
  f = wellknowntext.convert_well_known_text( twp[m] )
  if form.has_key("point"):
示例#41
0
def generate_shapefile(ts):
    """ Generate a shapefile of this data

    Builds iaroad_cond.{shp,shx,dbf,prj} from the roads_base /
    roads_current tables (read via the module-level pcursor), zips the
    four files, inserts the zip into LDM via pqinsert, then deletes the
    temporary files.

    Args:
        ts: timezone-aware product timestamp; converted to UTC for the
            pqinsert product names
    """
    # Now we generate a shapefile....
    # Attribute table: one record per road segment
    dbf = dbflib.create("iaroad_cond")
    dbf.add_field("SEGID", dbflib.FTInteger, 4, 0)
    dbf.add_field("MAJOR", dbflib.FTString, 10, 0)
    dbf.add_field("MINOR", dbflib.FTString, 40, 0)
    dbf.add_field("US1", dbflib.FTInteger, 4, 0)
    dbf.add_field("ST1", dbflib.FTInteger, 4, 0)
    dbf.add_field("INT1", dbflib.FTInteger, 4, 0)
    dbf.add_field("TYPE", dbflib.FTInteger, 4, 0)
    dbf.add_field("VALID", dbflib.FTString, 12, 0)
    dbf.add_field("COND_CODE", dbflib.FTInteger, 4, 0)
    dbf.add_field("COND_TXT", dbflib.FTString, 120, 0)
    dbf.add_field("BAN_TOW", dbflib.FTString, 1, 0)
    dbf.add_field("LIM_VIS", dbflib.FTString, 1, 0)

    # Polyline geometries for the road segments
    shp = shapelib.create("iaroad_cond", shapelib.SHPT_ARC)

    pcursor.execute("""select b.*, c.*, astext(b.geom) as bgeom from 
         roads_base b, roads_current c WHERE b.segid = c.segid""")
    i = 0
    for row in pcursor:
        s = row["bgeom"]
        # Convert WKT geometry into shapelib vertex lists
        f = wellknowntext.convert_well_known_text(s)
        valid = row["valid"]
        d = {}
        d["SEGID"] = row["segid"]
        d["MAJOR"] = row["major"]
        d["MINOR"] = row["minor"]
        d["US1"] = row["us1"]
        d["ST1"] = row["st1"]
        d["INT1"] = row["int1"]
        d["TYPE"] = row["type"]
        d["VALID"] = valid.strftime("%Y%m%d%H%M")
        d["COND_CODE"] = row["cond_code"]
        d["COND_TXT"] = row["raw"]
        # Store only the first character of the boolean's string form
        d["BAN_TOW"] = str(row["towing_prohibited"])[0]
        d["LIM_VIS"] = str(row["limited_vis"])[0]

        obj = shapelib.SHPObject(shapelib.SHPT_ARC, 1, f )
        shp.write_object(-1, obj)  # -1 appends a new shape
        dbf.write_record(i, d)

        del(obj)
        i += 1

    # Deleting the handles closes the files (presumably flushed on
    # dealloc by shapelib/dbflib -- TODO confirm)
    del(shp)
    del(dbf)
    z = zipfile.ZipFile("iaroad_cond.zip", 'w')
    z.write("iaroad_cond.shp")
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    # Projection file is copied from the shared metadata store
    shutil.copyfile("/mesonet/data/gis/meta/26915.prj", "iaroad_cond.prj")
    z.write("iaroad_cond.prj")
    z.close()

    utc = ts.astimezone( pytz.timezone("UTC") )
    subprocess.call("/home/ldm/bin/pqinsert -p 'zip ac %s gis/shape/26915/ia/iaroad_cond.zip GIS/iaroad_cond_%s.zip zip' iaroad_cond.zip" % (
                                            utc.strftime("%Y%m%d%H%M"), 
                                            utc.strftime("%Y%m%d%H%M")), 
                    shell=True )

    # Clean up the temporary files we produced above
    for suffix in ['shp', 'shx', 'dbf', 'prj', 'zip']:
        os.unlink("iaroad_cond.%s" % (suffix,))