def generateOSCOMInputs(rampFilePath, spillFilePath, containRadius, distThreshold): rampShp = shapelib.ShapeFile(rampFilePath) rampDbf = dbflib.DBFFile(rampFilePath[:-4] + '.dbf') # rampFilePath.split('.')[0] spillShp = shapelib.ShapeFile(spillFilePath) spillDbf = dbflib.DBFFile(spillFilePath[:-4] + '.dbf') # spillFilePath.split('.')[0] vesCapList = [] rampGeoList = [] tn = rampDbf.record_count() for j in range(tn): rampObj = rampShp.read_object(j) rampGeo = Point(tuple(rampObj.vertices()[0])) rampGeoList.append(rampGeo) recordDict = rampDbf.read_record(j) vesCapList.append(recordDict['VesCap']) distDict = {} serveDict = {} coverDict = {} volumeList = [] spillGeoList = [] sn = spillDbf.record_count() for j in range(sn): serveDict[j] = [] spillObj = spillShp.read_object(j) spillGeo = Point(tuple(spillObj.vertices()[0])) spillGeoList.append(spillGeo) recordDict = spillDbf.read_record(j) volumeList.append(recordDict['VOLUME_BBL'] * 42) for i in range(tn): dist = rampGeoList[i].distance(spillGeo) if dist <= distThreshold: distDict[(i, j)] = dist serveDict[j].append(i) servedDict = {} for i in range(tn): servedDict[i] = [] for k, v in serveDict.items(): if i in v: servedDict[i].append(k) for j in range(sn): coverDict[j] = [] for l in range(sn): sdist = spillGeoList[j].distance(spillGeoList[l]) if sdist <= containRadius: coverDict[j].append(l) print 'done' return [vesCapList, volumeList, distDict, serveDict, servedDict, coverDict]
def AddBoomField(gridFilePath, sol, outFilePath):
    """Copy a polygon grid shapefile, appending an integer 'Boomed' field.

    Each record gets Boomed=1 when its index appears in `sol` (the
    boom-placement solution), else Boomed=0; geometry and all original
    dbf fields are copied through unchanged.
    """
    gridSHP = shapelib.ShapeFile(gridFilePath)
    # splitext is dot-safe (handles directories containing '.'), unlike split('.')[0]
    gridDBF = dbflib.DBFFile(os.path.splitext(gridFilePath)[0] + '.dbf')
    outSHP = shapelib.create(outFilePath, shapelib.SHPT_POLYGON)
    outDBF = dbflib.create(os.path.splitext(outFilePath)[0] + '.dbf')
    for j in range(gridDBF.field_count()):
        # field_info() -> (type, name, width, decimals); fetch once per field
        # instead of calling it four times as the original did.
        fType, fName, fWidth, fDec = gridDBF.field_info(j)
        outDBF.add_field(fName, fType, fWidth, fDec)
    outDBF.add_field('Boomed', dbflib.FTInteger, 5, 0)
    for j in range(gridDBF.record_count()):
        vert = gridSHP.read_object(j)
        recordDict = gridDBF.read_record(j)
        recordDict['Boomed'] = 1 if j in sol else 0
        outSHP.write_object(-1, vert)
        # Output index tracks j directly (the original kept a redundant
        # jj counter that always equalled j).
        outDBF.write_record(j, recordDict)
def calculateShoreLength(gridFilePath, shoreFilePath, outFilePath):
    """Intersect each grid cell with the shoreline and record the length.

    Writes a polygon shapefile with fields ID (record index), Impact
    (copied from the grid dbf) and Length (shoreline length inside the
    cell, in the file's coordinate units).
    """
    gridShp = shapelib.ShapeFile(gridFilePath)
    # splitext is dot-safe, unlike split('.')[0]
    gridDbf = dbflib.DBFFile(os.path.splitext(gridFilePath)[0] + '.dbf')
    shoreShp = shapelib.ShapeFile(shoreFilePath)
    outShp = shapelib.create(outFilePath, shapelib.SHPT_POLYGON)
    outDbf = dbflib.create(os.path.splitext(outFilePath)[0] + '.dbf')
    outDbf.add_field("ID", dbflib.FTInteger, 10, 0)
    outDbf.add_field("Impact", dbflib.FTDouble, 30, 6)
    outDbf.add_field("Length", dbflib.FTDouble, 30, 6)
    # Read the shore object once; the original re-read it for each access.
    shoreVerts = shoreShp.read_object(0).vertices()
    inter = shoreVerts[1:]  # interior rings, if any
    if inter:
        shoreGeo = Polygon(shoreVerts[0], inter).boundary
    else:
        shoreGeo = Polygon(tuple(shoreVerts[0])).boundary
    for j in range(gridDbf.record_count()):
        gridObj = gridShp.read_object(j)
        gridGeo = Polygon(tuple(gridObj.vertices()[0]))
        shoreLength = gridGeo.intersection(shoreGeo).length
        recordDict = gridDbf.read_record(j)
        outShp.write_object(-1, gridObj)
        outDbf.write_record(j, [j, recordDict["Impact"], shoreLength])
def run(): sf = shapelib.open( os.getenv("SWFP_DATADIR") + "/mapnik_render/world_boundaries/world_boundaries_m.shp") d = dbflib.DBFFile( os.getenv("SWFP_DATADIR") + "/mapnik_render/world_boundaries/world_boundaries_m.dbf") num_shapes = sf.info()[0] assert num_shapes == d.record_count() swedish_polygons = 0 for idx in xrange(num_shapes): obj = sf.read_object(idx) rec = d.read_record(idx) if rec['CNTRY_NAME'] == 'Sweden': #print "Sweden: ",obj.vertices() swedish_polygons += 1 assert len(obj.vertices()) == 1 out = [] for vert in obj.vertices()[0]: cd = prj.inverse(mapnik.Coord(vert[1], vert[0])) #print "lat: %s, lon: %s,"%(cd.y,cd.x) out.append(mapper.format_lfv(cd.y, cd.x)) print "Swedpol:", " - ".join(out) print "Swedish polygons: %d" % (swedish_polygons, )
def generateMCLPInputsGrid(rampFilePath, gridFilePath, distThreshold): rampShp = shapelib.ShapeFile(rampFilePath) rampDbf = dbflib.DBFFile(rampFilePath.split('.')[0] + '.dbf') gridShp = shapelib.ShapeFile(gridFilePath) gridDbf = dbflib.DBFFile(gridFilePath.split('.')[0] + '.dbf') rampGeoList = [] tn = rampDbf.record_count() for j in range(tn): rampObj = rampShp.read_object(j) rampGeo = Point(tuple(rampObj.vertices()[0])) rampGeoList.append(rampGeo) recordDict = rampDbf.read_record(j) distDict = {} serveDict = {} gridLenList = [] gridSenList = [] gridGeoList = [] sn = gridDbf.record_count() for j in range(sn): serveDict[j] = [] gridObj = gridShp.read_object(j) gridGeo = Polygon(tuple(gridObj.vertices()[0])).centroid gridGeoList.append(gridGeo) recordDict = gridDbf.read_record(j) gridSenList.append(recordDict['Total_S']) for i in range(tn): dist = rampGeoList[i].distance(gridGeo) if dist <= distThreshold: distDict[(i, j)] = dist serveDict[j].append(i) servedDict = {} for i in range(tn): servedDict[i] = [] for k, v in serveDict.items(): if i in v: servedDict[i].append(k) print 'done' return [gridSenList, serveDict, servedDict]
def generateMCLPInputs(rampFilePath, shoreFilePath, distThreshold): rampShp = shapelib.ShapeFile(rampFilePath) rampDbf = dbflib.DBFFile(rampFilePath.split('.')[0] + '.dbf') shoreShp = shapelib.ShapeFile(shoreFilePath) shoreDbf = dbflib.DBFFile(shoreFilePath.split('.')[0] + '.dbf') rampGeoList = [] tn = rampDbf.record_count() for j in range(tn): rampObj = rampShp.read_object(j) rampGeo = Point(tuple(rampObj.vertices()[0])) rampGeoList.append(rampGeo) recordDict = rampDbf.read_record(j) distDict = {} serveDict = {} shoreSenList = [] shoreGeoList = [] sn = shoreDbf.record_count() for j in range(sn): serveDict[j] = [] shoreObj = shoreShp.read_object(j) shoreGeo = LineString(tuple(shoreObj.vertices()[0])).centroid shoreGeoList.append(shoreGeo) recordDict = shoreDbf.read_record(j) shoreSenList.append(recordDict['ESI_1']) for i in range(tn): dist = rampGeoList[i].distance(shoreGeo) if dist <= distThreshold: distDict[(i, j)] = dist serveDict[j].append(i) servedDict = {} for i in range(tn): servedDict[i] = [] for k, v in serveDict.items(): if i in v: servedDict[i].append(k) print 'done' return [shoreSenList, serveDict, servedDict]
def createPointFast(filePath):
    """Load every record of a point shapefile as a shapely Point.

    Returns the points as a list, in record order.
    """
    shp = shapelib.ShapeFile(filePath)
    dbf = dbflib.DBFFile(filePath.split('.')[0] + '.dbf')
    return [Point(tuple(shp.read_object(idx).vertices()[0]))
            for idx in range(dbf.record_count())]
def generatEBAMInputs(rampFilePath, gridFilePath, speed, hitDay):
    """Build EBAM model inputs: which ramps can reach which grid cells in time.

    A ramp covers a cell when its travel time (distance / speed, in hours)
    is within hitDay days.

    Returns [ebCap, impactList, needBoomList, distDict, coverDict, cdDict]:
        ebCap        -- boom capacity per ramp (dbf field 'EBCap_Mete')
        impactList   -- 'Impact' value per grid cell
        needBoomList -- 'Length' value per grid cell
        distDict     -- {(ramp, cell): distance} for covered pairs
        coverDict    -- {cell: [ramps that reach it in time]}
        cdDict       -- {ramp: [cells it reaches in time]}
    """
    rampShp = shapelib.ShapeFile(rampFilePath)
    rampDbf = dbflib.DBFFile(rampFilePath.split('.')[0] + '.dbf')
    gridShp = shapelib.ShapeFile(gridFilePath)
    gridDbf = dbflib.DBFFile(gridFilePath.split('.')[0] + '.dbf')
    ebCap = []
    rampGeoList = []
    cdDict = {}
    tn = rampDbf.record_count()
    for idx in range(tn):
        rampGeoList.append(Point(tuple(rampShp.read_object(idx).vertices()[0])))
        ebCap.append(rampDbf.read_record(idx)['EBCap_Mete'])
        cdDict[idx] = []
    distDict = {}
    coverDict = {}
    impactList = []
    needBoomList = []
    maxHours = hitDay * 24  # coverage deadline, in hours
    for j in range(gridDbf.record_count()):
        cellGeo = Polygon(tuple(gridShp.read_object(j).vertices()[0]))
        rec = gridDbf.read_record(j)
        impactList.append(rec['Impact'])
        needBoomList.append(rec['Length'])
        coverDict[j] = []
        for i in range(tn):
            dist = rampGeoList[i].distance(cellGeo)
            if dist / speed <= maxHours:
                coverDict[j].append(i)
                cdDict[i].append(j)
                distDict[(i, j)] = dist
    return [ebCap, impactList, needBoomList, distDict, coverDict, cdDict]
def createPolygonFastNoInterior(filePath):
    """Load each polygon record's exterior ring as a shapely Polygon.

    Interior rings are deliberately ignored (hence the name).  buffer(0)
    repairs self-intersecting rings so downstream geometry ops are valid.
    """
    shpList = []
    shp = shapelib.ShapeFile(filePath)
    dbf = dbflib.DBFFile(filePath.split('.')[0] + '.dbf')
    for j in range(dbf.record_count()):
        # Read the object once; the original read it twice per record and
        # computed the interior rings into an unused variable.
        exterior = shp.read_object(j).vertices()[0]
        shpList.append(Polygon(tuple(exterior)).buffer(0))
    return shpList
def list_dbf(file): # print the contents of a dbf file to stdout dbf = dbflib.DBFFile(file) print "%d records, %d fields" % (dbf.record_count(), dbf.field_count()) format = "" for i in range(dbf.field_count()): type, name, len, decc = dbf.field_info(i) if type == 0: format = format + " %%(%s)%ds" % (name, len) elif type == 1: format = format + " %%(%s)%dd" % (name, len) elif type == 2: format = format + " %%(%s)%dg" % (name, len) print format for i in range(dbf.record_count()): print format % dbf.read_record(i)
def loadDBF(self, path):
    """Load the companion .dbf of *path* into self.dbf.

    Fills self.dbf.n_records, n_fields, header (field names in order) and
    field_spec as ('C'|'N'|'F', width, decimals) tuples, then stashes the
    open dbflib handle on self.dbf._dbf.
    """
    dbf = dbflib.DBFFile(path[:-4] + '.dbf')
    self.dbf.n_records = dbf.record_count()
    self.dbf.n_fields = dbf.field_count()
    self.dbf.header = []
    self.dbf.field_spec = []
    # dbflib field type codes: 0 = string, 1 = integer, 2 = float.
    specCode = {0: 'C', 1: 'N', 2: 'F'}
    for idx in range(self.dbf.n_fields):
        fType, fName, fLen, fDec = dbf.field_info(idx)
        self.dbf.header.append(fName)
        # Unrecognized type codes get a header entry but no field_spec,
        # matching the original if/elif chain.
        if fType in specCode:
            self.dbf.field_spec.append((specCode[fType], fLen, fDec))
    self.dbf._dbf = dbf
def simulateRampCapacity(rampFilePath, outFilePath):
    """Write a copy of the ramp point shapefile with simulated capacities.

    Output fields: ID (record index), EBCap (uniform random 500..5000),
    VesCap (0-5, weighted toward small vessel counts).
    """
    rampShp = shapelib.ShapeFile(rampFilePath)
    # splitext is dot-safe (handles directories containing '.'), unlike split('.')[0]
    rampDbf = dbflib.DBFFile(os.path.splitext(rampFilePath)[0] + '.dbf')
    outShp = shapelib.create(outFilePath, shapelib.SHPT_POINT)
    outDbf = dbflib.create(os.path.splitext(outFilePath)[0] + '.dbf')
    outDbf.add_field("ID", dbflib.FTInteger, 10, 0)
    outDbf.add_field("EBCap", dbflib.FTInteger, 30, 0)
    outDbf.add_field("VesCap", dbflib.FTInteger, 10, 0)
    tn = rampDbf.record_count()
    ebCap = randint(500, high=5001, size=tn)  # high is exclusive -> 500..5000
    vesCap = choice(6, tn, p=[0.5, 0.2, 0.1, 0.1, 0.05, 0.05])
    for j in range(tn):
        rampObj = rampShp.read_object(j)
        outShp.write_object(-1, rampObj)
        # The original also read the input dbf record here but never used it.
        outDbf.write_record(j, [j, ebCap[j], vesCap[j]])