def createTiles(self):
    """Consume tile-creation jobs from an SQS queue and log the totals.

    Reads the queue name and process factor from the [General] section of
    the TMS config, fans the queue items out over a PoolManager, and logs
    elapsed time plus the shared created/skipped counters when done.
    Returns early (with an error log) if no queue name is configured.
    """
    # Reset the shared (cross-process) progress counters.
    tilecount.value = 0
    skipcount.value = 0
    queueName = self.tmsConfig.get('General', 'sqsqueue')
    self.t0 = time.time()
    # An empty (or missing) queue name is a configuration error: nothing
    # to consume.  Truthiness test instead of len() <= 0 — also safe if
    # the config lookup ever yields None.
    if not queueName:
        logger.error('Missing queueName')
        return
    procfactor = int(self.tmsConfig.get('General', 'procfactor'))
    pm = PoolManager(factor=procfactor)
    qtiles = QueueTerrainTiles(
        queueName, self.dbConfigFile, self.tmsConfig, self.t0,
        pm.nbOfProcesses
    )
    logger.info('Starting creation of tiles from queue %s ' % (queueName))
    # Chunk size 1: each queue message is handed to a worker individually.
    pm.imap_unordered(createTileFromQueue, qtiles, 1)
    tend = time.time()
    logger.info(
        'It took %s to create %s tiles (%s were skipped) from queue' % (
            str(datetime.timedelta(seconds=tend - self.t0)),
            tilecount.value,
            skipcount.value
        )
    )
def create(self):
    """Create all terrain tiles in parallel chunks and log the totals.

    Computes a per-worker chunk size bounded by the configured maxChunks
    (and never below 1), runs createTile over all tiles via a
    PoolManager, and logs elapsed time plus the shared created/skipped
    counters when done.
    """
    def callback(counter, result):
        # Progress logging: only every 10000th result to keep logs quiet.
        if not counter % 10000:
            logger.info('counter: %s' % counter)
            logger.info('result: %s' % result)

    self.t0 = time.time()
    # Reset the shared (cross-process) progress counters.
    tilecount.value = 0
    skipcount.value = 0
    tiles = TerrainTiles(self.dbConfigFile, self.tmsConfig, self.t0)
    procfactor = int(self.tmsConfig.get('General', 'procfactor'))
    pm = PoolManager(factor=procfactor)
    maxChunks = int(self.tmsConfig.get('General', 'maxChunks'))
    nbTiles = self.numOfTiles()
    tilesPerProc = int(nbTiles / pm.nbOfProcesses)
    # Clamp the chunk size: no larger than an even share per process,
    # but always at least 1 (replaces the original two-step if-chain).
    maxChunks = max(1, min(maxChunks, tilesPerProc))
    logger.info('Starting creation of %s tiles (%s per chunk)' % (
        nbTiles, maxChunks))
    pm.imap_unordered(createTile, tiles, maxChunks, callback=callback)
    tend = time.time()
    logger.info('It took %s to create %s tiles (%s were skipped)' % (
        str(datetime.timedelta(seconds=tend - self.t0)),
        tilecount.value,
        skipcount.value))
def setup_func_with_callback():
    # Test fixture: build a result-storing PoolManager whose progress is
    # echoed through a module-level callback.  Both the pool and the
    # callback are published as globals so the matching test can inspect
    # them after setup runs.
    global pm_c
    global callback

    def callback(counter, res):
        # Python 2 print statements: dump the chunk counter and its
        # result to stdout as results arrive.
        print counter
        print res
    pm_c = PoolManager(numProcs=2, store=True)
    # Map `add` over 0..99 in chunks of 2, invoking the callback per chunk.
    pm_c.imap_unordered(add, xrange(0, 100), 2, callback=callback)
def populateTables(self):
    """Populate one DB table per shapefile, in parallel.

    Builds a PopulateFeaturesArguments job for every shapefile of every
    model in the pyramid, then maps populateFeatures over those jobs with
    one process per file (capped at the machine's CPU count).

    Raises:
        OSError: if the configured output directory or GeoSuite command
            path does not exist.
    """
    logger.info('Action: populateTables()')
    reproject = self.config.get('Reprojection', 'reproject')
    keepfiles = self.config.get('Reprojection', 'keepfiles')
    outDirectory = self.config.get('Reprojection', 'outDirectory')
    geosuiteCmd = self.config.get('Reprojection', 'geosuiteCmd')
    fromPFrames = self.config.get('Reprojection', 'fromPFrames')
    toPFrames = self.config.get('Reprojection', 'toPFrames')
    fromAFrames = self.config.get('Reprojection', 'fromAFrames')
    toAFrames = self.config.get('Reprojection', 'toAFrames')
    logfile = self.config.get('Reprojection', 'logfile')
    errorfile = self.config.get('Reprojection', 'errorfile')
    # Fail fast on bad paths before spawning any workers.
    if not os.path.exists(outDirectory):
        raise OSError('%s does not exist' % outDirectory)
    if not os.path.exists(geosuiteCmd):
        raise OSError('%s does not exist' % geosuiteCmd)
    tstart = time.time()
    featuresArgs = []
    # enumerate() instead of range(len(...)): the model index is part of
    # each job's arguments.
    for i, model in enumerate(modelsPyramid.models):
        for shp in model.__shapefiles__:
            featuresArgs.append(
                PopulateFeaturesArguments(
                    engineURL=self.userEngine.url,
                    modelIndex=i,
                    shpFile=shp,
                    # Config values are strings; '1' means enabled.
                    reproject=reproject == '1',
                    keepfiles=keepfiles == '1',
                    outDirectory=outDirectory,
                    geosuiteCmd=geosuiteCmd,
                    fromPFrames=fromPFrames,
                    toPFrames=toPFrames,
                    fromAFrames=fromAFrames,
                    toAFrames=toAFrames,
                    logfile=logfile,
                    errorfile=errorfile))
    # One process per file, but never more than the machine has cores.
    numProcs = min(multiprocessing.cpu_count(), len(featuresArgs))
    pm = PoolManager(numProcs=numProcs, factor=1)
    pm.imap_unordered(populateFeatures, featuresArgs, 1)
    tend = time.time()
    logger.info('All tables have been created. It took %s' % str(
        datetime.timedelta(seconds=tend - tstart)))
def copyKeys(fromPrefix, toPrefix, zooms):
    """Copy S3 keys from one prefix to another for each zoom level.

    For every zoom in *zooms*, iterates the keys under
    ``fromPrefix<zoom>/`` and copies them to ``toPrefix<zoom>/`` using a
    fresh process pool per zoom level, logging per-zoom and overall
    elapsed time along with the shared copy counter.
    """
    t0 = time.time()
    # Shared (cross-process) counter of keys copied so far.
    copycount.value = 0
    # Hoisted out of the loop: the CPU count never changes between zooms.
    cpuCount = multiprocessing.cpu_count()
    for zoom in zooms:
        log.info('doing zoom ' + str(zoom))
        t0zoom = time.time()
        keys = S3KeyIterator(
            fromPrefix + str(zoom) + '/', toPrefix + str(zoom) + '/', t0)
        # A new pool per zoom level, one process per core.
        pm = PoolManager(numProcs=cpuCount, factor=1)
        pm.imap_unordered(copyKey, keys, 50)
        log.info('It took %s to copy this zoomlevel (total %s)' % (
            str(datetime.timedelta(seconds=time.time() - t0zoom)),
            copycount.value))
    log.info(
        'It took %s to copy for all zoomlevels (total %s)' % (
            str(datetime.timedelta(seconds=time.time() - t0)),
            copycount.value))
def createS3BasedTileJSON(params):
    """Build the TileJSON metadata for S3-hosted tiles.

    Scans the tile pyramid described by *params*, prunes every tile that
    does not exist in the bucket from the layer metadata, and returns the
    resulting TileJSON string.
    """
    startTime = time.time()
    chunkSize = 50
    urls = getBaseUrls(params)
    # Metadata starts out assuming every tile exists; missing ones are
    # removed below.
    meta = LayerMetadata(
        bounds=params.bounds,
        minzoom=params.minZoom,
        maxzoom=params.maxZoom,
        baseUrls=urls,
        description=params.description,
        attribution=params.attribution,
        format=params.format,
        name=params.name
    )
    candidates = Tiles(
        params.bounds,
        params.minZoom,
        params.maxScanZoom,
        startTime,
        basePath=params.bucketBasePath,
        tFormat=params.format,
        gridOrigin=params.gridOrigin,
        tilesURLs=params.tilesURLs
    )
    pool = PoolManager(factor=1, store=True)
    # Workers report the (x, y, z) of every tile that is absent from S3.
    pool.imap_unordered(tileNotExists, candidates, chunkSize)
    for missing in pool.results:
        meta.removeTile(missing[0], missing[1], missing[2])
    return meta.toJSON()
def setup_func_error_within_subprocess():
    """Test fixture: map a worker that raises inside the subprocess.

    Publishes the result-storing pool as the module-level global
    ``pm_error_sub`` for the matching test to inspect.
    """
    global pm_error_sub
    pm_error_sub = PoolManager(store=True, numProcs=2)
    pm_error_sub.imap_unordered(error_func, xrange(100), 2)
def setup_func_error():
    """Test fixture: feed a non-iterable-of-numbers input to the pool.

    Publishes the result-storing pool as the module-level global
    ``pm_error`` for the matching test to inspect.
    """
    global pm_error
    pm_error = PoolManager(store=True, numProcs=2, factor=2)
    pm_error.imap_unordered(add, 'notvalid', 2)
def setup_func_store():
    """Test fixture: run ``add`` over 0..99 with result storing enabled.

    Publishes the pool as the module-level global ``pm_store`` for the
    matching test to inspect.
    """
    global pm_store
    pm_store = PoolManager(store=True, numProcs=2)
    pm_store.imap_unordered(add, xrange(100), 2)
def setup_func_no_store():
    """Test fixture: run ``add`` over 0..99 without storing results.

    Publishes the pool as the module-level global ``pm`` for the
    matching test to inspect.
    """
    global pm
    pm = PoolManager(numProcs=2)
    pm.imap_unordered(add, xrange(100), 2)