def execute( self, task_request, run_args ):
    """Execute a kernel over dataset partitions using the Spark engine.

    :param task_request: task specification (unused here; run_args carries the request data)
    :param run_args: run-argument dict; its JSON 'data' entry must supply 'url' and 'id'
    :return: list of per-partition kernel results gathered via Spark collect()
    """
    SparkEngine.initContext()
    wpsLog.info( "Executing Spark engine, args: %s" % ( run_args ) )
    data = get_json_arg( 'data', run_args )

    def loadPartition( partition_index ):
        # Runs on a Spark worker: open the dataset and read the requested variable.
        import cdms2
        SparkEngine.initLogger()
        SparkEngine.logger.debug( " Reading variable: %s" % str(data) )
        dset_path = cleanup_url( data['url'] )
        varname = data['id']
        f = cdms2.open( dset_path )
        try:
            variable = f[ varname ]
            # Calling the file variable realizes the data in memory before the
            # dataset is closed below.
            return variable()
        finally:
            # Fix: the original leaked the open dataset handle on every call.
            f.close()

    cdmsData = SparkEngine.partitions.map( loadPartition )

    def run_kernel( dataSlice ):
        # Runs on a Spark worker: execute the configured kernel on one data slice.
        kernel_args = generate_kernel_args( dataSlice, run_args )
        SparkEngine.logger.debug( " Running kernel, args: %s" % kernel_args )
        return kernelMgr.run( kernel_args )

    result = cdmsData.map( run_kernel ).collect()
    return result
def execute( self, run_args ):
    """Hand the run arguments to the Celery staging layer and block for its result."""
    from staging.celery.manager import submitTask
    wpsLog.info( " Celery staging task, args = '%s' " % ( str( run_args ) ) )
    pending = submitTask.delay( run_args )
    return pending.get()
def execute( self, tesk_request ):
    """Submit the request to the Celery staging queue and wait for its result."""
    # NOTE(review): 'tesk_request' looks like a typo for 'task_request'; kept
    # as-is to preserve the external (keyword-call) interface.
    from staging.celery.manager import submitTask
    wpsLog.info( " Celery staging task, args = '%s' " % ( str( tesk_request ) ) )
    pending = submitTask.delay( tesk_request )
    return pending.get()
def submitTask( tesk_request ):
    """Celery task entry point: look up the requested engine and dispatch the task to it."""
    # NOTE(review): 'tesk_request' looks like a typo for 'task_request'; kept
    # as-is to preserve the external interface.
    engine_id = tesk_request['engine']
    # The engine registry is attached to this task function elsewhere at startup.
    engine = submitTask.engines.getInstance( engine_id )
    wpsLog.info( " Celery submit task, args = '%s', engine = %s (%s)" % ( str( tesk_request ), engine_id, type(engine) ) )
    return engine.execute( tesk_request )
def submitTask( run_args ):
    """Celery task entry point: resolve the engine named in run_args and run the task."""
    engine_id = run_args['engine']
    # The engine registry is attached to this task function elsewhere at startup.
    engine = submitTask.engines.getInstance( engine_id )
    wpsLog.info( " Celery submit task, args = '%s', engine = %s (%s)" % ( str( run_args ), engine_id, type(engine) ) )
    return engine.execute( run_args )