if time_axis is not None: units = time_axis.units.split() if( len(units) == 3 ) and ( units[1] == 'since' ): newunits = "%s since 1970-1-1" % units[0] time_axis.toRelativeTime(newunits) rv = input_variable if result is None else result_data t1 = time.time() wpsLog.debug( " $$$ Applied Operation: %s to variable shape %s in time %.4f, result shape = %s" % ( str( operation ), str(input_variable.shape), (t1-t0), rshape ) ) result_mdata['time'] = time_axis return ( rv, result_mdata ) if __name__ == "__main__": from kernels.manager import kernelMgr from request.manager import TaskRequest wpsLog.addHandler( logging.StreamHandler(sys.stdout) ) #logging.FileHandler( os.path.abspath( os.path.join(os.path.dirname(__file__), '..', 'logs', 'wps.log') ) ) ) wpsLog.setLevel(logging.DEBUG) run_args = { 'region': '{"longitude": -137.09327695888, "latitude": 35.487604770915, "level": 85000 }', # , 'data': '{"collection": "MERRA/mon/atmos", "id": "hur"}', # 'operation': '[ {"kernel": "time", "method": "departures", "bounds":"np" } ] ' 'operation': '[ {"kernel": "time", "method": "climatology", "bounds":"annualcycle"} ] ' # 'operation': '[ {"kernel": "time", "method": "annualcycle", "bounds":"np"} ] ' } kernelMgr.run( TaskRequest( request=run_args ) )
def run_kernel( dataSlice ):
    """Run the configured kernel operation on a single data slice.

    Builds the kernel argument set for *dataSlice* from the module-level
    ``run_args``, logs the invocation via the Spark engine logger, and
    delegates execution to ``kernelMgr.run``.

    :param dataSlice: one partition/slice of the input data (project type;
        passed through to ``generate_kernel_args`` -- exact shape not
        visible from this chunk).
    :return: whatever ``kernelMgr.run`` returns for the generated arguments.
    """
    kernel_args = generate_kernel_args( dataSlice, run_args )
    SparkEngine.logger.debug( " Running kernel, args: %s" % kernel_args )
    result = kernelMgr.run( kernel_args )
    return result