def testAsyncPool(self):
    """ test asyncPool wrapper """
    from processing.multiprocess import asyncPoolEC, test_func_dec, test_func_ec
    # work packages: one single-element argument tuple per worker invocation
    work = [(n,) for n in range(5)]
    shared_kwargs = dict(wait=1)
    # each case: (worker function, ltrialnerror flag, expected aggregate exit code)
    cases = [(test_func_dec, True, 0),
             (test_func_ec, True, 4),
             (test_func_ec, False, 0)]
    for worker, ltrap, expected in cases:
        ec = asyncPoolEC(worker, work, shared_kwargs, NP=NP, ldebug=ldebug,
                         ltrialnerror=ltrap)
        assert ec == expected
def testAsyncPool(self):
    ''' test asyncPool wrapper '''
    from processing.multiprocess import asyncPoolEC, test_func_dec, test_func_ec
    # work packages: one single-element argument tuple per worker invocation
    # NOTE: range() instead of Python-2-only xrange(); the sibling test variant
    #       in this code base already uses range(), so this is also consistent
    args = [(n,) for n in range(5)]
    kwargs = dict(wait=1)
    # decorated test function with error trapping: expected to succeed (ec == 0)
    ec = asyncPoolEC(test_func_dec, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
    assert ec == 0
    # error-code test function with error trapping: expected aggregate code 4
    ec = asyncPoolEC(test_func_ec, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
    assert ec == 4
    # same worker without trial-and-error trapping: expected code 0
    ec = asyncPoolEC(test_func_ec, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=False)
    assert ec == 0
# normalize a scalar period specification to a single-element list
if isinstance(periods, (np.integer, int)):
    periods = [periods]
# shall we do some fancy regridding on-the-fly?
griddef = None if grid == 'native' else getCommonGrid(grid)
# print an announcement
print('\n Computing Climatologies for WRF experiments:\n')
print([exp.name for exp in experiments])
if grid != 'native':
    print('\nRegridding to \'{0:s}\' grid.\n'.format(grid))
print('\nOVERWRITE: {0:s}\n'.format(str(loverwrite)))
# assemble argument list ("work packages"): one task per
# (experiment, filetype, domain) combination
args = [(experiment, filetype, domain)
        for experiment in experiments
        for filetype in filetypes
        # effectively, loop over domains
        for domain in (range(1, experiment.domains + 1) if domains is None else domains)]
# static keyword arguments shared by all workers
kwargs = dict(periods=periods, offset=offset, griddef=griddef,
              loverwrite=loverwrite, varlist=varlist)
# call parallel execution function
ec = asyncPoolEC(computeClimatology, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + np.ceil(10. * ec / len(args))) if ec > 0 else 0)
# observational datasets: one task per available resolution
for dsres in dsreses:
    # append to list
    args.append((dataset, mode, stnfct,
                 dict(varlist=varlist, period=None, resolution=dsres)))
# CESM datasets
for experiment in CESM_experiments:
    for filetype in CESM_filetypes:
        for period in periodlist:
            # arguments for worker function: dataset and dataargs
            args.append(('CESM', mode, stnfct,
                         dict(experiment=experiment, varlist=varlist,
                              filetypes=[filetype], period=period, load3D=load3D)))
# WRF datasets
for experiment in WRF_experiments:
    for filetype in WRF_filetypes:
        # effectively, loop over domains
        domain_list = range(1, experiment.domains + 1) if domains is None else domains
        for domain in domain_list:
            for period in periodlist:
                # arguments for worker function: dataset and dataargs
                args.append(('WRF', mode, stnfct,
                             dict(experiment=experiment, varlist=varlist,
                                  filetypes=[filetype], domain=domain, period=period)))
# static keyword arguments
kwargs = dict(loverwrite=loverwrite, varlist=varlist)
## call parallel execution function
ec = asyncPoolEC(performExtraction, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + np.ceil(10. * ec / len(args))) if ec > 0 else 0)
# WRF datasets
for experiment in WRF_experiments:
    for filetype in WRF_filetypes:
        # effectively, loop over domains
        domain_list = domains if domains is not None else range(1, experiment.domains + 1)
        for domain in domain_list:
            for period in periodlist:
                # arguments for worker function: dataset and dataargs
                args.append(('WRF', mode, griddef,
                             dict(experiment=experiment, varlist=varlist,
                                  filetypes=[filetype], domain=domain, period=period)))
# static keyword arguments
kwargs = dict(loverwrite=loverwrite, varlist=varlist)
## call parallel execution function
ec = asyncPoolEC(performRegridding, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + int(10. * ec / len(args))) if ec > 0 else 0)
# observational datasets: one task per available resolution
for dsres in dsreses:
    # append to list
    args.append((dataset, mode, shape_name, shape_dict,
                 dict(varlist=varlist, period=None, resolution=dsres)))
# CESM datasets: arguments for worker function are dataset and dataargs
args.extend(('CESM', mode, shape_name, shape_dict,
             dict(experiment=experiment, varlist=varlist, filetypes=[filetype],
                  period=period, load3D=load3D))
            for experiment in CESM_experiments
            for filetype in CESM_filetypes
            for period in periodlist)
# WRF datasets: effectively, also loop over domains
args.extend(('WRF', mode, shape_name, shape_dict,
             dict(experiment=experiment, varlist=varlist, filetypes=[filetype],
                  domain=domain, period=period))
            for experiment in WRF_experiments
            for filetype in WRF_filetypes
            for domain in (range(1, experiment.domains + 1) if domains is None else domains)
            for period in periodlist)
# static keyword arguments
kwargs = dict(loverwrite=loverwrite, varlist=varlist)
## call parallel execution function
ec = asyncPoolEC(performShapeAverage, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + np.ceil(10. * ec / len(args))) if ec > 0 else 0)
# WRF datasets: one task per (experiment, filetype, domain, period) combination
for experiment in WRF_experiments:
    for filetype in WRF_filetypes:
        # effectively, loop over domains
        for domain in (range(1, experiment.domains + 1) if domains is None else domains):
            for period in periodlist:
                # arguments for worker function: dataset and dataargs
                args.append(('WRF', mode, stnfct,
                             dict(experiment=experiment, varlist=varlist,
                                  filetypes=[filetype], domain=domain,
                                  grid=grid, period=period)))
# static keyword arguments
kwargs = dict(loverwrite=loverwrite, varlist=varlist)
## call parallel execution function
ec = asyncPoolEC(performExtraction, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + int(10. * ec / len(args))) if ec > 0 else 0)
# observational datasets: one task per available resolution
for dsres in dsreses:
    # append to list
    args.append((dataset, mode, griddef,
                 dict(varlist=varlist, period=None, resolution=dsres)))
# CESM datasets
for experiment in CESM_experiments:
    for filetype in CESM_filetypes:
        for period in periodlist:
            # arguments for worker function: dataset and dataargs
            args.append(('CESM', mode, griddef,
                         dict(experiment=experiment, varlist=varlist,
                              filetypes=[filetype], period=period, load3D=load3D)))
# WRF datasets
for experiment in WRF_experiments:
    for filetype in WRF_filetypes:
        # effectively, loop over domains
        domain_list = domains if domains is not None else range(1, experiment.domains + 1)
        for domain in domain_list:
            for period in periodlist:
                # arguments for worker function: dataset and dataargs
                args.append(('WRF', mode, griddef,
                             dict(experiment=experiment, varlist=varlist,
                                  filetypes=[filetype], domain=domain, period=period)))
# static keyword arguments
kwargs = dict(loverwrite=loverwrite, varlist=varlist)
## call parallel execution function
ec = asyncPoolEC(performRegridding, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + int(10. * ec / len(args))) if ec > 0 else 0)
tmpdom = WRF_domains  # NOTE(review): fragment starts mid-branch — presumably the else-case of a domain-selection conditional; confirm against full file
# one task per (domain, period) combination; dataargs dict passed to worker
args.extend(('WRF', mode,
             dict(experiment=experiment, filetypes=WRF_filetypes, grid=grid,
                  varlist=load_list, domain=domain, period=period))
            for domain in tmpdom for period in periodlist)
# static keyword arguments
kwargs = dict(obs_dataset=obs_dataset, bc_method=bc_method, bc_args=bc_args,
              loverwrite=loverwrite, lgzip=lgzip, tag=tag)
# N.B.: formats will be iterated over inside export function
## call parallel execution function
ec = asyncPoolEC(generateBiasCorrection, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + int(10. * ec / len(args))) if ec > 0 else 0)
# assemble argument list and do regridding
# list of arguments for workers, i.e. "work packages": one
# (experiment, filetype, domain) triple per task
args = [(experiment, filetype, domain)
        for experiment in WRF_experiments
        for filetype in WRF_filetypes
        # effectively, loop over domains
        for domain in (range(1, experiment.domains + 1) if domains is None else domains)]
# static keyword arguments
kwargs = dict(periods=periods, offset=offset, griddef=griddef,
              loverwrite=loverwrite, varlist=varlist)
# call parallel execution function
ec = asyncPoolEC(computeClimatology, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + int(10. * ec / len(args))) if ec > 0 else 0)
# NOTE(review): fragment begins with an orphaned `elif` — the matching `if`
# (presumably handling a non-dict `resolutions` spec) lies outside this view
elif isinstance(resolutions,dict):
    # keep only resolutions that appear in the module's time-series grids
    dsreses = [dsres for dsres in resolutions[dataset] if dsres in mod.TS_grids]
for dsres in dsreses:
    args.append( (dataset, mode, dict(grid=grid, varlist=load_list, period=None,
                                      resolution=dsres, unity_grid=unity_grid)) ) # append to list
# CESM datasets
for experiment in CESM_experiments:
    for period in periodlist:
        # arguments for worker function: dataset and dataargs
        args.append( ('CESM', mode, dict(experiment=experiment, filetypes=CESM_filetypes,
                                         grid=grid, varlist=load_list, period=period,
                                         load3D=load3D)) )
# WRF datasets
for experiment in WRF_experiments:
    # effectively, loop over domains
    if WRF_domains is None: tmpdom = range(1,experiment.domains+1)
    else: tmpdom = WRF_domains
    for domain in tmpdom:
        for period in periodlist:
            # arguments for worker function: dataset and dataargs
            args.append( ('WRF', mode, dict(experiment=experiment, filetypes=WRF_filetypes,
                                            grid=grid, varlist=load_list, domain=domain,
                                            period=period)) )
# static keyword arguments
kwargs = dict(obs_dataset=obs_dataset, bc_method=bc_method, bc_args=bc_args,
              loverwrite=loverwrite, lgzip=lgzip, tag=tag)
# N.B.: formats will be iterated over inside export function
## call parallel execution function
ec = asyncPoolEC(generateBiasCorrection, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10+int(10.*ec/len(args))) if ec > 0 else 0)
# WRF datasets
for exp in WRF_experiments:
    # effectively, loop over domains (hoisted: same for every filetype)
    exp_domains = range(1, exp.domains + 1) if domains is None else domains
    for ftype in WRF_filetypes:
        for dom in exp_domains:
            for prd in periodlist:
                # arguments for worker function: dataset and dataargs
                args.append(('WRF', mode, shape_name, shape_dict,
                             dict(experiment=exp, varlist=varlist, filetypes=[ftype],
                                  domain=dom, period=prd)))
# static keyword arguments
kwargs = dict(loverwrite=loverwrite, varlist=varlist)
## call parallel execution function
ec = asyncPoolEC(performShapeAverage, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with fraction of failures (out of 10) as exit code
exit(int(10 + np.ceil(10. * ec / len(args))) if ec > 0 else 0)