示例#1
0
def _find_missing_dates(inputdir,
                        mon,
                        year=None):
    """
    Find the NPR Fresh Air episodes missing from *inputdir* for a given
    month and year, and download them in parallel over a thread pool.

    :param str inputdir: directory holding ``NPR.FreshAir.DD.MM.YYYY.m4a`` files.
    :param int mon: month number, 1-12.
    :param int year: four-digit year. Defaults to the current year, evaluated
        at call time (a ``datetime.now()`` expression in the signature would
        be frozen at import time).
    """
    # stdlib ThreadPool lives in multiprocessing.pool, not the package root
    from multiprocessing.pool import ThreadPool

    if year is None:
        year = datetime.datetime.now().date().year

    def _process_freshair_perproc(mydate):
        # Download one episode; return its date on success, None on failure.
        try:
            fname = get_freshair(inputdir, mydate)
            if fname is None: return None
            return mydate
        except Exception as e:
            print(e)
            return None

    assert mon in range(1, 13)
    # every filename a complete month of weekday episodes would contain
    weekdays = npr_utils.weekdays_of_month_of_year(year, mon)
    valid_filenames = set(
        os.path.join(inputdir,
                     'NPR.FreshAir.%02d.%02d.%04d.m4a' % (day, mon, year))
        for day in weekdays)
    # the episode files that actually exist on disk for this month
    filenames_act = set(
        glob.glob(
            os.path.join(inputdir,
                         'NPR.FreshAir.*.%02d.%04d.m4a' % (mon, year))))
    filenames_remain = list(valid_filenames - filenames_act)
    if len(filenames_remain) == 0: return
    print('NUMBER OF CANDIDATE EPS REMAIN FOR %d / %d: %d' %
          (mon, year, len(filenames_remain)))
    # recover the day-of-month from the NPR.FreshAir.DD.MM.YYYY.m4a pattern
    days_remain = [
        int(os.path.basename(filename).split('.')[2])
        for filename in filenames_remain]
    # construct the dates directly rather than via a strptime round-trip
    input_tuples = [datetime.date(year, mon, day) for day in days_remain]
    logging.info('list of input_tuples: %s.' % input_tuples)
    with ThreadPool(processes=min(multiprocessing.cpu_count(),
                                  len(input_tuples))) as pool:
        successes = list(
            filter(None, pool.map(_process_freshair_perproc, input_tuples)))
    print('successes (%d/%d): %s' %
          (len(successes), len(input_tuples), successes))
示例#2
0
def multi_map(func, iterable, num_threads=1, use_threading=False):
    """Apply *func* to every element of *iterable* and return the results.

    With ``num_threads == 1`` the mapping runs serially in this process.
    Otherwise the work is fanned out over a pathos ``ProcessingPool``
    (or ``ThreadPool`` when *use_threading* is true) of ``num_threads``
    workers.
    """
    if num_threads == 1:
        # serial path: no pool machinery needed
        return [func(item) for item in iterable]

    if use_threading:
        pool = mp.ThreadPool(num_threads)
    else:
        pool = mp.ProcessingPool(num_threads)

    results = pool.map(func, iterable)

    # ProcessingPool carries state over between uses; terminate (and, for
    # process pools, restart) to prevent memory leaks across calls.
    pool.terminate()
    if not use_threading:
        pool.restart()
    return results
示例#3
0
def get_all_waitwaits_year(yearnum, inputdir):
    """
    Looks for missing `NPR Wait Wait <waitwait_>`_ episodes in a given year, then downloads them.

    :param int yearnum: the year for which to search for missing `NPR Wait Wait <waitwait_>`_ episodes.
    :param str inputdir: the directory into which the `NPR Wait Wait <waitwait_>`_ episodes are downloaded.
    """
    # stdlib ThreadPool lives in multiprocessing.pool, not the package root
    from multiprocessing.pool import ThreadPool

    order_dates_remain = get_waitwait_valid_dates_remaining_tuples(
        yearnum, inputdir)
    if len(order_dates_remain) == 0: return
    totnum = order_dates_remain[0][1]
    nprocs = multiprocessing.cpu_count()
    # deal the remaining episodes round-robin onto nprocs worker tuples:
    # worker `procno` gets every episode with (order - 1) % nprocs == procno
    input_tuples = [(inputdir, totnum, [
        (date_s, order) for (order, _totnum, date_s) in order_dates_remain
        if (order - 1) % nprocs == procno
    ]) for procno in range(nprocs)]
    time0 = time.time()
    with ThreadPool(processes=nprocs) as pool:
        _ = list(pool.map(_process_waitwaits_by_year_tuple, input_tuples))
    logging.info(
        'processed all Wait Wait downloads for %04d in %0.3f seconds.' %
        (yearnum, time.time() - time0))
示例#4
0
def batch_process(num_workers=8, multiprocess=False, multithread=False):
    """
    Decorator factory that lets a per-sample function also handle batches.

    The wrapped function receives torch tensors, which are detached and
    converted to NumPy arrays. If the leading tensor has exactly ``ndim``
    dimensions, the function is applied directly; with ``ndim + 1``
    dimensions the first axis is treated as a batch and the function is
    mapped over it — via a worker pool when *multiprocess* or *multithread*
    is set, serially otherwise. The (stacked) result comes back as a torch
    tensor on the input tensor's device.

    :param int num_workers: pool size when parallelism is enabled.
    :param bool multiprocess: fan batches out over a process pool.
        Mutually exclusive with *multithread*.
    :param bool multithread: fan batches out over a thread pool.
    """
    assert (multiprocess and multithread) is False, \
        "Either multiprocess or multithread can be True, not both"

    if multiprocess:
        # the stdlib spelling is multiprocessing.Pool; there is no
        # multiprocessing.ProcessPool attribute
        pool = multiprocessing.Pool(num_workers)
    elif multithread:
        # ThreadPool lives in multiprocessing.pool, not the package root
        from multiprocessing.pool import ThreadPool
        pool = ThreadPool(num_workers)
    else:
        pool = None
    # NOTE: the pool (if any) lives as long as the decorated function and is
    # shared by all of its calls. NOTE(review): with a process pool, the
    # closure `single_sample_fn_` below may not be picklable — confirm before
    # enabling multiprocess.

    def decorator(single_sample_fn):
        def wrapper(*tensors, ndim=2, **kwargs):
            device = tensors[0].device
            tensors = [t.detach().cpu().numpy() for t in tensors]

            if tensors[0].ndim == ndim:
                # single sample: apply the function directly
                out = single_sample_fn(*tensors, **kwargs)
            elif tensors[0].ndim == ndim + 1:
                # batched input: map over the leading axis

                def single_sample_fn_(args):
                    return single_sample_fn(*args, **kwargs)

                if pool:
                    outs = pool.map(single_sample_fn_, zip(*tensors))
                else:
                    outs = [single_sample_fn_(args) for args in zip(*tensors)]
                out = np.stack(outs)
            else:
                raise ValueError("The input tensor must have either {} "
                                 "or {} dimensions".format(ndim, ndim + 1))

            return torch.as_tensor(out, device=device)

        return wrapper

    return decorator
示例#5
0
	# get the lons and lats in a grid and then rotate them to a 0-360 -- Pacific Centered Latlong
	lons, lats = coordinates( meta=meta_10min, numpy_array=grid_10min )

	# build some args
	# zero-padded month strings so they sort and format consistently
	months = ['01','02','03','04','05','06','07','08','09','10','11','12']
	# one 10-minute-resolution and one AK/Canada output GeoTIFF per month
	output_filenames_10min = [ os.path.join( cru_path, variable+'_cru_cl20_10min_'+month+'_1961_1990.tif' ) for month in months ]
	output_filenames = [ os.path.join( cru_path, variable+'_cru_cl20_akcan_'+month+'_1961_1990.tif' ) for month in months ]
	# mask of valid cells taken from band 1 of the template raster
	akcan_mask = template_raster.read_masks( 1 )

	# run in parallel
	# one kwargs dict per month for `run`; all months share the same grids,
	# mask and template raster — only z and the output filenames vary
	args_list = [ { 'x':cru_gdf['lon'], 'y':cru_gdf['lat'], 'z':np.array(cru_gdf[ month ]), \
					'meshgrid_10min':(lons,lats), 'output_filename_10min':out_fn_10min, 'output_filename':out_fn, \
					'meta_10min':meta_10min, 'meta_akcan':template_raster.meta, 'mask':akcan_mask, 'method':'cubic', 'template_raster':template_raster } \
						for month, out_fn_10min, out_fn in zip( months, output_filenames_10min, output_filenames ) ]

	# NOTE(review): `mp.ThreadPool` is presumably pathos' ThreadPool — the
	# stdlib `multiprocessing` package has no top-level ThreadPool attribute;
	# confirm the `mp` import. `map` blocks until all 12 jobs finish.
	pool = mp.ThreadPool( 12 )
	out = pool.map( run, args_list )
	pool.close()

# # # EXAMPLE OF USE # # #
# import os
# os.chdir( '/workspace/UA/malindgren/repos/downscale/snap_scripts' )
# cru_folder = '/Data/Base_Data/Climate/World/CRU_grids/CRU_TS20'
# var_fn_dict = { 'hur':os.path.join( cru_folder, 'grid_10min_reh.dat.gz'),'tas':os.path.join( cru_folder, 'grid_10min_tmp.dat.gz'), 'sunp':os.path.join( cru_folder, 'grid_10min_sunp.dat.gz' ) }
# base_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/CRU_TEM_2016'
# template_raster_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/templates/tas_mean_C_AR5_GFDL-CM3_historical_01_1860.tif'

# for variable, cru_filename in var_fn_dict.items():
# 	# print 'working on : %s' % variable
# 	os.system( 'ipython cru_cl20_climatology_preprocess.py -- -p ' + base_path + ' -cru ' + cru_filename + ' -v ' + variable + ' -tr ' + template_raster_fn )
示例#6
0
        for month in months
    ]
    # one AK/Canada output GeoTIFF per month
    output_filenames = [
        os.path.join(cru_path,
                     variable + '_cru_cl20_akcan_' + month + '_1961_1990.tif')
        for month in months
    ]
    # mask of valid cells taken from band 1 of the template raster
    akcan_mask = template_raster.read_masks(1)

    # run in parallel
    # one kwargs dict per month for `run`; all months share the same grids,
    # mask and template raster — only z and the output filenames vary
    args_list = [ { 'x':cru_gdf['lon'], 'y':cru_gdf['lat'], 'z':np.array(cru_gdf[ month ]), \
        'meshgrid_10min':(lons,lats), 'output_filename_10min':out_fn_10min, 'output_filename':out_fn, \
        'meta_10min':meta_10min, 'meta_akcan':template_raster.meta, 'mask':akcan_mask, 'method':'cubic', 'template_raster':template_raster } \
         for month, out_fn_10min, out_fn in zip( months, output_filenames_10min, output_filenames ) ]

    # NOTE(review): `mp.ThreadPool` is presumably pathos' ThreadPool — the
    # stdlib `multiprocessing` package has no top-level ThreadPool attribute;
    # confirm the `mp` import. `map` blocks until all jobs finish.
    pool = mp.ThreadPool(2)
    out = pool.map(run, args_list)
    pool.close()
    # out = map( run, args_list )

# # # EXAMPLE OF USE # # #
# import os
# os.chdir( '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/CODE/tem_ar5_inputs/downscale_cmip5/bin' )
# cru_folder = '/Data/Base_Data/Climate/World/CRU_grids/CRU_TS20'
# var_fn_dict = { 'hur':os.path.join( cru_folder, 'grid_10min_reh.dat.gz'),'tas':os.path.join( cru_folder, 'grid_10min_tmp.dat.gz'), 'sunp':os.path.join( cru_folder, 'grid_10min_sunp.dat.gz' ) }
# base_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_v4'
# template_raster_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/templates/tas_mean_C_AR5_GFDL-CM3_historical_01_1860.tif'

# for variable, cru_filename in var_fn_dict.items():
# 	print( 'working on : %s' % variable )
# 	os.system( 'ipython cru_cl20_1961_1990_climatology_preprocess_10min.py -- -p ' + base_path + ' -cru ' + cru_filename + ' -v ' + variable + ' -tr ' + template_raster_fn )
示例#7
0
    reload(src.car_env)  # pick up any in-session edits to the module
    # headless rendering: start a virtual framebuffer so the env can draw
    # off-screen (presumably pyvirtualdisplay.Display — confirm; note it is
    # never .stop()ped, so each call leaks a display process)
    display = Display(visible = 0, size = (900, 900))
    display.start()
    env = src.car_env.CarRacing()
    env.reset()
    # prime the env with one full-throttle step before handing it back
    env.step([1, 0, 0])
    return env


# build 40 ready-to-step CarRacing environments up front; each get_env()
# call starts its own virtual display, so this is slow and memory-heavy
envs = [get_env() for _ in range(40)]

def step_1(env):
    """Advance *env* one step with full throttle, no steering, no brake."""
    full_throttle = [1, 0, 0]
    return env.step(full_throttle)

from tqdm import tqdm
# stdlib multiprocessing has no top-level ThreadPool attribute; the class
# lives in the multiprocessing.pool submodule
from multiprocessing.pool import ThreadPool

pool = ThreadPool(processes=2)
# serial pass with a progress bar, then the same work via the thread pool
# (kept side by side, presumably for timing comparison — TODO confirm)
res = [step_1(env) for env in tqdm(envs)]
res = pool.map(step_1, envs)
# pool.close()


# from stable_baselines.common.vec_env.subproc_vec_env import SubprocVecEnv
# from src.utils import EnvWrapper
# def get_fully_wrapped_env(wrapper_class=EnvWrapper, wrapper_params={},
#                           n_jobs=2, env_name='CarRacing-v0'):
#     def make_env(rank, seed=0):
#         reload(src.car_env)
#         # def _init():
#         display = Display(visible=0, size=(900, 900))
#         display.start()
#         env = src.car_env.CarRacing()
示例#8
0
from src.noisy_linear_layer import NoisyLinear
from src.q_network import DQN
from src.q_network import Agent



# restore a trained agent from its checkpoint
agent = Agent(device)
agent.load('exp_5epoch_95.pth')

# single smoke-test step with a random action
action = env.action_space.sample()
a = env.step(action)
# a[0] is presumably the observation — TODO confirm; tile it 10x into a batch
act = np.concatenate([a[0][np.newaxis, ...]] * 10)
act.shape  # notebook-style inspection; a no-op as a script statement
agent.act(act)

n_jobs = 2  # worker count for the thread pool below

def step_1(env):
    # Step *env* once using the module-level `action` sampled above;
    # every call reuses that same action.
    return env.step(action)

# stdlib multiprocessing has no top-level ThreadPool attribute; the class
# lives in the multiprocessing.pool submodule
from multiprocessing.pool import ThreadPool

pool = ThreadPool(processes=n_jobs)
# three equivalent ways of stepping every env, kept side by side
# (presumably for timing comparison — TODO confirm)
res = list(map(step_1, envs))
res = [step_1(env) for env in envs]
res = pool.map(step_1, envs)
# pool.close()
#env.action_space.sample()
for _ in tqdm(range(1000)):
    agent.act(act)
# for _ in tqdm(range(1000)):
#     action = env.action_space.sample()
#     a = env.step(action)