Code example #1
File: hyperopt.py  Project: ychaim/freqtrade
def run_optimizer_parallel(self, parallel, asked) -> List:
    # Dispatch one objective evaluation per asked point; wrapping the bound
    # method makes it picklable for joblib's worker processes.
    return parallel(
        delayed(wrap_non_picklable_objects(self.generate_optimizer))(v)
        for v in asked)
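For context, a minimal standalone sketch of the same joblib pattern (the `objective` function and the inputs below are hypothetical stand-ins for `self.generate_optimizer` and `asked`):

from typing import List

from joblib import Parallel, delayed, wrap_non_picklable_objects


def objective(v):
    # Hypothetical stand-in for the real objective function.
    return sum(x * x for x in v)


asked = [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]]
with Parallel(n_jobs=2) as parallel:
    results: List = parallel(
        delayed(wrap_non_picklable_objects(objective))(v) for v in asked)
print(results)  # [0.05..., 0.25, 0.61]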
Code example #2
File: functions.py  Project: vishalbelsare/gplearn
import numpy as np
from joblib import wrap_non_picklable_objects

# `_Function` is the node wrapper class defined earlier in this module.


def make_function(function, name, arity, wrap=True):
    """Make a function node, a representation of a mathematical relationship.

    This factory function creates a function node, one of the core nodes in any
    program. The resulting object is able to be called with NumPy vectorized
    arguments and return a resulting vector based on a mathematical
    relationship.

    Parameters
    ----------
    function : callable
        A function with signature `function(x1, *args)` that returns a NumPy
        array of the same shape as its arguments.

    name : str
        The name for the function as it should be represented in the program
        and its visualizations.

    arity : int
        The number of arguments that the `function` takes.

    wrap : bool, optional (default=True)
        When running in parallel, pickling of custom functions is not supported
        by Python's default pickler. This option will wrap the function using
        cloudpickle allowing you to pickle your solution, but the evolution may
        run slightly more slowly. If you are running single-threaded in an
        interactive Python session or have no need to save the model, set to
        `False` for faster runs.

    """
    if not isinstance(arity, int):
        raise ValueError('arity must be an int, got %s' % type(arity))
    if not isinstance(function, np.ufunc):
        if function.__code__.co_argcount != arity:
            raise ValueError('arity %d does not match required number of '
                             'function arguments of %d.' %
                             (arity, function.__code__.co_argcount))
    if not isinstance(name, str):
        raise ValueError('name must be a string, got %s' % type(name))
    if not isinstance(wrap, bool):
        raise ValueError('wrap must be a bool, got %s' % type(wrap))

    # Check output shape (evaluate once and reuse the result)
    args = [np.ones(10) for _ in range(arity)]
    try:
        result = function(*args)
    except ValueError:
        raise ValueError('supplied function %s does not support arity of %d.' %
                         (name, arity))
    if not hasattr(result, 'shape'):
        raise ValueError(
            'supplied function %s does not return a numpy array.' % name)
    if result.shape != (10,):
        raise ValueError('supplied function %s does not return same shape as '
                         'input vectors.' % name)

    # Check closure for zero & negative input arguments
    args = [np.zeros(10) for _ in range(arity)]
    if not np.all(np.isfinite(function(*args))):
        raise ValueError('supplied function %s does not have closure against '
                         'zeros in argument vectors.' % name)
    args = [-1 * np.ones(10) for _ in range(arity)]
    if not np.all(np.isfinite(function(*args))):
        raise ValueError('supplied function %s does not have closure against '
                         'negatives in argument vectors.' % name)

    if wrap:
        return _Function(function=wrap_non_picklable_objects(function),
                         name=name,
                         arity=arity)
    return _Function(function=function, name=name, arity=arity)
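A brief usage sketch (the protected exponent below is a hypothetical custom function; it passes the shape and closure checks above because it stays finite for zero and negative inputs):

import numpy as np
from gplearn.functions import make_function


def _protected_exp(x1):
    # Return exp(x1), forcing large inputs to 0 so the output stays finite.
    with np.errstate(over='ignore'):
        return np.where(np.abs(x1) < 100, np.exp(x1), 0.0)


exp_fn = make_function(function=_protected_exp, name='exp', arity=1)

The resulting `_Function` object can then be included in the `function_set` of an estimator such as `SymbolicRegressor`.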
Code example #3
import multiprocessing
import time
from itertools import product

import pandas as pd
import xarray as xr
from scipy import spatial

# `util`, `resample_for_gsee`, `_detect_frequency` and `_mod_time_dim` are
# helpers defined elsewhere in the same module.


def run_interface_from_dataset(
        data: xr.Dataset,
        params: dict,
        frequency='detect',
        pdfs_file='builtin',
        num_cores=multiprocessing.cpu_count()) -> xr.Dataset:
    """
    Parameters
    ----------
    data: xarray Dataset
        containing at least one variable 'global_horizontal' with mean
        global horizontal irradiance in W/m2.
        Optional variables: 'diffuse_fraction', 'temperature' in °C
    params: dict
        Parameters for GSEE, i.e. 'tilt', 'azim', 'tracking', 'capacity'.
        'tilt' can be a function of latitude -- see example input.
        'tracking' can be 0, 1, or 2 for no tracking, 1-axis tracking,
        or 2-axis tracking.
    frequency: str, optional
        Frequency of the input data. One of ['A', 'S', 'M', 'D', 'H'],
        for annual, seasonal, monthly, daily or hourly data. Defaults to
        'detect', which attempts to automatically detect the correct
        frequency.
    pdfs_file: str, optional
        Path to a NetCDF file with probability density functions to use
        for each month. Only for annual, seasonal and monthly data.
        Default is 'builtin', which automatically downloads and uses a
        built-in global PDF based on MERRA-2 data. Set to None to disable.
    num_cores: int, optional
        Number of cores that should be used for the computation.
        Default is all available cores.

    Returns
    -------
    xarray Dataset
        PV power output in Wh/hour if frequency is 'H', else in Wh/day

    """
    frequency = _detect_frequency(data, frequency)

    # Produce list of coordinates of all grid points to iterate over
    coord_list = list(product(data['lat'].values, data['lon'].values))

    # Modify time dimension so it fits the requirements of
    # the "resample_for_gsee" function
    data['time'] = _mod_time_dim(pd.to_datetime(data['time'].values), frequency)

    # Shareable list with a place for every coordinate in the grid
    manager = multiprocessing.Manager()
    shr_mem = manager.list([None] * len(coord_list))
    # Store length of coordinate list in prog_mem to draw
    # the progress bar dynamically
    prog_mem = manager.list()
    prog_mem.append(len(coord_list))

    start = time.time()

    if pdfs_file is not None:
        if frequency in ['A', 'S', 'M']:
            pdfs_path = util.return_pdf_path() if pdfs_file == 'builtin' else pdfs_file
            pdfs = xr.open_dataset(pdfs_path)
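            # Map each grid point to its nearest PDF grid point
            # via a k-d tree nearest-neighbour query.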
            pdf_coords = list(product(pdfs['lat'].values, pdfs['lon'].values))
            tree = spatial.KDTree(pdf_coords)
            coord_list_nn = [pdf_coords[int(tree.query([x])[1])] for x in coord_list]
        else:
            raise ValueError(
                'For frequencies other than "A", "S", or "M", '
                '`pdfs_file` must be explicitly set to None.'
            )

    if num_cores > 1:
        from joblib import Parallel, delayed, wrap_non_picklable_objects
        print('Parallel mode: {} cores'.format(num_cores))
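        # Each worker writes its result into its slot of the shared list
        # shr_mem and updates the progress counter in prog_mem.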
        Parallel(n_jobs=num_cores)(delayed(wrap_non_picklable_objects(resample_for_gsee))(
            data.sel(lat=coords[0], lon=coords[1]), frequency, params,
            i, coords, shr_mem, prog_mem,
            None if pdfs_file is None else pdfs.sel(lat=coord_list_nn[i][0], lon=coord_list_nn[i][1])
        ) for i, coords in enumerate(coord_list))
    else:
        print('Single core mode')
        for i, coords in enumerate(coord_list):
            resample_for_gsee(
                data.sel(lat=coords[0], lon=coords[1]),
                frequency, params, i, coords, shr_mem, prog_mem,
                None if pdfs_file is None else pdfs.sel(lat=coord_list_nn[i][0], lon=coord_list_nn[i][1])
            )

    end = time.time()
    print('\nComputation part took: {} seconds'.format(round(end - start, 2)))

    # Stitch together the data
    result = xr.Dataset()
    for piece in shr_mem:
        if isinstance(piece, xr.Dataset):
            result = xr.merge([result, piece])
    result = result.transpose('time', 'lat', 'lon')
    result['time'] = data['time']
    if frequency == 'H':
        result['pv'].attrs['unit'] = 'Wh'
    elif frequency in ['A', 'S', 'M', 'D']:
        result['pv'].attrs['unit'] = 'Wh/day'

    return result
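A minimal invocation sketch (the toy dataset and parameter values are hypothetical; `pdfs_file=None` skips the built-in PDF download, and `num_cores=1` forces single-core mode):

import numpy as np
import pandas as pd
import xarray as xr

times = pd.date_range('2000-01-01', periods=12, freq='MS')
data = xr.Dataset(
    {'global_horizontal': (('time', 'lat', 'lon'),
                           np.full((12, 2, 2), 150.0))},  # mean W/m2
    coords={'time': times, 'lat': [50.0, 50.5], 'lon': [8.0, 8.5]})
params = {'tilt': 35, 'azim': 180, 'tracking': 0, 'capacity': 1000}

result = run_interface_from_dataset(data, params, frequency='M',
                                    pdfs_file=None, num_cores=1)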