Example #1
    def get_app(self, function: str) -> Callable:
        """Obtains a Parsl python_application.

        Parameters
        ----------
        function : str
            A full path to a function

        Returns
        -------
        callable
            The desired AppFactory

        Examples
        --------

        >>> get_app("numpy.einsum")
        <PythonApp "AppFactory for einsum">
        """

        from parsl.app.app import python_app

        if function in self.app_map:
            return self.app_map[function]

        func = self.get_function(function)

        # TODO set walltime and the like
        self.app_map[function] = python_app(func, data_flow_kernel=self.client)

        return self.app_map[function]
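
A minimal usage sketch for the method above, assuming adapter is a hypothetical instance of the surrounding class with a loaded Parsl DataFlowKernel attached as adapter.client; calling the returned app yields an AppFuture rather than the result itself:

import numpy as np

a = np.ones((2, 3))
b = np.ones((3, 4))

app = adapter.get_app("numpy.einsum")   # 'adapter' is a hypothetical instance of the class above
future = app("ij,jk->ik", a, b)         # returns a parsl AppFuture, not the computed array
print(future.result())                  # blocks until the einsum task finishes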
Example #2
    def get_function(self, function):
        """Obtains a Python function wrapped in a Parsl Python App

        Parameters
        ----------
        function : str
            A full path to a function

        Returns
        -------
        callable
            The desired AppFactory

        Examples
        --------

        >>> get_function("numpy.einsum")
        <PythonApp "AppFactory for einsum">
        """

        import importlib
        import operator
        from parsl.app.app import python_app

        if function in self.function_map:
            return self.function_map[function]

        module_name, func_name = function.split(".", 1)
        module = importlib.import_module(module_name)
        func = operator.attrgetter(func_name)(module)

        # TODO set walltime and the like
        self.function_map[function] = python_app(
            func, data_flow_kernel=self.dataflow)

        return self.function_map[function]
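
The dotted-path lookup above also resolves nested attributes, because only the first dot is split off and operator.attrgetter walks the remaining path. A standalone check, independent of the class:

import importlib
import operator

path = "numpy.linalg.norm"
module_name, func_name = path.split(".", 1)    # -> "numpy", "linalg.norm"
module = importlib.import_module(module_name)
func = operator.attrgetter(func_name)(module)  # nested attribute lookup
print(func([3.0, 4.0]))                        # 5.0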
Example #3
def run(workflow, config, decorate, directory, capillary, gsasloc, pdf, gpx):
    """
    GSAS Parsl CLI runner
    """
    import importlib
    import json
    import gsas.configs
    config = importlib.import_module(config)

    funcname = workflow.rsplit('.', 1)[-1]
    modulename = '.'.join(workflow.rsplit('.')[:-1])

    print(workflow, funcname, modulename)
    globals()['executor'] = config.executor
    gsas.configs.executor = config.executor

    module = importlib.import_module(modulename)

    func = getattr(module, funcname)
    _dfunc = func

    if decorate == 'python_app':
        from parsl.app.app import python_app
        _dfunc = python_app(func, executors=[config.executor])

    if decorate == 'container_app':
        from parsl.app.app import container_app
        _dfunc = container_app(func, executors=[config.executor])

    if decorate == 'bash_app':
        from parsl.app.app import bash_app
        _dfunc = bash_app(func, executors=[config.executor])

    setattr(module, funcname, _dfunc)

    run = getattr(module, 'run')

    _pdf = {}
    if pdf is not None:
        with open(pdf, 'r') as configfile:
            _pdf = json.loads(configfile.read())

    run(directory=directory, gpx=gpx, config=config,
        pdf=_pdf, capillary=capillary, gsasloc=gsasloc)
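
The essential pattern in this runner, wrapping a module-level function with python_app and rebinding it so the module's own run() launches it as Parsl tasks, reduces to the sketch below; my_workflow and step are hypothetical names standing in for the workflow argument:

import importlib

from parsl.app.app import python_app

module = importlib.import_module("my_workflow")  # hypothetical workflow module
wrapped = python_app(module.step)                # wrap the plain function as a Parsl app
setattr(module, "step", wrapped)                 # rebind so calls inside the module hit the app
module.run()                                     # run() now submits "step" calls as Parsl tasks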
Example #4
 def _globus_stage_out_app(self):
     return python_app(executors=['data_manager'])(self._globus_stage_out)
Example #5
 def _http_stage_in_app(self, executor):
     return python_app(executors=[executor])(_http_stage_in)
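
Examples #4 and #5 use python_app as a decorator factory: called with only keyword arguments, it returns a decorator that is then applied to the target function. The two forms below are equivalent; stage_in is a placeholder function:

from parsl.app.app import python_app

def stage_in(path):
    return path

# call-the-factory form, as in the staging helpers above
staged = python_app(executors=['data_manager'])(stage_in)

# equivalent decorator syntax
@python_app(executors=['data_manager'])
def stage_in_decorated(path):
    return path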
Example #6
def parsl_executor(items, function, accumulator, **kwargs):
    """Execute using parsl pyapp wrapper

    Parameters
    ----------
        items : list
            List of input arguments
        function : callable
            A function to be called on each input, which returns an accumulator instance
        accumulator : AccumulatorABC
            An accumulator to collect the output of the function
        config : parsl.config.Config, optional
            A parsl DataFlow configuration object. Necessary if there is no active kernel

            .. note:: In general, it is safer to construct the DFK with ``parsl.load(config)`` prior to calling this function
        status : bool
            If true (default), enable progress bar
        unit : str
            Label of progress bar unit
        desc : str
            Label of progress bar description
        compression : int, optional
            Compress accumulator outputs in flight with LZ4, at level specified (default 1)
            Set to ``None`` for no compression.
    """
    if len(items) == 0:
        return accumulator
    import parsl
    from parsl.app.app import python_app
    from .parsl.timeout import timeout
    status = kwargs.pop('status', True)
    unit = kwargs.pop('unit', 'items')
    desc = kwargs.pop('desc', 'Processing')
    clevel = kwargs.pop('compression', 1)
    if clevel is not None:
        function = _compression_wrapper(clevel, function)
    add_fn = _iadd

    cleanup = False
    config = kwargs.pop('config', None)
    try:
        parsl.dfk()
    except RuntimeError:
        cleanup = True
    if cleanup and config is None:
        raise RuntimeError("No active parsl DataFlowKernel, must specify a config to construct one")
    elif not cleanup and config is not None:
        raise RuntimeError("An active parsl DataFlowKernel already exists")
    elif config is not None:
        parsl.clear()
        parsl.load(config)

    app = timeout(python_app(function))

    futures = set(app(item) for item in items)
    _futures_handler(futures, accumulator, status, unit, desc, add_fn)

    if cleanup:
        parsl.dfk().cleanup()
        parsl.clear()

    return accumulator
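
Per the docstring's note, the safer pattern is to load the DataFlowKernel yourself before calling parsl_executor instead of passing config. A minimal sketch with Parsl's thread-based executor; items, process_items, and out are hypothetical placeholders for a real work function and accumulator:

import parsl
from parsl.config import Config
from parsl.executors.threads import ThreadPoolExecutor

parsl.load(Config(executors=[ThreadPoolExecutor(max_threads=4)]))

# items, process_items, and out follow the signature described in the docstring
out = parsl_executor(items, process_items, out, compression=None)

parsl.dfk().cleanup()
parsl.clear()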
Example #7
 def _globus_stage_out_app(self):
     return python_app(executors=['data_manager'], data_flow_kernel=self.dfk)(self._globus_stage_out)
Example #8
 def _http_stage_in_app(self, executor):
     return python_app(executors=[executor],
                       data_flow_kernel=self.dfk)(_http_stage_in)
Example #9
import numpy as np
import time

from parsl.app.app import python_app
from .timeout import timeout
from ..executor import _futures_handler

lz4_clevel = 1


def coffea_pyapp_func(dataset, fn, treename, chunksize, index, procstr, timeout=None, flatten=True, **kwargs):
    raise RuntimeError('parsl_executor.coffea pyapp cannot be used any more, '
                       'please use a wrapped _work_function from processor.executor')


coffea_pyapp = timeout(python_app(coffea_pyapp_func))


class ParslExecutor(object):

    def __init__(self):
        self._counts = {}

    @property
    def counts(self):
        return self._counts

    def __call__(self, items, processor_instance, output, status=True, unit='items', desc='Processing', timeout=None, flatten=True, **kwargs):

        raise RuntimeError('ParslExecutor.__call__ cannot be used any more, '
                           'please use processor.parsl_executor')
Example #10
 def _globus_stage_out_app(self, executor, dfk):
     executor_obj = dfk.executors[executor]
     f = partial(_globus_stage_out, self, executor_obj)
     return python_app(executors=['data_manager'], data_flow_kernel=dfk)(f)
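
Example #10 pre-binds context with functools.partial before handing the callable to python_app, so callers of the resulting app don't have to pass those arguments themselves. A standalone sketch of the same pattern with a placeholder helper:

from functools import partial

from parsl.app.app import python_app

def stage_out(manager, src, dst):                     # placeholder for a staging helper
    return (manager, src, dst)

bound = partial(stage_out, "globus-manager")          # pre-bind the context argument
app = python_app(executors=['data_manager'])(bound)   # wrap the bound callable as an app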
Example #11
 def _file_stage_in_app(self):
     return python_app(executors=['data_manager'])(self._file_stage_in)
Example #12
 def _globus_stage_in_app(self, executor, dfk):
     executor_obj = dfk.executors[executor]
     f = partial(_globus_stage_in, self, executor_obj)
     return python_app(executors=['_parsl_internal'],
                       data_flow_kernel=dfk)(f)