Beispiel #1
0
    def test_push_pull_recarray(self):
        """Round-trip a structured (record) array through push/pull."""
        import numpy
        from numpy.testing import assert_array_equal

        view = self.client[-1]

        # Structured array covering an int, a fixed-width bytes, and a float field.
        rec = numpy.array(
            [
                (1, 'hi', 0.0),
                (2**30, 'there', 2.5),
                (-99999, 'world', -12345.6789),
            ],
            [('n', int), ('s', '|S10'), ('f', float)],
        )

        view['RR'] = rec
        pulled = view['RR']

        # Inspect dtype/shape remotely; RR only exists in the engine namespace.
        remote_dtype, remote_shape = view.apply_sync(
            interactive(lambda: (RR.dtype, RR.shape))  # noqa: F821
        )
        self.assertEqual(remote_dtype, rec.dtype)
        self.assertEqual(remote_shape, rec.shape)
        self.assertEqual(pulled.dtype, rec.dtype)
        self.assertEqual(pulled.shape, rec.shape)
        assert_array_equal(pulled, rec)
Beispiel #2
0
    def line_pmap(self,
                  function_name,
                  args,
                  kernel_name=None,
                  set_variable=None):
        """
        %pmap FUNCTION [ARGS1,ARGS2,...] - ("parallel map") call a FUNCTION on args

        Applies a named function to each element of the argument list, one
        element at a time, via a dynamic load-balancing scheduler.

        The args are given as a single Python expression (no spaces), e.g.
        ``range(10)`` or ``[1,2,3]``. A cluster must already be set up with
        the %parallel magic.

        Examples:

            %pmap function-name-in-language range(10)
            %pmap function-name-in-language [1,2,3,4]
            %pmap run_experiment range(1,100,5)
            %pmap run_experiment ["test1","test2","test3"]
            %pmap f [(1,4,7),(2,3,5),(7,2,2)]

        The function must be defined on every node of the cluster, e.g.:

            %%px
            (define myfunc
               (lambda (n)
                 (+ n 1)))

        (use %%px -e to also define it in the local notebook/console), then:

            %%pmap myfunc range(100)

        The load balancer runs myfunc on the next available node.

        Note: not all languages may support running a function via this magic.
        """
        # Fall back to this magic's configured kernel when none was given.
        kname = self.kernel_name if kernel_name is None else kernel_name

        # `interactive` moved to ipyparallel; keep the old IPython location
        # as a fallback for older installs.
        try:
            from ipyparallel.util import interactive
        except ImportError:
            from IPython.parallel.util import interactive

        # Bind the kernel and function names via defaults so the lambda is
        # self-contained; `kernels` is resolved in the engine namespace.
        func = interactive(
            lambda arg, kname=kname, fname=function_name:
            kernels[kname].do_function_direct(fname, arg))
        # NOTE: args comes from the magic line and is eval()'d — this is
        # trusted, user-typed notebook input, not external data.
        results = self.view_load_balanced.map_async(func, eval(args))
        if set_variable is not None:
            self.kernel.set_variable(set_variable, results)
            self.retval = None
        else:
            self.retval = results
Beispiel #3
0
    def line_pmap(self, function_name, args, kernel_name=None, set_variable=None):
        """
        %pmap FUNCTION [ARGS1,ARGS2,...] - ("parallel map") call a FUNCTION on args

        Runs the named function over each argument using a dynamic
        load-balancing scheduler, one argument per task.

        ``args`` is a single Python expression with no spaces (for example
        ``range(10)`` or ``[1,2,3]``). You must first set up a cluster with
        the %parallel magic.

        Examples:

            %pmap function-name-in-language range(10)
            %pmap function-name-in-language [1,2,3,4]
            %pmap run_experiment range(1,100,5)
            %pmap run_experiment ["test1","test2","test3"]
            %pmap f [(1,4,7),(2,3,5),(7,2,2)]

        The function must already exist on every node, e.g.:

            %%px
            (define myfunc
               (lambda (n)
                 (+ n 1)))

        (add -e to %%px to also define it locally). Then apply it:

            %%pmap myfunc range(100)

        Each call is dispatched to the next available node.

        Note: not all languages may support running a function via this magic.
        """
        if kernel_name is None:
            # Default to this magic's own kernel.
            kernel_name = self.kernel_name

        # Prefer the current ipyparallel location of `interactive`, falling
        # back to the legacy IPython.parallel path.
        try:
            from ipyparallel.util import interactive
        except ImportError:
            from IPython.parallel.util import interactive

        # Default-argument binding captures the names now; `kernels` is
        # looked up in the engine's namespace at call time.
        # NOTE: eval() here runs trusted notebook input typed by the user.
        async_result = self.view_load_balanced.map_async(
            interactive(lambda arg, kname=kernel_name, fname=function_name:
                        kernels[kname].do_function_direct(fname, arg)),
            eval(args),
        )
        if set_variable is None:
            self.retval = async_result
        else:
            self.kernel.set_variable(set_variable, async_result)
            self.retval = None
Beispiel #4
0
    def test_push_pull_recarray(self):
        """push/pull recarrays"""
        import numpy

        # FIX: numpy.testing.utils was deprecated in NumPy 1.15 and removed
        # in NumPy 1.25; numpy.testing is the stable public location.
        from numpy.testing import assert_array_equal

        view = self.client[-1]

        # Structured array with int, fixed-width bytes, and float fields.
        R = numpy.array(
            [(1, "hi", 0.0), (2 ** 30, "there", 2.5), (-99999, "world", -12345.6789)],
            [("n", int), ("s", "|S10"), ("f", float)],
        )

        view["RR"] = R
        R2 = view["RR"]

        # dtype/shape are checked remotely; RR lives in the engine namespace.
        r_dtype, r_shape = view.apply_sync(interactive(lambda: (RR.dtype, RR.shape)))
        self.assertEqual(r_dtype, R.dtype)
        self.assertEqual(r_shape, R.shape)
        self.assertEqual(R2.dtype, R.dtype)
        self.assertEqual(R2.shape, R.shape)
        assert_array_equal(R2, R)
Beispiel #5
0
 def test_push_pull_recarray(self):
     """push/pull recarrays"""
     import numpy

     # FIX: numpy.testing.utils was deprecated in NumPy 1.15 and removed in
     # NumPy 1.25; import from the public numpy.testing namespace instead.
     from numpy.testing import assert_array_equal

     view = self.client[-1]

     # Structured array with int, fixed-width bytes, and float fields.
     R = numpy.array([
         (1, 'hi', 0.),
         (2**30, 'there', 2.5),
         (-99999, 'world', -12345.6789),
     ], [('n', int), ('s', '|S10'), ('f', float)])

     view['RR'] = R
     R2 = view['RR']

     # dtype/shape are checked remotely; RR lives in the engine namespace.
     r_dtype, r_shape = view.apply_sync(interactive(lambda : (RR.dtype, RR.shape)))
     self.assertEqual(r_dtype, R.dtype)
     self.assertEqual(r_shape, R.shape)
     self.assertEqual(R2.dtype, R.dtype)
     self.assertEqual(R2.shape, R.shape)
     assert_array_equal(R2, R)