Example #1
0
    def test_genericNpyScatter_multi(self):
        """Scatter two 100x100 arrays (two inputs, two copies each) and
        verify each input's splits concatenate back to the original array.

        Outputs o1/o2 hold the splits of input b, o3/o4 the splits of c.
        """
        data1_in = random.rand(100, 100)
        data2_in = random.rand(100, 100)
        b = InMemoryDROP("b", "b")
        c = InMemoryDROP("c", "c")
        droputils.save_numpy(b, data1_in)
        droputils.save_numpy(c, data2_in)
        s = GenericNpyScatterApp("s",
                                 "s",
                                 num_of_copies=2,
                                 scatter_axes="[0,0]")
        s.addInput(b)
        s.addInput(c)
        o1 = InMemoryDROP("o1", "o1")
        o2 = InMemoryDROP("o2", "o2")
        o3 = InMemoryDROP("o3", "o3")
        o4 = InMemoryDROP("o4", "o4")
        for x in o1, o2, o3, o4:
            s.addOutput(x)
        # Fix: "c" was missing from the full drop collection even though it
        # is one of the graph's inputs (it appears in the inputs tuple).
        self._test_graph_runs((b, c, s, o1, o2, o3, o4), (b, c),
                              (o1, o2, o3, o4), timeout=4)

        # Splits of the first input must reassemble into data1_in.
        data11 = droputils.load_numpy(o1)
        data12 = droputils.load_numpy(o2)
        data1_out = concatenate([data11, data12])
        self.assertEqual(data1_out.shape, data1_in.shape)
        testing.assert_array_equal(data1_out, data1_in)

        # Splits of the second input must reassemble into data2_in.
        data21 = droputils.load_numpy(o3)
        data22 = droputils.load_numpy(o4)
        data2_out = concatenate([data21, data22])
        testing.assert_array_equal(data2_out, data2_in)
Example #2
0
    def test_genericNpyScatter(self):
        """Scatter a single 100x100 array into two copies and verify the
        concatenated outputs reproduce the original array."""
        data_in = random.rand(100, 100)
        b = InMemoryDROP("b", "b")
        droputils.save_numpy(b, data_in)
        s = GenericNpyScatterApp("s", "s", num_of_copies=2)
        s.addInput(b)
        o1 = InMemoryDROP("o1", "o1")
        o2 = InMemoryDROP("o2", "o2")
        for x in o1, o2:
            s.addOutput(x)
        self._test_graph_runs((b, s, o1, o2), b, (o1, o2), timeout=4)

        data1 = droputils.load_numpy(o1)
        data2 = droputils.load_numpy(o2)
        data_out = concatenate([data1, data2])
        # Fix: the original asserted data_in.all() == data_out.all(), which
        # only compares two "all elements truthy" booleans and would pass
        # for completely different arrays. Check shape and element-wise
        # equality instead, consistent with test_genericNpyScatter_multi.
        self.assertEqual(data_out.shape, data_in.shape)
        testing.assert_array_equal(data_out, data_in)
Example #3
0
 def gather_inputs(self):
     """Gather every input drop (interpreted as an npy drop) into one result.

     The numpy function named by ``self.function`` is applied pairwise,
     left to right; the first input's data seeds the accumulator.
     """
     result: Optional[Number] = None
     # self.function is already a string; the f-string wrapper was redundant.
     gather = getattr(np, self.function)
     # "inp" rather than "input": don't shadow the builtin.
     for inp in self.inputs:
         data = droputils.load_numpy(inp)
         # assign instead of gather for the first input
         result = data if result is None else gather(result, data)
     return result
Example #4
0
 def reduce_gather_inputs(self):
     """Reduce each input drop (interpreted as an npy drop), then gather.

     Each input is first reduced with the numpy function named by
     ``self.function`` along ``self.reduce_axes``; the reduced pieces are
     then combined pairwise with the gather function that
     ``self.functions`` maps the reduce function to.
     """
     result: Optional[Number] = None
     # Both names are plain strings; the f-string wrappers were redundant.
     reduce = getattr(np, self.function)
     gather = getattr(np, self.functions[self.function])
     # "inp" rather than "input": don't shadow the builtin.
     for inp in self.inputs:
         data = droputils.load_numpy(inp)
         # skip gather for the first input
         result = (reduce(data, axis=self.reduce_axes) if result is None
                   else gather(result, reduce(data, axis=self.reduce_axes)))
     return result
Example #5
0
    def run(self):
        """Split each input along its scatter axis and fill the outputs.

        Input ``i`` is split into ``num_of_copies`` pieces which are saved
        to the contiguous output slice
        ``[i * num_of_copies, (i + 1) * num_of_copies)``.

        Raises:
            DaliugeException: if the number of outputs is not
                ``len(inputs) * num_of_copies``, or if a scatter axis is
                not provided for every input.
        """
        expected_outputs = len(self.inputs) * self.num_of_copies
        if expected_outputs != len(self.outputs):
            raise DaliugeException(
                f"expected {expected_outputs} outputs,\
                 got {len(self.outputs)}")
        if len(self.inputs) != len(self.scatter_axes):
            raise DaliugeException(f"expected {len(self.inputs)} axes,\
                 got {len(self.scatter_axes)}, {self.scatter_axes}")

        # Removed: no-op "self.num_of_copies = self.num_of_copies" and a
        # try/except that merely re-raised IndexError — both pure noise.
        for in_index, input_drop in enumerate(self.inputs):
            nObj = droputils.load_numpy(input_drop)
            # array_split tolerates num_of_copies not dividing the axis
            # length evenly; an invalid axis propagates up unchanged.
            result = np.array_split(nObj,
                                    self.num_of_copies,
                                    axis=self.scatter_axes[in_index])
            for split_index in range(self.num_of_copies):
                out_index = in_index * self.num_of_copies + split_index
                droputils.save_numpy(self.outputs[out_index],
                                     result[split_index])