Example #1
async def build_fd_pipes(pipe_args: str,
                         node: BaseNode) -> Tuple[Pipes, Pipes]:
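    """Parse the double-encoded pipe_args JSON and wrap each file
    descriptor in an OutputPipe or InputPipe; returns (pipes_in, pipes_out)."""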
    try:
        pipe_json = json.loads(json.loads(pipe_args))
        # if debugging, PyCharm escapes the outer JSON
        # pipe_json = json.loads(pipe_args.encode('utf-8').decode('unicode_escape'))
        dest_args = pipe_json['outputs']
        src_args = pipe_json['inputs']
    except (KeyError, json.JSONDecodeError):
        raise errors.ConfigurationError("invalid pipes argument: [%s]" %
                                        pipe_args)
    pipes_out = {}
    pipes_in = {}
    for name, arg in dest_args.items():
        wf = pipes.writer_factory(arg['fd'])
        dest_stream = None
        if arg['id'] is not None:  # id is None in tests run without an API
            dest_stream = await node.data_stream_get(arg['id'])
        pipes_out[name] = pipes.OutputPipe(stream=dest_stream,
                                           layout=arg['layout'],
                                           writer_factory=wf)

    for name, arg in src_args.items():
        rf = pipes.reader_factory(arg['fd'])
        src_stream = None
        if arg['id'] is not None:  # id is None in tests run without an API
            src_stream = await node.data_stream_get(arg['id'])
        pipes_in[name] = pipes.InputPipe(stream=src_stream,
                                         layout=arg['layout'],
                                         reader_factory=rf)

    return pipes_in, pipes_out
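
The pipes argument is JSON that has been serialized twice, which is why it is decoded with two nested json.loads calls above (the tests in Examples 4 through 6 build it with a matching double json.dumps). A minimal sketch of a caller, assuming a float32_1 layout string and descriptors from os.pipe(); with id set to None the node API is never queried:

import json
import os

r, w = os.pipe()
pipe_args = json.dumps(json.dumps({
    "outputs": {"filtered": {"fd": w, "id": None, "layout": "float32_1"}},
    "inputs": {"raw": {"fd": r, "id": None, "layout": "float32_1"}},
}))
# inside a coroutine, with any BaseNode instance at hand:
# pipes_in, pipes_out = await build_fd_pipes(pipe_args, node)
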
Example #2
    async def _spawn_outputs(self) -> asyncio.Future:
        tasks: List[asyncio.Task] = []
        # configure output pipes          [module]==>[worker]
        for (name, stream) in self.module.outputs.items():
            (r, w) = os.pipe()
            rf = pipes.reader_factory(r)
            os.set_inheritable(w, True)
            pipe = pipes.InputPipe(name=name, stream=stream, reader_factory=rf)
            self.output_connections.append(
                DataConnection(name, w, stream, pipe))
            t = asyncio.create_task(
                self._output_handler(pipe, self.subscribers[stream]))
            t.set_name("worker [%s]: output [%s]" %
                       (self.module.name, stream.name))
            tasks.append(t)

        return asyncio.gather(*tasks)
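
The os.set_inheritable(w, True) call is the detail that lets the write end reach the child module: since PEP 446, descriptors are non-inheritable by default and would otherwise be closed across exec. The handoff in isolation, with a hypothetical one-line child program standing in for the worker's subprocess:

import os
import subprocess
import sys

r, w = os.pipe()                 # both ends start non-inheritable (PEP 446)
os.set_inheritable(w, True)
child = subprocess.Popen(
    [sys.executable, '-c',
     'import os, sys; os.write(int(sys.argv[1]), b"hi")', str(w)],
    close_fds=False)             # inheritable descriptors survive into the child
os.close(w)                      # parent keeps only the read end
print(os.read(r, 2))             # b'hi'
child.wait()
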
Example #3
    async def _build_pipes_new(self, interval, input_streams, output_streams,
                               pipe_args) -> Tuple[Pipes, Pipes]:
        input_pipes = {}
        output_pipes = {}
        # use network sockets for connection to inputs and outputs
        if pipe_args == 'unset':
            for (name, stream) in input_streams.items():
                if interval is None:  # subscribe to live data
                    input_pipes[name] = await self.node.data_subscribe(stream)
                else:
                    input_pipes[name] = await self.node.data_read(
                        stream, interval[0], interval[1])
            for (name, stream) in output_streams.items():
                if interval is None:
                    output_pipes[name] = await self.node.data_write(stream)
                else:
                    output_pipes[name] = await self.node.data_write(
                        stream, interval[0], interval[1])
        # use file descriptors provided by joule for connection to inputs and outputs
        else:
            try:
                pipe_json = json.loads(json.loads(pipe_args))
                # if debugging, PyCharm escapes the outer JSON
                # pipe_json = json.loads(pipe_args.encode('utf-8').decode('unicode_escape'))
                output_args = pipe_json['outputs']
                input_args = pipe_json['inputs']
            except (KeyError, json.JSONDecodeError):
                raise ConfigurationError(
                    f"invalid pipes argument: {pipe_args}")

            for name, arg in output_args.items():
                wf = pipes.writer_factory(arg['fd'])
                output_pipes[name] = pipes.OutputPipe(
                    stream=output_streams[name],
                    layout=arg['layout'],
                    writer_factory=wf)
            for name, arg in input_args.items():
                rf = pipes.reader_factory(arg['fd'])
                input_pipes[name] = pipes.InputPipe(stream=input_streams[name],
                                                    layout=arg['layout'],
                                                    reader_factory=rf)
        # keep track of the pipes so they can be closed
        self.pipes = list(input_pipes.values()) + list(output_pipes.values())
        return input_pipes, output_pipes
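
In the socket branch, interval selects between live and historic data: None subscribes to the live feed, while a (start, end) pair bounds a replay. A sketch of that convention from the caller's side; replay_last_hour is a hypothetical helper, and the microsecond Unix timestamps are an assumption based on the interval[0]/interval[1] usage above:

import time

async def replay_last_hour(node, stream):
    end = int(time.time() * 1e6)          # assumed: microseconds since the epoch
    start = end - 3600 * int(1e6)
    return await node.data_read(stream, start, end)   # bounded historic read
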
Example #4
    def test_writes_to_pipes(self):
        module = SimpleReader()
        (r, w) = os.pipe()
        rf = pipes.reader_factory(r)
        pipe = pipes.InputPipe(name="output",
                               stream=self.stream,
                               reader_factory=rf)
        pipe_arg = json.dumps(
            json.dumps({
                "outputs": {
                    'output': {
                        'fd': w,
                        'id': None,
                        'layout': self.stream.layout
                    }
                },
                "inputs": {}
            }))
        data = helpers.create_data(self.stream.layout)
        args = argparse.Namespace(pipes=pipe_arg,
                                  socket="unset",
                                  url='http://localhost:8080',
                                  node="",
                                  api_socket="",
                                  mock_data=data)
        # run the reader module
        loop = asyncio.new_event_loop()
        loop.set_debug(True)
        asyncio.set_event_loop(loop)

        module.start(args)
        asyncio.set_event_loop(self.loop)
        # check the output
        received_data = self.loop.run_until_complete(pipe.read())
        np.testing.assert_array_equal(data, received_data)
        self.loop.run_until_complete(pipe.close())
        if not loop.is_closed():
            loop.close()
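
Examples 4 through 6 all juggle two event loops: the module under test runs on a fresh loop, then the test switches back to its own loop to drain the pipe and assert on the data. The same choreography in miniature, with asyncio.sleep standing in for both the module and the checks:

import asyncio

test_loop = asyncio.new_event_loop()
module_loop = asyncio.new_event_loop()

asyncio.set_event_loop(module_loop)
module_loop.run_until_complete(asyncio.sleep(0))   # stands in for module.start(args)

asyncio.set_event_loop(test_loop)
test_loop.run_until_complete(asyncio.sleep(0))     # stands in for the pipe.read() checks

module_loop.close()
test_loop.close()
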
Example #5
    def test_runs_composited_modules(self):
        module = SimpleComposite()
        (r, w) = os.pipe()
        module_loop = asyncio.new_event_loop()
        rf = pipes.reader_factory(r)
        pipe = pipes.InputPipe(name="output", stream=self.stream, reader_factory=rf)
        pipe_arg = json.dumps(json.dumps({"outputs": {'output': {'fd': w, 'id': None, 'layout': self.stream.layout}},
                                          "inputs": {}}))
        data = helpers.create_data(self.stream.layout)
        args = argparse.Namespace(pipes=pipe_arg, socket="unset",
                                  node="", api_socket="",
                                  url='http://localhost:8080',
                                  mock_data=data)
        # run the composite module
        asyncio.set_event_loop(module_loop)
        module.start(args)
        asyncio.set_event_loop(self.loop)
        # check the output
        received_data = self.loop.run_until_complete(pipe.read())
        np.testing.assert_array_equal(data['timestamp'], received_data['timestamp'])
        np.testing.assert_array_almost_equal(data['data'] * 2, received_data['data'])
        self.loop.run_until_complete(pipe.close())
        if not module_loop.is_closed():
            module_loop.close()
        self.loop.close()
Example #6
    def test_writes_to_pipes(self):
        module = SimpleFilter()
        (r, w_module) = os.pipe()
        rf = pipes.reader_factory(r)
        from_filter = pipes.InputPipe(name="from_filter",
                                      stream=self.output,
                                      reader_factory=rf)
        (r_module, w) = os.pipe()
        wf = pipes.writer_factory(w)
        to_filter = pipes.OutputPipe(name="to_filter",
                                     stream=self.input,
                                     writer_factory=wf)

        pipe_arg = json.dumps(
            json.dumps({
                "outputs": {
                    'from_filter': {
                        'fd': w_module,
                        'id': 2,
                        'layout': self.output.layout
                    }
                },
                "inputs": {
                    'to_filter': {
                        'fd': r_module,
                        'id': 3,
                        'layout': self.input.layout
                    }
                }
            }))
        data = helpers.create_data(self.input.layout)
        self.loop.run_until_complete(to_filter.write(data))
        self.loop.run_until_complete(to_filter.close())
        args = argparse.Namespace(pipes=pipe_arg,
                                  socket="unset",
                                  node="",
                                  api_socket="",
                                  live=False,
                                  url='http://localhost:8080')
        # run the filter module
        loop = asyncio.new_event_loop()
        loop.set_debug(True)
        asyncio.set_event_loop(loop)

        class MockNode(BaseNode):
            def __init__(self):
                self.session = mock.Mock()
                self.session.close = asynctest.CoroutineMock()

            @property
            def loop(self):
                return asyncio.get_event_loop()

        with mock.patch('joule.client.base_module.node') as mock_node_pkg:
            node = MockNode()
            node.data_stream_get = asynctest.CoroutineMock(
                return_value=self.output)
            mock_node_pkg.UnixNode = mock.Mock(return_value=node)
            module.start(args)
            # make sure the API was used to retrieve stream objects
            self.assertEqual(node.data_stream_get.await_count, 2)

        asyncio.set_event_loop(self.loop)
        # check the output
        received_data = self.loop.run_until_complete(from_filter.read())
        np.testing.assert_array_equal(data['timestamp'],
                                      received_data['timestamp'])
        np.testing.assert_array_almost_equal(data['data'] * 2,
                                             received_data['data'])
        self.loop.run_until_complete(from_filter.close())
        if not loop.is_closed():
            loop.close()
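
The mock.patch block above swaps out the node package that base_module imported at load time, so its UnixNode constructor hands back a scripted object instead of opening a real connection. The same shape using only the standard library, where unittest.mock.AsyncMock (Python 3.8+) plays the role of asynctest.CoroutineMock; the canned return value is a placeholder:

from unittest import mock

fake_node = mock.Mock()
fake_node.data_stream_get = mock.AsyncMock(return_value="a stream object")

with mock.patch('joule.client.base_module.node') as node_pkg:
    node_pkg.UnixNode = mock.Mock(return_value=fake_node)
    # code run here that calls node.UnixNode(...) receives fake_node, and
    # awaiting fake_node.data_stream_get(...) yields the canned value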