Ejemplo n.º 1
0
def _setup_pipes(task_inputs, inputs, task_outputs, outputs, tempdir):
    """
    Returns a 2 tuple of input and output pipe mappings. The first element is
    a dict mapping input file descriptors to the corresponding stream adapters,
    the second is a dict mapping output file descriptors to the corresponding
    stream adapters. This also handles the special cases of STDIN, STDOUT, and
    STDERR mappings, and in the case of non-streaming standard IO pipes, will
    create default bindings for those as well.
    """
    ipipes = {}
    opipes = {}

    def make_pipe(id, spec, bindings):
        """
        Helper to make a pipe conditionally for valid streaming IO specs. If the
        given spec is not a streaming spec, returns False. If it is, returns the
        path to the pipe file that was created.
        """
        if (spec.get('stream') and id in bindings and
                spec.get('target') == 'filepath'):
            path = spec.get('path', id)
            if path.startswith('/'):
                raise Exception('Streaming filepaths must be relative.')
            path = os.path.join(tempdir, path)
            os.mkfifo(path)
            return path
        return False

    # handle stream inputs
    for id, spec in task_inputs.iteritems():
        pipe = make_pipe(id, spec, inputs)
        if pipe:
            # Don't open from this side, must be opened for reading first!
            ipipes[pipe] = make_stream_fetch_adapter(inputs[id])

    # handle stream outputs
    for id, spec in task_outputs.iteritems():
        pipe = make_pipe(id, spec, outputs)
        if pipe:
            opipes[os.open(pipe, os.O_RDONLY | os.O_NONBLOCK)] = \
                make_stream_push_adapter(outputs[id])

    # special handling for stdin, stdout, and stderr pipes
    if '_stdin' in task_inputs and '_stdin' in inputs:
        if task_inputs['_stdin'].get('stream'):
            ipipes['_stdin'] = make_stream_fetch_adapter(inputs['_stdin'])
        else:
            ipipes['_stdin'] = utils.MemoryFetchAdapter(
                inputs[id], inputs[id]['data'])

    for id in ('_stdout', '_stderr'):
        if id in task_outputs and id in outputs:
            if task_outputs[id].get('stream'):
                opipes[id] = make_stream_push_adapter(outputs[id])
            else:
                opipes[id] = utils.AccumulateDictAdapter(
                    outputs[id], 'script_data')

    return ipipes, opipes
Ejemplo n.º 2
0
def _setup_streams(task_inputs, inputs, task_outputs, outputs, tempdir,
                   job_mgr, progress_pipe):
    """
    Returns a 2 tuple of input and output pipe mappings. The first element is
    a dict mapping input file descriptors to the corresponding stream adapters,
    the second is a dict mapping output file descriptors to the corresponding
    stream adapters. This also handles the special cases of STDIN, STDOUT, and
    STDERR mappings, and in the case of non-streaming standard IO pipes, will
    create default bindings for those as well.
    """
    stream_connectors = []

    def stream_pipe_path(id, spec, bindings):
        """
        Helper to check for a  valid streaming IO specs. If the
        given spec is not a streaming spec, returns None. If it is, returns the
        path.
        """
        if spec.get('stream') and id in bindings and spec.get(
                'target') == 'filepath':
            path = spec.get('path', id)
            if path.startswith('/'):
                raise Exception('Streaming filepaths must be relative.')
            path = os.path.join(tempdir, path)
            return path

        return None

    # handle stream inputs
    for id, spec in task_inputs.iteritems():

        path = stream_pipe_path(id, spec, inputs)
        # We have a streaming input
        if path is not None:
            writer = NamedPipeWriter(NamedPipe(path))
            connector = FDWriteStreamConnector(
                make_stream_fetch_adapter(inputs[id]), writer)
            stream_connectors.append(connector)
            # Don't open from this side, must be opened for reading first!

    # handle stream outputs
    for id, spec in task_outputs.iteritems():
        path = stream_pipe_path(id, spec, outputs)
        if path is not None:
            reader = NamedPipeReader(NamedPipe(path))
            connector = FDReadStreamConnector(
                reader, make_stream_push_adapter(outputs[id]))
            stream_connectors.append(connector)

    # handle special stream output for job progress
    if progress_pipe and job_mgr:
        progress_pipe = ProgressPipe(os.path.join(tempdir, '.girder_progress'))
        stream_connectors.append(progress_pipe.open())

    return stream_connectors
Ejemplo n.º 3
0
    def testInputStreams(self):
        # Spec for an input streamed over HTTP from a mocked host.
        input_spec = dict(mode="http", method="GET", url="http://mockedhost")

        @httmock.urlmatch(netloc="^mockedhost$", method="GET")
        def mock_fetch(url, request):
            return "hello\nworld"

        pipe_adapters = {_pipepath: make_stream_fetch_adapter(input_spec)}
        command = [sys.executable, _iscript, _pipepath]

        try:
            # Capture stdout/stderr while the mocked endpoint is active.
            with captureOutput() as stdpipes, httmock.HTTMock(mock_fetch):
                run_process(command, input_pipes=pipe_adapters)
        except Exception:
            # Surface the captured process output before re-raising.
            print("Stdout/stderr from exception: ")
            print(stdpipes)
            raise

        self.assertEqual(stdpipes, ["olleh\ndlrow\n", ""])
Ejemplo n.º 4
0
    def testInputStreams(self):
        # The script under test should reverse each line it reads from the
        # streamed input pipe.
        spec = {
            'mode': 'http',
            'method': 'GET',
            'url': 'http://mockedhost'
        }

        @httmock.urlmatch(netloc='^mockedhost$', method='GET')
        def mock_fetch(url, request):
            return 'hello\nworld'

        fetch_adapters = {_pipepath: make_stream_fetch_adapter(spec)}
        proc_args = [sys.executable, _iscript, _pipepath]

        try:
            with captureOutput() as stdpipes, httmock.HTTMock(mock_fetch):
                run_process(proc_args, input_pipes=fetch_adapters)
        except Exception:
            # Dump whatever the child wrote before propagating the failure.
            print('Stdout/stderr from exception: ')
            print(stdpipes)
            raise

        self.assertEqual(stdpipes, ['olleh\ndlrow\n', ''])