Example #1
    def test_flow_no_container(self):
        f = (Flow()
             .add(name='dummyEncoder', yaml_path='mwu-encoder/mwu_encoder.yml'))

        with f:
            f.index(input_fn=random_docs(10))
Example #2
def test_load_flow_with_port():
    f = Flow.load_config('yaml/test-flow-port.yml')
    with f:
        assert f.port_expose == 12345
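The yaml/test-flow-port.yml loaded above is not shown in these examples. As a rough, hypothetical sketch of the kind of Flow YAML this API generation accepts (load_config also takes a file-like object, as Example #4 shows), the assertion only needs a with: block that sets the port:

import io
from jina.flow import Flow

# hypothetical stand-in for yaml/test-flow-port.yml; the real file may also
# declare pods and other options
flow_yaml = io.StringIO('''
!Flow
with:
  port_expose: 12345
''')

f = Flow.load_config(flow_yaml)
with f:
    assert f.port_expose == 12345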
Example #3
def test_flow_arguments_priorities():
    f = Flow(port_expose=12345).add(name='test', port_expose=23456)
    assert '23456' in f._pod_nodes['test'].cli_args
    assert '12345' not in f._pod_nodes['test'].cli_args
Example #4
def test_load_flow_from_yaml():
    with open(os.path.join(cur_dir, '../yaml/test-flow.yml')) as fp:
        a = Flow.load_config(fp)
        with open(os.path.join(cur_dir, '../yaml/swarm-out.yml'), 'w') as fp, a:
            a.to_swarm_yaml(fp)
        rm_files([os.path.join(cur_dir, '../yaml/swarm-out.yml')])
Example #5
def test_flow_no_container():
    f = (Flow()
         .add(name='dummyEncoder', uses=os.path.join(cur_dir, '../mwu-encoder/mwu_encoder.yml')))

    with f:
        f.index(input_fn=random_docs(10))
Example #6
def search():
    f = (Flow(rest_api=True, port_expose=int(os.environ['JINA_PORT']))
         .add(yaml_path='- !RandomPopRanker {}')
         .add(yaml_path='indexer.yml'))
    with f:
        f.block()
Example #7
def test_pass_arbitrary_kwargs_from_yaml():
    f = Flow.load_config(os.path.join(cur_dir, 'flow.yml'))
    assert f._pod_nodes['pod1'].args.docker_kwargs == {
        'hello': 0,
        'environment': ['VAR1=BAR', 'VAR2=FOO'],
    }
Example #8
    def test_flow_with_jump(self):
        f = (Flow()
             .add(name='r1', uses='_forward')
             .add(name='r2', uses='_forward')
             .add(name='r3', uses='_forward', needs='r1')
             .add(name='r4', uses='_forward', needs='r2')
             .add(name='r5', uses='_forward', needs='r3')
             .add(name='r6', uses='_forward', needs='r4')
             .add(name='r8', uses='_forward', needs='r6')
             .add(name='r9', uses='_forward', needs='r5')
             .add(name='r10', uses='_merge', needs=['r9', 'r8']))

        with f:
            f.dry_run()

        node = f._pod_nodes['gateway']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_CONNECT)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r1']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUB_BIND)

        node = f._pod_nodes['r2']
        self.assertEqual(node.head_args.socket_in, SocketType.SUB_CONNECT)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r3']
        self.assertEqual(node.head_args.socket_in, SocketType.SUB_CONNECT)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r4']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r5']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r6']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r8']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r9']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_CONNECT)

        node = f._pod_nodes['r10']
        self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
        self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_BIND)

        for name, node in f._pod_nodes.items():
            self.assertEqual(node.peas_args['peas'][0], node.head_args)
            self.assertEqual(node.peas_args['peas'][0], node.tail_args)

        f.save_config('tmp.yml')
        Flow.load_config('tmp.yml')

        with Flow.load_config('tmp.yml') as fl:
            fl.dry_run()

        self.add_tmpfile('tmp.yml')
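The branch-and-merge topology above is easier to see in a reduced form. A minimal sketch using only the constructs already shown (a '_forward' pod, a '_merge' pod, needs=, and dry_run), where r1 fans out to two branches that are joined again:

from jina.flow import Flow

def test_fan_out_fan_in_sketch():
    f = (Flow()
         .add(name='r1', uses='_forward')
         .add(name='r2', uses='_forward')              # follows r1 implicitly
         .add(name='r3', uses='_forward', needs='r1')  # second branch from r1
         .add(name='r4', uses='_merge', needs=['r2', 'r3']))  # fan-in

    with f:
        f.dry_run()

Because r1 feeds two downstream pods, it ends up with a PUB_BIND tail socket and the branches subscribe to it, which is the same pattern the assertions above check for r1, r2 and r3.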
Example #9
    def build(self) -> Dict:
        """A wrapper of docker build """
        if self.args.dry_run:
            result = self.dry_run()
        else:
            is_build_success, is_push_success = True, False
            _logs = []
            _excepts = []

            with TimeContext(f'building {colored(self.args.path, "green")}',
                             self.logger) as tc:
                try:
                    self._check_completeness()

                    streamer = self._raw_client.build(
                        decode=True,
                        path=self.args.path,
                        tag=self.canonical_name,
                        pull=self.args.pull,
                        dockerfile=self.dockerfile_path_revised,
                        rm=True)

                    for chunk in streamer:
                        if 'stream' in chunk:
                            for line in chunk['stream'].splitlines():
                                if is_error_message(line):
                                    self.logger.critical(line)
                                    _excepts.append(line)
                                elif 'warning' in line.lower():
                                    self.logger.warning(line)
                                else:
                                    self.logger.info(line)
                                _logs.append(line)
                except Exception as ex:
                    # if pytest fails it should end up here as well
                    is_build_success = False
                    _excepts.append(str(ex))

            if is_build_success:
                # build it again, but this time without streaming the log
                image, log = self._client.images.build(
                    path=self.args.path,
                    tag=self.canonical_name,
                    pull=self.args.pull,
                    dockerfile=self.dockerfile_path_revised,
                    rm=True)

                # success

                _details = {
                    'inspect': self._raw_client.inspect_image(image.tags[0]),
                    'tag': image.tags[0],
                    'hash': image.short_id,
                    'size': get_readable_size(image.attrs['Size']),
                }

                self.logger.success(
                    '🎉 built {tag} ({hash}) uncompressed size: {size}'.
                    format_map(_details))

            else:
                self.logger.error(
                    'cannot build the image, please double check the log')
                _details = {}

            if is_build_success:
                if self.args.test_uses:
                    try:
                        from jina.flow import Flow
                        with Flow().add(uses=image.tags[0]):
                            pass
                    except PeaFailToStart:
                        self.logger.error('cannot use the image in a Flow')
                        is_build_success = False

                if self.args.push:
                    try:
                        self.push(image.tags[0], self.readme_path)
                        is_push_success = True
                    except Exception:
                        self.logger.error('cannot push to the registry')

            if self.args.prune_images:
                self.logger.info('deleting unused images')
                self._raw_client.prune_images()

            result = {
                'name': getattr(self, 'canonical_name', ''),
                'path': self.args.path,
                'details': _details,
                'last_build_time': get_now_timestamp(),
                'build_duration': tc.duration,
                'is_build_success': is_build_success,
                'is_push_success': is_push_success,
                'build_logs': _logs,
                'exception': _excepts
            }
        if not result['is_build_success'] and self.args.raise_error:
            # drop the very verbose build log when raising the error
            result.pop('build_logs')
            raise RuntimeError(result)
        else:
            return result
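The _raw_client.build(...) loop above follows docker-py's low-level streaming build: with decode=True the generator yields dicts whose 'stream' (or, on failure, 'error') keys carry the build output line by line. A minimal stand-alone sketch of that pattern, with a placeholder path and tag rather than the builder's own arguments:

import docker

client = docker.APIClient()  # low-level client talking to the local daemon

# decode=True turns the raw JSON byte stream into dicts we can inspect directly
for chunk in client.build(path='./hub-image', tag='my/hub-image:test',
                          rm=True, decode=True):
    if 'stream' in chunk:
        for line in chunk['stream'].splitlines():
            print(line)
    elif 'error' in chunk:
        raise RuntimeError(chunk['error'])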
Example #10
    def test_any_file(self):
        f = Flow().add(yaml_path='!File2DataURICrafter\nwith: {base64: true}')
        with f:
            f.index(input_fn=input_fn2, output_fn=print)
Example #11
    def test_py_client(self):
        f = (Flow()
             .add(name='r1', uses='_forward')
             .add(name='r2', uses='_forward')
             .add(name='r3', uses='_forward', needs='r1')
             .add(name='r4', uses='_forward', needs='r2')
             .add(name='r5', uses='_forward', needs='r3')
             .add(name='r6', uses='_forward', needs='r4')
             .add(name='r8', uses='_forward', needs='r6')
             .add(name='r9', uses='_forward', needs='r5')
             .add(name='r10', uses='_merge', needs=['r9', 'r8']))

        with f:
            f.dry_run()
            from jina.clients import py_client
            py_client(port_expose=f.port_expose,
                      host=f.host).dry_run(as_request='index')

        with f:
            node = f._pod_nodes['gateway']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_CONNECT)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r1']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
            self.assertEqual(node.tail_args.socket_out, SocketType.PUB_BIND)

            node = f._pod_nodes['r2']
            self.assertEqual(node.head_args.socket_in, SocketType.SUB_CONNECT)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r3']
            self.assertEqual(node.head_args.socket_in, SocketType.SUB_CONNECT)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r4']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r5']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r6']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r8']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r9']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
            self.assertEqual(node.tail_args.socket_out,
                             SocketType.PUSH_CONNECT)

            node = f._pod_nodes['r10']
            self.assertEqual(node.head_args.socket_in, SocketType.PULL_BIND)
            self.assertEqual(node.tail_args.socket_out, SocketType.PUSH_BIND)

            for name, node in f._pod_nodes.items():
                self.assertEqual(node.peas_args['peas'][0], node.head_args)
                self.assertEqual(node.peas_args['peas'][0], node.tail_args)
Example #12
def test_crud_advanced_example(tmpdir, config, mocker, monkeypatch):
    '''
    This test indexes documents into an example flow and updates one document.
    The update is implemented as delete & index.
    '''

    # generate documents to index
    index_data = list(get_docs_to_index([
        '0,1,2,3,4,5,6,7,8,9',
        'a ijk,b ijk,c jk',
        'w mno,x no,y op,z i',
    ]))

    response_docs = []

    def on_index_done(resp):
        response_docs.extend(resp.docs)

    # insert documents into the indexers
    # response_docs stores the chunks generated by the segmenter via on_index_done;
    # deleting chunks by document_id is currently not possible, so the chunks
    # are needed later when deleting the documents
    with Flow.load_config('flow-index.yml') as index_flow:
        index_flow.index(
            index_data,
            on_done=on_index_done
        )

    validate_index(
        tmpdir,
        validation_data=[
            ('docIndexer.bin', 3),
            ('chunkidx.bin', 17),
            ('vecidx.bin', 17)
        ]
    )

    # pick document 0 to be deleted
    delete_data = list(get_docs_to_delete({
        0: response_docs[0].chunks
    }))

    # delete the doc and all of its chunks
    delete_idx = []

    for d in delete_data:
        delete_idx.append(d.id)
        for c in d.chunks:
            delete_idx.append(c.id)
    # assert ids not overlapping
    assert len(delete_idx) == len(set(delete_idx))

    # run flow for deletion
    with Flow.load_config('flow-index.yml') as delete_flow:
        delete_flow.delete(delete_idx)
    print('kvsearch vectorsearch after delete')

    validate_index(
        tmpdir,
        validation_data=[
            ('docIndexer.bin', 2),
            ('chunkidx.bin', 7),
            ('vecidx.bin', 7)
        ]
    )

    # generate a new document 0 as a replacement for the deleted one
    updated_data = get_docs_to_index([
        '1 ijk,2 jk,3 k',
    ])

    # insert the updated document
    with Flow.load_config('flow-index.yml') as index_flow:
        index_flow.index(updated_data)

    validate_index(
        tmpdir,
        validation_data=[
            ('docIndexer.bin', 3),
            ('chunkidx.bin', 10),
            ('vecidx.bin', 10)
        ]
    )

    def validate_granularity_1(resp):
        assert len(resp.docs) == 3
        for doc in resp.docs:
            assert doc.granularity == 0
            assert len(doc.matches) == 3
            assert doc.matches[0].granularity == 0

        assert resp.docs[0].text == '2 jk'
        assert (
                resp.docs[0].matches[0].text
                == '1 ijk,2 jk,3 k'
        )

        assert resp.docs[1].text == 'i'
        assert (
                resp.docs[1].matches[0].text
                == 'w mno,x no,y op,z i'
        )

        assert resp.docs[2].text == 'm'
        assert (
                resp.docs[2].matches[0].text
                == 'w mno,x no,y op,z i'
        )

    search_data = [
        '2 jk',
        'i',
        'm',
    ]

    mock = mocker.Mock()
    with Flow.load_config('flow-query.yml') as search_flow:
        search_flow.search(
            inputs=search_data,
            on_done=mock,
        )

    mock.assert_called_once()
    validate_callback(mock, validate_granularity_1)
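get_docs_to_index and get_docs_to_delete are helpers defined elsewhere in this example and are not shown here; a hypothetical sketch of the shape the test assumes (one Document per comma-separated string, and deletion payloads carrying the chunks captured in on_index_done) could look like this:

from jina import Document  # assumption: Document is exposed at the package top level in this version

def get_docs_to_index(contents):
    # hypothetical: one Document per comma-separated string; the segmenter in
    # flow-index.yml is what later splits the text into chunks
    for i, content in enumerate(contents):
        with Document() as doc:
            doc.tags['id'] = str(i)
            doc.text = content
        yield doc

def get_docs_to_delete(doc_id_to_chunks):
    # hypothetical: re-attach the previously returned chunks so that their ids
    # can be collected for deletion alongside the parent document
    for doc_id, chunks in doc_id_to_chunks.items():
        with Document() as doc:
            doc.tags['id'] = str(doc_id)
            for chunk in chunks:
                doc.chunks.append(chunk)
        yield doc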
Example #13
def test_flow_with_sse_no_deadlock_one_pod():
    f = (Flow(logserver=True)
         .add(uses='BaseExecutor', parallel=1, name='crafter'))
    with f:
        assert hasattr(f, '_sse_logger')
Example #14
    def test_flow_log_server(self):
        f = Flow.load_config('yaml/test_log_server.yml')
        with f:
            self.assertTrue(hasattr(JINA_GLOBAL.logserver, 'ready'))
            a = requests.get(JINA_GLOBAL.logserver.ready, timeout=5)
            self.assertEqual(a.status_code, 200)
Example #15
def test_flow_default_argument_passing():
    f = Flow(port_expose=12345).add(name='test')
    assert f._pod_nodes["test"].cli_args[-1] == '12345'
Example #16
def test_evaluation(tmpdir):
    os.environ['JINA_TEST_RANKING_EVALUATION'] = str(tmpdir)

    def index_documents():
        """Index Documents:
            doc: tag__id = 0
                 tag__dummy_score = 0
                 embedding = 0
            doc: tag__id = 1
                 tag__dummy_score = -1
                 embedding = 1
            doc: tag__id = 2
                 tag__dummy_score = -2
                 embedding = 2
        """
        with Document() as doc0:
            doc0.tags['id'] = '0'
            doc0.tags['dummy_score'] = 0
            doc0.embedding = np.array([0])

        with Document() as doc1:
            doc1.tags['id'] = '1'
            doc1.tags['dummy_score'] = -1
            doc1.embedding = np.array([1])

        with Document() as doc2:
            doc2.tags['id'] = '2'
            doc2.tags['dummy_score'] = -2
            doc2.embedding = np.array([2])

        return [doc0, doc1, doc2]

    with Flow.load_config('flow-index.yml') as index_flow:
        index_flow.index(input_fn=index_documents)

    def validate_evaluation_response(resp):
        assert len(resp.docs) == 2
        for doc in resp.docs:
            assert len(doc.evaluations) == 8  # 2 evaluation Pods with 4 evaluations each

        doc = resp.docs[0]
        assert len(doc.matches) == 2
        assert doc.evaluations[0].op_name == 'evaluate_match-Precision@1'
        assert doc.evaluations[0].value == 1.0
        assert doc.evaluations[1].op_name == 'evaluate_match-Precision@2'
        assert doc.evaluations[1].value == 0.5
        assert doc.evaluations[2].op_name == 'evaluate_match-Recall@1'
        assert doc.evaluations[2].value == 0.5
        assert doc.evaluations[3].op_name == 'evaluate_match-Recall@2'
        assert doc.evaluations[3].value == 0.5

        assert doc.evaluations[4].op_name == 'evaluate_rank-Precision@1'
        assert doc.evaluations[4].value == 1.0
        assert doc.evaluations[5].op_name == 'evaluate_rank-Precision@2'
        assert doc.evaluations[5].value == 0.5
        assert doc.evaluations[6].op_name == 'evaluate_rank-Recall@1'
        assert doc.evaluations[6].value == 0.5
        assert doc.evaluations[7].op_name == 'evaluate_rank-Recall@2'
        assert doc.evaluations[7].value == 0.5

        doc = resp.docs[1]
        assert doc.evaluations[0].op_name == 'evaluate_match-Precision@1'
        assert doc.evaluations[0].value == 1.0
        assert doc.evaluations[1].op_name == 'evaluate_match-Precision@2'
        assert doc.evaluations[1].value == 1.0
        assert doc.evaluations[2].op_name == 'evaluate_match-Recall@1'
        assert doc.evaluations[2].value == 0.5
        assert doc.evaluations[3].op_name == 'evaluate_match-Recall@2'
        assert doc.evaluations[3].value == 1.0

        assert doc.evaluations[4].op_name == 'evaluate_rank-Precision@1'
        assert doc.evaluations[4].value == 1.0
        assert doc.evaluations[5].op_name == 'evaluate_rank-Precision@2'
        assert doc.evaluations[5].value == 1.0
        assert doc.evaluations[6].op_name == 'evaluate_rank-Recall@1'
        assert doc.evaluations[6].value == 0.5
        assert doc.evaluations[7].op_name == 'evaluate_rank-Recall@2'
        assert doc.evaluations[7].value == 1.0

    def doc_groundtruth_evaluation_pairs():
        with Document() as doc0:
            doc0.embedding = np.array([0])

        with Document() as groundtruth0:
            m1 = Document(id='1' * 16)
            m1.score.value = -1
            match0 = groundtruth0.matches.append(m1)
            match0.tags['id'] = '0'
            m2 = Document(id='2' * 16)
            m2.score.value = -1
            match1 = groundtruth0.matches.append(m2)
            match1.tags['id'] = '2'
        # top_k is set to 2 for VectorSearchDriver
        # expects as matches [0, 2] but given [0, 1]
        # Precision@1 = 100%
        # Precision@2 = 50%
        # Recall@1 = 50%
        # Recall@2 = 50%

        # expects as ranked [0, 2] but given [0, 1]
        # Precision@1 = 100%
        # Precision@2 = 50%
        # Recall@1 = 50%
        # Recall@2 = 50%

        with Document() as doc1:
            doc1.embedding = np.array([2])

        with Document() as groundtruth1:
            m1 = Document(id='1' * 16)
            m1.score.value = -1
            match0 = groundtruth1.matches.append(m1)
            match0.tags['id'] = '1'
            m2 = Document(id='2' * 16)
            m2.score.value = -1
            match1 = groundtruth1.matches.append(m2)
            match1.tags['id'] = '2'
        # expects as matches [1, 2] but given [2, 1]
        # Precision@1 = 100%
        # Precision@2 = 100%
        # Recall@1 = 50%
        # Recall@2 = 100%

        # expects as ranked [1, 2] but given [2, 1]
        # Precision@1 = 100%
        # Precision@2 = 100%
        # Recall@1 = 50%
        # Recall@2 = 100%

        return [(doc0, groundtruth0), (doc1, groundtruth1)]

    with Flow.load_config('flow-evaluate.yml') as evaluate_flow:
        evaluate_flow.search(input_fn=doc_groundtruth_evaluation_pairs,
                             output_fn=validate_evaluation_response,
                             callback_on='body',
                             top_k=2)

    del os.environ['JINA_TEST_RANKING_EVALUATION']
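The Precision@k / Recall@k values worked out in the comments follow the usual definitions with two relevant matches per query. A small stand-alone sketch of that arithmetic (independent of the evaluator pods the flow actually runs), reproducing the doc0 numbers asserted above:

def precision_at_k(expected, given, k):
    # fraction of the top-k given matches that are relevant
    return len(set(given[:k]) & set(expected)) / k

def recall_at_k(expected, given, k):
    # fraction of all relevant matches found in the top-k given matches
    return len(set(given[:k]) & set(expected)) / len(expected)

# doc0 expects matches [0, 2] but is given [0, 1]
assert precision_at_k([0, 2], [0, 1], k=1) == 1.0  # Precision@1
assert precision_at_k([0, 2], [0, 1], k=2) == 0.5  # Precision@2
assert recall_at_k([0, 2], [0, 1], k=1) == 0.5     # Recall@1
assert recall_at_k([0, 2], [0, 1], k=2) == 0.5     # Recall@2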
Example #17
def index():
    f = Flow().add(yaml_path='indexer.yml')
    with f:
        f.index_files(sys.argv[2])
Example #18
def search():
    f = Flow.load_config('flows/query.yml')

    with f:
        f.block()
Example #19
def test_query():
    f = Flow.load_config('../yaml/examples/faces/flow-query.yml')
    with f:
        node = f._pod_nodes['gateway']
        assert node.head_args.socket_in == SocketType.PULL_CONNECT
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['loader']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.head_args.socket_out == SocketType.ROUTER_BIND
        for arg in node.peas_args['peas']:
            assert arg.socket_in == SocketType.DEALER_CONNECT
            assert arg.socket_out == SocketType.PUSH_CONNECT
        assert node.tail_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['flipper']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.head_args.socket_out == SocketType.ROUTER_BIND
        for arg in node.peas_args['peas']:
            assert arg.socket_in == SocketType.DEALER_CONNECT
            assert arg.socket_out == SocketType.PUSH_CONNECT
        assert node.tail_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['normalizer']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.head_args.socket_out == SocketType.ROUTER_BIND
        for arg in node.peas_args['peas']:
            assert arg.socket_in == SocketType.DEALER_CONNECT
            assert arg.socket_out == SocketType.PUSH_CONNECT
        assert node.tail_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['encoder']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.head_args.socket_out == SocketType.ROUTER_BIND
        for arg in node.peas_args['peas']:
            assert arg.socket_in == SocketType.DEALER_CONNECT
            assert arg.socket_out == SocketType.PUSH_CONNECT
        assert node.tail_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['chunk_indexer']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.head_args.socket_out == SocketType.PUSH_CONNECT
        assert node.peas_args['peas'][0].socket_in == node.head_args.socket_in
        assert node.peas_args['peas'][0].socket_out == node.head_args.socket_out
        assert node.tail_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['ranker']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.head_args.socket_out == SocketType.PUSH_CONNECT
        assert node.peas_args['peas'][0].socket_in == node.head_args.socket_in
        assert node.peas_args['peas'][0].socket_out == node.head_args.socket_out
        assert node.tail_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['doc_indexer']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.head_args.socket_out == SocketType.PUSH_BIND
        assert node.peas_args['peas'][0].socket_in == node.head_args.socket_in
        assert node.peas_args['peas'][0].socket_out == node.head_args.socket_out
        assert node.tail_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_BIND
        f.dry_run()
Example #20
def dryrun():
    f = Flow.load_config('flows/query.yml')

    with f:
        pass
Example #21
def test_flow_with_one_container_pod(docker_image_built):
    f = Flow().add(name='dummyEncoder1', uses=f'docker://{img_name}')

    with f:
        f.index(inputs=random_docs(10))
Example #22
def test_simple_use_relative_import():
    from .dummyhub import DummyHubExecutor
    DummyHubExecutor()

    with Flow().add(uses='DummyHubExecutor'):
        pass
Example #23
def test_dryrun():
    f = (Flow()
         .add(name='dummyEncoder', uses=os.path.join(cur_dir, '../mwu-encoder/mwu_encoder.yml')))

    with f:
        f.dry_run()
Example #24
def test_use_from_local_dir_flow_level():
    with Flow().add(uses='dummyhub/config.yml'):
        pass
Example #25
def test_flow_with_jump():
    f = (Flow().add(name='r1')
         .add(name='r2')
         .add(name='r3', needs='r1')
         .add(name='r4', needs='r2')
         .add(name='r5', needs='r3')
         .add(name='r6', needs='r4')
         .add(name='r8', needs='r6')
         .add(name='r9', needs='r5')
         .add(name='r10', needs=['r9', 'r8']))

    with f:
        f.dry_run()

    node = f._pod_nodes['gateway']
    assert node.head_args.socket_in == SocketType.PULL_CONNECT
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r1']
    assert node.head_args.socket_in == SocketType.PULL_BIND
    assert node.tail_args.socket_out == SocketType.PUB_BIND

    node = f._pod_nodes['r2']
    assert node.head_args.socket_in == SocketType.SUB_CONNECT
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r3']
    assert node.head_args.socket_in == SocketType.SUB_CONNECT
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r4']
    assert node.head_args.socket_in == SocketType.PULL_BIND
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r5']
    assert node.head_args.socket_in == SocketType.PULL_BIND
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r6']
    assert node.head_args.socket_in == SocketType.PULL_BIND
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r8']
    assert node.head_args.socket_in == SocketType.PULL_BIND
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r9']
    assert node.head_args.socket_in == SocketType.PULL_BIND
    assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

    node = f._pod_nodes['r10']
    assert node.head_args.socket_in == SocketType.PULL_BIND
    assert node.tail_args.socket_out == SocketType.PUSH_BIND

    for name, node in f._pod_nodes.items():
        assert node.peas_args['peas'][0] == node.head_args
        assert node.peas_args['peas'][0] == node.tail_args

    f.save_config('tmp.yml')
    Flow.load_config('tmp.yml')

    with Flow.load_config('tmp.yml') as fl:
        fl.dry_run()

    rm_files(['tmp.yml'])
Example #26
def test_py_client():
    f = (Flow()
         .add(name='r1', uses='_pass')
         .add(name='r2', uses='_pass')
         .add(name='r3', uses='_pass', needs='r1')
         .add(name='r4', uses='_pass', needs='r2')
         .add(name='r5', uses='_pass', needs='r3')
         .add(name='r6', uses='_pass', needs='r4')
         .add(name='r8', uses='_pass', needs='r6')
         .add(name='r9', uses='_pass', needs='r5')
         .add(name='r10', uses='_merge', needs=['r9', 'r8']))

    with f:
        f.dry_run()
        from jina.clients import py_client
        py_client(port_expose=f.port_expose,
                  host=f.host).dry_run(as_request='index')

    with f:
        node = f._pod_nodes['gateway']
        assert node.head_args.socket_in == SocketType.PULL_CONNECT
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r1']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUB_BIND

        node = f._pod_nodes['r2']
        assert node.head_args.socket_in == SocketType.SUB_CONNECT
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r3']
        assert node.head_args.socket_in == SocketType.SUB_CONNECT
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r4']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r5']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r6']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r8']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r9']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_CONNECT

        node = f._pod_nodes['r10']
        assert node.head_args.socket_in == SocketType.PULL_BIND
        assert node.tail_args.socket_out == SocketType.PUSH_BIND

        for name, node in f._pod_nodes.items():
            assert node.peas_args['peas'][0] == node.head_args
            assert node.peas_args['peas'][0] == node.tail_args
Example #27
def test_load_flow_from_cli():
    a = set_flow_parser().parse_args(['--uses', 'yaml/test-flow-port.yml'])
    f = Flow.load_config(a.uses)
    with f:
        assert f.port_expose == 12345
Example #28
def test_flow_arguments_priorities():
    f = Flow(port_expose=12345).add(name='test', port_expose=23456)
    assert f._pod_nodes["test"].cli_args[-1] == '23456'
Example #29
def test_flow_default_argument_passing():
    f = Flow(port_expose=12345).add(name='test')
    assert '12345' in f._pod_nodes['test'].cli_args
Example #30
    def test_dryrun(self):
        f = (Flow()
             .add(name='dummyEncoder', yaml_path='mwu-encoder/mwu_encoder.yml'))

        with f:
            f.dry_run()