Example #1
async def test_flow_monitoring(logger, tmpdir, docker_images, port_generator):
    dump_path = os.path.join(str(tmpdir), 'docker-compose-flow-monitoring.yml')
    port1 = port_generator()
    port2 = port_generator()

    # monitoring is enabled on the gateway (port1) and on the executor (port2)
    flow = Flow(name='test-flow-monitoring',
                monitoring=True,
                port_monitoring=port1).add(
                    name='segmenter',
                    uses=f'docker://{docker_images[0]}',
                    monitoring=True,
                    port_monitoring=port2,
                )
    flow.to_docker_compose_yaml(dump_path, 'default')
    with DockerComposeFlow(dump_path):
        # both the gateway's and the executor's monitoring endpoints should answer
        for port in [port1, port2]:
            resp = req.get(f'http://localhost:{port}/')
            assert resp.status_code == 200
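The test above relies on the `logger`, `docker_images` and `port_generator` pytest fixtures, which are defined elsewhere in the test suite. A minimal sketch of what a `port_generator` fixture might look like follows; the name matches the parameter above, but the implementation is an assumption, not the repository's actual conftest.

import random

import pytest


@pytest.fixture
def port_generator():
    """Return a callable handing out distinct random ports (hypothetical sketch)."""
    used_ports = set()

    def _random_port():
        port = random.randint(20000, 60000)
        while port in used_ports:
            port = random.randint(20000, 60000)
        used_ports.add(port)
        return port

    return _random_port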
Example #2
def test_flow_to_docker_compose_sandbox(tmpdir):
    flow = Flow(name='test-flow',
                port=8080).add(uses='jinahub+sandbox://DummyHubExecutor')

    dump_path = os.path.join(str(tmpdir), 'test_flow_docker_compose.yml')

    flow.to_docker_compose_yaml(output_path=dump_path)

    configuration = None
    with open(dump_path) as f:
        configuration = yaml.safe_load(f)

    services = configuration['services']
    gateway_service = services['gateway']
    gateway_args = gateway_service['command']

    deployment_addresses = json.loads(
        gateway_args[gateway_args.index('--deployments-addresses') + 1])
    # the sandbox Executor is remote, so its address must use TLS-secured gRPC
    assert deployment_addresses['executor0'][0].startswith('grpcs://')
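The gateway receives its deployment addresses as a JSON blob on the command line; the lookup pattern used by the assertions above can be wrapped in a small helper. This is only a sketch that mirrors the test's own logic, not a helper from the repository.

import json

import yaml


def get_deployment_addresses(compose_path):
    """Extract the gateway's --deployments-addresses JSON from a generated compose file."""
    with open(compose_path) as f:
        config = yaml.safe_load(f)
    command = config['services']['gateway']['command']
    return json.loads(command[command.index('--deployments-addresses') + 1])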
Example #3
async def test_flow_with_workspace(logger, docker_images, tmpdir):
    flow = Flow(name='k8s_flow-with_workspace', port_expose=9090, protocol='http').add(
        name='test_executor',
        uses=f'docker://{docker_images[0]}',
        workspace='/shared',
    )

    dump_path = os.path.join(str(tmpdir), 'docker-compose.yml')
    flow.to_docker_compose_yaml(dump_path)

    with DockerComposeFlow(dump_path):
        resp = await run_test(
            flow=flow,
            endpoint='/workspace',
        )

    docs = resp[0].docs
    assert len(docs) == 10
    for doc in docs:
        # every Document carries the Executor's resolved workspace path in its tags
        assert doc.tags['workspace'] == '/shared/TestExecutor/0'
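The containerized Executor behind `docker_images[0]` is not shown here; the assertion on `doc.tags['workspace']` only makes sense if it echoes its workspace back into each Document. A hypothetical Executor with that behaviour could look like the sketch below; the class name and endpoint are assumptions based on the expected tag value.

from jina import Executor, requests


class TestExecutor(Executor):
    @requests(on='/workspace')
    def report_workspace(self, docs, **kwargs):
        # echo the resolved workspace path into every Document's tags
        for doc in docs:
            doc.tags['workspace'] = self.workspace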
Example #4
def test_flow_to_docker_compose_gpus(tmpdir, count):
    # `count` is supplied by a pytest fixture/parametrization with the number of GPUs to reserve
    flow = Flow().add(name='encoder', gpus=count)
    dump_path = os.path.join(str(tmpdir), 'test_flow_docker_compose_gpus.yml')

    flow.to_docker_compose_yaml(output_path=dump_path)

    configuration = None
    with open(dump_path) as f:
        configuration = yaml.safe_load(f)

    services = configuration['services']
    encoder_service = services['encoder']
    assert encoder_service['deploy'] == {
        'resources': {
            'reservations': {
                'devices': [{
                    'driver': 'nvidia',
                    'count': count,
                    'capabilities': ['gpu']
                }]
            }
        }
    }
Example #5
    @staticmethod
    def to_docker_compose_yaml(
        uses: str,
        output_path: Optional[str] = None,
        network_name: Optional[str] = None,
        executor_type: Optional[
            StandaloneExecutorType] = StandaloneExecutorType.EXTERNAL,
        uses_with: Optional[Dict] = None,
        uses_metas: Optional[Dict] = None,
        uses_requests: Optional[Dict] = None,
        **kwargs,
    ):
        """
        Converts the Executor into a yaml file to run with `docker-compose up`
        :param uses: the Executor to use. Has to be containerized
        :param output_path: The output path for the yaml file
        :param network_name: The name of the network that will be used by the deployment
        :param executor_type: The type of Executor. Can be external or shared. External Executors include the Gateway. Shared Executors don't. Defaults to External
        :param uses_with: dictionary of parameters to overwrite from the default config's with field
        :param uses_metas: dictionary of parameters to overwrite from the default config's metas field
        :param uses_requests: dictionary of parameters to overwrite from the default config's requests field
        :param kwargs: other kwargs accepted by the Flow, full list can be found `here <https://docs.jina.ai/api/jina.orchestrate.flow.base/>`_
        """
        from jina import Flow

        f = Flow(**kwargs).add(
            uses=uses,
            uses_with=uses_with,
            uses_metas=uses_metas,
            uses_requests=uses_requests,
        )
        f.to_docker_compose_yaml(
            output_path=output_path,
            network_name=network_name,
            include_gateway=executor_type ==
            BaseExecutor.StandaloneExecutorType.EXTERNAL,
        )
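Given the signature and docstring above, the conversion can be driven directly from the Executor class. A minimal usage sketch follows; it assumes the method is exposed on `jina.Executor` exactly as defined above, and the image name is a placeholder.

from jina import Executor

# export one containerized Executor (plus a Gateway) to a docker-compose file
Executor.to_docker_compose_yaml(
    uses='docker://my-executor-image',
    output_path='docker-compose.yml',
)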
Example #6
def test_disable_auto_volume(tmpdir):
    flow = Flow(name='test-flow', port=9090).add(uses='docker://image',
                                                 name='executor0',
                                                 disable_auto_volume=True)

    dump_path = os.path.join(str(tmpdir),
                             'test_flow_docker_compose_volume.yml')

    flow.to_docker_compose_yaml(output_path=dump_path)

    configuration = None
    with open(dump_path) as f:
        configuration = yaml.safe_load(f)

    assert set(configuration.keys()) == {'version', 'services', 'networks'}
    assert configuration['version'] == '3.3'
    assert configuration['networks'] == {'jina-network': {'driver': 'bridge'}}
    services = configuration['services']
    assert len(services) == 2  # gateway, executor0
    assert set(services.keys()) == {
        'gateway',
        'executor0',
    }
    assert 'volumes' not in services['executor0']
Example #7
def test_raise_exception_invalid_executor():
    from jina.excepts import NoContainerizedError

    with pytest.raises(NoContainerizedError):
        # a plain, non-containerized Executor cannot be exported to docker-compose
        f = Flow().add(uses='A')
        f.to_docker_compose_yaml()
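`NoContainerizedError` is raised because `uses='A'` refers to a non-containerized Executor. For contrast, a conversion that is expected to succeed uses a docker:// image, as in the earlier examples; the image name below is a placeholder.

from jina import Flow

# only containerized Executors (e.g. a docker:// image) can be exported
f = Flow().add(uses='docker://my-executor-image')
f.to_docker_compose_yaml('docker-compose.yml')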