async def test_flow_store(workspace):
    """Add one flow to an empty FlowStore, then delete it and verify the store is empty again.

    :param workspace: workspace id fixture the flow is registered under
    """
    store = FlowStore()
    flow_id = DaemonID('jflow')
    flow_model = FlowModel()
    # plain string literal: the original f-string had no placeholders
    flow_model.uses = 'flow.yml'
    await store.add(
        id=flow_id,
        workspace_id=workspace,
        params=flow_model,
        ports={},
        port_expose=56789,
    )
    assert len(store) == 1
    assert flow_id in store
    await store.delete(flow_id)
    assert flow_id not in store
    assert not store
def __init__(
    self, id: Optional[DaemonID] = None, files: List[UploadFile] = File(None)
) -> None:
    """Register (or refresh) a workspace entry in the module-level ``store`` and enqueue its files.

    :param id: existing workspace id; a fresh ``jworkspace`` DaemonID is generated when omitted
    :param files: uploaded files that the daemon task queue will process for this workspace
    """
    self.id = id if id else DaemonID('jworkspace')
    self.files = files
    # imported lazily — presumably to avoid a circular import at module load; verify
    from daemon.tasks import __task_queue__

    if self.id not in store:
        # when id doesn't exist in store, create it.
        store.add(id=self.id, value=RemoteWorkspaceState.PENDING)
        __task_queue__.put((self.id, self.files))
    # NOTE(review): deliberately a second `if`, not `elif` — a freshly added id is
    # PENDING, so at most one of the two branches fires per call.
    if self.id in store and store[self.id].state == RemoteWorkspaceState.ACTIVE:
        # when id exists in the store and is "active", update it.
        store.update(id=self.id, value=RemoteWorkspaceState.PENDING)
        __task_queue__.put((self.id, self.files))
    # snapshot of the (id -> item) pair as it stands after the transition above
    self.item = {self.id: store[self.id]}
def _create_workspace_directly(cur_dir):
    """Build a daemon workspace (files + Docker image + network) without going through the API.

    Copies ``cur_dir`` into a fresh workspace directory, builds the Docker image and
    network for it, registers the resulting item as ACTIVE in ``workspace_store`` and
    returns the handles a test needs to clean everything up afterwards.

    :param cur_dir: directory whose contents seed the workspace
    :return: tuple of (image_id, network_id, workspace_id, workspace_store)
    """
    from daemon.models import DaemonID
    from daemon.helper import get_workspace_path
    from daemon.files import DaemonFile
    from daemon import daemon_logger
    from daemon.dockerize import Dockerizer
    from daemon.stores import workspace_store
    from daemon.models import WorkspaceItem
    from daemon.models.workspaces import WorkspaceMetadata
    from daemon.models.workspaces import WorkspaceArguments

    workspace_id = DaemonID('jworkspace')
    workdir = get_workspace_path(workspace_id)
    shutil.copytree(cur_dir, workdir)
    # reuse the already computed path instead of resolving it a second time
    daemon_file = DaemonFile(workdir=workdir, logger=daemon_logger)
    image_id = Dockerizer.build(
        workspace_id=workspace_id, daemon_file=daemon_file, logger=daemon_logger
    )
    network_id = Dockerizer.network(workspace_id=workspace_id)

    from jina.enums import RemoteWorkspaceState

    workspace_store[workspace_id] = WorkspaceItem(
        state=RemoteWorkspaceState.ACTIVE,
        metadata=WorkspaceMetadata(
            image_id=image_id,
            image_name=workspace_id.tag,
            network=network_id,
            workdir=workdir,
        ),
        arguments=WorkspaceArguments(
            files=os.listdir(cur_dir), jinad={'a': 'b'}, requirements=''
        ),
    )
    return image_id, network_id, workspace_id, workspace_store
def test_jtype_only():
    """Every valid jtype yields a DaemonID whose jtype round-trips and whose jid parses as a UUID."""
    for valid_type in VALID_JTYPES:
        daemon_id = DaemonID(valid_type)
        assert daemon_id.jtype == valid_type
        # uuid.UUID raises ValueError on a malformed jid, failing the test
        assert uuid.UUID(daemon_id.jid)
def test_workspace_path():
    """get_workspace_path joins the configured workspace root with every path segment given."""
    workspace_id = DaemonID('jworkspace')
    expected_single = f'{jinad_args.workspace}/{workspace_id}'
    assert get_workspace_path(workspace_id) == expected_single
    expected_nested = f'{jinad_args.workspace}/123/456'
    assert get_workspace_path('123', '456') == expected_nested
@pytest.fixture(scope='module', autouse=True) def workspace(): from tests.conftest import _create_workspace_directly, _clean_up_workspace image_id, network_id, workspace_id, workspace_store = _create_workspace_directly( cur_dir) yield workspace_id _clean_up_workspace(image_id, network_id, workspace_id, workspace_store) @pytest.mark.asyncio @pytest.mark.parametrize( 'model, store, id', [ (PeaModel(), PeaStore, DaemonID(f'jpea')), (PodModel(), PodStore, DaemonID(f'jpod')), ], ) async def test_peapod_store_add(model, store, id, workspace): s = store() await s.add(id=id, params=model, workspace_id=workspace, ports={}) assert len(s) == 1 assert id in s await s.delete(id) assert not s @pytest.mark.asyncio @pytest.mark.parametrize('model, store, type', [(PeaModel(), PeaStore, 'pea'), (PodModel(), PodStore, 'pod')])
import pathlib
from daemon.models import DaemonID
from daemon.models.base import StoreItem
from daemon.stores.base import BaseStore
from daemon import __root_workspace__

# Three distinct entries despite identical-looking keys: each DaemonID('jflow')
# call generates a fresh random id, so the keys do not collide.
store_items = {
    DaemonID(f'jflow'): StoreItem(),
    DaemonID(f'jflow'): StoreItem(),
    DaemonID(f'jflow'): StoreItem(),
}


class DummyStore(BaseStore):
    """Minimal BaseStore subclass used to exercise the ``@BaseStore.dump`` decorator."""

    def __init__(self, mock):
        super().__init__()
        self.some_field = 'hello world'
        # kept so the test can verify the decorated method body was executed
        self.mock = mock

    @BaseStore.dump
    def some_function(self):
        self.mock()


def test_base_store_serialization(mocker):
    """Populate a DummyStore with items ahead of exercising its dump/serialization path.

    NOTE(review): no assertion is visible here — the test body presumably continues
    beyond this chunk; confirm against the full file.
    """
    mock = mocker.Mock()
    # make sure the root workspace directory exists before the store writes into it
    pathlib.Path(__root_workspace__).mkdir(parents=True, exist_ok=True)
    dummy_store = DummyStore(mock)
    dummy_store.status.items.update(store_items)
from daemon.helper import get_workspace_path
from daemon.models import DaemonID
from daemon.models.containers import ContainerArguments
from daemon.models import ContainerItem
from daemon.models.containers import ContainerMetadata
from jina import Flow

# Fixture payload: JSONL records in the shape jina runtimes write to logging.log
# (the \u001B sequences are ANSI color escapes embedded in the messages).
log_content = """
{"host":"ubuntu","process":"32539","type":"INFO","name":"encode1","uptime":"20210124215151","context":"encode1","workspace_path":"/tmp/jinad/32aa7734-fbb8-4e7a-9f76-46221b512648","log_id":"16ef0bd7-e534-42e7-9076-87a3f585933c","message":"starting jina.peapods.runtimes.zmq.zed.ZEDRuntime..."}
{"host":"ubuntu","process":"32539","type":"INFO","name":"encode1","uptime":"20210124215151","context":"encode1/ZEDRuntime","workspace_path":"/tmp/jinad/32aa7734-fbb8-4e7a-9f76-46221b512648","log_id":"16ef0bd7-e534-42e7-9076-87a3f585933c","message":"input \u001B[33mtcp://0.0.0.0:45319\u001B[0m (PULL_BIND) output \u001B[33mtcp://0.0.0.0:59229\u001B[0m (PUSH_CONNECT) control over \u001B[33mtcp://0.0.0.0:49571\u001B[0m (PAIR_BIND)"}
{"host":"ubuntu","process":"31612","type":"SUCCESS","name":"encode1","uptime":"20210124215151","context":"encode1","workspace_path":"/tmp/jinad/32aa7734-fbb8-4e7a-9f76-46221b512648","log_id":"16ef0bd7-e534-42e7-9076-87a3f585933c","message":"ready and listening"}
{"host":"ubuntu","process":"32546","type":"INFO","name":"encode2","uptime":"20210124215151","context":"encode2","workspace_path":"/tmp/jinad/32aa7734-fbb8-4e7a-9f76-46221b512648","log_id":"16ef0bd7-e534-42e7-9076-87a3f585933c","message":"starting jina.peapods.runtimes.zmq.zed.ZEDRuntime..."}
"""

# Fresh random ids generated once at import time; `nonexisting_id` presumably
# represents a flow that is never created — verify against the tests using it.
workspace_id = DaemonID('jworkspace')
flow_id = DaemonID('jflow')
nonexisting_id = DaemonID('jflow')


def _write_to_logfile(content, append=False):
    """Write `content` lines to <workspace>/logs/<flow>/logging.log ('a' appends, 'w+' truncates)."""
    with open(
        get_workspace_path(workspace_id, 'logs', flow_id, 'logging.log'),
        'a' if append else 'w+',
    ) as f:
        f.writelines(content)


def _write_to_workspace_logfile(content, append=False):
    # NOTE(review): definition is truncated at this chunk boundary — kept as-is.
    with open(
        get_workspace_path(workspace_id, 'logging.log'),