def setup_module():
    print('setup', get_workspace_path(workspace_id, flow_id))
    _create_flow()
    os.makedirs(get_workspace_path(workspace_id, 'logs', flow_id), exist_ok=True)
    _write_to_logfile(log_content)
    _write_to_workspace_logfile(log_content)

def store_files_in_workspace(
    workspace_id: DaemonID, files: List[UploadFile], logger: "JinaLogger"
) -> None:
    """Store the uploaded files in local disk

    :param workspace_id: workspace id representing the local directory
    :param files: files uploaded to the workspace endpoint
    :param logger: JinaLogger to use
    """
    workdir = get_workspace_path(workspace_id)
    Path(workdir).mkdir(parents=True, exist_ok=True)

    if not files:
        logger.warning("couldn't find any files to upload!")
        return

    for f in files:
        dest = os.path.join(workdir, f.filename)
        if os.path.isfile(dest):
            logger.warning(
                f'file {f.filename} already exists in workspace {workspace_id}, will be replaced'
            )
        with open(dest, 'wb+') as fp:
            content = f.file.read()
            fp.write(content)
        logger.debug(f'saved uploads to {dest}')
        if is_zipfile(dest):
            logger.debug(f'unzipping {dest}')
            with ZipFile(dest, 'r') as zipf:
                zipf.extractall(path=workdir)
            os.remove(dest)

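# Usage sketch (an assumption, not part of the daemon code): calling the helper
# directly with a single in-memory upload. The UploadFile keyword signature
# varies slightly across Starlette versions, the JinaLogger import path is
# assumed, and DaemonID comes from daemon.models as in _create_workspace_directly.
import io
from fastapi import UploadFile
from jina.logging.logger import JinaLogger
from daemon.models import DaemonID

store_files_in_workspace(
    workspace_id=DaemonID('jworkspace'),
    files=[UploadFile(filename='requirements.txt', file=io.BytesIO(b'jina'))],
    logger=JinaLogger('workspace-upload'),
)
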
def _create_workspace_directly(cur_dir):
    from daemon.models import DaemonID, WorkspaceItem
    from daemon.models.workspaces import WorkspaceArguments, WorkspaceMetadata
    from daemon.helper import get_workspace_path
    from daemon.files import DaemonFile
    from daemon import daemon_logger
    from daemon.dockerize import Dockerizer
    from daemon.stores import workspace_store
    from jina.enums import RemoteWorkspaceState

    workspace_id = DaemonID('jworkspace')
    workdir = get_workspace_path(workspace_id)
    shutil.copytree(cur_dir, workdir)
    daemon_file = DaemonFile(
        workdir=get_workspace_path(workspace_id), logger=daemon_logger
    )
    image_id = Dockerizer.build(
        workspace_id=workspace_id, daemon_file=daemon_file, logger=daemon_logger
    )
    network_id = Dockerizer.network(workspace_id=workspace_id)
    workspace_store[workspace_id] = WorkspaceItem(
        state=RemoteWorkspaceState.ACTIVE,
        metadata=WorkspaceMetadata(
            image_id=image_id,
            image_name=workspace_id.tag,
            network=network_id,
            workdir=workdir,
        ),
        arguments=WorkspaceArguments(
            files=os.listdir(cur_dir), jinad={'a': 'b'}, requirements=''
        ),
    )
    return image_id, network_id, workspace_id, workspace_store

def localpath(self) -> Path:
    """Validate the local filepath in the workspace from the filename.
    Raises 404 if the filepath doesn't exist in the workspace.

    :return: filepath for flow yaml
    """
    try:
        return FilePath.validate(
            Path(get_workspace_path(self.workspace_id, self.filename))
        )
    except PathNotAFileError as e:
        raise HTTPException(
            status_code=HTTPStatus.NOT_FOUND,
            detail=f'File `{self.filename}` not found in workspace `{self.workspace_id}`',
        ) from e

def test_upload(fastapi_client):
    response = fastapi_client.post(
        '/workspaces',
        files=[('files', open(str(cur_dir / d), 'rb')) for d in deps],
    )
    assert response.status_code == 201
    response_json = response.json()
    workspace_id = next(iter(response_json))
    item = WorkspaceItem(**response_json[workspace_id])
    assert item.state == RemoteWorkspaceState.PENDING
    assert item.metadata is None
    assert item.arguments is None
    for d in deps:
        assert os.path.exists(get_workspace_path(workspace_id, d))

def load_and_dump(self) -> None:
    """
    Every Flow created inside JinaD lives inside a container. It is important
    to know the list of ports to be published on localhost before actually
    starting the container.

    1. `load` the flow yaml here.
        - yaml is stored in the `workspace` directory, so we'll `cd` there
        - yaml might include env vars, so we'll set them (passed via query params)
    2. `build` the Flow so that the `gateway` gets added.
        - get the list of ports to be published (port_expose, port_in, port_out, port_ctrl)
        - ports need to be published for the gateway & executors that are not
          `ContainerRuntime` or `JinadRuntime` based
        - Deployment-level args for ports are enough, as we don't need to publish Pod ports
    3. `save` the Flow config.
        - saves port configs of all `executors` into the new yaml.
        - set the `JINA_FULL_CLI` envvar, so that `gateway` args are also added.
        - save the config into a new file.
    4. pass this new file as filename to `partial-daemon` to start the Flow
    """
    with ExitStack() as stack:
        # set env vars
        stack.enter_context(change_env('JINA_FULL_CLI', 'true'))

        # change directory to `workspace`
        stack.enter_context(change_cwd(get_workspace_path(self.workspace_id)))

        # load and build
        f: Flow = Flow.load_config(
            str(self.localpath()), substitute=True, context=self.envs
        ).build()

        # get & set the ports mapping, set `runs_in_docker`
        port_mapping = []
        port_mapping.append(
            PortMapping(
                deployment_name='gateway',
                pod_name='gateway',
                ports=Ports(port_expose=f.port_expose),
            )
        )
        for deployment_name, deployment in f._deployment_nodes.items():
            runtime_cls = update_runtime_cls(deployment.args, copy=True).runtime_cls
            if runtime_cls in ['WorkerRuntime'] + list(
                GATEWAY_RUNTIME_DICT.values()
            ):
                current_ports = Ports()
                for port_name in Ports.__fields__:
                    setattr(
                        current_ports,
                        port_name,
                        getattr(deployment.args, port_name, None),
                    )
                port_mapping.append(
                    PortMapping(
                        deployment_name=deployment_name,
                        pod_name='',
                        ports=current_ports,
                    )
                )
            elif (
                runtime_cls in ['ContainerRuntime']
                and hasattr(deployment.args, 'replicas')
                and deployment.args.replicas > 1
            ):
                for pod_args in [deployment.pod_args['head']]:
                    self._update_port_mapping(
                        pod_args, deployment_name, port_mapping
                    )
        self.ports = port_mapping

        # save to a new file & set it for partial-daemon
        f.save_config(filename=self.newfile)
        self.params.uses = self.newname

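# A minimal sketch (an assumption, not the actual daemon.models definitions) of
# the two models load_and_dump relies on, inferred from how they are used above:
# `Ports` carries the publishable port fields named in the docstring, and
# `PortMapping` ties one set of ports to a deployment/pod name.
from typing import Optional
from pydantic import BaseModel


class Ports(BaseModel):
    port_expose: Optional[int] = None
    port_in: Optional[int] = None
    port_out: Optional[int] = None
    port_ctrl: Optional[int] = None


class PortMapping(BaseModel):
    deployment_name: str
    pod_name: str
    ports: Ports


# e.g. the gateway entry appended first inside load_and_dump:
gateway_entry = PortMapping(
    deployment_name='gateway', pod_name='gateway', ports=Ports(port_expose=12345)
)
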
def newfile(self) -> str:
    """Return the new file path derived from `newname`

    :return: filepath to save the flow config in
    """
    return get_workspace_path(self.workspace_id, self.newname)

def test_workspace_path():
    uid = DaemonID('jworkspace')
    assert get_workspace_path(uid) == f'{jinad_args.workspace}/{uid}'
    assert get_workspace_path('123', '456') == f'{jinad_args.workspace}/123/456'

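# test_workspace_path pins down the helper's contract: join the configured
# `jinad_args.workspace` root with every argument it receives. A hypothetical
# reimplementation consistent with those assertions (the real helper lives in
# daemon.helper; `jinad_args` is assumed to be in scope as in the test):
import os


def get_workspace_path_sketch(*args) -> str:
    # join the jinad workspace root with each argument, stringified
    return os.path.join(jinad_args.workspace, *[str(a) for a in args])
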
def workdir(self) -> str:
    """Return the workdir for the current worker thread

    :return: local directory where files get stored
    """
    return get_workspace_path(self.id)

def _write_to_workspace_logfile(content, append=False):
    with open(
        get_workspace_path(workspace_id, 'logging.log'),
        'a' if append else 'w+',
    ) as f:
        f.writelines(content)