def submit_async():
    """Send an asynchronous operation.

    Submits a two-step pipeline: download a README into a shared
    directory, then print it from there. Returns the operation-update
    URL so the client can poll for progress.

    :rtype: InlineResponse202
    """
    shared_directory = '/mnt/shared'
    task_write = tasks.CustomTask(
        'download-file',
        'workflows-extract-download',
        url='https://github.com/MetaCell/cloud-harness/blob/master/README.md')
    task_print = tasks.CustomTask(
        'print-file',
        'samples-print-file',
        file_path=shared_directory + '/README.md')
    # Both tasks share `shared_directory` so the second can read what the
    # first downloaded.
    op = operations.PipelineOperation(
        'test-custom-connected-op-',
        (task_write, task_print),
        shared_directory=shared_directory)
    submitted = op.execute()
    if not op.is_error():
        return InlineResponse202(task=InlineResponse202Task(
            href=op.get_operation_update_url(), name=submitted.name)), 202
    # Early-return above makes the error branch a plain fall-through.
    return 'Error submitting operation', 500
def create_operation(workspace, workspace_resource):
    """Submit a pipeline operation that downloads a workspace resource.

    Resolves the workspace's PVC, mounts it at ``/project_download``, and
    runs a single download task; a completion notification is sent to the
    ``osb-download-file-queue`` with the resource id as payload.

    :param workspace: workspace whose PVC receives the download
    :param workspace_resource: resource providing ``location``, ``folder``
        and ``id``
    """
    # NOTE(review): `resources` is built but never passed to the task or
    # operation below — confirm whether it should be wired in or removed.
    resources = {
        'requests': {'memory': '256Mi', 'cpu': '250m'},
        'limits': {'memory': '2048Mi', 'cpu': '2500m'},
    }
    workspace_pvc_name = WorkspaceRepository().get_pvc_name(workspace)
    shared_directory = f'{workspace_pvc_name}:/project_download'
    download_task = tasks.CustomTask(
        name='osb-download-file',
        image_name='workflows-extract-download',
        url=workspace_resource.location,
        shared_directory=shared_directory,
        folder=workspace_resource.folder)
    op = operations.PipelineOperation(
        # Plain string: the original f-string had no placeholders (F541).
        basename='osb-download-file-job',
        tasks=(download_task,),
        shared_directory=shared_directory,
        folder=workspace_resource.folder,
        shared_volume_size=100,
        on_exit_notify={
            'queue': 'osb-download-file-queue',
            'payload': str(workspace_resource.id),
        })
    workflow = op.execute()
def test_custom_task_workflow():
    """Build a one-task custom workflow, dump it as YAML, optionally run it."""
    download = operations.CustomTask(
        'download-file',
        'workflows-extract-download',
        url='https://www.bing.com')
    pipeline = operations.PipelineOperation('test-custom-op-', (download,))
    print('\n', yaml.dump(pipeline.to_workflow()))
    if execute:
        print(pipeline.execute())
def test_pipeline_workflow():
    """Build a two-step Python-task pipeline, dump it, optionally run it."""

    # Kept named `f`: the function object is handed to PythonTask and its
    # name may end up in the serialized workflow.
    def f():
        import time
        time.sleep(2)
        print('whatever')

    steps = (tasks.PythonTask('step1', f), tasks.PythonTask('step2', f))
    pipeline = operations.PipelineOperation('test-pipeline-op-', steps)
    print('\n', yaml.dump(pipeline.to_workflow()))
    if execute:
        print(pipeline.execute())
def test_custom_connected_task_workflow():
    """Build a two-task workflow connected via a shared volume.

    The first task downloads a README into the shared directory; the
    second prints it from there. The workflow YAML is dumped and, when
    the module-level ``execute`` flag is set, submitted.
    """
    shared_directory = '/mnt/shared'
    task_write = operations.CustomTask(
        'download-file',
        'workflows-extract-download',
        shared_directory=shared_directory,
        url='https://raw.githubusercontent.com/openworm/org.geppetto/master/README.md')
    task_print = operations.CustomTask(
        'print-file',
        'workflows-print-file',
        shared_directory=shared_directory,
        file_path=shared_directory + '/README.md')
    op = operations.PipelineOperation(
        'test-custom-connected-op-',
        (task_write, task_print),
        shared_directory=shared_directory,
        shared_volume_size=100)
    # Removed a commented-out `op.execute()`: execution is already guarded
    # by the `execute` flag below, and uncommenting it would double-submit.
    print('\n', yaml.dump(op.to_workflow()))
    if execute:
        print(op.execute())
def delete_resource(workspace_resource, pvc_name, resource_path: str):
    """Delete a resource's files from the workspace volume, then rescan it.

    Runs a two-task pipeline on the workspace PVC: an ``rm -Rf`` of the
    resource path followed by a scan of the owning workspace.

    :param workspace_resource: resource being deleted (provides ``id`` and
        ``workspace_id``)
    :param pvc_name: name of the PVC holding the workspace data
    :param resource_path: path of the resource relative to the project root
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info(
        "Delete workspace resource with id: %s, path: %s",
        workspace_resource.id, resource_path)
    shared_directory = f"{pvc_name}:/project_download"
    # SECURITY NOTE(review): `resource_path` is interpolated into an
    # `rm -Rf` target; a value containing ".." could escape
    # project_download — confirm callers sanitize it.
    delete_task = tasks.CommandBasedTask(
        name="osb-delete-resource",
        command=["rm", "-Rf", "project_download/" + resource_path])
    scan_task = create_scan_task(workspace_resource.workspace_id)
    op = operations.PipelineOperation(
        basename="osb-delete-resource-job",
        tasks=(delete_task, scan_task),
        shared_directory=shared_directory,
        ttl_strategy=ttl_strategy,
        # Pin both tasks to the same node context as the workspace's pods.
        pod_context=operations.PodExecutionContext(
            "workspace", workspace_resource.workspace_id, True),
    )
    workflow = op.execute()
def clone_workspaces_content(source_ws_id, dest_ws_id):
    """Copy all content from one workspace's volume into another's.

    Mounts the source PVC read at ``/source`` and the destination PVC at
    ``/project_download``, copies everything across, then rescans the
    destination workspace.

    :param source_ws_id: id of the workspace to copy from
    :param dest_ws_id: id of the workspace to copy into
    """
    source_pvc_name = WorkspaceService.get_pvc_name(source_ws_id)
    dest_pvc_name = WorkspaceService.get_pvc_name(dest_ws_id)
    source_volume = f"{source_pvc_name}:/source"
    dest_volume = f"{dest_pvc_name}:/project_download"
    copy_task = tasks.BashTask(
        # Plain string: the original f-string had no placeholders (F541).
        name="clone-workspace-data",
        # `sleep 1` gives the volume mounts a moment to settle before copying.
        source="sleep 1 && cp -R /source/* /project_download")
    scan_task = create_scan_task(dest_ws_id)
    op = operations.PipelineOperation(
        basename="osb-clone-workspace-job",
        tasks=(copy_task, scan_task),
        ttl_strategy=ttl_strategy,
        pod_context=operations.PodExecutionContext("workspace", dest_ws_id, True),
    )
    op.volumes = (source_volume, dest_volume)
    workflow = op.execute()