def main():
    """CLI entry point: clean up a job, optionally purging DB and filesystem state."""
    parser = argparse.ArgumentParser(description='Clean up a job')
    parser.add_argument('work', help='Job')
    parser.add_argument('--cred', help='cred file', type=convert_path, required=True)
    parser.add_argument('--db', action='store_true', help="Also remove from the jobs db")
    parser.add_argument('--fs', action='store_true', help="Also remove from the filesystem")
    parser.add_argument('--data_dir', help="The root of where data is saved.")
    args = parser.parse_args()

    # Build the jobs-db backend from the credential file, then run the cleanup.
    config = read_config_stream(args.cred)
    backend = create_store_backend(**config['jobs_db'])
    removed = cleanup(
        args.work,
        backend,
        purge_db=args.db,
        purge_fs=args.fs,
        data_dir=args.data_dir,
    )
    print("Results of this request:")
    print_table(removed)
def request_cleanup_http(url: str, jwt_token: str, work: str, purge_db: bool = False, purge_fs: bool = False) -> None:
    """Request the status over HTTP

    :param url: the base URL
    :param jwt_token: The token for last user auth
    :param work: The pipeline ID
    :param purge_db: Should we delete the pipeline from the jobs db too?
    :param purge_fs: Should we remove pipeline file system artifacts?
    """
    auth_header = {'Authorization': f'Bearer {jwt_token}'}
    query = {'db': purge_db, 'fs': purge_fs}
    resp = requests.delete(f'{url}/v1/pipelines/{work}', headers=auth_header, params=query)
    # A 401 means the token was rejected by the server.
    if resp.status_code == 401:
        raise ValueError("Invalid login")
    payload = resp.json()
    print("Results of this request:")
    print_table([_result2cleanup(item) for item in payload['cleanups']])
async def request_events(url: str, resource: str, namespace: str = 'default') -> None:
    """Get k8s events for some resource.

    :param url: The location of the server
    :param resource: The name of the resource you are asking about.
    :param namespace: The namespace of the resource you are asking about.
    """
    request = {
        APIField.COMMAND: 'EVENTS',
        APIField.REQUEST: {'resource': resource, 'namespace': namespace},
    }
    async with websockets.connect(url) as websocket:
        await websocket.send(json.dumps(request))
        message = json.loads(await websocket.recv())
    # Log and bail on server-side errors; print a table on success.
    if message[APIField.STATUS] == APIStatus.ERROR:
        LOGGER.error(message)
        return
    if message[APIField.STATUS] == APIStatus.OK:
        print_table([Event(**item) for item in message[APIField.RESPONSE]])
def request_events_http(url: str, resource: str) -> None:
    """Get events for a resource over HTTP

    :param url: The base URL
    :param resource: The resource ID
    """
    client = HttpClient(url)
    payload = client.request_events(resource)
    rows = [_result2event(item) for item in payload['events']]
    print_table(rows)
def request_nodes_http(url: str) -> None:
    """Request the status over HTTP

    :param url: the base URL
    """
    nodes = HttpClient(url).request_cluster_hw_status()
    # One table row per GPU, tagged with the host it lives on.
    gpu_rows = [
        _gpu2row(gpu, node['host'])
        for node in nodes
        for gpu in node['gpus']
    ]
    print_table(gpu_rows)
def request_nodes_http(url: str) -> None:
    """Request the status over HTTP

    :param url: the base URL
    """
    payload = requests.get(f'{url}/v1/nodes').json()
    rows = []
    # Flatten the per-node GPU lists into one row per GPU.
    for entry in payload['nodes']:
        host = entry['host']
        rows.extend(_gpu2row(gpu, host) for gpu in entry['gpus'])
    print_table(rows)
def request_events_http(url: str, resource: str) -> None:
    """Get events for a resource over HTTP

    :param url: The base URL
    :param resource: The resource ID
    """
    resp = requests.get(f'{url}/v1/resources/{resource}/events')
    # A 401 means the request was not authorized.
    if resp.status_code == 401:
        raise ValueError("Invalid login")
    payload = resp.json()
    rows = [_result2event(item) for item in payload['events']]
    print_table(rows)
def request_cleanup_http(url: str, jwt_token: str, work: str, purge_db: bool = False, purge_fs: bool = False) -> None:
    """Request the status over HTTP

    :param url: the base URL
    :param jwt_token: The token for last user auth
    :param work: The pipeline ID
    :param purge_db: Should we delete the pipeline from the jobs db too?
    :param purge_fs: Should we remove pipeline file system artifacts?
    """
    client = HttpClient(url, jwt_token=jwt_token)
    payload = client.delete_pipeline(work, purge_db, purge_fs)
    rows = [_result2cleanup(item) for item in payload['cleanups']]
    print("Results of this request:")
    print_table(rows)
async def request_cleanup(ws: str, work: str, purge_db: bool = False, purge_fs: bool = False):
    """Request the work is cleaned up by the server."""
    request = {
        APIField.COMMAND: 'CLEANUP',
        APIField.REQUEST: {'work': work, 'purge_db': purge_db, 'purge_fs': purge_fs},
    }
    async with websockets.connect(ws) as websocket:
        await websocket.send(json.dumps(request))
        reply = json.loads(await websocket.recv())
    # Log and bail on server-side errors; print the cleanup table on success.
    if reply[APIField.STATUS] == APIStatus.ERROR:
        LOGGER.error(reply)
        return
    if reply[APIField.STATUS] == APIStatus.OK:
        print("Results of this request:")
        print_table([Cleaned(**item) for item in reply[APIField.RESPONSE]])
def show_status(pipe: Pipeline, rows: List[Row], columns: Optional[Set[str]] = None, all_cols: bool = False) -> None:
    """Show the status for a pipeline.

    :param pipe: Information about the pipeline itself.
    :param rows: The rows of the table.
    :param columns: A set of columns to include in the output.
    :param all_cols: Include all the columns in the output.
    """
    # Map each known pipeline state to its display color; unknown states are uncolored.
    status_colors = {
        PipelineStatus.DONE: Colors.CYAN,
        PipelineStatus.RUNNING: Colors.GREEN,
        PipelineStatus.TERMINATED: Colors.RED,
        PipelineStatus.BUILDING: Colors.YELLOW,
    }
    status = None
    if pipe.status in status_colors:
        status = color(pipe.status, status_colors[pipe.status])

    width = max(len(pipe.label), len('Finished'))
    print(f'{pipe.label:<{width}} --> {status}')
    if pipe.submitted is not None:
        start = "Started"
        print(f'{start:<{width}} --> {pipe.submitted}')
    if pipe.completed is not None:
        fin = "Finished"
        print(f'{fin:<{width}} --> {pipe.completed}')
    print()

    # BUG FIX: the original mutated its inputs — `columns.update(DEFAULT_COLUMNS)`
    # altered the caller's set, and `columns = DEFAULT_COLUMNS` aliased the module
    # constant so a later `.update(rows[0]._fields)` permanently grew
    # DEFAULT_COLUMNS for every subsequent call. Work on a local copy instead.
    selected = set(DEFAULT_COLUMNS)
    if columns:
        selected.update(columns)
    if rows:
        if all_cols:
            selected.update(rows[0]._fields)
        print_table(rows, selected)