def neptune_ml_endpoint(args: argparse.Namespace, client: Client, output: widgets.Output, params):
    """Handle the ``endpoint`` sub-commands: create a new inference endpoint
    or report the status of an existing one, optionally blocking until the
    job reaches a terminal state.

    :param params: optional pre-built payload; when empty, a minimal payload
                   is assembled from the CLI arguments.
    :return: the job/status JSON dict, or an error string for an
             unrecognized sub-parser.
    """
    if args.which_sub == 'create':
        # No explicit payload supplied -> build a minimal one from the CLI args.
        if params is None or params == '' or params == {}:
            params = {"id": args.job_id, 'instanceType': args.instance_type}
        res = client.endpoints_create(args.model_job_id, **params)
        res.raise_for_status()
        job = res.json()
        if args.wait:
            return wait_for_endpoint(job['id'], client, output,
                                     args.wait_interval, args.wait_timeout)
        return job
    if args.which_sub == 'status':
        if args.wait:
            return wait_for_endpoint(args.job_id, client, output,
                                     args.wait_interval, args.wait_timeout)
        status_res = client.endpoints_status(args.job_id)
        status_res.raise_for_status()
        return status_res.json()
    return f'Sub parser "{args.which} {args.which_sub}" was not recognized'
def neptune_ml_training(args: argparse.Namespace, client: Client, output: widgets.Output, params):
    """Handle the ``training`` sub-commands: start a model-training job or
    report the status of an existing one, optionally blocking until the job
    reaches a terminal state.

    :param params: optional pre-built payload; when empty, one is assembled
                   from the CLI arguments.
    :return: the job/status JSON dict, or an error string for an
             unrecognized sub-parser.
    """
    if args.which_sub == 'start':
        # No explicit payload supplied -> build one from the CLI args.
        if params is None or params == '' or params == {}:
            params = {
                "id": args.job_id,
                "dataProcessingJobId": args.data_processing_id,
                "trainingInstanceType": args.instance_type,
            }
        res = client.modeltraining_start(args.job_id, args.s3_output_uri, **params)
        res.raise_for_status()
        job = res.json()
        if args.wait:
            return wait_for_training(job['id'], client, output,
                                     args.wait_interval, args.wait_timeout)
        return job
    if args.which_sub == 'status':
        if args.wait:
            return wait_for_training(args.job_id, client, output,
                                     args.wait_interval, args.wait_timeout)
        status_res = client.modeltraining_job_status(args.job_id)
        status_res.raise_for_status()
        return status_res.json()
    return f'Sub parser "{args.which} {args.which_sub}" was not recognized'
def modeltransform_wait(job_id: str, client: Client, output: widgets.Output,
                        wait_interval: int = DEFAULT_WAIT_INTERVAL,
                        wait_timeout: int = DEFAULT_WAIT_TIMEOUT):
    """Poll a modeltransform job until it reaches a terminal state.

    Renders progress into the supplied ipywidgets output, checking the job
    every ``wait_interval`` seconds for at most ``wait_timeout`` seconds.

    :return: the final status dict once the job is Completed/Failed/Stopped,
             or None if ``wait_timeout`` elapses first.
    """
    job_id_output = widgets.Output()
    update_status_output = widgets.Output()
    with output:
        display(job_id_output, update_status_output)

    with job_id_output:
        # BUG FIX: message previously said "endpoint creation job" (copied
        # from the endpoint wait helper); this waits on a modeltransform job.
        print(f'Wait called on modeltransform job {job_id}')

    with update_status_output:
        beginning_time = datetime.datetime.utcnow()
        while datetime.datetime.utcnow() - beginning_time < (
                datetime.timedelta(seconds=wait_timeout)):
            update_status_output.clear_output()
            status_res = client.modeltransform_status(job_id)
            status_res.raise_for_status()
            status = status_res.json()
            if status['status'] in ['Completed', 'Failed', 'Stopped']:
                print('modeltransform is finished')
                return status
            else:
                print(f'Status is {status["status"]}')
                print(f'Waiting for {wait_interval} before checking again...')
                time.sleep(wait_interval)
def wait_for_export(client: Client, export_url: str, job_id: str, output: widgets.Output,
                    export_ssl: bool = True, wait_interval: int = DEFAULT_WAIT_INTERVAL,
                    wait_timeout: int = DEFAULT_WAIT_TIMEOUT):
    """Poll an export job until it succeeds or fails.

    Renders progress into the supplied ipywidgets output, checking the job
    every ``wait_interval`` seconds for at most ``wait_timeout`` seconds.

    :return: the final status dict once the job has succeeded/failed, or
             None if ``wait_timeout`` elapses first.
    """
    header_out = widgets.Output()
    progress_out = widgets.Output()
    with output:
        display(header_out, progress_out)

    with header_out:
        print(f'Wait called on export job {job_id}')

    with progress_out:
        deadline = datetime.datetime.utcnow() + datetime.timedelta(seconds=wait_timeout)
        while datetime.datetime.utcnow() < deadline:
            progress_out.clear_output()
            print('Checking for latest status...')
            status_res = client.export_status(export_url, job_id, export_ssl)
            status_res.raise_for_status()
            export_status = status_res.json()
            if export_status['status'] in ['succeeded', 'failed']:
                print('Export is finished')
                return export_status
            print(f'Status is {export_status["status"]}')
            print(f'Waiting for {wait_interval} before checking again...')
            time.sleep(wait_interval)
def neptune_ml_export(args: argparse.Namespace, client: Client, output: widgets.Output, cell: str):
    """Handle the ``export`` sub-commands: start an export job or report the
    status of an existing one, optionally blocking until it finishes.

    :param cell: raw cell body; for ``start`` it must contain the JSON
                 payload (or an already-substituted ${var} reference).
    :return: the job/status JSON dict, or an error string.
    """
    export_ssl = not args.export_no_ssl
    if args.which_sub == 'start':
        if cell == '':
            return 'Cell body must have json payload or reference notebook variable using syntax ${payload_var}'
        export_job = neptune_ml_export_start(client, cell, args.export_url, export_ssl)
        if args.wait:
            return wait_for_export(client, args.export_url, export_job['jobId'],
                                   output, export_ssl, args.wait_interval, args.wait_timeout)
        else:
            return export_job
    elif args.which_sub == 'status':
        if args.wait:
            status = wait_for_export(client, args.export_url, args.job_id, output, export_ssl,
                                     args.wait_interval, args.wait_timeout)
        else:
            status_res = client.export_status(args.export_url, args.job_id, export_ssl)
            status_res.raise_for_status()
            status = status_res.json()
        return status
    else:
        # Consistency fix: the endpoint/training handlers report an
        # unrecognized sub-parser; previously this fell through and
        # silently returned None.
        return f'Sub parser "{args.which} {args.which_sub}" was not recognized'
def modeltransform_status(args: argparse.Namespace, client: Client, output: widgets.Output):
    """Return the status of a modeltransform job, optionally waiting for it
    to reach a terminal state first.

    NOTE(review): unlike the other status handlers, the wait path does not
    forward args.wait_interval / args.wait_timeout — confirm the defaults
    are intended for this subparser.
    """
    if not args.wait:
        res = client.modeltransform_status(args.job_id)
        res.raise_for_status()
        return res.json()
    return modeltransform_wait(args.job_id, client, output)
def neptune_ml_export_status(client: Client, export_url: str, job_id: str, export_ssl: bool = True):
    """Fetch the status of an export job and return the decoded JSON body."""
    response = client.export_status(export_url, job_id, export_ssl)
    response.raise_for_status()
    return response.json()
def modeltransform_start(client: Client, params):
    """Start a new modeltransform job.

    :param params: the command payload — either a dict used as-is, or a JSON
                   string that will be parsed into one.
    :return: the decoded JSON response of the create call.
    :raises: HTTP error via raise_for_status on a failed request.
    """
    # isinstance is the idiomatic type check; `type(x) is dict` would
    # wrongly route dict subclasses through json.loads.
    data = params if isinstance(params, dict) else json.loads(params)
    res: Response = client.modeltransform_create(**data)
    res.raise_for_status()
    return res.json()
def neptune_ml_export_start(client: Client, params, export_url: str, export_ssl: bool = True):
    """Kick off a Neptune export job.

    :param params: export payload — a dict used as-is, or a JSON string that
                   will be parsed into one.
    :return: the decoded JSON job description.
    :raises: HTTP error via raise_for_status on a failed request.
    """
    # isinstance is the idiomatic type check; `type(x) is str` would
    # wrongly skip parsing for str subclasses.
    if isinstance(params, str):
        params = json.loads(params)
    export_res = client.export(export_url, params, export_ssl)
    export_res.raise_for_status()
    job = export_res.json()
    return job
def long_running_gremlin_query(c: Client, query: str):
    """Run a Gremlin query through the client and hand back the raw result."""
    return c.gremlin_query(query)
def modeltransform_stop(args: argparse.Namespace, client: Client):
    """Cancel a modeltransform job and return a hint on how to check its status."""
    res = client.modeltransform_stop(args.job_id)
    res.raise_for_status()
    return f'Job cancelled, you can check its status by running the command "%neptune_ml modeltransform status {args.job_id}"'
def modeltransform_list(client: Client):
    """Return the decoded JSON listing of modeltransform jobs."""
    response = client.modeltransform_list()
    response.raise_for_status()
    return response.json()
def long_running_sparql_query(c: Client, query: str):
    """Run a SPARQL query through the client and hand back the raw result."""
    return c.sparql(query)
def long_running_gremlin_query(c: Client, query: str):
    """Assert that executing the given Gremlin query raises GremlinServerError.

    NOTE(review): this redefines a name also used for the non-pytest variant;
    if both live in the same module, the later definition wins — verify intent.
    """
    with pytest.raises(GremlinServerError):
        c.gremlin_query(query)