class RunsApi(object):
    """Client-side wrapper around the Databricks Jobs *runs* REST endpoints.

    All calls are delegated to a ``JobsService`` built from the supplied
    API client; this class adds no logic of its own.
    """

    def __init__(self, api_client):
        # Single service instance shared by every method below.
        self.client = JobsService(api_client)

    def submit_run(self, json, version=None):
        """POST the caller-provided payload verbatim to /jobs/runs/submit."""
        # Goes through the raw perform_query so the payload shape is not
        # constrained by the service's own submit helper.
        return self.client.client.perform_query(
            'POST', '/jobs/runs/submit', data=json, version=version)

    def list_runs(self, job_id, active_only, completed_only, offset, limit,
                  version=None):
        """List runs of *job_id*, filtered by state and paged by offset/limit."""
        return self.client.list_runs(
            job_id, active_only, completed_only, offset, limit,
            version=version)

    def get_run(self, run_id, version=None):
        """Fetch the metadata of a single run."""
        return self.client.get_run(run_id, version=version)

    def cancel_run(self, run_id, version=None):
        """Request cancellation of the given run."""
        return self.client.cancel_run(run_id, version=version)

    def get_run_output(self, run_id, version=None):
        """Fetch the output of a (completed) run."""
        return self.client.get_run_output(run_id, version=version)
class JobsApi(object):
    """Client-side wrapper around the Databricks Jobs REST endpoints.

    This variant threads optional ``headers`` and ``version`` through to the
    underlying ``JobsService`` on every call.
    """

    def __init__(self, api_client):
        # Single service instance shared by every method below.
        self.client = JobsService(api_client)

    def create_job(self, json, headers=None, version=None):
        """POST the caller-provided payload verbatim to /jobs/create."""
        return self.client.client.perform_query(
            'POST', '/jobs/create', data=json, headers=headers,
            version=version)

    def list_jobs(self, job_type=None, expand_tasks=None, offset=None,
                  limit=None, headers=None, version=None):
        """List jobs; guarantees the response carries a 'jobs' key."""
        resp = self.client.list_jobs(
            job_type=job_type, expand_tasks=expand_tasks, offset=offset,
            limit=limit, headers=headers, version=version)
        # The backend omits 'jobs' entirely when there are none; normalize
        # so callers can always iterate resp['jobs'].
        resp.setdefault('jobs', [])
        return resp

    def delete_job(self, job_id, headers=None, version=None):
        """Delete the job identified by *job_id*."""
        return self.client.delete_job(job_id, headers=headers,
                                      version=version)

    def get_job(self, job_id, headers=None, version=None):
        """Fetch the full definition of a single job."""
        return self.client.get_job(job_id, headers=headers, version=version)

    def reset_job(self, json, headers=None, version=None):
        """POST the caller-provided payload verbatim to /jobs/reset."""
        return self.client.client.perform_query(
            'POST', '/jobs/reset', data=json, headers=headers,
            version=version)

    def run_now(self, job_id, jar_params, notebook_params, python_params,
                spark_submit_params, headers=None, version=None):
        """Trigger an immediate run of *job_id* with the given parameters."""
        return self.client.run_now(
            job_id, jar_params, notebook_params, python_params,
            spark_submit_params, headers=headers, version=version)

    def _list_jobs_by_name(self, name, headers=None):
        """Return every job whose settings name equals *name* exactly."""
        jobs = self.list_jobs(headers=headers)['jobs']
        return [job for job in jobs if job['settings']['name'] == name]
class JobsApi(object):
    """Client-side wrapper around the Databricks Jobs REST endpoints.

    Every method forwards to a ``JobsService`` built from the supplied
    API client.
    """

    def __init__(self, api_client):
        # Single service instance shared by every method below.
        self.client = JobsService(api_client)

    def create_job(self, json):
        """POST the caller-provided payload verbatim to /jobs/create."""
        return self.client.client.perform_query('POST', '/jobs/create',
                                                data=json)

    def list_jobs(self):
        """Return the raw list-jobs response from the service."""
        return self.client.list_jobs()

    def delete_job(self, job_id):
        """Delete the job identified by *job_id*."""
        return self.client.delete_job(job_id)

    def get_job(self, job_id):
        """Fetch the full definition of a single job."""
        return self.client.get_job(job_id)

    def reset_job(self, json):
        """POST the caller-provided payload verbatim to /jobs/reset."""
        return self.client.client.perform_query('POST', '/jobs/reset',
                                                data=json)

    def run_now(self, job_id, jar_params, notebook_params, python_params,
                spark_submit_params):
        """Trigger an immediate run of *job_id* with the given parameters."""
        return self.client.run_now(job_id, jar_params, notebook_params,
                                   python_params, spark_submit_params)

    def _list_jobs_by_name(self, name):
        """Return every job whose settings name equals *name* exactly.

        NOTE(review): assumes list_jobs() always returns a 'jobs' key —
        confirm against the backend for the empty-workspace case.
        """
        jobs = self.list_jobs()['jobs']
        return [job for job in jobs if job['settings']['name'] == name]
class JobsApi(object):
    """Minimal client-side wrapper around the Databricks Jobs REST endpoints.

    Pure pass-through: each method delegates directly to a ``JobsService``
    built from the supplied API client, with no response post-processing.
    """

    def __init__(self, api_client):
        # Single service instance shared by every method below.
        self.client = JobsService(api_client)

    def create_job(self, json):
        """POST the caller-provided payload verbatim to /jobs/create."""
        return self.client.client.perform_query('POST', '/jobs/create',
                                                data=json)

    def list_jobs(self):
        """Return the raw list-jobs response from the service."""
        return self.client.list_jobs()

    def delete_job(self, job_id):
        """Delete the job identified by *job_id*."""
        return self.client.delete_job(job_id)

    def get_job(self, job_id):
        """Fetch the full definition of a single job."""
        return self.client.get_job(job_id)

    def reset_job(self, json):
        """POST the caller-provided payload verbatim to /jobs/reset."""
        return self.client.client.perform_query('POST', '/jobs/reset',
                                                data=json)

    def run_now(self, job_id, jar_params, notebook_params, python_params,
                spark_submit_params):
        """Trigger an immediate run of *job_id* with the given parameters."""
        return self.client.run_now(job_id, jar_params, notebook_params,
                                   python_params, spark_submit_params)
def __init__(self, api_client):
    """Wrap *api_client* in a JobsService used for all subsequent REST calls."""
    # NOTE(review): enclosing class is not visible in this chunk; this is the
    # same constructor pattern as the JobsApi/RunsApi wrappers above.
    self.client = JobsService(api_client)
def apply(api_client, deploy_yml, configmap_yml, version):
    """Build a job payload from the deploy and configmap YAML files and
    reset (overwrite) the corresponding Databricks job.

    :param api_client: authenticated API client used to build the service
    :param deploy_yml: path to the deployment YAML file
    :param configmap_yml: path to the config-map YAML file
    :param version: version value forwarded into the payload builder
    """
    deploy_config = read_yml(deploy_yml)
    config_map = read_yml(configmap_yml)
    # The payload carries its own target job id, which reset_job also
    # needs as a separate argument.
    payload = _construct_job_payload(config_map, deploy_config, version)
    JobsService(api_client).reset_job(payload["job_id"], payload)
def get_jobs_client():
    """Return a JobsService bound to the default (module-configured) API client."""
    return JobsService(_get_api_client())
#Let's create Repos Service repos_service = ReposService(api_client) # Let's store the path for our new Repo _b = branch.replace('/','_') repo_path = f'{repos_path_prefix}_{_b}_{str(datetime.now().microsecond)}' print('Checking out the following repo: ', repo_path) # Let's clone our GitHub Repo in Databricks using Repos API repo = repos_service.create_repo(url=git_url, provider=provider, path=repo_path) try: repos_service.update_repo(id=repo['id'], branch=branch) #Let's create a jobs service to be able to start/stop Databricks jobs jobs_service = JobsService(api_client) notebook_task = {'notebook_path': repo_path + notebook_path} #new_cluster = json.loads(new_cluster_config) # Submit integration test job to Databricks REST API res = jobs_service.submit_run(run_name="xxx", existing_cluster_id=existing_cluster_id, notebook_task=notebook_task, ) run_id = res['run_id'] print(run_id) #Wait for the job to complete while True: status = jobs_service.get_run(run_id) print(status) result_state = status["state"].get("result_state", None) if result_state: