class RunsApi(object):
    """Thin wrapper around the Databricks Jobs "runs" REST endpoints.

    All calls are delegated to a ``JobsService`` constructed from the
    supplied API client.
    """

    def __init__(self, api_client):
        # JobsService owns the low-level HTTP client used by every method.
        self.client = JobsService(api_client)

    def submit_run(self, json, version=None):
        """POST the given payload to ``/jobs/runs/submit``.

        NOTE: the parameter name ``json`` shadows the stdlib module; it is
        kept for backward compatibility with keyword-argument callers.
        """
        return self.client.client.perform_query(
            'POST', '/jobs/runs/submit', data=json, version=version)

    def list_runs(self, job_id, active_only, completed_only, offset, limit,
                  version=None):
        """List runs of ``job_id``, filtered and paginated by the service."""
        return self.client.list_runs(
            job_id, active_only, completed_only, offset, limit,
            version=version)

    def get_run(self, run_id, version=None):
        """Fetch the metadata of a single run."""
        return self.client.get_run(run_id, version=version)

    def cancel_run(self, run_id, version=None):
        """Request cancellation of the given run."""
        return self.client.cancel_run(run_id, version=version)

    def get_run_output(self, run_id, version=None):
        """Fetch the output of a completed run."""
        return self.client.get_run_output(run_id, version=version)
# Clone the GitHub repo into the Databricks workspace via the Repos API.
repo = repos_service.create_repo(url=git_url, provider=provider, path=repo_path)
try:
    # Check out the branch under test in the freshly cloned repo.
    repos_service.update_repo(id=repo['id'], branch=branch)

    # Jobs service lets us submit a one-off run and poll its state.
    jobs_service = JobsService(api_client)
    notebook_task = {'notebook_path': repo_path + notebook_path}

    # Submit the integration-test notebook as a one-time run on an
    # existing cluster.
    res = jobs_service.submit_run(
        run_name="xxx",
        existing_cluster_id=existing_cluster_id,
        notebook_task=notebook_task,
    )
    run_id = res['run_id']
    print(run_id)

    # Poll until the run reaches a terminal state; result_state is only
    # present once the run has finished.
    while True:
        status = jobs_service.get_run(run_id)
        print(status)
        result_state = status["state"].get("result_state", None)
        if result_state:
            print(result_state)
            # Raise (rather than assert) so the failure still fires
            # under `python -O`, which strips assert statements.
            if result_state != "SUCCESS":
                raise RuntimeError(
                    "Integration test run %s finished with state %s"
                    % (run_id, result_state))
            break
        time.sleep(5)
finally:
    # Always delete the cloned repo, even when the run fails.
    repos_service.delete_repo(id=repo['id'])