def Run(self, args):
  """Cancel a running Dataproc job after confirming with the user.

  Builds the Cancel request from the parsed job reference, prompts for
  confirmation, issues the cancellation, and blocks until the job reaches
  the CANCELLED state.
  """
  dataproc = self.context['dataproc_client']
  msgs = self.context['dataproc_messages']
  ref = util.ParseJob(args.id, self.context)

  cancel_request = msgs.DataprocProjectsJobsCancelRequest(
      projectId=ref.projectId,
      jobId=ref.jobId,
      cancelJobRequest=msgs.CancelJobRequest())

  # TODO(user) Check if Job is still running and fail or handle 401.

  # Abort unless the user explicitly confirms the destructive action.
  confirmed = console_io.PromptContinue(
      message="The job '{0}' will be killed.".format(args.id))
  if not confirmed:
    raise exceptions.ToolException('Cancellation aborted by user.')

  job = dataproc.projects_jobs.Cancel(cancel_request)
  log.status.Print(
      'Job cancellation initiated for [{0}].'.format(ref.jobId))

  # Cancellation is asynchronous server-side; wait until it takes effect.
  job = util.WaitForJobTermination(
      job,
      self.context,
      message='Waiting for job cancellation',
      goal_state=msgs.JobStatus.StateValueValuesEnum.CANCELLED)
  log.status.Print('Killed [{0}].'.format(ref))

  return job
def Run(self, args):
  """Fetch and return the resource describing a single Dataproc job."""
  dataproc = self.context['dataproc_client']
  ref = util.ParseJob(args.id, self.context)
  return dataproc.projects_jobs.Get(ref.Request())
def Run(self, args):
  """This is what gets called when the user runs this command.

  Stages any local job files to GCS, submits the job to the target
  cluster, and (unless --async was given) waits for completion while
  streaming the driver log.

  Raises:
    exceptions.HttpException: if the cluster lookup or job submission
        fails with an HTTP error.
  """
  client = self.context['dataproc_client']
  messages = self.context['dataproc_messages']

  job_id = util.GetJobId(args.id)
  job_ref = util.ParseJob(job_id, self.context)

  # Collect the user's file arguments (jars, py files, etc.) by category.
  files_by_type = {}
  self.PopulateFilesByType(args, files_by_type)

  # The cluster must exist before we stage files or submit against it.
  cluster_ref = util.ParseCluster(args.cluster, self.context)
  request = cluster_ref.Request()
  try:
    cluster = client.projects_clusters.Get(request)
  except apitools_base.HttpError as error:
    raise exceptions.HttpException(util.FormatHttpError(error))

  self.ValidateAndStageFiles(
      bucket=args.bucket, cluster=cluster, files_by_type=files_by_type)

  job = messages.Job(
      reference=messages.JobReference(
          projectId=job_ref.projectId, jobId=job_ref.jobId),
      placement=messages.JobPlacement(clusterName=args.cluster))
  self.ConfigureJob(job, args, files_by_type)

  request = messages.DataprocProjectsJobsSubmitRequest(
      projectId=job.reference.projectId,
      submitJobRequest=messages.SubmitJobRequest(job=job))
  try:
    job = client.projects_jobs.Submit(request)
  except apitools_base.HttpError as error:
    raise exceptions.HttpException(util.FormatHttpError(error))
  log.status.Print('Job [{0}] submitted.'.format(job_id))

  # Fix: 'async' is a reserved keyword in Python 3, so 'args.async' is a
  # syntax error; read the flag by name with getattr instead.
  if not getattr(args, 'async'):
    # Synchronous mode: block until the job reaches DONE, mirroring the
    # driver output to the console as it runs.
    job = util.WaitForJobTermination(
        job,
        self.context,
        message='Waiting for job completion',
        goal_state=messages.JobStatus.StateValueValuesEnum.DONE,
        stream_driver_log=True)
    log.status.Print('Job [{0}] finished successfully.'.format(job_id))

  return job
def Run(self, args):
  """Delete a Dataproc job after interactive confirmation.

  Prompts the user, issues the Delete call, then polls until the job
  resource is actually gone before reporting the deletion.
  """
  dataproc = self.context['dataproc_client']
  msgs = self.context['dataproc_messages']
  ref = util.ParseJob(args.id, self.context)

  delete_request = msgs.DataprocProjectsJobsDeleteRequest(
      projectId=ref.projectId, jobId=ref.jobId)

  # Require explicit confirmation before the irreversible delete.
  if not console_io.PromptContinue(
      message="The job '{0}' will be deleted.".format(args.id)):
    raise exceptions.ToolException('Deletion aborted by user.')

  dataproc.projects_jobs.Delete(delete_request)

  # Deletion is asynchronous; wait until Get stops finding the resource.
  util.WaitForResourceDeletion(
      dataproc.projects_jobs.Get, ref, message='Waiting for job deletion')

  log.DeletedResource(ref)
def Run(self, args):
  """Wait for an existing job to complete, streaming its driver log.

  Looks up the job, then blocks until it reaches the DONE state while
  mirroring driver output to the console.
  """
  dataproc = self.context['dataproc_client']
  msgs = self.context['dataproc_messages']
  ref = util.ParseJob(args.id, self.context)

  job = dataproc.projects_jobs.Get(ref.Request())

  # TODO(user) Check if Job is still running and fail or handle 401.
  job = util.WaitForJobTermination(
      job,
      self.context,
      message='Waiting for job completion',
      goal_state=msgs.JobStatus.StateValueValuesEnum.DONE,
      stream_driver_log=True)
  log.status.Print('Job [{0}] finished successfully.'.format(args.id))

  return job