def __init__(self, user):
    """Bind per-user YARN API clients for this adapter."""
    self.user = user
    # RM clients are pooled: create/fetch the pool first, then check out a
    # connection bound to this user.
    self.resource_manager_api_pool = resource_manager_api.get_resource_manager_pool()
    self.resource_manager_api = self.resource_manager_api_pool.get(user.username)
    # Per-user MapReduce and History Server clients.
    self.mapreduce_api = mapreduce_api.get_mapreduce_api(user.username)
    self.history_server_api = history_server_api.get_history_server_api(user.username)
    # Spark HS does not support setuser, so no username is passed here.
    self.spark_history_server_api = spark_history_server_api.get_history_server_api()
def get_job(request, job_id):
    """Look up a job by id.

    If the job is not running yet but the Resource Manager reports it as
    ACCEPTED, fall back to a lightweight Application built from the RM's
    own record. Otherwise the ApplicationNotRunning exception propagates.
    """
    try:
        job = get_api(request.user, request.jt).get_job(jobid=job_id)
    except ApplicationNotRunning as e:  # 'as' form: works on Py2.6+ and Py3
        if e.job.get('state', '').lower() == 'accepted':
            # RM knows the app but no AM is up yet: wrap the RM record.
            rm_pool = resource_manager_api.get_resource_manager_pool()
            rm_api = rm_pool.get(request.user.username)
            job = Application(e.job, rm_api)
            # NOTE(review): rm_api is returned to the pool while the
            # Application may still reference it — confirm Application
            # does not use it after this point.
            rm_pool.put(rm_api)
        else:
            # Job has not yet been accepted by RM.
            # Bare raise preserves the original traceback ('raise e' loses it on Py2).
            raise
def get_job(request, job_id):
    """Look up a job by id, with special handling for kill requests.

    When the job is only ACCEPTED by the RM and the request is a kill
    (detected via 'kill' in request.path), return an Application wrapper
    so the kill can proceed. Any other not-yet-running job is redirected
    to the job_not_assigned view.
    """
    try:
        job = get_api(request.user, request.jt).get_job(jobid=job_id)
    except ApplicationNotRunning as e:  # 'as' form: works on Py2.6+ and Py3
        if e.job.get('state', '').lower() == 'accepted' and 'kill' in request.path:
            rm_pool = resource_manager_api.get_resource_manager_pool()
            rm_api = rm_pool.get(request.user.username)
            job = Application(e.job, rm_api)
            # NOTE(review): rm_api is returned to the pool while the
            # Application may still reference it — confirm this is safe.
            rm_pool.put(rm_api)
        else:
            # reverse() seems broken, using request.path but beware, it discards GET and POST info
            return job_not_assigned(request, job_id, request.path)
def kill(self, job_id):
    """Kill the YARN application backing *job_id* via the Resource Manager.

    MR job ids and YARN application ids share the same suffix, so the
    application id is derived by swapping the 'job' prefix.
    """
    app_id = job_id.replace('job', 'application')
    pool = get_resource_manager_pool()
    rmobj = pool.get(self.username)
    try:
        rmobj.kill(app_id)  # We need to call the RM
    finally:
        # Always return the connection, even when kill() raises —
        # otherwise the pooled RM client would leak.
        pool.put(rmobj)