Example #1
0
    def get_jobs(self, notebook, snippet, logs):
        """Build Job Browser link entries for the jobs referenced in *logs*.

        For Hive snippets, every Hadoop job id parsed out of the execution
        log becomes an entry pointing at the single-job view.  For Impala
        snippets (when the query browser is enabled) a single entry for the
        query itself is returned, including a completion percentage.
        Any other snippet type yields an empty list.
        """
        jobs = []
        snippet_type = snippet['type']

        if snippet_type == 'hive':
            engine = self._get_hive_execution_engine(notebook, snippet)
            parsed = parse_out_jobs(logs, engine=engine, with_state=True)

            for job in parsed:
                job_id = job.get('job_id', '')
                jobs.append({
                    'name': job_id,
                    'url': reverse('jobbrowser.views.single_job',
                                   kwargs={'job': job_id}),
                    'started': job.get('started', False),
                    'finished': job.get('finished', False)
                })
        elif snippet_type == 'impala' and ENABLE_QUERY_BROWSER.get():
            guid = snippet['result']['handle']['guid']
            query_id = "%x:%x" % struct.unpack(b"QQ", guid)

            # A query that is already available/successful is 100% done;
            # otherwise cap the reported progress at 99 until it finishes.
            if snippet['status'] in ('available', 'success'):
                progress = 100
            else:
                progress = min(self.progress(snippet, logs), 99)

            jobs.append({
                'name': query_id,
                'url': '/hue/jobbrowser#!id=%s' % query_id,
                'started': True,
                'finished': False,
                'percentJob': progress
            })

        return jobs
Example #2
0
  def get_jobs(self, notebook, snippet, logs):
    """Collect Job Browser entries for the work spawned by this snippet.

    Hive snippets contribute one entry per Hadoop job id found in the log
    (plus, when the Hive query browser is available, one entry per parsed
    query).  Impala snippets contribute a single entry for the query itself
    with a progress percentage.  Other snippet types produce no entries.
    """
    jobs = []

    if snippet['type'] == 'hive':
      engine = self._get_hive_execution_engine(notebook, snippet)
      hive_jobs = parse_out_jobs(logs, engine=engine, with_state=True)
      hive_queries = parse_out_queries(logs, engine=engine, with_state=True)

      for entry in hive_jobs:
        jobs.append({
          'name': entry.get('job_id', ''),
          'url': reverse('jobbrowser.views.single_job', kwargs={'job': entry.get('job_id', '')}),
          'started': entry.get('started', False),
          'finished': entry.get('finished', False)
        })

      if has_hive_query_browser:
        for entry in hive_queries:
          jobs.append({
            'name': entry.get('job_id', ''),
            'url': 'api/job/queries-hive/',
            'started': entry.get('started', False),
            'finished': entry.get('finished', False)
          })
    elif snippet['type'] == 'impala' and has_query_browser:
      query_id = unpack_guid_base64(snippet['result']['handle']['guid'])
      # Cap at 99% until the query reaches a terminal successful state.
      done = snippet['status'] in ('available', 'success')
      progress = 100 if done else min(self.progress(notebook, snippet, logs), 99)
      jobs.append({
        'name': query_id,
        'url': '/hue/jobbrowser#!id=%s' % query_id,
        'started': True,
        'finished': False,
        'percentJob': progress
      })

    return jobs
Example #3
0
File: api.py  Project: sandredd/hue-1
def watch_query_refresh_json(request, id):
  """Poll the state, logs and job links of query *id* and return them as JSON.

  Refreshes the query handle/state unless the caller asked to advance to the
  next statement (POST flag 'next'), advances multi-statement queries when the
  current statement finished successfully without producing a result set,
  then fetches the latest log fragment and reports success/failure status.

  Raises QueryServerException when advancing to the next statement fails on
  the query server; other advance errors are logged and the previous handle
  is reused.
  """
  query_history = authorized_get_query_history(request, id, must_exist=True)
  db = dbms.get(request.user, query_history.get_query_server_config())

  if not request.POST.get('next'): # We need this as multi query would fail as current query is closed
    handle, state = _get_query_handle_and_state(query_history)
    query_history.save_state(state)

  # Go to next statement if asked to continue or when a statement with no dataset finished.
  try:
    if request.POST.get('next') or (not query_history.is_finished() and query_history.is_success() and not query_history.has_results):
      close_operation(request, id)
      query_history = db.execute_next_statement(query_history, request.POST.get('query-query'))
      handle, state = _get_query_handle_and_state(query_history)
  except QueryServerException:
    # Bare raise preserves the original traceback (avoids re-raising the bound name).
    raise
  except Exception as ex:
    LOG.exception(ex)
    handle, state = _get_query_handle_and_state(query_history)

  try:
    start_over = request.POST.get('log-start-over') == 'true'
    log = db.get_log(handle, start_over=start_over)
  except Exception as ex:
    # Best effort: surface the fetch failure as the log text instead of failing the poll.
    log = str(ex)

  jobs = parse_out_jobs(log)
  job_urls = massage_job_urls_for_json(jobs)

  result = {
    'status': -1,
    'log': log,
    'jobs': jobs,
    'jobUrls': job_urls,
    'isSuccess': query_history.is_success(),
    'isFailure': query_history.is_failure(),
    'id': id,
    'statement': query_history.get_current_statement(),
    'watch_url': reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id}),
    'oldLogsApi': USE_GET_LOG_API.get()
  }

  # Run time error: distinguish a user cancel (status 0) from a server-side
  # failure, preferring the server's errorMessage when one is attached.
  if query_history.is_failure():
    res = db.get_operation_status(handle)
    if query_history.is_canceled(res):
      result['status'] = 0
    elif hasattr(res, 'errorMessage') and res.errorMessage:
      result['message'] = res.errorMessage
    else:
      result['message'] = _('Bad status for request %s:\n%s') % (id, res)
  else:
    result['status'] = 0

  return JsonResponse(result)
Example #4
0
  def get_jobs(self, notebook, snippet, logs):
    """Return Job Browser link entries for Hadoop jobs found in *logs*.

    Only Hive snippets are inspected; every other snippet type yields an
    empty list.  Each parsed job id becomes a dict pointing at the
    single-job view, with its started/finished flags.
    """
    if snippet['type'] != 'hive':
      return []

    engine = self._get_hive_execution_engine(notebook, snippet)
    found = parse_out_jobs(logs, engine=engine, with_state=True)

    return [
      {
        'name': item.get('job_id', ''),
        'url': reverse('jobbrowser.views.single_job', kwargs={'job': item.get('job_id', '')}),
        'started': item.get('started', False),
        'finished': item.get('finished', False)
      }
      for item in found
    ]
Example #5
0
  def get_jobs(self, notebook, snippet, logs):
    """Map Hadoop job ids parsed from a Hive snippet's *logs* to Job Browser links.

    Non-Hive snippets produce an empty list.  Each entry carries the job id,
    the single-job view URL and the job's started/finished state flags.
    """
    jobs = []

    if snippet['type'] == 'hive':
      engine = self._get_hive_execution_engine(notebook, snippet)
      for parsed_job in parse_out_jobs(logs, engine=engine, with_state=True):
        job_id = parsed_job.get('job_id', '')
        jobs.append({
          'name': job_id,
          'url': reverse('jobbrowser.views.single_job', kwargs={'job': job_id}),
          'started': parsed_job.get('started', False),
          'finished': parsed_job.get('finished', False)
        })

    return jobs
Example #6
0
      close_operation(request, id)
      query_history = db.execute_next_statement(query_history, request.POST.get('query-query'))
      handle, state = _get_query_handle_and_state(query_history)
  except QueryServerException, ex:
    raise ex
  except Exception, ex:
    LOG.exception(ex)
    handle, state = _get_query_handle_and_state(query_history)

  try:
    start_over = request.POST.get('log-start-over') == 'true'
    log = db.get_log(handle, start_over=start_over)
  except Exception, ex:
    log = str(ex)

  jobs = parse_out_jobs(log)
  job_urls = massage_job_urls_for_json(jobs)

  result = {
    'status': -1,
    'log': log,
    'jobs': jobs,
    'jobUrls': job_urls,
    'isSuccess': query_history.is_success(),
    'isFailure': query_history.is_failure(),
    'id': id,
    'statement': query_history.get_current_statement(),
    'watch_url': reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id}),
    'oldLogsApi': USE_GET_LOG_API.get()
  }
Example #7
0
File: api.py  Project: cloudera/hue
      close_operation(request, id)
      query_history = db.execute_next_statement(query_history, request.POST.get('query-query'))
      handle, state = _get_query_handle_and_state(query_history)
  except QueryServerException, ex:
    raise ex
  except Exception, ex:
    LOG.exception(ex)
    handle, state = _get_query_handle_and_state(query_history)

  try:
    start_over = request.POST.get('log-start-over') == 'true'
    log = db.get_log(handle, start_over=start_over)
  except Exception, ex:
    log = str(ex)

  jobs = parse_out_jobs(log)
  job_urls = massage_job_urls_for_json(jobs)

  result = {
    'status': -1,
    'log': log,
    'jobs': jobs,
    'jobUrls': job_urls,
    'isSuccess': query_history.is_success(),
    'isFailure': query_history.is_failure(),
    'id': id,
    'statement': query_history.get_current_statement(),
    'watch_url': reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id}),
    'oldLogsApi': USE_GET_LOG_API.get()
  }