def logs(self, appid, app_type, log_name, is_embeddable=False):
  logs = ''
  try:
    if app_type == 'MAPREDUCE' or app_type == 'Oozie Launcher':
      if log_name == 'default':
        response = job_single_logs(MockDjangoRequest(self.user), job=appid)
        logs = json.loads(response.content).get('logs')
        if logs and len(logs) == 4:
          logs = logs[1]
      else:
        response = job_attempt_logs_json(
            MockDjangoRequest(self.user), job=appid, name=log_name, is_embeddable=is_embeddable)
        logs = json.loads(response.content).get('log')
    elif app_type == 'SPARK':
      response = job_executor_logs(MockDjangoRequest(self.user), job=appid, name=log_name)
      logs = json.loads(response.content).get('log')
    else:
      logs = None
  except PopupException as e:
    LOG.warn('No task attempt found for logs: %s' % smart_str(e))

def logs(self, appid, app_type, log_name, is_embeddable=False):
  logs = ''
  logs_list = []
  try:
    if app_type == 'YarnV2' or app_type == 'MAPREDUCE':
      if log_name == 'default':
        response = job_single_logs(MockDjangoRequest(self.user), job=appid)
        parseResponse = json.loads(response.content)
        logs = parseResponse.get('logs')
        logs_list = parseResponse.get('logsList')
        if logs and len(logs) == 4:
          if app_type == 'YarnV2' and logs[0]:  # logs[0] is diagnostics
            logs = logs[0]
          else:
            logs = logs[1]
      else:
        response = job_attempt_logs_json(
            MockDjangoRequest(self.user), job=appid, name=log_name, is_embeddable=is_embeddable)
        logs = json.loads(response.content).get('log')
    elif app_type == 'SPARK':
      response = job_executor_logs(MockDjangoRequest(self.user), job=appid, name=log_name)
      logs = json.loads(response.content).get('log')
    else:
      logs = None
  except PopupException as e:
    LOG.warn('No task attempt found for logs: %s' % smart_str(e))
  return {'logs': logs, 'logsList': logs_list}

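# A minimal usage sketch for the variant above, assuming it is a method on a
# jobbrowser API helper exposed here as "api" (a hypothetical instance name) and
# that the appid value is merely illustrative; it shows how a caller might
# consume the returned dict.
result = api.logs('application_1234567890123_0001', 'MAPREDUCE', 'default', is_embeddable=True)
if result['logs']:
  LOG.debug('Fetched %d characters of logs' % len(result['logs']))
for entry in result['logsList'] or []:
  LOG.debug('Additional log entry returned by the endpoint: %s' % entry)
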
def get_log(request, oozie_workflow, make_links=True, log_start_pattern=None, log_end_pattern=None):
  logs = {}
  is_really_done = False

  for action in oozie_workflow.get_working_actions():
    try:
      if action.externalId:
        data = job_single_logs(request, **{'job': action.externalId})
        if data and 'logs' in data:
          action_logs = data['logs'][1]

          if log_start_pattern:
            re_log_start = re.compile(log_start_pattern, re.M | re.DOTALL)
            if re_log_start.search(action_logs):
              action_logs = re.search(re_log_start, action_logs).group(1).strip()
            else:
              LOG.debug('Failed to find given start log pattern in logs: %s' % log_start_pattern)

          if make_links:
            action_logs = LinkJobLogs._make_links(action_logs)

          logs[action.name] = action_logs

          if log_end_pattern:
            re_log_end = re.compile(log_end_pattern)
            is_really_done = re_log_end.search(action_logs) is not None
            if is_really_done and not action_logs:
              LOG.warn('Unable to scrape full logs, try increasing the jobbrowser log_offset configuration value.')
    except Exception as e:
      LOG.error('An error occurred while watching the job running: %(error)s' % {'error': e})
      is_really_done = True

def logs(self, appid, app_type, log_name, is_embeddable=False):
  logs = ''
  logs_list = []
  try:
    if app_type == 'YarnV2' or app_type == 'MAPREDUCE':
      if log_name == 'default':
        response = job_single_logs(MockDjangoRequest(self.user), job=appid)
        parseResponse = json.loads(response.content)
        logs = parseResponse.get('logs')
        logs_list = parseResponse.get('logsList')
        if logs and len(logs) == 4:
          if app_type == 'YarnV2' and logs[0]:  # logs[0] is diagnostics
            logs = logs[0]
          else:
            logs = logs[1]
      else:
        response = job_attempt_logs_json(
            MockDjangoRequest(self.user), job=appid, name=log_name, is_embeddable=is_embeddable)
        logs = json.loads(response.content).get('log')
    elif app_type == 'SPARK':
      response = job_executor_logs(MockDjangoRequest(self.user), job=appid, name=log_name)
      logs = json.loads(response.content).get('log')
    else:
      logs = None
  except PopupException as e:
    LOG.warn('No task attempt found for logs: %s' % smart_str(e))

def get_log(self, request, oozie_workflow):
  logs = {}

  for action in oozie_workflow.get_working_actions():
    try:
      if action.externalId:
        log = job_single_logs(request, **{'job': action.externalId})
        if log:
          logs[action.name] = self._match_logs(log['logs'][1])
    except Exception as e:
      LOG.error('An error happened while watching the demo running: %(error)s' % {'error': e})

def get_log(self, request, oozie_workflow):
  logs = {}

  for action in oozie_workflow.get_working_actions():
    try:
      if action.externalId:
        data = job_single_logs(request, **{'job': action.externalId})
        if data:
          matched_logs = self._match_logs(data)
          logs[action.name] = self._make_links(matched_logs)
    except Exception as e:
      LOG.error('An error happened while watching the demo running: %(error)s' % {'error': e})

def get_log(request, oozie_workflow, make_links=True, log_start_pattern=None, log_end_pattern=None):
  logs = {}
  is_really_done = False

  for action in oozie_workflow.get_working_actions():
    try:
      if action.externalId:
        data = job_single_logs(request, **{'job': action.externalId})
        if data and 'logs' in data:
          action_logs = data['logs'][1]

          if log_start_pattern:
            re_log_start = re.compile(log_start_pattern, re.M | re.DOTALL)
            if re_log_start.search(action_logs):
              action_logs = re.search(re_log_start, action_logs).group(1).strip()
            else:
              LOG.debug('Failed to find given start log pattern in logs: %s' % log_start_pattern)

          if log_end_pattern:
            re_log_end = re.compile(log_end_pattern)
            is_really_done = re_log_end.search(action_logs) is not None or oozie_workflow.status == 'KILLED'
            if is_really_done and not action_logs:
              LOG.warning('Unable to scrape full logs, try increasing the jobbrowser log_offset configuration value.')

          if make_links:
            action_logs = LinkJobLogs._make_links(action_logs)

          logs[action.name] = action_logs
    except Exception:
      LOG.exception('An error occurred while watching the job running')
      is_really_done = True

  workflow_actions = _get_workflow_actions(oozie_workflow, logs, is_really_done)

  return logs, workflow_actions, is_really_done

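# A minimal caller sketch for the get_log() variant above; "request" and
# "oozie_workflow" are assumed to come from the enclosing Django view, and the
# start/end patterns are illustrative placeholders, not the actual Hue defaults.
logs, workflow_actions, is_really_done = get_log(
    request,
    oozie_workflow,
    make_links=True,
    log_start_pattern=r'Starting the execution(.+)',  # hypothetical, must expose group(1)
    log_end_pattern=r'Run completed'                  # hypothetical end-of-run marker
)
for action_name, action_logs in logs.items():
  LOG.debug('Scraped %d characters of logs for action %s' % (len(action_logs or ''), action_name))
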
def logs(self, appid, app_type, log_name):
  if app_type == 'MAPREDUCE':
    if log_name == 'default':
      response = job_single_logs(MockDjangoRequest(self.user), job=appid)
      logs = json.loads(response.content).get('logs')
    else:
      response = job_attempt_logs_json(MockDjangoRequest(self.user), job=appid, name=log_name)
      logs = json.loads(response.content).get('log')
  else:
    logs = None
  return {'logs': logs}

def get_log(self, request, oozie_workflow):
  logs = {}
  is_really_done = False

  for action in oozie_workflow.get_working_actions():
    try:
      if action.externalId:
        data = job_single_logs(request, **{'job': action.externalId})
        if data:
          matched_logs = self._match_logs(data)
          logs[action.name] = self._make_links(matched_logs)
          is_really_done = OozieApi.RE_LOG_END.search(data['logs'][1]) is not None
    except Exception as e:
      LOG.error('An error happened while watching the demo running: %(error)s' % {'error': e})

def logs(self, appid, app_type, log_name):
  logs = ''
  try:
    if app_type == 'MAPREDUCE':
      if log_name == 'default':
        response = job_single_logs(MockDjangoRequest(self.user), job=appid)
        logs = json.loads(response.content).get('logs')
        if logs and len(logs) == 4:
          logs = logs[3]
      else:
        response = job_attempt_logs_json(MockDjangoRequest(self.user), job=appid, name=log_name)
        logs = json.loads(response.content).get('log')
    else:
      logs = None
  except PopupException as e:
    LOG.warn('No task attempt found for logs: %s' % e)

def get_log(self, request, oozie_workflow): logs = {} is_really_done = False for action in oozie_workflow.get_working_actions(): try: if action.externalId: data = job_single_logs(request, **{"job": action.externalId}) if data: matched_logs = self._match_logs(data) logs[action.name] = LinkJobLogs._make_links(matched_logs) is_really_done = OozieApi.RE_LOG_END.search(data["logs"][1]) is not None except Exception, e: LOG.error("An error happen while watching the job running: %(error)s" % {"error": e}) is_really_done = True
def logs(self, appid, app_type, log_name, is_embeddable=False):
  logs = ''
  try:
    if app_type == 'MAPREDUCE' or app_type == 'Oozie Launcher':
      if log_name == 'default':
        response = job_single_logs(MockDjangoRequest(self.user), job=appid)
        logs = json.loads(response.content).get('logs')
        if logs and len(logs) == 4:
          logs = logs[1]
      else:
        response = job_attempt_logs_json(MockDjangoRequest(self.user), job=appid, name=log_name, is_embeddable=is_embeddable)
        logs = json.loads(response.content).get('log')
    elif app_type == 'SPARK':
      response = job_executor_logs(MockDjangoRequest(self.user), job=appid, name=log_name)
      logs = json.loads(response.content).get('log')
    else:
      logs = None
  except PopupException as e:
    LOG.warn('No task attempt found for logs: %s' % smart_str(e))

def _get_syslog(self, job_id):
  # TODO: Refactor this (and one in oozie_batch.py) to move to jobbrowser
  syslog = None

  q = QueryDict(self.request.GET, mutable=True)
  q['format'] = 'python'  # Hack for triggering the good section in single_task_attempt_logs
  self.request.GET = q

  attempts = 0
  max_attempts = 10
  while syslog is None and attempts < max_attempts:
    data = job_single_logs(self.request, **{'job': job_id})
    if data:
      log_output = data['logs'][3]
      if log_output.startswith('Unable to locate'):
        LOG.debug('Failed to get job attempt logs, possibly due to YARN archiving job to JHS. Will sleep and try again.')
        time.sleep(2.0)
      else:
        syslog = log_output
    attempts += 1

  return syslog

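# A minimal sketch of how the retry helper above might be called, assuming it
# lives on a class that carries a Django-style self.request ("builder" below is
# a hypothetical instance and the job id is illustrative); it polls up to 10
# times, sleeping 2 seconds between attempts while the job history server is
# still archiving the logs.
syslog = builder._get_syslog('job_1234567890123_0001')
if syslog is None:
  LOG.warn('Could not fetch syslog for the job after several attempts')
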
def get_log(self, request, oozie_workflow):
  logs = {}
  is_really_done = False

  for action in oozie_workflow.get_working_actions():
    try:
      if action.externalId:
        data = job_single_logs(request, **{'job': action.externalId})
        if data and 'logs' in data:
          matched_logs = self._match_logs(data)
          if matched_logs:
            logs[action.name] = LinkJobLogs._make_links(matched_logs)
          is_really_done = OozieApi.RE_LOG_END.search(data['logs'][1]) is not None
          if is_really_done and not matched_logs:
            LOG.warn('Unable to scrape full pig logs, try increasing the jobbrowser log_offset configuration value.')
    except Exception as e:
      LOG.error('An error occurred while watching the job running: %(error)s' % {'error': e})
      is_really_done = True

def get_log(self, request, oozie_workflow): logs = {} is_really_done = False for action in oozie_workflow.get_working_actions(): try: if action.externalId: data = job_single_logs(request, **{"job": action.externalId}) if data and "logs" in data: matched_logs = self._match_logs(data) if matched_logs: logs[action.name] = LinkJobLogs._make_links(matched_logs) is_really_done = OozieApi.RE_LOG_END.search(data["logs"][1]) is not None if is_really_done and not matched_logs: LOG.warn( "Unable to scrape full pig logs, try increasing the jobbrowser log_offset configuration value." ) except Exception, e: LOG.error("An error occurred while watching the job running: %(error)s" % {"error": e}) is_really_done = True