def test_format_duration_in_millis():
    """Sanity-check format_duration_in_millis on an exact hour, zero, and a mixed duration."""
    one_hour_ms = 60 * 60 * 1000
    assert_equal("1h:0m:0s", format_duration_in_millis(one_hour_ms))
    assert_equal("0s", format_duration_in_millis(0))
    # 1 day + 12 hours + 24 minutes + 32 seconds, expressed in milliseconds.
    mixed_ms = 24 * 60 * 60 * 1000 + 12 * 60 * 60 * 1000 + 24 * 60 * 1000 + 32 * 1000
    assert_equal("1d:12h:24m:32s", format_duration_in_millis(mixed_ms))
def test_format_duration_in_millis():
    """format_duration_in_millis renders millisecond counts as d/h/m/s strings."""
    cases = [
        ("1h:0m:0s", 60 * 60 * 1000),
        ("0s", 0),
        ("1d:12h:24m:32s", 24 * 60 * 60 * 1000 + 12 * 60 * 60 * 1000 + 24 * 60 * 1000 + 32 * 1000),
    ]
    for expected, millis in cases:
        assert_equal(expected, format_duration_in_millis(millis))
def massaged_oozie_jobs_for_json(oozie_jobs, user, just_sla=False):
    """Serialize Oozie jobs into plain dicts for the JSON dashboard.

    :param oozie_jobs: iterable of Oozie job objects (workflows/coordinators/bundles)
    :param user: requesting user, used for edit-permission checks
    :param just_sla: when True, only include jobs that have an SLA
    :return: ``{'jobs': [...]}`` with one dict per included job
    """
    jobs = []
    for job in oozie_jobs:
        # NOTE(review): `and` binds tighter than `or`, so when just_sla is False the
        # appName filter never applies. Presumably intended as
        # (not just_sla or job.has_sla) and job.appName != 'pig-app-hue-script' — confirm
        # before changing, as callers may rely on the current behavior.
        if not just_sla or (just_sla and job.has_sla) and job.appName != 'pig-app-hue-script':
            # Age of the job in ms (now - lastModTime); 0 when the attribute is missing/empty.
            last_modified_time_millis = hasattr(job, 'lastModTime') and job.lastModTime and (time.time() - time.mktime(job.lastModTime)) * 1000 or 0
            duration_millis = job.durationTime
            # The 'X and Y or Z' chains predate conditional expressions: value-if-present, else default.
            massaged_job = {
                'id': job.id,
                'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
                'lastModTimeInMillis': last_modified_time_millis,
                'lastModTimeFormatted': last_modified_time_millis and format_duration_in_millis(last_modified_time_millis) or None,
                'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime and format_time(job.kickoffTime) or '',
                'kickoffTimeInMillis': hasattr(job, 'kickoffTime') and job.kickoffTime and time.mktime(catch_unicode_time(job.kickoffTime)) or 0,
                'nextMaterializedTime': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and format_time(job.nextMaterializedTime) or '',
                'nextMaterializedTimeInMillis': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and time.mktime(job.nextMaterializedTime) or 0,
                'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
                'endTime': job.endTime and format_time(job.endTime) or None,
                # FIX: was format_time(job.endTime) — copy/paste bug; the guard tests pauseTime,
                # so the formatted value must be pauseTime as well.
                'pauseTime': hasattr(job, 'pauseTime') and job.pauseTime and format_time(job.pauseTime) or None,
                'concurrency': hasattr(job, 'concurrency') and job.concurrency or None,
                'endTimeInMillis': job.endTime and time.mktime(job.endTime) or 0,
                'lastActionInMillis': hasattr(job, 'lastAction') and job.lastAction and time.mktime(job.lastAction) or 0,
                'status': job.status,
                'group': job.group,
                'isRunning': job.is_running(),
                'duration': duration_millis and format_duration_in_millis(duration_millis) or None,
                'durationInMillis': duration_millis,
                'appName': job.appName,
                'progress': job.get_progress(),
                'user': job.user,
                'absoluteUrl': job.get_absolute_url(),
                'canEdit': has_job_edition_permission(job, user),
                'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
                'suspendUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'suspend'}),
                'resumeUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'resume'}),
                'created': hasattr(job, 'createdTime') and job.createdTime and format_time(job.createdTime) or '',
                'createdInMillis': job.submissionTime,
                'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
                'startTimeInMillis': hasattr(job, 'startTime') and job.startTime and time.mktime(job.startTime) or 0,
                'run': hasattr(job, 'run') and job.run or 0,
                'frequency': hasattr(job, 'frequency') and Coordinator.CRON_MAPPING.get(job.frequency, job.frequency) or None,
                'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
                'parentUrl': hasattr(job, 'parentId') and job.parentId and get_link(job.parentId) or '',
                # Jobs with no parent (or a non-coordinator parent) were submitted by hand.
                'submittedManually': hasattr(job, 'parentId') and (job.parentId is None or 'C@' not in job.parentId)
            }
            jobs.append(massaged_job)
    return {'jobs': jobs}
def _fixup(self):
    """Normalize a YARN MR2 application object into the common job-browser shape."""
    self.is_mr2 = True
    jobid = self.id
    # Terminal applications report their real outcome in finalStatus.
    self.status = self.finalStatus if self.state in ("FINISHED", "FAILED", "KILLED") else self.state
    self.jobId = jobid
    self.jobId_short = re.sub("(application|job)_", "", self.jobId)
    self.jobName = self.name
    self.is_retired = False
    self.maps_percent_complete = self.progress
    self.reduces_percent_complete = self.progress
    self.queueName = self.queue
    self.priority = ""
    # A still-running application has finishedTime == 0; use "now" so the duration grows.
    finishTime = int(time.time() * 1000) if self.finishedTime == 0 else self.finishedTime
    self.durationInMillis = finishTime - self.startedTime
    self.startTimeMs = self.startedTime
    self.startTimeFormatted = format_unixtime_ms(self.startedTime)
    self.finishedMaps = None
    self.desiredMaps = None
    self.finishedReduces = None
    self.desiredReduces = None
    self.durationFormatted = format_duration_in_millis(self.durationInMillis)
def _fixup(self):
    """Massage a YARN MR2 application into the attribute set the job browser expects."""
    self.is_mr2 = True
    app_id = self.id
    if self.state in ('FINISHED', 'FAILED', 'KILLED'):
        # A finished application carries its real outcome in finalStatus.
        effective_status = self.finalStatus
    else:
        effective_status = self.state
    setattr(self, 'status', effective_status)
    setattr(self, 'jobId', app_id)
    setattr(self, 'jobId_short', re.sub('(application|job)_', '', self.jobId))
    setattr(self, 'jobName', self.name)
    setattr(self, 'is_retired', False)
    setattr(self, 'maps_percent_complete', self.progress)
    setattr(self, 'reduces_percent_complete', self.progress)
    setattr(self, 'queueName', self.queue)
    setattr(self, 'priority', '')
    # finishedTime == 0 means "still running": measure duration against the current time.
    if self.finishedTime == 0:
        end_ms = int(time.time() * 1000)
    else:
        end_ms = self.finishedTime
    setattr(self, 'durationInMillis', end_ms - self.startedTime)
    setattr(self, 'startTimeMs', self.startedTime)
    setattr(self, 'startTimeFormatted', format_unixtime_ms(self.startedTime))
    # Map/reduce task counts are not available at this API level.
    for missing in ('finishedMaps', 'desiredMaps', 'finishedReduces', 'desiredReduces'):
        setattr(self, missing, None)
    setattr(self, 'durationFormatted', format_duration_in_millis(self.durationInMillis))
def _fixup(self):
    """Normalize an MR2 job record; duration is None until the job has started."""
    raw_id = self.id
    self.status = self.state
    self.jobName = self.name
    self.jobId = raw_id
    self.jobId_short = self.jobId.replace('job_', '')
    self.is_retired = False
    self.maps_percent_complete = None
    self.reduces_percent_complete = None
    # finishTime == 0 means still running: measure against "now".
    end_ms = int(time.time() * 1000) if self.finishTime == 0 else self.finishTime
    # No start time yet => no meaningful duration.
    elapsed_ms = None if self.startTime == 0 else end_ms - self.startTime
    self.duration = elapsed_ms
    self.durationFormatted = self.duration and format_duration_in_millis(self.duration)
    self.finishTimeFormatted = format_unixtime_ms(self.finishTime)
    self.startTimeFormatted = format_unixtime_ms(self.startTime)
    self.finishedMaps = self.mapsCompleted
    self.desiredMaps = 0
    self.finishedReduces = self.reducesCompleted
    self.desiredReduces = 0
    self.applicationType = 'MR2'
def massaged_oozie_jobs_for_json(oozie_jobs, user, just_sla=False):
    """Serialize Oozie jobs into plain dicts for JSON responses.

    :param oozie_jobs: iterable of Oozie job objects
    :param user: requesting user, used for the edit-permission check
    :param just_sla: when True, restrict to jobs that have an SLA
    :return: list of job dicts
    """
    jobs = []
    for job in oozie_jobs:
        # NOTE(review): `and` binds tighter than `or`, so when just_sla is False the
        # appName filter is bypassed — presumably intended as
        # (not just_sla or job.has_sla) and job.appName != 'pig-app-hue-script'; confirm.
        if not just_sla or (just_sla and job.has_sla) and job.appName != 'pig-app-hue-script':
            # The 'X and Y or Z' chains predate conditional expressions:
            # value when the attribute exists and is truthy, else the default.
            massaged_job = {
                'id': job.id,
                'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
                'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime and format_time(job.kickoffTime) or '',
                'nextMaterializedTime': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and format_time(job.nextMaterializedTime) or '',
                'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
                'endTime': job.endTime and format_time(job.endTime) or None,
                'status': job.status,
                'isRunning': job.is_running(),
                # Wall-clock duration in ms, only when both endpoints are known.
                'duration': job.endTime and job.startTime and format_duration_in_millis((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or None,
                'appName': job.appName,
                'progress': job.get_progress(),
                'user': job.user,
                'absoluteUrl': job.get_absolute_url(),
                'canEdit': has_job_edition_permission(job, user),
                'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
                'suspendUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'suspend'}),
                'resumeUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'resume'}),
                # Bundles keep the raw createdTime; other job types get it formatted.
                'created': hasattr(job, 'createdTime') and job.createdTime and job.createdTime and ((job.type == 'Bundle' and job.createdTime) or format_time(job.createdTime)),
                'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
                'run': hasattr(job, 'run') and job.run or 0,
                # Translate cron-style frequencies to human labels where a mapping exists.
                'frequency': hasattr(job, 'frequency') and Coordinator.CRON_MAPPING.get(job.frequency, job.frequency) or None,
                'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
            }
            jobs.append(massaged_job)
    return jobs
def massaged_oozie_jobs_for_json(oozie_jobs, user):
    """Serialize Oozie jobs for JSON output, refreshing running jobs from the server.

    :param oozie_jobs: iterable of Oozie job objects (workflows or coordinators)
    :param user: requesting user, used for the edit-permission check
    :return: list of job dicts
    """
    jobs = []
    for job in oozie_jobs:
        # Re-fetch running jobs so status/progress are current; the API differs by type.
        if job.is_running():
            if job.type == 'Workflow':
                job = get_oozie().get_job(job.id)
            else:
                job = get_oozie().get_coordinator(job.id)
        massaged_job = {
            'id': job.id,
            # 'X and Y or Z' predates conditional expressions: value when present, else default.
            'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
            'endTime': job.endTime and format_time(job.endTime) or None,
            'status': job.status,
            'isRunning': job.is_running(),
            # Wall-clock duration in ms, only when both endpoints are known.
            'duration': job.endTime and job.startTime and format_duration_in_millis((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or None,
            # Escaped for safe embedding in JavaScript.
            'appName': escapejs(job.appName),
            'progress': job.get_progress(),
            'user': job.user,
            'absoluteUrl': job.get_absolute_url(),
            'canEdit': has_job_edition_permission(job, user),
            'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
        }
        jobs.append(massaged_job)
    return jobs
def massaged_jobs_for_json(self, request, oozie_jobs, hue_jobs):
    """Serialize Pig/Oozie jobs for the JSON dashboard, joined with saved Hue scripts.

    :param request: HTTP request; its GET dict is temporarily overridden to fetch logs
    :param oozie_jobs: iterable of Oozie job objects
    :param hue_jobs: saved script documents; matched to jobs via their 'job_id'
    :return: list of job dicts
    """
    jobs = []
    # Index saved scripts by the job id they launched.
    hue_jobs = dict([(script.dict.get("job_id"), script) for script in hue_jobs if script.dict.get("job_id")])
    for job in oozie_jobs:
        if job.is_running():
            job = self.oozie_api.get_job(job.id)
            get_copy = request.GET.copy()  # Hacky, would need to refactor JobBrowser get logs
            get_copy["format"] = "python"
            request.GET = get_copy
            try:
                logs, workflow_action, is_really_done = self.get_log(request, job)
                progress = workflow_action[0]["progress"]
            # FIX: was a bare `except:`, which also swallows SystemExit/KeyboardInterrupt.
            except Exception:
                LOG.exception("failed to get progress")
                progress = 0
        else:
            progress = 100
        hue_pig = hue_jobs.get(job.id) and hue_jobs.get(job.id) or None
        # 'X and Y or Z' chains: value when present/truthy, else default.
        massaged_job = {
            "id": job.id,
            "lastModTime": hasattr(job, "lastModTime") and job.lastModTime and format_time(job.lastModTime) or None,
            "kickoffTime": hasattr(job, "kickoffTime") and job.kickoffTime or None,
            "timeOut": hasattr(job, "timeOut") and job.timeOut or None,
            "endTime": job.endTime and format_time(job.endTime) or None,
            "status": job.status,
            "isRunning": job.is_running(),
            "duration": job.endTime and job.startTime and format_duration_in_millis((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or None,
            "appName": hue_pig and hue_pig.dict["name"] or _("Unsaved script"),
            "scriptId": hue_pig and hue_pig.id or -1,
            "scriptContent": hue_pig and hue_pig.dict["script"] or "",
            "progress": progress,
            "progressPercent": "%d%%" % progress,
            "user": job.user,
            "absoluteUrl": job.get_absolute_url(),
            "canEdit": has_job_edition_permission(job, self.user),
            "killUrl": reverse("oozie:manage_oozie_jobs", kwargs={"job_id": job.id, "action": "kill"}),
            "watchUrl": reverse("pig:watch", kwargs={"job_id": job.id}) + "?format=python",
            # Bundles keep the raw createdTime; other job types get it formatted.
            "created": hasattr(job, "createdTime") and job.createdTime and job.createdTime and ((job.type == "Bundle" and job.createdTime) or format_time(job.createdTime)),
            "startTime": hasattr(job, "startTime") and format_time(job.startTime) or None,
            "run": hasattr(job, "run") and job.run or 0,
            "frequency": hasattr(job, "frequency") and job.frequency or None,
            "timeUnit": hasattr(job, "timeUnit") and job.timeUnit or None,
        }
        jobs.append(massaged_job)
    return jobs
def _init_attributes(self):
    """Copy and derive display attributes from the underlying Thrift job object (self.job)."""
    self.queueName = i18n.smart_unicode(self.job.profile.queueName)
    self.jobName = i18n.smart_unicode(self.job.profile.name)
    self.user = i18n.smart_unicode(self.job.profile.user)
    self.mapProgress = self.job.status.mapProgress
    self.reduceProgress = self.job.status.reduceProgress
    self.setupProgress = self.job.status.setupProgress
    self.cleanupProgress = self.job.status.cleanupProgress
    # Guard against division by zero when no map tasks were requested.
    if self.job.desiredMaps == 0:
        maps_percent_complete = 0
    else:
        maps_percent_complete = int(round(float(self.job.finishedMaps) / self.job.desiredMaps * 100))
    self.desiredMaps = self.job.desiredMaps
    # Same guard for reduce tasks.
    if self.job.desiredReduces == 0:
        reduces_percent_complete = 0
    else:
        reduces_percent_complete = int(round(float(self.job.finishedReduces) / self.job.desiredReduces * 100))
    self.desiredReduces = self.job.desiredReduces
    self.maps_percent_complete = maps_percent_complete
    self.finishedMaps = self.job.finishedMaps
    self.finishedReduces = self.job.finishedReduces
    self.reduces_percent_complete = reduces_percent_complete
    self.startTimeMs = self.job.startTime
    self.startTimeFormatted = format_unixtime_ms(self.job.startTime)
    self.launchTimeMs = self.job.launchTime
    self.launchTimeFormatted = format_unixtime_ms(self.job.launchTime)
    self.finishTimeMs = self.job.finishTime
    self.finishTimeFormatted = format_unixtime_ms(self.job.finishTime)
    self.status = self.job.status.runStateAsString
    self.priority = self.job.priorityAsString
    self.jobFile = self.job.profile.jobFile
    finishTime = self.job.finishTime
    # finishTime == 0 means the job is still running: use "now" for duration purposes.
    if finishTime == 0:
        finishTime = datetime.datetime.now()
    else:
        finishTime = datetime.datetime.fromtimestamp(finishTime / 1000)
    self.duration = finishTime - datetime.datetime.fromtimestamp(self.job.startTime / 1000)
    # NOTE(review): strftime("%s") (epoch seconds) is a glibc extension, not portable
    # to all platforms — confirm target environments before relying on it.
    diff = int(finishTime.strftime("%s")) * 1000 - self.startTimeMs
    self.durationFormatted = format_duration_in_millis(diff)
    self.durationInMillis = diff
def _fixup(self):
    """Convert a YARN application (Resource Manager API shape) to the shared job form."""
    self.is_mr2 = True
    jobid = self.id
    # Keep the unmodified YARN state around before status is normalized.
    self.yarnStatus = self.state
    self.status = self.finalStatus if self.state in ('FINISHED', 'FAILED', 'KILLED') else self.state
    self.jobId = jobid
    self.jobId_short = re.sub('(application|job)_', '', self.jobId)
    self.jobName = self.name
    # Re-assigning the same value forces the attribute onto the instance.
    setattr(self, 'applicationType', self.applicationType)
    self.is_retired = False
    self.maps_percent_complete = self.progress
    self.reduces_percent_complete = self.progress
    self.queueName = self.queue
    self.priority = ''
    # finishedTime == 0 means still running: measure against "now".
    finish_ms = int(time.time() * 1000) if self.finishedTime == 0 else self.finishedTime
    duration_ms = None if self.startedTime == 0 else finish_ms - self.startedTime
    self.durationInMillis = duration_ms
    self.durationFormatted = duration_ms and format_duration_in_millis(self.durationInMillis)
    self.startTimeMs = self.startedTime
    self.startTimeFormatted = format_unixtime_ms(self.startedTime)
    self.finishTimeFormatted = format_unixtime_ms(finish_ms)
    self.finishedMaps = None
    self.desiredMaps = None
    self.finishedReduces = None
    self.desiredReduces = None
    # Backfill fields that some YARN versions do not return.
    for attr in ['preemptedResourceVCores', 'vcoreSeconds', 'memorySeconds', 'diagnostics']:
        if not hasattr(self, attr):
            setattr(self, attr, 'N/A')
    if not hasattr(self, 'acls'):
        self.acls = {}
    # YARN returns a N/A url if it's not set.
    if not hasattr(self, 'trackingUrl') or self.trackingUrl == 'http://N/A':
        self.trackingUrl = None
def massaged_jobs_for_json(self, request, oozie_jobs, hue_jobs):
    """Serialize Pig/Oozie jobs for the JSON dashboard, joined with saved Hue scripts.

    :param request: HTTP request; its GET dict is temporarily overridden to fetch logs
    :param oozie_jobs: iterable of Oozie job objects
    :param hue_jobs: saved script documents; matched to jobs via their 'job_id'
    :return: list of job dicts
    """
    jobs = []
    # Index saved scripts by the job id they launched.
    hue_jobs = dict([(script.dict.get('job_id'), script) for script in hue_jobs if script.dict.get('job_id')])
    for job in oozie_jobs:
        if job.is_running():
            job = self.oozie_api.get_job(job.id)
            get_copy = request.GET.copy()  # Hacky, would need to refactor JobBrowser get logs
            get_copy['format'] = 'python'
            request.GET = get_copy
            try:
                logs, workflow_action, is_really_done = self.get_log(request, job)
                progress = workflow_action[0]['progress']
            # FIX: was a bare `except:`, which also swallows SystemExit/KeyboardInterrupt.
            except Exception:
                LOG.exception('failed to get progress')
                progress = 0
        else:
            progress = 100
        hue_pig = hue_jobs.get(job.id) and hue_jobs.get(job.id) or None
        # 'X and Y or Z' chains: value when present/truthy, else default.
        massaged_job = {
            'id': job.id,
            'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
            'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime or None,
            'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
            'endTime': job.endTime and format_time(job.endTime) or None,
            'status': job.status,
            'isRunning': job.is_running(),
            'duration': job.endTime and job.startTime and format_duration_in_millis((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or None,
            'appName': hue_pig and hue_pig.dict['name'] or _('Unsaved script'),
            'scriptId': hue_pig and hue_pig.id or -1,
            'scriptContent': hue_pig and hue_pig.dict['script'] or '',
            'progress': progress,
            'progressPercent': '%d%%' % progress,
            'user': job.user,
            'absoluteUrl': job.get_absolute_url(),
            'canEdit': has_job_edition_permission(job, self.user),
            'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
            'watchUrl': reverse('pig:watch', kwargs={'job_id': job.id}) + '?format=python',
            # Bundles keep the raw createdTime; other job types get it formatted.
            'created': hasattr(job, 'createdTime') and job.createdTime and job.createdTime and ((job.type == 'Bundle' and job.createdTime) or format_time(job.createdTime)),
            'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
            'run': hasattr(job, 'run') and job.run or 0,
            'frequency': hasattr(job, 'frequency') and job.frequency or None,
            'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
        }
        jobs.append(massaged_job)
    return jobs
def _fixup(self):
    """Shape a generic YARN v2 application into the job-browser record (Python 3 variant)."""
    app_id = self.id
    # Terminal applications carry their real outcome in finalStatus.
    self.status = self.finalStatus if self.state in ('FINISHED', 'FAILED', 'KILLED') else self.state
    self.type = self.applicationType
    self.applicationType = 'YarnV2'
    self.jobName = self.name
    self.jobId = app_id
    self.jobId_short = self.jobId.replace('job_', '')
    self.is_retired = False
    self.is_mr2 = True
    self.maps_percent_complete = None
    self.reduces_percent_complete = None
    self.finishedMaps = 0
    self.desiredMaps = 0
    self.finishedReduces = 0
    self.desiredReduces = 0
    # finishedTime == 0 means still running: measure against "now".
    finish_ms = int(time.time() * 1000) if self.finishedTime == 0 else self.finishedTime
    elapsed_ms = None if self.startedTime == 0 else finish_ms - self.startedTime
    self.duration = elapsed_ms
    self.durationInMillis = elapsed_ms
    self.durationFormatted = self.duration and format_duration_in_millis(self.duration)
    self.finishTimeFormatted = format_unixtime_ms(finish_ms)
    self.startTimeFormatted = format_unixtime_ms(self.startedTime)
    # NOTE(review): stores the *formatted* start time, not milliseconds — confirm intended.
    self.startTimeMs = self.startTimeFormatted
    self.startTime = self.startedTime
    self.finishTime = finish_ms
    # Extract the container id from the AM container-logs URL path; fall back to ''.
    try:
        container_id = urllib.parse.urlsplit(self.amContainerLogs).path.split('/node/containerlogs/')[1].split('/')[0]
    except Exception:
        container_id = ''
    self.assignedContainerId = container_id
def _fixup(self):
    """Normalize a finished MR2 job summary into the common listing shape."""
    raw_id = self.id
    self.status = self.state
    self.jobId = raw_id
    self.jobId_short = self.jobId.replace('job_', '')
    self.is_retired = False
    self.maps_percent_complete = None
    self.reduces_percent_complete = None
    elapsed_ms = self.finishTime - self.startTime
    self.duration = elapsed_ms
    self.durationFormatted = format_duration_in_millis(elapsed_ms)
    self.finishTimeFormatted = format_unixtime_ms(self.finishTime)
    self.startTimeFormatted = format_unixtime_ms(self.startTime)
    self.finishedMaps = self.mapsCompleted
    self.desiredMaps = None
    self.finishedReduces = self.reducesCompleted
    self.desiredReduces = None
def _fixup(self):
    """Flatten a YARN application record into the attributes the job browser expects."""
    self.is_mr2 = True
    app_id = self.id
    # Preserve the raw YARN state before status is normalized below.
    self.yarnStatus = self.state
    if self.state in ('FINISHED', 'FAILED', 'KILLED'):
        self.status = self.finalStatus
    else:
        self.status = self.state
    self.jobId = app_id
    self.jobId_short = re.sub('(application|job)_', '', self.jobId)
    self.jobName = self.name
    # Re-assigning the same value forces the attribute onto the instance.
    setattr(self, 'applicationType', self.applicationType)
    self.is_retired = False
    self.maps_percent_complete = self.progress
    self.reduces_percent_complete = self.progress
    self.queueName = self.queue
    self.priority = ''
    # finishedTime == 0 means still running: measure against "now".
    if self.finishedTime == 0:
        end_ms = int(time.time() * 1000)
    else:
        end_ms = self.finishedTime
    if self.startedTime == 0:
        elapsed_ms = None
    else:
        elapsed_ms = end_ms - self.startedTime
    self.durationInMillis = elapsed_ms
    self.durationFormatted = elapsed_ms and format_duration_in_millis(self.durationInMillis)
    self.startTimeMs = self.startedTime
    self.startTimeFormatted = format_unixtime_ms(self.startedTime)
    self.finishTimeFormatted = format_unixtime_ms(end_ms)
    # Task counts are not available at this API level.
    for missing in ('finishedMaps', 'desiredMaps', 'finishedReduces', 'desiredReduces'):
        setattr(self, missing, None)
    # Backfill fields that some YARN versions do not return.
    for attr in ['preemptedResourceVCores', 'vcoreSeconds', 'memorySeconds', 'diagnostics']:
        if not hasattr(self, attr):
            setattr(self, attr, 'N/A')
    if not hasattr(self, 'acls'):
        self.acls = {}
    # YARN returns a N/A url if it's not set.
    if not hasattr(self, 'trackingUrl') or self.trackingUrl == 'http://N/A':
        self.trackingUrl = None
def _fixup(self):
    """Massage a finished MR2 job into the common listing shape (applicationType unset)."""
    raw_id = self.id
    self.status = self.state
    self.jobId = raw_id
    self.jobId_short = self.jobId.replace('job_', '')
    self.is_retired = False
    self.maps_percent_complete = None
    self.reduces_percent_complete = None
    elapsed_ms = self.finishTime - self.startTime
    self.duration = elapsed_ms
    self.durationFormatted = format_duration_in_millis(elapsed_ms)
    self.finishTimeFormatted = format_unixtime_ms(self.finishTime)
    self.startTimeFormatted = format_unixtime_ms(self.startTime)
    self.finishedMaps = self.mapsCompleted
    self.desiredMaps = None
    self.finishedReduces = self.reducesCompleted
    self.desiredReduces = None
    self.applicationType = None
def _fixup(self):
    """Shape a generic YARN v2 application into the job-browser record (Python 2 variant)."""
    app_id = self.id
    if self.state in ('FINISHED', 'FAILED', 'KILLED'):
        # Terminal applications carry their real outcome in finalStatus.
        self.status = self.finalStatus
    else:
        self.status = self.state
    self.type = self.applicationType
    self.applicationType = 'YarnV2'
    self.jobName = self.name
    self.jobId = app_id
    self.jobId_short = self.jobId.replace('job_', '')
    self.is_retired = False
    self.is_mr2 = True
    self.maps_percent_complete = None
    self.reduces_percent_complete = None
    self.finishedMaps = 0
    self.desiredMaps = 0
    self.finishedReduces = 0
    self.desiredReduces = 0
    # finishedTime == 0 means still running: measure against "now".
    if self.finishedTime == 0:
        end_ms = int(time.time() * 1000)
    else:
        end_ms = self.finishedTime
    if self.startedTime == 0:
        elapsed_ms = None
    else:
        elapsed_ms = end_ms - self.startedTime
    self.duration = elapsed_ms
    self.durationInMillis = elapsed_ms
    self.durationFormatted = self.duration and format_duration_in_millis(self.duration)
    self.finishTimeFormatted = format_unixtime_ms(end_ms)
    self.startTimeFormatted = format_unixtime_ms(self.startedTime)
    # NOTE(review): stores the *formatted* start time, not milliseconds — confirm intended.
    self.startTimeMs = self.startTimeFormatted
    self.startTime = self.startedTime
    self.finishTime = end_ms
    # Extract the container id from the AM container-logs URL path; fall back to ''.
    try:
        container_id = urlparse.urlsplit(self.amContainerLogs).path.split('/node/containerlogs/')[1].split('/')[0]
    except Exception:
        container_id = ''
    self.assignedContainerId = container_id
def _get_metrics(self):
    """Populate self.metrics with per-executor rows from the Spark history server.

    Best-effort: on any failure the error is logged and self.metrics is left as
    the empty dict set at the top.
    """
    self.metrics = {}
    try:
        executors = self.history_server_api.executors(self.jobId, self.attempt_id)
        if executors:
            self.metrics['headers'] = [
                _('Executor Id'), _('Address'), _('RDD Blocks'), _('Storage Memory'),
                _('Disk Used'), _('Active Tasks'), _('Failed Tasks'), _('Complete Tasks'),
                _('Task Time'), _('Input'), _('Shuffle Read'), _('Shuffle Write'), _('Logs')
            ]
            self.metrics['executors'] = []
            for e in executors:
                self.metrics['executors'].append([
                    e.get('id', 'N/A'),
                    e.get('hostPort', ''),
                    e.get('rddBlocks', ''),
                    '%s / %s' % (big_filesizeformat(e.get('memoryUsed', 0)), big_filesizeformat(e.get('maxMemory', 0))),
                    big_filesizeformat(e.get('diskUsed', 0)),
                    e.get('activeTasks', ''),
                    e.get('failedTasks', ''),
                    e.get('completedTasks', ''),
                    format_duration_in_millis(e.get('totalDuration', 0)),
                    big_filesizeformat(e.get('totalInputBytes', 0)),
                    big_filesizeformat(e.get('totalShuffleRead', 0)),
                    big_filesizeformat(e.get('totalShuffleWrite', 0)),
                    e.get('executorLogs', '')
                ])
    # FIX: 'except Exception, e' is Python-2-only syntax; 'as e' works on 2.6+ and 3.x.
    except Exception as e:
        LOG.error('Failed to get Spark Job executors: %s' % e)
def _fixup(self):
    """Normalize a YarnV2 attempt-style record; a zero finishedTime means still running."""
    if not hasattr(self, 'diagnostics'):
        self.diagnostics = ''
    self.type = 'YarnV2'
    # finishedTime == 0 => still running: measure against "now".
    end_ms = int(time.time() * 1000) if self.finishedTime == 0 else self.finishedTime
    # No start time yet => no meaningful duration.
    elapsed_ms = None if self.startTime == 0 else end_ms - self.startTime
    self.duration = elapsed_ms
    self.durationInMillis = elapsed_ms
    self.durationFormatted = self.duration and format_duration_in_millis(self.duration)
    self.finishTimeFormatted = format_unixtime_ms(end_ms)
    self.startTimeFormatted = format_unixtime_ms(self.startTime)
    self.status = 'RUNNING' if self.finishedTime == 0 else 'SUCCEEDED'
    self.properties = {}
def massaged_oozie_jobs_for_json(oozie_jobs, user):
    """Serialize Oozie Workflow/Coordinator/Bundle jobs for JSON dashboards.

    Running jobs are re-fetched from the Oozie server so status/progress are fresh.

    :param oozie_jobs: iterable of Oozie job objects
    :param user: requesting user, used for the edit-permission check
    :return: list of job dicts
    """
    jobs = []
    for job in oozie_jobs:
        # Refresh running jobs via the API matching their type.
        if job.is_running():
            if job.type == 'Workflow':
                job = get_oozie().get_job(job.id)
            elif job.type == 'Coordinator':
                job = get_oozie().get_coordinator(job.id)
            else:
                job = get_oozie().get_bundle(job.id)
        # 'X and Y or Z' chains predate conditional expressions: value when present, else default.
        massaged_job = {
            'id': job.id,
            'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
            'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime or '',
            'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
            'endTime': job.endTime and format_time(job.endTime) or None,
            'status': job.status,
            'isRunning': job.is_running(),
            # Human-readable duration plus a numeric twin for client-side sorting.
            'duration': job.endTime and job.startTime and format_duration_in_millis((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or None,
            'duration_sort': job.endTime and job.startTime and (time.mktime(job.endTime) - time.mktime(job.startTime)) or 0,
            'appName': job.appName,
            'progress': job.get_progress(),
            'user': job.user,
            'absoluteUrl': job.get_absolute_url(),
            'canEdit': has_job_edition_permission(job, user),
            'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
            'suspendUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'suspend'}),
            'resumeUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'resume'}),
            # Bundles keep the raw createdTime; other job types get it formatted.
            'created': hasattr(job, 'createdTime') and job.createdTime and job.createdTime and ((job.type == 'Bundle' and job.createdTime) or format_time(job.createdTime)),
            'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
            'run': hasattr(job, 'run') and job.run or 0,
            'frequency': hasattr(job, 'frequency') and job.frequency or None,
            'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
        }
        jobs.append(massaged_job)
    return jobs
def massaged_oozie_jobs_for_json(oozie_jobs, user, just_sla=False):
    """Serialize Oozie jobs for JSON dashboards, optionally restricted to SLA jobs.

    Running jobs are re-fetched from the Oozie server (per-user client) so
    status/progress are fresh.

    :param oozie_jobs: iterable of Oozie job objects
    :param user: requesting user, used both for the Oozie client and permissions
    :param just_sla: when True, include only jobs that have an SLA
    :return: list of job dicts
    """
    jobs = []
    for job in oozie_jobs:
        # Refresh running jobs via the API matching their type.
        if job.is_running():
            if job.type == 'Workflow':
                job = get_oozie(user).get_job(job.id)
            elif job.type == 'Coordinator':
                job = get_oozie(user).get_coordinator(job.id)
            else:
                job = get_oozie(user).get_bundle(job.id)
        if not just_sla or (just_sla and job.has_sla):
            # 'X and Y or Z' chains predate conditional expressions: value when present, else default.
            massaged_job = {
                'id': job.id,
                'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
                'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime and format_time(job.kickoffTime) or '',
                'nextMaterializedTime': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and format_time(job.nextMaterializedTime) or '',
                'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
                'endTime': job.endTime and format_time(job.endTime) or None,
                'status': job.status,
                'isRunning': job.is_running(),
                # Wall-clock duration in ms, only when both endpoints are known.
                'duration': job.endTime and job.startTime and format_duration_in_millis((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or None,
                'appName': job.appName,
                'progress': job.get_progress(),
                'user': job.user,
                'absoluteUrl': job.get_absolute_url(),
                'canEdit': has_job_edition_permission(job, user),
                'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
                'suspendUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'suspend'}),
                'resumeUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'resume'}),
                # Bundles keep the raw createdTime; other job types get it formatted.
                'created': hasattr(job, 'createdTime') and job.createdTime and job.createdTime and ((job.type == 'Bundle' and job.createdTime) or format_time(job.createdTime)),
                'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
                'run': hasattr(job, 'run') and job.run or 0,
                'frequency': hasattr(job, 'frequency') and job.frequency or None,
                'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
            }
            jobs.append(massaged_job)
    return jobs
def _get_metrics(self):
    """Fetch per-executor metrics for a Spark job attempt into self.metrics.

    Best-effort: any failure is logged and self.metrics remains the empty dict.
    """
    self.metrics = {}
    try:
        executors = self.history_server_api.executors(self.jobId, self.attempt_id)
        if executors:
            self.metrics['headers'] = [
                _('Executor Id'), _('Address'), _('RDD Blocks'), _('Storage Memory'),
                _('Disk Used'), _('Active Tasks'), _('Failed Tasks'), _('Complete Tasks'),
                _('Task Time'), _('Input'), _('Shuffle Read'), _('Shuffle Write'), _('Logs')]
            self.metrics['executors'] = []
            for e in executors:
                self.metrics['executors'].append([
                    e.get('id', 'N/A'),
                    e.get('hostPort', ''),
                    e.get('rddBlocks', ''),
                    '%s / %s' % (big_filesizeformat(e.get('memoryUsed', 0)), big_filesizeformat(e.get('maxMemory', 0))),
                    big_filesizeformat(e.get('diskUsed', 0)),
                    e.get('activeTasks', ''),
                    e.get('failedTasks', ''),
                    e.get('completedTasks', ''),
                    format_duration_in_millis(e.get('totalDuration', 0)),
                    big_filesizeformat(e.get('totalInputBytes', 0)),
                    big_filesizeformat(e.get('totalShuffleRead', 0)),
                    big_filesizeformat(e.get('totalShuffleWrite', 0)),
                    e.get('executorLogs', '')
                ])
    # FIX: 'except Exception, e' is Python-2-only syntax; 'as e' works on 2.6+ and 3.x.
    except Exception as e:
        LOG.error('Failed to get Spark Job executors: %s' % e)
def massaged_jobs_for_json(self, oozie_jobs, hue_jobs):
    """Serialize Pig/Oozie jobs for JSON output, joined with saved Hue scripts.

    :param oozie_jobs: iterable of Oozie job objects
    :param hue_jobs: saved script documents; matched to jobs via their 'job_id'
    :return: list of job dicts
    """
    jobs = []
    # Index saved scripts by the job id they launched.
    hue_jobs = dict([(script.dict.get('job_id'), script) for script in hue_jobs if script.dict.get('job_id')])
    for job in oozie_jobs:
        # Re-fetch running jobs so status/progress are current.
        if job.is_running():
            job = get_oozie().get_job(job.id)
        hue_pig = hue_jobs.get(job.id) and hue_jobs.get(job.id) or None
        # 'X and Y or Z' chains predate conditional expressions: value when present, else default.
        massaged_job = {
            'id': job.id,
            'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
            'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime or None,
            'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
            'endTime': job.endTime and format_time(job.endTime) or None,
            'status': job.status,
            'isRunning': job.is_running(),
            # Wall-clock duration in ms, only when both endpoints are known.
            'duration': job.endTime and job.startTime and format_duration_in_millis((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or None,
            # Fall back to a placeholder name when the job has no saved script.
            'appName': hue_pig and hue_pig.dict['name'] or _('Unsaved script'),
            'scriptId': hue_pig and hue_pig.id or -1,
            'progress': job.get_progress(),
            'progressPercent': '%d%%' % job.get_progress(),
            'user': job.user,
            'absoluteUrl': job.get_absolute_url(),
            'canEdit': has_job_edition_permission(job, self.user),
            'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
            'watchUrl': reverse('pig:watch', kwargs={'job_id': job.id}) + '?format=python',
            # Bundles keep the raw createdTime; other job types get it formatted.
            'created': hasattr(job, 'createdTime') and job.createdTime and job.createdTime and ((job.type == 'Bundle' and job.createdTime) or format_time(job.createdTime)),
            'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
            'run': hasattr(job, 'run') and job.run or 0,
            'frequency': hasattr(job, 'frequency') and job.frequency or None,
            'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
        }
        jobs.append(massaged_job)
    return jobs
def massaged_oozie_jobs_for_json(oozie_jobs, user, just_sla=False):
    """Serialize Oozie jobs (workflows/coordinators/bundles) for dashboard JSON.

    Args:
        oozie_jobs: iterable of Oozie job objects from the Oozie API.
        user: Django user, used for the edit-permission and
            "submitted manually" checks.
        just_sla: when True, only include jobs that carry SLA information.

    Returns:
        dict of the form ``{"jobs": [...]}``.
    """
    jobs = []

    for job in oozie_jobs:
        # NOTE(review): `and` binds tighter than `or`, so the appName filter only
        # applies on the just_sla branch; when just_sla is False every job passes.
        # Preserved as-is -- confirm whether the Pig-app filter was meant globally.
        if not just_sla or (just_sla and job.has_sla) and job.appName != "pig-app-hue-script":
            # Milliseconds since the job was last modified (0 when unknown).
            last_modified_time_millis = (
                hasattr(job, "lastModTime") and job.lastModTime and (time.time() - time.mktime(job.lastModTime)) * 1000 or 0
            )
            # Total run time in milliseconds (0 until both start and end are known).
            duration_millis = (
                job.endTime and job.startTime and ((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or 0
            )

            massaged_job = {
                "id": job.id,
                "lastModTime": hasattr(job, "lastModTime") and job.lastModTime and format_time(job.lastModTime) or None,
                "lastModTimeInMillis": last_modified_time_millis,
                "lastModTimeFormatted": last_modified_time_millis and format_duration_in_millis(last_modified_time_millis) or None,
                "kickoffTime": hasattr(job, "kickoffTime") and job.kickoffTime and format_time(job.kickoffTime) or "",
                "kickoffTimeInMillis": hasattr(job, "kickoffTime") and job.kickoffTime and time.mktime(catch_unicode_time(job.kickoffTime)) or 0,
                "nextMaterializedTime": hasattr(job, "nextMaterializedTime") and job.nextMaterializedTime and format_time(job.nextMaterializedTime) or "",
                "nextMaterializedTimeInMillis": hasattr(job, "nextMaterializedTime") and job.nextMaterializedTime and time.mktime(job.nextMaterializedTime) or 0,
                "timeOut": hasattr(job, "timeOut") and job.timeOut or None,
                "endTime": job.endTime and format_time(job.endTime) or None,
                # BUG FIX: previously formatted job.endTime here (copy/paste error);
                # the guard checks pauseTime, so pauseTime is what should be shown.
                "pauseTime": hasattr(job, "pauseTime") and job.pauseTime and format_time(job.pauseTime) or None,
                "concurrency": hasattr(job, "concurrency") and job.concurrency or None,
                "endTimeInMillis": job.endTime and time.mktime(job.endTime) or 0,
                "status": job.status,
                "isRunning": job.is_running(),
                "duration": duration_millis and format_duration_in_millis(duration_millis) or None,
                "durationInMillis": duration_millis,
                "appName": job.appName,
                "progress": job.get_progress(),
                "user": job.user,
                "absoluteUrl": job.get_absolute_url(),
                "canEdit": has_job_edition_permission(job, user),
                "killUrl": reverse("oozie:manage_oozie_jobs", kwargs={"job_id": job.id, "action": "kill"}),
                "suspendUrl": reverse("oozie:manage_oozie_jobs", kwargs={"job_id": job.id, "action": "suspend"}),
                "resumeUrl": reverse("oozie:manage_oozie_jobs", kwargs={"job_id": job.id, "action": "resume"}),
                "created": hasattr(job, "createdTime") and job.createdTime and format_time(job.createdTime) or "",
                "createdInMillis": hasattr(job, "createdTime") and job.createdTime and time.mktime(catch_unicode_time(job.createdTime)) or 0,
                "startTime": hasattr(job, "startTime") and format_time(job.startTime) or None,
                "startTimeInMillis": hasattr(job, "startTime") and job.startTime and time.mktime(job.startTime) or 0,
                "run": hasattr(job, "run") and job.run or 0,
                "frequency": hasattr(job, "frequency") and Coordinator.CRON_MAPPING.get(job.frequency, job.frequency) or None,
                "timeUnit": hasattr(job, "timeUnit") and job.timeUnit or None,
                "parentUrl": hasattr(job, "parentId") and job.parentId and get_link(job.parentId) or "",
                "submittedManually": hasattr(job, "parentId") and _submitted_manually(job, user),
            }
            jobs.append(massaged_job)

    return {"jobs": jobs}
def massaged_jobs_for_json(self, oozie_jobs, hue_jobs):
    """Build the JSON-friendly job list for the Pig dashboard.

    Each Oozie job is paired (by submitted job id) with the saved Pig script
    that launched it, when one exists, and flattened into a plain dict.
    """
    # Map Oozie job id -> saved script, keeping only scripts that were submitted.
    scripts_by_job_id = {}
    for script in hue_jobs:
        submitted_id = script.dict.get('job_id')
        if submitted_id:
            scripts_by_job_id[submitted_id] = script

    massaged = []
    for job in oozie_jobs:
        if job.is_running():
            # Running jobs are re-fetched for fresh progress/status.
            job = get_oozie().get_job(job.id)
        script = scripts_by_job_id.get(job.id) or None

        # Duration is only defined once both start and end times exist.
        if job.endTime and job.startTime:
            elapsed_millis = (time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000
            duration_text = format_duration_in_millis(elapsed_millis) or None
        else:
            duration_text = None

        created = getattr(job, 'createdTime', False)
        if created:
            # Bundles keep the raw createdTime; other job types get it formatted.
            created = (job.type == 'Bundle' and created) or format_time(created)

        massaged.append({
            'id': job.id,
            'lastModTime': (getattr(job, 'lastModTime', None) and format_time(job.lastModTime)) or None,
            'kickoffTime': getattr(job, 'kickoffTime', None) or None,
            'timeOut': getattr(job, 'timeOut', None) or None,
            'endTime': (job.endTime and format_time(job.endTime)) or None,
            'status': job.status,
            'isRunning': job.is_running(),
            'duration': duration_text,
            'appName': (script and script.dict['name']) or _('Unsaved script'),
            'scriptId': (script and script.id) or -1,
            'progress': job.get_progress(),
            'progressPercent': '%d%%' % job.get_progress(),
            'user': job.user,
            'absoluteUrl': job.get_absolute_url(),
            'canEdit': has_job_edition_permission(job, self.user),
            'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
            'watchUrl': reverse('pig:watch', kwargs={'job_id': job.id}) + '?format=python',
            'created': created,
            'startTime': (hasattr(job, 'startTime') and format_time(job.startTime)) or None,
            'run': getattr(job, 'run', 0) or 0,
            'frequency': getattr(job, 'frequency', None) or None,
            'timeUnit': getattr(job, 'timeUnit', None) or None,
        })

    return massaged
def massaged_oozie_jobs_for_json(oozie_jobs, user, just_sla=False):
    """Serialize Oozie jobs into plain dicts for dashboard JSON responses.

    Args:
        oozie_jobs: iterable of Oozie job objects.
        user: Django user used for the edit-permission check.
        just_sla: when True, keep only jobs carrying SLA information.

    Returns:
        list of dicts, one per job that passes the filter.
    """
    jobs = []

    for job in oozie_jobs:
        # NOTE(review): `and` binds tighter than `or`, so the appName filter is
        # only applied on the just_sla branch; with just_sla=False every job
        # passes. Preserved as-is -- confirm whether that is intended.
        if not just_sla or (just_sla and job.has_sla) and job.appName != 'pig-app-hue-script':
            # FIX: hoisted so the start/end mktime arithmetic runs once instead
            # of twice ('duration' and 'durationInMillis' previously each
            # recomputed it). Matches the sibling massaged_oozie_jobs_for_json
            # implementation in this file.
            duration_millis = job.endTime and job.startTime and ((time.mktime(job.endTime) - time.mktime(job.startTime)) * 1000) or 0

            massaged_job = {
                'id': job.id,
                'lastModTime': hasattr(job, 'lastModTime') and job.lastModTime and format_time(job.lastModTime) or None,
                'lastModTimeInMillis': hasattr(job, 'lastModTime') and job.lastModTime and time.mktime(job.lastModTime) or 0,
                'kickoffTime': hasattr(job, 'kickoffTime') and job.kickoffTime and format_time(job.kickoffTime) or '',
                'kickoffTimeInMillis': hasattr(job, 'kickoffTime') and job.kickoffTime and time.mktime(catch_unicode_time(job.kickoffTime)) or 0,
                'nextMaterializedTime': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and format_time(job.nextMaterializedTime) or '',
                'nextMaterializedTimeInMillis': hasattr(job, 'nextMaterializedTime') and job.nextMaterializedTime and time.mktime(job.nextMaterializedTime) or 0,
                'timeOut': hasattr(job, 'timeOut') and job.timeOut or None,
                'endTime': job.endTime and format_time(job.endTime) or None,
                'endTimeInMillis': job.endTime and time.mktime(job.endTime) or 0,
                'status': job.status,
                'isRunning': job.is_running(),
                'duration': duration_millis and format_duration_in_millis(duration_millis) or None,
                'durationInMillis': duration_millis,
                'appName': job.appName,
                'progress': job.get_progress(),
                'user': job.user,
                'absoluteUrl': job.get_absolute_url(),
                'canEdit': has_job_edition_permission(job, user),
                'killUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'kill'}),
                'suspendUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'suspend'}),
                'resumeUrl': reverse('oozie:manage_oozie_jobs', kwargs={'job_id': job.id, 'action': 'resume'}),
                'created': hasattr(job, 'createdTime') and job.createdTime and format_time(job.createdTime) or '',
                'createdInMillis': hasattr(job, 'createdTime') and job.createdTime and time.mktime(catch_unicode_time(job.createdTime)) or 0,
                'startTime': hasattr(job, 'startTime') and format_time(job.startTime) or None,
                'startTimeInMillis': hasattr(job, 'startTime') and job.startTime and time.mktime(job.startTime) or 0,
                'run': hasattr(job, 'run') and job.run or 0,
                'frequency': hasattr(job, 'frequency') and Coordinator.CRON_MAPPING.get(job.frequency, job.frequency) or None,
                'timeUnit': hasattr(job, 'timeUnit') and job.timeUnit or None,
            }
            jobs.append(massaged_job)

    return jobs