def retrieve(self, request, project, pk=None):
    """
    GET method implementation for detail view

    Return a single job with log_references and
    artifact names and links to the artifact blobs.
    """
    try:
        job = Job.objects.select_related(
            *self._default_select_related).get(repository__name=project,
                                               id=pk)
    except Job.DoesNotExist:
        return Response("No job with id: {0}".format(pk),
                        status=HTTP_404_NOT_FOUND)

    resp = serializers.JobSerializer(job, read_only=True).data

    resp["resource_uri"] = reverse("jobs-detail",
                                   kwargs={"project": project, "pk": pk})
    # Build the log list in a single comprehension rather than
    # loop-and-append (PERF401); values_list yields (name, url) pairs.
    resp["logs"] = [
        {'name': name, 'url': url}
        for (name, url) in JobLog.objects.filter(job=job).values_list(
            'name', 'url')
    ]

    # Only expose platform_option when the job actually has one
    # (get_platform_option may return a falsy value).
    platform_option = job.get_platform_option()
    if platform_option:
        resp["platform_option"] = platform_option

    return Response(resp)
def retrieve(self, request, project, pk=None):
    """
    GET method implementation for detail view

    Return a single job with log_references and
    artifact names and links to the artifact blobs.
    """
    try:
        # taskcluster_metadata is a reverse one-to-one; pull it in the
        # same query to avoid a second round trip below.
        job = Job.objects.select_related(
            *self._default_select_related + ['taskcluster_metadata']).get(
                repository__name=project, id=pk)
    except Job.DoesNotExist:
        return Response("No job with id: {0}".format(pk),
                        status=HTTP_404_NOT_FOUND)

    resp = serializers.JobSerializer(job, read_only=True).data

    resp["resource_uri"] = reverse("jobs-detail",
                                   kwargs={"project": project, "pk": pk})
    # Build the log list in a single comprehension rather than
    # loop-and-append (PERF401); values_list yields (name, url) pairs.
    resp["logs"] = [
        {'name': name, 'url': url}
        for (name, url) in JobLog.objects.filter(job=job).values_list(
            'name', 'url')
    ]

    # Only expose platform_option when the job actually has one
    # (get_platform_option may return a falsy value).
    platform_option = job.get_platform_option()
    if platform_option:
        resp["platform_option"] = platform_option

    try:
        resp['taskcluster_metadata'] = {
            'task_id': job.taskcluster_metadata.task_id,
            'retry_id': job.taskcluster_metadata.retry_id
        }
    except ObjectDoesNotExist:
        # Not every job has taskcluster metadata; omit the key rather
        # than failing the whole response.
        pass

    # AUTOCLASSIFY_STATUSES is a sequence of (value, label) pairs;
    # dict() is the idiomatic form of the identity dict comprehension.
    status_map = dict(Job.AUTOCLASSIFY_STATUSES)
    resp["autoclassify_status"] = status_map[job.autoclassify_status]

    return Response(resp)