def get(self, request, *args, **kwargs):
    """Stream the job's log file back to the client as a download attachment.

    Audits the access, resolves the log path (archived for finished jobs,
    freshly processed temp logs for running ones), and streams the file.
    Returns a 404 response when the resolved path does not exist.
    """
    auditor.record(event_type=JOB_LOGS_VIEWED,
                   instance=self.job,
                   actor_id=request.user.id,
                   actor_name=request.user.username)
    job_name = self.job.unique_name
    if self.job.is_done:
        # Finished job: archive the persisted logs and serve the archive.
        persisted_path = stores.get_job_logs_path(job_name=job_name, temp=False)
        log_path = archive_logs_file(log_path=persisted_path, namepath=job_name)
    else:
        # Running job: materialize the latest logs into the temp location first.
        process_logs(job=self.job, temp=True)
        log_path = stores.get_job_logs_path(job_name=job_name, temp=True)

    try:
        # Django closes the wrapped file when the streaming response finishes.
        response = StreamingHttpResponse(
            FileWrapper(open(log_path, 'rb'), 8192),
            content_type=mimetypes.guess_type(log_path)[0])
        response['Content-Length'] = os.path.getsize(log_path)
        response['Content-Disposition'] = "attachment; filename={}".format(
            os.path.basename(log_path))
        return response
    except FileNotFoundError:
        _logger.warning('Log file not found: log_path=%s', log_path)
        return Response(status=status.HTTP_404_NOT_FOUND,
                        data='Log file not found: log_path={}'.format(log_path))
def get(self, request, *args, **kwargs):
    """Audit the log access, resolve the job's log path, and stream the file.

    Finished jobs serve an archived copy of the persisted logs; running jobs
    get their temp logs refreshed first. Delegates streaming to stream_file.
    """
    auditor.record(event_type=JOB_LOGS_VIEWED,
                   instance=self.job,
                   actor_id=request.user.id,
                   actor_name=request.user.username)
    job_name = self.job.unique_name
    if not self.job.is_done:
        # Still running: refresh the temp log file before serving it.
        process_logs(job=self.job, temp=True)
        log_path = stores.get_job_logs_path(job_name=job_name, temp=True)
    else:
        # Done: archive the final logs and serve the archive.
        log_path = archive_logs_file(
            log_path=stores.get_job_logs_path(job_name=job_name, temp=False),
            namepath=job_name)
    return stream_file(file_path=log_path, logger=_logger)
def test_job_logs_path_creation_deletion(self):
    """Log path exists after creation and is removed by scheduled deletion."""
    job = JobFactory()
    job_logs_path = stores.get_job_logs_path(job_name=job.unique_name, temp=False)
    stores.create_job_logs_path(job_name=job.unique_name, temp=False)
    # Touch the file without leaking a handle — the original used a bare
    # `open(job_logs_path, '+w')` whose file object was never closed.
    with open(job_logs_path, 'w+'):
        pass
    # Should be true, created by the signal
    assert os.path.exists(job_logs_path) is True
    stores_schedule_logs_deletion(persistence=None, subpath=job.subpath)
    assert os.path.exists(job_logs_path) is False
def safe_log_job(job_name, log_lines, temp, append=False):
    """Write ``log_lines`` to the job's log file under a lock.

    Makes one retry after re-creating the logs path when the first write
    attempt raises OSError; a failure on the retry propagates to the caller.
    """
    log_path = stores.get_job_logs_path(job_name=job_name, temp=temp)
    for attempt in (1, 2):
        try:
            stores.create_job_logs_path(job_name=job_name, temp=temp)
            _lock_log(log_path, log_lines, append=append)
            return
        except OSError:
            # First failure: retry once; second failure: re-raise.
            if attempt == 2:
                raise
def create_logs(self, temp):
    """Generate fake log sentences and write them, one per line, to the job's log file."""
    unique_name = self.job.unique_name
    log_path = stores.get_job_logs_path(job_name=unique_name, temp=temp)
    stores.create_job_logs_path(job_name=unique_name, temp=temp)
    fake = Faker()
    self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
    with open(log_path, 'w') as handle:
        handle.writelines(line + '\n' for line in self.logs)
def get_pod_container(self, volume_mounts, persistence_outputs, persistence_data, outputs_refs_jobs=None, outputs_refs_experiments=None, secret_refs=None, configmap_refs=None, env_vars=None, command=None, args=None, resources=None): """Pod job container for task.""" # Env vars preparation env_vars = to_list(env_vars, check_none=True) logs_path = stores.get_job_logs_path(job_name=self.job_name, temp=False) outputs_path = stores.get_job_outputs_path( persistence=persistence_outputs, job_name=self.job_name) env_vars += get_job_env_vars( log_level=self.log_level, persistence_outputs=persistence_outputs, outputs_path=outputs_path, persistence_data=persistence_data, logs_path=logs_path, outputs_refs_jobs=outputs_refs_jobs, outputs_refs_experiments=outputs_refs_experiments) env_vars += [ get_env_var(name=constants.CONFIG_MAP_JOB_INFO_KEY_NAME, value=json.dumps(self.labels)), ] env_vars += get_resources_env_vars(resources=resources) # Env from configmap and secret refs env_from = get_pod_env_from(secret_refs=secret_refs, configmap_refs=configmap_refs) ports = [ client.V1ContainerPort(container_port=port) for port in self.ports ] return client.V1Container(name=self.job_container_name, image=self.job_docker_image, command=command, args=args, ports=ports or None, env=env_vars, env_from=env_from, resources=get_resources(resources), volume_mounts=volume_mounts)
def _get_logs_path(self, persistence_logs='default'):
    """Resolve the persistent (non-temp) logs path for this job.

    NOTE(review): every other ``get_job_logs_path`` call in this module
    passes only ``job_name``/``temp`` — confirm the stores API actually
    accepts a ``persistence`` kwarg here.
    """
    return stores.get_job_logs_path(
        persistence=persistence_logs,
        job_name=self.job_name,
        temp=False)
def get_log_path(instance):
    """Return the persistent (non-temp) log path for the given job instance."""
    job_name = instance.unique_name
    return stores.get_job_logs_path(job_name=job_name, temp=False)