def get_pod_container(self, volume_mounts, env_vars=None, command=None, args=None, resources=None):
    """Build the Kubernetes container spec (V1Container) for this job's pod.

    Combines any caller-supplied env vars with the job's standard env
    (paths, log level), the serialized job labels, and — when resources
    are requested — the resource env vars.
    """
    container_env = get_list(env_vars)
    container_env += get_job_env_vars(
        log_level=self.log_level,
        outputs_path=get_job_outputs_path(job_name=self.job_name),
        logs_path=get_job_logs_path(job_name=self.job_name),
        data_path=get_job_data_path(job_name=self.job_name),
        project_data_path=get_project_data_path(project_name=self.project_name))
    # Expose the job labels to the container as a single JSON-encoded var.
    container_env.append(
        get_env_var(name=constants.CONFIG_MAP_JOB_INFO_KEY_NAME,
                    value=json.dumps(self.labels)))
    if resources:
        container_env += get_resources_env_vars(resources=resources)
    container_ports = [client.V1ContainerPort(container_port=p) for p in self.ports]
    return client.V1Container(
        name=self.job_container_name,
        image=self.job_docker_image,
        command=command,
        args=args,
        ports=container_ports,
        env=container_env,
        resources=get_resources(resources),
        volume_mounts=volume_mounts)
def safe_log_job(job_name, log_lines):
    """Write *log_lines* to the job's log file, creating the path on failure.

    If the first write fails because the logs path is missing (or another
    OS-level error), the path is (re)created and the write retried once.
    """
    log_path = get_job_logs_path(job_name)
    try:
        _lock_log(log_path, log_lines)
    # FileNotFoundError is a subclass of OSError, so catching OSError alone
    # is equivalent to the previous (FileNotFoundError, OSError) tuple.
    except OSError:
        create_job_logs_path(job_name=job_name)
        # Retry
        _lock_log(log_path, log_lines)
def test_build_job_logs_path_creation_deletion(self):
    """Logs file can be created for a build job and is removed on delete."""
    job = BuildJobFactory()
    job_logs_path = get_job_logs_path(job.unique_name)
    create_job_logs_path(job.unique_name)
    # Fix: the original called open(..., '+w') and never closed the handle,
    # leaking a file descriptor; use a context manager instead.
    with open(job_logs_path, 'w+'):
        pass
    # Should be true, created by the signal
    assert os.path.exists(job_logs_path) is True
    delete_job_logs(job.unique_name)
    assert os.path.exists(job_logs_path) is False
def events_handle_logs_build_job(job_uuid, job_name, log_line):
    """Persist one log line to the build job's log file via a file handler.

    Silently returns when the job no longer exists or the log file cannot
    be opened (best-effort logging, matching the original behavior).
    """
    # Must persist resources if logs according to the config
    if not BuildJob.objects.filter(uuid=job_uuid).exists():
        return
    _logger.debug('handling log event for %s', job_name)
    xp_logger = logging.getLogger(job_name)
    log_path = get_job_logs_path(job_name)
    try:
        log_handler = logging.FileHandler(log_path)
    except OSError:
        # TODO: retry instead?
        return
    try:
        log_formatter = logging.Formatter(
            '%(asctime)s %(levelname)s %(message)s')
        log_handler.setFormatter(log_formatter)
        xp_logger.addHandler(log_handler)
        xp_logger.setLevel(logging.INFO)
        xp_logger.info(log_line)
    finally:
        # Fix: the original replaced xp_logger.handlers without closing the
        # FileHandler, leaking a file descriptor on every call (and leaving
        # the handler attached if an error occurred mid-way).
        xp_logger.removeHandler(log_handler)
        log_handler.close()
def setUp(self):
    """Create a build job with a populated log file and record its logs URL."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    job = BuildJobFactory(project=project)
    self.url = '/{}/{}/{}/builds/{}/logs'.format(
        API_V1, project.user.username, project.name, job.id)
    log_path = get_job_logs_path(job.unique_name)
    create_job_logs_path(job_name=job.unique_name)
    fake = Faker()
    # One fake sentence per expected log line.
    self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
    with open(log_path, 'w') as log_file:
        # Newline-terminated lines, identical on disk to writing each
        # line followed by '\n'.
        log_file.write('\n'.join(self.logs))
        log_file.write('\n')
def get(self, request, *args, **kwargs):
    """Stream the job's log file back to the client as an attachment.

    Returns a 404 response when the log file does not exist.
    """
    job = self.get_job()
    # Fix: audit the job fetched above rather than `self.job` — the method
    # never sets `self.job`, so the original likely raised AttributeError
    # (or recorded the wrong instance) depending on the view's state.
    auditor.record(event_type=BUILD_JOB_LOGS_VIEWED,
                   instance=job,
                   actor_id=request.user.id)
    log_path = get_job_logs_path(job.unique_name)
    filename = os.path.basename(log_path)
    chunk_size = 8192
    try:
        wrapped_file = FileWrapper(open(log_path, 'rb'), chunk_size)
        response = StreamingHttpResponse(
            wrapped_file,
            content_type=mimetypes.guess_type(log_path)[0])
        response['Content-Length'] = os.path.getsize(log_path)
        response['Content-Disposition'] = "attachment; filename={}".format(filename)
        return response
    except FileNotFoundError:
        _logger.warning('Log file not found: log_path=%s', log_path)
        return Response(status=status.HTTP_404_NOT_FOUND,
                        data='Log file not found: log_path={}'.format(log_path))
def get_pod_container(self, volume_mounts, persistence_outputs, persistence_data,
                      outputs_refs_jobs=None, outputs_refs_experiments=None,
                      env_vars=None, command=None, args=None, resources=None):
    """Build the Kubernetes container spec (V1Container) for this job's pod.

    Merges caller-supplied env vars with the job's standard env (paths,
    refs, log level), the JSON-encoded job labels, and resource env vars.
    """
    env = get_list(env_vars)
    env += get_job_env_vars(
        log_level=self.log_level,
        outputs_path=get_job_outputs_path(
            persistence_outputs=persistence_outputs,
            job_name=self.job_name),
        data_paths=get_data_paths(persistence_data),
        logs_path=get_job_logs_path(job_name=self.job_name),
        outputs_refs_jobs=outputs_refs_jobs,
        outputs_refs_experiments=outputs_refs_experiments)
    # Expose the job labels to the container as a single JSON-encoded var.
    env.append(get_env_var(name=constants.CONFIG_MAP_JOB_INFO_KEY_NAME,
                           value=json.dumps(self.labels)))
    env += get_resources_env_vars(resources=resources)
    container_ports = [client.V1ContainerPort(container_port=p)
                       for p in self.ports]
    return client.V1Container(
        name=self.job_container_name,
        image=self.job_docker_image,
        command=command,
        args=args,
        ports=container_ports or None,
        env=env,
        resources=get_resources(resources),
        volume_mounts=volume_mounts)
def get_log_path(instance):
    """Resolve the log file path for the given job instance."""
    unique_name = instance.unique_name
    return get_job_logs_path(unique_name)
def get_log_path(instance):
    """Return the logs path derived from the instance's unique name."""
    name = instance.unique_name
    return get_job_logs_path(name)