def setUp(self):
    """Create a project with one job and precompute the outputs download URL."""
    super().setUp()
    user = self.auth_client.user
    self.project = ProjectFactory(user=user)
    self.job = self.factory_class(project=self.project)
    # Endpoint under test: the job outputs download route.
    self.download_url = '/{}/{}/{}/jobs/{}/outputs/download'.format(
        API_V1,
        self.project.user.username,
        self.project.name,
        self.job.id)
    self.job_outputs_path = stores.get_job_outputs_path(
        persistence=self.job.persistence_outputs,
        job_name=self.job.unique_name)
    self.url = self.download_url
def get(self, request, *args, **kwargs):
    """Record the download audit event and redirect to the archived outputs."""
    user = self.request.user
    auditor.record(event_type=JOB_OUTPUTS_DOWNLOADED,
                   instance=self.job,
                   actor_id=user.id,
                   actor_name=user.username)
    outputs_path = stores.get_job_outputs_path(
        persistence=self.job.persistence_outputs,
        job_name=self.job.unique_name)
    # Archive the whole outputs tree, then hand the client a redirect to it.
    archived_path, archive_name = archive_outputs(
        outputs_path=outputs_path,
        namepath=self.job.unique_name,
        persistence_outputs=self.job.persistence_outputs)
    redirect_path = '{}/{}'.format(archived_path, archive_name)
    return self.redirect(path=redirect_path)
def get_init_container(self, persistence_outputs):
    """Pod init container for setting outputs path."""
    outputs_path = stores.get_job_outputs_path(
        persistence=persistence_outputs, job_name=self.job_name)
    _, volume_mount = get_pod_outputs_volume(
        persistence_outputs=persistence_outputs)
    # The init step only has to create the outputs directory.
    init_args = to_list(
        get_output_args(command=InitCommands.CREATE,
                        outputs_path=outputs_path))
    return client.V1Container(
        name=self.init_container_name,
        image=self.init_docker_image,
        command=["/bin/sh", "-c"],
        args=init_args,
        volume_mounts=volume_mount)
def get_pod_container(self,
                      volume_mounts,
                      persistence_outputs,
                      persistence_data,
                      outputs_refs_jobs=None,
                      outputs_refs_experiments=None,
                      secret_refs=None,
                      configmap_refs=None,
                      env_vars=None,
                      command=None,
                      args=None,
                      resources=None):
    """Pod job container for task."""
    # Assemble the container environment: caller-supplied vars first,
    # then job env vars, the job-info configmap entry, and resource vars.
    container_env = to_list(env_vars, check_none=True)
    outputs_path = stores.get_job_outputs_path(
        persistence=persistence_outputs, job_name=self.job_name)
    logs_path = stores.get_job_logs_path(job_name=self.job_name, temp=False)
    container_env += get_job_env_vars(
        log_level=self.log_level,
        persistence_outputs=persistence_outputs,
        outputs_path=outputs_path,
        persistence_data=persistence_data,
        logs_path=logs_path,
        outputs_refs_jobs=outputs_refs_jobs,
        outputs_refs_experiments=outputs_refs_experiments)
    container_env.append(
        get_env_var(name=constants.CONFIG_MAP_JOB_INFO_KEY_NAME,
                    value=json.dumps(self.labels)))
    container_env += get_resources_env_vars(resources=resources)
    # Env from configmap and secret refs.
    env_from = get_pod_env_from(secret_refs=secret_refs,
                                configmap_refs=configmap_refs)
    ports = [client.V1ContainerPort(container_port=port)
             for port in self.ports]
    return client.V1Container(
        name=self.job_container_name,
        image=self.job_docker_image,
        command=command,
        args=args,
        ports=ports or None,
        env=container_env,
        env_from=env_from,
        resources=get_resources(resources),
        volume_mounts=volume_mounts)
def setUp(self):
    """Create a job with an outputs tree and build the files endpoint URL."""
    super().setUp()
    owner = self.auth_client.user
    project = ProjectFactory(user=owner)
    job = JobFactory(project=project)
    self.url = '/{}/{}/{}/jobs/{}/outputs/files'.format(
        API_V1,
        project.user.username,
        project.name,
        job.id)
    # Materialize the outputs directory and populate it with test paths.
    outputs_path = stores.get_job_outputs_path(
        persistence=job.persistence_outputs,
        job_name=job.unique_name)
    stores.create_job_outputs_path(
        persistence=job.persistence_outputs,
        job_name=job.unique_name)
    self.create_paths(path=outputs_path, url=self.url)
def get(self, request, *args, **kwargs):
    """Stream a single file from the job's outputs.

    Expects a `path` query param naming the file relative to the job's
    outputs root. Raises ValidationError when no path is given; returns
    404 when the file cannot be resolved/archived.
    """
    filepath = request.query_params.get('path')
    if not filepath:
        raise ValidationError('Files view expect a path to the file.')
    job_outputs_path = stores.get_job_outputs_path(
        persistence=self.job.persistence_outputs,
        job_name=self.job.unique_name)
    download_filepath = archive_outputs_file(
        persistence_outputs=self.job.persistence_outputs,
        outputs_path=job_outputs_path,
        namepath=self.job.unique_name,
        filepath=filepath)
    if not download_filepath:
        # Bug fix: report the requested path — `download_filepath` is
        # falsy here, so formatting it produced a useless "log_path=None".
        return Response(status=status.HTTP_404_NOT_FOUND,
                        data='Outputs file not found: path={}'.format(filepath))
    return stream_file(file_path=download_filepath, logger=_logger)
def get_init_container(self,
                       init_command,
                       init_args,
                       env_vars,
                       context_mounts,
                       persistence_outputs,
                       persistence_data):
    """Pod init container for setting outputs path."""
    outputs_path = stores.get_job_outputs_path(
        persistence=persistence_outputs, job_name=self.job_name)
    _, volume_mount = get_pod_outputs_volume(
        persistence_outputs=persistence_outputs)
    # Fall back to the default shell command / CREATE args when the
    # caller did not override them.
    if not init_command:
        init_command = ["/bin/sh", "-c"]
    if not init_args:
        init_args = to_list(
            get_output_args(command=InitCommands.CREATE,
                            outputs_path=outputs_path))
    return client.V1Container(
        name=self.init_container_name,
        image=self.init_docker_image,
        image_pull_policy=self.init_docker_image_pull_policy,
        command=init_command,
        args=init_args,
        volume_mounts=volume_mount)
def get(self, request, *args, **kwargs):
    """List the job's outputs tree (optionally under a `path` query param).

    Raises ValidationError when the outputs store cannot be resolved or
    the listing fails; returns the store listing with HTTP 200 otherwise.
    """
    try:
        store_manager = stores.get_outputs_store(
            persistence_outputs=self.job.persistence_outputs)
    except (PolyaxonStoresException, VolumeNotFoundError) as e:
        raise ValidationError(e)
    job_outputs_path = stores.get_job_outputs_path(
        persistence=self.job.persistence_outputs,
        job_name=self.job.unique_name)
    # Narrow the listing to a sub-path when one is requested.
    if request.query_params.get('path'):
        job_outputs_path = os.path.join(job_outputs_path,
                                        request.query_params.get('path'))
    try:
        data = store_manager.ls(job_outputs_path)
    except VolumeNotFoundError:
        raise ValidationError('Store manager could not load the volume requested,'
                              ' to get the outputs data.')
    except Exception:
        # Boundary handler: surface any store failure as a validation error.
        # Bug fix: grammar in the user-facing message ("does not exists").
        raise ValidationError('Experiment outputs path does not exist or bad configuration.')
    return Response(data=data, status=200)
def get_jobs_outputs_spec(self):
    """Build an OutputsRefsSpec per related job, keyed by job id.

    Returns None when there are no related jobs. Each spec carries the
    job's outputs path resolved against its `persistence.outputs` setting.
    """
    import stores

    # `.exists()` is the cheap emptiness check; `.count()` issues a
    # full COUNT(*) query just to compare against zero.
    if not self.jobs.exists():
        return None
    annotation = {
        'persistence_outputs': KeyTransform('outputs', 'persistence')
    }
    query = self.jobs.annotate(**annotation)
    job_data = query.values_list('id',
                                 'project__user__username',
                                 'project__name',
                                 'persistence_outputs')
    outputs_spec_data = {}
    for job_id, username, project, persistence_outputs in job_data:
        project_name = PROJECT_UNIQUE_NAME_FORMAT.format(user=username,
                                                         project=project)
        job_name = JOB_UNIQUE_NAME_FORMAT.format(project_name=project_name,
                                                 id=job_id)
        outputs_path = stores.get_job_outputs_path(
            persistence=persistence_outputs, job_name=job_name)
        outputs_spec_data[job_id] = OutputsRefsSpec(
            path=outputs_path, persistence=persistence_outputs)
    return outputs_spec_data
def get(self, request, *args, **kwargs):
    """Stream a single outputs file as an attachment download.

    Expects a `path` query param naming the file relative to the job's
    outputs root. Raises ValidationError when no path is given; returns
    404 when the file cannot be resolved or has vanished from disk.
    """
    filepath = request.query_params.get('path')
    if not filepath:
        raise ValidationError('Files view expect a path to the file.')
    job_outputs_path = stores.get_job_outputs_path(
        persistence=self.job.persistence_outputs,
        job_name=self.job.unique_name)
    download_filepath = archive_outputs_file(
        persistence_outputs=self.job.persistence_outputs,
        outputs_path=job_outputs_path,
        namepath=self.job.unique_name,
        filepath=filepath)
    if not download_filepath:
        # Bug fix: report the requested path — `download_filepath` is
        # falsy here, so formatting it produced a useless "log_path=None".
        return Response(status=status.HTTP_404_NOT_FOUND,
                        data='Log file not found: log_path={}'.format(filepath))
    filename = os.path.basename(download_filepath)
    chunk_size = 8192
    try:
        wrapped_file = FileWrapper(open(download_filepath, 'rb'), chunk_size)
        response = StreamingHttpResponse(
            wrapped_file,
            content_type=mimetypes.guess_type(download_filepath)[0])
        response['Content-Length'] = os.path.getsize(download_filepath)
        response['Content-Disposition'] = "attachment; filename={}".format(
            filename)
        return response
    except FileNotFoundError:
        # Here `download_filepath` is a real path that disappeared,
        # so it is the right value to report.
        _logger.warning('Log file not found: log_path=%s', download_filepath)
        return Response(status=status.HTTP_404_NOT_FOUND,
                        data='Log file not found: log_path={}'.format(
                            download_filepath))
def _get_outputs_path(self, persistence_outputs):
    """Resolve this job's outputs path on the given persistence backend."""
    return stores.get_job_outputs_path(
        persistence=persistence_outputs,
        job_name=self.job_name)