Example #1
    def _make_tempfile(self, filename, content):
        fileobj = TemporaryUploadedFile(
            name=filename + ".tempfile",
            content_type='text/plain',
            size=0,
            charset='utf8',
        )
        fileobj.write(content)
        fileobj.flush()
        return fileobj
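A minimal sketch of how a helper like this might be exercised in a test; the TestCase subclass, the TempfileMixin import path, the file name, and the payload are assumptions for illustration, with _make_tempfile assumed to be defined on the mixin as shown above.

from django.test import TestCase

from myapp.tests.base import TempfileMixin  # hypothetical home of _make_tempfile


class TempfileHelperTests(TempfileMixin, TestCase):
    def test_make_tempfile_roundtrip(self):
        fileobj = self._make_tempfile("report.txt", b"hello world")
        try:
            fileobj.seek(0)
            # The bytes written through the helper should read back unchanged.
            self.assertEqual(fileobj.read(), b"hello world")
        finally:
            # Closing a TemporaryUploadedFile also deletes its backing file on disk.
            fileobj.close()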
Example #3
def upload_file(url, timeout=5):
    """
        Download a file from a URL into a temporary file.

        Example:
            from libs.upload import *
            ...

            try:
                uploaded_file = upload_file('http://host.ru/image.jpg')
            except URLError as e:
                return JsonResponse({
                    'message': str(e.msg),
                }, status=e.code)

            request.user.avatar.save(uploaded_file.name, uploaded_file, save=False)
            uploaded_file.close()

            try:
                request.user.full_clean()
            except ValidationError as e:
                request.user.avatar.delete(save=False)
                return JsonResponse({
                    'message': ', '.join(e.messages),
                }, status=400)
            else:
                request.user.save()
    """
    logger.debug('Uploading %s...', url)
    with contextlib.closing(urlopen(url, timeout=timeout)) as fp:
        headers = fp.info()

        file_name = url.split('/')[-1]
        content_type = headers.get('content-type')
        file_size = headers.get('content-length')
        charset = 'utf-8'

        tmp = TemporaryUploadedFile(file_name, content_type, file_size,
                                    charset, {})

        while True:
            block = fp.read(8 * 1024)
            if not block:
                break
            tmp.write(block)

        logger.debug('Uploaded %s to file %s', url, tmp.file.name)

    tmp.seek(0)
    tmp.flush()
    return tmp
Example #4
    def write(self):
        self.name = uuid.uuid4().hex
        img_tmp = TemporaryUploadedFile(self.name, self.mimetype, self.size,
                                        None)
        img_tmp.write(self._data)
        img_tmp.flush()

        # Use the temporary file size if there is no content-length header in the response.
        # The file size is validated by the converter.
        if self.size == 0:
            img_tmp.size = os.path.getsize(img_tmp.temporary_file_path())

        self._data = None
        return img_tmp
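A hedged sketch of how a caller might consume the temporary file returned above; `fetcher` stands in for whatever object defines write(), and `profile.avatar` is an assumed ImageField, neither of which appears in the original.

# Hypothetical caller: `fetcher` provides the write() method shown above and
# `profile.avatar` is an assumed ImageField on some model instance.
img_tmp = fetcher.write()
try:
    profile.avatar.save(img_tmp.name, img_tmp, save=True)
finally:
    # A TemporaryUploadedFile deletes its underlying file when closed.
    img_tmp.close()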
Example #5
    def test_file_context(self):
        response = None

        with open("threats/test_data/boss.gif", "rb") as boss_reader:
            file_data = boss_reader.read()
            boss_reader.seek(0)
            response = self.client.post(
                '/',
                {
                    'artifact': '{"type": "file.content"}',
                    'file': boss_reader,
                },
                format="multipart",
            )

        upload_file_args = {
            "name": "boss.gif",
            "content_type": "application/octet-stream",
            "size": 29927,
            "charset": None,
        }

        self.assertEqual(response.status_code, 200)

        temp_file = TemporaryUploadedFile(**upload_file_args)
        temp_file.write(file_data)
        temp_file.flush()

        context = SearchContext({"type": "file.content", "value": temp_file})
        context.save()

        self.assertEqual(context.file_data_len, len(file_data))

        loaded_context = SearchContext.load(context.id)
        self.assertEqual(loaded_context.base64_file_data_len,
                         context.base64_file_data_len)
        self.assertEqual(loaded_context.file_data_len, context.file_data_len)

        with open(loaded_context.value.temporary_file_path(),
                  "rb") as temp_file:
            loaded_file_data = temp_file.read()

        for counter in range(0, len(loaded_file_data) // 100):
            begin = counter * 100
            end = begin + 100
            self.assertEqual(file_data[begin:end], loaded_file_data[begin:end])

        self.assertEqual(len(file_data), len(loaded_file_data))
Example #6
    def __setstate__(self, pickle_dictionary):
        if pickle_dictionary["_context"]["type"] == "file.content" and \
                isinstance(pickle_dictionary["_context"]["value"], dict):
            arguments = pickle_dictionary["_context"]["value"]

            # File data info, especially useful for test pickling
            self.base64_file_data_len = len(arguments["content"])
            file_content = base64.b64decode(arguments.pop("content"))

            # File data info, especially useful for test pickling
            self.file_data_len = len(file_content)

            file_object = TemporaryUploadedFile(**arguments)
            file_object.write(file_content)
            file_object.flush()
            pickle_dictionary["_context"]["value"] = file_object

        self.__dict__.update(pickle_dictionary)
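The __setstate__ above expects the pickled value to be a plain dict holding the TemporaryUploadedFile constructor arguments plus base64-encoded content. The original does not show the serializing side; the sketch below is one possible __getstate__ counterpart under that assumption, reusing the same imports (base64, TemporaryUploadedFile) as the example.

    def __getstate__(self):
        # Hypothetical counterpart to __setstate__ above (not from the original):
        # flatten the TemporaryUploadedFile into a picklable dict whose layout
        # matches what __setstate__ unpacks.
        state = self.__dict__.copy()
        context = state.get("_context", {})
        value = context.get("value")
        if context.get("type") == "file.content" and \
                isinstance(value, TemporaryUploadedFile):
            value.seek(0)
            state["_context"] = dict(context)
            state["_context"]["value"] = {
                "name": value.name,
                "content_type": value.content_type,
                "size": value.size,
                "charset": value.charset,
                "content": base64.b64encode(value.read()),
            }
        return state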
Example #7
    def ensure_saved(self, file):
        """This may create a temporary file, which will be deleted when
        it's closed, so always close() it but only when you've finished!"""

        if isinstance(file, InMemoryUploadedFile):
            print("Writing %s to disk (%d bytes)" % (file, file.size))
            tmp = TemporaryUploadedFile(name=file.name,
                                        content_type=file.content_type,
                                        size=file.size, charset=file.charset)
            file.seek(0)
            buf = file.read()
            tmp.write(buf)
            print("Wrote %d bytes" % len(buf))
            tmp.flush()
        else:
            tmp = file

        if isinstance(tmp, TemporaryUploadedFile):
            path = tmp.temporary_file_path()
        else:
            path = tmp.name

        return (tmp, path)
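A short sketch of a caller honouring the docstring's contract above; the handle_upload name, the form field name, and process_on_disk() are assumptions and do not appear in the original.

    def handle_upload(self, request):
        # Hypothetical caller (the field name and process_on_disk are assumptions).
        tmp, path = self.ensure_saved(request.FILES['attachment'])
        try:
            process_on_disk(path)  # anything that needs a real filesystem path
        finally:
            # Per the docstring: close when finished, because closing a
            # TemporaryUploadedFile deletes the file behind `path`.
            tmp.close()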
Example #8
def upload_chunked_file(request, param_name, allow_memory=True):
    """
        Loader for files submitted from forms.
        Supports uploading a file in chunks.

        Returns a wrapper around the file (possibly held in memory) that is deleted once closed.

        If allow_memory = False, small files that Django keeps in memory
        will be forced into temporary files on disk.

        Example:
            from libs.upload import upload_chunked_file, NotLastChunk, TemporaryFileNotFoundError
            ...

            try:
                uploaded_file = upload_chunked_file(request, 'image')
            except TemporaryFileNotFoundError as e:
                return JsonResponse({
                    'message': str(e),
                }, status=400)
            except NotLastChunk:
                return HttpResponse()

            request.user.avatar.save(uploaded_file.name, uploaded_file, save=False)
            uploaded_file.close()

            try:
                request.user.avatar.field.clean(request.user.avatar, request.user)
            except ValidationError as e:
                request.user.avatar.delete(save=False)
                return JsonResponse({
                    'message': ', '.join(e.messages),
                }, status=400)

            request.user.avatar.clean()
            request.user.avatar.save()

    """
    file = request.FILES[param_name]

    chunk_num = int(request.POST.get('chunk', 0))
    chunk_count = int(request.POST.get('chunks', 1))

    if chunk_count == 1:
        # the file arrived in a single chunk
        if not isinstance(file, InMemoryUploadedFile):
            return file
        elif allow_memory:
            return file
        else:
            # force the in-memory file onto disk
            tmp = TemporaryUploadedFile(file.name, file.content_type,
                                        file.size, file.charset,
                                        file.content_type_extra)
            for chunk in file.chunks():
                tmp.write(chunk)
            tmp.seek(0)
            tmp.flush()
            return tmp
    else:
        # plupload sends the name "blob"
        file.name = os.path.basename(request.POST.get('name', file.name))

        # generate a name that can be recovered when the
        # following chunks arrive
        name, ext = os.path.splitext(file.name)
        hashname = '%s.%s' % (request.META.get('REMOTE_ADDR'), name)
        hashname = hashlib.md5(hashname.encode()).hexdigest()
        tempfile_name = '%s.upload%s' % (hashname, ext)
        tempfile_path = os.path.join(tempfile.gettempdir(), tempfile_name)

        if chunk_num > 0:
            if not os.path.exists(tempfile_path):
                raise TemporaryFileNotFoundError(_('Temporary file lost'))

        tmp = open(tempfile_path, 'ab+')
        if chunk_num == 0:
            tmp.seek(0)
            tmp.truncate()
        for chunk in file.chunks():
            tmp.write(chunk)

        if chunk_num < chunk_count - 1:
            tmp.close()
            raise NotLastChunk(chunk_num + 1, chunk_count)

        tmp.seek(0)
        tmp.flush()

        file_info = os.stat(tempfile_path)
        return TempUploadedFile(tmp,
                                name=file.name,
                                content_type=file.content_type,
                                size=file_info.st_size,
                                charset=file.charset,
                                content_type_extra=file.content_type_extra)
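upload_chunked_file() relies on a TempUploadedFile wrapper and two exceptions from the same libs.upload module that are not shown here. The sketch below is a rough reconstruction inferred from how they are used; the exact details are assumptions.

import os

from django.core.files.uploadedfile import TemporaryUploadedFile, UploadedFile


class TemporaryFileNotFoundError(Exception):
    """Raised when the temporary file for a chunked upload has gone missing."""


class NotLastChunk(Exception):
    """Raised when more chunks are still expected."""

    def __init__(self, received, total):
        super().__init__('received chunk %d of %d' % (received, total))
        self.received = received
        self.total = total


class TempUploadedFile(TemporaryUploadedFile):
    """Wrap an already-open temporary file instead of creating a new one."""

    def __init__(self, file, name, content_type, size, charset,
                 content_type_extra=None):
        # Bypass TemporaryUploadedFile.__init__, which would allocate a fresh
        # NamedTemporaryFile; reuse the file assembled from the chunks instead.
        UploadedFile.__init__(self, file, name, content_type, size, charset,
                              content_type_extra)

    def temporary_file_path(self):
        return self.file.name

    def close(self):
        # Match the behaviour described in the docstring: the file is
        # deleted once it is closed.
        path = self.file.name
        super().close()
        if os.path.exists(path):
            os.unlink(path)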
Example #9
    def _get_documents_from_api(self) -> List[Tuple[str, BytesIO]]:
        """Retrieve the documents and their content from the Documenten API."""

        logger.debug("Retrieving documents from Documenten API")

        variables = self.task.get_variables()
        document_urls = check_variable(variables, "documents")

        client_pool = DRCClientPool(variables)
        client_pool.populate_clients(self.task, document_urls)

        documents = []

        current_total_documents_size = 0
        for document_url in document_urls:
            # Getting the appropriate client
            document_client = client_pool.get_client_for(document_url)
            # Retrieving the document
            document_data = document_client.retrieve(
                resource="enkelvoudiginformatieobject",
                url=document_url,
            )

            # Retrieving the content of the document
            # Need to use requests directly instead of `document_client.request()`, since the response is not in JSON format
            response = requests.get(
                document_data["inhoud"],
                headers=document_client.auth_header,
                stream=True,
            )

            # Get the document size in bytes
            document_size = document_data["bestandsomvang"]

            # If the size of the document is above the max size or if all the documents together have already reached
            # the maximum size, write the file content to a temporary file
            if (document_size > settings.MAX_DOCUMENT_SIZE
                    or (current_total_documents_size + document_size) >
                    settings.MAX_TOTAL_DOCUMENT_SIZE):
                # The file is created with rb+ mode by default
                tmp_file_object = TemporaryUploadedFile(
                    name=f"{document_data['titel']}-{get_random_string(length=5)}.tempfile",
                    content_type="application/octet-stream",
                    size=document_size,
                    # charset is a required argument of TemporaryUploadedFile,
                    # but not of the parent class UploadedFile
                    charset=None,
                )
                for chunk in response.iter_content(
                        chunk_size=settings.CHUNK_SIZE):
                    tmp_file_object.write(chunk)
                tmp_file_object.flush()
                doc_tuple = (document_data["titel"], tmp_file_object)
            else:
                doc_tuple = (document_data["titel"], BytesIO(response.content))
                current_total_documents_size += document_size

            response.close()

            documents.append(doc_tuple)

        return documents
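The list returned above mixes BytesIO buffers with TemporaryUploadedFile objects whose on-disk files vanish when closed, so a caller is expected to close everything once done. Below is a hedged sketch of such a consumer; packaging the documents into a zip archive and the archive name are assumptions about how they might be used.

import zipfile

# Hypothetical consumer: collect the documents, package them, then clean up.
documents = self._get_documents_from_api()
try:
    with zipfile.ZipFile("documents.zip", "w") as archive:
        for title, content in documents:
            content.seek(0)
            archive.writestr(title, content.read())
finally:
    for _, content in documents:
        # Closing a TemporaryUploadedFile removes its file on disk;
        # closing a BytesIO just releases the buffer.
        content.close()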