Example 1
 def test_iterator(self):
     start = pdata = Pdata('blob')
     for _ in range(5):
         pdata.next = Pdata('bla')
         pdata = pdata.next
     iterator = PdataStreamIterator(start, size=19, streamsize=6)
     expected = ['blobbl', 'ablabl', 'ablabl', 'a']
     self.assertEqual(list(iterator), expected)
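
The fixture above chains 'blob' plus five 'bla' nodes (19 bytes in total), and streamsize=6 makes the iterator re-slice those uneven chunks into 6-byte reads. A quick standalone check of that arithmetic, using the same literals as the test (illustration only):

    # Joining the iterator's expected output reproduces the original chain.
    expected = ['blobbl', 'ablabl', 'ablabl', 'a']
    assert ''.join(expected) == 'blob' + 'bla' * 5  # 19 bytes in total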
Example 2
 def _makeFile(self, id='test', title='', file='', chunks=()):
     from OFS.Image import File
     from OFS.Image import Pdata
     fileobj = File(id, title, file)
     jar = fileobj._p_jar = DummyConnection()
     if chunks:
         chunks = list(chunks)
         chunks.reverse()
         head = None
         for chunk in chunks:  # build Pdata chain
             pdata = Pdata(chunk)
             pdata.next = head
             head = pdata
         fileobj.data = head
     return fileobj
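
The helper builds the chain tail-first, so every node's next pointer is assigned exactly once. Reading the data back relies on OFS.Image.Pdata implementing __str__ as a walk over the next links; a minimal round-trip sketch on top of the helper above (hypothetical usage, not part of the original test):

    # str() on the chain head reassembles the chunks in order.
    fileobj = self._makeFile(chunks=('one', 'two', 'three'))
    self.assertEqual(str(fileobj.data), 'onetwothree')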
Example 3
    def _getConversionFromProxyServer(self, format):
        """Communicates with the server to convert a file."""
        if not self.hasBaseData():
            # XXX please pass a meaningful description of error as argument
            raise NotConvertedError()
        if format == 'text-content':
            # Extract text from the ODF file
            cs = cStringIO.StringIO()
            cs.write(str(self.getBaseData()))
            z = zipfile.ZipFile(cs)
            s = z.read('content.xml')
            s = self.rx_strip.sub(" ", s)  # strip xml
            s = self.rx_compr.sub(" ", s)  # compress multiple spaces
            cs.close()
            z.close()
            return 'text/plain', s
        server_proxy = OOoServerProxy(self)
        orig_format = self.getBaseContentType()
        generate_result = server_proxy.run_generate(
            self.getId(), enc(str(self.getBaseData())), None, format,
            orig_format)
        try:
            response_code, response_dict, response_message = generate_result
        except ValueError:
            # This is for backward compatibility with older oood versions
            # returning only response_dict.
            response_dict = generate_result

        # XXX: handle possible OOOd server failure
        return response_dict['mime'], Pdata(dec(response_dict['data']))
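
The 'text-content' branch assumes two regexes defined elsewhere on the class; a plausible sketch of them (these exact patterns are an assumption for illustration, not the class's actual definitions):

    import re
    rx_strip = re.compile(r'<[^>]*?>', re.DOTALL | re.MULTILINE)  # drop XML tags
    rx_compr = re.compile(r'\s+')  # collapse runs of whitespace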
Example 4
    def test_exif_upload(self):
        atct = self._ATCT
        atct._image_exif = None

        # string upload
        atct.setImage(TEST_JPEG)
        self.assertTrue(len(atct.getEXIF()), atct.getEXIF())
        atct._image_exif = None

        # file upload
        atct.setImage(TEST_JPEG_FILE)
        self.assertTrue(len(atct.getEXIF()), atct.getEXIF())
        atct._image_exif = None

        # Pdata upload
        from OFS.Image import Pdata
        pd = Pdata(TEST_JPEG)
        atct.setImage(pd)
        self.assertTrue(len(atct.getEXIF()), atct.getEXIF())
        atct._image_exif = None

        # ofs image upload
        ofs = atct.getImage()
        atct.setImage(ofs)
        self.assertTrue(len(atct.getEXIF()), atct.getEXIF())
        atct._image_exif = None
Example 5
    def test_readDataFile_Pdata(self):

        from OFS.Image import Pdata

        SNAPSHOT_ID = 'readDataFile_Pdata'
        FILENAME = 'pdata.txt'

        site, tool, ctx = self._makeOne(SNAPSHOT_ID)
        self._makeFile(tool, SNAPSHOT_ID, FILENAME, Pdata(printable_bytes))

        self.assertEqual(ctx.readDataFile(FILENAME), printable_bytes)
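
printable_bytes is a module-level fixture defined elsewhere in the test module; to run this snippet standalone it could be approximated with the printable ASCII characters (an assumption, not the original definition):

    # Assumed stand-in for the test module's fixture.
    import string
    printable_bytes = string.printable.encode('utf-8')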
Example 6
    def afterSetUp(self):
        self.tool = getattr(self.portal, JSTOOLNAME)
        self.tool.clearResources()
        self.toolpath = '/' + self.tool.absolute_url(1)

        self.setRoles(['Manager'])
        self.skinstool = getattr(self.portal, 'portal_skins')
        self.skinstool.custom.manage_addFile(
            id='hello_world.js',
            content_type='application/javascript',
            file=Pdata('alert("Hello world!");'))
        self.tool.registerScript('hello_world.js')
        self.setRoles(['Member'])
Example 7
 def _populateConversionCacheWithHTML(self, zip_file=None):
     """Extract content from the ODF zip file and populate the document.

     The optional zip_file parameter avoids converting the content twice.
     """
     if zip_file is None:
         format_list = [
             x for x in self.getTargetFormatList()
             if x.startswith('html') or x.endswith('html')
         ]
         format = format_list[0]
         mime, data = self._getConversionFromProxyServer(format)
         archive_file = cStringIO.StringIO()
         archive_file.write(str(data))
         zip_file = zipfile.ZipFile(archive_file)
         must_close = 1
     else:
         must_close = 0
     for f in zip_file.infolist():
         filename = f.filename
         document = self.get(filename, None)
         if document is not None:
             self.manage_delObjects(
                 [filename])  # For compatibility with old implementation
         if filename.endswith('html'):
             mime = 'text/html'
             # call portal_transforms to strip HTML in safe mode
             portal = self.getPortalObject()
             transform_tool = getToolByName(portal, 'portal_transforms')
             data = transform_tool.convertToData('text/x-html-safe',
                                                 zip_file.read(filename),
                                                 object=self,
                                                 context=self,
                                                 mimetype=mime)
         else:
             mime = guess_content_type(filename)[0]
             data = Pdata(zip_file.read(filename))
         self.setConversion(data,
                            mime=mime,
                            format=EMBEDDED_FORMAT,
                            filename=filename)
     if must_close:
         zip_file.close()
         archive_file.close()
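
For the non-HTML branch, guess_content_type comes from zope.contenttype and returns a (mimetype, encoding) pair, which is why the code keeps only index [0]; a small illustration:

    from zope.contenttype import guess_content_type
    mime = guess_content_type('styles.xml')[0]  # e.g. 'text/xml'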
Example 8
  def _read_data(self, persistent_object, value):
    """Copied from OFS.Image._read_data
    with some modernisation.

    Always returns a Pdata and its size.

      - persistent_object: Object known by the storage, used to access it.
      - value: value to wrap into Pdata.
    """

    n = self._max_len

    if isinstance(value, (str, unicode)):
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        size = len(value)
        if size < n:
            return Pdata(value), size
        # Big string: cut it into smaller chunks
        value = StringIO(value)

    if isinstance(value, FileUpload) and not value:
        raise ValueError('File not specified')

    if isinstance(value, Pdata):
        size = self._read_size_from_pdata(value)
        return value, size

    # Clear md5sum to force refreshing
    self.md5sum = None

    seek = value.seek
    read = value.read

    seek(0, 2)
    size = end = value.tell()

    if size <= 2 * n:
        seek(0)
        return Pdata(read(size)), size

    # Make sure we have an _p_jar, even if we are a new object, by
    # doing a sub-transaction commit.
    transaction.savepoint(optimistic=True)

    if persistent_object._p_jar is None:
        # Ugh
        seek(0)
        return Pdata(read(size)), size

    # Now we're going to build a linked list from back
    # to front to minimize the number of database updates
    # and to allow us to get things out of memory as soon as
    # possible.
    next = None
    while end > 0:
        pos = end - n
        if pos < n:
            pos = 0  # we always want at least n bytes
        seek(pos)

        # Create the object and assign it a next pointer
        # in the same transaction, so that there is only
        # a single database update for it.
        data = Pdata(read(end - pos))
        persistent_object._p_jar.add(data)
        data.next = next

        # Save the object so that we can release its memory.
        transaction.savepoint(optimistic=True)
        data._p_deactivate()
        # The object should be assigned an oid and be a ghost.
        assert data._p_oid is not None
        assert data._p_state == -1

        next = data
        end = pos

    return next, size
Example 9
    def _read_data(self, persistent_object, value):
        """Copied from OFS.Image._read_data
    with some modernisation.

    Returns always a Pdata and its size

      - persistent_object: Object known by storage to access it.
      - value: value to wrapp into Pdata
    """

        n = self._max_len

        if isinstance(value, (str, unicode)):
            if isinstance(value, unicode):
                value = value.encode('utf-8')
            size = len(value)
            if size < n:
                return Pdata(value), size
            # Big string: cut it into smaller chunks
            value = StringIO(value)

        if isinstance(value, FileUpload) and not value:
            raise ValueError('File not specified')

        if isinstance(value, Pdata):
            size = self._read_size_from_pdata(value)
            return value, size

        # Clear md5sum to force refreshing
        self.md5sum = None

        seek = value.seek
        read = value.read

        seek(0, 2)
        size = end = value.tell()

        if size <= 2 * n:
            seek(0)
            return Pdata(read(size)), size

        # Make sure we have an _p_jar, even if we are a new object, by
        # doing a sub-transaction commit.
        transaction.savepoint(optimistic=True)

        if persistent_object._p_jar is None:
            # Ugh
            seek(0)
            return Pdata(read(size)), size

        # Now we're going to build a linked list from back
        # to front to minimize the number of database updates
        # and to allow us to get things out of memory as soon as
        # possible.
        next_ = None
        while end > 0:
            pos = end - n
            if pos < n:
                pos = 0  # we always want at least n bytes
            seek(pos)

            # Create the object and assign it a next pointer
            # in the same transaction, so that there is only
            # a single database update for it.
            data = Pdata(read(end - pos))
            persistent_object._p_jar.add(data)
            data.next = next_

            # Save the object so that we can release its memory.
            transaction.savepoint(optimistic=True)
            data._p_deactivate()
            # The object should be assigned an oid and be a ghost.
            assert data._p_oid is not None
            assert data._p_state == -1

            next_ = data
            end = pos

        return next_, size
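
Both versions delegate to a _read_size_from_pdata helper that is not shown here. Assuming stock OFS.Image.Pdata semantics (each node carries a data chunk and a next link), a minimal sketch of such a helper:

    def _read_size_from_pdata(self, pdata):
        """Sum chunk lengths along the Pdata chain (illustrative sketch)."""
        size = 0
        while pdata is not None:
            size += len(pdata.data)  # length of this chunk only
            pdata = pdata.next
        return size

Walking the links keeps memory flat; stock Pdata.__len__ stringifies the entire chain at once before measuring it.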
Example 10
 def test_pdata_storable(self):
     pdata = Pdata(getFile('image.gif').read())
     fi = NamedBlobImage(pdata, filename=u'image.gif')
     self.assertEqual(303, fi.getSize())
Example 11
def _read_data(self, file):
    import transaction
    from cStringIO import StringIO
    from OFS.Image import Pdata
    from ZODB.interfaces import IBlobStorage
    from ZPublisher.HTTPRequest import FileUpload

    n = 1 << 16

    # Make sure we have an _p_jar, even if we are a new object, by
    # doing a sub-transaction commit.
    transaction.savepoint(optimistic=True)

    if isinstance(file, str):
        size = len(file)
        if size < n or IBlobStorage.providedBy(self._p_jar.db().storage):
            # For blobs we don't have to cut anything up, nor when size < n.
            return file, size
        # Big string: cut it into smaller chunks
        file = StringIO(file)

    if isinstance(file, FileUpload) and not file:
        raise ValueError('File not specified')

    if hasattr(file, '__class__') and file.__class__ is Pdata:
        size = len(file)
        return file, size

    seek = file.seek
    read = file.read

    seek(0, 2)
    size = end = file.tell()

    if IBlobStorage.providedBy(self._p_jar.db().storage):
        seek(0)
        return read(size), size

    if size <= 2 * n:
        seek(0)
        if size < n:
            return read(size), size
        return Pdata(read(size)), size

    if self._p_jar is None:
        # Ugh
        seek(0)
        return Pdata(read(size)), size

    # Now we're going to build a linked list from back
    # to front to minimize the number of database updates
    # and to allow us to get things out of memory as soon as
    # possible.
    next = None
    while end > 0:
        pos = end - n
        if pos < n:
            pos = 0  # we always want at least n bytes
        seek(pos)

        # Create the object and assign it a next pointer
        # in the same transaction, so that there is only
        # a single database update for it.
        data = Pdata(read(end - pos))
        self._p_jar.add(data)
        data.next = next

        # Save the object so that we can release its memory.
        transaction.savepoint(optimistic=True)
        data._p_deactivate()
        # The object should be assigned an oid and be a ghost.
        assert data._p_oid is not None
        assert data._p_state == -1

        next = data
        end = pos

    return next, size
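
The notable difference from the previous two versions is the blob fast path: when the underlying storage supports blobs, the raw string is returned untouched and no Pdata chain is built. The check it performs, shown standalone (hypothetical helper for illustration; IBlobStorage is the real interface from ZODB.interfaces):

    from ZODB.interfaces import IBlobStorage

    def storage_supports_blobs(obj):
        # Blob-capable storages can store large values whole,
        # so chunking into Pdata is unnecessary.
        return IBlobStorage.providedBy(obj._p_jar.db().storage)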