Exemplo n.º 1
0
    def do_upload(imgchunk, cloudpath):
        """Encode one image chunk, compress it, and upload it to remote
        storage (and to the local cache when the cache is enabled).

        Note: relies on enclosing-scope names (meta, mip, compress, cache,
        remote, local, compress_level, cdn_cache) — closure over the
        surrounding upload routine.
        """
        encoding = meta.encoding(mip)
        encoded = chunks.encode(
            imgchunk, encoding,
            meta.compressed_segmentation_block_size(mip)
        )

        # Remote storage and the local cache may use different
        # compression schemes; normalize both before comparing.
        remote_compress = compression.normalize_encoding(
            should_compress(encoding, compress, cache)
        )
        cache_compress = compression.normalize_encoding(
            should_compress(encoding, compress, cache, iscache=True)
        )

        encoded = compression.compress(encoded, remote_compress)
        # Reuse the remote payload for the cache unless the schemes differ.
        if remote_compress == cache_compress:
            cache_encoded = encoded
        else:
            cache_encoded = compression.compress(encoded, cache_compress)

        remote.put(
            path=cloudpath,
            content=encoded,
            content_type=content_type(encoding),
            compress=remote_compress,
            compression_level=compress_level,
            cache_control=cdn_cache_control(cdn_cache),
            raw=True,
        )

        if cache.enabled:
            local.put(
                path=cloudpath,
                content=cache_encoded,
                content_type=content_type(encoding),
                compress=cache_compress,
                raw=True,
            )
Exemplo n.º 2
0
def test_transcode(dest_encoding):
    """Payloads compressed with assorted source encodings must all
    transcode to the same dest-encoded bytes."""
    from cloudfiles import CloudFiles, compression

    payload = b'hello world'
    source_encodings = [None, "gzip", "br", "zstd"]

    expected = compression.compress(payload, dest_encoding)

    # 200 items cycling through the source encodings; raw=True marks
    # already-compressed content (anything but the None encoding).
    items = [
        {
            "path": str(i),
            "content": compression.compress(payload, enc),
            "raw": enc is not None,
            "compress": enc,
        }
        for i, enc in (
            (j, source_encodings[j % len(source_encodings)])
            for j in range(200)
        )
    ]

    for item in compression.transcode(items, dest_encoding):
        assert item['content'] == expected
Exemplo n.º 3
0
def test_size(s3, protocol, compress, green):
  """Check CloudFiles.size() for compressed, plain, empty, and
  missing objects, including the batched (list) form."""
  from cloudfiles import CloudFiles, exceptions, compression

  cf = CloudFiles(compute_url(protocol, 'size'))

  payload = b'some_string'
  cf.put('info', payload, compress=compress, cache_control='no-cache')
  cf['info2'] = payload                   # setitem path: no compression
  cf.put('zero', b'', compress=None, cache_control='no-cache')

  compressed = compression.compress(payload, compress)

  # Reported size is the stored (possibly compressed) byte count.
  assert cf.size('info') == len(compressed)
  expected = {
    "info": len(compressed),
    "info2": len(payload),
  }
  assert cf.size(['info', 'info2']) == expected
  assert cf.size('nonexistent') is None
  assert cf.size('zero') == 0

  cf.delete(['info', 'info2', 'zero'])