def generate(self):
        """Generate and save the code for this snippet bundle.

        Renders the bundle template, optionally brotli-compresses the
        result, stores it via ``default_storage`` and flags the bundle
        as generated in the cache for ONE_DAY.
        """
        # startpage v5 clients get the Activity Stream template variant.
        template = 'base/fetch_snippets.jinja'
        if self.client.startpage_version == 5:
            template = 'base/fetch_snippets_as.jinja'
        bundle_content = render_to_string(template, {
            'snippet_ids': [snippet.id for snippet in self.snippets],
            'snippets_json': json.dumps([s.to_dict() for s in self.snippets]),
            'client': self.client,
            'locale': self.client.locale,
            'settings': settings,
            'current_firefox_major_version': util.current_firefox_major_version(),
        })

        # render_to_string returns str; compression/storage need bytes.
        if isinstance(bundle_content, str):
            bundle_content = bundle_content.encode('utf-8')

        # NOTE(review): template selection above checks == 5 but compression
        # here checks >= 5 — confirm the mismatch is intentional.
        if (settings.BUNDLE_BROTLI_COMPRESS and self.client.startpage_version >= 5):
            content_file = ContentFile(brotli.compress(bundle_content))
            content_file.content_encoding = 'br'
        else:
            content_file = ContentFile(bundle_content)

        default_storage.save(self.filename, content_file)
        # Cache flag so the bundle is not regenerated for ONE_DAY.
        cache.set(self.cache_key, True, ONE_DAY)
Beispiel #2
0
def brotli_compress(infile, level, decompress):
    """
    Compress one file using the brotli algorithm by google.

    @param infile      path of the (text) file to compress
    @param level       brotli quality level, 0-11
    @param decompress  when truthy, also benchmark decompression
    @return CompressionData(original_size, compressed_size, decompress_time)
    """

    original_size = int(os.stat(infile).st_size)
    # 'rU' mode was removed in Python 3.11; universal newlines are the
    # default for text mode anyway.
    with open(infile, "r") as fdorig:
        origlines = fdorig.read()
    origtext = memoryview(bytearray(origlines, "utf8"))
    # compressedtext = memoryview(zlib.compress(origtext.tobytes(), int(level)))
    compressedtext = memoryview(brotli.compress(origtext.tobytes(), quality=int(level)))
    compressed_size = len(compressedtext)

    decompress_time = None
    if decompress:
        # BUG FIX: the original timed zlib.decompress on brotli-compressed
        # data, which raises zlib.error; decompress with brotli instead.
        # time.clock was removed in Python 3.8; perf_counter replaces it.
        decompress_time = min(timeit.repeat(lambda: brotli.decompress(compressedtext.tobytes()),
                                            number=10,
                                            repeat=3, timer=time.perf_counter))

    cd = CompressionData(original_size, compressed_size, decompress_time)

    return cd
Beispiel #3
0
    def process_response(self, request, response):
        """Brotli-compress *response* in place when the client accepts 'br'.

        Returns the response untouched for streaming, already-encoded,
        non-brotli-accepting clients, or bodies too short to be worth it.
        """
        if (response.streaming or
                response.has_header('Content-Encoding') or
                not self._accepts_brotli_encoding(request) or
                len(response.content) < self.MIN_LEN_RESPONSE_TO_PROCESS):
            # ---------
            # 1) Skip streaming content, GZipMiddleware will compress it
            #    (supported, see https://github.com/google/brotli/issues/191).
            # 2) Skip if the content is already encoded.
            # 3) Skip if client didn't request brotli.
            # 4) Skip if the content is short, compressing isn't worth it
            #    (same logic as Django's GZipMiddleware).
            # ---------
            return response

        # quality=5 trades ratio for speed on the request path.
        compressed_content = brotli.compress(response.content, quality=5)

        # Return the uncompressed content if compression didn't help
        if len(compressed_content) >= len(response.content):
            return response

        response.content = compressed_content
        # Vary on Accept-Encoding so caches store br/identity separately.
        patch_vary_headers(response, ('Accept-Encoding',))
        response['Content-Length'] = str(len(compressed_content))
        response['Content-Encoding'] = 'br'
        return response
Beispiel #4
0
	def setUpClass(cls):
		"""Load the metadata fixture once and build random font/private data."""
		assert os.path.exists(METADATA)
		with open(METADATA, 'rb') as f:
			cls.xml_metadata = f.read()
		# MODE_TEXT suits the XML metadata payload.
		cls.compressed_metadata = brotli.compress(cls.xml_metadata, mode=brotli.MODE_TEXT)
		# make random byte strings; font data must be 4-byte aligned
		cls.fontdata = bytes(bytearray(random.sample(range(0, 256), 80)))
		cls.privData = bytes(bytearray(random.sample(range(0, 256), 20)))
Beispiel #5
0
 def test_http_payload_brotli(self, stream):
     """Parser created with compression='br' inflates brotli payloads."""
     compressed = brotli.compress(b'brotli data')
     out = aiohttp.FlowControlDataQueue(stream)
     p = HttpPayloadParser(
         out, length=len(compressed), compression='br')
     p.feed_data(compressed)
     # The queue buffers (data, size) tuples; join the data parts.
     assert b'brotli data' == b''.join(d for d, _ in out._buffer)
     assert out.is_eof()
def test_roundtrip_compression_with_files(simple_compressed_file):
    """
    Roundtripping data through the compressor works correctly.
    """
    path = simple_compressed_file[0]
    with open(path, 'rb') as source:
        original = source.read()

    recovered = brotli.decompress(brotli.compress(original))
    assert recovered == original
Beispiel #7
0
    def handle(self):
        """Serve one TCP request with a hand-crafted brotli 'HTTP response'.

        NOTE(review): Python 2 syntax (print statements). The response uses
        'Content-Encoding: brotli' where real clients expect 'br', and the
        indented header block is sent verbatim — confirm this is deliberate
        test scaffolding.
        """
        # self.request is the TCP socket connected to the client
        self.data = self.request.recv(1024).strip()
        print "{} wrote:".format(self.client_address[0])
        print self.data
        self.data = """HTTP/1.1 200 ok
                    Server: testbrotli
                    Date: Wed, 23 Sep 2015 05:52:16 GMT
                    Content-Type: text/plain
                    Connection: keep-alive
                    Content-Encoding: brotli

                    """
        self.data += brotli.compress("AAAAAAAA")
        self.request.sendall(self.data)
Beispiel #8
0
    def test_brotli_not_accepted_auto_decode(self):
        """Without an Accept-Encoding allowing br, the record is auto-decoded."""
        import brotli
        content = brotli.compress('ABCDEFG'.encode('utf-8'))

        headers = {'Content-Type': 'application/octet-stream',
                   'Content-Encoding': 'br',
                   'Content-Length': str(len(content))
                  }

        headers, gen, is_rw = self.rewrite_record(headers, content, ts='201701mp_')

        # Decoding strips the encoding headers and archives the original one.
        assert 'Content-Encoding' not in headers
        assert 'Content-Length' not in headers
        assert headers['X-Archive-Orig-Content-Encoding'] == 'br'

        # Body comes back decompressed.
        assert b''.join(gen).decode('utf-8') == 'ABCDEFG'
Beispiel #9
0
    def test_brotli_accepted_no_change(self):
        """When the client accepts br, the record passes through unchanged."""
        import brotli
        content = brotli.compress('ABCDEFG'.encode('utf-8'))

        headers = {'Content-Type': 'application/octet-stream',
                   'Content-Encoding': 'br',
                   'Content-Length': str(len(content))
                  }

        headers, gen, is_rw = self.rewrite_record(headers, content, ts='201701mp_',
                                                  environ={'HTTP_ACCEPT_ENCODING': 'gzip, deflate, br'})

        # Encoding headers survive untouched.
        assert headers['Content-Encoding'] == 'br'
        assert headers['Content-Length'] == str(len(content))

        # Body is still brotli-compressed.
        assert brotli.decompress(b''.join(gen)).decode('utf-8') == 'ABCDEFG'
Beispiel #10
0
	def close(self):
		""" All tags must have been specified. Now write the table data and directory.
		"""
		if len(self.tables) != self.numTables:
			raise TTLibError("wrong number of tables; expected %d, found %d" % (self.numTables, len(self.tables)))

		# sfntVersion distinguishes TrueType from CFF-flavoured OpenType.
		if self.sfntVersion in ("\x00\x01\x00\x00", "true"):
			isTrueType = True
		elif self.sfntVersion == "OTTO":
			isTrueType = False
		else:
			raise TTLibError("Not a TrueType or OpenType font (bad sfntVersion)")

		# The WOFF2 spec no longer requires the glyph offsets to be 4-byte aligned.
		# However, the reference WOFF2 implementation still fails to reconstruct
		# 'unpadded' glyf tables, therefore we need to 'normalise' them.
		# See:
		# https://github.com/khaledhosny/ots/issues/60
		# https://github.com/google/woff2/issues/15
		if isTrueType:
			self._normaliseGlyfAndLoca(padding=4)
		self._setHeadTransformFlag()

		# To pass the legacy OpenType Sanitiser currently included in browsers,
		# we must sort the table directory and data alphabetically by tag.
		# See:
		# https://github.com/google/woff2/pull/3
		# https://lists.w3.org/Archives/Public/public-webfonts-wg/2015Mar/0000.html
		# TODO(user): remove to match spec once browsers are on newer OTS
		self.tables = OrderedDict(sorted(self.tables.items()))

		self.totalSfntSize = self._calcSFNTChecksumsLengthsAndOffsets()

		# The whole transformed table stream is compressed as one brotli blob.
		fontData = self._transformTables()
		compressedFont = brotli.compress(fontData, mode=brotli.MODE_FONT)

		self.totalCompressedSize = len(compressedFont)
		self.length = self._calcTotalSize()
		self.majorVersion, self.minorVersion = self._getVersion()
		self.reserved = 0

		# Directory first, then the compressed font data, padded to 4 bytes.
		directory = self._packTableDirectory()
		self.file.seek(0)
		self.file.write(pad(directory + compressedFont, size=4))
		self._writeFlavorData()
Beispiel #11
0
def brotli(f, *args, **kwargs):
    """Brotli Flask Response Decorator"""
    # NOTE(review): f is invoked immediately with the given args rather than
    # wrapped — this works when called as brotli(view, ...) directly, but a
    # plain @brotli decorator would compress at decoration time. Confirm the
    # intended usage.

    data = f(*args, **kwargs)

    if isinstance(data, Response):
        content = data.data
    else:
        content = data

    deflated_data = _brotli.compress(content)

    # For Response objects, swap the body in place and mark the encoding.
    if isinstance(data, Response):
        data.data = deflated_data
        data.headers['Content-Encoding'] = 'br'
        data.headers['Content-Length'] = str(len(data.data))

        return data

    # Bare payloads are returned as raw compressed bytes.
    return deflated_data
Beispiel #12
0
def getSFNTData(pathOrFile, unsortGlyfLoca=False, glyphBBox="", alt255UInt16=False):
    """Extract per-table checksums and WOFF2-transformed data from a font.

    Returns (tableData, compData, tableOrder, tableChecksums). compData is
    the MODE_FONT brotli compression of all table data, or the raw data if
    compression did not shrink it.
    """
    font = TTFont(pathOrFile)
    tableChecksums = {}
    tableData = {}
    # Only regular 4-character table tags.
    tableOrder = [i for i in sorted(font.keys()) if len(i) == 4]
    if unsortGlyfLoca:
        # Move loca before glyf to exercise the unsorted-directory case.
        assert "loca" in tableOrder
        loca = tableOrder.index("loca")
        glyf = tableOrder.index("glyf")
        tableOrder.insert(glyf, tableOrder.pop(loca))
    for tag in tableOrder:
        tableChecksums[tag] = font.reader.tables[tag].checkSum
        tableData[tag] = transformTable(font, tag, glyphBBox=glyphBBox, alt255UInt16=alt255UInt16)
    # NOTE(review): joining with "" and compressing a str is Python 2
    # behaviour; under Python 3 brotli.compress would require bytes.
    totalData = "".join([tableData[tag][1] for tag in tableOrder])
    compData = brotli.compress(totalData, brotli.MODE_FONT)
    if len(compData) >= len(totalData):
        compData = totalData
    font.close()
    del font
    return tableData, compData, tableOrder, tableChecksums
Beispiel #13
0
def compress(data, level=9, method='gz'):
  """
  Compress *data* with the specified *method*. It can be ``'zip'``,
  ``'gz'`` or ``'bz'``. If the :mod:`brotli` module is installed,
  ``'brt'`` is also an accepted method.

  :raise ValueError: If an invalid *method* was supplied.
  """

  # brotli is handled separately: optional dependency, no level argument.
  if method == 'brt':
    if not brotli:
      raise ImportError('brotli')
    return brotli.compress(data)

  # The remaining methods all share the (data, level) call shape.
  compressors = {
    'gz': gzip.compress,
    'zip': zlib.compress,
    'bz': bz2.compress,
  }
  if method not in compressors:
    raise ValueError('invalid method: {0!r}'.format(method))
  return compressors[method](data, level)
Beispiel #14
0
def main():
    """Testing 'brotlipy' package"""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-v",
        "--verbosity",
        dest="verbosity",
        action="count",
        default=0,
        help="set verbosity level",
    )
    args = parser.parse_args()

    # -v => INFO, -vv (or more) => DEBUG, default => ERROR only.
    if args.verbosity == 1:
        logging.basicConfig(level=logging.INFO)
    elif args.verbosity > 1:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.ERROR)

    # Smoke test: a compress/decompress round trip must not raise.
    data = brotli.compress(b"test")
    data = brotli.decompress(data)
Beispiel #15
0
 def post_build(self, pkt, pay):
     """Compress the HTTP payload according to the message's encodings.

     Returns header bytes plus the (possibly compressed) payload; a no-op
     when auto_compression is disabled in scapy's contrib config.
     """
     if not conf.contribs["http"]["auto_compression"]:
         return pkt + pay
     encodings = self._get_encodings()
     # Compress
     if "deflate" in encodings:
         import zlib
         pay = zlib.compress(pay)
     elif "gzip" in encodings:
         pay = gzip_compress(pay)
     elif "compress" in encodings:
         import lzw
         pay = lzw.compress(pay)
     elif "br" in encodings:
         # brotli is optional; leave the payload raw when it is missing.
         if _is_brotli_available:
             pay = brotli.compress(pay)
         else:
             log_loading.info(
                 "Can't import brotli. brotli compression will "
                 "be ignored !"
             )
     return pkt + pay
Beispiel #16
0
    async def get_many(self, request):
        """Return a paginated JSON listing, caching the brotli-compressed body in Redis."""
        service = self.di()
        page = request.query.getone('page', None)
        elements = request.query.getone('elements', None)

        # Full URL (including query string) is the cache key.
        url = str(request.url)

        cached = await redis_pool.get(url)

        if cached:
            decompressed_bytes = brotli.decompress(cached)
            # NOTE(review): body= receives a decoded str on the cached path
            # but a str `data` on the miss path too — confirm aiohttp's
            # Response accepts str bodies here.
            response = web.Response(body=decompressed_bytes.decode('utf-8'),
                                    content_type='application/json')
            response.enable_compression()
            return response
        data = json.dumps({'data': await service.all(page, elements)})

        # Store compressed to keep the Redis footprint small; expire in 24h.
        compressed_data = brotli.compress(data.encode('utf-8'))
        # await redis_pool.set(url, codecs.encode(data.encode('utf-8'), 'gzip'), expire=86400)
        await redis_pool.set(url, compressed_data, expire=86400)
        response = web.Response(body=data, content_type='application/json')
        response.enable_compression()
        return response
Beispiel #17
0
	def _calcFlavorDataOffsetsAndSize(self, start):
		"""Calculate offsets and lengths for any meta- and/or private data."""
		offset = start
		data = self.flavorData
		if data.metaData:
			self.metaOrigLength = len(data.metaData)
			self.metaOffset = offset
			# Metadata is stored brotli-compressed (text mode) per WOFF2.
			self.compressedMetaData = brotli.compress(
				data.metaData, mode=brotli.MODE_TEXT)
			self.metaLength = len(self.compressedMetaData)
			offset += self.metaLength
		else:
			self.metaOffset = self.metaLength = self.metaOrigLength = 0
			self.compressedMetaData = b""
		if data.privData:
			# make sure private data is padded to 4-byte boundary
			offset = (offset + 3) & ~3
			self.privOffset = offset
			self.privLength = len(data.privData)
			offset += self.privLength
		else:
			self.privOffset = self.privLength = 0
		return offset
Beispiel #18
0
    def generate(self):
        """Generate and save the code for this snippet bundle."""
        # Generate the new AS Router bundle format
        data = [snippet.render() for snippet in self.snippets]
        bundle_content = json.dumps({
            'messages': data,
            'metadata': {
                # Timestamp lets consumers tell bundle versions apart.
                'generated_at': datetime.utcnow().isoformat(),
                'number_of_snippets': len(data),
            }
        })

        # json.dumps returns str; compression/storage need bytes.
        if isinstance(bundle_content, str):
            bundle_content = bundle_content.encode('utf-8')

        if settings.BUNDLE_BROTLI_COMPRESS:
            content_file = ContentFile(brotli.compress(bundle_content))
            content_file.content_encoding = 'br'
        else:
            content_file = ContentFile(bundle_content)

        default_storage.save(self.filename, content_file)
        # Remember for ONE_DAY that this bundle is already generated.
        cache.set(self.cache_key, True, ONE_DAY)
    def generate(self):
        """Generate and save the code for this snippet bundle."""
        # Generate the new AS Router bundle format
        data = [snippet.render() for snippet in self.snippets]
        bundle_content = json.dumps({
            'messages': data,
            'metadata': {
                # Timestamp lets consumers tell bundle versions apart.
                'generated_at': datetime.utcnow().isoformat(),
                'number_of_snippets': len(data),
            }
        })

        # json.dumps returns str; compression/storage need bytes.
        if isinstance(bundle_content, str):
            bundle_content = bundle_content.encode('utf-8')

        if settings.BUNDLE_BROTLI_COMPRESS:
            content_file = ContentFile(brotli.compress(bundle_content))
            content_file.content_encoding = 'br'
        else:
            content_file = ContentFile(bundle_content)

        default_storage.save(self.filename, content_file)
        # Remember for ONE_DAY that this bundle is already generated.
        cache.set(self.cache_key, True, ONE_DAY)
Beispiel #20
0
 def _calcFlavorDataOffsetsAndSize(self, start):
     """Calculate offsets and lengths for any meta- and/or private data."""
     offset = start
     data = self.flavorData
     if data.metaData:
         self.metaOrigLength = len(data.metaData)
         self.metaOffset = offset
         # Metadata is stored brotli-compressed (text mode) per WOFF2.
         self.compressedMetaData = brotli.compress(data.metaData,
                                                   mode=brotli.MODE_TEXT)
         self.metaLength = len(self.compressedMetaData)
         offset += self.metaLength
     else:
         self.metaOffset = self.metaLength = self.metaOrigLength = 0
         self.compressedMetaData = b""
     if data.privData:
         # make sure private data is padded to 4-byte boundary
         offset = (offset + 3) & ~3
         self.privOffset = offset
         self.privLength = len(data.privData)
         offset += self.privLength
     else:
         self.privOffset = self.privLength = 0
     return offset
Beispiel #21
0
 def encode_content_body(self, text, encoding):
     """Encode *text* with the given HTTP content-encoding and return the bytes.

     Best-effort: unknown encodings and brotli failures are logged and the
     input is returned unmodified rather than raising.
     """
     logger.dbg('Encoding content to {}'.format(encoding))
     data = text
     if encoding == 'identity':
         pass
     elif encoding in ('gzip', 'x-gzip'):
         _io = BytesIO()
         with gzip.GzipFile(fileobj=_io, mode='wb') as f:
             f.write(text)
         data = _io.getvalue()
     elif encoding == 'deflate':
         data = zlib.compress(text)
     elif encoding == 'br':
         # Brotli algorithm
         try:
             data = brotli.compress(text)
         except Exception as e:
             #raise Exception('Could not compress Brotli stream: "{}"'.format(str(e)))
             logger.err('Could not compress Brotli stream: "{}"'.format(str(e)))
     else:
         #raise Exception("Unknown Content-Encoding: %s" % encoding)
         logger.err('Unknown Content-Encoding: "{}"'.format(encoding))
     return data
Beispiel #22
0
def main(args):
    """CLI entry point: brotli-(de)compress from infile/stdin to outfile/stdout."""

    options = parse_options(args)

    # Read from the input file, or from binary stdin when piped.
    if options.infile:
        if not os.path.isfile(options.infile):
            print('file "%s" not found' % options.infile, file=sys.stderr)
            sys.exit(1)
        with open(options.infile, "rb") as infile:
            data = infile.read()
    else:
        if sys.stdin.isatty():
            # interactive console, just quit
            usage()
        infile = get_binary_stdio('stdin')
        data = infile.read()

    # Write to the output file (refusing to clobber without --force) or stdout.
    if options.outfile:
        if os.path.isfile(options.outfile) and not options.force:
            print('output file exists')
            sys.exit(1)
        outfile = open(options.outfile, "wb")
    else:
        outfile = get_binary_stdio('stdout')

    try:
        if options.decompress:
            data = brotli.decompress(data)
        else:
            data = brotli.compress(data, options.mode, options.transform)
    except brotli.error as e:
        print('[ERROR] %s: %s' % (e, options.infile or 'sys.stdin'),
              file=sys.stderr)
        sys.exit(1)

    outfile.write(data)
    outfile.close()
Beispiel #23
0
def main(args):
    """CLI entry point: brotli-(de)compress from infile/stdin to outfile/stdout."""

    options = parse_options(args)

    # Read from the input file, or from binary stdin when piped.
    if options.infile:
        if not os.path.isfile(options.infile):
            print('file "%s" not found' % options.infile, file=sys.stderr)
            sys.exit(1)
        with open(options.infile, "rb") as infile:
            data = infile.read()
    else:
        if sys.stdin.isatty():
            # interactive console, just quit
            usage()
        infile = get_binary_stdio('stdin')
        data = infile.read()

    # Write to the output file (refusing to clobber without --force) or stdout.
    if options.outfile:
        if os.path.isfile(options.outfile) and not options.force:
            print('output file exists')
            sys.exit(1)
        outfile = open(options.outfile, "wb")
    else:
        outfile = get_binary_stdio('stdout')

    try:
        if options.decompress:
            data = brotli.decompress(data)
        else:
            data = brotli.compress(data, options.mode, options.transform)
    except brotli.error as e:
        print('[ERROR] %s: %s' % (e, options.infile or 'sys.stdin'),
              file=sys.stderr)
        sys.exit(1)

    outfile.write(data)
    outfile.close()
Beispiel #24
0
async def format_download_form(file_path: Path, base_path: Path, dir_path: Path, compress: bool, rename: str = None) \
        -> FormData:
    """Build the FormData for uploading *file_path*, brotli-compressing it in place first when requested."""

    if compress and get_content_encoding(file_path) == "br":
        path_log = "/".join(file_path.parts[2:])
        start = time.time()
        if which('brotli'):
            # Prefer the system brotli binary when available.
            log_info(f'brotlify (system) {path_log} ...')
            # SECURITY(review): shell=True with an interpolated path — a path
            # containing spaces or shell metacharacters breaks or injects
            # here (also in the rm/mv calls below). Confirm inputs are safe.
            p = await asyncio.create_subprocess_shell(
                cmd=f'brotli {str(file_path)}',
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                shell=True)

            async for f in p.stderr:
                log_error(f.decode('utf-8'))
            await p.communicate()
            # Replace the original file with the .br output.
            os.system(f'rm {str(file_path)}')
            os.system(f'mv {str(file_path)}.br {str(file_path)}')
        else:
            # Fall back to the python brotli binding.
            log_info(f'brotlify (python) {path_log}')
            start = time.time()
            compressed = brotli.compress(file_path.read_bytes())
            with file_path.open("wb") as f:
                f.write(compressed)
        log_info(f'...{path_log} => {time.time() - start} s')

    data = open(str(file_path), 'rb').read()
    path_bucket = base_path / file_path.relative_to(
        dir_path) if not rename else base_path / rename

    return FormData(objectName=path_bucket,
                    objectData=data,
                    owner=Configuration.owner,
                    objectSize=len(data),
                    content_type=get_content_type(file_path.name),
                    content_encoding=get_content_encoding(file_path.name))
Beispiel #25
0
 def brotli(self,
            INPUT_FILE='system.new.dat.br',
            OUTPUT_FILE='system.new.dat',
            flag=1):
     """Convert between .new.dat and .new.dat.br files.

     flag=1 decompresses INPUT_FILE into OUTPUT_FILE; flag=2 compresses it.
     BUG FIX: the original fell through after both branches, so the
     "invalid parameter" message was printed even on success; it is now
     printed only when *flag* is neither 1 nor 2. The redundant explicit
     close() calls inside the ``with`` blocks were also removed.
     """
     import brotli as b
     # NOTE(review): `quiet` is read from an enclosing/global scope — confirm.
     if quiet == 0:
         if input('是否转换.new.dat.br?y/n>>>') == 'n': return
     if flag == 1:
         with open(INPUT_FILE, 'rb') as infile:
             data = infile.read()
         with open(OUTPUT_FILE, 'wb') as outfile:
             outfile.write(b.decompress(data))
         return
     if flag == 2:
         with open(INPUT_FILE, 'rb') as infile:
             data = infile.read()
         with open(OUTPUT_FILE, 'wb') as outfile:
             outfile.write(b.compress(data))
         return
     print('参数无效!')
Beispiel #26
0
def compressedFiles(path):
    """Yield (filepath, content) pairs, optionally gzip- or brotli-compressed.

    The compression scheme is chosen by the module-level ``args`` flags;
    unreadable files (e.g. dangling symlinks) are skipped with a message.
    """
    for filepath in shipItFiles(path):
        try:
            filecontent = pathlib.Path(filepath).read_bytes()
        except FileNotFoundError:
            print("Skip symlink or unreadable {0}".format(filepath))
            continue

        if (args.compress_gzip):
           compressedContent = gzip.compress(filecontent)
           yield filepath, compressedContent
           continue

        if (args.compress_brotli):
           # brotli is optional; abort the whole run if it is missing.
           try:
               import brotli
           except ImportError:
               print("Error: Brotli compressor is not installed, run pip(3) install brotli")
               exit(1)
           compressedContent = brotli.compress(filecontent)
           yield filepath, compressedContent
           continue

        # No compression requested: yield the raw bytes.
        yield filepath, filecontent
Beispiel #27
0
def getSFNTData(pathOrFile, unsortGlyfLoca=False, glyphBBox="", alt255UInt16=False):
    """Extract per-table checksums and WOFF2-transformed data from a font.

    Accepts an open TTFont or a path; the font is only closed here when it
    was opened here. Returns (tableData, compData, tableOrder, tableChecksums).
    """
    if isinstance(pathOrFile, TTFont):
        font = pathOrFile
    else:
        font = getTTFont(pathOrFile)
    tableChecksums = {}
    tableData = {}
    # Only regular 4-character table tags.
    tableOrder = [i for i in sorted(font.keys()) if len(i) == 4]
    if unsortGlyfLoca:
        # Move loca before glyf to exercise the unsorted-directory case.
        assert "loca" in tableOrder
        loca = tableOrder.index("loca")
        glyf = tableOrder.index("glyf")
        tableOrder.insert(glyf, tableOrder.pop(loca))
    for tag in tableOrder:
        tableChecksums[tag] = font.reader.tables[tag].checkSum
        tableData[tag] = transformTable(font, tag, glyphBBox=glyphBBox, alt255UInt16=alt255UInt16)
    # NOTE(review): str join + brotli.compress(str) is Python 2 behaviour;
    # Python 3 would require bytes here.
    totalData = "".join([tableData[tag][1] for tag in tableOrder])
    compData = brotli.compress(totalData, brotli.MODE_FONT)
    if len(compData) >= len(totalData):
        compData = totalData
    if not isinstance(pathOrFile, TTFont):
        font.close()
        del font
    return tableData, compData, tableOrder, tableChecksums
Beispiel #28
0
def brotli_compress(content, quality=None):
    """Brotli-compress *content*; quality defaults to 5 (speed/ratio balance)."""
    # 5/6 are good balance between compression speed and compression rate
    effective_quality = 5 if quality is None else quality
    return brotli.compress(content, quality=effective_quality)
Beispiel #29
0
 def test_garbage_appended(self):
     """Trailing bytes after a valid brotli stream must raise brotli.error."""
     with self.assertRaises(brotli.error):
         self.decompressor.process(brotli.compress(b'a') + b'a')
    def handle(self, *args, **options):
        """Pre-generate bundle files per (channel, locale, distribution).

        With --timestamp, only Jobs (or distribution bundles) modified on or
        after it are processed; otherwise all Jobs are. Bundles with no
        published Jobs are deleted from storage instead of written.
        """
        if not options['timestamp']:
            self.stdout.write('Generating all bundles.')
            total_jobs = Job.objects.all()
        else:
            self.stdout.write(
                'Generating bundles with Jobs modified on or after {}'.format(options['timestamp'])
            )
            total_jobs = Job.objects.filter(
                Q(snippet__modified__gte=options['timestamp']) |
                Q(distribution__distributionbundle__modified__gte=options['timestamp'])
            ).distinct()

        if not total_jobs:
            self.stdout.write('Nothing to do…')
            return

        self.stdout.write('Processing bundles…')

        # Every (channel, locale) pair touched by any affected Job.
        combinations_to_process = set(
            itertools.chain.from_iterable(
                itertools.product(
                    job.channels,
                    job.snippet.locale.code.strip(',').split(',')
                )
                for job in total_jobs
            )
        )
        distribution_bundles_to_process = DistributionBundle.objects.filter(
            distributions__jobs__in=total_jobs
        ).distinct().order_by('id')

        for distribution_bundle in distribution_bundles_to_process:
            distributions = distribution_bundle.distributions.all()

            for channel, locale in combinations_to_process:
                # Nightly optionally also serves release-targeted Jobs.
                additional_jobs = []
                if channel == 'nightly' and settings.NIGHTLY_INCLUDES_RELEASE:
                    additional_jobs = Job.objects.filter(
                        status=Job.PUBLISHED).filter(**{
                            'targets__on_release': True,
                            'distribution__in': distributions,
                        })

                channel_jobs = Job.objects.filter(
                    status=Job.PUBLISHED).filter(
                        Q(**{
                            'targets__on_{}'.format(channel): True,
                            'distribution__in': distributions,
                        }))

                all_jobs = Job.objects.filter(
                    Q(id__in=additional_jobs) | Q(id__in=channel_jobs)
                )

                # Expand e.g. 'en' to every known locale starting with it.
                locales_to_process = [
                    key.lower() for key in product_details.languages.keys()
                    if key.lower().startswith(locale)
                ]

                for locale_to_process in locales_to_process:
                    filename = 'Firefox/{channel}/{locale}/{distribution}.json'.format(
                        channel=channel,
                        locale=locale_to_process,
                        distribution=distribution_bundle.code_name,
                    )
                    filename = os.path.join(settings.MEDIA_BUNDLES_PREGEN_ROOT, filename)
                    # Locale codes are stored comma-delimited; match both the
                    # full locale and its language prefix.
                    full_locale = ',{},'.format(locale_to_process.lower())
                    splitted_locale = ',{},'.format(locale_to_process.lower().split('-', 1)[0])
                    bundle_jobs = all_jobs.filter(
                        Q(snippet__locale__code__contains=splitted_locale) |
                        Q(snippet__locale__code__contains=full_locale)).distinct()

                    # If DistributionBundle is not enabled, or if there are no
                    # Published Jobs for the channel / locale / distribution
                    # combination, delete the current bundle file if it exists.
                    if not distribution_bundle.enabled or not bundle_jobs.exists():
                        if default_storage.exists(filename):
                            self.stdout.write('Removing {}'.format(filename))
                            default_storage.delete(filename)
                        continue

                    data = []
                    channel_job_ids = list(channel_jobs.values_list('id', flat=True))
                    for job in bundle_jobs:
                        # Jobs included only via the nightly/release rule are
                        # rendered with targeting that always evaluates false.
                        if job.id in channel_job_ids:
                            render = job.render()
                        else:
                            render = job.render(always_eval_to_false=True)
                        data.append(render)

                    bundle_content = json.dumps({
                        'messages': data,
                        'metadata': {
                            'generated_at': datetime.utcnow().isoformat(),
                            'number_of_snippets': len(data),
                            'channel': channel,
                        }
                    })

                    # Convert str to bytes.
                    if isinstance(bundle_content, str):
                        bundle_content = bundle_content.encode('utf-8')

                    if settings.BUNDLE_BROTLI_COMPRESS:
                        content_file = ContentFile(brotli.compress(bundle_content))
                        content_file.content_encoding = 'br'
                    else:
                        content_file = ContentFile(bundle_content)

                    default_storage.save(filename, content_file)
                    self.stdout.write(self.style.SUCCESS('Writing bundle {}'.format(filename)))
Beispiel #31
0
#!/usr/bin/env python

import sys
import zlib
import lz4
import brotli
from pcapfile import savefile

# Compare compressed sizes of each packet payload under zlib, lz4 and brotli.
for arg in sys.argv[1:]:
	pcap = savefile.load_savefile(open(arg, 'r'))
	for i in range(0, len(pcap.packets)):
		# strip IP/UDP
		pkt = pcap.packets[i].raw()[42:]
		# do not compress header
		zlib_len = len(zlib.compress(pkt[12:])) + 12
		lz4_len = len(lz4.compress(pkt[12:])) + 12
		bro_len = len(brotli.compress(pkt[12:])) + 12
		print('[%d] base: %4dB, zlib: %4dB, lz4: %4dB, brotli: %4dB' % \
		      (i, len(pkt), zlib_len, lz4_len, bro_len))

Beispiel #32
0
 def _brotli_compress(self, original_file):
     """Return a File wrapping the brotli-compressed bytes of *original_file*."""
     original_file.seek(0)
     raw = original_file.read()
     buffer = BytesIO(brotli.compress(raw))
     buffer.seek(0)
     return File(buffer)
Beispiel #33
0
def main(args=None):
    """Command-line entry point for the bro.py brotli (de)compression tool."""

    parser = argparse.ArgumentParser(
        prog='bro.py',
        description=
        "Compression/decompression utility using the Brotli algorithm.")
    parser.add_argument('--version',
                        action='version',
                        version=brotli.__version__)
    parser.add_argument('-i',
                        '--input',
                        metavar='FILE',
                        type=str,
                        dest='infile',
                        help='Input file',
                        default=None)
    parser.add_argument('-o',
                        '--output',
                        metavar='FILE',
                        type=str,
                        dest='outfile',
                        help='Output file',
                        default=None)
    parser.add_argument('-f',
                        '--force',
                        action='store_true',
                        help='Overwrite existing output file',
                        default=False)
    parser.add_argument('-d',
                        '--decompress',
                        action='store_true',
                        help='Decompress input file',
                        default=False)
    params = parser.add_argument_group('optional encoder parameters')
    params.add_argument(
        '-m',
        '--mode',
        metavar="MODE",
        type=int,
        choices=[0, 1],
        help='The compression mode can be 0 for generic input, '
        '1 for UTF-8 encoded text, or 2 for WOFF 2.0 font data. '
        'Defaults to 0.')
    params.add_argument(
        '-q',
        '--quality',
        metavar="QUALITY",
        type=int,
        choices=list(range(0, 12)),
        help='Controls the compression-speed vs compression-density '
        'tradeoff. The higher the quality, the slower the '
        'compression. Range is 0 to 11. Defaults to 11.')
    params.add_argument(
        '--lgwin',
        metavar="LGWIN",
        type=int,
        choices=list(range(16, 25)),
        help='Base 2 logarithm of the sliding window size. Range is '
        '10 to 24. Defaults to 22.')
    params.add_argument(
        '--lgblock',
        metavar="LGBLOCK",
        type=int,
        choices=[0] + list(range(16, 25)),
        help='Base 2 logarithm of the maximum input block size. '
        'Range is 16 to 24. If set to 0, the value will be set based '
        'on the quality. Defaults to 0.')
    # set default values using global DEFAULT_PARAMS dictionary
    parser.set_defaults(**DEFAULT_PARAMS)

    options = parser.parse_args(args=args)

    # Read from the input file, or from binary stdin when piped.
    if options.infile:
        if not os.path.isfile(options.infile):
            parser.error('file "%s" not found' % options.infile)
        with open(options.infile, "rb") as infile:
            data = infile.read()
    else:
        if sys.stdin.isatty():
            # interactive console, just quit
            parser.error('no input')
        infile = get_binary_stdio('stdin')
        data = infile.read()

    # Write to the output file (refusing to clobber without -f) or stdout.
    if options.outfile:
        if os.path.isfile(options.outfile) and not options.force:
            parser.error('output file exists')
        outfile = open(options.outfile, "wb")
    else:
        outfile = get_binary_stdio('stdout')

    try:
        if options.decompress:
            data = brotli.decompress(data)
        else:
            data = brotli.compress(data,
                                   mode=options.mode,
                                   quality=options.quality,
                                   lgwin=options.lgwin,
                                   lgblock=options.lgblock)
    except brotli.error as e:
        parser.exit(1,
                    'bro: error: %s: %s' % (e, options.infile or 'sys.stdin'))

    outfile.write(data)
    outfile.close()
Beispiel #34
0
 def _encode_payload(cls, payload: dict) -> str:
     """JSON-encode *payload*, brotli-compress it (quality 6), then base64 it."""
     text = json.dumps(payload, ensure_ascii=False)
     data = brotli.compress(text.encode('utf-8'), quality=6)
     return cls._b64encode(data)
Beispiel #35
0
def encode_brotli(content: bytes) -> bytes:
    """Return *content* compressed with Brotli at the library defaults."""
    compressed = brotli.compress(content)
    return compressed
Beispiel #36
0
def encode_brotli(content):
    """Brotli-compress ``content`` (bytes in, bytes out) with default settings."""
    result = brotli.compress(content)
    return result
Beispiel #37
0
         
        with open('/home/folkertdev/elm/elm-brotli/tests/Generated.elm', 'w') as output:
            output.write(file_format.format(contents, result))


if __name__ == '__main__':
    # Collect base names (file name without extension) for every source file.
    bases = [os.path.splitext(os.path.basename(path))[0]
             for path in glob.glob("sources/**")]

    # Emit one Elm test module per source file, embedding the original text
    # and the brotli-compressed byte values.
    for path in glob.glob("sources/**"):
        with open(path) as source:
            contents = source.read()
            compressed = brotli.compress(contents.encode('utf-8'))
            byte_values = [v for v in compressed]

            dest_name = os.path.splitext(os.path.basename(path))[0]

            with open(dest_name + ".elm", 'w') as dst:
                dst.write(test_file_format.format(dest_name, contents, byte_values))

    # Aggregate every generated module into a single Gutenberg.elm driver.
    imports = "\n".join(f"import Gutenberg.{name} as {name}" for name in bases)

    tests = ", ".join(f"pipeline \"{name}\" {name}.text {name}.bytes" for name in bases)

    with open("../Gutenberg.elm", 'w') as dst:
        dst.write(gutenberg_format.format(imports, tests))
Beispiel #38
0
 def _compress(self, test_data, **kwargs):
     """Compress the file at *test_data* into its temp compressed path.

     Extra keyword arguments are forwarded to ``brotli.compress``.
     """
     destination = _test_utils.get_temp_compressed_name(test_data)
     with open(destination, 'wb') as dst, open(test_data, 'rb') as src:
         dst.write(brotli.compress(src.read(), **kwargs))
Beispiel #39
0
def defaultTestData(header=None, directory=None, collectionHeader=None, collectionDirectory=None, tableData=None, compressedData=None, metadata=None, privateData=None, flavor="cff", Base128Bug=False):
    """Assemble the parts of a default WOFF test file.

    Any part not supplied is deep-copied from the module's default fixtures,
    selected by *flavor* ("cff" or TTF otherwise).  Header bookkeeping
    (numTables, flavor, sizes, offsets, total length) is recomputed from the
    parts in order, so statement order below is significant.

    Returns the parts list: header, directory,
    [collectionHeader, collectionDirectory,] compressedData,
    [(metadata, compMetadata),] [privateData].
    """
    # A collection (TTC) is signalled by a supplied collection directory.
    isCollection = collectionDirectory is not None
    parts = []
    # setup the header
    if header is None:
        header = deepcopy(testDataWOFFHeader)
    parts.append(header)
    # setup the directory
    if directory is None:
        if flavor == "cff":
            directory = deepcopy(testCFFDataWOFFDirectory)
        else:
            directory = deepcopy(testTTFDataWOFFDirectory)
    parts.append(directory)
    if isCollection:
        if collectionHeader is None:
            collectionHeader = dict(version=0x00010000, numFonts=len(collectionDirectory))
        parts.append(collectionHeader)
        parts.append(collectionDirectory)
    # setup the table data
    if tableData is None:
        if flavor == "cff":
            tableData = deepcopy(sfntCFFTableData)
        else:
            tableData = deepcopy(sfntTTFTableData)
    if compressedData is None:
        if flavor == "cff":
            compressedData = deepcopy(sfntCFFCompressedData)
        else:
            compressedData = deepcopy(sfntTTFCompressedData)
    parts.append(compressedData)
    # sanity checks
    assert len(directory) == len(tableData)
    if not isCollection:
        assert set(tableData.keys()) == set([entry["tag"] for entry in directory])
    # apply the directory data to the header
    header["numTables"] = len(directory)
    if isCollection:
        header["flavor"] = "ttcf"
    elif "CFF " in tableData:
        header["flavor"] = "OTTO"
    else:
        header["flavor"] = "\000\001\000\000"
    # apply the table data to the directory and the header
    if isCollection:
        # TTC header: 12-byte header plus one 4-byte offset per font,
        # then one sfnt directory per font.
        header["totalSfntSize"] = 12 + 4 * collectionHeader["numFonts"]
        header["totalSfntSize"] += sfntDirectorySize * collectionHeader["numFonts"]
        for entry in collectionDirectory:
            header["totalSfntSize"] += sfntDirectoryEntrySize * entry["numTables"]
    else:
        header["totalSfntSize"] = sfntDirectorySize + (len(directory) * sfntDirectoryEntrySize)
    header["totalCompressedSize"] = len(compressedData)
    for i, entry in enumerate(directory):
        tag = entry["tag"]
        # Collections index table data by position; single fonts by tag.
        if isCollection:
            origData, transformData = tableData[i][1]
        else:
            origData, transformData = tableData[tag]
        entry["origLength"] = len(origData)
        entry["transformLength"] = len(transformData)
        if tag == "hmtx" and entry["origLength"] > entry["transformLength"]:
            entry["transformFlag"] = 1
        # Tables are 4-byte aligned in the reconstructed sfnt.
        header["totalSfntSize"] += entry["origLength"]
        header["totalSfntSize"] += calcPaddingLength(header["totalSfntSize"])
    header["length"] = woffHeaderSize + len(packTestDirectory(directory, Base128Bug=Base128Bug))
    if isCollection:
        header["length"] += len(packTestCollectionHeader(collectionHeader))
        header["length"] += len(packTestCollectionDirectory(collectionDirectory))
    header["length"] += len(compressedData)
    header["length"] += calcPaddingLength(header["length"])
    # setup the metadata
    if metadata is not None:
        # Caller may pass (raw, precompressed); otherwise compress here.
        if isinstance(metadata, tuple):
            metadata, compMetadata = metadata
        else:
            compMetadata = None
        if compMetadata is None:
            compMetadata = brotli.compress(metadata, brotli.MODE_TEXT)
        header["metaOffset"] = header["length"]
        header["metaLength"] = len(compMetadata)
        header["metaOrigLength"] = len(metadata)
        header["length"] += len(compMetadata)
        # Metadata is only padded when private data follows it.
        if privateData is not None:
            header["length"] += calcPaddingLength(len(compMetadata))
        parts.append((metadata, compMetadata))
    # setup the private data
    if privateData is not None:
        header["privOffset"] = header["length"]
        header["privLength"] = len(privateData)
        header["length"] += len(privateData)
        parts.append(privateData)
    # return the parts
    return parts
Beispiel #40
0
 def test_already_finished(self):
     """Feeding data after the stream is fully decoded must raise brotli.error."""
     complete_stream = brotli.compress(b'a')
     self.decompressor.process(complete_stream)
     with self.assertRaises(brotli.error):
         self.decompressor.process(b'a')
Beispiel #41
0
def main(args=None):
    """Command-line entry point: compress or decompress data with Brotli.

    Reads from ``--input`` (or binary stdin) and writes to ``--output``
    (or binary stdout). Exits via the parser on usage errors or on a
    ``brotli.error`` during (de)compression.
    """
    parser = argparse.ArgumentParser(
        prog='bro.py',
        description="Compression/decompression utility using the Brotli algorithm.")
    parser.add_argument('--version', action='version', version=brotli.__version__)
    parser.add_argument('-i', '--input', metavar='FILE', type=str, dest='infile',
                        help='Input file', default=None)
    parser.add_argument('-o', '--output', metavar='FILE', type=str, dest='outfile',
                        help='Output file', default=None)
    parser.add_argument('-f', '--force', action='store_true',
                        help='Overwrite existing output file', default=False)
    parser.add_argument('-d', '--decompress', action='store_true',
                        help='Decompress input file', default=False)
    params = parser.add_argument_group('optional encoder parameters')
    # FIX: help text previously advertised mode 2 (WOFF 2.0 font data) even
    # though choices=[0, 1] rejects it; the text now matches the choices.
    params.add_argument('-m', '--mode', metavar="MODE", type=int, choices=[0, 1],
                        help='The compression mode can be 0 for generic input '
                        'or 1 for UTF-8 encoded text. Defaults to 0.')
    params.add_argument('-q', '--quality', metavar="QUALITY", type=int,
                        choices=list(range(0, 12)),
                        help='Controls the compression-speed vs compression-density '
                        'tradeoff. The higher the quality, the slower the '
                        'compression. Range is 0 to 11. Defaults to 11.')
    # FIX: help text previously claimed "Range is 10 to 24" while
    # choices=range(16, 25) only accepts 16..24; the text now matches.
    params.add_argument('--lgwin', metavar="LGWIN", type=int,
                        choices=list(range(16, 25)),
                        help='Base 2 logarithm of the sliding window size. Range is '
                        '16 to 24. Defaults to 22.')
    params.add_argument('--lgblock', metavar="LGBLOCK", type=int,
                        choices=[0] + list(range(16, 25)),
                        help='Base 2 logarithm of the maximum input block size. '
                        'Range is 16 to 24. If set to 0, the value will be set based '
                        'on the quality. Defaults to 0.')
    # set default values using global DEFAULT_PARAMS dictionary
    parser.set_defaults(**DEFAULT_PARAMS)

    options = parser.parse_args(args=args)

    # Read the entire input up front, from file or from binary stdin.
    if options.infile:
        if not os.path.isfile(options.infile):
            parser.error('file "%s" not found' % options.infile)
        with open(options.infile, "rb") as infile:
            data = infile.read()
    else:
        if sys.stdin.isatty():
            # interactive console, just quit
            parser.error('no input')
        infile = get_binary_stdio('stdin')
        data = infile.read()

    # Refuse to clobber an existing output file unless --force was given.
    if options.outfile:
        if os.path.isfile(options.outfile) and not options.force:
            parser.error('output file exists')
        outfile = open(options.outfile, "wb")
    else:
        outfile = get_binary_stdio('stdout')

    try:
        if options.decompress:
            data = brotli.decompress(data)
        else:
            data = brotli.compress(
                data, mode=options.mode, quality=options.quality,
                lgwin=options.lgwin, lgblock=options.lgblock)
    except brotli.error as e:
        parser.exit(1,
                    'bro: error: %s: %s' % (e, options.infile or 'sys.stdin'))

    outfile.write(data)
    outfile.close()
Beispiel #42
0
 async def ReplyCompressBrotli(self, connection, header):
     """Brotli-compress the reply body, tag it with Content-Encoding, and send."""
     header["Additional"].append(b"Content-Encoding: br")
     header["ReplyContent"] = brotli.compress(header["ReplyContent"])
     await self.Reply(connection, header)
Beispiel #43
0
 def handle(self, *args, **options):
     """Export school coordinates as GeoJSON plus gzip/brotli variants.

     Writes to ``*.tmp`` files first, then atomically swaps them into
     place with ``os.replace`` so readers never see a partial file.
     """
     self.stdout.write("Generating JSON...")
     now = time.time()
     try:
         os.mkdir("json")
     except FileExistsError:
         # Folder already exists
         pass
     # Write new files next to the old ones, then atomically replace
     with open("json/coords.geojson.tmp", "w", encoding="utf-8") as f:
         # Skip records with missing or (0, 0) coordinates.
         records = (SchoolV2.objects.exclude(lat__isnull=True).exclude(
             lon__isnull=True).exclude(lat=0,
                                       lon=0).values("id", "lat",
                                                     "lon").iterator())
         features = [{
             "type": "Feature",
             "geometry": {
                 "type": "Point",
                 # GeoJSON coordinate order is [longitude, latitude].
                 "coordinates": [record["lon"], record["lat"]],
             },
             "properties": {
                 "id": record["id"]
             },
         } for record in records]
         count = len(features)
         feature_collection = {
             "type": "FeatureCollection",
             "features": features
         }
         # Compact separators keep the payload small.
         json.dump(feature_collection, f, separators=(",", ":"))
     self.stdout.write(
         self.style.SUCCESS(
             f"Wrote {count} records in {round(time.time()-now, 3)} seconds"
         ))
     # Generate compressed versions
     with open("json/coords.geojson.tmp", "rb") as f:
         data = f.read()
         # gzip
         self.stdout.write("Compressing with gzip...")
         now = time.time()
         with gzip.open("json/coords.geojson.gz.tmp", "wb") as g:
             g.write(data)
         self.stdout.write(
             self.style.SUCCESS(
                 f"Finished in {round(time.time()-now, 3)} seconds"))
         # brotli takes a long time, don't bother when testing
         if not settings.DEBUG:
             self.stdout.write("Compressing with brotli...")
             now = time.time()
             with open("json/coords.geojson.br.tmp", "wb") as b:
                 b.write(brotli.compress(data, mode=brotli.MODE_TEXT))
             self.stdout.write(
                 self.style.SUCCESS(
                     f"Finished in {round(time.time()-now, 3)} seconds"))
         else:
             self.stdout.write("Skipping brotli...")
     # Replace
     os.replace("json/coords.geojson.tmp", "json/coords.geojson")
     os.replace("json/coords.geojson.gz.tmp", "json/coords.geojson.gz")
     if not settings.DEBUG:
         os.replace("json/coords.geojson.br.tmp", "json/coords.geojson.br")
Beispiel #44
0
	def setUpFlavorData(cls):
		"""Load the XML metadata fixture; prepare its compressed and private data."""
		assert os.path.exists(METADATA)
		with open(METADATA, 'rb') as metadata_file:
			cls.xml_metadata = metadata_file.read()
		# MODE_TEXT is the brotli mode tuned for UTF-8 text such as XML.
		cls.compressed_metadata = brotli.compress(cls.xml_metadata, mode=brotli.MODE_TEXT)
		# 20 distinct random byte values act as the private-data fixture.
		cls.privData = bytes(bytearray(random.sample(range(0, 256), 20)))
import brotli

import random
import string

# Read the Silesia corpus and write a quality-5 brotli-compressed copy.
with open('./silesia.tar', 'rb') as source_file:
    payload = source_file.read()

with open('silesia-5.brotli', 'wb') as out_file:
    out_file.write(brotli.compress(payload, quality=5))
Beispiel #46
0
def brotli_encode(data, level=11, mode=0, lgwin=22, out=None):
    """Compress *data* with Brotli.

    ``level`` maps onto brotli's ``quality`` parameter.  ``out`` is accepted
    for signature compatibility but is ignored.
    """
    return brotli.compress(data, mode=mode, lgwin=lgwin, quality=level)
Beispiel #47
0
def update_geojson_summary(args, stations, updated_stations, summary):
    """Merge *updated_stations* into the GeoJSON *summary* and write it out.

    Ascents older than ``args.max_age`` are pruned, new ascents are merged
    and de-duplicated per station, and the resulting FeatureCollection is
    brotli-compressed and atomically installed at ``args.summary`` with a
    ``.br`` extension (written via a temp file + rename).
    """
    stations_with_ascents = dict()
    # unroll into dicts for quick access
    if 'features' in summary:
        for feature in summary.features:
            a = feature.properties['ascents']
            if len(a):
                st_id = a[0]['station_id']
                stations_with_ascents[st_id] = feature

    # remove entries from ascents which have a syn_timestamp less than cutoff_ts
    cutoff_ts = now() - args.max_age

    # now walk the updates
    for id, asc in updated_stations:
        if id in stations_with_ascents:

            # we already have ascents from this station.
            # append, sort by synoptic time and de-duplicate
            oldlist = stations_with_ascents[id]['properties']['ascents']
            oldlist.append(asc)

            pruned = [x for x in oldlist if x['syn_timestamp'] > cutoff_ts]

            logging.debug(f"pruning {id}: {len(oldlist)} -> {len(pruned)}")

            # newest first, so the de-dup below keeps the most recent entry
            newlist = sorted(pruned,
                             key=itemgetter('syn_timestamp'),
                             reverse=True)
            # https://stackoverflow.com/questions/9427163/remove-duplicate-dict-in-list-in-python
            seen = set()
            dedup = []
            for d in newlist:
                # keep an ascent of each source, even if same synop time
                t = str(d['syn_timestamp']) + d['source']
                if t not in seen:
                    seen.add(t)
                    dedup.append(d)
            stations_with_ascents[id]['properties']['ascents'] = dedup
        else:
            # station appears with first-time ascent
            properties = dict()
            properties["ascents"] = [asc]

            if id in stations:
                st = stations[id]
                coords = (st['lon'], st['lat'], st['elevation'])
                properties["name"] = st['name']
            else:
                # unlisted station: anonymous + mobile
                # take coords and station_id as name from ascent
                coords = (asc['lon'], asc['lat'], asc['elevation'])
                properties["name"] = asc['station_id']

            stations_with_ascents[id] = geojson.Feature(
                geometry=geojson.Point(coords), properties=properties)

    # create GeoJSON summary
    ns = na = 0
    fc = geojson.FeatureCollection([])
    for st, f in stations_with_ascents.items():
        ns += 1
        na += len(f.properties['ascents'])
        fc.features.append(f)

    gj = geojson.dumps(fc, indent=4)
    dest = os.path.splitext(args.summary)[0]
    if not dest.endswith(".br"):
        dest += '.br'

    logging.debug(f"summary {dest}: {ns} active stations, {na} ascents")

    # Write compressed output to a temp file, then rename into place so
    # readers never observe a partially written summary.
    # NOTE(review): os.rename assumes args.tmpdir is on the same filesystem
    # as dest — confirm with deployment config.
    fd, path = tempfile.mkstemp(dir=args.tmpdir)
    src = gj.encode("utf8")
    start = time.time()
    dst = brotli.compress(src, quality=BROTLI_SUMMARY_QUALITY)
    end = time.time()
    dt = end - start
    sl = len(src)
    dl = len(dst)
    ratio = (1. - dl / sl) * 100.
    logging.debug(
        f"summary {dest}: brotli {sl} -> {dl}, compression={ratio:.1f}% in {dt:.3f}s"
    )
    os.write(fd, dst)
    os.fsync(fd)
    os.close(fd)
    os.rename(path, dest)
    # mkstemp creates the file 0o600; open it up for world reading.
    os.chmod(dest, 0o644)
def brotli_file(filepath):
    """Write a brotli-compressed copy of *filepath* to ``<filepath>.br``.

    Returns the path of the compressed file.
    """
    destination = filepath + ".br"
    with open(filepath, "rb") as source:
        with open(destination, "wb") as compressed_out:
            compressed_out.write(brotli.compress(source.read()))
    return destination
Beispiel #49
0
def compress(data: str):
    """Brotli-compress a UTF-8 string and return it base64-encoded as str."""
    raw = data.encode('utf-8')
    encoded = base64.b64encode(brotli.compress(raw))
    return encoded.decode('utf-8')
Beispiel #50
0
def compress_with_brotli(data: bytes) -> bytes:
    """Brotli-compress *data* at maximum quality using text mode."""
    # NOTE(review): MODE_TEXT is tuned for UTF-8 input — confirm callers only
    # pass text-like payloads.
    return brotli.compress(data, quality=11, mode=brotli.MODE_TEXT)
Beispiel #51
0
def test_value_error_without_request(header_value):
    """A truncated brotli body must make httpx.Response raise ValueError."""
    headers = [(b"Content-Encoding", header_value)]
    # Drop the first three bytes so the stream cannot be decoded.
    corrupted = brotli.compress(b"test 123")[3:]
    with pytest.raises(ValueError):
        httpx.Response(200, headers=headers, content=corrupted)
Beispiel #52
0
 def compress(self, path, file):
     """Return a ContentFile holding the brotli-compressed contents of *file*.

     ``path`` is unused but kept to satisfy the storage compress() contract.
     """
     raw = file.read()
     return ContentFile(brotli.compress(raw))
Beispiel #53
0
 def do_compress(self, filename, compressed_filename):
     """Brotli-compress *filename* (text mode) into *compressed_filename*."""
     import brotli
     with open(filename, 'rb') as source:
         with open(compressed_filename, 'wb') as target:
             target.write(brotli.compress(source.read(), mode=brotli.MODE_TEXT))
def brotli_compress(content):
    """Compress *content* via the module-level compress() at DEFAULT_LEVEL quality."""
    return compress(content, quality=DEFAULT_LEVEL)
Beispiel #55
0
 def compress_brotli(data):
     """Brotli-compress *data* with default settings.

     NOTE(review): no ``self``/``cls`` parameter — presumably decorated as a
     staticmethod in the enclosing class; confirm before calling on instances.
     """
     return brotli.compress(data)
Beispiel #56
0
def main(truthVersion: str = None):
    """Download, unpack, compress, and diff the TW master database.

    Prompts for a TruthVersion when none is given.  Returns True when a new
    database was published; returns None when the TruthVersion does not
    exist or the database hash is unchanged.
    """
    if not truthVersion:
        truthVersion = input("TruthVersion: ")

    # Download the database if TruthVersion is exist
    r = requests.get(
        f"https://img-pc.so-net.tw/dl/Resources/{truthVersion}/Jpn/AssetBundles/Android/manifest/masterdata_assetmanifest",
        headers=header,
    )

    if r.status_code != 200:
        print(f"TruthVersion {truthVersion} is not exist")
        return
    print(f"TruthVersion {truthVersion} is exist")

    filename, path, _, size, _ = r.text.split(",")

    print(f"Downloading asset bundle ...")
    r = requests.get(
        f"https://img-pc.so-net.tw/dl/pool/AssetBundles/{path[:2]}/{path}",
        headers=header)

    if r.headers.get("Content-Length") != size:
        print("Size is not same, but it may be fine")

    # FIX: the bundle was written under script_dir but later reopened with a
    # bare relative path, which failed whenever cwd != script_dir.  Use one
    # absolute path for write, read, and removal.
    bundle_path = os.path.join(script_dir, "masterdata_master.unity3d")
    with open(bundle_path, "wb+") as f:
        f.write(r.content)

    masterDB = None
    # Unpack asset bundle: the first TextAsset holds the SQLite database.
    with open(bundle_path, "rb") as f:
        bundle = unitypack.load(f)

        for asset in bundle.assets:
            for id, object in asset.objects.items():
                if object.type == "TextAsset":
                    data = object.read()
                    masterDB = data.script
                    break

    os.remove(bundle_path)

    # Compress
    print("Compressing redive_tw.db.br ...")
    brotliDB = brotli.compress(masterDB)

    # Hash Check: skip publishing when the compressed DB is unchanged.
    print("Generating MD5 Hash ...")
    new_hash = hashlib.md5(brotliDB).hexdigest()
    with open(os.path.join(script_dir, "out/version.json")) as f:
        old_version = json.load(f)

    if old_version.get("hash") == new_hash:
        print("Database Hash are same, Return")
        return
    print(
        f"Old Hash: {old_version.get('hash')} ({old_version.get('TruthVersion')})"
    )
    print(f"New Hash: {new_hash} ({truthVersion})")

    # Save: keep the previous DB around for the diff below.
    shutil.copyfile(os.path.join(script_dir, "out/redive_tw.db"),
                    os.path.join(script_dir, "out/prev.redive_tw.db"))

    with open(os.path.join(script_dir, "out/redive_tw.db.br"), "wb") as f:
        f.write(brotliDB)

    with open(os.path.join(script_dir, "out/redive_tw.db"), "wb") as f:
        f.write(masterDB)

    with open(os.path.join(script_dir, "out/version.json"), "w") as f:
        json.dump({"TruthVersion": truthVersion, "hash": new_hash}, f)

    # Diff Check
    # NOTE(review): os.system builds a shell command from truthVersion;
    # prefer subprocess.run([...], shell=False) if truthVersion can ever
    # come from untrusted input.
    print("Generating diff report ...")
    os.system(
        f"{os.path.join(script_dir, 'sqldiff.exe')} {os.path.join(script_dir, 'out/prev.redive_tw.db')} {os.path.join(script_dir, 'out/redive_tw.db')} > {os.path.join(script_dir, f'out/diff/{truthVersion}.sql')}"
    )

    print("Done\n")
    return True
Beispiel #57
0
	def setUpFlavorData(cls):
		"""Read the metadata fixture; build its brotli form and random private data."""
		assert os.path.exists(METADATA)
		with open(METADATA, 'rb') as source:
			raw_metadata = source.read()
		cls.xml_metadata = raw_metadata
		# Text mode suits the XML metadata payload.
		cls.compressed_metadata = brotli.compress(raw_metadata, mode=brotli.MODE_TEXT)
		cls.privData = bytes(bytearray(random.sample(range(0, 256), 20)))
Beispiel #58
0
def getWOFFCollectionData(pathOrFiles, MismatchGlyfLoca=False, reverseNames=False):
    """Build packed WOFF2 collection (TTC) test data from the given fonts.

    Identical tables are shared between fonts via the collection directory.
    With *MismatchGlyfLoca*, glyf/loca tables are kept per-font and the loca
    indices are deliberately crossed between fonts to produce invalid data.
    With *reverseNames*, name-table uniquifying indices run in reverse.
    Returns the padded binary collection data.
    """
    from defaultData import defaultTestData

    tableChecksums = []
    tableData = []
    tableOrder = []
    collectionDirectory = []
    locaIndices = []

    fonts = [TTFont(pathOrFile) for pathOrFile in pathOrFiles]
    for i, font in enumerate(fonts):
        index = i
        if reverseNames:
            index = len(fonts) - i - 1

        # Make the name table unique
        name = font["name"]
        for namerecord in name.names:
            nameID = namerecord.nameID
            string = namerecord.toUnicode()
            if nameID == 1:
                namerecord.string = "%s %d" % (string, index)
            elif nameID == 4:
                namerecord.string = string.replace("Regular", "%d Regular" % index)
            elif nameID == 6:
                namerecord.string = string.replace("-", "%d-" % index)

        # Sorted 4-char table tags, with loca forced directly after glyf.
        tags = [i for i in sorted(font.keys()) if len(i) == 4]
        if "glyf" in tags:
            glyf = tags.index("glyf")
            loca = tags.index("loca")
            tags.insert(glyf + 1, tags.pop(loca))
        tableIndices = OrderedDict()
        for tag in tags:
            data = transformTable(font, tag)
            if MismatchGlyfLoca and tag in ("glyf", "loca"):
                # Always store per-font copies so loca indices can be swapped.
                tableData.append([tag, data])
                tableChecksums.append([tag, font.reader.tables[tag].checkSum])
                tableOrder.append(tag)
                tableIndex = len(tableData) - 1
                tableIndices[tag] = tableIndex
                if tag == "loca":
                    locaIndices.append(tableIndex)
            else:
                # De-duplicate identical tables across fonts; every font's
                # directory then points at the shared entry.
                if [tag, data] not in tableData:
                    tableData.append([tag, data])
                    tableChecksums.append([tag, font.reader.tables[tag].checkSum])
                    tableOrder.append(tag)
                tableIndices[tag] = tableData.index([tag, data])
        collectionDirectory.append(dict(numTables=len(tableIndices), flavor=font.sfntVersion, index=tableIndices))
        font.close()
        del font

    if MismatchGlyfLoca:
        # Cross the loca indices between fonts to create the mismatch.
        locaIndices.reverse()
        for i, entry in enumerate(collectionDirectory):
            entry["index"]["loca"] = locaIndices[i]
    # Concatenate every table's transformed data and compress as font data.
    # NOTE(review): "".join over table data implies str (Python-2-era code);
    # under Python 3 this would need b"".join — confirm target interpreter.
    totalData = "".join([data[1][1] for data in tableData])
    compData = brotli.compress(totalData, brotli.MODE_FONT)
    # Fall back to the uncompressed stream when compression does not help.
    if len(compData) >= len(totalData):
        compData = totalData

    directory = [dict(tag=tag, origLength=0, transformLength=0, transformFlag=0) for tag in tableOrder]

    header, directory, collectionHeader, collectionDirectory, tableData = defaultTestData(directory=directory,
            tableData=tableData, compressedData=compData, collectionDirectory=collectionDirectory)

    data = packTestHeader(header)
    data += packTestDirectory(directory, isCollection=True)
    data += packTestCollectionHeader(collectionHeader)
    data += packTestCollectionDirectory(collectionDirectory)
    data += tableData

    data = padData(data)

    return data