def defaultSFNTTestData(tableData=None, flavor="cff"):
    """Build the default SFNT test-data parts: [header, directory, tableData].

    When *tableData* is omitted, the module's default CFF or TTF table set is
    used (selected by *flavor*). Table values are unwrapped in place from
    (origData, transformData) pairs to the raw origData, and the directory and
    header fields are recomputed for a sequential, padded table layout.
    """
    isCFF = flavor == "cff"
    # header
    header = deepcopy(testDataSFNTHeader)
    # directory, selected by flavor
    directory = deepcopy(testCFFDataSFNTDirectory if isCFF else testTTFDataSFNTDirectory)
    # table data: fall back to the module defaults when not supplied
    if tableData is None:
        tableData = deepcopy(sfntCFFTableData if isCFF else sfntTTFTableData)
    # unwrap (origData, transformData) -> origData, mutating the dict in place
    for tableTag, (origData, _transformData) in tableData.items():
        tableData[tableTag] = origData
    parts = [header, directory, tableData]
    # sanity checks: directory and table data must describe the same tags
    assert len(directory) == len(tableData)
    assert set(tableData.keys()) == set(entry["tag"] for entry in directory)
    # header fields derived from the directory
    header["numTables"] = len(directory)
    header["flavor"] = "OTTO" if isCFF else "\000\001\000\000"
    # lay the tables out one after another, starting right after the directory
    nextOffset = sfntDirectorySize + (len(directory) * sfntDirectoryEntrySize)
    for entry in directory:
        tableTag = entry["tag"]
        origData = tableData[tableTag]
        entry["offset"] = nextOffset
        entry["length"] = len(origData)
        # reuse a precomputed checksum when one is known for this data
        if origData in originalSFNTChecksums:
            entry["checksum"] = originalSFNTChecksums[origData]
        else:
            entry["checksum"] = calcTableChecksum(tableTag, origData)
        nextOffset += len(origData) + calcPaddingLength(len(origData))
    return parts
def defaultSFNTTestData(flavor="cff"):
    """Return the default SFNT test-data parts as [header, directory, tableData].

    *flavor* selects between the module's CFF ("cff") and TTF defaults. Table
    values are reduced from (origData, transformData) pairs to the raw
    origData, and the header/directory fields (numTables, flavor, offsets,
    lengths, checksums) are filled in for a sequential, padded table layout.
    """
    # choose the flavor-specific defaults
    if flavor == "cff":
        directory = deepcopy(testCFFDataSFNTDirectory)
        tableData = deepcopy(sfntCFFTableData)
        sfntFlavor = "OTTO"
    else:
        directory = deepcopy(testTTFDataSFNTDirectory)
        tableData = deepcopy(sfntTTFTableData)
        sfntFlavor = "\000\001\000\000"
    header = deepcopy(testDataSFNTHeader)
    # keep only the original data from each (origData, transformData) pair
    for tableTag, (origData, _transformData) in tableData.items():
        tableData[tableTag] = origData
    # sanity checks: directory and table data must describe the same tags
    assert len(directory) == len(tableData)
    assert set(tableData) == set(entry["tag"] for entry in directory)
    # header fields
    header["numTables"] = len(directory)
    header["flavor"] = sfntFlavor
    # directory entries: tables packed sequentially after the directory
    nextOffset = sfntDirectorySize + (len(directory) * sfntDirectoryEntrySize)
    for entry in directory:
        tableTag = entry["tag"]
        origData = tableData[tableTag]
        entry["offset"] = nextOffset
        entry["length"] = len(origData)
        # reuse a precomputed checksum when one is known for this data
        if origData in originalSFNTChecksums:
            entry["checksum"] = originalSFNTChecksums[origData]
        else:
            entry["checksum"] = calcTableChecksum(tableTag, origData)
        nextOffset += len(origData) + calcPaddingLength(len(origData))
    return [header, directory, tableData]
def defaultTestData(header=None, directory=None, tableData=None, metadata=None, privateData=None, flavor="cff"):
    """Assemble default WOFF test data.

    Returns a list of parts: [header, directory, tableData] plus, when
    supplied, a (metadata, compMetadata) pair and privateData. Any part not
    passed in is deep-copied from the module defaults selected by *flavor*
    ("cff" or TTF otherwise). The header's length/size/offset fields are
    recomputed from the actual table, metadata and private-data sizes.
    """
    parts = []
    # setup the header
    if header is None:
        header = deepcopy(testDataWOFFHeader)
    parts.append(header)
    # setup the directory
    if directory is None:
        if flavor == "cff":
            directory = deepcopy(testCFFDataWOFFDirectory)
        else:
            directory = deepcopy(testTTFDataWOFFDirectory)
    parts.append(directory)
    # setup the table data; values are (origData, compData) pairs
    if tableData is None:
        if flavor == "cff":
            tableData = deepcopy(sfntCFFTableData)
        else:
            tableData = deepcopy(sfntTTFTableData)
    parts.append(tableData)
    # sanity checks: directory and table data must describe the same tags
    assert len(directory) == len(tableData)
    assert set(tableData.keys()) == set([entry["tag"] for entry in directory])
    # apply the directory data to the header
    header["numTables"] = len(directory)
    header["length"] = woffHeaderSize + (woffDirectoryEntrySize * len(directory))
    # header flavor is derived from the table set, not the flavor argument
    if "CFF " in tableData:
        header["flavor"] = "OTTO"
    else:
        header["flavor"] = "\000\001\000\000"
    # apply the table data to the directory and the header
    header["totalSfntSize"] = sfntDirectorySize + (len(directory) * sfntDirectoryEntrySize)
    offset = header["length"]
    for entry in directory:
        tag = entry["tag"]
        origData, compData = tableData[tag]
        # measure; padded lengths include alignment padding (calcPaddingLength)
        compLength = len(compData)
        compPaddedLength = compLength + calcPaddingLength(compLength)
        origLength = len(origData)
        origPaddedLength = origLength + calcPaddingLength(origLength)
        # store
        entry["offset"] = offset
        entry["compLength"] = compLength
        entry["origLength"] = origLength
        # reuse a precomputed checksum when one is known for this data
        if origData in originalSFNTChecksums:
            checksum = originalSFNTChecksums[origData]
        else:
            checksum = calcTableChecksum(tag, origData)
        entry["origChecksum"] = checksum
        header["length"] += compPaddedLength
        header["totalSfntSize"] += origPaddedLength
        # next
        offset += compPaddedLength
    # setup the metadata
    if metadata is not None:
        # metadata may be passed as (rawMetadata, compressedMetadata)
        if isinstance(metadata, tuple):
            metadata, compMetadata = metadata
        else:
            compMetadata = None
        if compMetadata is None:
            compMetadata = zlib.compress(metadata)
        header["metaOffset"] = header["length"]
        header["metaLength"] = len(compMetadata)
        header["metaOrigLength"] = len(metadata)
        header["length"] += len(compMetadata)
        # the metadata block is only padded when private data follows it
        if privateData is not None:
            header["length"] += calcPaddingLength(len(compMetadata))
        parts.append((metadata, compMetadata))
    # setup the private data
    if privateData is not None:
        header["privOffset"] = header["length"]
        header["privLength"] = len(privateData)
        header["length"] += len(privateData)
        parts.append(privateData)
    # return the parts
    return parts
def defaultTestData(header=None, directory=None, collectionHeader=None, collectionDirectory=None, tableData=None, compressedData=None, metadata=None, privateData=None, flavor="cff", Base128Bug=False):
    """Assemble default WOFF2 test data.

    Returns a list of parts: [header, directory] (plus collectionHeader and
    collectionDirectory for collections) followed by compressedData and,
    when supplied, a (metadata, compMetadata) pair and privateData. Missing
    parts are deep-copied from the module defaults chosen by *flavor*.
    Passing a collectionDirectory switches to font-collection (ttcf) layout.
    Base128Bug is forwarded to packTestDirectory, presumably to generate a
    deliberately malformed directory encoding.
    """
    isCollection = collectionDirectory is not None
    parts = []
    # setup the header
    if header is None:
        header = deepcopy(testDataWOFFHeader)
    parts.append(header)
    # setup the directory
    if directory is None:
        if flavor == "cff":
            directory = deepcopy(testCFFDataWOFFDirectory)
        else:
            directory = deepcopy(testTTFDataWOFFDirectory)
    parts.append(directory)
    if isCollection:
        if collectionHeader is None:
            collectionHeader = dict(version=0x00010000, numFonts=len(collectionDirectory))
        parts.append(collectionHeader)
        parts.append(collectionDirectory)
    # setup the table data
    if tableData is None:
        if flavor == "cff":
            tableData = deepcopy(sfntCFFTableData)
        else:
            tableData = deepcopy(sfntTTFTableData)
    if compressedData is None:
        if flavor == "cff":
            compressedData = deepcopy(sfntCFFCompressedData)
        else:
            compressedData = deepcopy(sfntTTFCompressedData)
    parts.append(compressedData)
    # sanity checks; collections may repeat tags across member fonts,
    # so the tag-set comparison only applies to single fonts
    assert len(directory) == len(tableData)
    if not isCollection:
        assert set(tableData.keys()) == set([entry["tag"] for entry in directory])
    # apply the directory data to the header
    header["numTables"] = len(directory)
    if isCollection:
        header["flavor"] = "ttcf"
    elif "CFF " in tableData:
        header["flavor"] = "OTTO"
    else:
        header["flavor"] = "\000\001\000\000"
    # apply the table data to the directory and the header
    if isCollection:
        # TTC header: 12 bytes plus one 4-byte offset per font, then an
        # sfnt header + directory entries for each member font
        header["totalSfntSize"] = 12 + 4 * collectionHeader["numFonts"]
        header["totalSfntSize"] += sfntDirectorySize * collectionHeader["numFonts"]
        for entry in collectionDirectory:
            header["totalSfntSize"] += sfntDirectoryEntrySize * entry["numTables"]
    else:
        header["totalSfntSize"] = sfntDirectorySize + (len(directory) * sfntDirectoryEntrySize)
    header["totalCompressedSize"] = len(compressedData)
    for i, entry in enumerate(directory):
        tag = entry["tag"]
        # collections index table data by position; single fonts by tag
        if isCollection:
            origData, transformData = tableData[i][1]
        else:
            origData, transformData = tableData[tag]
        entry["origLength"] = len(origData)
        entry["transformLength"] = len(transformData)
        # mark hmtx entries whose transform produced a smaller table
        if tag == "hmtx" and entry["origLength"] > entry["transformLength"]:
            entry["transformFlag"] = 1
        header["totalSfntSize"] += entry["origLength"]
        header["totalSfntSize"] += calcPaddingLength(header["totalSfntSize"])
    header["length"] = woffHeaderSize + len(packTestDirectory(directory, Base128Bug=Base128Bug))
    if isCollection:
        header["length"] += len(packTestCollectionHeader(collectionHeader))
        header["length"] += len(packTestCollectionDirectory(collectionDirectory))
    header["length"] += len(compressedData)
    header["length"] += calcPaddingLength(header["length"])
    # setup the metadata
    if metadata is not None:
        # metadata may be passed as (rawMetadata, compressedMetadata)
        if isinstance(metadata, tuple):
            metadata, compMetadata = metadata
        else:
            compMetadata = None
        if compMetadata is None:
            compMetadata = brotli.compress(metadata, brotli.MODE_TEXT)
        header["metaOffset"] = header["length"]
        header["metaLength"] = len(compMetadata)
        header["metaOrigLength"] = len(metadata)
        header["length"] += len(compMetadata)
        # the metadata block is only padded when private data follows it
        if privateData is not None:
            header["length"] += calcPaddingLength(len(compMetadata))
        parts.append((metadata, compMetadata))
    # setup the private data
    if privateData is not None:
        header["privOffset"] = header["length"]
        header["privLength"] = len(privateData)
        header["length"] += len(privateData)
        parts.append(privateData)
    # return the parts
    return parts
def defaultTestData(header=None, directory=None, collectionHeader=None, collectionDirectory=None, tableData=None, compressedData=None, metadata=None, privateData=None, flavor="cff", Base128Bug=False):
    """Assemble default WOFF2 test data.

    Returns a list of parts: [header, directory] (plus collectionHeader and
    collectionDirectory for collections) followed by compressedData and,
    when supplied, a (metadata, compMetadata) pair and privateData. Missing
    parts are deep-copied from the module defaults chosen by *flavor*.
    Passing a collectionDirectory switches to font-collection (ttcf) layout.
    Base128Bug is forwarded to packTestDirectory, presumably to generate a
    deliberately malformed directory encoding.
    """
    isCollection = collectionDirectory is not None
    parts = []
    # setup the header
    if header is None:
        header = deepcopy(testDataWOFFHeader)
    parts.append(header)
    # setup the directory
    if directory is None:
        if flavor == "cff":
            directory = deepcopy(testCFFDataWOFFDirectory)
        else:
            directory = deepcopy(testTTFDataWOFFDirectory)
    parts.append(directory)
    if isCollection:
        if collectionHeader is None:
            collectionHeader = dict(version=0x00010000, numFonts=len(collectionDirectory))
        parts.append(collectionHeader)
        parts.append(collectionDirectory)
    # setup the table data
    if tableData is None:
        if flavor == "cff":
            tableData = deepcopy(sfntCFFTableData)
        else:
            tableData = deepcopy(sfntTTFTableData)
    if compressedData is None:
        if flavor == "cff":
            compressedData = deepcopy(sfntCFFCompressedData)
        else:
            compressedData = deepcopy(sfntTTFCompressedData)
    parts.append(compressedData)
    # sanity checks; collections may repeat tags across member fonts,
    # so the tag-set comparison only applies to single fonts
    assert len(directory) == len(tableData)
    if not isCollection:
        assert set(tableData.keys()) == set([entry["tag"] for entry in directory])
    # apply the directory data to the header
    header["numTables"] = len(directory)
    if isCollection:
        header["flavor"] = "ttcf"
    elif "CFF " in tableData:
        header["flavor"] = "OTTO"
    else:
        header["flavor"] = "\000\001\000\000"
    # apply the table data to the directory and the header
    if isCollection:
        # TTC header: 12 bytes plus one 4-byte offset per font, then an
        # sfnt header + directory entries for each member font
        header["totalSfntSize"] = 12 + 4 * collectionHeader["numFonts"]
        header["totalSfntSize"] += sfntDirectorySize * collectionHeader["numFonts"]
        for entry in collectionDirectory:
            header["totalSfntSize"] += sfntDirectoryEntrySize * entry["numTables"]
    else:
        header["totalSfntSize"] = sfntDirectorySize + (len(directory) * sfntDirectoryEntrySize)
    header["totalCompressedSize"] = len(compressedData)
    for i, entry in enumerate(directory):
        tag = entry["tag"]
        # collections index table data by position; single fonts by tag
        if isCollection:
            origData, transformData = tableData[i][1]
        else:
            origData, transformData = tableData[tag]
        entry["origLength"] = len(origData)
        entry["transformLength"] = len(transformData)
        header["totalSfntSize"] += entry["origLength"]
        header["totalSfntSize"] += calcPaddingLength(header["totalSfntSize"])
    header["length"] = woffHeaderSize + len(packTestDirectory(directory, Base128Bug=Base128Bug))
    if isCollection:
        header["length"] += len(packTestCollectionHeader(collectionHeader))
        header["length"] += len(packTestCollectionDirectory(collectionDirectory))
    header["length"] += len(compressedData)
    header["length"] += calcPaddingLength(header["length"])
    # setup the metadata
    if metadata is not None:
        # metadata may be passed as (rawMetadata, compressedMetadata)
        if isinstance(metadata, tuple):
            metadata, compMetadata = metadata
        else:
            compMetadata = None
        if compMetadata is None:
            compMetadata = brotli.compress(metadata, brotli.MODE_TEXT)
        header["metaOffset"] = header["length"]
        header["metaLength"] = len(compMetadata)
        header["metaOrigLength"] = len(metadata)
        header["length"] += len(compMetadata)
        # the metadata block is only padded when private data follows it
        if privateData is not None:
            header["length"] += calcPaddingLength(len(compMetadata))
        parts.append((metadata, compMetadata))
    # setup the private data
    if privateData is not None:
        header["privOffset"] = header["length"]
        header["privLength"] = len(privateData)
        header["length"] += len(privateData)
        parts.append(privateData)
    # return the parts
    return parts
def getSFNTCollectionData(pathOrFiles, modifyNames=True, reverseNames=False, DSIG=False, duplicates=None, shared=None):
    """Build raw TTC (font collection) data from a list of font sources.

    Parameters:
        pathOrFiles: sources accepted by TTFont, one per member font.
        modifyNames: rewrite name records (IDs 1, 4, 6) so each member
            font gets a unique name.
        reverseNames: number the rewritten names in reverse font order.
        DSIG: emit a version-2 TTC header with a placeholder DSIG table.
        duplicates: tags whose data must be stored per-font even when it
            is identical across fonts.
        shared: when non-empty, only these tags are allowed to be shared
            across member fonts.

    Returns the packed collection as a byte string.
    """
    # Normalize optional tag lists here instead of using mutable default
    # arguments (duplicates=[], shared=[]), which are shared across calls.
    if duplicates is None:
        duplicates = []
    if shared is None:
        shared = []
    tables = []
    offsets = {}
    fonts = [TTFont(pathOrFile) for pathOrFile in pathOrFiles]
    numFonts = len(fonts)
    # TTC header
    header = dict(
        TTCTag="ttcf",
        Version=0x00010000,
        numFonts=numFonts,
    )
    if DSIG:
        header["version"] = 0x00020000
    fontData = sstruct.pack(ttcHeaderFormat, header)
    # per-font offsets follow the TTC header
    offset = ttcHeaderSize + (numFonts * struct.calcsize(">L"))
    if DSIG:
        # the version-2 header carries three extra ULONGs (tag/length/offset)
        offset += 3 * struct.calcsize(">L")
    for font in fonts:
        fontData += struct.pack(">L", offset)
        tags = [i for i in sorted(font.keys()) if len(i) == 4]
        offset += sfntDirectorySize + (len(tags) * sfntDirectoryEntrySize)
    if DSIG:
        # placeholder DSIG payload, stored ahead of the shared table data
        data = "\0" * 4
        tables.append(data)
        offset += len(data)
        # NOTE(review): the DSIG offset is packed *after* advancing past the
        # payload, so it points just beyond the stored bytes — confirm this
        # is the intended (possibly deliberately invalid) test layout.
        fontData += struct.pack(">4s", "DSIG")
        fontData += struct.pack(">L", len(data))
        fontData += struct.pack(">L", offset)
    for i, font in enumerate(fonts):
        # Make the name table unique
        if modifyNames:
            index = i
            if reverseNames:
                index = len(fonts) - i - 1
            name = font["name"]
            for namerecord in name.names:
                nameID = namerecord.nameID
                string = namerecord.toUnicode()
                if nameID == 1:
                    namerecord.string = "%s %d" % (string, index)
                elif nameID == 4:
                    namerecord.string = string.replace("Regular", "%d Regular" % index)
                elif nameID == 6:
                    namerecord.string = string.replace("-", "%d-" % index)
        # sfnt offset table for this member font
        tags = [i for i in sorted(font.keys()) if len(i) == 4]
        searchRange, entrySelector, rangeShift = getSearchRange(len(tags), 16)
        offsetTable = dict(
            sfntVersion=font.sfntVersion,
            numTables=len(tags),
            searchRange=searchRange,
            entrySelector=entrySelector,
            rangeShift=rangeShift,
        )
        fontData += sstruct.pack(sfntDirectoryFormat, offsetTable)
        for tag in tags:
            data = font.getTableData(tag)
            checksum = calcTableChecksum(tag, data)
            entry = dict(
                tag=tag,
                offset=offset,
                length=len(data),
                checkSum=checksum,
            )
            # Store the table unless it can be shared with an earlier font:
            # store when sharing is restricted to other tags, when the tag is
            # forced to duplicate, or when this exact data was not seen yet.
            if (shared and tag not in shared) or tag in duplicates or data not in tables:
                tables.append(data)
                offsets[checksum] = offset
                offset += len(data) + calcPaddingLength(len(data))
            else:
                # reuse the offset of the identical, already-stored table
                entry["offset"] = offsets[checksum]
            fontData += sstruct.pack(sfntDirectoryEntryFormat, entry)
    # append the stored table data, each padded
    for table in tables:
        fontData += padData(table)
    for font in fonts:
        font.close()
    return fontData
def getSFNTCollectionData(pathOrFiles, modifyNames=True, reverseNames=False, duplicates=None):
    """Build raw TTC (font collection) data from a list of font sources.

    Parameters:
        pathOrFiles: sources accepted by TTFont, one per member font.
        modifyNames: rewrite name records (IDs 1, 4, 6) so each member
            font gets a unique name.
        reverseNames: number the rewritten names in reverse font order.
        duplicates: tags whose data must be stored per-font even when it
            is identical across fonts (otherwise identical data is shared).

    Returns the packed collection as a byte string.
    """
    # Normalize the optional tag list here instead of using a mutable
    # default argument (duplicates=[]), which is shared across calls.
    if duplicates is None:
        duplicates = []
    tables = []
    offsets = {}
    fonts = [TTFont(pathOrFile) for pathOrFile in pathOrFiles]
    numFonts = len(fonts)
    # TTC header
    header = dict(
        TTCTag="ttcf",
        Version=0x00010000,
        numFonts=numFonts,
    )
    fontData = sstruct.pack(ttcHeaderFormat, header)
    # per-font offsets follow the TTC header
    offset = ttcHeaderSize + (numFonts * struct.calcsize(">L"))
    for font in fonts:
        fontData += struct.pack(">L", offset)
        tags = [i for i in sorted(font.keys()) if len(i) == 4]
        offset += sfntDirectorySize + (len(tags) * sfntDirectoryEntrySize)
    for i, font in enumerate(fonts):
        # Make the name table unique
        if modifyNames:
            index = i
            if reverseNames:
                index = len(fonts) - i - 1
            name = font["name"]
            for namerecord in name.names:
                nameID = namerecord.nameID
                string = namerecord.toUnicode()
                if nameID == 1:
                    namerecord.string = "%s %d" % (string, index)
                elif nameID == 4:
                    namerecord.string = string.replace("Regular", "%d Regular" % index)
                elif nameID == 6:
                    namerecord.string = string.replace("-", "%d-" % index)
        # sfnt offset table for this member font
        tags = [i for i in sorted(font.keys()) if len(i) == 4]
        searchRange, entrySelector, rangeShift = getSearchRange(len(tags), 16)
        offsetTable = dict(
            sfntVersion=font.sfntVersion,
            numTables=len(tags),
            searchRange=searchRange,
            entrySelector=entrySelector,
            rangeShift=rangeShift,
        )
        fontData += sstruct.pack(sfntDirectoryFormat, offsetTable)
        for tag in tags:
            data = font.getTableData(tag)
            # reuse the checksum recorded in the source font's own directory
            checksum = font.reader.tables[tag].checkSum
            entry = dict(
                tag=tag,
                offset=offset,
                length=len(data),
                checkSum=checksum,
            )
            # Store the table unless it can be shared with an earlier font:
            # store when the tag is forced to duplicate, or when this exact
            # data was not seen yet.
            if tag in duplicates or data not in tables:
                tables.append(data)
                offsets[checksum] = offset
                offset += len(data) + calcPaddingLength(len(data))
            else:
                # reuse the offset of the identical, already-stored table
                entry["offset"] = offsets[checksum]
            fontData += sstruct.pack(sfntDirectoryEntryFormat, entry)
    # append the stored table data, each padded
    for table in tables:
        fontData += padData(table)
    for font in fonts:
        font.close()
    return fontData