def compile(self, ttFont):
    """Pack the big glyph metrics followed by the component array.

    Each component's glyphCode is refreshed from the font's current
    glyph order before its ebdtComponent record is packed.
    """
    dataList = []
    dataList.append(sstruct.pack(bigGlyphMetricsFormat, self.metrics))
    dataList.append(struct.pack(">H", len(self.componentArray)))
    for curComponent in self.componentArray:
        # Resolve the component's glyph name to its current glyph ID.
        curComponent.glyphCode = ttFont.getGlyphID(curComponent.name)
        dataList.append(sstruct.pack(ebdtComponentFormat, curComponent))
    # str.join replaces the deprecated string.join (removed in Python 3).
    return "".join(dataList)
def compile(self, ttFont):
    """Pack the 'maxp' table.

    TrueType fonts (those with a loaded 'glyf' table) may trigger a
    recalculation of the extended statistics when recalcBBoxes is set;
    CFF fonts only ever carry the short (version 0.5) header fields.
    """
    if 'glyf' in ttFont and ttFont.isLoaded('glyf') and ttFont.recalcBBoxes:
        self.recalc(ttFont)
    self.numGlyphs = len(ttFont.getGlyphOrder())
    # Anything that isn't explicitly version 0.5 is normalized to 1.0.
    if self.tableVersion != 0x00005000:
        self.tableVersion = 0x00010000
    data = sstruct.pack(maxpFormat_0_5, self)
    if self.tableVersion == 0x00010000:
        # Version 1.0 appends the TrueType-specific fields.
        data += sstruct.pack(maxpFormat_1_0_add, self)
    return data
def compile(self, ttFont):
    """Pack the OS/2 table, selecting the struct format by table version.

    The panose sub-record is temporarily swapped for its packed form so
    sstruct can serialize it inline, then restored afterwards.
    """
    panose = self.panose
    self.panose = sstruct.pack(panoseFormat, self.panose)
    if self.version == 0:
        data = sstruct.pack(OS2_format_0, self)
    elif self.version == 1:
        data = sstruct.pack(OS2_format_1, self)
    elif self.version in (2, 3, 4):
        data = sstruct.pack(OS2_format_2, self)
    else:
        from fontTools import ttLib
        # Call-form raise replaces the Python 2-only 'raise X, msg'
        # statement syntax (same behavior on Python 2, valid on 3).
        raise ttLib.TTLibError("unknown format for OS/2 table: version %s" % self.version)
    self.panose = panose
    return data
def compile(self, ttFont):
    """Serialize the OS/2 table.

    Chooses the struct format matching self.version; unknown versions
    raise TTLibError.  The panose object is packed in place for the
    duration of the call and restored before returning.
    """
    savedPanose = self.panose
    self.panose = sstruct.pack(panoseFormat, self.panose)
    if self.version == 0:
        fmt = OS2_format_0
    elif self.version == 1:
        fmt = OS2_format_1
    elif self.version in (2, 3, 4):
        fmt = OS2_format_2
    else:
        from fontTools import ttLib
        raise ttLib.TTLibError("unknown format for OS/2 table: version %s" % self.version)
    data = sstruct.pack(fmt, self)
    self.panose = savedPanose
    return data
def _calcMasterChecksum(self, directory):
    """Compute the 'head' table checkSumAdjustment value.

    Sums the checksums of every table plus the (SFNT-style) directory,
    then subtracts the total from the magic constant 0xB1B0AFBA.
    """
    # calculate checkSumAdjustment
    # A values() comprehension replaces the Python 2-only pattern of
    # indexing dict.keys(); summation order does not matter.
    checksums = [entry.checkSum for entry in self.tables.values()]
    # TODO(behdad) I'm fairly sure the checksum for woff is not working correctly.
    # Haven't debugged.
    if self.DirectoryEntry != SFNTDirectoryEntry:
        # Create a SFNT directory for checksum calculation purposes
        self.searchRange, self.entrySelector, self.rangeShift = getSearchRange(self.numTables)
        directory = sstruct.pack(sfntDirectoryFormat, self)
        # sorted() replaces the Python 2-only items()/list.sort() pattern.
        for tag, entry in sorted(self.tables.items()):
            sfntEntry = SFNTDirectoryEntry()
            for item in ['tag', 'checkSum', 'offset', 'length']:
                setattr(sfntEntry, item, getattr(entry, item))
            directory = directory + sfntEntry.toString()
    directory_end = sfntDirectorySize + len(self.tables) * sfntDirectoryEntrySize
    assert directory_end == len(directory)
    checksums.append(calcChecksum(directory))
    checksum = sum(checksums) & 0xffffffff
    # BiboAfba!
    checksumadjustment = (0xB1B0AFBA - checksum) & 0xffffffff
    return checksumadjustment
def close(self):
    """Finalize the WOFF: validate the table set, fix up the head
    checksum, assign directory offsets, then write the header,
    directory, table data, metadata and private data to the file."""
    if self.numTables != len(self.tables):
        raise WOFFLibError("wrong number of tables; expected %d, found %d" % (self.numTables, len(self.tables)))
    # first, handle the checkSumAdjustment
    if self.recalculateHeadChecksum and "head" in self.tables:
        self._handleHeadChecksum()
    # check the table directory conformance
    for tag, (index, entry, data) in sorted(self.tables.items()):
        self._checkTableConformance(entry, data)
    # write the header
    header = sstruct.pack(woffHeaderFormat, self)
    self.file.seek(0)
    self.file.write(header)
    # update the directory offsets
    # Table data begins immediately after the header and directory.
    offset = woffHeaderSize + (woffDirectoryEntrySize * self.numTables)
    order = self._tableOrder()
    for tag in order:
        index, entry, data = self.tables[tag]
        entry.offset = offset
        offset += calc4BytePaddedLength(entry.compLength)  # ensure byte alignment
    # write the directory
    self._writeTableDirectory()
    # write the table data
    self._writeTableData()
    # write the metadata
    self._writeMetadata()
    # write the private data
    self._writePrivateData()
    # write the header
    # (rewritten last: earlier steps may have updated header fields)
    self._writeHeader()
    # go to the beginning of the file
    self.file.seek(0)
def test_incorrect_compressed_size(self):
    """A zeroed totalCompressedSize must surface a brotli decoding error."""
    header = sstruct.unpack(woff2DirectoryFormat, self.file.read(woff2DirectorySize))
    header['totalCompressedSize'] = 0
    badHeader = sstruct.pack(woff2DirectoryFormat, header)
    with self.assertRaises(brotli.error):
        WOFF2Reader(BytesIO(badHeader + self.file.read()))
def calcHeadCheckSumAdjustment(flavor, tables):
    """Return the 'head' checkSumAdjustment for the given tables.

    Builds an SFNT header + directory from the {tag: entry-dict} mapping,
    sums all table checksums plus the directory checksum, and subtracts
    the total from the magic constant 0xB1B0AFBA.
    """
    numTables = len(tables)
    # build the sfnt header
    searchRange, entrySelector, rangeShift = getSearchRange(numTables)
    sfntDirectoryData = dict(
        sfntVersion=flavor,
        numTables=numTables,
        searchRange=searchRange,
        entrySelector=entrySelector,
        rangeShift=rangeShift)
    # build the sfnt directory
    directory = sstruct.pack(sfntDirectoryFormat, sfntDirectoryData)
    # The loop already unpacks `entry`; the original redundantly
    # re-fetched it with tables[tag].
    for tag, entry in sorted(tables.items()):
        sfntEntry = SFNTDirectoryEntry()
        sfntEntry.tag = tag
        sfntEntry.checkSum = entry["checkSum"]
        sfntEntry.offset = entry["offset"]
        sfntEntry.length = entry["length"]
        directory += sfntEntry.toString()
    # calculate the checkSumAdjustment
    checkSums = [entry["checkSum"] for entry in tables.values()]
    checkSums.append(calcChecksum(directory))
    checkSumAdjustment = (0xB1B0AFBA - sum(checkSums)) & 0xffffffff
    # done
    return checkSumAdjustment
def _writeTableDirectory(self):
    """Write every WOFF directory entry, sorted by tag, starting right
    after the file header."""
    if self.verbose:
        debugmsg("writing table directory")
    self.file.seek(woffHeaderSize)
    for tag in sorted(self.tables):
        # Each stored value is an (index, entry, data) triple; only the
        # entry is serialized into the directory.
        entry = self.tables[tag][1]
        self.file.write(sstruct.pack(woffDirectoryEntryFormat, entry))
def _calcMasterChecksum(self, directory):
    """Compute the 'head' table checkSumAdjustment value.

    Sums the checksums of every table plus the (SFNT-style) directory,
    then subtracts the total from the magic constant 0xB1B0AFBA.
    """
    # calculate checkSumAdjustment
    # A values() comprehension replaces the Python 2-only pattern of
    # indexing dict.keys(); summation order does not matter.
    checksums = [entry.checkSum for entry in self.tables.values()]
    # TODO(behdad) I'm fairly sure the checksum for woff is not working correctly.
    # Haven't debugged.
    if self.DirectoryEntry != SFNTDirectoryEntry:
        # Create a SFNT directory for checksum calculation purposes
        self.searchRange, self.entrySelector, self.rangeShift = getSearchRange(
            self.numTables)
        directory = sstruct.pack(sfntDirectoryFormat, self)
        # sorted() replaces the Python 2-only items()/list.sort() pattern.
        for tag, entry in sorted(self.tables.items()):
            sfntEntry = SFNTDirectoryEntry()
            for item in ['tag', 'checkSum', 'offset', 'length']:
                setattr(sfntEntry, item, getattr(entry, item))
            directory = directory + sfntEntry.toString()
    directory_end = sfntDirectorySize + len(
        self.tables) * sfntDirectoryEntrySize
    assert directory_end == len(directory)
    checksums.append(calcChecksum(directory))
    checksum = sum(checksums) & 0xffffffff
    # BiboAfba!
    checksumadjustment = (0xB1B0AFBA - checksum) & 0xffffffff
    return checksumadjustment
def compile(self, ttFont):
    """Pack the GPKG table: header, the GMAP and glyphlet offset arrays,
    then the raw GMAP and glyphlet blobs.

    Each offset array has one extra trailing entry holding the end
    offset of the last blob.
    """
    self.numGMAPs = len(self.GMAPs)
    self.numGlyplets = len(self.glyphlets)
    GMAPoffsets = [0] * (self.numGMAPs + 1)
    glyphletOffsets = [0] * (self.numGlyplets + 1)
    dataList = [sstruct.pack(GPKGFormat, self)]
    # Data begins after the header and both offset arrays (4 bytes/entry).
    pos = len(dataList[0]) + (self.numGMAPs + 1) * 4 + (self.numGlyplets + 1) * 4
    GMAPoffsets[0] = pos
    for i in range(1, self.numGMAPs + 1):
        pos += len(self.GMAPs[i - 1])
        GMAPoffsets[i] = pos
    # An explicit big-endian struct format replaces the numpy array +
    # conditional byteswap dance, which relied on the Python 2-only '<>'
    # operator and the deprecated ndarray.tostring().
    dataList.append(struct.pack(">%dL" % len(GMAPoffsets), *GMAPoffsets))
    glyphletOffsets[0] = pos
    for i in range(1, self.numGlyplets + 1):
        pos += len(self.glyphlets[i - 1])
        glyphletOffsets[i] = pos
    dataList.append(struct.pack(">%dL" % len(glyphletOffsets), *glyphletOffsets))
    dataList += self.GMAPs
    dataList += self.glyphlets
    data = "".join(dataList)
    return data
def calcHeadCheckSumAdjustment(flavor, tables):
    """Return the 'head' checkSumAdjustment for the given tables.

    Builds an SFNT header + directory from the {tag: entry-dict} mapping,
    sums all table checksums plus the directory checksum, and subtracts
    the total from the magic constant 0xB1B0AFBA.
    """
    numTables = len(tables)
    # build the sfnt header
    searchRange, entrySelector, rangeShift = getSearchRange(numTables)
    sfntDirectoryData = dict(
        sfntVersion=flavor,
        numTables=numTables,
        searchRange=searchRange,
        entrySelector=entrySelector,
        rangeShift=rangeShift
    )
    # build the sfnt directory
    directory = sstruct.pack(sfntDirectoryFormat, sfntDirectoryData)
    # The loop already unpacks `entry`; the original redundantly
    # re-fetched it with tables[tag].
    for tag, entry in sorted(tables.items()):
        sfntEntry = SFNTDirectoryEntry()
        sfntEntry.tag = tag
        sfntEntry.checkSum = entry["checkSum"]
        sfntEntry.offset = entry["offset"]
        sfntEntry.length = entry["length"]
        directory += sfntEntry.toString()
    # calculate the checkSumAdjustment
    checkSums = [entry["checkSum"] for entry in tables.values()]
    checkSums.append(calcChecksum(directory))
    checkSumAdjustment = (0xB1B0AFBA - sum(checkSums)) & 0xffffffff
    # done
    return checkSumAdjustment
def close(self):
    """Finalize the WOFF: validate the table set, fix up the head
    checksum, assign directory offsets, then write the header,
    directory, table data, metadata and private data to the file."""
    if self.numTables != len(self.tables):
        raise WOFFLibError(
            "wrong number of tables; expected %d, found %d" % (self.numTables, len(self.tables)))
    # first, handle the checkSumAdjustment
    if self.recalculateHeadChecksum and "head" in self.tables:
        self._handleHeadChecksum()
    # check the table directory conformance
    for tag, (index, entry, data) in sorted(self.tables.items()):
        self._checkTableConformance(entry, data)
    # write the header
    header = sstruct.pack(woffHeaderFormat, self)
    self.file.seek(0)
    self.file.write(header)
    # update the directory offsets
    # Table data begins immediately after the header and directory.
    offset = woffHeaderSize + (woffDirectoryEntrySize * self.numTables)
    order = self._tableOrder()
    for tag in order:
        index, entry, data = self.tables[tag]
        entry.offset = offset
        offset += calc4BytePaddedLength(
            entry.compLength)  # ensure byte alignment
    # write the directory
    self._writeTableDirectory()
    # write the table data
    self._writeTableData()
    # write the metadata
    self._writeMetadata()
    # write the private data
    self._writePrivateData()
    # write the header
    # (rewritten last: earlier steps may have updated header fields)
    self._writeHeader()
    # go to the beginning of the file
    self.file.seek(0)
def compile(self, parentTable):
    """Pack this string record; the trailing offset field is written as
    a ushort or ulong depending on the parent table's metaFlags."""
    packed = sstruct.pack(METAStringRecordFormat, self)
    # metaFlags selects the offset width: 0 -> 16-bit, 1 -> 32-bit.
    # NOTE(review): any other metaFlags value raises NameError here,
    # exactly as the original did.
    if parentTable.metaFlags == 0:
        offsetData = struct.pack(">H", self.offset)
    elif parentTable.metaFlags == 1:
        offsetData = struct.pack(">L", self.offset)
    return packed + offsetData
def compile(self, ttFont):
    """Pack the DSIG table: fixed header, one signature-record header
    per signature, then the raw PKCS#7 payloads."""
    headerData = sstruct.pack(DSIG_HeaderFormat, self)
    headers = [headerData]
    # Payloads start after the header plus all signature-record headers.
    offset = len(headerData) + self.usNumSigs * sstruct.calcsize(DSIG_SignatureFormat)
    blocks = []
    for sigrec in self.signatureRecords:
        sigrec.cbSignature = len(sigrec.pkcs7)
        # Signature block: block header followed by the PKCS#7 blob.
        block = sstruct.pack(DSIG_SignatureBlockFormat, sigrec) + sigrec.pkcs7
        blocks.append(block)
        sigrec.ulLength = len(block)  # keep the redundant length in sync
        sigrec.ulOffset = offset
        headers.append(sstruct.pack(DSIG_SignatureFormat, sigrec))
        offset += sigrec.ulLength
    return "".join(headers + blocks)
def test_incorrect_file_size(self):
    """The reader must reject a header whose 'length' field disagrees
    with the actual file size."""
    header = sstruct.unpack(woff2DirectoryFormat, self.file.read(woff2DirectorySize))
    header['length'] -= 1
    badHeader = sstruct.pack(woff2DirectoryFormat, header)
    with self.assertRaisesRegex(
            ttLib.TTLibError, "doesn't match the actual file size"):
        WOFF2Reader(BytesIO(badHeader + self.file.read()))
def compile(self, ttFont):
    """Pack one GMAP record; the glyph name is NUL-padded to 32 bytes.

    A missing Unicode value (None) is written as 0.
    """
    # 'is None' replaces the '== None' equality test (PEP 8).
    if self.UV is None:
        self.UV = 0
    # ljust pads with NULs only when the name is shorter than 32 bytes,
    # matching the original explicit padding branch.
    self.name = self.name.ljust(32, "\0")
    data = sstruct.pack(GMAPRecordFormat1, self)
    return data
def compile(self, ttFont):
    """Pack the DSIG table: header, per-signature records, then the raw
    PKCS#7 signature blocks."""
    headerData = sstruct.pack(DSIG_HeaderFormat, self)
    recordSize = sstruct.calcsize(DSIG_SignatureFormat)
    # Signature data begins after the header and all signature records.
    runningOffset = len(headerData) + self.usNumSigs * recordSize
    records = []
    payloads = []
    for sigrec in self.signatureRecords:
        sigrec.cbSignature = len(sigrec.pkcs7)
        block = sstruct.pack(DSIG_SignatureBlockFormat, sigrec) + sigrec.pkcs7
        payloads.append(block)
        sigrec.ulLength = len(block)  # keep the redundant length in sync
        sigrec.ulOffset = runningOffset
        records.append(sstruct.pack(DSIG_SignatureFormat, sigrec))
        runningOffset += sigrec.ulLength
    return ''.join([headerData] + records + payloads)
def test_incorrect_file_size(self):
    """A 'length' field that disagrees with the real file size must be
    rejected with a TTLibError."""
    raw = self.file.read(woff2DirectorySize)
    header = sstruct.unpack(woff2DirectoryFormat, raw)
    header['length'] -= 1
    corrupted = sstruct.pack(woff2DirectoryFormat, header)
    remainder = self.file.read()
    with self.assertRaisesRegex(ttLib.TTLibError, "doesn't match the actual file size"):
        WOFF2Reader(BytesIO(corrupted + remainder))
def compile(self, ttFont):
    """Pack one GMAP record; the glyph name is NUL-padded to 32 bytes.

    A missing Unicode value (None) is written as 0.
    """
    # 'is None' replaces the '== None' equality test (PEP 8).
    if self.UV is None:
        self.UV = 0
    # ljust pads with NULs only when the name is shorter than 32 bytes,
    # matching the original explicit padding branch.
    self.name = self.name.ljust(32, "\0")
    data = sstruct.pack(GMAPRecordFormat1, self)
    return data
def compile(self, ttFont):
    """Pack the sbix table: header plus per-set offset records,
    followed by every bitmap set's data (sets ordered by key)."""
    self.numSets = len(self.bitmapSets)
    headerData = sstruct.pack(sbixHeaderFormat, self)
    # The first set begins right after the header and the per-set
    # offset records.
    setOffset = sbixHeaderFormatSize + sbixBitmapSetOffsetFormatSize * self.numSets
    setData = ""
    for key in sorted(self.bitmapSets.keys()):
        bitmapSet = self.bitmapSets[key]
        bitmapSet.compile(ttFont)
        # Record where this set will land, then append its offset entry
        # to the header block.
        bitmapSet.offset = setOffset
        headerData += sstruct.pack(sbixBitmapSetOffsetFormat, bitmapSet)
        setOffset += len(bitmapSet.data)
        setData += bitmapSet.data
    return headerData + setData
def compile(self, ttFont):
    """Pack the META table.

    Offsets to string records are 16-bit when metaFlags bit 0 is clear
    and 32-bit when it is set.  Widening the offsets changes record
    sizes (and therefore the offsets themselves), so compilation is
    retried until the chosen width and the computed offsets agree.
    """
    offsetOK = 0
    self.nMetaRecs = len(self.glyphRecords)
    count = 0
    while (offsetOK != 1):
        count = count + 1
        if count > 4:
            # Fix: the original called the undefined name pdb_set_trace()
            # (a NameError when reached); drop into the debugger properly
            # if the offset-width negotiation ever fails to converge.
            import pdb
            pdb.set_trace()
        metaData = sstruct.pack(METAHeaderFormat, self)
        # Each glyph record is 6 bytes, plus 2 more when 32-bit offsets
        # are in use (metaFlags bit 0 set).
        stringRecsOffset = len(metaData) + self.nMetaRecs * (6 + 2*(self.metaFlags & 1))
        stringRecSize = (6 + 2*(self.metaFlags & 1))
        for glyphRec in self.glyphRecords:
            glyphRec.offset = stringRecsOffset
            if (glyphRec.offset > 65535) and ((self.metaFlags & 1) == 0):
                # Offsets no longer fit in 16 bits: switch to 32-bit and retry.
                self.metaFlags = self.metaFlags + 1
                offsetOK = -1
                break
            metaData = metaData + glyphRec.compile(self)
            stringRecsOffset = stringRecsOffset + (glyphRec.nMetaEntry * stringRecSize)
            # this will be the String Record offset for the next GlyphRecord.
        if offsetOK == -1:
            offsetOK = 0
            continue
        # metaData now contains the header and all of the GlyphRecords. Its length should bw
        # the offset to the first StringRecord.
        stringOffset = stringRecsOffset
        for glyphRec in self.glyphRecords:
            assert (glyphRec.offset == len(metaData)), "Glyph record offset did not compile correctly! for rec:" + str(glyphRec)
            for stringRec in glyphRec.stringRecs:
                stringRec.offset = stringOffset
                if (stringRec.offset > 65535) and ((self.metaFlags & 1) == 0):
                    self.metaFlags = self.metaFlags + 1
                    offsetOK = -1
                    break
                metaData = metaData + stringRec.compile(self)
                stringOffset = stringOffset + stringRec.stringLen
        if offsetOK == -1:
            offsetOK = 0
            continue
        if ((self.metaFlags & 1) == 1) and (stringOffset < 65536):
            # 32-bit offsets turned out to be unnecessary; shrink and retry.
            self.metaFlags = self.metaFlags - 1
            continue
        else:
            offsetOK = 1
    # metaData now contains the header and all of the GlyphRecords and all of the String Records.
    # Its length should be the offset to the first string datum.
    for glyphRec in self.glyphRecords:
        for stringRec in glyphRec.stringRecs:
            assert (stringRec.offset == len(metaData)), "String offset did not compile correctly! for string:" + str(stringRec.string)
            metaData = metaData + stringRec.string
    return metaData
def compile(self, ttFont):
    """Pack the GMAP table: header, PS font name (padded to a 4-byte
    boundary), then all glyph records."""
    self.recordsCount = len(self.gmapRecords)
    self.fontNameLength = len(self.psFontName)
    # Records start at the next 4-byte boundary after the 12-byte
    # header plus the font name.  '//' keeps this an integer division
    # on Python 3 as well (plain '/' only floors on Python 2 ints).
    self.recordsOffset = 4 * (((self.fontNameLength + 12) + 3) // 4)
    data = sstruct.pack(GMAPFormat, self)
    data = data + self.psFontName
    data = data + "\0" * (self.recordsOffset - len(data))
    for record in self.gmapRecords:
        data = data + record.compile(ttFont)
    return data
def _buildheader(self): header = sstruct.pack(headerformat, self) header = header + apply(struct.pack, (">9h", ) + self.ffProperty) header = header + apply(struct.pack, (">hh", ) + self.ffIntl) header = header + struct.pack(">h", self.ffVersion) if DEBUG: print "header is the same?", self._rawheader == header and 'yes.' or 'no.' if self._rawheader <> header: print len(self._rawheader), len(header) self._rawheader = header
def compile(self, ttFont):
    """Pack a format 2 index subtable: base subtable data, image size,
    then the big glyph metrics."""
    # A comprehension replaces map(); on Python 3 map() returns an
    # iterator that would break the equality assertion below.
    glyphIds = [ttFont.getGlyphID(name) for name in self.names]
    # Make sure all the ids are consecutive. This is required by Format 2.
    assert glyphIds == list(range(self.firstGlyphIndex, self.lastGlyphIndex+1)), "Format 2 ids must be consecutive."
    # min() over the first elements replaces the Python 2-only
    # zip(*...)[0] subscripting.
    self.imageDataOffset = min(loc[0] for loc in self.locations)
    dataList = [EblcIndexSubTable.compile(self, ttFont)]
    dataList.append(struct.pack(">L", self.imageSize))
    dataList.append(sstruct.pack(bigGlyphMetricsFormat, self.metrics))
    # str.join replaces the deprecated string.join.
    return "".join(dataList)
def _buildheader(self): header = sstruct.pack(headerformat, self) header = header + apply(struct.pack, (">9h",) + self.ffProperty) header = header + apply(struct.pack, (">hh",) + self.ffIntl) header = header + struct.pack(">h", self.ffVersion) if DEBUG: print "header is the same?", self._rawheader == header and 'yes.' or 'no.' if self._rawheader <> header: print len(self._rawheader), len(header) self._rawheader = header
def compile(self, ttFont):
    """Pack the GMAP table: header, PS font name (padded to a 4-byte
    boundary), then all glyph records."""
    self.recordsCount = len(self.gmapRecords)
    self.fontNameLength = len(self.psFontName)
    # Records start at the next 4-byte boundary after the 12-byte
    # header plus the font name.  '//' keeps this an integer division
    # on Python 3 as well (plain '/' only floors on Python 2 ints).
    self.recordsOffset = 4 * (((self.fontNameLength + 12) + 3) // 4)
    data = sstruct.pack(GMAPFormat, self)
    data = data + self.psFontName
    data = data + "\0" * (self.recordsOffset - len(data))
    for record in self.gmapRecords:
        data = data + record.compile(ttFont)
    return data
def compile(self, ttFont):
    """Pack an index subtable carrying image size, big metrics, a glyph
    count and the glyph id array, padded to a 4-byte boundary."""
    # min() over the first elements replaces the Python 2-only
    # zip(*...)[0] subscripting.
    self.imageDataOffset = min(loc[0] for loc in self.locations)
    dataList = [EblcIndexSubTable.compile(self, ttFont)]
    dataList.append(struct.pack(">L", self.imageSize))
    dataList.append(sstruct.pack(bigGlyphMetricsFormat, self.metrics))
    # A comprehension replaces map() (iterator on Python 3).
    glyphIds = [ttFont.getGlyphID(name) for name in self.names]
    dataList.append(struct.pack(">L", len(glyphIds)))
    dataList += [struct.pack(">H", curId) for curId in glyphIds]
    # Pad with one ushort to keep the subtable 4-byte aligned.
    if len(glyphIds) % 2 == 1:
        dataList.append(struct.pack(">H", 0))
    # str.join replaces the deprecated string.join.
    return "".join(dataList)
def getInfo(self):
    """Lazily populate file info from the Mac OS file control block.

    No-op if the info was already fetched.
    """
    if self.__haveInfo:
        return
    # Pack the parameter block, hand its buffer address to the native
    # _getInfo call, then unpack the (mutated) buffer back onto self.
    data = sstruct.pack(_FCBPBFormat, self)
    buf = array.array("c", data)  # NOTE(review): "c" typecode is Python 2-only
    ptr = buf.buffer_info()[0]  # raw memory address of the buffer
    err = _getInfo(ptr)
    if err:
        raise Res.Error("can't get file info", err)
    sstruct.unpack(_FCBPBFormat, buf.tostring(), self)
    self.__haveInfo = 1
def compile(self, ttFont):
    """Pack the SING table header followed by the base glyph name."""
    self.nameLength = chr(len(self.baseGlyphName))
    # NOTE(review): 'compilecompileUniqueName' looks like a doubled
    # name -- confirm against the class definition before renaming.
    self.uniqueName = self.compilecompileUniqueName(self.uniqueName, 28)
    # SECURITY: eval() on table data is dangerous if the source is ever
    # untrusted; kept for compatibility, but consider ast.literal_eval.
    METAMD5List = eval(self.METAMD5)
    # join idiom replaces the quadratic string-concatenation loop.
    self.METAMD5 = "".join(chr(val) for val in METAMD5List)
    assert (len(self.METAMD5) == 16), "Failed to pack 16 byte MD5 hash in SING table"
    data = sstruct.pack(SINGFormat, self)
    data = data + self.baseGlyphName
    return data
def compile(self, ttFont):
    """Pack a format 2 index subtable: base subtable data, image size,
    then the big glyph metrics."""
    # A comprehension replaces map(); on Python 3 map() returns an
    # iterator that would break the equality assertion below.
    glyphIds = [ttFont.getGlyphID(name) for name in self.names]
    # Make sure all the ids are consecutive. This is required by Format 2.
    assert glyphIds == list(range(self.firstGlyphIndex, self.lastGlyphIndex + 1)), "Format 2 ids must be consecutive."
    # min() over the first elements replaces the Python 2-only
    # zip(*...)[0] subscripting.
    self.imageDataOffset = min(loc[0] for loc in self.locations)
    dataList = [EblcIndexSubTable.compile(self, ttFont)]
    dataList.append(struct.pack(">L", self.imageSize))
    dataList.append(sstruct.pack(bigGlyphMetricsFormat, self.metrics))
    # str.join replaces the deprecated string.join.
    return "".join(dataList)
def compile(self):
    """Pack an NFNT resource: header, bitmap bits, location table, then
    the combined offset/width table."""
    header = sstruct.pack(nfntHeaderFormat, self)
    nEntries = len(self.widthTable)
    # Comprehensions replace the preallocate-and-fill index loops; each
    # ow entry is one offset byte followed by one width byte.
    owTable = [chr(self.offsetTable[i]) + chr(self.widthTable[i])
               for i in range(nEntries)]
    # NOTE(review): "h" uses native byte order/alignment, exactly as the
    # original did -- presumably written on big-endian classic Mac OS.
    locTable = [struct.pack("h", self.locTable[i]) for i in range(nEntries)]
    # str.join replaces the deprecated string.join.
    owData = "".join(owTable)
    locData = "".join(locTable)
    assert len(locData) == len(owData) == 2 * (self.lastChar - self.firstChar + 3)
    return header + self.bits + locData + owData
def compile(self, ttFont):
    """Pack one sbix bitmap: 2-byte glyph id, then header + image data
    (or empty rawdata when there is no image format tag)."""
    if self.glyphName is None:
        from fontTools import ttLib
        # Call-form raise replaces the Python 2-only raise statement.
        raise ttLib.TTLibError("Can't compile bitmap without glyph name")
    # TODO: if ttFont has no maxp, cmap etc., ignore glyph names and compile by index?
    # (needed if you just want to compile the sbix table on its own)
    self.gid = struct.pack(">H", ttFont.getGlyphID(self.glyphName))
    if self.imageFormatTag is None:
        self.rawdata = ""
    else:
        self.rawdata = sstruct.pack(sbixBitmapHeaderFormat, self) + self.imageData
def compile(self, ttFont):
    """Pack the 'post' table.  Formats 1.0 and 3.0 are header-only;
    format 2.0 appends the glyph name data."""
    data = sstruct.pack(postFormat, self)
    if self.formatType == 1.0:
        pass  # we're done
    elif self.formatType == 2.0:
        data = data + self.encode_format_2_0(ttFont)
    elif self.formatType == 3.0:
        pass  # we're done
    else:
        # unsupported format (original comment said "supported")
        # Call-form raise replaces the Python 2-only raise statement.
        raise ttLib.TTLibError("'post' table format %f not supported" % self.formatType)
    return data
def compile(self, ttFont):
    """Serialize the 'post' table header and, for format 2.0, the glyph
    name data; unsupported formats raise TTLibError."""
    data = sstruct.pack(postFormat, self)
    fmt = self.formatType
    if fmt == 2.0:
        data = data + self.encode_format_2_0(ttFont)
    elif fmt not in (1.0, 3.0):
        raise ttLib.TTLibError("'post' table format %f not supported" % self.formatType)
    return data
def compile(self, ttFont):
    """Pack the SING table header followed by the base glyph name."""
    self.nameLength = chr(len(self.baseGlyphName))
    # NOTE(review): 'compilecompileUniqueName' looks like a doubled
    # name -- confirm against the class definition before renaming.
    self.uniqueName = self.compilecompileUniqueName(self.uniqueName, 28)
    # SECURITY: eval() on table data is dangerous if the source is ever
    # untrusted; kept for compatibility, but consider ast.literal_eval.
    METAMD5List = eval(self.METAMD5)
    # join idiom replaces the quadratic string-concatenation loop.
    self.METAMD5 = "".join(chr(val) for val in METAMD5List)
    assert (len(self.METAMD5) == 16
            ), "Failed to pack 16 byte MD5 hash in SING table"
    data = sstruct.pack(SINGFormat, self)
    data = data + self.baseGlyphName
    return data
def packSFNT(header, directory, tableData, flavor="cff",
        calcCheckSum=True, applyPadding=True, sortDirectory=True,
        searchRange=None, entrySelector=None, rangeShift=None):
    """Assemble a complete SFNT font.

    header: dict of sfnt header fields (version/numTables already set).
    directory: list of entry dicts with "tag", "checksum", "offset",
        "length" keys.
    tableData: {tag: raw table data} mapping.
    flavor: "cff" selects the "OTTO" sfntVersion, anything else TrueType.
    searchRange/entrySelector/rangeShift default to the values computed
    from the table count when not given explicitly.
    """
    # update the checkSum
    if calcCheckSum:
        if flavor == "cff":
            f = "OTTO"
        else:
            f = "\000\001\000\000"
        calcHeadCheckSumAdjustmentSFNT(directory, tableData, flavor=f)
    # update the header
    cSearchRange, cEntrySelector, cRangeShift = getSearchRange(len(directory))
    if searchRange is None:
        searchRange = cSearchRange
    if entrySelector is None:
        entrySelector = cEntrySelector
    if rangeShift is None:
        rangeShift = cRangeShift
    if flavor == "cff":
        header["sfntVersion"] = "OTTO"
    else:
        header["sfntVersion"] = "\000\001\000\000"
    header["searchRange"] = searchRange
    header["entrySelector"] = entrySelector
    header["rangeShift"] = rangeShift
    # version and num tables should already be set
    sfntData = sstruct.pack(sfntDirectoryFormat, header)
    # compile the directory
    sfntDirectoryEntries = {}
    entryOrder = []
    for entry in directory:
        sfntEntry = SFNTDirectoryEntry()
        sfntEntry.tag = entry["tag"]
        sfntEntry.checkSum = entry["checksum"]
        sfntEntry.offset = entry["offset"]
        sfntEntry.length = entry["length"]
        sfntDirectoryEntries[entry["tag"]] = sfntEntry
        entryOrder.append(entry["tag"])
    if sortDirectory:
        entryOrder = sorted(entryOrder)
    for tag in entryOrder:
        entry = sfntDirectoryEntries[tag]
        sfntData += entry.toString()
    # compile the data
    # Table payloads are written in offset order regardless of how the
    # directory entries were sorted above.
    directory = [(entry["offset"], entry["tag"]) for entry in directory]
    for o, tag in sorted(directory):
        data = tableData[tag]
        if applyPadding:
            data = padData(data)
        sfntData += data
    # done
    return sfntData
def compile(self, ttFont):
    """Build this bitmap set's binary data: one offset entry per glyph
    (plus a sentinel entry marking the end of the last bitmap),
    followed by all bitmap data, padded to a 4-byte boundary."""
    self.bitmapOffsets = ""
    self.bitmapData = ""
    glyphOrder = ttFont.getGlyphOrder()
    # first bitmap starts right after the header
    bitmapOffset = sbixBitmapSetHeaderFormatSize + sbixBitmapOffsetEntryFormatSize * (
        len(glyphOrder) + 1)
    for glyphName in glyphOrder:
        if glyphName in self.bitmaps:
            # we have a bitmap for this glyph
            myBitmap = self.bitmaps[glyphName]
        else:
            # must add empty bitmap for this glyph
            myBitmap = Bitmap(glyphName=glyphName)
        myBitmap.compile(ttFont)
        myBitmap.ulOffset = bitmapOffset
        self.bitmapData += myBitmap.rawdata
        bitmapOffset += len(myBitmap.rawdata)
        self.bitmapOffsets += sstruct.pack(sbixBitmapOffsetEntryFormat, myBitmap)
    # add last "offset", really the end address of the last bitmap
    dummy = Bitmap()
    dummy.ulOffset = bitmapOffset
    self.bitmapOffsets += sstruct.pack(sbixBitmapOffsetEntryFormat, dummy)
    # bitmap sets are padded to 4 byte boundaries
    dataLength = len(self.bitmapOffsets) + len(self.bitmapData)
    if dataLength % 4 != 0:
        padding = 4 - (dataLength % 4)
    else:
        padding = 0
    # pack header
    self.data = sstruct.pack(sbixBitmapSetHeaderFormat, self)
    # add offset, image data and padding after header
    self.data += self.bitmapOffsets + self.bitmapData + "\0" * padding
def compile(self, ttFont):
    """Pack the bitmap data table: version header followed by each
    glyph's bitmap data, deduplicated by object identity and padded per
    index subtable."""
    dataList = []
    dataList.append(sstruct.pack(ebdtTableVersionFormat, self))
    dataSize = len(dataList[0])
    # Keep a dict of glyphs that have been seen so they aren't remade.
    # This dict maps the id of the BitmapGlyph to the interval
    # in the data.
    glyphDict = {}
    # Go through the bitmap glyph data. Just in case the data for a glyph
    # changed the size metrics should be recalculated. There are a variety
    # of formats and they get stored in the EBLC table. That is why
    # recalculation is defered to the EblcIndexSubTable class and just
    # pass what is known about bitmap glyphs from this particular table.
    locator = ttFont[self.__class__.locatorName]
    # The builtin zip replaces the Python 2-only itertools.izip (same
    # pairs, just built eagerly on Python 2).
    for curStrike, curGlyphDict in zip(locator.strikes, self.strikeData):
        for curIndexSubTable in curStrike.indexSubTables:
            dataLocations = []
            for curName in curIndexSubTable.names:
                # Handle the data placement based on seeing the glyph or not.
                # Just save a reference to the location if the glyph has already
                # been saved in compile. This code assumes that glyphs will only
                # be referenced multiple times from indexFormat5. By luck the
                # code may still work when referencing poorly ordered fonts with
                # duplicate references. If there is a font that is unlucky the
                # respective compile methods for the indexSubTables will fail
                # their assertions. All fonts seem to follow this assumption.
                # More complicated packing may be needed if a counter-font exists.
                glyph = curGlyphDict[curName]
                objectId = id(glyph)
                if objectId not in glyphDict:
                    data = glyph.compile(ttFont)
                    data = curIndexSubTable.padBitmapData(data)
                    startByte = dataSize
                    dataSize += len(data)
                    endByte = dataSize
                    dataList.append(data)
                    dataLoc = (startByte, endByte)
                    glyphDict[objectId] = dataLoc
                else:
                    dataLoc = glyphDict[objectId]
                dataLocations.append(dataLoc)
            # Just use the new data locations in the indexSubTable.
            # The respective compile implementations will take care
            # of any of the problems in the convertion that may arise.
            curIndexSubTable.locations = dataLocations
    # str.join replaces the deprecated string.join.
    return "".join(dataList)
def compile(self, ttFont):
    """Assemble this bitmap set: per-glyph offset entries (with a
    trailing sentinel), the concatenated bitmap data, and 4-byte
    padding, all preceded by the packed set header."""
    offsetEntries = ""
    bitmapBlob = ""
    glyphOrder = ttFont.getGlyphOrder()
    # The first bitmap follows the set header and the offset entries
    # (one per glyph plus the sentinel).
    nextOffset = sbixBitmapSetHeaderFormatSize + sbixBitmapOffsetEntryFormatSize * (len(glyphOrder) + 1)
    for glyphName in glyphOrder:
        # Glyphs without an image still get an (empty) bitmap entry.
        bitmap = self.bitmaps.get(glyphName)
        if bitmap is None:
            bitmap = Bitmap(glyphName=glyphName)
        bitmap.compile(ttFont)
        bitmap.ulOffset = nextOffset
        bitmapBlob += bitmap.rawdata
        nextOffset += len(bitmap.rawdata)
        offsetEntries += sstruct.pack(sbixBitmapOffsetEntryFormat, bitmap)
    # Sentinel entry: the end address of the last bitmap.
    sentinel = Bitmap()
    sentinel.ulOffset = nextOffset
    offsetEntries += sstruct.pack(sbixBitmapOffsetEntryFormat, sentinel)
    # Bitmap sets are padded to 4-byte boundaries.
    payloadLength = len(offsetEntries) + len(bitmapBlob)
    padding = (4 - payloadLength % 4) % 4
    self.bitmapOffsets = offsetEntries
    self.bitmapData = bitmapBlob
    # Pack the header, then append offsets, image data and padding.
    self.data = sstruct.pack(sbixBitmapSetHeaderFormat, self)
    self.data += offsetEntries + bitmapBlob + "\0" * padding
def compile(self, ttFont):
    """Pack the 'hdmx' table: header plus one fixed-size record per
    ppem size, each holding a max-width byte and a width byte per glyph,
    NUL-padded to a 4-byte boundary."""
    self.version = 0
    numGlyphs = ttFont['maxp'].numGlyphs
    glyphOrder = ttFont.getGlyphOrder()
    # '//' keeps integer semantics on Python 3 as well ('/' already
    # floors on Python 2 ints).
    self.recordSize = 4 * ((2 + numGlyphs + 3) // 4)
    pad = (self.recordSize - 2 - numGlyphs) * "\0"
    self.numRecords = len(self.hdmx)
    data = sstruct.pack(hdmxHeaderFormat, self)
    # sorted(dict.items()) replaces the Python 2-only items()/sort() pattern.
    for ppem, widths in sorted(self.hdmx.items()):
        data = data + chr(ppem) + chr(max(widths.values()))
        # Iterating glyphOrder directly replaces range(len(...)) indexing.
        for glyphName in glyphOrder:
            data = data + chr(widths[glyphName])
        data = data + pad
    return data
def calcHeadCheckSumAdjustmentSFNT(directory, tableData, flavor=None): """ Set the checkSumAdjustment in the head table data. Grumble. """ # if the flavor is None, guess. if flavor is None: flavor = "\000\001\000\000" for entry in directory: if entry["tag"] == "CFF ": flavor = "OTTO" break assert flavor in ("OTTO", "\000\001\000\000") # make the sfnt header searchRange, entrySelector, rangeShift = getSearchRange(len(directory)) sfntHeaderData = dict(sfntVersion=flavor, numTables=len(directory), searchRange=searchRange, entrySelector=entrySelector, rangeShift=rangeShift) sfntData = sstruct.pack(sfntDirectoryFormat, sfntHeaderData) # make a SFNT table directory directory = [(entry["tag"], entry) for entry in directory] for tag, entry in sorted(directory): sfntEntry = SFNTDirectoryEntry() sfntEntry.tag = entry["tag"] sfntEntry.checkSum = entry["checksum"] sfntEntry.offset = entry["offset"] sfntEntry.length = entry["length"] sfntData += sfntEntry.toString() # calculate the checksum sfntDataChecksum = calcChecksum(sfntData) # gather all of the checksums checksums = [entry["checksum"] for o, entry in directory] checksums.append(sfntDataChecksum) # calculate the checksum checkSumAdjustment = sum(checksums) checkSumAdjustment = (0xB1B0AFBA - checkSumAdjustment) & 0xffffffff # set the value in the head table headTableData = tableData["head"] newHeadTableData = headTableData[:8] newHeadTableData += struct.pack(">L", checkSumAdjustment) newHeadTableData += headTableData[12:] tableData["head"] = newHeadTableData
def compile(self, glyfTable, recalcBBoxes=1):
    """Return this glyph's binary representation.

    Raw (never-unpacked) data is reused verbatim; empty glyphs compile
    to the empty string.  Output is padded to a 4-byte boundary.
    """
    if hasattr(self, "data"):
        # The glyph was never expanded; hand back the original bytes.
        return self.data
    if self.numberOfContours == 0:
        return ""
    if recalcBBoxes:
        self.recalcBounds(glyfTable)
    data = sstruct.pack(glyphHeaderFormat, self)
    if self.isComposite():
        body = self.compileComponents(glyfTable)
    else:
        body = self.compileCoordinates()
    data = data + body
    # From the spec: "Note that the local offsets should be word-aligned"
    # From a later MS spec: "Note that the local offsets should be long-aligned"
    # Let's be modern and align on 4-byte boundaries.
    remainder = len(data) % 4
    if remainder:
        # add pad bytes
        data = data + "\0" * (4 - remainder)
    return data
def compile(self, ttFont):
    """Pack the 'name' table (format 0): header, name records, then the
    shared string data.

    Identical strings are stored once and referenced by offset from
    every record that uses them.
    """
    if not hasattr(self, "names"):
        # only happens when there are NO name table entries read
        # from the TTX file
        self.names = []
    self.names.sort()  # sort according to the spec; see NameRecord.__cmp__()
    stringData = ""
    # 'tableFormat' avoids shadowing the builtin format().
    tableFormat = 0
    n = len(self.names)
    stringOffset = 6 + n * sstruct.calcsize(nameRecordFormat)
    data = struct.pack(">HHH", tableFormat, n, stringOffset)
    done = {}  # remember the data so we can reuse the "pointers"
    for name in self.names:
        # 'in' replaces the Python 2-only dict.has_key(); the unused
        # local 'lastoffset' was also dropped.
        if name.string in done:
            name.offset, name.length = done[name.string]
        else:
            name.offset, name.length = done[name.string] = len(stringData), len(name.string)
            stringData = stringData + name.string
        data = data + sstruct.pack(nameRecordFormat, name)
    return data + stringData
def close(self):
    """All tables must have been written to disk. Now write the
    directory.

    Raises TTLibError when the number of written tables does not match
    numTables; recalculates the master checksum when a 'head' table is
    present.
    """
    # sorted(dict.items()) replaces the Python 2-only items()/sort()
    # pattern; '!=' replaces '<>'; call-form raise replaces the
    # Python 2-only raise statement.
    tables = sorted(self.tables.items())
    if len(tables) != self.numTables:
        from fontTools import ttLib
        raise ttLib.TTLibError("wrong number of tables; expected %d, found %d" % (self.numTables, len(tables)))
    directory = sstruct.pack(sfntDirectoryFormat, self)
    self.file.seek(sfntDirectorySize)
    seenHead = 0
    for tag, entry in tables:
        if tag == "head":
            seenHead = 1
        directory = directory + entry.toString()
    if seenHead:
        self.calcMasterChecksum(directory)
    self.file.seek(0)
    self.file.write(directory)