diff --git a/Lib/fontTools/misc/loggingTools.py b/Lib/fontTools/misc/loggingTools.py index aacc08582..b4b9a9b05 100644 --- a/Lib/fontTools/misc/loggingTools.py +++ b/Lib/fontTools/misc/loggingTools.py @@ -250,8 +250,8 @@ class Timer(object): upon exiting the with-statement. >>> import logging - >>> log = logging.getLogger("fontTools") - >>> configLogger(level="DEBUG", format="%(message)s", stream=sys.stdout) + >>> log = logging.getLogger("my-fancy-timer-logger") + >>> configLogger(logger=log, level="DEBUG", format="%(message)s", stream=sys.stdout) >>> with Timer(log, 'do something'): ... time.sleep(0.01) Took ... to do something @@ -530,67 +530,6 @@ def deprecateFunction(msg, category=UserWarning): return decorator -class LastResortLogger(logging.Logger): - """ Adds support for 'lastResort' handler introduced in Python 3.2. - It allows to print messages to sys.stderr even when no explicit handler - was configured. - To enable it, you can do: - - import logging - logging.lastResort = StderrHandler(logging.WARNING) - logging.setLoggerClass(LastResortLogger) - """ - - def callHandlers(self, record): - # this is the same as Python 3.5's logging.Logger.callHandlers - c = self - found = 0 - while c: - for hdlr in c.handlers: - found = found + 1 - if record.levelno >= hdlr.level: - hdlr.handle(record) - if not c.propagate: - c = None # break out - else: - c = c.parent - if found == 0: - if logging.lastResort: - if record.levelno >= logging.lastResort.level: - logging.lastResort.handle(record) - elif ( - logging.raiseExceptions - and not self.manager.emittedNoHandlerWarning - ): - sys.stderr.write( - "No handlers could be found for logger" - ' "%s"\n' % self.name - ) - self.manager.emittedNoHandlerWarning = True - - -class StderrHandler(logging.StreamHandler): - """ This class is like a StreamHandler using sys.stderr, but always uses - whateve sys.stderr is currently set to rather than the value of - sys.stderr at handler construction time. - """ - - def __init__(self, level=logging.NOTSET): - """ - Initialize the handler. 
- """ - logging.Handler.__init__(self, level) - - @property - def stream(self): - # the try/execept avoids failures during interpreter shutdown, when - # globals are set to None - try: - return sys.stderr - except AttributeError: - return __import__("sys").stderr - - if __name__ == "__main__": import doctest sys.exit(doctest.testmod(optionflags=doctest.ELLIPSIS).failed) diff --git a/Lib/fontTools/subset/__init__.py b/Lib/fontTools/subset/__init__.py index 8cf82c122..6863bb6fc 100644 --- a/Lib/fontTools/subset/__init__.py +++ b/Lib/fontTools/subset/__init__.py @@ -1987,7 +1987,8 @@ def closure_glyphs(self, s): @_add_method(ttLib.getTableClass('MATH')) def closure_glyphs(self, s): - self.table.MathVariants.closure_glyphs(s) + if self.table.MathVariants: + self.table.MathVariants.closure_glyphs(s) @_add_method(otTables.MathItalicsCorrectionInfo) def subset_glyphs(self, s): @@ -2039,8 +2040,10 @@ def subset_glyphs(self, s): @_add_method(ttLib.getTableClass('MATH')) def subset_glyphs(self, s): s.glyphs = s.glyphs_mathed - self.table.MathGlyphInfo.subset_glyphs(s) - self.table.MathVariants.subset_glyphs(s) + if self.table.MathGlyphInfo: + self.table.MathGlyphInfo.subset_glyphs(s) + if self.table.MathVariants: + self.table.MathVariants.subset_glyphs(s) return True @_add_method(ttLib.getTableModule('glyf').Glyph) diff --git a/Lib/fontTools/ttLib/woff2.py b/Lib/fontTools/ttLib/woff2.py index c0c0e7044..a8722502e 100644 --- a/Lib/fontTools/ttLib/woff2.py +++ b/Lib/fontTools/ttLib/woff2.py @@ -16,7 +16,7 @@ from fontTools.ttLib.tables import ttProgram import logging -log = logging.getLogger(__name__) +log = logging.getLogger("fontTools.ttLib.woff2") haveBrotli = False try: @@ -82,7 +82,7 @@ class WOFF2Reader(SFNTReader): """Fetch the raw table data. Reconstruct transformed tables.""" entry = self.tables[Tag(tag)] if not hasattr(entry, 'data'): - if tag in woff2TransformedTableTags: + if entry.transformed: entry.data = self.reconstructTable(tag) else: entry.data = entry.loadData(self.transformBuffer) @@ -90,8 +90,6 @@ class WOFF2Reader(SFNTReader): def reconstructTable(self, tag): """Reconstruct table named 'tag' from transformed data.""" - if tag not in woff2TransformedTableTags: - raise TTLibError("transform for table '%s' is unknown" % tag) entry = self.tables[Tag(tag)] rawData = entry.loadData(self.transformBuffer) if tag == 'glyf': @@ -100,8 +98,10 @@ class WOFF2Reader(SFNTReader): data = self._reconstructGlyf(rawData, padding) elif tag == 'loca': data = self._reconstructLoca() + elif tag == 'hmtx': + data = self._reconstructHmtx(rawData) else: - raise NotImplementedError + raise TTLibError("transform for table '%s' is unknown" % tag) return data def _reconstructGlyf(self, data, padding=None): @@ -130,6 +130,34 @@ class WOFF2Reader(SFNTReader): % (self.tables['loca'].origLength, len(data))) return data + def _reconstructHmtx(self, data): + """ Return reconstructed hmtx table data. """ + # Before reconstructing 'hmtx' table we need to parse other tables: + # 'glyf' is required for reconstructing the sidebearings from the glyphs' + # bounding box; 'hhea' is needed for the numberOfHMetrics field. 
+ if "glyf" in self.flavorData.transformedTables: + # transformed 'glyf' table is self-contained, thus 'loca' not needed + tableDependencies = ("maxp", "hhea", "glyf") + else: + # decompiling untransformed 'glyf' requires 'loca', which requires 'head' + tableDependencies = ("maxp", "head", "hhea", "loca", "glyf") + for tag in tableDependencies: + self._decompileTable(tag) + hmtxTable = self.ttFont["hmtx"] = WOFF2HmtxTable() + hmtxTable.reconstruct(data, self.ttFont) + data = hmtxTable.compile(self.ttFont) + return data + + def _decompileTable(self, tag): + """Decompile table data and store it inside self.ttFont.""" + data = self[tag] + if self.ttFont.isLoaded(tag): + return self.ttFont[tag] + tableClass = getTableClass(tag) + table = tableClass(tag) + self.ttFont.tables[tag] = table + table.decompile(data, self.ttFont) + class WOFF2Writer(SFNTWriter): @@ -199,7 +227,7 @@ class WOFF2Writer(SFNTWriter): # See: # https://github.com/khaledhosny/ots/issues/60 # https://github.com/google/woff2/issues/15 - if isTrueType: + if isTrueType and "glyf" in self.flavorData.transformedTables: self._normaliseGlyfAndLoca(padding=4) self._setHeadTransformFlag() @@ -234,13 +262,7 @@ class WOFF2Writer(SFNTWriter): if self.sfntVersion == "OTTO": return - # make up glyph names required to decompile glyf table - self._decompileTable('maxp') - numGlyphs = self.ttFont['maxp'].numGlyphs - glyphOrder = ['.notdef'] + ["glyph%.5d" % i for i in range(1, numGlyphs)] - self.ttFont.setGlyphOrder(glyphOrder) - - for tag in ('head', 'loca', 'glyf'): + for tag in ('maxp', 'head', 'loca', 'glyf'): self._decompileTable(tag) self.ttFont['glyf'].padding = padding for tag in ('glyf', 'loca'): @@ -265,6 +287,8 @@ class WOFF2Writer(SFNTWriter): tableClass = WOFF2LocaTable elif tag == 'glyf': tableClass = WOFF2GlyfTable + elif tag == 'hmtx': + tableClass = WOFF2HmtxTable else: tableClass = getTableClass(tag) table = tableClass(tag) @@ -293,11 +317,17 @@ class WOFF2Writer(SFNTWriter): def _transformTables(self): """Return transformed font data.""" + transformedTables = self.flavorData.transformedTables for tag, entry in self.tables.items(): - if tag in woff2TransformedTableTags: + data = None + if tag in transformedTables: data = self.transformTable(tag) - else: + if data is not None: + entry.transformed = True + if data is None: + # pass-through the table data without transformation data = entry.data + entry.transformed = False entry.offset = self.nextTableOffset entry.saveData(self.transformBuffer, data) self.nextTableOffset += entry.length @@ -306,9 +336,9 @@ class WOFF2Writer(SFNTWriter): return fontData def transformTable(self, tag): - """Return transformed table data.""" - if tag not in woff2TransformedTableTags: - raise TTLibError("Transform for table '%s' is unknown" % tag) + """Return transformed table data, or None if some pre-conditions aren't + met -- in which case, the non-transformed table data will be used. 
+ """ if tag == "loca": data = b"" elif tag == "glyf": @@ -316,8 +346,15 @@ class WOFF2Writer(SFNTWriter): self._decompileTable(tag) glyfTable = self.ttFont['glyf'] data = glyfTable.transform(self.ttFont) + elif tag == "hmtx": + if "glyf" not in self.tables: + return + for tag in ("maxp", "head", "hhea", "loca", "glyf", "hmtx"): + self._decompileTable(tag) + hmtxTable = self.ttFont["hmtx"] + data = hmtxTable.transform(self.ttFont) # can be None else: - raise NotImplementedError + raise TTLibError("Transform for table '%s' is unknown" % tag) return data def _calcMasterChecksum(self): @@ -533,11 +570,9 @@ class WOFF2DirectoryEntry(DirectoryEntry): # otherwise, tag is derived from a fixed 'Known Tags' table self.tag = woff2KnownTags[self.flags & 0x3F] self.tag = Tag(self.tag) - if self.flags & 0xC0 != 0: - raise TTLibError('bits 6-7 are reserved and must be 0') self.origLength, data = unpackBase128(data) self.length = self.origLength - if self.tag in woff2TransformedTableTags: + if self.transformed: self.length, data = unpackBase128(data) if self.tag == 'loca' and self.length != 0: raise TTLibError( @@ -550,10 +585,44 @@ class WOFF2DirectoryEntry(DirectoryEntry): if (self.flags & 0x3F) == 0x3F: data += struct.pack('>4s', self.tag.tobytes()) data += packBase128(self.origLength) - if self.tag in woff2TransformedTableTags: + if self.transformed: data += packBase128(self.length) return data + @property + def transformVersion(self): + """Return bits 6-7 of table entry's flags, which indicate the preprocessing + transformation version number (between 0 and 3). + """ + return self.flags >> 6 + + @transformVersion.setter + def transformVersion(self, value): + assert 0 <= value <= 3 + self.flags |= value << 6 + + @property + def transformed(self): + """Return True if the table has any transformation, else return False.""" + # For all tables in a font, except for 'glyf' and 'loca', the transformation + # version 0 indicates the null transform (where the original table data is + # passed directly to the Brotli compressor). For 'glyf' and 'loca' tables, + # transformation version 3 indicates the null transform + if self.tag in {"glyf", "loca"}: + return self.transformVersion != 3 + else: + return self.transformVersion != 0 + + @transformed.setter + def transformed(self, booleanValue): + # here we assume that a non-null transform means version 0 for 'glyf' and + # 'loca' and 1 for every other table (e.g. hmtx); but that may change as + # new transformation formats are introduced in the future (if ever). + if self.tag in {"glyf", "loca"}: + self.transformVersion = 3 if not booleanValue else 0 + else: + self.transformVersion = int(booleanValue) + class WOFF2LocaTable(getTableClass('loca')): """Same as parent class. 
The only difference is that it attempts to preserve @@ -652,19 +721,7 @@ class WOFF2GlyfTable(getTableClass('glyf')): def transform(self, ttFont): """ Return transformed 'glyf' data """ self.numGlyphs = len(self.glyphs) - if not hasattr(self, "glyphOrder"): - try: - self.glyphOrder = ttFont.getGlyphOrder() - except: - self.glyphOrder = None - if self.glyphOrder is None: - self.glyphOrder = [".notdef"] - self.glyphOrder.extend(["glyph%.5d" % i for i in range(1, self.numGlyphs)]) - if len(self.glyphOrder) != self.numGlyphs: - raise TTLibError( - "incorrect glyphOrder: expected %d glyphs, found %d" % - (len(self.glyphOrder), self.numGlyphs)) - + assert len(self.glyphOrder) == self.numGlyphs if 'maxp' in ttFont: ttFont['maxp'].numGlyphs = self.numGlyphs self.indexFormat = ttFont['head'].indexToLocFormat @@ -909,13 +966,193 @@ class WOFF2GlyfTable(getTableClass('glyf')): self.glyphStream += triplets.tostring() +class WOFF2HmtxTable(getTableClass("hmtx")): + + def __init__(self, tag=None): + self.tableTag = Tag(tag or 'hmtx') + + def reconstruct(self, data, ttFont): + flags, = struct.unpack(">B", data[:1]) + data = data[1:] + if flags & 0b11111100 != 0: + raise TTLibError("Bits 2-7 of '%s' flags are reserved" % self.tableTag) + + # When bit 0 is _not_ set, the lsb[] array is present + hasLsbArray = flags & 1 == 0 + # When bit 1 is _not_ set, the leftSideBearing[] array is present + hasLeftSideBearingArray = flags & 2 == 0 + if hasLsbArray and hasLeftSideBearingArray: + raise TTLibError( + "either bits 0 or 1 (or both) must set in transformed '%s' flags" + % self.tableTag + ) + + glyfTable = ttFont["glyf"] + headerTable = ttFont["hhea"] + glyphOrder = glyfTable.glyphOrder + numGlyphs = len(glyphOrder) + numberOfHMetrics = min(int(headerTable.numberOfHMetrics), numGlyphs) + + assert len(data) >= 2 * numberOfHMetrics + advanceWidthArray = array.array("H", data[:2 * numberOfHMetrics]) + if sys.byteorder != "big": + advanceWidthArray.byteswap() + data = data[2 * numberOfHMetrics:] + + if hasLsbArray: + assert len(data) >= 2 * numberOfHMetrics + lsbArray = array.array("h", data[:2 * numberOfHMetrics]) + if sys.byteorder != "big": + lsbArray.byteswap() + data = data[2 * numberOfHMetrics:] + else: + # compute (proportional) glyphs' lsb from their xMin + lsbArray = array.array("h") + for i, glyphName in enumerate(glyphOrder): + if i >= numberOfHMetrics: + break + glyph = glyfTable[glyphName] + xMin = getattr(glyph, "xMin", 0) + lsbArray.append(xMin) + + numberOfSideBearings = numGlyphs - numberOfHMetrics + if hasLeftSideBearingArray: + assert len(data) >= 2 * numberOfSideBearings + leftSideBearingArray = array.array("h", data[:2 * numberOfSideBearings]) + if sys.byteorder != "big": + leftSideBearingArray.byteswap() + data = data[2 * numberOfSideBearings:] + else: + # compute (monospaced) glyphs' leftSideBearing from their xMin + leftSideBearingArray = array.array("h") + for i, glyphName in enumerate(glyphOrder): + if i < numberOfHMetrics: + continue + glyph = glyfTable[glyphName] + xMin = getattr(glyph, "xMin", 0) + leftSideBearingArray.append(xMin) + + if data: + raise TTLibError("too much '%s' table data" % self.tableTag) + + self.metrics = {} + for i in range(numberOfHMetrics): + glyphName = glyphOrder[i] + advanceWidth, lsb = advanceWidthArray[i], lsbArray[i] + self.metrics[glyphName] = (advanceWidth, lsb) + lastAdvance = advanceWidthArray[-1] + for i in range(numberOfSideBearings): + glyphName = glyphOrder[i + numberOfHMetrics] + self.metrics[glyphName] = (lastAdvance, leftSideBearingArray[i]) + + 
def transform(self, ttFont): + glyphOrder = ttFont.getGlyphOrder() + glyf = ttFont["glyf"] + hhea = ttFont["hhea"] + numberOfHMetrics = hhea.numberOfHMetrics + + # check if any of the proportional glyphs has left sidebearings that + # differ from their xMin bounding box values. + hasLsbArray = False + for i in range(numberOfHMetrics): + glyphName = glyphOrder[i] + lsb = self.metrics[glyphName][1] + if lsb != getattr(glyf[glyphName], "xMin", 0): + hasLsbArray = True + break + + # do the same for the monospaced glyphs (if any) at the end of hmtx table + hasLeftSideBearingArray = False + for i in range(numberOfHMetrics, len(glyphOrder)): + glyphName = glyphOrder[i] + lsb = self.metrics[glyphName][1] + if lsb != getattr(glyf[glyphName], "xMin", 0): + hasLeftSideBearingArray = True + break + + # if we need to encode both sidebearings arrays, then no transformation is + # applicable, and we must use the untransformed hmtx data + if hasLsbArray and hasLeftSideBearingArray: + return + + # set bit 0 and 1 when the respective arrays are _not_ present + flags = 0 + if not hasLsbArray: + flags |= 1 << 0 + if not hasLeftSideBearingArray: + flags |= 1 << 1 + + data = struct.pack(">B", flags) + + advanceWidthArray = array.array( + "H", + [ + self.metrics[glyphName][0] + for i, glyphName in enumerate(glyphOrder) + if i < numberOfHMetrics + ] + ) + if sys.byteorder != "big": + advanceWidthArray.byteswap() + data += advanceWidthArray.tostring() + + if hasLsbArray: + lsbArray = array.array( + "h", + [ + self.metrics[glyphName][1] + for i, glyphName in enumerate(glyphOrder) + if i < numberOfHMetrics + ] + ) + if sys.byteorder != "big": + lsbArray.byteswap() + data += lsbArray.tostring() + + if hasLeftSideBearingArray: + leftSideBearingArray = array.array( + "h", + [ + self.metrics[glyphOrder[i]][1] + for i in range(numberOfHMetrics, len(glyphOrder)) + ] + ) + if sys.byteorder != "big": + leftSideBearingArray.byteswap() + data += leftSideBearingArray.tostring() + + return data + + class WOFF2FlavorData(WOFFFlavorData): Flavor = 'woff2' - def __init__(self, reader=None): + def __init__(self, reader=None, transformedTables=None): + """Data class that holds the WOFF2 header major/minor version, any + metadata or private data (as bytes strings), and the set of + table tags that have transformations applied (if reader is not None), + or will have once the WOFF2 font is compiled. + """ if not haveBrotli: raise ImportError("No module named brotli") + + if reader is not None and transformedTables is not None: + raise TypeError( + "'reader' and 'transformedTables' arguments are mutually exclusive" + ) + + if transformedTables is None: + transformedTables = woff2TransformedTableTags + else: + if ( + "glyf" in transformedTables and "loca" not in transformedTables + or "loca" in transformedTables and "glyf" not in transformedTables + ): + raise ValueError( + "'glyf' and 'loca' must be transformed (or not) together" + ) + self.majorVersion = None self.minorVersion = None self.metaData = None @@ -935,6 +1172,13 @@ class WOFF2FlavorData(WOFFFlavorData): data = reader.file.read(reader.privLength) assert len(data) == reader.privLength self.privData = data + transformedTables = [ + tag + for tag, entry in reader.tables.items() + if entry.transformed + ] + + self.transformedTables = set(transformedTables) def unpackBase128(data): @@ -1091,6 +1335,164 @@ def pack255UShort(value): return struct.pack(">BH", 253, value) +def compress(input_file, output_file, transform_tables=None): + """Compress OpenType font to WOFF2. 
+ + Args: + input_file: a file path, file or file-like object (open in binary mode) + containing an OpenType font (either CFF- or TrueType-flavored). + output_file: a file path, file or file-like object where to save the + compressed WOFF2 font. + transform_tables: Optional[Iterable[str]]: a set of table tags for which + to enable preprocessing transformations. By default, only 'glyf' + and 'loca' tables are transformed. An empty set means disable all + transformations. + """ + log.info("Processing %s => %s" % (input_file, output_file)) + + font = TTFont(input_file, recalcBBoxes=False, recalcTimestamp=False) + font.flavor = "woff2" + + if transform_tables is not None: + font.flavorData = WOFF2FlavorData(transformedTables=transform_tables) + + font.save(output_file, reorderTables=False) + + +def decompress(input_file, output_file): + """Decompress WOFF2 font to OpenType font. + + Args: + input_file: a file path, file or file-like object (open in binary mode) + containing a compressed WOFF2 font. + output_file: a file path, file or file-like object where to save the + decompressed OpenType font. + """ + log.info("Processing %s => %s" % (input_file, output_file)) + + font = TTFont(input_file, recalcBBoxes=False, recalcTimestamp=False) + font.flavor = None + font.flavorData = None + font.save(output_file, reorderTables=True) + + +def main(args=None): + import argparse + from fontTools import configLogger + from fontTools.ttx import makeOutputFileName + + class _NoGlyfTransformAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + namespace.transform_tables.difference_update({"glyf", "loca"}) + + class _HmtxTransformAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + namespace.transform_tables.add("hmtx") + + parser = argparse.ArgumentParser( + prog="fonttools ttLib.woff2", + description="Compress and decompress WOFF2 fonts", + ) + + parser_group = parser.add_subparsers(title="sub-commands") + parser_compress = parser_group.add_parser("compress") + parser_decompress = parser_group.add_parser("decompress") + + for subparser in (parser_compress, parser_decompress): + group = subparser.add_mutually_exclusive_group(required=False) + group.add_argument( + "-v", + "--verbose", + action="store_true", + help="print more messages to console", + ) + group.add_argument( + "-q", + "--quiet", + action="store_true", + help="do not print messages to console", + ) + + parser_compress.add_argument( + "input_file", + metavar="INPUT", + help="the input OpenType font (.ttf or .otf)", + ) + parser_decompress.add_argument( + "input_file", + metavar="INPUT", + help="the input WOFF2 font", + ) + + parser_compress.add_argument( + "-o", + "--output-file", + metavar="OUTPUT", + help="the output WOFF2 font", + ) + parser_decompress.add_argument( + "-o", + "--output-file", + metavar="OUTPUT", + help="the output OpenType font", + ) + + transform_group = parser_compress.add_argument_group() + transform_group.add_argument( + "--no-glyf-transform", + dest="transform_tables", + nargs=0, + action=_NoGlyfTransformAction, + help="Do not transform glyf (and loca) tables", + ) + transform_group.add_argument( + "--hmtx-transform", + dest="transform_tables", + nargs=0, + action=_HmtxTransformAction, + help="Enable optional transformation for 'hmtx' table", + ) + + parser_compress.set_defaults( + subcommand=compress, + transform_tables={"glyf", "loca"}, + ) + parser_decompress.set_defaults(subcommand=decompress) + + options = 
vars(parser.parse_args(args)) + + subcommand = options.pop("subcommand", None) + if not subcommand: + parser.print_help() + return + + quiet = options.pop("quiet") + verbose = options.pop("verbose") + configLogger( + level=("ERROR" if quiet else "DEBUG" if verbose else "INFO"), + ) + + if not options["output_file"]: + if subcommand is compress: + extension = ".woff2" + elif subcommand is decompress: + # choose .ttf/.otf file extension depending on sfntVersion + with open(options["input_file"], "rb") as f: + f.seek(4) # skip 'wOF2' signature + sfntVersion = f.read(4) + assert len(sfntVersion) == 4, "not enough data" + extension = ".otf" if sfntVersion == b"OTTO" else ".ttf" + else: + raise AssertionError(subcommand) + options["output_file"] = makeOutputFileName( + options["input_file"], outputDir=None, extension=extension + ) + + try: + subcommand(**options) + except TTLibError as e: + parser.error(e) + + if __name__ == "__main__": - import doctest - sys.exit(doctest.testmod().failed) + sys.exit(main()) diff --git a/Lib/fontTools/unicodedata/Blocks.py b/Lib/fontTools/unicodedata/Blocks.py index 692fca896..132b09548 100644 --- a/Lib/fontTools/unicodedata/Blocks.py +++ b/Lib/fontTools/unicodedata/Blocks.py @@ -4,9 +4,9 @@ # Source: https://unicode.org/Public/UNIDATA/Blocks.txt # License: http://unicode.org/copyright.html#License # -# Blocks-11.0.0.txt -# Date: 2017-10-16, 24:39:00 GMT [KW] -# © 2017 Unicode®, Inc. +# Blocks-12.1.0.txt +# Date: 2019-03-08, 23:59:00 GMT [KW] +# © 2019 Unicode®, Inc. # For terms of use, see http://www.unicode.org/terms_of_use.html # # Unicode Character Database @@ -237,7 +237,8 @@ RANGES = [ 0x10E80, # .. 0x10EFF ; No_Block 0x10F00, # .. 0x10F2F ; Old Sogdian 0x10F30, # .. 0x10F6F ; Sogdian - 0x10F70, # .. 0x10FFF ; No_Block + 0x10F70, # .. 0x10FDF ; No_Block + 0x10FE0, # .. 0x10FFF ; Elymaic 0x11000, # .. 0x1107F ; Brahmi 0x11080, # .. 0x110CF ; Kaithi 0x110D0, # .. 0x110FF ; Sora Sompeng @@ -264,7 +265,8 @@ RANGES = [ 0x11800, # .. 0x1184F ; Dogra 0x11850, # .. 0x1189F ; No_Block 0x118A0, # .. 0x118FF ; Warang Citi - 0x11900, # .. 0x119FF ; No_Block + 0x11900, # .. 0x1199F ; No_Block + 0x119A0, # .. 0x119FF ; Nandinagari 0x11A00, # .. 0x11A4F ; Zanabazar Square 0x11A50, # .. 0x11AAF ; Soyombo 0x11AB0, # .. 0x11ABF ; No_Block @@ -277,13 +279,15 @@ RANGES = [ 0x11D60, # .. 0x11DAF ; Gunjala Gondi 0x11DB0, # .. 0x11EDF ; No_Block 0x11EE0, # .. 0x11EFF ; Makasar - 0x11F00, # .. 0x11FFF ; No_Block + 0x11F00, # .. 0x11FBF ; No_Block + 0x11FC0, # .. 0x11FFF ; Tamil Supplement 0x12000, # .. 0x123FF ; Cuneiform 0x12400, # .. 0x1247F ; Cuneiform Numbers and Punctuation 0x12480, # .. 0x1254F ; Early Dynastic Cuneiform 0x12550, # .. 0x12FFF ; No_Block 0x13000, # .. 0x1342F ; Egyptian Hieroglyphs - 0x13430, # .. 0x143FF ; No_Block + 0x13430, # .. 0x1343F ; Egyptian Hieroglyph Format Controls + 0x13440, # .. 0x143FF ; No_Block 0x14400, # .. 0x1467F ; Anatolian Hieroglyphs 0x14680, # .. 0x167FF ; No_Block 0x16800, # .. 0x16A3F ; Bamum Supplement @@ -302,7 +306,7 @@ RANGES = [ 0x18B00, # .. 0x1AFFF ; No_Block 0x1B000, # .. 0x1B0FF ; Kana Supplement 0x1B100, # .. 0x1B12F ; Kana Extended-A - 0x1B130, # .. 0x1B16F ; No_Block + 0x1B130, # .. 0x1B16F ; Small Kana Extension 0x1B170, # .. 0x1B2FF ; Nushu 0x1B300, # .. 0x1BBFF ; No_Block 0x1BC00, # .. 0x1BC9F ; Duployan @@ -320,13 +324,19 @@ RANGES = [ 0x1D800, # .. 0x1DAAF ; Sutton SignWriting 0x1DAB0, # .. 0x1DFFF ; No_Block 0x1E000, # .. 0x1E02F ; Glagolitic Supplement - 0x1E030, # .. 0x1E7FF ; No_Block + 0x1E030, # .. 
0x1E0FF ; No_Block + 0x1E100, # .. 0x1E14F ; Nyiakeng Puachue Hmong + 0x1E150, # .. 0x1E2BF ; No_Block + 0x1E2C0, # .. 0x1E2FF ; Wancho + 0x1E300, # .. 0x1E7FF ; No_Block 0x1E800, # .. 0x1E8DF ; Mende Kikakui 0x1E8E0, # .. 0x1E8FF ; No_Block 0x1E900, # .. 0x1E95F ; Adlam 0x1E960, # .. 0x1EC6F ; No_Block 0x1EC70, # .. 0x1ECBF ; Indic Siyaq Numbers - 0x1ECC0, # .. 0x1EDFF ; No_Block + 0x1ECC0, # .. 0x1ECFF ; No_Block + 0x1ED00, # .. 0x1ED4F ; Ottoman Siyaq Numbers + 0x1ED50, # .. 0x1EDFF ; No_Block 0x1EE00, # .. 0x1EEFF ; Arabic Mathematical Alphabetic Symbols 0x1EF00, # .. 0x1EFFF ; No_Block 0x1F000, # .. 0x1F02F ; Mahjong Tiles @@ -343,7 +353,8 @@ RANGES = [ 0x1F800, # .. 0x1F8FF ; Supplemental Arrows-C 0x1F900, # .. 0x1F9FF ; Supplemental Symbols and Pictographs 0x1FA00, # .. 0x1FA6F ; Chess Symbols - 0x1FA70, # .. 0x1FFFF ; No_Block + 0x1FA70, # .. 0x1FAFF ; Symbols and Pictographs Extended-A + 0x1FB00, # .. 0x1FFFF ; No_Block 0x20000, # .. 0x2A6DF ; CJK Unified Ideographs Extension B 0x2A6E0, # .. 0x2A6FF ; No_Block 0x2A700, # .. 0x2B73F ; CJK Unified Ideographs Extension C @@ -582,7 +593,8 @@ VALUES = [ 'No_Block', # 10E80..10EFF 'Old Sogdian', # 10F00..10F2F 'Sogdian', # 10F30..10F6F - 'No_Block', # 10F70..10FFF + 'No_Block', # 10F70..10FDF + 'Elymaic', # 10FE0..10FFF 'Brahmi', # 11000..1107F 'Kaithi', # 11080..110CF 'Sora Sompeng', # 110D0..110FF @@ -609,7 +621,8 @@ VALUES = [ 'Dogra', # 11800..1184F 'No_Block', # 11850..1189F 'Warang Citi', # 118A0..118FF - 'No_Block', # 11900..119FF + 'No_Block', # 11900..1199F + 'Nandinagari', # 119A0..119FF 'Zanabazar Square', # 11A00..11A4F 'Soyombo', # 11A50..11AAF 'No_Block', # 11AB0..11ABF @@ -622,13 +635,15 @@ VALUES = [ 'Gunjala Gondi', # 11D60..11DAF 'No_Block', # 11DB0..11EDF 'Makasar', # 11EE0..11EFF - 'No_Block', # 11F00..11FFF + 'No_Block', # 11F00..11FBF + 'Tamil Supplement', # 11FC0..11FFF 'Cuneiform', # 12000..123FF 'Cuneiform Numbers and Punctuation', # 12400..1247F 'Early Dynastic Cuneiform', # 12480..1254F 'No_Block', # 12550..12FFF 'Egyptian Hieroglyphs', # 13000..1342F - 'No_Block', # 13430..143FF + 'Egyptian Hieroglyph Format Controls', # 13430..1343F + 'No_Block', # 13440..143FF 'Anatolian Hieroglyphs', # 14400..1467F 'No_Block', # 14680..167FF 'Bamum Supplement', # 16800..16A3F @@ -647,7 +662,7 @@ VALUES = [ 'No_Block', # 18B00..1AFFF 'Kana Supplement', # 1B000..1B0FF 'Kana Extended-A', # 1B100..1B12F - 'No_Block', # 1B130..1B16F + 'Small Kana Extension', # 1B130..1B16F 'Nushu', # 1B170..1B2FF 'No_Block', # 1B300..1BBFF 'Duployan', # 1BC00..1BC9F @@ -665,13 +680,19 @@ VALUES = [ 'Sutton SignWriting', # 1D800..1DAAF 'No_Block', # 1DAB0..1DFFF 'Glagolitic Supplement', # 1E000..1E02F - 'No_Block', # 1E030..1E7FF + 'No_Block', # 1E030..1E0FF + 'Nyiakeng Puachue Hmong', # 1E100..1E14F + 'No_Block', # 1E150..1E2BF + 'Wancho', # 1E2C0..1E2FF + 'No_Block', # 1E300..1E7FF 'Mende Kikakui', # 1E800..1E8DF 'No_Block', # 1E8E0..1E8FF 'Adlam', # 1E900..1E95F 'No_Block', # 1E960..1EC6F 'Indic Siyaq Numbers', # 1EC70..1ECBF - 'No_Block', # 1ECC0..1EDFF + 'No_Block', # 1ECC0..1ECFF + 'Ottoman Siyaq Numbers', # 1ED00..1ED4F + 'No_Block', # 1ED50..1EDFF 'Arabic Mathematical Alphabetic Symbols', # 1EE00..1EEFF 'No_Block', # 1EF00..1EFFF 'Mahjong Tiles', # 1F000..1F02F @@ -688,7 +709,8 @@ VALUES = [ 'Supplemental Arrows-C', # 1F800..1F8FF 'Supplemental Symbols and Pictographs', # 1F900..1F9FF 'Chess Symbols', # 1FA00..1FA6F - 'No_Block', # 1FA70..1FFFF + 'Symbols and Pictographs Extended-A', # 1FA70..1FAFF + 'No_Block', # 1FB00..1FFFF 'CJK 
Unified Ideographs Extension B', # 20000..2A6DF 'No_Block', # 2A6E0..2A6FF 'CJK Unified Ideographs Extension C', # 2A700..2B73F diff --git a/Lib/fontTools/unicodedata/ScriptExtensions.py b/Lib/fontTools/unicodedata/ScriptExtensions.py index bfcdbec3c..d7d2e3c8a 100644 --- a/Lib/fontTools/unicodedata/ScriptExtensions.py +++ b/Lib/fontTools/unicodedata/ScriptExtensions.py @@ -4,9 +4,9 @@ # Source: https://unicode.org/Public/UNIDATA/ScriptExtensions.txt # License: http://unicode.org/copyright.html#License # -# ScriptExtensions-11.0.0.txt -# Date: 2018-02-04, 20:04:00 GMT -# © 2018 Unicode®, Inc. +# ScriptExtensions-12.1.0.txt +# Date: 2019-04-01, 09:10:42 GMT +# © 2019 Unicode®, Inc. # Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. # For terms of use, see http://www.unicode.org/terms_of_use.html # @@ -75,8 +75,8 @@ RANGES = [ 0x0951, # .. 0x0951 ; {'Beng', 'Deva', 'Gran', 'Gujr', 'Guru', 'Knda', 'Latn', 'Mlym', 'Orya', 'Shrd', 'Taml', 'Telu', 'Tirh'} 0x0952, # .. 0x0952 ; {'Beng', 'Deva', 'Gran', 'Gujr', 'Guru', 'Knda', 'Latn', 'Mlym', 'Orya', 'Taml', 'Telu', 'Tirh'} 0x0953, # .. 0x0963 ; None - 0x0964, # .. 0x0964 ; {'Beng', 'Deva', 'Dogr', 'Gong', 'Gran', 'Gujr', 'Guru', 'Knda', 'Mahj', 'Mlym', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'} - 0x0965, # .. 0x0965 ; {'Beng', 'Deva', 'Dogr', 'Gong', 'Gran', 'Gujr', 'Guru', 'Knda', 'Limb', 'Mahj', 'Mlym', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'} + 0x0964, # .. 0x0964 ; {'Beng', 'Deva', 'Dogr', 'Gong', 'Gonm', 'Gran', 'Gujr', 'Guru', 'Knda', 'Mahj', 'Mlym', 'Nand', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'} + 0x0965, # .. 0x0965 ; {'Beng', 'Deva', 'Dogr', 'Gong', 'Gonm', 'Gran', 'Gujr', 'Guru', 'Knda', 'Limb', 'Mahj', 'Mlym', 'Nand', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'} 0x0966, # .. 0x096F ; {'Deva', 'Dogr', 'Kthi', 'Mahj'} 0x0970, # .. 0x09E5 ; None 0x09E6, # .. 0x09EF ; {'Beng', 'Cakm', 'Sylo'} @@ -86,7 +86,9 @@ RANGES = [ 0x0AE6, # .. 0x0AEF ; {'Gujr', 'Khoj'} 0x0AF0, # .. 0x0BE5 ; None 0x0BE6, # .. 0x0BF3 ; {'Gran', 'Taml'} - 0x0BF4, # .. 0x103F ; None + 0x0BF4, # .. 0x0CE5 ; None + 0x0CE6, # .. 0x0CEF ; {'Knda', 'Nand'} + 0x0CF0, # .. 0x103F ; None 0x1040, # .. 0x1049 ; {'Cakm', 'Mymr', 'Tale'} 0x104A, # .. 0x10FA ; None 0x10FB, # .. 0x10FB ; {'Geor', 'Latn'} @@ -112,19 +114,24 @@ RANGES = [ 0x1CDE, # .. 0x1CDF ; {'Deva'} 0x1CE0, # .. 0x1CE0 ; {'Deva', 'Shrd'} 0x1CE1, # .. 0x1CE1 ; {'Beng', 'Deva'} - 0x1CE2, # .. 0x1CE9 ; {'Deva'} + 0x1CE2, # .. 0x1CE8 ; {'Deva'} + 0x1CE9, # .. 0x1CE9 ; {'Deva', 'Nand'} 0x1CEA, # .. 0x1CEA ; {'Beng', 'Deva'} 0x1CEB, # .. 0x1CEC ; {'Deva'} 0x1CED, # .. 0x1CED ; {'Beng', 'Deva'} 0x1CEE, # .. 0x1CF1 ; {'Deva'} - 0x1CF2, # .. 0x1CF3 ; {'Deva', 'Gran'} + 0x1CF2, # .. 0x1CF2 ; {'Beng', 'Deva', 'Gran', 'Knda', 'Nand', 'Orya', 'Telu', 'Tirh'} + 0x1CF3, # .. 0x1CF3 ; {'Deva', 'Gran'} 0x1CF4, # .. 0x1CF4 ; {'Deva', 'Gran', 'Knda'} 0x1CF5, # .. 0x1CF6 ; {'Beng', 'Deva'} 0x1CF7, # .. 0x1CF7 ; {'Beng'} 0x1CF8, # .. 0x1CF9 ; {'Deva', 'Gran'} - 0x1CFA, # .. 0x1DBF ; None + 0x1CFA, # .. 0x1CFA ; {'Nand'} + 0x1CFB, # .. 0x1DBF ; None 0x1DC0, # .. 0x1DC1 ; {'Grek'} - 0x1DC2, # .. 0x20EF ; None + 0x1DC2, # .. 0x202E ; None + 0x202F, # .. 0x202F ; {'Latn', 'Mong'} + 0x2030, # .. 0x20EF ; None 0x20F0, # .. 0x20F0 ; {'Deva', 'Gran', 'Latn'} 0x20F1, # .. 0x2E42 ; None 0x2E43, # .. 0x2E43 ; {'Cyrl', 'Glag'} @@ -166,7 +173,9 @@ RANGES = [ 0x3280, # .. 0x32B0 ; {'Hani'} 0x32B1, # .. 
0x32BF ; None 0x32C0, # .. 0x32CB ; {'Hani'} - 0x32CC, # .. 0x3357 ; None + 0x32CC, # .. 0x32FE ; None + 0x32FF, # .. 0x32FF ; {'Hani'} + 0x3300, # .. 0x3357 ; None 0x3358, # .. 0x3370 ; {'Hani'} 0x3371, # .. 0x337A ; None 0x337B, # .. 0x337F ; {'Hani'} @@ -175,8 +184,8 @@ RANGES = [ 0x33FF, # .. 0xA66E ; None 0xA66F, # .. 0xA66F ; {'Cyrl', 'Glag'} 0xA670, # .. 0xA82F ; None - 0xA830, # .. 0xA832 ; {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Mlym', 'Modi', 'Sind', 'Takr', 'Tirh'} - 0xA833, # .. 0xA835 ; {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Modi', 'Sind', 'Takr', 'Tirh'} + 0xA830, # .. 0xA832 ; {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Mlym', 'Modi', 'Nand', 'Sind', 'Takr', 'Tirh'} + 0xA833, # .. 0xA835 ; {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Modi', 'Nand', 'Sind', 'Takr', 'Tirh'} 0xA836, # .. 0xA839 ; {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Kthi', 'Mahj', 'Modi', 'Sind', 'Takr', 'Tirh'} 0xA83A, # .. 0xA8F0 ; None 0xA8F1, # .. 0xA8F1 ; {'Beng', 'Deva'} @@ -212,7 +221,11 @@ RANGES = [ 0x11303, # .. 0x11303 ; {'Gran', 'Taml'} 0x11304, # .. 0x1133A ; None 0x1133B, # .. 0x1133C ; {'Gran', 'Taml'} - 0x1133D, # .. 0x1BC9F ; None + 0x1133D, # .. 0x11FCF ; None + 0x11FD0, # .. 0x11FD1 ; {'Gran', 'Taml'} + 0x11FD2, # .. 0x11FD2 ; None + 0x11FD3, # .. 0x11FD3 ; {'Gran', 'Taml'} + 0x11FD4, # .. 0x1BC9F ; None 0x1BCA0, # .. 0x1BCA3 ; {'Dupl'} 0x1BCA4, # .. 0x1D35F ; None 0x1D360, # .. 0x1D371 ; {'Hani'} @@ -256,8 +269,8 @@ VALUES = [ {'Beng', 'Deva', 'Gran', 'Gujr', 'Guru', 'Knda', 'Latn', 'Mlym', 'Orya', 'Shrd', 'Taml', 'Telu', 'Tirh'}, # 0951..0951 {'Beng', 'Deva', 'Gran', 'Gujr', 'Guru', 'Knda', 'Latn', 'Mlym', 'Orya', 'Taml', 'Telu', 'Tirh'}, # 0952..0952 None, # 0953..0963 - {'Beng', 'Deva', 'Dogr', 'Gong', 'Gran', 'Gujr', 'Guru', 'Knda', 'Mahj', 'Mlym', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'}, # 0964..0964 - {'Beng', 'Deva', 'Dogr', 'Gong', 'Gran', 'Gujr', 'Guru', 'Knda', 'Limb', 'Mahj', 'Mlym', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'}, # 0965..0965 + {'Beng', 'Deva', 'Dogr', 'Gong', 'Gonm', 'Gran', 'Gujr', 'Guru', 'Knda', 'Mahj', 'Mlym', 'Nand', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'}, # 0964..0964 + {'Beng', 'Deva', 'Dogr', 'Gong', 'Gonm', 'Gran', 'Gujr', 'Guru', 'Knda', 'Limb', 'Mahj', 'Mlym', 'Nand', 'Orya', 'Sind', 'Sinh', 'Sylo', 'Takr', 'Taml', 'Telu', 'Tirh'}, # 0965..0965 {'Deva', 'Dogr', 'Kthi', 'Mahj'}, # 0966..096F None, # 0970..09E5 {'Beng', 'Cakm', 'Sylo'}, # 09E6..09EF @@ -267,7 +280,9 @@ VALUES = [ {'Gujr', 'Khoj'}, # 0AE6..0AEF None, # 0AF0..0BE5 {'Gran', 'Taml'}, # 0BE6..0BF3 - None, # 0BF4..103F + None, # 0BF4..0CE5 + {'Knda', 'Nand'}, # 0CE6..0CEF + None, # 0CF0..103F {'Cakm', 'Mymr', 'Tale'}, # 1040..1049 None, # 104A..10FA {'Geor', 'Latn'}, # 10FB..10FB @@ -293,19 +308,24 @@ VALUES = [ {'Deva'}, # 1CDE..1CDF {'Deva', 'Shrd'}, # 1CE0..1CE0 {'Beng', 'Deva'}, # 1CE1..1CE1 - {'Deva'}, # 1CE2..1CE9 + {'Deva'}, # 1CE2..1CE8 + {'Deva', 'Nand'}, # 1CE9..1CE9 {'Beng', 'Deva'}, # 1CEA..1CEA {'Deva'}, # 1CEB..1CEC {'Beng', 'Deva'}, # 1CED..1CED {'Deva'}, # 1CEE..1CF1 - {'Deva', 'Gran'}, # 1CF2..1CF3 + {'Beng', 'Deva', 'Gran', 'Knda', 'Nand', 'Orya', 'Telu', 'Tirh'}, # 1CF2..1CF2 + {'Deva', 'Gran'}, # 1CF3..1CF3 {'Deva', 'Gran', 'Knda'}, # 1CF4..1CF4 {'Beng', 'Deva'}, # 1CF5..1CF6 {'Beng'}, # 1CF7..1CF7 {'Deva', 'Gran'}, # 1CF8..1CF9 - None, # 1CFA..1DBF + {'Nand'}, # 1CFA..1CFA + None, # 1CFB..1DBF {'Grek'}, # 
1DC0..1DC1 - None, # 1DC2..20EF + None, # 1DC2..202E + {'Latn', 'Mong'}, # 202F..202F + None, # 2030..20EF {'Deva', 'Gran', 'Latn'}, # 20F0..20F0 None, # 20F1..2E42 {'Cyrl', 'Glag'}, # 2E43..2E43 @@ -347,7 +367,9 @@ VALUES = [ {'Hani'}, # 3280..32B0 None, # 32B1..32BF {'Hani'}, # 32C0..32CB - None, # 32CC..3357 + None, # 32CC..32FE + {'Hani'}, # 32FF..32FF + None, # 3300..3357 {'Hani'}, # 3358..3370 None, # 3371..337A {'Hani'}, # 337B..337F @@ -356,8 +378,8 @@ VALUES = [ None, # 33FF..A66E {'Cyrl', 'Glag'}, # A66F..A66F None, # A670..A82F - {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Mlym', 'Modi', 'Sind', 'Takr', 'Tirh'}, # A830..A832 - {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Modi', 'Sind', 'Takr', 'Tirh'}, # A833..A835 + {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Mlym', 'Modi', 'Nand', 'Sind', 'Takr', 'Tirh'}, # A830..A832 + {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Knda', 'Kthi', 'Mahj', 'Modi', 'Nand', 'Sind', 'Takr', 'Tirh'}, # A833..A835 {'Deva', 'Dogr', 'Gujr', 'Guru', 'Khoj', 'Kthi', 'Mahj', 'Modi', 'Sind', 'Takr', 'Tirh'}, # A836..A839 None, # A83A..A8F0 {'Beng', 'Deva'}, # A8F1..A8F1 @@ -393,7 +415,11 @@ VALUES = [ {'Gran', 'Taml'}, # 11303..11303 None, # 11304..1133A {'Gran', 'Taml'}, # 1133B..1133C - None, # 1133D..1BC9F + None, # 1133D..11FCF + {'Gran', 'Taml'}, # 11FD0..11FD1 + None, # 11FD2..11FD2 + {'Gran', 'Taml'}, # 11FD3..11FD3 + None, # 11FD4..1BC9F {'Dupl'}, # 1BCA0..1BCA3 None, # 1BCA4..1D35F {'Hani'}, # 1D360..1D371 diff --git a/Lib/fontTools/unicodedata/Scripts.py b/Lib/fontTools/unicodedata/Scripts.py index 30cd8f5ed..dc8c1e2b0 100644 --- a/Lib/fontTools/unicodedata/Scripts.py +++ b/Lib/fontTools/unicodedata/Scripts.py @@ -4,9 +4,9 @@ # Source: https://unicode.org/Public/UNIDATA/Scripts.txt # License: http://unicode.org/copyright.html#License # -# Scripts-11.0.0.txt -# Date: 2018-02-21, 05:34:31 GMT -# © 2018 Unicode®, Inc. +# Scripts-12.1.0.txt +# Date: 2019-04-01, 09:10:42 GMT +# © 2019 Unicode®, Inc. # Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. # For terms of use, see http://www.unicode.org/terms_of_use.html # @@ -128,8 +128,8 @@ RANGES = [ 0x08E2, # .. 0x08E2 ; Common 0x08E3, # .. 0x08FF ; Arabic 0x0900, # .. 0x0950 ; Devanagari - 0x0951, # .. 0x0952 ; Inherited - 0x0953, # .. 0x0963 ; Devanagari + 0x0951, # .. 0x0954 ; Inherited + 0x0955, # .. 0x0963 ; Devanagari 0x0964, # .. 0x0965 ; Common 0x0966, # .. 0x097F ; Devanagari 0x0980, # .. 0x0983 ; Bengali @@ -301,8 +301,8 @@ RANGES = [ 0x0C60, # .. 0x0C63 ; Telugu 0x0C64, # .. 0x0C65 ; Unknown 0x0C66, # .. 0x0C6F ; Telugu - 0x0C70, # .. 0x0C77 ; Unknown - 0x0C78, # .. 0x0C7F ; Telugu + 0x0C70, # .. 0x0C76 ; Unknown + 0x0C77, # .. 0x0C7F ; Telugu 0x0C80, # .. 0x0C8C ; Kannada 0x0C8D, # .. 0x0C8D ; Unknown 0x0C8E, # .. 0x0C90 ; Kannada @@ -377,28 +377,14 @@ RANGES = [ 0x0E81, # .. 0x0E82 ; Lao 0x0E83, # .. 0x0E83 ; Unknown 0x0E84, # .. 0x0E84 ; Lao - 0x0E85, # .. 0x0E86 ; Unknown - 0x0E87, # .. 0x0E88 ; Lao - 0x0E89, # .. 0x0E89 ; Unknown - 0x0E8A, # .. 0x0E8A ; Lao - 0x0E8B, # .. 0x0E8C ; Unknown - 0x0E8D, # .. 0x0E8D ; Lao - 0x0E8E, # .. 0x0E93 ; Unknown - 0x0E94, # .. 0x0E97 ; Lao - 0x0E98, # .. 0x0E98 ; Unknown - 0x0E99, # .. 0x0E9F ; Lao - 0x0EA0, # .. 0x0EA0 ; Unknown - 0x0EA1, # .. 0x0EA3 ; Lao + 0x0E85, # .. 0x0E85 ; Unknown + 0x0E86, # .. 0x0E8A ; Lao + 0x0E8B, # .. 0x0E8B ; Unknown + 0x0E8C, # .. 0x0EA3 ; Lao 0x0EA4, # .. 0x0EA4 ; Unknown 0x0EA5, # .. 
0x0EA5 ; Lao 0x0EA6, # .. 0x0EA6 ; Unknown - 0x0EA7, # .. 0x0EA7 ; Lao - 0x0EA8, # .. 0x0EA9 ; Unknown - 0x0EAA, # .. 0x0EAB ; Lao - 0x0EAC, # .. 0x0EAC ; Unknown - 0x0EAD, # .. 0x0EB9 ; Lao - 0x0EBA, # .. 0x0EBA ; Unknown - 0x0EBB, # .. 0x0EBD ; Lao + 0x0EA7, # .. 0x0EBD ; Lao 0x0EBE, # .. 0x0EBF ; Unknown 0x0EC0, # .. 0x0EC4 ; Lao 0x0EC5, # .. 0x0EC5 ; Unknown @@ -585,7 +571,8 @@ RANGES = [ 0x1CF4, # .. 0x1CF4 ; Inherited 0x1CF5, # .. 0x1CF7 ; Common 0x1CF8, # .. 0x1CF9 ; Inherited - 0x1CFA, # .. 0x1CFF ; Unknown + 0x1CFA, # .. 0x1CFA ; Common + 0x1CFB, # .. 0x1CFF ; Unknown 0x1D00, # .. 0x1D25 ; Latin 0x1D26, # .. 0x1D2A ; Greek 0x1D2B, # .. 0x1D2B ; Cyrillic @@ -672,10 +659,7 @@ RANGES = [ 0x2B74, # .. 0x2B75 ; Unknown 0x2B76, # .. 0x2B95 ; Common 0x2B96, # .. 0x2B97 ; Unknown - 0x2B98, # .. 0x2BC8 ; Common - 0x2BC9, # .. 0x2BC9 ; Unknown - 0x2BCA, # .. 0x2BFE ; Common - 0x2BFF, # .. 0x2BFF ; Unknown + 0x2B98, # .. 0x2BFF ; Common 0x2C00, # .. 0x2C2E ; Glagolitic 0x2C2F, # .. 0x2C2F ; Unknown 0x2C30, # .. 0x2C5E ; Glagolitic @@ -714,8 +698,8 @@ RANGES = [ 0x2DD8, # .. 0x2DDE ; Ethiopic 0x2DDF, # .. 0x2DDF ; Unknown 0x2DE0, # .. 0x2DFF ; Cyrillic - 0x2E00, # .. 0x2E4E ; Common - 0x2E4F, # .. 0x2E7F ; Unknown + 0x2E00, # .. 0x2E4F ; Common + 0x2E50, # .. 0x2E7F ; Unknown 0x2E80, # .. 0x2E99 ; Han 0x2E9A, # .. 0x2E9A ; Unknown 0x2E9B, # .. 0x2EF3 ; Han @@ -762,7 +746,7 @@ RANGES = [ 0x3260, # .. 0x327E ; Hangul 0x327F, # .. 0x32CF ; Common 0x32D0, # .. 0x32FE ; Katakana - 0x32FF, # .. 0x32FF ; Unknown + 0x32FF, # .. 0x32FF ; Common 0x3300, # .. 0x3357 ; Katakana 0x3358, # .. 0x33FF ; Common 0x3400, # .. 0x4DB5 ; Han @@ -783,8 +767,10 @@ RANGES = [ 0xA700, # .. 0xA721 ; Common 0xA722, # .. 0xA787 ; Latin 0xA788, # .. 0xA78A ; Common - 0xA78B, # .. 0xA7B9 ; Latin - 0xA7BA, # .. 0xA7F6 ; Unknown + 0xA78B, # .. 0xA7BF ; Latin + 0xA7C0, # .. 0xA7C1 ; Unknown + 0xA7C2, # .. 0xA7C6 ; Latin + 0xA7C7, # .. 0xA7F6 ; Unknown 0xA7F7, # .. 0xA7FF ; Latin 0xA800, # .. 0xA82B ; Syloti_Nagri 0xA82C, # .. 0xA82F ; Unknown @@ -840,7 +826,8 @@ RANGES = [ 0xAB5B, # .. 0xAB5B ; Common 0xAB5C, # .. 0xAB64 ; Latin 0xAB65, # .. 0xAB65 ; Greek - 0xAB66, # .. 0xAB6F ; Unknown + 0xAB66, # .. 0xAB67 ; Latin + 0xAB68, # .. 0xAB6F ; Unknown 0xAB70, # .. 0xABBF ; Cherokee 0xABC0, # .. 0xABED ; Meetei_Mayek 0xABEE, # .. 0xABEF ; Unknown @@ -1086,7 +1073,9 @@ RANGES = [ 0x10F00, # .. 0x10F27 ; Old_Sogdian 0x10F28, # .. 0x10F2F ; Unknown 0x10F30, # .. 0x10F59 ; Sogdian - 0x10F5A, # .. 0x10FFF ; Unknown + 0x10F5A, # .. 0x10FDF ; Unknown + 0x10FE0, # .. 0x10FF6 ; Elymaic + 0x10FF7, # .. 0x10FFF ; Unknown 0x11000, # .. 0x1104D ; Brahmi 0x1104E, # .. 0x11051 ; Unknown 0x11052, # .. 0x1106F ; Brahmi @@ -1165,8 +1154,8 @@ RANGES = [ 0x1145A, # .. 0x1145A ; Unknown 0x1145B, # .. 0x1145B ; Newa 0x1145C, # .. 0x1145C ; Unknown - 0x1145D, # .. 0x1145E ; Newa - 0x1145F, # .. 0x1147F ; Unknown + 0x1145D, # .. 0x1145F ; Newa + 0x11460, # .. 0x1147F ; Unknown 0x11480, # .. 0x114C7 ; Tirhuta 0x114C8, # .. 0x114CF ; Unknown 0x114D0, # .. 0x114D9 ; Tirhuta @@ -1181,8 +1170,8 @@ RANGES = [ 0x1165A, # .. 0x1165F ; Unknown 0x11660, # .. 0x1166C ; Mongolian 0x1166D, # .. 0x1167F ; Unknown - 0x11680, # .. 0x116B7 ; Takri - 0x116B8, # .. 0x116BF ; Unknown + 0x11680, # .. 0x116B8 ; Takri + 0x116B9, # .. 0x116BF ; Unknown 0x116C0, # .. 0x116C9 ; Takri 0x116CA, # .. 0x116FF ; Unknown 0x11700, # .. 0x1171A ; Ahom @@ -1196,12 +1185,16 @@ RANGES = [ 0x118A0, # .. 0x118F2 ; Warang_Citi 0x118F3, # .. 0x118FE ; Unknown 0x118FF, # .. 
0x118FF ; Warang_Citi - 0x11900, # .. 0x119FF ; Unknown + 0x11900, # .. 0x1199F ; Unknown + 0x119A0, # .. 0x119A7 ; Nandinagari + 0x119A8, # .. 0x119A9 ; Unknown + 0x119AA, # .. 0x119D7 ; Nandinagari + 0x119D8, # .. 0x119D9 ; Unknown + 0x119DA, # .. 0x119E4 ; Nandinagari + 0x119E5, # .. 0x119FF ; Unknown 0x11A00, # .. 0x11A47 ; Zanabazar_Square 0x11A48, # .. 0x11A4F ; Unknown - 0x11A50, # .. 0x11A83 ; Soyombo - 0x11A84, # .. 0x11A85 ; Unknown - 0x11A86, # .. 0x11AA2 ; Soyombo + 0x11A50, # .. 0x11AA2 ; Soyombo 0x11AA3, # .. 0x11ABF ; Unknown 0x11AC0, # .. 0x11AF8 ; Pau_Cin_Hau 0x11AF9, # .. 0x11BFF ; Unknown @@ -1246,7 +1239,10 @@ RANGES = [ 0x11DA0, # .. 0x11DA9 ; Gunjala_Gondi 0x11DAA, # .. 0x11EDF ; Unknown 0x11EE0, # .. 0x11EF8 ; Makasar - 0x11EF9, # .. 0x11FFF ; Unknown + 0x11EF9, # .. 0x11FBF ; Unknown + 0x11FC0, # .. 0x11FF1 ; Tamil + 0x11FF2, # .. 0x11FFE ; Unknown + 0x11FFF, # .. 0x11FFF ; Tamil 0x12000, # .. 0x12399 ; Cuneiform 0x1239A, # .. 0x123FF ; Unknown 0x12400, # .. 0x1246E ; Cuneiform @@ -1256,7 +1252,9 @@ RANGES = [ 0x12480, # .. 0x12543 ; Cuneiform 0x12544, # .. 0x12FFF ; Unknown 0x13000, # .. 0x1342E ; Egyptian_Hieroglyphs - 0x1342F, # .. 0x143FF ; Unknown + 0x1342F, # .. 0x1342F ; Unknown + 0x13430, # .. 0x13438 ; Egyptian_Hieroglyphs + 0x13439, # .. 0x143FF ; Unknown 0x14400, # .. 0x14646 ; Anatolian_Hieroglyphs 0x14647, # .. 0x167FF ; Unknown 0x16800, # .. 0x16A38 ; Bamum @@ -1283,22 +1281,27 @@ RANGES = [ 0x16B90, # .. 0x16E3F ; Unknown 0x16E40, # .. 0x16E9A ; Medefaidrin 0x16E9B, # .. 0x16EFF ; Unknown - 0x16F00, # .. 0x16F44 ; Miao - 0x16F45, # .. 0x16F4F ; Unknown - 0x16F50, # .. 0x16F7E ; Miao - 0x16F7F, # .. 0x16F8E ; Unknown + 0x16F00, # .. 0x16F4A ; Miao + 0x16F4B, # .. 0x16F4E ; Unknown + 0x16F4F, # .. 0x16F87 ; Miao + 0x16F88, # .. 0x16F8E ; Unknown 0x16F8F, # .. 0x16F9F ; Miao 0x16FA0, # .. 0x16FDF ; Unknown 0x16FE0, # .. 0x16FE0 ; Tangut 0x16FE1, # .. 0x16FE1 ; Nushu - 0x16FE2, # .. 0x16FFF ; Unknown - 0x17000, # .. 0x187F1 ; Tangut - 0x187F2, # .. 0x187FF ; Unknown + 0x16FE2, # .. 0x16FE3 ; Common + 0x16FE4, # .. 0x16FFF ; Unknown + 0x17000, # .. 0x187F7 ; Tangut + 0x187F8, # .. 0x187FF ; Unknown 0x18800, # .. 0x18AF2 ; Tangut 0x18AF3, # .. 0x1AFFF ; Unknown 0x1B000, # .. 0x1B000 ; Katakana 0x1B001, # .. 0x1B11E ; Hiragana - 0x1B11F, # .. 0x1B16F ; Unknown + 0x1B11F, # .. 0x1B14F ; Unknown + 0x1B150, # .. 0x1B152 ; Hiragana + 0x1B153, # .. 0x1B163 ; Unknown + 0x1B164, # .. 0x1B167 ; Katakana + 0x1B168, # .. 0x1B16F ; Unknown 0x1B170, # .. 0x1B2FB ; Nushu 0x1B2FC, # .. 0x1BBFF ; Unknown 0x1BC00, # .. 0x1BC6A ; Duployan @@ -1390,19 +1393,33 @@ RANGES = [ 0x1E023, # .. 0x1E024 ; Glagolitic 0x1E025, # .. 0x1E025 ; Unknown 0x1E026, # .. 0x1E02A ; Glagolitic - 0x1E02B, # .. 0x1E7FF ; Unknown + 0x1E02B, # .. 0x1E0FF ; Unknown + 0x1E100, # .. 0x1E12C ; Nyiakeng_Puachue_Hmong + 0x1E12D, # .. 0x1E12F ; Unknown + 0x1E130, # .. 0x1E13D ; Nyiakeng_Puachue_Hmong + 0x1E13E, # .. 0x1E13F ; Unknown + 0x1E140, # .. 0x1E149 ; Nyiakeng_Puachue_Hmong + 0x1E14A, # .. 0x1E14D ; Unknown + 0x1E14E, # .. 0x1E14F ; Nyiakeng_Puachue_Hmong + 0x1E150, # .. 0x1E2BF ; Unknown + 0x1E2C0, # .. 0x1E2F9 ; Wancho + 0x1E2FA, # .. 0x1E2FE ; Unknown + 0x1E2FF, # .. 0x1E2FF ; Wancho + 0x1E300, # .. 0x1E7FF ; Unknown 0x1E800, # .. 0x1E8C4 ; Mende_Kikakui 0x1E8C5, # .. 0x1E8C6 ; Unknown 0x1E8C7, # .. 0x1E8D6 ; Mende_Kikakui 0x1E8D7, # .. 0x1E8FF ; Unknown - 0x1E900, # .. 0x1E94A ; Adlam - 0x1E94B, # .. 0x1E94F ; Unknown + 0x1E900, # .. 0x1E94B ; Adlam + 0x1E94C, # .. 
0x1E94F ; Unknown 0x1E950, # .. 0x1E959 ; Adlam 0x1E95A, # .. 0x1E95D ; Unknown 0x1E95E, # .. 0x1E95F ; Adlam 0x1E960, # .. 0x1EC70 ; Unknown 0x1EC71, # .. 0x1ECB4 ; Common - 0x1ECB5, # .. 0x1EDFF ; Unknown + 0x1ECB5, # .. 0x1ED00 ; Unknown + 0x1ED01, # .. 0x1ED3D ; Common + 0x1ED3E, # .. 0x1EDFF ; Unknown 0x1EE00, # .. 0x1EE03 ; Arabic 0x1EE04, # .. 0x1EE04 ; Unknown 0x1EE05, # .. 0x1EE1F ; Arabic @@ -1485,8 +1502,8 @@ RANGES = [ 0x1F0F6, # .. 0x1F0FF ; Unknown 0x1F100, # .. 0x1F10C ; Common 0x1F10D, # .. 0x1F10F ; Unknown - 0x1F110, # .. 0x1F16B ; Common - 0x1F16C, # .. 0x1F16F ; Unknown + 0x1F110, # .. 0x1F16C ; Common + 0x1F16D, # .. 0x1F16F ; Unknown 0x1F170, # .. 0x1F1AC ; Common 0x1F1AD, # .. 0x1F1E5 ; Unknown 0x1F1E6, # .. 0x1F1FF ; Common @@ -1501,16 +1518,18 @@ RANGES = [ 0x1F252, # .. 0x1F25F ; Unknown 0x1F260, # .. 0x1F265 ; Common 0x1F266, # .. 0x1F2FF ; Unknown - 0x1F300, # .. 0x1F6D4 ; Common - 0x1F6D5, # .. 0x1F6DF ; Unknown + 0x1F300, # .. 0x1F6D5 ; Common + 0x1F6D6, # .. 0x1F6DF ; Unknown 0x1F6E0, # .. 0x1F6EC ; Common 0x1F6ED, # .. 0x1F6EF ; Unknown - 0x1F6F0, # .. 0x1F6F9 ; Common - 0x1F6FA, # .. 0x1F6FF ; Unknown + 0x1F6F0, # .. 0x1F6FA ; Common + 0x1F6FB, # .. 0x1F6FF ; Unknown 0x1F700, # .. 0x1F773 ; Common 0x1F774, # .. 0x1F77F ; Unknown 0x1F780, # .. 0x1F7D8 ; Common - 0x1F7D9, # .. 0x1F7FF ; Unknown + 0x1F7D9, # .. 0x1F7DF ; Unknown + 0x1F7E0, # .. 0x1F7EB ; Common + 0x1F7EC, # .. 0x1F7FF ; Unknown 0x1F800, # .. 0x1F80B ; Common 0x1F80C, # .. 0x1F80F ; Unknown 0x1F810, # .. 0x1F847 ; Common @@ -1522,25 +1541,29 @@ RANGES = [ 0x1F890, # .. 0x1F8AD ; Common 0x1F8AE, # .. 0x1F8FF ; Unknown 0x1F900, # .. 0x1F90B ; Common - 0x1F90C, # .. 0x1F90F ; Unknown - 0x1F910, # .. 0x1F93E ; Common - 0x1F93F, # .. 0x1F93F ; Unknown - 0x1F940, # .. 0x1F970 ; Common - 0x1F971, # .. 0x1F972 ; Unknown + 0x1F90C, # .. 0x1F90C ; Unknown + 0x1F90D, # .. 0x1F971 ; Common + 0x1F972, # .. 0x1F972 ; Unknown 0x1F973, # .. 0x1F976 ; Common 0x1F977, # .. 0x1F979 ; Unknown - 0x1F97A, # .. 0x1F97A ; Common - 0x1F97B, # .. 0x1F97B ; Unknown - 0x1F97C, # .. 0x1F9A2 ; Common - 0x1F9A3, # .. 0x1F9AF ; Unknown - 0x1F9B0, # .. 0x1F9B9 ; Common - 0x1F9BA, # .. 0x1F9BF ; Unknown - 0x1F9C0, # .. 0x1F9C2 ; Common - 0x1F9C3, # .. 0x1F9CF ; Unknown - 0x1F9D0, # .. 0x1F9FF ; Common - 0x1FA00, # .. 0x1FA5F ; Unknown + 0x1F97A, # .. 0x1F9A2 ; Common + 0x1F9A3, # .. 0x1F9A4 ; Unknown + 0x1F9A5, # .. 0x1F9AA ; Common + 0x1F9AB, # .. 0x1F9AD ; Unknown + 0x1F9AE, # .. 0x1F9CA ; Common + 0x1F9CB, # .. 0x1F9CC ; Unknown + 0x1F9CD, # .. 0x1FA53 ; Common + 0x1FA54, # .. 0x1FA5F ; Unknown 0x1FA60, # .. 0x1FA6D ; Common - 0x1FA6E, # .. 0x1FFFF ; Unknown + 0x1FA6E, # .. 0x1FA6F ; Unknown + 0x1FA70, # .. 0x1FA73 ; Common + 0x1FA74, # .. 0x1FA77 ; Unknown + 0x1FA78, # .. 0x1FA7A ; Common + 0x1FA7B, # .. 0x1FA7F ; Unknown + 0x1FA80, # .. 0x1FA82 ; Common + 0x1FA83, # .. 0x1FA8F ; Unknown + 0x1FA90, # .. 0x1FA95 ; Common + 0x1FA96, # .. 0x1FFFF ; Unknown 0x20000, # .. 0x2A6D6 ; Han 0x2A6D7, # .. 0x2A6FF ; Unknown 0x2A700, # .. 
0x2B734 ; Han @@ -1669,8 +1692,8 @@ VALUES = [ 'Zyyy', # 08E2..08E2 ; Common 'Arab', # 08E3..08FF ; Arabic 'Deva', # 0900..0950 ; Devanagari - 'Zinh', # 0951..0952 ; Inherited - 'Deva', # 0953..0963 ; Devanagari + 'Zinh', # 0951..0954 ; Inherited + 'Deva', # 0955..0963 ; Devanagari 'Zyyy', # 0964..0965 ; Common 'Deva', # 0966..097F ; Devanagari 'Beng', # 0980..0983 ; Bengali @@ -1842,8 +1865,8 @@ VALUES = [ 'Telu', # 0C60..0C63 ; Telugu 'Zzzz', # 0C64..0C65 ; Unknown 'Telu', # 0C66..0C6F ; Telugu - 'Zzzz', # 0C70..0C77 ; Unknown - 'Telu', # 0C78..0C7F ; Telugu + 'Zzzz', # 0C70..0C76 ; Unknown + 'Telu', # 0C77..0C7F ; Telugu 'Knda', # 0C80..0C8C ; Kannada 'Zzzz', # 0C8D..0C8D ; Unknown 'Knda', # 0C8E..0C90 ; Kannada @@ -1918,28 +1941,14 @@ VALUES = [ 'Laoo', # 0E81..0E82 ; Lao 'Zzzz', # 0E83..0E83 ; Unknown 'Laoo', # 0E84..0E84 ; Lao - 'Zzzz', # 0E85..0E86 ; Unknown - 'Laoo', # 0E87..0E88 ; Lao - 'Zzzz', # 0E89..0E89 ; Unknown - 'Laoo', # 0E8A..0E8A ; Lao - 'Zzzz', # 0E8B..0E8C ; Unknown - 'Laoo', # 0E8D..0E8D ; Lao - 'Zzzz', # 0E8E..0E93 ; Unknown - 'Laoo', # 0E94..0E97 ; Lao - 'Zzzz', # 0E98..0E98 ; Unknown - 'Laoo', # 0E99..0E9F ; Lao - 'Zzzz', # 0EA0..0EA0 ; Unknown - 'Laoo', # 0EA1..0EA3 ; Lao + 'Zzzz', # 0E85..0E85 ; Unknown + 'Laoo', # 0E86..0E8A ; Lao + 'Zzzz', # 0E8B..0E8B ; Unknown + 'Laoo', # 0E8C..0EA3 ; Lao 'Zzzz', # 0EA4..0EA4 ; Unknown 'Laoo', # 0EA5..0EA5 ; Lao 'Zzzz', # 0EA6..0EA6 ; Unknown - 'Laoo', # 0EA7..0EA7 ; Lao - 'Zzzz', # 0EA8..0EA9 ; Unknown - 'Laoo', # 0EAA..0EAB ; Lao - 'Zzzz', # 0EAC..0EAC ; Unknown - 'Laoo', # 0EAD..0EB9 ; Lao - 'Zzzz', # 0EBA..0EBA ; Unknown - 'Laoo', # 0EBB..0EBD ; Lao + 'Laoo', # 0EA7..0EBD ; Lao 'Zzzz', # 0EBE..0EBF ; Unknown 'Laoo', # 0EC0..0EC4 ; Lao 'Zzzz', # 0EC5..0EC5 ; Unknown @@ -2126,7 +2135,8 @@ VALUES = [ 'Zinh', # 1CF4..1CF4 ; Inherited 'Zyyy', # 1CF5..1CF7 ; Common 'Zinh', # 1CF8..1CF9 ; Inherited - 'Zzzz', # 1CFA..1CFF ; Unknown + 'Zyyy', # 1CFA..1CFA ; Common + 'Zzzz', # 1CFB..1CFF ; Unknown 'Latn', # 1D00..1D25 ; Latin 'Grek', # 1D26..1D2A ; Greek 'Cyrl', # 1D2B..1D2B ; Cyrillic @@ -2213,10 +2223,7 @@ VALUES = [ 'Zzzz', # 2B74..2B75 ; Unknown 'Zyyy', # 2B76..2B95 ; Common 'Zzzz', # 2B96..2B97 ; Unknown - 'Zyyy', # 2B98..2BC8 ; Common - 'Zzzz', # 2BC9..2BC9 ; Unknown - 'Zyyy', # 2BCA..2BFE ; Common - 'Zzzz', # 2BFF..2BFF ; Unknown + 'Zyyy', # 2B98..2BFF ; Common 'Glag', # 2C00..2C2E ; Glagolitic 'Zzzz', # 2C2F..2C2F ; Unknown 'Glag', # 2C30..2C5E ; Glagolitic @@ -2255,8 +2262,8 @@ VALUES = [ 'Ethi', # 2DD8..2DDE ; Ethiopic 'Zzzz', # 2DDF..2DDF ; Unknown 'Cyrl', # 2DE0..2DFF ; Cyrillic - 'Zyyy', # 2E00..2E4E ; Common - 'Zzzz', # 2E4F..2E7F ; Unknown + 'Zyyy', # 2E00..2E4F ; Common + 'Zzzz', # 2E50..2E7F ; Unknown 'Hani', # 2E80..2E99 ; Han 'Zzzz', # 2E9A..2E9A ; Unknown 'Hani', # 2E9B..2EF3 ; Han @@ -2303,7 +2310,7 @@ VALUES = [ 'Hang', # 3260..327E ; Hangul 'Zyyy', # 327F..32CF ; Common 'Kana', # 32D0..32FE ; Katakana - 'Zzzz', # 32FF..32FF ; Unknown + 'Zyyy', # 32FF..32FF ; Common 'Kana', # 3300..3357 ; Katakana 'Zyyy', # 3358..33FF ; Common 'Hani', # 3400..4DB5 ; Han @@ -2324,8 +2331,10 @@ VALUES = [ 'Zyyy', # A700..A721 ; Common 'Latn', # A722..A787 ; Latin 'Zyyy', # A788..A78A ; Common - 'Latn', # A78B..A7B9 ; Latin - 'Zzzz', # A7BA..A7F6 ; Unknown + 'Latn', # A78B..A7BF ; Latin + 'Zzzz', # A7C0..A7C1 ; Unknown + 'Latn', # A7C2..A7C6 ; Latin + 'Zzzz', # A7C7..A7F6 ; Unknown 'Latn', # A7F7..A7FF ; Latin 'Sylo', # A800..A82B ; Syloti_Nagri 'Zzzz', # A82C..A82F ; Unknown @@ -2381,7 +2390,8 @@ VALUES = [ 'Zyyy', # AB5B..AB5B 
; Common 'Latn', # AB5C..AB64 ; Latin 'Grek', # AB65..AB65 ; Greek - 'Zzzz', # AB66..AB6F ; Unknown + 'Latn', # AB66..AB67 ; Latin + 'Zzzz', # AB68..AB6F ; Unknown 'Cher', # AB70..ABBF ; Cherokee 'Mtei', # ABC0..ABED ; Meetei_Mayek 'Zzzz', # ABEE..ABEF ; Unknown @@ -2627,7 +2637,9 @@ VALUES = [ 'Sogo', # 10F00..10F27 ; Old_Sogdian 'Zzzz', # 10F28..10F2F ; Unknown 'Sogd', # 10F30..10F59 ; Sogdian - 'Zzzz', # 10F5A..10FFF ; Unknown + 'Zzzz', # 10F5A..10FDF ; Unknown + 'Elym', # 10FE0..10FF6 ; Elymaic + 'Zzzz', # 10FF7..10FFF ; Unknown 'Brah', # 11000..1104D ; Brahmi 'Zzzz', # 1104E..11051 ; Unknown 'Brah', # 11052..1106F ; Brahmi @@ -2706,8 +2718,8 @@ VALUES = [ 'Zzzz', # 1145A..1145A ; Unknown 'Newa', # 1145B..1145B ; Newa 'Zzzz', # 1145C..1145C ; Unknown - 'Newa', # 1145D..1145E ; Newa - 'Zzzz', # 1145F..1147F ; Unknown + 'Newa', # 1145D..1145F ; Newa + 'Zzzz', # 11460..1147F ; Unknown 'Tirh', # 11480..114C7 ; Tirhuta 'Zzzz', # 114C8..114CF ; Unknown 'Tirh', # 114D0..114D9 ; Tirhuta @@ -2722,8 +2734,8 @@ VALUES = [ 'Zzzz', # 1165A..1165F ; Unknown 'Mong', # 11660..1166C ; Mongolian 'Zzzz', # 1166D..1167F ; Unknown - 'Takr', # 11680..116B7 ; Takri - 'Zzzz', # 116B8..116BF ; Unknown + 'Takr', # 11680..116B8 ; Takri + 'Zzzz', # 116B9..116BF ; Unknown 'Takr', # 116C0..116C9 ; Takri 'Zzzz', # 116CA..116FF ; Unknown 'Ahom', # 11700..1171A ; Ahom @@ -2737,12 +2749,16 @@ VALUES = [ 'Wara', # 118A0..118F2 ; Warang_Citi 'Zzzz', # 118F3..118FE ; Unknown 'Wara', # 118FF..118FF ; Warang_Citi - 'Zzzz', # 11900..119FF ; Unknown + 'Zzzz', # 11900..1199F ; Unknown + 'Nand', # 119A0..119A7 ; Nandinagari + 'Zzzz', # 119A8..119A9 ; Unknown + 'Nand', # 119AA..119D7 ; Nandinagari + 'Zzzz', # 119D8..119D9 ; Unknown + 'Nand', # 119DA..119E4 ; Nandinagari + 'Zzzz', # 119E5..119FF ; Unknown 'Zanb', # 11A00..11A47 ; Zanabazar_Square 'Zzzz', # 11A48..11A4F ; Unknown - 'Soyo', # 11A50..11A83 ; Soyombo - 'Zzzz', # 11A84..11A85 ; Unknown - 'Soyo', # 11A86..11AA2 ; Soyombo + 'Soyo', # 11A50..11AA2 ; Soyombo 'Zzzz', # 11AA3..11ABF ; Unknown 'Pauc', # 11AC0..11AF8 ; Pau_Cin_Hau 'Zzzz', # 11AF9..11BFF ; Unknown @@ -2787,7 +2803,10 @@ VALUES = [ 'Gong', # 11DA0..11DA9 ; Gunjala_Gondi 'Zzzz', # 11DAA..11EDF ; Unknown 'Maka', # 11EE0..11EF8 ; Makasar - 'Zzzz', # 11EF9..11FFF ; Unknown + 'Zzzz', # 11EF9..11FBF ; Unknown + 'Taml', # 11FC0..11FF1 ; Tamil + 'Zzzz', # 11FF2..11FFE ; Unknown + 'Taml', # 11FFF..11FFF ; Tamil 'Xsux', # 12000..12399 ; Cuneiform 'Zzzz', # 1239A..123FF ; Unknown 'Xsux', # 12400..1246E ; Cuneiform @@ -2797,7 +2816,9 @@ VALUES = [ 'Xsux', # 12480..12543 ; Cuneiform 'Zzzz', # 12544..12FFF ; Unknown 'Egyp', # 13000..1342E ; Egyptian_Hieroglyphs - 'Zzzz', # 1342F..143FF ; Unknown + 'Zzzz', # 1342F..1342F ; Unknown + 'Egyp', # 13430..13438 ; Egyptian_Hieroglyphs + 'Zzzz', # 13439..143FF ; Unknown 'Hluw', # 14400..14646 ; Anatolian_Hieroglyphs 'Zzzz', # 14647..167FF ; Unknown 'Bamu', # 16800..16A38 ; Bamum @@ -2824,22 +2845,27 @@ VALUES = [ 'Zzzz', # 16B90..16E3F ; Unknown 'Medf', # 16E40..16E9A ; Medefaidrin 'Zzzz', # 16E9B..16EFF ; Unknown - 'Plrd', # 16F00..16F44 ; Miao - 'Zzzz', # 16F45..16F4F ; Unknown - 'Plrd', # 16F50..16F7E ; Miao - 'Zzzz', # 16F7F..16F8E ; Unknown + 'Plrd', # 16F00..16F4A ; Miao + 'Zzzz', # 16F4B..16F4E ; Unknown + 'Plrd', # 16F4F..16F87 ; Miao + 'Zzzz', # 16F88..16F8E ; Unknown 'Plrd', # 16F8F..16F9F ; Miao 'Zzzz', # 16FA0..16FDF ; Unknown 'Tang', # 16FE0..16FE0 ; Tangut 'Nshu', # 16FE1..16FE1 ; Nushu - 'Zzzz', # 16FE2..16FFF ; Unknown - 'Tang', # 17000..187F1 ; Tangut - 'Zzzz', # 
187F2..187FF ; Unknown + 'Zyyy', # 16FE2..16FE3 ; Common + 'Zzzz', # 16FE4..16FFF ; Unknown + 'Tang', # 17000..187F7 ; Tangut + 'Zzzz', # 187F8..187FF ; Unknown 'Tang', # 18800..18AF2 ; Tangut 'Zzzz', # 18AF3..1AFFF ; Unknown 'Kana', # 1B000..1B000 ; Katakana 'Hira', # 1B001..1B11E ; Hiragana - 'Zzzz', # 1B11F..1B16F ; Unknown + 'Zzzz', # 1B11F..1B14F ; Unknown + 'Hira', # 1B150..1B152 ; Hiragana + 'Zzzz', # 1B153..1B163 ; Unknown + 'Kana', # 1B164..1B167 ; Katakana + 'Zzzz', # 1B168..1B16F ; Unknown 'Nshu', # 1B170..1B2FB ; Nushu 'Zzzz', # 1B2FC..1BBFF ; Unknown 'Dupl', # 1BC00..1BC6A ; Duployan @@ -2931,19 +2957,33 @@ VALUES = [ 'Glag', # 1E023..1E024 ; Glagolitic 'Zzzz', # 1E025..1E025 ; Unknown 'Glag', # 1E026..1E02A ; Glagolitic - 'Zzzz', # 1E02B..1E7FF ; Unknown + 'Zzzz', # 1E02B..1E0FF ; Unknown + 'Hmnp', # 1E100..1E12C ; Nyiakeng_Puachue_Hmong + 'Zzzz', # 1E12D..1E12F ; Unknown + 'Hmnp', # 1E130..1E13D ; Nyiakeng_Puachue_Hmong + 'Zzzz', # 1E13E..1E13F ; Unknown + 'Hmnp', # 1E140..1E149 ; Nyiakeng_Puachue_Hmong + 'Zzzz', # 1E14A..1E14D ; Unknown + 'Hmnp', # 1E14E..1E14F ; Nyiakeng_Puachue_Hmong + 'Zzzz', # 1E150..1E2BF ; Unknown + 'Wcho', # 1E2C0..1E2F9 ; Wancho + 'Zzzz', # 1E2FA..1E2FE ; Unknown + 'Wcho', # 1E2FF..1E2FF ; Wancho + 'Zzzz', # 1E300..1E7FF ; Unknown 'Mend', # 1E800..1E8C4 ; Mende_Kikakui 'Zzzz', # 1E8C5..1E8C6 ; Unknown 'Mend', # 1E8C7..1E8D6 ; Mende_Kikakui 'Zzzz', # 1E8D7..1E8FF ; Unknown - 'Adlm', # 1E900..1E94A ; Adlam - 'Zzzz', # 1E94B..1E94F ; Unknown + 'Adlm', # 1E900..1E94B ; Adlam + 'Zzzz', # 1E94C..1E94F ; Unknown 'Adlm', # 1E950..1E959 ; Adlam 'Zzzz', # 1E95A..1E95D ; Unknown 'Adlm', # 1E95E..1E95F ; Adlam 'Zzzz', # 1E960..1EC70 ; Unknown 'Zyyy', # 1EC71..1ECB4 ; Common - 'Zzzz', # 1ECB5..1EDFF ; Unknown + 'Zzzz', # 1ECB5..1ED00 ; Unknown + 'Zyyy', # 1ED01..1ED3D ; Common + 'Zzzz', # 1ED3E..1EDFF ; Unknown 'Arab', # 1EE00..1EE03 ; Arabic 'Zzzz', # 1EE04..1EE04 ; Unknown 'Arab', # 1EE05..1EE1F ; Arabic @@ -3026,8 +3066,8 @@ VALUES = [ 'Zzzz', # 1F0F6..1F0FF ; Unknown 'Zyyy', # 1F100..1F10C ; Common 'Zzzz', # 1F10D..1F10F ; Unknown - 'Zyyy', # 1F110..1F16B ; Common - 'Zzzz', # 1F16C..1F16F ; Unknown + 'Zyyy', # 1F110..1F16C ; Common + 'Zzzz', # 1F16D..1F16F ; Unknown 'Zyyy', # 1F170..1F1AC ; Common 'Zzzz', # 1F1AD..1F1E5 ; Unknown 'Zyyy', # 1F1E6..1F1FF ; Common @@ -3042,16 +3082,18 @@ VALUES = [ 'Zzzz', # 1F252..1F25F ; Unknown 'Zyyy', # 1F260..1F265 ; Common 'Zzzz', # 1F266..1F2FF ; Unknown - 'Zyyy', # 1F300..1F6D4 ; Common - 'Zzzz', # 1F6D5..1F6DF ; Unknown + 'Zyyy', # 1F300..1F6D5 ; Common + 'Zzzz', # 1F6D6..1F6DF ; Unknown 'Zyyy', # 1F6E0..1F6EC ; Common 'Zzzz', # 1F6ED..1F6EF ; Unknown - 'Zyyy', # 1F6F0..1F6F9 ; Common - 'Zzzz', # 1F6FA..1F6FF ; Unknown + 'Zyyy', # 1F6F0..1F6FA ; Common + 'Zzzz', # 1F6FB..1F6FF ; Unknown 'Zyyy', # 1F700..1F773 ; Common 'Zzzz', # 1F774..1F77F ; Unknown 'Zyyy', # 1F780..1F7D8 ; Common - 'Zzzz', # 1F7D9..1F7FF ; Unknown + 'Zzzz', # 1F7D9..1F7DF ; Unknown + 'Zyyy', # 1F7E0..1F7EB ; Common + 'Zzzz', # 1F7EC..1F7FF ; Unknown 'Zyyy', # 1F800..1F80B ; Common 'Zzzz', # 1F80C..1F80F ; Unknown 'Zyyy', # 1F810..1F847 ; Common @@ -3063,25 +3105,29 @@ VALUES = [ 'Zyyy', # 1F890..1F8AD ; Common 'Zzzz', # 1F8AE..1F8FF ; Unknown 'Zyyy', # 1F900..1F90B ; Common - 'Zzzz', # 1F90C..1F90F ; Unknown - 'Zyyy', # 1F910..1F93E ; Common - 'Zzzz', # 1F93F..1F93F ; Unknown - 'Zyyy', # 1F940..1F970 ; Common - 'Zzzz', # 1F971..1F972 ; Unknown + 'Zzzz', # 1F90C..1F90C ; Unknown + 'Zyyy', # 1F90D..1F971 ; Common + 'Zzzz', # 1F972..1F972 ; Unknown 
'Zyyy', # 1F973..1F976 ; Common 'Zzzz', # 1F977..1F979 ; Unknown - 'Zyyy', # 1F97A..1F97A ; Common - 'Zzzz', # 1F97B..1F97B ; Unknown - 'Zyyy', # 1F97C..1F9A2 ; Common - 'Zzzz', # 1F9A3..1F9AF ; Unknown - 'Zyyy', # 1F9B0..1F9B9 ; Common - 'Zzzz', # 1F9BA..1F9BF ; Unknown - 'Zyyy', # 1F9C0..1F9C2 ; Common - 'Zzzz', # 1F9C3..1F9CF ; Unknown - 'Zyyy', # 1F9D0..1F9FF ; Common - 'Zzzz', # 1FA00..1FA5F ; Unknown + 'Zyyy', # 1F97A..1F9A2 ; Common + 'Zzzz', # 1F9A3..1F9A4 ; Unknown + 'Zyyy', # 1F9A5..1F9AA ; Common + 'Zzzz', # 1F9AB..1F9AD ; Unknown + 'Zyyy', # 1F9AE..1F9CA ; Common + 'Zzzz', # 1F9CB..1F9CC ; Unknown + 'Zyyy', # 1F9CD..1FA53 ; Common + 'Zzzz', # 1FA54..1FA5F ; Unknown 'Zyyy', # 1FA60..1FA6D ; Common - 'Zzzz', # 1FA6E..1FFFF ; Unknown + 'Zzzz', # 1FA6E..1FA6F ; Unknown + 'Zyyy', # 1FA70..1FA73 ; Common + 'Zzzz', # 1FA74..1FA77 ; Unknown + 'Zyyy', # 1FA78..1FA7A ; Common + 'Zzzz', # 1FA7B..1FA7F ; Unknown + 'Zyyy', # 1FA80..1FA82 ; Common + 'Zzzz', # 1FA83..1FA8F ; Unknown + 'Zyyy', # 1FA90..1FA95 ; Common + 'Zzzz', # 1FA96..1FFFF ; Unknown 'Hani', # 20000..2A6D6 ; Han 'Zzzz', # 2A6D7..2A6FF ; Unknown 'Hani', # 2A700..2B734 ; Han @@ -3135,6 +3181,7 @@ NAMES = { 'Dupl': 'Duployan', 'Egyp': 'Egyptian_Hieroglyphs', 'Elba': 'Elbasan', + 'Elym': 'Elymaic', 'Ethi': 'Ethiopic', 'Geor': 'Georgian', 'Glag': 'Glagolitic', @@ -3153,6 +3200,7 @@ NAMES = { 'Hira': 'Hiragana', 'Hluw': 'Anatolian_Hieroglyphs', 'Hmng': 'Pahawh_Hmong', + 'Hmnp': 'Nyiakeng_Puachue_Hmong', 'Hrkt': 'Katakana_Or_Hiragana', 'Hung': 'Old_Hungarian', 'Ital': 'Old_Italic', @@ -3190,6 +3238,7 @@ NAMES = { 'Mtei': 'Meetei_Mayek', 'Mult': 'Multani', 'Mymr': 'Myanmar', + 'Nand': 'Nandinagari', 'Narb': 'Old_North_Arabian', 'Nbat': 'Nabataean', 'Newa': 'Newa', @@ -3246,6 +3295,7 @@ NAMES = { 'Ugar': 'Ugaritic', 'Vaii': 'Vai', 'Wara': 'Warang_Citi', + 'Wcho': 'Wancho', 'Xpeo': 'Old_Persian', 'Xsux': 'Cuneiform', 'Yiii': 'Yi', diff --git a/Lib/fontTools/varLib/cff.py b/Lib/fontTools/varLib/cff.py index a8da21456..bcafdeee7 100755 --- a/Lib/fontTools/varLib/cff.py +++ b/Lib/fontTools/varLib/cff.py @@ -161,7 +161,7 @@ def merge_PrivateDicts(top_dicts, vsindex_dict, var_model, fd_map): For each key, step through each relevant source font Private dict, and build a list of values to blend. The 'relevant' source fonts are selected by first getting the right - submodel using model_keys[vsindex]. The indices of the + submodel using vsindex_dict[vsindex]. The indices of the subModel.locations are mapped to source font list indices by assuming the latter order is the same as the order of the var_model.locations. I can then get the index of each subModel @@ -180,7 +180,7 @@ def merge_PrivateDicts(top_dicts, vsindex_dict, var_model, fd_map): # At the moment, no PrivateDict has a vsindex key, but let's support # how it should work. See comment at end of # merge_charstrings() - still need to optimize use of vsindex. - sub_model, model_keys = vsindex_dict[vsindex] + sub_model, _ = vsindex_dict[vsindex] master_indices = [] for loc in sub_model.locations[1:]: i = var_model.locations.index(loc) - 1 @@ -270,7 +270,9 @@ def getfd_map(varFont, fonts_list): num_regions = len(region_fonts) topDict = default_font['CFF '].cff.topDictIndex[0] if not hasattr(topDict, 'FDSelect'): - fd_map[0] = [0]*num_regions + # All glyphs reference only one FontDict. + # Map the FD index for regions to index 0. 
+ fd_map[0] = {ri:0 for ri in range(num_regions)} return fd_map gname_mapping = {} @@ -317,6 +319,19 @@ def _get_cs(charstrings, glyphName): return None return charstrings[glyphName] +def _add_new_vsindex(model, key, masterSupports, vsindex_dict, + vsindex_by_key, varDataList): + varTupleIndexes = [] + for support in model.supports[1:]: + if support not in masterSupports: + masterSupports.append(support) + varTupleIndexes.append(masterSupports.index(support)) + var_data = varLib.builder.buildVarData(varTupleIndexes, None, False) + vsindex = len(vsindex_dict) + vsindex_by_key[key] = vsindex + vsindex_dict[vsindex] = (model, [key]) + varDataList.append(var_data) + return vsindex def merge_charstrings(glyphOrder, num_masters, top_dicts, masterModel): @@ -365,24 +380,24 @@ def merge_charstrings(glyphOrder, num_masters, top_dicts, masterModel): # If the charstring required a new model, create # a VarData table to go with, and set vsindex. + key = tuple(v is not None for v in all_cs) try: - key = tuple(v is not None for v in all_cs) vsindex = vsindex_by_key[key] except KeyError: - varTupleIndexes = [] - for support in model.supports[1:]: - if support not in masterSupports: - masterSupports.append(support) - varTupleIndexes.append(masterSupports.index(support)) - var_data = varLib.builder.buildVarData(varTupleIndexes, None, False) - vsindex = len(vsindex_dict) - vsindex_by_key[key] = vsindex - vsindex_dict[vsindex] = (model, [key]) - varDataList.append(var_data) + vsindex = _add_new_vsindex(model, key, masterSupports, vsindex_dict, + vsindex_by_key, varDataList) # We do not need to check for an existing new_cs.private.vsindex, # as we know it doesn't exist yet. if vsindex != 0: new_cs.program[:0] = [vsindex, 'vsindex'] + + # If there is no variation in any of the charstrings, then vsindex_dict + # never gets built. This could still be needed if there is variation + # in the PrivateDict, so we will build the default data for vsindex = 0. + if not vsindex_dict: + key = (True,) * num_masters + _add_new_vsindex(model, key, masterSupports, vsindex_dict, + vsindex_by_key, varDataList) cvData = CVarData(varDataList=varDataList, masterSupports=masterSupports, vsindex_dict=vsindex_dict) # XXX To do: optimize use of vsindex between the PrivateDicts and diff --git a/Lib/fontTools/varLib/merger.py b/Lib/fontTools/varLib/merger.py index ff1321e5b..d77f5c7e4 100644 --- a/Lib/fontTools/varLib/merger.py +++ b/Lib/fontTools/varLib/merger.py @@ -2,6 +2,7 @@ Merge OpenType Layout tables (GDEF / GPOS / GSUB). """ from __future__ import print_function, division, absolute_import +import copy from operator import ior from fontTools.misc.py23 import * from fontTools.misc.fixedTools import otRound @@ -457,7 +458,7 @@ def _PairPosFormat2_align_matrices(self, lst, font, transparent=False): exemplarGlyph = next(iter(classSet)) klass = classDef2.get(exemplarGlyph, 0) rec2 = oldClass2Records[klass] - class2Records.append(rec2) + class2Records.append(copy.deepcopy(rec2)) class1Records.append(rec1new) new_matrices.append(class1Records) matrices = new_matrices diff --git a/NEWS.rst b/NEWS.rst index c862424ac..a1a922ef5 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,23 @@ +- [woff2] Added support for compressing/decompressing WOFF2 fonts with non-transformed + ``glyf`` and ``loca`` tables, as well as with transformed ``hmtx`` table.
+ Removed ``Snippets/woff2_compress.py`` and ``Snippets/woff2_decompress.py`` scripts, + and replaced them with a new console entry point ``fonttools ttLib.woff2`` + that provides two sub-commands ``compress`` and ``decompress``. +- [varLib.cff] Fixed bug when merging CFF2 ``PrivateDicts``. The ``PrivateDict`` + data from the first region font was incorrectly used for all subsequent fonts. + The bug would only affect variable CFF2 fonts with hinting (#1643, #1644). + Also, fixed a merging bug when VF masters have no blends or marking glyphs (#1632, + #1642). +- [loggingTools] Removed unused backport of ``LastResortLogger`` class. +- [subset] Gracefully handle partial MATH table (#1635). +- [featureVars] Avoid duplicate references to ``rvrn`` feature record in + ``DefaultLangSys`` tables when calling ``addFeatureVariations`` on a font that + does not already have a ``GSUB`` table (aa8a5bc6). +- [varLib] Fixed merging of class-based kerning. Before, the process could introduce + rogue kerning values and variations for random classes against class zero (everything + not otherwise classed). +- [unicodedata] Updated Blocks, Scripts and ScriptExtensions to Unicode 12.1. + 3.42.0 (released 2019-05-28) ---------------------------- diff --git a/Snippets/woff2_compress.py b/Snippets/woff2_compress.py deleted file mode 100755 index 689ebdcc1..000000000 --- a/Snippets/woff2_compress.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function, division, absolute_import -from fontTools.misc.py23 import * -from fontTools.ttLib import TTFont -from fontTools.ttx import makeOutputFileName -import sys -import os - - -def main(args=None): - if args is None: - args = sys.argv[1:] - if len(args) < 1: - print("One argument, the input filename, must be provided.", file=sys.stderr) - return 1 - - filename = args[0] - outfilename = makeOutputFileName(filename, outputDir=None, extension='.woff2') - - print("Processing %s => %s" % (filename, outfilename)) - - font = TTFont(filename, recalcBBoxes=False, recalcTimestamp=False) - font.flavor = "woff2" - font.save(outfilename, reorderTables=False) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/Snippets/woff2_decompress.py b/Snippets/woff2_decompress.py deleted file mode 100755 index e7c1beaa5..000000000 --- a/Snippets/woff2_decompress.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function, division, absolute_import -from fontTools.misc.py23 import * -from fontTools.ttLib import TTFont -from fontTools.ttx import makeOutputFileName -import sys -import os - - -def make_output_name(filename): - with open(filename, "rb") as f: - f.seek(4) - sfntVersion = f.read(4) - assert len(sfntVersion) == 4, "not enough data" - ext = '.ttf' if sfntVersion == b"\x00\x01\x00\x00" else ".otf" - outfilename = makeOutputFileName(filename, outputDir=None, extension=ext) - return outfilename - - -def main(args=None): - if args is None: - args = sys.argv[1:] - if len(args) < 1: - print("One argument, the input filename, must be provided.", file=sys.stderr) - return 1 - - filename = args[0] - outfilename = make_output_name(filename) - - print("Processing %s => %s" % (filename, outfilename)) - - font = TTFont(filename, recalcBBoxes=False, recalcTimestamp=False) - font.flavor = None - font.save(outfilename, reorderTables=True) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/Tests/feaLib/builder_test.py b/Tests/feaLib/builder_test.py index eb800782d..3d852afd9 100644 ---
a/Tests/feaLib/builder_test.py +++ b/Tests/feaLib/builder_test.py @@ -513,10 +513,9 @@ class BuilderTest(unittest.TestCase): addOpenTypeFeatures(font, tree) assert "GSUB" in font - @unittest.skipIf(sys.version_info[0:2] < (3, 4), - "assertLogs() was introduced in 3.4") def test_unsupported_subtable_break(self): - with self.assertLogs(level='WARNING') as logs: + logger = logging.getLogger("fontTools.feaLib.builder") + with CapturingLogHandler(logger, level='WARNING') as captor: self.build( "feature test {" " pos a 10;" @@ -524,9 +523,10 @@ class BuilderTest(unittest.TestCase): " pos b 10;" "} test;" ) - self.assertEqual(logs.output, - ['WARNING:fontTools.feaLib.builder::1:32: ' - 'unsupported "subtable" statement for lookup type']) + + captor.assertRegex( + ':1:32: unsupported "subtable" statement for lookup type' + ) def test_skip_featureNames_if_no_name_table(self): features = ( diff --git a/Tests/misc/loggingTools_test.py b/Tests/misc/loggingTools_test.py index 18b71b192..fd64b8b3a 100644 --- a/Tests/misc/loggingTools_test.py +++ b/Tests/misc/loggingTools_test.py @@ -6,15 +6,11 @@ from fontTools.misc.loggingTools import ( configLogger, ChannelsFilter, LogMixin, - StderrHandler, - LastResortLogger, - _resetExistingLoggers, ) import logging import textwrap import time import re -import sys import pytest @@ -179,32 +175,3 @@ def test_LogMixin(): assert isinstance(b.log, logging.Logger) assert a.log.name == "loggingTools_test.A" assert b.log.name == "loggingTools_test.B" - - -@pytest.mark.skipif(sys.version_info[:2] > (2, 7), reason="only for python2.7") -@pytest.mark.parametrize( - "reset", [True, False], ids=["reset", "no-reset"] -) -def test_LastResortLogger(reset, capsys, caplog): - current = logging.getLoggerClass() - msg = "The quick brown fox jumps over the lazy dog" - try: - if reset: - _resetExistingLoggers() - else: - caplog.set_level(logging.ERROR, logger="myCustomLogger") - logging.lastResort = StderrHandler(logging.WARNING) - logging.setLoggerClass(LastResortLogger) - logger = logging.getLogger("myCustomLogger") - logger.error(msg) - finally: - del logging.lastResort - logging.setLoggerClass(current) - - captured = capsys.readouterr() - if reset: - assert msg in captured.err - msg not in caplog.text - else: - msg in caplog.text - msg not in captured.err diff --git a/Tests/subset/data/expect_math_partial.ttx b/Tests/subset/data/expect_math_partial.ttx new file mode 100644 index 000000000..34136e96b --- /dev/null +++ b/Tests/subset/data/expect_math_partial.ttx @@ -0,0 +1,168 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/subset/data/test_math_partial.ttx b/Tests/subset/data/test_math_partial.ttx new file mode 100644 index 000000000..c0a70da0a --- /dev/null +++ b/Tests/subset/data/test_math_partial.ttx @@ -0,0 +1,391 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + XITS Math + + + Regular + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 19 vlineto + -52 6 -14 21 -28 65 rrcurveto + -246 563 -20 0 -206 -488 rlineto + 
-59 -140 -9 -21 -58 -6 rrcurveto + -19 199 19 vlineto + -48 -22 10 31 hvcurveto + 0 12 4 17 5 13 rrcurveto + 46 114 262 0 41 -94 rlineto + 12 -28 7 -27 0 -15 0 -9 -6 -11 -8 -4 -12 -7 -7 -2 -36 0 rrcurveto + -19 vlineto + return + + + -231 0 115 275 rlineto + return + + + + + + -351 endchar + + + 121 0 20 196 41 397 20 hstem + 707 hmoveto + -107 callsubr + -5 257 rmoveto + -106 callsubr + endchar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/subset/subset_test.py b/Tests/subset/subset_test.py index 956197a32..e04027a7d 100644 --- a/Tests/subset/subset_test.py +++ b/Tests/subset/subset_test.py @@ -237,6 +237,13 @@ class SubsetTest(unittest.TestCase): subsetfont = TTFont(subsetpath) self.expect_ttx(subsetfont, self.getpath("expect_keep_math.ttx"), ["GlyphOrder", "CFF ", "MATH", "hmtx"]) + def test_subset_math_partial(self): + _, fontpath = self.compile_font(self.getpath("test_math_partial.ttx"), ".ttf") + subsetpath = self.temp_path(".ttf") + subset.main([fontpath, "--text=A", "--output-file=%s" % subsetpath]) + subsetfont = TTFont(subsetpath) + self.expect_ttx(subsetfont, self.getpath("expect_math_partial.ttx"), ["MATH"]) + def test_subset_opbd_remove(self): # In the test font, only the glyphs 'A' and 'zero' have an entry in # the Optical Bounds table. When subsetting, we do not request any diff --git a/Tests/ttLib/woff2_test.py b/Tests/ttLib/woff2_test.py index 55c4b778e..e4c8bb215 100644 --- a/Tests/ttLib/woff2_test.py +++ b/Tests/ttLib/woff2_test.py @@ -1,19 +1,24 @@ from __future__ import print_function, division, absolute_import, unicode_literals from fontTools.misc.py23 import * from fontTools import ttLib +from fontTools.ttLib import woff2 from fontTools.ttLib.woff2 import ( WOFF2Reader, woff2DirectorySize, woff2DirectoryFormat, woff2FlagsSize, woff2UnknownTagSize, woff2Base128MaxSize, WOFF2DirectoryEntry, getKnownTagIndex, packBase128, base128Size, woff2UnknownTagIndex, WOFF2FlavorData, woff2TransformedTableTags, WOFF2GlyfTable, WOFF2LocaTable, - WOFF2Writer, unpackBase128, unpack255UShort, pack255UShort) + WOFF2HmtxTable, WOFF2Writer, unpackBase128, unpack255UShort, pack255UShort) import unittest from fontTools.misc import sstruct +from fontTools import fontBuilder +from fontTools.pens.ttGlyphPen import TTGlyphPen import struct import os import random import copy from collections import OrderedDict +from functools import partial +import pytest haveBrotli = False try: @@ -122,7 +127,7 @@ class WOFF2ReaderTest(unittest.TestCase): def test_reconstruct_unknown(self): reader = WOFF2Reader(self.file) with self.assertRaisesRegex(ttLib.TTLibError, 'transform for table .* unknown'): - reader.reconstructTable('ZZZZ') + reader.reconstructTable('head') class WOFF2ReaderTTFTest(WOFF2ReaderTest): @@ -145,6 +150,7 @@ class WOFF2ReaderTTFTest(WOFF2ReaderTest): def test_reconstruct_loca(self): woff2Reader = WOFF2Reader(self.file) reconstructedData = woff2Reader['loca'] + self.font.getTableData("glyf") # 'glyf' needs to be compiled before 'loca' self.assertEqual(self.font.getTableData('loca'), reconstructedData) self.assertTrue(hasattr(woff2Reader.tables['glyf'], 'data')) @@ -242,10 +248,6 @@ class WOFF2DirectoryEntryTest(unittest.TestCase): with 
self.assertRaisesRegex(ttLib.TTLibError, "can't read table 'tag'"): self.entry.fromString(bytes(incompleteData)) - def test_table_reserved_flags(self): - with self.assertRaisesRegex(ttLib.TTLibError, "bits 6-7 are reserved"): - self.entry.fromString(bytechr(0xC0)) - def test_loca_zero_transformLength(self): data = bytechr(getKnownTagIndex('loca')) # flags data += packBase128(random.randint(1, 100)) # origLength @@ -291,6 +293,35 @@ class WOFF2DirectoryEntryTest(unittest.TestCase): data = self.entry.toString() self.assertEqual(len(data), expectedSize) + def test_glyf_loca_transform_flags(self): + for tag in ("glyf", "loca"): + entry = WOFF2DirectoryEntry() + entry.tag = Tag(tag) + entry.flags = getKnownTagIndex(entry.tag) + + self.assertEqual(entry.transformVersion, 0) + self.assertTrue(entry.transformed) + + entry.transformed = False + + self.assertEqual(entry.transformVersion, 3) + self.assertEqual(entry.flags & 0b11000000, (3 << 6)) + self.assertFalse(entry.transformed) + + def test_other_transform_flags(self): + entry = WOFF2DirectoryEntry() + entry.tag = Tag('ZZZZ') + entry.flags = woff2UnknownTagIndex + + self.assertEqual(entry.transformVersion, 0) + self.assertFalse(entry.transformed) + + entry.transformed = True + + self.assertEqual(entry.transformVersion, 1) + self.assertEqual(entry.flags & 0b11000000, (1 << 6)) + self.assertTrue(entry.transformed) + class DummyReader(WOFF2Reader): @@ -299,6 +330,7 @@ class DummyReader(WOFF2Reader): for attr in ('majorVersion', 'minorVersion', 'metaOffset', 'metaLength', 'metaOrigLength', 'privLength', 'privOffset'): setattr(self, attr, 0) + self.tables = {} class WOFF2FlavorDataTest(unittest.TestCase): @@ -353,6 +385,24 @@ class WOFF2FlavorDataTest(unittest.TestCase): self.assertEqual(flavorData.majorVersion, 1) self.assertEqual(flavorData.minorVersion, 1) + def test_mutually_exclusive_args(self): + reader = DummyReader(self.file) + with self.assertRaisesRegex(TypeError, "arguments are mutually exclusive"): + WOFF2FlavorData(reader, transformedTables={"hmtx"}) + + def test_transformTables_default(self): + flavorData = WOFF2FlavorData() + self.assertEqual(flavorData.transformedTables, set(woff2TransformedTableTags)) + + def test_transformTables_invalid(self): + msg = r"'glyf' and 'loca' must be transformed \(or not\) together" + + with self.assertRaisesRegex(ValueError, msg): + WOFF2FlavorData(transformedTables={"glyf"}) + + with self.assertRaisesRegex(ValueError, msg): + WOFF2FlavorData(transformedTables={"loca"}) + class WOFF2WriterTest(unittest.TestCase): @@ -360,7 +410,7 @@ class WOFF2WriterTest(unittest.TestCase): def setUpClass(cls): cls.font = ttLib.TTFont(recalcBBoxes=False, recalcTimestamp=False, flavor="woff2") cls.font.importXML(OTX) - cls.tags = [t for t in cls.font.keys() if t != 'GlyphOrder'] + cls.tags = sorted(t for t in cls.font.keys() if t != 'GlyphOrder') cls.numTables = len(cls.tags) cls.file = BytesIO(CFF_WOFF2.getvalue()) cls.file.seek(0, 2) @@ -511,6 +561,30 @@ class WOFF2WriterTest(unittest.TestCase): flavorData.majorVersion, flavorData.minorVersion = (10, 11) self.assertEqual((10, 11), self.writer._getVersion()) + def test_hmtx_trasform(self): + tableTransforms = {"glyf", "loca", "hmtx"} + + writer = WOFF2Writer(BytesIO(), self.numTables, self.font.sfntVersion) + writer.flavorData = WOFF2FlavorData(transformedTables=tableTransforms) + + for tag in self.tags: + writer[tag] = self.font.getTableData(tag) + writer.close() + + # enabling hmtx transform has no effect when font has no glyf table + 
self.assertEqual(writer.file.getvalue(), CFF_WOFF2.getvalue()) + + def test_no_transforms(self): + writer = WOFF2Writer(BytesIO(), self.numTables, self.font.sfntVersion) + writer.flavorData = WOFF2FlavorData(transformedTables=()) + + for tag in self.tags: + writer[tag] = self.font.getTableData(tag) + writer.close() + + # transforms settings have no effect when font is CFF-flavored, since + # all the current transforms only apply to TrueType-flavored fonts. + self.assertEqual(writer.file.getvalue(), CFF_WOFF2.getvalue()) class WOFF2WriterTTFTest(WOFF2WriterTest): @@ -518,7 +592,7 @@ class WOFF2WriterTTFTest(WOFF2WriterTest): def setUpClass(cls): cls.font = ttLib.TTFont(recalcBBoxes=False, recalcTimestamp=False, flavor="woff2") cls.font.importXML(TTX) - cls.tags = [t for t in cls.font.keys() if t != 'GlyphOrder'] + cls.tags = sorted(t for t in cls.font.keys() if t != 'GlyphOrder') cls.numTables = len(cls.tags) cls.file = BytesIO(TT_WOFF2.getvalue()) cls.file.seek(0, 2) @@ -539,6 +613,35 @@ class WOFF2WriterTTFTest(WOFF2WriterTest): for tag in normTables: self.assertEqual(self.writer.tables[tag].data, normTables[tag]) + def test_hmtx_trasform(self): + tableTransforms = {"glyf", "loca", "hmtx"} + + writer = WOFF2Writer(BytesIO(), self.numTables, self.font.sfntVersion) + writer.flavorData = WOFF2FlavorData(transformedTables=tableTransforms) + + for tag in self.tags: + writer[tag] = self.font.getTableData(tag) + writer.close() + + length = len(writer.file.getvalue()) + + # enabling optional hmtx transform shaves off a few bytes + self.assertLess(length, len(TT_WOFF2.getvalue())) + + def test_no_transforms(self): + writer = WOFF2Writer(BytesIO(), self.numTables, self.font.sfntVersion) + writer.flavorData = WOFF2FlavorData(transformedTables=()) + + for tag in self.tags: + writer[tag] = self.font.getTableData(tag) + writer.close() + + self.assertNotEqual(writer.file.getvalue(), TT_WOFF2.getvalue()) + + writer.file.seek(0) + reader = WOFF2Reader(writer.file) + self.assertEqual(len(reader.flavorData.transformedTables), 0) + class WOFF2LocaTableTest(unittest.TestCase): @@ -708,28 +811,6 @@ class WOFF2GlyfTableTest(unittest.TestCase): data = glyfTable.transform(self.font) self.assertEqual(self.transformedGlyfData, data) - def test_transform_glyf_incorrect_glyphOrder(self): - glyfTable = self.font['glyf'] - badGlyphOrder = self.font.getGlyphOrder()[:-1] - del glyfTable.glyphOrder - self.font.setGlyphOrder(badGlyphOrder) - with self.assertRaisesRegex(ttLib.TTLibError, "incorrect glyphOrder"): - glyfTable.transform(self.font) - glyfTable.glyphOrder = badGlyphOrder - with self.assertRaisesRegex(ttLib.TTLibError, "incorrect glyphOrder"): - glyfTable.transform(self.font) - - def test_transform_glyf_missing_glyphOrder(self): - glyfTable = self.font['glyf'] - del glyfTable.glyphOrder - del self.font.glyphOrder - numGlyphs = self.font['maxp'].numGlyphs - del self.font['maxp'] - glyfTable.transform(self.font) - expected = [".notdef"] - expected.extend(["glyph%.5d" % i for i in range(1, numGlyphs)]) - self.assertEqual(expected, glyfTable.glyphOrder) - def test_roundtrip_glyf_reconstruct_and_transform(self): glyfTable = WOFF2GlyfTable() glyfTable.reconstruct(self.transformedGlyfData, self.font) @@ -747,6 +828,471 @@ class WOFF2GlyfTableTest(unittest.TestCase): self.assertEqual(normGlyfData, reconstructedData) +@pytest.fixture(scope="module") +def fontfile(): + + class Glyph(object): + def __init__(self, empty=False, **kwargs): + if not empty: + self.draw = partial(self.drawRect, **kwargs) + else: + self.draw = 
lambda pen: None + + @staticmethod + def drawRect(pen, xMin, xMax): + pen.moveTo((xMin, 0)) + pen.lineTo((xMin, 1000)) + pen.lineTo((xMax, 1000)) + pen.lineTo((xMax, 0)) + pen.closePath() + + class CompositeGlyph(object): + def __init__(self, components): + self.components = components + + def draw(self, pen): + for baseGlyph, (offsetX, offsetY) in self.components: + pen.addComponent(baseGlyph, (1, 0, 0, 1, offsetX, offsetY)) + + fb = fontBuilder.FontBuilder(unitsPerEm=1000, isTTF=True) + fb.setupGlyphOrder( + [".notdef", "space", "A", "acutecomb", "Aacute", "zero", "one", "two"] + ) + fb.setupCharacterMap( + { + 0x20: "space", + 0x41: "A", + 0x0301: "acutecomb", + 0xC1: "Aacute", + 0x30: "zero", + 0x31: "one", + 0x32: "two", + } + ) + fb.setupHorizontalMetrics( + { + ".notdef": (500, 50), + "space": (600, 0), + "A": (550, 40), + "acutecomb": (0, -40), + "Aacute": (550, 40), + "zero": (500, 30), + "one": (500, 50), + "two": (500, 40), + } + ) + fb.setupHorizontalHeader(ascent=1000, descent=-200) + + srcGlyphs = { + ".notdef": Glyph(xMin=50, xMax=450), + "space": Glyph(empty=True), + "A": Glyph(xMin=40, xMax=510), + "acutecomb": Glyph(xMin=-40, xMax=60), + "Aacute": CompositeGlyph([("A", (0, 0)), ("acutecomb", (200, 0))]), + "zero": Glyph(xMin=30, xMax=470), + "one": Glyph(xMin=50, xMax=450), + "two": Glyph(xMin=40, xMax=460), + } + pen = TTGlyphPen(srcGlyphs) + glyphSet = {} + for glyphName, glyph in srcGlyphs.items(): + glyph.draw(pen) + glyphSet[glyphName] = pen.glyph() + fb.setupGlyf(glyphSet) + + fb.setupNameTable( + { + "familyName": "TestWOFF2", + "styleName": "Regular", + "uniqueFontIdentifier": "TestWOFF2 Regular; Version 1.000; ABCD", + "fullName": "TestWOFF2 Regular", + "version": "Version 1.000", + "psName": "TestWOFF2-Regular", + } + ) + fb.setupOS2() + fb.setupPost() + + buf = BytesIO() + fb.save(buf) + buf.seek(0) + + assert fb.font["maxp"].numGlyphs == 8 + assert fb.font["hhea"].numberOfHMetrics == 6 + for glyphName in fb.font.getGlyphOrder(): + xMin = getattr(fb.font["glyf"][glyphName], "xMin", 0) + assert xMin == fb.font["hmtx"][glyphName][1] + + return buf + + +@pytest.fixture +def ttFont(fontfile): + return ttLib.TTFont(fontfile, recalcBBoxes=False, recalcTimestamp=False) + + +class WOFF2HmtxTableTest(object): + def test_transform_no_sidebearings(self, ttFont): + hmtxTable = WOFF2HmtxTable() + hmtxTable.metrics = ttFont["hmtx"].metrics + + data = hmtxTable.transform(ttFont) + + assert data == ( + b"\x03" # 00000011 | bits 0 and 1 are set (no sidebearings arrays) + + # advanceWidthArray + b'\x01\xf4' # .notdef: 500 + b'\x02X' # space: 600 + b'\x02&' # A: 550 + b'\x00\x00' # acutecomb: 0 + b'\x02&' # Aacute: 550 + b'\x01\xf4' # zero: 500 + ) + + def test_transform_proportional_sidebearings(self, ttFont): + hmtxTable = WOFF2HmtxTable() + metrics = ttFont["hmtx"].metrics + # force one of the proportional glyphs to have its left sidebearing be + # different from its xMin (40) + metrics["A"] = (550, 39) + hmtxTable.metrics = metrics + + assert ttFont["glyf"]["A"].xMin != metrics["A"][1] + + data = hmtxTable.transform(ttFont) + + assert data == ( + b"\x02" # 00000010 | bits 0 unset: explicit proportional sidebearings + + # advanceWidthArray + b'\x01\xf4' # .notdef: 500 + b'\x02X' # space: 600 + b'\x02&' # A: 550 + b'\x00\x00' # acutecomb: 0 + b'\x02&' # Aacute: 550 + b'\x01\xf4' # zero: 500 + + # lsbArray + b'\x002' # .notdef: 50 + b'\x00\x00' # space: 0 + b"\x00'" # A: 39 (xMin: 40) + b'\xff\xd8' # acutecomb: -40 + b'\x00(' # Aacute: 40 + b'\x00\x1e' # zero: 30 + ) + + def 
test_transform_monospaced_sidebearings(self, ttFont): + hmtxTable = WOFF2HmtxTable() + metrics = ttFont["hmtx"].metrics + hmtxTable.metrics = metrics + + # force one of the monospaced glyphs at the end of hmtx table to have + # its xMin different from its left sidebearing (50) + ttFont["glyf"]["one"].xMin = metrics["one"][1] + 1 + + data = hmtxTable.transform(ttFont) + + assert data == ( + b"\x01" # 00000001 | bits 1 unset: explicit monospaced sidebearings + + # advanceWidthArray + b'\x01\xf4' # .notdef: 500 + b'\x02X' # space: 600 + b'\x02&' # A: 550 + b'\x00\x00' # acutecomb: 0 + b'\x02&' # Aacute: 550 + b'\x01\xf4' # zero: 500 + + # leftSideBearingArray + b'\x002' # one: 50 (xMin: 51) + b'\x00(' # two: 40 + ) + + def test_transform_not_applicable(self, ttFont): + hmtxTable = WOFF2HmtxTable() + metrics = ttFont["hmtx"].metrics + # force both a proportional and monospaced glyph to have sidebearings + # different from the respective xMin coordinates + metrics["A"] = (550, 39) + metrics["one"] = (500, 51) + hmtxTable.metrics = metrics + + # 'None' signals to fall back using untransformed hmtx table data + assert hmtxTable.transform(ttFont) is None + + def test_reconstruct_no_sidebearings(self, ttFont): + hmtxTable = WOFF2HmtxTable() + + data = ( + b"\x03" # 00000011 | bits 0 and 1 are set (no sidebearings arrays) + + # advanceWidthArray + b'\x01\xf4' # .notdef: 500 + b'\x02X' # space: 600 + b'\x02&' # A: 550 + b'\x00\x00' # acutecomb: 0 + b'\x02&' # Aacute: 550 + b'\x01\xf4' # zero: 500 + ) + + hmtxTable.reconstruct(data, ttFont) + + assert hmtxTable.metrics == { + ".notdef": (500, 50), + "space": (600, 0), + "A": (550, 40), + "acutecomb": (0, -40), + "Aacute": (550, 40), + "zero": (500, 30), + "one": (500, 50), + "two": (500, 40), + } + + def test_reconstruct_proportional_sidebearings(self, ttFont): + hmtxTable = WOFF2HmtxTable() + + data = ( + b"\x02" # 00000010 | bits 0 unset: explicit proportional sidebearings + + # advanceWidthArray + b'\x01\xf4' # .notdef: 500 + b'\x02X' # space: 600 + b'\x02&' # A: 550 + b'\x00\x00' # acutecomb: 0 + b'\x02&' # Aacute: 550 + b'\x01\xf4' # zero: 500 + + # lsbArray + b'\x002' # .notdef: 50 + b'\x00\x00' # space: 0 + b"\x00'" # A: 39 (xMin: 40) + b'\xff\xd8' # acutecomb: -40 + b'\x00(' # Aacute: 40 + b'\x00\x1e' # zero: 30 + ) + + hmtxTable.reconstruct(data, ttFont) + + assert hmtxTable.metrics == { + ".notdef": (500, 50), + "space": (600, 0), + "A": (550, 39), + "acutecomb": (0, -40), + "Aacute": (550, 40), + "zero": (500, 30), + "one": (500, 50), + "two": (500, 40), + } + + assert ttFont["glyf"]["A"].xMin == 40 + + def test_reconstruct_monospaced_sidebearings(self, ttFont): + hmtxTable = WOFF2HmtxTable() + + data = ( + b"\x01" # 00000001 | bits 1 unset: explicit monospaced sidebearings + + # advanceWidthArray + b'\x01\xf4' # .notdef: 500 + b'\x02X' # space: 600 + b'\x02&' # A: 550 + b'\x00\x00' # acutecomb: 0 + b'\x02&' # Aacute: 550 + b'\x01\xf4' # zero: 500 + + # leftSideBearingArray + b'\x003' # one: 51 (xMin: 50) + b'\x00(' # two: 40 + ) + + hmtxTable.reconstruct(data, ttFont) + + assert hmtxTable.metrics == { + ".notdef": (500, 50), + "space": (600, 0), + "A": (550, 40), + "acutecomb": (0, -40), + "Aacute": (550, 40), + "zero": (500, 30), + "one": (500, 51), + "two": (500, 40), + } + + assert ttFont["glyf"]["one"].xMin == 50 + + def test_reconstruct_flags_reserved_bits(self): + hmtxTable = WOFF2HmtxTable() + + with pytest.raises( + ttLib.TTLibError, match="Bits 2-7 of 'hmtx' flags are reserved" + ): + hmtxTable.reconstruct(b"\xFF", ttFont=None) + 
+ def test_reconstruct_flags_required_bits(self): + hmtxTable = WOFF2HmtxTable() + + with pytest.raises(ttLib.TTLibError, match="either bits 0 or 1 .* must set"): + hmtxTable.reconstruct(b"\x00", ttFont=None) + + def test_reconstruct_too_much_data(self, ttFont): + ttFont["hhea"].numberOfHMetrics = 2 + data = b'\x03\x01\xf4\x02X\x02&' + hmtxTable = WOFF2HmtxTable() + + with pytest.raises(ttLib.TTLibError, match="too much 'hmtx' table data"): + hmtxTable.reconstruct(data, ttFont) + + +class WOFF2RoundtripTest(object): + @staticmethod + def roundtrip(infile): + infile.seek(0) + ttFont = ttLib.TTFont(infile, recalcBBoxes=False, recalcTimestamp=False) + outfile = BytesIO() + ttFont.save(outfile) + return outfile, ttFont + + def test_roundtrip_default_transforms(self, ttFont): + ttFont.flavor = "woff2" + # ttFont.flavorData = None + tmp = BytesIO() + ttFont.save(tmp) + + tmp2, ttFont2 = self.roundtrip(tmp) + + assert tmp.getvalue() == tmp2.getvalue() + assert ttFont2.reader.flavorData.transformedTables == {"glyf", "loca"} + + def test_roundtrip_no_transforms(self, ttFont): + ttFont.flavor = "woff2" + ttFont.flavorData = WOFF2FlavorData(transformedTables=[]) + tmp = BytesIO() + ttFont.save(tmp) + + tmp2, ttFont2 = self.roundtrip(tmp) + + assert tmp.getvalue() == tmp2.getvalue() + assert not ttFont2.reader.flavorData.transformedTables + + def test_roundtrip_all_transforms(self, ttFont): + ttFont.flavor = "woff2" + ttFont.flavorData = WOFF2FlavorData(transformedTables=["glyf", "loca", "hmtx"]) + tmp = BytesIO() + ttFont.save(tmp) + + tmp2, ttFont2 = self.roundtrip(tmp) + + assert tmp.getvalue() == tmp2.getvalue() + assert ttFont2.reader.flavorData.transformedTables == {"glyf", "loca", "hmtx"} + + def test_roundtrip_only_hmtx_no_glyf_transform(self, ttFont): + ttFont.flavor = "woff2" + ttFont.flavorData = WOFF2FlavorData(transformedTables=["hmtx"]) + tmp = BytesIO() + ttFont.save(tmp) + + tmp2, ttFont2 = self.roundtrip(tmp) + + assert tmp.getvalue() == tmp2.getvalue() + assert ttFont2.reader.flavorData.transformedTables == {"hmtx"} + + +class MainTest(object): + + @staticmethod + def make_ttf(tmpdir): + ttFont = ttLib.TTFont(recalcBBoxes=False, recalcTimestamp=False) + ttFont.importXML(TTX) + filename = str(tmpdir / "TestTTF-Regular.ttf") + ttFont.save(filename) + return filename + + def test_compress_ttf(self, tmpdir): + input_file = self.make_ttf(tmpdir) + + assert woff2.main(["compress", input_file]) is None + + assert (tmpdir / "TestTTF-Regular.woff2").check(file=True) + + def test_compress_ttf_no_glyf_transform(self, tmpdir): + input_file = self.make_ttf(tmpdir) + + assert woff2.main(["compress", "--no-glyf-transform", input_file]) is None + + assert (tmpdir / "TestTTF-Regular.woff2").check(file=True) + + def test_compress_ttf_hmtx_transform(self, tmpdir): + input_file = self.make_ttf(tmpdir) + + assert woff2.main(["compress", "--hmtx-transform", input_file]) is None + + assert (tmpdir / "TestTTF-Regular.woff2").check(file=True) + + def test_compress_ttf_no_glyf_transform_hmtx_transform(self, tmpdir): + input_file = self.make_ttf(tmpdir) + + assert woff2.main( + ["compress", "--no-glyf-transform", "--hmtx-transform", input_file] + ) is None + + assert (tmpdir / "TestTTF-Regular.woff2").check(file=True) + + def test_compress_output_file(self, tmpdir): + input_file = self.make_ttf(tmpdir) + output_file = tmpdir / "TestTTF.woff2" + + assert woff2.main( + ["compress", "-o", str(output_file), str(input_file)] + ) is None + + assert output_file.check(file=True) + + def test_compress_otf(self, 
tmpdir): + ttFont = ttLib.TTFont(recalcBBoxes=False, recalcTimestamp=False) + ttFont.importXML(OTX) + input_file = str(tmpdir / "TestOTF-Regular.otf") + ttFont.save(input_file) + + assert woff2.main(["compress", input_file]) is None + + assert (tmpdir / "TestOTF-Regular.woff2").check(file=True) + + def test_decompress_ttf(self, tmpdir): + input_file = tmpdir / "TestTTF-Regular.woff2" + input_file.write_binary(TT_WOFF2.getvalue()) + + assert woff2.main(["decompress", str(input_file)]) is None + + assert (tmpdir / "TestTTF-Regular.ttf").check(file=True) + + def test_decompress_otf(self, tmpdir): + input_file = tmpdir / "TestTTF-Regular.woff2" + input_file.write_binary(CFF_WOFF2.getvalue()) + + assert woff2.main(["decompress", str(input_file)]) is None + + assert (tmpdir / "TestTTF-Regular.otf").check(file=True) + + def test_decompress_output_file(self, tmpdir): + input_file = tmpdir / "TestTTF-Regular.woff2" + input_file.write_binary(TT_WOFF2.getvalue()) + output_file = tmpdir / "TestTTF.ttf" + + assert woff2.main( + ["decompress", "-o", str(output_file), str(input_file)] + ) is None + + assert output_file.check(file=True) + + def test_no_subcommand_show_help(self, capsys): + with pytest.raises(SystemExit): + woff2.main(["--help"]) + + captured = capsys.readouterr() + assert "usage: fonttools ttLib.woff2" in captured.out + + class Base128Test(unittest.TestCase): def test_unpackBase128(self): diff --git a/Tests/varLib/data/KerningMerging.designspace b/Tests/varLib/data/KerningMerging.designspace new file mode 100644 index 000000000..f7ce7ef8c --- /dev/null +++ b/Tests/varLib/data/KerningMerging.designspace @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/varLib/data/TestNonMarkingCFF2.designspace b/Tests/varLib/data/TestNonMarkingCFF2.designspace new file mode 100644 index 000000000..23c7797d1 --- /dev/null +++ b/Tests/varLib/data/TestNonMarkingCFF2.designspace @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/varLib/data/master_kerning_merging/0.ttx b/Tests/varLib/data/master_kerning_merging/0.ttx new file mode 100644 index 000000000..858f9275d --- /dev/null +++ b/Tests/varLib/data/master_kerning_merging/0.ttx @@ -0,0 +1,304 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Test Thin + + + Regular + + + 0.000;NONE;Test-Thin + + + Test Thin + + + Version 0.000 + + + Test-Thin + + + Test + + + Thin + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/varLib/data/master_kerning_merging/1.ttx b/Tests/varLib/data/master_kerning_merging/1.ttx new file mode 100644 index 000000000..d60a70845 --- /dev/null +++ b/Tests/varLib/data/master_kerning_merging/1.ttx @@ -0,0 +1,292 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Test + + + Regular + + + 
0.000;NONE;Test-Regular + + + Test Regular + + + Version 0.000 + + + Test-Regular + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/varLib/data/master_kerning_merging/2.ttx b/Tests/varLib/data/master_kerning_merging/2.ttx new file mode 100644 index 000000000..e01a7b1f6 --- /dev/null +++ b/Tests/varLib/data/master_kerning_merging/2.ttx @@ -0,0 +1,304 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Test Black + + + Regular + + + 0.000;NONE;Test-Black + + + Test Black + + + Version 0.000 + + + Test-Black + + + Test + + + Black + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/varLib/data/master_non_marking_cff2/TestNonMarkingCFF2_ExtraLight.ttx b/Tests/varLib/data/master_non_marking_cff2/TestNonMarkingCFF2_ExtraLight.ttx new file mode 100644 index 000000000..9d3b2679e --- /dev/null +++ b/Tests/varLib/data/master_non_marking_cff2/TestNonMarkingCFF2_ExtraLight.ttx @@ -0,0 +1,233 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Source Code Variable + + + Regular + + + 1.010;ADBO;SourceCode_ExtraLight + + + Source Code Variable + + + Version 1.010;hotconv 1.0.109;makeotfexe 2.5.65596 + + + SourceCode_ExtraLight + + + Roman Master 0 + + + Source Code Variable + + + Regular + + + 1.010;ADBO;SourceCode_ExtraLight + + + Source Code Variable + + + Version 1.010;hotconv 1.0.109;makeotfexe 2.5.65596 + + + SourceCode_ExtraLight + + + Roman Master 0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + endchar + + + endchar + += + + + + + + + + + + + + + diff --git a/Tests/varLib/data/master_non_marking_cff2/TestNonMarkingCFF2_Regular.ttx b/Tests/varLib/data/master_non_marking_cff2/TestNonMarkingCFF2_Regular.ttx new file mode 100644 index 000000000..4e6775da8 --- /dev/null +++ b/Tests/varLib/data/master_non_marking_cff2/TestNonMarkingCFF2_Regular.ttx @@ -0,0 +1,233 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Source Code Variable + + + Regular + + + 1.010;ADBO;SourceCodeVariable-Roman + + + Source Code Variable + + + Version 1.010;hotconv 1.0.109;makeotfexe 2.5.65596 + + + SourceCodeVariable-Roman + + + Roman + + + Source Code Variable + + + Regular + + + 1.010;ADBO;SourceCodeVariable-Roman + + + Source Code Variable + + + Version 1.010;hotconv 1.0.109;makeotfexe 2.5.65596 + + + SourceCodeVariable-Roman + + + Roman + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + endchar + + + endchar + + + + + + + + + + + + + + + diff --git a/Tests/varLib/data/test_results/BuildTestCFF2.ttx b/Tests/varLib/data/test_results/BuildTestCFF2.ttx index 
37ed3aaf4..91b83fc01 100644 --- a/Tests/varLib/data/test_results/BuildTestCFF2.ttx +++ b/Tests/varLib/data/test_results/BuildTestCFF2.ttx @@ -1,5 +1,5 @@ - + @@ -67,29 +67,29 @@ - + - + - + - + - + - + - + - + diff --git a/Tests/varLib/data/test_results/TestNonMarkingCFF2.ttx b/Tests/varLib/data/test_results/TestNonMarkingCFF2.ttx new file mode 100644 index 000000000..5ded5b915 --- /dev/null +++ b/Tests/varLib/data/test_results/TestNonMarkingCFF2.ttx @@ -0,0 +1,76 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tests/varLib/varLib_test.py b/Tests/varLib/varLib_test.py index ddf23132b..e29befbf4 100644 --- a/Tests/varLib/varLib_test.py +++ b/Tests/varLib/varLib_test.py @@ -2,6 +2,7 @@ from __future__ import print_function, division, absolute_import from fontTools.misc.py23 import * from fontTools.ttLib import TTFont, newTable from fontTools.varLib import build +from fontTools.varLib.mutator import instantiateVariableFont from fontTools.varLib import main as varLib_main, load_masters from fontTools.designspaceLib import ( DesignSpaceDocumentError, DesignSpaceDocument, SourceDescriptor, @@ -227,6 +228,28 @@ class BuildTest(unittest.TestCase): expected_ttx_name=test_name ) + def test_varlib_nonmarking_CFF2(self): + ds_path = self.get_test_input('TestNonMarkingCFF2.designspace') + ttx_dir = self.get_test_input("master_non_marking_cff2") + expected_ttx_path = self.get_test_output("TestNonMarkingCFF2.ttx") + + self.temp_dir() + for path in self.get_file_list(ttx_dir, '.ttx', 'TestNonMarkingCFF2_'): + self.compile_font(path, ".otf", self.tempdir) + + ds = DesignSpaceDocument.fromfile(ds_path) + for source in ds.sources: + source.path = os.path.join( + self.tempdir, os.path.basename(source.filename).replace(".ufo", ".otf") + ) + ds.updatePaths() + + varfont, _, _ = build(ds) + varfont = reload_font(varfont) + + tables = ["CFF2"] + self.expect_ttx(varfont, expected_ttx_path, tables) + def test_varlib_build_CFF2(self): ds_path = self.get_test_input('TestCFF2.designspace') ttx_dir = self.get_test_input("master_cff2") @@ -249,7 +272,6 @@ class BuildTest(unittest.TestCase): tables = ["fvar", "CFF2"] self.expect_ttx(varfont, expected_ttx_path, tables) - def test_varlib_build_sparse_CFF2(self): ds_path = self.get_test_input('TestSparseCFF2VF.designspace') ttx_dir = self.get_test_input("master_sparse_cff2") @@ -272,7 +294,6 @@ class BuildTest(unittest.TestCase): tables = ["fvar", "CFF2"] self.expect_ttx(varfont, expected_ttx_path, tables) - def test_varlib_build_vpal(self): ds_path = self.get_test_input('test_vpal.designspace') ttx_dir = self.get_test_input("master_vpal_test") @@ -295,7 +316,6 @@ class BuildTest(unittest.TestCase): tables = ["GPOS"] self.expect_ttx(varfont, expected_ttx_path, tables) - def test_varlib_main_ttf(self): """Mostly for testing varLib.main() """ @@ -519,6 +539,98 @@ class BuildTest(unittest.TestCase): self.expect_ttx(varfont, expected_ttx_path, tables) self.check_ttx_dump(varfont, expected_ttx_path, tables, suffix) + def test_kerning_merging(self): + """Test the correct merging of class-based pair kerning. + + Problem description at https://github.com/fonttools/fonttools/pull/1638. + Test font and Designspace generated by + https://gist.github.com/madig/183d0440c9f7d05f04bd1280b9664bd1. 
+ """ + ds_path = self.get_test_input("KerningMerging.designspace") + ttx_dir = self.get_test_input("master_kerning_merging") + + ds = DesignSpaceDocument.fromfile(ds_path) + for source in ds.sources: + ttx_dump = TTFont() + ttx_dump.importXML( + os.path.join( + ttx_dir, os.path.basename(source.filename).replace(".ttf", ".ttx") + ) + ) + source.font = reload_font(ttx_dump) + + varfont, _, _ = build(ds) + varfont = reload_font(varfont) + + class_kerning_tables = [ + t + for l in varfont["GPOS"].table.LookupList.Lookup + for t in l.SubTable + if t.Format == 2 + ] + assert len(class_kerning_tables) == 1 + class_kerning_table = class_kerning_tables[0] + + # Test that no class kerned against class zero (containing all glyphs not + # classed) has a `XAdvDevice` table attached, which in the variable font + # context is a "VariationIndex" table and points to kerning deltas in the GDEF + # table. Variation deltas of any kerning class against class zero should + # probably never exist. + for class1_record in class_kerning_table.Class1Record: + class2_zero = class1_record.Class2Record[0] + assert getattr(class2_zero.Value1, "XAdvDevice", None) is None + + # Assert the variable font's kerning table (without deltas) is equal to the + # default font's kerning table. The bug fixed in + # https://github.com/fonttools/fonttools/pull/1638 caused rogue kerning + # values to be written to the variable font. + assert _extract_flat_kerning(varfont, class_kerning_table) == { + ("A", ".notdef"): 0, + ("A", "A"): 0, + ("A", "B"): -20, + ("A", "C"): 0, + ("A", "D"): -20, + ("B", ".notdef"): 0, + ("B", "A"): 0, + ("B", "B"): 0, + ("B", "C"): 0, + ("B", "D"): 0, + } + + instance_thin = instantiateVariableFont(varfont, {"wght": 100}) + instance_thin_kerning_table = ( + instance_thin["GPOS"].table.LookupList.Lookup[0].SubTable[0] + ) + assert _extract_flat_kerning(instance_thin, instance_thin_kerning_table) == { + ("A", ".notdef"): 0, + ("A", "A"): 0, + ("A", "B"): 0, + ("A", "C"): 10, + ("A", "D"): 0, + ("B", ".notdef"): 0, + ("B", "A"): 0, + ("B", "B"): 0, + ("B", "C"): 10, + ("B", "D"): 0, + } + + instance_black = instantiateVariableFont(varfont, {"wght": 900}) + instance_black_kerning_table = ( + instance_black["GPOS"].table.LookupList.Lookup[0].SubTable[0] + ) + assert _extract_flat_kerning(instance_black, instance_black_kerning_table) == { + ("A", ".notdef"): 0, + ("A", "A"): 0, + ("A", "B"): 0, + ("A", "C"): 0, + ("A", "D"): 40, + ("B", ".notdef"): 0, + ("B", "A"): 0, + ("B", "B"): 0, + ("B", "C"): 0, + ("B", "D"): 40, + } + def test_load_masters_layerName_without_required_font(): ds = DesignSpaceDocument() @@ -534,5 +646,20 @@ def test_load_masters_layerName_without_required_font(): load_masters(ds) +def _extract_flat_kerning(font, pairpos_table): + extracted_kerning = {} + for glyph_name_1 in pairpos_table.Coverage.glyphs: + class_def_1 = pairpos_table.ClassDef1.classDefs.get(glyph_name_1, 0) + for glyph_name_2 in font.getGlyphOrder(): + class_def_2 = pairpos_table.ClassDef2.classDefs.get(glyph_name_2, 0) + kern_value = ( + pairpos_table.Class1Record[class_def_1] + .Class2Record[class_def_2] + .Value1.XAdvance + ) + extracted_kerning[(glyph_name_1, glyph_name_2)] = kern_value + return extracted_kerning + + if __name__ == "__main__": sys.exit(unittest.main()) diff --git a/requirements.txt b/requirements.txt index bdfd52ba7..8e1fe6ed5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,4 +8,4 @@ scipy==1.3.0; platform_python_implementation != "PyPy" and python_version >= '3. 
munkres==1.0.12; platform_python_implementation == "PyPy" and python_version < '3.5' # pyup: ignore munkres==1.1.2; platform_python_implementation == "PyPy" and python_version >= '3.5' zopfli==0.1.6 -fs==2.4.5 +fs==2.4.8 diff --git a/tox.ini b/tox.ini index 2e8d9ee0f..6a01501fb 100644 --- a/tox.ini +++ b/tox.ini @@ -6,6 +6,7 @@ envlist = py{27,37}-cov, htmlcov deps = cov: coverage>=4.3 pytest + pytest-randomly -rrequirements.txt extras = ufo
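Below is a minimal usage sketch, not part of the patch itself, of the new WOFF2 code paths exercised by the MainTest and WOFF2RoundtripTest cases above. It assumes the Brotli library is installed and uses a hypothetical input font path "MyFont.ttf"; the compress sub-command is assumed to write "MyFont.woff2" next to the input, as the tests above show for their temporary files.

    from io import BytesIO
    from fontTools.ttLib import TTFont, woff2
    from fontTools.ttLib.woff2 import WOFF2FlavorData

    # Command-line style (mirrors MainTest): compress a TrueType font with the
    # optional 'hmtx' transform enabled, then decompress the resulting WOFF2.
    # "MyFont.ttf" / "MyFont.woff2" are placeholder paths, not files from this patch.
    woff2.main(["compress", "--hmtx-transform", "MyFont.ttf"])
    woff2.main(["decompress", "MyFont.woff2"])

    # Library style (mirrors WOFF2RoundtripTest): choose which tables to transform
    # via WOFF2FlavorData before saving a TTFont with the "woff2" flavor.
    font = TTFont("MyFont.ttf", recalcBBoxes=False, recalcTimestamp=False)
    font.flavor = "woff2"
    font.flavorData = WOFF2FlavorData(transformedTables={"glyf", "loca", "hmtx"})
    buf = BytesIO()
    font.save(buf)

As the WOFF2WriterTest comments above note, the optional transforms only change the output for TrueType-flavored fonts; for a CFF-flavored font the compressed data is the same whether or not they are requested.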