Merge pull request #2261 from fonttools/faster-GlyphCoordinates

Faster glyph coordinates
Commit 3cfc87be71 by Behdad Esfahbod, 2021-04-09 14:05:19 -06:00 (committed via GitHub)
10 changed files with 328 additions and 367 deletions

Lib/fontTools/misc/py23.py

@ -71,10 +71,10 @@ def strjoin(iterable, joiner=""):
def tobytes(s, encoding="ascii", errors="strict"):
if not isinstance(s, bytes):
if isinstance(s, str):
return s.encode(encoding, errors)
else:
return s
return bytes(s)
def tounicode(s, encoding="ascii", errors="strict"):

Lib/fontTools/subset/__init__.py

@ -2203,7 +2203,7 @@ def subset_glyphs(self, s):
def remapComponentsFast(self, glyphidmap):
if not self.data or struct.unpack(">h", self.data[:2])[0] >= 0:
return # Not composite
data = array.array("B", self.data)
data = self.data = bytearray(self.data)
i = 10
more = 1
while more:
@ -2223,8 +2223,6 @@ def remapComponentsFast(self, glyphidmap):
elif flags & 0x0080: i += 8 # WE_HAVE_A_TWO_BY_TWO
more = flags & 0x0020 # MORE_COMPONENTS
self.data = data.tobytes()
@_add_method(ttLib.getTableClass('glyf'))
def closure_glyphs(self, s):
glyphSet = self.glyphs
@ -2247,7 +2245,7 @@ def prune_pre_subset(self, font, options):
g = self[self.glyphOrder[0]]
# Yay, easy!
g.__dict__.clear()
g.data = ""
g.data = b''
return True
@_add_method(ttLib.getTableClass('glyf'))
@ -2262,7 +2260,7 @@ def subset_glyphs(self, s):
Glyph = ttLib.getTableModule('glyf').Glyph
for g in s.glyphs_emptied:
self.glyphs[g] = Glyph()
self.glyphs[g].data = ''
self.glyphs[g].data = b''
self.glyphOrder = [g for g in self.glyphOrder if g in s.glyphs or g in s.glyphs_emptied]
# Don't drop empty 'glyf' tables, otherwise 'loca' doesn't get subset.
return True

Lib/fontTools/ttLib/tables/TupleVariation.py

@ -1,4 +1,3 @@
from fontTools.misc.py23 import bytechr, byteord, bytesjoin
from fontTools.misc.fixedTools import (
fixedToFloat as fi2fl,
floatToFixed as fl2fi,
@ -8,6 +7,7 @@ from fontTools.misc.fixedTools import (
)
from fontTools.misc.textTools import safeEval
import array
from collections import Counter, defaultdict
import io
import logging
import struct
@ -38,7 +38,7 @@ class TupleVariation(object):
def __init__(self, axes, coordinates):
self.axes = axes.copy()
self.coordinates = coordinates[:]
self.coordinates = list(coordinates)
def __repr__(self):
axes = ",".join(sorted(["%s=%s" % (name, value) for (name, value) in self.axes.items()]))
@ -48,11 +48,12 @@ class TupleVariation(object):
return self.coordinates == other.coordinates and self.axes == other.axes
def getUsedPoints(self):
result = set()
for i, point in enumerate(self.coordinates):
if point is not None:
result.add(i)
return result
# Empty set means "all points used".
if None not in self.coordinates:
return frozenset()
used = frozenset([i for i,p in enumerate(self.coordinates) if p is not None])
# Return None if no points used.
return used if used else None
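
For reference, a minimal sketch of the new getUsedPoints() convention, assuming the patched TupleVariation module is importable:

from fontTools.ttLib.tables.TupleVariation import TupleVariation

# frozenset() means every point carries a delta ("all points used").
assert TupleVariation({}, [(1, 0), (2, 0)]).getUsedPoints() == frozenset()
# Otherwise a frozenset of the indices that do carry deltas.
assert TupleVariation({}, [None, (2, 0)]).getUsedPoints() == frozenset({1})
# None means no point is used, i.e. there is nothing to encode.
assert TupleVariation({}, [None, None]).getUsedPoints() is None
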
def hasImpact(self):
"""Returns True if this TupleVariation has any visible impact.
@ -126,15 +127,21 @@ class TupleVariation(object):
log.warning("bad delta format: %s" %
", ".join(sorted(attrs.keys())))
def compile(self, axisTags, sharedCoordIndices, sharedPoints):
tupleData = []
def compile(self, axisTags, sharedCoordIndices={}, pointData=None):
assert set(self.axes.keys()) <= set(axisTags), ("Unknown axis tag found.", self.axes.keys(), axisTags)
assert all(tag in axisTags for tag in self.axes.keys()), ("Unknown axis tag found.", self.axes.keys(), axisTags)
tupleData = []
auxData = []
if pointData is None:
usedPoints = self.getUsedPoints()
if usedPoints is None: # Nothing to encode
return b'', b''
pointData = self.compilePoints(usedPoints)
coord = self.compileCoord(axisTags)
if coord in sharedCoordIndices:
flags = sharedCoordIndices[coord]
else:
flags = sharedCoordIndices.get(coord)
if flags is None:
flags = EMBEDDED_PEAK_TUPLE
tupleData.append(coord)
@ -143,26 +150,27 @@ class TupleVariation(object):
flags |= INTERMEDIATE_REGION
tupleData.append(intermediateCoord)
points = self.getUsedPoints()
if sharedPoints == points:
# Only use the shared points if they are identical to the actually used points
auxData = self.compileDeltas(sharedPoints)
usesSharedPoints = True
else:
# pointData of b'' implies "use shared points".
if pointData:
flags |= PRIVATE_POINT_NUMBERS
numPointsInGlyph = len(self.coordinates)
auxData = self.compilePoints(points, numPointsInGlyph) + self.compileDeltas(points)
usesSharedPoints = False
auxData.append(pointData)
tupleData = struct.pack('>HH', len(auxData), flags) + bytesjoin(tupleData)
return (tupleData, auxData, usesSharedPoints)
auxData.append(self.compileDeltas())
auxData = b''.join(auxData)
tupleData.insert(0, struct.pack('>HH', len(auxData), flags))
return b''.join(tupleData), auxData
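
The compile() signature change above ripples through every caller in this diff; a usage sketch of the new two-value return, mirroring the updated tests further down:

from fontTools.ttLib.tables.TupleVariation import TupleVariation

var = TupleVariation({"wght": (0.0, 0.5, 0.5)}, [(7, 4), (8, 5), (9, 6)])
axisTags = ["wght", "wdth"]

# Default: compile() derives and embeds the private point numbers itself.
tupleData, auxData = var.compile(axisTags)

# pointData=b'' means "refer to the shared point numbers" (no private list).
tupleData, auxData = var.compile(axisTags, pointData=b'')
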
def compileCoord(self, axisTags):
result = []
result = bytearray()
axes = self.axes
for axis in axisTags:
_minValue, value, _maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
result.append(struct.pack(">h", fl2fi(value, 14)))
return bytesjoin(result)
triple = axes.get(axis)
if triple is None:
result.extend(b'\0\0')
else:
result.extend(struct.pack(">h", fl2fi(triple[1], 14)))
return bytes(result)
def compileIntermediateCoord(self, axisTags):
needed = False
@ -175,13 +183,13 @@ class TupleVariation(object):
break
if not needed:
return None
minCoords = []
maxCoords = []
minCoords = bytearray()
maxCoords = bytearray()
for axis in axisTags:
minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
minCoords.append(struct.pack(">h", fl2fi(minValue, 14)))
maxCoords.append(struct.pack(">h", fl2fi(maxValue, 14)))
return bytesjoin(minCoords + maxCoords)
minCoords.extend(struct.pack(">h", fl2fi(minValue, 14)))
maxCoords.extend(struct.pack(">h", fl2fi(maxValue, 14)))
return minCoords + maxCoords
@staticmethod
def decompileCoord_(axisTags, data, offset):
@ -193,11 +201,15 @@ class TupleVariation(object):
return coord, pos
@staticmethod
def compilePoints(points, numPointsInGlyph):
def compilePoints(points):
# If the set consists of all points in the glyph, it gets encoded with
# a special encoding: a single zero byte.
if len(points) == numPointsInGlyph:
return b"\0"
#
# To use this optimization, the points passed in must be the empty set.
# The following two lines are not strictly necessary, as the main code
# below would emit the same byte; but this case is the most common, so
# handling it here is faster.
if not points:
return b'\0'
# In the 'gvar' table, the packing of point numbers is a little surprising.
# It consists of multiple runs, each being a delta-encoded list of integers.
@ -209,19 +221,24 @@ class TupleVariation(object):
points.sort()
numPoints = len(points)
result = bytearray()
# The binary representation starts with the total number of points in the set,
# encoded into one or two bytes depending on the value.
if numPoints < 0x80:
result = [bytechr(numPoints)]
result.append(numPoints)
else:
result = [bytechr((numPoints >> 8) | 0x80) + bytechr(numPoints & 0xff)]
result.append((numPoints >> 8) | 0x80)
result.append(numPoints & 0xff)
MAX_RUN_LENGTH = 127
pos = 0
lastValue = 0
while pos < numPoints:
run = io.BytesIO()
runLength = 0
headerPos = len(result)
result.append(0)
useByteEncoding = None
while pos < numPoints and runLength <= MAX_RUN_LENGTH:
curValue = points[pos]
@ -234,38 +251,36 @@ class TupleVariation(object):
# TODO This never switches back to a byte-encoding from a short-encoding.
# That's suboptimal.
if useByteEncoding:
run.write(bytechr(delta))
result.append(delta)
else:
run.write(bytechr(delta >> 8))
run.write(bytechr(delta & 0xff))
result.append(delta >> 8)
result.append(delta & 0xff)
lastValue = curValue
pos += 1
runLength += 1
if useByteEncoding:
runHeader = bytechr(runLength - 1)
result[headerPos] = runLength - 1
else:
runHeader = bytechr((runLength - 1) | POINTS_ARE_WORDS)
result.append(runHeader)
result.append(run.getvalue())
result[headerPos] = (runLength - 1) | POINTS_ARE_WORDS
return bytesjoin(result)
return result
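
As a worked example of this packing (the byte values match the expectations asserted in the updated TupleVariationTest further down):

from fontTools.ttLib.tables.TupleVariation import TupleVariation

# The empty set selects the special "all points" encoding: one zero byte.
assert bytes(TupleVariation.compilePoints(set())) == b"\x00"
# Points {9, 15}: count byte 0x02, then one byte-sized run of length 2
# (run header 0x01) holding the deltas 9 and 15 - 9 = 6.
assert bytes(TupleVariation.compilePoints({9, 15})) == b"\x02\x01\x09\x06"
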
@staticmethod
def decompilePoints_(numPoints, data, offset, tableTag):
"""(numPoints, data, offset, tableTag) --> ([point1, point2, ...], newOffset)"""
assert tableTag in ('cvar', 'gvar')
pos = offset
numPointsInData = byteord(data[pos])
numPointsInData = data[pos]
pos += 1
if (numPointsInData & POINTS_ARE_WORDS) != 0:
numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | byteord(data[pos])
numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | data[pos]
pos += 1
if numPointsInData == 0:
return (range(numPoints), pos)
result = []
while len(result) < numPointsInData:
runHeader = byteord(data[pos])
runHeader = data[pos]
pos += 1
numPointsInRun = (runHeader & POINT_RUN_COUNT_MASK) + 1
point = 0
@ -298,23 +313,28 @@ class TupleVariation(object):
(",".join(sorted(badPoints)), tableTag))
return (result, pos)
def compileDeltas(self, points):
def compileDeltas(self):
deltaX = []
deltaY = []
for p in sorted(list(points)):
c = self.coordinates[p]
if type(c) is tuple and len(c) == 2:
if self.getCoordWidth() == 2:
for c in self.coordinates:
if c is None:
continue
deltaX.append(c[0])
deltaY.append(c[1])
elif type(c) is int:
else:
for c in self.coordinates:
if c is None:
continue
deltaX.append(c)
elif c is not None:
raise TypeError("invalid type of delta: %s" % type(c))
return self.compileDeltaValues_(deltaX) + self.compileDeltaValues_(deltaY)
bytearr = bytearray()
self.compileDeltaValues_(deltaX, bytearr)
self.compileDeltaValues_(deltaY, bytearr)
return bytearr
@staticmethod
def compileDeltaValues_(deltas):
"""[value1, value2, value3, ...] --> bytestring
def compileDeltaValues_(deltas, bytearr=None):
"""[value1, value2, value3, ...] --> bytearray
Emits a sequence of runs. Each run starts with a
byte-sized header whose 6 least significant bits
@ -329,38 +349,41 @@ class TupleVariation(object):
bytes; if (header & 0x40) is set, the delta values are
signed 16-bit integers.
""" # Explaining the format because the 'gvar' spec is hard to understand.
stream = io.BytesIO()
if bytearr is None:
bytearr = bytearray()
pos = 0
while pos < len(deltas):
numDeltas = len(deltas)
while pos < numDeltas:
value = deltas[pos]
if value == 0:
pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, stream)
elif value >= -128 and value <= 127:
pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, stream)
pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, bytearr)
elif -128 <= value <= 127:
pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, bytearr)
else:
pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, stream)
return stream.getvalue()
pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, bytearr)
return bytearr
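
A quick worked example of the run encoding described in the docstring above, using a small local hexencode helper; the expected strings are taken from the test expectations further down in this diff:

from fontTools.ttLib.tables.TupleVariation import TupleVariation

def hexencode(b):
    return " ".join("%02X" % v for v in b)

# One 16-bit delta (header 0x40), then a run of two zero deltas (header 0x81).
assert hexencode(TupleVariation.compileDeltaValues_([0x6666, 0, 0])) == "40 66 66 81"
# Three small deltas pack as signed bytes after a 0x02 run header.
assert hexencode(TupleVariation.compileDeltaValues_([1, 2, 4])) == "02 01 02 04"
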
@staticmethod
def encodeDeltaRunAsZeroes_(deltas, offset, stream):
runLength = 0
def encodeDeltaRunAsZeroes_(deltas, offset, bytearr):
pos = offset
numDeltas = len(deltas)
while pos < numDeltas and runLength < 64 and deltas[pos] == 0:
while pos < numDeltas and deltas[pos] == 0:
pos += 1
runLength += 1
assert runLength >= 1 and runLength <= 64
stream.write(bytechr(DELTAS_ARE_ZERO | (runLength - 1)))
runLength = pos - offset
while runLength >= 64:
bytearr.append(DELTAS_ARE_ZERO | 63)
runLength -= 64
if runLength:
bytearr.append(DELTAS_ARE_ZERO | (runLength - 1))
return pos
@staticmethod
def encodeDeltaRunAsBytes_(deltas, offset, stream):
runLength = 0
def encodeDeltaRunAsBytes_(deltas, offset, bytearr):
pos = offset
numDeltas = len(deltas)
while pos < numDeltas and runLength < 64:
while pos < numDeltas:
value = deltas[pos]
if value < -128 or value > 127:
if not (-128 <= value <= 127):
break
# Within a byte-encoded run of deltas, a single zero
# is best stored literally as 0x00 value. However,
@ -373,19 +396,22 @@ class TupleVariation(object):
if value == 0 and pos+1 < numDeltas and deltas[pos+1] == 0:
break
pos += 1
runLength += 1
assert runLength >= 1 and runLength <= 64
stream.write(bytechr(runLength - 1))
for i in range(offset, pos):
stream.write(struct.pack('b', otRound(deltas[i])))
runLength = pos - offset
while runLength >= 64:
bytearr.append(63)
bytearr.extend(array.array('b', deltas[offset:offset+64]))
offset += 64
runLength -= 64
if runLength:
bytearr.append(runLength - 1)
bytearr.extend(array.array('b', deltas[offset:pos]))
return pos
@staticmethod
def encodeDeltaRunAsWords_(deltas, offset, stream):
runLength = 0
def encodeDeltaRunAsWords_(deltas, offset, bytearr):
pos = offset
numDeltas = len(deltas)
while pos < numDeltas and runLength < 64:
while pos < numDeltas:
value = deltas[pos]
# Within a word-encoded run of deltas, it is easiest
# to start a new run (with a different encoding)
@ -403,15 +429,22 @@ class TupleVariation(object):
# [0x6666, 2, 0x7777] becomes 7 bytes when storing
# the value literally (42 66 66 00 02 77 77), but 8 bytes
# when starting a new run (40 66 66 00 02 40 77 77).
isByteEncodable = lambda value: value >= -128 and value <= 127
if isByteEncodable(value) and pos+1 < numDeltas and isByteEncodable(deltas[pos+1]):
if (-128 <= value <= 127) and pos+1 < numDeltas and (-128 <= deltas[pos+1] <= 127):
break
pos += 1
runLength += 1
assert runLength >= 1 and runLength <= 64
stream.write(bytechr(DELTAS_ARE_WORDS | (runLength - 1)))
for i in range(offset, pos):
stream.write(struct.pack('>h', otRound(deltas[i])))
runLength = pos - offset
while runLength >= 64:
bytearr.append(DELTAS_ARE_WORDS | 63)
a = array.array('h', deltas[offset:offset+64])
if sys.byteorder != "big": a.byteswap()
bytearr.extend(a)
offset += 64
runLength -= 64
if runLength:
bytearr.append(DELTAS_ARE_WORDS | (runLength - 1))
a = array.array('h', deltas[offset:pos])
if sys.byteorder != "big": a.byteswap()
bytearr.extend(a)
return pos
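
The run-count field in a delta-run header is only six bits wide, which is why the encoders above emit long runs in chunks of 64 and byteswap the native-endian array('h') buffer on little-endian hosts. A small sketch of one full word-sized chunk, assuming DELTAS_ARE_WORDS == 0x40 as in the 'gvar' format:

import array
import sys

DELTAS_ARE_WORDS = 0x40            # assumed flag value, per the 'gvar' spec

deltas = [300] * 64                # one full run of word-sized deltas
a = array.array('h', deltas)       # native-endian signed 16-bit values
if sys.byteorder != "big":
    a.byteswap()                   # the font format stores big-endian
chunk = bytes([DELTAS_ARE_WORDS | (64 - 1)]) + a.tobytes()
assert len(chunk) == 1 + 64 * 2
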
@staticmethod
@ -420,7 +453,7 @@ class TupleVariation(object):
result = []
pos = offset
while len(result) < numDeltas:
runHeader = byteord(data[pos])
runHeader = data[pos]
pos += 1
numDeltasInRun = (runHeader & DELTA_RUN_COUNT_MASK) + 1
if (runHeader & DELTAS_ARE_ZERO) != 0:
@ -523,9 +556,9 @@ class TupleVariation(object):
# Shouldn't matter that this is different from fvar...?
axisTags = sorted(self.axes.keys())
tupleData, auxData, _ = self.compile(axisTags, [], None)
tupleData, auxData = self.compile(axisTags)
unoptimizedLength = len(tupleData) + len(auxData)
tupleData, auxData, _ = varOpt.compile(axisTags, [], None)
tupleData, auxData = varOpt.compile(axisTags)
optimizedLength = len(tupleData) + len(auxData)
if optimizedLength < unoptimizedLength:
@ -577,87 +610,72 @@ def decompileSharedTuples(axisTags, sharedTupleCount, data, offset):
return result
def compileSharedTuples(axisTags, variations):
coordCount = {}
def compileSharedTuples(axisTags, variations,
MAX_NUM_SHARED_COORDS = TUPLE_INDEX_MASK + 1):
coordCount = Counter()
for var in variations:
coord = var.compileCoord(axisTags)
coordCount[coord] = coordCount.get(coord, 0) + 1
sharedCoords = [(count, coord)
for (coord, count) in coordCount.items() if count > 1]
sharedCoords.sort(reverse=True)
MAX_NUM_SHARED_COORDS = TUPLE_INDEX_MASK + 1
sharedCoords = sharedCoords[:MAX_NUM_SHARED_COORDS]
return [c[1] for c in sharedCoords] # Strip off counts.
coordCount[coord] += 1
sharedCoords = coordCount.most_common(MAX_NUM_SHARED_COORDS)
return [c[0] for c in sharedCoords if c[1] > 1]
def compileTupleVariationStore(variations, pointCount,
axisTags, sharedTupleIndices,
useSharedPoints=True):
variations = [v for v in variations if v.hasImpact()]
if len(variations) == 0:
newVariations = []
pointDatas = []
# Compile all points and figure out sharing if desired
sharedPoints = None
# Collect, count, and compile point-sets for all variation sets
pointSetCount = defaultdict(int)
for v in variations:
points = v.getUsedPoints()
if points is None: # Empty variations
continue
pointSetCount[points] += 1
newVariations.append(v)
pointDatas.append(points)
variations = newVariations
del newVariations
if not variations:
return (0, b"", b"")
# Each glyph variation tuple modifies a set of control points. To
# indicate which exact points are getting modified, a single tuple
# can either refer to a shared set of points, or the tuple can
# supply its private point numbers. Because the impact of sharing
# can be positive (no need for a private point list) or negative
# (need to supply 0,0 deltas for unused points), it is not obvious
# how to determine which tuples should take their points from the
# shared pool versus have their own. Perhaps we should resort to
# brute force, and try all combinations? However, if a glyph has n
# variation tuples, we would need to try 2^n combinations (because
# each tuple may or may not be part of the shared set). How many
# variation tuples do glyphs have?
#
# Skia.ttf: {3: 1, 5: 11, 6: 41, 7: 62, 8: 387, 13: 1, 14: 3}
# JamRegular.ttf: {3: 13, 4: 122, 5: 1, 7: 4, 8: 1, 9: 1, 10: 1}
# BuffaloGalRegular.ttf: {1: 16, 2: 13, 4: 2, 5: 4, 6: 19, 7: 1, 8: 3, 9: 8}
# (Reading example: In Skia.ttf, 41 glyphs have 6 variation tuples).
#
n = len(variations[0].coordinates)
assert all(len(v.coordinates) == n for v in variations), "Variation sets have different sizes"
# Is this even worth optimizing? If we never use a shared point
# list, the private lists will consume 112K for Skia, 5K for
# BuffaloGalRegular, and 15K for JamRegular. If we always use a
# shared point list, the shared lists will consume 16K for Skia,
# 3K for BuffaloGalRegular, and 10K for JamRegular. However, in
# the latter case the delta arrays will become larger, but I
# haven't yet measured by how much. From gut feeling (which may be
# wrong), the optimum is to share some but not all points;
# however, then we would need to try all combinations.
#
# For the time being, we try two variants and then pick the better one:
# (a) each tuple supplies its own private set of points;
# (b) all tuples refer to a shared set of points, which consists of
# "every control point in the glyph that has explicit deltas".
usedPoints = set()
for v in variations:
usedPoints |= v.getUsedPoints()
compiledPoints = {pointSet:TupleVariation.compilePoints(pointSet)
for pointSet in pointSetCount}
tupleVariationCount = len(variations)
tuples = []
data = []
someTuplesSharePoints = False
sharedPointVariation = None # To keep track of a variation that uses shared points
for v in variations:
privateTuple, privateData, _ = v.compile(
axisTags, sharedTupleIndices, sharedPoints=None)
sharedTuple, sharedData, usesSharedPoints = v.compile(
axisTags, sharedTupleIndices, sharedPoints=usedPoints)
if useSharedPoints and (len(sharedTuple) + len(sharedData)) < (len(privateTuple) + len(privateData)):
tuples.append(sharedTuple)
data.append(sharedData)
someTuplesSharePoints |= usesSharedPoints
sharedPointVariation = v
else:
tuples.append(privateTuple)
data.append(privateData)
if someTuplesSharePoints:
# Use the last of the variations that share points for compiling the packed point data
data = sharedPointVariation.compilePoints(usedPoints, len(sharedPointVariation.coordinates)) + bytesjoin(data)
tupleVariationCount = TUPLES_SHARE_POINT_NUMBERS | len(tuples)
else:
data = bytesjoin(data)
tupleVariationCount = len(tuples)
tuples = bytesjoin(tuples)
if useSharedPoints:
# Find point-set which saves most bytes.
def key(pn):
pointSet = pn[0]
count = pn[1]
return len(compiledPoints[pointSet]) * (count - 1)
sharedPoints = max(pointSetCount.items(), key=key)[0]
data.append(compiledPoints[sharedPoints])
tupleVariationCount |= TUPLES_SHARE_POINT_NUMBERS
# b'' implies "use shared points"
pointDatas = [compiledPoints[points] if points != sharedPoints else b''
for points in pointDatas]
for v,p in zip(variations, pointDatas):
thisTuple, thisData = v.compile(axisTags, sharedTupleIndices, pointData=p)
tuples.append(thisTuple)
data.append(thisData)
tuples = b''.join(tuples)
data = b''.join(data)
return tupleVariationCount, tuples, data
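
Restated outside the diff, the sharing heuristic above picks the point-set whose compiled form, reused count - 1 extra times, saves the most bytes. A hypothetical standalone sketch (pick_shared_point_set is not a fontTools name; the byte strings are the compiled point-sets from the examples above):

def pick_shared_point_set(compiledPoints, pointSetCount):
    # Hoisting a point-set into the shared slot saves its size once per reuse.
    def bytes_saved(item):
        pointSet, count = item
        return len(compiledPoints[pointSet]) * (count - 1)
    return max(pointSetCount.items(), key=bytes_saved)[0]

compiled = {frozenset({9, 15}): b"\x02\x01\x09\x06", frozenset({7}): b"\x01\x00\x07"}
counts = {frozenset({9, 15}): 3, frozenset({7}): 1}
assert pick_shared_point_set(compiled, counts) == frozenset({9, 15})
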

Lib/fontTools/ttLib/tables/_c_v_a_r.py

@ -41,7 +41,7 @@ class table__c_v_a_r(DefaultTable.DefaultTable):
"tupleVariationCount": tupleVariationCount,
"offsetToData": CVAR_HEADER_SIZE + len(tuples),
}
return bytesjoin([
return b''.join([
sstruct.pack(CVAR_HEADER_FORMAT, header),
tuples,
data

Lib/fontTools/ttLib/tables/_g_l_y_f.py

@ -1,7 +1,7 @@
"""_g_l_y_f.py -- Converter classes for the 'glyf' table."""
from collections import namedtuple
from fontTools.misc.py23 import bytechr, byteord, bytesjoin, tostr
from fontTools.misc.py23 import tostr
from fontTools.misc import sstruct
from fontTools import ttLib
from fontTools import version
@ -117,7 +117,7 @@ class table__g_l_y_f(DefaultTable.DefaultTable):
currentLocation += len(glyphData)
locations[len(dataList)] = currentLocation
data = bytesjoin(dataList)
data = b''.join(dataList)
if 'loca' in ttFont:
ttFont['loca'].set(locations)
if 'maxp' in ttFont:
@ -488,8 +488,7 @@ def flagEncodeCoord(flag, mask, coord, coordBytes):
elif byteCount == -1:
coordBytes.append(-coord)
elif byteCount == 2:
coordBytes.append((coord >> 8) & 0xFF)
coordBytes.append(coord & 0xFF)
coordBytes.extend(struct.pack('>h', coord))
def flagEncodeCoords(flag, x, y, xBytes, yBytes):
flagEncodeCoord(flag, flagXsame|flagXShort, x, xBytes)
@ -516,7 +515,7 @@ CompositeMaxpValues = namedtuple('CompositeMaxpValues', ['nPoints', 'nContours',
class Glyph(object):
def __init__(self, data=""):
def __init__(self, data=b""):
if not data:
# empty char
self.numberOfContours = 0
@ -557,7 +556,7 @@ class Glyph(object):
else:
return self.data
if self.numberOfContours == 0:
return ""
return b''
if recalcBBoxes:
self.recalcBounds(glyfTable)
data = sstruct.pack(glyphHeaderFormat, self)
@ -608,7 +607,7 @@ class Glyph(object):
raise ttLib.TTLibError("can't mix composites and contours in glyph")
self.numberOfContours = self.numberOfContours + 1
coordinates = GlyphCoordinates()
flags = []
flags = bytearray()
for element in content:
if not isinstance(element, tuple):
continue
@ -616,11 +615,10 @@ class Glyph(object):
if name != "pt":
continue # ignore anything but "pt"
coordinates.append((safeEval(attrs["x"]), safeEval(attrs["y"])))
flag = not not safeEval(attrs["on"])
flag = bool(safeEval(attrs["on"]))
if "overlap" in attrs and bool(safeEval(attrs["overlap"])):
flag |= flagOverlapSimple
flags.append(flag)
flags = array.array("B", flags)
if not hasattr(self, "coordinates"):
self.coordinates = coordinates
self.flags = flags
@ -695,16 +693,14 @@ class Glyph(object):
if sys.byteorder != "big": endPtsOfContours.byteswap()
self.endPtsOfContours = endPtsOfContours.tolist()
data = data[2*self.numberOfContours:]
instructionLength, = struct.unpack(">h", data[:2])
data = data[2:]
pos = 2*self.numberOfContours
instructionLength, = struct.unpack(">h", data[pos:pos+2])
self.program = ttProgram.Program()
self.program.fromBytecode(data[:instructionLength])
data = data[instructionLength:]
self.program.fromBytecode(data[pos+2:pos+2+instructionLength])
pos += 2 + instructionLength
nCoordinates = self.endPtsOfContours[-1] + 1
flags, xCoordinates, yCoordinates = \
self.decompileCoordinatesRaw(nCoordinates, data)
self.decompileCoordinatesRaw(nCoordinates, data, pos)
# fill in repetitions and apply signs
self.coordinates = coordinates = GlyphCoordinates.zeros(nCoordinates)
@ -741,24 +737,26 @@ class Glyph(object):
assert yIndex == len(yCoordinates)
coordinates.relativeToAbsolute()
# discard all flags except "keepFlags"
self.flags = array.array("B", (f & keepFlags for f in flags))
for i in range(len(flags)):
flags[i] &= keepFlags
self.flags = flags
def decompileCoordinatesRaw(self, nCoordinates, data):
def decompileCoordinatesRaw(self, nCoordinates, data, pos=0):
# unpack flags and prepare unpacking of coordinates
flags = array.array("B", [0] * nCoordinates)
flags = bytearray(nCoordinates)
# Warning: deep Python trickery going on. We use the struct module to unpack
# the coordinates. We build a format string based on the flags, so we can
# unpack the coordinates in one struct.unpack() call.
xFormat = ">" # big endian
yFormat = ">" # big endian
i = j = 0
j = 0
while True:
flag = byteord(data[i])
i = i + 1
flag = data[pos]
pos += 1
repeat = 1
if flag & flagRepeat:
repeat = byteord(data[i]) + 1
i = i + 1
repeat = data[pos] + 1
pos += 1
for k in range(repeat):
if flag & flagXShort:
xFormat = xFormat + 'B'
@ -773,15 +771,14 @@ class Glyph(object):
if j >= nCoordinates:
break
assert j == nCoordinates, "bad glyph flags"
data = data[i:]
# unpack raw coordinates, krrrrrr-tching!
xDataLen = struct.calcsize(xFormat)
yDataLen = struct.calcsize(yFormat)
if len(data) - (xDataLen + yDataLen) >= 4:
if len(data) - pos - (xDataLen + yDataLen) >= 4:
log.warning(
"too much glyph data: %d excess bytes", len(data) - (xDataLen + yDataLen))
xCoordinates = struct.unpack(xFormat, data[:xDataLen])
yCoordinates = struct.unpack(yFormat, data[xDataLen:xDataLen+yDataLen])
"too much glyph data: %d excess bytes", len(data) - pos - (xDataLen + yDataLen))
xCoordinates = struct.unpack(xFormat, data[pos:pos+xDataLen])
yCoordinates = struct.unpack(yFormat, data[pos+xDataLen:pos+xDataLen+yDataLen])
return flags, xCoordinates, yCoordinates
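
A toy illustration of the flag-driven format string built above; flagXShort (0x02) is the real 'glyf' flag bit, but the helper below is a simplified, hypothetical stand-in (the real code also handles the "same as previous" flags, which contribute no bytes at all):

import struct

flagXShort = 0x02                  # x coordinate is stored as a single byte

def unpack_x_coords(flags, data):
    # One struct.unpack() call over a format string assembled from the flags.
    fmt = ">" + "".join("B" if (f & flagXShort) else "h" for f in flags)
    return struct.unpack(fmt, data[:struct.calcsize(fmt)])

flags = [flagXShort, 0, flagXShort]
data = bytes([5]) + struct.pack(">h", -300) + bytes([7])
assert unpack_x_coords(flags, data) == (5, -300, 7)
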
def compileComponents(self, glyfTable):
@ -811,9 +808,7 @@ class Glyph(object):
data.append(instructions)
deltas = self.coordinates.copy()
if deltas.isFloat():
# Warn?
deltas.toInt()
deltas.toInt()
deltas.absoluteToRelative()
# TODO(behdad): Add a configuration option for this?
@ -821,14 +816,14 @@ class Glyph(object):
#deltas = self.compileDeltasOptimal(self.flags, deltas)
data.extend(deltas)
return bytesjoin(data)
return b''.join(data)
def compileDeltasGreedy(self, flags, deltas):
# Implements greedy algorithm for packing coordinate deltas:
# uses shortest representation one coordinate at a time.
compressedflags = []
xPoints = []
yPoints = []
compressedFlags = bytearray()
compressedXs = bytearray()
compressedYs = bytearray()
lastflag = None
repeat = 0
for flag,(x,y) in zip(flags, deltas):
@ -842,9 +837,9 @@ class Glyph(object):
flag = flag | flagXsame
else:
x = -x
xPoints.append(bytechr(x))
compressedXs.append(x)
else:
xPoints.append(struct.pack(">h", x))
compressedXs.extend(struct.pack('>h', x))
# do y
if y == 0:
flag = flag | flagYsame
@ -854,24 +849,21 @@ class Glyph(object):
flag = flag | flagYsame
else:
y = -y
yPoints.append(bytechr(y))
compressedYs.append(y)
else:
yPoints.append(struct.pack(">h", y))
compressedYs.extend(struct.pack('>h', y))
# handle repeating flags
if flag == lastflag and repeat != 255:
repeat = repeat + 1
if repeat == 1:
compressedflags.append(flag)
compressedFlags.append(flag)
else:
compressedflags[-2] = flag | flagRepeat
compressedflags[-1] = repeat
compressedFlags[-2] = flag | flagRepeat
compressedFlags[-1] = repeat
else:
repeat = 0
compressedflags.append(flag)
compressedFlags.append(flag)
lastflag = flag
compressedFlags = array.array("B", compressedflags).tobytes()
compressedXs = bytesjoin(xPoints)
compressedYs = bytesjoin(yPoints)
return (compressedFlags, compressedXs, compressedYs)
def compileDeltasOptimal(self, flags, deltas):
@ -902,9 +894,9 @@ class Glyph(object):
flags.append(flag)
flags.reverse()
compressedFlags = array.array("B")
compressedXs = array.array("B")
compressedYs = array.array("B")
compressedFlags = bytearray()
compressedXs = bytearray()
compressedYs = bytearray()
coords = iter(deltas)
ff = []
for flag in flags:
@ -924,9 +916,6 @@ class Glyph(object):
raise Exception("internal error")
except StopIteration:
pass
compressedFlags = compressedFlags.tobytes()
compressedXs = compressedXs.tobytes()
compressedYs = compressedYs.tobytes()
return (compressedFlags, compressedXs, compressedYs)
@ -1006,7 +995,7 @@ class Glyph(object):
elif self.isComposite():
# it's a composite
allCoords = GlyphCoordinates()
allFlags = array.array("B")
allFlags = bytearray()
allEndPts = []
for compo in self.components:
g = glyfTable[compo.glyphName]
@ -1051,7 +1040,7 @@ class Glyph(object):
allFlags.extend(flags)
return allCoords, allEndPts, allFlags
else:
return GlyphCoordinates(), [], array.array("B")
return GlyphCoordinates(), [], bytearray()
def getComponentNames(self, glyfTable):
if not hasattr(self, "data"):
@ -1101,7 +1090,7 @@ class Glyph(object):
if not self.data:
return
numContours = struct.unpack(">h", self.data[:2])[0]
data = array.array("B", self.data)
data = bytearray(self.data)
i = 10
if numContours >= 0:
i += 2 * numContours # endPtsOfContours
@ -1170,7 +1159,7 @@ class Glyph(object):
# Remove padding
data = data[:i]
self.data = data.tobytes()
self.data = data
def removeHinting(self):
self.trim (remove_hinting=True)
@ -1432,41 +1421,22 @@ class GlyphComponent(object):
class GlyphCoordinates(object):
def __init__(self, iterable=[], typecode="h"):
self._a = array.array(typecode)
def __init__(self, iterable=[]):
self._a = array.array('d')
self.extend(iterable)
@property
def array(self):
return self._a
def isFloat(self):
return self._a.typecode == 'd'
def _ensureFloat(self):
if self.isFloat():
return
# The conversion to list() is to work around Jython bug
self._a = array.array("d", list(self._a))
def _checkFloat(self, p):
if self.isFloat():
return p
if any(v > 0x7FFF or v < -0x8000 for v in p):
self._ensureFloat()
return p
if any(isinstance(v, float) for v in p):
p = [int(v) if int(v) == v else v for v in p]
if any(isinstance(v, float) for v in p):
self._ensureFloat()
return p
@staticmethod
def zeros(count):
return GlyphCoordinates([(0,0)] * count)
g = GlyphCoordinates()
g._a.frombytes(bytes(count * 2 * g._a.itemsize))
return g
def copy(self):
c = GlyphCoordinates(typecode=self._a.typecode)
c = GlyphCoordinates()
c._a.extend(self._a)
return c
@ -1477,7 +1447,11 @@ class GlyphCoordinates(object):
if isinstance(k, slice):
indices = range(*k.indices(len(self)))
return [self[i] for i in indices]
return self._a[2*k],self._a[2*k+1]
a = self._a
x = a[2*k]
y = a[2*k+1]
return (int(x) if x.is_integer() else x,
int(y) if y.is_integer() else y)
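
With the coordinate array now always backed by doubles, whole-number values are presented back as Python ints; a small usage sketch consistent with the doctests below:

from fontTools.ttLib.tables._g_l_y_f import GlyphCoordinates

g = GlyphCoordinates([(1, 2)])
assert g.array.typecode == "d"     # storage is always array('d') now
assert g[0] == (1, 2)              # integral values come back as ints
g += (0.5, 0)
assert g[0] == (1.5, 2)            # genuine floats pass through unchanged
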
def __setitem__(self, k, v):
if isinstance(k, slice):
@ -1486,7 +1460,6 @@ class GlyphCoordinates(object):
for j,i in enumerate(indices):
self[i] = v[j]
return
v = self._checkFloat(v)
self._a[2*k],self._a[2*k+1] = v
def __delitem__(self, i):
@ -1498,69 +1471,71 @@ class GlyphCoordinates(object):
return 'GlyphCoordinates(['+','.join(str(c) for c in self)+'])'
def append(self, p):
p = self._checkFloat(p)
self._a.extend(tuple(p))
def extend(self, iterable):
for p in iterable:
p = self._checkFloat(p)
self._a.extend(p)
def toInt(self, *, round=otRound):
if not self.isFloat():
return
a = array.array("h")
for n in self._a:
a.append(round(n))
self._a = a
a = self._a
for i in range(len(a)):
a[i] = round(a[i])
def relativeToAbsolute(self):
a = self._a
x,y = 0,0
for i in range(len(a) // 2):
x = a[2*i ] + x
y = a[2*i+1] + y
self[i] = (x, y)
for i in range(0, len(a), 2):
a[i ] = x = a[i ] + x
a[i+1] = y = a[i+1] + y
def absoluteToRelative(self):
a = self._a
x,y = 0,0
for i in range(len(a) // 2):
dx = a[2*i ] - x
dy = a[2*i+1] - y
x = a[2*i ]
y = a[2*i+1]
self[i] = (dx, dy)
for i in range(0, len(a), 2):
nx = a[i ]
ny = a[i+1]
a[i] = nx - x
a[i+1] = ny - y
x = nx
y = ny
def translate(self, p):
"""
>>> GlyphCoordinates([(1,2)]).translate((.5,0))
"""
(x,y) = self._checkFloat(p)
x,y = p
if x == 0 and y == 0:
return
a = self._a
for i in range(len(a) // 2):
self[i] = (a[2*i] + x, a[2*i+1] + y)
for i in range(0, len(a), 2):
a[i] += x
a[i+1] += y
def scale(self, p):
"""
>>> GlyphCoordinates([(1,2)]).scale((.5,0))
"""
(x,y) = self._checkFloat(p)
x,y = p
if x == 1 and y == 1:
return
a = self._a
for i in range(len(a) // 2):
self[i] = (a[2*i] * x, a[2*i+1] * y)
for i in range(0, len(a), 2):
a[i] *= x
a[i+1] *= y
def transform(self, t):
"""
>>> GlyphCoordinates([(1,2)]).transform(((.5,0),(.2,.5)))
"""
a = self._a
for i in range(len(a) // 2):
x = a[2*i ]
y = a[2*i+1]
for i in range(0, len(a), 2):
x = a[i ]
y = a[i+1]
px = x * t[0][0] + y * t[1][0]
py = x * t[0][1] + y * t[1][1]
self[i] = (px, py)
a[i] = px
a[i+1] = py
def __eq__(self, other):
"""
@ -1645,23 +1620,22 @@ class GlyphCoordinates(object):
>>> g = GlyphCoordinates([(1,2)])
>>> g += (.5,0)
>>> g
GlyphCoordinates([(1.5, 2.0)])
GlyphCoordinates([(1.5, 2)])
>>> g2 = GlyphCoordinates([(3,4)])
>>> g += g2
>>> g
GlyphCoordinates([(4.5, 6.0)])
GlyphCoordinates([(4.5, 6)])
"""
if isinstance(other, tuple):
assert len(other) == 2
self.translate(other)
return self
if isinstance(other, GlyphCoordinates):
if other.isFloat(): self._ensureFloat()
other = other._a
a = self._a
assert len(a) == len(other)
for i in range(len(a) // 2):
self[i] = (a[2*i] + other[2*i], a[2*i+1] + other[2*i+1])
for i in range(len(a)):
a[i] += other[i]
return self
return NotImplemented
@ -1670,23 +1644,22 @@ class GlyphCoordinates(object):
>>> g = GlyphCoordinates([(1,2)])
>>> g -= (.5,0)
>>> g
GlyphCoordinates([(0.5, 2.0)])
GlyphCoordinates([(0.5, 2)])
>>> g2 = GlyphCoordinates([(3,4)])
>>> g -= g2
>>> g
GlyphCoordinates([(-2.5, -2.0)])
GlyphCoordinates([(-2.5, -2)])
"""
if isinstance(other, tuple):
assert len(other) == 2
self.translate((-other[0],-other[1]))
return self
if isinstance(other, GlyphCoordinates):
if other.isFloat(): self._ensureFloat()
other = other._a
a = self._a
assert len(a) == len(other)
for i in range(len(a) // 2):
self[i] = (a[2*i] - other[2*i], a[2*i+1] - other[2*i+1])
for i in range(len(a)):
a[i] -= other[i]
return self
return NotImplemented
@ -1696,20 +1669,23 @@ class GlyphCoordinates(object):
>>> g *= (2,.5)
>>> g *= 2
>>> g
GlyphCoordinates([(4.0, 2.0)])
GlyphCoordinates([(4, 2)])
>>> g = GlyphCoordinates([(1,2)])
>>> g *= 2
>>> g
GlyphCoordinates([(2, 4)])
"""
if isinstance(other, Number):
other = (other, other)
if isinstance(other, tuple):
if other == (1,1):
return self
assert len(other) == 2
self.scale(other)
return self
if isinstance(other, Number):
if other == 1:
return self
a = self._a
for i in range(len(a)):
a[i] *= other
return self
return NotImplemented
def __itruediv__(self, other):
@ -1718,7 +1694,7 @@ class GlyphCoordinates(object):
>>> g /= (.5,1.5)
>>> g /= 2
>>> g
GlyphCoordinates([(1.0, 1.0)])
GlyphCoordinates([(1, 1)])
"""
if isinstance(other, Number):
other = (other, other)
@ -1750,20 +1726,6 @@ class GlyphCoordinates(object):
__nonzero__ = __bool__
def reprflag(flag):
bin = ""
if isinstance(flag, str):
flag = byteord(flag)
while flag:
if flag & 0x01:
bin = "1" + bin
else:
bin = "0" + bin
flag = flag >> 1
bin = (14 - len(bin)) * "0" + bin
return bin
if __name__ == "__main__":
import doctest, sys
sys.exit(doctest.testmod().failed)

Lib/fontTools/ttLib/tables/_g_v_a_r.py

@ -1,4 +1,3 @@
from fontTools.misc.py23 import bytesjoin
from fontTools.misc import sstruct
from fontTools.misc.textTools import safeEval
from . import DefaultTable
@ -76,7 +75,7 @@ class table__g_v_a_r(DefaultTable.DefaultTable):
result = [compiledHeader, compiledOffsets]
result.extend(sharedTuples)
result.extend(compiledGlyphs)
return bytesjoin(result)
return b''.join(result)
def compileGlyphs_(self, ttFont, axisTags, sharedCoordIndices):
result = []
@ -214,12 +213,14 @@ def compileGlyph_(variations, pointCount, axisTags, sharedCoordIndices):
variations, pointCount, axisTags, sharedCoordIndices)
if tupleVariationCount == 0:
return b""
result = (
struct.pack(">HH", tupleVariationCount, 4 + len(tuples)) + tuples + data
)
if len(result) % 2 != 0:
result = result + b"\0" # padding
return result
result = [
struct.pack(">HH", tupleVariationCount, 4 + len(tuples)),
tuples,
data
]
if (len(tuples) + len(data)) % 2 != 0:
result.append(b"\0") # padding
return b''.join(result)
def decompileGlyph_(pointCount, sharedTuples, axisTags, data):

Lib/fontTools/varLib/__init__.py

@ -284,9 +284,9 @@ def _add_gvar(font, masterModel, master_ttfs, tolerance=0.5, optimize=True):
var_opt = TupleVariation(support, delta_opt)
axis_tags = sorted(support.keys()) # Shouldn't matter that this is different from fvar...?
tupleData, auxData, _ = var.compile(axis_tags, [], None)
tupleData, auxData = var.compile(axis_tags)
unoptimized_len = len(tupleData) + len(auxData)
tupleData, auxData, _ = var_opt.compile(axis_tags, [], None)
tupleData, auxData = var_opt.compile(axis_tags)
optimized_len = len(tupleData) + len(auxData)
if optimized_len < unoptimized_len:

Lib/fontTools/varLib/models.py

@ -375,7 +375,10 @@ class VariationModel(object):
for i,weights in enumerate(self.deltaWeights):
delta = masterValues[mapping[i]]
for j,weight in weights.items():
delta -= out[j] * weight
if weight == 1:
delta -= out[j]
else:
delta -= out[j] * weight
out.append(round(delta))
return out

Tests/ttLib/tables/TupleVariation_test.py

@ -231,8 +231,7 @@ class TupleVariationTest(unittest.TestCase):
[(7,4), (8,5), (9,6)])
axisTags = ["wght", "wdth"]
sharedPeakIndices = { var.compileCoord(axisTags): 0x77 }
tup, deltas, _ = var.compile(axisTags, sharedPeakIndices,
sharedPoints={0,1,2})
tup, deltas = var.compile(axisTags, sharedPeakIndices, pointData=b'')
# len(deltas)=8; flags=None; tupleIndex=0x77
# embeddedPeaks=[]; intermediateCoord=[]
self.assertEqual("00 08 00 77", hexencode(tup))
@ -246,8 +245,7 @@ class TupleVariationTest(unittest.TestCase):
[(7,4), (8,5), (9,6)])
axisTags = ["wght", "wdth"]
sharedPeakIndices = { var.compileCoord(axisTags): 0x77 }
tup, deltas, _ = var.compile(axisTags, sharedPeakIndices,
sharedPoints={0,1,2})
tup, deltas = var.compile(axisTags, sharedPeakIndices, pointData=b'')
# len(deltas)=8; flags=INTERMEDIATE_REGION; tupleIndex=0x77
# embeddedPeak=[]; intermediateCoord=[(0.3, 0.1), (0.7, 0.9)]
self.assertEqual("00 08 40 77 13 33 06 66 2C CD 39 9A", hexencode(tup))
@ -261,8 +259,7 @@ class TupleVariationTest(unittest.TestCase):
[(7,4), (8,5), (9,6)])
axisTags = ["wght", "wdth"]
sharedPeakIndices = { var.compileCoord(axisTags): 0x77 }
tup, deltas, _ = var.compile(axisTags, sharedPeakIndices,
sharedPoints=None)
tup, deltas = var.compile(axisTags, sharedPeakIndices)
# len(deltas)=9; flags=PRIVATE_POINT_NUMBERS; tupleIndex=0x77
# embeddedPeak=[]; intermediateCoord=[]
self.assertEqual("00 09 20 77", hexencode(tup))
@ -277,8 +274,7 @@ class TupleVariationTest(unittest.TestCase):
[(7,4), (8,5), (9,6)])
axisTags = ["wght", "wdth"]
sharedPeakIndices = { var.compileCoord(axisTags): 0x77 }
tuple, deltas, _ = var.compile(axisTags,
sharedPeakIndices, sharedPoints=None)
tuple, deltas = var.compile(axisTags, sharedPeakIndices)
# len(deltas)=9; flags=PRIVATE_POINT_NUMBERS; tupleIndex=0x77
# embeddedPeak=[]; intermediateCoord=[(0.0, 0.0), (1.0, 1.0)]
self.assertEqual("00 09 60 77 00 00 00 00 40 00 40 00",
@ -292,8 +288,7 @@ class TupleVariationTest(unittest.TestCase):
var = TupleVariation(
{"wght": (0.0, 0.5, 0.5), "wdth": (0.0, 0.8, 0.8)},
[(7,4), (8,5), (9,6)])
tup, deltas, _ = var.compile(axisTags=["wght", "wdth"],
sharedCoordIndices={}, sharedPoints={0, 1, 2})
tup, deltas = var.compile(axisTags=["wght", "wdth"], pointData=b'')
# len(deltas)=8; flags=EMBEDDED_PEAK_TUPLE
# embeddedPeak=[(0.5, 0.8)]; intermediateCoord=[]
self.assertEqual("00 08 80 00 20 00 33 33", hexencode(tup))
@ -305,8 +300,7 @@ class TupleVariationTest(unittest.TestCase):
var = TupleVariation(
{"wght": (0.0, 0.5, 0.5), "wdth": (0.0, 0.8, 0.8)},
[3, 1, 4])
tup, deltas, _ = var.compile(axisTags=["wght", "wdth"],
sharedCoordIndices={}, sharedPoints={0, 1, 2})
tup, deltas = var.compile(axisTags=["wght", "wdth"], pointData=b'')
# len(deltas)=4; flags=EMBEDDED_PEAK_TUPLE
# embeddedPeak=[(0.5, 0.8)]; intermediateCoord=[]
self.assertEqual("00 04 80 00 20 00 33 33", hexencode(tup))
@ -317,9 +311,7 @@ class TupleVariationTest(unittest.TestCase):
var = TupleVariation(
{"wght": (0.0, 0.5, 1.0), "wdth": (0.0, 0.8, 0.8)},
[(7,4), (8,5), (9,6)])
tup, deltas, _ = var.compile(axisTags=["wght", "wdth"],
sharedCoordIndices={},
sharedPoints={0, 1, 2})
tup, deltas = var.compile(axisTags=["wght", "wdth"], pointData=b'')
# len(deltas)=8; flags=EMBEDDED_PEAK_TUPLE
# embeddedPeak=[(0.5, 0.8)]; intermediateCoord=[(0.0, 0.0), (1.0, 0.8)]
self.assertEqual("00 08 C0 00 20 00 33 33 00 00 00 00 40 00 33 33",
@ -332,8 +324,7 @@ class TupleVariationTest(unittest.TestCase):
var = TupleVariation(
{"wght": (0.0, 0.5, 0.5), "wdth": (0.0, 0.8, 0.8)},
[(7,4), (8,5), (9,6)])
tup, deltas, _ = var.compile(
axisTags=["wght", "wdth"], sharedCoordIndices={}, sharedPoints=None)
tup, deltas = var.compile(axisTags=["wght", "wdth"])
# len(deltas)=9; flags=PRIVATE_POINT_NUMBERS|EMBEDDED_PEAK_TUPLE
# embeddedPeak=[(0.5, 0.8)]; intermediateCoord=[]
self.assertEqual("00 09 A0 00 20 00 33 33", hexencode(tup))
@ -346,8 +337,7 @@ class TupleVariationTest(unittest.TestCase):
var = TupleVariation(
{"wght": (0.0, 0.5, 0.5), "wdth": (0.0, 0.8, 0.8)},
[7, 8, 9])
tup, deltas, _ = var.compile(
axisTags=["wght", "wdth"], sharedCoordIndices={}, sharedPoints=None)
tup, deltas = var.compile(axisTags=["wght", "wdth"])
# len(deltas)=5; flags=PRIVATE_POINT_NUMBERS|EMBEDDED_PEAK_TUPLE
# embeddedPeak=[(0.5, 0.8)]; intermediateCoord=[]
self.assertEqual("00 05 A0 00 20 00 33 33", hexencode(tup))
@ -359,9 +349,7 @@ class TupleVariationTest(unittest.TestCase):
var = TupleVariation(
{"wght": (0.4, 0.5, 0.6), "wdth": (0.7, 0.8, 0.9)},
[(7,4), (8,5), (9,6)])
tup, deltas, _ = var.compile(
axisTags = ["wght", "wdth"],
sharedCoordIndices={}, sharedPoints=None)
tup, deltas = var.compile(axisTags = ["wght", "wdth"])
# len(deltas)=9;
# flags=PRIVATE_POINT_NUMBERS|INTERMEDIATE_REGION|EMBEDDED_PEAK_TUPLE
# embeddedPeak=(0.5, 0.8); intermediateCoord=[(0.4, 0.7), (0.6, 0.9)]
@ -376,9 +364,7 @@ class TupleVariationTest(unittest.TestCase):
var = TupleVariation(
{"wght": (0.4, 0.5, 0.6), "wdth": (0.7, 0.8, 0.9)},
[7, 8, 9])
tup, deltas, _ = var.compile(
axisTags = ["wght", "wdth"],
sharedCoordIndices={}, sharedPoints=None)
tup, deltas = var.compile(axisTags = ["wght", "wdth"])
# len(deltas)=5;
# flags=PRIVATE_POINT_NUMBERS|INTERMEDIATE_REGION|EMBEDDED_PEAK_TUPLE
# embeddedPeak=(0.5, 0.8); intermediateCoord=[(0.4, 0.7), (0.6, 0.9)]
@ -415,8 +401,8 @@ class TupleVariationTest(unittest.TestCase):
self.assertEqual("7F B9 80 35", hexencode(var.compileCoord(["wght", "wdth"])))
def test_compilePoints(self):
compilePoints = lambda p: TupleVariation.compilePoints(set(p), numPointsInGlyph=999)
self.assertEqual("00", hexencode(compilePoints(range(999)))) # all points in glyph
compilePoints = lambda p: TupleVariation.compilePoints(set(p))
self.assertEqual("00", hexencode(compilePoints(set()))) # all points in glyph
self.assertEqual("01 00 07", hexencode(compilePoints([7])))
self.assertEqual("01 80 FF FF", hexencode(compilePoints([65535])))
self.assertEqual("02 01 09 06", hexencode(compilePoints([9, 15])))
@ -488,7 +474,7 @@ class TupleVariationTest(unittest.TestCase):
def test_decompilePoints_roundTrip(self):
numPointsInGlyph = 500 # greater than 255, so we also exercise code path for 16-bit encoding
compile = lambda points: TupleVariation.compilePoints(points, numPointsInGlyph)
compile = lambda points: TupleVariation.compilePoints(points)
decompile = lambda data: set(TupleVariation.decompilePoints_(numPointsInGlyph, data, 0, "gvar")[0])
for i in range(50):
points = set(random.sample(range(numPointsInGlyph), 30))
@ -496,18 +482,17 @@ class TupleVariationTest(unittest.TestCase):
"failed round-trip decompile/compilePoints; points=%s" % points)
allPoints = set(range(numPointsInGlyph))
self.assertSetEqual(allPoints, decompile(compile(allPoints)))
self.assertSetEqual(allPoints, decompile(compile(set())))
def test_compileDeltas_points(self):
var = TupleVariation({}, [(0,0), (1, 0), (2, 0), None, (4, 0), (5, 0)])
points = {1, 2, 3, 4}
var = TupleVariation({}, [None, (1, 0), (2, 0), None, (4, 0), None])
# deltaX for points: [1, 2, 4]; deltaY for points: [0, 0, 0]
self.assertEqual("02 01 02 04 82", hexencode(var.compileDeltas(points)))
self.assertEqual("02 01 02 04 82", hexencode(var.compileDeltas()))
def test_compileDeltas_constants(self):
var = TupleVariation({}, [0, 1, 2, None, 4, 5])
cvts = {1, 2, 3, 4}
var = TupleVariation({}, [None, 1, 2, None, 4, None])
# delta for cvts: [1, 2, 4]
self.assertEqual("02 01 02 04", hexencode(var.compileDeltas(cvts)))
self.assertEqual("02 01 02 04", hexencode(var.compileDeltas()))
def test_compileDeltaValues(self):
compileDeltaValues = lambda values: hexencode(TupleVariation.compileDeltaValues_(values))
@ -549,11 +534,6 @@ class TupleVariationTest(unittest.TestCase):
# words, zeroes
self.assertEqual("40 66 66 80", compileDeltaValues([0x6666, 0]))
self.assertEqual("40 66 66 81", compileDeltaValues([0x6666, 0, 0]))
# bytes or words from floats
self.assertEqual("00 01", compileDeltaValues([1.1]))
self.assertEqual("00 02", compileDeltaValues([1.9]))
self.assertEqual("40 66 66", compileDeltaValues([0x6666 + 0.1]))
self.assertEqual("40 66 66", compileDeltaValues([0x6665 + 0.9]))
def test_decompileDeltas(self):
decompileDeltas = TupleVariation.decompileDeltas_

Tests/ttLib/tables/_g_l_y_f_test.py

@ -173,10 +173,9 @@ class GlyphCoordinatesTest(object):
assert g[0][0] == otRound(afloat)
def test__checkFloat_overflow(self):
g = GlyphCoordinates([(1, 1)], typecode="h")
g = GlyphCoordinates([(1, 1)])
g.append((0x8000, 0))
assert g.array.typecode == "d"
assert g.array == array.array("d", [1.0, 1.0, 32768.0, 0.0])
assert list(g.array) == [1.0, 1.0, 32768.0, 0.0]
CURR_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))