"""
Tool to find wrong contour order between different masters, and
other interpolatability (or lack thereof) issues.

Call as:
$ fonttools varLib.interpolatable font1 font2 ...
"""

from fontTools.pens.basePen import AbstractPen, BasePen
from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
from fontTools.pens.recordingPen import RecordingPen
from fontTools.pens.statisticsPen import StatisticsPen, StatisticsControlPen
from fontTools.pens.momentsPen import OpenContourError
from fontTools.varLib.models import piecewiseLinearMap, normalizeLocation
from fontTools.misc.fixedTools import floatToFixedToStr
from fontTools.misc.transform import Transform
from collections import defaultdict, deque
from types import SimpleNamespace
from functools import wraps
from pprint import pformat
from math import sqrt, copysign, atan2, pi
import itertools
import logging

log = logging.getLogger("fontTools.varLib.interpolatable")

DEFAULT_TOLERANCE = 0.95
DEFAULT_KINKINESS = 0.5
DEFAULT_KINKINESS_LENGTH = 0.002  # ratio of UPEM
DEFAULT_UPEM = 1000


def _rot_list(l, k):
    """Rotate list by k items forward, i.e. the item at position 0 will be
    at position k in the returned list.  Negative k is allowed."""
    return l[-k:] + l[:-k]


class PerContourPen(BasePen):
    def __init__(self, Pen, glyphset=None):
        BasePen.__init__(self, glyphset)
        self._glyphset = glyphset
        self._Pen = Pen
        self._pen = None
        self.value = []

    def _moveTo(self, p0):
        self._newItem()
        self._pen.moveTo(p0)

    def _lineTo(self, p1):
        self._pen.lineTo(p1)

    def _qCurveToOne(self, p1, p2):
        self._pen.qCurveTo(p1, p2)

    def _curveToOne(self, p1, p2, p3):
        self._pen.curveTo(p1, p2, p3)

    def _closePath(self):
        self._pen.closePath()
        self._pen = None

    def _endPath(self):
        self._pen.endPath()
        self._pen = None

    def _newItem(self):
        self._pen = pen = self._Pen()
        self.value.append(pen)


class PerContourOrComponentPen(PerContourPen):
    def addComponent(self, glyphName, transformation):
        self._newItem()
        self.value[-1].addComponent(glyphName, transformation)
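
# A minimal usage sketch (hypothetical glyphset): each contour or component of
# a glyph ends up as its own RecordingPen in ``.value``:
#
#     pen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
#     glyphset["A"].draw(pen)
#     for contour in pen.value:
#         print(contour.value)  # e.g. [("moveTo", ((10, 0),)), ("lineTo", ...), ...]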


class SimpleRecordingPointPen(AbstractPointPen):
    def __init__(self):
        self.value = []

    def beginPath(self, identifier=None, **kwargs):
        pass

    def endPath(self) -> None:
        pass

    def addPoint(self, pt, segmentType=None):
        self.value.append((pt, segmentType is not None))


def _vdiff_hypot2(v0, v1):
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d * d
    return s


def _vdiff_hypot2_complex(v0, v1):
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d.real * d.real + d.imag * d.imag
        # This does the same but seems to be slower:
        # s += (d * d.conjugate()).real
    return s


def _hypot2_complex(d):
    return d.real * d.real + d.imag * d.imag


def _matching_cost(G, matching):
    return sum(G[i][j] for i, j in enumerate(matching))


def min_cost_perfect_bipartite_matching_scipy(G):
    n = len(G)
    rows, cols = linear_sum_assignment(G)
    assert (rows == list(range(n))).all()
    return list(cols), _matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_munkres(G):
    n = len(G)
    cols = [None] * n
    for row, col in Munkres().compute(G):
        cols[row] = col
    return cols, _matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_bruteforce(G):
    n = len(G)

    if n > 6:
        raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")

    # Otherwise just brute-force
    permutations = itertools.permutations(range(n))
    best = list(next(permutations))
    best_cost = _matching_cost(G, best)
    for p in permutations:
        cost = _matching_cost(G, p)
        if cost < best_cost:
            best, best_cost = list(p), cost
    return best, best_cost


try:
    from scipy.optimize import linear_sum_assignment

    min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
except ImportError:
    try:
        from munkres import Munkres

        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_munkres
        )
    except ImportError:
        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_bruteforce
        )
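
# A minimal sketch of how the selected matcher is used (hypothetical 2x2 cost
# matrix; rows are contours of one master, columns of the other):
#
#     G = [[0.0, 5.0],
#          [5.0, 0.0]]
#     matching, cost = min_cost_perfect_bipartite_matching(G)
#     # matching == [0, 1] and cost == 0.0: the identity assignment is cheapest.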


def _contour_vector_from_stats(stats):
    # Don't change the order of items here.
    # It's okay to add to the end, but otherwise, other
    # code depends on it. Search for "covariance".
    size = sqrt(abs(stats.area))
    return (
        copysign(size, stats.area),
        stats.meanX,
        stats.meanY,
        stats.stddevX * 2,
        stats.stddevY * 2,
        stats.correlation * size,
    )
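
# A hypothetical illustration: for a contour with stats.area == 2500 the
# vector starts with copysign(sqrt(2500), 2500) == 50.0, followed by the
# means, the doubled stddevs, and the correlation scaled by that size.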


def _matching_for_vectors(m0, m1):
    n = len(m0)

    identity_matching = list(range(n))

    costs = [[_vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
    (
        matching,
        matching_cost,
    ) = min_cost_perfect_bipartite_matching(costs)
    identity_cost = sum(costs[i][i] for i in range(n))
    return matching, matching_cost, identity_cost


def _points_characteristic_bits(points):
    bits = 0
    for pt, b in reversed(points):
        bits = (bits << 1) | b
    return bits
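
# A hypothetical example: points [((0, 0), True), ((5, 5), False), ((10, 0), True)]
# carry on-curve flags (True, False, True); read in reverse, the bits come out
# as 0b101 == 5.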


_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4


def _points_complex_vector(points):
    vector = []
    if not points:
        return vector
    points = [complex(*pt) for pt, _ in points]
    n = len(points)
    assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
    points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
        points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    for i in range(n):
        # The weights are magic numbers.

        # The point itself
        p0 = points[i]
        vector.append(p0)

        # The vector to the next point
        p1 = points[i + 1]
        d0 = p1 - p0
        vector.append(d0 * 3)

        # The turn vector
        p2 = points[i + 2]
        d1 = p2 - p1
        vector.append(d1 - d0)

        # The angle to the next point, as a cross product;
        # square root of, to match dimensionality of distance.
        cross = d0.real * d1.imag - d0.imag * d1.real
        cross = copysign(sqrt(abs(cross)), cross)
        vector.append(cross * 4)

    return vector
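
# A hypothetical worked example with two on-curve points (0, 0) and (10, 0):
# the wrapped point list is [0j, 10+0j, 0j, 10+0j], and the loop emits, per
# point, (point, 3*delta, turn, weighted cross), i.e.
#     [0j, (30+0j), (-20+0j), 0j, (10+0j), (-30+0j), (20+0j), 0j]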


def _add_isomorphisms(points, isomorphisms, reverse):
    reference_bits = _points_characteristic_bits(points)
    n = len(points)

    # if points[0][0] == points[-1][0]:
    #   abort

    if reverse:
        points = points[::-1]
        bits = _points_characteristic_bits(points)
    else:
        bits = reference_bits

    vector = _points_complex_vector(points)

    assert len(vector) % n == 0
    mult = len(vector) // n
    mask = (1 << n) - 1

    for i in range(n):
        b = ((bits << (n - i)) & mask) | (bits >> i)
        if b == reference_bits:
            isomorphisms.append(
                (_rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
            )


def _find_parents_and_order(glyphsets, locations):
    parents = [None] + list(range(len(glyphsets) - 1))
    order = list(range(len(glyphsets)))
    if locations:
        # Order base master first
        bases = (i for i, l in enumerate(locations) if all(v == 0 for v in l.values()))
        base = next(bases, None)
        if base is not None:
            log.info("Base master index %s, location %s", base, locations[base])
        else:
            base = 0
            log.warning("No base master location found")

        # Form a minimum spanning tree of the locations
        try:
            from scipy.sparse.csgraph import minimum_spanning_tree

            graph = [[0] * len(locations) for _ in range(len(locations))]
            axes = set()
            for l in locations:
                axes.update(l.keys())
            axes = sorted(axes)
            vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
            for i, j in itertools.combinations(range(len(locations)), 2):
                graph[i][j] = _vdiff_hypot2(vectors[i], vectors[j])

            tree = minimum_spanning_tree(graph)
            rows, cols = tree.nonzero()
            graph = defaultdict(set)
            for row, col in zip(rows, cols):
                graph[row].add(col)
                graph[col].add(row)

            # Traverse graph from the base and assign parents
            parents = [None] * len(locations)
            order = []
            visited = set()
            queue = deque([base])
            while queue:
                i = queue.popleft()
                visited.add(i)
                order.append(i)
                for j in sorted(graph[i]):
                    if j not in visited:
                        parents[j] = i
                        queue.append(j)

        except ImportError:
            pass

    log.info("Parents: %s", parents)
    log.info("Order: %s", order)
    return parents, order
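
# A hypothetical sketch: for locations [{}, {"wght": 0.5}, {"wght": 1.0}] the
# base is master 0, the spanning tree chains the nearest masters, and the
# function returns parents == [None, 0, 1] with order == [0, 1, 2].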


def lerp_recordings(recording1, recording2, factor=0.5):
    pen = RecordingPen()
    value = pen.value
    for (op1, args1), (op2, args2) in zip(recording1.value, recording2.value):
        if op1 != op2:
            raise ValueError("Mismatched operations: %s, %s" % (op1, op2))
        if op1 == "addComponent":
            mid_args = args1  # XXX Interpolate transformation?
        else:
            mid_args = [
                (x1 + (x2 - x1) * factor, y1 + (y2 - y1) * factor)
                for (x1, y1), (x2, y2) in zip(args1, args2)
            ]
        value.append((op1, mid_args))
    return pen
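
# A minimal sketch of midpoint interpolation between two recorded contours
# (hypothetical pen values):
#
#     r1, r2 = RecordingPen(), RecordingPen()
#     r1.value = [("moveTo", ((0, 0),)), ("lineTo", ((10, 0),))]
#     r2.value = [("moveTo", ((0, 10),)), ("lineTo", ((30, 10),))]
#     mid = lerp_recordings(r1, r2)  # factor defaults to 0.5
#     # mid.value == [("moveTo", ((0.0, 5.0),)), ("lineTo", ((20.0, 5.0),))]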


def test_gen(
    glyphsets,
    glyphs=None,
    names=None,
    ignore_missing=False,
    *,
    locations=None,
    tolerance=DEFAULT_TOLERANCE,
    kinkiness=DEFAULT_KINKINESS,
    upem=DEFAULT_UPEM,
    show_all=False,
):
    if tolerance >= 10:
        tolerance *= 0.01
    assert 0 <= tolerance <= 1
    if kinkiness >= 10:
        kinkiness *= 0.01
    assert 0 <= kinkiness

    if names is None:
        names = glyphsets

    if glyphs is None:
        # `glyphs = glyphsets[0].keys()` is faster, certainly, but it doesn't
        # allow for sparse TTFs/OTFs given out of order: it risks the sparse
        # master being the first one, processing only a subset of the glyphs.
        glyphs = {g for glyphset in glyphsets for g in glyphset.keys()}

    parents, order = _find_parents_and_order(glyphsets, locations)

    def grand_parent(i, glyphname):
        if i is None:
            return None
        i = parents[i]
        if i is None:
            return None
        while parents[i] is not None and glyphsets[i][glyphname] is None:
            i = parents[i]
        return i

    for glyph_name in glyphs:
        log.info("Testing glyph %s", glyph_name)
        allGreenVectors = []
        allControlVectors = []
        allNodeTypes = []
        allContourIsomorphisms = []
        allContourPoints = []
        allContourPens = []
        allGlyphs = [glyphset[glyph_name] for glyphset in glyphsets]
        if len([1 for glyph in allGlyphs if glyph is not None]) <= 1:
            continue
        for master_idx, (glyph, glyphset, name) in enumerate(
            zip(allGlyphs, glyphsets, names)
        ):
            if glyph is None:
                if not ignore_missing:
                    yield (
                        glyph_name,
                        {"type": "missing", "master": name, "master_idx": master_idx},
                    )
                allNodeTypes.append(None)
                allControlVectors.append(None)
                allGreenVectors.append(None)
                allContourIsomorphisms.append(None)
                allContourPoints.append(None)
                allContourPens.append(None)
                continue

            perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
            try:
                glyph.draw(perContourPen, outputImpliedClosingLine=True)
            except TypeError:
                glyph.draw(perContourPen)
            contourPens = perContourPen.value
            del perContourPen

            contourControlVectors = []
            contourGreenVectors = []
            contourIsomorphisms = []
            contourPoints = []
            nodeTypes = []
            allNodeTypes.append(nodeTypes)
            allControlVectors.append(contourControlVectors)
            allGreenVectors.append(contourGreenVectors)
            allContourIsomorphisms.append(contourIsomorphisms)
            allContourPoints.append(contourPoints)
            allContourPens.append(contourPens)
            for ix, contour in enumerate(contourPens):
                contourOps = tuple(op for op, arg in contour.value)
                nodeTypes.append(contourOps)

                greenStats = StatisticsPen(glyphset=glyphset)
                controlStats = StatisticsControlPen(glyphset=glyphset)
                try:
                    contour.replay(greenStats)
                    contour.replay(controlStats)
                except OpenContourError:
                    yield (
                        glyph_name,
                        {
                            "master": name,
                            "master_idx": master_idx,
                            "contour": ix,
                            "type": "open_path",
                        },
                    )
                    continue
                contourGreenVectors.append(_contour_vector_from_stats(greenStats))
                contourControlVectors.append(_contour_vector_from_stats(controlStats))

                # Check starting point
                if contourOps[0] == "addComponent":
                    continue
                assert contourOps[0] == "moveTo"
                assert contourOps[-1] in ("closePath", "endPath")
                points = SimpleRecordingPointPen()
                converter = SegmentToPointPen(points, False)
                contour.replay(converter)
                # points.value is a list of (pt, bool) pairs, where the bool is
                # True for on-curve and False for off-curve points.  Now check
                # all rotations and mirror-rotations of the contour and build a
                # list of isomorphic possible starting points.

                isomorphisms = []
                contourIsomorphisms.append(isomorphisms)

                # Add rotations
                _add_isomorphisms(points.value, isomorphisms, False)
                # Add mirrored rotations
                _add_isomorphisms(points.value, isomorphisms, True)

                contourPoints.append(points.value)

        matchings = [None] * len(allControlVectors)

        for m1idx in order:
            if allNodeTypes[m1idx] is None:
                continue
            m0idx = grand_parent(m1idx, glyph_name)
            if m0idx is None:
                continue
            if allNodeTypes[m0idx] is None:
                continue

            #
            # Basic compatibility checks
            #
            m1 = allNodeTypes[m1idx]
            m0 = allNodeTypes[m0idx]
            if len(m0) != len(m1):
                yield (
                    glyph_name,
                    {
                        "type": "path_count",
                        "master_1": names[m0idx],
                        "master_2": names[m1idx],
                        "master_1_idx": m0idx,
                        "master_2_idx": m1idx,
                        "value_1": len(m0),
                        "value_2": len(m1),
                    },
                )
                continue

            if m0 != m1:
                for pathIx, (nodes1, nodes2) in enumerate(zip(m0, m1)):
                    if nodes1 == nodes2:
                        continue
                    if len(nodes1) != len(nodes2):
                        yield (
                            glyph_name,
                            {
                                "type": "node_count",
                                "path": pathIx,
                                "master_1": names[m0idx],
                                "master_2": names[m1idx],
                                "master_1_idx": m0idx,
                                "master_2_idx": m1idx,
                                "value_1": len(nodes1),
                                "value_2": len(nodes2),
                            },
                        )
                        continue
                    for nodeIx, (n1, n2) in enumerate(zip(nodes1, nodes2)):
                        if n1 != n2:
                            yield (
                                glyph_name,
                                {
                                    "type": "node_incompatibility",
                                    "path": pathIx,
                                    "node": nodeIx,
                                    "master_1": names[m0idx],
                                    "master_2": names[m1idx],
                                    "master_1_idx": m0idx,
                                    "master_2_idx": m1idx,
                                    "value_1": n1,
                                    "value_2": n2,
                                },
                            )
                            continue

            #
            # "contour_order" check
            #

            # We try matching both the StatisticsControlPen vector
            # and the StatisticsPen vector.
            #
            # If either method found an identity matching, accept it.
            # This is crucial for fonts like Kablammo[MORF].ttf and
            # Nabla[EDPT,EHLT].ttf, since they really confuse the
            # StatisticsPen vector because of their area=0 contours.
            #
            # TODO: Optimize by only computing the StatisticsPen vector
            # and then checking if it is the identity vector. Only if
            # not, compute the StatisticsControlPen vector and check both.

            n = len(allControlVectors[m0idx])
            done = n <= 1
            if not done:
                m1Control = allControlVectors[m1idx]
                m0Control = allControlVectors[m0idx]
                (
                    matching_control,
                    matching_cost_control,
                    identity_cost_control,
                ) = _matching_for_vectors(m0Control, m1Control)
                done = matching_cost_control == identity_cost_control
            if not done:
                m1Green = allGreenVectors[m1idx]
                m0Green = allGreenVectors[m0idx]
                (
                    matching_green,
                    matching_cost_green,
                    identity_cost_green,
                ) = _matching_for_vectors(m0Green, m1Green)
                done = matching_cost_green == identity_cost_green

            if not done:
                # See if reversing contours in one master helps.
                # That's a common problem.  Then the wrong_start_point
                # test will fix them.
                #
                # Reverse the sign of the area (index 0); the rest stays the same.
                if not done:
                    m1ControlReversed = [(-m[0],) + m[1:] for m in m1Control]
                    (
                        matching_control_reversed,
                        matching_cost_control_reversed,
                        identity_cost_control_reversed,
                    ) = _matching_for_vectors(m0Control, m1ControlReversed)
                    done = (
                        matching_cost_control_reversed
                        == identity_cost_control_reversed
                    )
                if not done:
                    m1GreenReversed = [(-m[0],) + m[1:] for m in m1Green]
                    (
                        matching_green_reversed,
                        matching_cost_green_reversed,
                        identity_cost_green_reversed,
                    ) = _matching_for_vectors(m0Green, m1GreenReversed)
                    done = (
                        matching_cost_green_reversed == identity_cost_green_reversed
                    )

            if not done:
                # Otherwise, use the worst of the two matchings.
                if (
                    matching_cost_control / identity_cost_control
                    < matching_cost_green / identity_cost_green
                ):
                    matching = matching_control
                    matching_cost = matching_cost_control
                    identity_cost = identity_cost_control
                else:
                    matching = matching_green
                    matching_cost = matching_cost_green
                    identity_cost = identity_cost_green

                if matching_cost < identity_cost * tolerance:
                    log.debug(
                        "matching_control_ratio %g; matching_green_ratio %g.",
                        matching_cost_control / identity_cost_control,
                        matching_cost_green / identity_cost_green,
                    )
                    this_tolerance = matching_cost / identity_cost
                    log.debug("tolerance: %g", this_tolerance)
                    yield (
                        glyph_name,
                        {
                            "type": "contour_order",
                            "master_1": names[m0idx],
                            "master_2": names[m1idx],
                            "master_1_idx": m0idx,
                            "master_2_idx": m1idx,
                            "value_1": list(range(n)),
                            "value_2": matching,
                            "tolerance": this_tolerance,
                        },
                    )
                    matchings[m1idx] = matching

            #
            # "wrong_start_point" / weight check
            #

            m1 = allContourIsomorphisms[m1idx]
            m0 = allContourIsomorphisms[m0idx]
            m1Vectors = allGreenVectors[m1idx]
            m0Vectors = allGreenVectors[m0idx]
            recording0 = allContourPens[m0idx]
            recording1 = allContourPens[m1idx]

            # If contour-order is wrong, adjust it
            if matchings[m1idx] is not None and m1:  # m1 is empty for composite glyphs
                m1 = [m1[i] for i in matchings[m1idx]]
                m1Vectors = [m1Vectors[i] for i in matchings[m1idx]]
                recording1 = [recording1[i] for i in matchings[m1idx]]

            midRecording = []
            for c0, c1 in zip(recording0, recording1):
                try:
                    midRecording.append(lerp_recordings(c0, c1))
                except ValueError:
                    # Mismatch because of the reordering above
                    midRecording.append(None)

            for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
                if len(contour0) == 0 or len(contour0) != len(contour1):
                    # We already reported this; or nothing to do; or not compatible
                    # after reordering above.
                    continue

                c0 = contour0[0]
                # Next few lines duplicated below.
                costs = [_vdiff_hypot2_complex(c0[0], c1[0]) for c1 in contour1]
                min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
                first_cost = costs[0]

                if min_cost < first_cost * tolerance:
                    this_tolerance = min_cost / first_cost
                    # c0 is the first isomorphism of the m0 master.
                    # contour1 is the list of all isomorphisms of the m1 master.
                    #
                    # If the two shapes are both circle-ish and slightly
                    # rotated, we detect wrong start point. This is for
                    # example the case hundreds of times in
                    # RobotoSerif-Italic[GRAD,opsz,wdth,wght].ttf
                    #
                    # If the proposed point is only one off from the first
                    # point (and not reversed), try harder:
                    #
                    # Find the major eigenvector of the covariance matrix,
                    # and rotate the contours by that angle. Then find the
                    # closest point again. If it matches this time, let it
                    # pass.

                    proposed_point = contour1[min_cost_idx][1]
                    reverse = contour1[min_cost_idx][2]
                    num_points = len(allContourPoints[m1idx][ix])
                    leeway = 3
                    okay = False
                    if not reverse and (
                        proposed_point <= leeway
                        or proposed_point >= num_points - leeway
                    ):
                        # Try harder

                        # Recover the covariance matrix from the GreenVectors.
                        # This is a 2x2 matrix.
                        transforms = []
                        for vector in (m0Vectors[ix], m1Vectors[ix]):
                            meanX = vector[1]
                            meanY = vector[2]
                            stddevX = vector[3] * 0.5
                            stddevY = vector[4] * 0.5
                            correlation = vector[5] / abs(vector[0])

                            # https://cookierobotics.com/007/
                            a = stddevX * stddevX  # VarianceX
                            c = stddevY * stddevY  # VarianceY
                            b = correlation * stddevX * stddevY  # Covariance

                            delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
                            lambda1 = (a + c) * 0.5 + delta  # Major eigenvalue
                            lambda2 = (a + c) * 0.5 - delta  # Minor eigenvalue
                            theta = (
                                atan2(lambda1 - a, b)
                                if b != 0
                                else (pi * 0.5 if a < c else 0)
                            )
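                            # A hypothetical worked example: stddevX=2,
                            # stddevY=1, correlation=0 give a=4, c=1, b=0,
                            # hence delta=1.5, lambda1=4, lambda2=1 and
                            # theta=0: the major axis lies along x with
                            # scale sqrt(lambda1) == 2.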
                            trans = Transform()
                            # Don't translate here. We are working on the
                            # complex vector that includes more than just the
                            # points. It's horrible what we are doing anyway...
                            # trans = trans.translate(meanX, meanY)
                            trans = trans.rotate(theta)
                            trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
                            transforms.append(trans)

                        trans = transforms[0]
                        new_c0 = (
                            [
                                complex(*trans.transformPoint((pt.real, pt.imag)))
                                for pt in c0[0]
                            ],
                        ) + c0[1:]
                        trans = transforms[1]
                        new_contour1 = []
                        for c1 in contour1:
                            new_c1 = (
                                [
                                    complex(*trans.transformPoint((pt.real, pt.imag)))
                                    for pt in c1[0]
                                ],
                            ) + c1[1:]
                            new_contour1.append(new_c1)

                        # Next few lines duplicated from above.
                        costs = [
                            _vdiff_hypot2_complex(new_c0[0], new_c1[0])
                            for new_c1 in new_contour1
                        ]
                        min_cost_idx, min_cost = min(
                            enumerate(costs), key=lambda x: x[1]
                        )
                        first_cost = costs[0]
                        if min_cost < first_cost * tolerance:
                            pass
                            # this_tolerance = min_cost / first_cost
                            # proposed_point = new_contour1[min_cost_idx][1]
                        else:
                            okay = True

                    if not okay:
                        yield (
                            glyph_name,
                            {
                                "type": "wrong_start_point",
                                "contour": ix,
                                "master_1": names[m0idx],
                                "master_2": names[m1idx],
                                "master_1_idx": m0idx,
                                "master_2_idx": m1idx,
                                "value_1": 0,
                                "value_2": proposed_point,
                                "reversed": reverse,
                                "tolerance": this_tolerance,
                            },
                        )
                else:
                    # Weight check.
                    #
                    # If contour could be mid-interpolated, and the two
                    # contours have the same area sign, proceed.
                    #
                    # The sign difference can happen if it's a weird
                    # self-intersecting contour; ignore it.
                    contour = midRecording[ix]
                    if contour and (m0Vectors[ix][0] < 0) == (m1Vectors[ix][0] < 0):
                        size0 = m0Vectors[ix][0] * m0Vectors[ix][0]
                        size1 = m1Vectors[ix][0] * m1Vectors[ix][0]

                        midStats = StatisticsPen(glyphset=glyphset)
                        contour.replay(midStats)
                        midVector = _contour_vector_from_stats(midStats)
                        midSize = midVector[0] * midVector[0]

                        geomAvg = (size0 * size1) ** 0.5
                        if not (geomAvg * tolerance <= midSize + 1e-5):
                            try:
                                this_tolerance = midSize / geomAvg
                            except ZeroDivisionError:
                                this_tolerance = 0
                            log.debug(
                                "average size %g; actual size %g; master sizes: %g, %g",
                                geomAvg,
                                midSize,
                                size0,
                                size1,
                            )
                            log.debug("tolerance %g", this_tolerance)
                            yield (
                                glyph_name,
                                {
                                    "type": "underweight",
                                    "contour": ix,
                                    "master_1": names[m0idx],
                                    "master_2": names[m1idx],
                                    "master_1_idx": m0idx,
                                    "master_2_idx": m1idx,
                                    "tolerance": this_tolerance,
                                },
                            )
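
            # A hypothetical illustration of the weight check above: for master
            # sizes size0=100 and size1=400 the geometric average is
            # sqrt(100 * 400) == 200, so a mid-interpolation whose squared size
            # falls below 200 * tolerance is flagged as underweight.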

            #
            # "kink" detector
            #
            m1 = allContourPoints[m1idx]
            m0 = allContourPoints[m0idx]

            # If contour-order is wrong, adjust it
            if matchings[m1idx] is not None and m1:  # m1 is empty for composite glyphs
                m1 = [m1[i] for i in matchings[m1idx]]

            t = 0.1  # ~sin(radian(6)) for tolerance 0.95
            deviation_threshold = (
                upem * DEFAULT_KINKINESS_LENGTH * DEFAULT_KINKINESS / kinkiness
            )

            for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
                if len(contour0) == 0 or len(contour0) != len(contour1):
                    # We already reported this; or nothing to do; or not compatible
                    # after reordering above.
                    continue

                # Walk the contour, keeping track of three consecutive points, with
                # middle one being an on-curve. If the three are co-linear then
                # check for kinky-ness.
                for i in range(len(contour0)):
                    pt0 = contour0[i]
                    pt1 = contour1[i]
                    if not pt0[1] or not pt1[1]:
                        # Skip off-curves
                        continue
                    pt0_prev = contour0[i - 1]
                    pt1_prev = contour1[i - 1]
                    pt0_next = contour0[(i + 1) % len(contour0)]
                    pt1_next = contour1[(i + 1) % len(contour1)]

                    if pt0_prev[1] and pt1_prev[1]:
                        # At least one off-curve is required
                        continue
                    if pt0_next[1] and pt1_next[1]:
                        # At least one off-curve is required
                        continue

                    pt0 = complex(*pt0[0])
                    pt1 = complex(*pt1[0])
                    pt0_prev = complex(*pt0_prev[0])
                    pt1_prev = complex(*pt1_prev[0])
                    pt0_next = complex(*pt0_next[0])
                    pt1_next = complex(*pt1_next[0])

                    # We have three consecutive points. Check whether
                    # they are colinear.
                    d0_prev = pt0 - pt0_prev
                    d0_next = pt0_next - pt0
                    d1_prev = pt1 - pt1_prev
                    d1_next = pt1_next - pt1

                    sin0 = d0_prev.real * d0_next.imag - d0_prev.imag * d0_next.real
                    sin1 = d1_prev.real * d1_next.imag - d1_prev.imag * d1_next.real
                    try:
                        sin0 /= abs(d0_prev) * abs(d0_next)
                        sin1 /= abs(d1_prev) * abs(d1_next)
                    except ZeroDivisionError:
                        continue

                    if abs(sin0) > t or abs(sin1) > t:
                        # Not colinear / not smooth.
                        continue

                    # Check the mid-point is actually, well, in the middle.
                    dot0 = d0_prev.real * d0_next.real + d0_prev.imag * d0_next.imag
                    dot1 = d1_prev.real * d1_next.real + d1_prev.imag * d1_next.imag
                    if dot0 < 0 or dot1 < 0:
                        # Sharp corner.
                        continue

                    # Fine, if handle ratios are similar...
                    r0 = abs(d0_prev) / (abs(d0_prev) + abs(d0_next))
                    r1 = abs(d1_prev) / (abs(d1_prev) + abs(d1_next))
                    r_diff = abs(r0 - r1)
                    if abs(r_diff) < t:
                        # Smooth enough.
                        continue

                    mid = (pt0 + pt1) / 2
                    mid_prev = (pt0_prev + pt1_prev) / 2
                    mid_next = (pt0_next + pt1_next) / 2

                    mid_d0 = mid - mid_prev
                    mid_d1 = mid_next - mid

                    sin_mid = mid_d0.real * mid_d1.imag - mid_d0.imag * mid_d1.real
                    try:
                        sin_mid /= abs(mid_d0) * abs(mid_d1)
                    except ZeroDivisionError:
                        continue

                    # ...or if the angles are similar.
                    if abs(sin_mid) * (tolerance * kinkiness) <= t:
                        # Smooth enough.
                        continue

                    # How visible is the kink?

                    cross = sin_mid * abs(mid_d0) * abs(mid_d1)
                    arc_len = abs(mid_d0 + mid_d1)
                    deviation = abs(cross / arc_len)
                    if deviation < deviation_threshold:
                        continue
                    deviation_ratio = deviation / arc_len
                    if deviation_ratio > t:
                        continue

                    this_tolerance = t / (abs(sin_mid) * kinkiness)

                    log.debug(
                        "deviation %g; deviation_ratio %g; sin_mid %g; r_diff %g",
                        deviation,
                        deviation_ratio,
                        sin_mid,
                        r_diff,
                    )
                    log.debug("tolerance %g", this_tolerance)
                    yield (
                        glyph_name,
                        {
                            "type": "kink",
                            "contour": ix,
                            "master_1": names[m0idx],
                            "master_2": names[m1idx],
                            "master_1_idx": m0idx,
                            "master_2_idx": m1idx,
                            "value": i,
                            "tolerance": this_tolerance,
                        },
                    )

            #
            # --show-all
            #

            if show_all:
                yield (
                    glyph_name,
                    {
                        "type": "nothing",
                        "master_1": names[m0idx],
                        "master_2": names[m1idx],
                        "master_1_idx": m0idx,
                        "master_2_idx": m1idx,
                    },
                )


@wraps(test_gen)
def test(*args, **kwargs):
    problems = defaultdict(list)
    for glyphname, problem in test_gen(*args, **kwargs):
        problems[glyphname].append(problem)
    return problems
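
# A minimal usage sketch (hypothetical, with two already-loaded glyphsets):
#
#     problems = test(
#         [font1.getGlyphSet(), font2.getGlyphSet()],
#         names=["Regular", "Bold"],
#     )
#     # problems maps glyph names to lists of problem dicts.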


def recursivelyAddGlyph(glyphname, glyphset, ttGlyphSet, glyf):
    if glyphname in glyphset:
        return
    glyphset[glyphname] = ttGlyphSet[glyphname]

    for component in getattr(glyf[glyphname], "components", []):
        recursivelyAddGlyph(component.glyphName, glyphset, ttGlyphSet, glyf)


def main(args=None):
    """Test for interpolatability issues between fonts"""
    import argparse
    import sys

    parser = argparse.ArgumentParser(
        "fonttools varLib.interpolatable",
        description=main.__doc__,
    )
    parser.add_argument(
        "--glyphs",
        action="store",
        help="Space-separated names of glyphs to check",
    )
    parser.add_argument(
        "--show-all",
        action="store_true",
        help="Show all glyph pairs, even if no problems are found",
    )
    parser.add_argument(
        "--tolerance",
        action="store",
        type=float,
        help="Error tolerance. Between 0 and 1. Default %s" % DEFAULT_TOLERANCE,
    )
    parser.add_argument(
        "--kinkiness",
        action="store",
        type=float,
        help="How aggressively to report kinks. Default %s" % DEFAULT_KINKINESS,
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output report in JSON format",
    )
    parser.add_argument(
        "--pdf",
        action="store",
        help="Output report in PDF format",
    )
    parser.add_argument(
        "--html",
        action="store",
        help="Output report in HTML format",
    )
    parser.add_argument(
        "--quiet",
        action="store_true",
        help="Only exit with code 1 or 0, no output",
    )
    parser.add_argument(
        "--output",
        action="store",
        help="Output file for the problem report; Default: stdout",
    )
    parser.add_argument(
        "--ignore-missing",
        action="store_true",
        help="Will not report glyphs missing from sparse masters as errors",
    )
    parser.add_argument(
        "inputs",
        metavar="FILE",
        type=str,
        nargs="+",
        help="Input a single variable font / DesignSpace / Glyphs file, or multiple TTF/UFO files",
    )
    parser.add_argument(
        "--name",
        metavar="NAME",
        type=str,
        action="append",
        help="Name of the master to use in the report. If not provided, all are used.",
    )
    parser.add_argument("-v", "--verbose", action="store_true", help="Run verbosely.")
    parser.add_argument("--debug", action="store_true", help="Run with debug output.")

    args = parser.parse_args(args)

    from fontTools import configLogger

    configLogger(level=("INFO" if args.verbose else "ERROR"))
    if args.debug:
        configLogger(level="DEBUG")

    glyphs = args.glyphs.split() if args.glyphs else None

    from os.path import basename

    fonts = []
    names = []
    locations = []
    upem = DEFAULT_UPEM
    axis_triples = {}

    original_args_inputs = tuple(args.inputs)

    if len(args.inputs) == 1:
        designspace = None
        if args.inputs[0].endswith(".designspace"):
            from fontTools.designspaceLib import DesignSpaceDocument

            designspace = DesignSpaceDocument.fromfile(args.inputs[0])
            args.inputs = [master.path for master in designspace.sources]
            locations = [master.location for master in designspace.sources]
            axis_triples = {
                a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
            }
            axis_mappings = {a.name: a.map for a in designspace.axes}
            axis_triples = {
                k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
                for k, vv in axis_triples.items()
            }

        elif args.inputs[0].endswith(".glyphs"):
            from glyphsLib import GSFont, to_designspace

            gsfont = GSFont(args.inputs[0])
            upem = gsfont.upm
            designspace = to_designspace(gsfont)
            fonts = [source.font for source in designspace.sources]
            names = ["%s-%s" % (f.info.familyName, f.info.styleName) for f in fonts]
            args.inputs = []
            locations = [master.location for master in designspace.sources]
            axis_triples = {
                a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
            }
            axis_mappings = {a.name: a.map for a in designspace.axes}
            axis_triples = {
                k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
                for k, vv in axis_triples.items()
            }

        elif args.inputs[0].endswith(".ttf"):
            from fontTools.ttLib import TTFont

            font = TTFont(args.inputs[0])
            upem = font["head"].unitsPerEm
            if "gvar" in font:
                # Is variable font

                axisMapping = {}
                fvar = font["fvar"]
                for axis in fvar.axes:
                    axisMapping[axis.axisTag] = {
                        -1: axis.minValue,
                        0: axis.defaultValue,
                        1: axis.maxValue,
                    }
                if "avar" in font:
                    avar = font["avar"]
                    for axisTag, segments in avar.segments.items():
                        fvarMapping = axisMapping[axisTag].copy()
                        for location, value in segments.items():
                            axisMapping[axisTag][value] = piecewiseLinearMap(
                                location, fvarMapping
                            )

                gvar = font["gvar"]
                glyf = font["glyf"]
                # Gather all glyphs at their "master" locations
                ttGlyphSets = {}
                glyphsets = defaultdict(dict)

                if glyphs is None:
                    glyphs = sorted(gvar.variations.keys())
                for glyphname in glyphs:
                    for var in gvar.variations[glyphname]:
                        locDict = {}
                        loc = []
                        for tag, val in sorted(var.axes.items()):
                            locDict[tag] = val[1]
                            loc.append((tag, val[1]))

                        locTuple = tuple(loc)
                        if locTuple not in ttGlyphSets:
                            ttGlyphSets[locTuple] = font.getGlyphSet(
                                location=locDict, normalized=True, recalcBounds=False
                            )

                        recursivelyAddGlyph(
                            glyphname, glyphsets[locTuple], ttGlyphSets[locTuple], glyf
                        )

                names = ["''"]
                fonts = [font.getGlyphSet()]
                locations = [{}]
                axis_triples = {a: (-1, 0, +1) for a in sorted(axisMapping.keys())}
                for locTuple in sorted(glyphsets.keys(), key=lambda v: (len(v), v)):
                    name = (
                        "'"
                        + " ".join(
                            "%s=%s"
                            % (
                                k,
                                floatToFixedToStr(
                                    piecewiseLinearMap(v, axisMapping[k]), 14
                                ),
                            )
                            for k, v in locTuple
                        )
                        + "'"
                    )
                    names.append(name)
                    fonts.append(glyphsets[locTuple])
                    locations.append(dict(locTuple))
                args.ignore_missing = True
                args.inputs = []

    if not locations:
        locations = [{} for _ in fonts]

    for filename in args.inputs:
        if filename.endswith(".ufo"):
            from fontTools.ufoLib import UFOReader

            font = UFOReader(filename)
            info = SimpleNamespace()
            font.readInfo(info)
            upem = info.unitsPerEm
            fonts.append(font)
        else:
            from fontTools.ttLib import TTFont

            font = TTFont(filename)
            upem = font["head"].unitsPerEm
            fonts.append(font)

        names.append(basename(filename).rsplit(".", 1)[0])

    glyphsets = []
    for font in fonts:
        if hasattr(font, "getGlyphSet"):
            glyphset = font.getGlyphSet()
        else:
            glyphset = font
        glyphsets.append({k: glyphset[k] for k in glyphset.keys()})

    if args.name:
        accepted_names = set(args.name)
        glyphsets = [
            glyphset
            for name, glyphset in zip(names, glyphsets)
            if name in accepted_names
        ]
        locations = [
            location
            for name, location in zip(names, locations)
            if name in accepted_names
        ]
        names = [name for name in names if name in accepted_names]

    if not glyphs:
        glyphs = sorted(set([gn for glyphset in glyphsets for gn in glyphset.keys()]))

    glyphsSet = set(glyphs)
    for glyphset in glyphsets:
        glyphSetGlyphNames = set(glyphset.keys())
        diff = glyphsSet - glyphSetGlyphNames
        if diff:
            for gn in diff:
                glyphset[gn] = None

    # Normalize locations
    locations = [normalizeLocation(loc, axis_triples) for loc in locations]
    tolerance = args.tolerance or DEFAULT_TOLERANCE
    kinkiness = args.kinkiness if args.kinkiness is not None else DEFAULT_KINKINESS

    try:
        log.info("Running on %d glyphsets", len(glyphsets))
        log.info("Locations: %s", pformat(locations))
        problems_gen = test_gen(
            glyphsets,
            glyphs=glyphs,
            names=names,
            locations=locations,
            upem=upem,
            ignore_missing=args.ignore_missing,
            tolerance=tolerance,
            kinkiness=kinkiness,
            show_all=args.show_all,
        )
        problems = defaultdict(list)

        f = sys.stdout if args.output is None else open(args.output, "w")

        if not args.quiet:
            if args.json:
                import json

                for glyphname, problem in problems_gen:
                    problems[glyphname].append(problem)

                print(json.dumps(problems), file=f)
            else:
                last_glyphname = None
                for glyphname, p in problems_gen:
                    problems[glyphname].append(p)

                    if glyphname != last_glyphname:
                        print(f"Glyph {glyphname} was not compatible:", file=f)
                        last_glyphname = glyphname
                        last_master_idxs = None

                    master_idxs = (
                        (p["master_idx"],)
                        if "master_idx" in p
                        else (p["master_1_idx"], p["master_2_idx"])
                    )
                    if master_idxs != last_master_idxs:
                        master_names = (
                            (p["master"],)
                            if "master" in p
                            else (p["master_1"], p["master_2"])
                        )
                        print("  Masters: %s:" % ", ".join(master_names), file=f)
                        last_master_idxs = master_idxs

                    if p["type"] == "missing":
                        print(
                            "    Glyph was missing in master %s" % p["master"], file=f
                        )
                    elif p["type"] == "open_path":
                        print(
                            "    Glyph has an open path in master %s" % p["master"],
                            file=f,
                        )
                    elif p["type"] == "path_count":
                        print(
                            "    Path count differs: %i in %s, %i in %s"
                            % (
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == "node_count":
                        print(
                            "    Node count differs in path %i: %i in %s, %i in %s"
                            % (
                                p["path"],
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == "node_incompatibility":
                        print(
                            "    Node %i incompatible in path %i: %s in %s, %s in %s"
                            % (
                                p["node"],
                                p["path"],
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == "contour_order":
                        print(
                            "    Contour order differs: %s in %s, %s in %s"
                            % (
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == "wrong_start_point":
                        print(
                            "    Contour %d start point differs: %s in %s, %s in %s; reversed: %s"
                            % (
                                p["contour"],
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                                p["reversed"],
                            ),
                            file=f,
                        )
                    elif p["type"] == "underweight":
                        print(
                            "    Contour %d interpolation is underweight: %s, %s"
                            % (
                                p["contour"],
                                p["master_1"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == "kink":
                        print(
                            "    Contour %d has a kink at %s: %s, %s"
                            % (
                                p["contour"],
                                p["value"],
                                p["master_1"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == "nothing":
                        print(
                            "    Showing %s and %s"
                            % (
                                p["master_1"],
                                p["master_2"],
                            ),
                            file=f,
                        )
        else:
            for glyphname, problem in problems_gen:
                problems[glyphname].append(problem)

        if args.pdf:
            log.info("Writing PDF to %s", args.pdf)
            from .interpolatablePlot import InterpolatablePDF

            with InterpolatablePDF(args.pdf, glyphsets=glyphsets, names=names) as pdf:
                pdf.add_title_page(
                    original_args_inputs, tolerance=tolerance, kinkiness=kinkiness
                )
                pdf.add_problems(problems)
                if not problems and not args.quiet:
                    pdf.draw_cupcake()

        if args.html:
            log.info("Writing HTML to %s", args.html)
            from .interpolatablePlot import InterpolatableSVG

            svgs = []
            glyph_starts = {}
            with InterpolatableSVG(svgs, glyphsets=glyphsets, names=names) as svg:
                svg.add_title_page(
                    original_args_inputs,
                    show_tolerance=False,
                    tolerance=tolerance,
                    kinkiness=kinkiness,
                )
                for glyph, glyph_problems in problems.items():
                    glyph_starts[len(svgs)] = glyph
                    svg.add_problems(
                        {glyph: glyph_problems},
                        show_tolerance=False,
                        show_page_number=False,
                    )
                if not problems and not args.quiet:
                    svg.draw_cupcake()

            import base64

            with open(args.html, "wb") as f:
                f.write(b"<!DOCTYPE html>\n")
                f.write(
                    b'<html><body align="center" style="font-family: sans-serif; text-color: #222">\n'
                )
                f.write(b"<title>fonttools varLib.interpolatable report</title>\n")
                for i, svg in enumerate(svgs):
                    if i in glyph_starts:
                        f.write(f"<h1>Glyph {glyph_starts[i]}</h1>\n".encode("utf-8"))
                    f.write("<img src='data:image/svg+xml;base64,".encode("utf-8"))
                    f.write(base64.b64encode(svg))
                    f.write(b"' />\n")
                    f.write(b"<hr>\n")
                f.write(b"</body></html>\n")

    except Exception as e:
        e.args += original_args_inputs
        log.error(e)
        raise

    if problems:
        return problems


if __name__ == "__main__":
    import sys

    problems = main()
    sys.exit(int(bool(problems)))