Rewriting varLib.interpolatable to allow for sparse masters + tests

Colin M. Ford 2023-04-05 16:35:03 -04:00
parent 3b9a73ff83
commit 7a86dd325e
2 changed files with 226 additions and 146 deletions

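In short, test() now tolerates sparse masters: a master that lacks a glyph contributes None for it, and every comparison picks its baseline from the first non-None master instead of blindly using index 0. The diff below applies that selection pattern three times (to allNodeTypes, allVectors, and allContourIsomorphisms); here is a minimal, self-contained sketch of the pattern, using made-up placeholder data rather than real node-type lists:

# Sketch only: a sparse master contributes None; the reference master (m0)
# is the first non-None entry, and any other None entries are skipped.
allNodeTypes = [None, ["l", "c", "c"], ["l", "c", "c"]]  # sparse master listed first

if any(allNodeTypes):
    m0idx = allNodeTypes.index(next(x for x in allNodeTypes if x is not None))
    m0 = allNodeTypes[m0idx]
    for i, m1 in enumerate(allNodeTypes[m0idx + 1:]):
        if m1 is None:
            continue  # another sparse master; nothing to compare against
        assert len(m0) == len(m1)  # the real code reports a "path_count" problem instead

print(m0idx)  # 1 -- the first full master, not the sparse one at index 0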

@@ -137,12 +137,14 @@ def min_cost_perfect_bipartite_matching(G):
return best, best_cost
def test(glyphsets, glyphs=None, names=None):
def test(glyphsets, glyphs=None, names=None, ignore_missing=False):
if names is None:
names = glyphsets
if glyphs is None:
glyphs = glyphsets[0].keys()
# `glyphs = glyphsets[0].keys()` is faster, but with sparse TTFs/OTFs given in an arbitrary order
# it risks the sparse master coming first, so only a subset of the glyphs would be processed
# (see the short sketch after this hunk)
glyphs = set([g for glyphset in glyphsets for g in glyphset.keys()])
hist = []
problems = OrderedDict()
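A tiny standalone illustration of the union-of-glyph-names change referenced in the comment above; the glyph sets are made-up stand-ins, not real fontTools glyph objects:

# The first (sparse) master lacks "s".
glyphsets = [{"a": object()}, {"a": object(), "s": object()}]

# Old behaviour: glyphsets[0].keys() -> only {"a"}; "s" would never be checked.
# New behaviour: the union across all masters -> {"a", "s"}.
glyphs = set(g for glyphset in glyphsets for g in glyphset.keys())
assert glyphs == {"a", "s"}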
@@ -151,20 +153,23 @@ def test(glyphsets, glyphs=None, names=None):
problems.setdefault(glyphname, []).append(problem)
for glyph_name in glyphs:
# print()
# print(glyph_name)
try:
m0idx = 0
allVectors = []
allNodeTypes = []
allContourIsomorphisms = []
for glyphset, name in zip(glyphsets, names):
# print('.', end='')
if glyph_name not in glyphset:
add_problem(glyph_name, {"type": "missing", "master": name})
continue
glyph = glyphset[glyph_name]
if glyph is None:
if not ignore_missing:
add_problem(glyph_name, {"type": "missing", "master": name})
allNodeTypes.append(None)
allVectors.append(None)
allContourIsomorphisms.append(None)
continue
perContourPen = PerContourOrComponentPen(
RecordingPen, glyphset=glyphset
)
@@ -243,105 +248,125 @@ def test(glyphsets, glyphs=None, names=None):
_rot_list([complex(*pt) for pt, bl in mirrored], i)
)
# Check each master against the first one in the list.
m0 = allNodeTypes[0]
for i, m1 in enumerate(allNodeTypes[1:]):
if len(m0) != len(m1):
add_problem(
glyph_name,
{
"type": "path_count",
"master_1": names[0],
"master_2": names[i + 1],
"value_1": len(m0),
"value_2": len(m1),
},
)
if m0 == m1:
continue
for pathIx, (nodes1, nodes2) in enumerate(zip(m0, m1)):
if nodes1 == nodes2:
if any(allNodeTypes):
# m0idx is the index of the first non-None item in allNodeTypes
# (the any() check above guarantees such an item exists)
m0idx = allNodeTypes.index(next((x for x in allNodeTypes if x is not None), None))
# m0 is that first non-None item; it serves as the reference master
m0 = allNodeTypes[m0idx]
for i, m1 in enumerate(allNodeTypes[m0idx+1:]):
if m1 is None:
continue
if len(nodes1) != len(nodes2):
if len(m0) != len(m1):
add_problem(
glyph_name,
{
"type": "node_count",
"path": pathIx,
"master_1": names[0],
"master_2": names[i + 1],
"value_1": len(nodes1),
"value_2": len(nodes2),
"type": "path_count",
"master_1": names[m0idx],
"master_2": names[m0idx + i + 1],
"value_1": len(m0),
"value_2": len(m1),
},
)
if m0 == m1:
continue
for nodeIx, (n1, n2) in enumerate(zip(nodes1, nodes2)):
if n1 != n2:
for pathIx, (nodes1, nodes2) in enumerate(zip(m0, m1)):
if nodes1 == nodes2:
continue
if len(nodes1) != len(nodes2):
add_problem(
glyph_name,
{
"type": "node_incompatibility",
"type": "node_count",
"path": pathIx,
"node": nodeIx,
"master_1": names[0],
"master_2": names[i + 1],
"value_1": n1,
"value_2": n2,
"master_1": names[m0idx],
"master_2": names[m0idx + i + 1],
"value_1": len(nodes1),
"value_2": len(nodes2),
},
)
continue
for nodeIx, (n1, n2) in enumerate(zip(nodes1, nodes2)):
if n1 != n2:
add_problem(
glyph_name,
{
"type": "node_incompatibility",
"path": pathIx,
"node": nodeIx,
"master_1": names[0],
"master_2": names[m0idx + i + 1],
"value_1": n1,
"value_2": n2,
},
)
continue
m0 = allVectors[0]
for i, m1 in enumerate(allVectors[1:]):
if len(m0) != len(m1):
# We already reported this
continue
if not m0:
continue
costs = [[_vlen(_vdiff(v0, v1)) for v1 in m1] for v0 in m0]
matching, matching_cost = min_cost_perfect_bipartite_matching(costs)
identity_matching = list(range(len(m0)))
identity_cost = sum(costs[i][i] for i in range(len(m0)))
if (
matching != identity_matching
and matching_cost < identity_cost * 0.95
):
add_problem(
glyph_name,
{
"type": "contour_order",
"master_1": names[0],
"master_2": names[i + 1],
"value_1": list(range(len(m0))),
"value_2": matching,
},
)
break
m0 = allContourIsomorphisms[0]
for i, m1 in enumerate(allContourIsomorphisms[1:]):
if len(m0) != len(m1):
# We already reported this
continue
if not m0:
continue
for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
c0 = contour0[0]
costs = [
v for v in (_complex_vlen(_vdiff(c0, c1)) for c1 in contour1)
]
min_cost = min(costs)
first_cost = costs[0]
if min_cost < first_cost * 0.95:
if any(allVectors):
# m0idx is the index of the first non-None item in allVectors
# (the any() check above guarantees such an item exists)
m0idx = allVectors.index(next((x for x in allVectors if x is not None), None))
# m0 is that first non-None item; it serves as the reference master
m0 = allVectors[m0idx]
for i, m1 in enumerate(allVectors[m0idx+1:]):
if m1 is None:
continue
if len(m0) != len(m1):
# We already reported this
continue
if not m0:
continue
costs = [[_vlen(_vdiff(v0, v1)) for v1 in m1] for v0 in m0]
matching, matching_cost = min_cost_perfect_bipartite_matching(costs)
identity_matching = list(range(len(m0)))
identity_cost = sum(costs[i][i] for i in range(len(m0)))
if (
matching != identity_matching
and matching_cost < identity_cost * 0.95
):
add_problem(
glyph_name,
{
"type": "wrong_start_point",
"contour": ix,
"master_1": names[0],
"master_2": names[i + 1],
"type": "contour_order",
"master_1": names[m0idx],
"master_2": names[m0idx + i + 1],
"value_1": list(range(len(m0))),
"value_2": matching,
},
)
break
if any(allContourIsomorphisms):
# m0idx is the index of the first non-None item in allContourIsomorphisms
# (the any() check above guarantees such an item exists)
m0idx = allContourIsomorphisms.index(next((x for x in allContourIsomorphisms if x is not None), None))
# m0 is that first non-None item; it serves as the reference master
m0 = allContourIsomorphisms[m0idx]
for i, m1 in enumerate(allContourIsomorphisms[m0idx+1:]):
if m1 is None:
continue
if len(m0) != len(m1):
# We already reported this
continue
if not m0:
continue
for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
c0 = contour0[0]
costs = [
v for v in (_complex_vlen(_vdiff(c0, c1)) for c1 in contour1)
]
min_cost = min(costs)
first_cost = costs[0]
if min_cost < first_cost * 0.95:
add_problem(
glyph_name,
{
"type": "wrong_start_point",
"contour": ix,
"master_1": names[m0idx],
"master_2": names[m0idx + i + 1],
},
)
except ValueError as e:
add_problem(
@@ -365,7 +390,17 @@ def main(args=None):
help="Output report in JSON format",
)
parser.add_argument(
"inputs", metavar="FILE", type=str, nargs="+", help="Input TTF/UFO files"
"--quiet",
action="store_true",
help="Only exit with code 1 or 0, no output",
)
parser.add_argument(
"--ignore-missing",
action="store_true",
help="Will not report glyphs missing from sparse masters as errors",
)
parser.add_argument(
"inputs", metavar="FILE", type=str, nargs="+", help="Input a single DesignSpace/Glyphs file, or multiple TTF/UFO files"
)
args = parser.parse_args(args)
@@ -440,70 +475,90 @@ def main(args=None):
names.append(basename(filename).rsplit(".", 1)[0])
if hasattr(fonts[0], "getGlyphSet"):
glyphsets = [font.getGlyphSet() for font in fonts]
glyphsets = [dict(font.getGlyphSet().items()) for font in fonts]
else:
glyphsets = fonts
glyphsets = [dict(font.items()) for font in fonts]
problems = test(glyphsets, glyphs=glyphs, names=names)
if args.json:
import json
if not glyphs:
glyphs = set([gn for glyphset in glyphsets for gn in glyphset.keys()])
print(json.dumps(problems))
else:
for glyph, glyph_problems in problems.items():
print(f"Glyph {glyph} was not compatible: ")
for p in glyph_problems:
if p["type"] == "missing":
print(" Glyph was missing in master %s" % p["master"])
if p["type"] == "open_path":
print(" Glyph has an open path in master %s" % p["master"])
if p["type"] == "path_count":
print(
" Path count differs: %i in %s, %i in %s"
% (p["value_1"], p["master_1"], p["value_2"], p["master_2"])
)
if p["type"] == "node_count":
print(
" Node count differs in path %i: %i in %s, %i in %s"
% (
p["path"],
p["value_1"],
p["master_1"],
p["value_2"],
p["master_2"],
for glyphset in glyphsets:
glyphSetGlyphNames = set(glyphset.keys())
diff = glyphs - glyphSetGlyphNames
if diff:
for gn in diff:
glyphset[gn] = None
problems = test(glyphsets, glyphs=glyphs, names=names, ignore_missing=args.ignore_missing)
if not args.quiet:
if args.json:
import json
print(json.dumps(problems))
else:
for glyph, glyph_problems in problems.items():
print(f"Glyph {glyph} was not compatible: ")
for p in glyph_problems:
if p["type"] == "missing":
print(" Glyph was missing in master %s" % p["master"])
if p["type"] == "open_path":
print(" Glyph has an open path in master %s" % p["master"])
if p["type"] == "path_count":
print(
" Path count differs: %i in %s, %i in %s"
% (p["value_1"], p["master_1"], p["value_2"], p["master_2"])
)
)
if p["type"] == "node_incompatibility":
print(
" Node %o incompatible in path %i: %s in %s, %s in %s"
% (
p["node"],
p["path"],
p["value_1"],
p["master_1"],
p["value_2"],
p["master_2"],
if p["type"] == "node_count":
print(
" Node count differs in path %i: %i in %s, %i in %s"
% (
p["path"],
p["value_1"],
p["master_1"],
p["value_2"],
p["master_2"],
)
)
)
if p["type"] == "contour_order":
print(
" Contour order differs: %s in %s, %s in %s"
% (
p["value_1"],
p["master_1"],
p["value_2"],
p["master_2"],
if p["type"] == "node_incompatibility":
print(
" Node %o incompatible in path %i: %s in %s, %s in %s"
% (
p["node"],
p["path"],
p["value_1"],
p["master_1"],
p["value_2"],
p["master_2"],
)
)
)
if p["type"] == "wrong_start_point":
print(
" Contour %d start point differs: %s, %s"
% (
p["contour"],
p["master_1"],
p["master_2"],
if p["type"] == "contour_order":
print(
" Contour order differs: %s in %s, %s in %s"
% (
p["value_1"],
p["master_1"],
p["value_2"],
p["master_2"],
)
)
if p["type"] == "wrong_start_point":
print(
" Contour %d start point differs: %s, %s"
% (
p["contour"],
p["master_1"],
p["master_2"],
)
)
if p["type"] == "math_error":
print(
" Miscellaneous error in %s: %s"
% (
p["master"],
p["error"],
)
)
)
if problems:
return problems

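For reference, a hedged usage sketch of the updated entry point; the font paths are placeholders and assume three compiled masters, one of them sparse. The return behaviour (a problems dict, or None when everything is compatible) follows from the diff and tests above:

from fontTools.varLib.interpolatable import main as interpolatable_main

# Placeholder paths; the sparse master may appear anywhere in the list.
masters = ["MyFont-Regular.ttf", "MyFont-Medium.ttf", "MyFont-Bold.ttf"]

# Default behaviour: glyphs absent from the sparse master come back as
# {"type": "missing", ...} problems; --quiet suppresses the printed report.
problems = interpolatable_main(["--quiet"] + masters)

# With --ignore-missing, gaps in sparse masters are tolerated and main()
# returns None when the masters are otherwise compatible.
assert interpolatable_main(["--quiet", "--ignore-missing"] + masters) is None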

@@ -93,6 +93,31 @@ class InterpolatableTest(unittest.TestCase):
otf_paths = self.get_file_list(self.tempdir, suffix)
self.assertIsNone(interpolatable_main(otf_paths))
def test_sparse_interpolatable_ttfs(self):
suffix = ".ttf"
ttx_dir = self.get_test_input("master_ttx_interpolatable_ttf")
self.temp_dir()
ttx_paths = self.get_file_list(ttx_dir, ".ttx", "SparseMasters-")
for path in ttx_paths:
self.compile_font(path, suffix, self.tempdir)
ttf_paths = self.get_file_list(self.tempdir, suffix)
# without --ignore-missing
problems = interpolatable_main(["--quiet"] + ttf_paths)
self.assertEqual(problems['a'], [{'type': 'missing', 'master': 'SparseMasters-Medium'}])
self.assertEqual(problems['s'], [{'type': 'missing', 'master': 'SparseMasters-Medium'}])
self.assertEqual(problems['edotabove'], [{'type': 'missing', 'master': 'SparseMasters-Medium'}])
self.assertEqual(problems['dotabovecomb'], [{'type': 'missing', 'master': 'SparseMasters-Medium'}])
# normal order, with --ignore-missing
self.assertIsNone(interpolatable_main(["--ignore-missing"] + ttf_paths))
# purposely putting the sparse master (medium) first
self.assertIsNone(interpolatable_main(["--ignore-missing"] + [ttf_paths[1]] + [ttf_paths[0]] + [ttf_paths[2]]))
# purposely putting the sparse master (medium) last
self.assertIsNone(interpolatable_main(["--ignore-missing"] + [ttf_paths[0]] + [ttf_paths[2]] + [ttf_paths[1]]))
def test_interpolatable_varComposite(self):
input_path = self.get_test_input(
"..", "..", "ttLib", "data", "varc-ac00-ac01.ttf"