Merge pull request #1368 from fonttools/varLib-sparse-masters

[varLib] Support sparse masters
Behdad Esfahbod 2018-11-15 15:00:00 -08:00 committed by GitHub
commit e30b830403
7 changed files with 366 additions and 125 deletions

View File

@@ -378,12 +378,6 @@ def _add_method(*clazzes):
def _uniq_sort(l):
return sorted(set(l))
def _set_update(s, *others):
# Jython's set.update only takes one other argument.
# Emulate real set.update...
for other in others:
s.update(other)
def _dict_subset(d, glyphs):
return {g:d[g] for g in glyphs}
@@ -457,7 +451,7 @@ def subset_glyphs(self, s):
def closure_glyphs(self, s, cur_glyphs):
for glyph, subst in self.mapping.items():
if glyph in cur_glyphs:
_set_update(s.glyphs, subst)
s.glyphs.update(subst)
@_add_method(otTables.MultipleSubst)
def subset_glyphs(self, s):
@@ -467,8 +461,8 @@ def subset_glyphs(self, s):
@_add_method(otTables.AlternateSubst)
def closure_glyphs(self, s, cur_glyphs):
_set_update(s.glyphs, *(vlist for g,vlist in self.alternates.items()
if g in cur_glyphs))
s.glyphs.update(*(vlist for g,vlist in self.alternates.items()
if g in cur_glyphs))
@_add_method(otTables.AlternateSubst)
def subset_glyphs(self, s):
@@ -480,10 +474,10 @@ def subset_glyphs(self, s):
@_add_method(otTables.LigatureSubst)
def closure_glyphs(self, s, cur_glyphs):
_set_update(s.glyphs, *([seq.LigGlyph for seq in seqs
if all(c in s.glyphs for c in seq.Component)]
for g,seqs in self.ligatures.items()
if g in cur_glyphs))
s.glyphs.update(*([seq.LigGlyph for seq in seqs
if all(c in s.glyphs for c in seq.Component)]
for g,seqs in self.ligatures.items()
if g in cur_glyphs))
@_add_method(otTables.LigatureSubst)
def subset_glyphs(self, s):
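
The removed _set_update() helper existed only because Jython's set.update() accepts a single argument; the built-in method takes any number of iterables, so the call sites above can use it directly. A minimal illustration (hypothetical values, not part of the diff):

s = {"a"}
s.update(["b", "c"], ["d"])   # built-in set.update accepts multiple iterables
assert s == {"a", "b", "c", "d"}
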

View File

@@ -32,7 +32,7 @@ from fontTools.ttLib.tables.TupleVariation import TupleVariation
from fontTools.ttLib.tables import otTables as ot
from fontTools.ttLib.tables.otBase import OTTableWriter
from fontTools.varLib import builder, models, varStore
from fontTools.varLib.merger import VariationMerger, _all_equal
from fontTools.varLib.merger import VariationMerger
from fontTools.varLib.mvar import MVAR_ENTRIES
from fontTools.varLib.iup import iup_delta_optimize
from fontTools.varLib.featureVars import addFeatureVariations
@@ -280,7 +280,7 @@ def _SetCoordinates(font, glyphName, coord):
# XXX Handle vertical
font["hmtx"].metrics[glyphName] = horizontalAdvanceWidth, leftSideBearing
def _add_gvar(font, model, master_ttfs, tolerance=0.5, optimize=True):
def _add_gvar(font, masterModel, master_ttfs, tolerance=0.5, optimize=True):
assert tolerance >= 0
@@ -294,10 +294,12 @@ def _add_gvar(font, model, master_ttfs, tolerance=0.5, optimize=True):
for glyph in font.getGlyphOrder():
allData = [_GetCoordinates(m, glyph) for m in master_ttfs]
model, allData = masterModel.getSubModel(allData)
allCoords = [d[0] for d in allData]
allControls = [d[1] for d in allData]
control = allControls[0]
if (any(c != control for c in allControls)):
if not models.allEqual(allControls):
log.warning("glyph %s has incompatible masters; skipping" % glyph)
continue
del allControls
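
getSubModel() is what makes this loop tolerate sparse masters: entries that are None are dropped and a cached sub-model covering only the remaining masters is returned, so deltas are computed against the masters that actually have data. A minimal sketch of that call with hypothetical locations and values:

from fontTools.varLib.models import VariationModel

masterModel = VariationModel([{}, {'wght': 0.5}, {'wght': 1.0}])
allData = [10, None, 30]                  # middle master has no data for this glyph
model, allData = masterModel.getSubModel(allData)
assert allData == [10, 30]                # None entries dropped
deltas = model.getDeltas(allData)         # computed against the reduced model
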
@@ -344,7 +346,7 @@ def _remove_TTHinting(font):
font["glyf"].removeHinting()
# TODO: Modify gasp table to deactivate gridfitting for all ranges?
def _merge_TTHinting(font, model, master_ttfs, tolerance=0.5):
def _merge_TTHinting(font, masterModel, master_ttfs, tolerance=0.5):
log.info("Merging TT hinting")
assert "cvar" not in font
@@ -372,7 +374,7 @@ def _merge_TTHinting(font, model, master_ttfs, tolerance=0.5):
all_pgms = [
m["glyf"][name].program
for m in master_ttfs
if hasattr(m["glyf"][name], "program")
if name in m['glyf'] and hasattr(m["glyf"][name], "program")
]
if not any(all_pgms):
continue
@@ -383,24 +385,21 @@ def _merge_TTHinting(font, model, master_ttfs, tolerance=0.5):
font_pgm = Program()
if any(pgm != font_pgm for pgm in all_pgms if pgm):
log.warning("Masters have incompatible glyph programs in glyph '%s', hinting is discarded." % name)
# TODO Only drop hinting from this glyph.
_remove_TTHinting(font)
return
# cvt table
all_cvs = [Vector(m["cvt "].values) for m in master_ttfs if "cvt " in m]
all_cvs = [Vector(m["cvt "].values) if 'cvt ' in m else None
for m in master_ttfs]
if len(all_cvs) == 0:
nonNone_cvs = models.nonNone(all_cvs)
if not nonNone_cvs:
# There is no cvt table to make a cvar table from, we're done here.
return
if len(all_cvs) != len(master_ttfs):
log.warning("Some masters have no cvt table, hinting is discarded.")
_remove_TTHinting(font)
return
num_cvt0 = len(all_cvs[0])
if (any(len(c) != num_cvt0 for c in all_cvs)):
if not models.allEqual(len(c) for c in nonNone_cvs):
log.warning("Masters have incompatible cvt tables, hinting is discarded.")
_remove_TTHinting(font)
return
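
A compressed sketch of the sparse cvt handling above, with hypothetical masters: a master that ships no 'cvt ' table contributes None and is simply skipped, but the tables that are present must still agree in length.

from fontTools.varLib import models

all_cvs = [[10, 20], None, [12, 24]]                  # second master has no 'cvt ' table
nonNone_cvs = models.nonNone(all_cvs)                 # [[10, 20], [12, 24]]
assert models.allEqual(len(c) for c in nonNone_cvs)   # present tables must be the same length
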
@@ -411,8 +410,7 @@ def _merge_TTHinting(font, model, master_ttfs, tolerance=0.5):
cvar.version = 1
cvar.variations = []
deltas = model.getDeltas(all_cvs)
supports = model.supports
deltas, supports = masterModel.getDeltasAndSupports(all_cvs)
for i,(delta,support) in enumerate(zip(deltas[1:], supports[1:])):
delta = [otRound(d) for d in delta]
if all(abs(v) <= tolerance for v in delta):
@@ -420,54 +418,59 @@ def _merge_TTHinting(font, model, master_ttfs, tolerance=0.5):
var = TupleVariation(support, delta)
cvar.variations.append(var)
def _add_HVAR(font, model, master_ttfs, axisTags):
def _add_HVAR(font, masterModel, master_ttfs, axisTags):
log.info("Generating HVAR")
hAdvanceDeltas = {}
glyphOrder = font.getGlyphOrder()
hAdvanceDeltasAndSupports = {}
metricses = [m["hmtx"].metrics for m in master_ttfs]
for glyph in font.getGlyphOrder():
hAdvances = [metrics[glyph][0] for metrics in metricses]
# TODO move round somewhere else?
hAdvanceDeltas[glyph] = tuple(otRound(d) for d in model.getDeltas(hAdvances)[1:])
for glyph in glyphOrder:
hAdvances = [metrics[glyph][0] if glyph in metrics else None for metrics in metricses]
hAdvanceDeltasAndSupports[glyph] = masterModel.getDeltasAndSupports(hAdvances)
# Direct mapping
supports = model.supports[1:]
varTupleList = builder.buildVarRegionList(supports, axisTags)
varTupleIndexes = list(range(len(supports)))
n = len(supports)
items = []
for glyphName in font.getGlyphOrder():
items.append(hAdvanceDeltas[glyphName])
singleModel = models.allEqual(id(v[1]) for v in hAdvanceDeltasAndSupports.values())
# Build indirect mapping to save on duplicates, compare both sizes
uniq = list(set(items))
mapper = {v:i for i,v in enumerate(uniq)}
mapping = [mapper[item] for item in items]
advanceMapping = builder.buildVarIdxMap(mapping, font.getGlyphOrder())
directStore = None
if singleModel:
# Build direct mapping
# Direct
varData = builder.buildVarData(varTupleIndexes, items)
directStore = builder.buildVarStore(varTupleList, [varData])
supports = next(iter(hAdvanceDeltasAndSupports.values()))[1][1:]
varTupleList = builder.buildVarRegionList(supports, axisTags)
varTupleIndexes = list(range(len(supports)))
varData = builder.buildVarData(varTupleIndexes, [], optimize=False)
for glyphName in glyphOrder:
varData.addItem(hAdvanceDeltasAndSupports[glyphName][0])
varData.optimize()
directStore = builder.buildVarStore(varTupleList, [varData])
# Indirect
varData = builder.buildVarData(varTupleIndexes, uniq)
indirectStore = builder.buildVarStore(varTupleList, [varData])
mapping = indirectStore.optimize()
advanceMapping.mapping = {k:mapping[v] for k,v in advanceMapping.mapping.items()}
# Build optimized indirect mapping
storeBuilder = varStore.OnlineVarStoreBuilder(axisTags)
mapping = {}
for glyphName in glyphOrder:
deltas,supports = hAdvanceDeltasAndSupports[glyphName]
storeBuilder.setSupports(supports)
mapping[glyphName] = storeBuilder.storeDeltas(deltas)
indirectStore = storeBuilder.finish()
mapping2 = indirectStore.optimize()
mapping = [mapping2[mapping[g]] for g in glyphOrder]
advanceMapping = builder.buildVarIdxMap(mapping, glyphOrder)
# Compile both, see which is more compact
use_direct = False
if directStore:
# Compile both, see which is more compact
writer = OTTableWriter()
directStore.compile(writer, font)
directSize = len(writer.getAllData())
writer = OTTableWriter()
directStore.compile(writer, font)
directSize = len(writer.getAllData())
writer = OTTableWriter()
indirectStore.compile(writer, font)
advanceMapping.compile(writer, font)
indirectSize = len(writer.getAllData())
writer = OTTableWriter()
indirectStore.compile(writer, font)
advanceMapping.compile(writer, font)
indirectSize = len(writer.getAllData())
use_direct = directSize < indirectSize
use_direct = directSize < indirectSize
# Done; put it all together.
assert "HVAR" not in font
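
The singleModel check above relies on getSubModel() caching: glyphs with the same pattern of missing masters get back the very same supports list object, so comparing id()s is enough to tell whether every glyph resolved to one and the same model. Only in that case is a direct store also built, and both candidates are compiled to pick the smaller. A sketch of that size comparison, using a hypothetical helper not present in the diff:

from fontTools.ttLib.tables.otBase import OTTableWriter

def compiledSize(font, *tables):
    # Compile the given (already built) tables into one writer and measure the bytes.
    writer = OTTableWriter()
    for table in tables:
        table.compile(writer, font)
    return len(writer.getAllData())

# use_direct = directStore is not None and \
#     compiledSize(font, directStore) < compiledSize(font, indirectStore, advanceMapping)
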
@@ -482,12 +485,11 @@ def _add_HVAR(font, model, master_ttfs, axisTags):
hvar.VarStore = indirectStore
hvar.AdvWidthMap = advanceMapping
def _add_MVAR(font, model, master_ttfs, axisTags):
def _add_MVAR(font, masterModel, master_ttfs, axisTags):
log.info("Generating MVAR")
store_builder = varStore.OnlineVarStoreBuilder(axisTags)
store_builder.setModel(model)
records = []
lastTableTag = None
@@ -497,17 +499,20 @@ def _add_MVAR(font, model, master_ttfs, axisTags):
if tableTag != lastTableTag:
tables = fontTable = None
if tableTag in font:
# TODO Check all masters have same table set?
fontTable = font[tableTag]
tables = [master[tableTag] for master in master_ttfs]
tables = [master[tableTag] if tableTag in master else None
for master in master_ttfs]
lastTableTag = tableTag
if tables is None:
continue
# TODO support gasp entries
model, tables = masterModel.getSubModel(tables)
store_builder.setModel(model)
master_values = [getattr(table, itemName) for table in tables]
if _all_equal(master_values):
if models.allEqual(master_values):
base, varIdx = master_values[0], None
else:
base, varIdx = store_builder.storeMasters(master_values)
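
Per MVAR value record, masters that lack the source table now contribute None; the model is reduced accordingly, and if all remaining masters agree no variation record is stored. A runnable sketch with hypothetical locations and stand-in 'post' tables:

from types import SimpleNamespace
from fontTools.varLib import models
from fontTools.varLib.models import VariationModel
from fontTools.varLib.varStore import OnlineVarStoreBuilder

masterModel = VariationModel([{}, {'wght': 1.0}, {'wdth': 1.0}])
store_builder = OnlineVarStoreBuilder(['wght', 'wdth'])

# The wdth master ships no 'post' table at all.
tables = [SimpleNamespace(underlinePosition=-100),
          SimpleNamespace(underlinePosition=-120),
          None]
model, tables = masterModel.getSubModel(tables)
store_builder.setModel(model)
master_values = [t.underlinePosition for t in tables]
if models.allEqual(master_values):
    base, varIdx = master_values[0], None            # constant: no VarIdx needed
else:
    base, varIdx = store_builder.storeMasters(master_values)
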
@@ -545,9 +550,7 @@ def _merge_OTL(font, model, master_fonts, axisTags):
log.info("Merging OpenType Layout tables")
merger = VariationMerger(model, axisTags, font)
merger.mergeTables(font, master_fonts, ['GPOS'])
# TODO Merge GSUB
# TODO Merge GDEF itself!
merger.mergeTables(font, master_fonts, ['GSUB', 'GDEF', 'GPOS'])
store = merger.store_builder.finish()
if not store.VarData:
return

View File

@@ -39,7 +39,7 @@ def _reorderItem(lst, narrows, zeroes):
out.append(lst[i])
return out
def VarData_CalculateNumShorts(self, optimize=True):
def VarData_calculateNumShorts(self, optimize=False):
count = self.VarRegionCount
items = self.Item
narrows = set(range(count))
@@ -55,14 +55,29 @@ def VarData_CalculateNumShorts(self, optimize=True):
# Reorder columns such that all SHORT columns come before UINT8
self.VarRegionIndex = _reorderItem(self.VarRegionIndex, narrows, zeroes)
self.VarRegionCount = len(self.VarRegionIndex)
for i in range(self.ItemCount):
for i in range(len(items)):
items[i] = _reorderItem(items[i], narrows, zeroes)
self.NumShorts = count - len(narrows)
else:
wides = set(range(count)) - narrows
self.NumShorts = 1+max(wides) if wides else 0
self.VarRegionCount = len(self.VarRegionIndex)
return self
ot.VarData.calculateNumShorts = VarData_calculateNumShorts
def VarData_CalculateNumShorts(self, optimize=True):
"""Deprecated name for VarData_calculateNumShorts() which
defaults to optimize=True. Use varData.calculateNumShorts()
or varData.optimize()."""
return VarData_calculateNumShorts(self, optimize=optimize)
def VarData_optimize(self):
return VarData_calculateNumShorts(self, optimize=True)
ot.VarData.optimize = VarData_optimize
def buildVarData(varRegionIndices, items, optimize=True):
self = ot.VarData()
self.VarRegionIndex = list(varRegionIndices)
@@ -73,7 +88,7 @@ def buildVarData(varRegionIndices, items, optimize=True):
assert len(item) == regionCount
records.append(list(item))
self.ItemCount = len(self.Item)
VarData_CalculateNumShorts(self, optimize=optimize)
self.calculateNumShorts(optimize=optimize)
return self
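
VarData_CalculateNumShorts() (capital C, optimize=True) is kept only as a deprecated alias; new code goes through the methods installed on ot.VarData. A short usage sketch with hypothetical region indices and deltas:

from fontTools.varLib.builder import buildVarData

data = buildVarData([0, 1], [[100, -20], [5, 0]], optimize=False)
data.calculateNumShorts()   # new default: recompute NumShorts, keep column order
data.optimize()             # calculateNumShorts(optimize=True): may drop zero columns and reorder
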

View File

@@ -53,6 +53,7 @@ def interpolate_layout(designspace_filename, loc, master_finder=lambda s:s, mapp
merger = InstancerMerger(font, model, loc)
log.info("Building interpolated tables")
# TODO GSUB/GDEF
merger.mergeTables(font, master_fonts, ['GPOS'])
return font

View File

@@ -8,7 +8,8 @@ from fontTools.misc import classifyTools
from fontTools.ttLib.tables import otTables as ot
from fontTools.ttLib.tables import otBase as otBase
from fontTools.ttLib.tables.DefaultTable import DefaultTable
from fontTools.varLib import builder, varStore
from fontTools.varLib import builder, models, varStore
from fontTools.varLib.models import nonNone, allNone, allEqual, allEqualTo
from fontTools.varLib.varStore import VarStoreInstancer
from functools import reduce
@@ -75,8 +76,7 @@ class Merger(object):
raise
def mergeLists(self, out, lst):
count = len(out)
assert all(count == len(v) for v in lst), (count, [len(v) for v in lst])
assert allEqualTo(out, lst, len), (len(out), [len(v) for v in lst])
for i,(value,values) in enumerate(zip(out, zip(*lst))):
try:
self.mergeThings(value, values)
@@ -85,9 +85,8 @@ class Merger(object):
raise
def mergeThings(self, out, lst):
clazz = type(out)
try:
assert all(type(item) == clazz for item in lst), (out, lst)
assert allEqualTo(out, lst, type), (out, lst)
mergerFunc = self.mergersFor(out).get(None, None)
if mergerFunc is not None:
mergerFunc(self, out, lst)
@@ -96,16 +95,17 @@ class Merger(object):
elif isinstance(out, list):
self.mergeLists(out, lst)
else:
assert all(out == v for v in lst), (out, lst)
assert allEqualTo(out, lst), (out, lst)
except Exception as e:
e.args = e.args + (clazz.__name__,)
e.args = e.args + (type(out).__name__,)
raise
def mergeTables(self, font, master_ttfs, tables):
def mergeTables(self, font, master_ttfs, tableTags):
for tag in tables:
for tag in tableTags:
if tag not in font: continue
self.mergeThings(font[tag], [m[tag] for m in master_ttfs])
self.mergeThings(font[tag], [m[tag] if tag in m else None
for m in master_ttfs])
#
# Aligning merger
@@ -113,6 +113,23 @@ class Merger(object):
class AligningMerger(Merger):
pass
@AligningMerger.merger(ot.GDEF, "GlyphClassDef")
def merge(merger, self, lst):
self.classDefs = {}
# We only care about the .classDefs
self = self.classDefs
lst = [l.classDefs for l in lst]
allKeys = set()
allKeys.update(*[l.keys() for l in lst])
for k in allKeys:
allValues = nonNone(l.get(k) for l in lst)
assert allEqual(allValues), allValues
if not allValues:
self[k] = None
else:
self[k] = allValues[0]
def _SinglePosUpgradeToFormat2(self):
if self.Format == 2: return self
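
The new GlyphClassDef merger tolerates glyphs that some masters never classify, but still requires that wherever a glyph does get a class, the masters agree. A small worked example with hypothetical class assignments:

from fontTools.varLib.models import nonNone, allEqual

lst = [{'a': 1, 'b': 3}, {'a': 1}, {'a': 1, 'b': 3, 'c': 2}]   # per-master classDefs
merged = {}
for k in set().union(*(d.keys() for d in lst)):
    values = nonNone(d.get(k) for d in lst)
    assert allEqual(values), values     # absent is fine, a conflicting class is not
    merged[k] = values[0] if values else None
assert merged == {'a': 1, 'b': 3, 'c': 2}
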
@@ -201,7 +218,8 @@ def merge(merger, self, lst):
assert len(lst) == 1 or (valueFormat & ~0xF == 0), valueFormat
# If all have same coverage table and all are format 1,
if all(v.Format == 1 for v in lst) and all(self.Coverage.glyphs == v.Coverage.glyphs for v in lst):
coverageGlyphs = self.Coverage.glyphs
if all(v.Format == 1 for v in lst) and all(coverageGlyphs == v.Coverage.glyphs for v in lst):
self.Value = otBase.ValueRecord(valueFormat)
merger.mergeThings(self.Value, [v.Value for v in lst])
self.ValueFormat = self.Value.getFormat()
@@ -276,7 +294,7 @@ def merge(merger, self, lst):
merger.mergeLists(self.PairValueRecord, padded)
def _PairPosFormat1_merge(self, lst, merger):
assert _all_equal([l.ValueFormat2 == 0 for l in lst if l.PairSet]), "Report bug against fonttools."
assert allEqual([l.ValueFormat2 == 0 for l in lst if l.PairSet]), "Report bug against fonttools."
# Merge everything else; makes sure Format is the same.
merger.mergeObjects(self, lst,
@@ -353,6 +371,9 @@ def _ClassDef_merge_classify(lst, allGlyphs=None):
return self, classes
# It's stupid that we need to do this here. Just need to, to match test
# expectation results, since ttx prints out format of ClassDef (and Coverage)
# even though it should not.
def _ClassDef_calculate_Format(self, font):
fmt = 2
ranges = self._getClassRanges(font)
@@ -436,7 +457,7 @@ def _PairPosFormat2_align_matrices(self, lst, font, transparent=False):
return matrices
def _PairPosFormat2_merge(self, lst, merger):
assert _all_equal([l.ValueFormat2 == 0 for l in lst if l.Class1Record]), "Report bug against fonttools."
assert allEqual([l.ValueFormat2 == 0 for l in lst if l.Class1Record]), "Report bug against fonttools."
merger.mergeObjects(self, lst,
exclude=('Coverage',
@@ -505,6 +526,105 @@ def merge(merger, self, lst):
self.ValueFormat1 = vf1
self.ValueFormat2 = vf2
def _MarkBasePosFormat1_merge(self, lst, merger, Mark='Mark', Base='Base'):
self.ClassCount = max(l.ClassCount for l in lst)
MarkCoverageGlyphs, MarkRecords = \
_merge_GlyphOrders(merger.font,
[getattr(l, Mark+'Coverage').glyphs for l in lst],
[getattr(l, Mark+'Array').MarkRecord for l in lst])
getattr(self, Mark+'Coverage').glyphs = MarkCoverageGlyphs
BaseCoverageGlyphs, BaseRecords = \
_merge_GlyphOrders(merger.font,
[getattr(l, Base+'Coverage').glyphs for l in lst],
[getattr(getattr(l, Base+'Array'), Base+'Record') for l in lst])
getattr(self, Base+'Coverage').glyphs = BaseCoverageGlyphs
# MarkArray
records = []
for g,glyphRecords in zip(MarkCoverageGlyphs, zip(*MarkRecords)):
allClasses = [r.Class for r in glyphRecords if r is not None]
# TODO Right now we require that all marks have same class in
# all masters that cover them. This is not required.
#
# We can relax that by just requiring that all marks that have
# the same class in a master, have the same class in every other
# master. Indeed, if, say, a sparse master only covers one mark,
# that mark probably will get class 0, which would possibly be
# different from its class in other masters.
#
# We can even go further and reclassify marks to support any
# input. But since it's unlikely that two marks would both be,
# say, "top" in one master while one is "top" and the other is
# "top-right" in another master, we shouldn't do that, as any
# failures in that case will probably signify mistakes in the
# input masters.
assert allEqual(allClasses), allClasses
if not allClasses:
rec = None
else:
rec = ot.MarkRecord()
rec.Class = allClasses[0]
allAnchors = [None if r is None else r.MarkAnchor for r in glyphRecords]
if allNone(allAnchors):
anchor = None
else:
anchor = ot.Anchor()
anchor.Format = 1
merger.mergeThings(anchor, allAnchors)
rec.MarkAnchor = anchor
records.append(rec)
array = ot.MarkArray()
array.MarkRecord = records
array.MarkCount = len(records)
setattr(self, Mark+"Array", array)
# BaseArray
records = []
for g,glyphRecords in zip(BaseCoverageGlyphs, zip(*BaseRecords)):
if allNone(glyphRecords):
rec = None
else:
rec = getattr(ot, Base+'Record')()
anchors = []
setattr(rec, Base+'Anchor', anchors)
glyphAnchors = [[] if r is None else getattr(r, Base+'Anchor')
for r in glyphRecords]
for l in glyphAnchors:
l.extend([None] * (self.ClassCount - len(l)))
for allAnchors in zip(*glyphAnchors):
if allNone(allAnchors):
anchor = None
else:
anchor = ot.Anchor()
anchor.Format = 1
merger.mergeThings(anchor, allAnchors)
anchors.append(anchor)
records.append(rec)
array = getattr(ot, Base+'Array')()
setattr(array, Base+'Record', records)
setattr(array, Base+'Count', len(records))
setattr(self, Base+'Array', array)
@AligningMerger.merger(ot.MarkBasePos)
def merge(merger, self, lst):
assert allEqualTo(self.Format, (l.Format for l in lst))
if self.Format == 1:
_MarkBasePosFormat1_merge(self, lst, merger)
else:
assert False
@AligningMerger.merger(ot.MarkMarkPos)
def merge(merger, self, lst):
assert allEqualTo(self.Format, (l.Format for l in lst))
if self.Format == 1:
_MarkBasePosFormat1_merge(self, lst, merger, 'Mark1', 'Mark2')
else:
assert False
def _PairSet_flatten(lst, font):
self = ot.PairSet()
@@ -530,7 +650,7 @@ def _PairSet_flatten(lst, font):
return self
def _Lookup_PairPosFormat1_subtables_flatten(lst, font):
assert _all_equal([l.ValueFormat2 == 0 for l in lst if l.PairSet]), "Report bug against fonttools."
assert allEqual([l.ValueFormat2 == 0 for l in lst if l.PairSet]), "Report bug against fonttools."
self = ot.PairPos()
self.Format = 1
@@ -551,7 +671,7 @@ def _Lookup_PairPosFormat1_subtables_flatten(lst, font):
return self
def _Lookup_PairPosFormat2_subtables_flatten(lst, font):
assert _all_equal([l.ValueFormat2 == 0 for l in lst if l.Class1Record]), "Report bug against fonttools."
assert allEqual([l.ValueFormat2 == 0 for l in lst if l.Class1Record]), "Report bug against fonttools."
self = ot.PairPos()
self.Format = 2
@@ -608,8 +728,8 @@ def merge(merger, self, lst):
if not sts:
continue
if sts[0].__class__.__name__.startswith('Extension'):
assert _all_equal([st.__class__ for st in sts])
assert _all_equal([st.ExtensionLookupType for st in sts])
assert allEqual([st.__class__ for st in sts])
assert allEqual([st.ExtensionLookupType for st in sts])
l.LookupType = sts[0].ExtensionLookupType
new_sts = [st.ExtSubTable for st in sts]
del sts[:]
@@ -821,22 +941,31 @@ class VariationMerger(AligningMerger):
def __init__(self, model, axisTags, font):
Merger.__init__(self, font)
self.model = model
self.store_builder = varStore.OnlineVarStoreBuilder(axisTags)
self.setModel(model)
def setModel(self, model):
self.model = model
self.store_builder.setModel(model)
def _all_equal(lst):
if not lst:
return True
it = iter(lst)
v0 = next(it)
for v in it:
if v0 != v:
return False
return True
def mergeThings(self, out, lst):
masterModel = None
if None in lst:
if allNone(lst):
assert out is None, (out, lst)
return
masterModel = self.model
model, lst = masterModel.getSubModel(lst)
self.setModel(model)
super(VariationMerger, self).mergeThings(out, lst)
if masterModel:
self.setModel(masterModel)
def buildVarDevTable(store_builder, master_values):
if _all_equal(master_values):
if allEqual(master_values):
return master_values[0], None
base, varIdx = store_builder.storeMasters(master_values)
return base, builder.buildVarDevTable(varIdx)
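
mergeThings() now accepts None entries from masters that lack a given table, lookup, or record: if every master is missing it, the output must be None too; otherwise the merger temporarily swaps in the sub-model for the masters that are present (so the store builder uses the right supports) and restores the full model afterwards. A runnable sketch of that reduce-then-restore step with hypothetical locations and values:

from fontTools.varLib.models import VariationModel, allNone
from fontTools.varLib.varStore import OnlineVarStoreBuilder

masterModel = VariationModel([{}, {'wght': 0.5}, {'wght': 1.0}])
store_builder = OnlineVarStoreBuilder(['wght'])

lst = [0, None, 15]                          # middle master has nothing to contribute
assert not allNone(lst)
model, lst = masterModel.getSubModel(lst)    # what mergeThings swaps in temporarily
store_builder.setModel(model)
base, varIdx = store_builder.storeMasters(lst)
store_builder.setModel(masterModel)          # restored afterwards, as mergeThings does
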

View File

@@ -2,9 +2,36 @@
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
__all__ = ['normalizeValue', 'normalizeLocation', 'supportScalar', 'VariationModel']
__all__ = ['nonNone', 'allNone', 'allEqual', 'allEqualTo', 'subList',
'normalizeValue', 'normalizeLocation',
'supportScalar',
'VariationModel']
def nonNone(lst):
return [l for l in lst if l is not None]
def allNone(lst):
return all(l is None for l in lst)
def allEqualTo(ref, lst, mapper=None):
if mapper is None:
return all(ref == item for item in lst)
else:
mapped = mapper(ref)
return all(mapped == mapper(item) for item in lst)
def allEqual(lst, mapper=None):
if not lst:
return True
it = iter(lst)
first = next(it)
return allEqualTo(first, it, mapper=mapper)
def subList(truth, lst):
assert len(truth) == len(lst)
return [l for l,t in zip(lst,truth) if t]
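
For reference, the new helpers behave as follows (illustrative values):

from fontTools.varLib.models import nonNone, allNone, allEqual, allEqualTo, subList

assert nonNone([None, 3, None, 4]) == [3, 4]
assert allNone([None, None]) and not allNone([None, 1])
assert allEqual([2, 2, 2]) and allEqual([])               # empty counts as all equal
assert allEqual(['ab', 'xy'], mapper=len)                 # compare after mapping
assert allEqualTo(5, [5, 5]) and not allEqualTo(5, [5, 6])
assert subList([True, False, True], ['a', 'b', 'c']) == ['a', 'c']
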
def normalizeValue(v, triple):
"""Normalizes value based on a min/default/max triple.
>>> normalizeValue(400, (100, 400, 900))
@@ -163,6 +190,9 @@ class VariationModel(object):
"""
def __init__(self, locations, axisOrder=[]):
self.origLocations = locations
self.axisOrder = axisOrder
locations = [{k:v for k,v in loc.items() if v != 0.} for loc in locations]
keyFunc = self.getMasterLocationsSortKeyFunc(locations, axisOrder=axisOrder)
axisPoints = keyFunc.axisPoints
@@ -172,6 +202,17 @@ class VariationModel(object):
self.reverseMapping = [locations.index(l) for l in self.locations] # Reverse of above
self._computeMasterSupports(axisPoints, axisOrder)
self._subModels = {}
def getSubModel(self, items):
if None not in items:
return self, items
key = tuple(v is not None for v in items)
subModel = self._subModels.get(key)
if subModel is None:
subModel = VariationModel(subList(key, self.origLocations), self.axisOrder)
self._subModels[key] = subModel
return subModel, subList(key, items)
@staticmethod
def getMasterLocationsSortKeyFunc(locations, axisOrder=[]):
@@ -314,6 +355,10 @@ class VariationModel(object):
out.append(delta)
return out
def getDeltasAndSupports(self, items):
model, items = self.getSubModel(items)
return model.getDeltas(items), model.supports
def getScalars(self, loc):
return [supportScalar(loc, support) for support in self.supports]
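
These two methods are the heart of the sparse-master support: callers pass per-master values with None wherever a master has nothing to contribute, and get back deltas and supports computed over a cached sub-model. A small sketch with hypothetical locations and values:

from fontTools.varLib.models import VariationModel

model = VariationModel([{}, {'wght': 0.5}, {'wght': 1.0}])
items = [100, None, 140]                  # middle master omits this value

subModel, subItems = model.getSubModel(items)
assert subItems == [100, 140]             # sub-models are cached per None-pattern

deltas, supports = model.getDeltasAndSupports(items)
assert deltas[0] == 100                   # default master value first
assert supports[0] == {}                  # one support per returned delta
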

View File

@@ -4,8 +4,7 @@ from fontTools.misc.fixedTools import otRound
from fontTools.ttLib.tables import otTables as ot
from fontTools.varLib.models import supportScalar
from fontTools.varLib.builder import (buildVarRegionList, buildVarStore,
buildVarRegion, buildVarData,
VarData_CalculateNumShorts)
buildVarRegion, buildVarData)
from functools import partial
from collections import defaultdict
from array import array
@@ -24,25 +23,36 @@ class OnlineVarStoreBuilder(object):
self._store = buildVarStore(self._regionList, [])
self._data = None
self._model = None
self._supports = None
self._varDataIndices = {}
self._varDataCaches = {}
self._cache = {}
def setModel(self, model):
self.setSupports(model.supports)
self._model = model
self._cache = {} # Empty cached items
def setSupports(self, supports):
self._model = None
self._supports = list(supports)
if not self._supports[0]:
del self._supports[0] # Drop base master support
self._cache = {}
self._data = None
def finish(self, optimize=True):
self._regionList.RegionCount = len(self._regionList.Region)
self._store.VarDataCount = len(self._store.VarData)
for data in self._store.VarData:
data.ItemCount = len(data.Item)
VarData_CalculateNumShorts(data, optimize)
data.calculateNumShorts(optimize=optimize)
return self._store
def _add_VarData(self):
regionMap = self._regionMap
regionList = self._regionList
regions = self._model.supports[1:]
regions = self._supports
regionIndices = []
for region in regions:
key = _getLocationKey(region)
@@ -53,17 +63,46 @@ class OnlineVarStoreBuilder(object):
regionList.Region.append(varRegion)
regionIndices.append(idx)
data = self._data = buildVarData(regionIndices, [], optimize=False)
self._outer = len(self._store.VarData)
self._store.VarData.append(data)
# Check if we have one already...
key = tuple(regionIndices)
varDataIdx = self._varDataIndices.get(key)
if varDataIdx is not None:
self._outer = varDataIdx
self._data = self._store.VarData[varDataIdx]
self._cache = self._varDataCaches[key]
if len(self._data.Item) == 0xFFF:
# This is full. Need new one.
varDataIdx = None
if varDataIdx is None:
self._data = buildVarData(regionIndices, [], optimize=False)
self._outer = len(self._store.VarData)
self._store.VarData.append(self._data)
self._varDataIndices[key] = self._outer
if key not in self._varDataCaches:
self._varDataCaches[key] = {}
self._cache = self._varDataCaches[key]
def storeMasters(self, master_values):
deltas = [otRound(d) for d in self._model.getDeltas(master_values)]
base = deltas.pop(0)
deltas = tuple(deltas)
deltas = self._model.getDeltas(master_values)
base = otRound(deltas.pop(0))
return base, self.storeDeltas(deltas)
def storeDeltas(self, deltas):
# Pity that this exists here, since VarData_addItem
# does the same. But to look into our cache, it's
# good to adjust deltas here as well...
deltas = [otRound(d) for d in deltas]
if len(deltas) == len(self._supports) + 1:
deltas = tuple(deltas[1:])
else:
assert len(deltas) == len(self._supports)
deltas = tuple(deltas)
varIdx = self._cache.get(deltas)
if varIdx is not None:
return base, varIdx
return varIdx
if not self._data:
self._add_VarData()
@@ -71,18 +110,34 @@ class OnlineVarStoreBuilder(object):
if inner == 0xFFFF:
# Full array. Start new one.
self._add_VarData()
return self.storeMasters(master_values)
self._data.Item.append(deltas)
return self.storeDeltas(deltas)
self._data.addItem(deltas)
varIdx = (self._outer << 16) + inner
self._cache[deltas] = varIdx
return base, varIdx
return varIdx
def VarData_addItem(self, deltas):
deltas = [otRound(d) for d in deltas]
countUs = self.VarRegionCount
countThem = len(deltas)
if countUs + 1 == countThem:
deltas = tuple(deltas[1:])
else:
assert countUs == countThem, (countUs, countThem)
deltas = tuple(deltas)
self.Item.append(list(deltas))
self.ItemCount = len(self.Item)
ot.VarData.addItem = VarData_addItem
def VarRegion_get_support(self, fvar_axes):
return {fvar_axes[i].axisTag: (reg.StartCoord,reg.PeakCoord,reg.EndCoord)
for i,reg in enumerate(self.VarRegionAxis)}
ot.VarRegion.get_support = VarRegion_get_support
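
With setSupports() the builder can be driven from precomputed deltas, support set by support set (as the reworked _add_HVAR does), and VarData subtables are now keyed by their region-index tuple, so switching supports back and forth reuses an existing subtable instead of growing a new one. A minimal sketch with a hypothetical axis and deltas:

from fontTools.varLib.varStore import OnlineVarStoreBuilder

builder = OnlineVarStoreBuilder(['wght'])
builder.setSupports([{}, {'wght': (0, 1.0, 1.0)}])   # base support is dropped internally
varIdx = builder.storeDeltas([100, 40])              # leading default delta is dropped; 40 is stored
store = builder.finish()                             # VarStore with one region and one item
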
class VarStoreInstancer(object):
def __init__(self, varstore, fvar_axes, location={}):
@@ -102,7 +157,7 @@ class VarStoreInstancer(object):
def _getScalar(self, regionIdx):
scalar = self._scalars.get(regionIdx)
if scalar is None:
support = VarRegion_get_support(self._regions[regionIdx], self.fvar_axes)
support = self._regions[regionIdx].get_support(self.fvar_axes)
scalar = supportScalar(self.location, support)
self._scalars[regionIdx] = scalar
return scalar
@@ -162,8 +217,7 @@ def VarStore_subset_varidxes(self, varIdxes, optimize=True):
data.Item = newItems
data.ItemCount = len(data.Item)
if optimize:
VarData_CalculateNumShorts(data)
data.calculateNumShorts(optimize=optimize)
self.VarData = newVarData
self.VarDataCount = len(self.VarData)
@@ -213,7 +267,7 @@ def _visit(self, objType, func):
for that in self:
_visit(that, objType, func)
if hasattr(self, 'getConverters'):
if hasattr(self, 'getConverters') and not hasattr(self, 'postRead'):
for conv in self.getConverters():
that = getattr(self, conv.name, None)
if that is not None:
@@ -464,7 +518,7 @@ def VarStore_optimize(self):
self.VarDataCount = len(self.VarData)
for data in self.VarData:
data.ItemCount = len(data.Item)
VarData_CalculateNumShorts(data)
data.optimize()
return varidx_map