2019-03-06 17:43:28 -08:00
|
|
|
""" Partially instantiate a variable font.
|
|
|
|
|
2019-06-14 11:24:33 +01:00
|
|
|
The module exports an `instantiateVariableFont` function and CLI that allow you to
|
|
|
|
create full instances (i.e. static fonts) from variable fonts, as well as "partial"
|
|
|
|
variable fonts that only contain a subset of the original variation space.
|
|
|
|
|
2019-10-28 17:39:15 +00:00
|
|
|
For example, if you wish to pin the width axis to a given location while also
|
2021-12-02 15:31:49 +00:00
|
|
|
restricting the weight axis to 400..700 range, you can do::
|
2019-03-06 17:43:28 -08:00
|
|
|
|
2021-12-02 15:31:49 +00:00
|
|
|
$ fonttools varLib.instancer ./NotoSans-VF.ttf wdth=85 wght=400:700
|
2019-03-06 17:43:28 -08:00
|
|
|
|
2019-05-31 11:52:39 +01:00
|
|
|
See `fonttools varLib.instancer --help` for more info on the CLI options.
|
|
|
|
|
|
|
|
The module's entry point is the `instantiateVariableFont` function, which takes
|
2019-10-28 17:39:15 +00:00
|
|
|
a TTFont object and a dict specifying either axis coordinates or (min, max) ranges,
|
|
|
|
and returns a new TTFont representing either a partial VF, or full instance if all
|
|
|
|
the VF axes were given an explicit coordinate.
|
2019-05-31 11:52:39 +01:00
|
|
|
|
|
|
|
E.g. here's how to pin the wght axis at a given location in a wght+wdth variable
|
2021-12-02 15:31:49 +00:00
|
|
|
font, keeping only the deltas associated with the wdth axis::
|
2019-05-31 11:52:39 +01:00
|
|
|
|
|
|
|
| >>> from fontTools import ttLib
|
|
|
|
| >>> from fontTools.varLib import instancer
|
|
|
|
| >>> varfont = ttLib.TTFont("path/to/MyVariableFont.ttf")
|
2020-05-02 11:00:29 -04:00
|
|
|
| >>> [a.axisTag for a in varfont["fvar"].axes] # the varfont's current axes
|
2019-05-31 11:52:39 +01:00
|
|
|
| ['wght', 'wdth']
|
|
|
|
| >>> partial = instancer.instantiateVariableFont(varfont, {"wght": 300})
|
|
|
|
| >>> [a.axisTag for a in partial["fvar"].axes] # axes left after pinning 'wght'
|
|
|
|
| ['wdth']
|
|
|
|
|
|
|
|
If the input location specifies all the axes, the resulting instance is no longer
|
|
|
|
'variable' (same as using fonttools varLib.mutator):
|
|
|
|
|
|
|
|
| >>> instance = instancer.instantiateVariableFont(
|
|
|
|
| ... varfont, {"wght": 700, "wdth": 67.5}
|
|
|
|
| ... )
|
|
|
|
| >>> "fvar" not in instance
|
|
|
|
| True
|
|
|
|
|
|
|
|
If one just want to drop an axis at the default location, without knowing in
|
|
|
|
advance what the default value for that axis is, one can pass a `None` value:
|
|
|
|
|
|
|
|
| >>> instance = instancer.instantiateVariableFont(varfont, {"wght": None})
|
|
|
|
| >>> len(varfont["fvar"].axes)
|
|
|
|
| 1
|
|
|
|
|
|
|
|
From the console script, this is equivalent to passing `wght=drop` as input.
|
|
|
|
|
2019-06-14 11:24:33 +01:00
|
|
|
This module is similar to fontTools.varLib.mutator, which it's intended to supersede.
|
2019-05-31 11:52:39 +01:00
|
|
|
Note that, unlike varLib.mutator, when an axis is not mentioned in the input
|
|
|
|
location, the varLib.instancer will keep the axis and the corresponding deltas,
|
|
|
|
whereas mutator implicitly drops the axis at its default coordinate.
|
|
|
|
|
2019-10-28 17:39:15 +00:00
|
|
|
The module currently supports only the first three "levels" of partial instancing,
|
2019-05-31 11:52:39 +01:00
|
|
|
with the rest planned to be implemented in the future, namely:
|
2021-12-02 15:31:49 +00:00
|
|
|
|
|
|
|
L1
|
|
|
|
dropping one or more axes while leaving the default tables unmodified;
|
|
|
|
L2
|
|
|
|
dropping one or more axes while pinning them at non-default locations;
|
|
|
|
L3
|
|
|
|
restricting the range of variation of one or more axes, by setting either
|
2019-05-31 11:52:39 +01:00
|
|
|
a new minimum or maximum, potentially -- though not necessarily -- dropping
|
|
|
|
entire regions of variations that fall completely outside this new range.
|
2021-12-02 15:31:49 +00:00
|
|
|
L4
|
|
|
|
moving the default location of an axis.
|
2019-05-31 11:52:39 +01:00
|
|
|
|
|
|
|
Currently only TrueType-flavored variable fonts (i.e. containing 'glyf' table)
|
|
|
|
are supported, but support for CFF2 variable fonts will be added soon.
|
|
|
|
|
|
|
|
The discussion and implementation of these features are tracked at
|
|
|
|
https://github.com/fonttools/fonttools/issues/1537
|
2019-03-06 17:43:28 -08:00
|
|
|
"""
|
2019-09-18 17:00:53 +01:00
|
|
|
from fontTools.misc.fixedTools import (
|
|
|
|
floatToFixedToFloat,
|
|
|
|
strToFixedToFloat,
|
|
|
|
otRound,
|
|
|
|
MAX_F2DOT14,
|
|
|
|
)
|
2019-03-07 19:18:14 -08:00
|
|
|
from fontTools.varLib.models import supportScalar, normalizeValue, piecewiseLinearMap
|
2019-03-06 17:43:28 -08:00
|
|
|
from fontTools.ttLib import TTFont
|
2019-04-16 18:14:05 +01:00
|
|
|
from fontTools.ttLib.tables.TupleVariation import TupleVariation
|
2019-05-08 16:24:17 +01:00
|
|
|
from fontTools.ttLib.tables import _g_l_y_f
|
2019-04-20 12:42:31 +01:00
|
|
|
from fontTools import varLib
|
2019-05-31 12:37:31 +01:00
|
|
|
|
2019-05-29 18:22:30 +01:00
|
|
|
# we import the `subset` module because we use the `prune_lookups` method on the GSUB
|
|
|
|
# table class, and that method is only defined dynamically upon importing `subset`
|
|
|
|
from fontTools import subset # noqa: F401
|
2019-04-16 18:14:05 +01:00
|
|
|
from fontTools.varLib import builder
|
2019-03-19 10:44:39 -04:00
|
|
|
from fontTools.varLib.mvar import MVAR_ENTRIES
|
2019-04-17 19:20:26 +01:00
|
|
|
from fontTools.varLib.merger import MutatorMerger
|
2021-02-15 12:22:48 +00:00
|
|
|
from fontTools.varLib.instancer import names
|
2022-08-19 12:20:21 -06:00
|
|
|
from fontTools.misc.cliTools import makeOutputFileName
|
2022-08-22 15:59:18 -06:00
|
|
|
from fontTools.varLib.instancer import solver
|
2019-04-01 11:03:45 +01:00
|
|
|
import collections
|
2019-03-08 16:24:13 -08:00
|
|
|
from copy import deepcopy
|
2020-09-27 17:48:52 +01:00
|
|
|
from enum import IntEnum
|
2019-03-06 17:43:28 -08:00
|
|
|
import logging
|
2019-03-06 21:54:15 -08:00
|
|
|
import os
|
|
|
|
import re
|
2019-03-06 17:43:28 -08:00
|
|
|
|
|
|
|
|
2019-05-29 19:22:02 +01:00
|
|
|
log = logging.getLogger("fontTools.varLib.instancer")
|
2019-03-06 17:43:28 -08:00
|
|
|
|
2022-08-06 16:51:21 -06:00
|
|
|
def _expand(v):
|
|
|
|
if not isinstance(v, tuple):
|
|
|
|
return (v, v, v)
|
|
|
|
else:
|
|
|
|
if len(v) == 2:
|
|
|
|
return (v[0], None, v[1])
|
|
|
|
return v
|
2019-03-06 17:43:28 -08:00
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
class AxisRange(collections.namedtuple("AxisRange", "minimum maximum")):
    """An immutable (minimum, maximum) axis-coordinate pair.

    Raises ValueError when constructed with minimum > maximum.
    """

    def __new__(cls, *args, **kwargs):
        instance = super().__new__(cls, *args, **kwargs)
        if instance.minimum <= instance.maximum:
            return instance
        raise ValueError(
            f"Range minimum ({instance.minimum:g}) must be <= maximum ({instance.maximum:g})"
        )

    def __repr__(self):
        return f"{type(self).__name__}({self.minimum:g}, {self.maximum:g})"
|
|
|
|
|
|
|
|
|
|
|
|
class NormalizedAxisRange(AxisRange):
    """AxisRange restricted to the normalized -1..+1 coordinate space."""

    def __new__(cls, *args, **kwargs):
        instance = super().__new__(cls, *args, **kwargs)
        if -1.0 <= instance.minimum and instance.maximum <= 1.0:
            return instance
        raise ValueError("Axis range values must be normalized to -1..+1 range")
|
|
|
|
|
|
|
|
|
2022-08-05 19:14:18 -06:00
|
|
|
class AxisTent(collections.namedtuple("AxisTent", "minimum default maximum")):
    """An immutable (minimum, default, maximum) axis 'tent' triple.

    Raises ValueError unless minimum <= default <= maximum.
    """

    def __new__(cls, *args, **kwargs):
        instance = super().__new__(cls, *args, **kwargs)
        if instance.minimum <= instance.default <= instance.maximum:
            return instance
        raise ValueError(
            f"Tent minimum ({instance.minimum:g}) must be <= default ({instance.default:g}) which must be <= maximum ({instance.maximum:g})"
        )

    def __repr__(self):
        return f"{type(self).__name__}({self.minimum:g}, {self.default:g}, {self.maximum:g})"
|
|
|
|
|
|
|
|
|
|
|
|
class NormalizedAxisTent(AxisTent):
    """AxisTent whose values live in the normalized -1..+1 design space."""

    def __new__(cls, *args, **kwargs):
        instance = super().__new__(cls, *args, **kwargs)
        if -1.0 <= instance.minimum and instance.maximum <= 1.0:
            return instance
        raise ValueError("Axis tent values must be normalized to -1..+1 range")
|
|
|
|
|
|
|
|
|
2020-09-29 19:33:26 +01:00
|
|
|
class OverlapMode(IntEnum):
    """Modes for handling glyph overlaps when instancing.

    NOTE(review): member names suggest these select whether overlaps are kept
    (with or without setting overlap flags) or removed; actual consumption of
    these values happens outside this view — confirm against the caller.
    """

    KEEP_AND_DONT_SET_FLAGS = 0
    KEEP_AND_SET_FLAGS = 1
    REMOVE = 2
    REMOVE_AND_IGNORE_ERRORS = 3
|
2020-09-27 17:48:52 +01:00
|
|
|
|
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def instantiateTupleVariationStore(
    variations, axisLimits, origCoords=None, endPts=None
):
    """Instantiate TupleVariation list at the given location, or limit axes' min/max.

    The 'variations' list of TupleVariation objects is modified in-place.
    The 'axisLimits' (dict) maps axis tags (str) to either a single coordinate along the
    axis (float), or to minimum/maximum coordinates (NormalizedAxisRange).

    A 'full' instance (i.e. static font) is produced when all the axes are pinned to
    single coordinates; a 'partial' instance (i.e. a less variable font) is produced
    when some of the axes are omitted, or restricted with a new range.

    Tuples that do not participate are kept as they are. Those that have 0 influence
    at the given location are removed from the variation store.
    Those that are fully instantiated (i.e. all their axes are being pinned) are also
    removed from the variation store, their scaled deltas accumulated and returned, so
    that they can be added by the caller to the default instance's coordinates.
    Tuples that are only partially instantiated (i.e. not all the axes that they
    participate in are being pinned) are kept in the store, and their deltas multiplied
    by the scalar support of the axes to be pinned at the desired location.

    Args:
        variations: List[TupleVariation] from either 'gvar' or 'cvar'.
        axisLimits: Dict[str, Union[float, NormalizedAxisRange]]: axes' coordinates for
            the full or partial instance, or ranges for restricting an axis' min/max.
        origCoords: GlyphCoordinates: default instance's coordinates for computing 'gvar'
            inferred points (cf. table__g_l_y_f._getCoordinatesAndControls).
        endPts: List[int]: indices of contour end points, for inferring 'gvar' deltas.

    Returns:
        List[float]: the overall delta adjustment after applicable deltas were summed.
    """

    newVariations = changeTupleVariationsAxisLimits(variations, axisLimits)

    mergedVariations = collections.OrderedDict()
    for var in newVariations:
        # compute inferred deltas only for gvar ('origCoords' is None for cvar)
        if origCoords is not None:
            var.calcInferredDeltas(origCoords, endPts)

        # merge TupleVariations with overlapping "tents": rebasing one variation
        # against the new limits may have produced several vars sharing the same
        # support region, which can be summed into one
        axes = frozenset(var.axes.items())
        if axes in mergedVariations:
            mergedVariations[axes] += var
        else:
            mergedVariations[axes] = var

    # drop TupleVariation if all axes have been pinned (var.axes.items() is empty);
    # its deltas will be added to the default instance's coordinates
    defaultVar = mergedVariations.pop(frozenset(), None)

    for var in mergedVariations.values():
        var.roundDeltas()
    # replace the input list's contents in-place, as documented above
    variations[:] = list(mergedVariations.values())

    return defaultVar.coordinates if defaultVar is not None else []
|
|
|
|
|
|
|
|
|
2022-08-06 15:06:20 -06:00
|
|
|
def changeTupleVariationsAxisLimits(variations, axisLimits):
    """Apply each axis limit in turn to every TupleVariation; return the new list.

    The input 'variations' list is not modified; axis tags are processed in
    sorted order for determinism.
    """
    current = list(variations)
    for axisTag, axisLimit in sorted(axisLimits.items()):
        limited = []
        for variation in current:
            limited.extend(changeTupleVariationAxisLimit(variation, axisTag, axisLimit))
        current = limited
    return current
|
2019-04-04 17:21:01 +01:00
|
|
|
|
|
|
|
|
2022-08-06 15:06:20 -06:00
|
|
|
def changeTupleVariationAxisLimit(var, axisTag, axisLimit):
    """Rebase a single TupleVariation against a new limit for one axis.

    Args:
        var: the TupleVariation to rebase (not modified; new objects are returned).
        axisTag: tag of the axis being limited.
        axisLimit: NormalizedAxisTent (min, default, max) for the axis.

    Returns:
        List[TupleVariation]: zero, one or more variations replacing 'var',
        each with its deltas scaled for the rebased axis support.
    """
    assert isinstance(axisLimit, NormalizedAxisTent)

    # Skip when current axis is missing (i.e. doesn't participate): the
    # fallback (-1, 0, 1) has a 0 peak, so a missing axis returns here too.
    lower, peak, upper = var.axes.get(axisTag, (-1, 0, 1))
    if peak == 0:
        return [var]
    # Drop if the var 'tent' isn't well-formed
    if not (lower <= peak <= upper) or (lower < 0 and upper > 0):
        return []

    # note: a previous `if axisTag not in var.axes` guard here was unreachable,
    # since a missing axis already returned above via the default peak == 0
    tent = var.axes[axisTag]

    solutions = solver.rebaseTent(tent, axisLimit)

    out = []
    # TODO Reuse original var
    for scalar, newTent in solutions:
        if scalar == 0:
            continue
        newVar = TupleVariation(var.axes, var.coordinates)
        newVar.axes.pop(axisTag)
        if newTent[1] != 0:
            # fixed: this was `==` (a no-op comparison) instead of `=`, which
            # silently dropped the rebased tent from the new variation's axes
            newVar.axes[axisTag] = newTent
        if scalar != 1:
            newVar.scaleDeltas(scalar)
        out.append(newVar)

    return out
|
2019-03-22 14:15:53 +00:00
|
|
|
|
2022-08-22 06:28:48 -06:00
|
|
|
def _instantiateGvarGlyph(
    glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=True
):
    """Instantiate one glyph's gvar variations in-place at 'axisLimits'.

    Fully-pinned deltas are folded into the glyph's coordinates (including the
    phantom points), metrics are refreshed via _setCoordinates, and the glyph's
    remaining TupleVariations are pruned or IUP-optimized.
    """
    coordinates, ctrl = glyf._getCoordinatesAndControls(glyphname, hMetrics, vMetrics)
    endPts = ctrl.endPts

    # Not every glyph may have variations
    tupleVarStore = gvar.variations.get(glyphname)

    if tupleVarStore:
        defaultDeltas = instantiateTupleVariationStore(
            tupleVarStore, axisLimits, coordinates, endPts
        )

        if defaultDeltas:
            coordinates += _g_l_y_f.GlyphCoordinates(defaultDeltas)

    # _setCoordinates also sets the hmtx/vmtx advance widths and sidebearings from
    # the four phantom points and glyph bounding boxes.
    # We call it unconditionally even if a glyph has no variations or no deltas are
    # applied at this location, in case the glyph's xMin and in turn its sidebearing
    # have changed. E.g. a composite glyph has no deltas for the component's (x, y)
    # offset nor for the 4 phantom points (e.g. it's monospaced). Thus its entry in
    # gvar table is empty; however, the composite's base glyph may have deltas
    # applied, hence the composite's bbox and left/top sidebearings may need updating
    # in the instanced font.
    glyf._setCoordinates(glyphname, coordinates, hMetrics, vMetrics)

    if not tupleVarStore:
        # no variations left for this glyph: drop its (possibly empty) gvar entry
        if glyphname in gvar.variations:
            del gvar.variations[glyphname]
        return

    if optimize:
        isComposite = glyf[glyphname].isComposite()
        for var in tupleVarStore:
            var.optimize(coordinates, endPts, isComposite)
|
2019-03-06 17:43:28 -08:00
|
|
|
|
2022-08-22 06:28:48 -06:00
|
|
|
|
2021-04-14 14:40:41 -06:00
|
|
|
def instantiateGvarGlyph(varfont, glyphname, axisLimits, optimize=True):
    """Public wrapper to instantiate a single glyph's gvar variations.

    Possibly to be removed; see
    https://github.com/fonttools/fonttools/pull/2266
    """
    vmtx = varfont.get("vmtx")
    _instantiateGvarGlyph(
        glyphname,
        varfont["glyf"],
        varfont["gvar"],
        varfont["hmtx"].metrics,
        getattr(vmtx, "metrics", None),
        axisLimits,
        optimize=optimize,
    )
|
|
|
|
|
2019-03-06 17:43:28 -08:00
|
|
|
|
2019-11-15 19:36:57 +00:00
|
|
|
def instantiateGvar(varfont, axisLimits, optimize=True):
    """Instantiate the glyf/gvar tables of 'varfont' in-place at 'axisLimits'."""
    log.info("Instantiating glyf/gvar tables")

    gvar = varfont["gvar"]
    glyf = varfont["glyf"]
    hMetrics = varfont["hmtx"].metrics
    vMetrics = getattr(varfont.get("vmtx"), "metrics", None)

    # Process glyphs in order of increasing component depth: if a composite
    # were handled before its base glyph, its bounds could be computed
    # incorrectly because the base's deltas wouldn't have been applied yet.
    def _depthThenName(name):
        glyph = glyf[name]
        depth = (
            glyph.getCompositeMaxpValues(glyf).maxComponentDepth
            if glyph.isComposite()
            else 0
        )
        return (depth, name)

    for glyphname in sorted(glyf.glyphOrder, key=_depthThenName):
        _instantiateGvarGlyph(
            glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=optimize
        )

    # all variations consumed (full instance): the table is no longer needed
    if not gvar.variations:
        del varfont["gvar"]
|
|
|
|
|
|
|
|
|
2019-04-01 11:03:45 +01:00
|
|
|
def setCvarDeltas(cvt, deltas):
    """Add rounded non-zero deltas to the corresponding 'cvt ' values in-place."""
    for index, delta in enumerate(deltas):
        if not delta:
            continue
        cvt[index] += otRound(delta)
|
2019-03-06 17:43:28 -08:00
|
|
|
|
|
|
|
|
2019-11-15 19:36:57 +00:00
|
|
|
def instantiateCvar(varfont, axisLimits):
    """Instantiate the cvt/cvar tables of 'varfont' in-place at 'axisLimits'."""
    log.info("Instantiating cvt/cvar tables")

    cvar = varfont["cvar"]

    # deltas of fully-pinned tuples get folded into the default 'cvt ' values
    defaultDeltas = instantiateTupleVariationStore(cvar.variations, axisLimits)
    if defaultDeltas:
        setCvarDeltas(varfont["cvt "], defaultDeltas)

    # no variations remaining means the cvar table is no longer needed
    if not cvar.variations:
        del varfont["cvar"]
|
|
|
|
|
|
|
|
|
2019-04-17 19:20:26 +01:00
|
|
|
def setMvarDeltas(varfont, deltas):
    """Apply default MVAR deltas to their target table attributes in-place.

    'deltas' maps VariationIndex compound values (outer << 16 | inner) to the
    delta to add; records whose value tag is unknown are skipped.
    """
    for record in varfont["MVAR"].table.ValueRecord:
        entry = MVAR_ENTRIES.get(record.ValueTag)
        if entry is None:
            continue
        tableTag, itemName = entry
        delta = deltas[record.VarIdx]
        if delta != 0:
            targetTable = varfont[tableTag]
            setattr(targetTable, itemName, getattr(targetTable, itemName) + otRound(delta))
|
2019-03-19 10:44:39 -04:00
|
|
|
|
|
|
|
|
2019-11-15 19:36:57 +00:00
|
|
|
def instantiateMVAR(varfont, axisLimits):
    """Instantiate the MVAR table of 'varfont' in-place at 'axisLimits'.

    Default deltas are applied to the target table values; the MVAR table is
    dropped entirely when no variation regions remain.
    """
    log.info("Instantiating MVAR table")

    mvar = varfont["MVAR"].table
    fvarAxes = varfont["fvar"].axes
    varStore = mvar.VarStore
    defaultDeltas = instantiateItemVariationStore(varStore, fvarAxes, axisLimits)
    setMvarDeltas(varfont, defaultDeltas)

    if varStore.VarRegionList.Region:
        # live regions remain: re-optimize the store and remap the records'
        # VariationIndex values to the new layout
        varIndexMapping = varStore.optimize()
        for rec in mvar.ValueRecord:
            rec.VarIdx = varIndexMapping[rec.VarIdx]
    else:
        # fully instanced: all deltas were applied above, drop the table
        del varfont["MVAR"]
|
2019-03-19 10:44:39 -04:00
|
|
|
|
|
|
|
|
2019-04-20 12:42:31 +01:00
|
|
|
def _remapVarIdxMap(table, attrName, varIndexMapping, glyphOrder):
    """Rebuild table.<attrName> VarIdxMap after VarStore.optimize() renumbering."""
    currentMapping = getattr(table, attrName).mapping
    remapped = [varIndexMapping[currentMapping[name]] for name in glyphOrder]
    setattr(table, attrName, builder.buildVarIdxMap(remapped, glyphOrder))
|
|
|
|
|
|
|
|
|
|
|
|
# TODO(anthrotype) Add support for HVAR/VVAR in CFF2
|
2019-09-18 17:00:53 +01:00
|
|
|
def _instantiateVHVAR(varfont, axisLimits, tableFields):
    """Instantiate the HVAR or VVAR table in-place, per 'tableFields'.

    'tableFields' is the varLib.HVAR_FIELDS/VVAR_FIELDS descriptor naming the
    table tag and its mapping attributes.
    """
    tableTag = tableFields.tableTag
    fvarAxes = varfont["fvar"].axes
    # Deltas from gvar table have already been applied to the hmtx/vmtx. For full
    # instances (i.e. all axes pinned), we can simply drop HVAR/VVAR and return
    if set(
        axisTag for axisTag, value in axisLimits.items() if not isinstance(value, tuple)
    ).issuperset(axis.axisTag for axis in fvarAxes):
        log.info("Dropping %s table", tableTag)
        del varfont[tableTag]
        return

    log.info("Instantiating %s table", tableTag)
    vhvar = varfont[tableTag].table
    varStore = vhvar.VarStore
    # since deltas were already applied, the return value here is ignored
    instantiateItemVariationStore(varStore, fvarAxes, axisLimits)

    if varStore.VarRegionList.Region:
        # Only re-optimize VarStore if the HVAR/VVAR already uses indirect AdvWidthMap
        # or AdvHeightMap. If a direct, implicit glyphID->VariationIndex mapping is
        # used for advances, skip re-optimizing and maintain original VariationIndex.
        if getattr(vhvar, tableFields.advMapping):
            varIndexMapping = varStore.optimize(use_NO_VARIATION_INDEX=False)
            glyphOrder = varfont.getGlyphOrder()
            _remapVarIdxMap(vhvar, tableFields.advMapping, varIndexMapping, glyphOrder)
            if getattr(vhvar, tableFields.sb1):  # left or top sidebearings
                _remapVarIdxMap(vhvar, tableFields.sb1, varIndexMapping, glyphOrder)
            if getattr(vhvar, tableFields.sb2):  # right or bottom sidebearings
                _remapVarIdxMap(vhvar, tableFields.sb2, varIndexMapping, glyphOrder)
            if tableTag == "VVAR" and getattr(vhvar, tableFields.vOrigMapping):
                _remapVarIdxMap(
                    vhvar, tableFields.vOrigMapping, varIndexMapping, glyphOrder
                )
|
|
|
|
|
|
|
|
|
2019-11-15 19:36:57 +00:00
|
|
|
def instantiateHVAR(varfont, axisLimits):
    # HVAR and VVAR share the same logic; dispatch with the HVAR field names
    return _instantiateVHVAR(varfont, axisLimits, varLib.HVAR_FIELDS)
|
2019-04-20 12:42:31 +01:00
|
|
|
|
|
|
|
|
2019-11-15 19:36:57 +00:00
|
|
|
def instantiateVVAR(varfont, axisLimits):
    # HVAR and VVAR share the same logic; dispatch with the VVAR field names
    return _instantiateVHVAR(varfont, axisLimits, varLib.VVAR_FIELDS)
|
2019-04-20 12:42:31 +01:00
|
|
|
|
|
|
|
|
2019-04-16 18:14:05 +01:00
|
|
|
class _TupleVarStoreAdapter(object):
    """Adapter exposing an ItemVariationStore as lists of TupleVariations.

    Allows reusing instantiateTupleVariationStore (written for gvar/cvar) on
    item variation stores (MVAR/HVAR/VVAR/GDEF), then converting back.
    """

    def __init__(self, regions, axisOrder, tupleVarData, itemCounts):
        # list of region dicts: {axisTag: (min, peak, max)}
        self.regions = regions
        # axis tags in fvar order
        self.axisOrder = axisOrder
        # one List[TupleVariation] per VarData subtable
        self.tupleVarData = tupleVarData
        # ItemCount of each VarData subtable, kept to preserve indices
        self.itemCounts = itemCounts

    @classmethod
    def fromItemVarStore(cls, itemVarStore, fvarAxes):
        """Build an adapter from an otTables.VarStore and the fvar axes."""
        axisOrder = [axis.axisTag for axis in fvarAxes]
        regions = [
            region.get_support(fvarAxes) for region in itemVarStore.VarRegionList.Region
        ]
        tupleVarData = []
        itemCounts = []
        for varData in itemVarStore.VarData:
            variations = []
            varDataRegions = (regions[i] for i in varData.VarRegionIndex)
            # transpose Item rows so each TupleVariation holds one region's column
            for axes, coordinates in zip(varDataRegions, zip(*varData.Item)):
                variations.append(TupleVariation(axes, list(coordinates)))
            tupleVarData.append(variations)
            itemCounts.append(varData.ItemCount)
        return cls(regions, axisOrder, tupleVarData, itemCounts)

    def rebuildRegions(self):
        """Recompute self.regions from the current TupleVariations."""
        # Collect the set of all unique region axes from the current TupleVariations.
        # We use an OrderedDict to de-duplicate regions while keeping the order.
        uniqueRegions = collections.OrderedDict.fromkeys(
            (
                frozenset(var.axes.items())
                for variations in self.tupleVarData
                for var in variations
            )
        )
        # Maintain the original order for the regions that pre-existed, appending
        # the new regions at the end of the region list.
        newRegions = []
        for region in self.regions:
            regionAxes = frozenset(region.items())
            if regionAxes in uniqueRegions:
                newRegions.append(region)
                del uniqueRegions[regionAxes]
        if uniqueRegions:
            newRegions.extend(dict(region) for region in uniqueRegions)
        self.regions = newRegions

    def instantiate(self, axisLimits):
        """Instantiate every VarData's variations at 'axisLimits'.

        Returns one default-delta list per VarData subtable (zeros when a
        subtable yielded no fully-pinned deltas).
        """
        defaultDeltaArray = []
        for variations, itemCount in zip(self.tupleVarData, self.itemCounts):
            defaultDeltas = instantiateTupleVariationStore(variations, axisLimits)
            if not defaultDeltas:
                defaultDeltas = [0] * itemCount
            defaultDeltaArray.append(defaultDeltas)

        # rebuild regions whose axes were dropped or limited
        self.rebuildRegions()

        # axes pinned to a single value (min == max) disappear from the store
        pinnedAxes = {
            axisTag
            for axisTag, (minimum, default, maximum) in axisLimits.items()
            if minimum == maximum
        }
        self.axisOrder = [
            axisTag for axisTag in self.axisOrder if axisTag not in pinnedAxes
        ]

        return defaultDeltaArray

    def asItemVarStore(self):
        """Convert the adapter's current state back to an otTables.VarStore."""
        regionOrder = [frozenset(axes.items()) for axes in self.regions]
        varDatas = []
        for variations, itemCount in zip(self.tupleVarData, self.itemCounts):
            if variations:
                assert len(variations[0].coordinates) == itemCount
                varRegionIndices = [
                    regionOrder.index(frozenset(var.axes.items())) for var in variations
                ]
                varDataItems = list(zip(*(var.coordinates for var in variations)))
                varDatas.append(
                    builder.buildVarData(varRegionIndices, varDataItems, optimize=False)
                )
            else:
                # keep an empty VarData so subtable indices stay valid
                varDatas.append(
                    builder.buildVarData([], [[] for _ in range(itemCount)])
                )
        regionList = builder.buildVarRegionList(self.regions, self.axisOrder)
        itemVarStore = builder.buildVarStore(regionList, varDatas)
        # remove unused regions from VarRegionList
        itemVarStore.prune_regions()
        return itemVarStore
|
|
|
|
|
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def instantiateItemVariationStore(itemVarStore, fvarAxes, axisLimits):
    """Compute deltas at partial location, and update varStore in-place.

    Remove regions in which all axes were instanced, or fall outside the new axis
    limits. Scale the deltas of the remaining regions where only some of the axes
    were instanced.

    The number of VarData subtables, and the number of items within each, are
    not modified, in order to keep the existing VariationIndex valid.
    One may call VarStore.optimize() method after this to further optimize those.

    Args:
        varStore: An otTables.VarStore object (Item Variation Store)
        fvarAxes: list of fvar's Axis objects
        axisLimits: Dict[str, float] mapping axis tags to normalized axis coordinates
            (float) or ranges for restricting an axis' min/max (NormalizedAxisRange).
            May not specify coordinates/ranges for all the fvar axes.

    Returns:
        defaultDeltas: to be added to the default instance, of type dict of floats
            keyed by VariationIndex compound values: i.e. (outer << 16) + inner.
    """
    # round-trip through the TupleVariation representation to reuse the
    # gvar/cvar instancing logic
    tupleVarStore = _TupleVarStoreAdapter.fromItemVarStore(itemVarStore, fvarAxes)
    defaultDeltaArray = tupleVarStore.instantiate(axisLimits)
    newItemVarStore = tupleVarStore.asItemVarStore()

    # graft the rebuilt regions/data onto the caller's VarStore object
    itemVarStore.VarRegionList = newItemVarStore.VarRegionList
    assert itemVarStore.VarDataCount == newItemVarStore.VarDataCount
    itemVarStore.VarData = newItemVarStore.VarData

    defaultDeltas = {
        ((major << 16) + minor): delta
        for major, deltas in enumerate(defaultDeltaArray)
        for minor, delta in enumerate(deltas)
    }
    # records pointing at NO_VARIATION_INDEX get a zero delta
    defaultDeltas[itemVarStore.NO_VARIATION_INDEX] = 0
    return defaultDeltas
|
2019-04-17 19:20:26 +01:00
|
|
|
|
|
|
|
|
2019-11-15 19:36:57 +00:00
|
|
|
def instantiateOTL(varfont, axisLimits):
    """Instantiate the GDEF VarStore (and dependent GPOS devices) in-place.

    When the store is fully instanced, GDEF is downgraded to a pre-1.3 version
    and dropped if it carries no other data.
    """
    # TODO(anthrotype) Support partial instancing of JSTF and BASE tables

    if (
        "GDEF" not in varfont
        or varfont["GDEF"].table.Version < 0x00010003
        or not varfont["GDEF"].table.VarStore
    ):
        return

    if "GPOS" in varfont:
        msg = "Instantiating GDEF and GPOS tables"
    else:
        msg = "Instantiating GDEF table"
    log.info(msg)

    gdef = varfont["GDEF"].table
    varStore = gdef.VarStore
    fvarAxes = varfont["fvar"].axes

    defaultDeltas = instantiateItemVariationStore(varStore, fvarAxes, axisLimits)

    # When VF are built, big lookups may overflow and be broken into multiple
    # subtables. MutatorMerger (which inherits from AligningMerger) reattaches
    # them upon instancing, in case they can now fit a single subtable (if not,
    # they will be split again upon compilation).
    # This 'merger' also works as a 'visitor' that traverses the OTL tables and
    # calls specific methods when instances of a given type are found.
    # Specifically, it adds default deltas to GPOS Anchors/ValueRecords and GDEF
    # LigatureCarets, and optionally deletes all VariationIndex tables if the
    # VarStore is fully instanced.
    merger = MutatorMerger(
        varfont, defaultDeltas, deleteVariations=(not varStore.VarRegionList.Region)
    )
    merger.mergeTables(varfont, [varfont], ["GDEF", "GPOS"])

    if varStore.VarRegionList.Region:
        # partial instance: remap surviving VariationIndex devices
        varIndexMapping = varStore.optimize()
        gdef.remap_device_varidxes(varIndexMapping)
        if "GPOS" in varfont:
            varfont["GPOS"].table.remap_device_varidxes(varIndexMapping)
    else:
        # Downgrade GDEF.
        del gdef.VarStore
        gdef.Version = 0x00010002
        if gdef.MarkGlyphSetsDef is None:
            del gdef.MarkGlyphSetsDef
            gdef.Version = 0x00010000

        # drop GDEF altogether if nothing of substance remains
        if not (
            gdef.LigCaretList
            or gdef.MarkAttachClassDef
            or gdef.GlyphClassDef
            or gdef.AttachList
            or (gdef.Version >= 0x00010002 and gdef.MarkGlyphSetsDef)
        ):
            del varfont["GDEF"]
|
2019-03-28 17:41:58 +00:00
|
|
|
|
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def instantiateFeatureVariations(varfont, axisLimits):
    """Instantiate the FeatureVariations of the GPOS and GSUB tables.

    Tables that are absent, or that carry no FeatureVariations subtable, are
    left untouched. After instancing, lookups that are no longer referenced
    are pruned from each table.
    """
    for tableTag in ("GPOS", "GSUB"):
        if tableTag not in varfont:
            continue
        table = varfont[tableTag].table
        if not getattr(table, "FeatureVariations", None):
            continue
        log.info("Instantiating FeatureVariations of %s table", tableTag)
        _instantiateFeatureVariations(table, varfont["fvar"].axes, axisLimits)
        # remove unreferenced lookups
        varfont[tableTag].prune_lookups()
|
2019-03-25 16:14:57 -04:00
|
|
|
|
2019-03-26 10:14:16 +00:00
|
|
|
|
2019-05-29 18:28:03 +01:00
|
|
|
def _featureVariationRecordIsUnique(rec, seen):
|
|
|
|
conditionSet = []
|
|
|
|
for cond in rec.ConditionSet.ConditionTable:
|
|
|
|
if cond.Format != 1:
|
2019-05-30 11:22:01 +01:00
|
|
|
# can't tell whether this is duplicate, assume is unique
|
|
|
|
return True
|
2019-05-29 18:28:03 +01:00
|
|
|
conditionSet.append(
|
|
|
|
(cond.AxisIndex, cond.FilterRangeMinValue, cond.FilterRangeMaxValue)
|
|
|
|
)
|
|
|
|
# besides the set of conditions, we also include the FeatureTableSubstitution
|
|
|
|
# version to identify unique FeatureVariationRecords, even though only one
|
|
|
|
# version is currently defined. It's theoretically possible that multiple
|
|
|
|
# records with same conditions but different substitution table version be
|
|
|
|
# present in the same font for backward compatibility.
|
|
|
|
recordKey = frozenset([rec.FeatureTableSubstitution.Version] + conditionSet)
|
|
|
|
if recordKey in seen:
|
|
|
|
return False
|
|
|
|
else:
|
|
|
|
seen.add(recordKey) # side effect
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def _limitFeatureVariationConditionRange(condition, axisRange):
    """Renormalize a Format-1 condition's filter range to a restricted axis range.

    Returns a new AxisRange with both filter bounds renormalized against
    (axisRange.minimum, 0, axisRange.maximum), or None when the condition is
    invalid (min > max) or lies entirely outside `axisRange`.
    """
    lo = condition.FilterRangeMinValue
    hi = condition.FilterRangeMaxValue

    if lo > hi or lo > axisRange.maximum or hi < axisRange.minimum:
        # condition invalid or out of range
        return None

    triple = (axisRange.minimum, 0, axisRange.maximum)
    return AxisRange(normalizeValue(lo, triple), normalizeValue(hi, triple))
|
|
|
|
|
|
|
|
|
2019-06-14 14:34:47 +01:00
|
|
|
def _instantiateFeatureVariationRecord(
|
|
|
|
record, recIdx, location, fvarAxes, axisIndexMap
|
|
|
|
):
|
|
|
|
applies = True
|
|
|
|
newConditions = []
|
|
|
|
for i, condition in enumerate(record.ConditionSet.ConditionTable):
|
|
|
|
if condition.Format == 1:
|
|
|
|
axisIdx = condition.AxisIndex
|
|
|
|
axisTag = fvarAxes[axisIdx].axisTag
|
|
|
|
if axisTag in location:
|
|
|
|
minValue = condition.FilterRangeMinValue
|
|
|
|
maxValue = condition.FilterRangeMaxValue
|
|
|
|
v = location[axisTag]
|
|
|
|
if not (minValue <= v <= maxValue):
|
|
|
|
# condition not met so remove entire record
|
|
|
|
applies = False
|
|
|
|
newConditions = None
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
# axis not pinned, keep condition with remapped axis index
|
|
|
|
applies = False
|
|
|
|
condition.AxisIndex = axisIndexMap[axisTag]
|
|
|
|
newConditions.append(condition)
|
|
|
|
else:
|
|
|
|
log.warning(
|
|
|
|
"Condition table {0} of FeatureVariationRecord {1} has "
|
|
|
|
"unsupported format ({2}); ignored".format(i, recIdx, condition.Format)
|
|
|
|
)
|
|
|
|
applies = False
|
|
|
|
newConditions.append(condition)
|
|
|
|
|
|
|
|
if newConditions:
|
|
|
|
record.ConditionSet.ConditionTable = newConditions
|
|
|
|
shouldKeep = True
|
2019-09-18 17:00:53 +01:00
|
|
|
else:
|
|
|
|
shouldKeep = False
|
2019-06-14 14:34:47 +01:00
|
|
|
|
|
|
|
return applies, shouldKeep
|
|
|
|
|
|
|
|
|
2022-08-09 17:14:37 -06:00
|
|
|
def _limitFeatureVariationRecord(record, axisRanges, axisOrder):
    """Clamp a FeatureVariationRecord's conditions to restricted axis ranges.

    Conditions on axes present in `axisRanges` get their filter bounds
    renormalized; if any such condition falls entirely outside its new range
    the whole record is dropped. Conditions on other axes, or of non-Format-1
    types, are kept untouched.

    Returns:
        True when the record still has conditions and should be kept,
        False when it must be removed.
    """
    newConditions = []
    for condition in record.ConditionSet.ConditionTable:
        if condition.Format != 1:
            newConditions.append(condition)
            continue
        axisTag = axisOrder[condition.AxisIndex]
        if axisTag not in axisRanges:
            newConditions.append(condition)
            continue
        newRange = _limitFeatureVariationConditionRange(condition, axisRanges[axisTag])
        if newRange is None:
            # condition out of range, remove entire record
            newConditions = None
            break
        # keep condition with updated limits
        condition.FilterRangeMinValue = newRange.minimum
        condition.FilterRangeMaxValue = newRange.maximum
        newConditions.append(condition)

    if not newConditions:
        return False
    record.ConditionSet.ConditionTable = newConditions
    return True
|
|
|
|
|
|
|
|
|
|
|
|
def _instantiateFeatureVariations(table, fvarAxes, axisLimits):
    """Pin, limit, and deduplicate the FeatureVariationRecords of one table.

    Args:
        table: a GSUB or GPOS table object that has a FeatureVariations attribute.
        fvarAxes: the fvar axis records, in fvar order.
        axisLimits: dict of normalized axis limits (pinned values or ranges).
    """
    # Split limits into pinned single coordinates vs. (min, max) ranges.
    location, axisRanges = splitAxisLocationAndRanges(
        axisLimits, rangeType=NormalizedAxisRange
    )
    pinnedAxes = set(location.keys())
    # Pinned axes disappear from the output font, so condition AxisIndex values
    # must be remapped onto the order of the remaining axes.
    axisOrder = [axis.axisTag for axis in fvarAxes if axis.axisTag not in pinnedAxes]
    axisIndexMap = {axisTag: axisOrder.index(axisTag) for axisTag in axisOrder}

    featureVariationApplied = False
    uniqueRecords = set()
    newRecords = []

    for i, record in enumerate(table.FeatureVariations.FeatureVariationRecord):
        # First resolve conditions against the pinned location...
        applies, shouldKeep = _instantiateFeatureVariationRecord(
            record, i, location, fvarAxes, axisIndexMap
        )
        if shouldKeep:
            # ...then clamp surviving conditions to the restricted axis ranges.
            shouldKeep = _limitFeatureVariationRecord(record, axisRanges, axisOrder)

        if shouldKeep and _featureVariationRecordIsUnique(record, uniqueRecords):
            newRecords.append(record)

        if applies and not featureVariationApplied:
            # The first record that fully applies at the pinned location has its
            # substituted features baked into the default FeatureList.
            assert record.FeatureTableSubstitution.Version == 0x00010000
            for rec in record.FeatureTableSubstitution.SubstitutionRecord:
                table.FeatureList.FeatureRecord[rec.FeatureIndex].Feature = rec.Feature
            # Set variations only once
            featureVariationApplied = True

    if newRecords:
        table.FeatureVariations.FeatureVariationRecord = newRecords
        table.FeatureVariations.FeatureVariationCount = len(newRecords)
    else:
        del table.FeatureVariations
        # downgrade table version if there are no FeatureVariations left
        table.Version = 0x00010000
|
2019-03-25 16:14:57 -04:00
|
|
|
|
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def _isValidAvarSegmentMap(axisTag, segmentMap):
|
|
|
|
if not segmentMap:
|
|
|
|
return True
|
|
|
|
if not {(-1.0, -1.0), (0, 0), (1.0, 1.0)}.issubset(segmentMap.items()):
|
|
|
|
log.warning(
|
|
|
|
f"Invalid avar SegmentMap record for axis '{axisTag}': does not "
|
|
|
|
"include all required value maps {-1.0: -1.0, 0: 0, 1.0: 1.0}"
|
|
|
|
)
|
|
|
|
return False
|
|
|
|
previousValue = None
|
|
|
|
for fromCoord, toCoord in sorted(segmentMap.items()):
|
|
|
|
if previousValue is not None and previousValue > toCoord:
|
|
|
|
log.warning(
|
|
|
|
f"Invalid avar AxisValueMap({fromCoord}, {toCoord}) record "
|
|
|
|
f"for axis '{axisTag}': the toCoordinate value must be >= to "
|
|
|
|
f"the toCoordinate value of the preceding record ({previousValue})."
|
|
|
|
)
|
|
|
|
return False
|
|
|
|
previousValue = toCoord
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
|
|
def instantiateAvar(varfont, axisLimits):
    """Pin or limit the avar table's SegmentMaps to the given axis limits.

    The table is dropped entirely when every mapped axis is pinned; otherwise
    pinned axes are removed and restricted axes get their mappings rescaled.
    """
    # 'axisLimits' dict must contain user-space (non-normalized) coordinates.

    location, axisRanges = splitAxisLocationAndRanges(axisLimits)

    segments = varfont["avar"].segments

    # drop table if we instantiate all the axes
    pinnedAxes = set(location.keys())
    if pinnedAxes.issuperset(segments):
        log.info("Dropping avar table")
        del varfont["avar"]
        return

    log.info("Instantiating avar table")
    for axis in pinnedAxes:
        if axis in segments:
            del segments[axis]

    # First compute the default normalization for axisRanges coordinates: i.e.
    # min = -1.0, default = 0, max = +1.0, and in between values interpolated linearly,
    # without using the avar table's mappings.
    # Then, for each SegmentMap, if we are restricting its axis, compute the new
    # mappings by dividing the key/value pairs by the desired new min/max values,
    # dropping any mappings that fall outside the restricted range.
    # The keys ('fromCoord') are specified in default normalized coordinate space,
    # whereas the values ('toCoord') are "mapped forward" using the SegmentMap.
    normalizedRanges = normalizeAxisLimits(varfont, axisLimits, usingAvar=False)
    newSegments = {}
    for axisTag, mapping in segments.items():
        # Broken SegmentMaps are dropped (with a warning) rather than rescaled.
        if not _isValidAvarSegmentMap(axisTag, mapping):
            continue
        if mapping and axisTag in normalizedRanges:
            axisRange = normalizedRanges[axisTag]
            # Forward-map the new range's endpoints to find the output span.
            mappedMin = floatToFixedToFloat(
                piecewiseLinearMap(axisRange.minimum, mapping), 14
            )
            mappedMax = floatToFixedToFloat(
                piecewiseLinearMap(axisRange.maximum, mapping), 14
            )
            newMapping = {}
            for fromCoord, toCoord in mapping.items():
                # Mappings outside the restricted range are discarded.
                if fromCoord < axisRange.minimum or fromCoord > axisRange.maximum:
                    continue
                fromCoord = normalizeValue(
                    fromCoord, (axisRange.minimum, 0, axisRange.maximum)
                )

                assert mappedMin <= toCoord <= mappedMax
                toCoord = normalizeValue(toCoord, (mappedMin, 0, mappedMax))

                # Quantize both coordinates to F2Dot14 like the binary table does.
                fromCoord = floatToFixedToFloat(fromCoord, 14)
                toCoord = floatToFixedToFloat(toCoord, 14)
                newMapping[fromCoord] = toCoord
            # The identity extremes are always required in a SegmentMap.
            newMapping.update({-1.0: -1.0, 1.0: 1.0})
            newSegments[axisTag] = newMapping
        else:
            newSegments[axisTag] = mapping
    varfont["avar"].segments = newSegments
|
|
|
|
|
|
|
|
|
|
|
|
def isInstanceWithinAxisRanges(location, axisRanges):
    """Return True if every coordinate in `location` lies inside the
    corresponding restricted axis range; axes without a range always pass.
    """
    return all(
        axisRanges[axisTag].minimum <= coord <= axisRanges[axisTag].maximum
        for axisTag, coord in location.items()
        if axisTag in axisRanges
    )
|
|
|
|
|
|
|
|
|
|
|
|
def instantiateFvar(varfont, axisLimits):
    """Prune the fvar table according to the given user-space axis limits.

    Pinned axes are removed (and the whole table is dropped when all axes are
    pinned); restricted axes get new min/max values; named instances that don't
    match the pinned location, or fall outside the restricted ranges, are
    discarded.
    """
    # 'axisLimits' dict must contain user-space (non-normalized) coordinates

    location, axisRanges = splitAxisLocationAndRanges(axisLimits, rangeType=AxisRange)

    fvar = varfont["fvar"]

    # drop table if we instantiate all the axes
    if {axis.axisTag for axis in fvar.axes} <= set(location):
        log.info("Dropping fvar table")
        del varfont["fvar"]
        return

    log.info("Instantiating fvar table")

    keptAxes = []
    for axis in fvar.axes:
        tag = axis.axisTag
        if tag in location:
            continue  # pinned axes disappear from the partial font
        if tag in axisRanges:
            axis.minValue, axis.maxValue = axisRanges[tag]
        keptAxes.append(axis)
    fvar.axes = keptAxes

    # only keep NamedInstances whose coordinates == pinned axis location
    keptInstances = []
    for instance in fvar.instances:
        coords = instance.coordinates
        if any(coords[tag] != value for tag, value in location.items()):
            continue
        for tag in location:
            del coords[tag]
        if isInstanceWithinAxisRanges(coords, axisRanges):
            keptInstances.append(instance)
    fvar.instances = keptInstances
|
|
|
|
|
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def instantiateSTAT(varfont, axisLimits):
    """Drop STAT AxisValue tables that fall outside the new axis limits.

    'axisLimits' must contain user-space (non-normalized) coordinates. An
    empty STAT table (no design axes or no AxisValues) is left untouched.
    """
    stat = varfont["STAT"].table
    hasAxisValues = stat.AxisValueArray and stat.AxisValueArray.AxisValue
    if not stat.DesignAxisRecord or not hasAxisValues:
        return  # STAT table empty, nothing to do

    log.info("Instantiating STAT table")
    keptAxisValues = axisValuesFromAxisLimits(stat, axisLimits)
    stat.AxisValueCount = len(keptAxisValues)
    if stat.AxisValueCount:
        stat.AxisValueArray.AxisValue = keptAxisValues
    else:
        stat.AxisValueArray = None
|
2020-10-16 10:47:40 +01:00
|
|
|
|
|
|
|
|
2021-01-05 14:34:25 +00:00
|
|
|
def axisValuesFromAxisLimits(stat, axisLimits):
    """Return the list of STAT AxisValue tables compatible with `axisLimits`.

    Args:
        stat: the STAT table object (with DesignAxisRecord and AxisValueArray).
        axisLimits: dict of user-space axis limits (pinned values or ranges).

    Returns:
        A new list of the AxisValue tables to keep; the input is not mutated.
    """
    location, axisRanges = splitAxisLocationAndRanges(axisLimits, rangeType=AxisRange)

    def isAxisValueOutsideLimits(axisTag, axisValue):
        # An AxisValue survives only if its axis is unrestricted, pinned at
        # exactly this value, or restricted to a range containing this value.
        if axisTag in location and axisValue != location[axisTag]:
            return True
        elif axisTag in axisRanges:
            axisRange = axisRanges[axisTag]
            if axisValue < axisRange.minimum or axisValue > axisRange.maximum:
                return True
        return False

    # only keep AxisValues whose axis is not pinned nor restricted, or is pinned at the
    # exact (nominal) value, or is restricted but the value is within the new range
    designAxes = stat.DesignAxisRecord.Axis
    newAxisValueTables = []
    for axisValueTable in stat.AxisValueArray.AxisValue:
        axisValueFormat = axisValueTable.Format
        if axisValueFormat in (1, 2, 3):
            axisTag = designAxes[axisValueTable.AxisIndex].AxisTag
            # Format 2 describes a range around a nominal value; use that value.
            if axisValueFormat == 2:
                axisValue = axisValueTable.NominalValue
            else:
                axisValue = axisValueTable.Value
            if isAxisValueOutsideLimits(axisTag, axisValue):
                continue
        elif axisValueFormat == 4:
            # drop 'non-analytic' AxisValue if _any_ AxisValueRecord doesn't match
            # the pinned location or is outside range
            dropAxisValueTable = False
            for rec in axisValueTable.AxisValueRecord:
                axisTag = designAxes[rec.AxisIndex].AxisTag
                axisValue = rec.Value
                if isAxisValueOutsideLimits(axisTag, axisValue):
                    dropAxisValueTable = True
                    break
            if dropAxisValueTable:
                continue
        else:
            # Unknown formats are kept as-is, with a warning.
            log.warning("Unknown AxisValue table format (%s); ignored", axisValueFormat)
        newAxisValueTables.append(axisValueTable)
    return newAxisValueTables
|
2019-05-07 17:55:50 +01:00
|
|
|
|
|
|
|
|
2019-05-08 16:24:17 +01:00
|
|
|
def setMacOverlapFlags(glyfTable):
    """Set the overlap bits that Apple rasterizers use on every glyph.

    Composite glyphs get OVERLAP_COMPOUND on their first component; simple
    glyphs with at least one contour get the overlap-simple bit on their first
    point flag. Empty glyphs are left untouched.
    """
    for glyphName in glyfTable.keys():
        glyph = glyfTable[glyphName]
        if glyph.isComposite():
            # Set OVERLAP_COMPOUND bit for compound glyphs
            glyph.components[0].flags |= _g_l_y_f.OVERLAP_COMPOUND
        elif glyph.numberOfContours > 0:
            # Set OVERLAP_SIMPLE bit for simple glyphs
            glyph.flags[0] |= _g_l_y_f.flagOverlapSimple
|
|
|
|
|
|
|
|
|
2019-05-31 12:00:08 +01:00
|
|
|
def normalize(value, triple, avarMapping):
    """Normalize a user-space `value` against a (min, default, max) triple,
    optionally remap it through an avar segment map, and quantize the result.
    """
    normalized = normalizeValue(value, triple)
    if avarMapping:
        normalized = piecewiseLinearMap(normalized, avarMapping)
    # Quantize to F2Dot14, to avoid surprise interpolations.
    return floatToFixedToFloat(normalized, 14)
|
2019-03-06 17:43:28 -08:00
|
|
|
|
2019-03-07 19:18:14 -08:00
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def normalizeAxisLimits(varfont, axisLimits, usingAvar=True):
    """Normalize user-space axis limits to the internal -1..+1 space.

    Args:
        varfont: TTFont with an 'fvar' table (and optionally 'avar').
        axisLimits: dict mapping axis tags to a pinned value or a range
            (whatever `_expand` accepts and turns into a (min, default, max)).
        usingAvar: when True, values are additionally mapped through the avar
            segment maps; instantiateAvar passes False to obtain the pre-avar
            (default) normalization.

    Returns:
        dict mapping axis tags to NormalizedAxisTent (min, default, max) values.

    Raises:
        ValueError: if a tag in axisLimits is not present in fvar.
        NotImplementedError: if a range would move an axis's default position.
    """
    fvar = varfont["fvar"]
    badLimits = set(axisLimits.keys()).difference(a.axisTag for a in fvar.axes)
    if badLimits:
        raise ValueError("Cannot limit: {} not present in fvar".format(badLimits))

    axes = {
        a.axisTag: (a.minValue, a.defaultValue, a.maxValue)
        for a in fvar.axes
        if a.axisTag in axisLimits
    }

    avarSegments = {}
    if usingAvar and "avar" in varfont:
        avarSegments = varfont["avar"].segments

    normalizedLimits = {}

    for axis_tag, triple in axes.items():
        default = triple[1]

        value = axisLimits[axis_tag]

        minV, defaultV, maxV = _expand(value)
        # A missing default (e.g. from a "min:max" CLI range) falls back to
        # the axis's fvar default.
        if defaultV is None:
            defaultV = default
        # Moving the default of a (non-degenerate) range is not supported:
        # deltas are anchored at the current default position.
        if (minV != maxV or minV != defaultV) and defaultV != default:
            raise NotImplementedError(
                f"Unsupported range {axis_tag}={minV:g}:{defaultV:g}:{maxV:g}; "
                f"can't change default position ({axis_tag}={default:g})"
            )

        value = (minV, defaultV, maxV)

        avarMapping = avarSegments.get(axis_tag, None)
        normalizedLimits[axis_tag] = NormalizedAxisTent(
            *(normalize(v, triple, avarMapping) for v in value)
        )

    return normalizedLimits
|
2019-03-06 21:54:15 -08:00
|
|
|
|
2019-03-06 21:58:58 -08:00
|
|
|
|
|
|
|
def sanityCheckVariableTables(varfont):
    """Raise if `varfont` lacks the tables required for instancing.

    Raises:
        ValueError: when 'fvar' is missing, or 'gvar' is present without 'glyf'.
        NotImplementedError: for CFF2 variable fonts, which aren't supported.
    """
    if "fvar" not in varfont:
        raise ValueError("Missing required table fvar")
    if "gvar" in varfont and "glyf" not in varfont:
        raise ValueError("Can't have gvar without glyf")
    # TODO(anthrotype) Remove once we do support partial instancing CFF2
    if "CFF2" in varfont:
        raise NotImplementedError("Instancing CFF2 variable fonts is not supported yet")
|
2019-03-06 21:58:58 -08:00
|
|
|
|
2019-03-07 19:18:14 -08:00
|
|
|
|
2019-05-31 12:00:08 +01:00
|
|
|
def populateAxisDefaults(varfont, axisLimits):
    """Replace `None` values in `axisLimits` with each axis's fvar default.

    Returns the input dict unchanged when it contains no `None` placeholders;
    otherwise returns a new dict (the input is never mutated).
    """
    if all(value is not None for value in axisLimits.values()):
        return axisLimits
    defaults = {a.axisTag: a.defaultValue for a in varfont["fvar"].axes}
    return {
        axisTag: defaults[axisTag] if value is None else value
        for axisTag, value in axisLimits.items()
    }
|
2019-05-20 17:31:46 -04:00
|
|
|
|
|
|
|
|
2019-05-08 16:24:17 +01:00
|
|
|
def instantiateVariableFont(
    varfont,
    axisLimits,
    inplace=False,
    optimize=True,
    overlap=OverlapMode.KEEP_AND_SET_FLAGS,
    updateFontNames=False,
):
    """Instantiate variable font, either fully or partially.

    Depending on whether the `axisLimits` dictionary references all or some of the
    input varfont's axes, the output font will either be a full instance (static
    font) or a variable font with possibly less variation data.

    Args:
        varfont: a TTFont instance, which must contain at least an 'fvar' table.
            Note that variable fonts with 'CFF2' table are not supported yet.
        axisLimits: a dict keyed by axis tags (str) containing the coordinates (float)
            along one or more axes where the desired instance will be located.
            If the value is `None`, the default coordinate as per 'fvar' table for
            that axis is used.
            The limit values can also be (min, max) tuples for restricting an
            axis's variation range. The default axis value must be included in
            the new range.
        inplace (bool): whether to modify input TTFont object in-place instead of
            returning a distinct object.
        optimize (bool): if False, do not perform IUP-delta optimization on the
            remaining 'gvar' table's deltas. Possibly faster, and might work around
            rendering issues in some buggy environments, at the cost of a slightly
            larger file size.
        overlap (OverlapMode): variable fonts usually contain overlapping contours, and
            some font rendering engines on Apple platforms require that the
            `OVERLAP_SIMPLE` and `OVERLAP_COMPOUND` flags in the 'glyf' table be set to
            force rendering using a non-zero fill rule. Thus we always set these flags
            on all glyphs to maximise cross-compatibility of the generated instance.
            You can disable this by passing OverlapMode.KEEP_AND_DONT_SET_FLAGS.
            If you want to remove the overlaps altogether and merge overlapping
            contours and components, you can pass OverlapMode.REMOVE (or
            REMOVE_AND_IGNORE_ERRORS to not hard-fail on tricky glyphs). Note that this
            requires the skia-pathops package (available to pip install).
            The overlap parameter only has effect when generating full static instances.
        updateFontNames (bool): if True, update the instantiated font's name table using
            the Axis Value Tables from the STAT table. The name table and the style bits
            in the head and OS/2 table will be updated so they conform to the R/I/B/BI
            model. If the STAT table is missing or an Axis Value table is missing for
            a given axis coordinate, a ValueError will be raised.

    Returns:
        The instantiated TTFont (the same object as `varfont` when inplace=True).
    """
    # 'overlap' used to be bool and is now enum; for backward compat keep accepting bool
    overlap = OverlapMode(int(overlap))

    sanityCheckVariableTables(varfont)

    # Fill in fvar defaults for any axis limit given as None.
    axisLimits = populateAxisDefaults(varfont, axisLimits)

    # Variation tables work in normalized (-1..+1) space; name/STAT/fvar/avar
    # below instead take the user-space axisLimits.
    normalizedLimits = normalizeAxisLimits(varfont, axisLimits)

    log.info("Normalized limits: %s", normalizedLimits)

    if not inplace:
        varfont = deepcopy(varfont)

    if updateFontNames:
        log.info("Updating name table")
        names.updateNameTable(varfont, axisLimits)

    # Instantiate each variation table that is present.
    if "gvar" in varfont:
        instantiateGvar(varfont, normalizedLimits, optimize=optimize)

    if "cvar" in varfont:
        instantiateCvar(varfont, normalizedLimits)

    if "MVAR" in varfont:
        instantiateMVAR(varfont, normalizedLimits)

    if "HVAR" in varfont:
        instantiateHVAR(varfont, normalizedLimits)

    if "VVAR" in varfont:
        instantiateVVAR(varfont, normalizedLimits)

    instantiateOTL(varfont, normalizedLimits)

    instantiateFeatureVariations(varfont, normalizedLimits)

    if "avar" in varfont:
        instantiateAvar(varfont, axisLimits)

    # Drop name records that became unreferenced by the STAT/fvar pruning.
    with names.pruningUnusedNames(varfont):
        if "STAT" in varfont:
            instantiateSTAT(varfont, axisLimits)

        instantiateFvar(varfont, axisLimits)

    # 'fvar' gone means all axes were pinned: this is a full static instance.
    if "fvar" not in varfont:
        if "glyf" in varfont:
            if overlap == OverlapMode.KEEP_AND_SET_FLAGS:
                setMacOverlapFlags(varfont["glyf"])
            elif overlap in (OverlapMode.REMOVE, OverlapMode.REMOVE_AND_IGNORE_ERRORS):
                from fontTools.ttLib.removeOverlaps import removeOverlaps

                log.info("Removing overlaps from glyf table")
                removeOverlaps(
                    varfont,
                    ignoreErrors=(overlap == OverlapMode.REMOVE_AND_IGNORE_ERRORS),
                )

    # Update OS/2 / post metadata for axes that were pinned to a single value.
    varLib.set_default_weight_width_slant(
        varfont,
        location={
            axisTag: _expand(limit)[1]
            for axisTag, limit in axisLimits.items()
            if _expand(limit)[0] == _expand(limit)[2]
        },
    )

    if updateFontNames:
        # Set Regular/Italic/Bold/Bold Italic bits as appropriate, after the
        # name table has been updated.
        setRibbiBits(varfont)

    return varfont
|
|
|
|
|
|
|
|
|
2022-07-05 15:45:44 +01:00
|
|
|
def setRibbiBits(font):
    """Set the `head.macStyle` and `OS/2.fsSelection` style bits
    appropriately."""

    english_ribbi_style = font["name"].getName(names.NameID.SUBFAMILY_NAME, 3, 1, 0x409)
    if english_ribbi_style is None:
        return

    styleMapStyleName = english_ribbi_style.toStr().lower()

    # head.macStyle: bit 0 = Bold, bit 1 = Italic; "regular" leaves macStyle
    # untouched, matching the previous behaviour.
    macStyleBits = {"bold": 0b01, "bold italic": 0b11, "italic": 0b10}
    # OS/2.fsSelection: bit 0 = ITALIC, bit 5 = BOLD, bit 6 = REGULAR.
    fsSelectionBits = {
        "regular": 1 << 6,
        "bold": 1 << 5,
        "italic": 1 << 0,
        "bold italic": (1 << 0) | (1 << 5),
    }
    # Only act on the four canonical R/I/B/BI style names.
    if styleMapStyleName not in fsSelectionBits:
        return

    if styleMapStyleName in macStyleBits:
        font["head"].macStyle = macStyleBits[styleMapStyleName]

    # First clear the R/I/B bits, then re-set the ones matching the style name.
    selection = font["OS/2"].fsSelection
    selection &= ~((1 << 0) | (1 << 5) | (1 << 6))
    selection |= fsSelectionBits[styleMapStyleName]
    font["OS/2"].fsSelection = selection
|
2022-06-30 18:56:03 +01:00
|
|
|
|
|
|
|
|
2019-09-18 17:00:53 +01:00
|
|
|
def splitAxisLocationAndRanges(axisLimits, rangeType=AxisRange):
    """Partition `axisLimits` into pinned coordinates and restricted ranges.

    Returns a (location, axisRanges) pair: `location` maps axis tags pinned to
    a single coordinate, `axisRanges` maps the remaining tags to `rangeType`
    (min, max) objects. Note: the range's default value is not carried over.
    """
    location = {}
    axisRanges = {}
    for axisTag, value in axisLimits.items():
        minimum, default, maximum = _expand(value)
        if minimum == maximum == default:
            location[axisTag] = default
        else:
            axisRanges[axisTag] = rangeType(minimum, maximum)
    return location, axisRanges
|
|
|
|
|
|
|
|
|
2019-03-06 21:54:15 -08:00
|
|
|
def parseLimits(limits):
    """Parse a list of "tag=limit" CLI strings into an axis limits dict.

    Each item must be of the form "tag=value", "tag=min:max",
    "tag=min:default:max", or "tag=drop".

    Returns:
        dict mapping 4-char axis tags to either None (for 'drop', meaning pin
        the axis at its fvar default — see populateAxisDefaults) or a
        (min, default, max) triple; default is None for "min:max" inputs, to
        be filled in later from fvar.

    Raises:
        ValueError: if an item doesn't match the expected syntax, or the
            parsed values are not in min <= default <= max order.
    """
    result = {}
    for limitString in limits:
        match = re.match(
            r"^(\w{1,4})=(?:(drop)|(?:([^:]+)(?:[:]([^:]+))?(?:[:]([^:]+))?))$",
            limitString,
        )
        if not match:
            raise ValueError("invalid location format: %r" % limitString)
        tag = match.group(1).ljust(4)

        if match.group(2):  # 'drop'
            # Record None so populateAxisDefaults pins the axis at its default.
            # (Previously the bounds checks below would compare None values and
            # raise TypeError, or silently skip the axis.)
            result[tag] = None
            continue

        lbound = strToFixedToFloat(match.group(3), precisionBits=16)
        ubound = default = lbound
        if match.group(4):  # "min:max" — default unknown until fvar is consulted
            ubound = default = strToFixedToFloat(match.group(4), precisionBits=16)
            default = None
        if match.group(5):  # "min:default:max"
            default = ubound
            ubound = strToFixedToFloat(match.group(5), precisionBits=16)

        # Validate ordering with real exceptions (asserts vanish under -O).
        if default is None:
            if not lbound <= ubound:
                raise ValueError(
                    "invalid axis limits %r: min must be <= max" % limitString
                )
        elif not lbound <= default <= ubound:
            raise ValueError(
                "invalid axis limits %r: must be min <= default <= max" % limitString
            )
        result[tag] = (lbound, default, ubound)
    return result
|
|
|
|
|
|
|
|
|
|
|
|
def parseArgs(args):
    """Parse argv.

    Returns:
        3-tuple (infile, axisLimits, options)
        axisLimits is either a Dict[str, Optional[float]], for pinning variation axes
        to specific coordinates along those axes (with `None` as a placeholder for an
        axis' default value); or a Dict[str, Tuple(float, float)], meaning limit this
        axis to min/max range.
        Axes locations are in user-space coordinates, as defined in the "fvar" table.

    NOTE(review): the value shapes described above predate the
    "min:default:max" syntax — verify against what parseLimits actually
    returns in this version before relying on them.
    """
    # Deferred imports: only needed when the module is run as a CLI.
    from fontTools import configLogger
    import argparse

    parser = argparse.ArgumentParser(
        "fonttools varLib.instancer",
        description="Partially instantiate a variable font",
    )
    parser.add_argument("input", metavar="INPUT.ttf", help="Input variable TTF file.")
    parser.add_argument(
        "locargs",
        metavar="AXIS=LOC",
        nargs="*",
        help="List of space separated locations. A location consists of "
        "the tag of a variation axis, followed by '=' and one of number, "
        "number:number or the literal string 'drop'. "
        "E.g.: wdth=100 or wght=75.0:125.0 or wght=drop",
    )
    parser.add_argument(
        "-o",
        "--output",
        metavar="OUTPUT.ttf",
        default=None,
        help="Output instance TTF file (default: INPUT-instance.ttf).",
    )
    parser.add_argument(
        "--no-optimize",
        dest="optimize",
        action="store_false",
        help="Don't perform IUP optimization on the remaining gvar TupleVariations",
    )
    parser.add_argument(
        "--no-overlap-flag",
        dest="overlap",
        action="store_false",
        help="Don't set OVERLAP_SIMPLE/OVERLAP_COMPOUND glyf flags (only applicable "
        "when generating a full instance)",
    )
    parser.add_argument(
        "--remove-overlaps",
        dest="remove_overlaps",
        action="store_true",
        help="Merge overlapping contours and components (only applicable "
        "when generating a full instance). Requires skia-pathops",
    )
    parser.add_argument(
        "--ignore-overlap-errors",
        dest="ignore_overlap_errors",
        action="store_true",
        help="Don't crash if the remove-overlaps operation fails for some glyphs.",
    )
    parser.add_argument(
        "--update-name-table",
        action="store_true",
        help="Update the instantiated font's `name` table. Input font must have "
        "a STAT table with Axis Value Tables",
    )
    parser.add_argument(
        "--no-recalc-timestamp",
        dest="recalc_timestamp",
        action="store_false",
        help="Don't set the output font's timestamp to the current time.",
    )
    parser.add_argument(
        "--no-recalc-bounds",
        dest="recalc_bounds",
        action="store_false",
        help="Don't recalculate font bounding boxes",
    )
    # -v and -q are mutually exclusive; both default to False (INFO level).
    loggingGroup = parser.add_mutually_exclusive_group(required=False)
    loggingGroup.add_argument(
        "-v", "--verbose", action="store_true", help="Run more verbosely."
    )
    loggingGroup.add_argument(
        "-q", "--quiet", action="store_true", help="Turn verbosity off."
    )
    options = parser.parse_args(args)

    # Fold the three overlap-related CLI flags into a single OverlapMode
    # enum value stored on options.overlap.
    if options.remove_overlaps:
        if options.ignore_overlap_errors:
            options.overlap = OverlapMode.REMOVE_AND_IGNORE_ERRORS
        else:
            options.overlap = OverlapMode.REMOVE
    else:
        # options.overlap is still the store_false boolean here; map
        # False/True to the OverlapMode member with value 0/1.
        options.overlap = OverlapMode(int(options.overlap))

    infile = options.input
    if not os.path.isfile(infile):
        parser.error("No such file '{}'".format(infile))

    configLogger(
        level=("DEBUG" if options.verbose else "ERROR" if options.quiet else "INFO")
    )

    try:
        axisLimits = parseLimits(options.locargs)
    except ValueError as e:
        # Report malformed AXIS=LOC arguments as a usage error (exits with
        # status 2) rather than a traceback.
        parser.error(str(e))

    # parseLimits keys results by axis tag, so duplicate tags collapse into
    # one entry; a length mismatch means the same axis was specified twice.
    if len(axisLimits) != len(options.locargs):
        parser.error("Specified multiple limits for the same axis")

    return (infile, axisLimits, options)
|
2019-03-06 21:54:15 -08:00
|
|
|
|
|
|
|
|
|
|
|
def main(args=None):
    """Partially instantiate a variable font"""
    infile, axisLimits, options = parseArgs(args)
    log.info("Restricting axes: %s", axisLimits)

    log.info("Loading variable font")
    varfont = TTFont(
        infile,
        recalcTimestamp=options.recalc_timestamp,
        recalcBBoxes=options.recalc_bounds,
    )

    # An axis counts as "pinned" when its limit is not a (min, max) tuple;
    # the output is a full static instance only when every fvar axis is
    # pinned. NOTE(review): confirm this isinstance test still matches the
    # value shapes parseLimits emits in this version.
    pinnedAxisTags = set()
    for axisTag, limit in axisLimits.items():
        if not isinstance(limit, tuple):
            pinnedAxisTags.add(axisTag)
    isFullInstance = all(
        axis.axisTag in pinnedAxisTags for axis in varfont["fvar"].axes
    )

    instantiateVariableFont(
        varfont,
        axisLimits,
        inplace=True,
        optimize=options.optimize,
        overlap=options.overlap,
        updateFontNames=options.update_name_table,
    )

    if options.output:
        outfile = options.output
    else:
        # Derive the output name from the input, with a suffix reflecting
        # whether the result is a static instance or still a variable font.
        suffix = "-instance" if isFullInstance else "-partial"
        outfile = makeOutputFileName(infile, overWrite=True, suffix=suffix)

    fontKind = "instance" if isFullInstance else "partial variable"
    log.info("Saving %s font %s", fontKind, outfile)
    varfont.save(outfile)
|