Merge branch 'main' into designspaceLib-public-fontInfo
commit e11074a6ee
.github/workflows/wheels.yml (vendored)
@@ -114,7 +114,7 @@ jobs:
         with:
           name: artifact
           path: dist
-      - uses: pypa/gh-action-pypi-publish@v1.8.10
+      - uses: pypa/gh-action-pypi-publish@v1.8.11
         with:
           user: __token__
           password: ${{ secrets.PYPI_PASSWORD }}
@@ -1,4 +1,4 @@
 sphinx==7.2.6
-sphinx_rtd_theme==1.3.0
+sphinx_rtd_theme==2.0.0
-reportlab==4.0.6
+reportlab==4.0.7
 freetype-py==2.4.0
Binary file (image) not shown. Before: 267 KiB | After: 283 KiB
@@ -112,7 +112,7 @@ class Source {
   + path: str
   + layerName: Optional[str]
   + <color:brown><s><<Deprecated>> location: Location
-  + <color:green><b><<New>> designLocation: AnisotropicLocation
+  + <color:green><b><<New>> designLocation: SimpleLocation
   ....
   + font: Optional[Font]
   ....
@@ -438,8 +438,8 @@ glyphname pairs: the glyphs that need to be substituted. For a rule to be trigge
 See the following issues for more information:
 `fontTools#1371 <https://github.com/fonttools/fonttools/issues/1371#issuecomment-590214572>`__
 `fontTools#2050 <https://github.com/fonttools/fonttools/issues/2050#issuecomment-678691020>`__
-- If you want to use a different feature altogether, e.g. ``calt``,
-  use the lib key ``com.github.fonttools.varLib.featureVarsFeatureTag``
+- If you want to use a different feature(s) altogether, e.g. ``calt``,
+  use the lib key ``com.github.fonttools.varLib.featureVarsFeatureTag``.

 .. code:: xml

@@ -450,6 +450,9 @@ glyphname pairs: the glyphs that need to be substituted. For a rule to be trigge
     </dict>
   </lib>

+This can also take a comma-separated list of feature tags, e.g. ``salt,ss01``,
+if you wish the same rules to be applied with several features.

 ``<rule>`` element
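The same lib key can also be set programmatically before building. A minimal sketch (not part of this commit; the designspace path is a placeholder) using the comma-separated form described above:

    from fontTools.designspaceLib import DesignSpaceDocument

    doc = DesignSpaceDocument.fromfile("MyFont.designspace")  # placeholder path
    # Same effect as the <lib> XML above: compile the rules under both features.
    doc.lib["com.github.fonttools.varLib.featureVarsFeatureTag"] = "salt,ss01"
    doc.write("MyFont.designspace")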
@@ -3,6 +3,6 @@ from fontTools.misc.loggingTools import configLogger

 log = logging.getLogger(__name__)

-version = __version__ = "4.45.2.dev0"
+version = __version__ = "4.46.1.dev0"

 __all__ = ["version", "log", "configLogger"]
@@ -82,7 +82,10 @@ kernRE = re.compile(
 # regular expressions to parse composite info lines of the form:
 # Aacute 2 ; PCC A 0 0 ; PCC acute 182 211 ;
 compositeRE = re.compile(
-    r"([.A-Za-z0-9_]+)" r"\s+" r"(\d+)" r"\s*;\s*"  # char name  # number of parts
+    r"([.A-Za-z0-9_]+)"  # char name
+    r"\s+"
+    r"(\d+)"  # number of parts
+    r"\s*;\s*"
 )
 componentRE = re.compile(
     r"PCC\s+"  # PPC
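The reformatted pattern matches the same composite info lines as before; a quick sketch (not part of the commit) checking it against the sample line from the comment:

    import re

    compositeRE = re.compile(
        r"([.A-Za-z0-9_]+)"  # char name
        r"\s+"
        r"(\d+)"  # number of parts
        r"\s*;\s*"
    )
    m = compositeRE.match("Aacute 2 ; PCC A 0 0 ; PCC acute 182 211 ;")
    assert m.groups() == ("Aacute", "2")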
@@ -312,7 +312,7 @@ class SourceDescriptor(SimpleDescriptor):
         return self.designLocation

     @location.setter
-    def location(self, location: Optional[AnisotropicLocationDict]):
+    def location(self, location: Optional[SimpleLocationDict]):
         self.designLocation = location or {}

     def setFamilyName(self, familyName, languageCode="en"):
@@ -329,15 +329,13 @@ class SourceDescriptor(SimpleDescriptor):
         """
         return self.localisedFamilyName.get(languageCode)

-    def getFullDesignLocation(
-        self, doc: "DesignSpaceDocument"
-    ) -> AnisotropicLocationDict:
+    def getFullDesignLocation(self, doc: "DesignSpaceDocument") -> SimpleLocationDict:
         """Get the complete design location of this source, from its
         :attr:`designLocation` and the document's axis defaults.

         .. versionadded:: 5.0
         """
-        result: AnisotropicLocationDict = {}
+        result: SimpleLocationDict = {}
         for axis in doc.axes:
             if axis.name in self.designLocation:
                 result[axis.name] = self.designLocation[axis.name]
@@ -1370,6 +1370,11 @@ def _curve_curve_intersections_t(
     return unique_values


+def _is_linelike(segment):
+    maybeline = _alignment_transformation(segment).transformPoints(segment)
+    return all(math.isclose(p[1], 0.0) for p in maybeline)
+
+
 def curveCurveIntersections(curve1, curve2):
     """Finds intersections between a curve and a curve.

@@ -1391,6 +1396,17 @@ def curveCurveIntersections(curve1, curve2):
     >>> intersections[0].pt
     (81.7831487395506, 109.88904552375288)
     """
+    if _is_linelike(curve1):
+        line1 = curve1[0], curve1[-1]
+        if _is_linelike(curve2):
+            line2 = curve2[0], curve2[-1]
+            return lineLineIntersections(*line1, *line2)
+        else:
+            return curveLineIntersections(curve2, line1)
+    elif _is_linelike(curve2):
+        line2 = curve2[0], curve2[-1]
+        return curveLineIntersections(curve1, line2)
+
     intersection_ts = _curve_curve_intersections_t(curve1, curve2)
     return [
         Intersection(pt=segmentPointAtT(curve1, ts[0]), t1=ts[0], t2=ts[1])
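With the new _is_linelike guard, a degenerate cubic whose points are collinear is routed through the line intersection helpers instead of the numerical curve-curve solver. A rough sketch (not part of the commit; the coordinates are made up for illustration):

    from fontTools.misc.bezierTools import curveCurveIntersections

    flat = [(0, 0), (30, 0), (70, 0), (100, 0)]           # line-like cubic along y=0
    curve = [(50, -50), (40, 50), (60, 50), (50, -50)]    # genuinely curved segment
    # After this change, the call below is handled via curveLineIntersections().
    intersections = curveCurveIntersections(flat, curve)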
@@ -8,6 +8,7 @@ __all__ = [
     "RecordingPen",
     "DecomposingRecordingPen",
     "RecordingPointPen",
+    "lerpRecordings",
 ]


@@ -172,6 +173,34 @@ class RecordingPointPen(AbstractPointPen):
     drawPoints = replay


+def lerpRecordings(recording1, recording2, factor=0.5):
+    """Linearly interpolate between two recordings. The recordings
+    must be decomposed, i.e. they must not contain any components.
+
+    Factor is typically between 0 and 1. 0 means the first recording,
+    1 means the second recording, and 0.5 means the average of the
+    two recordings. Other values are possible, and can be useful to
+    extrapolate. Defaults to 0.5.
+
+    Returns a generator with the new recording.
+    """
+    if len(recording1) != len(recording2):
+        raise ValueError(
+            "Mismatched lengths: %d and %d" % (len(recording1), len(recording2))
+        )
+    for (op1, args1), (op2, args2) in zip(recording1, recording2):
+        if op1 != op2:
+            raise ValueError("Mismatched operations: %s, %s" % (op1, op2))
+        if op1 == "addComponent":
+            raise ValueError("Cannot interpolate components")
+        else:
+            mid_args = [
+                (x1 + (x2 - x1) * factor, y1 + (y2 - y1) * factor)
+                for (x1, y1), (x2, y2) in zip(args1, args2)
+            ]
+        yield (op1, mid_args)
+
+
 if __name__ == "__main__":
     pen = RecordingPen()
     pen.moveTo((0, 0))
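A small usage sketch for the new lerpRecordings generator (not part of the commit): it interpolates two compatible recordings halfway and replays the result onto a third pen.

    from fontTools.pens.recordingPen import RecordingPen, lerpRecordings, replayRecording

    pen1, pen2 = RecordingPen(), RecordingPen()
    pen1.moveTo((0, 0)); pen1.lineTo((100, 0)); pen1.closePath()
    pen2.moveTo((0, 10)); pen2.lineTo((200, 10)); pen2.closePath()

    out = RecordingPen()
    replayRecording(lerpRecordings(pen1.value, pen2.value, factor=0.5), out)
    # out.value now holds the averaged contour: moveTo (0, 5), lineTo (150, 5), closePath.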
@@ -9,6 +9,11 @@ from fontTools.misc.fixedTools import otRound
 from fontTools.misc.loggingTools import deprecateFunction
 from fontTools.misc.transform import Transform
 from fontTools.pens.transformPen import TransformPen, TransformPointPen
+from fontTools.pens.recordingPen import (
+    DecomposingRecordingPen,
+    lerpRecordings,
+    replayRecording,
+)


 class _TTGlyphSet(Mapping):
@@ -321,3 +326,52 @@ def _setCoordinates(glyph, coord, glyfTable, *, recalcBounds=True):
         verticalAdvanceWidth,
         topSideBearing,
     )
+
+
+class LerpGlyphSet(Mapping):
+    """A glyphset that interpolates between two other glyphsets.
+
+    Factor is typically between 0 and 1. 0 means the first glyphset,
+    1 means the second glyphset, and 0.5 means the average of the
+    two glyphsets. Other values are possible, and can be useful to
+    extrapolate. Defaults to 0.5.
+    """
+
+    def __init__(self, glyphset1, glyphset2, factor=0.5):
+        self.glyphset1 = glyphset1
+        self.glyphset2 = glyphset2
+        self.factor = factor
+
+    def __getitem__(self, glyphname):
+        if glyphname in self.glyphset1 and glyphname in self.glyphset2:
+            return LerpGlyph(glyphname, self)
+        raise KeyError(glyphname)
+
+    def __contains__(self, glyphname):
+        return glyphname in self.glyphset1 and glyphname in self.glyphset2
+
+    def __iter__(self):
+        set1 = set(self.glyphset1)
+        set2 = set(self.glyphset2)
+        return iter(set1.intersection(set2))
+
+    def __len__(self):
+        set1 = set(self.glyphset1)
+        set2 = set(self.glyphset2)
+        return len(set1.intersection(set2))
+
+
+class LerpGlyph:
+    def __init__(self, glyphname, glyphset):
+        self.glyphset = glyphset
+        self.glyphname = glyphname
+
+    def draw(self, pen):
+        recording1 = DecomposingRecordingPen(self.glyphset.glyphset1)
+        self.glyphset.glyphset1[self.glyphname].draw(recording1)
+        recording2 = DecomposingRecordingPen(self.glyphset.glyphset2)
+        self.glyphset.glyphset2[self.glyphname].draw(recording2)
+
+        factor = self.glyphset.factor
+
+        replayRecording(lerpRecordings(recording1.value, recording2.value, factor), pen)
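Usage sketch for the relocated LerpGlyphSet (not part of the commit; font paths and glyph name are placeholders):

    from fontTools.ttLib import TTFont
    from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
    from fontTools.pens.recordingPen import RecordingPen

    glyphset1 = TTFont("Master-Light.ttf").getGlyphSet()
    glyphset2 = TTFont("Master-Bold.ttf").getGlyphSet()
    midway = LerpGlyphSet(glyphset1, glyphset2, factor=0.5)

    pen = RecordingPen()
    midway["A"].draw(pen)  # decomposes both masters, interpolates, replays onto pen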
@@ -52,7 +52,8 @@ from .errors import VarLibError, VarLibValidationError
 log = logging.getLogger("fontTools.varLib")

 # This is a lib key for the designspace document. The value should be
-# an OpenType feature tag, to be used as the FeatureVariations feature.
+# a comma-separated list of OpenType feature tag(s), to be used as the
+# FeatureVariations feature.
 # If present, the DesignSpace <rules processing="..."> flag is ignored.
 FEAVAR_FEATURETAG_LIB_KEY = "com.github.fonttools.varLib.featureVarsFeatureTag"

@@ -781,7 +782,9 @@ def _merge_OTL(font, model, master_fonts, axisTags):
     font["GPOS"].table.remap_device_varidxes(varidx_map)


-def _add_GSUB_feature_variations(font, axes, internal_axis_supports, rules, featureTag):
+def _add_GSUB_feature_variations(
+    font, axes, internal_axis_supports, rules, featureTags
+):
     def normalize(name, value):
         return models.normalizeLocation({name: value}, internal_axis_supports)[name]

@@ -812,7 +815,7 @@ def _add_GSUB_feature_variations(font, axes, internal_axis_supports, rules, feat

         conditional_subs.append((region, subs))

-    addFeatureVariations(font, conditional_subs, featureTag)
+    addFeatureVariations(font, conditional_subs, featureTags)


 _DesignSpaceData = namedtuple(
@@ -860,7 +863,7 @@ def _add_COLR(font, model, master_fonts, axisTags, colr_layer_reuse=True):
     colr.VarIndexMap = builder.buildDeltaSetIndexMap(varIdxes)


-def load_designspace(designspace):
+def load_designspace(designspace, log_enabled=True):
     # TODO: remove this and always assume 'designspace' is a DesignSpaceDocument,
     # never a file path, as that's already handled by caller
     if hasattr(designspace, "sources"):  # Assume a DesignspaceDocument
@@ -908,10 +911,11 @@ def load_designspace(designspace):
             axis.labelNames["en"] = tostr(axis_name)

         axes[axis_name] = axis
+    if log_enabled:
         log.info("Axes:\n%s", pformat([axis.asdict() for axis in axes.values()]))

     axisMappings = ds.axisMappings
-    if axisMappings:
+    if axisMappings and log_enabled:
         log.info("Mappings:\n%s", pformat(axisMappings))

     # Check all master and instance locations are valid and fill in defaults
@@ -941,6 +945,7 @@ def load_designspace(designspace):
     # Normalize master locations

     internal_master_locs = [o.getFullDesignLocation(ds) for o in masters]
+    if log_enabled:
         log.info("Internal master locations:\n%s", pformat(internal_master_locs))

     # TODO This mapping should ideally be moved closer to logic in _add_fvar/avar
@@ -948,12 +953,14 @@ def load_designspace(designspace):
     for axis in axes.values():
         triple = (axis.minimum, axis.default, axis.maximum)
         internal_axis_supports[axis.name] = [axis.map_forward(v) for v in triple]
+    if log_enabled:
         log.info("Internal axis supports:\n%s", pformat(internal_axis_supports))

     normalized_master_locs = [
         models.normalizeLocation(m, internal_axis_supports)
         for m in internal_master_locs
     ]
+    if log_enabled:
         log.info("Normalized master locations:\n%s", pformat(normalized_master_locs))

     # Find base master
@@ -969,6 +976,7 @@ def load_designspace(designspace):
         raise VarLibValidationError(
             "Base master not found; no master at default location?"
         )
+    if log_enabled:
         log.info("Index of base master: %s", base_idx)

     return _DesignSpaceData(
@@ -1204,11 +1212,9 @@ def build(
     if "cvar" not in exclude and "glyf" in vf:
         _merge_TTHinting(vf, model, master_fonts)
     if "GSUB" not in exclude and ds.rules:
-        featureTag = ds.lib.get(
-            FEAVAR_FEATURETAG_LIB_KEY, "rclt" if ds.rulesProcessingLast else "rvrn"
-        )
+        featureTags = _feature_variations_tags(ds)
         _add_GSUB_feature_variations(
-            vf, ds.axes, ds.internal_axis_supports, ds.rules, featureTag
+            vf, ds.axes, ds.internal_axis_supports, ds.rules, featureTags
         )
     if "CFF2" not in exclude and ("CFF " in vf or "CFF2" in vf):
         _add_CFF2(vf, model, master_fonts)
@@ -1299,6 +1305,38 @@ class MasterFinder(object):
         return os.path.normpath(path)


+def _feature_variations_tags(ds):
+    raw_tags = ds.lib.get(
+        FEAVAR_FEATURETAG_LIB_KEY,
+        "rclt" if ds.rulesProcessingLast else "rvrn",
+    )
+    return sorted({t.strip() for t in raw_tags.split(",")})
+
+
+def addGSUBFeatureVariations(vf, designspace, featureTags=(), *, log_enabled=False):
+    """Add GSUB FeatureVariations table to variable font, based on DesignSpace rules.
+
+    Args:
+        vf: A TTFont object representing the variable font.
+        designspace: A DesignSpaceDocument object.
+        featureTags: Optional feature tag(s) to use for the FeatureVariations records.
+            If unset, the key 'com.github.fonttools.varLib.featureVarsFeatureTag' is
+            looked up in the DS <lib> and used; otherwise the default is 'rclt' if
+            the <rules processing="last"> attribute is set, else 'rvrn'.
+            See <https://fonttools.readthedocs.io/en/latest/designspaceLib/xml.html#rules-element>
+        log_enabled: If True, log info about DS axes and sources. Default is False, as
+            the same info may have already been logged as part of varLib.build.
+    """
+    ds = load_designspace(designspace, log_enabled=log_enabled)
+    if not ds.rules:
+        return
+    if not featureTags:
+        featureTags = _feature_variations_tags(ds)
+    _add_GSUB_feature_variations(
+        vf, ds.axes, ds.internal_axis_supports, ds.rules, featureTags
+    )
+
+
 def main(args=None):
     """Build variable fonts from a designspace file and masters"""
     from argparse import ArgumentParser
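Sketch of the new public entry point (not part of the commit; file paths are placeholders). It loads the designspace, resolves the feature tag(s) as documented above, and adds GSUB FeatureVariations to an already-built variable font:

    from fontTools.ttLib import TTFont
    from fontTools.designspaceLib import DesignSpaceDocument
    from fontTools.varLib import addGSUBFeatureVariations

    vf = TTFont("MyFont-VF.ttf")
    ds = DesignSpaceDocument.fromfile("MyFont.designspace")
    addGSUBFeatureVariations(vf, ds, featureTags=["rvrn"])
    vf.save("MyFont-VF.ttf")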
@@ -43,9 +43,18 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag="rvrn"):
     # ... ]
     # >>> addFeatureVariations(f, condSubst)
     # >>> f.save(dstPath)

+    The `featureTag` parameter takes either a str or a iterable of str (the single str
+    is kept for backwards compatibility), and defines which feature(s) will be
+    associated with the feature variations.
+    Note, if this is "rvrn", then the substitution lookup will be inserted at the
+    beginning of the lookup list so that it is processed before others, otherwise
+    for any other feature tags it will be appended last.
     """
-    processLast = featureTag != "rvrn"
+    # process first when "rvrn" is the only listed tag
+    featureTags = [featureTag] if isinstance(featureTag, str) else sorted(featureTag)
+    processLast = "rvrn" not in featureTags or len(featureTags) > 1

     _checkSubstitutionGlyphsExist(
         glyphNames=set(font.getGlyphOrder()),
@@ -60,6 +69,14 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag="rvrn"):
     )
     if "GSUB" not in font:
         font["GSUB"] = buildGSUB()
+    else:
+        existingTags = _existingVariableFeatures(font["GSUB"].table).intersection(
+            featureTags
+        )
+        if existingTags:
+            raise VarLibError(
+                f"FeatureVariations already exist for feature tag(s): {existingTags}"
+            )

     # setup lookups
     lookupMap = buildSubstitutionLookups(
@@ -75,7 +92,17 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag="rvrn"):
             (conditionSet, [lookupMap[s] for s in substitutions])
         )

-    addFeatureVariationsRaw(font, font["GSUB"].table, conditionsAndLookups, featureTag)
+    addFeatureVariationsRaw(font, font["GSUB"].table, conditionsAndLookups, featureTags)
+
+
+def _existingVariableFeatures(table):
+    existingFeatureVarsTags = set()
+    if hasattr(table, "FeatureVariations") and table.FeatureVariations is not None:
+        features = table.FeatureList.FeatureRecord
+        for fvr in table.FeatureVariations.FeatureVariationRecord:
+            for ftsr in fvr.FeatureTableSubstitution.SubstitutionRecord:
+                existingFeatureVarsTags.add(features[ftsr.FeatureIndex].FeatureTag)
+    return existingFeatureVarsTags


 def _checkSubstitutionGlyphsExist(glyphNames, substitutions):
@@ -324,32 +351,42 @@ def addFeatureVariationsRaw(font, table, conditionalSubstitutions, featureTag="r
     """Low level implementation of addFeatureVariations that directly
     models the possibilities of the FeatureVariations table."""

-    processLast = featureTag != "rvrn"
+    featureTags = [featureTag] if isinstance(featureTag, str) else sorted(featureTag)
+    processLast = "rvrn" not in featureTags or len(featureTags) > 1

     #
-    # if there is no <featureTag> feature:
+    # if a <featureTag> feature is not present:
     #   make empty <featureTag> feature
     #   sort features, get <featureTag> feature index
     #   add <featureTag> feature to all scripts
+    # if a <featureTag> feature is present:
+    #   reuse <featureTag> feature index
     # make lookups
     # add feature variations
     #
     if table.Version < 0x00010001:
         table.Version = 0x00010001  # allow table.FeatureVariations

-    table.FeatureVariations = None  # delete any existing FeatureVariations
+    varFeatureIndices = set()

-    varFeatureIndices = []
+    existingTags = {
-    for index, feature in enumerate(table.FeatureList.FeatureRecord):
+        feature.FeatureTag
-        if feature.FeatureTag == featureTag:
+        for feature in table.FeatureList.FeatureRecord
-            varFeatureIndices.append(index)
+        if feature.FeatureTag in featureTags
+    }

-    if not varFeatureIndices:
+    newTags = set(featureTags) - existingTags
+    if newTags:
+        varFeatures = []
+        for featureTag in sorted(newTags):
             varFeature = buildFeatureRecord(featureTag, [])
             table.FeatureList.FeatureRecord.append(varFeature)
+            varFeatures.append(varFeature)
         table.FeatureList.FeatureCount = len(table.FeatureList.FeatureRecord)

         sortFeatureList(table)

+        for varFeature in varFeatures:
             varFeatureIndex = table.FeatureList.FeatureRecord.index(varFeature)

             for scriptRecord in table.ScriptList.ScriptRecord:
@@ -362,8 +399,16 @@ def addFeatureVariationsRaw(font, table, conditionalSubstitutions, featureTag="r
                 for langSys in [scriptRecord.Script.DefaultLangSys] + langSystems:
                     langSys.FeatureIndex.append(varFeatureIndex)
                     langSys.FeatureCount = len(langSys.FeatureIndex)
+            varFeatureIndices.add(varFeatureIndex)
+
-    varFeatureIndices = [varFeatureIndex]
+    if existingTags:
+        # indices may have changed if we inserted new features and sorted feature list
+        # so we must do this after the above
+        varFeatureIndices.update(
+            index
+            for index, feature in enumerate(table.FeatureList.FeatureRecord)
+            if feature.FeatureTag in existingTags
+        )

     axisIndices = {
         axis.axisTag: axisIndex for axisIndex, axis in enumerate(font["fvar"].axes)
@@ -380,7 +425,7 @@ def addFeatureVariationsRaw(font, table, conditionalSubstitutions, featureTag="r
             ct = buildConditionTable(axisIndices[axisTag], minValue, maxValue)
             conditionTable.append(ct)
         records = []
-        for varFeatureIndex in varFeatureIndices:
+        for varFeatureIndex in sorted(varFeatureIndices):
             existingLookupIndices = table.FeatureList.FeatureRecord[
                 varFeatureIndex
             ].Feature.LookupListIndex
@@ -399,6 +444,17 @@ def addFeatureVariationsRaw(font, table, conditionalSubstitutions, featureTag="r
             buildFeatureVariationRecord(conditionTable, records)
         )

+    if hasattr(table, "FeatureVariations") and table.FeatureVariations is not None:
+        if table.FeatureVariations.Version != 0x00010000:
+            raise VarLibError(
+                "Unsupported FeatureVariations table version: "
+                f"0x{table.FeatureVariations.Version:08x} (expected 0x00010000)."
+            )
+        table.FeatureVariations.FeatureVariationRecord.extend(featureVariationRecords)
+        table.FeatureVariations.FeatureVariationCount = len(
+            table.FeatureVariations.FeatureVariationRecord
+        )
+    else:
         table.FeatureVariations = buildFeatureVariations(featureVariationRecords)
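With this change addFeatureVariations also accepts an iterable of feature tags. A sketch (not part of the commit; the font and glyph names are placeholders, and the font is assumed to be a variable TTFont with an fvar table):

    from fontTools.ttLib import TTFont
    from fontTools.varLib.featureVars import addFeatureVariations

    font = TTFont("MyFont-VF.ttf")  # placeholder: existing variable font
    condSubst = [
        # (Region, Substitution): a Region is a list of Boxes keyed by axis tag.
        ([{"wght": (0.5, 1.0)}], {"dollar": "dollar.nostroke"}),
    ]
    # Registers the same conditional substitutions under both features;
    # "rvrn" is not among them, so the lookups are appended last.
    addFeatureVariations(font, condSubst, featureTag=["calt", "ss01"])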
@@ -178,7 +178,9 @@ def _solve(tent, axisLimit, negative=False):
             #
             newUpper = peak + (1 - gain) * (upper - peak)
             assert axisMax <= newUpper  # Because outGain > gain
-            if newUpper <= axisDef + (axisMax - axisDef) * 2:
+            # Disabled because ots doesn't like us:
+            # https://github.com/fonttools/fonttools/issues/3350
+            if False and newUpper <= axisDef + (axisMax - axisDef) * 2:
                 upper = newUpper
                 if not negative and axisDef + (axisMax - axisDef) * MAX_F2DOT14 < upper:
                     # we clamp +2.0 to the max F2Dot14 (~1.99994) for convenience
(File diff suppressed because it is too large.)

Lib/fontTools/varLib/interpolatableHelpers.py (new file, 380 lines)
@@ -0,0 +1,380 @@
from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
from fontTools.pens.basePen import AbstractPen, BasePen, DecomposingPen
from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
from fontTools.pens.recordingPen import RecordingPen, DecomposingRecordingPen
from fontTools.misc.transform import Transform
from collections import defaultdict, deque
from math import sqrt, copysign, atan2, pi
from enum import Enum
import itertools

import logging

log = logging.getLogger("fontTools.varLib.interpolatable")


class InterpolatableProblem:
    NOTHING = "nothing"
    MISSING = "missing"
    OPEN_PATH = "open_path"
    PATH_COUNT = "path_count"
    NODE_COUNT = "node_count"
    NODE_INCOMPATIBILITY = "node_incompatibility"
    CONTOUR_ORDER = "contour_order"
    WRONG_START_POINT = "wrong_start_point"
    KINK = "kink"
    UNDERWEIGHT = "underweight"
    OVERWEIGHT = "overweight"

    severity = {
        MISSING: 1,
        OPEN_PATH: 2,
        PATH_COUNT: 3,
        NODE_COUNT: 4,
        NODE_INCOMPATIBILITY: 5,
        CONTOUR_ORDER: 6,
        WRONG_START_POINT: 7,
        KINK: 8,
        UNDERWEIGHT: 9,
        OVERWEIGHT: 10,
        NOTHING: 11,
    }


def sort_problems(problems):
    """Sort problems by severity, then by glyph name, then by problem message."""
    return dict(
        sorted(
            problems.items(),
            key=lambda _: -min(
                (
                    (InterpolatableProblem.severity[p["type"]] + p.get("tolerance", 0))
                    for p in _[1]
                ),
            ),
            reverse=True,
        )
    )


def rot_list(l, k):
    """Rotate list by k items forward. Ie. item at position 0 will be
    at position k in returned list. Negative k is allowed."""
    return l[-k:] + l[:-k]


class PerContourPen(BasePen):
    def __init__(self, Pen, glyphset=None):
        BasePen.__init__(self, glyphset)
        self._glyphset = glyphset
        self._Pen = Pen
        self._pen = None
        self.value = []

    def _moveTo(self, p0):
        self._newItem()
        self._pen.moveTo(p0)

    def _lineTo(self, p1):
        self._pen.lineTo(p1)

    def _qCurveToOne(self, p1, p2):
        self._pen.qCurveTo(p1, p2)

    def _curveToOne(self, p1, p2, p3):
        self._pen.curveTo(p1, p2, p3)

    def _closePath(self):
        self._pen.closePath()
        self._pen = None

    def _endPath(self):
        self._pen.endPath()
        self._pen = None

    def _newItem(self):
        self._pen = pen = self._Pen()
        self.value.append(pen)


class PerContourOrComponentPen(PerContourPen):
    def addComponent(self, glyphName, transformation):
        self._newItem()
        self.value[-1].addComponent(glyphName, transformation)


class SimpleRecordingPointPen(AbstractPointPen):
    def __init__(self):
        self.value = []

    def beginPath(self, identifier=None, **kwargs):
        pass

    def endPath(self) -> None:
        pass

    def addPoint(self, pt, segmentType=None):
        self.value.append((pt, False if segmentType is None else True))


def vdiff_hypot2(v0, v1):
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d * d
    return s


def vdiff_hypot2_complex(v0, v1):
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d.real * d.real + d.imag * d.imag
        # This does the same but seems to be slower:
        # s += (d * d.conjugate()).real
    return s


def matching_cost(G, matching):
    return sum(G[i][j] for i, j in enumerate(matching))


def min_cost_perfect_bipartite_matching_scipy(G):
    n = len(G)
    rows, cols = linear_sum_assignment(G)
    assert (rows == list(range(n))).all()
    return list(cols), matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_munkres(G):
    n = len(G)
    cols = [None] * n
    for row, col in Munkres().compute(G):
        cols[row] = col
    return cols, matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_bruteforce(G):
    n = len(G)

    if n > 6:
        raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")

    # Otherwise just brute-force
    permutations = itertools.permutations(range(n))
    best = list(next(permutations))
    best_cost = matching_cost(G, best)
    for p in permutations:
        cost = matching_cost(G, p)
        if cost < best_cost:
            best, best_cost = list(p), cost
    return best, best_cost


try:
    from scipy.optimize import linear_sum_assignment

    min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
except ImportError:
    try:
        from munkres import Munkres

        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_munkres
        )
    except ImportError:
        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_bruteforce
        )


def contour_vector_from_stats(stats):
    # Don't change the order of items here.
    # It's okay to add to the end, but otherwise, other
    # code depends on it. Search for "covariance".
    size = sqrt(abs(stats.area))
    return (
        copysign((size), stats.area),
        stats.meanX,
        stats.meanY,
        stats.stddevX * 2,
        stats.stddevY * 2,
        stats.correlation * size,
    )


def matching_for_vectors(m0, m1):
    n = len(m0)

    identity_matching = list(range(n))

    costs = [[vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
    (
        matching,
        matching_cost,
    ) = min_cost_perfect_bipartite_matching(costs)
    identity_cost = sum(costs[i][i] for i in range(n))
    return matching, matching_cost, identity_cost


def points_characteristic_bits(points):
    bits = 0
    for pt, b in reversed(points):
        bits = (bits << 1) | b
    return bits


_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4


def points_complex_vector(points):
    vector = []
    if not points:
        return vector
    points = [complex(*pt) for pt, _ in points]
    n = len(points)
    assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
    points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
        points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    for i in range(n):
        # The weights are magic numbers.

        # The point itself
        p0 = points[i]
        vector.append(p0)

        # The vector to the next point
        p1 = points[i + 1]
        d0 = p1 - p0
        vector.append(d0 * 3)

        # The turn vector
        p2 = points[i + 2]
        d1 = p2 - p1
        vector.append(d1 - d0)

        # The angle to the next point, as a cross product;
        # Square root of, to match dimentionality of distance.
        cross = d0.real * d1.imag - d0.imag * d1.real
        cross = copysign(sqrt(abs(cross)), cross)
        vector.append(cross * 4)

    return vector


def add_isomorphisms(points, isomorphisms, reverse):
    reference_bits = points_characteristic_bits(points)
    n = len(points)

    # if points[0][0] == points[-1][0]:
    #   abort

    if reverse:
        points = points[::-1]
        bits = points_characteristic_bits(points)
    else:
        bits = reference_bits

    vector = points_complex_vector(points)

    assert len(vector) % n == 0
    mult = len(vector) // n
    mask = (1 << n) - 1

    for i in range(n):
        b = ((bits << (n - i)) & mask) | (bits >> i)
        if b == reference_bits:
            isomorphisms.append(
                (rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
            )


def find_parents_and_order(glyphsets, locations):
    parents = [None] + list(range(len(glyphsets) - 1))
    order = list(range(len(glyphsets)))
    if locations:
        # Order base master first
        bases = (i for i, l in enumerate(locations) if all(v == 0 for v in l.values()))
        if bases:
            base = next(bases)
            logging.info("Base master index %s, location %s", base, locations[base])
        else:
            base = 0
            logging.warning("No base master location found")

        # Form a minimum spanning tree of the locations
        try:
            from scipy.sparse.csgraph import minimum_spanning_tree

            graph = [[0] * len(locations) for _ in range(len(locations))]
            axes = set()
            for l in locations:
                axes.update(l.keys())
            axes = sorted(axes)
            vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
            for i, j in itertools.combinations(range(len(locations)), 2):
                graph[i][j] = vdiff_hypot2(vectors[i], vectors[j])

            tree = minimum_spanning_tree(graph)
            rows, cols = tree.nonzero()
            graph = defaultdict(set)
            for row, col in zip(rows, cols):
                graph[row].add(col)
                graph[col].add(row)

            # Traverse graph from the base and assign parents
            parents = [None] * len(locations)
            order = []
            visited = set()
            queue = deque([base])
            while queue:
                i = queue.popleft()
                visited.add(i)
                order.append(i)
                for j in sorted(graph[i]):
                    if j not in visited:
                        parents[j] = i
                        queue.append(j)

        except ImportError:
            pass

        log.info("Parents: %s", parents)
        log.info("Order: %s", order)
    return parents, order


def transform_from_stats(stats, inverse=False):
    # https://cookierobotics.com/007/
    a = stats.varianceX
    b = stats.covariance
    c = stats.varianceY

    delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
    lambda1 = (a + c) * 0.5 + delta  # Major eigenvalue
    lambda2 = (a + c) * 0.5 - delta  # Minor eigenvalue
    theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)
    trans = Transform()

    if lambda2 < 0:
        # XXX This is a hack.
        # The problem is that the covariance matrix is singular.
        # This happens when the contour is a line, or a circle.
        # In that case, the covariance matrix is not a good
        # representation of the contour.
        # We should probably detect this earlier and avoid
        # computing the covariance matrix in the first place.
        # But for now, we just avoid the division by zero.
        lambda2 = 0

    if inverse:
        trans = trans.translate(-stats.meanX, -stats.meanY)
        trans = trans.rotate(-theta)
        trans = trans.scale(1 / sqrt(lambda1), 1 / sqrt(lambda2))
    else:
        trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
        trans = trans.rotate(theta)
        trans = trans.translate(stats.meanX, stats.meanY)

    return trans
@ -1,4 +1,6 @@
|
|||||||
|
from .interpolatableHelpers import *
|
||||||
from fontTools.ttLib import TTFont
|
from fontTools.ttLib import TTFont
|
||||||
|
from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
|
||||||
from fontTools.pens.recordingPen import (
|
from fontTools.pens.recordingPen import (
|
||||||
RecordingPen,
|
RecordingPen,
|
||||||
DecomposingRecordingPen,
|
DecomposingRecordingPen,
|
||||||
@ -11,7 +13,7 @@ from fontTools.pens.pointPen import (
|
|||||||
PointToSegmentPen,
|
PointToSegmentPen,
|
||||||
ReverseContourPointPen,
|
ReverseContourPointPen,
|
||||||
)
|
)
|
||||||
from fontTools.varLib.interpolatable import (
|
from fontTools.varLib.interpolatableHelpers import (
|
||||||
PerContourOrComponentPen,
|
PerContourOrComponentPen,
|
||||||
SimpleRecordingPointPen,
|
SimpleRecordingPointPen,
|
||||||
)
|
)
|
||||||
@ -26,38 +28,6 @@ import logging
|
|||||||
log = logging.getLogger("fontTools.varLib.interpolatable")
|
log = logging.getLogger("fontTools.varLib.interpolatable")
|
||||||
|
|
||||||
|
|
||||||
class LerpGlyphSet:
|
|
||||||
def __init__(self, glyphset1, glyphset2, factor=0.5):
|
|
||||||
self.glyphset1 = glyphset1
|
|
||||||
self.glyphset2 = glyphset2
|
|
||||||
self.factor = factor
|
|
||||||
|
|
||||||
def __getitem__(self, glyphname):
|
|
||||||
return LerpGlyph(glyphname, self)
|
|
||||||
|
|
||||||
|
|
||||||
class LerpGlyph:
|
|
||||||
def __init__(self, glyphname, glyphset):
|
|
||||||
self.glyphset = glyphset
|
|
||||||
self.glyphname = glyphname
|
|
||||||
|
|
||||||
def draw(self, pen):
|
|
||||||
recording1 = DecomposingRecordingPen(self.glyphset.glyphset1)
|
|
||||||
self.glyphset.glyphset1[self.glyphname].draw(recording1)
|
|
||||||
recording2 = DecomposingRecordingPen(self.glyphset.glyphset2)
|
|
||||||
self.glyphset.glyphset2[self.glyphname].draw(recording2)
|
|
||||||
|
|
||||||
factor = self.glyphset.factor
|
|
||||||
for (op1, args1), (op2, args2) in zip(recording1.value, recording2.value):
|
|
||||||
if op1 != op2:
|
|
||||||
raise ValueError("Mismatching operations: %s, %s" % (op1, op2))
|
|
||||||
mid_args = [
|
|
||||||
(x1 + (x2 - x1) * factor, y1 + (y2 - y1) * factor)
|
|
||||||
for (x1, y1), (x2, y2) in zip(args1, args2)
|
|
||||||
]
|
|
||||||
getattr(pen, op1)(*mid_args)
|
|
||||||
|
|
||||||
|
|
||||||
class OverridingDict(dict):
|
class OverridingDict(dict):
|
||||||
def __init__(self, parent_dict):
|
def __init__(self, parent_dict):
|
||||||
self.parent_dict = parent_dict
|
self.parent_dict = parent_dict
|
||||||
@ -79,24 +49,25 @@ class InterpolatablePlot:
|
|||||||
fill_color = (0.8, 0.8, 0.8)
|
fill_color = (0.8, 0.8, 0.8)
|
||||||
stroke_color = (0.1, 0.1, 0.1)
|
stroke_color = (0.1, 0.1, 0.1)
|
||||||
stroke_width = 2
|
stroke_width = 2
|
||||||
oncurve_node_color = (0, 0.8, 0)
|
oncurve_node_color = (0, 0.8, 0, 0.7)
|
||||||
oncurve_node_diameter = 10
|
oncurve_node_diameter = 10
|
||||||
offcurve_node_color = (0, 0.5, 0)
|
offcurve_node_color = (0, 0.5, 0, 0.7)
|
||||||
offcurve_node_diameter = 8
|
offcurve_node_diameter = 8
|
||||||
handle_color = (0.2, 1, 0.2)
|
handle_color = (0, 0.5, 0, 0.7)
|
||||||
handle_width = 1
|
handle_width = 1
|
||||||
corrected_start_point_color = (0, 0.9, 0)
|
corrected_start_point_color = (0, 0.9, 0, 0.7)
|
||||||
corrected_start_point_size = 15
|
corrected_start_point_size = 15
|
||||||
wrong_start_point_color = (1, 0, 0)
|
wrong_start_point_color = (1, 0, 0, 0.7)
|
||||||
start_point_color = (0, 0, 1)
|
start_point_color = (0, 0, 1, 0.7)
|
||||||
start_arrow_length = 20
|
start_arrow_length = 20
|
||||||
kink_point_size = 10
|
kink_point_size = 10
|
||||||
kink_point_color = (1, 0, 1, 0.7)
|
kink_point_color = (1, 0, 1, 0.7)
|
||||||
kink_circle_size = 25
|
kink_circle_size = 25
|
||||||
kink_circle_stroke_width = 1.5
|
kink_circle_stroke_width = 1.5
|
||||||
kink_circle_color = (1, 0, 1, 0.5)
|
kink_circle_color = (1, 0, 1, 0.7)
|
||||||
contour_colors = ((1, 0, 0), (0, 0, 1), (0, 1, 0), (1, 1, 0), (1, 0, 1), (0, 1, 1))
|
contour_colors = ((1, 0, 0), (0, 0, 1), (0, 1, 0), (1, 1, 0), (1, 0, 1), (0, 1, 1))
|
||||||
contour_alpha = 0.5
|
contour_alpha = 0.5
|
||||||
|
weight_issue_contour_color = (0, 0, 0, 0.4)
|
||||||
no_issues_label = "Your font's good! Have a cupcake..."
|
no_issues_label = "Your font's good! Have a cupcake..."
|
||||||
no_issues_label_color = (0, 0.5, 0)
|
no_issues_label_color = (0, 0.5, 0)
|
||||||
cupcake_color = (0.3, 0, 0.3)
|
cupcake_color = (0.3, 0, 0.3)
|
||||||
@ -125,8 +96,19 @@ class InterpolatablePlot:
|
|||||||
\\\\ |||| |||| |||| //
|
\\\\ |||| |||| |||| //
|
||||||
||||||||||||||||||||||||
|
||||||||||||||||||||||||
|
||||||
"""
|
"""
|
||||||
shrug_color = (0, 0.3, 0.3)
|
emoticon_color = (0, 0.3, 0.3)
|
||||||
shrug = r"""\_(")_/"""
|
shrug = r"""\_(")_/"""
|
||||||
|
underweight = r"""
|
||||||
|
o
|
||||||
|
/|\
|
||||||
|
/ \
|
||||||
|
"""
|
||||||
|
overweight = r"""
|
||||||
|
o
|
||||||
|
/O\
|
||||||
|
/ \
|
||||||
|
"""
|
||||||
|
yay = r""" \o/ """
|
||||||
|
|
||||||
def __init__(self, out, glyphsets, names=None, **kwargs):
|
def __init__(self, out, glyphsets, names=None, **kwargs):
|
||||||
self.out = out
|
self.out = out
|
||||||
@ -242,12 +224,26 @@ class InterpolatablePlot:
|
|||||||
)
|
)
|
||||||
y -= self.pad + self.line_height
|
y -= self.pad + self.line_height
|
||||||
|
|
||||||
|
self.draw_label("Underweight contours", x=xxx, y=y, width=width)
|
||||||
|
cr.rectangle(xx - self.pad * 0.7, y, 1.5 * self.pad, self.line_height)
|
||||||
|
cr.set_source_rgb(*self.fill_color)
|
||||||
|
cr.fill_preserve()
|
||||||
|
if self.stroke_color:
|
||||||
|
cr.set_source_rgb(*self.stroke_color)
|
||||||
|
cr.set_line_width(self.stroke_width)
|
||||||
|
cr.stroke_preserve()
|
||||||
|
cr.set_source_rgba(*self.weight_issue_contour_color)
|
||||||
|
cr.fill()
|
||||||
|
y -= self.pad + self.line_height
|
||||||
|
|
||||||
self.draw_label(
|
self.draw_label(
|
||||||
"Colored contours: contours with the wrong order", x=xxx, y=y, width=width
|
"Colored contours: contours with the wrong order", x=xxx, y=y, width=width
|
||||||
)
|
)
|
||||||
cr.rectangle(xx - self.pad * 0.7, y, 1.5 * self.pad, self.line_height)
|
cr.rectangle(xx - self.pad * 0.7, y, 1.5 * self.pad, self.line_height)
|
||||||
|
if self.fill_color:
|
||||||
cr.set_source_rgb(*self.fill_color)
|
cr.set_source_rgb(*self.fill_color)
|
||||||
cr.fill_preserve()
|
cr.fill_preserve()
|
||||||
|
if self.stroke_color:
|
||||||
cr.set_source_rgb(*self.stroke_color)
|
cr.set_source_rgb(*self.stroke_color)
|
||||||
cr.set_line_width(self.stroke_width)
|
cr.set_line_width(self.stroke_width)
|
||||||
cr.stroke_preserve()
|
cr.stroke_preserve()
|
||||||
@ -402,7 +398,7 @@ class InterpolatablePlot:
|
|||||||
)
|
)
|
||||||
master_indices = [problems[0][k] for k in master_keys]
|
master_indices = [problems[0][k] for k in master_keys]
|
||||||
|
|
||||||
if problem_type == "missing":
|
if problem_type == InterpolatableProblem.MISSING:
|
||||||
sample_glyph = next(
|
sample_glyph = next(
|
||||||
i for i, m in enumerate(self.glyphsets) if m[glyphname] is not None
|
i for i, m in enumerate(self.glyphsets) if m[glyphname] is not None
|
||||||
)
|
)
|
||||||
@ -456,17 +452,18 @@ class InterpolatablePlot:
|
|||||||
self.draw_glyph(glyphset, glyphname, problems, which, x=x, y=y)
|
self.draw_glyph(glyphset, glyphname, problems, which, x=x, y=y)
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
self.draw_shrug(x=x, y=y)
|
self.draw_emoticon(self.shrug, x=x, y=y)
|
||||||
y += self.height + self.pad
|
y += self.height + self.pad
|
||||||
|
|
||||||
if any(
|
if any(
|
||||||
pt
|
pt
|
||||||
in (
|
in (
|
||||||
"nothing",
|
InterpolatableProblem.NOTHING,
|
||||||
"wrong_start_point",
|
InterpolatableProblem.WRONG_START_POINT,
|
||||||
"contour_order",
|
InterpolatableProblem.CONTOUR_ORDER,
|
||||||
"wrong_structure",
|
InterpolatableProblem.KINK,
|
||||||
"kink",
|
InterpolatableProblem.UNDERWEIGHT,
|
||||||
|
InterpolatableProblem.OVERWEIGHT,
|
||||||
)
|
)
|
||||||
for pt in problem_types
|
for pt in problem_types
|
||||||
):
|
):
|
||||||
@ -489,7 +486,17 @@ class InterpolatablePlot:
|
|||||||
self.draw_glyph(
|
self.draw_glyph(
|
||||||
midway_glyphset,
|
midway_glyphset,
|
||||||
glyphname,
|
glyphname,
|
||||||
[{"type": "midway"}] + [p for p in problems if p["type"] == "kink"],
|
[{"type": "midway"}]
|
||||||
|
+ [
|
||||||
|
p
|
||||||
|
for p in problems
|
||||||
|
if p["type"]
|
||||||
|
in (
|
||||||
|
InterpolatableProblem.KINK,
|
||||||
|
InterpolatableProblem.UNDERWEIGHT,
|
||||||
|
InterpolatableProblem.OVERWEIGHT,
|
||||||
|
)
|
||||||
|
],
|
||||||
None,
|
None,
|
||||||
x=x,
|
x=x,
|
||||||
y=y,
|
y=y,
|
||||||
@ -498,14 +505,20 @@ class InterpolatablePlot:
|
|||||||
|
|
||||||
y += self.height + self.pad
|
y += self.height + self.pad
|
||||||
|
|
||||||
|
if any(
|
||||||
|
pt
|
||||||
|
in (
|
||||||
|
InterpolatableProblem.WRONG_START_POINT,
|
||||||
|
InterpolatableProblem.CONTOUR_ORDER,
|
||||||
|
InterpolatableProblem.KINK,
|
||||||
|
)
|
||||||
|
for pt in problem_types
|
||||||
|
):
|
||||||
# Draw the proposed fix
|
# Draw the proposed fix
|
||||||
|
|
||||||
self.draw_label("proposed fix", x=x, y=y, color=self.head_color, align=0.5)
|
self.draw_label("proposed fix", x=x, y=y, color=self.head_color, align=0.5)
|
||||||
y += self.line_height + self.pad
|
y += self.line_height + self.pad
|
||||||
|
|
||||||
if problem_type in ("wrong_structure"):
|
|
||||||
self.draw_shrug(x=x, y=y)
|
|
||||||
else:
|
|
||||||
overriding1 = OverridingDict(glyphset1)
|
overriding1 = OverridingDict(glyphset1)
|
||||||
overriding2 = OverridingDict(glyphset2)
|
overriding2 = OverridingDict(glyphset2)
|
||||||
perContourPen1 = PerContourOrComponentPen(
|
perContourPen1 = PerContourOrComponentPen(
|
||||||
@ -518,14 +531,14 @@ class InterpolatablePlot:
|
|||||||
glyphset2[glyphname].draw(perContourPen2)
|
glyphset2[glyphname].draw(perContourPen2)
|
||||||
|
|
||||||
for problem in problems:
|
for problem in problems:
|
||||||
if problem["type"] == "contour_order":
|
if problem["type"] == InterpolatableProblem.CONTOUR_ORDER:
|
||||||
fixed_contours = [
|
fixed_contours = [
|
||||||
perContourPen2.value[i] for i in problems[0]["value_2"]
|
perContourPen2.value[i] for i in problems[0]["value_2"]
|
||||||
]
|
]
|
||||||
perContourPen2.value = fixed_contours
|
perContourPen2.value = fixed_contours
|
||||||
|
|
||||||
for problem in problems:
|
for problem in problems:
|
||||||
if problem["type"] == "wrong_start_point":
|
if problem["type"] == InterpolatableProblem.WRONG_START_POINT:
|
||||||
# Save the wrong contours
|
# Save the wrong contours
|
||||||
wrongContour1 = perContourPen1.value[problem["contour"]]
|
wrongContour1 = perContourPen1.value[problem["contour"]]
|
||||||
wrongContour2 = perContourPen2.value[problem["contour"]]
|
wrongContour2 = perContourPen2.value[problem["contour"]]
|
||||||
@ -571,7 +584,7 @@ class InterpolatablePlot:
|
|||||||
|
|
||||||
for problem in problems:
|
for problem in problems:
|
||||||
# If we have a kink, try to fix it.
|
# If we have a kink, try to fix it.
|
||||||
if problem["type"] == "kink":
|
if problem["type"] == InterpolatableProblem.KINK:
|
||||||
# Save the wrong contours
|
# Save the wrong contours
|
||||||
wrongContour1 = perContourPen1.value[problem["contour"]]
|
wrongContour1 = perContourPen1.value[problem["contour"]]
|
||||||
wrongContour2 = perContourPen2.value[problem["contour"]]
|
wrongContour2 = perContourPen2.value[problem["contour"]]
|
||||||
@ -661,9 +674,19 @@ class InterpolatablePlot:
|
|||||||
scale=min(scales),
|
scale=min(scales),
|
||||||
)
|
)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
self.draw_shrug(x=x, y=y)
|
self.draw_emoticon(self.shrug, x=x, y=y)
|
||||||
y += self.height + self.pad
|
y += self.height + self.pad
|
||||||
|
|
||||||
|
else:
|
||||||
|
emoticon = self.shrug
|
||||||
|
if InterpolatableProblem.UNDERWEIGHT in problem_types:
|
||||||
|
emoticon = self.underweight
|
||||||
|
elif InterpolatableProblem.OVERWEIGHT in problem_types:
|
||||||
|
emoticon = self.overweight
|
||||||
|
elif InterpolatableProblem.NOTHING in problem_types:
|
||||||
|
emoticon = self.yay
|
||||||
|
self.draw_emoticon(emoticon, x=x, y=y)
|
||||||
|
|
||||||
if show_page_number:
|
if show_page_number:
|
||||||
self.draw_label(
|
self.draw_label(
|
||||||
str(self.page_number),
|
str(self.page_number),
|
||||||
@ -776,7 +799,7 @@ class InterpolatablePlot:
|
|||||||
pen = CairoPen(glyphset, cr)
|
pen = CairoPen(glyphset, cr)
|
||||||
decomposedRecording.replay(pen)
|
decomposedRecording.replay(pen)
|
||||||
|
|
||||||
if self.fill_color and problem_type != "open_path":
|
if self.fill_color and problem_type != InterpolatableProblem.OPEN_PATH:
|
||||||
cr.set_source_rgb(*self.fill_color)
|
cr.set_source_rgb(*self.fill_color)
|
||||||
cr.fill_preserve()
|
cr.fill_preserve()
|
||||||
|
|
||||||
@@ -787,13 +810,28 @@ class InterpolatablePlot:

             cr.new_path()

+        if (
+            InterpolatableProblem.UNDERWEIGHT in problem_types
+            or InterpolatableProblem.OVERWEIGHT in problem_types
+        ):
+            perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
+            recording.replay(perContourPen)
+            for problem in problems:
+                if problem["type"] in (
+                    InterpolatableProblem.UNDERWEIGHT,
+                    InterpolatableProblem.OVERWEIGHT,
+                ):
+                    contour = perContourPen.value[problem["contour"]]
+                    contour.replay(CairoPen(glyphset, cr))
+                    cr.set_source_rgba(*self.weight_issue_contour_color)
+                    cr.fill()

         if any(
             t in problem_types
             for t in {
-                "nothing",
-                "node_count",
-                "node_incompatibility",
-                "wrong_structure",
+                InterpolatableProblem.NOTHING,
+                InterpolatableProblem.NODE_COUNT,
+                InterpolatableProblem.NODE_INCOMPATIBILITY,
             }
         ):
             cr.set_line_cap(cairo.LINE_CAP_ROUND)
@@ -805,7 +843,7 @@ class InterpolatablePlot:
                 x, y = args[-1]
                 cr.move_to(x, y)
                 cr.line_to(x, y)
-            cr.set_source_rgb(*self.oncurve_node_color)
+            cr.set_source_rgba(*self.oncurve_node_color)
             cr.set_line_width(self.oncurve_node_diameter / scale)
             cr.stroke()

@@ -816,7 +854,7 @@ class InterpolatablePlot:
                 for x, y in args[:-1]:
                     cr.move_to(x, y)
                     cr.line_to(x, y)
-            cr.set_source_rgb(*self.offcurve_node_color)
+            cr.set_source_rgba(*self.offcurve_node_color)
             cr.set_line_width(self.offcurve_node_diameter / scale)
             cr.stroke()

@@ -841,13 +879,13 @@ class InterpolatablePlot:
                 else:
                     continue

-            cr.set_source_rgb(*self.handle_color)
+            cr.set_source_rgba(*self.handle_color)
             cr.set_line_width(self.handle_width / scale)
             cr.stroke()

         matching = None
         for problem in problems:
-            if problem["type"] == "contour_order":
+            if problem["type"] == InterpolatableProblem.CONTOUR_ORDER:
                 matching = problem["value_2"]
                 colors = cycle(self.contour_colors)
                 perContourPen = PerContourOrComponentPen(
@@ -863,7 +901,10 @@ class InterpolatablePlot:
                 cr.fill()

         for problem in problems:
-            if problem["type"] in ("nothing", "wrong_start_point", "wrong_structure"):
+            if problem["type"] in (
+                InterpolatableProblem.NOTHING,
+                InterpolatableProblem.WRONG_START_POINT,
+            ):
                 idx = problem.get("contour")

                 # Draw suggested point
@@ -902,6 +943,9 @@ class InterpolatablePlot:
                         continue
                     if first_pt is None:
                         continue
+                    if segment == "closePath":
+                        second_pt = first_pt
+                    else:
                         second_pt = args[0]

                     if idx is None or i == idx:
@@ -938,7 +982,7 @@ class InterpolatablePlot:

             cr.restore()

-            if problem["type"] == "kink":
+            if problem["type"] == InterpolatableProblem.KINK:
                 idx = problem.get("contour")
                 perContourPen = PerContourOrComponentPen(
                     RecordingPen, glyphset=glyphset
@@ -950,22 +994,6 @@ class InterpolatablePlot:
                     converter
                 )

-                if which == 1 or midway:
-                    wrong_start_point_problem = [
-                        pt
-                        for pt in problems
-                        if pt["type"] == "wrong_start_point"
-                        and pt.get("contour") == idx
-                    ]
-                    if wrong_start_point_problem:
-                        proposed_start = wrong_start_point_problem[0]["value_2"]
-                        points.value = (
-                            points.value[proposed_start:]
-                            + points.value[:proposed_start]
-                        )
-                        if wrong_start_point_problem[0]["reversed"]:
-                            points.value = points.value[::-1]

                 targetPoint = points.value[problem["value"]][0]
                 cr.save()
                 cr.translate(*targetPoint)
@@ -1031,6 +1059,44 @@ class InterpolatablePlot:
             cr.fill()
             cr.restore()

+    def draw_text(self, text, *, x=0, y=0, color=(0, 0, 0), width=None, height=None):
+        if width is None:
+            width = self.width
+        if height is None:
+            height = self.height
+
+        text = text.splitlines()
+        cr = cairo.Context(self.surface)
+        cr.set_source_rgb(*color)
+        cr.set_font_size(self.line_height)
+        cr.select_font_face(
+            "@cairo:monospace", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL
+        )
+        text_width = 0
+        text_height = 0
+        font_extents = cr.font_extents()
+        font_line_height = font_extents[2]
+        font_ascent = font_extents[0]
+        for line in text:
+            extents = cr.text_extents(line)
+            text_width = max(text_width, extents.x_advance)
+            text_height += font_line_height
+        if not text_width:
+            return
+        cr.translate(x, y)
+        scale = min(width / text_width, height / text_height)
+        # center
+        cr.translate(
+            (width - text_width * scale) / 2, (height - text_height * scale) / 2
+        )
+        cr.scale(scale, scale)
+
+        cr.translate(0, font_ascent)
+        for line in text:
+            cr.move_to(0, 0)
+            cr.show_text(line)
+            cr.translate(0, font_line_height)
+
     def draw_cupcake(self):
         self.set_size(self.total_width(), self.total_height())

@@ -1044,50 +1110,17 @@ class InterpolatablePlot:
             bold=True,
         )

-        cupcake = self.cupcake.splitlines()
-        cr = cairo.Context(self.surface)
-        cr.set_source_rgb(*self.cupcake_color)
-        cr.set_font_size(self.line_height)
-        cr.select_font_face(
-            "@cairo:monospace", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL
+        self.draw_text(
+            self.cupcake,
+            x=self.pad,
+            y=self.pad + self.line_height,
+            width=self.total_width() - 2 * self.pad,
+            height=self.total_height() - 2 * self.pad - self.line_height,
+            color=self.cupcake_color,
         )
-        width = 0
-        height = 0
-        font_extents = cr.font_extents()
-        font_line_height = font_extents[2]
-        font_ascent = font_extents[0]
-        for line in cupcake:
-            extents = cr.text_extents(line)
-            width = max(width, extents.width)
-            height += font_line_height
-        if not width:
-            return
-        cr.scale(
-            (self.total_width() - 2 * self.pad) / width,
-            (self.total_height() - 2 * self.pad - self.line_height) / height,
-        )
-        cr.translate(self.pad, self.pad + font_ascent + self.line_height)
-        for line in cupcake:
-            cr.move_to(0, 0)
-            cr.show_text(line)
-            cr.translate(0, font_line_height)

-    def draw_shrug(self, x=0, y=0):
-        cr = cairo.Context(self.surface)
-        cr.translate(x, y)
-        cr.set_source_rgb(*self.shrug_color)
-        cr.set_font_size(self.line_height)
-        cr.select_font_face(
-            "@cairo:monospace", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL
-        )
-        extents = cr.text_extents(self.shrug)
-        if not extents.width:
-            return
-        cr.translate(0, self.height * 0.6)
-        scale = self.width / extents.width
-        cr.scale(scale, scale)
-        cr.move_to(-extents.x_bearing, 0)
-        cr.show_text(self.shrug)
+    def draw_emoticon(self, emoticon, x=0, y=0):
+        self.draw_text(emoticon, x=x, y=y, color=self.emoticon_color)


 class InterpolatablePostscriptLike(InterpolatablePlot):
@@ -1105,10 +1138,6 @@ class InterpolatablePostscriptLike(InterpolatablePlot):
         super().show_page()
         self.surface.show_page()

-    def __enter__(self):
-        self.surface = cairo.PSSurface(self.out, self.width, self.height)
-        return self


 class InterpolatablePS(InterpolatablePostscriptLike):
     def __enter__(self):
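The string literals used by these checks ("kink", "wrong_start_point", and so on) are replaced throughout this file by attributes of InterpolatableProblem, which lives in interpolatableHelpers and is not shown in this diff. A minimal sketch of the assumed shape, purely for orientation:

    # Assumed shape only; the real definition is in interpolatableHelpers.
    class InterpolatableProblem:
        NOTHING = "nothing"
        KINK = "kink"
        UNDERWEIGHT = "underweight"
        OVERWEIGHT = "overweight"
        OPEN_PATH = "open_path"
        NODE_COUNT = "node_count"
        NODE_INCOMPATIBILITY = "node_incompatibility"
        CONTOUR_ORDER = "contour_order"
        WRONG_START_POINT = "wrong_start_point"

With plain string constants, existing dictionary keys such as problem["type"] keep working while the call sites gain a single, typo-proof namespace.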
Lib/fontTools/varLib/interpolatableTestContourOrder.py (new file, 82 lines)
@@ -0,0 +1,82 @@
+from .interpolatableHelpers import *
+import logging
+
+log = logging.getLogger("fontTools.varLib.interpolatable")
+
+
+def test_contour_order(glyph0, glyph1):
+    # We try matching both the StatisticsControlPen vector
+    # and the StatisticsPen vector.
+    #
+    # If either method found a identity matching, accept it.
+    # This is crucial for fonts like Kablammo[MORF].ttf and
+    # Nabla[EDPT,EHLT].ttf, since they really confuse the
+    # StatisticsPen vector because of their area=0 contours.
+
+    n = len(glyph0.controlVectors)
+    matching = None
+    matching_cost = 0
+    identity_cost = 0
+    done = n <= 1
+    if not done:
+        m0Control = glyph0.controlVectors
+        m1Control = glyph1.controlVectors
+        (
+            matching_control,
+            matching_cost_control,
+            identity_cost_control,
+        ) = matching_for_vectors(m0Control, m1Control)
+        done = matching_cost_control == identity_cost_control
+    if not done:
+        m0Green = glyph0.greenVectors
+        m1Green = glyph1.greenVectors
+        (
+            matching_green,
+            matching_cost_green,
+            identity_cost_green,
+        ) = matching_for_vectors(m0Green, m1Green)
+        done = matching_cost_green == identity_cost_green
+
+    if not done:
+        # See if reversing contours in one master helps.
+        # That's a common problem.  Then the wrong_start_point
+        # test will fix them.
+        #
+        # Reverse the sign of the area (0); the rest stay the same.
+        if not done:
+            m1ControlReversed = [(-m[0],) + m[1:] for m in m1Control]
+            (
+                matching_control_reversed,
+                matching_cost_control_reversed,
+                identity_cost_control_reversed,
+            ) = matching_for_vectors(m0Control, m1ControlReversed)
+            done = matching_cost_control_reversed == identity_cost_control_reversed
+        if not done:
+            m1GreenReversed = [(-m[0],) + m[1:] for m in m1Green]
+            (
+                matching_control_reversed,
+                matching_cost_control_reversed,
+                identity_cost_control_reversed,
+            ) = matching_for_vectors(m0Control, m1ControlReversed)
+            done = matching_cost_control_reversed == identity_cost_control_reversed
+
+    if not done:
+        # Otherwise, use the worst of the two matchings.
+        if (
+            matching_cost_control / identity_cost_control
+            < matching_cost_green / identity_cost_green
+        ):
+            matching = matching_control
+            matching_cost = matching_cost_control
+            identity_cost = identity_cost_control
+        else:
+            matching = matching_green
+            matching_cost = matching_cost_green
+            identity_cost = identity_cost_green
+
+    this_tolerance = matching_cost / identity_cost if identity_cost else 1
+    log.debug(
+        "test-contour-order: tolerance %g",
+        this_tolerance,
+    )
+    return this_tolerance, matching
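The helper matching_for_vectors used above comes from interpolatableHelpers and is not part of this diff. A rough, illustrative sketch of the behaviour assumed here — an optimal assignment between two equal-length lists of per-contour descriptor vectors, returned together with its cost and the cost of the identity assignment — using scipy's Hungarian solver rather than whatever the real helper does:

    # Sketch only; the name, signature and cost metric are assumptions.
    import numpy as np
    from scipy.optimize import linear_sum_assignment

    def matching_for_vectors_sketch(m0, m1):
        m0 = np.asarray(m0, dtype=float)
        m1 = np.asarray(m1, dtype=float)
        # cost[i][j]: squared distance between contour i of master 0
        # and contour j of master 1
        cost = ((m0[:, None, :] - m1[None, :, :]) ** 2).sum(axis=-1)
        rows, cols = linear_sum_assignment(cost)
        matching = list(cols)
        matching_cost = cost[rows, cols].sum()
        identity_cost = np.trace(cost)
        return matching, matching_cost, identity_cost

When the assignment cost equals the identity cost, the contours are already in a compatible order, which is the "done" condition tested repeatedly above.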
105
Lib/fontTools/varLib/interpolatableTestStartingPoint.py
Normal file
105
Lib/fontTools/varLib/interpolatableTestStartingPoint.py
Normal file
@ -0,0 +1,105 @@
|
|||||||
|
from .interpolatableHelpers import *
|
||||||
|
|
||||||
|
|
||||||
|
def test_starting_point(glyph0, glyph1, ix, tolerance, matching):
|
||||||
|
if matching is None:
|
||||||
|
matching = list(range(len(glyph0.isomorphisms)))
|
||||||
|
contour0 = glyph0.isomorphisms[ix]
|
||||||
|
contour1 = glyph1.isomorphisms[matching[ix]]
|
||||||
|
m0Vectors = glyph0.greenVectors
|
||||||
|
m1Vectors = [glyph1.greenVectors[i] for i in matching]
|
||||||
|
|
||||||
|
c0 = contour0[0]
|
||||||
|
# Next few lines duplicated below.
|
||||||
|
costs = [vdiff_hypot2_complex(c0[0], c1[0]) for c1 in contour1]
|
||||||
|
min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
|
||||||
|
first_cost = costs[0]
|
||||||
|
proposed_point = contour1[min_cost_idx][1]
|
||||||
|
reverse = contour1[min_cost_idx][2]
|
||||||
|
|
||||||
|
if min_cost < first_cost * tolerance:
|
||||||
|
# c0 is the first isomorphism of the m0 master
|
||||||
|
# contour1 is list of all isomorphisms of the m1 master
|
||||||
|
#
|
||||||
|
# If the two shapes are both circle-ish and slightly
|
||||||
|
# rotated, we detect wrong start point. This is for
|
||||||
|
# example the case hundreds of times in
|
||||||
|
# RobotoSerif-Italic[GRAD,opsz,wdth,wght].ttf
|
||||||
|
#
|
||||||
|
# If the proposed point is only one off from the first
|
||||||
|
# point (and not reversed), try harder:
|
||||||
|
#
|
||||||
|
# Find the major eigenvector of the covariance matrix,
|
||||||
|
# and rotate the contours by that angle. Then find the
|
||||||
|
# closest point again. If it matches this time, let it
|
||||||
|
# pass.
|
||||||
|
|
||||||
|
num_points = len(glyph1.points[ix])
|
||||||
|
leeway = 3
|
||||||
|
if not reverse and (
|
||||||
|
proposed_point <= leeway or proposed_point >= num_points - leeway
|
||||||
|
):
|
||||||
|
# Try harder
|
||||||
|
|
||||||
|
# Recover the covariance matrix from the GreenVectors.
|
||||||
|
# This is a 2x2 matrix.
|
||||||
|
transforms = []
|
||||||
|
for vector in (m0Vectors[ix], m1Vectors[ix]):
|
||||||
|
meanX = vector[1]
|
||||||
|
meanY = vector[2]
|
||||||
|
stddevX = vector[3] * 0.5
|
||||||
|
stddevY = vector[4] * 0.5
|
||||||
|
correlation = vector[5] / abs(vector[0])
|
||||||
|
|
||||||
|
# https://cookierobotics.com/007/
|
||||||
|
a = stddevX * stddevX # VarianceX
|
||||||
|
c = stddevY * stddevY # VarianceY
|
||||||
|
b = correlation * stddevX * stddevY # Covariance
|
||||||
|
|
||||||
|
delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
|
||||||
|
lambda1 = (a + c) * 0.5 + delta # Major eigenvalue
|
||||||
|
lambda2 = (a + c) * 0.5 - delta # Minor eigenvalue
|
||||||
|
theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)
|
||||||
|
trans = Transform()
|
||||||
|
# Don't translate here. We are working on the complex-vector
|
||||||
|
# that includes more than just the points. It's horrible what
|
||||||
|
# we are doing anyway...
|
||||||
|
# trans = trans.translate(meanX, meanY)
|
||||||
|
trans = trans.rotate(theta)
|
||||||
|
trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
|
||||||
|
transforms.append(trans)
|
||||||
|
|
||||||
|
trans = transforms[0]
|
||||||
|
new_c0 = (
|
||||||
|
[complex(*trans.transformPoint((pt.real, pt.imag))) for pt in c0[0]],
|
||||||
|
) + c0[1:]
|
||||||
|
trans = transforms[1]
|
||||||
|
new_contour1 = []
|
||||||
|
for c1 in contour1:
|
||||||
|
new_c1 = (
|
||||||
|
[
|
||||||
|
complex(*trans.transformPoint((pt.real, pt.imag)))
|
||||||
|
for pt in c1[0]
|
||||||
|
],
|
||||||
|
) + c1[1:]
|
||||||
|
new_contour1.append(new_c1)
|
||||||
|
|
||||||
|
# Next few lines duplicate from above.
|
||||||
|
costs = [
|
||||||
|
vdiff_hypot2_complex(new_c0[0], new_c1[0]) for new_c1 in new_contour1
|
||||||
|
]
|
||||||
|
min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
|
||||||
|
first_cost = costs[0]
|
||||||
|
if min_cost < first_cost * tolerance:
|
||||||
|
# Don't report this
|
||||||
|
# min_cost = first_cost
|
||||||
|
# reverse = False
|
||||||
|
# proposed_point = 0 # new_contour1[min_cost_idx][1]
|
||||||
|
pass
|
||||||
|
|
||||||
|
this_tolerance = min_cost / first_cost if first_cost else 1
|
||||||
|
log.debug(
|
||||||
|
"test-starting-point: tolerance %g",
|
||||||
|
this_tolerance,
|
||||||
|
)
|
||||||
|
return this_tolerance, proposed_point, reverse
|
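The closed-form eigenvalue and angle expressions above (from the cookierobotics.com reference) can be sanity-checked numerically. The following is an illustrative check, not part of the module, assuming numpy is available:

    import numpy as np
    from math import atan2, pi

    a, b, c = 4.0, 1.5, 2.0           # VarianceX, Covariance, VarianceY
    delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
    lambda1 = (a + c) * 0.5 + delta   # major eigenvalue
    lambda2 = (a + c) * 0.5 - delta   # minor eigenvalue
    theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)

    w, v = np.linalg.eigh(np.array([[a, b], [b, c]]))
    assert np.isclose(lambda2, w[0]) and np.isclose(lambda1, w[1])
    # (cos(theta), sin(theta)) is the major eigenvector, up to sign
    assert np.isclose(abs(np.cos(theta) * v[0, 1] + np.sin(theta) * v[1, 1]), 1.0)

Rotating both contours into this principal frame (and scaling by the eigenvalue square roots) is what lets the second, "try harder" pass forgive small rotations between otherwise circle-like contours.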
NEWS.rst
@@ -1,3 +1,19 @@
+4.46.0 (released 2023-12-02)
+----------------------------
+
+- [featureVars] Allow to register the same set of substitution rules to multiple features.
+  The ``addFeatureVariations`` function can now take a list of featureTags; similarly, the
+  lib key 'com.github.fonttools.varLib.featureVarsFeatureTag' can now take a
+  comma-separateed string of feature tags (e.g. "salt,ss01") instead of a single tag (#3360).
+- [featureVars] Don't overwrite GSUB FeatureVariations, but append new records to it
+  for features which are not already there. But raise ``VarLibError`` if the feature tag
+  already has feature variations associated with it (#3363).
+- [varLib] Added ``addGSUBFeatureVariations`` function to add GSUB Feature Variations
+  to an existing variable font from rules defined in a DesignSpace document (#3362).
+- [varLib.interpolatable] Various bugfixes and rendering improvements. In particular,
+  a new test for "underweight" glyphs. The new test reports quite a few false-positives
+  though. Please send feedback.
+
 4.45.1 (released 2023-11-23)
 ----------------------------

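A hedged usage sketch of the featureVars additions described in the notes above; the call mirrors the tests later in this diff, and the font path is hypothetical:

    from fontTools.ttLib import TTFont
    from fontTools.varLib.featureVars import addFeatureVariations

    font = TTFont("MyVF.ttf")  # hypothetical variable font
    # Substitute A -> A.alt over the upper half of the wght axis and attach
    # the records to 'rclt'; per the release notes a comma-separated string
    # such as "rclt,calt" should register the same rules under several features.
    addFeatureVariations(
        font,
        [([{"wght": (0.5, 1.0)}], {"A": "A.alt"})],
        featureTag="rclt",
    )
    font.save("MyVF-featureVars.ttf")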
@@ -4,6 +4,7 @@ from fontTools.misc.bezierTools import (
     calcQuadraticArcLength,
     calcCubicBounds,
     curveLineIntersections,
+    curveCurveIntersections,
     segmentPointAtT,
     splitLine,
     splitQuadratic,
@@ -189,3 +190,10 @@ def test_calcQuadraticArcLength():
     assert calcQuadraticArcLength(
         (210, 333), (289, 333), (326.5, 290.5)
     ) == pytest.approx(127.9225)
+
+
+def test_intersections_linelike():
+    seg1 = [(0.0, 0.0), (0.0, 0.25), (0.0, 0.75), (0.0, 1.0)]
+    seg2 = [(0.0, 0.5), (0.25, 0.5), (0.75, 0.5), (1.0, 0.5)]
+    pt = curveCurveIntersections(seg1, seg2)[0][0]
+    assert pt == (0.0, 0.5)
@@ -1,5 +1,6 @@
 from fontTools.ttLib import TTFont
 from fontTools.ttLib import ttGlyphSet
+from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
 from fontTools.pens.recordingPen import (
     RecordingPen,
     RecordingPointPen,
@@ -164,6 +165,53 @@ class TTGlyphSetTest(object):

         assert actual == expected, (location, actual, expected)

+    @pytest.mark.parametrize(
+        "fontfile, locations, factor, expected",
+        [
+            (
+                "I.ttf",
+                ({"wght": 400}, {"wght": 1000}),
+                0.5,
+                [
+                    ("moveTo", ((151.5, 0.0),)),
+                    ("lineTo", ((458.5, 0.0),)),
+                    ("lineTo", ((458.5, 1456.0),)),
+                    ("lineTo", ((151.5, 1456.0),)),
+                    ("closePath", ()),
+                ],
+            ),
+            (
+                "I.ttf",
+                ({"wght": 400}, {"wght": 1000}),
+                0.25,
+                [
+                    ("moveTo", ((163.25, 0.0),)),
+                    ("lineTo", ((412.75, 0.0),)),
+                    ("lineTo", ((412.75, 1456.0),)),
+                    ("lineTo", ((163.25, 1456.0),)),
+                    ("closePath", ()),
+                ],
+            ),
+        ],
+    )
+    def test_lerp_glyphset(self, fontfile, locations, factor, expected):
+        font = TTFont(self.getpath(fontfile))
+        glyphset1 = font.getGlyphSet(location=locations[0])
+        glyphset2 = font.getGlyphSet(location=locations[1])
+        glyphset = LerpGlyphSet(glyphset1, glyphset2, factor)
+
+        assert "I" in glyphset
+
+        pen = RecordingPen()
+        glyph = glyphset["I"]
+
+        assert glyphset.get("foobar") is None
+
+        glyph.draw(pen)
+        actual = pen.value
+
+        assert actual == expected, (locations, actual, expected)
+
     def test_glyphset_varComposite_components(self):
         font = TTFont(self.getpath("varc-ac00-ac01.ttf"))
         glyphset = font.getGlyphSet()
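The two parametrized expectations above are consistent with plain linear interpolation between one pair of master outlines, with factor 0 meaning the first glyphset and 1 the second. An illustrative check (the master coordinates 175 and 128 are solved from the two expectations, not read from I.ttf):

    # left stem edge of "I": x0 at wght=400, x1 at wght=1000
    x0, x1 = 175.0, 128.0
    for factor, expected in ((0.5, 151.5), (0.25, 163.25)):
        assert (1 - factor) * x0 + factor * x1 == expected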
@@ -71,7 +71,7 @@
     <lib>
         <dict>
             <key>com.github.fonttools.varLib.featureVarsFeatureTag</key>
-            <string>calt</string>
+            <string>rclt,calt</string>
         </dict>
     </lib>
 </designspace>
|
@ -33,21 +33,28 @@
|
|||||||
<Script>
|
<Script>
|
||||||
<DefaultLangSys>
|
<DefaultLangSys>
|
||||||
<ReqFeatureIndex value="65535"/>
|
<ReqFeatureIndex value="65535"/>
|
||||||
<!-- FeatureCount=1 -->
|
<!-- FeatureCount=2 -->
|
||||||
<FeatureIndex index="0" value="0"/>
|
<FeatureIndex index="0" value="0"/>
|
||||||
|
<FeatureIndex index="1" value="1"/>
|
||||||
</DefaultLangSys>
|
</DefaultLangSys>
|
||||||
<!-- LangSysCount=0 -->
|
<!-- LangSysCount=0 -->
|
||||||
</Script>
|
</Script>
|
||||||
</ScriptRecord>
|
</ScriptRecord>
|
||||||
</ScriptList>
|
</ScriptList>
|
||||||
<FeatureList>
|
<FeatureList>
|
||||||
<!-- FeatureCount=1 -->
|
<!-- FeatureCount=2 -->
|
||||||
<FeatureRecord index="0">
|
<FeatureRecord index="0">
|
||||||
<FeatureTag value="calt"/>
|
<FeatureTag value="calt"/>
|
||||||
<Feature>
|
<Feature>
|
||||||
<!-- LookupCount=0 -->
|
<!-- LookupCount=0 -->
|
||||||
</Feature>
|
</Feature>
|
||||||
</FeatureRecord>
|
</FeatureRecord>
|
||||||
|
<FeatureRecord index="1">
|
||||||
|
<FeatureTag value="rclt"/>
|
||||||
|
<Feature>
|
||||||
|
<!-- LookupCount=0 -->
|
||||||
|
</Feature>
|
||||||
|
</FeatureRecord>
|
||||||
</FeatureList>
|
</FeatureList>
|
||||||
<LookupList>
|
<LookupList>
|
||||||
<!-- LookupCount=3 -->
|
<!-- LookupCount=3 -->
|
||||||
@ -95,7 +102,7 @@
|
|||||||
</ConditionSet>
|
</ConditionSet>
|
||||||
<FeatureTableSubstitution>
|
<FeatureTableSubstitution>
|
||||||
<Version value="0x00010000"/>
|
<Version value="0x00010000"/>
|
||||||
<!-- SubstitutionCount=1 -->
|
<!-- SubstitutionCount=2 -->
|
||||||
<SubstitutionRecord index="0">
|
<SubstitutionRecord index="0">
|
||||||
<FeatureIndex value="0"/>
|
<FeatureIndex value="0"/>
|
||||||
<Feature>
|
<Feature>
|
||||||
@ -104,6 +111,14 @@
|
|||||||
<LookupListIndex index="1" value="1"/>
|
<LookupListIndex index="1" value="1"/>
|
||||||
</Feature>
|
</Feature>
|
||||||
</SubstitutionRecord>
|
</SubstitutionRecord>
|
||||||
|
<SubstitutionRecord index="1">
|
||||||
|
<FeatureIndex value="1"/>
|
||||||
|
<Feature>
|
||||||
|
<!-- LookupCount=2 -->
|
||||||
|
<LookupListIndex index="0" value="0"/>
|
||||||
|
<LookupListIndex index="1" value="1"/>
|
||||||
|
</Feature>
|
||||||
|
</SubstitutionRecord>
|
||||||
</FeatureTableSubstitution>
|
</FeatureTableSubstitution>
|
||||||
</FeatureVariationRecord>
|
</FeatureVariationRecord>
|
||||||
<FeatureVariationRecord index="1">
|
<FeatureVariationRecord index="1">
|
||||||
@ -122,7 +137,7 @@
|
|||||||
</ConditionSet>
|
</ConditionSet>
|
||||||
<FeatureTableSubstitution>
|
<FeatureTableSubstitution>
|
||||||
<Version value="0x00010000"/>
|
<Version value="0x00010000"/>
|
||||||
<!-- SubstitutionCount=1 -->
|
<!-- SubstitutionCount=2 -->
|
||||||
<SubstitutionRecord index="0">
|
<SubstitutionRecord index="0">
|
||||||
<FeatureIndex value="0"/>
|
<FeatureIndex value="0"/>
|
||||||
<Feature>
|
<Feature>
|
||||||
@ -130,6 +145,13 @@
|
|||||||
<LookupListIndex index="0" value="2"/>
|
<LookupListIndex index="0" value="2"/>
|
||||||
</Feature>
|
</Feature>
|
||||||
</SubstitutionRecord>
|
</SubstitutionRecord>
|
||||||
|
<SubstitutionRecord index="1">
|
||||||
|
<FeatureIndex value="1"/>
|
||||||
|
<Feature>
|
||||||
|
<!-- LookupCount=1 -->
|
||||||
|
<LookupListIndex index="0" value="2"/>
|
||||||
|
</Feature>
|
||||||
|
</SubstitutionRecord>
|
||||||
</FeatureTableSubstitution>
|
</FeatureTableSubstitution>
|
||||||
</FeatureVariationRecord>
|
</FeatureVariationRecord>
|
||||||
<FeatureVariationRecord index="2">
|
<FeatureVariationRecord index="2">
|
||||||
@ -143,7 +165,7 @@
|
|||||||
</ConditionSet>
|
</ConditionSet>
|
||||||
<FeatureTableSubstitution>
|
<FeatureTableSubstitution>
|
||||||
<Version value="0x00010000"/>
|
<Version value="0x00010000"/>
|
||||||
<!-- SubstitutionCount=1 -->
|
<!-- SubstitutionCount=2 -->
|
||||||
<SubstitutionRecord index="0">
|
<SubstitutionRecord index="0">
|
||||||
<FeatureIndex value="0"/>
|
<FeatureIndex value="0"/>
|
||||||
<Feature>
|
<Feature>
|
||||||
@ -151,6 +173,13 @@
|
|||||||
<LookupListIndex index="0" value="1"/>
|
<LookupListIndex index="0" value="1"/>
|
||||||
</Feature>
|
</Feature>
|
||||||
</SubstitutionRecord>
|
</SubstitutionRecord>
|
||||||
|
<SubstitutionRecord index="1">
|
||||||
|
<FeatureIndex value="1"/>
|
||||||
|
<Feature>
|
||||||
|
<!-- LookupCount=1 -->
|
||||||
|
<LookupListIndex index="0" value="1"/>
|
||||||
|
</Feature>
|
||||||
|
</SubstitutionRecord>
|
||||||
</FeatureTableSubstitution>
|
</FeatureTableSubstitution>
|
||||||
</FeatureVariationRecord>
|
</FeatureVariationRecord>
|
||||||
<FeatureVariationRecord index="3">
|
<FeatureVariationRecord index="3">
|
||||||
@ -164,7 +193,7 @@
|
|||||||
</ConditionSet>
|
</ConditionSet>
|
||||||
<FeatureTableSubstitution>
|
<FeatureTableSubstitution>
|
||||||
<Version value="0x00010000"/>
|
<Version value="0x00010000"/>
|
||||||
<!-- SubstitutionCount=1 -->
|
<!-- SubstitutionCount=2 -->
|
||||||
<SubstitutionRecord index="0">
|
<SubstitutionRecord index="0">
|
||||||
<FeatureIndex value="0"/>
|
<FeatureIndex value="0"/>
|
||||||
<Feature>
|
<Feature>
|
||||||
@ -172,6 +201,13 @@
|
|||||||
<LookupListIndex index="0" value="0"/>
|
<LookupListIndex index="0" value="0"/>
|
||||||
</Feature>
|
</Feature>
|
||||||
</SubstitutionRecord>
|
</SubstitutionRecord>
|
||||||
|
<SubstitutionRecord index="1">
|
||||||
|
<FeatureIndex value="1"/>
|
||||||
|
<Feature>
|
||||||
|
<!-- LookupCount=1 -->
|
||||||
|
<LookupListIndex index="0" value="0"/>
|
||||||
|
</Feature>
|
||||||
|
</SubstitutionRecord>
|
||||||
</FeatureTableSubstitution>
|
</FeatureTableSubstitution>
|
||||||
</FeatureVariationRecord>
|
</FeatureVariationRecord>
|
||||||
</FeatureVariations>
|
</FeatureVariations>
|
||||||
|
@@ -1,4 +1,136 @@
-from fontTools.varLib.featureVars import overlayFeatureVariations, overlayBox
+from collections import OrderedDict
+
+from fontTools.designspaceLib import AxisDescriptor
+from fontTools.ttLib import TTFont, newTable
+from fontTools import varLib
+from fontTools.varLib.featureVars import (
+    addFeatureVariations,
+    overlayFeatureVariations,
+    overlayBox,
+)
+import pytest
+
+
+def makeVariableFont(glyphOrder, axes):
+    font = TTFont()
+    font.setGlyphOrder(glyphOrder)
+    font["name"] = newTable("name")
+    ds_axes = OrderedDict()
+    for axisTag, (minimum, default, maximum) in axes.items():
+        axis = AxisDescriptor()
+        axis.name = axis.tag = axis.labelNames["en"] = axisTag
+        axis.minimum, axis.default, axis.maximum = minimum, default, maximum
+        ds_axes[axisTag] = axis
+    varLib._add_fvar(font, ds_axes, instances=())
+    return font
+
+
+@pytest.fixture
+def varfont():
+    return makeVariableFont(
+        [".notdef", "space", "A", "B", "A.alt", "B.alt"],
+        {"wght": (100, 400, 900)},
+    )
+
+
+def test_addFeatureVariations(varfont):
+    assert "GSUB" not in varfont
+
+    addFeatureVariations(varfont, [([{"wght": (0.5, 1.0)}], {"A": "A.alt"})])
+
+    assert "GSUB" in varfont
+    gsub = varfont["GSUB"].table
+
+    assert len(gsub.ScriptList.ScriptRecord) == 1
+    assert gsub.ScriptList.ScriptRecord[0].ScriptTag == "DFLT"
+
+    assert len(gsub.FeatureList.FeatureRecord) == 1
+    assert gsub.FeatureList.FeatureRecord[0].FeatureTag == "rvrn"
+
+    assert len(gsub.LookupList.Lookup) == 1
+    assert gsub.LookupList.Lookup[0].LookupType == 1
+    assert len(gsub.LookupList.Lookup[0].SubTable) == 1
+    assert gsub.LookupList.Lookup[0].SubTable[0].mapping == {"A": "A.alt"}
+
+    assert gsub.FeatureVariations is not None
+    assert len(gsub.FeatureVariations.FeatureVariationRecord) == 1
+    fvr = gsub.FeatureVariations.FeatureVariationRecord[0]
+    assert len(fvr.ConditionSet.ConditionTable) == 1
+    cst = fvr.ConditionSet.ConditionTable[0]
+    assert cst.AxisIndex == 0
+    assert cst.FilterRangeMinValue == 0.5
+    assert cst.FilterRangeMaxValue == 1.0
+    assert len(fvr.FeatureTableSubstitution.SubstitutionRecord) == 1
+    ftsr = fvr.FeatureTableSubstitution.SubstitutionRecord[0]
+    assert ftsr.FeatureIndex == 0
+    assert ftsr.Feature.LookupListIndex == [0]
+
+
+def _substitution_features(gsub, rec_index):
+    fea_tags = [feature.FeatureTag for feature in gsub.FeatureList.FeatureRecord]
+    fea_indices = [
+        gsub.FeatureVariations.FeatureVariationRecord[rec_index]
+        .FeatureTableSubstitution.SubstitutionRecord[i]
+        .FeatureIndex
+        for i in range(
+            len(
+                gsub.FeatureVariations.FeatureVariationRecord[
+                    rec_index
+                ].FeatureTableSubstitution.SubstitutionRecord
+            )
+        )
+    ]
+    return [(i, fea_tags[i]) for i in fea_indices]
+
+
+def test_addFeatureVariations_existing_variable_feature(varfont):
+    assert "GSUB" not in varfont
+
+    addFeatureVariations(varfont, [([{"wght": (0.5, 1.0)}], {"A": "A.alt"})])
+
+    gsub = varfont["GSUB"].table
+    assert len(gsub.FeatureList.FeatureRecord) == 1
+    assert gsub.FeatureList.FeatureRecord[0].FeatureTag == "rvrn"
+    assert len(gsub.FeatureVariations.FeatureVariationRecord) == 1
+    assert _substitution_features(gsub, rec_index=0) == [(0, "rvrn")]
+
+    # can't add feature variations for an existing feature tag that already has some,
+    # in this case the default 'rvrn'
+    with pytest.raises(
+        varLib.VarLibError,
+        match=r"FeatureVariations already exist for feature tag\(s\): {'rvrn'}",
+    ):
+        addFeatureVariations(varfont, [([{"wght": (0.5, 1.0)}], {"A": "A.alt"})])
+
+
+def test_addFeatureVariations_new_feature(varfont):
+    assert "GSUB" not in varfont
+
+    addFeatureVariations(varfont, [([{"wght": (0.5, 1.0)}], {"A": "A.alt"})])
+
+    gsub = varfont["GSUB"].table
+    assert len(gsub.FeatureList.FeatureRecord) == 1
+    assert gsub.FeatureList.FeatureRecord[0].FeatureTag == "rvrn"
+    assert len(gsub.LookupList.Lookup) == 1
+    assert len(gsub.FeatureVariations.FeatureVariationRecord) == 1
+    assert _substitution_features(gsub, rec_index=0) == [(0, "rvrn")]
+
+    # we can add feature variations for a feature tag that does not have
+    # any feature variations yet
+    addFeatureVariations(
+        varfont, [([{"wght": (-1.0, 0.0)}], {"B": "B.alt"})], featureTag="rclt"
+    )
+
+    assert len(gsub.FeatureList.FeatureRecord) == 2
+    # Note 'rclt' is now first (index=0) in the feature list sorted by tag, and
+    # 'rvrn' is second (index=1)
+    assert gsub.FeatureList.FeatureRecord[0].FeatureTag == "rclt"
+    assert gsub.FeatureList.FeatureRecord[1].FeatureTag == "rvrn"
+    assert len(gsub.LookupList.Lookup) == 2
+    assert len(gsub.FeatureVariations.FeatureVariationRecord) == 2
+    # The new 'rclt' feature variation record is appended to the end;
+    # the feature index for 'rvrn' feature table substitution record is now 1
+    assert _substitution_features(gsub, rec_index=0) == [(1, "rvrn")]
+    assert _substitution_features(gsub, rec_index=1) == [(0, "rclt")]
+
+
 def _test_linear(n):
@@ -1986,7 +1986,10 @@ class LimitTupleVariationAxisRangesTest:
             TupleVariation({"wght": (0.0, 0.5, 1.0)}, [100, 100]),
             "wght",
             0.6,
-            [TupleVariation({"wght": (0.0, 0.833334, 1.666667)}, [100, 100])],
+            [
+                TupleVariation({"wght": (0.0, 0.833334, 1.0)}, [100, 100]),
+                TupleVariation({"wght": (0.833334, 1.0, 1.0)}, [80, 80]),
+            ],
         ),
         (
             TupleVariation({"wght": (0.0, 0.2, 1.0)}, [100, 100]),
@@ -2001,7 +2004,10 @@ class LimitTupleVariationAxisRangesTest:
             TupleVariation({"wght": (0.0, 0.2, 1.0)}, [100, 100]),
             "wght",
             0.5,
-            [TupleVariation({"wght": (0.0, 0.4, 1.99994)}, [100, 100])],
+            [
+                TupleVariation({"wght": (0.0, 0.4, 1)}, [100, 100]),
+                TupleVariation({"wght": (0.4, 1, 1)}, [62.5, 62.5]),
+            ],
         ),
         (
             TupleVariation({"wght": (0.5, 0.5, 1.0)}, [100, 100]),
@@ -2065,7 +2071,10 @@ class LimitTupleVariationAxisRangesTest:
             TupleVariation({"wght": (-1.0, -0.5, 0.0)}, [100, 100]),
             "wght",
             -0.6,
-            [TupleVariation({"wght": (-1.666667, -0.833334, 0.0)}, [100, 100])],
+            [
+                TupleVariation({"wght": (-1.0, -0.833334, 0.0)}, [100, 100]),
+                TupleVariation({"wght": (-1.0, -1.0, -0.833334)}, [80, 80]),
+            ],
         ),
         (
             TupleVariation({"wght": (-1.0, -0.2, 0.0)}, [100, 100]),
@@ -2080,7 +2089,10 @@ class LimitTupleVariationAxisRangesTest:
             TupleVariation({"wght": (-1.0, -0.2, 0.0)}, [100, 100]),
             "wght",
             -0.5,
-            [TupleVariation({"wght": (-2.0, -0.4, 0.0)}, [100, 100])],
+            [
+                TupleVariation({"wght": (-1.0, -0.4, 0.0)}, [100, 100]),
+                TupleVariation({"wght": (-1.0, -1.0, -0.4)}, [62.5, 62.5]),
+            ],
         ),
         (
             TupleVariation({"wght": (-1.0, -0.5, -0.5)}, [100, 100]),
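The new expectations follow from pinning the renormalized tent at the axis limit instead of letting it run past +1: the old peak 0.5 maps to 0.5/0.6 (rounded to 0.833334), and the delta the original tent still produced at the new axis maximum is carried by an extra clamped tuple. An illustrative check of those two numbers, not library code:

    peak, end, limit = 0.5, 1.0, 0.6
    delta = 100.0
    new_peak = peak / limit                        # -> 0.833334 after rounding
    scale_at_limit = delta * (1 - (limit - peak) / (end - peak))
    assert abs(new_peak - 0.833334) < 1e-6
    assert abs(scale_at_limit - 80.0) < 1e-9       # scale of the added (0.833334, 1.0, 1.0) tuple

The 62.5 scalars in the second and fourth cases come out of the same formula with peak 0.2 and limit 0.5.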
@@ -43,7 +43,8 @@ class RebaseTentTest(object):
             (0, 0.2, 1),
             (-1, 0, 0.8),
             [
-                (1, (0, 0.25, 1.25)),
+                (1, (0, 0.25, 1)),
+                (0.25, (0.25, 1, 1)),
             ],
         ),
         # Case 3 boundary
@@ -51,7 +52,8 @@ class RebaseTentTest(object):
             (0, 0.4, 1),
             (-1, 0, 0.5),
             [
-                (1, (0, 0.8, 1.99994)),
+                (1, (0, 0.8, 1)),
+                (2.5 / 3, (0.8, 1, 1)),
             ],
         ),
         # Case 4
@@ -234,7 +236,8 @@ class RebaseTentTest(object):
             (0, 0.2, 1),
             (0, 0, 0.5),
             [
-                (1, (0, 0.4, 1.99994)),
+                (1, (0, 0.4, 1)),
+                (0.625, (0.4, 1, 1)),
             ],
         ),
         # https://github.com/fonttools/fonttools/issues/3139
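The scalars on the added tuples are simply the value of the original tent at the new axis limit, e.g. the (0, 0.4, 1) tent cut at 0.5 keeps 1 - (0.5 - 0.4)/(1 - 0.4) = 2.5/3 of its delta there, and the (0, 0.2, 1) tent keeps 1 - 0.3/0.8 = 0.625. An illustrative check:

    def tent_value(peak, end, x):
        # descending slope of a normalized tent between peak and end
        return 1 - (x - peak) / (end - peak)

    assert abs(tent_value(0.4, 1.0, 0.5) - 2.5 / 3) < 1e-12
    assert abs(tent_value(0.2, 1.0, 0.5) - 0.625) < 1e-12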
@@ -1,7 +1,13 @@
 from fontTools.colorLib.builder import buildCOLR
 from fontTools.ttLib import TTFont, newTable
 from fontTools.ttLib.tables import otTables as ot
-from fontTools.varLib import build, build_many, load_designspace, _add_COLR
+from fontTools.varLib import (
+    build,
+    build_many,
+    load_designspace,
+    _add_COLR,
+    addGSUBFeatureVariations,
+)
 from fontTools.varLib.errors import VarLibValidationError
 import fontTools.varLib.errors as varLibErrors
 from fontTools.varLib.models import VariationModel
@@ -1009,6 +1015,32 @@ Expected to see .ScriptCount==1, instead saw 0""",
             save_before_dump=True,
         )

+    def test_varlib_addGSUBFeatureVariations(self):
+        ttx_dir = self.get_test_input("master_ttx_interpolatable_ttf")
+
+        ds = DesignSpaceDocument.fromfile(
+            self.get_test_input("FeatureVars.designspace")
+        )
+        for source in ds.sources:
+            ttx_dump = TTFont()
+            ttx_dump.importXML(
+                os.path.join(
+                    ttx_dir, os.path.basename(source.filename).replace(".ufo", ".ttx")
+                )
+            )
+            source.font = ttx_dump
+
+        varfont, _, _ = build(ds, exclude=["GSUB"])
+        assert "GSUB" not in varfont
+
+        addGSUBFeatureVariations(varfont, ds)
+        assert "GSUB" in varfont
+
+        tables = ["fvar", "GSUB"]
+        expected_ttx_path = self.get_test_output("FeatureVars.ttx")
+        self.expect_ttx(varfont, expected_ttx_path, tables)
+        self.check_ttx_dump(varfont, expected_ttx_path, tables, ".ttf")
+
+
 def test_load_masters_layerName_without_required_font():
     ds = DesignSpaceDocument()
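A hedged sketch of the workflow this new test exercises; the paths are hypothetical and, as in the test, every designspace source must already carry a loaded font on source.font before building:

    from fontTools.designspaceLib import DesignSpaceDocument
    from fontTools.varLib import build, addGSUBFeatureVariations

    ds = DesignSpaceDocument.fromfile("FeatureVars.designspace")
    # ds.sources[i].font must hold the opened master fonts at this point.
    varfont, _, _ = build(ds, exclude=["GSUB"])
    addGSUBFeatureVariations(varfont, ds)   # turns the designspace <rules> into GSUB FeatureVariations
    varfont.save("FeatureVars-VF.ttf")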
@@ -5,4 +5,4 @@ sphinx>=1.5.5
 mypy>=0.782

 # Pin black as each version could change formatting, breaking CI randomly.
-black==23.10.0
+black==23.11.0
@@ -4,7 +4,7 @@ brotli==1.1.0; platform_python_implementation != "PyPy"
 brotlicffi==1.1.0.0; platform_python_implementation == "PyPy"
 unicodedata2==15.1.0; python_version <= '3.11'
 scipy==1.10.0; platform_python_implementation != "PyPy" and python_version <= '3.8'  # pyup: ignore
-scipy==1.11.3; platform_python_implementation != "PyPy" and python_version >= '3.9'
+scipy==1.11.4; platform_python_implementation != "PyPy" and python_version >= '3.9'
 munkres==1.1.4; platform_python_implementation == "PyPy"
 zopfli==0.2.3
 fs==2.4.16
@@ -15,6 +15,6 @@ ufo2ft==2.33.4
 pyobjc==10.0; sys_platform == "darwin"
 freetype-py==2.4.0
 uharfbuzz==0.37.3
-glyphsLib==6.4.1  # this is only required to run Tests/varLib/interpolatable_test.py
+glyphsLib==6.6.0  # this is only required to run Tests/varLib/interpolatable_test.py
 lxml==4.9.3
 sympy==1.12
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 4.45.2.dev0
+current_version = 4.46.1.dev0
 commit = True
 tag = False
 tag_name = {new_version}
setup.py
@@ -241,7 +241,7 @@ class release(Command):
     ]

     changelog_name = "NEWS.rst"
-    version_RE = re.compile("^[0-9]+\.[0-9]+")
+    version_RE = re.compile(r"^[0-9]+\.[0-9]+")
     date_fmt = "%Y-%m-%d"
     header_fmt = "%s (released %s)"
     commit_message = "Release {new_version}"
@@ -467,7 +467,7 @@ if ext_modules:

 setup_params = dict(
     name="fonttools",
-    version="4.45.2.dev0",
+    version="4.46.1.dev0",
     description="Tools to manipulate font files",
     author="Just van Rossum",
     author_email="just@letterror.com",