Merge remote-tracking branch 'origin/master' into otdata-colr
Commit d659e055b2

.travis.yml (10 lines changed)
@@ -1,5 +1,5 @@
 language: python
-python: 3.5
+python: 3.6

 env:
   global:
@@ -16,9 +16,6 @@ branches:

 matrix:
   fast_finish: true
-  exclude:
-    # Exclude the default Python 3.6 build
-    - python: 3.6
   include:
     - python: 3.6
      env:
@@ -52,6 +49,11 @@ cache:
   - directories:
     - $HOME/.pyenv_cache

+addons:
+  apt:
+    packages:
+      - language-pack-de
+
 before_install:
   - source ./.travis/before_install.sh

Lib/fontTools/__init__.py

@@ -4,6 +4,6 @@ from fontTools.misc.loggingTools import configLogger

 log = logging.getLogger(__name__)

-version = __version__ = "4.3.1.dev0"
+version = __version__ = "4.4.2.dev0"

 __all__ = ["version", "log", "configLogger"]
Lib/fontTools/colorLib/__init__.py (new, empty file)
Lib/fontTools/colorLib/builder.py (new file, 147 lines)

import enum
from typing import Dict, Iterable, List, Optional, Tuple, Union

from fontTools.ttLib.tables.C_O_L_R_ import LayerRecord, table_C_O_L_R_
from fontTools.ttLib.tables.C_P_A_L_ import Color, table_C_P_A_L_
from fontTools.ttLib.tables._n_a_m_e import table__n_a_m_e

from .errors import ColorLibError


def buildCOLR(colorLayers: Dict[str, List[Tuple[str, int]]]) -> table_C_O_L_R_:
    """Build COLR table from a color layers mapping.

    Args:
        colorLayers: map of base glyph names to lists of (layer glyph name,
            palette index) tuples.

    Return:
        A new COLR v0 table.
    """
    colorLayerLists = {}
    for baseGlyphName, layers in colorLayers.items():
        colorLayerLists[baseGlyphName] = [
            LayerRecord(layerGlyphName, colorID) for layerGlyphName, colorID in layers
        ]

    colr = table_C_O_L_R_()
    colr.version = 0
    colr.ColorLayers = colorLayerLists
    return colr

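Example (not part of the commit; the glyph names are invented): building a v0 COLR table for one base glyph with two layers, assuming those layer glyphs exist in the font and the indices refer to a CPAL palette.

    from fontTools.colorLib.builder import buildCOLR

    colorLayers = {
        "A": [("A.color0", 0), ("A.color1", 1)],  # (layer glyph, palette index)
    }
    colr = buildCOLR(colorLayers)
    assert colr.version == 0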
class ColorPaletteType(enum.IntFlag):
    USABLE_WITH_LIGHT_BACKGROUND = 0x0001
    USABLE_WITH_DARK_BACKGROUND = 0x0002

    @classmethod
    def _missing_(cls, value):
        # enforce reserved bits
        if isinstance(value, int) and (value < 0 or value & 0xFFFC != 0):
            raise ValueError(f"{value} is not a valid {cls.__name__}")
        return super()._missing_(value)


# None, 'abc' or {'en': 'abc', 'de': 'xyz'}
_OptionalLocalizedString = Union[None, str, Dict[str, str]]


def buildPaletteLabels(
    labels: List[_OptionalLocalizedString], nameTable: table__n_a_m_e
) -> List[Optional[int]]:
    return [
        nameTable.addMultilingualName(l, mac=False)
        if isinstance(l, dict)
        else table_C_P_A_L_.NO_NAME_ID
        if l is None
        else nameTable.addMultilingualName({"en": l}, mac=False)
        for l in labels
    ]


def buildCPAL(
    palettes: List[List[Tuple[float, float, float, float]]],
    paletteTypes: Optional[List[ColorPaletteType]] = None,
    paletteLabels: Optional[List[_OptionalLocalizedString]] = None,
    paletteEntryLabels: Optional[List[_OptionalLocalizedString]] = None,
    nameTable: Optional[table__n_a_m_e] = None,
) -> table_C_P_A_L_:
    """Build CPAL table from a list of color palettes.

    Args:
        palettes: list of lists of colors encoded as tuples of (R, G, B, A) floats
            in the range [0..1].
        paletteTypes: optional list of ColorPaletteType, one for each palette.
        paletteLabels: optional list of palette labels. Each label can be either:
            None (no label), a string (for default English labels), or a
            localized string (as a dict keyed with BCP47 language codes).
        paletteEntryLabels: optional list of palette entry labels, one for each
            palette entry (see paletteLabels).
        nameTable: optional name table where to store palette and palette entry
            labels. Required if either paletteLabels or paletteEntryLabels is set.

    Return:
        A new CPAL v0 table, or a v1 table if custom palette types or labels are
        specified.
    """
    if len({len(p) for p in palettes}) != 1:
        raise ColorLibError("color palettes have different lengths")

    if (paletteLabels or paletteEntryLabels) and not nameTable:
        raise TypeError(
            "nameTable is required if palette or palette entries have labels"
        )

    cpal = table_C_P_A_L_()
    cpal.numPaletteEntries = len(palettes[0])

    cpal.palettes = []
    for i, palette in enumerate(palettes):
        colors = []
        for j, color in enumerate(palette):
            if not isinstance(color, tuple) or len(color) != 4:
                raise ColorLibError(
                    f"In palette[{i}][{j}]: expected (R, G, B, A) tuple, got {color!r}"
                )
            if any(v > 1 or v < 0 for v in color):
                raise ColorLibError(
                    f"palette[{i}][{j}] has invalid out-of-range [0..1] color: {color!r}"
                )
            # input colors are RGBA, CPAL encodes them as BGRA
            red, green, blue, alpha = color
            colors.append(Color(*(round(v * 255) for v in (blue, green, red, alpha))))
        cpal.palettes.append(colors)

    if any(v is not None for v in (paletteTypes, paletteLabels, paletteEntryLabels)):
        cpal.version = 1

        if paletteTypes is not None:
            if len(paletteTypes) != len(palettes):
                raise ColorLibError(
                    f"Expected {len(palettes)} paletteTypes, got {len(paletteTypes)}"
                )
            cpal.paletteTypes = [ColorPaletteType(t).value for t in paletteTypes]
        else:
            cpal.paletteTypes = [table_C_P_A_L_.DEFAULT_PALETTE_TYPE] * len(palettes)

        if paletteLabels is not None:
            if len(paletteLabels) != len(palettes):
                raise ColorLibError(
                    f"Expected {len(palettes)} paletteLabels, got {len(paletteLabels)}"
                )
            cpal.paletteLabels = buildPaletteLabels(paletteLabels, nameTable)
        else:
            cpal.paletteLabels = [table_C_P_A_L_.NO_NAME_ID] * len(palettes)

        if paletteEntryLabels is not None:
            if len(paletteEntryLabels) != cpal.numPaletteEntries:
                raise ColorLibError(
                    f"Expected {cpal.numPaletteEntries} paletteEntryLabels, "
                    f"got {len(paletteEntryLabels)}"
                )
            cpal.paletteEntryLabels = buildPaletteLabels(paletteEntryLabels, nameTable)
        else:
            cpal.paletteEntryLabels = [
                table_C_P_A_L_.NO_NAME_ID
            ] * cpal.numPaletteEntries
    else:
        cpal.version = 0

    return cpal
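Example (not from the commit; the colors and types are made up): buildCPAL stays at version 0 when only palettes are given, and switches to version 1 as soon as palette types or labels are passed.

    from fontTools.colorLib.builder import buildCPAL, ColorPaletteType

    palettes = [
        [(1.0, 0.0, 0.0, 1.0), (0.0, 0.0, 1.0, 1.0)],  # red, blue
        [(0.9, 0.9, 0.9, 1.0), (0.1, 0.1, 0.1, 1.0)],  # light, dark
    ]
    cpal = buildCPAL(palettes)
    assert cpal.version == 0

    cpal = buildCPAL(
        palettes,
        paletteTypes=[
            ColorPaletteType.USABLE_WITH_LIGHT_BACKGROUND,
            ColorPaletteType.USABLE_WITH_DARK_BACKGROUND,
        ],
    )
    assert cpal.version == 1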
Lib/fontTools/colorLib/errors.py (new file, 3 lines)

class ColorLibError(Exception):
    pass
Lib/fontTools/feaLib/error.py

@@ -9,10 +9,18 @@ class FeatureLibError(Exception):
         message = Exception.__str__(self)
         if self.location:
             path, line, column = self.location
-            return "%s:%d:%d: %s" % (path, line, column, message)
+            return f"{path}:{line}:{column}: {message}"
         else:
             return message


 class IncludedFeaNotFound(FeatureLibError):
-    pass
+    def __str__(self):
+        assert self.location is not None
+
+        message = (
+            "The following feature file should be included but cannot be found: "
+            f"{Exception.__str__(self)}"
+        )
+        path, line, column = self.location
+        return f"{path}:{line}:{column}: {message}"
Lib/fontTools/feaLib/parser.py

@@ -19,6 +19,14 @@ class Parser(object):

     def __init__(self, featurefile, glyphNames=(), followIncludes=True,
                  **kwargs):
+        """Initializes a Parser object.
+
+        Note: the `glyphNames` iterable serves a double role: to help distinguish
+        glyph names from ranges in the presence of hyphens, and to ensure that
+        glyph names referenced in a feature file are actually part of a font's
+        glyph set. If the iterable is left empty, no glyph-set checking takes
+        place.
+        """
         if "glyphMap" in kwargs:
             from fontTools.misc.loggingTools import deprecateArgument
             deprecateArgument("glyphMap", "use 'glyphNames' (iterable) instead")
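A sketch of the behaviour this enables (feature text and glyph names invented): with a non-empty glyphNames iterable, referencing a glyph outside the glyph set now raises FeatureLibError instead of slipping through.

    from io import StringIO
    from fontTools.feaLib.parser import Parser
    from fontTools.feaLib.error import FeatureLibError

    fea = StringIO("feature liga { sub f i by f_i; } liga;")
    try:
        Parser(fea, glyphNames=("f", "i")).parse()  # "f_i" is deliberately missing
    except FeatureLibError as e:
        print(e)  # ...glyph names are referenced but are missing from the glyph set: f_i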
@@ -268,6 +276,7 @@ class Parser(object):
         if (accept_glyphname and
                 self.next_token_type_ in (Lexer.NAME, Lexer.CID)):
             glyph = self.expect_glyph_()
+            self.check_glyph_name_in_glyph_set(glyph)
             return self.ast.GlyphName(glyph, location=self.cur_token_location_)
         if self.next_token_type_ is Lexer.GLYPHCLASS:
             self.advance_lexer_()
@@ -292,6 +301,7 @@ class Parser(object):
                 location = self.cur_token_location_
                 if '-' in glyph and glyph not in self.glyphNames_:
                     start, limit = self.split_glyph_range_(glyph, location)
+                    self.check_glyph_name_in_glyph_set(start, limit)
                     glyphs.add_range(
                         start, limit,
                         self.make_glyph_range_(location, start, limit))
@@ -299,10 +309,12 @@ class Parser(object):
                     start = glyph
                     self.expect_symbol_("-")
                     limit = self.expect_glyph_()
+                    self.check_glyph_name_in_glyph_set(start, limit)
                     glyphs.add_range(
                         start, limit,
                         self.make_glyph_range_(location, start, limit))
                 else:
+                    self.check_glyph_name_in_glyph_set(glyph)
                     glyphs.append(glyph)
             elif self.next_token_type_ is Lexer.CID:
                 glyph = self.expect_glyph_()
@@ -311,11 +323,17 @@ class Parser(object):
                     range_start = self.cur_token_
                     self.expect_symbol_("-")
                     range_end = self.expect_cid_()
+                    self.check_glyph_name_in_glyph_set(
+                        f"cid{range_start:05d}",
+                        f"cid{range_end:05d}",
+                    )
                     glyphs.add_cid_range(range_start, range_end,
                                          self.make_cid_range_(range_location,
                                                               range_start, range_end))
                 else:
-                    glyphs.append("cid%05d" % self.cur_token_)
+                    glyph_name = f"cid{self.cur_token_:05d}"
+                    self.check_glyph_name_in_glyph_set(glyph_name)
+                    glyphs.append(glyph_name)
             elif self.next_token_type_ is Lexer.GLYPHCLASS:
                 self.advance_lexer_()
                 gc = self.glyphclasses_.resolve(self.cur_token_)
@@ -1509,6 +1527,21 @@ class Parser(object):
             raise FeatureLibError("Expected a glyph name or CID",
                                   self.cur_token_location_)

+    def check_glyph_name_in_glyph_set(self, *names):
+        """Raises if glyph name (just `start`) or glyph names of a
+        range (`start` and `end`) are not in the glyph set.
+
+        If no glyph set is present, does nothing.
+        """
+        if self.glyphNames_:
+            missing = [name for name in names if name not in self.glyphNames_]
+            if missing:
+                raise FeatureLibError(
+                    "The following glyph names are referenced but are missing from the "
+                    f"glyph set: {', '.join(missing)}",
+                    self.cur_token_location_
+                )
+
     def expect_markClass_reference_(self):
         name = self.expect_class_name_()
         mc = self.glyphclasses_.resolve(name)
Lib/fontTools/fontBuilder.py

@@ -506,6 +506,7 @@ class FontBuilder(object):
         fontSet = CFFFontSet()
         fontSet.major = 1
         fontSet.minor = 0
+        fontSet.otFont = self.font
         fontSet.fontNames = [psName]
         fontSet.topDictIndex = TopDictIndex()
@@ -520,6 +521,7 @@ class FontBuilder(object):
         topDict = TopDict()
         topDict.charset = self.font.getGlyphOrder()
         topDict.Private = private
+        topDict.GlobalSubrs = fontSet.GlobalSubrs
         for key, value in fontInfo.items():
             setattr(topDict, key, value)
         if "FontMatrix" not in fontInfo:
@@ -768,6 +770,39 @@ class FontBuilder(object):
             self.font, conditionalSubstitutions, featureTag=featureTag
         )

+    def setupCOLR(self, colorLayers):
+        """Build new COLR table using color layers dictionary.
+
+        Cf. `fontTools.colorLib.builder.buildCOLR`.
+        """
+        from fontTools.colorLib.builder import buildCOLR
+
+        self.font["COLR"] = buildCOLR(colorLayers)
+
+    def setupCPAL(
+        self,
+        palettes,
+        paletteTypes=None,
+        paletteLabels=None,
+        paletteEntryLabels=None,
+    ):
+        """Build new CPAL table using list of palettes.
+
+        Optionally build CPAL v1 table using paletteTypes, paletteLabels and
+        paletteEntryLabels.
+
+        Cf. `fontTools.colorLib.builder.buildCPAL`.
+        """
+        from fontTools.colorLib.builder import buildCPAL
+
+        self.font["CPAL"] = buildCPAL(
+            palettes,
+            paletteTypes=paletteTypes,
+            paletteLabels=paletteLabels,
+            paletteEntryLabels=paletteEntryLabels,
+            nameTable=self.font.get("name")
+        )
+
+
 def buildCmapSubTable(cmapping, format, platformID, platEncID):
     subTable = cmap_classes[format](format)
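A sketch of how the two new methods slot into an existing FontBuilder workflow (glyph names, colors and units per em are invented; outline, cmap and name-table setup are elided):

    from fontTools.fontBuilder import FontBuilder

    fb = FontBuilder(unitsPerEm=1000)
    fb.setupGlyphOrder([".notdef", "A", "A.color0", "A.color1"])
    # ... setupCharacterMap, setupGlyf/setupCFF, setupNameTable, etc. as usual ...
    fb.setupCPAL([[(1.0, 0.0, 0.0, 1.0), (0.0, 0.0, 1.0, 1.0)]])
    fb.setupCOLR({"A": [("A.color0", 0), ("A.color1", 1)]})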
Lib/fontTools/misc/timeTools.py

@@ -4,6 +4,7 @@
 from fontTools.misc.py23 import *
 import os
 import time
+from datetime import datetime, timezone
 import calendar


@@ -44,7 +45,12 @@ def timestampToString(value):
     return asctime(time.gmtime(max(0, value + epoch_diff)))

 def timestampFromString(value):
-    return calendar.timegm(time.strptime(value)) - epoch_diff
+    wkday, mnth = value[:7].split()
+    t = datetime.strptime(value[7:], ' %d %H:%M:%S %Y')
+    t = t.replace(month=MONTHNAMES.index(mnth), tzinfo=timezone.utc)
+    wkday_idx = DAYNAMES.index(wkday)
+    assert t.weekday() == wkday_idx, '"' + value + '" has inconsistent weekday'
+    return int(t.timestamp()) - epoch_diff

 def timestampNow():
     # https://reproducible-builds.org/specs/source-date-epoch/
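The rewritten timestampFromString parses the fixed asctime-style layout by hand, so it no longer depends on the current locale's month and weekday names (which is why the Travis change above installs language-pack-de for testing). A quick round-trip sketch:

    from fontTools.misc.timeTools import timestampToString, timestampFromString

    ts = 1234567890  # an arbitrary 'head' timestamp, in seconds since 1904-01-01
    assert timestampFromString(timestampToString(ts)) == ts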
Lib/fontTools/pens/ttGlyphPen.py

@@ -1,6 +1,6 @@
 from fontTools.misc.py23 import *
 from array import array
-from fontTools.misc.fixedTools import MAX_F2DOT14, otRound
+from fontTools.misc.fixedTools import MAX_F2DOT14, otRound, floatToFixedToFloat
 from fontTools.pens.basePen import LoggingPen
 from fontTools.pens.transformPen import TransformPen
 from fontTools.ttLib.tables import ttProgram
@@ -119,7 +119,11 @@ class TTGlyphPen(LoggingPen):
             component = GlyphComponent()
             component.glyphName = glyphName
             component.x, component.y = (otRound(v) for v in transformation[4:])
-            transformation = transformation[:4]
+            # quantize floats to F2Dot14 so we get same values as when decompiled
+            # from a binary glyf table
+            transformation = tuple(
+                floatToFixedToFloat(v, 14) for v in transformation[:4]
+            )
             if transformation != (1, 0, 0, 1):
                 if (self.handleOverflowingTransforms and
                         any(MAX_F2DOT14 < s <= 2 for s in transformation)):
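floatToFixedToFloat rounds a float through its F2Dot14 fixed-point representation and back, so the pen now stores the value a compiled-then-decompiled glyf component would carry rather than the raw input float. A small illustration:

    from fontTools.misc.fixedTools import floatToFixedToFloat

    scale = 0.2  # not exactly representable in 2.14 fixed-point
    quantized = floatToFixedToFloat(scale, 14)
    print(quantized)  # 0.20001220703125, i.e. 3277 / 16384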
Lib/fontTools/ttLib/tables/C_P_A_L_.py

@@ -13,6 +13,9 @@ import sys

 class table_C_P_A_L_(DefaultTable.DefaultTable):

+    NO_NAME_ID = 0xFFFF
+    DEFAULT_PALETTE_TYPE = 0
+
     def __init__(self, tag=None):
         DefaultTable.DefaultTable.__init__(self, tag)
         self.palettes = []
@@ -45,24 +48,25 @@ class table_C_P_A_L_(DefaultTable.DefaultTable):
          offsetToPaletteEntryLabelArray) = (
             struct.unpack(">LLL", data[pos:pos+12]))
         self.paletteTypes = self._decompileUInt32Array(
-            data, offsetToPaletteTypeArray, numPalettes)
+            data, offsetToPaletteTypeArray, numPalettes,
+            default=self.DEFAULT_PALETTE_TYPE)
         self.paletteLabels = self._decompileUInt16Array(
-            data, offsetToPaletteLabelArray, numPalettes)
+            data, offsetToPaletteLabelArray, numPalettes, default=self.NO_NAME_ID)
         self.paletteEntryLabels = self._decompileUInt16Array(
             data, offsetToPaletteEntryLabelArray,
-            self.numPaletteEntries)
+            self.numPaletteEntries, default=self.NO_NAME_ID)

-    def _decompileUInt16Array(self, data, offset, numElements):
+    def _decompileUInt16Array(self, data, offset, numElements, default=0):
         if offset == 0:
-            return [0] * numElements
+            return [default] * numElements
         result = array.array("H", data[offset : offset + 2 * numElements])
         if sys.byteorder != "big": result.byteswap()
         assert len(result) == numElements, result
         return result.tolist()

-    def _decompileUInt32Array(self, data, offset, numElements):
+    def _decompileUInt32Array(self, data, offset, numElements, default=0):
         if offset == 0:
-            return [0] * numElements
+            return [default] * numElements
         result = array.array("I", data[offset : offset + 4 * numElements])
         if sys.byteorder != "big": result.byteswap()
         assert len(result) == numElements, result
@@ -136,7 +140,7 @@ class table_C_P_A_L_(DefaultTable.DefaultTable):
         return result

     def _compilePaletteLabels(self):
-        if self.version == 0 or not any(self.paletteLabels):
+        if self.version == 0 or all(l == self.NO_NAME_ID for l in self.paletteLabels):
             return b''
         assert len(self.paletteLabels) == len(self.palettes)
         result = bytesjoin([struct.pack(">H", label)
@@ -145,7 +149,7 @@ class table_C_P_A_L_(DefaultTable.DefaultTable):
         return result

     def _compilePaletteEntryLabels(self):
-        if self.version == 0 or not any(self.paletteEntryLabels):
+        if self.version == 0 or all(l == self.NO_NAME_ID for l in self.paletteEntryLabels):
             return b''
         assert len(self.paletteEntryLabels) == self.numPaletteEntries
         result = bytesjoin([struct.pack(">H", label)
@@ -165,15 +169,15 @@ class table_C_P_A_L_(DefaultTable.DefaultTable):
         writer.newline()
         for index, palette in enumerate(self.palettes):
             attrs = {"index": index}
-            paletteType = paletteTypes.get(index)
-            paletteLabel = paletteLabels.get(index)
-            if self.version > 0 and paletteLabel is not None:
+            paletteType = paletteTypes.get(index, self.DEFAULT_PALETTE_TYPE)
+            paletteLabel = paletteLabels.get(index, self.NO_NAME_ID)
+            if self.version > 0 and paletteLabel != self.NO_NAME_ID:
                 attrs["label"] = paletteLabel
-            if self.version > 0 and paletteType is not None:
+            if self.version > 0 and paletteType != self.DEFAULT_PALETTE_TYPE:
                 attrs["type"] = paletteType
             writer.begintag("palette", **attrs)
             writer.newline()
-            if (self.version > 0 and paletteLabel and
+            if (self.version > 0 and paletteLabel != self.NO_NAME_ID and
                     ttFont and "name" in ttFont):
                 name = ttFont["name"].getDebugName(paletteLabel)
                 if name is not None:
@@ -184,11 +188,11 @@ class table_C_P_A_L_(DefaultTable.DefaultTable):
                 color.toXML(writer, ttFont, cindex)
             writer.endtag("palette")
             writer.newline()
-        if self.version > 0 and any(self.paletteEntryLabels):
+        if self.version > 0 and not all(l == self.NO_NAME_ID for l in self.paletteEntryLabels):
             writer.begintag("paletteEntryLabels")
             writer.newline()
             for index, label in enumerate(self.paletteEntryLabels):
-                if label:
+                if label != self.NO_NAME_ID:
                     writer.simpletag("label", index=index, value=label)
                     if (self.version > 0 and label and ttFont and "name" in ttFont):
                         name = ttFont["name"].getDebugName(label)
@@ -200,8 +204,8 @@ class table_C_P_A_L_(DefaultTable.DefaultTable):

     def fromXML(self, name, attrs, content, ttFont):
         if name == "palette":
-            self.paletteLabels.append(int(attrs.get("label", "0")))
-            self.paletteTypes.append(int(attrs.get("type", "0")))
+            self.paletteLabels.append(int(attrs.get("label", self.NO_NAME_ID)))
+            self.paletteTypes.append(int(attrs.get("type", self.DEFAULT_PALETTE_TYPE)))
             palette = []
             for element in content:
                 if isinstance(element, basestring):
@@ -221,13 +225,13 @@ class table_C_P_A_L_(DefaultTable.DefaultTable):
                     nameID = safeEval(elementAttr["value"])
                     colorLabels[labelIndex] = nameID
             self.paletteEntryLabels = [
-                colorLabels.get(i, 0)
+                colorLabels.get(i, self.NO_NAME_ID)
                 for i in range(self.numPaletteEntries)]
         elif "value" in attrs:
             value = safeEval(attrs["value"])
             setattr(self, name, value)
             if name == "numPaletteEntries":
-                self.paletteEntryLabels = [0] * self.numPaletteEntries
+                self.paletteEntryLabels = [self.NO_NAME_ID] * self.numPaletteEntries


 class Color(namedtuple("Color", "blue green red alpha")):
Lib/fontTools/ttLib/tables/_g_l_y_f.py

@@ -122,6 +122,12 @@ class table__g_l_y_f(DefaultTable.DefaultTable):
             ttFont['loca'].set(locations)
             if 'maxp' in ttFont:
                 ttFont['maxp'].numGlyphs = len(self.glyphs)
+        if not data:
+            # As a special case when all glyphs in the font are empty, add a zero byte
+            # to the table, so that OTS doesn’t reject it, and to make the table work
+            # on Windows as well.
+            # See https://github.com/khaledhosny/ots/issues/52
+            data = b"\0"
         return data

     def toXML(self, writer, ttFont, splitGlyphs=False):
@@ -1006,33 +1012,37 @@ class Glyph(object):
                 coordinates, endPts, flags = g.getCoordinates(glyfTable)
             except RecursionError:
                 raise ttLib.TTLibError("glyph '%s' contains a recursive component reference" % compo.glyphName)
+            coordinates = GlyphCoordinates(coordinates)
             if hasattr(compo, "firstPt"):
-                # move according to two reference points
+                # component uses two reference points: we apply the transform _before_
+                # computing the offset between the points
+                if hasattr(compo, "transform"):
+                    coordinates.transform(compo.transform)
                 x1,y1 = allCoords[compo.firstPt]
                 x2,y2 = coordinates[compo.secondPt]
                 move = x1-x2, y1-y2
-            else:
-                move = compo.x, compo.y
-
-            coordinates = GlyphCoordinates(coordinates)
-            if not hasattr(compo, "transform"):
                 coordinates.translate(move)
             else:
-                apple_way = compo.flags & SCALED_COMPONENT_OFFSET
-                ms_way = compo.flags & UNSCALED_COMPONENT_OFFSET
-                assert not (apple_way and ms_way)
-                if not (apple_way or ms_way):
-                    scale_component_offset = SCALE_COMPONENT_OFFSET_DEFAULT  # see top of this file
-                else:
-                    scale_component_offset = apple_way
-                if scale_component_offset:
-                    # the Apple way: first move, then scale (ie. scale the component offset)
-                    coordinates.translate(move)
-                    coordinates.transform(compo.transform)
-                else:
-                    # the MS way: first scale, then move
-                    coordinates.transform(compo.transform)
-                    coordinates.translate(move)
+                # component uses XY offsets
+                move = compo.x, compo.y
+                if not hasattr(compo, "transform"):
+                    coordinates.translate(move)
+                else:
+                    apple_way = compo.flags & SCALED_COMPONENT_OFFSET
+                    ms_way = compo.flags & UNSCALED_COMPONENT_OFFSET
+                    assert not (apple_way and ms_way)
+                    if not (apple_way or ms_way):
+                        scale_component_offset = SCALE_COMPONENT_OFFSET_DEFAULT  # see top of this file
+                    else:
+                        scale_component_offset = apple_way
+                    if scale_component_offset:
+                        # the Apple way: first move, then scale (ie. scale the component offset)
+                        coordinates.translate(move)
+                        coordinates.transform(compo.transform)
+                    else:
+                        # the MS way: first scale, then move
+                        coordinates.transform(compo.transform)
+                        coordinates.translate(move)
             offset = len(allCoords)
             allEndPts.extend(e + offset for e in endPts)
             allCoords.extend(coordinates)
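The Apple/MS distinction above only matters when a component has both an offset and a scale: translating before scaling lands the component somewhere else than scaling before translating. A worked example with invented numbers (offset (100, 0), uniform scale 0.5, point at (10, 0)):

    point = (10, 0)
    offset = (100, 0)
    scale = 0.5

    # Apple way (SCALED_COMPONENT_OFFSET): first move, then scale
    apple = ((point[0] + offset[0]) * scale, (point[1] + offset[1]) * scale)  # (55.0, 0.0)

    # MS way (UNSCALED_COMPONENT_OFFSET): first scale, then move
    ms = (point[0] * scale + offset[0], point[1] * scale + offset[1])  # (105.0, 0.0)

    print(apple, ms)  # the same component data ends up 50 units apart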
Lib/fontTools/ttLib/tables/_h_e_a_d.py

@@ -41,7 +41,7 @@ class table__h_e_a_d(DefaultTable.DefaultTable):
         if rest:
             # this is quite illegal, but there seem to be fonts out there that do this
             log.warning("extra bytes at the end of 'head' table")
-            assert rest == "\0\0"
+            assert rest == b"\0\0"

         # For timestamp fields, ignore the top four bytes. Some fonts have
         # bogus values there. Since till 2038 those bytes only can be zero,
Lib/fontTools/ttLib/woff2.py

@@ -226,7 +226,11 @@ class WOFF2Writer(SFNTWriter):
         # See:
         # https://github.com/khaledhosny/ots/issues/60
         # https://github.com/google/woff2/issues/15
-        if isTrueType and "glyf" in self.flavorData.transformedTables:
+        if (
+            isTrueType
+            and "glyf" in self.flavorData.transformedTables
+            and "glyf" in self.tables
+        ):
             self._normaliseGlyfAndLoca(padding=4)
             self._setHeadTransformFlag()

fontTools.ufoLib

@@ -2146,18 +2146,14 @@ def convertFontInfoValueForAttributeFromVersion2ToVersion3(attr, value):
     """
     if attr in _ufo2To3FloatToInt:
         try:
-            v = int(round(value))
+            value = round(value)
         except (ValueError, TypeError):
             raise UFOLibError("Could not convert value for %s." % attr)
-        if v != value:
-            value = v
     if attr in _ufo2To3NonNegativeInt:
         try:
-            v = int(abs(value))
+            value = int(abs(value))
         except (ValueError, TypeError):
             raise UFOLibError("Could not convert value for %s." % attr)
-        if v != value:
-            value = v
     elif attr in _ufo2To3NonNegativeIntOrFloat:
         try:
             v = float(abs(value))
Lib/fontTools/varLib/__init__.py

@@ -39,13 +39,11 @@ import os.path
 import logging
 from copy import deepcopy
 from pprint import pformat
+from .errors import VarLibError, VarLibValidationError

 log = logging.getLogger("fontTools.varLib")


-class VarLibError(Exception):
-    pass
-
 #
 # Creation routines
 #
@@ -81,7 +79,12 @@ def _add_fvar(font, axes, instances):
         coordinates = instance.location

         if "en" not in instance.localisedStyleName:
-            assert instance.styleName
+            if not instance.styleName:
+                raise VarLibValidationError(
+                    f"Instance at location '{coordinates}' must have a default English "
+                    "style name ('stylename' attribute on the instance element or a "
+                    "stylename element with an 'xml:lang=\"en\"' attribute)."
+                )
             localisedStyleName = dict(instance.localisedStyleName)
             localisedStyleName["en"] = tounicode(instance.styleName)
         else:
@@ -137,20 +140,32 @@ def _add_avar(font, axes):
         # Current avar requirements. We don't have to enforce
         # these on the designer and can deduce some ourselves,
         # but for now just enforce them.
-        assert axis.minimum == min(keys)
-        assert axis.maximum == max(keys)
-        assert axis.default in keys
-        # No duplicates
-        assert len(set(keys)) == len(keys), (
-            f"{axis.tag} axis: All axis mapping input='...' "
-            "values must be unique, but we found duplicates."
-        )
-        assert len(set(vals)) == len(vals), (
-            f"{axis.tag} axis: All axis mapping output='...' "
-            "values must be unique, but we found duplicates."
-        )
+        if axis.minimum != min(keys):
+            raise VarLibValidationError(
+                f"Axis '{axis.name}': there must be a mapping for the axis minimum "
+                f"value {axis.minimum} and it must be the lowest input mapping value."
+            )
+        if axis.maximum != max(keys):
+            raise VarLibValidationError(
+                f"Axis '{axis.name}': there must be a mapping for the axis maximum "
+                f"value {axis.maximum} and it must be the highest input mapping value."
+            )
+        if axis.default not in keys:
+            raise VarLibValidationError(
+                f"Axis '{axis.name}': there must be a mapping for the axis default "
+                f"value {axis.default}."
+            )
+        # No duplicate input values (output values can be >= their preceding value).
+        if len(set(keys)) != len(keys):
+            raise VarLibValidationError(
+                f"Axis '{axis.name}': All axis mapping input='...' values must be "
+                "unique, but we found duplicates."
+            )
         # Ascending values
-        assert sorted(vals) == vals
+        if sorted(vals) != vals:
+            raise VarLibValidationError(
+                f"Axis '{axis.name}': mapping output values must be in ascending order."
+            )

         keys_triple = (axis.minimum, axis.default, axis.maximum)
         vals_triple = tuple(axis.map_forward(v) for v in keys_triple)
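For reference, an axis mapping that satisfies these checks could be declared like this with designspaceLib (tag, name and map values are invented): the inputs cover the axis minimum, default and maximum, the inputs are unique, and the outputs are ascending.

    from fontTools.designspaceLib import AxisDescriptor

    axis = AxisDescriptor()
    axis.tag, axis.name = "wght", "Weight"
    axis.minimum, axis.default, axis.maximum = 100, 400, 900
    axis.map = [(100, 100), (400, 360), (900, 900)]  # (user input, design output) pairs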
@@ -214,8 +229,8 @@ def _add_stat(font, axes):


 def _add_gvar(font, masterModel, master_ttfs, tolerance=0.5, optimize=True):
-    assert tolerance >= 0
+    if tolerance < 0:
+        raise ValueError("`tolerance` must be a positive number.")

     log.info("Generating gvar")
     assert "gvar" not in font
@@ -703,9 +718,10 @@ def load_designspace(designspace):

     masters = ds.sources
     if not masters:
-        raise VarLibError("no sources found in .designspace")
+        raise VarLibValidationError("Designspace must have at least one source.")
     instances = ds.instances

+    # TODO: Use fontTools.designspaceLib.tagForAxisName instead.
     standard_axis_map = OrderedDict([
         ('weight',  ('wght', {'en': u'Weight'})),
         ('width',   ('wdth', {'en': u'Width'})),
@@ -715,11 +731,15 @@ def load_designspace(designspace):
     ])

     # Setup axes
+    if not ds.axes:
+        raise VarLibValidationError(f"Designspace must have at least one axis.")
+
     axes = OrderedDict()
-    for axis in ds.axes:
+    for axis_index, axis in enumerate(ds.axes):
         axis_name = axis.name
         if not axis_name:
-            assert axis.tag is not None
+            if not axis.tag:
+                raise VarLibValidationError(f"Axis at index {axis_index} needs a tag.")
             axis_name = axis.name = axis.tag

         if axis_name in standard_axis_map:
@@ -728,7 +748,8 @@ def load_designspace(designspace):
             if not axis.labelNames:
                 axis.labelNames.update(standard_axis_map[axis_name][1])
         else:
-            assert axis.tag is not None
+            if not axis.tag:
+                raise VarLibValidationError(f"Axis at index {axis_index} needs a tag.")
             if not axis.labelNames:
                 axis.labelNames["en"] = tounicode(axis_name)

@@ -739,14 +760,28 @@ def load_designspace(designspace):
     for obj in masters+instances:
         obj_name = obj.name or obj.styleName or ''
         loc = obj.location
+        if loc is None:
+            raise VarLibValidationError(
+                f"Source or instance '{obj_name}' has no location."
+            )
         for axis_name in loc.keys():
-            assert axis_name in axes, "Location axis '%s' unknown for '%s'." % (axis_name, obj_name)
+            if axis_name not in axes:
+                raise VarLibValidationError(
+                    f"Location axis '{axis_name}' unknown for '{obj_name}'."
+                )
         for axis_name,axis in axes.items():
             if axis_name not in loc:
-                loc[axis_name] = axis.default
+                # NOTE: `axis.default` is always user-space, but `obj.location` always design-space.
+                loc[axis_name] = axis.map_forward(axis.default)
             else:
                 v = axis.map_backward(loc[axis_name])
-                assert axis.minimum <= v <= axis.maximum, "Location for axis '%s' (mapped to %s) out of range for '%s' [%s..%s]" % (axis_name, v, obj_name, axis.minimum, axis.maximum)
+                if not (axis.minimum <= v <= axis.maximum):
+                    raise VarLibValidationError(
+                        f"Source or instance '{obj_name}' has out-of-range location "
+                        f"for axis '{axis_name}': is mapped to {v} but must be in "
+                        f"mapped range [{axis.minimum}..{axis.maximum}] (NOTE: all "
+                        "values are in user-space)."
+                    )

     # Normalize master locations

@@ -767,9 +802,15 @@ def load_designspace(designspace):
     base_idx = None
     for i,m in enumerate(normalized_master_locs):
         if all(v == 0 for v in m.values()):
-            assert base_idx is None
+            if base_idx is not None:
+                raise VarLibValidationError(
+                    "More than one base master found in Designspace."
+                )
             base_idx = i
-    assert base_idx is not None, "Base master not found; no master at default location?"
+    if base_idx is None:
+        raise VarLibValidationError(
+            "Base master not found; no master at default location?"
+        )
     log.info("Index of base master: %s", base_idx)

     return _DesignSpaceData(
@@ -926,7 +967,7 @@ def _open_font(path, master_finder=lambda s: s):
     elif tp in ("TTF", "OTF", "WOFF", "WOFF2"):
         font = TTFont(master_path)
     else:
-        raise VarLibError("Invalid master path: %r" % master_path)
+        raise VarLibValidationError("Invalid master path: %r" % master_path)
     return font


@@ -946,10 +987,10 @@ def load_masters(designspace, master_finder=lambda s: s):
         # If a SourceDescriptor has a layer name, demand that the compiled TTFont
         # be supplied by the caller. This spares us from modifying MasterFinder.
         if master.layerName and master.font is None:
-            raise AttributeError(
-                "Designspace source '%s' specified a layer name but lacks the "
-                "required TTFont object in the 'font' attribute."
-                % (master.name or "<Unknown>")
-            )
+            raise VarLibValidationError(
+                f"Designspace source '{master.name or '<Unknown>'}' specified a "
+                "layer name but lacks the required TTFont object in the 'font' "
+                "attribute."
+            )

     return designspace.loadSourceFonts(_open_font, master_finder=master_finder)
Lib/fontTools/varLib/cff.py

@@ -1,5 +1,4 @@
 from collections import namedtuple
-import os
 from fontTools.cffLib import (
     maxStackLimit,
     TopDictIndex,
@@ -21,6 +20,13 @@ from fontTools.varLib.models import allEqual
 from fontTools.misc.psCharStrings import T2CharString, T2OutlineExtractor
 from fontTools.pens.t2CharStringPen import T2CharStringPen, t2c_round

+from .errors import VarLibCFFDictMergeError, VarLibCFFPointTypeMergeError, VarLibMergeError
+
+
+# Backwards compatibility
+MergeDictError = VarLibCFFDictMergeError
+MergeTypeError = VarLibCFFPointTypeMergeError
+

 def addCFFVarStore(varFont, varModel, varDataList, masterSupports):
     fvarTable = varFont['fvar']
@@ -126,16 +132,6 @@ def convertCFFtoCFF2(varFont):
     del varFont['CFF ']


-class MergeDictError(TypeError):
-    def __init__(self, key, value, values):
-        error_msg = ["For the Private Dict key '{}', ".format(key),
-                     "the default font value list:",
-                     "\t{}".format(value),
-                     "had a different number of values than a region font:"]
-        error_msg += ["\t{}".format(region_value) for region_value in values]
-        error_msg = os.linesep.join(error_msg)
-
-
 def conv_to_int(num):
     if isinstance(num, float) and num.is_integer():
         return int(num)
@@ -219,7 +215,7 @@ def merge_PrivateDicts(top_dicts, vsindex_dict, var_model, fd_map):
             try:
                 values = zip(*values)
             except IndexError:
-                raise MergeDictError(key, value, values)
+                raise VarLibCFFDictMergeError(key, value, values)
             """
             Row 0 contains the first value from each master.
             Convert each row from absolute values to relative
@@ -426,21 +422,6 @@ def merge_charstrings(glyphOrder, num_masters, top_dicts, masterModel):
     return cvData


-class MergeTypeError(TypeError):
-    def __init__(self, point_type, pt_index, m_index, default_type, glyphName):
-        self.error_msg = [
-            "In glyph '{gname}' "
-            "'{point_type}' at point index {pt_index} in master "
-            "index {m_index} differs from the default font point "
-            "type '{default_type}'"
-            "".format(
-                gname=glyphName,
-                point_type=point_type, pt_index=pt_index,
-                m_index=m_index, default_type=default_type)
-        ][0]
-        super(MergeTypeError, self).__init__(self.error_msg)
-
-
 def makeRoundNumberFunc(tolerance):
     if tolerance < 0:
         raise ValueError("Rounding tolerance must be positive")
@@ -547,7 +528,7 @@ class CFF2CharStringMergePen(T2CharStringPen):
         else:
             cmd = self._commands[self.pt_index]
             if cmd[0] != point_type:
-                raise MergeTypeError(
+                raise VarLibCFFPointTypeMergeError(
                     point_type,
                     self.pt_index, len(cmd[1]),
                     cmd[0], self.glyphName)
@@ -560,7 +541,7 @@ class CFF2CharStringMergePen(T2CharStringPen):
         else:
             cmd = self._commands[self.pt_index]
             if cmd[0] != hint_type:
-                raise MergeTypeError(hint_type, self.pt_index, len(cmd[1]),
+                raise VarLibCFFPointTypeMergeError(hint_type, self.pt_index, len(cmd[1]),
                     cmd[0], self.glyphName)
             cmd[1].append(args)
         self.pt_index += 1
@@ -576,7 +557,7 @@ class CFF2CharStringMergePen(T2CharStringPen):
         else:
             cmd = self._commands[self.pt_index]
             if cmd[0] != hint_type:
-                raise MergeTypeError(hint_type, self.pt_index, len(cmd[1]),
+                raise VarLibCFFPointTypeMergeError(hint_type, self.pt_index, len(cmd[1]),
                     cmd[0], self.glyphName)
         self.pt_index += 1
         cmd = self._commands[self.pt_index]
@@ -646,8 +627,8 @@ class CFF2CharStringMergePen(T2CharStringPen):
         # second has only args.
         if lastOp in ['hintmask', 'cntrmask']:
             coord = list(cmd[1])
-            assert allEqual(coord), (
-                "hintmask values cannot differ between source fonts.")
+            if not allEqual(coord):
+                raise VarLibMergeError("Hintmask values cannot differ between source fonts.")
             cmd[1] = [coord[0][0]]
         else:
             coords = cmd[1]
Lib/fontTools/varLib/errors.py (new file, 39 lines)

class VarLibError(Exception):
    """Base exception for the varLib module."""


class VarLibValidationError(VarLibError):
    """Raised when input data is invalid from varLib's point of view."""


class VarLibMergeError(VarLibError):
    """Raised when input data cannot be merged into a variable font."""


class VarLibCFFDictMergeError(VarLibMergeError):
    """Raised when a CFF PrivateDict cannot be merged."""

    def __init__(self, key, value, values):
        error_msg = (
            f"For the Private Dict key '{key}', the default font value list:"
            f"\n\t{value}\nhad a different number of values than a region font:"
        )
        for region_value in values:
            error_msg += f"\n\t{region_value}"
        self.args = (error_msg,)


class VarLibCFFPointTypeMergeError(VarLibMergeError):
    """Raised when a CFF glyph cannot be merged."""

    def __init__(self, point_type, pt_index, m_index, default_type, glyph_name):
        error_msg = (
            f"Glyph '{glyph_name}': '{point_type}' at point index {pt_index} in "
            f"master index {m_index} differs from the default font point type "
            f"'{default_type}'"
        )
        self.args = (error_msg,)


class VariationModelError(VarLibError):
    """Raised when a variation model is faulty."""
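Because all of these derive from VarLibError, code that previously had to catch bare AssertionError can now distinguish bad input from other failures; a sketch (the designspace path is hypothetical):

    from fontTools import varLib
    from fontTools.varLib.errors import VarLibError, VarLibValidationError

    try:
        vf, model, master_ttfs = varLib.build("MyFamily.designspace")
    except VarLibValidationError as e:
        print("invalid designspace input:", e)
    except VarLibError as e:
        print("variable font build failed:", e)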
Lib/fontTools/varLib/featureVars.py

@@ -10,6 +10,8 @@ from fontTools.ttLib.tables import otTables as ot
 from fontTools.otlLib.builder import buildLookup, buildSingleSubstSubtable
 from collections import OrderedDict

+from .errors import VarLibValidationError
+

 def addFeatureVariations(font, conditionalSubstitutions, featureTag='rvrn'):
     """Add conditional substitutions to a Variable Font.
@@ -312,7 +314,10 @@ def addFeatureVariationsRaw(font, conditionalSubstitutions, featureTag='rvrn'):
     for conditionSet, substitutions in conditionalSubstitutions:
         conditionTable = []
         for axisTag, (minValue, maxValue) in sorted(conditionSet.items()):
-            assert minValue < maxValue
+            if minValue > maxValue:
+                raise VarLibValidationError(
+                    "A condition set has a minimum value above the maximum value."
+                )
             ct = buildConditionTable(axisIndices[axisTag], minValue, maxValue)
             conditionTable.append(ct)

@@ -14,6 +14,8 @@ from fontTools.varLib.varStore import VarStoreInstancer
 from functools import reduce
 from fontTools.otlLib.builder import buildSinglePos

+from .errors import VarLibMergeError
+

 class Merger(object):

@@ -66,8 +68,8 @@ class Merger(object):
             if hasattr(item, "ensureDecompiled"):
                 item.ensureDecompiled()
         keys = sorted(vars(out).keys())
-        assert all(keys == sorted(vars(v).keys()) for v in lst), \
-            (keys, [sorted(vars(v).keys()) for v in lst])
+        if not all(keys == sorted(vars(v).keys()) for v in lst):
+            raise VarLibMergeError((keys, [sorted(vars(v).keys()) for v in lst]))
         mergers = self.mergersFor(out)
         defaultMerger = mergers.get('*', self.__class__.mergeThings)
         try:
@@ -82,7 +84,8 @@ class Merger(object):
                 raise

     def mergeLists(self, out, lst):
-        assert allEqualTo(out, lst, len), (len(out), [len(v) for v in lst])
+        if not allEqualTo(out, lst, len):
+            raise VarLibMergeError((len(out), [len(v) for v in lst]))
         for i,(value,values) in enumerate(zip(out, zip(*lst))):
             try:
                 self.mergeThings(value, values)
@@ -92,7 +95,8 @@ class Merger(object):

     def mergeThings(self, out, lst):
         try:
-            assert allEqualTo(out, lst, type), (out, lst)
+            if not allEqualTo(out, lst, type):
+                raise VarLibMergeError((out, lst))
             mergerFunc = self.mergersFor(out).get(None, None)
             if mergerFunc is not None:
                 mergerFunc(self, out, lst)
@@ -101,7 +105,8 @@ class Merger(object):
             elif isinstance(out, list):
                 self.mergeLists(out, lst)
             else:
-                assert allEqualTo(out, lst), (out, lst)
+                if not allEqualTo(out, lst):
+                    raise VarLibMergeError((out, lst))
         except Exception as e:
             e.args = e.args + (type(out).__name__,)
             raise
@@ -122,7 +127,8 @@ class AligningMerger(Merger):
 @AligningMerger.merger(ot.GDEF, "GlyphClassDef")
 def merge(merger, self, lst):
     if self is None:
-        assert allNone(lst), (lst)
+        if not allNone(lst):
+            raise VarLibMergeError(lst)
         return

     lst = [l.classDefs for l in lst]
@@ -134,7 +140,8 @@ def merge(merger, self, lst):
     allKeys.update(*[l.keys() for l in lst])
     for k in allKeys:
         allValues = nonNone(l.get(k) for l in lst)
-        assert allEqual(allValues), allValues
+        if not allEqual(allValues):
+            raise VarLibMergeError(allValues)
         if not allValues:
             self[k] = None
         else:
@@ -170,7 +177,8 @@ def _merge_GlyphOrders(font, lst, values_lst=None, default=None):
     sortKey = font.getReverseGlyphMap().__getitem__
     order = sorted(combined, key=sortKey)
     # Make sure all input glyphsets were in proper order
-    assert all(sorted(vs, key=sortKey) == vs for vs in lst), "glyph orders are not consistent across masters"
+    if not all(sorted(vs, key=sortKey) == vs for vs in lst):
+        raise VarLibMergeError("Glyph order inconsistent across masters.")
     del combined

     paddedValues = None
@@ -197,7 +205,10 @@ def _Lookup_SinglePos_get_effective_value(subtables, glyph):
         elif self.Format == 2:
             return self.Value[self.Coverage.glyphs.index(glyph)]
         else:
-            assert 0
+            raise VarLibMergeError(
+                "Cannot retrieve effective value for SinglePos lookup, unsupported "
+                f"format {self.Format}."
+            )
     return None

 def _Lookup_PairPos_get_effective_value_pair(subtables, firstGlyph, secondGlyph):
@@ -219,13 +230,17 @@ def _Lookup_PairPos_get_effective_value_pair(subtables, firstGlyph, secondGlyph)
             klass2 = self.ClassDef2.classDefs.get(secondGlyph, 0)
             return self.Class1Record[klass1].Class2Record[klass2]
         else:
-            assert 0
+            raise VarLibMergeError(
+                "Cannot retrieve effective value pair for PairPos lookup, unsupported "
+                f"format {self.Format}."
+            )
     return None

 @AligningMerger.merger(ot.SinglePos)
 def merge(merger, self, lst):
     self.ValueFormat = valueFormat = reduce(int.__or__, [l.ValueFormat for l in lst], 0)
-    assert len(lst) == 1 or (valueFormat & ~0xF == 0), valueFormat
+    if not (len(lst) == 1 or (valueFormat & ~0xF == 0)):
+        raise VarLibMergeError(f"SinglePos format {valueFormat} is unsupported.")

     # If all have same coverage table and all are format 1,
     coverageGlyphs = self.Coverage.glyphs
@@ -511,7 +526,9 @@ def merge(merger, self, lst):
     elif self.Format == 2:
         _PairPosFormat2_merge(self, lst, merger)
     else:
-        assert False
+        raise VarLibMergeError(
+            f"Cannot merge PairPos lookup, unsupported format {self.Format}."
+        )

     del merger.valueFormat1, merger.valueFormat2

@@ -576,7 +593,8 @@ def _MarkBasePosFormat1_merge(self, lst, merger, Mark='Mark', Base='Base'):
         # failures in that case will probably signify mistakes in the
         # input masters.

-        assert allEqual(allClasses), allClasses
+        if not allEqual(allClasses):
+            raise VarLibMergeError(allClasses)
         if not allClasses:
             rec = None
         else:
@@ -625,19 +643,31 @@ def _MarkBasePosFormat1_merge(self, lst, merger, Mark='Mark', Base='Base'):

 @AligningMerger.merger(ot.MarkBasePos)
 def merge(merger, self, lst):
-    assert allEqualTo(self.Format, (l.Format for l in lst))
+    if not allEqualTo(self.Format, (l.Format for l in lst)):
+        raise VarLibMergeError(
+            f"MarkBasePos formats inconsistent across masters, "
+            f"expected {self.Format} but got {[l.Format for l in lst]}."
+        )
     if self.Format == 1:
         _MarkBasePosFormat1_merge(self, lst, merger)
     else:
-        assert False
+        raise VarLibMergeError(
+            f"Cannot merge MarkBasePos lookup, unsupported format {self.Format}."
+        )

 @AligningMerger.merger(ot.MarkMarkPos)
 def merge(merger, self, lst):
-    assert allEqualTo(self.Format, (l.Format for l in lst))
+    if not allEqualTo(self.Format, (l.Format for l in lst)):
+        raise VarLibMergeError(
+            f"MarkMarkPos formats inconsistent across masters, "
+            f"expected {self.Format} but got {[l.Format for l in lst]}."
+        )
     if self.Format == 1:
         _MarkBasePosFormat1_merge(self, lst, merger, 'Mark1', 'Mark2')
     else:
-        assert False
+        raise VarLibMergeError(
+            f"Cannot merge MarkMarkPos lookup, unsupported format {self.Format}."
+        )


 def _PairSet_flatten(lst, font):
@@ -766,8 +796,16 @@ def merge(merger, self, lst):
         if not sts:
             continue
         if sts[0].__class__.__name__.startswith('Extension'):
-            assert allEqual([st.__class__ for st in sts])
-            assert allEqual([st.ExtensionLookupType for st in sts])
+            if not allEqual([st.__class__ for st in sts]):
+                raise VarLibMergeError(
+                    "Use of extensions inconsistent between masters: "
+                    f"{[st.__class__.__name__ for st in sts]}."
+                )
+            if not allEqual([st.ExtensionLookupType for st in sts]):
+                raise VarLibMergeError(
+                    "Extension lookup type differs between masters: "
+                    f"{[st.ExtensionLookupType for st in sts]}."
+                )
             l.LookupType = sts[0].ExtensionLookupType
             new_sts = [st.ExtSubTable for st in sts]
             del sts[:]
@@ -995,7 +1033,8 @@ class VariationMerger(AligningMerger):
         masterModel = None
         if None in lst:
             if allNone(lst):
-                assert out is None, (out, lst)
+                if out is not None:
+                    raise VarLibMergeError((out, lst))
                 return
             masterModel = self.model
             model, lst = masterModel.getSubModel(lst)
@@ -1015,7 +1054,8 @@ def buildVarDevTable(store_builder, master_values):

 @VariationMerger.merger(ot.CaretValue)
 def merge(merger, self, lst):
-    assert self.Format == 1
+    if self.Format != 1:
+        raise VarLibMergeError(f"CaretValue format {self.Format} unsupported.")
     self.Coordinate, DeviceTable = buildVarDevTable(merger.store_builder, [a.Coordinate for a in lst])
     if DeviceTable:
         self.Format = 3
@@ -1023,7 +1063,8 @@ def merge(merger, self, lst):

 @VariationMerger.merger(ot.Anchor)
 def merge(merger, self, lst):
-    assert self.Format == 1
+    if self.Format != 1:
+        raise VarLibMergeError(f"Anchor format {self.Format} unsupported.")
     self.XCoordinate, XDeviceTable = buildVarDevTable(merger.store_builder, [a.XCoordinate for a in lst])
     self.YCoordinate, YDeviceTable = buildVarDevTable(merger.store_builder, [a.YCoordinate for a in lst])
     if XDeviceTable or YDeviceTable:
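The merger hunks above replace bare asserts with VarLibMergeError, so incompatible masters now fail with a catchable, descriptive exception rather than an AssertionError. A minimal sketch of how calling code might take advantage of that (not part of this commit; the designspace path is a placeholder):

    from fontTools import varLib
    from fontTools.varLib.errors import VarLibError, VarLibMergeError

    try:
        # build() runs the merger over the master fonts; lookup or format
        # mismatches between masters now surface as VarLibMergeError.
        vf, model, master_ttfs = varLib.build("MyFamily.designspace")  # placeholder path
    except VarLibMergeError as exc:
        print("masters could not be merged:", exc.args)
    except VarLibError:
        # broader varLib failures share this base class (see the NEWS entry below)
        raise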
@@ -5,6 +5,8 @@ __all__ = ['nonNone', 'allNone', 'allEqual', 'allEqualTo', 'subList',
            'supportScalar',
            'VariationModel']

+from .errors import VariationModelError
+

 def nonNone(lst):
     return [l for l in lst if l is not None]
@@ -43,7 +45,11 @@ def normalizeValue(v, triple):
     0.5
     """
     lower, default, upper = triple
-    assert lower <= default <= upper, "invalid axis values: %3.3f, %3.3f %3.3f"%(lower, default, upper)
+    if not (lower <= default <= upper):
+        raise ValueError(
+            f"Invalid axis values, must be minimum, default, maximum: "
+            f"{lower:3.3f}, {default:3.3f}, {upper:3.3f}"
+        )
     v = max(min(v, upper), lower)
     if v == default:
         v = 0.
@@ -192,7 +198,7 @@ class VariationModel(object):

     def __init__(self, locations, axisOrder=None):
         if len(set(tuple(sorted(l.items())) for l in locations)) != len(locations):
-            raise ValueError("locations must be unique")
+            raise VariationModelError("Locations must be unique.")

         self.origLocations = locations
         self.axisOrder = axisOrder if axisOrder is not None else []
@@ -220,7 +226,8 @@ class VariationModel(object):

     @staticmethod
     def getMasterLocationsSortKeyFunc(locations, axisOrder=[]):
-        assert {} in locations, "Base master not found."
+        if {} not in locations:
+            raise VariationModelError("Base master not found.")
         axisPoints = {}
         for loc in locations:
             if len(loc) != 1:
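With the models.py changes above, an invalid axis (minimum, default, maximum) triple raises ValueError and model-level problems raise VariationModelError instead of tripping asserts. A small sketch of the new behaviour with illustrative values (the same cases are exercised by the tests further down):

    from fontTools.varLib.models import VariationModel, VariationModelError, normalizeValue

    print(normalizeValue(650, (100, 400, 900)))  # 0.5 -- halfway between default and maximum
    print(normalizeValue(400, (100, 400, 900)))  # 0 -- the default always maps to zero

    try:
        normalizeValue(650, (400, 100, 900))  # default below minimum: invalid triple
    except ValueError as exc:
        print(exc)

    try:
        VariationModel([{"wght": 0}, {"wght": 0}])  # duplicate master locations
    except VariationModelError as exc:
        print(exc)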
NEWS.rst (33 changed lines)
@@ -1,3 +1,36 @@
+4.4.1 (released 2020-02-26)
+---------------------------
+
+- [woff2] Skip normalizing ``glyf`` and ``loca`` tables if these are missing from
+  a font (e.g. in NotoColorEmoji using ``CBDT/CBLC`` tables).
+- [timeTools] Use non-localized date parsing in ``timestampFromString``, to fix
+  error when non-English ``LC_TIME`` locale is set (#1838, #1839).
+- [fontBuilder] Make sure the CFF table generated by fontBuilder can be used by varLib
+  without having to compile and decompile the table first. This was breaking in
+  converting the CFF table to CFF2 due to some unset attributes (#1836).
+
+4.4.0 (released 2020-02-18)
+---------------------------
+
+- [colorLib] Added ``fontTools.colorLib.builder`` module, initially with ``buildCOLR``
+  and ``buildCPAL`` public functions. More color font formats will follow (#1827).
+- [fontBuilder] Added ``setupCOLR`` and ``setupCPAL`` methods (#1826).
+- [ttGlyphPen] Quantize ``GlyphComponent.transform`` floats to ``F2Dot14`` to fix
+  round-trip issue when computing bounding boxes of transformed components (#1830).
+- [glyf] If a component uses reference points (``firstPt`` and ``secondPt``) for
+  alignment (instead of X and Y offsets), compute the effective translation offset
+  *after* having applied any transform (#1831).
+- [glyf] When all glyphs have zero contours, compile ``glyf`` table data as a single
+  null byte in order to pass validation by OTS and Windows (#1829).
+- [feaLib] Parsing feature code now ensures that referenced glyph names are part of
+  the known glyph set, unless a glyph set was not provided.
+- [varLib] When filling in the default axis value for a missing location of a source or
+  instance, correctly map the value forward.
+- [varLib] The avar table can now contain mapping output values that are greater than
+  OR EQUAL to the preceding value, as the avar specification allows this.
+- [varLib] The errors of the module are now ordered hierarchically below VarLibError.
+  See #1821.
+
 4.3.0 (released 2020-02-03)
 ---------------------------
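The 4.4.0 entry above introduces fontTools.colorLib.builder; the new test file below exercises it in detail. As a condensed sketch of the intended usage (glyph names and colors here are illustrative only):

    from fontTools.colorLib.builder import buildCOLR, buildCPAL

    # COLR v0: map each base glyph to a list of (layer glyph, palette index) pairs
    colr = buildCOLR({"A": [("A.layer0", 0), ("A.layer1", 1)]})

    # CPAL: a list of palettes, each a list of (R, G, B, A) floats in the 0..1 range
    cpal = buildCPAL([[(1.0, 0.0, 0.0, 1.0), (0.0, 0.0, 1.0, 1.0)]])

    # the built tables can then be assigned to a TTFont, e.g.
    # font["COLR"] = colr; font["CPAL"] = cpal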
Tests/colorLib/__init__.py (new empty file)

Tests/colorLib/builder_test.py (new file, 187 lines)
@@ -0,0 +1,187 @@
+from fontTools.ttLib import newTable
+from fontTools.colorLib import builder
+from fontTools.colorLib.errors import ColorLibError
+import pytest
+
+
+def test_buildCOLR_v0():
+    color_layer_lists = {
+        "a": [("a.color0", 0), ("a.color1", 1)],
+        "b": [("b.color1", 1), ("b.color0", 0)],
+    }
+
+    colr = builder.buildCOLR(color_layer_lists)
+
+    assert colr.tableTag == "COLR"
+    assert colr.version == 0
+    assert colr.ColorLayers["a"][0].name == "a.color0"
+    assert colr.ColorLayers["a"][0].colorID == 0
+    assert colr.ColorLayers["a"][1].name == "a.color1"
+    assert colr.ColorLayers["a"][1].colorID == 1
+    assert colr.ColorLayers["b"][0].name == "b.color1"
+    assert colr.ColorLayers["b"][0].colorID == 1
+    assert colr.ColorLayers["b"][1].name == "b.color0"
+    assert colr.ColorLayers["b"][1].colorID == 0
+
+
+def test_buildCPAL_v0():
+    palettes = [
+        [(0.68, 0.20, 0.32, 1.0), (0.45, 0.68, 0.21, 1.0)],
+        [(0.68, 0.20, 0.32, 0.6), (0.45, 0.68, 0.21, 0.6)],
+        [(0.68, 0.20, 0.32, 0.3), (0.45, 0.68, 0.21, 0.3)],
+    ]
+
+    cpal = builder.buildCPAL(palettes)
+
+    assert cpal.tableTag == "CPAL"
+    assert cpal.version == 0
+    assert cpal.numPaletteEntries == 2
+
+    assert len(cpal.palettes) == 3
+    assert [tuple(c) for c in cpal.palettes[0]] == [
+        (82, 51, 173, 255),
+        (54, 173, 115, 255),
+    ]
+    assert [tuple(c) for c in cpal.palettes[1]] == [
+        (82, 51, 173, 153),
+        (54, 173, 115, 153),
+    ]
+    assert [tuple(c) for c in cpal.palettes[2]] == [
+        (82, 51, 173, 76),
+        (54, 173, 115, 76),
+    ]
+
+
+def test_buildCPAL_palettes_different_lengths():
+    with pytest.raises(ColorLibError, match="have different lengths"):
+        builder.buildCPAL([[(1, 1, 1, 1)], [(0, 0, 0, 1), (0.5, 0.5, 0.5, 1)]])
+
+
+def test_buildPaletteLabels():
+    name_table = newTable("name")
+    name_table.names = []
+
+    name_ids = builder.buildPaletteLabels(
+        [None, "hi", {"en": "hello", "de": "hallo"}], name_table
+    )
+
+    assert name_ids == [0xFFFF, 256, 257]
+
+    assert len(name_table.names) == 3
+    assert str(name_table.names[0]) == "hi"
+    assert name_table.names[0].nameID == 256
+
+    assert str(name_table.names[1]) == "hallo"
+    assert name_table.names[1].nameID == 257
+
+    assert str(name_table.names[2]) == "hello"
+    assert name_table.names[2].nameID == 257
+
+
+def test_build_CPAL_v1_types_no_labels():
+    palettes = [
+        [(0.1, 0.2, 0.3, 1.0), (0.4, 0.5, 0.6, 1.0)],
+        [(0.1, 0.2, 0.3, 0.6), (0.4, 0.5, 0.6, 0.6)],
+        [(0.1, 0.2, 0.3, 0.3), (0.4, 0.5, 0.6, 0.3)],
+    ]
+    paletteTypes = [
+        builder.ColorPaletteType.USABLE_WITH_LIGHT_BACKGROUND,
+        builder.ColorPaletteType.USABLE_WITH_DARK_BACKGROUND,
+        builder.ColorPaletteType.USABLE_WITH_LIGHT_BACKGROUND
+        | builder.ColorPaletteType.USABLE_WITH_DARK_BACKGROUND,
+    ]
+
+    cpal = builder.buildCPAL(palettes, paletteTypes=paletteTypes)
+
+    assert cpal.tableTag == "CPAL"
+    assert cpal.version == 1
+    assert cpal.numPaletteEntries == 2
+    assert len(cpal.palettes) == 3
+
+    assert cpal.paletteTypes == paletteTypes
+    assert cpal.paletteLabels == [cpal.NO_NAME_ID] * len(palettes)
+    assert cpal.paletteEntryLabels == [cpal.NO_NAME_ID] * cpal.numPaletteEntries
+
+
+def test_build_CPAL_v1_labels():
+    palettes = [
+        [(0.1, 0.2, 0.3, 1.0), (0.4, 0.5, 0.6, 1.0)],
+        [(0.1, 0.2, 0.3, 0.6), (0.4, 0.5, 0.6, 0.6)],
+        [(0.1, 0.2, 0.3, 0.3), (0.4, 0.5, 0.6, 0.3)],
+    ]
+    paletteLabels = ["First", {"en": "Second", "it": "Seconda"}, None]
+    paletteEntryLabels = ["Foo", "Bar"]
+
+    with pytest.raises(TypeError, match="nameTable is required"):
+        builder.buildCPAL(palettes, paletteLabels=paletteLabels)
+    with pytest.raises(TypeError, match="nameTable is required"):
+        builder.buildCPAL(palettes, paletteEntryLabels=paletteEntryLabels)
+
+    name_table = newTable("name")
+    name_table.names = []
+
+    cpal = builder.buildCPAL(
+        palettes,
+        paletteLabels=paletteLabels,
+        paletteEntryLabels=paletteEntryLabels,
+        nameTable=name_table,
+    )
+
+    assert cpal.tableTag == "CPAL"
+    assert cpal.version == 1
+    assert cpal.numPaletteEntries == 2
+    assert len(cpal.palettes) == 3
+
+    assert cpal.paletteTypes == [cpal.DEFAULT_PALETTE_TYPE] * len(palettes)
+    assert cpal.paletteLabels == [256, 257, cpal.NO_NAME_ID]
+    assert cpal.paletteEntryLabels == [258, 259]
+
+    assert name_table.getDebugName(256) == "First"
+    assert name_table.getDebugName(257) == "Second"
+    assert name_table.getDebugName(258) == "Foo"
+    assert name_table.getDebugName(259) == "Bar"
+
+
+def test_invalid_ColorPaletteType():
+    with pytest.raises(ValueError, match="not a valid ColorPaletteType"):
+        builder.ColorPaletteType(-1)
+    with pytest.raises(ValueError, match="not a valid ColorPaletteType"):
+        builder.ColorPaletteType(4)
+    with pytest.raises(ValueError, match="not a valid ColorPaletteType"):
+        builder.ColorPaletteType("abc")
+
+
+def test_buildCPAL_v1_invalid_args_length():
+    with pytest.raises(ColorLibError, match="Expected 2 paletteTypes, got 1"):
+        builder.buildCPAL([[(0, 0, 0, 0)], [(1, 1, 1, 1)]], paletteTypes=[1])
+
+    with pytest.raises(ColorLibError, match="Expected 2 paletteLabels, got 1"):
+        builder.buildCPAL(
+            [[(0, 0, 0, 0)], [(1, 1, 1, 1)]],
+            paletteLabels=["foo"],
+            nameTable=newTable("name"),
+        )
+
+    with pytest.raises(ColorLibError, match="Expected 1 paletteEntryLabels, got 0"):
+        cpal = builder.buildCPAL(
+            [[(0, 0, 0, 0)], [(1, 1, 1, 1)]],
+            paletteEntryLabels=[],
+            nameTable=newTable("name"),
+        )
+
+
+def test_buildCPAL_invalid_color():
+    with pytest.raises(
+        ColorLibError,
+        match=r"In palette\[0\]\[1\]: expected \(R, G, B, A\) tuple, got \(1, 1, 1\)",
+    ):
+        builder.buildCPAL([[(1, 1, 1, 1), (1, 1, 1)]])
+
+    with pytest.raises(
+        ColorLibError,
+        match=(
+            r"palette\[1\]\[0\] has invalid out-of-range "
+            r"\[0..1\] color: \(1, 1, -1, 2\)"
+        ),
+    ):
+        builder.buildCPAL([[(0, 0, 0, 0)], [(1, 1, -1, 2)]])
@@ -41,7 +41,7 @@ def makeTTFont():
         a_n_d T_h T_h.swash germandbls ydieresis yacute breve
         grave acute dieresis macron circumflex cedilla umlaut ogonek caron
         damma hamza sukun kasratan lam_meem_jeem noon.final noon.initial
-        by feature lookup sub table
+        by feature lookup sub table uni0327 uni0328 e.fina
     """.split()
     font = TTFont()
     font.setGlyphOrder(glyphs)
@@ -178,7 +178,9 @@ class IncludingLexerTest(unittest.TestCase):
     def test_include_missing_file(self):
         lexer = IncludingLexer(self.getpath("include/includemissingfile.fea"))
         self.assertRaisesRegex(IncludedFeaNotFound,
-                               "includemissingfile.fea:1:8: missingfile.fea",
+                               "includemissingfile.fea:1:8: The following feature file "
+                               "should be included but cannot be found: "
+                               "missingfile.fea",
                                lambda: list(lexer))

     def test_featurefilepath_None(self):
@@ -223,7 +225,7 @@ class IncludingLexerTest(unittest.TestCase):
             # an in-memory stream, so it will use the current working
             # directory to resolve relative include statements
             lexer = IncludingLexer(UnicodeIO("include(included.fea);"))
-            files = set(loc[0] for _, _, loc in lexer)
+            files = set(os.path.realpath(loc[0]) for _, _, loc in lexer)
             expected = os.path.realpath(included.name)
             self.assertIn(expected, files)
         finally:
@@ -39,6 +39,14 @@ GLYPHNAMES = ("""
     n.sc o.sc p.sc q.sc r.sc s.sc t.sc u.sc v.sc w.sc x.sc y.sc z.sc
     a.swash b.swash x.swash y.swash z.swash
     foobar foo.09 foo.1234 foo.9876
+    one two five six acute grave dieresis umlaut cedilla ogonek macron
+    a_f_f_i o_f_f_i f_i f_f_i one.fitted one.oldstyle a.1 a.2 a.3 c_t
+    PRE SUF FIX BACK TRACK LOOK AHEAD ampersand ampersand.1 ampersand.2
+    cid00001 cid00002 cid00003 cid00004 cid00005 cid00006 cid00007
+    cid12345 cid78987 cid00999 cid01000 cid01001 cid00998 cid00995
+    cid00111 cid00222
+    comma endash emdash figuredash damma hamza
+    c_d d.alt n.end s.end f_f
 """).split() + ["foo.%d" % i for i in range(1, 200)]


@@ -260,6 +268,12 @@ class ParserTest(unittest.TestCase):
             FeatureLibError, "Font revision numbers must be positive",
             self.parse, "table head {FontRevision -17.2;} head;")

+    def test_strict_glyph_name_check(self):
+        self.parse("@bad = [a b ccc];", glyphNames=("a", "b", "ccc"))
+
+        with self.assertRaisesRegex(FeatureLibError, "missing from the glyph set: ccc"):
+            self.parse("@bad = [a b ccc];", glyphNames=("a", "b"))
+
     def test_glyphclass(self):
         [gc] = self.parse("@dash = [endash emdash figuredash];").statements
         self.assertEqual(gc.name, "dash")
@@ -255,6 +255,19 @@ def test_build_cff2(tmpdir):
     _verifyOutput(outPath)


+def test_build_cff_to_cff2(tmpdir):
+    fb, _, _ = _setupFontBuilder(False, 1000)
+
+    pen = T2CharStringPen(600, None)
+    drawTestGlyph(pen)
+    charString = pen.getCharString()
+    charStrings = {".notdef": charString, "A": charString, "a": charString, ".null": charString}
+    fb.setupCFF("TestFont", {}, charStrings, {})
+
+    from fontTools.varLib.cff import convertCFFtoCFF2
+    convertCFFtoCFF2(fb.font)
+
+
 def test_setupNameTable_no_mac():
     fb, _, nameStrings = _setupFontBuilder(True)
     fb.setupNameTable(nameStrings, mac=False)
@@ -1,7 +1,8 @@
 from fontTools.misc.py23 import *
-from fontTools.misc.timeTools import asctime, timestampNow, epoch_diff
+from fontTools.misc.timeTools import asctime, timestampNow, timestampToString, timestampFromString, epoch_diff
 import os
 import time
+import locale
 import pytest


@@ -21,3 +22,17 @@ def test_source_date_epoch():

     del os.environ["SOURCE_DATE_EPOCH"]
     assert timestampNow() + epoch_diff != 150687315
+
+
+# test for issue #1838
+def test_date_parsing_with_locale():
+    l = locale.getlocale(locale.LC_TIME)
+    try:
+        locale.setlocale(locale.LC_TIME, 'de_DE.utf8')
+    except locale.Error:
+        pytest.skip("Locale de_DE not available")
+
+    try:
+        assert timestampFromString(timestampToString(timestampNow()))
+    finally:
+        locale.setlocale(locale.LC_TIME, l)
@@ -264,6 +264,33 @@ class TTGlyphPenTest(TestCase):
         compositeGlyph.recalcBounds(glyphSet)
         self.assertGlyphBoundsEqual(compositeGlyph, (-86, 0, 282, 1))

+    def test_scaled_component_bounds(self):
+        glyphSet = {}
+
+        pen = TTGlyphPen(glyphSet)
+        pen.moveTo((-231, 939))
+        pen.lineTo((-55, 939))
+        pen.lineTo((-55, 745))
+        pen.lineTo((-231, 745))
+        pen.closePath()
+        glyphSet["gravecomb"] = gravecomb = pen.glyph()
+
+        pen = TTGlyphPen(glyphSet)
+        pen.moveTo((-278, 939))
+        pen.lineTo((8, 939))
+        pen.lineTo((8, 745))
+        pen.lineTo((-278, 745))
+        pen.closePath()
+        glyphSet["circumflexcomb"] = circumflexcomb = pen.glyph()
+
+        pen = TTGlyphPen(glyphSet)
+        pen.addComponent("circumflexcomb", (1, 0, 0, 1, 0, 0))
+        pen.addComponent("gravecomb", (0.9, 0, 0, 0.9, 198, 180))
+        glyphSet["uni0302_uni0300"] = uni0302_uni0300 = pen.glyph()
+
+        uni0302_uni0300.recalcBounds(glyphSet)
+        self.assertGlyphBoundsEqual(uni0302_uni0300, (-278, 745, 148, 1025))
+
+
 class _TestGlyph(object):
     def __init__(self, glyph):
@@ -12,6 +12,8 @@ import shutil
 import sys
 import tempfile
 import unittest
+import pathlib
+import pytest


 class SubsetTest(unittest.TestCase):
@@ -835,5 +837,40 @@ def test_subset_single_pos_format():
     ]


+@pytest.fixture
+def ttf_path(tmp_path):
+    # $(dirname $0)/../ttLib/data
+    ttLib_data = pathlib.Path(__file__).parent.parent / "ttLib" / "data"
+    font = TTFont()
+    font.importXML(ttLib_data / "TestTTF-Regular.ttx")
+    font_path = tmp_path / "TestTTF-Regular.ttf"
+    font.save(font_path)
+    return font_path
+
+
+def test_subset_empty_glyf(tmp_path, ttf_path):
+    subset_path = tmp_path / (ttf_path.name + ".subset")
+    # only keep empty .notdef and space glyph, resulting in an empty glyf table
+    subset.main(
+        [
+            str(ttf_path),
+            "--no-notdef-outline",
+            "--glyph-names",
+            f"--output-file={subset_path}",
+            "--glyphs=.notdef space",
+        ]
+    )
+    subset_font = TTFont(subset_path)
+
+    assert subset_font.getGlyphOrder() == [".notdef", "space"]
+    assert subset_font.reader['glyf'] == b"\x00"
+
+    glyf = subset_font["glyf"]
+    assert all(glyf[g].numberOfContours == 0 for g in subset_font.getGlyphOrder())
+
+    loca = subset_font["loca"]
+    assert all(loc == 0 for loc in loca)
+
+
 if __name__ == "__main__":
     sys.exit(unittest.main())
@@ -66,9 +66,6 @@ class CPALTest(unittest.TestCase):
         self.assertEqual(cpal.numPaletteEntries, 2)
         self.assertEqual(repr(cpal.palettes),
                          '[[#000000FF, #66CCFFFF], [#000000FF, #800000FF]]')
-        self.assertEqual(cpal.paletteLabels, [0, 0])
-        self.assertEqual(cpal.paletteTypes, [0, 0])
-        self.assertEqual(cpal.paletteEntryLabels, [0, 0])

     def test_decompile_v0_sharingColors(self):
         cpal = newTable('CPAL')
@@ -80,9 +77,6 @@ class CPALTest(unittest.TestCase):
             '[#223344FF, #99887711, #55555555]',
             '[#223344FF, #99887711, #FFFFFFFF]',
             '[#223344FF, #99887711, #55555555]'])
-        self.assertEqual(cpal.paletteLabels, [0, 0, 0, 0])
-        self.assertEqual(cpal.paletteTypes, [0, 0, 0, 0])
-        self.assertEqual(cpal.paletteEntryLabels, [0, 0, 0])

     def test_decompile_v1_noLabelsNoTypes(self):
         cpal = newTable('CPAL')
@@ -92,9 +86,10 @@ class CPALTest(unittest.TestCase):
         self.assertEqual([repr(p) for p in cpal.palettes], [
             '[#CAFECAFE, #22110033, #66554477]',  # RGBA
             '[#59413127, #42424242, #13330037]'])
-        self.assertEqual(cpal.paletteLabels, [0, 0])
+        self.assertEqual(cpal.paletteLabels, [cpal.NO_NAME_ID] * len(cpal.palettes))
         self.assertEqual(cpal.paletteTypes, [0, 0])
-        self.assertEqual(cpal.paletteEntryLabels, [0, 0, 0])
+        self.assertEqual(cpal.paletteEntryLabels,
+                         [cpal.NO_NAME_ID] * cpal.numPaletteEntries)

     def test_decompile_v1(self):
         cpal = newTable('CPAL')
@@ -194,9 +189,6 @@ class CPALTest(unittest.TestCase):
         self.assertEqual(cpal.version, 0)
         self.assertEqual(cpal.numPaletteEntries, 2)
         self.assertEqual(repr(cpal.palettes), '[[#12345678, #FEDCBA98]]')
-        self.assertEqual(cpal.paletteLabels, [0])
-        self.assertEqual(cpal.paletteTypes, [0])
-        self.assertEqual(cpal.paletteEntryLabels, [0, 0])

     def test_fromXML_v1(self):
         cpal = newTable('CPAL')
@@ -218,7 +210,8 @@ class CPALTest(unittest.TestCase):
                          '[[#12345678, #FEDCBA98, #CAFECAFE]]')
         self.assertEqual(cpal.paletteLabels, [259])
         self.assertEqual(cpal.paletteTypes, [2])
-        self.assertEqual(cpal.paletteEntryLabels, [0, 262, 0])
+        self.assertEqual(cpal.paletteEntryLabels,
+                         [cpal.NO_NAME_ID, 262, cpal.NO_NAME_ID])


 if __name__ == "__main__":
@@ -6,9 +6,12 @@ from fontTools.pens.recordingPen import RecordingPen, RecordingPointPen
 from fontTools.pens.pointPen import PointToSegmentPen
 from fontTools.ttLib import TTFont, newTable, TTLibError
 from fontTools.ttLib.tables._g_l_y_f import (
+    Glyph,
     GlyphCoordinates,
     GlyphComponent,
     ARGS_ARE_XY_VALUES,
+    SCALED_COMPONENT_OFFSET,
+    UNSCALED_COMPONENT_OFFSET,
     WE_HAVE_A_SCALE,
     WE_HAVE_A_TWO_BY_TWO,
     WE_HAVE_AN_X_AND_Y_SCALE,
@@ -190,7 +193,7 @@ def strip_ttLibVersion(string):
     return re.sub(' ttLibVersion=".*"', '', string)


-class glyfTableTest(unittest.TestCase):
+class GlyfTableTest(unittest.TestCase):

     def __init__(self, methodName):
         unittest.TestCase.__init__(self, methodName)
@@ -338,6 +341,136 @@ class glyfTableTest(unittest.TestCase):
         glyfTable["glyph00003"].drawPoints(PointToSegmentPen(pen2), glyfTable)
         self.assertEqual(pen1.value, pen2.value)

+    def test_compile_empty_table(self):
+        font = TTFont(sfntVersion="\x00\x01\x00\x00")
+        font.importXML(GLYF_TTX)
+        glyfTable = font['glyf']
+        # set all glyphs to zero contours
+        glyfTable.glyphs = {glyphName: Glyph() for glyphName in font.getGlyphOrder()}
+        glyfData = glyfTable.compile(font)
+        self.assertEqual(glyfData, b"\x00")
+        self.assertEqual(list(font["loca"]), [0] * (font["maxp"].numGlyphs+1))
+
+    def test_decompile_empty_table(self):
+        font = TTFont()
+        glyphNames = [".notdef", "space"]
+        font.setGlyphOrder(glyphNames)
+        font["loca"] = newTable("loca")
+        font["loca"].locations = [0] * (len(glyphNames) + 1)
+        font["glyf"] = newTable("glyf")
+        font["glyf"].decompile(b"\x00", font)
+        self.assertEqual(len(font["glyf"]), 2)
+        self.assertEqual(font["glyf"][".notdef"].numberOfContours, 0)
+        self.assertEqual(font["glyf"]["space"].numberOfContours, 0)
+
+
+class GlyphTest:
+
+    def test_getCoordinates(self):
+        glyphSet = {}
+        pen = TTGlyphPen(glyphSet)
+        pen.moveTo((0, 0))
+        pen.lineTo((100, 0))
+        pen.lineTo((100, 100))
+        pen.lineTo((0, 100))
+        pen.closePath()
+        # simple contour glyph
+        glyphSet["a"] = a = pen.glyph()
+
+        assert a.getCoordinates(glyphSet) == (
+            GlyphCoordinates([(0, 0), (100, 0), (100, 100), (0, 100)]),
+            [3],
+            array.array("B", [1, 1, 1, 1]),
+        )
+
+        # composite glyph with only XY offset
+        pen = TTGlyphPen(glyphSet)
+        pen.addComponent("a", (1, 0, 0, 1, 10, 20))
+        glyphSet["b"] = b = pen.glyph()
+
+        assert b.getCoordinates(glyphSet) == (
+            GlyphCoordinates([(10, 20), (110, 20), (110, 120), (10, 120)]),
+            [3],
+            array.array("B", [1, 1, 1, 1]),
+        )
+
+        # composite glyph with a scale (and referencing another composite glyph)
+        pen = TTGlyphPen(glyphSet)
+        pen.addComponent("b", (0.5, 0, 0, 0.5, 0, 0))
+        glyphSet["c"] = c = pen.glyph()
+
+        assert c.getCoordinates(glyphSet) == (
+            GlyphCoordinates([(5, 10), (55, 10), (55, 60), (5, 60)]),
+            [3],
+            array.array("B", [1, 1, 1, 1]),
+        )
+
+        # composite glyph with unscaled offset (MS-style)
+        pen = TTGlyphPen(glyphSet)
+        pen.addComponent("a", (0.5, 0, 0, 0.5, 10, 20))
+        glyphSet["d"] = d = pen.glyph()
+        d.components[0].flags |= UNSCALED_COMPONENT_OFFSET
+
+        assert d.getCoordinates(glyphSet) == (
+            GlyphCoordinates([(10, 20), (60, 20), (60, 70), (10, 70)]),
+            [3],
+            array.array("B", [1, 1, 1, 1]),
+        )
+
+        # composite glyph with a scaled offset (Apple-style)
+        pen = TTGlyphPen(glyphSet)
+        pen.addComponent("a", (0.5, 0, 0, 0.5, 10, 20))
+        glyphSet["e"] = e = pen.glyph()
+        e.components[0].flags |= SCALED_COMPONENT_OFFSET
+
+        assert e.getCoordinates(glyphSet) == (
+            GlyphCoordinates([(5, 10), (55, 10), (55, 60), (5, 60)]),
+            [3],
+            array.array("B", [1, 1, 1, 1]),
+        )
+
+        # composite glyph where the 2nd and 3rd components use anchor points
+        pen = TTGlyphPen(glyphSet)
+        pen.addComponent("a", (1, 0, 0, 1, 0, 0))
+        glyphSet["f"] = f = pen.glyph()
+
+        comp1 = GlyphComponent()
+        comp1.glyphName = "a"
+        # align the new component's pt 0 to pt 2 of contour points added so far
+        comp1.firstPt = 2
+        comp1.secondPt = 0
+        comp1.flags = 0
+        f.components.append(comp1)
+
+        comp2 = GlyphComponent()
+        comp2.glyphName = "a"
+        # align the new component's pt 0 to pt 6 of contour points added so far
+        comp2.firstPt = 6
+        comp2.secondPt = 0
+        comp2.transform = [[0.707107, 0.707107], [-0.707107, 0.707107]]  # rotate 45 deg
+        comp2.flags = WE_HAVE_A_TWO_BY_TWO
+        f.components.append(comp2)
+
+        coords, end_pts, flags = f.getCoordinates(glyphSet)
+        assert end_pts == [3, 7, 11]
+        assert flags == array.array("B", [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
+        assert list(sum(coords, ())) == pytest.approx(
+            [
+                0, 0,
+                100, 0,
+                100, 100,
+                0, 100,
+                100, 100,
+                200, 100,
+                200, 200,
+                100, 200,
+                200, 200,
+                270.7107, 270.7107,
+                200.0, 341.4214,
+                129.2893, 270.7107,
+            ]
+        )
+
+
 class GlyphComponentTest:

@@ -456,6 +589,29 @@ class GlyphComponentTest:
     ):
         assert value == pytest.approx(expected)

+    def test_toXML_reference_points(self):
+        comp = GlyphComponent()
+        comp.glyphName = "a"
+        comp.flags = 0
+        comp.firstPt = 1
+        comp.secondPt = 2
+
+        assert getXML(comp.toXML) == [
+            '<component glyphName="a" firstPt="1" secondPt="2" flags="0x0"/>'
+        ]
+
+    def test_fromXML_reference_points(self):
+        comp = GlyphComponent()
+        for name, attrs, content in parseXML(
+            ['<component glyphName="a" firstPt="1" secondPt="2" flags="0x0"/>']
+        ):
+            comp.fromXML(name, attrs, content, ttFont=None)
+
+        assert comp.glyphName == "a"
+        assert comp.flags == 0
+        assert (comp.firstPt, comp.secondPt) == (1, 2)
+        assert not hasattr(comp, "transform")
+
+
 if __name__ == "__main__":
     import sys
@@ -1199,6 +1199,24 @@ class WOFF2RoundtripTest(object):
         assert tmp.getvalue() == tmp2.getvalue()
         assert ttFont2.reader.flavorData.transformedTables == {"hmtx"}

+    def test_roundtrip_no_glyf_and_loca_tables(self):
+        ttx = os.path.join(
+            os.path.dirname(current_dir), "subset", "data", "google_color.ttx"
+        )
+        ttFont = ttLib.TTFont()
+        ttFont.importXML(ttx)
+
+        assert "glyf" not in ttFont
+        assert "loca" not in ttFont
+
+        ttFont.flavor = "woff2"
+        tmp = BytesIO()
+        ttFont.save(tmp)
+
+        tmp2, ttFont2 = self.roundtrip(tmp)
+        assert tmp.getvalue() == tmp2.getvalue()
+        assert ttFont.flavor == "woff2"
+
+
 class MainTest(object):
Tests/varLib/data/VarLibLocationTest.designspace (new file, 26 lines)
@@ -0,0 +1,26 @@
+<?xml version='1.0' encoding='utf-8'?>
+<designspace format="4.0">
+  <axes>
+    <axis default="100" maximum="900" minimum="100" name="weight" tag="wght">
+      <map input="500" output="105"/>
+      <map input="300" output="57"/>
+      <map input="900" output="158"/>
+      <map input="100" output="0"/>
+    </axis>
+    <axis default="50" maximum="100" minimum="0" name="width" tag="wdth" />
+  </axes>
+  <sources>
+    <source filename="A.ufo">
+      <location>
+        <dimension name="weight" xvalue="0" />
+        <dimension name="width" xvalue="50" />
+      </location>
+    </source>
+  </sources>
+  <instances>
+    <instance filename="C.ufo" familyname="C" stylename="CCC">
+      <location>
+      </location>
+    </instance>
+  </instances>
+</designspace>
@@ -1,6 +1,6 @@
 from fontTools.misc.py23 import *
 from fontTools.varLib.models import (
-    normalizeLocation, supportScalar, VariationModel)
+    normalizeLocation, supportScalar, VariationModel, VariationModelError)
 import pytest


@@ -145,7 +145,7 @@ class VariationModelTest(object):
         assert model.deltaWeights == deltaWeights

     def test_init_duplicate_locations(self):
-        with pytest.raises(ValueError, match="locations must be unique"):
+        with pytest.raises(VariationModelError, match="Locations must be unique."):
             VariationModel(
                 [
                     {"foo": 0.0, "bar": 0.0},
@@ -1,6 +1,7 @@
 from fontTools.misc.py23 import *
 from fontTools.ttLib import TTFont, newTable
-from fontTools.varLib import build
+from fontTools.varLib import build, load_designspace
+from fontTools.varLib.errors import VarLibValidationError
 from fontTools.varLib.mutator import instantiateVariableFont
 from fontTools.varLib import main as varLib_main, load_masters
 from fontTools.varLib import set_default_weight_width_slant
@@ -728,6 +729,13 @@ class BuildTest(unittest.TestCase):
             ("B", "D"): 40,
         }

+    def test_designspace_fill_in_location(self):
+        ds_path = self.get_test_input("VarLibLocationTest.designspace")
+        ds = DesignSpaceDocument.fromfile(ds_path)
+        ds_loaded = load_designspace(ds)
+
+        assert ds_loaded.instances[0].location == {"weight": 0, "width": 50}
+
+
 def test_load_masters_layerName_without_required_font():
     ds = DesignSpaceDocument()
@@ -737,7 +745,7 @@ def test_load_masters_layerName_without_required_font():
     ds.addSource(s)

     with pytest.raises(
-        AttributeError,
+        VarLibValidationError,
         match="specified a layer name but lacks the required TTFont object",
     ):
         load_masters(ds)
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 4.3.1.dev0
+current_version = 4.4.2.dev0
 commit = True
 tag = False
 tag_name = {new_version}

setup.py (2 changed lines)
@@ -345,7 +345,7 @@ def find_data_files(manpath="share/man"):

 setup(
     name="fonttools",
-    version="4.3.1.dev0",
+    version="4.4.2.dev0",
     description="Tools to manipulate font files",
     author="Just van Rossum",
     author_email="just@letterror.com",