Merge remote-tracking branch 'origin/master' into add-feature-variations
Commit: e35151a051

.gitignore (vendored)
@@ -24,3 +24,4 @@ htmlcov/
 # OSX Finder
 .DS_Store
 
+.pytest_cache
@@ -13,7 +13,7 @@ A couple of differences between things that use designspaces:
 
 - Varlib does not support anisotropic interpolations.
 - MutatorMath and Superpolator will extrapolate over the boundaries of
-  the axes. Varlib can not.
+  the axes. Varlib can not (at the moment).
 - Varlib requires much less data to define an instance than
   MutatorMath.
 - The goals of Varlib and MutatorMath are different, so not all
@@ -83,21 +83,13 @@ dictionary, ``obj.stylename`` and ``obj.localisedStyleName['en']``.
 
 Rules
 *****
 
-**The ``rule`` element is experimental.** Some ideas behind how rules
-could work in designspaces come from Superpolator. Such rules can maybe
-be used to describe some of the conditional GSUB functionality of
-OpenType 1.8. The definition of a rule is not that complicated. A rule
-has a name, and it has a number of conditions. The rule also contains a
-list of glyphname pairs: the glyphs that need to be substituted.
+Rules describe designspace areas in which one glyph should be replaced by another.
+A rule has a name and a number of conditionsets. The rule also contains a list of
+glyphname pairs: the glyphs that need to be substituted. For a rule to be triggered
+**only one** of the conditionsets needs to be true, ``OR``. Within a conditionset
+**all** conditions need to be true, ``AND``.
 
-Variable font instances
-=======================
-
-- In an variable font the substitution happens at run time: there are
-  no changes in the font, only in the sequence of glyphnames that is
-  rendered.
-- The infrastructure to get this rule data in a variable font needs to
-  be built.
+The ``sub`` element contains a pair of glyphnames. The ``name`` attribute is the glyph that should be visible when the rule evaluates to **False**. The ``with`` attribute is the glyph that should be visible when the rule evaluates to **True**.
 
 UFO instances
 =============
@@ -129,6 +121,8 @@ Attributes
 - ``path``: string. Absolute path to the source file, calculated from
   the document path and the string in the filename attr. MutatorMath +
   Varlib.
+- ``layerName``: string. The name of the layer in the source to look for
+  outline data. Default ``None`` which means ``foreground``.
 - ``font``: Any Python object. Optional. Points to a representation of
   this source font that is loaded in memory, as a Python object
   (e.g. a ``defcon.Font`` or a ``fontTools.ttFont.TTFont``). The default
@@ -136,7 +130,7 @@ Attributes
   writer will not use this attribute. It is up to the user of
   ``designspaceLib`` to either load the resource identified by ``filename``
   and store it in this field, or write the contents of this field to the
-  disk and make ```filename`` point to that.
+  disk and make ``filename`` point to that.
 - ``name``: string. Optional. Unique identifier name for this source,
   if there is one or more ``instance.glyph`` elements in the document.
   MutatorMath.
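As an aside, here is a minimal sketch of how the ``font`` attribute described above might be used; it assumes ``defcon`` is installed, the paths are made up, and keeping ``filename`` and ``font`` in sync remains the caller's responsibility:

.. code:: python

    from fontTools.designspaceLib import SourceDescriptor
    import defcon  # assumption: defcon is available

    s = SourceDescriptor()
    s.path = "/absolute/path/to/MyFont-Regular.ufo"   # hypothetical path
    s.filename = "masters/MyFont-Regular.ufo"         # hypothetical relative path
    # The designspace writer never loads or saves this object itself.
    s.font = defcon.Font(s.path)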
@@ -310,18 +304,21 @@ AxisDescriptor object
 
 RuleDescriptor object
 =====================
 
-- ``name``: string. Unique name for this rule. Will be used to
+- ``name``: string. Unique name for this rule. Can be used to
   reference this rule data.
 - ``conditions``: list of dicts with condition data.
 - Each condition specifies the axis name it is active on and the values
   between which the condition is true.
+- ``conditionSets``: a list of conditionsets
+- Each conditionset is a list of conditions.
+- Each condition is a dict with ``name``, ``minimum`` and ``maximum`` keys.
 - ``subs``: list of substitutions
 - Each substitution is stored as tuples of glyphnames, e.g. ("a", "a.alt").
 
 .. code:: python
 
     r1 = RuleDescriptor()
     r1.name = "unique.rule.name"
-    r1.conditions.append(dict(name="weight", minimum=-10, maximum=10))
-    r1.conditions.append(dict(name="width", minimum=-10, maximum=10))
+    r1.conditionSets.append([dict(name="weight", minimum=-10, maximum=10), dict(...)])
+    r1.conditionSets.append([dict(...), dict(...)])
     r1.subs.append(("a", "a.alt"))
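For completeness, a small sketch of attaching such a rule to a document; it assumes an existing ``DesignSpaceDocument`` and that an ``addRule()`` method is available, analogous to ``addSource()`` (appending to ``doc.rules`` directly would have the same effect):

.. code:: python

    from fontTools.designspaceLib import DesignSpaceDocument, RuleDescriptor

    doc = DesignSpaceDocument()
    r1 = RuleDescriptor()
    r1.name = "unique.rule.name"
    r1.conditionSets.append([dict(name="weight", minimum=-10, maximum=10)])
    r1.subs.append(("a", "a.alt"))
    doc.addRule(r1)   # assumed helper, analogous to addSource()/addInstance()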
 
 .. _subclassing-descriptors:
 
@@ -356,6 +353,8 @@ Document xml structure
 
 - The ``axes`` element contains one or more ``axis`` elements.
 - The ``sources`` element contains one or more ``source`` elements.
 - The ``instances`` element contains one or more ``instance`` elements.
+- The ``rules`` element contains one or more ``rule`` elements.
+- The ``lib`` element contains arbitrary data.
 
 .. code:: xml
 
@@ -364,7 +363,7 @@ Document xml structure
     <axes>
        <!-- define axes here -->
        <axis../>
    </axes>
    <sources>
        <!-- define masters here -->
        <source../>
@@ -373,6 +372,10 @@ Document xml structure
        <!-- define instances here -->
        <instance../>
    </instances>
+    <rules>
+       <!-- define rules here -->
+       <rule../>
+    </rules>
    <lib>
        <dict>
            <!-- store custom data here -->
@@ -512,8 +515,8 @@ Example
 
 .. code:: xml
 
    <location>
        <dimension name="width" xvalue="0.000000" />
        <dimension name="weight" xvalue="0.000000" yvalue="0.003" />
    </location>
 
 .. 3-source-element:
@@ -538,6 +541,8 @@ Attributes
 
 - ``filename``: required, string. A path to the source file, relative
   to the root path of this document. The path can be at the same level
   as the document or lower.
+- ``layer``: optional, string. The name of the layer in the source file.
+  If no layer attribute is given assume the foreground layer should be used.
 
 .. 31-lib-element:
 
@@ -556,7 +561,7 @@ There are two meanings for the ``lib`` element:
 
 2. Document and instance lib
 - Example:
 
-.. code:: python
+.. code:: xml
 
    <lib>
        <dict>
@@ -568,6 +573,7 @@ There are two meanings for the ``lib`` element:
 
 - Child element of ``designspace`` and ``instance``
 - Contains arbitrary data about the whole document or about a specific
   instance.
+- Items in the dict need to use `reverse domain name notation <https://en.wikipedia.org/wiki/Reverse_domain_name_notation>`__
 
 .. 32-info-element:
 
@@ -641,14 +647,14 @@ Example
 
    <source familyname="MasterFamilyName" filename="masters/masterTest1.ufo" name="master.ufo1" stylename="MasterStyleNameOne">
        <lib copy="1" />
        <features copy="1" />
        <info copy="1" />
        <glyph mute="1" name="A" />
        <glyph mute="1" name="Z" />
        <location>
            <dimension name="width" xvalue="0.000000" />
            <dimension name="weight" xvalue="0.000000" />
        </location>
    </source>
 
 .. 4-instance-element:
 
@@ -756,10 +762,12 @@ Attributes
 
 - Defines a single alternative master for this glyph.
 
-#4.3 Localised names for intances Localised names for instances can be
-included with these simple elements with an xml:lang attribute: `XML
-language
-definition <https://www.w3.org/International/questions/qa-when-xmllang.en>`__
+4.3 Localised names for instances
+=================================
+
+Localised names for instances can be included with these simple elements
+with an ``xml:lang`` attribute:
+`XML language definition <https://www.w3.org/International/questions/qa-when-xmllang.en>`__
 
 - stylename
 - familyname
@@ -800,14 +808,14 @@ Example
 
    <instance familyname="InstanceFamilyName" filename="instances/instanceTest2.ufo" name="instance.ufo2" postscriptfontname="InstancePostscriptName" stylemapfamilyname="InstanceStyleMapFamilyName" stylemapstylename="InstanceStyleMapStyleName" stylename="InstanceStyleName">
        <location>
            <dimension name="width" xvalue="400" yvalue="300" />
            <dimension name="weight" xvalue="66" />
        </location>
        <glyphs>
            <glyph name="arrow2" />
            <glyph name="arrow" unicode="0x4d2 0x4d3">
                <location>
                    <dimension name="width" xvalue="100" />
                    <dimension name="weight" xvalue="120" />
                </location>
                <note>A note about this glyph</note>
                <masters>
@@ -836,37 +844,47 @@ Example
 =================
 
 - Container for ``rule`` elements
 - The rules are evaluated in this order.
 
 .. 51-rule-element:
 
 5.1 rule element
 ================
 
-- Defines a named rule with a set of conditions.
-- The conditional substitutions specifed in the OpenType specification
-  can be much more elaborate than what it recorded in this element.
-- So while authoring tools are welcome to use the ``sub`` element,
-  they're intended as preview / example / test substitutions for the
-  rule.
+- Defines a named rule.
+- Each ``rule`` element contains one or more ``conditionset`` elements.
+- Only one ``conditionset`` needs to be true to trigger the rule.
+- All conditions in a ``conditionset`` must be true to make the ``conditionset`` true.
+- For backwards compatibility a ``rule`` can contain ``condition`` elements outside of a conditionset. These are then understood to be part of a single, implied, ``conditionset``. Note: these conditions should be written wrapped in a conditionset.
+- A rule element needs to contain one or more ``sub`` elements in order to be compiled to a variable font.
+- Rules without sub elements should be ignored when compiling a font.
+- For authoring tools it might be necessary to save designspace files without ``sub`` elements just because the work is incomplete.
 
 .. attributes-11:
 
 Attributes
 ----------
 
-- ``name``: required, string. A unique name that can be used to
-  identify this rule if it needs to be referenced elsewhere.
+- ``name``: optional, string. A unique name that can be used to
+  identify this rule if it needs to be referenced elsewhere. The name
+  is not important for compiling variable fonts.
 
 .. 511-condition-element:
 
-5.1.1 condition element
-=======================
+5.1.1 conditionset element
+==========================
 
 - Child element of ``rule``
-- Between the ``minimum`` and ``maximum`` this rule is ``true``.
+- Contains one or more ``condition`` elements.
+
+.. 512-condition-element:
+
+5.1.2 condition element
+=======================
+
+- Child element of ``conditionset``
+- Between the ``minimum`` and ``maximum`` this rule is ``True``.
 - If ``minimum`` is not available, assume it is ``axis.minimum``.
 - If ``maximum`` is not available, assume it is ``axis.maximum``.
-- One or the other or both need to be present.
+- The condition must contain at least a minimum or maximum or both.
 
 .. attributes-12:
 
@@ -878,15 +896,13 @@ Attributes
 
 - ``minimum``: number, required*. The low value.
 - ``maximum``: number, required*. The high value.
 
-.. 512-sub-element:
+.. 513-sub-element:
 
-5.1.2 sub element
+5.1.3 sub element
 =================
 
 - Child element of ``rule``.
-- Defines which glyphs to replace when the rule is true.
-- This element is optional. It may be useful for editors to know which
-  glyphs can be used to preview the axis.
+- Defines which glyph to replace when the rule evaluates to **True**.
 
 .. attributes-13:
 
@@ -895,7 +911,7 @@ Attributes
 
 - ``name``: string, required. The name of the glyph this rule looks
   for.
-- ``byname``: string, required. The name of the glyph it is replaced
+- ``with``: string, required. The name of the glyph it is replaced
   with.
 
 .. example-7:
@@ -903,13 +919,34 @@ Attributes
 
 Example
 -------
 
+Example with an implied ``conditionset``. Here the conditions are not
+contained in a conditionset.
+
 .. code:: xml
 
    <rules>
        <rule name="named.rule.1">
            <condition minimum="250" maximum="750" name="weight" />
            <condition minimum="50" maximum="100" name="width" />
-            <sub name="dollar" byname="dollar.alt"/>
+            <sub name="dollar" with="dollar.alt"/>
        </rule>
    </rules>
 
+Example with ``conditionsets``. All conditions in a conditionset must be true.
+
+.. code:: xml
+
+    <rules>
+        <rule name="named.rule.2">
+            <conditionset>
+                <condition minimum="250" maximum="750" name="weight" />
+                <condition minimum="50" maximum="100" name="width" />
+            </conditionset>
+            <conditionset>
+                <condition ... />
+                <condition ... />
+            </conditionset>
+            <sub name="dollar" with="dollar.alt"/>
+        </rule>
+    </rules>
 
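To illustrate the ``OR``/``AND`` semantics of conditionsets described earlier, here is a small, hypothetical helper (not part of ``designspaceLib``); it assumes every condition carries explicit ``minimum`` and ``maximum`` values:

.. code:: python

    def rule_applies(conditionSets, location):
        # A rule triggers if ANY of its conditionsets matches (OR); a
        # conditionset matches only if ALL of its conditions hold (AND).
        return any(
            all(
                c["minimum"] <= location[c["name"]] <= c["maximum"]
                for c in conditionSet
            )
            for conditionSet in conditionSets
        )

    # Using the data from "named.rule.2" above:
    conditionSets = [[
        dict(name="weight", minimum=250, maximum=750),
        dict(name="width", minimum=50, maximum=100),
    ]]
    assert rule_applies(conditionSets, dict(weight=400, width=75))
    assert not rule_applies(conditionSets, dict(weight=800, width=75))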
@@ -59,6 +59,8 @@ Make a descriptor object and add it to the document.
 
 - The ``tag`` attribute is one of the registered `OpenType
   Variation Axis
   Tags <https://www.microsoft.com/typography/otspec/fvar.htm#VAT>`__
+- The default master is expected at the intersection of all
+  default values of all axes.
 
 Option: add label names
 -----------------------
@@ -111,6 +113,7 @@ outline geometry, kerning and font.info that we want to work with.
 
 - The **path** attribute is the absolute path to an existing UFO.
 - The **name** attribute is a unique name for this source used to keep
   track of it.
+- The **layerName** attribute is the name of the UFO3 layer. Default None for ``foreground``.
 
 So go ahead and add another master:
 
@@ -121,6 +124,7 @@ So go ahead and add another master:
 
     s1.name = "master.bold"
     s1.location = dict(weight=1000)
     doc.addSource(s1)
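At this point the document can already be saved to disk; a short sketch (the file name is only an example):

.. code:: python

    # Write the axes and sources collected so far to a designspace file.
    doc.write("MyVariable.designspace")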
 
 Option: exclude glyphs
 ----------------------
@@ -5,6 +5,6 @@ from fontTools.misc.loggingTools import configLogger
 
 log = logging.getLogger(__name__)
 
-version = __version__ = "3.25.1.dev0"
+version = __version__ = "3.26.1.dev0"
 
 __all__ = ["version", "log", "configLogger"]
@ -244,7 +244,7 @@ class CFFFontSet(object):
|
||||
if key in topDict.rawDict:
|
||||
del topDict.rawDict[key]
|
||||
if hasattr(topDict, key):
|
||||
exec("del topDict.%s" % (key))
|
||||
delattr(topDict, key)
|
||||
|
||||
if not hasattr(topDict, "FDArray"):
|
||||
fdArray = topDict.FDArray = FDArrayIndex()
|
||||
@ -257,6 +257,7 @@ class CFFFontSet(object):
|
||||
else:
|
||||
charStrings.fdArray = fdArray
|
||||
fontDict = FontDict()
|
||||
fontDict.setCFF2(True)
|
||||
fdArray.append(fontDict)
|
||||
fontDict.Private = privateDict
|
||||
privateOpOrder = buildOrder(privateDictOperators2)
|
||||
@ -267,12 +268,20 @@ class CFFFontSet(object):
|
||||
# print "Removing private dict", key
|
||||
del privateDict.rawDict[key]
|
||||
if hasattr(privateDict, key):
|
||||
exec("del privateDict.%s" % (key))
|
||||
delattr(privateDict, key)
|
||||
# print "Removing privateDict attr", key
|
||||
else:
|
||||
# clean up the PrivateDicts in the fdArray
|
||||
fdArray = topDict.FDArray
|
||||
privateOpOrder = buildOrder(privateDictOperators2)
|
||||
for fontDict in fdArray:
|
||||
fontDict.setCFF2(True)
|
||||
for key in fontDict.rawDict.keys():
|
||||
if key not in fontDict.order:
|
||||
del fontDict.rawDict[key]
|
||||
if hasattr(fontDict, key):
|
||||
delattr(fontDict, key)
|
||||
|
||||
privateDict = fontDict.Private
|
||||
for entry in privateDictOperators:
|
||||
key = entry[1]
|
||||
@ -281,7 +290,7 @@ class CFFFontSet(object):
|
||||
# print "Removing private dict", key
|
||||
del privateDict.rawDict[key]
|
||||
if hasattr(privateDict, key):
|
||||
exec("del privateDict.%s" % (key))
|
||||
delattr(privateDict, key)
|
||||
# print "Removing privateDict attr", key
|
||||
# At this point, the Subrs and Charstrings are all still T2Charstring class
|
||||
# easiest to fix this by compiling, then decompiling again
|
||||
@ -2381,13 +2390,24 @@ class FontDict(BaseDict):
|
||||
defaults = {}
|
||||
converters = buildConverters(topDictOperators)
|
||||
compilerClass = FontDictCompiler
|
||||
order = ['FontName', 'FontMatrix', 'Weight', 'Private']
|
||||
orderCFF = ['FontName', 'FontMatrix', 'Weight', 'Private']
|
||||
orderCFF2 = ['Private']
|
||||
decompilerClass = TopDictDecompiler
|
||||
|
||||
def __init__(self, strings=None, file=None, offset=None,
|
||||
GlobalSubrs=None, isCFF2=None, vstore=None):
|
||||
super(FontDict, self).__init__(strings, file, offset, isCFF2=isCFF2)
|
||||
self.vstore = vstore
|
||||
self.setCFF2(isCFF2)
|
||||
|
||||
def setCFF2(self, isCFF2):
|
||||
# isCFF2 may be None.
|
||||
if isCFF2:
|
||||
self.order = self.orderCFF2
|
||||
self._isCFF2 = True
|
||||
else:
|
||||
self.order = self.orderCFF
|
||||
self._isCFF2 = False
|
||||
|
||||
|
||||
class PrivateDict(BaseDict):
|
||||
|
(File diff suppressed because it is too large.)
@ -8,6 +8,76 @@ import itertools
|
||||
|
||||
SHIFT = " " * 4
|
||||
|
||||
__all__ = [
|
||||
'AlternateSubstStatement',
|
||||
'Anchor',
|
||||
'AnchorDefinition',
|
||||
'AnonymousBlock',
|
||||
'AttachStatement',
|
||||
'BaseAxis',
|
||||
'Block',
|
||||
'BytesIO',
|
||||
'CVParametersNameStatement',
|
||||
'ChainContextPosStatement',
|
||||
'ChainContextSubstStatement',
|
||||
'CharacterStatement',
|
||||
'Comment',
|
||||
'CursivePosStatement',
|
||||
'Element',
|
||||
'Expression',
|
||||
'FeatureBlock',
|
||||
'FeatureFile',
|
||||
'FeatureLibError',
|
||||
'FeatureNameStatement',
|
||||
'FeatureReferenceStatement',
|
||||
'FontRevisionStatement',
|
||||
'GlyphClass',
|
||||
'GlyphClassDefStatement',
|
||||
'GlyphClassDefinition',
|
||||
'GlyphClassName',
|
||||
'GlyphName',
|
||||
'HheaField',
|
||||
'IgnorePosStatement',
|
||||
'IgnoreSubstStatement',
|
||||
'IncludeStatement',
|
||||
'LanguageStatement',
|
||||
'LanguageSystemStatement',
|
||||
'LigatureCaretByIndexStatement',
|
||||
'LigatureCaretByPosStatement',
|
||||
'LigatureSubstStatement',
|
||||
'LookupBlock',
|
||||
'LookupFlagStatement',
|
||||
'LookupReferenceStatement',
|
||||
'MarkBasePosStatement',
|
||||
'MarkClass',
|
||||
'MarkClassDefinition',
|
||||
'MarkClassName',
|
||||
'MarkLigPosStatement',
|
||||
'MarkMarkPosStatement',
|
||||
'MultipleSubstStatement',
|
||||
'NameRecord',
|
||||
'NestedBlock',
|
||||
'OS2Field',
|
||||
'OrderedDict',
|
||||
'PairPosStatement',
|
||||
'Py23Error',
|
||||
'ReverseChainSingleSubstStatement',
|
||||
'ScriptStatement',
|
||||
'SimpleNamespace',
|
||||
'SinglePosStatement',
|
||||
'SingleSubstStatement',
|
||||
'SizeParameters',
|
||||
'Statement',
|
||||
'StringIO',
|
||||
'SubtableStatement',
|
||||
'TableBlock',
|
||||
'Tag',
|
||||
'UnicodeIO',
|
||||
'ValueRecord',
|
||||
'ValueRecordDefinition',
|
||||
'VheaField',
|
||||
]
|
||||
|
||||
|
||||
def deviceToString(device):
|
||||
if device is None:
|
||||
|
@@ -500,8 +500,12 @@ class LogMixin(object):
 
     @property
     def log(self):
-        name = ".".join([self.__class__.__module__, self.__class__.__name__])
-        return logging.getLogger(name)
+        if not hasattr(self, "_log"):
+            name = ".".join(
+                (self.__class__.__module__, self.__class__.__name__)
+            )
+            self._log = logging.getLogger(name)
+        return self._log
 
 
 def deprecateArgument(name, msg, category=UserWarning):
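For context, a minimal usage sketch of ``LogMixin`` (the subclass name here is made up); the change above only caches the logger so that repeated ``self.log`` lookups reuse one ``logging.Logger`` instance:

    from fontTools.misc.loggingTools import LogMixin

    class GlyphLoader(LogMixin):            # hypothetical subclass
        def load(self, name):
            # self.log is a logger named "<module>.<class>"
            self.log.info("loading glyph %s", name)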
@ -43,13 +43,14 @@ class PSError(Exception): pass
|
||||
|
||||
class PSTokenizer(object):
|
||||
|
||||
def __init__(self, buf=b''):
|
||||
def __init__(self, buf=b'', encoding="ascii"):
|
||||
# Force self.buf to be a byte string
|
||||
buf = tobytes(buf)
|
||||
self.buf = buf
|
||||
self.len = len(buf)
|
||||
self.pos = 0
|
||||
self.closed = False
|
||||
self.encoding = encoding
|
||||
|
||||
def read(self, n=-1):
|
||||
"""Read at most 'n' bytes from the buffer, or less if the read
|
||||
@ -122,7 +123,7 @@ class PSTokenizer(object):
|
||||
_, nextpos = m.span()
|
||||
token = buf[pos:nextpos]
|
||||
self.pos = pos + len(token)
|
||||
token = tostr(token, encoding='ascii')
|
||||
token = tostr(token, encoding=self.encoding)
|
||||
return tokentype, token
|
||||
|
||||
def skipwhite(self, whitematch=skipwhiteRE.match):
|
||||
@ -145,9 +146,10 @@ class PSTokenizer(object):
|
||||
|
||||
class PSInterpreter(PSOperators):
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, encoding="ascii"):
|
||||
systemdict = {}
|
||||
userdict = {}
|
||||
self.encoding = encoding
|
||||
self.dictstack = [systemdict, userdict]
|
||||
self.stack = []
|
||||
self.proclevel = 0
|
||||
@ -174,7 +176,7 @@ class PSInterpreter(PSOperators):
|
||||
self.suckoperators(systemdict, baseclass)
|
||||
|
||||
def interpret(self, data, getattr=getattr):
|
||||
tokenizer = self.tokenizer = PSTokenizer(data)
|
||||
tokenizer = self.tokenizer = PSTokenizer(data, self.encoding)
|
||||
getnexttoken = tokenizer.getnexttoken
|
||||
do_token = self.do_token
|
||||
handle_object = self.handle_object
|
||||
@ -345,13 +347,13 @@ def unpack_item(item):
|
||||
newitem = item.value
|
||||
return newitem
|
||||
|
||||
def suckfont(data):
|
||||
def suckfont(data, encoding="ascii"):
|
||||
m = re.search(br"/FontName\s+/([^ \t\n\r]+)\s+def", data)
|
||||
if m:
|
||||
fontName = m.group(1)
|
||||
else:
|
||||
fontName = None
|
||||
interpreter = PSInterpreter()
|
||||
interpreter = PSInterpreter(encoding=encoding)
|
||||
interpreter.interpret(b"/Helvetica 4 dict dup /Encoding StandardEncoding put definefont pop")
|
||||
interpreter.interpret(data)
|
||||
fontdir = interpreter.dictstack[0]['FontDirectory'].value
|
||||
|
@ -250,7 +250,7 @@ def open(file, mode='r', buffering=-1, encoding=None, errors=None,
|
||||
file, mode, buffering, encoding, errors, newline, closefd)
|
||||
|
||||
|
||||
# always use iterator for 'range' on both py 2 and 3
|
||||
# always use iterator for 'range' and 'zip' on both py 2 and 3
|
||||
try:
|
||||
range = xrange
|
||||
except NameError:
|
||||
@ -259,6 +259,11 @@ except NameError:
|
||||
def xrange(*args, **kwargs):
|
||||
raise Py23Error("'xrange' is not defined. Use 'range' instead.")
|
||||
|
||||
try:
|
||||
from itertools import izip as zip
|
||||
except ImportError:
|
||||
zip = zip
|
||||
|
||||
|
||||
import math as _math
|
||||
|
||||
|
@ -212,8 +212,8 @@ Font table options:
|
||||
Add to the set of tables that will not be subsetted.
|
||||
By default, the following tables are included in this list, as
|
||||
they do not need subsetting (ignore the fact that 'loca' is listed
|
||||
here): 'gasp', 'head', 'hhea', 'maxp', 'vhea', 'OS/2', 'loca',
|
||||
'name', 'cvt ', 'fpgm', 'prep', 'VMDX', 'DSIG', 'CPAL', 'MVAR', 'STAT'.
|
||||
here): 'gasp', 'head', 'hhea', 'maxp', 'vhea', 'OS/2', 'loca', 'name',
|
||||
'cvt ', 'fpgm', 'prep', 'VMDX', 'DSIG', 'CPAL', 'MVAR', 'cvar', 'STAT'.
|
||||
By default, tables that the tool does not know how to subset and are not
|
||||
specified here will be dropped from the font, unless --passthrough-tables
|
||||
option is passed.
|
||||
@ -311,6 +311,8 @@ Other font-specific options:
|
||||
Update the 'OS/2 xAvgCharWidth' field after subsetting.
|
||||
--no-recalc-average-width
|
||||
Don't change the 'OS/2 xAvgCharWidth' field. [default]
|
||||
--font-number=<number>
|
||||
Select font number for TrueType Collection (.ttc/.otc), starting from 0.
|
||||
|
||||
Application options:
|
||||
--verbose
|
||||
@ -1615,11 +1617,16 @@ def subset_glyphs(self, s):
|
||||
table.AttachList.GlyphCount = len(table.AttachList.AttachPoint)
|
||||
if hasattr(table, "MarkGlyphSetsDef") and table.MarkGlyphSetsDef:
|
||||
for coverage in table.MarkGlyphSetsDef.Coverage:
|
||||
coverage.subset(glyphs)
|
||||
if coverage:
|
||||
coverage.subset(glyphs)
|
||||
|
||||
# TODO: The following is disabled. If enabling, we need to go fixup all
|
||||
# lookups that use MarkFilteringSet and map their set.
|
||||
# indices = table.MarkGlyphSetsDef.Coverage = \
|
||||
# [c for c in table.MarkGlyphSetsDef.Coverage if c.glyphs]
|
||||
# TODO: The following is disabled, as ots doesn't like it. Phew...
|
||||
# https://github.com/khaledhosny/ots/issues/172
|
||||
# table.MarkGlyphSetsDef.Coverage = [c if c.glyphs else None for c in table.MarkGlyphSetsDef.Coverage]
|
||||
return True
|
||||
|
||||
|
||||
@ -1635,8 +1642,6 @@ def _pruneGDEF(font):
|
||||
|
||||
# Collect.
|
||||
table.collect_device_varidxes(usedVarIdxes)
|
||||
if 'GSUB' in font:
|
||||
font['GSUB'].table.collect_device_varidxes(usedVarIdxes)
|
||||
if 'GPOS' in font:
|
||||
font['GPOS'].table.collect_device_varidxes(usedVarIdxes)
|
||||
|
||||
@ -1675,7 +1680,8 @@ def prune_post_subset(self, font, options):
|
||||
table.MarkAttachClassDef or
|
||||
table.GlyphClassDef or
|
||||
table.AttachList or
|
||||
(table.Version >= 0x00010002 and table.MarkGlyphSetsDef))
|
||||
(table.Version >= 0x00010002 and table.MarkGlyphSetsDef) or
|
||||
(table.Version >= 0x00010003 and table.VarStore))
|
||||
|
||||
@_add_method(ttLib.getTableClass('kern'))
|
||||
def prune_pre_subset(self, font, options):
|
||||
@ -1765,9 +1771,6 @@ def subset_glyphs(self, s):
|
||||
def subset_glyphs(self, s):
|
||||
table = self.table
|
||||
|
||||
# TODO Handle direct mapping
|
||||
assert table.AdvWidthMap, "File a bug."
|
||||
|
||||
used = set()
|
||||
|
||||
if table.AdvWidthMap:
|
||||
@ -2720,8 +2723,8 @@ class Options(object):
|
||||
'gasp', 'head', 'hhea', 'maxp',
|
||||
'vhea', 'OS/2', 'loca', 'name', 'cvt',
|
||||
'fpgm', 'prep', 'VDMX', 'DSIG', 'CPAL',
|
||||
'MVAR', 'STAT']
|
||||
_hinting_tables_default = ['cvar', 'cvt', 'fpgm', 'prep', 'hdmx', 'VDMX']
|
||||
'MVAR', 'cvar', 'STAT']
|
||||
_hinting_tables_default = ['cvt', 'cvar', 'fpgm', 'prep', 'hdmx', 'VDMX']
|
||||
|
||||
# Based on HarfBuzz shapers
|
||||
_layout_features_groups = {
|
||||
@ -2776,6 +2779,7 @@ class Options(object):
|
||||
self.verbose = False
|
||||
self.timing = False
|
||||
self.xml = False
|
||||
self.font_number = -1
|
||||
|
||||
self.set(**kwargs)
|
||||
|
||||
@ -3108,7 +3112,8 @@ def load_font(fontFile,
|
||||
checkChecksums=checkChecksums,
|
||||
recalcBBoxes=options.recalc_bounds,
|
||||
recalcTimestamp=options.recalc_timestamp,
|
||||
lazy=lazy)
|
||||
lazy=lazy,
|
||||
fontNumber=options.font_number)
|
||||
|
||||
# Hack:
|
||||
#
|
||||
|
@@ -49,11 +49,18 @@ class T1Font(object):
     Type 1 fonts.
     """
 
-    def __init__(self, path=None):
-        if path is not None:
-            self.data, type = read(path)
+    def __init__(self, path, encoding="ascii", kind=None):
+        if kind is None:
+            self.data, _ = read(path)
+        elif kind == "LWFN":
+            self.data = readLWFN(path)
+        elif kind == "PFB":
+            self.data = readPFB(path)
+        elif kind == "OTHER":
+            self.data = readOther(path)
         else:
-            pass # XXX
+            raise ValueError(kind)
+        self.encoding = encoding
 
     def saveAs(self, path, type, dohex=False):
         write(path, self.getData(), type, dohex)
@@ -82,7 +89,7 @@ class T1Font(object):
     def parse(self):
         from fontTools.misc import psLib
         from fontTools.misc import psCharStrings
-        self.font = psLib.suckfont(self.data)
+        self.font = psLib.suckfont(self.data, self.encoding)
         charStrings = self.font["CharStrings"]
         lenIV = self.font["Private"].get("lenIV", 4)
         assert lenIV >= 0
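A short sketch of the new ``T1Font`` constructor arguments introduced above (the file name and encoding are only examples):

    from fontTools import t1Lib

    # Open a PFB explicitly and parse it with a non-default text encoding.
    font = t1Lib.T1Font("MyFont.pfb", encoding="latin-1", kind="PFB")
    font.parse()
    glyph_names = sorted(font.font["CharStrings"].keys())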
@ -1220,6 +1220,9 @@ class GlyphCoordinates(object):
|
||||
def _checkFloat(self, p):
|
||||
if self.isFloat():
|
||||
return p
|
||||
if any(v > 0x7FFF or v < -0x8000 for v in p):
|
||||
self._ensureFloat()
|
||||
return p
|
||||
if any(isinstance(v, float) for v in p):
|
||||
p = [int(v) if int(v) == v else v for v in p]
|
||||
if any(isinstance(v, float) for v in p):
|
||||
@ -1259,7 +1262,6 @@ class GlyphCoordinates(object):
|
||||
del self._a[i]
|
||||
del self._a[i]
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return 'GlyphCoordinates(['+','.join(str(c) for c in self)+'])'
|
||||
|
||||
@ -1284,8 +1286,9 @@ class GlyphCoordinates(object):
|
||||
a = self._a
|
||||
x,y = 0,0
|
||||
for i in range(len(a) // 2):
|
||||
a[2*i ] = x = a[2*i ] + x
|
||||
a[2*i+1] = y = a[2*i+1] + y
|
||||
x = a[2*i ] + x
|
||||
y = a[2*i+1] + y
|
||||
self[i] = (x, y)
|
||||
|
||||
def absoluteToRelative(self):
|
||||
a = self._a
|
||||
@ -1295,8 +1298,7 @@ class GlyphCoordinates(object):
|
||||
dy = a[2*i+1] - y
|
||||
x = a[2*i ]
|
||||
y = a[2*i+1]
|
||||
a[2*i ] = dx
|
||||
a[2*i+1] = dy
|
||||
self[i] = (dx, dy)
|
||||
|
||||
def translate(self, p):
|
||||
"""
|
||||
@ -1305,8 +1307,7 @@ class GlyphCoordinates(object):
|
||||
(x,y) = self._checkFloat(p)
|
||||
a = self._a
|
||||
for i in range(len(a) // 2):
|
||||
a[2*i ] += x
|
||||
a[2*i+1] += y
|
||||
self[i] = (a[2*i] + x, a[2*i+1] + y)
|
||||
|
||||
def scale(self, p):
|
||||
"""
|
||||
@ -1315,8 +1316,7 @@ class GlyphCoordinates(object):
|
||||
(x,y) = self._checkFloat(p)
|
||||
a = self._a
|
||||
for i in range(len(a) // 2):
|
||||
a[2*i ] *= x
|
||||
a[2*i+1] *= y
|
||||
self[i] = (a[2*i] * x, a[2*i+1] * y)
|
||||
|
||||
def transform(self, t):
|
||||
"""
|
||||
@ -1432,8 +1432,8 @@ class GlyphCoordinates(object):
|
||||
other = other._a
|
||||
a = self._a
|
||||
assert len(a) == len(other)
|
||||
for i in range(len(a)):
|
||||
a[i] += other[i]
|
||||
for i in range(len(a) // 2):
|
||||
self[i] = (a[2*i] + other[2*i], a[2*i+1] + other[2*i+1])
|
||||
return self
|
||||
return NotImplemented
|
||||
|
||||
@ -1457,8 +1457,8 @@ class GlyphCoordinates(object):
|
||||
other = other._a
|
||||
a = self._a
|
||||
assert len(a) == len(other)
|
||||
for i in range(len(a)):
|
||||
a[i] -= other[i]
|
||||
for i in range(len(a) // 2):
|
||||
self[i] = (a[2*i] - other[2*i], a[2*i+1] - other[2*i+1])
|
||||
return self
|
||||
return NotImplemented
|
||||
|
||||
|
@ -283,7 +283,7 @@ class OTTableWriter(object):
|
||||
def __eq__(self, other):
|
||||
if type(self) != type(other):
|
||||
return NotImplemented
|
||||
return self.items == other.items
|
||||
return self.longOffset == other.longOffset and self.items == other.items
|
||||
|
||||
def _doneWriting(self, internedTables):
|
||||
# Convert CountData references to data string items
|
||||
@ -610,22 +610,27 @@ class BaseTable(object):
|
||||
if conv.name == "SubStruct":
|
||||
conv = conv.getConverter(reader.tableTag,
|
||||
table["MorphType"])
|
||||
if conv.repeat:
|
||||
if isinstance(conv.repeat, int):
|
||||
countValue = conv.repeat
|
||||
elif conv.repeat in table:
|
||||
countValue = table[conv.repeat]
|
||||
try:
|
||||
if conv.repeat:
|
||||
if isinstance(conv.repeat, int):
|
||||
countValue = conv.repeat
|
||||
elif conv.repeat in table:
|
||||
countValue = table[conv.repeat]
|
||||
else:
|
||||
# conv.repeat is a propagated count
|
||||
countValue = reader[conv.repeat]
|
||||
countValue += conv.aux
|
||||
table[conv.name] = conv.readArray(reader, font, table, countValue)
|
||||
else:
|
||||
# conv.repeat is a propagated count
|
||||
countValue = reader[conv.repeat]
|
||||
countValue += conv.aux
|
||||
table[conv.name] = conv.readArray(reader, font, table, countValue)
|
||||
else:
|
||||
if conv.aux and not eval(conv.aux, None, table):
|
||||
continue
|
||||
table[conv.name] = conv.read(reader, font, table)
|
||||
if conv.isPropagated:
|
||||
reader[conv.name] = table[conv.name]
|
||||
if conv.aux and not eval(conv.aux, None, table):
|
||||
continue
|
||||
table[conv.name] = conv.read(reader, font, table)
|
||||
if conv.isPropagated:
|
||||
reader[conv.name] = table[conv.name]
|
||||
except Exception as e:
|
||||
name = conv.name
|
||||
e.args = e.args + (name,)
|
||||
raise
|
||||
|
||||
if hasattr(self, 'postRead'):
|
||||
self.postRead(table, font)
|
||||
|
@ -482,7 +482,7 @@ class Coverage(FormatSwitchingBaseTable):
|
||||
glyphs.extend(glyphOrder[glyphID] for glyphID in range(startID, endID))
|
||||
else:
|
||||
self.glyphs = []
|
||||
log.warning("Unknown Coverage format: %s" % self.Format)
|
||||
log.warning("Unknown Coverage format: %s", self.Format)
|
||||
|
||||
def preWrite(self, font):
|
||||
glyphs = getattr(self, "glyphs", None)
|
||||
@ -830,7 +830,7 @@ class ClassDef(FormatSwitchingBaseTable):
|
||||
if cls:
|
||||
classDefs[glyphOrder[glyphID]] = cls
|
||||
else:
|
||||
assert 0, "unknown format: %s" % self.Format
|
||||
log.warning("Unknown ClassDef format: %s", self.Format)
|
||||
self.classDefs = classDefs
|
||||
|
||||
def _getClassRanges(self, font):
|
||||
|
@ -493,7 +493,10 @@ class TTFont(object):
|
||||
# glyphs (eg. ligatures or alternates) may not be reachable via cmap,
|
||||
# this naming table will usually not cover all glyphs in the font.
|
||||
# If the font has no Unicode cmap table, reversecmap will be empty.
|
||||
reversecmap = self['cmap'].buildReversed()
|
||||
if 'cmap' in self:
|
||||
reversecmap = self['cmap'].buildReversed()
|
||||
else:
|
||||
reversecmap = {}
|
||||
useCount = {}
|
||||
for i in range(numGlyphs):
|
||||
tempName = glyphOrder[i]
|
||||
@ -507,14 +510,15 @@ class TTFont(object):
|
||||
glyphName = "%s.alt%d" % (glyphName, numUses - 1)
|
||||
glyphOrder[i] = glyphName
|
||||
|
||||
# Delete the temporary cmap table from the cache, so it can
|
||||
# be parsed again with the right names.
|
||||
del self.tables['cmap']
|
||||
self.glyphOrder = glyphOrder
|
||||
if cmapLoading:
|
||||
# restore partially loaded cmap, so it can continue loading
|
||||
# using the proper names.
|
||||
self.tables['cmap'] = cmapLoading
|
||||
if 'cmap' in self:
|
||||
# Delete the temporary cmap table from the cache, so it can
|
||||
# be parsed again with the right names.
|
||||
del self.tables['cmap']
|
||||
self.glyphOrder = glyphOrder
|
||||
if cmapLoading:
|
||||
# restore partially loaded cmap, so it can continue loading
|
||||
# using the proper names.
|
||||
self.tables['cmap'] = cmapLoading
|
||||
|
||||
@staticmethod
|
||||
def _makeGlyphName(codepoint):
|
||||
@ -715,6 +719,9 @@ class _TTGlyphSet(object):
|
||||
return self._glyphType(
|
||||
self, self._glyphs[glyphName], horizontalMetrics, verticalMetrics)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._glyphs)
|
||||
|
||||
def get(self, glyphName, default=None):
|
||||
try:
|
||||
return self[glyphName]
|
||||
|
@ -218,7 +218,6 @@ class Options(object):
|
||||
self.logLevel = logging.INFO
|
||||
if self.mergeFile and self.flavor:
|
||||
raise getopt.GetoptError("-m and --flavor options are mutually exclusive")
|
||||
sys.exit(2)
|
||||
if self.onlyTables and self.skipTables:
|
||||
raise getopt.GetoptError("-t and -x options are mutually exclusive")
|
||||
if self.mergeFile and numFiles > 1:
|
||||
|
@ -169,22 +169,25 @@ def _add_avar(font, axes):
|
||||
return avar
|
||||
|
||||
def _add_stat(font, axes):
|
||||
# for now we just get the axis tags and nameIDs from the fvar,
|
||||
# so we can reuse the same nameIDs which were defined in there.
|
||||
# TODO make use of 'axes' once it adds style attributes info:
|
||||
# https://github.com/LettError/designSpaceDocument/issues/8
|
||||
|
||||
if "STAT" in font:
|
||||
return
|
||||
|
||||
nameTable = font['name']
|
||||
fvarTable = font['fvar']
|
||||
|
||||
STAT = font["STAT"] = newTable('STAT')
|
||||
stat = STAT.table = ot.STAT()
|
||||
stat.Version = 0x00010000
|
||||
stat.Version = 0x00010002
|
||||
|
||||
axisRecords = []
|
||||
for i,a in enumerate(axes.values()):
|
||||
for i, a in enumerate(fvarTable.axes):
|
||||
axis = ot.AxisRecord()
|
||||
axis.AxisTag = Tag(a.tag)
|
||||
# Meh. Reuse fvar nameID!
|
||||
axis.AxisNameID = nameTable.addName(tounicode(a.labelname['en']))
|
||||
axis.AxisTag = Tag(a.axisTag)
|
||||
axis.AxisNameID = a.axisNameID
|
||||
axis.AxisOrdering = i
|
||||
axisRecords.append(axis)
|
||||
|
||||
@ -195,6 +198,10 @@ def _add_stat(font, axes):
|
||||
stat.DesignAxisCount = len(axisRecords)
|
||||
stat.DesignAxisRecord = axisRecordArray
|
||||
|
||||
# for the elided fallback name, we default to the base style name.
|
||||
# TODO make this user-configurable via designspace document
|
||||
stat.ElidedFallbackNameID = 2
|
||||
|
||||
# TODO Move to glyf or gvar table proper
|
||||
def _GetCoordinates(font, glyphName):
|
||||
"""font, glyphName --> glyph coordinates as expected by "gvar" table
|
||||
@ -760,7 +767,7 @@ def build(designspace_filename, master_finder=lambda s:s, exclude=[], optimize=T
|
||||
_merge_OTL(vf, model, master_fonts, axisTags)
|
||||
if 'gvar' not in exclude and 'glyf' in vf:
|
||||
_add_gvar(vf, model, master_fonts, optimize=optimize)
|
||||
if 'cvar' not in exclude:
|
||||
if 'cvar' not in exclude and 'glyf' in vf:
|
||||
_merge_TTHinting(vf, model, master_fonts)
|
||||
|
||||
for tag in exclude:
|
||||
@ -770,27 +777,84 @@ def build(designspace_filename, master_finder=lambda s:s, exclude=[], optimize=T
|
||||
return vf, model, master_ttfs
|
||||
|
||||
|
||||
class MasterFinder(object):
|
||||
|
||||
def __init__(self, template):
|
||||
self.template = template
|
||||
|
||||
def __call__(self, src_path):
|
||||
fullname = os.path.abspath(src_path)
|
||||
dirname, basename = os.path.split(fullname)
|
||||
stem, ext = os.path.splitext(basename)
|
||||
path = self.template.format(
|
||||
fullname=fullname,
|
||||
dirname=dirname,
|
||||
basename=basename,
|
||||
stem=stem,
|
||||
ext=ext,
|
||||
)
|
||||
return os.path.normpath(path)
|
||||
|
||||
|
||||
def main(args=None):
|
||||
from argparse import ArgumentParser
|
||||
from fontTools import configLogger
|
||||
|
||||
parser = ArgumentParser(prog='varLib')
|
||||
parser.add_argument('designspace')
|
||||
parser.add_argument('-o', metavar='OUTPUTFILE', dest='outfile', default=None, help='output file')
|
||||
parser.add_argument('-x', metavar='TAG', dest='exclude', action='append', default=[], help='exclude table')
|
||||
parser.add_argument('--disable-iup', dest='optimize', action='store_false', help='do not perform IUP optimization')
|
||||
parser.add_argument(
|
||||
'-o',
|
||||
metavar='OUTPUTFILE',
|
||||
dest='outfile',
|
||||
default=None,
|
||||
help='output file'
|
||||
)
|
||||
parser.add_argument(
|
||||
'-x',
|
||||
metavar='TAG',
|
||||
dest='exclude',
|
||||
action='append',
|
||||
default=[],
|
||||
help='exclude table'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--disable-iup',
|
||||
dest='optimize',
|
||||
action='store_false',
|
||||
help='do not perform IUP optimization'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--master-finder',
|
||||
default='master_ttf_interpolatable/{stem}.ttf',
|
||||
help=(
|
||||
'templated string used for finding binary font '
|
||||
'files given the source file names defined in the '
|
||||
'designspace document. The following special strings '
|
||||
'are defined: {fullname} is the absolute source file '
|
||||
'name; {basename} is the file name without its '
|
||||
'directory; {stem} is the basename without the file '
|
||||
'extension; {ext} is the source file extension; '
|
||||
'{dirname} is the directory of the absolute file '
|
||||
'name. The default value is "%(default)s".'
|
||||
)
|
||||
)
|
||||
options = parser.parse_args(args)
|
||||
|
||||
# TODO: allow user to configure logging via command-line options
|
||||
configLogger(level="INFO")
|
||||
|
||||
designspace_filename = options.designspace
|
||||
finder = lambda s: s.replace('master_ufo', 'master_ttf_interpolatable').replace('.ufo', '.ttf')
|
||||
finder = MasterFinder(options.master_finder)
|
||||
outfile = options.outfile
|
||||
if outfile is None:
|
||||
outfile = os.path.splitext(designspace_filename)[0] + '-VF.ttf'
|
||||
|
||||
vf, model, master_ttfs = build(designspace_filename, finder, exclude=options.exclude, optimize=options.optimize)
|
||||
vf, model, master_ttfs = build(
|
||||
designspace_filename,
|
||||
finder,
|
||||
exclude=options.exclude,
|
||||
optimize=options.optimize
|
||||
)
|
||||
|
||||
log.info("Saving variation font %s", outfile)
|
||||
vf.save(outfile)
|
||||
|
@ -253,23 +253,24 @@ class VariationModel(object):
|
||||
break
|
||||
if not relevant:
|
||||
continue
|
||||
# Split the box for new master; split in whatever direction
|
||||
# that results in less area-ratio lost.
|
||||
|
||||
# Split the box for new master; split in whatever direction
|
||||
# that has largest range ratio. See commit for details.
|
||||
orderedAxes = [axis for axis in axisOrder if axis in m.keys()]
|
||||
orderedAxes.extend([axis for axis in sorted(m.keys()) if axis not in axisOrder])
|
||||
bestAxis = None
|
||||
bestPercentage = -1
|
||||
for axis in reversed(orderedAxes):
|
||||
for axis in orderedAxes:
|
||||
val = m[axis]
|
||||
assert axis in box
|
||||
lower,locV,upper = box[axis]
|
||||
newLower, newUpper = lower, upper
|
||||
if val < locV:
|
||||
newLower = val
|
||||
percentage = (val - locV) / (lower - locV)
|
||||
elif locV < val:
|
||||
newUpper = val
|
||||
percentage = (newUpper - newLower) / (upper - lower)
|
||||
percentage = (val - locV) / (upper - locV)
|
||||
if percentage > bestPercentage:
|
||||
bestPercentage = percentage
|
||||
bestAxis = axis
|
||||
@ -299,12 +300,7 @@ class VariationModel(object):
|
||||
for i,weights in enumerate(self.deltaWeights):
|
||||
delta = masterValues[mapping[i]]
|
||||
for j,weight in weights.items():
|
||||
try:
|
||||
delta -= out[j] * weight
|
||||
except OverflowError:
|
||||
# if it doesn't fit signed shorts, retry with doubles
|
||||
delta._ensureFloat()
|
||||
delta -= out[j] * weight
|
||||
delta -= out[j] * weight
|
||||
out.append(delta)
|
||||
return out
|
||||
|
||||
|
@ -112,6 +112,17 @@ def instantiateVariableFont(varfont, location, inplace=False):
|
||||
log.info("Building interpolated tables")
|
||||
merger.instantiate()
|
||||
|
||||
if 'name' in varfont:
|
||||
log.info("Pruning name table")
|
||||
exclude = {a.axisNameID for a in fvar.axes}
|
||||
for i in fvar.instances:
|
||||
exclude.add(i.subfamilyNameID)
|
||||
exclude.add(i.postscriptNameID)
|
||||
varfont['name'].names[:] = [
|
||||
n for n in varfont['name'].names
|
||||
if n.nameID not in exclude
|
||||
]
|
||||
|
||||
log.info("Removing variable tables")
|
||||
for tag in ('avar','cvar','fvar','gvar','HVAR','MVAR','VVAR','STAT'):
|
||||
if tag in varfont:
|
||||
|
@ -14,15 +14,23 @@ import sys
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def stops(support, count=20):
|
||||
def stops(support, count=10):
|
||||
a,b,c = support
|
||||
|
||||
return [a + (b - a) * i / count for i in range(count)] + \
|
||||
[b + (c - b) * i / count for i in range(count)] + \
|
||||
[c]
|
||||
|
||||
def plotLocations(locations, axes, axis3D, **kwargs):
|
||||
for loc,color in zip(locations, cycle(pyplot.cm.Set1.colors)):
|
||||
axis3D.plot([loc.get(axes[0], 0)],
|
||||
[loc.get(axes[1], 0)],
|
||||
[1.],
|
||||
'o',
|
||||
color=color,
|
||||
**kwargs)
|
||||
|
||||
def plotLocations(locations, fig, names=None, **kwargs):
|
||||
def plotLocationsSurfaces(locations, fig, names=None, **kwargs):
|
||||
|
||||
assert len(locations[0].keys()) == 2
|
||||
|
||||
@ -43,6 +51,8 @@ def plotLocations(locations, fig, names=None, **kwargs):
|
||||
axis3D.set_title(name)
|
||||
axis3D.set_xlabel(ax1)
|
||||
axis3D.set_ylabel(ax2)
|
||||
pyplot.xlim(-1.,+1.)
|
||||
pyplot.ylim(-1.,+1.)
|
||||
|
||||
Xs = support.get(ax1, (-1.,0.,+1.))
|
||||
Ys = support.get(ax2, (-1.,0.,+1.))
|
||||
@ -53,7 +63,7 @@ def plotLocations(locations, fig, names=None, **kwargs):
|
||||
X.append(x)
|
||||
Y.append(y)
|
||||
Z.append(z)
|
||||
axis3D.plot_wireframe(X, Y, Z, color=color, **kwargs)
|
||||
axis3D.plot(X, Y, Z, color=color, **kwargs)
|
||||
for y in stops(Ys):
|
||||
X, Y, Z = [], [], []
|
||||
for x in Xs:
|
||||
@ -61,14 +71,16 @@ def plotLocations(locations, fig, names=None, **kwargs):
|
||||
X.append(x)
|
||||
Y.append(y)
|
||||
Z.append(z)
|
||||
axis3D.plot_wireframe(X, Y, Z, color=color, **kwargs)
|
||||
axis3D.plot(X, Y, Z, color=color, **kwargs)
|
||||
|
||||
plotLocations(model.locations, [ax1, ax2], axis3D)
|
||||
|
||||
|
||||
def plotDocument(doc, fig, **kwargs):
|
||||
doc.normalize()
|
||||
locations = [s.location for s in doc.sources]
|
||||
names = [s.name for s in doc.sources]
|
||||
plotLocations(locations, fig, names, **kwargs)
|
||||
plotLocationsSurfaces(locations, fig, names, **kwargs)
|
||||
|
||||
|
||||
def main(args=None):
|
||||
@ -97,7 +109,7 @@ def main(args=None):
|
||||
else:
|
||||
axes = [chr(c) for c in range(ord('A'), ord('Z')+1)]
|
||||
locs = [dict(zip(axes, (float(v) for v in s.split(',')))) for s in args]
|
||||
plotLocations(locs, fig)
|
||||
plotLocationsSurfaces(locs, fig)
|
||||
|
||||
pyplot.show()
|
||||
|
||||
|
@ -232,7 +232,6 @@ def Object_collect_device_varidxes(self, varidxes):
|
||||
_visit(self, ot.Device, adder)
|
||||
|
||||
ot.GDEF.collect_device_varidxes = Object_collect_device_varidxes
|
||||
ot.GSUB.collect_device_varidxes = Object_collect_device_varidxes
|
||||
ot.GPOS.collect_device_varidxes = Object_collect_device_varidxes
|
||||
|
||||
def _Device_mapVarIdx(self, mapping, done):
|
||||
|
NEWS.rst
@@ -1,3 +1,29 @@
+3.26.0 (released 2018-05-03)
+----------------------------
+
+- [designspace] Added a new optional ``layer`` attribute to the source element,
+  and a corresponding ``layerName`` attribute to the ``SourceDescriptor``
+  object (#1253).
+  Added ``conditionset`` element to the ``rule`` element to the spec, but not
+  implemented in designspace reader/writer yet (#1254).
+- [varLib.models] Refine modeling one last time (0ecf5c5).
+- [otBase] Fixed sharing of tables referred to by different offset sizes
+  (795f2f9).
+- [subset] Don't drop a GDEF that only has VarStore (fc819d6). Set empty
+  Coverage tables in MarkGlyphSets to None (02616ab).
+- [varLib] Added ``--master-finder`` command-line option (#1249).
+- [varLib.mutator] Prune fvar nameIDs from instance's name table (#1245).
+- [otTables] Allow decompiling bad ClassDef tables with invalid format, with
+  warning (#1236).
+- [varLib] Make STAT v1.2 and reuse nameIDs from fvar table (#1242).
+- [varLib.plot] Show master locations. Set axis limits to -1, +1.
+- [subset] Handle HVAR direct mapping. Passthrough 'cvar'.
+  Added ``--font-number`` command-line option for collections.
+- [t1Lib] Allow a text encoding to be specified when parsing a Type 1 font
+  (#1234). Added ``kind`` argument to T1Font constructor (c5c161c).
+- [ttLib] Added context manager API to ``TTFont`` class, so it can be used in
+  ``with`` statements to auto-close the file when exiting the context (#1232).
+
 3.25.0 (released 2018-04-03)
 ----------------------------
 
@@ -1,5 +1,5 @@
 |Travis Build Status| |Appveyor Build status| |Health| |Coverage Status|
-|PyPI|
+|PyPI| |Gitter Chat|
 
 What is this?
 ~~~~~~~~~~~~~
@@ -360,3 +360,6 @@ Have fun!
    :target: https://codecov.io/gh/fonttools/fonttools
 .. |PyPI| image:: https://img.shields.io/pypi/v/fonttools.svg
    :target: https://pypi.org/project/FontTools
+.. |Gitter Chat| image:: https://badges.gitter.im/fonttools-dev/Lobby.svg
+   :alt: Join the chat at https://gitter.im/fonttools-dev/Lobby
+   :target: https://gitter.im/fonttools-dev/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
@@ -10,7 +10,11 @@ if len(sys.argv) != 2:
     print("usage: layout-features.py fontfile.ttf")
     sys.exit(1)
 fontfile = sys.argv[1]
-font = TTFont(fontfile)
+if fontfile.rsplit(".", 1)[-1] == "ttx":
+    font = TTFont()
+    font.importXML(fontfile)
+else:
+    font = TTFont(fontfile)
 
 for tag in ('GSUB', 'GPOS'):
     if not tag in font: continue
Snippets/rename-fonts.py (new executable file, 170 lines)
@ -0,0 +1,170 @@
|
||||
#!/usr/bin/env python
|
||||
"""Script to add a suffix to all family names in the input font's `name` table,
|
||||
and to optionally rename the output files with the given suffix.
|
||||
|
||||
The current family name substring is searched in the nameIDs 1, 3, 4, 6, 16,
|
||||
and 21, and if found the suffix is inserted after it; or else the suffix is
|
||||
appended at the end.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
import os
|
||||
import argparse
|
||||
import logging
|
||||
from fontTools.ttLib import TTFont
|
||||
from fontTools.misc.cliTools import makeOutputFileName
|
||||
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
WINDOWS_ENGLISH_IDS = 3, 1, 0x409
|
||||
MAC_ROMAN_IDS = 1, 0, 0
|
||||
|
||||
FAMILY_RELATED_IDS = dict(
|
||||
LEGACY_FAMILY=1,
|
||||
TRUETYPE_UNIQUE_ID=3,
|
||||
FULL_NAME=4,
|
||||
POSTSCRIPT_NAME=6,
|
||||
PREFERRED_FAMILY=16,
|
||||
WWS_FAMILY=21,
|
||||
)
|
||||
|
||||
|
||||
def get_current_family_name(table):
|
||||
family_name_rec = None
|
||||
for plat_id, enc_id, lang_id in (WINDOWS_ENGLISH_IDS, MAC_ROMAN_IDS):
|
||||
for name_id in (
|
||||
FAMILY_RELATED_IDS["PREFERRED_FAMILY"],
|
||||
FAMILY_RELATED_IDS["LEGACY_FAMILY"],
|
||||
):
|
||||
family_name_rec = table.getName(
|
||||
nameID=name_id,
|
||||
platformID=plat_id,
|
||||
platEncID=enc_id,
|
||||
langID=lang_id,
|
||||
)
|
||||
if family_name_rec is not None:
|
||||
break
|
||||
if family_name_rec is not None:
|
||||
break
|
||||
if not family_name_rec:
|
||||
raise ValueError("family name not found; can't add suffix")
|
||||
return family_name_rec.toUnicode()
|
||||
|
||||
|
||||
def insert_suffix(string, family_name, suffix):
|
||||
# check whether family_name is a substring
|
||||
start = string.find(family_name)
|
||||
if start != -1:
|
||||
# insert suffix after the family_name substring
|
||||
end = start + len(family_name)
|
||||
new_string = string[:end] + suffix + string[end:]
|
||||
else:
|
||||
# it's not, we just append the suffix at the end
|
||||
new_string = string + suffix
|
||||
return new_string
|
||||
|
||||
|
||||
def rename_record(name_record, family_name, suffix):
|
||||
string = name_record.toUnicode()
|
||||
new_string = insert_suffix(string, family_name, suffix)
|
||||
name_record.string = new_string
|
||||
return string, new_string
|
||||
|
||||
|
||||
def rename_file(filename, family_name, suffix):
|
||||
filename, ext = os.path.splitext(filename)
|
||||
ps_name = family_name.replace(" ", "")
|
||||
if ps_name in filename:
|
||||
ps_suffix = suffix.replace(" ", "")
|
||||
return insert_suffix(filename, ps_name, ps_suffix) + ext
|
||||
else:
|
||||
return insert_suffix(filename, family_name, suffix) + ext
|
||||
|
||||
|
||||
def add_family_suffix(font, suffix):
|
||||
table = font["name"]
|
||||
|
||||
family_name = get_current_family_name(table)
|
||||
logger.info(" Current family name: '%s'", family_name)
|
||||
|
||||
# postcript name can't contain spaces
|
||||
ps_family_name = family_name.replace(" ", "")
|
||||
ps_suffix = suffix.replace(" ", "")
|
||||
for rec in table.names:
|
||||
name_id = rec.nameID
|
||||
if name_id not in FAMILY_RELATED_IDS.values():
|
||||
continue
|
||||
if name_id == FAMILY_RELATED_IDS["POSTSCRIPT_NAME"]:
|
||||
old, new = rename_record(rec, ps_family_name, ps_suffix)
|
||||
elif name_id == FAMILY_RELATED_IDS["TRUETYPE_UNIQUE_ID"]:
|
||||
# The Truetype Unique ID rec may contain either the PostScript
|
||||
# Name or the Full Name string, so we try both
|
||||
if ps_family_name in rec.toUnicode():
|
||||
old, new = rename_record(rec, ps_family_name, ps_suffix)
|
||||
else:
|
||||
old, new = rename_record(rec, family_name, suffix)
|
||||
else:
|
||||
old, new = rename_record(rec, family_name, suffix)
|
||||
logger.info(" %r: '%s' -> '%s'", rec, old, new)
|
||||
|
||||
return family_name
|
||||
|
||||
|
||||
def main(args=None):
|
||||
parser = argparse.ArgumentParser(
|
||||
description=__doc__,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
)
|
||||
parser.add_argument("-s", "--suffix", required=True)
|
||||
parser.add_argument("input_fonts", metavar="FONTFILE", nargs="+")
|
||||
output_group = parser.add_mutually_exclusive_group()
|
||||
output_group.add_argument("-i", "--inplace", action="store_true")
|
||||
output_group.add_argument("-d", "--output-dir")
|
||||
output_group.add_argument("-o", "--output-file")
|
||||
parser.add_argument("-R", "--rename-files", action="store_true")
|
||||
parser.add_argument("-v", "--verbose", action="count", default=0)
|
||||
options = parser.parse_args(args)
|
||||
|
||||
if not options.verbose:
|
||||
level = "WARNING"
|
||||
elif options.verbose == 1:
|
||||
level = "INFO"
|
||||
else:
|
||||
level = "DEBUG"
|
||||
logging.basicConfig(level=level, format="%(message)s")
|
||||
|
||||
if options.output_file and len(options.input_fonts) > 1:
|
||||
parser.error(
|
||||
"argument -o/--output-file can't be used with multiple inputs"
|
||||
)
|
||||
if options.rename_files and (options.inplace or options.output_file):
|
||||
parser.error("argument -R not allowed with arguments -i or -o")
|
||||
|
||||
for input_name in options.input_fonts:
|
||||
logger.info("Renaming font: '%s'", input_name)
|
||||
|
||||
font = TTFont(input_name)
|
||||
family_name = add_family_suffix(font, options.suffix)
|
||||
|
||||
if options.inplace:
|
||||
output_name = input_name
|
||||
elif options.output_file:
|
||||
output_name = options.output_file
|
||||
else:
|
||||
if options.rename_files:
|
||||
input_name = rename_file(
|
||||
input_name, family_name, options.suffix
|
||||
)
|
||||
output_name = makeOutputFileName(input_name, options.output_dir)
|
||||
|
||||
font.save(output_name)
|
||||
logger.info("Saved font: '%s'", output_name)
|
||||
|
||||
font.close()
|
||||
del font
|
||||
|
||||
logger.info("Done!")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -1,5 +1,5 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<designspace format="3">
|
||||
<designspace format="4.0">
|
||||
<axes>
|
||||
<axis default="0" maximum="1000" minimum="0" name="weight" tag="wght">
|
||||
<labelname xml:lang="en">Wéíght</labelname>
|
||||
@ -14,8 +14,10 @@
|
||||
</axes>
|
||||
<rules>
|
||||
<rule name="named.rule.1">
|
||||
<condition maximum="1" minimum="0" name="aaaa" />
|
||||
<condition maximum="3" minimum="2" name="bbbb" />
|
||||
<conditionset>
|
||||
<condition maximum="1" minimum="0" name="axisName_a" />
|
||||
<condition maximum="3" minimum="2" name="axisName_b" />
|
||||
</conditionset>
|
||||
<sub name="a" with="a.alt" />
|
||||
</rule>
|
||||
</rules>
|
||||
@ -38,6 +40,12 @@
|
||||
<dimension name="width" xvalue="20" />
|
||||
</location>
|
||||
</source>
|
||||
<source familyname="MasterFamilyName" filename="masters/masterTest2.ufo" layer="supports" name="master.ufo2" stylename="Supports">
|
||||
<location>
|
||||
<dimension name="weight" xvalue="1000" />
|
||||
<dimension name="width" xvalue="20" />
|
||||
</location>
|
||||
</source>
|
||||
</sources>
|
||||
<instances>
|
||||
<instance familyname="InstanceFamilyName" filename="instances/instanceTest1.ufo" name="instance.ufo1" postscriptfontname="InstancePostscriptName" stylemapfamilyname="InstanceStyleMapFamilyName" stylemapstylename="InstanceStyleMapStyleName" stylename="InstanceStyleName">
|
||||
|
@ -5,11 +5,29 @@ from __future__ import (print_function, division, absolute_import,
|
||||
|
||||
import os
|
||||
import pytest
|
||||
import warnings
|
||||
|
||||
from fontTools.misc.py23 import open
|
||||
from fontTools.designspaceLib import (
|
||||
DesignSpaceDocument, SourceDescriptor, AxisDescriptor, RuleDescriptor,
|
||||
InstanceDescriptor, evaluateRule, processRules, posix)
|
||||
InstanceDescriptor, evaluateRule, processRules, posix, DesignSpaceDocumentError)
|
||||
|
||||
def _axesAsDict(axes):
|
||||
"""
|
||||
Make the axis data we have available in dict form, keyed by axis name.
|
||||
"""
|
||||
axesDict = {}
|
||||
for axisDescriptor in axes:
|
||||
d = {
|
||||
'name': axisDescriptor.name,
|
||||
'tag': axisDescriptor.tag,
|
||||
'minimum': axisDescriptor.minimum,
|
||||
'maximum': axisDescriptor.maximum,
|
||||
'default': axisDescriptor.default,
|
||||
'map': axisDescriptor.map,
|
||||
}
|
||||
axesDict[axisDescriptor.name] = d
|
||||
return axesDict
|
||||
|
||||
|
||||
def assert_equals_test_file(path, test_filename):
|
||||
@ -31,6 +49,29 @@ def test_fill_document(tmpdir):
|
||||
instancePath1 = os.path.join(tmpdir, "instances", "instanceTest1.ufo")
|
||||
instancePath2 = os.path.join(tmpdir, "instances", "instanceTest2.ufo")
|
||||
doc = DesignSpaceDocument()
|
||||
|
||||
# write some axes
|
||||
a1 = AxisDescriptor()
|
||||
a1.minimum = 0
|
||||
a1.maximum = 1000
|
||||
a1.default = 0
|
||||
a1.name = "weight"
|
||||
a1.tag = "wght"
|
||||
# note: just to test the element language, not actual label name recommendations.
|
||||
a1.labelNames[u'fa-IR'] = u"قطر"
|
||||
a1.labelNames[u'en'] = u"Wéíght"
|
||||
doc.addAxis(a1)
|
||||
a2 = AxisDescriptor()
|
||||
a2.minimum = 0
|
||||
a2.maximum = 1000
|
||||
a2.default = 20
|
||||
a2.name = "width"
|
||||
a2.tag = "wdth"
|
||||
a2.map = [(0.0, 10.0), (401.0, 66.0), (1000.0, 990.0)]
|
||||
a2.hidden = True
|
||||
a2.labelNames[u'fr'] = u"Chasse"
|
||||
doc.addAxis(a2)
|
||||
|
||||
# add master 1
|
||||
s1 = SourceDescriptor()
|
||||
s1.filename = os.path.relpath(masterPath1, os.path.dirname(testDocPath))
|
||||
@ -57,6 +98,19 @@ def test_fill_document(tmpdir):
|
||||
s2.familyName = "MasterFamilyName"
|
||||
s2.styleName = "MasterStyleNameTwo"
|
||||
doc.addSource(s2)
|
||||
# add master 3 from a different layer
|
||||
s3 = SourceDescriptor()
|
||||
s3.filename = os.path.relpath(masterPath2, os.path.dirname(testDocPath))
|
||||
s3.name = "master.ufo2"
|
||||
s3.copyLib = False
|
||||
s3.copyInfo = False
|
||||
s3.copyFeatures = False
|
||||
s3.muteKerning = False
|
||||
s3.layerName = "supports"
|
||||
s3.location = dict(weight=1000)
|
||||
s3.familyName = "MasterFamilyName"
|
||||
s3.styleName = "Supports"
|
||||
doc.addSource(s3)
|
||||
# add instance 1
|
||||
i1 = InstanceDescriptor()
|
||||
i1.filename = os.path.relpath(instancePath1, os.path.dirname(testDocPath))
|
||||
@ -94,50 +148,13 @@ def test_fill_document(tmpdir):
|
||||
doc.filename = "suggestedFileName.designspace"
|
||||
doc.lib['com.coolDesignspaceApp.previewSize'] = 30
|
||||
|
||||
# now we have sources and instances, but no axes yet.
|
||||
doc.check()
|
||||
|
||||
# Here, since the axes are not defined in the document, but instead are
|
||||
# inferred from the locations of the instances, we cannot guarantee the
|
||||
# order in which they will be created by the `check()` method.
|
||||
assert set(doc.getAxisOrder()) == set(['spooky', 'weight', 'width'])
|
||||
doc.axes = [] # clear the axes
|
||||
|
||||
# write some axes
|
||||
a1 = AxisDescriptor()
|
||||
a1.minimum = 0
|
||||
a1.maximum = 1000
|
||||
a1.default = 0
|
||||
a1.name = "weight"
|
||||
a1.tag = "wght"
|
||||
# note: just to test the element language, not actual label name recommendations.
|
||||
a1.labelNames[u'fa-IR'] = u"قطر"
|
||||
a1.labelNames[u'en'] = u"Wéíght"
|
||||
doc.addAxis(a1)
|
||||
a2 = AxisDescriptor()
|
||||
a2.minimum = 0
|
||||
a2.maximum = 1000
|
||||
a2.default = 20
|
||||
a2.name = "width"
|
||||
a2.tag = "wdth"
|
||||
a2.map = [(0.0, 10.0), (401.0, 66.0), (1000.0, 990.0)]
|
||||
a2.hidden = True
|
||||
a2.labelNames[u'fr'] = u"Chasse"
|
||||
doc.addAxis(a2)
|
||||
# add an axis that is not part of any location to see if that works
|
||||
a3 = AxisDescriptor()
|
||||
a3.minimum = 333
|
||||
a3.maximum = 666
|
||||
a3.default = 444
|
||||
a3.name = "spooky"
|
||||
a3.tag = "spok"
|
||||
a3.map = [(0.0, 10.0), (401.0, 66.0), (1000.0, 990.0)]
|
||||
#doc.addAxis(a3) # uncomment this line to test the effects of default axes values
|
||||
# write some rules
|
||||
r1 = RuleDescriptor()
|
||||
r1.name = "named.rule.1"
|
||||
r1.conditions.append(dict(name='aaaa', minimum=0, maximum=1))
|
||||
r1.conditions.append(dict(name='bbbb', minimum=2, maximum=3))
|
||||
r1.conditionSets.append([
|
||||
dict(name='axisName_a', minimum=0, maximum=1),
|
||||
dict(name='axisName_b', minimum=2, maximum=3)
|
||||
])
|
||||
r1.subs.append(("a", "a.alt"))
|
||||
doc.addRule(r1)
|
||||
# write the document
|
||||
@ -148,23 +165,11 @@ def test_fill_document(tmpdir):
|
||||
new = DesignSpaceDocument()
|
||||
new.read(testDocPath)
|
||||
|
||||
new.check()
|
||||
assert new.default.location == {'width': 20.0, 'weight': 0.0}
|
||||
assert new.filename == 'test.designspace'
|
||||
assert new.lib == doc.lib
|
||||
assert new.instances[0].lib == doc.instances[0].lib
|
||||
|
||||
# >>> for a, b in zip(doc.instances, new.instances):
|
||||
# ... a.compare(b)
|
||||
# >>> for a, b in zip(doc.sources, new.sources):
|
||||
# ... a.compare(b)
|
||||
# >>> for a, b in zip(doc.axes, new.axes):
|
||||
# ... a.compare(b)
|
||||
# >>> [n.mutedGlyphNames for n in new.sources]
|
||||
# [['A', 'Z'], []]
|
||||
# >>> doc.getFonts()
|
||||
# []
|
||||
|
||||
# test roundtrip for the axis attributes and data
|
||||
axes = {}
|
||||
for axis in doc.axes:
|
||||
@ -182,50 +187,6 @@ def test_fill_document(tmpdir):
|
||||
assert a == b
|
||||
|
||||
|
||||
def test_adjustAxisDefaultToNeutral(tmpdir):
|
||||
tmpdir = str(tmpdir)
|
||||
testDocPath = os.path.join(tmpdir, "testAdjustAxisDefaultToNeutral.designspace")
|
||||
masterPath1 = os.path.join(tmpdir, "masters", "masterTest1.ufo")
|
||||
masterPath2 = os.path.join(tmpdir, "masters", "masterTest2.ufo")
|
||||
instancePath1 = os.path.join(tmpdir, "instances", "instanceTest1.ufo")
|
||||
instancePath2 = os.path.join(tmpdir, "instances", "instanceTest2.ufo")
|
||||
doc = DesignSpaceDocument()
|
||||
# add master 1
|
||||
s1 = SourceDescriptor()
|
||||
s1.filename = os.path.relpath(masterPath1, os.path.dirname(testDocPath))
|
||||
s1.name = "master.ufo1"
|
||||
s1.copyInfo = True
|
||||
s1.copyFeatures = True
|
||||
s1.location = dict(weight=55, width=1000)
|
||||
doc.addSource(s1)
|
||||
# write some axes
|
||||
a1 = AxisDescriptor()
|
||||
a1.minimum = 0
|
||||
a1.maximum = 1000
|
||||
a1.default = 0 # the wrong value
|
||||
a1.name = "weight"
|
||||
a1.tag = "wght"
|
||||
doc.addAxis(a1)
|
||||
a2 = AxisDescriptor()
|
||||
a2.minimum = -10
|
||||
a2.maximum = 10
|
||||
a2.default = 0 # the wrong value
|
||||
a2.name = "width"
|
||||
a2.tag = "wdth"
|
||||
doc.addAxis(a2)
|
||||
# write the document
|
||||
doc.write(testDocPath)
|
||||
assert os.path.exists(testDocPath)
|
||||
# import it again
|
||||
new = DesignSpaceDocument()
|
||||
new.read(testDocPath)
|
||||
new.check()
|
||||
loc = new.default.location
|
||||
for axisObj in new.axes:
|
||||
n = axisObj.name
|
||||
assert axisObj.default == loc.get(n)
|
||||
|
||||
|
||||
def test_unicodes(tmpdir):
|
||||
tmpdir = str(tmpdir)
|
||||
testDocPath = os.path.join(tmpdir, "testUnicodes.designspace")
|
||||
@ -361,8 +322,10 @@ def test_localisedNames(tmpdir):
|
||||
# write some rules
|
||||
r1 = RuleDescriptor()
|
||||
r1.name = "named.rule.1"
|
||||
r1.conditions.append(dict(name='aaaa', minimum=0, maximum=1))
|
||||
r1.conditions.append(dict(name='bbbb', minimum=2, maximum=3))
|
||||
r1.conditionSets.append([
|
||||
dict(name='weight', minimum=200, maximum=500),
|
||||
dict(name='width', minimum=0, maximum=150)
|
||||
])
|
||||
r1.subs.append(("a", "a.alt"))
|
||||
doc.addRule(r1)
|
||||
# write the document
|
||||
@ -399,8 +362,8 @@ def test_handleNoAxes(tmpdir):
|
||||
a.minimum = 0
|
||||
a.maximum = 1000
|
||||
a.default = 0
|
||||
a.name = "axisName%s"%(name)
|
||||
a.tag = "ax_%d"%(value)
|
||||
a.name = "axisName%s" % (name)
|
||||
a.tag = "ax_%d" % (value)
|
||||
doc.addAxis(a)
|
||||
|
||||
# add master 1
|
||||
@ -440,12 +403,10 @@ def test_handleNoAxes(tmpdir):
|
||||
doc.addInstance(i1)
|
||||
|
||||
doc.write(testDocPath)
|
||||
__removeAxesFromDesignSpace(testDocPath)
|
||||
verify = DesignSpaceDocument()
|
||||
verify.read(testDocPath)
|
||||
verify.write(testDocPath2)
|
||||
|
||||
|
||||
def test_pathNameResolve(tmpdir):
|
||||
tmpdir = str(tmpdir)
|
||||
# test how descriptor.path and descriptor.filename are resolved
|
||||
@ -460,8 +421,16 @@ def test_pathNameResolve(tmpdir):
|
||||
instancePath1 = os.path.join(tmpdir, "instances", "instanceTest1.ufo")
|
||||
instancePath2 = os.path.join(tmpdir, "instances", "instanceTest2.ufo")
|
||||
|
||||
a1 = AxisDescriptor()
|
||||
a1.tag = "TAGA"
|
||||
a1.name = "axisName_a"
|
||||
a1.minimum = 0
|
||||
a1.maximum = 1000
|
||||
a1.default = 0
|
||||
|
||||
# Case 1: filename and path are both empty. Nothing to calculate, nothing to put in the file.
|
||||
doc = DesignSpaceDocument()
|
||||
doc.addAxis(a1)
|
||||
s = SourceDescriptor()
|
||||
s.filename = None
|
||||
s.path = None
|
||||
@ -478,6 +447,7 @@ def test_pathNameResolve(tmpdir):
|
||||
|
||||
# Case 2: filename is empty, path points somewhere: calculate a new filename.
|
||||
doc = DesignSpaceDocument()
|
||||
doc.addAxis(a1)
|
||||
s = SourceDescriptor()
|
||||
s.filename = None
|
||||
s.path = masterPath1
|
||||
@ -494,6 +464,7 @@ def test_pathNameResolve(tmpdir):
|
||||
|
||||
# Case 3: the filename is set, the path is None.
|
||||
doc = DesignSpaceDocument()
|
||||
doc.addAxis(a1)
|
||||
s = SourceDescriptor()
|
||||
s.filename = "../somewhere/over/the/rainbow.ufo"
|
||||
s.path = None
|
||||
@ -512,6 +483,7 @@ def test_pathNameResolve(tmpdir):
|
||||
|
||||
# Case 4: the filename points to one file, the path points to another. The path takes precedence.
|
||||
doc = DesignSpaceDocument()
|
||||
doc.addAxis(a1)
|
||||
s = SourceDescriptor()
|
||||
s.filename = "../somewhere/over/the/rainbow.ufo"
|
||||
s.path = masterPath1
|
||||
@ -527,6 +499,7 @@ def test_pathNameResolve(tmpdir):
|
||||
|
||||
# Case 5: the filename is None, path has a value, update the filename
|
||||
doc = DesignSpaceDocument()
|
||||
doc.addAxis(a1)
|
||||
s = SourceDescriptor()
|
||||
s.filename = None
|
||||
s.path = masterPath1
|
||||
@ -541,6 +514,7 @@ def test_pathNameResolve(tmpdir):
|
||||
|
||||
# Case 6: the filename has a value, path has a value, update the filenames with force
|
||||
doc = DesignSpaceDocument()
|
||||
doc.addAxis(a1)
|
||||
s = SourceDescriptor()
|
||||
s.filename = "../somewhere/over/the/rainbow.ufo"
|
||||
s.path = masterPath1
|
||||
@ -555,59 +529,62 @@ def test_pathNameResolve(tmpdir):
|
||||
assert doc.sources[0].filename == "masters/masterTest1.ufo"
|
||||
|
||||
|
||||
def test_normalise():
|
||||
def test_normalise1():
|
||||
# normalisation of anisotropic locations, clipping
|
||||
doc = DesignSpaceDocument()
|
||||
# write some axes
|
||||
a1 = AxisDescriptor()
|
||||
a1.minimum = -1000
|
||||
a1.maximum = 1000
|
||||
a1.default = 0
|
||||
a1.name = "aaa"
|
||||
a1.tag = "aaaa"
|
||||
a1.name = "axisName_a"
|
||||
a1.tag = "TAGA"
|
||||
doc.addAxis(a1)
|
||||
|
||||
assert doc.normalizeLocation(dict(aaa=0)) == {'aaa': 0.0}
|
||||
assert doc.normalizeLocation(dict(aaa=1000)) == {'aaa': 1.0}
|
||||
|
||||
assert doc.normalizeLocation(dict(axisName_a=0)) == {'axisName_a': 0.0}
|
||||
assert doc.normalizeLocation(dict(axisName_a=1000)) == {'axisName_a': 1.0}
|
||||
# clipping beyond max values:
|
||||
assert doc.normalizeLocation(dict(aaa=1001)) == {'aaa': 1.0}
|
||||
assert doc.normalizeLocation(dict(aaa=500)) == {'aaa': 0.5}
|
||||
assert doc.normalizeLocation(dict(aaa=-1000)) == {'aaa': -1.0}
|
||||
assert doc.normalizeLocation(dict(aaa=-1001)) == {'aaa': -1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_a=1001)) == {'axisName_a': 1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_a=500)) == {'axisName_a': 0.5}
|
||||
assert doc.normalizeLocation(dict(axisName_a=-1000)) == {'axisName_a': -1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_a=-1001)) == {'axisName_a': -1.0}
|
||||
# anisotropic coordinates normalise to isotropic
|
||||
assert doc.normalizeLocation(dict(aaa=(1000, -1000))) == {'aaa': 1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_a=(1000, -1000))) == {'axisName_a': 1.0}
|
||||
doc.normalize()
|
||||
r = []
|
||||
for axis in doc.axes:
|
||||
r.append((axis.name, axis.minimum, axis.default, axis.maximum))
|
||||
r.sort()
|
||||
assert r == [('aaa', -1.0, 0.0, 1.0)]
|
||||
assert r == [('axisName_a', -1.0, 0.0, 1.0)]
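The expected values in this test follow the usual designspace normalisation: a value is scaled against the distance from the axis default to the relevant extreme and clipped to [-1, 1]. A small sketch of that arithmetic (not part of this diff), using the axis limits defined above:

def normalize(v, minimum=-1000, default=0, maximum=1000):
    # scale relative to the default, then clip to the normalised range
    if v >= default:
        n = (v - default) / (maximum - default) if maximum != default else 0.0
    else:
        n = (v - default) / (default - minimum) if default != minimum else 0.0
    return max(-1.0, min(1.0, n))

assert normalize(500) == 0.5
assert normalize(1001) == 1.0
assert normalize(-1001) == -1.0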
|
||||
|
||||
def test_normalise2():
|
||||
# normalisation with minimum > 0
|
||||
doc = DesignSpaceDocument()
|
||||
# write some axes
|
||||
a2 = AxisDescriptor()
|
||||
a2.minimum = 100
|
||||
a2.maximum = 1000
|
||||
a2.default = 100
|
||||
a2.name = "bbb"
|
||||
a2.name = "axisName_b"
|
||||
doc.addAxis(a2)
|
||||
assert doc.normalizeLocation(dict(bbb=0)) == {'bbb': 0.0}
|
||||
assert doc.normalizeLocation(dict(bbb=1000)) == {'bbb': 1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_b=0)) == {'axisName_b': 0.0}
|
||||
assert doc.normalizeLocation(dict(axisName_b=1000)) == {'axisName_b': 1.0}
|
||||
# clipping beyond max values:
|
||||
assert doc.normalizeLocation(dict(bbb=1001)) == {'bbb': 1.0}
|
||||
assert doc.normalizeLocation(dict(bbb=500)) == {'bbb': 0.4444444444444444}
|
||||
assert doc.normalizeLocation(dict(bbb=-1000)) == {'bbb': 0.0}
|
||||
assert doc.normalizeLocation(dict(bbb=-1001)) == {'bbb': 0.0}
|
||||
assert doc.normalizeLocation(dict(axisName_b=1001)) == {'axisName_b': 1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_b=500)) == {'axisName_b': 0.4444444444444444}
|
||||
assert doc.normalizeLocation(dict(axisName_b=-1000)) == {'axisName_b': 0.0}
|
||||
assert doc.normalizeLocation(dict(axisName_b=-1001)) == {'axisName_b': 0.0}
|
||||
# anisotropic coordinates normalise to isotropic
|
||||
assert doc.normalizeLocation(dict(bbb=(1000,-1000))) == {'bbb': 1.0}
|
||||
assert doc.normalizeLocation(dict(bbb=1001)) == {'bbb': 1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_b=(1000,-1000))) == {'axisName_b': 1.0}
|
||||
assert doc.normalizeLocation(dict(axisName_b=1001)) == {'axisName_b': 1.0}
|
||||
doc.normalize()
|
||||
r = []
|
||||
for axis in doc.axes:
|
||||
r.append((axis.name, axis.minimum, axis.default, axis.maximum))
|
||||
r.sort()
|
||||
assert r == [('bbb', 0.0, 0.0, 1.0)]
|
||||
assert r == [('axisName_b', 0.0, 0.0, 1.0)]
|
||||
|
||||
def test_normalise3():
|
||||
# normalisation of negative values, with default == maximum
|
||||
doc = DesignSpaceDocument()
|
||||
# write some axes
|
||||
a3 = AxisDescriptor()
|
||||
@ -620,7 +597,6 @@ def test_normalise():
|
||||
assert doc.normalizeLocation(dict(ccc=1)) == {'ccc': 0.0}
|
||||
assert doc.normalizeLocation(dict(ccc=-1000)) == {'ccc': -1.0}
|
||||
assert doc.normalizeLocation(dict(ccc=-1001)) == {'ccc': -1.0}
|
||||
|
||||
doc.normalize()
|
||||
r = []
|
||||
for axis in doc.axes:
|
||||
@ -628,28 +604,8 @@ def test_normalise():
|
||||
r.sort()
|
||||
assert r == [('ccc', -1.0, 0.0, 0.0)]
|
||||
|
||||
|
||||
doc = DesignSpaceDocument()
|
||||
# write some axes
|
||||
a3 = AxisDescriptor()
|
||||
a3.minimum = 2000
|
||||
a3.maximum = 3000
|
||||
a3.default = 2000
|
||||
a3.name = "ccc"
|
||||
doc.addAxis(a3)
|
||||
assert doc.normalizeLocation(dict(ccc=0)) == {'ccc': 0.0}
|
||||
assert doc.normalizeLocation(dict(ccc=1)) == {'ccc': 0.0}
|
||||
assert doc.normalizeLocation(dict(ccc=-1000)) == {'ccc': 0.0}
|
||||
assert doc.normalizeLocation(dict(ccc=-1001)) == {'ccc': 0.0}
|
||||
|
||||
doc.normalize()
|
||||
r = []
|
||||
for axis in doc.axes:
|
||||
r.append((axis.name, axis.minimum, axis.default, axis.maximum))
|
||||
r.sort()
|
||||
assert r == [('ccc', 0.0, 0.0, 1.0)]
|
||||
|
||||
|
||||
def test_normalise4():
|
||||
# normalisation with a map
|
||||
doc = DesignSpaceDocument()
|
||||
# write some axes
|
||||
a4 = AxisDescriptor()
|
||||
@ -666,220 +622,170 @@ def test_normalise():
|
||||
r.sort()
|
||||
assert r == [('ddd', [(0, 0.1), (300, 0.5), (600, 0.5), (1000, 0.9)])]
|
||||
|
||||
|
||||
def test_rules(tmpdir):
|
||||
tmpdir = str(tmpdir)
|
||||
testDocPath = os.path.join(tmpdir, "testRules.designspace")
|
||||
testDocPath2 = os.path.join(tmpdir, "testRules_roundtrip.designspace")
|
||||
def test_axisMapping():
|
||||
# note: because designspaceLib does not do any actual
|
||||
# processing of the mapping data, we can only check if the data is there.
|
||||
doc = DesignSpaceDocument()
|
||||
# write some axes
|
||||
a1 = AxisDescriptor()
|
||||
a1.tag = "taga"
|
||||
a1.name = "aaaa"
|
||||
a1.minimum = 0
|
||||
a1.maximum = 1000
|
||||
a1.default = 0
|
||||
doc.addAxis(a1)
|
||||
a2 = AxisDescriptor()
|
||||
a2.tag = "tagb"
|
||||
a2.name = "bbbb"
|
||||
a2.minimum = 0
|
||||
a2.maximum = 3000
|
||||
a2.default = 0
|
||||
doc.addAxis(a2)
|
||||
a4 = AxisDescriptor()
|
||||
a4.minimum = 0
|
||||
a4.maximum = 1000
|
||||
a4.default = 0
|
||||
a4.name = "ddd"
|
||||
a4.map = [(0,100), (300, 500), (600, 500), (1000,900)]
|
||||
doc.addAxis(a4)
|
||||
doc.normalize()
|
||||
r = []
|
||||
for axis in doc.axes:
|
||||
r.append((axis.name, axis.map))
|
||||
r.sort()
|
||||
assert r == [('ddd', [(0, 0.1), (300, 0.5), (600, 0.5), (1000, 0.9)])]
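The expected map above can be read as: input values are kept as-is while output values are scaled into the normalised range by the axis maximum (a sketch of the arithmetic, not part of this diff):

raw_map = [(0, 100), (300, 500), (600, 500), (1000, 900)]
normalized = [(i, o / 1000) for i, o in raw_map]
assert normalized == [(0, 0.1), (300, 0.5), (600, 0.5), (1000, 0.9)]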
|
||||
|
||||
def test_rulesConditions(tmpdir):
|
||||
# tests of rules, conditionsets and conditions
|
||||
r1 = RuleDescriptor()
|
||||
r1.name = "named.rule.1"
|
||||
r1.conditions.append(dict(name='aaaa', minimum=0, maximum=1000))
|
||||
r1.conditions.append(dict(name='bbbb', minimum=0, maximum=3000))
|
||||
r1.conditionSets.append([
|
||||
dict(name='axisName_a', minimum=0, maximum=1000),
|
||||
dict(name='axisName_b', minimum=0, maximum=3000)
|
||||
])
|
||||
r1.subs.append(("a", "a.alt"))
|
||||
|
||||
# rule with minimum and maximum
|
||||
doc.addRule(r1)
|
||||
assert len(doc.rules) == 1
|
||||
assert len(doc.rules[0].conditions) == 2
|
||||
assert evaluateRule(r1, dict(aaaa = 500, bbbb = 0)) == True
|
||||
assert evaluateRule(r1, dict(aaaa = 0, bbbb = 0)) == True
|
||||
assert evaluateRule(r1, dict(aaaa = 1000, bbbb = 0)) == True
|
||||
assert evaluateRule(r1, dict(aaaa = 1000, bbbb = -100)) == False
|
||||
assert evaluateRule(r1, dict(aaaa = 1000.0001, bbbb = 0)) == False
|
||||
assert evaluateRule(r1, dict(aaaa = -0.0001, bbbb = 0)) == False
|
||||
assert evaluateRule(r1, dict(aaaa = -100, bbbb = 0)) == False
|
||||
assert processRules([r1], dict(aaaa = 500), ["a", "b", "c"]) == ['a.alt', 'b', 'c']
|
||||
assert processRules([r1], dict(aaaa = 500), ["a.alt", "b", "c"]) == ['a.alt', 'b', 'c']
|
||||
assert processRules([r1], dict(aaaa = 2000), ["a", "b", "c"]) == ['a', 'b', 'c']
|
||||
assert evaluateRule(r1, dict(axisName_a = 500, axisName_b = 0)) == True
|
||||
assert evaluateRule(r1, dict(axisName_a = 0, axisName_b = 0)) == True
|
||||
assert evaluateRule(r1, dict(axisName_a = 1000, axisName_b = 0)) == True
|
||||
assert evaluateRule(r1, dict(axisName_a = 1000, axisName_b = -100)) == False
|
||||
assert evaluateRule(r1, dict(axisName_a = 1000.0001, axisName_b = 0)) == False
|
||||
assert evaluateRule(r1, dict(axisName_a = -0.0001, axisName_b = 0)) == False
|
||||
assert evaluateRule(r1, dict(axisName_a = -100, axisName_b = 0)) == False
|
||||
assert processRules([r1], dict(axisName_a = 500, axisName_b = 0), ["a", "b", "c"]) == ['a.alt', 'b', 'c']
|
||||
assert processRules([r1], dict(axisName_a = 500, axisName_b = 0), ["a.alt", "b", "c"]) == ['a.alt', 'b', 'c']
|
||||
assert processRules([r1], dict(axisName_a = 2000, axisName_b = 0), ["a", "b", "c"]) == ['a', 'b', 'c']
|
||||
|
||||
# rule with only a maximum
|
||||
r2 = RuleDescriptor()
|
||||
r2.name = "named.rule.2"
|
||||
r2.conditions.append(dict(name='aaaa', maximum=500))
|
||||
r2.conditionSets.append([dict(name='axisName_a', maximum=500)])
|
||||
r2.subs.append(("b", "b.alt"))
|
||||
|
||||
assert evaluateRule(r2, dict(aaaa = 0)) == True
|
||||
assert evaluateRule(r2, dict(aaaa = -500)) == True
|
||||
assert evaluateRule(r2, dict(aaaa = 1000)) == False
|
||||
assert evaluateRule(r2, dict(axisName_a = 0)) == True
|
||||
assert evaluateRule(r2, dict(axisName_a = -500)) == True
|
||||
assert evaluateRule(r2, dict(axisName_a = 1000)) == False
|
||||
|
||||
# rule with only a minimum
|
||||
r3 = RuleDescriptor()
|
||||
r3.name = "named.rule.3"
|
||||
r3.conditions.append(dict(name='aaaa', minimum=500))
|
||||
r3.conditionSets.append([dict(name='axisName_a', minimum=500)])
|
||||
r3.subs.append(("c", "c.alt"))
|
||||
|
||||
assert evaluateRule(r3, dict(aaaa = 0)) == False
|
||||
assert evaluateRule(r3, dict(aaaa = 1000)) == True
|
||||
assert evaluateRule(r3, dict(bbbb = 1000)) == True
|
||||
assert evaluateRule(r3, dict(axisName_a = 0)) == False
|
||||
assert evaluateRule(r3, dict(axisName_a = 1000)) == True
|
||||
assert evaluateRule(r3, dict(axisName_a = 1000)) == True
|
||||
|
||||
# rule with only a minimum, maximum in separate conditions
|
||||
r4 = RuleDescriptor()
|
||||
r4.name = "named.rule.4"
|
||||
r4.conditions.append(dict(name='aaaa', minimum=500))
|
||||
r4.conditions.append(dict(name='bbbb', maximum=500))
|
||||
r4.conditionSets.append([
|
||||
dict(name='axisName_a', minimum=500),
|
||||
dict(name='axisName_b', maximum=500)
|
||||
])
|
||||
r4.subs.append(("c", "c.alt"))
|
||||
|
||||
assert evaluateRule(r4, dict()) == True # is this what we expect though?
|
||||
assert evaluateRule(r4, dict(aaaa = 1000, bbbb = 0)) == True
|
||||
assert evaluateRule(r4, dict(aaaa = 0, bbbb = 0)) == False
|
||||
assert evaluateRule(r4, dict(aaaa = 1000, bbbb = 1000)) == False
|
||||
assert evaluateRule(r4, dict(axisName_a = 1000, axisName_b = 0)) == True
|
||||
assert evaluateRule(r4, dict(axisName_a = 0, axisName_b = 0)) == False
|
||||
assert evaluateRule(r4, dict(axisName_a = 1000, axisName_b = 1000)) == False
|
||||
|
||||
def test_rulesDocument(tmpdir):
|
||||
# tests of rules in a document, roundtripping.
|
||||
tmpdir = str(tmpdir)
|
||||
testDocPath = os.path.join(tmpdir, "testRules.designspace")
|
||||
testDocPath2 = os.path.join(tmpdir, "testRules_roundtrip.designspace")
|
||||
doc = DesignSpaceDocument()
|
||||
a1 = AxisDescriptor()
|
||||
a1.minimum = 0
|
||||
a1.maximum = 1000
|
||||
a1.default = 0
|
||||
a1.name = "aaaa"
|
||||
a1.tag = "aaaa"
|
||||
a1.name = "axisName_a"
|
||||
a1.tag = "TAGA"
|
||||
b1 = AxisDescriptor()
|
||||
b1.minimum = 2000
|
||||
b1.maximum = 3000
|
||||
b1.default = 2000
|
||||
b1.name = "bbbb"
|
||||
b1.tag = "bbbb"
|
||||
b1.name = "axisName_b"
|
||||
b1.tag = "TAGB"
|
||||
doc.addAxis(a1)
|
||||
doc.addAxis(b1)
|
||||
assert doc._prepAxesForBender() == {'aaaa': {'map': [], 'name': 'aaaa', 'default': 0, 'minimum': 0, 'maximum': 1000, 'tag': 'aaaa'}, 'bbbb': {'map': [], 'name': 'bbbb', 'default': 2000, 'minimum': 2000, 'maximum': 3000, 'tag': 'bbbb'}}
|
||||
|
||||
assert doc.rules[0].conditions == [{'minimum': 0, 'maximum': 1000, 'name': 'aaaa'}, {'minimum': 0, 'maximum': 3000, 'name': 'bbbb'}]
|
||||
|
||||
r1 = RuleDescriptor()
|
||||
r1.name = "named.rule.1"
|
||||
r1.conditionSets.append([
|
||||
dict(name='axisName_a', minimum=0, maximum=1000),
|
||||
dict(name='axisName_b', minimum=0, maximum=3000)
|
||||
])
|
||||
r1.subs.append(("a", "a.alt"))
|
||||
# rule with minimum and maximum
|
||||
doc.addRule(r1)
|
||||
assert len(doc.rules) == 1
|
||||
assert len(doc.rules[0].conditionSets) == 1
|
||||
assert len(doc.rules[0].conditionSets[0]) == 2
|
||||
assert _axesAsDict(doc.axes) == {'axisName_a': {'map': [], 'name': 'axisName_a', 'default': 0, 'minimum': 0, 'maximum': 1000, 'tag': 'TAGA'}, 'axisName_b': {'map': [], 'name': 'axisName_b', 'default': 2000, 'minimum': 2000, 'maximum': 3000, 'tag': 'TAGB'}}
|
||||
assert doc.rules[0].conditionSets == [[
|
||||
{'minimum': 0, 'maximum': 1000, 'name': 'axisName_a'},
|
||||
{'minimum': 0, 'maximum': 3000, 'name': 'axisName_b'}]]
|
||||
assert doc.rules[0].subs == [('a', 'a.alt')]
|
||||
|
||||
doc.normalize()
|
||||
assert doc.rules[0].name == 'named.rule.1'
|
||||
assert doc.rules[0].conditions == [{'minimum': 0.0, 'maximum': 1.0, 'name': 'aaaa'}, {'minimum': 0.0, 'maximum': 1.0, 'name': 'bbbb'}]
|
||||
|
||||
assert doc.rules[0].conditionSets == [[
|
||||
{'minimum': 0.0, 'maximum': 1.0, 'name': 'axisName_a'},
|
||||
{'minimum': 0.0, 'maximum': 1.0, 'name': 'axisName_b'}]]
|
||||
# still one conditionset
|
||||
assert len(doc.rules[0].conditionSets) == 1
|
||||
doc.write(testDocPath)
|
||||
new = DesignSpaceDocument()
|
||||
# add a stray conditionset
|
||||
_addUnwrappedCondition(testDocPath)
|
||||
doc2 = DesignSpaceDocument()
|
||||
doc2.read(testDocPath)
|
||||
assert len(doc2.axes) == 2
|
||||
assert len(doc2.rules) == 1
|
||||
assert len(doc2.rules[0].conditionSets) == 2
|
||||
doc2.write(testDocPath2)
|
||||
# verify these results
|
||||
# make sure the stray condition is now neatly wrapped in a conditionset.
|
||||
doc3 = DesignSpaceDocument()
|
||||
doc3.read(testDocPath2)
|
||||
assert len(doc3.rules) == 1
|
||||
assert len(doc3.rules[0].conditionSets) == 2
|
||||
|
||||
new.read(testDocPath)
|
||||
assert len(new.axes) == 4
|
||||
assert len(new.rules) == 1
|
||||
new.write(testDocPath2)
|
||||
|
||||
|
||||
def __removeAxesFromDesignSpace(path):
|
||||
def _addUnwrappedCondition(path):
|
||||
# only for testing, so we can make an invalid designspace file
|
||||
# without making the designSpaceDocument also support it.
|
||||
# older designspace files may have conditions that are not wrapped in a conditionset
|
||||
# These can be read into a new conditionset.
|
||||
f = open(path, 'r', encoding='utf-8')
|
||||
d = f.read()
|
||||
print(d)
|
||||
f.close()
|
||||
start = d.find("<axes>")
|
||||
end = d.find("</axes>")+len("</axes>")
|
||||
n = d[0:start] + d[end:]
|
||||
d = d.replace('<rule name="named.rule.1">', '<rule name="named.rule.1">\n\t<condition maximum="22" minimum="33" name="axisName_a" />')
|
||||
f = open(path, 'w', encoding='utf-8')
|
||||
f.write(n)
|
||||
f.write(d)
|
||||
f.close()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def invalid_designspace():
|
||||
p = "testCheck.designspace"
|
||||
__removeAxesFromDesignSpace(p)
|
||||
yield p
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="The check method requires MutatorMath")
|
||||
def test_check(invalid_designspace, tmpdir):
|
||||
def test_documentLib(tmpdir):
|
||||
# roundtrip test of the document lib with some nested data
|
||||
tmpdir = str(tmpdir)
|
||||
# check if the checks are checking
|
||||
testDocPath = os.path.join(tmpdir, invalid_designspace)
|
||||
masterPath1 = os.path.join(tmpdir, "masters", "masterTest1.ufo")
|
||||
masterPath2 = os.path.join(tmpdir, "masters", "masterTest2.ufo")
|
||||
instancePath1 = os.path.join(tmpdir, "instances", "instanceTest1.ufo")
|
||||
instancePath2 = os.path.join(tmpdir, "instances", "instanceTest2.ufo")
|
||||
|
||||
# no default selected
|
||||
testDocPath1 = os.path.join(tmpdir, "testDocumentLibTest.designspace")
|
||||
doc = DesignSpaceDocument()
|
||||
# add master 1
|
||||
s1 = SourceDescriptor()
|
||||
s1.path = masterPath1
|
||||
s1.name = "master.ufo1"
|
||||
s1.location = dict(snap=0, pop=10)
|
||||
s1.familyName = "MasterFamilyName"
|
||||
s1.styleName = "MasterStyleNameOne"
|
||||
doc.addSource(s1)
|
||||
# add master 2
|
||||
s2 = SourceDescriptor()
|
||||
s2.path = masterPath2
|
||||
s2.name = "master.ufo2"
|
||||
s2.location = dict(snap=1000, pop=20)
|
||||
s2.familyName = "MasterFamilyName"
|
||||
s2.styleName = "MasterStyleNameTwo"
|
||||
doc.addSource(s2)
|
||||
doc.checkAxes()
|
||||
assert doc.getAxisOrder() == ['snap', 'pop']
|
||||
assert doc.default == None
|
||||
doc.checkDefault()
|
||||
assert doc.default.name == 'master.ufo1'
|
||||
|
||||
# default selected
|
||||
doc = DesignSpaceDocument()
|
||||
# add master 1
|
||||
s1 = SourceDescriptor()
|
||||
s1.path = masterPath1
|
||||
s1.name = "master.ufo1"
|
||||
s1.location = dict(snap=0, pop=10)
|
||||
s1.familyName = "MasterFamilyName"
|
||||
s1.styleName = "MasterStyleNameOne"
|
||||
doc.addSource(s1)
|
||||
# add master 2
|
||||
s2 = SourceDescriptor()
|
||||
s2.path = masterPath2
|
||||
s2.name = "master.ufo2"
|
||||
s2.copyInfo = True
|
||||
s2.location = dict(snap=1000, pop=20)
|
||||
s2.familyName = "MasterFamilyName"
|
||||
s2.styleName = "MasterStyleNameTwo"
|
||||
doc.addSource(s2)
|
||||
doc.checkAxes()
|
||||
assert doc.getAxisOrder() == ['snap', 'pop']
|
||||
assert doc.default == None
|
||||
doc.checkDefault()
|
||||
assert doc.default.name == 'master.ufo2'
|
||||
|
||||
# generate a doc without axes, save and read again
|
||||
doc = DesignSpaceDocument()
|
||||
# add master 1
|
||||
s1 = SourceDescriptor()
|
||||
s1.path = masterPath1
|
||||
s1.name = "master.ufo1"
|
||||
s1.location = dict(snap=0, pop=10)
|
||||
s1.familyName = "MasterFamilyName"
|
||||
s1.styleName = "MasterStyleNameOne"
|
||||
doc.addSource(s1)
|
||||
# add master 2
|
||||
s2 = SourceDescriptor()
|
||||
s2.path = masterPath2
|
||||
s2.name = "master.ufo2"
|
||||
s2.location = dict(snap=1000, pop=20)
|
||||
s2.familyName = "MasterFamilyName"
|
||||
s2.styleName = "MasterStyleNameTwo"
|
||||
doc.addSource(s2)
|
||||
doc.checkAxes()
|
||||
doc.write(testDocPath)
|
||||
__removeAxesFromDesignSpace(testDocPath)
|
||||
|
||||
a1 = AxisDescriptor()
|
||||
a1.tag = "TAGA"
|
||||
a1.name = "axisName_a"
|
||||
a1.minimum = 0
|
||||
a1.maximum = 1000
|
||||
a1.default = 0
|
||||
doc.addAxis(a1)
|
||||
dummyData = dict(a=123, b=u"äbc", c=[1,2,3], d={'a':123})
|
||||
dummyKey = "org.fontTools.designspaceLib"
|
||||
doc.lib = {dummyKey: dummyData}
|
||||
doc.write(testDocPath1)
|
||||
new = DesignSpaceDocument()
|
||||
new.read(testDocPath)
|
||||
assert len(new.axes) == 2
|
||||
new.checkAxes()
|
||||
assert len(new.axes) == 2
|
||||
assert [a.name for a in new.axes] == ['snap', 'pop']
|
||||
new.write(testDocPath)
|
||||
new.read(testDocPath1)
|
||||
assert dummyKey in new.lib
|
||||
assert new.lib[dummyKey] == dummyData
|
||||
|
||||
|
@ -67,8 +67,7 @@ class T1FontTest(unittest.TestCase):
|
||||
|
||||
def test_parse_lwfn(self):
|
||||
# the extended attrs are lost on git so we can't auto-detect 'LWFN'
|
||||
font = t1Lib.T1Font()
|
||||
font.data = t1Lib.readLWFN(LWFN)
|
||||
font = t1Lib.T1Font(LWFN, kind="LWFN")
|
||||
font.parse()
|
||||
self.assertEqual(font['FontName'], 'TestT1-Regular')
|
||||
self.assertTrue('Subrs' in font['Private'])
|
||||
|
@ -2,6 +2,7 @@ from __future__ import print_function, division, absolute_import
|
||||
from fontTools.misc.py23 import *
|
||||
from fontTools.ttLib.tables._g_l_y_f import GlyphCoordinates
|
||||
import sys
|
||||
import array
|
||||
import pytest
|
||||
|
||||
|
||||
@ -150,3 +151,9 @@ class GlyphCoordinatesTest(object):
|
||||
# since the Python float is truncated to a C float.
|
||||
# when using typecode 'd' it should return the correct value 243
|
||||
assert g[0][0] == round(afloat)
|
||||
|
||||
def test__checkFloat_overflow(self):
|
||||
g = GlyphCoordinates([(1, 1)], typecode="h")
|
||||
g.append((0x8000, 0))
|
||||
assert g.array.typecode == "d"
|
||||
assert g.array == array.array("d", [1.0, 1.0, 32768.0, 0.0])
|
||||
|
File diff suppressed because it is too large
@ -494,12 +494,6 @@
|
||||
<namerecord nameID="273" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-BlackHighContrast
|
||||
</namerecord>
|
||||
<namerecord nameID="274" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Weight
|
||||
</namerecord>
|
||||
<namerecord nameID="275" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="1" platformID="3" platEncID="1" langID="0x409">
|
||||
Test Family
|
||||
</namerecord>
|
||||
@ -578,12 +572,6 @@
|
||||
<namerecord nameID="273" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-BlackHighContrast
|
||||
</namerecord>
|
||||
<namerecord nameID="274" platformID="3" platEncID="1" langID="0x409">
|
||||
Weight
|
||||
</namerecord>
|
||||
<namerecord nameID="275" platformID="3" platEncID="1" langID="0x409">
|
||||
Contrast
|
||||
</namerecord>
|
||||
</name>
|
||||
|
||||
<post>
|
||||
@ -794,22 +782,23 @@
|
||||
</MVAR>
|
||||
|
||||
<STAT>
|
||||
<Version value="0x00010000"/>
|
||||
<Version value="0x00010002"/>
|
||||
<DesignAxisRecordSize value="8"/>
|
||||
<!-- DesignAxisCount=2 -->
|
||||
<DesignAxisRecord>
|
||||
<Axis index="0">
|
||||
<AxisTag value="wght"/>
|
||||
<AxisNameID value="274"/> <!-- Weight -->
|
||||
<AxisNameID value="256"/> <!-- Weight -->
|
||||
<AxisOrdering value="0"/>
|
||||
</Axis>
|
||||
<Axis index="1">
|
||||
<AxisTag value="cntr"/>
|
||||
<AxisNameID value="275"/> <!-- Contrast -->
|
||||
<AxisNameID value="257"/> <!-- Contrast -->
|
||||
<AxisOrdering value="1"/>
|
||||
</Axis>
|
||||
</DesignAxisRecord>
|
||||
<!-- AxisValueCount=0 -->
|
||||
<ElidedFallbackNameID value="2"/> <!-- Regular -->
|
||||
</STAT>
|
||||
|
||||
<cvar>
|
||||
|
@ -440,66 +440,6 @@
|
||||
</glyf>
|
||||
|
||||
<name>
|
||||
<namerecord nameID="256" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Weight
|
||||
</namerecord>
|
||||
<namerecord nameID="257" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="258" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
ExtraLight
|
||||
</namerecord>
|
||||
<namerecord nameID="259" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-ExtraLight
|
||||
</namerecord>
|
||||
<namerecord nameID="260" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Light
|
||||
</namerecord>
|
||||
<namerecord nameID="261" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-Light
|
||||
</namerecord>
|
||||
<namerecord nameID="262" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Regular
|
||||
</namerecord>
|
||||
<namerecord nameID="263" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-Regular
|
||||
</namerecord>
|
||||
<namerecord nameID="264" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Semibold
|
||||
</namerecord>
|
||||
<namerecord nameID="265" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-Semibold
|
||||
</namerecord>
|
||||
<namerecord nameID="266" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Bold
|
||||
</namerecord>
|
||||
<namerecord nameID="267" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-Bold
|
||||
</namerecord>
|
||||
<namerecord nameID="268" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Black
|
||||
</namerecord>
|
||||
<namerecord nameID="269" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-Black
|
||||
</namerecord>
|
||||
<namerecord nameID="270" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Black Medium Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="271" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-BlackMediumContrast
|
||||
</namerecord>
|
||||
<namerecord nameID="272" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Black High Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="273" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
TestFamily-BlackHighContrast
|
||||
</namerecord>
|
||||
<namerecord nameID="274" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Weight
|
||||
</namerecord>
|
||||
<namerecord nameID="275" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="1" platformID="3" platEncID="1" langID="0x409">
|
||||
Test Family
|
||||
</namerecord>
|
||||
@ -524,66 +464,6 @@
|
||||
<namerecord nameID="17" platformID="3" platEncID="1" langID="0x409">
|
||||
Master 1
|
||||
</namerecord>
|
||||
<namerecord nameID="256" platformID="3" platEncID="1" langID="0x409">
|
||||
Weight
|
||||
</namerecord>
|
||||
<namerecord nameID="257" platformID="3" platEncID="1" langID="0x409">
|
||||
Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="258" platformID="3" platEncID="1" langID="0x409">
|
||||
ExtraLight
|
||||
</namerecord>
|
||||
<namerecord nameID="259" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-ExtraLight
|
||||
</namerecord>
|
||||
<namerecord nameID="260" platformID="3" platEncID="1" langID="0x409">
|
||||
Light
|
||||
</namerecord>
|
||||
<namerecord nameID="261" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-Light
|
||||
</namerecord>
|
||||
<namerecord nameID="262" platformID="3" platEncID="1" langID="0x409">
|
||||
Regular
|
||||
</namerecord>
|
||||
<namerecord nameID="263" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-Regular
|
||||
</namerecord>
|
||||
<namerecord nameID="264" platformID="3" platEncID="1" langID="0x409">
|
||||
Semibold
|
||||
</namerecord>
|
||||
<namerecord nameID="265" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-Semibold
|
||||
</namerecord>
|
||||
<namerecord nameID="266" platformID="3" platEncID="1" langID="0x409">
|
||||
Bold
|
||||
</namerecord>
|
||||
<namerecord nameID="267" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-Bold
|
||||
</namerecord>
|
||||
<namerecord nameID="268" platformID="3" platEncID="1" langID="0x409">
|
||||
Black
|
||||
</namerecord>
|
||||
<namerecord nameID="269" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-Black
|
||||
</namerecord>
|
||||
<namerecord nameID="270" platformID="3" platEncID="1" langID="0x409">
|
||||
Black Medium Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="271" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-BlackMediumContrast
|
||||
</namerecord>
|
||||
<namerecord nameID="272" platformID="3" platEncID="1" langID="0x409">
|
||||
Black High Contrast
|
||||
</namerecord>
|
||||
<namerecord nameID="273" platformID="3" platEncID="1" langID="0x409">
|
||||
TestFamily-BlackHighContrast
|
||||
</namerecord>
|
||||
<namerecord nameID="274" platformID="3" platEncID="1" langID="0x409">
|
||||
Weight
|
||||
</namerecord>
|
||||
<namerecord nameID="275" platformID="3" platEncID="1" langID="0x409">
|
||||
Contrast
|
||||
</namerecord>
|
||||
</name>
|
||||
|
||||
<post>
|
||||
|
@ -237,15 +237,6 @@
|
||||
<namerecord nameID="6" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
VarFont-Regular
|
||||
</namerecord>
|
||||
<namerecord nameID="256" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Width
|
||||
</namerecord>
|
||||
<namerecord nameID="257" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Ascender
|
||||
</namerecord>
|
||||
<namerecord nameID="258" platformID="1" platEncID="0" langID="0x0" unicode="True">
|
||||
Regular
|
||||
</namerecord>
|
||||
<namerecord nameID="1" platformID="3" platEncID="1" langID="0x409">
|
||||
VarFont
|
||||
</namerecord>
|
||||
@ -261,15 +252,6 @@
|
||||
<namerecord nameID="6" platformID="3" platEncID="1" langID="0x409">
|
||||
VarFont-Regular
|
||||
</namerecord>
|
||||
<namerecord nameID="256" platformID="3" platEncID="1" langID="0x409">
|
||||
Width
|
||||
</namerecord>
|
||||
<namerecord nameID="257" platformID="3" platEncID="1" langID="0x409">
|
||||
Ascender
|
||||
</namerecord>
|
||||
<namerecord nameID="258" platformID="3" platEncID="1" langID="0x409">
|
||||
Regular
|
||||
</namerecord>
|
||||
</name>
|
||||
|
||||
<post>
|
||||
|
@ -190,7 +190,6 @@ class BuildTest(unittest.TestCase):
|
||||
"""
|
||||
suffix = '.ttf'
|
||||
ds_path = self.get_test_input('Build.designspace')
|
||||
ufo_dir = self.get_test_input('master_ufo')
|
||||
ttx_dir = self.get_test_input('master_ttx_interpolatable_ttf')
|
||||
|
||||
self.temp_dir()
|
||||
@ -202,9 +201,31 @@ class BuildTest(unittest.TestCase):
|
||||
|
||||
ds_copy = os.path.join(self.tempdir, 'BuildMain.designspace')
|
||||
shutil.copy2(ds_path, ds_copy)
|
||||
varLib_main([ds_copy])
|
||||
|
||||
# by default, varLib.main finds master TTFs inside a
|
||||
# 'master_ttf_interpolatable' subfolder in current working dir
|
||||
cwd = os.getcwd()
|
||||
os.chdir(self.tempdir)
|
||||
try:
|
||||
varLib_main([ds_copy])
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
varfont_path = os.path.splitext(ds_copy)[0] + '-VF' + suffix
|
||||
self.assertTrue(os.path.exists(varfont_path))
|
||||
|
||||
# try again passing an explicit --master-finder
|
||||
os.remove(varfont_path)
|
||||
finder = "%s/master_ttf_interpolatable/{stem}.ttf" % self.tempdir
|
||||
varLib_main([ds_copy, "--master-finder", finder])
|
||||
self.assertTrue(os.path.exists(varfont_path))
|
||||
|
||||
# and also with explicit -o output option
|
||||
os.remove(varfont_path)
|
||||
varfont_path = os.path.splitext(varfont_path)[0] + "-o" + suffix
|
||||
varLib_main([ds_copy, "-o", varfont_path, "--master-finder", finder])
|
||||
self.assertTrue(os.path.exists(varfont_path))
|
||||
|
||||
varfont = TTFont(varfont_path)
|
||||
tables = [table_tag for table_tag in varfont.keys() if table_tag != 'head']
|
||||
expected_ttx_path = self.get_test_output('BuildMain.ttx')
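The calls above exercise the new --master-finder template and the -o output option; a minimal standalone sketch (paths are placeholders, not part of this diff):

from fontTools.varLib import main as varLib_main

designspace = "BuildMain.designspace"            # placeholder path
finder = "master_ttf_interpolatable/{stem}.ttf"  # finder template as used in the test
varLib_main([designspace, "--master-finder", finder, "-o", "BuildMain-VF.ttf"])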
|
||||
|
@ -4,3 +4,4 @@ brotli==1.0.1; platform_python_implementation != "PyPy"
|
||||
brotlipy==0.7.0; platform_python_implementation == "PyPy"
|
||||
unicodedata2==10.0.0; python_version < '3.7' and platform_python_implementation != "PyPy"
|
||||
munkres==1.0.10
|
||||
zopfli==0.1.4
|
||||
|
@ -1,5 +1,5 @@
|
||||
[bumpversion]
|
||||
current_version = 3.25.1.dev0
|
||||
current_version = 3.26.1.dev0
|
||||
commit = True
|
||||
tag = False
|
||||
tag_name = {new_version}
|
||||
|