Merge branch 'fold-into-fontTools' into fonttools-ufolib
This commit is contained in:
commit
1f7155c432
9
Doc/source/ufoLib/converters.rst
Normal file
9
Doc/source/ufoLib/converters.rst
Normal file
@ -0,0 +1,9 @@
|
||||
.. highlight:: python
|
||||
|
||||
==========
|
||||
converters
|
||||
==========
|
||||
|
||||
.. automodule:: ufoLib.converters
|
||||
:inherited-members:
|
||||
:members:
|
9
Doc/source/ufoLib/filenames.rst
Normal file
9
Doc/source/ufoLib/filenames.rst
Normal file
@ -0,0 +1,9 @@
|
||||
.. highlight:: python
|
||||
|
||||
=========
|
||||
filenames
|
||||
=========
|
||||
|
||||
.. automodule:: ufoLib.filenames
|
||||
:inherited-members:
|
||||
:members:
|
9
Doc/source/ufoLib/glifLib.rst
Normal file
9
Doc/source/ufoLib/glifLib.rst
Normal file
@ -0,0 +1,9 @@
|
||||
.. highlight:: python
|
||||
|
||||
=======
|
||||
glifLib
|
||||
=======
|
||||
|
||||
.. automodule:: ufoLib.glifLib
|
||||
:inherited-members:
|
||||
:members:
|
9
Doc/source/ufoLib/pointPen.rst
Normal file
9
Doc/source/ufoLib/pointPen.rst
Normal file
@ -0,0 +1,9 @@
|
||||
.. highlight:: python
|
||||
|
||||
========
|
||||
pointPen
|
||||
========
|
||||
|
||||
.. automodule:: ufoLib.pointPen
|
||||
:inherited-members:
|
||||
:members:
|
9
Doc/source/ufoLib/ufoLib.rst
Normal file
9
Doc/source/ufoLib/ufoLib.rst
Normal file
@ -0,0 +1,9 @@
|
||||
.. highlight:: python
|
||||
|
||||
======
|
||||
ufoLib
|
||||
======
|
||||
|
||||
.. automodule:: ufoLib
|
||||
:inherited-members:
|
||||
:members:
|
2209
Lib/fontTools/ufoLib/__init__.py
Executable file
2209
Lib/fontTools/ufoLib/__init__.py
Executable file
File diff suppressed because it is too large
Load Diff
336
Lib/fontTools/ufoLib/converters.py
Normal file
336
Lib/fontTools/ufoLib/converters.py
Normal file
@ -0,0 +1,336 @@
|
||||
"""
|
||||
Conversion functions.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
|
||||
# adapted from the UFO spec
|
||||
|
||||
def convertUFO1OrUFO2KerningToUFO3Kerning(kerning, groups):
|
||||
# gather known kerning groups based on the prefixes
|
||||
firstReferencedGroups, secondReferencedGroups = findKnownKerningGroups(groups)
|
||||
# Make lists of groups referenced in kerning pairs.
|
||||
for first, seconds in list(kerning.items()):
|
||||
if first in groups:
|
||||
if not first.startswith("public.kern1."):
|
||||
firstReferencedGroups.add(first)
|
||||
for second in list(seconds.keys()):
|
||||
if second in groups:
|
||||
if not second.startswith("public.kern2."):
|
||||
secondReferencedGroups.add(second)
|
||||
# Create new names for these groups.
|
||||
firstRenamedGroups = {}
|
||||
for first in firstReferencedGroups:
|
||||
# Make a list of existing group names.
|
||||
existingGroupNames = list(groups.keys()) + list(firstRenamedGroups.keys())
|
||||
# Remove the old prefix from the name
|
||||
newName = first.replace("@MMK_L_", "")
|
||||
# Add the new prefix to the name.
|
||||
newName = "public.kern1." + newName
|
||||
# Make a unique group name.
|
||||
newName = makeUniqueGroupName(newName, existingGroupNames)
|
||||
# Store for use later.
|
||||
firstRenamedGroups[first] = newName
|
||||
secondRenamedGroups = {}
|
||||
for second in secondReferencedGroups:
|
||||
# Make a list of existing group names.
|
||||
existingGroupNames = list(groups.keys()) + list(secondRenamedGroups.keys())
|
||||
# Remove the old prefix from the name
|
||||
newName = second.replace("@MMK_R_", "")
|
||||
# Add the new prefix to the name.
|
||||
newName = "public.kern2." + newName
|
||||
# Make a unique group name.
|
||||
newName = makeUniqueGroupName(newName, existingGroupNames)
|
||||
# Store for use later.
|
||||
secondRenamedGroups[second] = newName
|
||||
# Populate the new group names into the kerning dictionary as needed.
|
||||
newKerning = {}
|
||||
for first, seconds in list(kerning.items()):
|
||||
first = firstRenamedGroups.get(first, first)
|
||||
newSeconds = {}
|
||||
for second, value in list(seconds.items()):
|
||||
second = secondRenamedGroups.get(second, second)
|
||||
newSeconds[second] = value
|
||||
newKerning[first] = newSeconds
|
||||
# Make copies of the referenced groups and store them
|
||||
# under the new names in the overall groups dictionary.
|
||||
allRenamedGroups = list(firstRenamedGroups.items())
|
||||
allRenamedGroups += list(secondRenamedGroups.items())
|
||||
for oldName, newName in allRenamedGroups:
|
||||
group = list(groups[oldName])
|
||||
groups[newName] = group
|
||||
# Return the kerning and the groups.
|
||||
return newKerning, groups, dict(side1=firstRenamedGroups, side2=secondRenamedGroups)
|
||||
|
||||
def findKnownKerningGroups(groups):
|
||||
"""
|
||||
This will find kerning groups with known prefixes.
|
||||
In some cases not all kerning groups will be referenced
|
||||
by the kerning pairs. The algorithm for locating groups
|
||||
in convertUFO1OrUFO2KerningToUFO3Kerning will miss these
|
||||
unreferenced groups. By scanning for known prefixes
|
||||
this function will catch all of the prefixed groups.
|
||||
|
||||
These are the prefixes and sides that are handled:
|
||||
@MMK_L_ - side 1
|
||||
@MMK_R_ - side 2
|
||||
|
||||
>>> testGroups = {
|
||||
... "@MMK_L_1" : None,
|
||||
... "@MMK_L_2" : None,
|
||||
... "@MMK_L_3" : None,
|
||||
... "@MMK_R_1" : None,
|
||||
... "@MMK_R_2" : None,
|
||||
... "@MMK_R_3" : None,
|
||||
... "@MMK_l_1" : None,
|
||||
... "@MMK_r_1" : None,
|
||||
... "@MMK_X_1" : None,
|
||||
... "foo" : None,
|
||||
... }
|
||||
>>> first, second = findKnownKerningGroups(testGroups)
|
||||
>>> sorted(first)
|
||||
['@MMK_L_1', '@MMK_L_2', '@MMK_L_3']
|
||||
>>> sorted(second)
|
||||
['@MMK_R_1', '@MMK_R_2', '@MMK_R_3']
|
||||
"""
|
||||
knownFirstGroupPrefixes = [
|
||||
"@MMK_L_"
|
||||
]
|
||||
knownSecondGroupPrefixes = [
|
||||
"@MMK_R_"
|
||||
]
|
||||
firstGroups = set()
|
||||
secondGroups = set()
|
||||
for groupName in list(groups.keys()):
|
||||
for firstPrefix in knownFirstGroupPrefixes:
|
||||
if groupName.startswith(firstPrefix):
|
||||
firstGroups.add(groupName)
|
||||
break
|
||||
for secondPrefix in knownSecondGroupPrefixes:
|
||||
if groupName.startswith(secondPrefix):
|
||||
secondGroups.add(groupName)
|
||||
break
|
||||
return firstGroups, secondGroups
|
||||
|
||||
|
||||
def makeUniqueGroupName(name, groupNames, counter=0):
|
||||
# Add a number to the name if the counter is higher than zero.
|
||||
newName = name
|
||||
if counter > 0:
|
||||
newName = "%s%d" % (newName, counter)
|
||||
# If the new name is in the existing group names, recurse.
|
||||
if newName in groupNames:
|
||||
return makeUniqueGroupName(name, groupNames, counter + 1)
|
||||
# Otherwise send back the new name.
|
||||
return newName
|
||||
|
||||
def test():
|
||||
"""
|
||||
No known prefixes.
|
||||
|
||||
>>> testKerning = {
|
||||
... "A" : {
|
||||
... "A" : 1,
|
||||
... "B" : 2,
|
||||
... "CGroup" : 3,
|
||||
... "DGroup" : 4
|
||||
... },
|
||||
... "BGroup" : {
|
||||
... "A" : 5,
|
||||
... "B" : 6,
|
||||
... "CGroup" : 7,
|
||||
... "DGroup" : 8
|
||||
... },
|
||||
... "CGroup" : {
|
||||
... "A" : 9,
|
||||
... "B" : 10,
|
||||
... "CGroup" : 11,
|
||||
... "DGroup" : 12
|
||||
... },
|
||||
... }
|
||||
>>> testGroups = {
|
||||
... "BGroup" : ["B"],
|
||||
... "CGroup" : ["C"],
|
||||
... "DGroup" : ["D"],
|
||||
... }
|
||||
>>> kerning, groups, maps = convertUFO1OrUFO2KerningToUFO3Kerning(
|
||||
... testKerning, testGroups)
|
||||
>>> expected = {
|
||||
... "A" : {
|
||||
... "A": 1,
|
||||
... "B": 2,
|
||||
... "public.kern2.CGroup": 3,
|
||||
... "public.kern2.DGroup": 4
|
||||
... },
|
||||
... "public.kern1.BGroup": {
|
||||
... "A": 5,
|
||||
... "B": 6,
|
||||
... "public.kern2.CGroup": 7,
|
||||
... "public.kern2.DGroup": 8
|
||||
... },
|
||||
... "public.kern1.CGroup": {
|
||||
... "A": 9,
|
||||
... "B": 10,
|
||||
... "public.kern2.CGroup": 11,
|
||||
... "public.kern2.DGroup": 12
|
||||
... }
|
||||
... }
|
||||
>>> kerning == expected
|
||||
True
|
||||
>>> expected = {
|
||||
... "BGroup": ["B"],
|
||||
... "CGroup": ["C"],
|
||||
... "DGroup": ["D"],
|
||||
... "public.kern1.BGroup": ["B"],
|
||||
... "public.kern1.CGroup": ["C"],
|
||||
... "public.kern2.CGroup": ["C"],
|
||||
... "public.kern2.DGroup": ["D"],
|
||||
... }
|
||||
>>> groups == expected
|
||||
True
|
||||
|
||||
Known prefixes.
|
||||
|
||||
>>> testKerning = {
|
||||
... "A" : {
|
||||
... "A" : 1,
|
||||
... "B" : 2,
|
||||
... "@MMK_R_CGroup" : 3,
|
||||
... "@MMK_R_DGroup" : 4
|
||||
... },
|
||||
... "@MMK_L_BGroup" : {
|
||||
... "A" : 5,
|
||||
... "B" : 6,
|
||||
... "@MMK_R_CGroup" : 7,
|
||||
... "@MMK_R_DGroup" : 8
|
||||
... },
|
||||
... "@MMK_L_CGroup" : {
|
||||
... "A" : 9,
|
||||
... "B" : 10,
|
||||
... "@MMK_R_CGroup" : 11,
|
||||
... "@MMK_R_DGroup" : 12
|
||||
... },
|
||||
... }
|
||||
>>> testGroups = {
|
||||
... "@MMK_L_BGroup" : ["B"],
|
||||
... "@MMK_L_CGroup" : ["C"],
|
||||
... "@MMK_L_XGroup" : ["X"],
|
||||
... "@MMK_R_CGroup" : ["C"],
|
||||
... "@MMK_R_DGroup" : ["D"],
|
||||
... "@MMK_R_XGroup" : ["X"],
|
||||
... }
|
||||
>>> kerning, groups, maps = convertUFO1OrUFO2KerningToUFO3Kerning(
|
||||
... testKerning, testGroups)
|
||||
>>> expected = {
|
||||
... "A" : {
|
||||
... "A": 1,
|
||||
... "B": 2,
|
||||
... "public.kern2.CGroup": 3,
|
||||
... "public.kern2.DGroup": 4
|
||||
... },
|
||||
... "public.kern1.BGroup": {
|
||||
... "A": 5,
|
||||
... "B": 6,
|
||||
... "public.kern2.CGroup": 7,
|
||||
... "public.kern2.DGroup": 8
|
||||
... },
|
||||
... "public.kern1.CGroup": {
|
||||
... "A": 9,
|
||||
... "B": 10,
|
||||
... "public.kern2.CGroup": 11,
|
||||
... "public.kern2.DGroup": 12
|
||||
... }
|
||||
... }
|
||||
>>> kerning == expected
|
||||
True
|
||||
>>> expected = {
|
||||
... "@MMK_L_BGroup": ["B"],
|
||||
... "@MMK_L_CGroup": ["C"],
|
||||
... "@MMK_L_XGroup": ["X"],
|
||||
... "@MMK_R_CGroup": ["C"],
|
||||
... "@MMK_R_DGroup": ["D"],
|
||||
... "@MMK_R_XGroup": ["X"],
|
||||
... "public.kern1.BGroup": ["B"],
|
||||
... "public.kern1.CGroup": ["C"],
|
||||
... "public.kern1.XGroup": ["X"],
|
||||
... "public.kern2.CGroup": ["C"],
|
||||
... "public.kern2.DGroup": ["D"],
|
||||
... "public.kern2.XGroup": ["X"],
|
||||
... }
|
||||
>>> groups == expected
|
||||
True
|
||||
|
||||
>>> from .validators import kerningValidator
|
||||
>>> kerningValidator(kerning)
|
||||
(True, None)
|
||||
|
||||
Mixture of known prefixes and groups without prefixes.
|
||||
|
||||
>>> testKerning = {
|
||||
... "A" : {
|
||||
... "A" : 1,
|
||||
... "B" : 2,
|
||||
... "@MMK_R_CGroup" : 3,
|
||||
... "DGroup" : 4
|
||||
... },
|
||||
... "BGroup" : {
|
||||
... "A" : 5,
|
||||
... "B" : 6,
|
||||
... "@MMK_R_CGroup" : 7,
|
||||
... "DGroup" : 8
|
||||
... },
|
||||
... "@MMK_L_CGroup" : {
|
||||
... "A" : 9,
|
||||
... "B" : 10,
|
||||
... "@MMK_R_CGroup" : 11,
|
||||
... "DGroup" : 12
|
||||
... },
|
||||
... }
|
||||
>>> testGroups = {
|
||||
... "BGroup" : ["B"],
|
||||
... "@MMK_L_CGroup" : ["C"],
|
||||
... "@MMK_R_CGroup" : ["C"],
|
||||
... "DGroup" : ["D"],
|
||||
... }
|
||||
>>> kerning, groups, maps = convertUFO1OrUFO2KerningToUFO3Kerning(
|
||||
... testKerning, testGroups)
|
||||
>>> expected = {
|
||||
... "A" : {
|
||||
... "A": 1,
|
||||
... "B": 2,
|
||||
... "public.kern2.CGroup": 3,
|
||||
... "public.kern2.DGroup": 4
|
||||
... },
|
||||
... "public.kern1.BGroup": {
|
||||
... "A": 5,
|
||||
... "B": 6,
|
||||
... "public.kern2.CGroup": 7,
|
||||
... "public.kern2.DGroup": 8
|
||||
... },
|
||||
... "public.kern1.CGroup": {
|
||||
... "A": 9,
|
||||
... "B": 10,
|
||||
... "public.kern2.CGroup": 11,
|
||||
... "public.kern2.DGroup": 12
|
||||
... }
|
||||
... }
|
||||
>>> kerning == expected
|
||||
True
|
||||
>>> expected = {
|
||||
... "BGroup": ["B"],
|
||||
... "@MMK_L_CGroup": ["C"],
|
||||
... "@MMK_R_CGroup": ["C"],
|
||||
... "DGroup": ["D"],
|
||||
... "public.kern1.BGroup": ["B"],
|
||||
... "public.kern1.CGroup": ["C"],
|
||||
... "public.kern2.CGroup": ["C"],
|
||||
... "public.kern2.DGroup": ["D"],
|
||||
... }
|
||||
>>> groups == expected
|
||||
True
|
||||
"""
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
9
Lib/fontTools/ufoLib/errors.py
Normal file
9
Lib/fontTools/ufoLib/errors.py
Normal file
@ -0,0 +1,9 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
|
||||
class UFOLibError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class GlifLibError(UFOLibError):
|
||||
pass
|
485
Lib/fontTools/ufoLib/etree.py
Normal file
485
Lib/fontTools/ufoLib/etree.py
Normal file
@ -0,0 +1,485 @@
|
||||
"""Shim module exporting the same ElementTree API for lxml and
|
||||
xml.etree backends.
|
||||
|
||||
When lxml is installed, it is automatically preferred over the built-in
|
||||
xml.etree module.
|
||||
On Python 2.7, the cElementTree module is preferred over the pure-python
|
||||
ElementTree module.
|
||||
|
||||
Besides exporting a unified interface, this also defines extra functions
|
||||
or subclasses built-in ElementTree classes to add features that are
|
||||
only availble in lxml, like OrderedDict for attributes, pretty_print and
|
||||
iterwalk.
|
||||
"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from fontTools.misc.py23 import basestring, unicode, tounicode, open
|
||||
|
||||
# we use a custom XML declaration for backward compatibility with older
|
||||
# ufoLib versions which would write it using double quotes.
|
||||
# https://github.com/unified-font-object/ufoLib/issues/158
|
||||
XML_DECLARATION = """<?xml version="1.0" encoding="%s"?>"""
|
||||
|
||||
__all__ = [
|
||||
# public symbols
|
||||
"Comment",
|
||||
"dump",
|
||||
"Element",
|
||||
"ElementTree",
|
||||
"fromstring",
|
||||
"fromstringlist",
|
||||
"iselement",
|
||||
"iterparse",
|
||||
"parse",
|
||||
"ParseError",
|
||||
"PI",
|
||||
"ProcessingInstruction",
|
||||
"QName",
|
||||
"SubElement",
|
||||
"tostring",
|
||||
"tostringlist",
|
||||
"TreeBuilder",
|
||||
"XML",
|
||||
"XMLParser",
|
||||
"XMLTreeBuilder",
|
||||
"register_namespace",
|
||||
]
|
||||
|
||||
try:
|
||||
from lxml.etree import *
|
||||
|
||||
_have_lxml = True
|
||||
except ImportError:
|
||||
try:
|
||||
from xml.etree.cElementTree import *
|
||||
|
||||
# the cElementTree version of XML function doesn't support
|
||||
# the optional 'parser' keyword argument
|
||||
from xml.etree.ElementTree import XML
|
||||
except ImportError: # pragma: no cover
|
||||
from xml.etree.ElementTree import *
|
||||
_have_lxml = False
|
||||
|
||||
import sys
|
||||
|
||||
# dict is always ordered in python >= 3.6 and on pypy
|
||||
PY36 = sys.version_info >= (3, 6)
|
||||
try:
|
||||
import __pypy__
|
||||
except ImportError:
|
||||
__pypy__ = None
|
||||
_dict_is_ordered = bool(PY36 or __pypy__)
|
||||
del PY36, __pypy__
|
||||
|
||||
if _dict_is_ordered:
|
||||
_Attrib = dict
|
||||
else:
|
||||
from collections import OrderedDict as _Attrib
|
||||
|
||||
if isinstance(Element, type):
|
||||
_Element = Element
|
||||
else:
|
||||
# in py27, cElementTree.Element cannot be subclassed, so
|
||||
# we need to import the pure-python class
|
||||
from xml.etree.ElementTree import Element as _Element
|
||||
|
||||
class Element(_Element):
|
||||
"""Element subclass that keeps the order of attributes."""
|
||||
|
||||
def __init__(self, tag, attrib=_Attrib(), **extra):
|
||||
super(Element, self).__init__(tag)
|
||||
self.attrib = _Attrib()
|
||||
if attrib:
|
||||
self.attrib.update(attrib)
|
||||
if extra:
|
||||
self.attrib.update(extra)
|
||||
|
||||
def SubElement(parent, tag, attrib=_Attrib(), **extra):
|
||||
"""Must override SubElement as well otherwise _elementtree.SubElement
|
||||
fails if 'parent' is a subclass of Element object.
|
||||
"""
|
||||
element = parent.__class__(tag, attrib, **extra)
|
||||
parent.append(element)
|
||||
return element
|
||||
|
||||
def _iterwalk(element, events, tag):
|
||||
include = tag is None or element.tag == tag
|
||||
if include and "start" in events:
|
||||
yield ("start", element)
|
||||
for e in element:
|
||||
for item in _iterwalk(e, events, tag):
|
||||
yield item
|
||||
if include:
|
||||
yield ("end", element)
|
||||
|
||||
def iterwalk(element_or_tree, events=("end",), tag=None):
|
||||
"""A tree walker that generates events from an existing tree as
|
||||
if it was parsing XML data with iterparse().
|
||||
Drop-in replacement for lxml.etree.iterwalk.
|
||||
"""
|
||||
if iselement(element_or_tree):
|
||||
element = element_or_tree
|
||||
else:
|
||||
element = element_or_tree.getroot()
|
||||
if tag == "*":
|
||||
tag = None
|
||||
for item in _iterwalk(element, events, tag):
|
||||
yield item
|
||||
|
||||
_ElementTree = ElementTree
|
||||
|
||||
class ElementTree(_ElementTree):
|
||||
"""ElementTree subclass that adds 'pretty_print' and 'doctype'
|
||||
arguments to the 'write' method.
|
||||
Currently these are only supported for the default XML serialization
|
||||
'method', and not also for "html" or "text", for these are delegated
|
||||
to the base class.
|
||||
"""
|
||||
|
||||
def write(
|
||||
self,
|
||||
file_or_filename,
|
||||
encoding=None,
|
||||
xml_declaration=False,
|
||||
method=None,
|
||||
doctype=None,
|
||||
pretty_print=False,
|
||||
):
|
||||
if method and method != "xml":
|
||||
# delegate to super-class
|
||||
super(ElementTree, self).write(
|
||||
file_or_filename,
|
||||
encoding=encoding,
|
||||
xml_declaration=xml_declaration,
|
||||
method=method,
|
||||
)
|
||||
return
|
||||
|
||||
if encoding is unicode or (
|
||||
encoding is not None and encoding.lower() == "unicode"
|
||||
):
|
||||
if xml_declaration:
|
||||
raise ValueError(
|
||||
"Serialisation to unicode must not request an XML declaration"
|
||||
)
|
||||
write_declaration = False
|
||||
encoding = "unicode"
|
||||
elif xml_declaration is None:
|
||||
# by default, write an XML declaration only for non-standard encodings
|
||||
write_declaration = encoding is not None and encoding.upper() not in (
|
||||
"ASCII",
|
||||
"UTF-8",
|
||||
"UTF8",
|
||||
"US-ASCII",
|
||||
)
|
||||
else:
|
||||
write_declaration = xml_declaration
|
||||
|
||||
if encoding is None:
|
||||
encoding = "ASCII"
|
||||
|
||||
if pretty_print:
|
||||
# NOTE this will modify the tree in-place
|
||||
_indent(self._root)
|
||||
|
||||
with _get_writer(file_or_filename, encoding) as write:
|
||||
if write_declaration:
|
||||
write(XML_DECLARATION % encoding.upper())
|
||||
if pretty_print:
|
||||
write("\n")
|
||||
if doctype:
|
||||
write(_tounicode(doctype))
|
||||
if pretty_print:
|
||||
write("\n")
|
||||
|
||||
qnames, namespaces = _namespaces(self._root)
|
||||
_serialize_xml(write, self._root, qnames, namespaces)
|
||||
|
||||
import io
|
||||
|
||||
def tostring(
|
||||
element,
|
||||
encoding=None,
|
||||
xml_declaration=None,
|
||||
method=None,
|
||||
doctype=None,
|
||||
pretty_print=False,
|
||||
):
|
||||
"""Custom 'tostring' function that uses our ElementTree subclass, with
|
||||
pretty_print support.
|
||||
"""
|
||||
stream = io.StringIO() if encoding == "unicode" else io.BytesIO()
|
||||
ElementTree(element).write(
|
||||
stream,
|
||||
encoding=encoding,
|
||||
xml_declaration=xml_declaration,
|
||||
method=method,
|
||||
doctype=doctype,
|
||||
pretty_print=pretty_print,
|
||||
)
|
||||
return stream.getvalue()
|
||||
|
||||
# serialization support
|
||||
|
||||
import re
|
||||
|
||||
# Valid XML strings can include any Unicode character, excluding control
|
||||
# characters, the surrogate blocks, FFFE, and FFFF:
|
||||
# Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
|
||||
# Here we reversed the pattern to match only the invalid characters.
|
||||
# For the 'narrow' python builds supporting only UCS-2, which represent
|
||||
# characters beyond BMP as UTF-16 surrogate pairs, we need to pass through
|
||||
# the surrogate block. I haven't found a more elegant solution...
|
||||
UCS2 = sys.maxunicode < 0x10FFFF
|
||||
if UCS2:
|
||||
_invalid_xml_string = re.compile(
|
||||
"[\u0000-\u0008\u000B-\u000C\u000E-\u001F\uFFFE-\uFFFF]"
|
||||
)
|
||||
else:
|
||||
_invalid_xml_string = re.compile(
|
||||
"[\u0000-\u0008\u000B-\u000C\u000E-\u001F\uD800-\uDFFF\uFFFE-\uFFFF]"
|
||||
)
|
||||
|
||||
def _tounicode(s):
|
||||
"""Test if a string is valid user input and decode it to unicode string
|
||||
using ASCII encoding if it's a bytes string.
|
||||
Reject all bytes/unicode input that contains non-XML characters.
|
||||
Reject all bytes input that contains non-ASCII characters.
|
||||
"""
|
||||
try:
|
||||
s = tounicode(s, encoding="ascii", errors="strict")
|
||||
except UnicodeDecodeError:
|
||||
raise ValueError(
|
||||
"Bytes strings can only contain ASCII characters. "
|
||||
"Use unicode strings for non-ASCII characters.")
|
||||
except AttributeError:
|
||||
_raise_serialization_error(s)
|
||||
if s and _invalid_xml_string.search(s):
|
||||
raise ValueError(
|
||||
"All strings must be XML compatible: Unicode or ASCII, "
|
||||
"no NULL bytes or control characters"
|
||||
)
|
||||
return s
|
||||
|
||||
import contextlib
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _get_writer(file_or_filename, encoding):
|
||||
# returns text write method and release all resources after using
|
||||
try:
|
||||
write = file_or_filename.write
|
||||
except AttributeError:
|
||||
# file_or_filename is a file name
|
||||
f = open(
|
||||
file_or_filename,
|
||||
"w",
|
||||
encoding="utf-8" if encoding == "unicode" else encoding,
|
||||
errors="xmlcharrefreplace",
|
||||
)
|
||||
with f:
|
||||
yield f.write
|
||||
else:
|
||||
# file_or_filename is a file-like object
|
||||
# encoding determines if it is a text or binary writer
|
||||
if encoding == "unicode":
|
||||
# use a text writer as is
|
||||
yield write
|
||||
else:
|
||||
# wrap a binary writer with TextIOWrapper
|
||||
detach_buffer = False
|
||||
if isinstance(file_or_filename, io.BufferedIOBase):
|
||||
buf = file_or_filename
|
||||
elif isinstance(file_or_filename, io.RawIOBase):
|
||||
buf = io.BufferedWriter(file_or_filename)
|
||||
detach_buffer = True
|
||||
else:
|
||||
# This is to handle passed objects that aren't in the
|
||||
# IOBase hierarchy, but just have a write method
|
||||
buf = io.BufferedIOBase()
|
||||
buf.writable = lambda: True
|
||||
buf.write = write
|
||||
try:
|
||||
# TextIOWrapper uses this methods to determine
|
||||
# if BOM (for UTF-16, etc) should be added
|
||||
buf.seekable = file_or_filename.seekable
|
||||
buf.tell = file_or_filename.tell
|
||||
except AttributeError:
|
||||
pass
|
||||
wrapper = io.TextIOWrapper(
|
||||
buf,
|
||||
encoding=encoding,
|
||||
errors="xmlcharrefreplace",
|
||||
newline="\n",
|
||||
)
|
||||
try:
|
||||
yield wrapper.write
|
||||
finally:
|
||||
# Keep the original file open when the TextIOWrapper and
|
||||
# the BufferedWriter are destroyed
|
||||
wrapper.detach()
|
||||
if detach_buffer:
|
||||
buf.detach()
|
||||
|
||||
from xml.etree.ElementTree import _namespace_map
|
||||
|
||||
def _namespaces(elem):
|
||||
# identify namespaces used in this tree
|
||||
|
||||
# maps qnames to *encoded* prefix:local names
|
||||
qnames = {None: None}
|
||||
|
||||
# maps uri:s to prefixes
|
||||
namespaces = {}
|
||||
|
||||
def add_qname(qname):
|
||||
# calculate serialized qname representation
|
||||
try:
|
||||
qname = _tounicode(qname)
|
||||
if qname[:1] == "{":
|
||||
uri, tag = qname[1:].rsplit("}", 1)
|
||||
prefix = namespaces.get(uri)
|
||||
if prefix is None:
|
||||
prefix = _namespace_map.get(uri)
|
||||
if prefix is None:
|
||||
prefix = "ns%d" % len(namespaces)
|
||||
else:
|
||||
prefix = _tounicode(prefix)
|
||||
if prefix != "xml":
|
||||
namespaces[uri] = prefix
|
||||
if prefix:
|
||||
qnames[qname] = "%s:%s" % (prefix, tag)
|
||||
else:
|
||||
qnames[qname] = tag # default element
|
||||
else:
|
||||
qnames[qname] = qname
|
||||
except TypeError:
|
||||
_raise_serialization_error(qname)
|
||||
|
||||
# populate qname and namespaces table
|
||||
for elem in elem.iter():
|
||||
tag = elem.tag
|
||||
if isinstance(tag, QName):
|
||||
if tag.text not in qnames:
|
||||
add_qname(tag.text)
|
||||
elif isinstance(tag, basestring):
|
||||
if tag not in qnames:
|
||||
add_qname(tag)
|
||||
elif tag is not None and tag is not Comment and tag is not PI:
|
||||
_raise_serialization_error(tag)
|
||||
for key, value in elem.items():
|
||||
if isinstance(key, QName):
|
||||
key = key.text
|
||||
if key not in qnames:
|
||||
add_qname(key)
|
||||
if isinstance(value, QName) and value.text not in qnames:
|
||||
add_qname(value.text)
|
||||
text = elem.text
|
||||
if isinstance(text, QName) and text.text not in qnames:
|
||||
add_qname(text.text)
|
||||
return qnames, namespaces
|
||||
|
||||
def _serialize_xml(write, elem, qnames, namespaces, **kwargs):
|
||||
tag = elem.tag
|
||||
text = elem.text
|
||||
if tag is Comment:
|
||||
write("<!--%s-->" % _tounicode(text))
|
||||
elif tag is ProcessingInstruction:
|
||||
write("<?%s?>" % _tounicode(text))
|
||||
else:
|
||||
tag = qnames[_tounicode(tag) if tag is not None else None]
|
||||
if tag is None:
|
||||
if text:
|
||||
write(_escape_cdata(text))
|
||||
for e in elem:
|
||||
_serialize_xml(write, e, qnames, None)
|
||||
else:
|
||||
write("<" + tag)
|
||||
if namespaces:
|
||||
for uri, prefix in sorted(
|
||||
namespaces.items(), key=lambda x: x[1]
|
||||
): # sort on prefix
|
||||
if prefix:
|
||||
prefix = ":" + prefix
|
||||
write(' xmlns%s="%s"' % (prefix, _escape_attrib(uri)))
|
||||
attrs = elem.attrib
|
||||
if attrs:
|
||||
# try to keep existing attrib order
|
||||
if len(attrs) <= 1 or type(attrs) is _Attrib:
|
||||
items = attrs.items()
|
||||
else:
|
||||
# if plain dict, use lexical order
|
||||
items = sorted(attrs.items())
|
||||
for k, v in items:
|
||||
if isinstance(k, QName):
|
||||
k = _tounicode(k.text)
|
||||
else:
|
||||
k = _tounicode(k)
|
||||
if isinstance(v, QName):
|
||||
v = qnames[_tounicode(v.text)]
|
||||
else:
|
||||
v = _escape_attrib(v)
|
||||
write(' %s="%s"' % (qnames[k], v))
|
||||
if text is not None or len(elem):
|
||||
write(">")
|
||||
if text:
|
||||
write(_escape_cdata(text))
|
||||
for e in elem:
|
||||
_serialize_xml(write, e, qnames, None)
|
||||
write("</" + tag + ">")
|
||||
else:
|
||||
write("/>")
|
||||
if elem.tail:
|
||||
write(_escape_cdata(elem.tail))
|
||||
|
||||
def _raise_serialization_error(text):
|
||||
raise TypeError(
|
||||
"cannot serialize %r (type %s)" % (text, type(text).__name__)
|
||||
)
|
||||
|
||||
def _escape_cdata(text):
|
||||
# escape character data
|
||||
try:
|
||||
text = _tounicode(text)
|
||||
# it's worth avoiding do-nothing calls for short strings
|
||||
if "&" in text:
|
||||
text = text.replace("&", "&")
|
||||
if "<" in text:
|
||||
text = text.replace("<", "<")
|
||||
if ">" in text:
|
||||
text = text.replace(">", ">")
|
||||
return text
|
||||
except (TypeError, AttributeError):
|
||||
_raise_serialization_error(text)
|
||||
|
||||
def _escape_attrib(text):
|
||||
# escape attribute value
|
||||
try:
|
||||
text = _tounicode(text)
|
||||
if "&" in text:
|
||||
text = text.replace("&", "&")
|
||||
if "<" in text:
|
||||
text = text.replace("<", "<")
|
||||
if ">" in text:
|
||||
text = text.replace(">", ">")
|
||||
if '"' in text:
|
||||
text = text.replace('"', """)
|
||||
if "\n" in text:
|
||||
text = text.replace("\n", " ")
|
||||
return text
|
||||
except (TypeError, AttributeError):
|
||||
_raise_serialization_error(text)
|
||||
|
||||
def _indent(elem, level=0):
|
||||
# From http://effbot.org/zone/element-lib.htm#prettyprint
|
||||
i = "\n" + level * " "
|
||||
if len(elem):
|
||||
if not elem.text or not elem.text.strip():
|
||||
elem.text = i + " "
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
for elem in elem:
|
||||
_indent(elem, level + 1)
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
else:
|
||||
if level and (not elem.tail or not elem.tail.strip()):
|
||||
elem.tail = i
|
214
Lib/fontTools/ufoLib/filenames.py
Normal file
214
Lib/fontTools/ufoLib/filenames.py
Normal file
@ -0,0 +1,214 @@
|
||||
"""
|
||||
User name to file name conversion.
|
||||
This was taken form the UFO 3 spec.
|
||||
"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from fontTools.misc.py23 import basestring, unicode
|
||||
|
||||
|
||||
illegalCharacters = "\" * + / : < > ? [ \ ] | \0".split(" ")
|
||||
illegalCharacters += [chr(i) for i in range(1, 32)]
|
||||
illegalCharacters += [chr(0x7F)]
|
||||
reservedFileNames = "CON PRN AUX CLOCK$ NUL A:-Z: COM1".lower().split(" ")
|
||||
reservedFileNames += "LPT1 LPT2 LPT3 COM2 COM3 COM4".lower().split(" ")
|
||||
maxFileNameLength = 255
|
||||
|
||||
|
||||
class NameTranslationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def userNameToFileName(userName, existing=[], prefix="", suffix=""):
|
||||
"""
|
||||
existing should be a case-insensitive list
|
||||
of all existing file names.
|
||||
|
||||
>>> userNameToFileName("a") == "a"
|
||||
True
|
||||
>>> userNameToFileName("A") == "A_"
|
||||
True
|
||||
>>> userNameToFileName("AE") == "A_E_"
|
||||
True
|
||||
>>> userNameToFileName("Ae") == "A_e"
|
||||
True
|
||||
>>> userNameToFileName("ae") == "ae"
|
||||
True
|
||||
>>> userNameToFileName("aE") == "aE_"
|
||||
True
|
||||
>>> userNameToFileName("a.alt") == "a.alt"
|
||||
True
|
||||
>>> userNameToFileName("A.alt") == "A_.alt"
|
||||
True
|
||||
>>> userNameToFileName("A.Alt") == "A_.A_lt"
|
||||
True
|
||||
>>> userNameToFileName("A.aLt") == "A_.aL_t"
|
||||
True
|
||||
>>> userNameToFileName(u"A.alT") == "A_.alT_"
|
||||
True
|
||||
>>> userNameToFileName("T_H") == "T__H_"
|
||||
True
|
||||
>>> userNameToFileName("T_h") == "T__h"
|
||||
True
|
||||
>>> userNameToFileName("t_h") == "t_h"
|
||||
True
|
||||
>>> userNameToFileName("F_F_I") == "F__F__I_"
|
||||
True
|
||||
>>> userNameToFileName("f_f_i") == "f_f_i"
|
||||
True
|
||||
>>> userNameToFileName("Aacute_V.swash") == "A_acute_V_.swash"
|
||||
True
|
||||
>>> userNameToFileName(".notdef") == "_notdef"
|
||||
True
|
||||
>>> userNameToFileName("con") == "_con"
|
||||
True
|
||||
>>> userNameToFileName("CON") == "C_O_N_"
|
||||
True
|
||||
>>> userNameToFileName("con.alt") == "_con.alt"
|
||||
True
|
||||
>>> userNameToFileName("alt.con") == "alt._con"
|
||||
True
|
||||
"""
|
||||
# the incoming name must be a unicode string
|
||||
if not isinstance(userName, unicode):
|
||||
raise ValueError("The value for userName must be a unicode string.")
|
||||
# establish the prefix and suffix lengths
|
||||
prefixLength = len(prefix)
|
||||
suffixLength = len(suffix)
|
||||
# replace an initial period with an _
|
||||
# if no prefix is to be added
|
||||
if not prefix and userName[0] == ".":
|
||||
userName = "_" + userName[1:]
|
||||
# filter the user name
|
||||
filteredUserName = []
|
||||
for character in userName:
|
||||
# replace illegal characters with _
|
||||
if character in illegalCharacters:
|
||||
character = "_"
|
||||
# add _ to all non-lower characters
|
||||
elif character != character.lower():
|
||||
character += "_"
|
||||
filteredUserName.append(character)
|
||||
userName = "".join(filteredUserName)
|
||||
# clip to 255
|
||||
sliceLength = maxFileNameLength - prefixLength - suffixLength
|
||||
userName = userName[:sliceLength]
|
||||
# test for illegal files names
|
||||
parts = []
|
||||
for part in userName.split("."):
|
||||
if part.lower() in reservedFileNames:
|
||||
part = "_" + part
|
||||
parts.append(part)
|
||||
userName = ".".join(parts)
|
||||
# test for clash
|
||||
fullName = prefix + userName + suffix
|
||||
if fullName.lower() in existing:
|
||||
fullName = handleClash1(userName, existing, prefix, suffix)
|
||||
# finished
|
||||
return fullName
|
||||
|
||||
def handleClash1(userName, existing=[], prefix="", suffix=""):
|
||||
"""
|
||||
existing should be a case-insensitive list
|
||||
of all existing file names.
|
||||
|
||||
>>> prefix = ("0" * 5) + "."
|
||||
>>> suffix = "." + ("0" * 10)
|
||||
>>> existing = ["a" * 5]
|
||||
|
||||
>>> e = list(existing)
|
||||
>>> handleClash1(userName="A" * 5, existing=e,
|
||||
... prefix=prefix, suffix=suffix) == (
|
||||
... '00000.AAAAA000000000000001.0000000000')
|
||||
True
|
||||
|
||||
>>> e = list(existing)
|
||||
>>> e.append(prefix + "aaaaa" + "1".zfill(15) + suffix)
|
||||
>>> handleClash1(userName="A" * 5, existing=e,
|
||||
... prefix=prefix, suffix=suffix) == (
|
||||
... '00000.AAAAA000000000000002.0000000000')
|
||||
True
|
||||
|
||||
>>> e = list(existing)
|
||||
>>> e.append(prefix + "AAAAA" + "2".zfill(15) + suffix)
|
||||
>>> handleClash1(userName="A" * 5, existing=e,
|
||||
... prefix=prefix, suffix=suffix) == (
|
||||
... '00000.AAAAA000000000000001.0000000000')
|
||||
True
|
||||
"""
|
||||
# if the prefix length + user name length + suffix length + 15 is at
|
||||
# or past the maximum length, silce 15 characters off of the user name
|
||||
prefixLength = len(prefix)
|
||||
suffixLength = len(suffix)
|
||||
if prefixLength + len(userName) + suffixLength + 15 > maxFileNameLength:
|
||||
l = (prefixLength + len(userName) + suffixLength + 15)
|
||||
sliceLength = maxFileNameLength - l
|
||||
userName = userName[:sliceLength]
|
||||
finalName = None
|
||||
# try to add numbers to create a unique name
|
||||
counter = 1
|
||||
while finalName is None:
|
||||
name = userName + str(counter).zfill(15)
|
||||
fullName = prefix + name + suffix
|
||||
if fullName.lower() not in existing:
|
||||
finalName = fullName
|
||||
break
|
||||
else:
|
||||
counter += 1
|
||||
if counter >= 999999999999999:
|
||||
break
|
||||
# if there is a clash, go to the next fallback
|
||||
if finalName is None:
|
||||
finalName = handleClash2(existing, prefix, suffix)
|
||||
# finished
|
||||
return finalName
|
||||
|
||||
def handleClash2(existing=[], prefix="", suffix=""):
|
||||
"""
|
||||
existing should be a case-insensitive list
|
||||
of all existing file names.
|
||||
|
||||
>>> prefix = ("0" * 5) + "."
|
||||
>>> suffix = "." + ("0" * 10)
|
||||
>>> existing = [prefix + str(i) + suffix for i in range(100)]
|
||||
|
||||
>>> e = list(existing)
|
||||
>>> handleClash2(existing=e, prefix=prefix, suffix=suffix) == (
|
||||
... '00000.100.0000000000')
|
||||
True
|
||||
|
||||
>>> e = list(existing)
|
||||
>>> e.remove(prefix + "1" + suffix)
|
||||
>>> handleClash2(existing=e, prefix=prefix, suffix=suffix) == (
|
||||
... '00000.1.0000000000')
|
||||
True
|
||||
|
||||
>>> e = list(existing)
|
||||
>>> e.remove(prefix + "2" + suffix)
|
||||
>>> handleClash2(existing=e, prefix=prefix, suffix=suffix) == (
|
||||
... '00000.2.0000000000')
|
||||
True
|
||||
"""
|
||||
# calculate the longest possible string
|
||||
maxLength = maxFileNameLength - len(prefix) - len(suffix)
|
||||
maxValue = int("9" * maxLength)
|
||||
# try to find a number
|
||||
finalName = None
|
||||
counter = 1
|
||||
while finalName is None:
|
||||
fullName = prefix + str(counter) + suffix
|
||||
if fullName.lower() not in existing:
|
||||
finalName = fullName
|
||||
break
|
||||
else:
|
||||
counter += 1
|
||||
if counter >= maxValue:
|
||||
break
|
||||
# raise an error if nothing has been found
|
||||
if finalName is None:
|
||||
raise NameTranslationError("No unique name could be found.")
|
||||
# finished
|
||||
return finalName
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
1625
Lib/fontTools/ufoLib/glifLib.py
Executable file
1625
Lib/fontTools/ufoLib/glifLib.py
Executable file
File diff suppressed because it is too large
Load Diff
90
Lib/fontTools/ufoLib/kerning.py
Normal file
90
Lib/fontTools/ufoLib/kerning.py
Normal file
@ -0,0 +1,90 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
|
||||
def lookupKerningValue(pair, kerning, groups, fallback=0, glyphToFirstGroup=None, glyphToSecondGroup=None):
|
||||
"""
|
||||
Note: This expects kerning to be a flat dictionary
|
||||
of kerning pairs, not the nested structure used
|
||||
in kerning.plist.
|
||||
|
||||
>>> groups = {
|
||||
... "public.kern1.O" : ["O", "D", "Q"],
|
||||
... "public.kern2.E" : ["E", "F"]
|
||||
... }
|
||||
>>> kerning = {
|
||||
... ("public.kern1.O", "public.kern2.E") : -100,
|
||||
... ("public.kern1.O", "F") : -200,
|
||||
... ("D", "F") : -300
|
||||
... }
|
||||
>>> lookupKerningValue(("D", "F"), kerning, groups)
|
||||
-300
|
||||
>>> lookupKerningValue(("O", "F"), kerning, groups)
|
||||
-200
|
||||
>>> lookupKerningValue(("O", "E"), kerning, groups)
|
||||
-100
|
||||
>>> lookupKerningValue(("O", "O"), kerning, groups)
|
||||
0
|
||||
>>> lookupKerningValue(("E", "E"), kerning, groups)
|
||||
0
|
||||
>>> lookupKerningValue(("E", "O"), kerning, groups)
|
||||
0
|
||||
>>> lookupKerningValue(("X", "X"), kerning, groups)
|
||||
0
|
||||
>>> lookupKerningValue(("public.kern1.O", "public.kern2.E"),
|
||||
... kerning, groups)
|
||||
-100
|
||||
>>> lookupKerningValue(("public.kern1.O", "F"), kerning, groups)
|
||||
-200
|
||||
>>> lookupKerningValue(("O", "public.kern2.E"), kerning, groups)
|
||||
-100
|
||||
>>> lookupKerningValue(("public.kern1.X", "public.kern2.X"), kerning, groups)
|
||||
0
|
||||
"""
|
||||
# quickly check to see if the pair is in the kerning dictionary
|
||||
if pair in kerning:
|
||||
return kerning[pair]
|
||||
# create glyph to group mapping
|
||||
if glyphToFirstGroup is not None:
|
||||
assert glyphToSecondGroup is not None
|
||||
if glyphToSecondGroup is not None:
|
||||
assert glyphToFirstGroup is not None
|
||||
if glyphToFirstGroup is None:
|
||||
glyphToFirstGroup = {}
|
||||
glyphToSecondGroup = {}
|
||||
for group, groupMembers in groups.items():
|
||||
if group.startswith("public.kern1."):
|
||||
for glyph in groupMembers:
|
||||
glyphToFirstGroup[glyph] = group
|
||||
elif group.startswith("public.kern2."):
|
||||
for glyph in groupMembers:
|
||||
glyphToSecondGroup[glyph] = group
|
||||
# get group names and make sure first and second are glyph names
|
||||
first, second = pair
|
||||
firstGroup = secondGroup = None
|
||||
if first.startswith("public.kern1."):
|
||||
firstGroup = first
|
||||
first = None
|
||||
else:
|
||||
firstGroup = glyphToFirstGroup.get(first)
|
||||
if second.startswith("public.kern2."):
|
||||
secondGroup = second
|
||||
second = None
|
||||
else:
|
||||
secondGroup = glyphToSecondGroup.get(second)
|
||||
# make an ordered list of pairs to look up
|
||||
pairs = [
|
||||
(first, second),
|
||||
(first, secondGroup),
|
||||
(firstGroup, second),
|
||||
(firstGroup, secondGroup)
|
||||
]
|
||||
# look up the pairs and return any matches
|
||||
for pair in pairs:
|
||||
if pair in kerning:
|
||||
return kerning[pair]
|
||||
# use the fallback value
|
||||
return fallback
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
doctest.testmod()
|
584
Lib/fontTools/ufoLib/plistlib.py
Normal file
584
Lib/fontTools/ufoLib/plistlib.py
Normal file
@ -0,0 +1,584 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import sys
|
||||
import re
|
||||
from io import BytesIO
|
||||
from datetime import datetime
|
||||
from base64 import b64encode, b64decode
|
||||
from numbers import Integral
|
||||
|
||||
try:
|
||||
from functools import singledispatch
|
||||
except ImportError:
|
||||
try:
|
||||
from singledispatch import singledispatch
|
||||
except ImportError:
|
||||
singledispatch = None
|
||||
|
||||
from ufoLib import etree
|
||||
|
||||
from fontTools.misc.py23 import (
|
||||
unicode,
|
||||
basestring,
|
||||
tounicode,
|
||||
tobytes,
|
||||
SimpleNamespace,
|
||||
range,
|
||||
)
|
||||
|
||||
# On python3, by default we deserialize <data> elements as bytes, whereas on
|
||||
# python2 we deserialize <data> elements as plistlib.Data objects, in order
|
||||
# to distinguish them from the built-in str type (which is bytes on python2).
|
||||
# Similarly, by default on python3 we serialize bytes as <data> elements;
|
||||
# however, on python2 we serialize bytes as <string> elements (they must
|
||||
# only contain ASCII characters in this case).
|
||||
# You can pass use_builtin_types=[True|False] to load/dump etc. functions to
|
||||
# enforce the same treatment of bytes across python 2 and 3.
|
||||
# NOTE that unicode type always maps to <string> element, and plistlib.Data
|
||||
# always maps to <data> element, regardless of use_builtin_types.
|
||||
PY3 = sys.version_info[0] > 2
|
||||
if PY3:
|
||||
USE_BUILTIN_TYPES = True
|
||||
else:
|
||||
USE_BUILTIN_TYPES = False
|
||||
|
||||
# we use a custom XML declaration for backward compatibility with older
|
||||
# ufoLib versions which would write it using double quotes.
|
||||
# https://github.com/unified-font-object/ufoLib/issues/158
|
||||
XML_DECLARATION = b"""<?xml version="1.0" encoding="UTF-8"?>"""
|
||||
|
||||
PLIST_DOCTYPE = (
|
||||
b'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" '
|
||||
b'"http://www.apple.com/DTDs/PropertyList-1.0.dtd">'
|
||||
)
|
||||
|
||||
# Date should conform to a subset of ISO 8601:
|
||||
# YYYY '-' MM '-' DD 'T' HH ':' MM ':' SS 'Z'
|
||||
_date_parser = re.compile(
|
||||
r"(?P<year>\d\d\d\d)"
|
||||
r"(?:-(?P<month>\d\d)"
|
||||
r"(?:-(?P<day>\d\d)"
|
||||
r"(?:T(?P<hour>\d\d)"
|
||||
r"(?::(?P<minute>\d\d)"
|
||||
r"(?::(?P<second>\d\d))"
|
||||
r"?)?)?)?)?Z",
|
||||
getattr(re, "ASCII", 0), # py3-only
|
||||
)
|
||||
|
||||
|
||||
def _date_from_string(s):
|
||||
order = ("year", "month", "day", "hour", "minute", "second")
|
||||
gd = _date_parser.match(s).groupdict()
|
||||
lst = []
|
||||
for key in order:
|
||||
val = gd[key]
|
||||
if val is None:
|
||||
break
|
||||
lst.append(int(val))
|
||||
return datetime(*lst)
|
||||
|
||||
|
||||
def _date_to_string(d):
|
||||
return "%04d-%02d-%02dT%02d:%02d:%02dZ" % (
|
||||
d.year,
|
||||
d.month,
|
||||
d.day,
|
||||
d.hour,
|
||||
d.minute,
|
||||
d.second,
|
||||
)
|
||||
|
||||
|
||||
def _encode_base64(data, maxlinelength=76, indent_level=1):
|
||||
data = b64encode(data)
|
||||
if data and maxlinelength:
|
||||
# split into multiple lines right-justified to 'maxlinelength' chars
|
||||
indent = b"\n" + b" " * indent_level
|
||||
max_length = max(16, maxlinelength - len(indent))
|
||||
chunks = []
|
||||
for i in range(0, len(data), max_length):
|
||||
chunks.append(indent)
|
||||
chunks.append(data[i : i + max_length])
|
||||
chunks.append(indent)
|
||||
data = b"".join(chunks)
|
||||
return data
|
||||
|
||||
|
||||
class Data:
|
||||
"""Wrapper for binary data returned in place of the built-in bytes type
|
||||
when loading property list data with use_builtin_types=False.
|
||||
"""
|
||||
|
||||
def __init__(self, data):
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError("Expected bytes, found %s" % type(data).__name__)
|
||||
self.data = data
|
||||
|
||||
@classmethod
|
||||
def fromBase64(cls, data):
|
||||
return cls(b64decode(data))
|
||||
|
||||
def asBase64(self, maxlinelength=76, indent_level=1):
|
||||
return _encode_base64(
|
||||
self.data, maxlinelength=maxlinelength, indent_level=indent_level
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, self.__class__):
|
||||
return self.data == other.data
|
||||
elif isinstance(other, bytes):
|
||||
return self.data == other
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%s)" % (self.__class__.__name__, repr(self.data))
|
||||
|
||||
|
||||
class PlistTarget(object):
|
||||
""" Event handler using the ElementTree Target API that can be
|
||||
passed to a XMLParser to produce property list objects from XML.
|
||||
It is based on the CPython plistlib module's _PlistParser class,
|
||||
but does not use the expat parser.
|
||||
|
||||
>>> from ufoLib import etree
|
||||
>>> parser = etree.XMLParser(target=PlistTarget())
|
||||
>>> result = etree.XML(
|
||||
... "<dict>"
|
||||
... " <key>something</key>"
|
||||
... " <string>blah</string>"
|
||||
... "</dict>",
|
||||
... parser=parser)
|
||||
>>> result == {"something": "blah"}
|
||||
True
|
||||
|
||||
Links:
|
||||
https://github.com/python/cpython/blob/master/Lib/plistlib.py
|
||||
http://lxml.de/parsing.html#the-target-parser-interface
|
||||
"""
|
||||
|
||||
def __init__(self, use_builtin_types=None, dict_type=dict):
|
||||
self.stack = []
|
||||
self.current_key = None
|
||||
self.root = None
|
||||
if use_builtin_types is None:
|
||||
self._use_builtin_types = USE_BUILTIN_TYPES
|
||||
else:
|
||||
self._use_builtin_types = use_builtin_types
|
||||
self._dict_type = dict_type
|
||||
|
||||
def start(self, tag, attrib):
|
||||
self._data = []
|
||||
handler = _TARGET_START_HANDLERS.get(tag)
|
||||
if handler is not None:
|
||||
handler(self)
|
||||
|
||||
def end(self, tag):
|
||||
handler = _TARGET_END_HANDLERS.get(tag)
|
||||
if handler is not None:
|
||||
handler(self)
|
||||
|
||||
def data(self, data):
|
||||
self._data.append(data)
|
||||
|
||||
def close(self):
|
||||
return self.root
|
||||
|
||||
# helpers
|
||||
|
||||
def add_object(self, value):
|
||||
if self.current_key is not None:
|
||||
if not isinstance(self.stack[-1], type({})):
|
||||
raise ValueError("unexpected element: %r" % self.stack[-1])
|
||||
self.stack[-1][self.current_key] = value
|
||||
self.current_key = None
|
||||
elif not self.stack:
|
||||
# this is the root object
|
||||
self.root = value
|
||||
else:
|
||||
if not isinstance(self.stack[-1], type([])):
|
||||
raise ValueError("unexpected element: %r" % self.stack[-1])
|
||||
self.stack[-1].append(value)
|
||||
|
||||
def get_data(self):
|
||||
data = "".join(self._data)
|
||||
self._data = []
|
||||
return data
|
||||
|
||||
|
||||
# event handlers
|
||||
|
||||
|
||||
def start_dict(self):
|
||||
d = self._dict_type()
|
||||
self.add_object(d)
|
||||
self.stack.append(d)
|
||||
|
||||
|
||||
def end_dict(self):
|
||||
if self.current_key:
|
||||
raise ValueError("missing value for key '%s'" % self.current_key)
|
||||
self.stack.pop()
|
||||
|
||||
|
||||
def end_key(self):
|
||||
if self.current_key or not isinstance(self.stack[-1], type({})):
|
||||
raise ValueError("unexpected key")
|
||||
self.current_key = self.get_data()
|
||||
|
||||
|
||||
def start_array(self):
|
||||
a = []
|
||||
self.add_object(a)
|
||||
self.stack.append(a)
|
||||
|
||||
|
||||
def end_array(self):
|
||||
self.stack.pop()
|
||||
|
||||
|
||||
def end_true(self):
|
||||
self.add_object(True)
|
||||
|
||||
|
||||
def end_false(self):
|
||||
self.add_object(False)
|
||||
|
||||
|
||||
def end_integer(self):
|
||||
self.add_object(int(self.get_data()))
|
||||
|
||||
|
||||
def end_real(self):
|
||||
self.add_object(float(self.get_data()))
|
||||
|
||||
|
||||
def end_string(self):
|
||||
self.add_object(self.get_data())
|
||||
|
||||
|
||||
def end_data(self):
|
||||
if self._use_builtin_types:
|
||||
self.add_object(b64decode(self.get_data()))
|
||||
else:
|
||||
self.add_object(Data.fromBase64(self.get_data()))
|
||||
|
||||
|
||||
def end_date(self):
|
||||
self.add_object(_date_from_string(self.get_data()))
|
||||
|
||||
|
||||
_TARGET_START_HANDLERS = {"dict": start_dict, "array": start_array}
|
||||
|
||||
_TARGET_END_HANDLERS = {
|
||||
"dict": end_dict,
|
||||
"array": end_array,
|
||||
"key": end_key,
|
||||
"true": end_true,
|
||||
"false": end_false,
|
||||
"integer": end_integer,
|
||||
"real": end_real,
|
||||
"string": end_string,
|
||||
"data": end_data,
|
||||
"date": end_date,
|
||||
}
|
||||
|
||||
|
||||
# functions to build element tree from plist data
|
||||
|
||||
|
||||
def _string_element(value, ctx):
|
||||
el = etree.Element("string")
|
||||
el.text = value
|
||||
return el
|
||||
|
||||
|
||||
def _bool_element(value, ctx):
|
||||
if value:
|
||||
return etree.Element("true")
|
||||
else:
|
||||
return etree.Element("false")
|
||||
|
||||
|
||||
def _integer_element(value, ctx):
|
||||
if -1 << 63 <= value < 1 << 64:
|
||||
el = etree.Element("integer")
|
||||
el.text = "%d" % value
|
||||
return el
|
||||
else:
|
||||
raise OverflowError(value)
|
||||
|
||||
|
||||
def _real_element(value, ctx):
|
||||
el = etree.Element("real")
|
||||
el.text = repr(value)
|
||||
return el
|
||||
|
||||
|
||||
def _dict_element(d, ctx):
|
||||
el = etree.Element("dict")
|
||||
items = d.items()
|
||||
if ctx.sort_keys:
|
||||
items = sorted(items)
|
||||
ctx.indent_level += 1
|
||||
for key, value in items:
|
||||
if not isinstance(key, basestring):
|
||||
if ctx.skipkeys:
|
||||
continue
|
||||
raise TypeError("keys must be strings")
|
||||
k = etree.SubElement(el, "key")
|
||||
k.text = tounicode(key, "utf-8")
|
||||
el.append(_make_element(value, ctx))
|
||||
ctx.indent_level -= 1
|
||||
return el
|
||||
|
||||
|
||||
def _array_element(array, ctx):
|
||||
el = etree.Element("array")
|
||||
if len(array) == 0:
|
||||
return el
|
||||
ctx.indent_level += 1
|
||||
for value in array:
|
||||
el.append(_make_element(value, ctx))
|
||||
ctx.indent_level -= 1
|
||||
return el
|
||||
|
||||
|
||||
def _date_element(date, ctx):
|
||||
el = etree.Element("date")
|
||||
el.text = _date_to_string(date)
|
||||
return el
|
||||
|
||||
|
||||
def _data_element(data, ctx):
|
||||
el = etree.Element("data")
|
||||
el.text = _encode_base64(
|
||||
data,
|
||||
maxlinelength=(76 if ctx.pretty_print else None),
|
||||
indent_level=ctx.indent_level,
|
||||
)
|
||||
return el
|
||||
|
||||
|
||||
def _string_or_data_element(raw_bytes, ctx):
|
||||
if ctx.use_builtin_types:
|
||||
return _data_element(raw_bytes, ctx)
|
||||
else:
|
||||
try:
|
||||
string = raw_bytes.decode(encoding="ascii", errors="strict")
|
||||
except UnicodeDecodeError:
|
||||
raise ValueError(
|
||||
"invalid non-ASCII bytes; use unicode string instead: %r"
|
||||
% raw_bytes
|
||||
)
|
||||
return _string_element(string, ctx)
|
||||
|
||||
|
||||
# if singledispatch is available, we use a generic '_make_element' function
|
||||
# and register overloaded implementations that are run based on the type of
|
||||
# the first argument
|
||||
|
||||
if singledispatch is not None:
|
||||
|
||||
@singledispatch
|
||||
def _make_element(value, ctx):
|
||||
raise TypeError("unsupported type: %s" % type(value))
|
||||
|
||||
_make_element.register(unicode)(_string_element)
|
||||
_make_element.register(bool)(_bool_element)
|
||||
_make_element.register(Integral)(_integer_element)
|
||||
_make_element.register(float)(_real_element)
|
||||
_make_element.register(dict)(_dict_element)
|
||||
_make_element.register(list)(_array_element)
|
||||
_make_element.register(tuple)(_array_element)
|
||||
_make_element.register(datetime)(_date_element)
|
||||
_make_element.register(bytes)(_string_or_data_element)
|
||||
_make_element.register(bytearray)(_data_element)
|
||||
_make_element.register(Data)(lambda v, ctx: _data_element(v.data, ctx))
|
||||
|
||||
else:
|
||||
# otherwise we use a long switch-like if statement
|
||||
|
||||
def _make_element(value, ctx):
|
||||
if isinstance(value, unicode):
|
||||
return _string_element(value, ctx)
|
||||
elif isinstance(value, bool):
|
||||
return _bool_element(value, ctx)
|
||||
elif isinstance(value, Integral):
|
||||
return _integer_element(value, ctx)
|
||||
elif isinstance(value, float):
|
||||
return _real_element(value, ctx)
|
||||
elif isinstance(value, dict):
|
||||
return _dict_element(value, ctx)
|
||||
elif isinstance(value, (list, tuple)):
|
||||
return _array_element(value, ctx)
|
||||
elif isinstance(value, datetime):
|
||||
return _date_element(value, ctx)
|
||||
elif isinstance(value, bytes):
|
||||
return _string_or_data_element(value, ctx)
|
||||
elif isinstance(value, bytearray):
|
||||
return _data_element(value, ctx)
|
||||
elif isinstance(value, Data):
|
||||
return _data_element(value.data, ctx)
|
||||
|
||||
|
||||
# Public functions to create element tree from plist-compatible python
|
||||
# data structures and viceversa, for use when (de)serializing GLIF xml.
|
||||
|
||||
|
||||
def totree(
|
||||
value,
|
||||
sort_keys=True,
|
||||
skipkeys=False,
|
||||
use_builtin_types=None,
|
||||
pretty_print=True,
|
||||
indent_level=1,
|
||||
):
|
||||
if use_builtin_types is None:
|
||||
use_builtin_types = USE_BUILTIN_TYPES
|
||||
else:
|
||||
use_builtin_types = use_builtin_types
|
||||
context = SimpleNamespace(
|
||||
sort_keys=sort_keys,
|
||||
skipkeys=skipkeys,
|
||||
use_builtin_types=use_builtin_types,
|
||||
pretty_print=pretty_print,
|
||||
indent_level=indent_level,
|
||||
)
|
||||
return _make_element(value, context)
|
||||
|
||||
|
||||
def fromtree(tree, use_builtin_types=None, dict_type=dict):
|
||||
target = PlistTarget(
|
||||
use_builtin_types=use_builtin_types, dict_type=dict_type
|
||||
)
|
||||
for action, element in etree.iterwalk(tree, events=("start", "end")):
|
||||
if action == "start":
|
||||
target.start(element.tag, element.attrib)
|
||||
elif action == "end":
|
||||
# if there are no children, parse the leaf's data
|
||||
if not len(element):
|
||||
# always pass str, not None
|
||||
target.data(element.text or "")
|
||||
target.end(element.tag)
|
||||
return target.close()
|
||||
|
||||
|
||||
# python3 plistlib API
|
||||
|
||||
|
||||
def load(fp, use_builtin_types=None, dict_type=dict):
|
||||
if not hasattr(fp, "read"):
|
||||
raise AttributeError(
|
||||
"'%s' object has no attribute 'read'" % type(fp).__name__
|
||||
)
|
||||
target = PlistTarget(
|
||||
use_builtin_types=use_builtin_types, dict_type=dict_type
|
||||
)
|
||||
parser = etree.XMLParser(target=target)
|
||||
result = etree.parse(fp, parser=parser)
|
||||
# lxml returns the target object directly, while ElementTree wraps
|
||||
# it as the root of an ElementTree object
|
||||
try:
|
||||
return result.getroot()
|
||||
except AttributeError:
|
||||
return result
|
||||
|
||||
|
||||
def loads(value, use_builtin_types=None, dict_type=dict):
|
||||
fp = BytesIO(value)
|
||||
return load(fp, use_builtin_types=use_builtin_types, dict_type=dict_type)
|
||||
|
||||
|
||||
def dump(
|
||||
value,
|
||||
fp,
|
||||
sort_keys=True,
|
||||
skipkeys=False,
|
||||
use_builtin_types=None,
|
||||
pretty_print=True,
|
||||
):
|
||||
if not hasattr(fp, "write"):
|
||||
raise AttributeError(
|
||||
"'%s' object has no attribute 'write'" % type(fp).__name__
|
||||
)
|
||||
root = etree.Element("plist", version="1.0")
|
||||
el = totree(
|
||||
value,
|
||||
sort_keys=sort_keys,
|
||||
skipkeys=skipkeys,
|
||||
use_builtin_types=use_builtin_types,
|
||||
pretty_print=pretty_print,
|
||||
)
|
||||
root.append(el)
|
||||
tree = etree.ElementTree(root)
|
||||
# we write the doctype ourselves instead of using the 'doctype' argument
|
||||
# of 'write' method, becuse lxml will force adding a '\n' even when
|
||||
# pretty_print is False.
|
||||
if pretty_print:
|
||||
header = b"\n".join((XML_DECLARATION, PLIST_DOCTYPE, b""))
|
||||
else:
|
||||
header = XML_DECLARATION + PLIST_DOCTYPE
|
||||
fp.write(header)
|
||||
tree.write(
|
||||
fp, encoding="utf-8", pretty_print=pretty_print, xml_declaration=False
|
||||
)
|
||||
|
||||
|
||||
def dumps(
|
||||
value,
|
||||
sort_keys=True,
|
||||
skipkeys=False,
|
||||
use_builtin_types=None,
|
||||
pretty_print=True,
|
||||
):
|
||||
fp = BytesIO()
|
||||
dump(
|
||||
value,
|
||||
fp,
|
||||
sort_keys=sort_keys,
|
||||
skipkeys=skipkeys,
|
||||
use_builtin_types=use_builtin_types,
|
||||
pretty_print=pretty_print,
|
||||
)
|
||||
return fp.getvalue()
|
||||
|
||||
|
||||
# The following functions were part of the old py2-like ufoLib.plistlib API.
|
||||
# They are kept only for backward compatiblity.
|
||||
from .utils import deprecated
|
||||
|
||||
|
||||
@deprecated("Use 'load' instead")
|
||||
def readPlist(path_or_file):
|
||||
did_open = False
|
||||
if isinstance(path_or_file, basestring):
|
||||
path_or_file = open(path_or_file, "rb")
|
||||
did_open = True
|
||||
try:
|
||||
return load(path_or_file, use_builtin_types=False)
|
||||
finally:
|
||||
if did_open:
|
||||
path_or_file.close()
|
||||
|
||||
|
||||
@deprecated("Use 'dump' instead")
|
||||
def writePlist(value, path_or_file):
|
||||
did_open = False
|
||||
if isinstance(path_or_file, basestring):
|
||||
path_or_file = open(path_or_file, "wb")
|
||||
did_open = True
|
||||
try:
|
||||
dump(value, path_or_file, use_builtin_types=False)
|
||||
finally:
|
||||
if did_open:
|
||||
path_or_file.close()
|
||||
|
||||
|
||||
@deprecated("Use 'loads' instead")
|
||||
def readPlistFromString(data):
|
||||
return loads(tobytes(data, encoding="utf-8"), use_builtin_types=False)
|
||||
|
||||
|
||||
@deprecated("Use 'dumps' instead")
|
||||
def writePlistToString(value):
|
||||
return dumps(value, use_builtin_types=False)
|
407
Lib/fontTools/ufoLib/pointPen.py
Normal file
407
Lib/fontTools/ufoLib/pointPen.py
Normal file
@ -0,0 +1,407 @@
|
||||
"""
|
||||
=========
|
||||
PointPens
|
||||
=========
|
||||
|
||||
Where **SegmentPens** have an intuitive approach to drawing
|
||||
(if you're familiar with postscript anyway), the **PointPen**
|
||||
is geared towards accessing all the data in the contours of
|
||||
the glyph. A PointPen has a very simple interface, it just
|
||||
steps through all the points in a call from glyph.drawPoints().
|
||||
This allows the caller to provide more data for each point.
|
||||
For instance, whether or not a point is smooth, and its name.
|
||||
"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from fontTools.pens.basePen import AbstractPen
|
||||
import math
|
||||
|
||||
__all__ = ["AbstractPointPen", "BasePointToSegmentPen", "PointToSegmentPen",
|
||||
"SegmentToPointPen"]
|
||||
|
||||
|
||||
class AbstractPointPen(object):
|
||||
"""
|
||||
Baseclass for all PointPens.
|
||||
"""
|
||||
|
||||
def beginPath(self, identifier=None, **kwargs):
|
||||
"""Start a new sub path."""
|
||||
raise NotImplementedError
|
||||
|
||||
def endPath(self):
|
||||
"""End the current sub path."""
|
||||
raise NotImplementedError
|
||||
|
||||
def addPoint(self, pt, segmentType=None, smooth=False, name=None,
|
||||
identifier=None, **kwargs):
|
||||
"""Add a point to the current sub path."""
|
||||
raise NotImplementedError
|
||||
|
||||
def addComponent(self, baseGlyphName, transformation, identifier=None,
|
||||
**kwargs):
|
||||
"""Add a sub glyph."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
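# A minimal sketch of a concrete PointPen (illustration only): it simply
# records every callback it receives. The trailing 'glyph.drawPoints(pen)'
# call assumes a glyph object that supports drawPoints(), as described in
# the module docstring above.
#
#   class RecordingPointPen(AbstractPointPen):
#       def __init__(self):
#           self.value = []
#       def beginPath(self, identifier=None, **kwargs):
#           self.value.append(("beginPath", identifier))
#       def endPath(self):
#           self.value.append(("endPath",))
#       def addPoint(self, pt, segmentType=None, smooth=False, name=None,
#                    identifier=None, **kwargs):
#           self.value.append(("addPoint", pt, segmentType, smooth, name))
#       def addComponent(self, baseGlyphName, transformation,
#                        identifier=None, **kwargs):
#           self.value.append(("addComponent", baseGlyphName, transformation))
#
#   pen = RecordingPointPen()
#   glyph.drawPoints(pen)  # 'glyph' is an assumed object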
||||
class BasePointToSegmentPen(AbstractPointPen):
|
||||
"""
|
||||
Base class for retrieving the outline in a segment-oriented
|
||||
way. The PointPen protocol is simple yet also a little tricky,
|
||||
so when you need an outline presented as segments but you have
|
||||
it as points, do use this base implementation as it properly takes
|
||||
care of all the edge cases.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.currentPath = None
|
||||
|
||||
def beginPath(self, **kwargs):
|
||||
assert self.currentPath is None
|
||||
self.currentPath = []
|
||||
|
||||
def _flushContour(self, segments):
|
||||
"""Override this method.
|
||||
|
||||
It will be called for each non-empty sub path with a list
|
||||
of segments: the 'segments' argument.
|
||||
|
||||
The segments list contains tuples of length 2:
|
||||
(segmentType, points)
|
||||
|
||||
segmentType is one of "move", "line", "curve" or "qcurve".
|
||||
"move" may only occur as the first segment, and it signifies
|
||||
an OPEN path. A CLOSED path does NOT start with a "move", in
|
||||
fact it will not contain a "move" at ALL.
|
||||
|
||||
The 'points' field in the 2-tuple is a list of point info
|
||||
tuples. The list has 1 or more items, a point tuple has
|
||||
four items:
|
||||
(point, smooth, name, kwargs)
|
||||
'point' is an (x, y) coordinate pair.
|
||||
|
||||
For a closed path, the initial moveTo point is defined as
|
||||
the last point of the last segment.
|
||||
|
||||
The 'points' list of "move" and "line" segments always contains
|
||||
exactly one point tuple.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def endPath(self):
|
||||
assert self.currentPath is not None
|
||||
points = self.currentPath
|
||||
self.currentPath = None
|
||||
if not points:
|
||||
return
|
||||
if len(points) == 1:
|
||||
# Not much more we can do than output a single move segment.
|
||||
pt, segmentType, smooth, name, kwargs = points[0]
|
||||
segments = [("move", [(pt, smooth, name, kwargs)])]
|
||||
self._flushContour(segments)
|
||||
return
|
||||
segments = []
|
||||
if points[0][1] == "move":
|
||||
# It's an open contour, insert a "move" segment for the first
|
||||
# point and remove that first point from the point list.
|
||||
pt, segmentType, smooth, name, kwargs = points[0]
|
||||
segments.append(("move", [(pt, smooth, name, kwargs)]))
|
||||
points.pop(0)
|
||||
else:
|
||||
# It's a closed contour. Locate the first on-curve point, and
|
||||
# rotate the point list so that it _ends_ with an on-curve
|
||||
# point.
|
||||
firstOnCurve = None
|
||||
for i in range(len(points)):
|
||||
segmentType = points[i][1]
|
||||
if segmentType is not None:
|
||||
firstOnCurve = i
|
||||
break
|
||||
if firstOnCurve is None:
|
||||
# Special case for quadratics: a contour with no on-curve
|
||||
# points. Add a "None" point. (See also the Pen protocol's
|
||||
# qCurveTo() method and fontTools.pens.basePen.py.)
|
||||
points.append((None, "qcurve", None, None, None))
|
||||
else:
|
||||
points = points[firstOnCurve+1:] + points[:firstOnCurve+1]
|
||||
|
||||
currentSegment = []
|
||||
for pt, segmentType, smooth, name, kwargs in points:
|
||||
currentSegment.append((pt, smooth, name, kwargs))
|
||||
if segmentType is None:
|
||||
continue
|
||||
segments.append((segmentType, currentSegment))
|
||||
currentSegment = []
|
||||
|
||||
self._flushContour(segments)
|
||||
|
||||
def addPoint(self, pt, segmentType=None, smooth=False, name=None, **kwargs):
|
||||
self.currentPath.append((pt, segmentType, smooth, name, kwargs))
|
||||
|
||||
|
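# A sketch of what _flushContour() receives (illustration only). Feeding a
# closed triangle to a BasePointToSegmentPen subclass like this:
#
#   pen.beginPath()
#   pen.addPoint((0, 0), segmentType="line")
#   pen.addPoint((100, 0), segmentType="line")
#   pen.addPoint((50, 100), segmentType="line")
#   pen.endPath()
#
# rotates the point list so it ends on an on-curve point and calls
# _flushContour() with three one-point "line" segments:
#
#   [("line", [((100, 0), False, None, {})]),
#    ("line", [((50, 100), False, None, {})]),
#    ("line", [((0, 0), False, None, {})])]
#
# There is no "move" segment: its absence is what marks the contour as
# closed, and the last point doubles as the implied start point.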
||||
class PointToSegmentPen(BasePointToSegmentPen):
|
||||
"""
|
||||
Adapter class that converts the PointPen protocol to the
|
||||
(Segment)Pen protocol.
|
||||
"""
|
||||
|
||||
def __init__(self, segmentPen, outputImpliedClosingLine=False):
|
||||
BasePointToSegmentPen.__init__(self)
|
||||
self.pen = segmentPen
|
||||
self.outputImpliedClosingLine = outputImpliedClosingLine
|
||||
|
||||
def _flushContour(self, segments):
|
||||
assert len(segments) >= 1
|
||||
pen = self.pen
|
||||
if segments[0][0] == "move":
|
||||
# It's an open path.
|
||||
closed = False
|
||||
points = segments[0][1]
|
||||
assert len(points) == 1, "illegal move segment point count: %d" % len(points)
|
||||
movePt, smooth, name, kwargs = points[0]
|
||||
del segments[0]
|
||||
else:
|
||||
# It's a closed path, do a moveTo to the last
|
||||
# point of the last segment.
|
||||
closed = True
|
||||
segmentType, points = segments[-1]
|
||||
movePt, smooth, name, kwargs = points[-1]
|
||||
if movePt is None:
|
||||
# quad special case: a contour with no on-curve points contains
|
||||
# one "qcurve" segment that ends with a point that's None. We
|
||||
# must not output a moveTo() in that case.
|
||||
pass
|
||||
else:
|
||||
pen.moveTo(movePt)
|
||||
outputImpliedClosingLine = self.outputImpliedClosingLine
|
||||
nSegments = len(segments)
|
||||
for i in range(nSegments):
|
||||
segmentType, points = segments[i]
|
||||
points = [pt for pt, smooth, name, kwargs in points]
|
||||
if segmentType == "line":
|
||||
assert len(points) == 1, "illegal line segment point count: %d" % len(points)
|
||||
pt = points[0]
|
||||
if i + 1 != nSegments or outputImpliedClosingLine or not closed:
|
||||
pen.lineTo(pt)
|
||||
elif segmentType == "curve":
|
||||
pen.curveTo(*points)
|
||||
elif segmentType == "qcurve":
|
||||
pen.qCurveTo(*points)
|
||||
else:
|
||||
assert 0, "illegal segmentType: %s" % segmentType
|
||||
if closed:
|
||||
pen.closePath()
|
||||
else:
|
||||
pen.endPath()
|
||||
|
||||
def addComponent(self, glyphName, transform, **kwargs):
|
||||
self.pen.addComponent(glyphName, transform)
|
||||
|
||||
|
||||
class SegmentToPointPen(AbstractPen):
|
||||
"""
|
||||
Adapter class that converts the (Segment)Pen protocol to the
|
||||
PointPen protocol.
|
||||
"""
|
||||
|
||||
def __init__(self, pointPen, guessSmooth=True):
|
||||
if guessSmooth:
|
||||
self.pen = GuessSmoothPointPen(pointPen)
|
||||
else:
|
||||
self.pen = pointPen
|
||||
self.contour = None
|
||||
|
||||
def _flushContour(self):
|
||||
pen = self.pen
|
||||
pen.beginPath()
|
||||
for pt, segmentType in self.contour:
|
||||
pen.addPoint(pt, segmentType=segmentType)
|
||||
pen.endPath()
|
||||
|
||||
def moveTo(self, pt):
|
||||
self.contour = []
|
||||
self.contour.append((pt, "move"))
|
||||
|
||||
def lineTo(self, pt):
|
||||
self.contour.append((pt, "line"))
|
||||
|
||||
def curveTo(self, *pts):
|
||||
for pt in pts[:-1]:
|
||||
self.contour.append((pt, None))
|
||||
self.contour.append((pts[-1], "curve"))
|
||||
|
||||
def qCurveTo(self, *pts):
|
||||
if pts[-1] is None:
|
||||
self.contour = []
|
||||
for pt in pts[:-1]:
|
||||
self.contour.append((pt, None))
|
||||
if pts[-1] is not None:
|
||||
self.contour.append((pts[-1], "qcurve"))
|
||||
|
||||
def closePath(self):
|
||||
if len(self.contour) > 1 and self.contour[0][0] == self.contour[-1][0]:
|
||||
self.contour[0] = self.contour[-1]
|
||||
del self.contour[-1]
|
||||
else:
|
||||
# There's an implied line at the end, replace "move" with "line"
|
||||
# for the first point
|
||||
pt, tp = self.contour[0]
|
||||
if tp == "move":
|
||||
self.contour[0] = pt, "line"
|
||||
self._flushContour()
|
||||
self.contour = None
|
||||
|
||||
def endPath(self):
|
||||
self._flushContour()
|
||||
self.contour = None
|
||||
|
||||
def addComponent(self, glyphName, transform):
|
||||
assert self.contour is None
|
||||
self.pen.addComponent(glyphName, transform)
|
||||
|
||||
|
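# A minimal sketch of the two adapters used with client objects
# (illustration only): 'glyph' and 'segmentPen' are assumed objects, a
# glyph supporting drawPoints()/draw() and any pen implementing the
# SegmentPen protocol.
#
#   # point protocol -> segment protocol
#   glyph.drawPoints(PointToSegmentPen(segmentPen))
#
#   # segment protocol -> point protocol
#   pointPen = ...  # any AbstractPointPen implementation
#   glyph.draw(SegmentToPointPen(pointPen))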
||||
class GuessSmoothPointPen(AbstractPointPen):
|
||||
"""
|
||||
Filtering PointPen that tries to determine whether an on-curve point
|
||||
should be "smooth", ie. that it's a "tangent" point or a "curve" point.
|
||||
"""
|
||||
|
||||
def __init__(self, outPen):
|
||||
self._outPen = outPen
|
||||
self._points = None
|
||||
|
||||
def _flushContour(self):
|
||||
points = self._points
|
||||
nPoints = len(points)
|
||||
if not nPoints:
|
||||
return
|
||||
if points[0][1] == "move":
|
||||
# Open path.
|
||||
indices = range(1, nPoints - 1)
|
||||
elif nPoints > 1:
|
||||
# Closed path. To avoid having to mod the contour index, we
|
||||
# simply abuse Python's negative index feature, and start at -1
|
||||
indices = range(-1, nPoints - 1)
|
||||
else:
|
||||
# closed path containing 1 point (!), ignore.
|
||||
indices = []
|
||||
for i in indices:
|
||||
pt, segmentType, dummy, name, kwargs = points[i]
|
||||
if segmentType is None:
|
||||
continue
|
||||
prev = i - 1
|
||||
next = i + 1
|
||||
if points[prev][1] is not None and points[next][1] is not None:
|
||||
continue
|
||||
# At least one of our neighbors is an off-curve point
|
||||
pt = points[i][0]
|
||||
prevPt = points[prev][0]
|
||||
nextPt = points[next][0]
|
||||
if pt != prevPt and pt != nextPt:
|
||||
dx1, dy1 = pt[0] - prevPt[0], pt[1] - prevPt[1]
|
||||
dx2, dy2 = nextPt[0] - pt[0], nextPt[1] - pt[1]
|
||||
a1 = math.atan2(dx1, dy1)
|
||||
a2 = math.atan2(dx2, dy2)
|
||||
if abs(a1 - a2) < 0.05:
|
||||
points[i] = pt, segmentType, True, name, kwargs
|
||||
|
||||
for pt, segmentType, smooth, name, kwargs in points:
|
||||
self._outPen.addPoint(pt, segmentType, smooth, name, **kwargs)
|
||||
|
||||
def beginPath(self):
|
||||
assert self._points is None
|
||||
self._points = []
|
||||
self._outPen.beginPath()
|
||||
|
||||
def endPath(self):
|
||||
self._flushContour()
|
||||
self._outPen.endPath()
|
||||
self._points = None
|
||||
|
||||
def addPoint(self, pt, segmentType=None, smooth=False, name=None, **kwargs):
|
||||
self._points.append((pt, segmentType, False, name, kwargs))
|
||||
|
||||
def addComponent(self, glyphName, transformation):
|
||||
assert self._points is None
|
||||
self._outPen.addComponent(glyphName, transformation)
|
||||
|
||||
|
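# A worked example of the smoothness test above (illustration only). At an
# on-curve point with at least one off-curve neighbour, the pen compares
# the direction of the incoming and outgoing vectors and flags the point
# smooth when the angles differ by less than 0.05 radians (about 2.9
# degrees):
#
#   prevPt, pt, nextPt = (0, 0), (100, 100), (200, 201)
#   a1 = math.atan2(100, 100)   # ~0.7854
#   a2 = math.atan2(100, 101)   # ~0.7805 -> difference ~0.005 < 0.05: smooth
#
#   prevPt, pt, nextPt = (0, 0), (100, 100), (200, 150)
#   a2 = math.atan2(100, 50)    # ~1.1071 -> difference ~0.322: not smooth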
||||
class ReverseContourPointPen(AbstractPointPen):
|
||||
"""
|
||||
This is a PointPen that passes outline data to another PointPen, but
|
||||
reversing the winding direction of all contours. Components are simply
|
||||
passed through unchanged.
|
||||
|
||||
Closed contours are reversed in such a way that the first point remains
|
||||
the first point.
|
||||
"""
|
||||
|
||||
def __init__(self, outputPointPen):
|
||||
self.pen = outputPointPen
|
||||
# a place to store the points for the current sub path
|
||||
self.currentContour = None
|
||||
|
||||
def _flushContour(self):
|
||||
pen = self.pen
|
||||
contour = self.currentContour
|
||||
if not contour:
|
||||
pen.beginPath(identifier=self.currentContourIdentifier)
|
||||
pen.endPath()
|
||||
return
|
||||
|
||||
closed = contour[0][1] != "move"
|
||||
if not closed:
|
||||
lastSegmentType = "move"
|
||||
else:
|
||||
# Remove the first point and insert it at the end. When
|
||||
# the list of points gets reversed, this point will then
|
||||
# again be at the start. In other words, the following
|
||||
# will hold:
|
||||
# for N in range(len(originalContour)):
|
||||
# originalContour[N] == reversedContour[-N]
|
||||
contour.append(contour.pop(0))
|
||||
# Find the first on-curve point.
|
||||
firstOnCurve = None
|
||||
for i in range(len(contour)):
|
||||
if contour[i][1] is not None:
|
||||
firstOnCurve = i
|
||||
break
|
||||
if firstOnCurve is None:
|
||||
# There are no on-curve points, we basically have to
|
||||
# do nothing but contour.reverse().
|
||||
lastSegmentType = None
|
||||
else:
|
||||
lastSegmentType = contour[firstOnCurve][1]
|
||||
|
||||
contour.reverse()
|
||||
if not closed:
|
||||
# Open paths must start with a move, so we simply dump
|
||||
# all off-curve points leading up to the first on-curve.
|
||||
while contour[0][1] is None:
|
||||
contour.pop(0)
|
||||
pen.beginPath(identifier=self.currentContourIdentifier)
|
||||
for pt, nextSegmentType, smooth, name, kwargs in contour:
|
||||
if nextSegmentType is not None:
|
||||
segmentType = lastSegmentType
|
||||
lastSegmentType = nextSegmentType
|
||||
else:
|
||||
segmentType = None
|
||||
pen.addPoint(pt, segmentType=segmentType, smooth=smooth, name=name, **kwargs)
|
||||
pen.endPath()
|
||||
|
||||
def beginPath(self, identifier=None, **kwargs):
|
||||
assert self.currentContour is None
|
||||
self.currentContour = []
|
||||
self.currentContourIdentifier = identifier
|
||||
self.onCurve = []
|
||||
|
||||
def endPath(self):
|
||||
assert self.currentContour is not None
|
||||
self._flushContour()
|
||||
self.currentContour = None
|
||||
|
||||
def addPoint(self, pt, segmentType=None, smooth=False, name=None, **kwargs):
|
||||
self.currentContour.append((pt, segmentType, smooth, name, kwargs))
|
||||
|
||||
def addComponent(self, glyphName, transform, identifier=None, **kwargs):
|
||||
assert self.currentContour is None
|
||||
self.pen.addComponent(glyphName, transform, identifier=identifier, **kwargs)
|
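# A minimal usage sketch (illustration only): reversing contour direction
# while forwarding to another PointPen. 'glyph' and 'otherPointPen' are
# assumed objects; any AbstractPointPen implementation works as the target.
#
#   reverser = ReverseContourPointPen(otherPointPen)
#   glyph.drawPoints(reverser)  # contours arrive reversed at otherPointPen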
86
Lib/fontTools/ufoLib/utils.py
Normal file
@ -0,0 +1,86 @@
|
||||
"""The module contains miscellaneous helpers.
|
||||
It's not considered part of the public ufoLib API.
|
||||
"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import sys
|
||||
import warnings
|
||||
import functools
|
||||
from datetime import datetime
|
||||
from fontTools.misc.py23 import tounicode
|
||||
|
||||
|
||||
if hasattr(datetime, "timestamp"): # python >= 3.3
|
||||
|
||||
def datetimeAsTimestamp(dt):
|
||||
return dt.timestamp()
|
||||
|
||||
else:
|
||||
from datetime import tzinfo, timedelta
|
||||
|
||||
ZERO = timedelta(0)
|
||||
|
||||
class UTC(tzinfo):
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return ZERO
|
||||
|
||||
def tzname(self, dt):
|
||||
return "UTC"
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
||||
utc = UTC()
|
||||
|
||||
EPOCH = datetime.fromtimestamp(0, tz=utc)
|
||||
|
||||
def datetimeAsTimestamp(dt):
|
||||
return (dt - EPOCH).total_seconds()
|
||||
|
||||
|
||||
# TODO: should import from fontTools.misc.py23
|
||||
try:
|
||||
long = long
|
||||
except NameError:
|
||||
long = int
|
||||
|
||||
integerTypes = (int, long)
|
||||
numberTypes = (int, float, long)
|
||||
|
||||
|
||||
def deprecated(msg=""):
|
||||
"""Decorator factory to mark functions as deprecated with given message.
|
||||
|
||||
>>> @deprecated("Enough!")
|
||||
... def some_function():
|
||||
... "I just print 'hello world'."
|
||||
... print("hello world")
|
||||
>>> some_function()
|
||||
hello world
|
||||
>>> some_function.__doc__
|
||||
"I just print 'hello world'."
|
||||
"""
|
||||
|
||||
def deprecated_decorator(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
warnings.warn(
|
||||
"{} function is a deprecated. {}".format(func.__name__, msg),
|
||||
category=DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return deprecated_decorator
|
||||
|
||||
|
||||
def fsdecode(path, encoding=sys.getfilesystemencoding()):
|
||||
return tounicode(path, encoding=encoding)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
|
||||
doctest.testmod()
|
1042
Lib/fontTools/ufoLib/validators.py
Normal file
File diff suppressed because it is too large
0
Tests/ufoLib/__init__.py
Normal file
672
Tests/ufoLib/testSupport.py
Executable file
@ -0,0 +1,672 @@
|
||||
"""Miscellaneous helpers for our test suite."""
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import os
|
||||
from ufoLib.utils import numberTypes
|
||||
|
||||
try:
|
||||
basestring
|
||||
except NameError:
|
||||
basestring = str
|
||||
|
||||
def getDemoFontPath():
|
||||
"""Return the path to Data/DemoFont.ufo/."""
|
||||
testdata = os.path.join(os.path.dirname(__file__), "testdata")
|
||||
return os.path.join(testdata, "DemoFont.ufo")
|
||||
|
||||
|
||||
def getDemoFontGlyphSetPath():
|
||||
"""Return the path to Data/DemoFont.ufo/glyphs/."""
|
||||
return os.path.join(getDemoFontPath(), "glyphs")
|
||||
|
||||
|
||||
# GLIF test tools
|
||||
|
||||
class Glyph(object):
|
||||
|
||||
def __init__(self):
|
||||
self.name = None
|
||||
self.width = None
|
||||
self.height = None
|
||||
self.unicodes = None
|
||||
self.note = None
|
||||
self.lib = None
|
||||
self.image = None
|
||||
self.guidelines = None
|
||||
self.anchors = None
|
||||
self.outline = []
|
||||
|
||||
def _writePointPenCommand(self, command, args, kwargs):
|
||||
args = _listToString(args)
|
||||
kwargs = _dictToString(kwargs)
|
||||
if args and kwargs:
|
||||
return "pointPen.%s(*%s, **%s)" % (command, args, kwargs)
|
||||
elif len(args):
|
||||
return "pointPen.%s(*%s)" % (command, args)
|
||||
elif len(kwargs):
|
||||
return "pointPen.%s(**%s)" % (command, kwargs)
|
||||
else:
|
||||
return "pointPen.%s()" % command
|
||||
|
||||
def beginPath(self, **kwargs):
|
||||
self.outline.append(self._writePointPenCommand("beginPath", [], kwargs))
|
||||
|
||||
def endPath(self):
|
||||
self.outline.append(self._writePointPenCommand("endPath", [], {}))
|
||||
|
||||
def addPoint(self, *args, **kwargs):
|
||||
self.outline.append(self._writePointPenCommand("addPoint", args, kwargs))
|
||||
|
||||
def addComponent(self, *args, **kwargs):
|
||||
self.outline.append(self._writePointPenCommand("addComponent", args, kwargs))
|
||||
|
||||
def drawPoints(self, pointPen):
|
||||
if self.outline:
|
||||
py = "\n".join(self.outline)
|
||||
exec(py, {"pointPen" : pointPen})
|
||||
|
||||
def py(self):
|
||||
text = []
|
||||
if self.name is not None:
|
||||
text.append("glyph.name = \"%s\"" % self.name)
|
||||
if self.width:
|
||||
text.append("glyph.width = %r" % self.width)
|
||||
if self.height:
|
||||
text.append("glyph.height = %r" % self.height)
|
||||
if self.unicodes is not None:
|
||||
text.append("glyph.unicodes = [%s]" % ", ".join([str(i) for i in self.unicodes]))
|
||||
if self.note is not None:
|
||||
text.append("glyph.note = \"%s\"" % self.note)
|
||||
if self.lib is not None:
|
||||
text.append("glyph.lib = %s" % _dictToString(self.lib))
|
||||
if self.image is not None:
|
||||
text.append("glyph.image = %s" % _dictToString(self.image))
|
||||
if self.guidelines is not None:
|
||||
text.append("glyph.guidelines = %s" % _listToString(self.guidelines))
|
||||
if self.anchors is not None:
|
||||
text.append("glyph.anchors = %s" % _listToString(self.anchors))
|
||||
if self.outline:
|
||||
text += self.outline
|
||||
return "\n".join(text)
|
||||
|
||||
def _dictToString(d):
|
||||
text = []
|
||||
for key, value in sorted(d.items()):
|
||||
if value is None:
|
||||
continue
|
||||
key = "\"%s\"" % key
|
||||
if isinstance(value, dict):
|
||||
value = _dictToString(value)
|
||||
elif isinstance(value, list):
|
||||
value = _listToString(value)
|
||||
elif isinstance(value, tuple):
|
||||
value = _tupleToString(value)
|
||||
elif isinstance(value, numberTypes):
|
||||
value = repr(value)
|
||||
elif isinstance(value, basestring):
|
||||
value = "\"%s\"" % value
|
||||
text.append("%s : %s" % (key, value))
|
||||
if not text:
|
||||
return ""
|
||||
return "{%s}" % ", ".join(text)
|
||||
|
||||
def _listToString(l):
|
||||
text = []
|
||||
for value in l:
|
||||
if isinstance(value, dict):
|
||||
value = _dictToString(value)
|
||||
elif isinstance(value, list):
|
||||
value = _listToString(value)
|
||||
elif isinstance(value, tuple):
|
||||
value = _tupleToString(value)
|
||||
elif isinstance(value, numberTypes):
|
||||
value = repr(value)
|
||||
elif isinstance(value, basestring):
|
||||
value = "\"%s\"" % value
|
||||
text.append(value)
|
||||
if not text:
|
||||
return ""
|
||||
return "[%s]" % ", ".join(text)
|
||||
|
||||
def _tupleToString(t):
|
||||
text = []
|
||||
for value in t:
|
||||
if isinstance(value, dict):
|
||||
value = _dictToString(value)
|
||||
elif isinstance(value, list):
|
||||
value = _listToString(value)
|
||||
elif isinstance(value, tuple):
|
||||
value = _tupleToString(value)
|
||||
elif isinstance(value, numberTypes):
|
||||
value = repr(value)
|
||||
elif isinstance(value, basestring):
|
||||
value = "\"%s\"" % value
|
||||
text.append(value)
|
||||
if not text:
|
||||
return ""
|
||||
return "(%s)" % ", ".join(text)
|
||||
|
||||
def stripText(text):
|
||||
new = []
|
||||
for line in text.strip().splitlines():
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
new.append(line)
|
||||
return "\n".join(new)
|
||||
|
||||
# font info values used by several tests
|
||||
|
||||
fontInfoVersion1 = {
|
||||
"familyName" : "Some Font (Family Name)",
|
||||
"styleName" : "Regular (Style Name)",
|
||||
"fullName" : "Some Font-Regular (Postscript Full Name)",
|
||||
"fontName" : "SomeFont-Regular (Postscript Font Name)",
|
||||
"menuName" : "Some Font Regular (Style Map Family Name)",
|
||||
"fontStyle" : 64,
|
||||
"note" : "A note.",
|
||||
"versionMajor" : 1,
|
||||
"versionMinor" : 0,
|
||||
"year" : 2008,
|
||||
"copyright" : "Copyright Some Foundry.",
|
||||
"notice" : "Some Font by Some Designer for Some Foundry.",
|
||||
"trademark" : "Trademark Some Foundry",
|
||||
"license" : "License info for Some Foundry.",
|
||||
"licenseURL" : "http://somefoundry.com/license",
|
||||
"createdBy" : "Some Foundry",
|
||||
"designer" : "Some Designer",
|
||||
"designerURL" : "http://somedesigner.com",
|
||||
"vendorURL" : "http://somefoundry.com",
|
||||
"unitsPerEm" : 1000,
|
||||
"ascender" : 750,
|
||||
"descender" : -250,
|
||||
"capHeight" : 750,
|
||||
"xHeight" : 500,
|
||||
"defaultWidth" : 400,
|
||||
"slantAngle" : -12.5,
|
||||
"italicAngle" : -12.5,
|
||||
"widthName" : "Medium (normal)",
|
||||
"weightName" : "Medium",
|
||||
"weightValue" : 500,
|
||||
"fondName" : "SomeFont Regular (FOND Name)",
|
||||
"otFamilyName" : "Some Font (Preferred Family Name)",
|
||||
"otStyleName" : "Regular (Preferred Subfamily Name)",
|
||||
"otMacName" : "Some Font Regular (Compatible Full Name)",
|
||||
"msCharSet" : 0,
|
||||
"fondID" : 15000,
|
||||
"uniqueID" : 4000000,
|
||||
"ttVendor" : "SOME",
|
||||
"ttUniqueID" : "OpenType name Table Unique ID",
|
||||
"ttVersion" : "OpenType name Table Version",
|
||||
}
|
||||
|
||||
fontInfoVersion2 = {
|
||||
"familyName" : "Some Font (Family Name)",
|
||||
"styleName" : "Regular (Style Name)",
|
||||
"styleMapFamilyName" : "Some Font Regular (Style Map Family Name)",
|
||||
"styleMapStyleName" : "regular",
|
||||
"versionMajor" : 1,
|
||||
"versionMinor" : 0,
|
||||
"year" : 2008,
|
||||
"copyright" : "Copyright Some Foundry.",
|
||||
"trademark" : "Trademark Some Foundry",
|
||||
"unitsPerEm" : 1000,
|
||||
"descender" : -250,
|
||||
"xHeight" : 500,
|
||||
"capHeight" : 750,
|
||||
"ascender" : 750,
|
||||
"italicAngle" : -12.5,
|
||||
"note" : "A note.",
|
||||
"openTypeHeadCreated" : "2000/01/01 00:00:00",
|
||||
"openTypeHeadLowestRecPPEM" : 10,
|
||||
"openTypeHeadFlags" : [0, 1],
|
||||
"openTypeHheaAscender" : 750,
|
||||
"openTypeHheaDescender" : -250,
|
||||
"openTypeHheaLineGap" : 200,
|
||||
"openTypeHheaCaretSlopeRise" : 1,
|
||||
"openTypeHheaCaretSlopeRun" : 0,
|
||||
"openTypeHheaCaretOffset" : 0,
|
||||
"openTypeNameDesigner" : "Some Designer",
|
||||
"openTypeNameDesignerURL" : "http://somedesigner.com",
|
||||
"openTypeNameManufacturer" : "Some Foundry",
|
||||
"openTypeNameManufacturerURL" : "http://somefoundry.com",
|
||||
"openTypeNameLicense" : "License info for Some Foundry.",
|
||||
"openTypeNameLicenseURL" : "http://somefoundry.com/license",
|
||||
"openTypeNameVersion" : "OpenType name Table Version",
|
||||
"openTypeNameUniqueID" : "OpenType name Table Unique ID",
|
||||
"openTypeNameDescription" : "Some Font by Some Designer for Some Foundry.",
|
||||
"openTypeNamePreferredFamilyName" : "Some Font (Preferred Family Name)",
|
||||
"openTypeNamePreferredSubfamilyName" : "Regular (Preferred Subfamily Name)",
|
||||
"openTypeNameCompatibleFullName" : "Some Font Regular (Compatible Full Name)",
|
||||
"openTypeNameSampleText" : "Sample Text for Some Font.",
|
||||
"openTypeNameWWSFamilyName" : "Some Font (WWS Family Name)",
|
||||
"openTypeNameWWSSubfamilyName" : "Regular (WWS Subfamily Name)",
|
||||
"openTypeOS2WidthClass" : 5,
|
||||
"openTypeOS2WeightClass" : 500,
|
||||
"openTypeOS2Selection" : [3],
|
||||
"openTypeOS2VendorID" : "SOME",
|
||||
"openTypeOS2Panose" : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
|
||||
"openTypeOS2FamilyClass" : [1, 1],
|
||||
"openTypeOS2UnicodeRanges" : [0, 1],
|
||||
"openTypeOS2CodePageRanges" : [0, 1],
|
||||
"openTypeOS2TypoAscender" : 750,
|
||||
"openTypeOS2TypoDescender" : -250,
|
||||
"openTypeOS2TypoLineGap" : 200,
|
||||
"openTypeOS2WinAscent" : 750,
|
||||
"openTypeOS2WinDescent" : 250,
|
||||
"openTypeOS2Type" : [],
|
||||
"openTypeOS2SubscriptXSize" : 200,
|
||||
"openTypeOS2SubscriptYSize" : 400,
|
||||
"openTypeOS2SubscriptXOffset" : 0,
|
||||
"openTypeOS2SubscriptYOffset" : -100,
|
||||
"openTypeOS2SuperscriptXSize" : 200,
|
||||
"openTypeOS2SuperscriptYSize" : 400,
|
||||
"openTypeOS2SuperscriptXOffset" : 0,
|
||||
"openTypeOS2SuperscriptYOffset" : 200,
|
||||
"openTypeOS2StrikeoutSize" : 20,
|
||||
"openTypeOS2StrikeoutPosition" : 300,
|
||||
"openTypeVheaVertTypoAscender" : 750,
|
||||
"openTypeVheaVertTypoDescender" : -250,
|
||||
"openTypeVheaVertTypoLineGap" : 200,
|
||||
"openTypeVheaCaretSlopeRise" : 0,
|
||||
"openTypeVheaCaretSlopeRun" : 1,
|
||||
"openTypeVheaCaretOffset" : 0,
|
||||
"postscriptFontName" : "SomeFont-Regular (Postscript Font Name)",
|
||||
"postscriptFullName" : "Some Font-Regular (Postscript Full Name)",
|
||||
"postscriptSlantAngle" : -12.5,
|
||||
"postscriptUniqueID" : 4000000,
|
||||
"postscriptUnderlineThickness" : 20,
|
||||
"postscriptUnderlinePosition" : -200,
|
||||
"postscriptIsFixedPitch" : False,
|
||||
"postscriptBlueValues" : [500, 510],
|
||||
"postscriptOtherBlues" : [-250, -260],
|
||||
"postscriptFamilyBlues" : [500, 510],
|
||||
"postscriptFamilyOtherBlues" : [-250, -260],
|
||||
"postscriptStemSnapH" : [100, 120],
|
||||
"postscriptStemSnapV" : [80, 90],
|
||||
"postscriptBlueFuzz" : 1,
|
||||
"postscriptBlueShift" : 7,
|
||||
"postscriptBlueScale" : 0.039625,
|
||||
"postscriptForceBold" : True,
|
||||
"postscriptDefaultWidthX" : 400,
|
||||
"postscriptNominalWidthX" : 400,
|
||||
"postscriptWeightName" : "Medium",
|
||||
"postscriptDefaultCharacter" : ".notdef",
|
||||
"postscriptWindowsCharacterSet" : 1,
|
||||
"macintoshFONDFamilyID" : 15000,
|
||||
"macintoshFONDName" : "SomeFont Regular (FOND Name)",
|
||||
}
|
||||
|
||||
fontInfoVersion3 = {
|
||||
"familyName" : "Some Font (Family Name)",
|
||||
"styleName" : "Regular (Style Name)",
|
||||
"styleMapFamilyName" : "Some Font Regular (Style Map Family Name)",
|
||||
"styleMapStyleName" : "regular",
|
||||
"versionMajor" : 1,
|
||||
"versionMinor" : 0,
|
||||
"year" : 2008,
|
||||
"copyright" : "Copyright Some Foundry.",
|
||||
"trademark" : "Trademark Some Foundry",
|
||||
"unitsPerEm" : 1000,
|
||||
"descender" : -250,
|
||||
"xHeight" : 500,
|
||||
"capHeight" : 750,
|
||||
"ascender" : 750,
|
||||
"italicAngle" : -12.5,
|
||||
"note" : "A note.",
|
||||
"openTypeGaspRangeRecords" : [
|
||||
dict(rangeMaxPPEM=10, rangeGaspBehavior=[0]),
|
||||
dict(rangeMaxPPEM=20, rangeGaspBehavior=[1]),
|
||||
dict(rangeMaxPPEM=30, rangeGaspBehavior=[2]),
|
||||
dict(rangeMaxPPEM=40, rangeGaspBehavior=[3]),
|
||||
dict(rangeMaxPPEM=50, rangeGaspBehavior=[0, 1, 2, 3]),
|
||||
dict(rangeMaxPPEM=0xFFFF, rangeGaspBehavior=[0])
|
||||
],
|
||||
"openTypeHeadCreated" : "2000/01/01 00:00:00",
|
||||
"openTypeHeadLowestRecPPEM" : 10,
|
||||
"openTypeHeadFlags" : [0, 1],
|
||||
"openTypeHheaAscender" : 750,
|
||||
"openTypeHheaDescender" : -250,
|
||||
"openTypeHheaLineGap" : 200,
|
||||
"openTypeHheaCaretSlopeRise" : 1,
|
||||
"openTypeHheaCaretSlopeRun" : 0,
|
||||
"openTypeHheaCaretOffset" : 0,
|
||||
"openTypeNameDesigner" : "Some Designer",
|
||||
"openTypeNameDesignerURL" : "http://somedesigner.com",
|
||||
"openTypeNameManufacturer" : "Some Foundry",
|
||||
"openTypeNameManufacturerURL" : "http://somefoundry.com",
|
||||
"openTypeNameLicense" : "License info for Some Foundry.",
|
||||
"openTypeNameLicenseURL" : "http://somefoundry.com/license",
|
||||
"openTypeNameVersion" : "OpenType name Table Version",
|
||||
"openTypeNameUniqueID" : "OpenType name Table Unique ID",
|
||||
"openTypeNameDescription" : "Some Font by Some Designer for Some Foundry.",
|
||||
"openTypeNamePreferredFamilyName" : "Some Font (Preferred Family Name)",
|
||||
"openTypeNamePreferredSubfamilyName" : "Regular (Preferred Subfamily Name)",
|
||||
"openTypeNameCompatibleFullName" : "Some Font Regular (Compatible Full Name)",
|
||||
"openTypeNameSampleText" : "Sample Text for Some Font.",
|
||||
"openTypeNameWWSFamilyName" : "Some Font (WWS Family Name)",
|
||||
"openTypeNameWWSSubfamilyName" : "Regular (WWS Subfamily Name)",
|
||||
"openTypeNameRecords" : [
|
||||
dict(nameID=1, platformID=1, encodingID=1, languageID=1, string="Name Record."),
|
||||
dict(nameID=2, platformID=1, encodingID=1, languageID=1, string="Name Record.")
|
||||
],
|
||||
"openTypeOS2WidthClass" : 5,
|
||||
"openTypeOS2WeightClass" : 500,
|
||||
"openTypeOS2Selection" : [3],
|
||||
"openTypeOS2VendorID" : "SOME",
|
||||
"openTypeOS2Panose" : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
|
||||
"openTypeOS2FamilyClass" : [1, 1],
|
||||
"openTypeOS2UnicodeRanges" : [0, 1],
|
||||
"openTypeOS2CodePageRanges" : [0, 1],
|
||||
"openTypeOS2TypoAscender" : 750,
|
||||
"openTypeOS2TypoDescender" : -250,
|
||||
"openTypeOS2TypoLineGap" : 200,
|
||||
"openTypeOS2WinAscent" : 750,
|
||||
"openTypeOS2WinDescent" : 250,
|
||||
"openTypeOS2Type" : [],
|
||||
"openTypeOS2SubscriptXSize" : 200,
|
||||
"openTypeOS2SubscriptYSize" : 400,
|
||||
"openTypeOS2SubscriptXOffset" : 0,
|
||||
"openTypeOS2SubscriptYOffset" : -100,
|
||||
"openTypeOS2SuperscriptXSize" : 200,
|
||||
"openTypeOS2SuperscriptYSize" : 400,
|
||||
"openTypeOS2SuperscriptXOffset" : 0,
|
||||
"openTypeOS2SuperscriptYOffset" : 200,
|
||||
"openTypeOS2StrikeoutSize" : 20,
|
||||
"openTypeOS2StrikeoutPosition" : 300,
|
||||
"openTypeVheaVertTypoAscender" : 750,
|
||||
"openTypeVheaVertTypoDescender" : -250,
|
||||
"openTypeVheaVertTypoLineGap" : 200,
|
||||
"openTypeVheaCaretSlopeRise" : 0,
|
||||
"openTypeVheaCaretSlopeRun" : 1,
|
||||
"openTypeVheaCaretOffset" : 0,
|
||||
"postscriptFontName" : "SomeFont-Regular (Postscript Font Name)",
|
||||
"postscriptFullName" : "Some Font-Regular (Postscript Full Name)",
|
||||
"postscriptSlantAngle" : -12.5,
|
||||
"postscriptUniqueID" : 4000000,
|
||||
"postscriptUnderlineThickness" : 20,
|
||||
"postscriptUnderlinePosition" : -200,
|
||||
"postscriptIsFixedPitch" : False,
|
||||
"postscriptBlueValues" : [500, 510],
|
||||
"postscriptOtherBlues" : [-250, -260],
|
||||
"postscriptFamilyBlues" : [500, 510],
|
||||
"postscriptFamilyOtherBlues" : [-250, -260],
|
||||
"postscriptStemSnapH" : [100, 120],
|
||||
"postscriptStemSnapV" : [80, 90],
|
||||
"postscriptBlueFuzz" : 1,
|
||||
"postscriptBlueShift" : 7,
|
||||
"postscriptBlueScale" : 0.039625,
|
||||
"postscriptForceBold" : True,
|
||||
"postscriptDefaultWidthX" : 400,
|
||||
"postscriptNominalWidthX" : 400,
|
||||
"postscriptWeightName" : "Medium",
|
||||
"postscriptDefaultCharacter" : ".notdef",
|
||||
"postscriptWindowsCharacterSet" : 1,
|
||||
"macintoshFONDFamilyID" : 15000,
|
||||
"macintoshFONDName" : "SomeFont Regular (FOND Name)",
|
||||
"woffMajorVersion" : 1,
|
||||
"woffMinorVersion" : 0,
|
||||
"woffMetadataUniqueID" : dict(id="string"),
|
||||
"woffMetadataVendor" : dict(name="Some Foundry", url="http://somefoundry.com"),
|
||||
"woffMetadataCredits" : dict(
|
||||
credits=[
|
||||
dict(name="Some Designer"),
|
||||
dict(name=""),
|
||||
dict(name="Some Designer", url="http://somedesigner.com"),
|
||||
dict(name="Some Designer", url=""),
|
||||
dict(name="Some Designer", role="Designer"),
|
||||
dict(name="Some Designer", role=""),
|
||||
dict(name="Some Designer", dir="ltr"),
|
||||
dict(name="rengiseD emoS", dir="rtl"),
|
||||
{"name" : "Some Designer", "class" : "hello"},
|
||||
{"name" : "Some Designer", "class" : ""},
|
||||
]
|
||||
),
|
||||
"woffMetadataDescription" : dict(
|
||||
url="http://somefoundry.com/foo/description",
|
||||
text=[
|
||||
dict(text="foo"),
|
||||
dict(text=""),
|
||||
dict(text="foo", language="bar"),
|
||||
dict(text="foo", language=""),
|
||||
dict(text="foo", dir="ltr"),
|
||||
dict(text="foo", dir="rtl"),
|
||||
{"text" : "foo", "class" : "foo"},
|
||||
{"text" : "foo", "class" : ""},
|
||||
]
|
||||
),
|
||||
"woffMetadataLicense" : dict(
|
||||
url="http://somefoundry.com/foo/license",
|
||||
id="foo",
|
||||
text=[
|
||||
dict(text="foo"),
|
||||
dict(text=""),
|
||||
dict(text="foo", language="bar"),
|
||||
dict(text="foo", language=""),
|
||||
dict(text="foo", dir="ltr"),
|
||||
dict(text="foo", dir="rtl"),
|
||||
{"text" : "foo", "class" : "foo"},
|
||||
{"text" : "foo", "class" : ""},
|
||||
]
|
||||
),
|
||||
"woffMetadataCopyright" : dict(
|
||||
text=[
|
||||
dict(text="foo"),
|
||||
dict(text=""),
|
||||
dict(text="foo", language="bar"),
|
||||
dict(text="foo", language=""),
|
||||
dict(text="foo", dir="ltr"),
|
||||
dict(text="foo", dir="rtl"),
|
||||
{"text" : "foo", "class" : "foo"},
|
||||
{"text" : "foo", "class" : ""},
|
||||
]
|
||||
),
|
||||
"woffMetadataTrademark" : dict(
|
||||
text=[
|
||||
dict(text="foo"),
|
||||
dict(text=""),
|
||||
dict(text="foo", language="bar"),
|
||||
dict(text="foo", language=""),
|
||||
dict(text="foo", dir="ltr"),
|
||||
dict(text="foo", dir="rtl"),
|
||||
{"text" : "foo", "class" : "foo"},
|
||||
{"text" : "foo", "class" : ""},
|
||||
]
|
||||
),
|
||||
"woffMetadataLicensee" : dict(
|
||||
name="Some Licensee"
|
||||
),
|
||||
"woffMetadataExtensions" : [
|
||||
dict(
|
||||
# everything
|
||||
names=[
|
||||
dict(text="foo"),
|
||||
dict(text=""),
|
||||
dict(text="foo", language="bar"),
|
||||
dict(text="foo", language=""),
|
||||
dict(text="foo", dir="ltr"),
|
||||
dict(text="foo", dir="rtl"),
|
||||
{"text" : "foo", "class" : "hello"},
|
||||
{"text" : "foo", "class" : ""},
|
||||
],
|
||||
items=[
|
||||
# everything
|
||||
dict(
|
||||
id="foo",
|
||||
names=[
|
||||
dict(text="foo"),
|
||||
dict(text=""),
|
||||
dict(text="foo", language="bar"),
|
||||
dict(text="foo", language=""),
|
||||
dict(text="foo", dir="ltr"),
|
||||
dict(text="foo", dir="rtl"),
|
||||
{"text" : "foo", "class" : "hello"},
|
||||
{"text" : "foo", "class" : ""},
|
||||
],
|
||||
values=[
|
||||
dict(text="foo"),
|
||||
dict(text=""),
|
||||
dict(text="foo", language="bar"),
|
||||
dict(text="foo", language=""),
|
||||
dict(text="foo", dir="ltr"),
|
||||
dict(text="foo", dir="rtl"),
|
||||
{"text" : "foo", "class" : "hello"},
|
||||
{"text" : "foo", "class" : ""},
|
||||
]
|
||||
),
|
||||
# no id
|
||||
dict(
|
||||
names=[
|
||||
dict(text="foo")
|
||||
],
|
||||
values=[
|
||||
dict(text="foo")
|
||||
]
|
||||
)
|
||||
]
|
||||
),
|
||||
# no names
|
||||
dict(
|
||||
items=[
|
||||
dict(
|
||||
id="foo",
|
||||
names=[
|
||||
dict(text="foo")
|
||||
],
|
||||
values=[
|
||||
dict(text="foo")
|
||||
]
|
||||
)
|
||||
]
|
||||
),
|
||||
],
|
||||
"guidelines" : [
|
||||
# ints
|
||||
dict(x=100, y=200, angle=45),
|
||||
# floats
|
||||
dict(x=100.5, y=200.5, angle=45.5),
|
||||
# edges
|
||||
dict(x=0, y=0, angle=0),
|
||||
dict(x=0, y=0, angle=360),
|
||||
dict(x=0, y=0, angle=360.0),
|
||||
# no y
|
||||
dict(x=100),
|
||||
# no x
|
||||
dict(y=200),
|
||||
# name
|
||||
dict(x=100, y=200, angle=45, name="foo"),
|
||||
dict(x=100, y=200, angle=45, name=""),
|
||||
# identifier
|
||||
dict(x=100, y=200, angle=45, identifier="guide1"),
|
||||
dict(x=100, y=200, angle=45, identifier="guide2"),
|
||||
dict(x=100, y=200, angle=45, identifier="\x20"),
|
||||
dict(x=100, y=200, angle=45, identifier="\x7E"),
|
||||
# colors
|
||||
dict(x=100, y=200, angle=45, color="0,0,0,0"),
|
||||
dict(x=100, y=200, angle=45, color="1,0,0,0"),
|
||||
dict(x=100, y=200, angle=45, color="1,1,1,1"),
|
||||
dict(x=100, y=200, angle=45, color="0,1,0,0"),
|
||||
dict(x=100, y=200, angle=45, color="0,0,1,0"),
|
||||
dict(x=100, y=200, angle=45, color="0,0,0,1"),
|
||||
dict(x=100, y=200, angle=45, color="1, 0, 0, 0"),
|
||||
dict(x=100, y=200, angle=45, color="0, 1, 0, 0"),
|
||||
dict(x=100, y=200, angle=45, color="0, 0, 1, 0"),
|
||||
dict(x=100, y=200, angle=45, color="0, 0, 0, 1"),
|
||||
dict(x=100, y=200, angle=45, color=".5,0,0,0"),
|
||||
dict(x=100, y=200, angle=45, color="0,.5,0,0"),
|
||||
dict(x=100, y=200, angle=45, color="0,0,.5,0"),
|
||||
dict(x=100, y=200, angle=45, color="0,0,0,.5"),
|
||||
dict(x=100, y=200, angle=45, color=".5,1,1,1"),
|
||||
dict(x=100, y=200, angle=45, color="1,.5,1,1"),
|
||||
dict(x=100, y=200, angle=45, color="1,1,.5,1"),
|
||||
dict(x=100, y=200, angle=45, color="1,1,1,.5"),
|
||||
],
|
||||
}
|
||||
|
||||
expectedFontInfo1To2Conversion = {
|
||||
"familyName" : "Some Font (Family Name)",
|
||||
"styleMapFamilyName" : "Some Font Regular (Style Map Family Name)",
|
||||
"styleMapStyleName" : "regular",
|
||||
"styleName" : "Regular (Style Name)",
|
||||
"unitsPerEm" : 1000,
|
||||
"ascender" : 750,
|
||||
"capHeight" : 750,
|
||||
"xHeight" : 500,
|
||||
"descender" : -250,
|
||||
"italicAngle" : -12.5,
|
||||
"versionMajor" : 1,
|
||||
"versionMinor" : 0,
|
||||
"year" : 2008,
|
||||
"copyright" : "Copyright Some Foundry.",
|
||||
"trademark" : "Trademark Some Foundry",
|
||||
"note" : "A note.",
|
||||
"macintoshFONDFamilyID" : 15000,
|
||||
"macintoshFONDName" : "SomeFont Regular (FOND Name)",
|
||||
"openTypeNameCompatibleFullName" : "Some Font Regular (Compatible Full Name)",
|
||||
"openTypeNameDescription" : "Some Font by Some Designer for Some Foundry.",
|
||||
"openTypeNameDesigner" : "Some Designer",
|
||||
"openTypeNameDesignerURL" : "http://somedesigner.com",
|
||||
"openTypeNameLicense" : "License info for Some Foundry.",
|
||||
"openTypeNameLicenseURL" : "http://somefoundry.com/license",
|
||||
"openTypeNameManufacturer" : "Some Foundry",
|
||||
"openTypeNameManufacturerURL" : "http://somefoundry.com",
|
||||
"openTypeNamePreferredFamilyName" : "Some Font (Preferred Family Name)",
|
||||
"openTypeNamePreferredSubfamilyName": "Regular (Preferred Subfamily Name)",
|
||||
"openTypeNameCompatibleFullName" : "Some Font Regular (Compatible Full Name)",
|
||||
"openTypeNameUniqueID" : "OpenType name Table Unique ID",
|
||||
"openTypeNameVersion" : "OpenType name Table Version",
|
||||
"openTypeOS2VendorID" : "SOME",
|
||||
"openTypeOS2WeightClass" : 500,
|
||||
"openTypeOS2WidthClass" : 5,
|
||||
"postscriptDefaultWidthX" : 400,
|
||||
"postscriptFontName" : "SomeFont-Regular (Postscript Font Name)",
|
||||
"postscriptFullName" : "Some Font-Regular (Postscript Full Name)",
|
||||
"postscriptSlantAngle" : -12.5,
|
||||
"postscriptUniqueID" : 4000000,
|
||||
"postscriptWeightName" : "Medium",
|
||||
"postscriptWindowsCharacterSet" : 1
|
||||
}
|
||||
|
||||
expectedFontInfo2To1Conversion = {
|
||||
"familyName" : "Some Font (Family Name)",
|
||||
"menuName" : "Some Font Regular (Style Map Family Name)",
|
||||
"fontStyle" : 64,
|
||||
"styleName" : "Regular (Style Name)",
|
||||
"unitsPerEm" : 1000,
|
||||
"ascender" : 750,
|
||||
"capHeight" : 750,
|
||||
"xHeight" : 500,
|
||||
"descender" : -250,
|
||||
"italicAngle" : -12.5,
|
||||
"versionMajor" : 1,
|
||||
"versionMinor" : 0,
|
||||
"copyright" : "Copyright Some Foundry.",
|
||||
"trademark" : "Trademark Some Foundry",
|
||||
"note" : "A note.",
|
||||
"fondID" : 15000,
|
||||
"fondName" : "SomeFont Regular (FOND Name)",
|
||||
"fullName" : "Some Font Regular (Compatible Full Name)",
|
||||
"notice" : "Some Font by Some Designer for Some Foundry.",
|
||||
"designer" : "Some Designer",
|
||||
"designerURL" : "http://somedesigner.com",
|
||||
"license" : "License info for Some Foundry.",
|
||||
"licenseURL" : "http://somefoundry.com/license",
|
||||
"createdBy" : "Some Foundry",
|
||||
"vendorURL" : "http://somefoundry.com",
|
||||
"otFamilyName" : "Some Font (Preferred Family Name)",
|
||||
"otStyleName" : "Regular (Preferred Subfamily Name)",
|
||||
"otMacName" : "Some Font Regular (Compatible Full Name)",
|
||||
"ttUniqueID" : "OpenType name Table Unique ID",
|
||||
"ttVersion" : "OpenType name Table Version",
|
||||
"ttVendor" : "SOME",
|
||||
"weightValue" : 500,
|
||||
"widthName" : "Medium (normal)",
|
||||
"defaultWidth" : 400,
|
||||
"fontName" : "SomeFont-Regular (Postscript Font Name)",
|
||||
"fullName" : "Some Font-Regular (Postscript Full Name)",
|
||||
"slantAngle" : -12.5,
|
||||
"uniqueID" : 4000000,
|
||||
"weightName" : "Medium",
|
||||
"msCharSet" : 0,
|
||||
"year" : 2008
|
||||
}
|
1337
Tests/ufoLib/test_GLIF1.py
Normal file
File diff suppressed because it is too large
2372
Tests/ufoLib/test_GLIF2.py
Normal file
File diff suppressed because it is too large
152
Tests/ufoLib/test_UFO1.py
Normal file
@ -0,0 +1,152 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
import tempfile
|
||||
from io import open
|
||||
from ufoLib import UFOReader, UFOWriter, UFOLibError
|
||||
from ufoLib import plistlib
|
||||
from .testSupport import fontInfoVersion1, fontInfoVersion2
|
||||
|
||||
|
||||
class TestInfoObject(object): pass
|
||||
|
||||
|
||||
class ReadFontInfoVersion1TestCase(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.dstDir = tempfile.mktemp()
|
||||
os.mkdir(self.dstDir)
|
||||
metaInfo = {
|
||||
"creator": "test",
|
||||
"formatVersion": 1
|
||||
}
|
||||
path = os.path.join(self.dstDir, "metainfo.plist")
|
||||
with open(path, "wb") as f:
|
||||
plistlib.dump(metaInfo, f)
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.dstDir)
|
||||
|
||||
def _writeInfoToPlist(self, info):
|
||||
path = os.path.join(self.dstDir, "fontinfo.plist")
|
||||
with open(path, "wb") as f:
|
||||
plistlib.dump(info, f)
|
||||
|
||||
def testRead(self):
|
||||
originalData = dict(fontInfoVersion1)
|
||||
self._writeInfoToPlist(originalData)
|
||||
infoObject = TestInfoObject()
|
||||
reader = UFOReader(self.dstDir, validate=True)
|
||||
reader.readInfo(infoObject)
|
||||
for attr in dir(infoObject):
|
||||
if attr not in fontInfoVersion2:
|
||||
continue
|
||||
originalValue = fontInfoVersion2[attr]
|
||||
readValue = getattr(infoObject, attr)
|
||||
self.assertEqual(originalValue, readValue)
|
||||
|
||||
def testFontStyleConversion(self):
|
||||
fontStyle1To2 = {
|
||||
64 : "regular",
|
||||
1 : "italic",
|
||||
32 : "bold",
|
||||
33 : "bold italic"
|
||||
}
|
||||
for old, new in list(fontStyle1To2.items()):
|
||||
info = dict(fontInfoVersion1)
|
||||
info["fontStyle"] = old
|
||||
self._writeInfoToPlist(info)
|
||||
reader = UFOReader(self.dstDir, validate=True)
|
||||
infoObject = TestInfoObject()
|
||||
reader.readInfo(infoObject)
|
||||
self.assertEqual(new, infoObject.styleMapStyleName)
|
||||
|
||||
def testWidthNameConversion(self):
|
||||
widthName1To2 = {
|
||||
"Ultra-condensed" : 1,
|
||||
"Extra-condensed" : 2,
|
||||
"Condensed" : 3,
|
||||
"Semi-condensed" : 4,
|
||||
"Medium (normal)" : 5,
|
||||
"Semi-expanded" : 6,
|
||||
"Expanded" : 7,
|
||||
"Extra-expanded" : 8,
|
||||
"Ultra-expanded" : 9
|
||||
}
|
||||
for old, new in list(widthName1To2.items()):
|
||||
info = dict(fontInfoVersion1)
|
||||
info["widthName"] = old
|
||||
self._writeInfoToPlist(info)
|
||||
reader = UFOReader(self.dstDir, validate=True)
|
||||
infoObject = TestInfoObject()
|
||||
reader.readInfo(infoObject)
|
||||
self.assertEqual(new, infoObject.openTypeOS2WidthClass)
|
||||
|
||||
|
||||
class WriteFontInfoVersion1TestCase(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.tempDir = tempfile.mktemp()
|
||||
os.mkdir(self.tempDir)
|
||||
self.dstDir = os.path.join(self.tempDir, "test.ufo")
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.tempDir)
|
||||
|
||||
def makeInfoObject(self):
|
||||
infoObject = TestInfoObject()
|
||||
for attr, value in list(fontInfoVersion2.items()):
|
||||
setattr(infoObject, attr, value)
|
||||
return infoObject
|
||||
|
||||
def readPlist(self):
|
||||
path = os.path.join(self.dstDir, "fontinfo.plist")
|
||||
with open(path, "rb") as f:
|
||||
plist = plistlib.load(f)
|
||||
return plist
|
||||
|
||||
def testWrite(self):
|
||||
infoObject = self.makeInfoObject()
|
||||
writer = UFOWriter(self.dstDir, formatVersion=1)
|
||||
writer.writeInfo(infoObject)
|
||||
writtenData = self.readPlist()
|
||||
for attr, originalValue in list(fontInfoVersion1.items()):
|
||||
newValue = writtenData[attr]
|
||||
self.assertEqual(newValue, originalValue)
|
||||
|
||||
def testFontStyleConversion(self):
|
||||
fontStyle1To2 = {
|
||||
64 : "regular",
|
||||
1 : "italic",
|
||||
32 : "bold",
|
||||
33 : "bold italic"
|
||||
}
|
||||
for old, new in list(fontStyle1To2.items()):
|
||||
infoObject = self.makeInfoObject()
|
||||
infoObject.styleMapStyleName = new
|
||||
writer = UFOWriter(self.dstDir, formatVersion=1)
|
||||
writer.writeInfo(infoObject)
|
||||
writtenData = self.readPlist()
|
||||
self.assertEqual(writtenData["fontStyle"], old)
|
||||
|
||||
def testWidthNameConversion(self):
|
||||
widthName1To2 = {
|
||||
"Ultra-condensed" : 1,
|
||||
"Extra-condensed" : 2,
|
||||
"Condensed" : 3,
|
||||
"Semi-condensed" : 4,
|
||||
"Medium (normal)" : 5,
|
||||
"Semi-expanded" : 6,
|
||||
"Expanded" : 7,
|
||||
"Extra-expanded" : 8,
|
||||
"Ultra-expanded" : 9
|
||||
}
|
||||
for old, new in list(widthName1To2.items()):
|
||||
infoObject = self.makeInfoObject()
|
||||
infoObject.openTypeOS2WidthClass = new
|
||||
writer = UFOWriter(self.dstDir, formatVersion=1)
|
||||
writer.writeInfo(infoObject)
|
||||
writtenData = self.readPlist()
|
||||
self.assertEqual(writtenData["widthName"], old)
|
1414
Tests/ufoLib/test_UFO2.py
Normal file
File diff suppressed because it is too large
4686
Tests/ufoLib/test_UFO3.py
Normal file
File diff suppressed because it is too large
347
Tests/ufoLib/test_UFOConversion.py
Normal file
@ -0,0 +1,347 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
import tempfile
|
||||
from io import open
|
||||
from ufoLib import UFOReader, UFOWriter
|
||||
from ufoLib import plistlib
|
||||
from .testSupport import expectedFontInfo1To2Conversion, expectedFontInfo2To1Conversion
|
||||
|
||||
|
||||
# the format version 1 lib.plist contains some data
|
||||
# that these tests shouldn't be concerned about.
|
||||
removeFromFormatVersion1Lib = [
|
||||
"org.robofab.opentype.classes",
|
||||
"org.robofab.opentype.features",
|
||||
"org.robofab.opentype.featureorder",
|
||||
"org.robofab.postScriptHintData"
|
||||
]
|
||||
|
||||
|
||||
class ConversionFunctionsTestCase(unittest.TestCase):
|
||||
|
||||
def tearDown(self):
|
||||
path = self.getFontPath("TestFont1 (UFO1) converted.ufo")
|
||||
if os.path.exists(path):
|
||||
shutil.rmtree(path)
|
||||
path = self.getFontPath("TestFont1 (UFO2) converted.ufo")
|
||||
if os.path.exists(path):
|
||||
shutil.rmtree(path)
|
||||
|
||||
def getFontPath(self, fileName):
|
||||
testdata = os.path.join(os.path.dirname(__file__), "testdata")
|
||||
return os.path.join(testdata, fileName)
|
||||
|
||||
def compareFileStructures(self, path1, path2, expectedInfoData, testFeatures):
|
||||
# result
|
||||
metainfoPath1 = os.path.join(path1, "metainfo.plist")
|
||||
fontinfoPath1 = os.path.join(path1, "fontinfo.plist")
|
||||
kerningPath1 = os.path.join(path1, "kerning.plist")
|
||||
groupsPath1 = os.path.join(path1, "groups.plist")
|
||||
libPath1 = os.path.join(path1, "lib.plist")
|
||||
featuresPath1 = os.path.join(path1, "features.plist")
|
||||
glyphsPath1 = os.path.join(path1, "glyphs")
|
||||
glyphsPath1_contents = os.path.join(glyphsPath1, "contents.plist")
|
||||
glyphsPath1_A = os.path.join(glyphsPath1, "A_.glif")
|
||||
glyphsPath1_B = os.path.join(glyphsPath1, "B_.glif")
|
||||
# expected result
|
||||
metainfoPath2 = os.path.join(path2, "metainfo.plist")
|
||||
fontinfoPath2 = os.path.join(path2, "fontinfo.plist")
|
||||
kerningPath2 = os.path.join(path2, "kerning.plist")
|
||||
groupsPath2 = os.path.join(path2, "groups.plist")
|
||||
libPath2 = os.path.join(path2, "lib.plist")
|
||||
featuresPath2 = os.path.join(path2, "features.plist")
|
||||
glyphsPath2 = os.path.join(path2, "glyphs")
|
||||
glyphsPath2_contents = os.path.join(glyphsPath2, "contents.plist")
|
||||
glyphsPath2_A = os.path.join(glyphsPath2, "A_.glif")
|
||||
glyphsPath2_B = os.path.join(glyphsPath2, "B_.glif")
|
||||
# look for existence
|
||||
self.assertEqual(os.path.exists(metainfoPath1), True)
|
||||
self.assertEqual(os.path.exists(fontinfoPath1), True)
|
||||
self.assertEqual(os.path.exists(kerningPath1), True)
|
||||
self.assertEqual(os.path.exists(groupsPath1), True)
|
||||
self.assertEqual(os.path.exists(libPath1), True)
|
||||
self.assertEqual(os.path.exists(glyphsPath1), True)
|
||||
self.assertEqual(os.path.exists(glyphsPath1_contents), True)
|
||||
self.assertEqual(os.path.exists(glyphsPath1_A), True)
|
||||
self.assertEqual(os.path.exists(glyphsPath1_B), True)
|
||||
if testFeatures:
|
||||
self.assertEqual(os.path.exists(featuresPath1), True)
|
||||
# look for agreement
|
||||
with open(metainfoPath1, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
with open(metainfoPath2, "rb") as f:
|
||||
data2 = plistlib.load(f)
|
||||
self.assertEqual(data1, data2)
|
||||
with open(fontinfoPath1, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
self.assertEqual(sorted(data1.items()), sorted(expectedInfoData.items()))
|
||||
with open(kerningPath1, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
with open(kerningPath2, "rb") as f:
|
||||
data2 = plistlib.load(f)
|
||||
self.assertEqual(data1, data2)
|
||||
with open(groupsPath1, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
with open(groupsPath2, "rb") as f:
|
||||
data2 = plistlib.load(f)
|
||||
self.assertEqual(data1, data2)
|
||||
with open(libPath1, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
with open(libPath2, "rb") as f:
|
||||
data2 = plistlib.load(f)
|
||||
if "UFO1" in libPath1:
|
||||
for key in removeFromFormatVersion1Lib:
|
||||
if key in data1:
|
||||
del data1[key]
|
||||
if "UFO1" in libPath2:
|
||||
for key in removeFromFormatVersion1Lib:
|
||||
if key in data2:
|
||||
del data2[key]
|
||||
self.assertEqual(data1, data2)
|
||||
with open(glyphsPath1_contents, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
with open(glyphsPath2_contents, "rb") as f:
|
||||
data2 = plistlib.load(f)
|
||||
self.assertEqual(data1, data2)
|
||||
with open(glyphsPath1_A, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
with open(glyphsPath2_A, "rb") as f:
|
||||
data2 = plistlib.load(f)
|
||||
self.assertEqual(data1, data2)
|
||||
with open(glyphsPath1_B, "rb") as f:
|
||||
data1 = plistlib.load(f)
|
||||
with open(glyphsPath2_B, "rb") as f:
|
||||
data2 = plistlib.load(f)
|
||||
self.assertEqual(data1, data2)
|
||||
|
||||
|
||||
# ---------------------
|
||||
# kerning up conversion
|
||||
# ---------------------
|
||||
|
||||
class TestInfoObject(object): pass
|
||||
|
||||
|
||||
class KerningUpConversionTestCase(unittest.TestCase):
|
||||
|
||||
expectedKerning = {
|
||||
("public.kern1.BGroup", "public.kern2.CGroup"): 7,
|
||||
("public.kern1.BGroup", "public.kern2.DGroup"): 8,
|
||||
("public.kern1.BGroup", "A"): 5,
|
||||
("public.kern1.BGroup", "B"): 6,
|
||||
("public.kern1.CGroup", "public.kern2.CGroup"): 11,
|
||||
("public.kern1.CGroup", "public.kern2.DGroup"): 12,
|
||||
("public.kern1.CGroup", "A"): 9,
|
||||
("public.kern1.CGroup", "B"): 10,
|
||||
("A", "public.kern2.CGroup"): 3,
|
||||
("A", "public.kern2.DGroup"): 4,
|
||||
("A", "A"): 1,
|
||||
("A", "B"): 2
|
||||
}
|
||||
|
||||
expectedGroups = {
|
||||
"BGroup": ["B"],
|
||||
"CGroup": ["C", "Ccedilla"],
|
||||
"DGroup": ["D"],
|
||||
"public.kern1.BGroup": ["B"],
|
||||
"public.kern1.CGroup": ["C", "Ccedilla"],
|
||||
"public.kern2.CGroup": ["C", "Ccedilla"],
|
||||
"public.kern2.DGroup": ["D"],
|
||||
"Not A Kerning Group" : ["A"]
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
self.tempDir = tempfile.mktemp()
|
||||
os.mkdir(self.tempDir)
|
||||
self.ufoPath = os.path.join(self.tempDir, "test.ufo")
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.tempDir)
|
||||
|
||||
def makeUFO(self, formatVersion):
|
||||
self.clearUFO()
|
||||
if not os.path.exists(self.ufoPath):
|
||||
os.mkdir(self.ufoPath)
|
||||
# metainfo.plist
|
||||
metaInfo = dict(creator="test", formatVersion=formatVersion)
|
||||
path = os.path.join(self.ufoPath, "metainfo.plist")
|
||||
with open(path, "wb") as f:
|
||||
plistlib.dump(metaInfo, f)
|
||||
# kerning
|
||||
kerning = {
|
||||
"A" : {
|
||||
"A" : 1,
|
||||
"B" : 2,
|
||||
"CGroup" : 3,
|
||||
"DGroup" : 4
|
||||
},
|
||||
"BGroup" : {
|
||||
"A" : 5,
|
||||
"B" : 6,
|
||||
"CGroup" : 7,
|
||||
"DGroup" : 8
|
||||
},
|
||||
"CGroup" : {
|
||||
"A" : 9,
|
||||
"B" : 10,
|
||||
"CGroup" : 11,
|
||||
"DGroup" : 12
|
||||
}
|
||||
}
|
||||
path = os.path.join(self.ufoPath, "kerning.plist")
|
||||
with open(path, "wb") as f:
|
||||
plistlib.dump(kerning, f)
|
||||
# groups
|
||||
groups = {
|
||||
"BGroup" : ["B"],
|
||||
"CGroup" : ["C", "Ccedilla"],
|
||||
"DGroup" : ["D"],
|
||||
"Not A Kerning Group" : ["A"]
|
||||
}
|
||||
path = os.path.join(self.ufoPath, "groups.plist")
|
||||
with open(path, "wb") as f:
|
||||
plistlib.dump(groups, f)
|
||||
# font info
|
||||
fontInfo = {
|
||||
"familyName" : "Test"
|
||||
}
|
||||
path = os.path.join(self.ufoPath, "fontinfo.plist")
|
||||
with open(path, "wb") as f:
|
||||
plistlib.dump(fontInfo, f)
|
||||
|
||||
def clearUFO(self):
|
||||
if os.path.exists(self.ufoPath):
|
||||
shutil.rmtree(self.ufoPath)
|
||||
|
||||
def testUFO1(self):
|
||||
self.makeUFO(formatVersion=1)
|
||||
reader = UFOReader(self.ufoPath, validate=True)
|
||||
kerning = reader.readKerning()
|
||||
self.assertEqual(self.expectedKerning, kerning)
|
||||
groups = reader.readGroups()
|
||||
self.assertEqual(self.expectedGroups, groups)
|
||||
info = TestInfoObject()
|
||||
reader.readInfo(info)
|
||||
|
||||
def testUFO2(self):
|
||||
self.makeUFO(formatVersion=2)
|
||||
reader = UFOReader(self.ufoPath, validate=True)
|
||||
kerning = reader.readKerning()
|
||||
self.assertEqual(self.expectedKerning, kerning)
|
||||
groups = reader.readGroups()
|
||||
self.assertEqual(self.expectedGroups, groups)
|
||||
info = TestInfoObject()
|
||||
reader.readInfo(info)
|
||||
|
||||
|
||||
class KerningDownConversionTestCase(unittest.TestCase):
|
||||
|
||||
expectedKerning = {
|
||||
("public.kern1.BGroup", "public.kern2.CGroup"): 7,
|
||||
("public.kern1.BGroup", "public.kern2.DGroup"): 8,
|
||||
("public.kern1.BGroup", "A"): 5,
|
||||
("public.kern1.BGroup", "B"): 6,
|
||||
("public.kern1.CGroup", "public.kern2.CGroup"): 11,
|
||||
("public.kern1.CGroup", "public.kern2.DGroup"): 12,
|
||||
("public.kern1.CGroup", "A"): 9,
|
||||
("public.kern1.CGroup", "B"): 10,
|
||||
("A", "public.kern2.CGroup"): 3,
|
||||
("A", "public.kern2.DGroup"): 4,
|
||||
("A", "A"): 1,
|
||||
("A", "B"): 2
|
||||
}
|
||||
|
||||
groups = {
|
||||
"BGroup": ["B"],
|
||||
"CGroup": ["C"],
|
||||
"DGroup": ["D"],
|
||||
"public.kern1.BGroup": ["B"],
|
||||
"public.kern1.CGroup": ["C", "Ccedilla"],
|
||||
"public.kern2.CGroup": ["C", "Ccedilla"],
|
||||
"public.kern2.DGroup": ["D"],
|
||||
"Not A Kerning Group" : ["A"]
|
||||
}
|
||||
expectedWrittenGroups = {
|
||||
"BGroup": ["B"],
|
||||
"CGroup": ["C", "Ccedilla"],
|
||||
"DGroup": ["D"],
|
||||
"Not A Kerning Group" : ["A"]
|
||||
}
|
||||
|
||||
kerning = {
|
||||
("public.kern1.BGroup", "public.kern2.CGroup"): 7,
|
||||
("public.kern1.BGroup", "public.kern2.DGroup"): 8,
|
||||
("public.kern1.BGroup", "A"): 5,
|
||||
("public.kern1.BGroup", "B"): 6,
|
||||
("public.kern1.CGroup", "public.kern2.CGroup"): 11,
|
||||
("public.kern1.CGroup", "public.kern2.DGroup"): 12,
|
||||
("public.kern1.CGroup", "A"): 9,
|
||||
("public.kern1.CGroup", "B"): 10,
|
||||
("A", "public.kern2.CGroup"): 3,
|
||||
("A", "public.kern2.DGroup"): 4,
|
||||
("A", "A"): 1,
|
||||
("A", "B"): 2
|
||||
}
|
||||
expectedWrittenKerning = {
|
||||
"BGroup" : {
|
||||
"CGroup" : 7,
|
||||
"DGroup" : 8,
|
||||
"A" : 5,
|
||||
"B" : 6
|
||||
},
|
||||
"CGroup" : {
|
||||
"CGroup" : 11,
|
||||
"DGroup" : 12,
|
||||
"A" : 9,
|
||||
"B" : 10
|
||||
},
|
||||
"A" : {
|
||||
"CGroup" : 3,
|
||||
"DGroup" : 4,
|
||||
"A" : 1,
|
||||
"B" : 2
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
downConversionMapping = {
|
||||
"side1" : {
|
||||
"BGroup" : "public.kern1.BGroup",
|
||||
"CGroup" : "public.kern1.CGroup"
|
||||
},
|
||||
"side2" : {
|
||||
"CGroup" : "public.kern2.CGroup",
|
||||
"DGroup" : "public.kern2.DGroup"
|
||||
}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
self.tempDir = tempfile.mktemp()
|
||||
os.mkdir(self.tempDir)
|
||||
self.dstDir = os.path.join(self.tempDir, "test.ufo")
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.tempDir)
|
||||
|
||||
def tearDownUFO(self):
|
||||
shutil.rmtree(self.dstDir)
|
||||
|
||||
def testWrite(self):
|
||||
writer = UFOWriter(self.dstDir, formatVersion=2)
|
||||
writer.setKerningGroupConversionRenameMaps(self.downConversionMapping)
|
||||
writer.writeKerning(self.kerning)
|
||||
writer.writeGroups(self.groups)
|
||||
# test groups
|
||||
path = os.path.join(self.dstDir, "groups.plist")
|
||||
with open(path, "rb") as f:
|
||||
writtenGroups = plistlib.load(f)
|
||||
self.assertEqual(writtenGroups, self.expectedWrittenGroups)
|
||||
# test kerning
|
||||
path = os.path.join(self.dstDir, "kerning.plist")
|
||||
with open(path, "rb") as f:
|
||||
writtenKerning = plistlib.load(f)
|
||||
self.assertEqual(writtenKerning, self.expectedWrittenKerning)
|
||||
self.tearDownUFO()
|
99
Tests/ufoLib/test_UFOZ.py
Normal file
99
Tests/ufoLib/test_UFOZ.py
Normal file
@ -0,0 +1,99 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from fontTools.misc.py23 import tostr
|
||||
from ufoLib import UFOReader, UFOWriter, UFOFileStructure
|
||||
from ufoLib.errors import UFOLibError, GlifLibError
|
||||
from ufoLib import plistlib
|
||||
import sys
|
||||
import os
|
||||
import fs.osfs
|
||||
import fs.tempfs
|
||||
import fs.memoryfs
|
||||
import fs.copy
|
||||
import pytest
|
||||
import warnings
|
||||
|
||||
|
||||
TESTDATA = fs.osfs.OSFS(
|
||||
os.path.join(os.path.dirname(__file__), "testdata")
|
||||
)
|
||||
TEST_UFO3 = "TestFont1 (UFO3).ufo"
|
||||
TEST_UFOZ = "TestFont1 (UFO3).ufoz"
|
||||
|
||||
|
||||
@pytest.fixture(params=[TEST_UFO3, TEST_UFOZ])
|
||||
def testufo(request):
|
||||
name = request.param
|
||||
with fs.tempfs.TempFS() as tmp:
|
||||
if TESTDATA.isdir(name):
|
||||
fs.copy.copy_dir(TESTDATA, name, tmp, name)
|
||||
else:
|
||||
fs.copy.copy_file(TESTDATA, name, tmp, name)
|
||||
yield tmp.getsyspath(name)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def testufoz():
|
||||
with fs.tempfs.TempFS() as tmp:
|
||||
fs.copy.copy_file(TESTDATA, TEST_UFOZ, tmp, TEST_UFOZ)
|
||||
yield tmp.getsyspath(TEST_UFOZ)
|
||||
|
||||
|
||||
class TestUFOZ(object):
|
||||
|
||||
def test_read(self, testufoz):
|
||||
with UFOReader(testufoz) as reader:
|
||||
assert reader.fileStructure == UFOFileStructure.ZIP
|
||||
assert reader.formatVersion == 3
|
||||
|
||||
def test_write(self, testufoz):
|
||||
with UFOWriter(testufoz, structure="zip") as writer:
|
||||
writer.writeLib({"hello world": 123})
|
||||
with UFOReader(testufoz) as reader:
|
||||
assert reader.readLib() == {"hello world": 123}
|
||||
|
||||
|
||||
def test_pathlike(testufo):
|
||||
|
||||
class PathLike(object):
|
||||
|
||||
def __init__(self, s):
|
||||
self._path = s
|
||||
|
||||
def __fspath__(self):
|
||||
return tostr(self._path, sys.getfilesystemencoding())
|
||||
|
||||
path = PathLike(testufo)
|
||||
|
||||
with UFOReader(path) as reader:
|
||||
assert reader._path == path.__fspath__()
|
||||
|
||||
with UFOWriter(path) as writer:
|
||||
assert writer._path == path.__fspath__()
|
||||
|
||||
|
||||
def test_path_attribute_deprecated(testufo):
|
||||
with UFOWriter(testufo) as writer:
|
||||
with pytest.warns(DeprecationWarning, match="The 'path' attribute"):
|
||||
writer.path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def memufo():
|
||||
m = fs.memoryfs.MemoryFS()
|
||||
fs.copy.copy_dir(TESTDATA, TEST_UFO3, m, "/")
|
||||
return m
|
||||
|
||||
|
||||
class TestMemoryFS(object):
|
||||
|
||||
def test_init_reader(self, memufo):
|
||||
with UFOReader(memufo) as reader:
|
||||
assert reader.formatVersion == 3
|
||||
assert reader.fileStructure == UFOFileStructure.PACKAGE
|
||||
|
||||
def test_init_writer(self):
|
||||
m = fs.memoryfs.MemoryFS()
|
||||
with UFOWriter(m) as writer:
|
||||
assert m.exists("metainfo.plist")
|
||||
assert writer._path == "<memfs>"
|
55
Tests/ufoLib/test_etree.py
Normal file
55
Tests/ufoLib/test_etree.py
Normal file
@ -0,0 +1,55 @@
|
||||
# coding: utf-8
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from ufoLib import etree
|
||||
from collections import OrderedDict
|
||||
import io
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"xml",
|
||||
[
|
||||
(
|
||||
"<root>"
|
||||
'<element key="value">text</element>'
|
||||
"<element>text</element>tail"
|
||||
"<empty-element/>"
|
||||
"</root>"
|
||||
),
|
||||
(
|
||||
"<root>\n"
|
||||
' <element key="value">text</element>\n'
|
||||
" <element>text</element>tail\n"
|
||||
" <empty-element/>\n"
|
||||
"</root>"
|
||||
),
|
||||
(
|
||||
'<axis default="400" maximum="1000" minimum="1" name="weight" tag="wght">'
|
||||
'<labelname xml:lang="fa-IR">قطر</labelname>'
|
||||
"</axis>"
|
||||
),
|
||||
],
|
||||
ids=["simple_xml_no_indent", "simple_xml_indent", "xml_ns_attrib_utf_8"],
|
||||
)
|
||||
def test_roundtrip_string(xml):
|
||||
root = etree.fromstring(xml.encode("utf-8"))
|
||||
result = etree.tostring(root, encoding="utf-8").decode("utf-8")
|
||||
assert result == xml
|
||||
|
||||
|
||||
def test_pretty_print():
|
||||
root = etree.Element("root")
|
||||
attrs = OrderedDict([("c", "2"), ("b", "1"), ("a", "0")])
|
||||
etree.SubElement(root, "element", attrs).text = "text"
|
||||
etree.SubElement(root, "element").text = "text"
|
||||
root.append(etree.Element("empty-element"))
|
||||
|
||||
result = etree.tostring(root, encoding="unicode", pretty_print=True)
|
||||
|
||||
assert result == (
|
||||
"<root>\n"
|
||||
' <element c="2" b="1" a="0">text</element>\n'
|
||||
" <element>text</element>\n"
|
||||
" <empty-element/>\n"
|
||||
"</root>\n"
|
||||
)
|
98
Tests/ufoLib/test_filenames.py
Normal file
98
Tests/ufoLib/test_filenames.py
Normal file
@ -0,0 +1,98 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import unittest
|
||||
from ufoLib.filenames import userNameToFileName, handleClash1, handleClash2
|
||||
|
||||
|
||||
class TestFilenames(unittest.TestCase):
|
||||
|
||||
def test_userNameToFileName(self):
|
||||
self.assertEqual(userNameToFileName("a"), "a")
|
||||
self.assertEqual(userNameToFileName("A"), "A_")
|
||||
self.assertEqual(userNameToFileName("AE"), "A_E_")
|
||||
self.assertEqual(userNameToFileName("Ae"), "A_e")
|
||||
self.assertEqual(userNameToFileName("ae"), "ae")
|
||||
self.assertEqual(userNameToFileName("aE"), "aE_")
|
||||
self.assertEqual(userNameToFileName("a.alt"), "a.alt")
|
||||
self.assertEqual(userNameToFileName("A.alt"), "A_.alt")
|
||||
self.assertEqual(userNameToFileName("A.Alt"), "A_.A_lt")
|
||||
self.assertEqual(userNameToFileName("A.aLt"), "A_.aL_t")
|
||||
self.assertEqual(userNameToFileName("A.alT"), "A_.alT_")
|
||||
self.assertEqual(userNameToFileName("T_H"), "T__H_")
|
||||
self.assertEqual(userNameToFileName("T_h"), "T__h")
|
||||
self.assertEqual(userNameToFileName("t_h"), "t_h")
|
||||
self.assertEqual(userNameToFileName("F_F_I"), "F__F__I_")
|
||||
self.assertEqual(userNameToFileName("f_f_i"), "f_f_i")
|
||||
self.assertEqual(userNameToFileName("Aacute_V.swash"),
|
||||
"A_acute_V_.swash")
|
||||
self.assertEqual(userNameToFileName(".notdef"), "_notdef")
|
||||
self.assertEqual(userNameToFileName("con"), "_con")
|
||||
self.assertEqual(userNameToFileName("CON"), "C_O_N_")
|
||||
self.assertEqual(userNameToFileName("con.alt"), "_con.alt")
|
||||
self.assertEqual(userNameToFileName("alt.con"), "alt._con")
|
||||
|
||||
def test_userNameToFileName_ValueError(self):
|
||||
with self.assertRaises(ValueError):
|
||||
userNameToFileName(b"a")
|
||||
with self.assertRaises(ValueError):
|
||||
userNameToFileName({"a"})
|
||||
with self.assertRaises(ValueError):
|
||||
userNameToFileName(("a",))
|
||||
with self.assertRaises(ValueError):
|
||||
userNameToFileName(["a"])
|
||||
with self.assertRaises(ValueError):
|
||||
userNameToFileName(["a"])
|
||||
with self.assertRaises(ValueError):
|
||||
userNameToFileName(b"\xd8\x00")
|
||||
|
||||
def test_handleClash1(self):
|
||||
prefix = ("0" * 5) + "."
|
||||
suffix = "." + ("0" * 10)
|
||||
existing = ["a" * 5]
|
||||
|
||||
e = list(existing)
|
||||
self.assertEqual(
|
||||
handleClash1(userName="A" * 5, existing=e, prefix=prefix,
|
||||
suffix=suffix),
|
||||
'00000.AAAAA000000000000001.0000000000'
|
||||
)
|
||||
|
||||
e = list(existing)
|
||||
e.append(prefix + "aaaaa" + "1".zfill(15) + suffix)
|
||||
self.assertEqual(
|
||||
handleClash1(userName="A" * 5, existing=e, prefix=prefix,
|
||||
suffix=suffix),
|
||||
'00000.AAAAA000000000000002.0000000000'
|
||||
)
|
||||
|
||||
e = list(existing)
|
||||
e.append(prefix + "AAAAA" + "2".zfill(15) + suffix)
|
||||
self.assertEqual(
|
||||
handleClash1(userName="A" * 5, existing=e, prefix=prefix,
|
||||
suffix=suffix),
|
||||
'00000.AAAAA000000000000001.0000000000'
|
||||
)
|
||||
|
||||
def test_handleClash2(self):
|
||||
prefix = ("0" * 5) + "."
|
||||
suffix = "." + ("0" * 10)
|
||||
existing = [prefix + str(i) + suffix for i in range(100)]
|
||||
|
||||
e = list(existing)
|
||||
self.assertEqual(
|
||||
handleClash2(existing=e, prefix=prefix, suffix=suffix),
|
||||
'00000.100.0000000000'
|
||||
)
|
||||
|
||||
e = list(existing)
|
||||
e.remove(prefix + "1" + suffix)
|
||||
self.assertEqual(
|
||||
handleClash2(existing=e, prefix=prefix, suffix=suffix),
|
||||
'00000.1.0000000000'
|
||||
)
|
||||
|
||||
e = list(existing)
|
||||
e.remove(prefix + "2" + suffix)
|
||||
self.assertEqual(
|
||||
handleClash2(existing=e, prefix=prefix, suffix=suffix),
|
||||
'00000.2.0000000000'
|
||||
)
|
164
Tests/ufoLib/test_glifLib.py
Normal file
164
Tests/ufoLib/test_glifLib.py
Normal file
@ -0,0 +1,164 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import os
|
||||
import tempfile
|
||||
import shutil
|
||||
import unittest
|
||||
from io import open
|
||||
from .testSupport import getDemoFontGlyphSetPath
|
||||
from ufoLib.glifLib import (
|
||||
GlyphSet, glyphNameToFileName, readGlyphFromString, writeGlyphToString,
|
||||
_XML_DECLARATION,
|
||||
)
|
||||
|
||||
GLYPHSETDIR = getDemoFontGlyphSetPath()
|
||||
|
||||
|
||||
class GlyphSetTests(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.dstDir = tempfile.mktemp()
|
||||
os.mkdir(self.dstDir)
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.dstDir)
|
||||
|
||||
def testRoundTrip(self):
|
||||
import difflib
|
||||
srcDir = GLYPHSETDIR
|
||||
dstDir = self.dstDir
|
||||
src = GlyphSet(srcDir, ufoFormatVersion=2, validateRead=True, validateWrite=True)
|
||||
dst = GlyphSet(dstDir, ufoFormatVersion=2, validateRead=True, validateWrite=True)
|
||||
for glyphName in src.keys():
|
||||
g = src[glyphName]
|
||||
g.drawPoints(None) # load attrs
|
||||
dst.writeGlyph(glyphName, g, g.drawPoints)
|
||||
# compare raw file data:
|
||||
for glyphName in sorted(src.keys()):
|
||||
fileName = src.contents[glyphName]
|
||||
with open(os.path.join(srcDir, fileName), "r") as f:
|
||||
org = f.read()
|
||||
with open(os.path.join(dstDir, fileName), "r") as f:
|
||||
new = f.read()
|
||||
added = []
|
||||
removed = []
|
||||
for line in difflib.unified_diff(
|
||||
org.split("\n"), new.split("\n")):
|
||||
if line.startswith("+ "):
|
||||
added.append(line[1:])
|
||||
elif line.startswith("- "):
|
||||
removed.append(line[1:])
|
||||
self.assertEqual(
|
||||
added, removed,
|
||||
"%s.glif file differs after round tripping" % glyphName)
|
||||
|
||||
def testRebuildContents(self):
|
||||
gset = GlyphSet(GLYPHSETDIR, validateRead=True, validateWrite=True)
|
||||
contents = gset.contents
|
||||
gset.rebuildContents()
|
||||
self.assertEqual(contents, gset.contents)
|
||||
|
||||
def testReverseContents(self):
|
||||
gset = GlyphSet(GLYPHSETDIR, validateRead=True, validateWrite=True)
|
||||
d = {}
|
||||
for k, v in gset.getReverseContents().items():
|
||||
d[v] = k
|
||||
org = {}
|
||||
for k, v in gset.contents.items():
|
||||
org[k] = v.lower()
|
||||
self.assertEqual(d, org)
|
||||
|
||||
def testReverseContents2(self):
|
||||
src = GlyphSet(GLYPHSETDIR, validateRead=True, validateWrite=True)
|
||||
dst = GlyphSet(self.dstDir, validateRead=True, validateWrite=True)
|
||||
dstMap = dst.getReverseContents()
|
||||
self.assertEqual(dstMap, {})
|
||||
for glyphName in src.keys():
|
||||
g = src[glyphName]
|
||||
g.drawPoints(None) # load attrs
|
||||
dst.writeGlyph(glyphName, g, g.drawPoints)
|
||||
self.assertNotEqual(dstMap, {})
|
||||
srcMap = dict(src.getReverseContents()) # copy
|
||||
self.assertEqual(dstMap, srcMap)
|
||||
del srcMap["a.glif"]
|
||||
dst.deleteGlyph("a")
|
||||
self.assertEqual(dstMap, srcMap)
|
||||
|
||||
def testCustomFileNamingScheme(self):
|
||||
def myGlyphNameToFileName(glyphName, glyphSet):
|
||||
return "prefix" + glyphNameToFileName(glyphName, glyphSet)
|
||||
src = GlyphSet(GLYPHSETDIR, validateRead=True, validateWrite=True)
|
||||
dst = GlyphSet(self.dstDir, myGlyphNameToFileName, validateRead=True, validateWrite=True)
|
||||
for glyphName in src.keys():
|
||||
g = src[glyphName]
|
||||
g.drawPoints(None) # load attrs
|
||||
dst.writeGlyph(glyphName, g, g.drawPoints)
|
||||
d = {}
|
||||
for k, v in src.contents.items():
|
||||
d[k] = "prefix" + v
|
||||
self.assertEqual(d, dst.contents)
|
||||
|
||||
def testGetUnicodes(self):
|
||||
src = GlyphSet(GLYPHSETDIR, validateRead=True, validateWrite=True)
|
||||
unicodes = src.getUnicodes()
|
||||
for glyphName in src.keys():
|
||||
g = src[glyphName]
|
||||
g.drawPoints(None) # load attrs
|
||||
if not hasattr(g, "unicodes"):
|
||||
self.assertEqual(unicodes[glyphName], [])
|
||||
else:
|
||||
self.assertEqual(g.unicodes, unicodes[glyphName])
|
||||
|
||||
|
||||
class FileNameTests(unittest.TestCase):
|
||||
|
||||
def testDefaultFileNameScheme(self):
|
||||
self.assertEqual(glyphNameToFileName("a", None), "a.glif")
|
||||
self.assertEqual(glyphNameToFileName("A", None), "A_.glif")
|
||||
self.assertEqual(glyphNameToFileName("Aring", None), "A_ring.glif")
|
||||
self.assertEqual(glyphNameToFileName("F_A_B", None), "F__A__B_.glif")
|
||||
self.assertEqual(glyphNameToFileName("A.alt", None), "A_.alt.glif")
|
||||
self.assertEqual(glyphNameToFileName("A.Alt", None), "A_.A_lt.glif")
|
||||
self.assertEqual(glyphNameToFileName(".notdef", None), "_notdef.glif")
|
||||
self.assertEqual(glyphNameToFileName("T_H", None), "T__H_.glif")
|
||||
self.assertEqual(glyphNameToFileName("T_h", None), "T__h.glif")
|
||||
self.assertEqual(glyphNameToFileName("t_h", None), "t_h.glif")
|
||||
self.assertEqual(glyphNameToFileName("F_F_I", None), "F__F__I_.glif")
|
||||
self.assertEqual(glyphNameToFileName("f_f_i", None), "f_f_i.glif")
|
||||
self.assertEqual(glyphNameToFileName("AE", None), "A_E_.glif")
|
||||
self.assertEqual(glyphNameToFileName("Ae", None), "A_e.glif")
|
||||
self.assertEqual(glyphNameToFileName("ae", None), "ae.glif")
|
||||
self.assertEqual(glyphNameToFileName("aE", None), "aE_.glif")
|
||||
self.assertEqual(glyphNameToFileName("a.alt", None), "a.alt.glif")
|
||||
self.assertEqual(glyphNameToFileName("A.aLt", None), "A_.aL_t.glif")
|
||||
self.assertEqual(glyphNameToFileName("A.alT", None), "A_.alT_.glif")
|
||||
self.assertEqual(glyphNameToFileName("Aacute_V.swash", None), "A_acute_V_.swash.glif")
|
||||
self.assertEqual(glyphNameToFileName(".notdef", None), "_notdef.glif")
|
||||
self.assertEqual(glyphNameToFileName("con", None), "_con.glif")
|
||||
self.assertEqual(glyphNameToFileName("CON", None), "C_O_N_.glif")
|
||||
self.assertEqual(glyphNameToFileName("con.alt", None), "_con.alt.glif")
|
||||
self.assertEqual(glyphNameToFileName("alt.con", None), "alt._con.glif")
|
||||
|
||||
|
||||
class _Glyph(object):
|
||||
pass
|
||||
|
||||
|
||||
class ReadWriteFuncTest(unittest.TestCase):
|
||||
|
||||
def testRoundTrip(self):
|
||||
glyph = _Glyph()
|
||||
glyph.name = "a"
|
||||
glyph.unicodes = [0x0061]
|
||||
|
||||
s1 = writeGlyphToString(glyph.name, glyph)
|
||||
|
||||
glyph2 = _Glyph()
|
||||
readGlyphFromString(s1, glyph2)
|
||||
self.assertEqual(glyph.__dict__, glyph2.__dict__)
|
||||
|
||||
s2 = writeGlyphToString(glyph2.name, glyph2)
|
||||
self.assertEqual(s1, s2)
|
||||
|
||||
def testXmlDeclaration(self):
|
||||
s = writeGlyphToString("a", _Glyph())
|
||||
self.assertTrue(s.startswith(_XML_DECLARATION.decode("utf-8")))
|
533
Tests/ufoLib/test_plistlib.py
Normal file
533
Tests/ufoLib/test_plistlib.py
Normal file
@ -0,0 +1,533 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import sys
|
||||
import os
|
||||
import datetime
|
||||
import codecs
|
||||
import collections
|
||||
from io import BytesIO
|
||||
from numbers import Integral
|
||||
from fontTools.misc.py23 import tounicode, unicode
|
||||
from ufoLib import etree
|
||||
from ufoLib import plistlib
|
||||
import pytest
|
||||
|
||||
|
||||
PY2 = sys.version_info < (3,)
|
||||
if PY2:
|
||||
# This is a ResourceWarning that only happens on py27 at interpreter
|
||||
# finalization, and only when coverage is enabled. We can ignore it.
|
||||
# https://github.com/numpy/numpy/issues/3778#issuecomment-24885336
|
||||
pytestmark = pytest.mark.filterwarnings(
|
||||
"ignore:tp_compare didn't return -1 or -2 for exception"
|
||||
)
|
||||
|
||||
# The testdata is generated using https://github.com/python/cpython/...
|
||||
# Mac/Tools/plistlib_generate_testdata.py
|
||||
# which uses PyObjC to control the Cocoa classes for generating plists
|
||||
datadir = os.path.join(os.path.dirname(__file__), "testdata")
|
||||
with open(os.path.join(datadir, "test.plist"), "rb") as fp:
|
||||
TESTDATA = fp.read()
|
||||
|
||||
|
||||
def _test_pl(use_builtin_types):
|
||||
DataClass = bytes if use_builtin_types else plistlib.Data
|
||||
pl = dict(
|
||||
aString="Doodah",
|
||||
aList=["A", "B", 12, 32.5, [1, 2, 3]],
|
||||
aFloat=0.5,
|
||||
anInt=728,
|
||||
aBigInt=2 ** 63 - 44,
|
||||
aBigInt2=2 ** 63 + 44,
|
||||
aNegativeInt=-5,
|
||||
aNegativeBigInt=-80000000000,
|
||||
aDict=dict(
|
||||
anotherString="<hello & 'hi' there!>",
|
||||
aUnicodeValue="M\xe4ssig, Ma\xdf",
|
||||
aTrueValue=True,
|
||||
aFalseValue=False,
|
||||
deeperDict=dict(a=17, b=32.5, c=[1, 2, "text"]),
|
||||
),
|
||||
someData=DataClass(b"<binary gunk>"),
|
||||
someMoreData=DataClass(b"<lots of binary gunk>\0\1\2\3" * 10),
|
||||
nestedData=[DataClass(b"<lots of binary gunk>\0\1\2\3" * 10)],
|
||||
aDate=datetime.datetime(2004, 10, 26, 10, 33, 33),
|
||||
anEmptyDict=dict(),
|
||||
anEmptyList=list(),
|
||||
)
|
||||
pl["\xc5benraa"] = "That was a unicode key."
|
||||
return pl
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pl():
|
||||
return _test_pl(use_builtin_types=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pl_no_builtin_types():
|
||||
return _test_pl(use_builtin_types=False)
|
||||
|
||||
|
||||
@pytest.fixture(
|
||||
params=[True, False],
|
||||
ids=["builtin=True", "builtin=False"],
|
||||
)
|
||||
def use_builtin_types(request):
|
||||
return request.param
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def parametrized_pl(use_builtin_types):
|
||||
return _test_pl(use_builtin_types), use_builtin_types
|
||||
|
||||
|
||||
def test__test_pl():
|
||||
# sanity test that checks that the two values are equivalent
|
||||
# (plistlib.Data implements __eq__ against bytes values)
|
||||
pl = _test_pl(use_builtin_types=False)
|
||||
pl2 = _test_pl(use_builtin_types=True)
|
||||
assert pl == pl2
|
||||
|
||||
|
||||
def test_io(tmpdir, parametrized_pl):
|
||||
pl, use_builtin_types = parametrized_pl
|
||||
testpath = tmpdir / "test.plist"
|
||||
with testpath.open("wb") as fp:
|
||||
plistlib.dump(pl, fp, use_builtin_types=use_builtin_types)
|
||||
|
||||
with testpath.open("rb") as fp:
|
||||
pl2 = plistlib.load(fp, use_builtin_types=use_builtin_types)
|
||||
|
||||
assert pl == pl2
|
||||
|
||||
with pytest.raises(AttributeError):
|
||||
plistlib.dump(pl, "filename")
|
||||
|
||||
with pytest.raises(AttributeError):
|
||||
plistlib.load("filename")
|
||||
|
||||
|
||||
def test_invalid_type():
|
||||
pl = [object()]
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
plistlib.dumps(pl)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"pl",
|
||||
[
|
||||
0,
|
||||
2 ** 8 - 1,
|
||||
2 ** 8,
|
||||
2 ** 16 - 1,
|
||||
2 ** 16,
|
||||
2 ** 32 - 1,
|
||||
2 ** 32,
|
||||
2 ** 63 - 1,
|
||||
2 ** 64 - 1,
|
||||
1,
|
||||
-2 ** 63,
|
||||
],
|
||||
)
|
||||
def test_int(pl):
|
||||
data = plistlib.dumps(pl)
|
||||
pl2 = plistlib.loads(data)
|
||||
assert isinstance(pl2, Integral)
|
||||
assert pl == pl2
|
||||
data2 = plistlib.dumps(pl2)
|
||||
assert data == data2
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"pl", [2 ** 64 + 1, 2 ** 127 - 1, -2 ** 64, -2 ** 127]
|
||||
)
|
||||
def test_int_overflow(pl):
|
||||
with pytest.raises(OverflowError):
|
||||
plistlib.dumps(pl)
|
||||
|
||||
|
||||
def test_bytearray(use_builtin_types):
|
||||
DataClass = bytes if use_builtin_types else plistlib.Data
|
||||
pl = DataClass(b"<binary gunk\0\1\2\3>")
|
||||
array = bytearray(pl) if use_builtin_types else bytearray(pl.data)
|
||||
data = plistlib.dumps(array)
|
||||
pl2 = plistlib.loads(data, use_builtin_types=use_builtin_types)
|
||||
assert isinstance(pl2, DataClass)
|
||||
assert pl2 == pl
|
||||
data2 = plistlib.dumps(pl2, use_builtin_types=use_builtin_types)
|
||||
assert data == data2
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"DataClass, use_builtin_types",
|
||||
[(bytes, True), (plistlib.Data, True), (plistlib.Data, False)],
|
||||
ids=[
|
||||
"bytes|builtin_types=True",
|
||||
"Data|builtin_types=True",
|
||||
"Data|builtin_types=False",
|
||||
],
|
||||
)
|
||||
def test_bytes_data(DataClass, use_builtin_types):
|
||||
pl = DataClass(b"<binary gunk\0\1\2\3>")
|
||||
data = plistlib.dumps(pl, use_builtin_types=use_builtin_types)
|
||||
pl2 = plistlib.loads(data, use_builtin_types=use_builtin_types)
|
||||
assert isinstance(pl2, bytes if use_builtin_types else plistlib.Data)
|
||||
assert pl2 == pl
|
||||
data2 = plistlib.dumps(pl2, use_builtin_types=use_builtin_types)
|
||||
assert data == data2
|
||||
|
||||
|
||||
def test_bytes_string(use_builtin_types):
|
||||
pl = b"some ASCII bytes"
|
||||
data = plistlib.dumps(pl, use_builtin_types=False)
|
||||
pl2 = plistlib.loads(data, use_builtin_types=use_builtin_types)
|
||||
assert isinstance(pl2, unicode) # it's always a <string>
|
||||
assert pl2 == pl.decode()
|
||||
|
||||
|
||||
def test_indentation_array():
|
||||
data = [[[[[[[[{"test": "aaaaaa"}]]]]]]]]
|
||||
assert plistlib.loads(plistlib.dumps(data)) == data
|
||||
|
||||
|
||||
def test_indentation_dict():
|
||||
data = {
|
||||
"1": {"2": {"3": {"4": {"5": {"6": {"7": {"8": {"9": "aaaaaa"}}}}}}}}
|
||||
}
|
||||
assert plistlib.loads(plistlib.dumps(data)) == data
|
||||
|
||||
|
||||
def test_indentation_dict_mix():
|
||||
data = {"1": {"2": [{"3": [[[[[{"test": "aaaaaa"}]]]]]}]}}
|
||||
assert plistlib.loads(plistlib.dumps(data)) == data
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="we use two spaces, Apple uses tabs")
|
||||
def test_apple_formatting(parametrized_pl):
|
||||
# we also split base64 data into multiple lines differently:
|
||||
# both right-justify data to 76 chars, but Apple's writer treats tabs
|
||||
# as 8 spaces, whereas we use 2 spaces
|
||||
pl, use_builtin_types = parametrized_pl
|
||||
pl = plistlib.loads(TESTDATA, use_builtin_types=use_builtin_types)
|
||||
data = plistlib.dumps(pl, use_builtin_types=use_builtin_types)
|
||||
assert data == TESTDATA
|
||||
|
||||
|
||||
def test_apple_formatting_fromliteral(parametrized_pl):
|
||||
pl, use_builtin_types = parametrized_pl
|
||||
pl2 = plistlib.loads(TESTDATA, use_builtin_types=use_builtin_types)
|
||||
assert pl == pl2
|
||||
|
||||
|
||||
def test_apple_roundtrips(use_builtin_types):
|
||||
pl = plistlib.loads(TESTDATA, use_builtin_types=use_builtin_types)
|
||||
data = plistlib.dumps(pl, use_builtin_types=use_builtin_types)
|
||||
pl2 = plistlib.loads(data, use_builtin_types=use_builtin_types)
|
||||
data2 = plistlib.dumps(pl2, use_builtin_types=use_builtin_types)
|
||||
assert data == data2
|
||||
|
||||
|
||||
def test_bytesio(parametrized_pl):
|
||||
pl, use_builtin_types = parametrized_pl
|
||||
b = BytesIO()
|
||||
plistlib.dump(pl, b, use_builtin_types=use_builtin_types)
|
||||
pl2 = plistlib.load(
|
||||
BytesIO(b.getvalue()), use_builtin_types=use_builtin_types
|
||||
)
|
||||
assert pl == pl2
|
||||
|
||||
|
||||
@pytest.mark.parametrize("sort_keys", [False, True])
|
||||
def test_keysort_bytesio(sort_keys):
|
||||
pl = collections.OrderedDict()
|
||||
pl["b"] = 1
|
||||
pl["a"] = 2
|
||||
pl["c"] = 3
|
||||
|
||||
b = BytesIO()
|
||||
|
||||
plistlib.dump(pl, b, sort_keys=sort_keys)
|
||||
pl2 = plistlib.load(
|
||||
BytesIO(b.getvalue()), dict_type=collections.OrderedDict
|
||||
)
|
||||
|
||||
assert dict(pl) == dict(pl2)
|
||||
if sort_keys:
|
||||
assert list(pl2.keys()) == ["a", "b", "c"]
|
||||
else:
|
||||
assert list(pl2.keys()) == ["b", "a", "c"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("sort_keys", [False, True])
|
||||
def test_keysort(sort_keys):
|
||||
pl = collections.OrderedDict()
|
||||
pl["b"] = 1
|
||||
pl["a"] = 2
|
||||
pl["c"] = 3
|
||||
|
||||
data = plistlib.dumps(pl, sort_keys=sort_keys)
|
||||
pl2 = plistlib.loads(data, dict_type=collections.OrderedDict)
|
||||
|
||||
assert dict(pl) == dict(pl2)
|
||||
if sort_keys:
|
||||
assert list(pl2.keys()) == ["a", "b", "c"]
|
||||
else:
|
||||
assert list(pl2.keys()) == ["b", "a", "c"]
|
||||
|
||||
|
||||
def test_keys_no_string():
|
||||
pl = {42: "aNumber"}
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
plistlib.dumps(pl)
|
||||
|
||||
b = BytesIO()
|
||||
with pytest.raises(TypeError):
|
||||
plistlib.dump(pl, b)
|
||||
|
||||
|
||||
def test_skipkeys():
|
||||
pl = {42: "aNumber", "snake": "aWord"}
|
||||
|
||||
data = plistlib.dumps(pl, skipkeys=True, sort_keys=False)
|
||||
|
||||
pl2 = plistlib.loads(data)
|
||||
assert pl2 == {"snake": "aWord"}
|
||||
|
||||
fp = BytesIO()
|
||||
plistlib.dump(pl, fp, skipkeys=True, sort_keys=False)
|
||||
data = fp.getvalue()
|
||||
pl2 = plistlib.loads(fp.getvalue())
|
||||
assert pl2 == {"snake": "aWord"}
|
||||
|
||||
|
||||
def test_tuple_members():
|
||||
pl = {"first": (1, 2), "second": (1, 2), "third": (3, 4)}
|
||||
|
||||
data = plistlib.dumps(pl)
|
||||
pl2 = plistlib.loads(data)
|
||||
assert pl2 == {"first": [1, 2], "second": [1, 2], "third": [3, 4]}
|
||||
assert pl2["first"] is not pl2["second"]
|
||||
|
||||
|
||||
def test_list_members():
|
||||
pl = {"first": [1, 2], "second": [1, 2], "third": [3, 4]}
|
||||
|
||||
data = plistlib.dumps(pl)
|
||||
pl2 = plistlib.loads(data)
|
||||
assert pl2 == {"first": [1, 2], "second": [1, 2], "third": [3, 4]}
|
||||
assert pl2["first"] is not pl2["second"]
|
||||
|
||||
|
||||
def test_dict_members():
|
||||
pl = {"first": {"a": 1}, "second": {"a": 1}, "third": {"b": 2}}
|
||||
|
||||
data = plistlib.dumps(pl)
|
||||
pl2 = plistlib.loads(data)
|
||||
assert pl2 == {"first": {"a": 1}, "second": {"a": 1}, "third": {"b": 2}}
|
||||
assert pl2["first"] is not pl2["second"]
|
||||
|
||||
|
||||
def test_controlcharacters():
|
||||
for i in range(128):
|
||||
c = chr(i)
|
||||
testString = "string containing %s" % c
|
||||
if i >= 32 or c in "\r\n\t":
|
||||
# \r, \n and \t are the only legal control chars in XML
|
||||
data = plistlib.dumps(testString)
|
||||
# the stdlib's plistlib writer, as well as the elementtree
|
||||
# parser, always replace \r with \n inside string values;
|
||||
# lxml doesn't (the ctrl character is escaped), so it roundtrips
|
||||
if c != "\r" or etree._have_lxml:
|
||||
assert plistlib.loads(data) == testString
|
||||
else:
|
||||
with pytest.raises(ValueError):
|
||||
plistlib.dumps(testString)
|
||||
|
||||
|
||||
def test_non_bmp_characters():
|
||||
pl = {"python": "\U0001f40d"}
|
||||
data = plistlib.dumps(pl)
|
||||
assert plistlib.loads(data) == pl
|
||||
|
||||
|
||||
def test_nondictroot():
|
||||
test1 = "abc"
|
||||
test2 = [1, 2, 3, "abc"]
|
||||
result1 = plistlib.loads(plistlib.dumps(test1))
|
||||
result2 = plistlib.loads(plistlib.dumps(test2))
|
||||
assert test1 == result1
|
||||
assert test2 == result2
|
||||
|
||||
|
||||
def test_invalidarray():
|
||||
for i in [
|
||||
"<key>key inside an array</key>",
|
||||
"<key>key inside an array2</key><real>3</real>",
|
||||
"<true/><key>key inside an array3</key>",
|
||||
]:
|
||||
with pytest.raises(ValueError):
|
||||
plistlib.loads(
|
||||
("<plist><array>%s</array></plist>" % i).encode("utf-8")
|
||||
)
|
||||
|
||||
|
||||
def test_invaliddict():
|
||||
for i in [
|
||||
"<key><true/>k</key><string>compound key</string>",
|
||||
"<key>single key</key>",
|
||||
"<string>missing key</string>",
|
||||
"<key>k1</key><string>v1</string><real>5.3</real>"
|
||||
"<key>k1</key><key>k2</key><string>double key</string>",
|
||||
]:
|
||||
with pytest.raises(ValueError):
|
||||
plistlib.loads(("<plist><dict>%s</dict></plist>" % i).encode())
|
||||
with pytest.raises(ValueError):
|
||||
plistlib.loads(
|
||||
("<plist><array><dict>%s</dict></array></plist>" % i).encode()
|
||||
)
|
||||
|
||||
|
||||
def test_invalidinteger():
|
||||
with pytest.raises(ValueError):
|
||||
plistlib.loads(b"<plist><integer>not integer</integer></plist>")
|
||||
|
||||
|
||||
def test_invalidreal():
|
||||
with pytest.raises(ValueError):
|
||||
plistlib.loads(b"<plist><integer>not real</integer></plist>")
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"xml_encoding, encoding, bom",
|
||||
[
|
||||
(b"utf-8", "utf-8", codecs.BOM_UTF8),
|
||||
(b"utf-16", "utf-16-le", codecs.BOM_UTF16_LE),
|
||||
(b"utf-16", "utf-16-be", codecs.BOM_UTF16_BE),
|
||||
# expat parser (used by ElementTree) doesn't support UTF-32
|
||||
# (b"utf-32", "utf-32-le", codecs.BOM_UTF32_LE),
|
||||
# (b"utf-32", "utf-32-be", codecs.BOM_UTF32_BE),
|
||||
],
|
||||
)
|
||||
def test_xml_encodings(parametrized_pl, xml_encoding, encoding, bom):
|
||||
pl, use_builtin_types = parametrized_pl
|
||||
data = TESTDATA.replace(b"UTF-8", xml_encoding)
|
||||
data = bom + data.decode("utf-8").encode(encoding)
|
||||
pl2 = plistlib.loads(data, use_builtin_types=use_builtin_types)
|
||||
assert pl == pl2
|
||||
|
||||
|
||||
def test_fromtree(parametrized_pl):
|
||||
pl, use_builtin_types = parametrized_pl
|
||||
tree = etree.fromstring(TESTDATA)
|
||||
pl2 = plistlib.fromtree(tree, use_builtin_types=use_builtin_types)
|
||||
assert pl == pl2
|
||||
|
||||
|
||||
def _strip(txt):
|
||||
return (
|
||||
"".join(l.strip() for l in tounicode(txt, "utf-8").splitlines())
|
||||
if txt is not None
|
||||
else ""
|
||||
)
|
||||
|
||||
|
||||
def test_totree(parametrized_pl):
|
||||
pl, use_builtin_types = parametrized_pl
|
||||
tree = etree.fromstring(TESTDATA)[0] # ignore root 'plist' element
|
||||
tree2 = plistlib.totree(pl, use_builtin_types=use_builtin_types)
|
||||
assert tree.tag == tree2.tag == "dict"
|
||||
for (_, e1), (_, e2) in zip(etree.iterwalk(tree), etree.iterwalk(tree2)):
|
||||
assert e1.tag == e2.tag
|
||||
assert e1.attrib == e2.attrib
|
||||
assert len(e1) == len(e2)
|
||||
# ignore whitespace
|
||||
assert _strip(e1.text) == _strip(e2.text)
|
||||
|
||||
|
||||
def test_no_pretty_print(use_builtin_types):
|
||||
data = plistlib.dumps(
|
||||
{"data": b"hello" if use_builtin_types else plistlib.Data(b"hello")},
|
||||
pretty_print=False,
|
||||
use_builtin_types=use_builtin_types,
|
||||
)
|
||||
assert data == (
|
||||
plistlib.XML_DECLARATION
|
||||
+ plistlib.PLIST_DOCTYPE
|
||||
+ b'<plist version="1.0">'
|
||||
b"<dict>"
|
||||
b"<key>data</key>"
|
||||
b"<data>aGVsbG8=</data>"
|
||||
b"</dict>"
|
||||
b"</plist>"
|
||||
)
|
||||
|
||||
|
||||
def test_readPlist_from_path(pl):
|
||||
path = os.path.join(datadir, "test.plist")
|
||||
pl2 = plistlib.readPlist(path)
|
||||
assert isinstance(pl2["someData"], plistlib.Data)
|
||||
assert pl2 == pl
|
||||
|
||||
|
||||
def test_readPlist_from_file(pl):
|
||||
with open(os.path.join(datadir, "test.plist"), "rb") as f:
|
||||
pl2 = plistlib.readPlist(f)
|
||||
assert isinstance(pl2["someData"], plistlib.Data)
|
||||
assert pl2 == pl
|
||||
assert not f.closed
|
||||
|
||||
|
||||
def test_readPlistFromString(pl):
|
||||
pl2 = plistlib.readPlistFromString(TESTDATA)
|
||||
assert isinstance(pl2["someData"], plistlib.Data)
|
||||
assert pl2 == pl
|
||||
|
||||
|
||||
def test_writePlist_to_path(tmpdir, pl_no_builtin_types):
|
||||
testpath = tmpdir / "test.plist"
|
||||
plistlib.writePlist(pl_no_builtin_types, str(testpath))
|
||||
with testpath.open("rb") as fp:
|
||||
pl2 = plistlib.load(fp, use_builtin_types=False)
|
||||
assert pl2 == pl_no_builtin_types
|
||||
|
||||
|
||||
def test_writePlist_to_file(tmpdir, pl_no_builtin_types):
|
||||
testpath = tmpdir / "test.plist"
|
||||
with testpath.open("wb") as fp:
|
||||
plistlib.writePlist(pl_no_builtin_types, fp)
|
||||
with testpath.open("rb") as fp:
|
||||
pl2 = plistlib.load(fp, use_builtin_types=False)
|
||||
assert pl2 == pl_no_builtin_types
|
||||
|
||||
|
||||
def test_writePlistToString(pl_no_builtin_types):
|
||||
data = plistlib.writePlistToString(pl_no_builtin_types)
|
||||
pl2 = plistlib.loads(data)
|
||||
assert pl2 == pl_no_builtin_types
|
||||
|
||||
|
||||
def test_load_use_builtin_types_default():
|
||||
pl = plistlib.loads(TESTDATA)
|
||||
expected = plistlib.Data if PY2 else bytes
|
||||
assert isinstance(pl["someData"], expected)
|
||||
|
||||
|
||||
def test_dump_use_builtin_types_default(pl_no_builtin_types):
|
||||
data = plistlib.dumps(pl_no_builtin_types)
|
||||
pl2 = plistlib.loads(data)
|
||||
expected = plistlib.Data if PY2 else bytes
|
||||
assert isinstance(pl2["someData"], expected)
|
||||
assert pl2 == pl_no_builtin_types
|
||||
|
||||
|
||||
def test_non_ascii_bytes():
|
||||
with pytest.raises(ValueError, match="invalid non-ASCII bytes"):
|
||||
plistlib.dumps("\U0001f40d".encode("utf-8"), use_builtin_types=False)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
sys.exit(pytest.main(sys.argv))
|
46
Tests/ufoLib/testdata/DemoFont.ufo/fontinfo.plist
vendored
Normal file
46
Tests/ufoLib/testdata/DemoFont.ufo/fontinfo.plist
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>ascender</key>
|
||||
<integer>750</integer>
|
||||
<key>capHeight</key>
|
||||
<integer>527</integer>
|
||||
<key>copyright</key>
|
||||
<string>TOKEN COPYRIGHT STRING. COPYRIGHT SAME AS PACKAGE. </string>
|
||||
<key>defaultWidth</key>
|
||||
<integer>500</integer>
|
||||
<key>descender</key>
|
||||
<integer>-170</integer>
|
||||
<key>designer</key>
|
||||
<string>Various</string>
|
||||
<key>designerURL</key>
|
||||
<string></string>
|
||||
<key>familyName</key>
|
||||
<string>UFODEMOFONT</string>
|
||||
<key>fontStyle</key>
|
||||
<integer>64</integer>
|
||||
<key>license</key>
|
||||
<string>LICENSE SAME AS PACKAGE.</string>
|
||||
<key>notice</key>
|
||||
<string>TOKEN DESCRIPTION</string>
|
||||
<key>styleName</key>
|
||||
<string>JUSTADEMO</string>
|
||||
<key>trademark</key>
|
||||
<string>NO TRADEMARKS</string>
|
||||
<key>ttVendor</key>
|
||||
<string>NONE</string>
|
||||
<key>ttVersion</key>
|
||||
<string>Version 1.000;PS development 5;hotconv 1.0.38</string>
|
||||
<key>unitsPerEm</key>
|
||||
<integer>1000</integer>
|
||||
<key>vendorURL</key>
|
||||
<string></string>
|
||||
<key>versionMajor</key>
|
||||
<integer>1</integer>
|
||||
<key>xHeight</key>
|
||||
<integer>456</integer>
|
||||
<key>year</key>
|
||||
<integer>2003</integer>
|
||||
</dict>
|
||||
</plist>
|
40
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/A_.glif
vendored
Normal file
40
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/A_.glif
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="A" format="1">
|
||||
<advance width="487"/>
|
||||
<unicode hex="0041"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="243" y="681" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="243" y="739" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="243" y="-75" type="move" name="bottom"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="243" y="739" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="243" y="-75" type="move" name="bottom"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="460" y="0" type="line"/>
|
||||
<point x="318" y="664" type="line"/>
|
||||
<point x="169" y="664" type="line"/>
|
||||
<point x="27" y="0" type="line"/>
|
||||
<point x="129" y="0" type="line"/>
|
||||
<point x="150" y="94" type="line"/>
|
||||
<point x="328" y="94" type="line"/>
|
||||
<point x="348" y="0" type="line"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="307" y="189" type="line"/>
|
||||
<point x="172" y="189" type="line"/>
|
||||
<point x="214" y="398" type="line"/>
|
||||
<point x="239" y="541" type="line"/>
|
||||
<point x="249" y="541" type="line"/>
|
||||
<point x="264" y="399" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
46
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/B_.glif
vendored
Normal file
46
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/B_.glif
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="B" format="1">
|
||||
<advance width="460"/>
|
||||
<unicode hex="0042"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="201" y="681" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="412" y="474" type="curve" smooth="yes"/>
|
||||
<point x="412" y="606"/>
|
||||
<point x="323" y="664"/>
|
||||
<point x="170" y="664" type="curve" smooth="yes"/>
|
||||
<point x="47" y="664" type="line"/>
|
||||
<point x="47" y="0" type="line"/>
|
||||
<point x="170" y="0" type="line" smooth="yes"/>
|
||||
<point x="340" y="0"/>
|
||||
<point x="421" y="70"/>
|
||||
<point x="421" y="189" type="curve" smooth="yes"/>
|
||||
<point x="421" y="265"/>
|
||||
<point x="358" y="330"/>
|
||||
<point x="285" y="330" type="curve"/>
|
||||
<point x="285" y="340" type="line"/>
|
||||
<point x="358" y="340"/>
|
||||
<point x="412" y="392"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="151" y="284" type="line"/>
|
||||
<point x="264" y="284"/>
|
||||
<point x="314" y="253"/>
|
||||
<point x="314" y="189" type="curve" smooth="yes"/>
|
||||
<point x="314" y="130"/>
|
||||
<point x="265" y="95"/>
|
||||
<point x="151" y="95" type="curve"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="151" y="569" type="line"/>
|
||||
<point x="259" y="569"/>
|
||||
<point x="304" y="551"/>
|
||||
<point x="304" y="474" type="curve" smooth="yes"/>
|
||||
<point x="304" y="409"/>
|
||||
<point x="261" y="379"/>
|
||||
<point x="151" y="379" type="curve"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
22
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/F_.glif
vendored
Normal file
22
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/F_.glif
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="F" format="1">
|
||||
<advance width="417"/>
|
||||
<unicode hex="0046"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="213" y="681" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="332" y="350" type="line"/>
|
||||
<point x="151" y="350" type="line"/>
|
||||
<point x="151" y="560" type="line"/>
|
||||
<point x="379" y="560" type="line"/>
|
||||
<point x="379" y="664" type="line"/>
|
||||
<point x="47" y="664" type="line"/>
|
||||
<point x="47" y="0" type="line"/>
|
||||
<point x="151" y="0" type="line"/>
|
||||
<point x="151" y="250" type="line"/>
|
||||
<point x="332" y="250" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
9
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/F__A__B_.glif
vendored
Normal file
9
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/F__A__B_.glif
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="F_A_B" format="1">
|
||||
<advance width="900"/>
|
||||
<outline>
|
||||
<component base="A"/>
|
||||
<component base="B" xOffset="350"/>
|
||||
<component base="F" xScale="0.965925826289" xyScale="-0.258819045103" yxScale="0.258819045103" yScale="0.965925826289" xOffset="-50" yOffset="500"/>
|
||||
</outline>
|
||||
</glyph>
|
45
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/G_.glif
vendored
Normal file
45
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/G_.glif
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="G" format="1">
|
||||
<advance width="494"/>
|
||||
<unicode hex="0047"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="301" y="681" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="272" y="0" type="move" name="bottom"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="446" y="379" type="line"/>
|
||||
<point x="231" y="379" type="line"/>
|
||||
<point x="231" y="284" type="line"/>
|
||||
<point x="350" y="284" type="line"/>
|
||||
<point x="350" y="98" type="line"/>
|
||||
<point x="338" y="95"/>
|
||||
<point x="300" y="94"/>
|
||||
<point x="288" y="94" type="curve" smooth="yes"/>
|
||||
<point x="197" y="94"/>
|
||||
<point x="142" y="130"/>
|
||||
<point x="142" y="322" type="curve" smooth="yes"/>
|
||||
<point x="142" y="514"/>
|
||||
<point x="177" y="569"/>
|
||||
<point x="300" y="569" type="curve" smooth="yes"/>
|
||||
<point x="324" y="569"/>
|
||||
<point x="387" y="567"/>
|
||||
<point x="417" y="563" type="curve"/>
|
||||
<point x="427" y="653" type="line"/>
|
||||
<point x="401" y="663"/>
|
||||
<point x="338" y="674"/>
|
||||
<point x="300" y="674" type="curve" smooth="yes"/>
|
||||
<point x="120" y="674"/>
|
||||
<point x="37" y="570"/>
|
||||
<point x="37" y="322" type="curve" smooth="yes"/>
|
||||
<point x="37" y="71"/>
|
||||
<point x="134" y="-9"/>
|
||||
<point x="272" y="-9" type="curve" smooth="yes"/>
|
||||
<point x="353" y="-9"/>
|
||||
<point x="396" y="-1"/>
|
||||
<point x="446" y="18" type="curve"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
41
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/O_.glif
vendored
Normal file
41
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/O_.glif
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="O" format="1">
|
||||
<advance width="513"/>
|
||||
<unicode hex="004F"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="259" y="681" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="257" y="0" type="move" name="bottom"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="474" y="332" type="curve" smooth="yes"/>
|
||||
<point x="474" y="579"/>
|
||||
<point x="414" y="674"/>
|
||||
<point x="257" y="674" type="curve" smooth="yes"/>
|
||||
<point x="106" y="674"/>
|
||||
<point x="37" y="567"/>
|
||||
<point x="37" y="332" type="curve" smooth="yes"/>
|
||||
<point x="37" y="85"/>
|
||||
<point x="98" y="-9"/>
|
||||
<point x="256" y="-9" type="curve" smooth="yes"/>
|
||||
<point x="405" y="-9"/>
|
||||
<point x="474" y="98"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="257" y="574" type="curve" smooth="yes"/>
|
||||
<point x="336" y="574"/>
|
||||
<point x="367" y="511"/>
|
||||
<point x="367" y="332" type="curve" smooth="yes"/>
|
||||
<point x="367" y="163"/>
|
||||
<point x="332" y="91"/>
|
||||
<point x="256" y="91" type="curve" smooth="yes"/>
|
||||
<point x="176" y="91"/>
|
||||
<point x="145" y="153"/>
|
||||
<point x="145" y="332" type="curve" smooth="yes"/>
|
||||
<point x="145" y="501"/>
|
||||
<point x="180" y="574"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
37
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/R_.glif
vendored
Normal file
37
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/R_.glif
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="R" format="1">
|
||||
<advance width="463"/>
|
||||
<unicode hex="0052"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="208" y="681" type="move" name="top"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="445" y="0" type="line"/>
|
||||
<point x="319" y="249" type="line"/>
|
||||
<point x="380" y="286"/>
|
||||
<point x="417" y="349"/>
|
||||
<point x="417" y="436" type="curve" smooth="yes"/>
|
||||
<point x="417" y="590"/>
|
||||
<point x="315" y="664"/>
|
||||
<point x="151" y="664" type="curve" smooth="yes"/>
|
||||
<point x="47" y="664" type="line"/>
|
||||
<point x="47" y="0" type="line"/>
|
||||
<point x="151" y="0" type="line"/>
|
||||
<point x="151" y="208" type="line"/>
|
||||
<point x="180" y="208"/>
|
||||
<point x="197" y="210"/>
|
||||
<point x="221" y="214" type="curve"/>
|
||||
<point x="331" y="0" type="line"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="313" y="436" type="curve" smooth="yes"/>
|
||||
<point x="313" y="345"/>
|
||||
<point x="250" y="303"/>
|
||||
<point x="151" y="303" type="curve"/>
|
||||
<point x="151" y="569" type="line"/>
|
||||
<point x="251" y="569"/>
|
||||
<point x="313" y="535"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
6
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/a.glif
vendored
Normal file
6
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/a.glif
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="a" format="1">
|
||||
<unicode hex="0061"/>
|
||||
<outline>
|
||||
</outline>
|
||||
</glyph>
|
26
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/contents.plist
vendored
Normal file
26
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/contents.plist
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>A</key>
|
||||
<string>A_.glif</string>
|
||||
<key>B</key>
|
||||
<string>B_.glif</string>
|
||||
<key>F</key>
|
||||
<string>F_.glif</string>
|
||||
<key>F_A_B</key>
|
||||
<string>F__A__B_.glif</string>
|
||||
<key>G</key>
|
||||
<string>G_.glif</string>
|
||||
<key>O</key>
|
||||
<string>O_.glif</string>
|
||||
<key>R</key>
|
||||
<string>R_.glif</string>
|
||||
<key>a</key>
|
||||
<string>a.glif</string>
|
||||
<key>testglyph1</key>
|
||||
<string>testglyph1.glif</string>
|
||||
<key>testglyph1.reversed</key>
|
||||
<string>testglyph1.reversed.glif</string>
|
||||
</dict>
|
||||
</plist>
|
18
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/testglyph1.glif
vendored
Normal file
18
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/testglyph1.glif
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="testglyph1" format="1">
|
||||
<advance width="500"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="58" y="443" type="move"/>
|
||||
<point x="84" y="667" type="line"/>
|
||||
<point x="313" y="632" type="line"/>
|
||||
<point x="354" y="380" type="line"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="328" y="238" type="line"/>
|
||||
<point x="328" y="32" type="line"/>
|
||||
<point x="90" y="29" type="line"/>
|
||||
<point x="87" y="235" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
18
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/testglyph1.reversed.glif
vendored
Normal file
18
Tests/ufoLib/testdata/DemoFont.ufo/glyphs/testglyph1.reversed.glif
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="testglyph1.reversed" format="1">
|
||||
<advance width="500"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="354" y="380" type="move"/>
|
||||
<point x="313" y="632" type="line"/>
|
||||
<point x="84" y="667" type="line"/>
|
||||
<point x="58" y="443" type="line"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="328" y="238" type="line"/>
|
||||
<point x="87" y="235" type="line"/>
|
||||
<point x="90" y="29" type="line"/>
|
||||
<point x="328" y="32" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
6
Tests/ufoLib/testdata/DemoFont.ufo/lib.plist
vendored
Normal file
6
Tests/ufoLib/testdata/DemoFont.ufo/lib.plist
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
</dict>
|
||||
</plist>
|
10
Tests/ufoLib/testdata/DemoFont.ufo/metainfo.plist
vendored
Normal file
10
Tests/ufoLib/testdata/DemoFont.ufo/metainfo.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>creator</key>
|
||||
<string>org.robofab.ufoLib</string>
|
||||
<key>formatVersion</key>
|
||||
<integer>1</integer>
|
||||
</dict>
|
||||
</plist>
|
87
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/fontinfo.plist
vendored
Normal file
87
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/fontinfo.plist
vendored
Normal file
@ -0,0 +1,87 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>ascender</key>
|
||||
<integer>750</integer>
|
||||
<key>capHeight</key>
|
||||
<integer>750</integer>
|
||||
<key>copyright</key>
|
||||
<string>Copyright Some Foundry.</string>
|
||||
<key>createdBy</key>
|
||||
<string>Some Foundry</string>
|
||||
<key>defaultWidth</key>
|
||||
<integer>400</integer>
|
||||
<key>descender</key>
|
||||
<integer>-250</integer>
|
||||
<key>designer</key>
|
||||
<string>Some Designer</string>
|
||||
<key>designerURL</key>
|
||||
<string>http://somedesigner.com</string>
|
||||
<key>familyName</key>
|
||||
<string>Some Font (Family Name)</string>
|
||||
<key>fondID</key>
|
||||
<integer>15000</integer>
|
||||
<key>fondName</key>
|
||||
<string>SomeFont Regular (FOND Name)</string>
|
||||
<key>fontName</key>
|
||||
<string>SomeFont-Regular (Postscript Font Name)</string>
|
||||
<key>fontStyle</key>
|
||||
<integer>64</integer>
|
||||
<key>fullName</key>
|
||||
<string>Some Font-Regular (Postscript Full Name)</string>
|
||||
<key>italicAngle</key>
|
||||
<real>-12.5</real>
|
||||
<key>license</key>
|
||||
<string>License info for Some Foundry.</string>
|
||||
<key>licenseURL</key>
|
||||
<string>http://somefoundry.com/license</string>
|
||||
<key>menuName</key>
|
||||
<string>Some Font Regular (Style Map Family Name)</string>
|
||||
<key>msCharSet</key>
|
||||
<integer>0</integer>
|
||||
<key>note</key>
|
||||
<string>A note.</string>
|
||||
<key>notice</key>
|
||||
<string>Some Font by Some Designer for Some Foundry.</string>
|
||||
<key>otFamilyName</key>
|
||||
<string>Some Font (Preferred Family Name)</string>
|
||||
<key>otMacName</key>
|
||||
<string>Some Font Regular (Compatible Full Name)</string>
|
||||
<key>otStyleName</key>
|
||||
<string>Regular (Preferred Subfamily Name)</string>
|
||||
<key>slantAngle</key>
|
||||
<real>-12.5</real>
|
||||
<key>styleName</key>
|
||||
<string>Regular (Style Name)</string>
|
||||
<key>trademark</key>
|
||||
<string>Trademark Some Foundry</string>
|
||||
<key>ttUniqueID</key>
|
||||
<string>OpenType name Table Unique ID</string>
|
||||
<key>ttVendor</key>
|
||||
<string>SOME</string>
|
||||
<key>ttVersion</key>
|
||||
<string>OpenType name Table Version</string>
|
||||
<key>uniqueID</key>
|
||||
<integer>4000000</integer>
|
||||
<key>unitsPerEm</key>
|
||||
<integer>1000</integer>
|
||||
<key>vendorURL</key>
|
||||
<string>http://somefoundry.com</string>
|
||||
<key>versionMajor</key>
|
||||
<integer>1</integer>
|
||||
<key>versionMinor</key>
|
||||
<integer>0</integer>
|
||||
<key>weightName</key>
|
||||
<string>Medium</string>
|
||||
<key>weightValue</key>
|
||||
<integer>500</integer>
|
||||
<key>widthName</key>
|
||||
<string>Medium (normal)</string>
|
||||
<key>xHeight</key>
|
||||
<integer>500</integer>
|
||||
<key>year</key>
|
||||
<integer>2008</integer>
|
||||
</dict>
|
||||
</plist>
|
||||
|
13
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/glyphs/A_.glif
vendored
Normal file
13
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/glyphs/A_.glif
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="A" format="1">
|
||||
<advance width="740"/>
|
||||
<unicode hex="0041"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="20" y="0" type="line"/>
|
||||
<point x="720" y="0" type="line"/>
|
||||
<point x="720" y="700" type="line"/>
|
||||
<point x="20" y="700" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
21
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/glyphs/B_.glif
vendored
Normal file
21
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/glyphs/B_.glif
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="B" format="1">
|
||||
<advance width="740"/>
|
||||
<unicode hex="0042"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="20" y="350" type="curve" smooth="yes"/>
|
||||
<point x="20" y="157"/>
|
||||
<point x="177" y="0"/>
|
||||
<point x="370" y="0" type="curve" smooth="yes"/>
|
||||
<point x="563" y="0"/>
|
||||
<point x="720" y="157"/>
|
||||
<point x="720" y="350" type="curve" smooth="yes"/>
|
||||
<point x="720" y="543"/>
|
||||
<point x="563" y="700"/>
|
||||
<point x="370" y="700" type="curve" smooth="yes"/>
|
||||
<point x="177" y="700"/>
|
||||
<point x="20" y="543"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
10
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/glyphs/contents.plist
vendored
Normal file
10
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/glyphs/contents.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>A</key>
|
||||
<string>A_.glif</string>
|
||||
<key>B</key>
|
||||
<string>B_.glif</string>
|
||||
</dict>
|
||||
</plist>
|
15
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/groups.plist
vendored
Normal file
15
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/groups.plist
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>group1</key>
|
||||
<array>
|
||||
<string>A</string>
|
||||
</array>
|
||||
<key>group2</key>
|
||||
<array>
|
||||
<string>A</string>
|
||||
<string>B</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
16
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/kerning.plist
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>A</key>
|
||||
<dict>
|
||||
<key>B</key>
|
||||
<integer>100</integer>
|
||||
</dict>
|
||||
<key>B</key>
|
||||
<dict>
|
||||
<key>A</key>
|
||||
<integer>-200</integer>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
72
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/lib.plist
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>org.robofab.opentype.classes</key>
|
||||
<string>@myClass = [A B];
|
||||
</string>
|
||||
<key>org.robofab.opentype.featureorder</key>
|
||||
<array>
|
||||
<string>liga</string>
|
||||
</array>
|
||||
<key>org.robofab.opentype.features</key>
|
||||
<dict>
|
||||
<key>liga</key>
|
||||
<string>feature liga {
|
||||
sub A A by b;
|
||||
} liga;
|
||||
</string>
|
||||
</dict>
|
||||
<key>org.robofab.postScriptHintData</key>
|
||||
<dict>
|
||||
<key>blueFuzz</key>
|
||||
<integer>1</integer>
|
||||
<key>blueScale</key>
|
||||
<real>0.039625</real>
|
||||
<key>blueShift</key>
|
||||
<integer>7</integer>
|
||||
<key>blueValues</key>
|
||||
<array>
|
||||
<array>
|
||||
<integer>500</integer>
|
||||
<integer>510</integer>
|
||||
</array>
|
||||
</array>
|
||||
<key>familyBlues</key>
|
||||
<array>
|
||||
<array>
|
||||
<integer>500</integer>
|
||||
<integer>510</integer>
|
||||
</array>
|
||||
</array>
|
||||
<key>familyOtherBlues</key>
|
||||
<array>
|
||||
<array>
|
||||
<integer>-260</integer>
|
||||
<integer>-250</integer>
|
||||
</array>
|
||||
</array>
|
||||
<key>forceBold</key>
|
||||
<true/>
|
||||
<key>hStems</key>
|
||||
<array>
|
||||
<integer>100</integer>
|
||||
<integer>120</integer>
|
||||
</array>
|
||||
<key>otherBlues</key>
|
||||
<array>
|
||||
<array>
|
||||
<integer>-260</integer>
|
||||
<integer>-250</integer>
|
||||
</array>
|
||||
</array>
|
||||
<key>vStems</key>
|
||||
<array>
|
||||
<integer>80</integer>
|
||||
<integer>90</integer>
|
||||
</array>
|
||||
</dict>
|
||||
<key>org.robofab.testFontLibData</key>
|
||||
<string>Foo Bar</string>
|
||||
</dict>
|
||||
</plist>
|
10
Tests/ufoLib/testdata/TestFont1 (UFO1).ufo/metainfo.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>creator</key>
|
||||
<string>org.robofab.ufoLib</string>
|
||||
<key>formatVersion</key>
|
||||
<integer>1</integer>
|
||||
</dict>
|
||||
</plist>
|
5
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/features.fea
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
@myClass = [A B];
|
||||
|
||||
feature liga {
|
||||
sub A A by b;
|
||||
} liga;
|
239
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/fontinfo.plist
vendored
Normal file
@ -0,0 +1,239 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>ascender</key>
|
||||
<integer>750</integer>
|
||||
<key>capHeight</key>
|
||||
<integer>750</integer>
|
||||
<key>copyright</key>
|
||||
<string>Copyright Some Foundry.</string>
|
||||
<key>descender</key>
|
||||
<integer>-250</integer>
|
||||
<key>familyName</key>
|
||||
<string>Some Font (Family Name)</string>
|
||||
<key>italicAngle</key>
|
||||
<real>-12.5</real>
|
||||
<key>macintoshFONDFamilyID</key>
|
||||
<integer>15000</integer>
|
||||
<key>macintoshFONDName</key>
|
||||
<string>SomeFont Regular (FOND Name)</string>
|
||||
<key>note</key>
|
||||
<string>A note.</string>
|
||||
<key>openTypeHeadCreated</key>
|
||||
<string>2000/01/01 00:00:00</string>
|
||||
<key>openTypeHeadFlags</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeHeadLowestRecPPEM</key>
|
||||
<integer>10</integer>
|
||||
<key>openTypeHheaAscender</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeHheaCaretOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeHheaCaretSlopeRise</key>
|
||||
<integer>1</integer>
|
||||
<key>openTypeHheaCaretSlopeRun</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeHheaDescender</key>
|
||||
<integer>-250</integer>
|
||||
<key>openTypeHheaLineGap</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeNameCompatibleFullName</key>
|
||||
<string>Some Font Regular (Compatible Full Name)</string>
|
||||
<key>openTypeNameDescription</key>
|
||||
<string>Some Font by Some Designer for Some Foundry.</string>
|
||||
<key>openTypeNameDesigner</key>
|
||||
<string>Some Designer</string>
|
||||
<key>openTypeNameDesignerURL</key>
|
||||
<string>http://somedesigner.com</string>
|
||||
<key>openTypeNameLicense</key>
|
||||
<string>License info for Some Foundry.</string>
|
||||
<key>openTypeNameLicenseURL</key>
|
||||
<string>http://somefoundry.com/license</string>
|
||||
<key>openTypeNameManufacturer</key>
|
||||
<string>Some Foundry</string>
|
||||
<key>openTypeNameManufacturerURL</key>
|
||||
<string>http://somefoundry.com</string>
|
||||
<key>openTypeNamePreferredFamilyName</key>
|
||||
<string>Some Font (Preferred Family Name)</string>
|
||||
<key>openTypeNamePreferredSubfamilyName</key>
|
||||
<string>Regular (Preferred Subfamily Name)</string>
|
||||
<key>openTypeNameSampleText</key>
|
||||
<string>Sample Text for Some Font.</string>
|
||||
<key>openTypeNameUniqueID</key>
|
||||
<string>OpenType name Table Unique ID</string>
|
||||
<key>openTypeNameVersion</key>
|
||||
<string>OpenType name Table Version</string>
|
||||
<key>openTypeNameWWSFamilyName</key>
|
||||
<string>Some Font (WWS Family Name)</string>
|
||||
<key>openTypeNameWWSSubfamilyName</key>
|
||||
<string>Regular (WWS Subfamily Name)</string>
|
||||
<key>openTypeOS2CodePageRanges</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeOS2Panose</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
<integer>2</integer>
|
||||
<integer>3</integer>
|
||||
<integer>4</integer>
|
||||
<integer>5</integer>
|
||||
<integer>6</integer>
|
||||
<integer>7</integer>
|
||||
<integer>8</integer>
|
||||
<integer>9</integer>
|
||||
</array>
|
||||
<key>openTypeOS2FamilyClass</key>
|
||||
<array>
|
||||
<integer>1</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeOS2Selection</key>
|
||||
<array>
|
||||
<integer>3</integer>
|
||||
</array>
|
||||
<key>openTypeOS2StrikeoutPosition</key>
|
||||
<integer>300</integer>
|
||||
<key>openTypeOS2StrikeoutSize</key>
|
||||
<integer>20</integer>
|
||||
<key>openTypeOS2SubscriptXOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeOS2SubscriptXSize</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2SubscriptYOffset</key>
|
||||
<integer>-100</integer>
|
||||
<key>openTypeOS2SubscriptYSize</key>
|
||||
<integer>400</integer>
|
||||
<key>openTypeOS2SuperscriptXOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeOS2SuperscriptXSize</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2SuperscriptYOffset</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2SuperscriptYSize</key>
|
||||
<integer>400</integer>
|
||||
<key>openTypeOS2Type</key>
|
||||
<array>
|
||||
</array>
|
||||
<key>openTypeOS2TypoAscender</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeOS2TypoDescender</key>
|
||||
<integer>-250</integer>
|
||||
<key>openTypeOS2TypoLineGap</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2UnicodeRanges</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeOS2VendorID</key>
|
||||
<string>SOME</string>
|
||||
<key>openTypeOS2WeightClass</key>
|
||||
<integer>500</integer>
|
||||
<key>openTypeOS2WidthClass</key>
|
||||
<integer>5</integer>
|
||||
<key>openTypeOS2WinAscent</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeOS2WinDescent</key>
|
||||
<integer>-250</integer>
|
||||
<key>openTypeVheaCaretOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeVheaCaretSlopeRise</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeVheaCaretSlopeRun</key>
|
||||
<integer>1</integer>
|
||||
<key>openTypeVheaVertTypoAscender</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeVheaVertTypoDescender</key>
|
||||
<integer>-250</integer>
|
||||
<key>openTypeVheaVertTypoLineGap</key>
|
||||
<integer>200</integer>
|
||||
<key>postscriptBlueFuzz</key>
|
||||
<integer>1</integer>
|
||||
<key>postscriptBlueScale</key>
|
||||
<real>0.039625</real>
|
||||
<key>postscriptBlueShift</key>
|
||||
<integer>7</integer>
|
||||
<key>postscriptBlueValues</key>
|
||||
<array>
|
||||
<integer>500</integer>
|
||||
<integer>510</integer>
|
||||
</array>
|
||||
<key>postscriptDefaultCharacter</key>
|
||||
<string>.notdef</string>
|
||||
<key>postscriptDefaultWidthX</key>
|
||||
<integer>400</integer>
|
||||
<key>postscriptFamilyBlues</key>
|
||||
<array>
|
||||
<integer>500</integer>
|
||||
<integer>510</integer>
|
||||
</array>
|
||||
<key>postscriptFamilyOtherBlues</key>
|
||||
<array>
|
||||
<integer>-250</integer>
|
||||
<integer>-260</integer>
|
||||
</array>
|
||||
<key>postscriptFontName</key>
|
||||
<string>SomeFont-Regular (Postscript Font Name)</string>
|
||||
<key>postscriptForceBold</key>
|
||||
<true/>
|
||||
<key>postscriptFullName</key>
|
||||
<string>Some Font-Regular (Postscript Full Name)</string>
|
||||
<key>postscriptIsFixedPitch</key>
|
||||
<false/>
|
||||
<key>postscriptNominalWidthX</key>
|
||||
<integer>400</integer>
|
||||
<key>postscriptOtherBlues</key>
|
||||
<array>
|
||||
<integer>-250</integer>
|
||||
<integer>-260</integer>
|
||||
</array>
|
||||
<key>postscriptSlantAngle</key>
|
||||
<real>-12.5</real>
|
||||
<key>postscriptStemSnapH</key>
|
||||
<array>
|
||||
<integer>100</integer>
|
||||
<integer>120</integer>
|
||||
</array>
|
||||
<key>postscriptStemSnapV</key>
|
||||
<array>
|
||||
<integer>80</integer>
|
||||
<integer>90</integer>
|
||||
</array>
|
||||
<key>postscriptUnderlinePosition</key>
|
||||
<integer>-200</integer>
|
||||
<key>postscriptUnderlineThickness</key>
|
||||
<integer>20</integer>
|
||||
<key>postscriptUniqueID</key>
|
||||
<integer>4000000</integer>
|
||||
<key>postscriptWeightName</key>
|
||||
<string>Medium</string>
|
||||
<key>postscriptWindowsCharacterSet</key>
|
||||
<integer>1</integer>
|
||||
<key>styleMapFamilyName</key>
|
||||
<string>Some Font Regular (Style Map Family Name)</string>
|
||||
<key>styleMapStyleName</key>
|
||||
<string>regular</string>
|
||||
<key>styleName</key>
|
||||
<string>Regular (Style Name)</string>
|
||||
<key>trademark</key>
|
||||
<string>Trademark Some Foundry</string>
|
||||
<key>unitsPerEm</key>
|
||||
<integer>1000</integer>
|
||||
<key>versionMajor</key>
|
||||
<integer>1</integer>
|
||||
<key>versionMinor</key>
|
||||
<integer>0</integer>
|
||||
<key>xHeight</key>
|
||||
<integer>500</integer>
|
||||
<key>year</key>
|
||||
<integer>2008</integer>
|
||||
</dict>
|
||||
</plist>
|
||||
|
13
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/glyphs/A_.glif
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="A" format="1">
|
||||
<advance width="740"/>
|
||||
<unicode hex="0041"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="20" y="0" type="line"/>
|
||||
<point x="720" y="0" type="line"/>
|
||||
<point x="720" y="700" type="line"/>
|
||||
<point x="20" y="700" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
21
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/glyphs/B_.glif
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="B" format="1">
|
||||
<advance width="740"/>
|
||||
<unicode hex="0042"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="20" y="350" type="curve" smooth="yes"/>
|
||||
<point x="20" y="157"/>
|
||||
<point x="177" y="0"/>
|
||||
<point x="370" y="0" type="curve" smooth="yes"/>
|
||||
<point x="563" y="0"/>
|
||||
<point x="720" y="157"/>
|
||||
<point x="720" y="350" type="curve" smooth="yes"/>
|
||||
<point x="720" y="543"/>
|
||||
<point x="563" y="700"/>
|
||||
<point x="370" y="700" type="curve" smooth="yes"/>
|
||||
<point x="177" y="700"/>
|
||||
<point x="20" y="543"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
10
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/glyphs/contents.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>A</key>
|
||||
<string>A_.glif</string>
|
||||
<key>B</key>
|
||||
<string>B_.glif</string>
|
||||
</dict>
|
||||
</plist>
|
15
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/groups.plist
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>group1</key>
|
||||
<array>
|
||||
<string>A</string>
|
||||
</array>
|
||||
<key>group2</key>
|
||||
<array>
|
||||
<string>A</string>
|
||||
<string>B</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
16
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/kerning.plist
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>A</key>
|
||||
<dict>
|
||||
<key>B</key>
|
||||
<integer>100</integer>
|
||||
</dict>
|
||||
<key>B</key>
|
||||
<dict>
|
||||
<key>A</key>
|
||||
<integer>-200</integer>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
8
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/lib.plist
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>org.robofab.testFontLibData</key>
|
||||
<string>Foo Bar</string>
|
||||
</dict>
|
||||
</plist>
|
10
Tests/ufoLib/testdata/TestFont1 (UFO2).ufo/metainfo.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>creator</key>
|
||||
<string>org.robofab.ufoLib</string>
|
||||
<key>formatVersion</key>
|
||||
<integer>2</integer>
|
||||
</dict>
|
||||
</plist>
|
10
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/data/com.github.fonttools.ttx/CUST.ttx
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ttFont sfntVersion="\x00\x01\x00\x00" ttLibVersion="3.13">
|
||||
|
||||
<CUST raw="True">
|
||||
<hexdata>
|
||||
0001beef
|
||||
</hexdata>
|
||||
</CUST>
|
||||
|
||||
</ttFont>
|
338
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/fontinfo.plist
vendored
Normal file
@ -0,0 +1,338 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>ascender</key>
|
||||
<integer>750</integer>
|
||||
<key>capHeight</key>
|
||||
<integer>750</integer>
|
||||
<key>copyright</key>
|
||||
<string>Copyright © Some Foundry.</string>
|
||||
<key>descender</key>
|
||||
<integer>-250</integer>
|
||||
<key>familyName</key>
|
||||
<string>Some Font (Family Name)</string>
|
||||
<key>guidelines</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>x</key>
|
||||
<integer>250</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>x</key>
|
||||
<integer>-20</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>x</key>
|
||||
<integer>30</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>y</key>
|
||||
<integer>500</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>y</key>
|
||||
<integer>-200</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>y</key>
|
||||
<integer>700</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>angle</key>
|
||||
<integer>135</integer>
|
||||
<key>x</key>
|
||||
<integer>0</integer>
|
||||
<key>y</key>
|
||||
<integer>0</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>angle</key>
|
||||
<integer>45</integer>
|
||||
<key>x</key>
|
||||
<integer>0</integer>
|
||||
<key>y</key>
|
||||
<integer>700</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>angle</key>
|
||||
<integer>135</integer>
|
||||
<key>x</key>
|
||||
<integer>20</integer>
|
||||
<key>y</key>
|
||||
<integer>0</integer>
|
||||
</dict>
|
||||
</array>
|
||||
<key>italicAngle</key>
|
||||
<real>-12.5</real>
|
||||
<key>macintoshFONDFamilyID</key>
|
||||
<integer>15000</integer>
|
||||
<key>macintoshFONDName</key>
|
||||
<string>SomeFont Regular (FOND Name)</string>
|
||||
<key>note</key>
|
||||
<string>A note.</string>
|
||||
<key>openTypeGaspRangeRecords</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>rangeGaspBehavior</key>
|
||||
<array>
|
||||
<integer>1</integer>
|
||||
<integer>3</integer>
|
||||
</array>
|
||||
<key>rangeMaxPPEM</key>
|
||||
<integer>7</integer>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>rangeGaspBehavior</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
<integer>2</integer>
|
||||
<integer>3</integer>
|
||||
</array>
|
||||
<key>rangeMaxPPEM</key>
|
||||
<integer>65535</integer>
|
||||
</dict>
|
||||
</array>
|
||||
<key>openTypeHeadCreated</key>
|
||||
<string>2000/01/01 00:00:00</string>
|
||||
<key>openTypeHeadFlags</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeHeadLowestRecPPEM</key>
|
||||
<integer>10</integer>
|
||||
<key>openTypeHheaAscender</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeHheaCaretOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeHheaCaretSlopeRise</key>
|
||||
<integer>1</integer>
|
||||
<key>openTypeHheaCaretSlopeRun</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeHheaDescender</key>
|
||||
<integer>-250</integer>
|
||||
<key>openTypeHheaLineGap</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeNameCompatibleFullName</key>
|
||||
<string>Some Font Regular (Compatible Full Name)</string>
|
||||
<key>openTypeNameDescription</key>
|
||||
<string>Some Font by Some Designer for Some Foundry.</string>
|
||||
<key>openTypeNameDesigner</key>
|
||||
<string>Some Designer</string>
|
||||
<key>openTypeNameDesignerURL</key>
|
||||
<string>http://somedesigner.com</string>
|
||||
<key>openTypeNameLicense</key>
|
||||
<string>License info for Some Foundry.</string>
|
||||
<key>openTypeNameLicenseURL</key>
|
||||
<string>http://somefoundry.com/license</string>
|
||||
<key>openTypeNameManufacturer</key>
|
||||
<string>Some Foundry</string>
|
||||
<key>openTypeNameManufacturerURL</key>
|
||||
<string>http://somefoundry.com</string>
|
||||
<key>openTypeNamePreferredFamilyName</key>
|
||||
<string>Some Font (Preferred Family Name)</string>
|
||||
<key>openTypeNamePreferredSubfamilyName</key>
|
||||
<string>Regular (Preferred Subfamily Name)</string>
|
||||
<key>openTypeNameRecords</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>encodingID</key>
|
||||
<integer>0</integer>
|
||||
<key>languageID</key>
|
||||
<integer>0</integer>
|
||||
<key>nameID</key>
|
||||
<integer>3</integer>
|
||||
<key>platformID</key>
|
||||
<integer>1</integer>
|
||||
<key>string</key>
|
||||
<string>Unique Font Identifier</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>encodingID</key>
|
||||
<integer>1</integer>
|
||||
<key>languageID</key>
|
||||
<integer>1033</integer>
|
||||
<key>nameID</key>
|
||||
<integer>8</integer>
|
||||
<key>platformID</key>
|
||||
<integer>3</integer>
|
||||
<key>string</key>
|
||||
<string>Some Foundry (Manufacturer Name)</string>
|
||||
</dict>
|
||||
</array>
|
||||
<key>openTypeNameSampleText</key>
|
||||
<string>Sample Text for Some Font.</string>
|
||||
<key>openTypeNameUniqueID</key>
|
||||
<string>OpenType name Table Unique ID</string>
|
||||
<key>openTypeNameVersion</key>
|
||||
<string>OpenType name Table Version</string>
|
||||
<key>openTypeNameWWSFamilyName</key>
|
||||
<string>Some Font (WWS Family Name)</string>
|
||||
<key>openTypeNameWWSSubfamilyName</key>
|
||||
<string>Regular (WWS Subfamily Name)</string>
|
||||
<key>openTypeOS2CodePageRanges</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeOS2FamilyClass</key>
|
||||
<array>
|
||||
<integer>1</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeOS2Panose</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
<integer>2</integer>
|
||||
<integer>3</integer>
|
||||
<integer>4</integer>
|
||||
<integer>5</integer>
|
||||
<integer>6</integer>
|
||||
<integer>7</integer>
|
||||
<integer>8</integer>
|
||||
<integer>9</integer>
|
||||
</array>
|
||||
<key>openTypeOS2Selection</key>
|
||||
<array>
|
||||
<integer>3</integer>
|
||||
</array>
|
||||
<key>openTypeOS2StrikeoutPosition</key>
|
||||
<integer>300</integer>
|
||||
<key>openTypeOS2StrikeoutSize</key>
|
||||
<integer>20</integer>
|
||||
<key>openTypeOS2SubscriptXOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeOS2SubscriptXSize</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2SubscriptYOffset</key>
|
||||
<integer>-100</integer>
|
||||
<key>openTypeOS2SubscriptYSize</key>
|
||||
<integer>400</integer>
|
||||
<key>openTypeOS2SuperscriptXOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeOS2SuperscriptXSize</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2SuperscriptYOffset</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2SuperscriptYSize</key>
|
||||
<integer>400</integer>
|
||||
<key>openTypeOS2Type</key>
|
||||
<array/>
|
||||
<key>openTypeOS2TypoAscender</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeOS2TypoDescender</key>
|
||||
<integer>-250</integer>
|
||||
<key>openTypeOS2TypoLineGap</key>
|
||||
<integer>200</integer>
|
||||
<key>openTypeOS2UnicodeRanges</key>
|
||||
<array>
|
||||
<integer>0</integer>
|
||||
<integer>1</integer>
|
||||
</array>
|
||||
<key>openTypeOS2VendorID</key>
|
||||
<string>SOME</string>
|
||||
<key>openTypeOS2WeightClass</key>
|
||||
<integer>500</integer>
|
||||
<key>openTypeOS2WidthClass</key>
|
||||
<integer>5</integer>
|
||||
<key>openTypeOS2WinAscent</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeOS2WinDescent</key>
|
||||
<integer>250</integer>
|
||||
<key>openTypeVheaCaretOffset</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeVheaCaretSlopeRise</key>
|
||||
<integer>0</integer>
|
||||
<key>openTypeVheaCaretSlopeRun</key>
|
||||
<integer>1</integer>
|
||||
<key>openTypeVheaVertTypoAscender</key>
|
||||
<integer>750</integer>
|
||||
<key>openTypeVheaVertTypoDescender</key>
|
||||
<integer>-250</integer>
|
||||
<key>openTypeVheaVertTypoLineGap</key>
|
||||
<integer>200</integer>
|
||||
<key>postscriptBlueFuzz</key>
|
||||
<integer>1</integer>
|
||||
<key>postscriptBlueScale</key>
|
||||
<real>0.039625</real>
|
||||
<key>postscriptBlueShift</key>
|
||||
<integer>7</integer>
|
||||
<key>postscriptBlueValues</key>
|
||||
<array>
|
||||
<integer>500</integer>
|
||||
<integer>510</integer>
|
||||
</array>
|
||||
<key>postscriptDefaultCharacter</key>
|
||||
<string>.notdef</string>
|
||||
<key>postscriptDefaultWidthX</key>
|
||||
<integer>400</integer>
|
||||
<key>postscriptFamilyBlues</key>
|
||||
<array>
|
||||
<integer>500</integer>
|
||||
<integer>510</integer>
|
||||
</array>
|
||||
<key>postscriptFamilyOtherBlues</key>
|
||||
<array>
|
||||
<integer>-250</integer>
|
||||
<integer>-260</integer>
|
||||
</array>
|
||||
<key>postscriptFontName</key>
|
||||
<string>SomeFont-Regular (Postscript Font Name)</string>
|
||||
<key>postscriptForceBold</key>
|
||||
<true/>
|
||||
<key>postscriptFullName</key>
|
||||
<string>Some Font-Regular (Postscript Full Name)</string>
|
||||
<key>postscriptIsFixedPitch</key>
|
||||
<false/>
|
||||
<key>postscriptNominalWidthX</key>
|
||||
<integer>400</integer>
|
||||
<key>postscriptOtherBlues</key>
|
||||
<array>
|
||||
<integer>-250</integer>
|
||||
<integer>-260</integer>
|
||||
</array>
|
||||
<key>postscriptSlantAngle</key>
|
||||
<real>-12.5</real>
|
||||
<key>postscriptStemSnapH</key>
|
||||
<array>
|
||||
<integer>100</integer>
|
||||
<integer>120</integer>
|
||||
</array>
|
||||
<key>postscriptStemSnapV</key>
|
||||
<array>
|
||||
<integer>80</integer>
|
||||
<integer>90</integer>
|
||||
</array>
|
||||
<key>postscriptUnderlinePosition</key>
|
||||
<integer>-200</integer>
|
||||
<key>postscriptUnderlineThickness</key>
|
||||
<integer>20</integer>
|
||||
<key>postscriptUniqueID</key>
|
||||
<integer>4000000</integer>
|
||||
<key>postscriptWeightName</key>
|
||||
<string>Medium</string>
|
||||
<key>postscriptWindowsCharacterSet</key>
|
||||
<integer>1</integer>
|
||||
<key>styleMapFamilyName</key>
|
||||
<string>Some Font Regular (Style Map Family Name)</string>
|
||||
<key>styleMapStyleName</key>
|
||||
<string>regular</string>
|
||||
<key>styleName</key>
|
||||
<string>Regular (Style Name)</string>
|
||||
<key>trademark</key>
|
||||
<string>Trademark Some Foundry</string>
|
||||
<key>unitsPerEm</key>
|
||||
<integer>1000</integer>
|
||||
<key>versionMajor</key>
|
||||
<integer>1</integer>
|
||||
<key>versionMinor</key>
|
||||
<integer>0</integer>
|
||||
<key>xHeight</key>
|
||||
<integer>500</integer>
|
||||
<key>year</key>
|
||||
<integer>2008</integer>
|
||||
</dict>
|
||||
</plist>
|
18
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/_notdef.glif
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name=".notdef" format="2">
|
||||
<advance height="1000" width="500"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="450" y="0" type="line"/>
|
||||
<point x="450" y="750" type="line"/>
|
||||
<point x="50" y="750" type="line"/>
|
||||
<point x="50" y="0" type="line"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="400" y="50" type="line"/>
|
||||
<point x="100" y="50" type="line"/>
|
||||
<point x="100" y="700" type="line"/>
|
||||
<point x="400" y="700" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
12
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/a.glif
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="a" format="2">
|
||||
<advance height="750" width="388"/>
|
||||
<unicode hex="0061"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="66" y="0" type="line"/>
|
||||
<point x="322" y="0" type="line"/>
|
||||
<point x="194" y="510" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
13
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/b.glif
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="b" format="2">
|
||||
<advance height="750" width="410"/>
|
||||
<unicode hex="0062"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="100" y="505" type="line"/>
|
||||
<point x="100" y="-5" type="line"/>
|
||||
<point x="310" y="-5" type="line"/>
|
||||
<point x="310" y="505" type="line"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
16
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/c.glif
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="c" format="2">
|
||||
<advance height="750" width="374"/>
|
||||
<unicode hex="0063"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="300" y="-10" type="curve"/>
|
||||
<point x="300" y="500" type="line"/>
|
||||
<point x="150" y="500"/>
|
||||
<point x="100" y="450"/>
|
||||
<point x="100" y="245" type="curve"/>
|
||||
<point x="100" y="40"/>
|
||||
<point x="150" y="-10"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
34
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/contents.plist
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>.notdef</key>
|
||||
<string>_notdef.glif</string>
|
||||
<key>a</key>
|
||||
<string>a.glif</string>
|
||||
<key>b</key>
|
||||
<string>b.glif</string>
|
||||
<key>c</key>
|
||||
<string>c.glif</string>
|
||||
<key>d</key>
|
||||
<string>d.glif</string>
|
||||
<key>e</key>
|
||||
<string>e.glif</string>
|
||||
<key>f</key>
|
||||
<string>f.glif</string>
|
||||
<key>g</key>
|
||||
<string>g.glif</string>
|
||||
<key>h</key>
|
||||
<string>h.glif</string>
|
||||
<key>i</key>
|
||||
<string>i.glif</string>
|
||||
<key>j</key>
|
||||
<string>j.glif</string>
|
||||
<key>k</key>
|
||||
<string>k.glif</string>
|
||||
<key>l</key>
|
||||
<string>l.glif</string>
|
||||
<key>space</key>
|
||||
<string>space.glif</string>
|
||||
</dict>
|
||||
</plist>
|
21
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/d.glif
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="d" format="2">
|
||||
<advance height="750" width="374"/>
|
||||
<unicode hex="0064"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="150.66" y="197.32" type="curve"/>
|
||||
<point x="117" y="197.32"/>
|
||||
<point x="90.33" y="170.33"/>
|
||||
<point x="90.33" y="137" type="curve"/>
|
||||
<point x="90.33" y="103.67"/>
|
||||
<point x="117" y="77.01"/>
|
||||
<point x="150.66" y="77.01" type="curve"/>
|
||||
<point x="183.99" y="77.01"/>
|
||||
<point x="210.65" y="103.67"/>
|
||||
<point x="210.65" y="137" type="curve"/>
|
||||
<point x="210.65" y="170.33"/>
|
||||
<point x="183.99" y="197.32"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
21
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/e.glif
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="e" format="2">
|
||||
<advance height="750" width="388"/>
|
||||
<unicode hex="0065"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="66" y="510" type="line"/>
|
||||
<point x="194" y="75" type="line"/>
|
||||
<point x="322" y="510" type="line"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="-55" y="23" type="curve"/>
|
||||
<point x="454" y="23" type="line"/>
|
||||
<point x="454" y="173"/>
|
||||
<point x="404" y="223"/>
|
||||
<point x="199" y="223" type="curve"/>
|
||||
<point x="-5" y="223"/>
|
||||
<point x="-55" y="173"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
21
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/f.glif
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="f" format="2">
|
||||
<advance height="750" width="410"/>
|
||||
<unicode hex="0066"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="66" y="510" type="line"/>
|
||||
<point x="322" y="510" type="line"/>
|
||||
<point x="194" y="75" type="line"/>
|
||||
</contour>
|
||||
<contour>
|
||||
<point x="-55" y="23" type="curve"/>
|
||||
<point x="454" y="23" type="line"/>
|
||||
<point x="454" y="173"/>
|
||||
<point x="404" y="223"/>
|
||||
<point x="199" y="223" type="curve"/>
|
||||
<point x="-5" y="223"/>
|
||||
<point x="-55" y="173"/>
|
||||
</contour>
|
||||
</outline>
|
||||
</glyph>
|
8
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/g.glif
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="g" format="2">
|
||||
<advance height="750" width="388"/>
|
||||
<unicode hex="0067"/>
|
||||
<outline>
|
||||
<component base="a"/>
|
||||
</outline>
|
||||
</glyph>
|
9
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/h.glif
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="h" format="2">
|
||||
<advance height="1000" width="410"/>
|
||||
<unicode hex="0068"/>
|
||||
<outline>
|
||||
<component base="d" xOffset="60" yOffset="460"/>
|
||||
<component base="b"/>
|
||||
</outline>
|
||||
</glyph>
|
17
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/i.glif
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="i" format="2">
|
||||
<advance height="750" width="600"/>
|
||||
<unicode hex="0069"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="-55" y="-80" type="curve"/>
|
||||
<point x="454" y="-80" type="line"/>
|
||||
<point x="454" y="69"/>
|
||||
<point x="404" y="119"/>
|
||||
<point x="199" y="119" type="curve"/>
|
||||
<point x="-5" y="119"/>
|
||||
<point x="-55" y="69"/>
|
||||
</contour>
|
||||
<component base="a"/>
|
||||
</outline>
|
||||
</glyph>
|
17
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/j.glif
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="j" format="2">
|
||||
<advance height="1000" width="600"/>
|
||||
<unicode hex="006A"/>
|
||||
<outline>
|
||||
<contour>
|
||||
<point x="-55" y="-80" type="curve"/>
|
||||
<point x="454" y="-80" type="line"/>
|
||||
<point x="454" y="69"/>
|
||||
<point x="404" y="119"/>
|
||||
<point x="199" y="119" type="curve"/>
|
||||
<point x="-5" y="119"/>
|
||||
<point x="-55" y="69"/>
|
||||
</contour>
|
||||
<component base="a" yScale="-1" yOffset="230"/>
|
||||
</outline>
|
||||
</glyph>
|
9
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/k.glif
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="k" format="2">
|
||||
<advance height="1000" width="600"/>
|
||||
<unicode hex="006B"/>
|
||||
<outline>
|
||||
<component base="a"/>
|
||||
<component base="a" xOffset="100"/>
|
||||
</outline>
|
||||
</glyph>
|
9
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/l.glif
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="l" format="2">
|
||||
<advance height="1000" width="600"/>
|
||||
<unicode hex="006C"/>
|
||||
<outline>
|
||||
<component base="a" xScale="-1" xOffset="400"/>
|
||||
<component base="a" xOffset="100"/>
|
||||
</outline>
|
||||
</glyph>
|
7
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/glyphs/space.glif
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<glyph name="space" format="2">
|
||||
<advance height="250" width="250"/>
|
||||
<unicode hex="0020"/>
|
||||
<outline>
|
||||
</outline>
|
||||
</glyph>
|
20
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/kerning.plist
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>a</key>
|
||||
<dict>
|
||||
<key>a</key>
|
||||
<integer>5</integer>
|
||||
<key>b</key>
|
||||
<integer>-10</integer>
|
||||
<key>space</key>
|
||||
<integer>1</integer>
|
||||
</dict>
|
||||
<key>b</key>
|
||||
<dict>
|
||||
<key>a</key>
|
||||
<integer>-7</integer>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
10
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/layercontents.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<array>
|
||||
<array>
|
||||
<string>public.default</string>
|
||||
<string>glyphs</string>
|
||||
</array>
|
||||
</array>
|
||||
</plist>
|
25
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/lib.plist
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>public.glyphOrder</key>
|
||||
<array>
|
||||
<string>.notdef</string>
|
||||
<string>glyph1</string>
|
||||
<string>glyph2</string>
|
||||
<string>space</string>
|
||||
<string>a</string>
|
||||
<string>b</string>
|
||||
<string>c</string>
|
||||
<string>d</string>
|
||||
<string>e</string>
|
||||
<string>f</string>
|
||||
<string>g</string>
|
||||
<string>h</string>
|
||||
<string>i</string>
|
||||
<string>j</string>
|
||||
<string>k</string>
|
||||
<string>l</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
10
Tests/ufoLib/testdata/TestFont1 (UFO3).ufo/metainfo.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>creator</key>
|
||||
<string>org.robofab.ufoLib</string>
|
||||
<key>formatVersion</key>
|
||||
<integer>3</integer>
|
||||
</dict>
|
||||
</plist>
|
BIN
Tests/ufoLib/testdata/TestFont1 (UFO3).ufoz
vendored
Normal file
Binary file not shown.
1
Tests/ufoLib/testdata/UFO3-Read Data.ufo/data/org.unifiedfontobject.directory/bar/lol.txt
vendored
Normal file
@ -0,0 +1 @@
|
||||
lol.txt
|
1
Tests/ufoLib/testdata/UFO3-Read Data.ufo/data/org.unifiedfontobject.directory/foo.txt
vendored
Normal file
@ -0,0 +1 @@
|
||||
foo.txt
|
1
Tests/ufoLib/testdata/UFO3-Read Data.ufo/data/org.unifiedfontobject.file.txt
vendored
Normal file
@ -0,0 +1 @@
|
||||
file.txt
|
10
Tests/ufoLib/testdata/UFO3-Read Data.ufo/metainfo.plist
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>creator</key>
|
||||
<string>org.robofab.ufoLib</string>
|
||||
<key>formatVersion</key>
|
||||
<integer>3</integer>
|
||||
</dict>
|
||||
</plist>
|
87
Tests/ufoLib/testdata/test.plist
vendored
Normal file
@ -0,0 +1,87 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>aBigInt</key>
|
||||
<integer>9223372036854775764</integer>
|
||||
<key>aBigInt2</key>
|
||||
<integer>9223372036854775852</integer>
|
||||
<key>aDate</key>
|
||||
<date>2004-10-26T10:33:33Z</date>
|
||||
<key>aDict</key>
|
||||
<dict>
|
||||
<key>aFalseValue</key>
|
||||
<false/>
|
||||
<key>aTrueValue</key>
|
||||
<true/>
|
||||
<key>aUnicodeValue</key>
|
||||
<string>Mässig, Maß</string>
|
||||
<key>anotherString</key>
|
||||
<string>&lt;hello &amp; 'hi' there!&gt;</string>
|
||||
<key>deeperDict</key>
|
||||
<dict>
|
||||
<key>a</key>
|
||||
<integer>17</integer>
|
||||
<key>b</key>
|
||||
<real>32.5</real>
|
||||
<key>c</key>
|
||||
<array>
|
||||
<integer>1</integer>
|
||||
<integer>2</integer>
|
||||
<string>text</string>
|
||||
</array>
|
||||
</dict>
|
||||
</dict>
|
||||
<key>aFloat</key>
|
||||
<real>0.5</real>
|
||||
<key>aList</key>
|
||||
<array>
|
||||
<string>A</string>
|
||||
<string>B</string>
|
||||
<integer>12</integer>
|
||||
<real>32.5</real>
|
||||
<array>
|
||||
<integer>1</integer>
|
||||
<integer>2</integer>
|
||||
<integer>3</integer>
|
||||
</array>
|
||||
</array>
|
||||
<key>aNegativeBigInt</key>
|
||||
<integer>-80000000000</integer>
|
||||
<key>aNegativeInt</key>
|
||||
<integer>-5</integer>
|
||||
<key>aString</key>
|
||||
<string>Doodah</string>
|
||||
<key>anEmptyDict</key>
|
||||
<dict/>
|
||||
<key>anEmptyList</key>
|
||||
<array/>
|
||||
<key>anInt</key>
|
||||
<integer>728</integer>
|
||||
<key>nestedData</key>
|
||||
<array>
|
||||
<data>
|
||||
PGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBndW5r
|
||||
PgABAgM8bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmluYXJ5
|
||||
IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBndW5rPgABAgM8bG90cyBvZiBi
|
||||
aW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxsb3Rz
|
||||
IG9mIGJpbmFyeSBndW5rPgABAgM8bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQID
|
||||
PGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAw==
|
||||
</data>
|
||||
</array>
|
||||
<key>someData</key>
|
||||
<data>
|
||||
PGJpbmFyeSBndW5rPg==
|
||||
</data>
|
||||
<key>someMoreData</key>
|
||||
<data>
|
||||
PGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBndW5rPgABAgM8
|
||||
bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxs
|
||||
b3RzIG9mIGJpbmFyeSBndW5rPgABAgM8bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDPGxv
|
||||
dHMgb2YgYmluYXJ5IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBndW5rPgABAgM8bG90
|
||||
cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAw==
|
||||
</data>
|
||||
<key>Åbenraa</key>
|
||||
<string>That was a unicode key.</string>
|
||||
</dict>
|
||||
</plist>
|