2018-02-21 18:02:55 +00:00
|
|
|
from fontTools.feaLib.error import FeatureLibError, IncludedFeaNotFound
|
2015-08-21 17:09:46 +02:00
|
|
|
from fontTools.feaLib.lexer import IncludingLexer, Lexer
|
2021-08-20 00:45:43 +02:00
|
|
|
from fontTools.misc.textTools import tobytes
|
2021-03-29 11:45:58 +02:00
|
|
|
from io import StringIO
|
2015-07-31 17:09:19 +02:00
|
|
|
import os
|
2018-02-28 18:40:15 +00:00
|
|
|
import shutil
|
|
|
|
import tempfile
|
2015-07-31 11:53:36 +02:00
|
|
|
import unittest
|
|
|
|
|
|
|
|
|
|
|
|
def lex(s):
    """Tokenize *s* with Lexer and return (type, token) pairs.

    The source location attached to each token is discarded so tests can
    compare token streams directly.
    """
    pairs = []
    for token_type, token, _location in Lexer(s, "test.fea"):
        pairs.append((token_type, token))
    return pairs
|
2015-07-31 11:53:36 +02:00
|
|
|
|
2018-01-22 17:07:28 +00:00
|
|
|
|
2015-07-31 11:53:36 +02:00
|
|
|
class LexerTest(unittest.TestCase):
    """Tests for feaLib's Lexer tokenizer.

    Each test feeds a small feature-file snippet to ``lex()`` and checks
    either the resulting (token type, token value) pairs or the error
    raised for malformed input.
    """

    # NOTE: the historical ``__init__`` shim that aliased
    # ``assertRaisesRegexp`` to ``assertRaisesRegex`` has been removed.
    # ``assertRaisesRegex`` exists on every supported Python 3 version,
    # and the legacy ``assertRaisesRegexp`` alias was dropped entirely in
    # Python 3.12, so the fallback branch could never be exercised.

    def test_empty(self):
        # Whitespace-only input yields no tokens at all.
        self.assertEqual(lex(""), [])
        self.assertEqual(lex(" \t "), [])

    def test_name(self):
        self.assertEqual(lex("a17"), [(Lexer.NAME, "a17")])
        self.assertEqual(lex(".notdef"), [(Lexer.NAME, ".notdef")])
        self.assertEqual(lex("two.oldstyle"), [(Lexer.NAME, "two.oldstyle")])
        self.assertEqual(lex("_"), [(Lexer.NAME, "_")])
        # A leading backslash is preserved as part of the name token.
        self.assertEqual(lex("\\table"), [(Lexer.NAME, "\\table")])
        self.assertEqual(lex("a+*:^~!"), [(Lexer.NAME, "a+*:^~!")])
        self.assertEqual(lex("with-dash"), [(Lexer.NAME, "with-dash")])

    def test_cid(self):
        # Backslash followed by digits is a CID token with an integer value.
        self.assertEqual(lex("\\0 \\987"), [(Lexer.CID, 0), (Lexer.CID, 987)])

    def test_glyphclass(self):
        self.assertEqual(lex("@Vowel.sc"), [(Lexer.GLYPHCLASS, "Vowel.sc")])
        self.assertEqual(lex("@Vowel-sc"), [(Lexer.GLYPHCLASS, "Vowel-sc")])
        self.assertRaisesRegex(FeatureLibError, "Expected glyph class", lex, "@(a)")
        self.assertRaisesRegex(FeatureLibError, "Expected glyph class", lex, "@ A")
        # Very long class names are accepted by the lexer.
        self.assertEqual(lex("@" + ("A" * 600)), [(Lexer.GLYPHCLASS, "A" * 600)])
        self.assertRaisesRegex(
            FeatureLibError, "Glyph class names must consist of", lex, "@Ab:c"
        )

    def test_include(self):
        # The parenthesized argument of "include" becomes a FILENAME token;
        # spaces inside the parentheses are kept.
        self.assertEqual(
            lex("include (~/foo/bar baz.fea);"),
            [
                (Lexer.NAME, "include"),
                (Lexer.FILENAME, "~/foo/bar baz.fea"),
                (Lexer.SYMBOL, ";"),
            ],
        )
        # Comments and newlines may appear between "include" and its argument.
        self.assertEqual(
            lex("include # Comment\n (foo) \n;"),
            [
                (Lexer.NAME, "include"),
                (Lexer.COMMENT, "# Comment"),
                (Lexer.FILENAME, "foo"),
                (Lexer.SYMBOL, ";"),
            ],
        )
        self.assertRaises(FeatureLibError, lex, "include blah")
        self.assertRaises(FeatureLibError, lex, "include (blah")

    def test_number(self):
        self.assertEqual(lex("123 -456"), [(Lexer.NUMBER, 123), (Lexer.NUMBER, -456)])
        # Hexadecimal literals are case-insensitive.
        self.assertEqual(lex("0xCAFED00D"), [(Lexer.HEXADECIMAL, 0xCAFED00D)])
        self.assertEqual(lex("0xcafed00d"), [(Lexer.HEXADECIMAL, 0xCAFED00D)])
        # A leading zero marks an octal literal.
        self.assertEqual(lex("010"), [(Lexer.OCTAL, 0o10)])

    def test_float(self):
        self.assertEqual(lex("1.23 -4.5"), [(Lexer.FLOAT, 1.23), (Lexer.FLOAT, -4.5)])

    def test_symbol(self):
        self.assertEqual(lex("a'"), [(Lexer.NAME, "a"), (Lexer.SYMBOL, "'")])
        # A leading hyphen is a symbol, but a hyphen inside a name is not.
        self.assertEqual(lex("-A-B"), [(Lexer.SYMBOL, "-"), (Lexer.NAME, "A-B")])
        self.assertEqual(
            lex("foo - -2"),
            [(Lexer.NAME, "foo"), (Lexer.SYMBOL, "-"), (Lexer.NUMBER, -2)],
        )

    def test_comment(self):
        self.assertEqual(
            lex("# Comment\n#"), [(Lexer.COMMENT, "# Comment"), (Lexer.COMMENT, "#")]
        )

    def test_string(self):
        self.assertEqual(
            lex('"foo" "bar"'), [(Lexer.STRING, "foo"), (Lexer.STRING, "bar")]
        )
        # Newlines inside a string are collapsed.
        self.assertEqual(
            lex('"foo \nbar\r baz \r\nqux\n\n "'), [(Lexer.STRING, "foo bar baz qux ")]
        )
        # The lexer should preserve escape sequences because they have
        # different interpretations depending on context. For better
        # or for worse, that is how the OpenType Feature File Syntax
        # has been specified; see section 9.e (name table) for examples.
        self.assertEqual(
            lex(r'"M\00fcller-Lanc\00e9"'),  # 'nameid 9'
            [(Lexer.STRING, r"M\00fcller-Lanc\00e9")],
        )
        self.assertEqual(
            lex(r'"M\9fller-Lanc\8e"'),  # 'nameid 9 1'
            [(Lexer.STRING, r"M\9fller-Lanc\8e")],
        )
        # An unterminated string is an error.
        self.assertRaises(FeatureLibError, lex, '"foo\n bar')

    def test_bad_character(self):
        self.assertRaises(FeatureLibError, lambda: lex("123 \u0001"))

    def test_newline(self):
        def lines(s):
            # Return the line number recorded for each token of *s*.
            return [loc.line for (_, _, loc) in Lexer(s, "test.fea")]

        self.assertEqual(lines("FOO\n\nBAR\nBAZ"), [1, 3, 4])  # Unix
        self.assertEqual(lines("FOO\r\rBAR\rBAZ"), [1, 3, 4])  # Macintosh
        self.assertEqual(lines("FOO\r\n\r\n BAR\r\nBAZ"), [1, 3, 4])  # Windows
        self.assertEqual(lines("FOO\n\rBAR\r\nBAZ"), [1, 3, 4])  # mixed

    def test_location(self):
        def locs(s):
            # Return "file:line:column" strings for each token of *s*.
            return [str(loc) for (_, _, loc) in Lexer(s, "test.fea")]

        self.assertEqual(
            locs("a b # Comment\n12 @x"),
            [
                "test.fea:1:1",
                "test.fea:1:3",
                "test.fea:1:5",
                "test.fea:2:1",
                "test.fea:2:4",
            ],
        )

    def test_scan_over_(self):
        lexer = Lexer("abbacabba12", "test.fea")
        self.assertEqual(lexer.pos_, 0)
        # Scanning over characters not at the cursor does not move it.
        lexer.scan_over_("xyz")
        self.assertEqual(lexer.pos_, 0)
        # Scanning over a matching character set advances past the run.
        lexer.scan_over_("abc")
        self.assertEqual(lexer.pos_, 9)
        lexer.scan_over_("abc")
        self.assertEqual(lexer.pos_, 9)
        lexer.scan_over_("0123456789")
        self.assertEqual(lexer.pos_, 11)

    def test_scan_until_(self):
        lexer = Lexer("foo'bar", "test.fea")
        self.assertEqual(lexer.pos_, 0)
        # Advance up to (but not past) the stop character.
        lexer.scan_until_("'")
        self.assertEqual(lexer.pos_, 3)
        lexer.scan_until_("'")
        self.assertEqual(lexer.pos_, 3)
|
|
|
|
|
|
|
|
|
2015-07-31 17:09:19 +02:00
|
|
|
class IncludingLexerTest(unittest.TestCase):
    """Tests for IncludingLexer, which expands include() statements."""

    @staticmethod
    def getpath(filename):
        """Return the path of *filename* inside the test data directory."""
        directory, _ = os.path.split(__file__)
        return os.path.join(directory, "data", filename)

    def test_include(self):
        lexer = IncludingLexer(self.getpath("include/include4.fea"))
        actual = []
        for _, token, loc in lexer:
            basename = os.path.split(loc.file)[1]
            actual.append("%s %s:%d" % (token, basename, loc.line))
        expected = [
            "I4a include4.fea:1",
            "I3a include3.fea:1",
            "I2a include2.fea:1",
            "I1a include1.fea:1",
            "I0 include0.fea:1",
            "I1b include1.fea:3",
            "; include2.fea:2",
            "I2b include2.fea:3",
            "; include3.fea:2",
            "I3b include3.fea:3",
            "; include4.fea:2",
            "I4b include4.fea:3",
        ]
        self.assertEqual(actual, expected)

    def test_include_limit(self):
        # Includes nested too deeply must be rejected.
        lexer = IncludingLexer(self.getpath("include/include6.fea"))
        self.assertRaises(FeatureLibError, lambda: list(lexer))

    def test_include_self(self):
        # A file that includes itself must not recurse forever.
        lexer = IncludingLexer(self.getpath("include/includeself.fea"))
        self.assertRaises(FeatureLibError, lambda: list(lexer))

    def test_include_missing_file(self):
        lexer = IncludingLexer(self.getpath("include/includemissingfile.fea"))
        self.assertRaisesRegex(
            IncludedFeaNotFound,
            "includemissingfile.fea:1:8: The following feature file "
            "should be included but cannot be found: "
            "missingfile.fea",
            lambda: list(lexer),
        )

    def test_featurefilepath_None(self):
        # A lexer built from an in-memory stream has no file path.
        lexer = IncludingLexer(StringIO("# foobar"))
        self.assertIsNone(lexer.featurefilepath)
        files = {loc.file for _, _, loc in lexer}
        self.assertIn("<features>", files)

    def test_include_absolute_path(self):
        with tempfile.NamedTemporaryFile(delete=False) as included:
            included.write(
                tobytes(
                    """
            feature kern {
                pos A B -40;
            } kern;
            """,
                    encoding="utf-8",
                )
            )
        including = StringIO("include(%s);" % included.name)
        try:
            lexer = IncludingLexer(including)
            files = {loc.file for _, _, loc in lexer}
            self.assertIn(included.name, files)
        finally:
            os.remove(included.name)

    def test_include_relative_to_cwd(self):
        # Remember the working directory so it can be restored afterwards.
        original_cwd = os.getcwd()
        tmpdir = tempfile.mkdtemp()
        try:
            # Create a feature file inside a temporary directory.
            with open(
                os.path.join(tmpdir, "included.fea"), "w", encoding="utf-8"
            ) as included:
                included.write(
                    """
            feature kern {
                pos A B -40;
            } kern;
            """
                )
            # Switch into the temporary directory.
            os.chdir(tmpdir)
            # Instantiate a lexer that includes the above file by a
            # relative path; the IncludingLexer itself has no path
            # (it was built from an in-memory stream), so relative
            # include statements are resolved against the current
            # working directory.
            lexer = IncludingLexer(StringIO("include(included.fea);"))
            files = {os.path.realpath(loc.file) for _, _, loc in lexer}
            self.assertIn(os.path.realpath(included.name), files)
        finally:
            # Restore the working directory and remove the temp folder.
            os.chdir(original_cwd)
            shutil.rmtree(tmpdir)
|
2015-07-31 17:09:19 +02:00
|
|
|
|
|
|
|
|
2015-07-31 11:53:36 +02:00
|
|
|
if __name__ == "__main__":
    import sys

    # Propagate the unittest result as the process exit status.
    sys.exit(unittest.main())
|