from __future__ import print_function, division, absolute_import
from __future__ import unicode_literals
from fontTools.feaLib.lexer import IncludingLexer, Lexer, LexerError
import os
import unittest


def lex(s):
    """Tokenize *s* with Lexer and return (type, text) pairs.

    The third element of each lexer triple (the source location) is
    discarded so tests can compare token streams compactly.
    """
    result = []
    for token_type, token, _ in Lexer(s, "test.fea"):
        result.append((token_type, token))
    return result


class LexerTest(unittest.TestCase):
    """Unit tests for the feaLib Lexer tokenizer."""

    def test_empty(self):
        self.assertEqual(lex(""), [])
        self.assertEqual(lex(" \t "), [])

    def test_name(self):
        self.assertEqual(lex("a17"), [(Lexer.NAME, "a17")])
        self.assertEqual(lex(".notdef"), [(Lexer.NAME, ".notdef")])
        self.assertEqual(lex("two.oldstyle"), [(Lexer.NAME, "two.oldstyle")])
        self.assertEqual(lex("_"), [(Lexer.NAME, "_")])
        self.assertEqual(lex("\\table"), [(Lexer.NAME, "\\table")])

    def test_cid(self):
        self.assertEqual(lex("\\0 \\987"), [(Lexer.CID, 0), (Lexer.CID, 987)])

    def test_include(self):
        self.assertEqual(lex("include (~/foo/bar baz.fea);"), [
            (Lexer.NAME, "include"),
            (Lexer.FILENAME, "~/foo/bar baz.fea"),
            (Lexer.SYMBOL, ";")
        ])
        self.assertEqual(lex("include # Comment\n (foo) \n;"), [
            (Lexer.NAME, "include"),
            (Lexer.FILENAME, "foo"),
            (Lexer.SYMBOL, ";")
        ])
        self.assertRaises(LexerError, lex, "include blah")
        self.assertRaises(LexerError, lex, "include (blah")

    def test_number(self):
        self.assertEqual(lex("123 -456"),
                         [(Lexer.NUMBER, 123), (Lexer.NUMBER, -456)])

    def test_symbol(self):
        self.assertEqual(lex("a'"), [(Lexer.NAME, "a"), (Lexer.SYMBOL, "'")])
        self.assertEqual(
            lex("foo - -2"),
            [(Lexer.NAME, "foo"), (Lexer.SYMBOL, "-"), (Lexer.NUMBER, -2)])

    def test_comment(self):
        self.assertEqual(lex("# Comment\n#"), [])

    def test_string(self):
        self.assertEqual(lex('"foo" "bar"'),
                         [(Lexer.STRING, "foo"), (Lexer.STRING, "bar")])
        # Strings may not span lines. Pass the callable and its argument
        # directly, matching the assertRaises style used in test_include.
        self.assertRaises(LexerError, lex, '"foo\n bar"')

    def test_bad_character(self):
        self.assertRaises(LexerError, lex, "123 \u0001")

    def test_newline(self):
        def lines(s):
            # Keep only the line number (loc[1]) of each token's location.
            return [loc[1] for (_, _, loc) in Lexer(s, "test.fea")]

        self.assertEqual(lines("FOO\n\nBAR\nBAZ"), [1, 3, 4])  # Unix
        self.assertEqual(lines("FOO\r\rBAR\rBAZ"), [1, 3, 4])  # Macintosh
        self.assertEqual(lines("FOO\r\n\r\n BAR\r\nBAZ"), [1, 3, 4])  # Windows
        self.assertEqual(lines("FOO\n\rBAR\r\nBAZ"), [1, 3, 4])  # mixed

    def test_location(self):
        def locs(s):
            # Render each token's (file, line, column) location triple.
            return ["%s:%d:%d" % loc
                    for (_, _, loc) in Lexer(s, "test.fea")]

        self.assertEqual(locs("a b # Comment\n12 @x"), [
            "test.fea:1:1", "test.fea:1:3", "test.fea:2:1",
            "test.fea:2:4", "test.fea:2:5"
        ])

    def test_scan_over_(self):
        lexer = Lexer("abbacabba12", "test.fea")
        self.assertEqual(lexer.pos_, 0)
        lexer.scan_over_("xyz")  # no character matches: position unchanged
        self.assertEqual(lexer.pos_, 0)
        lexer.scan_over_("abc")  # consumes the leading "abbacabba" run
        self.assertEqual(lexer.pos_, 9)
        lexer.scan_over_("abc")  # next char is a digit: position unchanged
        self.assertEqual(lexer.pos_, 9)
        lexer.scan_over_("0123456789")  # consumes the trailing "12"
        self.assertEqual(lexer.pos_, 11)

    def test_scan_until_(self):
        lexer = Lexer("foo'bar", "test.fea")
        self.assertEqual(lexer.pos_, 0)
        lexer.scan_until_("'")  # stops at the apostrophe
        self.assertEqual(lexer.pos_, 3)
        lexer.scan_until_("'")  # already at a stop char: position unchanged
        self.assertEqual(lexer.pos_, 3)


class IncludingLexerTest(unittest.TestCase):
    """Tests for IncludingLexer, which follows include() statements."""

    @staticmethod
    def getpath(filename):
        """Resolve *filename* inside the testdata directory next to this file."""
        directory = os.path.dirname(__file__)
        return os.path.join(directory, "testdata", filename)

    def test_include(self):
        """Tokens from nested includes arrive in depth-first order."""
        lexer = IncludingLexer(self.getpath("include4.fea"))
        result = []
        for _, token, loc in lexer:
            result.append('%s %s:%d' % (token, os.path.basename(loc[0]), loc[1]))
        self.assertEqual(result, [
            "I4a include4.fea:1",
            "I3a include3.fea:1",
            "I2a include2.fea:1",
            "I1a include1.fea:1",
            "I0 include0.fea:1",
            "I1b include1.fea:3",
            "I2b include2.fea:3",
            "I3b include3.fea:3",
            "I4b include4.fea:3"
        ])

    def test_include_limit(self):
        """Exceeding the maximum include depth raises a LexerError."""
        lexer = IncludingLexer(self.getpath("include6.fea"))
        self.assertRaises(LexerError, list, lexer)

    def test_include_self(self):
        """A file that includes itself raises a LexerError."""
        lexer = IncludingLexer(self.getpath("includeself.fea"))
        self.assertRaises(LexerError, list, lexer)

    def test_include_missing_file(self):
        """Including a nonexistent file raises a LexerError."""
        lexer = IncludingLexer(self.getpath("includemissingfile.fea"))
        self.assertRaises(LexerError, list, lexer)


# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()