Merge pull request #1203 from anthrotype/absolute-include

[feaLib.lexer] pass included filename token to IncludedFeaNotFound

commit fd807bd70a
@@ -56,7 +56,7 @@ class Lexer(object):
 
     def location_(self):
         column = self.pos_ - self.line_start_ + 1
-        return (self.filename_, self.line_, column)
+        return (self.filename_ or "<features>", self.line_, column)
 
     def next_(self):
         self.scan_over_(Lexer.CHAR_WHITESPACE_)
@@ -211,33 +211,42 @@ class IncludingLexer(object):
                 #semi_type, semi_token, semi_location = lexer.next()
                 #if semi_type is not Lexer.SYMBOL or semi_token != ";":
                 #    raise FeatureLibError("Expected ';'", semi_location)
-                curpath = os.path.dirname(self.featurefilepath)
-                path = os.path.join(curpath, fname_token)
+                if os.path.isabs(fname_token):
+                    path = fname_token
+                else:
+                    if self.featurefilepath is not None:
+                        curpath = os.path.dirname(self.featurefilepath)
+                    else:
+                        # if the IncludingLexer was initialized from an in-memory
+                        # file-like stream, it doesn't have a 'name' pointing to
+                        # its filesystem path, therefore we fall back to using the
+                        # current working directory to resolve relative includes
+                        curpath = os.getcwd()
+                    path = os.path.join(curpath, fname_token)
                 if len(self.lexers_) >= 5:
                     raise FeatureLibError("Too many recursive includes",
                                           fname_location)
-                self.lexers_.append(self.make_lexer_(path, fname_location))
-                continue
+                try:
+                    self.lexers_.append(self.make_lexer_(path))
+                except IOError as err:
+                    # FileNotFoundError does not exist on Python < 3.3
+                    import errno
+                    if err.errno == errno.ENOENT:
+                        raise IncludedFeaNotFound(fname_token, fname_location)
+                    raise  # pragma: no cover
             else:
                 return (token_type, token, location)
         raise StopIteration()
 
     @staticmethod
-    def make_lexer_(file_or_path, location=None):
+    def make_lexer_(file_or_path):
         if hasattr(file_or_path, "read"):
             fileobj, closing = file_or_path, False
         else:
             filename, closing = file_or_path, True
-            try:
-                fileobj = open(filename, "r", encoding="utf-8")
-            except IOError as err:
-                # FileNotFoundError does not exist on Python < 3.3
-                import errno
-                if err.errno == errno.ENOENT:
-                    raise IncludedFeaNotFound(str(err), location)
-                raise  # pragma: no cover
+            fileobj = open(filename, "r", encoding="utf-8")
         data = fileobj.read()
-        filename = fileobj.name if hasattr(fileobj, "name") else "<features>"
+        filename = getattr(fileobj, "name", None)
         if closing:
             fileobj.close()
         return Lexer(data, filename)
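
The hunks above change how include statements are resolved: an absolute include path is used verbatim, a relative path is resolved against the directory of the including feature file, and, when the lexer was built from an in-memory stream that has no file name, against the current working directory. The snippet below is a minimal standalone sketch of that rule, not code from the commit; the helper name resolve_include is made up for illustration, while fname_token and featurefilepath mirror the names used in IncludingLexer.

    import os

    def resolve_include(fname_token, featurefilepath=None):
        # Minimal sketch (not from the commit) of the include-path resolution rule.
        # Absolute include paths are used verbatim.
        if os.path.isabs(fname_token):
            return fname_token
        # Relative paths are resolved against the including feature file's
        # directory when it is known...
        if featurefilepath is not None:
            curpath = os.path.dirname(featurefilepath)
        else:
            # ...otherwise against the current working directory (the lexer
            # was fed an in-memory stream and has no path of its own).
            curpath = os.getcwd()
        return os.path.join(curpath, fname_token)
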
@@ -1,8 +1,11 @@
 from __future__ import print_function, division, absolute_import
 from __future__ import unicode_literals
 from fontTools.misc.py23 import *
 from fontTools.feaLib.error import FeatureLibError, IncludedFeaNotFound
 from fontTools.feaLib.lexer import IncludingLexer, Lexer
 import os
+import shutil
+import tempfile
 import unittest
 
+
@@ -174,7 +177,58 @@ class IncludingLexerTest(unittest.TestCase):
 
     def test_include_missing_file(self):
         lexer = IncludingLexer(self.getpath("include/includemissingfile.fea"))
-        self.assertRaises(IncludedFeaNotFound, lambda: list(lexer))
+        self.assertRaisesRegex(IncludedFeaNotFound,
+                               "includemissingfile.fea:1:8: missingfile.fea",
+                               lambda: list(lexer))
+
+    def test_featurefilepath_None(self):
+        lexer = IncludingLexer(UnicodeIO("# foobar"))
+        self.assertIsNone(lexer.featurefilepath)
+        files = set(loc[0] for _, _, loc in lexer)
+        self.assertIn("<features>", files)
+
+    def test_include_absolute_path(self):
+        with tempfile.NamedTemporaryFile(delete=False) as included:
+            included.write(tobytes("""
+                feature kern {
+                    pos A B -40;
+                } kern;
+                """, encoding="utf-8"))
+        including = UnicodeIO("include(%s);" % included.name)
+        try:
+            lexer = IncludingLexer(including)
+            files = set(loc[0] for _, _, loc in lexer)
+            self.assertIn(included.name, files)
+        finally:
+            os.remove(included.name)
+
+    def test_include_relative_to_cwd(self):
+        # save current working directory, to be restored later
+        cwd = os.getcwd()
+        tmpdir = tempfile.mkdtemp()
+        try:
+            # create new feature file in a temporary directory
+            with open(os.path.join(tmpdir, "included.fea"), "w",
+                      encoding="utf-8") as included:
+                included.write("""
+                    feature kern {
+                        pos A B -40;
+                    } kern;
+                    """)
+            # change current folder to the temporary dir
+            os.chdir(tmpdir)
+            # instantiate a new lexer that includes the above file
+            # using a relative path; the IncludingLexer does not
+            # itself have a path, because it was initialized from
+            # an in-memory stream, so it will use the current working
+            # directory to resolve relative include statements
+            lexer = IncludingLexer(UnicodeIO("include(included.fea);"))
+            files = set(loc[0] for _, _, loc in lexer)
+            self.assertIn(included.name, files)
+        finally:
+            # remove temporary folder and restore previous working directory
+            os.chdir(cwd)
+            shutil.rmtree(tmpdir)
 
 
 if __name__ == "__main__":
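
Taken together with the tests above, the user-visible effect is that a failing include now raises IncludedFeaNotFound carrying the included file name token and the location of the include statement, rather than the raw OS error string. Below is a minimal usage sketch of how that surfaces, assuming a fontTools build containing this change; io.StringIO stands in for the UnicodeIO helper used in the tests, and no_such_file.fea is just a placeholder name.

    from io import StringIO
    from fontTools.feaLib.error import IncludedFeaNotFound
    from fontTools.feaLib.lexer import IncludingLexer

    try:
        # The included file does not exist, so draining the lexer raises.
        list(IncludingLexer(StringIO("include(no_such_file.fea);")))
    except IncludedFeaNotFound as err:
        # The message names the location of the include statement and the
        # missing file name token (cf. the regex in test_include_missing_file).
        print(err)
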