[feaLib] Simplify parser API by removing the ignore_comments option

https://github.com/fonttools/fonttools/pull/879#discussion_r104947968
https://github.com/fonttools/fonttools/issues/829

parent ac2762f0f3
commit bc0670f53f
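The net effect for API users: comments in a feature file are now always parsed into the syntax tree as ast.Comment statements, with no switch to flip first. A minimal sketch of the simplified API (the feature text and empty glyph map are illustrative; the tests below use UnicodeIO, and StringIO is its Python 3 equivalent):

from io import StringIO
from fontTools.feaLib.parser import Parser

# Comments now always survive parsing; previously a caller had to set
# Parser.ignore_comments = False to keep them.
fea = StringIO("# keep ligatures on\nlanguagesystem DFLT dflt;\n")
doc = Parser(fea, glyphMap={}).parse()
print(doc.asFea())  # round-trips the comment together with the statement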
Lib/fontTools/feaLib/parser.py

@@ -14,10 +14,8 @@ log = logging.getLogger(__name__)
 
 
 class Parser(object):
-
     extensions = {}
     ast = ast
-    ignore_comments = True
 
     def __init__(self, featurefile, glyphMap):
         self.glyphMap_ = glyphMap
@@ -791,7 +789,7 @@ class Parser(object):
 
     def parse_table_GDEF_(self, table):
         statements = table.statements
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
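The same one-line change recurs in each parse_table_*_ loop below and in the rule-block parser: `not self.ignore_comments and len(self.cur_comments_)` collapses to plain list truthiness, so buffered comments keep the loop alive until they are drained, even once the closing brace is already the next token. A standalone sketch of that invariant (hypothetical buffer contents, not the real Parser state):

# Without the "or cur_comments_" clause, a comment sitting just before the
# closing "}" would never be emitted into the statement list.
cur_comments_ = [("# trailing comment", ("<features>", 3, 1))]
next_token_ = "}"
statements = []
while next_token_ != "}" or cur_comments_:
    text, location = cur_comments_.pop(0)
    statements.append((location, text))
assert statements == [(("<features>", 3, 1), "# trailing comment")]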
@@ -813,7 +811,7 @@ class Parser(object):
 
     def parse_table_head_(self, table):
         statements = table.statements
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
@@ -828,7 +826,7 @@ class Parser(object):
     def parse_table_hhea_(self, table):
         statements = table.statements
         fields = ("CaretOffset", "Ascender", "Descender", "LineGap")
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
@@ -849,7 +847,7 @@ class Parser(object):
     def parse_table_vhea_(self, table):
         statements = table.statements
         fields = ("VertTypoAscender", "VertTypoDescender", "VertTypoLineGap")
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
@@ -869,7 +867,7 @@ class Parser(object):
 
     def parse_table_name_(self, table):
         statements = table.statements
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
@@ -954,7 +952,7 @@ class Parser(object):
 
     def parse_table_BASE_(self, table):
         statements = table.statements
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
@@ -981,7 +979,7 @@ class Parser(object):
             "winAscent", "winDescent", "XHeight", "CapHeight",
             "WeightClass", "WidthClass", "LowerOpSize", "UpperOpSize")
         ranges = ("UnicodeRange", "CodePageRange")
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
@@ -1192,7 +1190,7 @@ class Parser(object):
         symtab.enter_scope()
 
         statements = block.statements
-        while self.next_token_ != "}" or (not self.ignore_comments and len(self.cur_comments_)):
+        while self.next_token_ != "}" or self.cur_comments_:
             self.advance_lexer_(comments=True)
             if self.cur_token_type_ is Lexer.COMMENT:
                 statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
@@ -1370,7 +1368,7 @@ class Parser(object):
             raise FeatureLibError("Expected a string", self.cur_token_location_)
 
     def advance_lexer_(self, comments=False):
-        if not self.ignore_comments and comments and len(self.cur_comments_):
+        if comments and self.cur_comments_:
             self.cur_token_type_ = Lexer.COMMENT
             self.cur_token_, self.cur_token_location_ = self.cur_comments_.pop(0)
             return
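With the ignore_comments term gone from the guard, advance_lexer_ emits queued comments unconditionally whenever the caller passes comments=True: one comment per call, FIFO via pop(0), before any further real tokens. A reduced, self-contained model of that control flow (hypothetical class and token shapes, not the actual lexer):

class LexerModel:
    COMMENT = "COMMENT"

    def __init__(self, comment_queue, tokens):
        self.cur_comments_ = list(comment_queue)
        self.tokens_ = list(tokens)
        self.cur_token_type_ = self.cur_token_ = None

    def advance_lexer_(self, comments=False):
        # Queued comments take priority, one per call, oldest first.
        if comments and self.cur_comments_:
            self.cur_token_type_ = self.COMMENT
            self.cur_token_ = self.cur_comments_.pop(0)
            return
        self.cur_token_type_, self.cur_token_ = self.tokens_.pop(0)

lex = LexerModel(["# one", "# two"], [("NAME", "table")])
lex.advance_lexer_(comments=True); assert lex.cur_token_ == "# one"
lex.advance_lexer_(comments=True); assert lex.cur_token_ == "# two"
lex.advance_lexer_(comments=True); assert lex.cur_token_type_ == "NAME"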
Lib/fontTools/feaLib/builder_test.py
@@ -138,13 +138,9 @@ class BuilderTest(unittest.TestCase):
     def check_fea2fea_file(self, name, base=None, parser=Parser):
         font = makeTTFont()
         fname = (name + ".fea") if '.' not in name else name
-        temp = parser.ignore_comments
-        parser.ignore_comments = False
-        try:
         p = parser(self.getpath(fname), glyphMap=font.getReverseGlyphMap())
         doc = p.parse()
         actual = self.normal_fea(doc.asFea().split("\n"))
 
         with open(self.getpath(base or fname), "r", encoding="utf-8") as ofile:
             expected = self.normal_fea(ofile.readlines())
 
@@ -157,8 +153,6 @@ class BuilderTest(unittest.TestCase):
                 sys.stderr.write(line+"\n")
             self.fail("Fea2Fea output is different from expected. "
                       "Generated:\n{}\n".format("\n".join(actual)))
-        finally:
-            parser.ignore_comments = temp
 
     def normal_fea(self, lines):
         output = []
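The fea2fea test shrinks accordingly: it no longer flips Parser.ignore_comments and restores it in a finally block. That save/flip/restore pattern was fragile because a class attribute is shared, process-global state; a generic illustration of the hazard (not fonttools code):

class Cfg:
    flag = True

def run_with_flag_off():
    Cfg.flag = False  # visible to every other user of Cfg until restored
    try:
        assert Cfg.flag is False  # ...work that depends on the flag...
    finally:
        Cfg.flag = True  # forgetting this would leak into unrelated tests

run_with_flag_off()
assert Cfg.flag is True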
Lib/fontTools/feaLib/parser_test.py
@@ -1246,7 +1246,7 @@ class ParserTest(unittest.TestCase):
 
     def test_substitute_lookups(self):  # GSUB LookupType 6
         doc = Parser(self.getpath("spec5fi1.fea"), GLYPHMAP).parse()
-        [langsys, ligs, sub, feature] = doc.statements
+        [_, _, _, langsys, ligs, sub, feature] = doc.statements
         self.assertEqual(feature.statements[0].lookups, [ligs, None, sub])
         self.assertEqual(feature.statements[1].lookups, [ligs, None, sub])
 
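doc.statements now carries three extra leading entries for this file (presumably comment lines in spec5fi1.fea that the parser used to discard), so the unpacking gains underscore placeholders. The idiom in isolation, with illustrative data only:

# Placeholders keep positional unpacking aligned when a parse result grows
# leading entries the test does not inspect.
statements = ["# c1", "# c2", "# c3", "langsys", "ligs", "sub", "feature"]
[_, _, _, langsys, ligs, sub, feature] = statements
assert (langsys, feature) == ("langsys", "feature")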
@@ -1463,7 +1463,6 @@ class ParserTest(unittest.TestCase):
     def parse(self, text, glyphMap=GLYPHMAP):
         featurefile = UnicodeIO(text)
         p = Parser(featurefile, glyphMap)
-        p.ignore_comments = False
         return p.parse()
 
     @staticmethod