[feaLib.ast] make location=None by default
This makes it easier to construct a feaLib AST from code, where the location is not defined and should therefore be None. It also lets the other arguments become keyword arguments with default values, now that the first parameter is no longer 'location'.
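For illustration, a rough sketch of what this enables: building an AST purely from code with no locations. The sketch is not part of the commit and is not verified against it; the glyph and class names are made up, and it assumes GlyphClass accepts plain glyph name strings (as the parser-side code suggests) and that FeatureFile() takes no arguments.

    from fontTools.feaLib import ast

    # No location arguments are passed anywhere; they now default to None.
    doc = ast.FeatureFile()
    doc.statements.append(ast.LanguageSystemStatement("latn", "dflt"))
    # "ACCENTS", "acute", "grave" are made-up names for this sketch.
    accents = ast.GlyphClassDefinition(
        "ACCENTS", ast.GlyphClass(["acute", "grave"]))
    doc.statements.append(accents)
    print(doc.asFea())  # serialize the in-memory tree back to .fea text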
parent ff2144ee6a, commit e5c0bdd5df
@@ -49,7 +49,7 @@ def asFea(g):
 
 class Element(object):
 
-    def __init__(self, location):
+    def __init__(self, location=None):
         self.location = location
 
     def build(self, builder):
@@ -71,7 +71,7 @@ class Expression(Element):
 
 
 class Comment(Element):
-    def __init__(self, location, text):
+    def __init__(self, text, location=None):
         super(Comment, self).__init__(location)
         self.text = text
 
@@ -81,7 +81,7 @@ class Comment(Element):
 
 class GlyphName(Expression):
     """A single glyph name, such as cedilla."""
-    def __init__(self, location, glyph):
+    def __init__(self, glyph, location=None):
         Expression.__init__(self, location)
         self.glyph = glyph
 
@@ -94,7 +94,7 @@ class GlyphName(Expression):
 
 class GlyphClass(Expression):
     """A glyph class, such as [acute cedilla grave]."""
-    def __init__(self, location, glyphs=None):
+    def __init__(self, glyphs=None, location=None):
         Expression.__init__(self, location)
         self.glyphs = glyphs if glyphs is not None else []
         self.original = []
@@ -142,7 +142,7 @@ class GlyphClass(Expression):
 
 class GlyphClassName(Expression):
     """A glyph class name, such as @FRENCH_MARKS."""
-    def __init__(self, location, glyphclass):
+    def __init__(self, glyphclass, location=None):
         Expression.__init__(self, location)
         assert isinstance(glyphclass, GlyphClassDefinition)
         self.glyphclass = glyphclass
@@ -156,7 +156,7 @@ class GlyphClassName(Expression):
 
 class MarkClassName(Expression):
     """A mark class name, such as @FRENCH_MARKS defined with markClass."""
-    def __init__(self, location, markClass):
+    def __init__(self, markClass, location=None):
         Expression.__init__(self, location)
         assert isinstance(markClass, MarkClass)
         self.markClass = markClass
@@ -169,7 +169,7 @@ class MarkClassName(Expression):
 
 
 class AnonymousBlock(Statement):
-    def __init__(self, tag, content, location):
+    def __init__(self, tag, content, location=None):
         Statement.__init__(self, location)
         self.tag, self.content = tag, content
 
@@ -181,7 +181,7 @@ class AnonymousBlock(Statement):
 
 
 class Block(Statement):
-    def __init__(self, location):
+    def __init__(self, location=None):
         Statement.__init__(self, location)
         self.statements = []
 
@@ -205,7 +205,7 @@ class FeatureFile(Block):
 
 
 class FeatureBlock(Block):
-    def __init__(self, location, name, use_extension):
+    def __init__(self, name, use_extension=False, location=None):
         Block.__init__(self, location)
         self.name, self.use_extension = name, use_extension
 
@@ -230,7 +230,7 @@ class FeatureBlock(Block):
 
 
 class FeatureNamesBlock(Block):
-    def __init__(self, location):
+    def __init__(self, location=None):
         Block.__init__(self, location)
 
     def asFea(self, indent=""):
@@ -241,7 +241,7 @@ class FeatureNamesBlock(Block):
 
 
 class LookupBlock(Block):
-    def __init__(self, location, name, use_extension):
+    def __init__(self, name, use_extension=False, location=None):
         Block.__init__(self, location)
         self.name, self.use_extension = name, use_extension
 
@@ -259,7 +259,7 @@ class LookupBlock(Block):
 
 
 class TableBlock(Block):
-    def __init__(self, location, name):
+    def __init__(self, name, location=None):
         Block.__init__(self, location)
         self.name = name
 
@@ -272,7 +272,7 @@ class TableBlock(Block):
 
 class GlyphClassDefinition(Statement):
     """Example: @UPPERCASE = [A-Z];"""
-    def __init__(self, location, name, glyphs):
+    def __init__(self, name, glyphs, location=None):
         Statement.__init__(self, location)
         self.name = name
         self.glyphs = glyphs
@@ -286,8 +286,8 @@ class GlyphClassDefinition(Statement):
 
 class GlyphClassDefStatement(Statement):
     """Example: GlyphClassDef @UPPERCASE, [B], [C], [D];"""
-    def __init__(self, location, baseGlyphs, markGlyphs,
-                 ligatureGlyphs, componentGlyphs):
+    def __init__(self, baseGlyphs, markGlyphs, ligatureGlyphs,
+                 componentGlyphs, location=None):
         Statement.__init__(self, location)
         self.baseGlyphs, self.markGlyphs = (baseGlyphs, markGlyphs)
         self.ligatureGlyphs = ligatureGlyphs
@@ -328,9 +328,13 @@ class MarkClass(object):
         for glyph in definition.glyphSet():
             if glyph in self.glyphs:
                 otherLoc = self.glyphs[glyph].location
+                if otherLoc is None:
+                    end = ""
+                else:
+                    end = " at %s:%d:%d" % (
+                        otherLoc[0], otherLoc[1], otherLoc[2])
                 raise FeatureLibError(
-                    "Glyph %s already defined at %s:%d:%d" % (
-                        glyph, otherLoc[0], otherLoc[1], otherLoc[2]),
+                    "Glyph %s already defined%s" % (glyph, end),
                     definition.location)
             self.glyphs[glyph] = definition
 
@ -343,7 +347,7 @@ class MarkClass(object):
|
||||
|
||||
|
||||
class MarkClassDefinition(Statement):
|
||||
def __init__(self, location, markClass, anchor, glyphs):
|
||||
def __init__(self, markClass, anchor, glyphs, location=None):
|
||||
Statement.__init__(self, location)
|
||||
assert isinstance(markClass, MarkClass)
|
||||
assert isinstance(anchor, Anchor) and isinstance(glyphs, Expression)
|
||||
@ -359,7 +363,7 @@ class MarkClassDefinition(Statement):
|
||||
|
||||
|
||||
class AlternateSubstStatement(Statement):
|
||||
def __init__(self, location, prefix, glyph, suffix, replacement):
|
||||
def __init__(self, prefix, glyph, suffix, replacement, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.prefix, self.glyph, self.suffix = (prefix, glyph, suffix)
|
||||
self.replacement = replacement
|
||||
@ -391,8 +395,8 @@ class AlternateSubstStatement(Statement):
|
||||
|
||||
|
||||
class Anchor(Expression):
|
||||
def __init__(self, location, name, x, y, contourpoint,
|
||||
xDeviceTable, yDeviceTable):
|
||||
def __init__(self, x, y, name=None, contourpoint=None,
|
||||
xDeviceTable=None, yDeviceTable=None, location=None):
|
||||
Expression.__init__(self, location)
|
||||
self.name = name
|
||||
self.x, self.y, self.contourpoint = x, y, contourpoint
|
||||
@ -414,7 +418,7 @@ class Anchor(Expression):
|
||||
|
||||
|
||||
class AnchorDefinition(Statement):
|
||||
def __init__(self, location, name, x, y, contourpoint):
|
||||
def __init__(self, name, x, y, contourpoint=None, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.name, self.x, self.y, self.contourpoint = name, x, y, contourpoint
|
||||
|
||||
@ -427,7 +431,7 @@ class AnchorDefinition(Statement):
|
||||
|
||||
|
||||
class AttachStatement(Statement):
|
||||
def __init__(self, location, glyphs, contourPoints):
|
||||
def __init__(self, glyphs, contourPoints, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.glyphs, self.contourPoints = (glyphs, contourPoints)
|
||||
|
||||
@ -441,7 +445,7 @@ class AttachStatement(Statement):
|
||||
|
||||
|
||||
class ChainContextPosStatement(Statement):
|
||||
def __init__(self, location, prefix, glyphs, suffix, lookups):
|
||||
def __init__(self, prefix, glyphs, suffix, lookups, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.prefix, self.glyphs, self.suffix = prefix, glyphs, suffix
|
||||
self.lookups = lookups
|
||||
@ -473,7 +477,7 @@ class ChainContextPosStatement(Statement):
|
||||
|
||||
|
||||
class ChainContextSubstStatement(Statement):
|
||||
def __init__(self, location, prefix, glyphs, suffix, lookups):
|
||||
def __init__(self, prefix, glyphs, suffix, lookups, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.prefix, self.glyphs, self.suffix = prefix, glyphs, suffix
|
||||
self.lookups = lookups
|
||||
@ -505,7 +509,7 @@ class ChainContextSubstStatement(Statement):
|
||||
|
||||
|
||||
class CursivePosStatement(Statement):
|
||||
def __init__(self, location, glyphclass, entryAnchor, exitAnchor):
|
||||
def __init__(self, glyphclass, entryAnchor, exitAnchor, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.glyphclass = glyphclass
|
||||
self.entryAnchor, self.exitAnchor = entryAnchor, exitAnchor
|
||||
@ -522,7 +526,7 @@ class CursivePosStatement(Statement):
|
||||
|
||||
class FeatureReferenceStatement(Statement):
|
||||
"""Example: feature salt;"""
|
||||
def __init__(self, location, featureName):
|
||||
def __init__(self, featureName, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.location, self.featureName = (location, featureName)
|
||||
|
||||
@ -534,7 +538,7 @@ class FeatureReferenceStatement(Statement):
|
||||
|
||||
|
||||
class IgnorePosStatement(Statement):
|
||||
def __init__(self, location, chainContexts):
|
||||
def __init__(self, chainContexts, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.chainContexts = chainContexts
|
||||
|
||||
@ -563,7 +567,7 @@ class IgnorePosStatement(Statement):
|
||||
|
||||
|
||||
class IgnoreSubstStatement(Statement):
|
||||
def __init__(self, location, chainContexts):
|
||||
def __init__(self, chainContexts, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.chainContexts = chainContexts
|
||||
|
||||
@ -592,7 +596,7 @@ class IgnoreSubstStatement(Statement):
|
||||
|
||||
|
||||
class IncludeStatement(Statement):
|
||||
def __init__(self, location, filename):
|
||||
def __init__(self, filename, location=None):
|
||||
super(IncludeStatement, self).__init__(location)
|
||||
self.filename = filename
|
||||
|
||||
@ -608,7 +612,8 @@ class IncludeStatement(Statement):
|
||||
|
||||
|
||||
class LanguageStatement(Statement):
|
||||
def __init__(self, location, language, include_default, required):
|
||||
def __init__(self, language, include_default=True, required=False,
|
||||
location=None):
|
||||
Statement.__init__(self, location)
|
||||
assert(len(language) == 4)
|
||||
self.language = language
|
||||
@ -631,7 +636,7 @@ class LanguageStatement(Statement):
|
||||
|
||||
|
||||
class LanguageSystemStatement(Statement):
|
||||
def __init__(self, location, script, language):
|
||||
def __init__(self, script, language, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.script, self.language = (script, language)
|
||||
|
||||
@ -643,7 +648,7 @@ class LanguageSystemStatement(Statement):
|
||||
|
||||
|
||||
class FontRevisionStatement(Statement):
|
||||
def __init__(self, location, revision):
|
||||
def __init__(self, revision, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.revision = revision
|
||||
|
||||
@ -655,7 +660,7 @@ class FontRevisionStatement(Statement):
|
||||
|
||||
|
||||
class LigatureCaretByIndexStatement(Statement):
|
||||
def __init__(self, location, glyphs, carets):
|
||||
def __init__(self, glyphs, carets, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.glyphs, self.carets = (glyphs, carets)
|
||||
|
||||
@ -669,7 +674,7 @@ class LigatureCaretByIndexStatement(Statement):
|
||||
|
||||
|
||||
class LigatureCaretByPosStatement(Statement):
|
||||
def __init__(self, location, glyphs, carets):
|
||||
def __init__(self, glyphs, carets, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.glyphs, self.carets = (glyphs, carets)
|
||||
|
||||
@ -683,8 +688,8 @@ class LigatureCaretByPosStatement(Statement):
|
||||
|
||||
|
||||
class LigatureSubstStatement(Statement):
|
||||
def __init__(self, location, prefix, glyphs, suffix, replacement,
|
||||
forceChain):
|
||||
def __init__(self, prefix, glyphs, suffix, replacement,
|
||||
forceChain, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.prefix, self.glyphs, self.suffix = (prefix, glyphs, suffix)
|
||||
self.replacement, self.forceChain = replacement, forceChain
|
||||
@ -714,7 +719,8 @@ class LigatureSubstStatement(Statement):
|
||||
|
||||
|
||||
class LookupFlagStatement(Statement):
|
||||
def __init__(self, location, value, markAttachment, markFilteringSet):
|
||||
def __init__(self, value=0, markAttachment=None, markFilteringSet=None,
|
||||
location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.value = value
|
||||
self.markAttachment = markAttachment
|
||||
@ -747,7 +753,7 @@ class LookupFlagStatement(Statement):
|
||||
|
||||
|
||||
class LookupReferenceStatement(Statement):
|
||||
def __init__(self, location, lookup):
|
||||
def __init__(self, lookup, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.location, self.lookup = (location, lookup)
|
||||
|
||||
@ -759,7 +765,7 @@ class LookupReferenceStatement(Statement):
|
||||
|
||||
|
||||
class MarkBasePosStatement(Statement):
|
||||
def __init__(self, location, base, marks):
|
||||
def __init__(self, base, marks, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.base, self.marks = base, marks
|
||||
|
||||
@ -775,7 +781,7 @@ class MarkBasePosStatement(Statement):
|
||||
|
||||
|
||||
class MarkLigPosStatement(Statement):
|
||||
def __init__(self, location, ligatures, marks):
|
||||
def __init__(self, ligatures, marks, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.ligatures, self.marks = ligatures, marks
|
||||
|
||||
@ -799,7 +805,7 @@ class MarkLigPosStatement(Statement):
|
||||
|
||||
|
||||
class MarkMarkPosStatement(Statement):
|
||||
def __init__(self, location, baseMarks, marks):
|
||||
def __init__(self, baseMarks, marks, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.baseMarks, self.marks = baseMarks, marks
|
||||
|
||||
@ -815,7 +821,7 @@ class MarkMarkPosStatement(Statement):
|
||||
|
||||
|
||||
class MultipleSubstStatement(Statement):
|
||||
def __init__(self, location, prefix, glyph, suffix, replacement):
|
||||
def __init__(self, prefix, glyph, suffix, replacement, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.prefix, self.glyph, self.suffix = prefix, glyph, suffix
|
||||
self.replacement = replacement
|
||||
@ -843,8 +849,9 @@ class MultipleSubstStatement(Statement):
|
||||
|
||||
|
||||
class PairPosStatement(Statement):
|
||||
def __init__(self, location, enumerated,
|
||||
glyphs1, valuerecord1, glyphs2, valuerecord2):
|
||||
def __init__(self, enumerated,
|
||||
glyphs1, valuerecord1, glyphs2, valuerecord2,
|
||||
location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.enumerated = enumerated
|
||||
self.glyphs1, self.valuerecord1 = glyphs1, valuerecord1
|
||||
@ -884,7 +891,8 @@ class PairPosStatement(Statement):
|
||||
|
||||
|
||||
class ReverseChainSingleSubstStatement(Statement):
|
||||
def __init__(self, location, old_prefix, old_suffix, glyphs, replacements):
|
||||
def __init__(self, old_prefix, old_suffix, glyphs, replacements,
|
||||
location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.old_prefix, self.old_suffix = old_prefix, old_suffix
|
||||
self.glyphs = glyphs
|
||||
@ -915,7 +923,8 @@ class ReverseChainSingleSubstStatement(Statement):
|
||||
|
||||
|
||||
class SingleSubstStatement(Statement):
|
||||
def __init__(self, location, glyphs, replace, prefix, suffix, forceChain):
|
||||
def __init__(self, glyphs, replace, prefix, suffix, forceChain,
|
||||
location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.prefix, self.suffix = prefix, suffix
|
||||
self.forceChain = forceChain
|
||||
@ -948,7 +957,7 @@ class SingleSubstStatement(Statement):
|
||||
|
||||
|
||||
class ScriptStatement(Statement):
|
||||
def __init__(self, location, script):
|
||||
def __init__(self, script, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.script = script
|
||||
|
||||
@ -960,7 +969,7 @@ class ScriptStatement(Statement):
|
||||
|
||||
|
||||
class SinglePosStatement(Statement):
|
||||
def __init__(self, location, pos, prefix, suffix, forceChain):
|
||||
def __init__(self, pos, prefix, suffix, forceChain, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.pos, self.prefix, self.suffix = pos, prefix, suffix
|
||||
self.forceChain = forceChain
|
||||
@ -989,14 +998,16 @@ class SinglePosStatement(Statement):
|
||||
|
||||
|
||||
class SubtableStatement(Statement):
|
||||
def __init__(self, location):
|
||||
def __init__(self, location=None):
|
||||
Statement.__init__(self, location)
|
||||
|
||||
|
||||
class ValueRecord(Expression):
|
||||
def __init__(self, location, vertical,
|
||||
xPlacement, yPlacement, xAdvance, yAdvance,
|
||||
xPlaDevice, yPlaDevice, xAdvDevice, yAdvDevice):
|
||||
def __init__(self, xPlacement=None, yPlacement=None,
|
||||
xAdvance=None, yAdvance=None,
|
||||
xPlaDevice=None, yPlaDevice=None,
|
||||
xAdvDevice=None, yAdvDevice=None,
|
||||
vertical=False, location=None):
|
||||
Expression.__init__(self, location)
|
||||
self.xPlacement, self.yPlacement = (xPlacement, yPlacement)
|
||||
self.xAdvance, self.yAdvance = (xAdvance, yAdvance)
|
||||
@ -1049,7 +1060,7 @@ class ValueRecord(Expression):
|
||||
|
||||
|
||||
class ValueRecordDefinition(Statement):
|
||||
def __init__(self, location, name, value):
|
||||
def __init__(self, name, value, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.name = name
|
||||
self.value = value
|
||||
@ -1068,8 +1079,8 @@ def simplify_name_attributes(pid, eid, lid):
|
||||
|
||||
|
||||
class NameRecord(Statement):
|
||||
def __init__(self, location, nameID, platformID,
|
||||
platEncID, langID, string):
|
||||
def __init__(self, nameID, platformID, platEncID, langID, string,
|
||||
location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.nameID = nameID
|
||||
self.platformID = platformID
|
||||
@ -1123,8 +1134,8 @@ class FeatureNameStatement(NameRecord):
|
||||
|
||||
|
||||
class SizeParameters(Statement):
|
||||
def __init__(self, location, DesignSize, SubfamilyID, RangeStart,
|
||||
RangeEnd):
|
||||
def __init__(self, DesignSize, SubfamilyID, RangeStart, RangeEnd,
|
||||
location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.DesignSize = DesignSize
|
||||
self.SubfamilyID = SubfamilyID
|
||||
@ -1143,7 +1154,7 @@ class SizeParameters(Statement):
|
||||
|
||||
|
||||
class BaseAxis(Statement):
|
||||
def __init__(self, location, bases, scripts, vertical):
|
||||
def __init__(self, bases, scripts, vertical, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.bases = bases
|
||||
self.scripts = scripts
|
||||
@ -1160,7 +1171,7 @@ class BaseAxis(Statement):
|
||||
|
||||
|
||||
class OS2Field(Statement):
|
||||
def __init__(self, location, key, value):
|
||||
def __init__(self, key, value, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.key = key
|
||||
self.value = value
|
||||
@ -1185,7 +1196,7 @@ class OS2Field(Statement):
|
||||
|
||||
|
||||
class HheaField(Statement):
|
||||
def __init__(self, location, key, value):
|
||||
def __init__(self, key, value, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.key = key
|
||||
self.value = value
|
||||
@ -1200,7 +1211,7 @@ class HheaField(Statement):
|
||||
|
||||
|
||||
class VheaField(Statement):
|
||||
def __init__(self, location, key, value):
|
||||
def __init__(self, key, value, location=None):
|
||||
Statement.__init__(self, location)
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
@ -52,7 +52,9 @@ class Parser(object):
|
||||
while self.next_token_type_ is not None or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(
|
||||
self.ast.Comment(self.cur_token_,
|
||||
location=self.cur_token_location_))
|
||||
elif self.is_cur_keyword_("include"):
|
||||
statements.append(self.parse_include_())
|
||||
elif self.cur_token_type_ is Lexer.GLYPHCLASS:
|
||||
@ -103,9 +105,11 @@ class Parser(object):
|
||||
'Unknown anchor "%s"' % name,
|
||||
self.cur_token_location_)
|
||||
self.expect_symbol_(">")
|
||||
return self.ast.Anchor(location, name, anchordef.x, anchordef.y,
|
||||
anchordef.contourpoint,
|
||||
xDeviceTable=None, yDeviceTable=None)
|
||||
return self.ast.Anchor(anchordef.x, anchordef.y,
|
||||
name=name,
|
||||
contourpoint=anchordef.contourpoint,
|
||||
xDeviceTable=None, yDeviceTable=None,
|
||||
location=location)
|
||||
|
||||
x, y = self.expect_number_(), self.expect_number_()
|
||||
|
||||
@ -121,8 +125,11 @@ class Parser(object):
|
||||
xDeviceTable, yDeviceTable = None, None
|
||||
|
||||
self.expect_symbol_(">")
|
||||
return self.ast.Anchor(location, None, x, y, contourpoint,
|
||||
xDeviceTable, yDeviceTable)
|
||||
return self.ast.Anchor(x, y, name=None,
|
||||
contourpoint=contourpoint,
|
||||
xDeviceTable=xDeviceTable,
|
||||
yDeviceTable=yDeviceTable,
|
||||
location=location)
|
||||
|
||||
def parse_anchor_marks_(self):
|
||||
"""Parses a sequence of [<anchor> mark @MARKCLASS]*."""
|
||||
@ -146,7 +153,9 @@ class Parser(object):
|
||||
contourpoint = self.expect_number_()
|
||||
name = self.expect_name_()
|
||||
self.expect_symbol_(";")
|
||||
anchordef = self.ast.AnchorDefinition(location, name, x, y, contourpoint)
|
||||
anchordef = self.ast.AnchorDefinition(name, x, y,
|
||||
contourpoint=contourpoint,
|
||||
location=location)
|
||||
self.anchors_.define(name, anchordef)
|
||||
return anchordef
|
||||
|
||||
@ -159,7 +168,7 @@ class Parser(object):
|
||||
end_tag = self.expect_tag_()
|
||||
assert tag == end_tag, "bad splitting in Lexer.scan_anonymous_block()"
|
||||
self.expect_symbol_(';')
|
||||
return self.ast.AnonymousBlock(tag, content, location)
|
||||
return self.ast.AnonymousBlock(tag, content, location=location)
|
||||
|
||||
def parse_attach_(self):
|
||||
assert self.is_cur_keyword_("Attach")
|
||||
@ -169,7 +178,8 @@ class Parser(object):
|
||||
while self.next_token_ != ";":
|
||||
contourPoints.add(self.expect_number_())
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.AttachStatement(location, glyphs, contourPoints)
|
||||
return self.ast.AttachStatement(glyphs, contourPoints,
|
||||
location=location)
|
||||
|
||||
def parse_enumerate_(self, vertical):
|
||||
assert self.cur_token_ in {"enumerate", "enum"}
|
||||
@ -200,8 +210,9 @@ class Parser(object):
|
||||
else:
|
||||
componentGlyphs = None
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.GlyphClassDefStatement(location, baseGlyphs, markGlyphs,
|
||||
ligatureGlyphs, componentGlyphs)
|
||||
return self.ast.GlyphClassDefStatement(baseGlyphs, markGlyphs,
|
||||
ligatureGlyphs, componentGlyphs,
|
||||
location=location)
|
||||
|
||||
def parse_glyphclass_definition_(self):
|
||||
"""Parses glyph class definitions such as '@UPPERCASE = [A-Z];'"""
|
||||
@ -209,7 +220,8 @@ class Parser(object):
|
||||
self.expect_symbol_("=")
|
||||
glyphs = self.parse_glyphclass_(accept_glyphname=False)
|
||||
self.expect_symbol_(";")
|
||||
glyphclass = self.ast.GlyphClassDefinition(location, name, glyphs)
|
||||
glyphclass = self.ast.GlyphClassDefinition(name, glyphs,
|
||||
location=location)
|
||||
self.glyphclasses_.define(name, glyphclass)
|
||||
return glyphclass
|
||||
|
||||
@ -256,7 +268,7 @@ class Parser(object):
|
||||
if (accept_glyphname and
|
||||
self.next_token_type_ in (Lexer.NAME, Lexer.CID)):
|
||||
glyph = self.expect_glyph_()
|
||||
return self.ast.GlyphName(self.cur_token_location_, glyph)
|
||||
return self.ast.GlyphName(glyph, location=self.cur_token_location_)
|
||||
if self.next_token_type_ is Lexer.GLYPHCLASS:
|
||||
self.advance_lexer_()
|
||||
gc = self.glyphclasses_.resolve(self.cur_token_)
|
||||
@ -265,13 +277,15 @@ class Parser(object):
|
||||
"Unknown glyph class @%s" % self.cur_token_,
|
||||
self.cur_token_location_)
|
||||
if isinstance(gc, self.ast.MarkClass):
|
||||
return self.ast.MarkClassName(self.cur_token_location_, gc)
|
||||
return self.ast.MarkClassName(
|
||||
gc, location=self.cur_token_location_)
|
||||
else:
|
||||
return self.ast.GlyphClassName(self.cur_token_location_, gc)
|
||||
return self.ast.GlyphClassName(
|
||||
gc, location=self.cur_token_location_)
|
||||
|
||||
self.expect_symbol_("[")
|
||||
location = self.cur_token_location_
|
||||
glyphs = self.ast.GlyphClass(location)
|
||||
glyphs = self.ast.GlyphClass(location=location)
|
||||
while self.next_token_ != "]":
|
||||
if self.next_token_type_ is Lexer.NAME:
|
||||
glyph = self.expect_glyph_()
|
||||
@ -310,9 +324,11 @@ class Parser(object):
|
||||
"Unknown glyph class @%s" % self.cur_token_,
|
||||
self.cur_token_location_)
|
||||
if isinstance(gc, self.ast.MarkClass):
|
||||
gc = self.ast.MarkClassName(self.cur_token_location_, gc)
|
||||
gc = self.ast.MarkClassName(
|
||||
gc, location=self.cur_token_location_)
|
||||
else:
|
||||
gc = self.ast.GlyphClassName(self.cur_token_location_, gc)
|
||||
gc = self.ast.GlyphClassName(
|
||||
gc, location=self.cur_token_location_)
|
||||
glyphs.add_class(gc)
|
||||
else:
|
||||
raise FeatureLibError(
|
||||
@ -330,9 +346,11 @@ class Parser(object):
|
||||
"Unknown glyph class @%s" % name,
|
||||
self.cur_token_location_)
|
||||
if isinstance(gc, self.ast.MarkClass):
|
||||
return self.ast.MarkClassName(self.cur_token_location_, gc)
|
||||
return self.ast.MarkClassName(
|
||||
gc, location=self.cur_token_location_)
|
||||
else:
|
||||
return self.ast.GlyphClassName(self.cur_token_location_, gc)
|
||||
return self.ast.GlyphClassName(
|
||||
gc, location=self.cur_token_location_)
|
||||
|
||||
def parse_glyph_pattern_(self, vertical):
|
||||
prefix, glyphs, lookups, values, suffix = ([], [], [], [], [])
|
||||
@ -412,14 +430,16 @@ class Parser(object):
|
||||
raise FeatureLibError(
|
||||
"No lookups can be specified for \"ignore sub\"",
|
||||
location)
|
||||
return self.ast.IgnoreSubstStatement(location, chainContext)
|
||||
return self.ast.IgnoreSubstStatement(chainContext,
|
||||
location=location)
|
||||
if self.cur_token_ in ["position", "pos"]:
|
||||
chainContext, hasLookups = self.parse_chain_context_()
|
||||
if hasLookups:
|
||||
raise FeatureLibError(
|
||||
"No lookups can be specified for \"ignore pos\"",
|
||||
location)
|
||||
return self.ast.IgnorePosStatement(location, chainContext)
|
||||
return self.ast.IgnorePosStatement(chainContext,
|
||||
location=location)
|
||||
raise FeatureLibError(
|
||||
"Expected \"substitute\" or \"position\"",
|
||||
self.cur_token_location_)
|
||||
@ -429,7 +449,7 @@ class Parser(object):
|
||||
location = self.cur_token_location_
|
||||
filename = self.expect_filename_()
|
||||
# self.expect_symbol_(";")
|
||||
return ast.IncludeStatement(location, filename)
|
||||
return ast.IncludeStatement(filename, location=location)
|
||||
|
||||
def parse_language_(self):
|
||||
assert self.is_cur_keyword_("language")
|
||||
@ -442,8 +462,9 @@ class Parser(object):
|
||||
self.expect_keyword_("required")
|
||||
required = True
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.LanguageStatement(location, language,
|
||||
include_default, required)
|
||||
return self.ast.LanguageStatement(language,
|
||||
include_default, required,
|
||||
location=location)
|
||||
|
||||
def parse_ligatureCaretByIndex_(self):
|
||||
assert self.is_cur_keyword_("LigatureCaretByIndex")
|
||||
@ -453,7 +474,8 @@ class Parser(object):
|
||||
while self.next_token_ != ";":
|
||||
carets.append(self.expect_number_())
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.LigatureCaretByIndexStatement(location, glyphs, carets)
|
||||
return self.ast.LigatureCaretByIndexStatement(glyphs, carets,
|
||||
location=location)
|
||||
|
||||
def parse_ligatureCaretByPos_(self):
|
||||
assert self.is_cur_keyword_("LigatureCaretByPos")
|
||||
@ -463,7 +485,8 @@ class Parser(object):
|
||||
while self.next_token_ != ";":
|
||||
carets.append(self.expect_number_())
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.LigatureCaretByPosStatement(location, glyphs, carets)
|
||||
return self.ast.LigatureCaretByPosStatement(glyphs, carets,
|
||||
location=location)
|
||||
|
||||
def parse_lookup_(self, vertical):
|
||||
assert self.is_cur_keyword_("lookup")
|
||||
@ -475,14 +498,15 @@ class Parser(object):
|
||||
raise FeatureLibError("Unknown lookup \"%s\"" % name,
|
||||
self.cur_token_location_)
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.LookupReferenceStatement(location, lookup)
|
||||
return self.ast.LookupReferenceStatement(lookup,
|
||||
location=location)
|
||||
|
||||
use_extension = False
|
||||
if self.next_token_ == "useExtension":
|
||||
self.expect_keyword_("useExtension")
|
||||
use_extension = True
|
||||
|
||||
block = self.ast.LookupBlock(location, name, use_extension)
|
||||
block = self.ast.LookupBlock(name, use_extension, location=location)
|
||||
self.parse_block_(block, vertical)
|
||||
self.lookups_.define(name, block)
|
||||
return block
|
||||
@ -495,7 +519,7 @@ class Parser(object):
|
||||
if self.next_token_type_ == Lexer.NUMBER:
|
||||
value = self.expect_number_()
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.LookupFlagStatement(location, value, None, None)
|
||||
return self.ast.LookupFlagStatement(value, location=location)
|
||||
|
||||
# format A: "lookupflag RightToLeft MarkAttachmentType @M;"
|
||||
value, markAttachment, markFilteringSet = 0, None, None
|
||||
@ -523,8 +547,10 @@ class Parser(object):
|
||||
'"%s" is not a recognized lookupflag' % self.next_token_,
|
||||
self.next_token_location_)
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.LookupFlagStatement(location, value,
|
||||
markAttachment, markFilteringSet)
|
||||
return self.ast.LookupFlagStatement(value,
|
||||
markAttachment=markAttachment,
|
||||
markFilteringSet=markFilteringSet,
|
||||
location=location)
|
||||
|
||||
def parse_markClass_(self):
|
||||
assert self.is_cur_keyword_("markClass")
|
||||
@ -538,7 +564,8 @@ class Parser(object):
|
||||
markClass = self.ast.MarkClass(name)
|
||||
self.doc_.markClasses[name] = markClass
|
||||
self.glyphclasses_.define(name, markClass)
|
||||
mcdef = self.ast.MarkClassDefinition(location, markClass, anchor, glyphs)
|
||||
mcdef = self.ast.MarkClassDefinition(markClass, anchor, glyphs,
|
||||
location=location)
|
||||
markClass.addDefinition(mcdef)
|
||||
return mcdef
|
||||
|
||||
@ -565,7 +592,7 @@ class Parser(object):
|
||||
"If \"lookup\" is present, no values must be specified",
|
||||
location)
|
||||
return self.ast.ChainContextPosStatement(
|
||||
location, prefix, glyphs, suffix, lookups)
|
||||
prefix, glyphs, suffix, lookups, location=location)
|
||||
|
||||
# Pair positioning, format A: "pos V 10 A -10;"
|
||||
# Pair positioning, format B: "pos V A -20;"
|
||||
@ -573,14 +600,16 @@ class Parser(object):
|
||||
if values[0] is None: # Format B: "pos V A -20;"
|
||||
values.reverse()
|
||||
return self.ast.PairPosStatement(
|
||||
location, enumerated,
|
||||
glyphs[0], values[0], glyphs[1], values[1])
|
||||
enumerated,
|
||||
glyphs[0], values[0], glyphs[1], values[1],
|
||||
location=location)
|
||||
|
||||
if enumerated:
|
||||
raise FeatureLibError(
|
||||
'"enumerate" is only allowed with pair positionings', location)
|
||||
return self.ast.SinglePosStatement(location, list(zip(glyphs, values)),
|
||||
prefix, suffix, forceChain=hasMarks)
|
||||
return self.ast.SinglePosStatement(list(zip(glyphs, values)),
|
||||
prefix, suffix, forceChain=hasMarks,
|
||||
location=location)
|
||||
|
||||
def parse_position_cursive_(self, enumerated, vertical):
|
||||
location = self.cur_token_location_
|
||||
@ -595,7 +624,7 @@ class Parser(object):
|
||||
exitAnchor = self.parse_anchor_()
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.CursivePosStatement(
|
||||
location, glyphclass, entryAnchor, exitAnchor)
|
||||
glyphclass, entryAnchor, exitAnchor, location=location)
|
||||
|
||||
def parse_position_base_(self, enumerated, vertical):
|
||||
location = self.cur_token_location_
|
||||
@ -608,7 +637,7 @@ class Parser(object):
|
||||
base = self.parse_glyphclass_(accept_glyphname=True)
|
||||
marks = self.parse_anchor_marks_()
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.MarkBasePosStatement(location, base, marks)
|
||||
return self.ast.MarkBasePosStatement(base, marks, location=location)
|
||||
|
||||
def parse_position_ligature_(self, enumerated, vertical):
|
||||
location = self.cur_token_location_
|
||||
@ -624,7 +653,7 @@ class Parser(object):
|
||||
self.expect_keyword_("ligComponent")
|
||||
marks.append(self.parse_anchor_marks_())
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.MarkLigPosStatement(location, ligatures, marks)
|
||||
return self.ast.MarkLigPosStatement(ligatures, marks, location=location)
|
||||
|
||||
def parse_position_mark_(self, enumerated, vertical):
|
||||
location = self.cur_token_location_
|
||||
@ -637,13 +666,14 @@ class Parser(object):
|
||||
baseMarks = self.parse_glyphclass_(accept_glyphname=True)
|
||||
marks = self.parse_anchor_marks_()
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.MarkMarkPosStatement(location, baseMarks, marks)
|
||||
return self.ast.MarkMarkPosStatement(baseMarks, marks,
|
||||
location=location)
|
||||
|
||||
def parse_script_(self):
|
||||
assert self.is_cur_keyword_("script")
|
||||
location, script = self.cur_token_location_, self.expect_script_tag_()
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.ScriptStatement(location, script)
|
||||
return self.ast.ScriptStatement(script, location=location)
|
||||
|
||||
def parse_substitute_(self):
|
||||
assert self.cur_token_ in {"substitute", "sub", "reversesub", "rsub"}
|
||||
@ -687,7 +717,7 @@ class Parser(object):
|
||||
'Expected a single glyphclass after "from"',
|
||||
location)
|
||||
return self.ast.AlternateSubstStatement(
|
||||
location, old_prefix, old[0], old_suffix, new[0])
|
||||
old_prefix, old[0], old_suffix, new[0], location=location)
|
||||
|
||||
num_lookups = len([l for l in lookups if l is not None])
|
||||
|
||||
@ -707,9 +737,10 @@ class Parser(object):
|
||||
'but found a glyph class with %d elements' %
|
||||
(len(glyphs), len(replacements)), location)
|
||||
return self.ast.SingleSubstStatement(
|
||||
location, old, new,
|
||||
old, new,
|
||||
old_prefix, old_suffix,
|
||||
forceChain=hasMarks
|
||||
forceChain=hasMarks,
|
||||
location=location
|
||||
)
|
||||
|
||||
# GSUB lookup type 2: Multiple substitution.
|
||||
@ -719,8 +750,8 @@ class Parser(object):
|
||||
len(new) > 1 and max([len(n.glyphSet()) for n in new]) == 1 and
|
||||
num_lookups == 0):
|
||||
return self.ast.MultipleSubstStatement(
|
||||
location, old_prefix, tuple(old[0].glyphSet())[0], old_suffix,
|
||||
tuple([list(n.glyphSet())[0] for n in new]))
|
||||
old_prefix, tuple(old[0].glyphSet())[0], old_suffix,
|
||||
tuple([list(n.glyphSet())[0] for n in new]), location=location)
|
||||
|
||||
# GSUB lookup type 4: Ligature substitution.
|
||||
# Format: "substitute f f i by f_f_i;"
|
||||
@ -729,8 +760,9 @@ class Parser(object):
|
||||
len(new[0].glyphSet()) == 1 and
|
||||
num_lookups == 0):
|
||||
return self.ast.LigatureSubstStatement(
|
||||
location, old_prefix, old, old_suffix,
|
||||
list(new[0].glyphSet())[0], forceChain=hasMarks)
|
||||
old_prefix, old, old_suffix,
|
||||
list(new[0].glyphSet())[0], forceChain=hasMarks,
|
||||
location=location)
|
||||
|
||||
# GSUB lookup type 8: Reverse chaining substitution.
|
||||
if reverse:
|
||||
@ -758,7 +790,7 @@ class Parser(object):
|
||||
'but found a glyph class with %d elements' %
|
||||
(len(glyphs), len(replacements)), location)
|
||||
return self.ast.ReverseChainSingleSubstStatement(
|
||||
location, old_prefix, old_suffix, old, new)
|
||||
old_prefix, old_suffix, old, new, location=location)
|
||||
|
||||
if len(old) > 1 and len(new) > 1:
|
||||
raise FeatureLibError(
|
||||
@ -769,14 +801,14 @@ class Parser(object):
|
||||
# GSUB lookup type 6: Chaining contextual substitution.
|
||||
assert len(new) == 0, new
|
||||
rule = self.ast.ChainContextSubstStatement(
|
||||
location, old_prefix, old, old_suffix, lookups)
|
||||
old_prefix, old, old_suffix, lookups, location=location)
|
||||
return rule
|
||||
|
||||
def parse_subtable_(self):
|
||||
assert self.is_cur_keyword_("subtable")
|
||||
location = self.cur_token_location_
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.SubtableStatement(location)
|
||||
return self.ast.SubtableStatement(location=location)
|
||||
|
||||
def parse_size_parameters_(self):
|
||||
assert self.is_cur_keyword_("parameters")
|
||||
@ -791,20 +823,22 @@ class Parser(object):
|
||||
RangeEnd = self.expect_decipoint_()
|
||||
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.SizeParameters(location, DesignSize, SubfamilyID,
|
||||
RangeStart, RangeEnd)
|
||||
return self.ast.SizeParameters(DesignSize, SubfamilyID,
|
||||
RangeStart, RangeEnd,
|
||||
location=location)
|
||||
|
||||
def parse_size_menuname_(self):
|
||||
assert self.is_cur_keyword_("sizemenuname")
|
||||
location = self.cur_token_location_
|
||||
platformID, platEncID, langID, string = self.parse_name_()
|
||||
return self.ast.FeatureNameStatement(location, "size", platformID,
|
||||
platEncID, langID, string)
|
||||
return self.ast.FeatureNameStatement("size", platformID,
|
||||
platEncID, langID, string,
|
||||
location=location)
|
||||
|
||||
def parse_table_(self):
|
||||
assert self.is_cur_keyword_("table")
|
||||
location, name = self.cur_token_location_, self.expect_tag_()
|
||||
table = self.ast.TableBlock(location, name)
|
||||
table = self.ast.TableBlock(name, location=location)
|
||||
self.expect_symbol_("{")
|
||||
handler = {
|
||||
"GDEF": self.parse_table_GDEF_,
|
||||
@ -833,7 +867,8 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.is_cur_keyword_("Attach"):
|
||||
statements.append(self.parse_attach_())
|
||||
elif self.is_cur_keyword_("GlyphClassDef"):
|
||||
@ -855,7 +890,8 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.is_cur_keyword_("FontRevision"):
|
||||
statements.append(self.parse_FontRevision_())
|
||||
elif self.cur_token_ == ";":
|
||||
@ -870,12 +906,14 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.cur_token_type_ is Lexer.NAME and self.cur_token_ in fields:
|
||||
key = self.cur_token_.lower()
|
||||
value = self.expect_number_()
|
||||
statements.append(
|
||||
self.ast.HheaField(self.cur_token_location_, key, value))
|
||||
self.ast.HheaField(key, value,
|
||||
location=self.cur_token_location_))
|
||||
if self.next_token_ != ";":
|
||||
raise FeatureLibError("Incomplete statement", self.next_token_location_)
|
||||
elif self.cur_token_ == ";":
|
||||
@ -891,12 +929,14 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.cur_token_type_ is Lexer.NAME and self.cur_token_ in fields:
|
||||
key = self.cur_token_.lower()
|
||||
value = self.expect_number_()
|
||||
statements.append(
|
||||
self.ast.VheaField(self.cur_token_location_, key, value))
|
||||
self.ast.VheaField(key, value,
|
||||
location=self.cur_token_location_))
|
||||
if self.next_token_ != ";":
|
||||
raise FeatureLibError("Incomplete statement", self.next_token_location_)
|
||||
elif self.cur_token_ == ";":
|
||||
@ -911,7 +951,8 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.is_cur_keyword_("nameid"):
|
||||
statement = self.parse_nameid_()
|
||||
if statement:
|
||||
@ -966,8 +1007,8 @@ class Parser(object):
|
||||
return None
|
||||
|
||||
platformID, platEncID, langID, string = self.parse_name_()
|
||||
return self.ast.NameRecord(location, nameID, platformID, platEncID,
|
||||
langID, string)
|
||||
return self.ast.NameRecord(nameID, platformID, platEncID,
|
||||
langID, string, location=location)
|
||||
|
||||
def unescape_string_(self, string, encoding):
|
||||
if encoding == "utf_16_be":
|
||||
@ -996,21 +1037,24 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.is_cur_keyword_("HorizAxis.BaseTagList"):
|
||||
horiz_bases = self.parse_base_tag_list_()
|
||||
elif self.is_cur_keyword_("HorizAxis.BaseScriptList"):
|
||||
horiz_scripts = self.parse_base_script_list_(len(horiz_bases))
|
||||
statements.append(
|
||||
self.ast.BaseAxis(self.cur_token_location_, horiz_bases,
|
||||
horiz_scripts, False))
|
||||
self.ast.BaseAxis(horiz_bases,
|
||||
horiz_scripts, False,
|
||||
location=self.cur_token_location_))
|
||||
elif self.is_cur_keyword_("VertAxis.BaseTagList"):
|
||||
vert_bases = self.parse_base_tag_list_()
|
||||
elif self.is_cur_keyword_("VertAxis.BaseScriptList"):
|
||||
vert_scripts = self.parse_base_script_list_(len(vert_bases))
|
||||
statements.append(
|
||||
self.ast.BaseAxis(self.cur_token_location_, vert_bases,
|
||||
vert_scripts, True))
|
||||
self.ast.BaseAxis(vert_bases,
|
||||
vert_scripts, True,
|
||||
location=self.cur_token_location_))
|
||||
elif self.cur_token_ == ";":
|
||||
continue
|
||||
|
||||
@ -1023,7 +1067,8 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.cur_token_type_ is Lexer.NAME:
|
||||
key = self.cur_token_.lower()
|
||||
value = None
|
||||
@ -1040,7 +1085,8 @@ class Parser(object):
|
||||
elif self.is_cur_keyword_("Vendor"):
|
||||
value = self.expect_string_()
|
||||
statements.append(
|
||||
self.ast.OS2Field(self.cur_token_location_, key, value))
|
||||
self.ast.OS2Field(key, value,
|
||||
location=self.cur_token_location_))
|
||||
elif self.cur_token_ == ";":
|
||||
continue
|
||||
|
||||
@ -1091,13 +1137,13 @@ class Parser(object):
|
||||
if self.next_token_type_ is Lexer.NUMBER:
|
||||
number, location = self.expect_number_(), self.cur_token_location_
|
||||
if vertical:
|
||||
val = self.ast.ValueRecord(location, vertical,
|
||||
None, None, None, number,
|
||||
None, None, None, None)
|
||||
val = self.ast.ValueRecord(yAdvance=number,
|
||||
vertical=vertical,
|
||||
location=location)
|
||||
else:
|
||||
val = self.ast.ValueRecord(location, vertical,
|
||||
None, None, number, None,
|
||||
None, None, None, None)
|
||||
val = self.ast.ValueRecord(xAdvance=number,
|
||||
vertical=vertical,
|
||||
location=location)
|
||||
return val
|
||||
self.expect_symbol_("<")
|
||||
location = self.cur_token_location_
|
||||
@ -1139,8 +1185,9 @@ class Parser(object):
|
||||
|
||||
self.expect_symbol_(">")
|
||||
return self.ast.ValueRecord(
|
||||
location, vertical, xPlacement, yPlacement, xAdvance, yAdvance,
|
||||
xPlaDevice, yPlaDevice, xAdvDevice, yAdvDevice)
|
||||
xPlacement, yPlacement, xAdvance, yAdvance,
|
||||
xPlaDevice, yPlaDevice, xAdvDevice, yAdvDevice,
|
||||
vertical=vertical, location=location)
|
||||
|
||||
def parse_valuerecord_definition_(self, vertical):
|
||||
assert self.is_cur_keyword_("valueRecordDef")
|
||||
@ -1148,7 +1195,7 @@ class Parser(object):
|
||||
value = self.parse_valuerecord_(vertical)
|
||||
name = self.expect_name_()
|
||||
self.expect_symbol_(";")
|
||||
vrd = self.ast.ValueRecordDefinition(location, name, value)
|
||||
vrd = self.ast.ValueRecordDefinition(name, value, location=location)
|
||||
self.valuerecords_.define(name, vrd)
|
||||
return vrd
|
||||
|
||||
@ -1162,7 +1209,8 @@ class Parser(object):
|
||||
raise FeatureLibError(
|
||||
'For script "DFLT", the language must be "dflt"',
|
||||
self.cur_token_location_)
|
||||
return self.ast.LanguageSystemStatement(location, script, language)
|
||||
return self.ast.LanguageSystemStatement(script, language,
|
||||
location=location)
|
||||
|
||||
def parse_feature_block_(self):
|
||||
assert self.cur_token_ == "feature"
|
||||
@ -1180,7 +1228,8 @@ class Parser(object):
|
||||
self.expect_keyword_("useExtension")
|
||||
use_extension = True
|
||||
|
||||
block = self.ast.FeatureBlock(location, tag, use_extension)
|
||||
block = self.ast.FeatureBlock(tag, use_extension=use_extension,
|
||||
location=location)
|
||||
self.parse_block_(block, vertical, stylisticset, size_feature)
|
||||
return block
|
||||
|
||||
@ -1189,7 +1238,8 @@ class Parser(object):
|
||||
location = self.cur_token_location_
|
||||
featureName = self.expect_tag_()
|
||||
self.expect_symbol_(";")
|
||||
return self.ast.FeatureReferenceStatement(location, featureName)
|
||||
return self.ast.FeatureReferenceStatement(featureName,
|
||||
location=location)
|
||||
|
||||
def parse_featureNames_(self, tag):
|
||||
assert self.cur_token_ == "featureNames", self.cur_token_
|
||||
@ -1200,13 +1250,15 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
block.statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
block.statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.is_cur_keyword_("name"):
|
||||
location = self.cur_token_location_
|
||||
platformID, platEncID, langID, string = self.parse_name_()
|
||||
block.statements.append(
|
||||
self.ast.FeatureNameStatement(location, tag, platformID,
|
||||
platEncID, langID, string))
|
||||
self.ast.FeatureNameStatement(tag, platformID,
|
||||
platEncID, langID, string,
|
||||
location=location))
|
||||
elif self.cur_token_ == ";":
|
||||
continue
|
||||
else:
|
||||
@ -1225,7 +1277,7 @@ class Parser(object):
|
||||
if version <= 0:
|
||||
raise FeatureLibError("Font revision numbers must be positive",
|
||||
location)
|
||||
return self.ast.FontRevisionStatement(location, version)
|
||||
return self.ast.FontRevisionStatement(version, location=location)
|
||||
|
||||
def parse_block_(self, block, vertical, stylisticset=None,
|
||||
size_feature=False):
|
||||
@ -1237,7 +1289,8 @@ class Parser(object):
|
||||
while self.next_token_ != "}" or self.cur_comments_:
|
||||
self.advance_lexer_(comments=True)
|
||||
if self.cur_token_type_ is Lexer.COMMENT:
|
||||
statements.append(self.ast.Comment(self.cur_token_location_, self.cur_token_))
|
||||
statements.append(self.ast.Comment(
|
||||
self.cur_token_, location=self.cur_token_location_))
|
||||
elif self.cur_token_type_ is Lexer.GLYPHCLASS:
|
||||
statements.append(self.parse_glyphclass_definition_())
|
||||
elif self.is_cur_keyword_("anchorDef"):
|
||||
@ -1311,9 +1364,10 @@ class Parser(object):
|
||||
if has_single and has_multiple:
|
||||
for i, s in enumerate(statements):
|
||||
if isinstance(s, self.ast.SingleSubstStatement):
|
||||
statements[i] = self.ast.MultipleSubstStatement(s.location,
|
||||
statements[i] = self.ast.MultipleSubstStatement(
|
||||
s.prefix, s.glyphs[0].glyphSet()[0], s.suffix,
|
||||
[r.glyphSet()[0] for r in s.replacements])
|
||||
[r.glyphSet()[0] for r in s.replacements],
|
||||
location=s.location)
|
||||
|
||||
def is_cur_keyword_(self, k):
|
||||
if self.cur_token_type_ is Lexer.NAME:
|
||||
|
@@ -438,7 +438,8 @@ class BuilderTest(unittest.TestCase):
             m = self.expect_markClass_reference_()
             marks.append(m)
         self.expect_symbol_(";")
-        return self.ast.MarkBasePosStatement(location, base, marks)
+        return self.ast.MarkBasePosStatement(base, marks,
+                                             location=location)
 
     def parseBaseClass(self):
         if not hasattr(self.doc_, 'baseClasses'):
@@ -453,7 +454,8 @@ class BuilderTest(unittest.TestCase):
         baseClass = ast_BaseClass(name)
         self.doc_.baseClasses[name] = baseClass
         self.glyphclasses_.define(name, baseClass)
-        bcdef = ast_BaseClassDefinition(location, baseClass, anchor, glyphs)
+        bcdef = ast_BaseClassDefinition(baseClass, anchor, glyphs,
+                                        location=location)
         baseClass.addDefinition(bcdef)
        return bcdef