Example #1
 def parse_def_script_(self):
     assert self.is_cur_keyword_("DEF_SCRIPT")
     location = self.cur_token_location_
     name = None
     if self.next_token_ == "NAME":
         self.expect_keyword_("NAME")
         name = self.expect_string_()
     self.expect_keyword_("TAG")
     tag = self.expect_string_()
     if self.scripts_.resolve(tag) is not None:
         raise VoltLibError(
             'Script "%s" already defined, '
             'script tags are case insensitive' % tag, location)
     self.langs_.enter_scope()
     langs = []
     while self.next_token_ != "END_SCRIPT":
         self.advance_lexer_()
         lang = self.parse_langsys_()
         self.expect_keyword_("END_LANGSYS")
         if self.langs_.resolve(lang.tag) is not None:
             raise VoltLibError(
                 'Language "%s" already defined in script "%s", '
                 'language tags are case insensitive' % (lang.tag, tag),
                 location)
         self.langs_.define(lang.tag, lang)
         langs.append(lang)
     self.expect_keyword_("END_SCRIPT")
     self.langs_.exit_scope()
     def_script = ast.ScriptDefinition(name, tag, langs, location=location)
     self.scripts_.define(tag, def_script)
     return def_script
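
A minimal driving sketch (an addition, not part of the original listing): the DEF_SCRIPT block below follows the keyword sequence consumed by parse_def_script_ above, and it assumes fontTools.voltLib.parser.Parser accepts a path to a .vtp source file.

# Hypothetical usage sketch; Parser(path) and the .vtp shape are assumptions
# inferred from the parsing code above, not quoted from VOLT documentation.
import tempfile
from fontTools.voltLib.parser import Parser

VTP_SOURCE = '''DEF_SCRIPT NAME "Latin" TAG "latn"
DEF_LANGSYS NAME "Default" TAG "dflt"
END_LANGSYS
END_SCRIPT
END
'''

with tempfile.NamedTemporaryFile("w", suffix=".vtp", delete=False) as f:
    f.write(VTP_SOURCE)
    path = f.name

doc = Parser(path).parse()
print(doc.statements)  # expect a single ScriptDefinition for script tag "latn"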
Example #2
 def parse_substitution_(self, reversal):
     assert self.is_cur_keyword_("AS_SUBSTITUTION")
     location = self.cur_token_location_
     src = []
     dest = []
     if self.next_token_ != "SUB":
         raise VoltLibError("Expected SUB", location)
     while self.next_token_ == "SUB":
         self.expect_keyword_("SUB")
         src.append(self.parse_coverage_())
         self.expect_keyword_("WITH")
         dest.append(self.parse_coverage_())
         self.expect_keyword_("END_SUB")
     self.expect_keyword_("END_SUBSTITUTION")
     max_src = max([len(cov) for cov in src])
     max_dest = max([len(cov) for cov in dest])
     # many to many or mixed is invalid
     if ((max_src > 1 and max_dest > 1)
             or (reversal and (max_src > 1 or max_dest > 1))):
         raise VoltLibError("Invalid substitution type", location)
     mapping = OrderedDict(zip(tuple(src), tuple(dest)))
     if max_src == 1 and max_dest == 1:
         if reversal:
             sub = ast.SubstitutionReverseChainingSingleDefinition(
                 mapping, location=location)
         else:
             sub = ast.SubstitutionSingleDefinition(mapping,
                                                    location=location)
     elif max_src == 1 and max_dest > 1:
         sub = ast.SubstitutionMultipleDefinition(mapping,
                                                  location=location)
     elif max_src > 1 and max_dest == 1:
         sub = ast.SubstitutionLigatureDefinition(mapping,
                                                  location=location)
     return sub
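
The branch structure above classifies a substitution by the longest source and destination coverages. A plain-Python sketch of that classification, for illustration only (it does not call fontTools):

# Illustrative only: mirrors the decision logic of parse_substitution_ above.
def classify_substitution(max_src, max_dest, reversal=False):
    if (max_src > 1 and max_dest > 1) or (reversal and (max_src > 1 or max_dest > 1)):
        return "invalid"          # many-to-many, or reverse chaining with sequences
    if max_src == 1 and max_dest == 1:
        return "reverse-single" if reversal else "single"
    if max_src == 1:              # one glyph replaced by a sequence of glyphs
        return "multiple"
    return "ligature"             # a sequence of glyphs replaced by one glyph

assert classify_substitution(1, 1) == "single"      # e.g. a -> a.alt
assert classify_substitution(2, 1) == "ligature"    # e.g. f i -> f_i
assert classify_substitution(1, 2) == "multiple"    # e.g. ij -> i j
assert classify_substitution(2, 2) == "invalid"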
Example #3
 def parse_def_anchor_(self):
     assert self.is_cur_keyword_("DEF_ANCHOR")
     location = self.cur_token_location_
     name = self.expect_string_()
     self.expect_keyword_("ON")
     gid = self.expect_number_()
     self.expect_keyword_("GLYPH")
     glyph_name = self.expect_name_()
     # check for duplicate anchor names on this glyph
     if (glyph_name in self.anchors_
             and self.anchors_[glyph_name].resolve(name) is not None):
         raise VoltLibError(
             'Anchor "%s" already defined, '
             'anchor names are case insensitive' % name,
             location
         )
     self.expect_keyword_("COMPONENT")
     component = self.expect_number_()
     if self.next_token_ == "LOCKED":
         locked = True
         self.advance_lexer_()
     else:
         locked = False
     self.expect_keyword_("AT")
     pos = self.parse_pos_()
     self.expect_keyword_("END_ANCHOR")
     anchor = ast.AnchorDefinition(name, gid, glyph_name, component,
                                   locked, pos, location=location)
     if glyph_name not in self.anchors_:
         self.anchors_[glyph_name] = SymbolTable()
     self.anchors_[glyph_name].define(name, anchor)
     return anchor
Example #4
 def parse(self):
     statements = self.doc_.statements
     while self.next_token_type_ is not None:
         self.advance_lexer_()
         if self.cur_token_ in PARSE_FUNCS:
             func = getattr(self, PARSE_FUNCS[self.cur_token_])
             statements.append(func())
         elif self.is_cur_keyword_("END"):
             if self.next_token_type_ is not None:
                 raise VoltLibError("Expected the end of the file",
                                    self.cur_token_location_)
             return self.doc_
         else:
             raise VoltLibError(
                 "Expected " + ", ".join(sorted(PARSE_FUNCS.keys())),
                 self.cur_token_location_)
     return self.doc_
Example #5
 def glyphSet(self, groups=None):
     group = self.parser_.resolve_group(self.group)
     if group is not None:
         self.glyphs_ = group.glyphSet(groups)
         return self.glyphs_
     else:
         raise VoltLibError(
             'Group "%s" is used but undefined.' % (self.group),
             self.location)
Example #6
    def next_(self):
        self.scan_over_(Lexer.CHAR_WHITESPACE_)
        column = self.pos_ - self.line_start_ + 1
        location = (self.filename_, self.line_, column)
        start = self.pos_
        text = self.text_
        limit = len(text)
        if start >= limit:
            raise StopIteration()
        cur_char = text[start]
        next_char = text[start + 1] if start + 1 < limit else None

        if cur_char == "\n":
            self.pos_ += 1
            self.line_ += 1
            self.line_start_ = self.pos_
            return (Lexer.NEWLINE, None, location)
        if cur_char == "\r":
            self.pos_ += (2 if next_char == "\n" else 1)
            self.line_ += 1
            self.line_start_ = self.pos_
            return (Lexer.NEWLINE, None, location)
        if cur_char == '"':
            self.pos_ += 1
            self.scan_until_('"\r\n')
            if self.pos_ < self.text_length_ and self.text_[self.pos_] == '"':
                self.pos_ += 1
                return (Lexer.STRING, text[start + 1:self.pos_ - 1], location)
            else:
                raise VoltLibError("Expected '\"' to terminate string",
                                   location)
        if cur_char in Lexer.CHAR_NAME_START_:
            self.pos_ += 1
            self.scan_over_(Lexer.CHAR_NAME_CONTINUATION_)
            token = text[start:self.pos_]
            return (Lexer.NAME, token, location)
        if cur_char in Lexer.CHAR_DIGIT_:
            self.scan_over_(Lexer.CHAR_DIGIT_)
            return (Lexer.NUMBER, int(text[start:self.pos_], 10), location)
        if cur_char == "-" and next_char in Lexer.CHAR_DIGIT_:
            self.pos_ += 1
            self.scan_over_(Lexer.CHAR_DIGIT_)
            return (Lexer.NUMBER, int(text[start:self.pos_], 10), location)
        raise VoltLibError("Unexpected character: '%s'" % cur_char, location)
Example #7
 def parse_unicode_values_(self):
     location = self.cur_token_location_
     try:
         unicode_values = self.expect_string_().split(",")
         unicode_values = [
             int(uni[2:], 16) for uni in unicode_values if uni != ""
         ]
     except ValueError as err:
         raise VoltLibError(str(err), location)
     return unicode_values if unicode_values != [] else None
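
A worked example of the conversion above, in plain Python and independent of the parser: a UNICODEVALUES string such as "U+0041,U+0061" becomes a list of code points, because uni[2:] strips the "U+" prefix before the base-16 parse.

values = "U+0041,U+0061".split(",")
values = [int(uni[2:], 16) for uni in values if uni != ""]
assert values == [0x41, 0x61]  # i.e. [65, 97]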
Example #8
 def glyphSet(self, groups=None):
     if groups is not None and self.name in groups:
         raise VoltLibError('Group "%s" contains itself.' % (self.name),
                            self.location)
     if self.glyphs_ is None:
         if groups is None:
             groups = {self.name}
         else:
             groups.add(self.name)
         self.glyphs_ = self.enum.glyphSet(groups)
     return self.glyphs_
Example #9
 def parse_def_glyph_(self):
     assert self.is_cur_keyword_("DEF_GLYPH")
     location = self.cur_token_location_
     name = self.expect_string_()
     self.expect_keyword_("ID")
     gid = self.expect_number_()
     if gid < 0:
         raise VoltLibError("Invalid glyph ID", self.cur_token_location_)
     gunicode = None
     if self.next_token_ == "UNICODE":
         self.expect_keyword_("UNICODE")
         gunicode = [self.expect_number_()]
         if gunicode[0] < 0:
             raise VoltLibError("Invalid glyph UNICODE",
                                self.cur_token_location_)
     elif self.next_token_ == "UNICODEVALUES":
         self.expect_keyword_("UNICODEVALUES")
         gunicode = self.parse_unicode_values_()
     gtype = None
     if self.next_token_ == "TYPE":
         self.expect_keyword_("TYPE")
         gtype = self.expect_name_()
         assert gtype in ("BASE", "LIGATURE", "MARK", "COMPONENT")
     components = None
     if self.next_token_ == "COMPONENTS":
         self.expect_keyword_("COMPONENTS")
         components = self.expect_number_()
     self.expect_keyword_("END_GLYPH")
     if self.glyphs_.resolve(name) is not None:
         raise VoltLibError(
             'Glyph "%s" (gid %i) already defined' % (name, gid), location)
     def_glyph = ast.GlyphDefinition(name,
                                     gid,
                                     gunicode,
                                     gtype,
                                     components,
                                     location=location)
     self.glyphs_.define(name, def_glyph)
     return def_glyph
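
The DEF_GLYPH statement shape implied by the keyword checks above, reconstructed as a readability aid (hypothetical sample statements, not quoted from VOLT documentation):

DEF_GLYPH_EXAMPLES = [
    'DEF_GLYPH "space" ID 3 UNICODE 32 TYPE BASE END_GLYPH',
    'DEF_GLYPH "f_i" ID 320 UNICODEVALUES "U+FB01" TYPE LIGATURE COMPONENTS 2 END_GLYPH',
    'DEF_GLYPH ".notdef" ID 0 END_GLYPH',  # UNICODE, TYPE and COMPONENTS are all optional
]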
Example #10
 def parse_def_group_(self):
     assert self.is_cur_keyword_("DEF_GROUP")
     location = self.cur_token_location_
     name = self.expect_string_()
     enum = None
     if self.next_token_ == "ENUM":
         enum = self.parse_enum_()
     self.expect_keyword_("END_GROUP")
     if self.groups_.resolve(name) is not None:
         raise VoltLibError(
             'Glyph group "%s" already defined, '
             'group names are case insensitive' % name, location)
     def_group = ast.GroupDefinition(name, enum, location=location)
     self.groups_.define(name, def_group)
     return def_group
Example #11
 def parse_position_(self):
     assert self.is_cur_keyword_("AS_POSITION")
     location = self.cur_token_location_
     pos_type = self.expect_name_()
     if pos_type not in ("ATTACH", "ATTACH_CURSIVE", "ADJUST_PAIR",
                         "ADJUST_SINGLE"):
         raise VoltLibError(
             "Expected ATTACH, ATTACH_CURSIVE, ADJUST_PAIR, ADJUST_SINGLE",
             location)
     if pos_type == "ATTACH":
         position = self.parse_attach_()
     elif pos_type == "ATTACH_CURSIVE":
         position = self.parse_attach_cursive_()
     elif pos_type == "ADJUST_PAIR":
         position = self.parse_adjust_pair_()
     elif pos_type == "ADJUST_SINGLE":
         position = self.parse_adjust_single_()
     self.expect_keyword_("END_POSITION")
     return position
Example #12
 def expect_number_(self):
     self.advance_lexer_()
     if self.cur_token_type_ is not Lexer.NUMBER:
         raise VoltLibError("Expected a number", self.cur_token_location_)
     return self.cur_token_
Example #13
 def expect_name_(self):
     self.advance_lexer_()
     if self.cur_token_type_ is Lexer.NAME:
         return self.cur_token_
     raise VoltLibError("Expected a name", self.cur_token_location_)
Example #14
 def expect_keyword_(self, keyword):
     self.advance_lexer_()
     if self.cur_token_type_ is Lexer.NAME and self.cur_token_ == keyword:
         return self.cur_token_
     raise VoltLibError("Expected \"%s\"" % keyword,
                        self.cur_token_location_)
Example #15
 def expect_string_(self):
     self.advance_lexer_()
     if self.cur_token_type_ is not Lexer.STRING:
         raise VoltLibError("Expected a string", self.cur_token_location_)
     return self.cur_token_
Example #16
 def parse_def_lookup_(self):
     assert self.is_cur_keyword_("DEF_LOOKUP")
     location = self.cur_token_location_
     name = self.expect_string_()
     if not name[0].isalpha():
         raise VoltLibError(
             'Lookup name "%s" must start with a letter' % name, location)
     if self.lookups_.resolve(name) is not None:
         raise VoltLibError(
             'Lookup "%s" already defined, '
             'lookup names are case insensitive' % name, location)
     process_base = True
     if self.next_token_ == "PROCESS_BASE":
         self.advance_lexer_()
     elif self.next_token_ == "SKIP_BASE":
         self.advance_lexer_()
         process_base = False
     process_marks = True
     mark_glyph_set = None
     if self.next_token_ == "PROCESS_MARKS":
         self.advance_lexer_()
         if self.next_token_ == "MARK_GLYPH_SET":
             self.advance_lexer_()
             mark_glyph_set = self.expect_string_()
         elif self.next_token_type_ == Lexer.STRING:
             process_marks = self.expect_string_()
         elif self.next_token_ == "ALL":
             self.advance_lexer_()
         else:
             raise VoltLibError(
                 "Expected ALL, MARK_GLYPH_SET or an ID. "
                 "Got %s" % (self.next_token_type_), location)
     elif self.next_token_ == "SKIP_MARKS":
         self.advance_lexer_()
         process_marks = False
     direction = None
     if self.next_token_ == "DIRECTION":
         self.expect_keyword_("DIRECTION")
         direction = self.expect_name_()
         assert direction in ("LTR", "RTL")
     reversal = None
     if self.next_token_ == "REVERSAL":
         self.expect_keyword_("REVERSAL")
         reversal = True
     comments = None
     if self.next_token_ == "COMMENTS":
         self.expect_keyword_("COMMENTS")
         comments = self.expect_string_()
     context = []
     while self.next_token_ in ("EXCEPT_CONTEXT", "IN_CONTEXT"):
         context = self.parse_context_()
     as_pos_or_sub = self.expect_name_()
     sub = None
     pos = None
     if as_pos_or_sub == "AS_SUBSTITUTION":
         sub = self.parse_substitution_(reversal)
     elif as_pos_or_sub == "AS_POSITION":
         pos = self.parse_position_()
     else:
         raise VoltLibError(
             "Expected AS_SUBSTITUTION or AS_POSITION. "
             "Got %s" % (as_pos_or_sub), location)
     def_lookup = ast.LookupDefinition(name,
                                       process_base,
                                       process_marks,
                                       mark_glyph_set,
                                       direction,
                                       reversal,
                                       comments,
                                       context,
                                       sub,
                                       pos,
                                       location=location)
     self.lookups_.define(name, def_lookup)
     return def_lookup
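
The clause order accepted by parse_def_lookup_, reconstructed from the checks above as a readability aid (not quoted from VOLT documentation):

DEF_LOOKUP_SHAPE = [
    'DEF_LOOKUP "<name>"',                     # name must start with a letter
    "PROCESS_BASE | SKIP_BASE",                # optional, defaults to PROCESS_BASE
    'PROCESS_MARKS [MARK_GLYPH_SET "<set>" | "<group>" | ALL] | SKIP_MARKS',  # optional
    "DIRECTION LTR | RTL",                     # optional
    "REVERSAL",                                # optional
    'COMMENTS "<text>"',                       # optional
    "IN_CONTEXT ... | EXCEPT_CONTEXT ...",     # zero or more context blocks
    "AS_SUBSTITUTION ... | AS_POSITION ...",   # exactly one body
]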