def check_text_tags(s):
    """
    Checks the text tags in `s` for correctness. Returns an error string if
    there is an error, or None if there is no error.
    """

    # Tokenizing can itself raise on malformed tag syntax; report that as a
    # lint error string rather than letting the exception escape, matching
    # the behavior of the newer revisions of this function.
    try:
        tokens = textsupport.tokenize(unicode(s))
    except Exception as e:
        return e.args[0]

    # Stack of currently-open tags that require a matching close tag.
    tag_stack = [ ]

    for type, text in tokens: #@ReservedAssignment
        if type != TAG:
            continue

        # Strip off arguments for tags.
        if text.find('=') != -1:
            text = text[:text.find('=')]

        # Closing tag - must match the innermost open tag.
        if text and text[0] == '/':
            if not tag_stack:
                return "Close text tag '%s' does not match an open text tag." % text

            if tag_stack[-1] != text[1:]:
                return "Close text tag '%s' does not match open text tag '%s'." % (text, tag_stack[-1])

            tag_stack.pop()
            continue

        if text not in text_tags:
            return "Text tag '%s' is not known." % text

        # text_tags[text] is truthy when the tag requires a close tag.
        if text_tags[text]:
            tag_stack.append(text)

    if tag_stack:
        return "One or more text tags were left open at the end of the string: " + ", ".join([ "'" + i + "'" for i in tag_stack])

    return None
def check_text_tags(s):
    """
    :doc: lint

    Checks the text tags in s for correctness. Returns an error string if there is
    an error, or None if there is no error.
    """

    # All known tags: built-ins plus any registered custom tags. The value
    # records whether the tag requires a matching close tag.
    all_tags = dict(text_tags)

    custom_tags = renpy.config.custom_text_tags
    if custom_tags:
        all_tags.update(custom_tags)

    self_closing_custom_tags = renpy.config.self_closing_custom_text_tags
    if self_closing_custom_tags:
        # Self-closing tags never require a close tag.
        all_tags.update(dict.fromkeys(self_closing_custom_tags, False))

    # Tokenization errors are themselves lint errors.
    try:
        tokens = textsupport.tokenize(unicode(s))
    except Exception as e:
        return e.args[0]

    tag_stack = []

    for type, text in tokens: # @ReservedAssignment
        if type != TAG:
            continue

        # Comment tag - ignore. startswith avoids the IndexError that
        # text[0] would raise on an empty tag token; the closing-tag
        # check below already guards with `if text and ...`.
        if text.startswith("#"):
            continue

        # Strip off arguments for tags.
        if text.find('=') != -1:
            text = text[:text.find('=')]

        # Closing tag - must match the innermost open tag.
        if text and text[0] == '/':
            if not tag_stack:
                return "Close text tag '%s' does not match an open text tag." % text

            if tag_stack[-1] != text[1:]:
                return "Close text tag '%s' does not match open text tag '%s'." % (
                    text, tag_stack[-1])

            tag_stack.pop()
            continue

        if text not in all_tags:
            return "Text tag '%s' is not known." % text

        if all_tags[text]:
            tag_stack.append(text)

    if tag_stack:
        return "One or more text tags were left open at the end of the string: " + ", ".join(
            ["'" + i + "'" for i in tag_stack])

    return None
def check_text_tags(s):
    """
    :doc: lint

    Checks the text tags in s for correctness. Returns an error string if there is
    an error, or None if there is no error.
    """

    # Merge custom tags into the known-tag table only when needed, so the
    # common case avoids copying the built-in table.
    custom_tags = renpy.config.custom_text_tags

    if custom_tags:
        all_tags = dict(text_tags)
        all_tags.update(renpy.config.custom_text_tags)
    else:
        all_tags = text_tags

    # Tokenization errors are themselves lint errors.
    try:
        tokens = textsupport.tokenize(unicode(s))
    except Exception as e:
        return e.args[0]

    tag_stack = [ ]

    for type, text in tokens: # @ReservedAssignment
        if type != TAG:
            continue

        # Comment tag - ignore. startswith avoids the IndexError that
        # text[0] would raise on an empty tag token; the closing-tag
        # check below already guards with `if text and ...`.
        if text.startswith("#"):
            continue

        # Strip off arguments for tags.
        if text.find('=') != -1:
            text = text[:text.find('=')]

        # Closing tag - must match the innermost open tag.
        if text and text[0] == '/':
            if not tag_stack:
                return "Close text tag '%s' does not match an open text tag." % text

            if tag_stack[-1] != text[1:]:
                return "Close text tag '%s' does not match open text tag '%s'." % (text, tag_stack[-1])

            tag_stack.pop()
            continue

        if text not in all_tags:
            return "Text tag '%s' is not known." % text

        if all_tags[text]:
            tag_stack.append(text)

    if tag_stack:
        return "One or more text tags were left open at the end of the string: " + ", ".join([ "'" + i + "'" for i in tag_stack])

    return None
def tokenize(self, text):
    """
    Convert the text into a list of tokens.
    """

    rv = []

    for item in text:

        # Strings are handed to the text tokenizer (byte strings are
        # decoded first); displayables become a single DISPLAYABLE token.
        if isinstance(item, unicode):
            converted = item
        elif isinstance(item, str):
            converted = unicode(item)
        elif isinstance(item, renpy.display.core.Displayable):
            rv.append((DISPLAYABLE, item))
            continue
        else:
            raise Exception("Can't display {0!r} as Text.".format(item))

        rv.extend(textsupport.tokenize(converted))

    return rv
def tokenize(self, text):
    """
    Convert the text into a list of tokens.
    """

    tokens = [ ]

    for segment in text:

        # Compute the tokens contributed by this segment, then append
        # them all at once.
        if isinstance(segment, unicode):
            pieces = textsupport.tokenize(segment)
        elif isinstance(segment, str):
            pieces = textsupport.tokenize(unicode(segment))
        elif isinstance(segment, renpy.display.core.Displayable):
            pieces = [ (DISPLAYABLE, segment) ]
        else:
            raise Exception("Can't display {0!r} as Text.".format(segment))

        tokens.extend(pieces)

    return tokens
def filter_text_tags(s, allow=None, deny=None):
    """
    :doc: text_utility

    Returns a copy of `s` with the text tags filtered. Exactly one of the
    `allow` and `deny` keyword arguments must be given.

    `allow`
        A set of tags that are allowed. If a tag is not in this list, it is
        removed.

    `deny`
        A set of tags that are denied. If a tag is not in this list, it is
        kept in the string.
    """

    # Exactly one of allow/deny must be supplied. The equality is true both
    # when neither was given and when both were, collapsing the two
    # previously-duplicated checks into one.
    if (allow is None) == (deny is None):
        raise Exception(
            "Only one of the allow and deny keyword arguments should be given to filter_text_tags."
        )

    tokens = textsupport.tokenize(str(s))

    rv = []

    for tokentype, text in tokens:

        if tokentype == PARAGRAPH:
            rv.append("\n")
        elif tokentype == TAG:
            kind = text.partition("=")[0]

            # A close tag shares the fate of its open tag.
            if kind and (kind[0] == "/"):
                kind = kind[1:]

            if allow is not None:
                if kind in allow:
                    rv.append("{" + text + "}")
            else:
                if kind not in deny:
                    rv.append("{" + text + "}")
        else:
            rv.append(text)

    return "".join(rv)
def filter_text_tags(s, allow=None, deny=None):
    """
    :doc: text_utility

    Returns a copy of `s` with the text tags filtered. Exactly one of the
    `allow` and `deny` keyword arguments must be given.

    `allow`
        A set of tags that are allowed. If a tag is not in this list, it is
        removed.

    `deny`
        A set of tags that are denied. If a tag is not in this list, it is
        kept in the string.
    """

    # True both when neither argument was given and when both were - the
    # caller must supply exactly one.
    if (allow is None) == (deny is None):
        raise Exception("Only one of the allow and deny keyword arguments should be given to filter_text_tags.")

    result = [ ]

    for tokentype, text in textsupport.tokenize(unicode(s)):

        if tokentype == PARAGRAPH:
            result.append("\n")
            continue

        if tokentype != TAG:
            result.append(text)
            continue

        # Tag name with any argument stripped; a close tag is judged by
        # the name of the tag it closes.
        name = text.partition("=")[0]

        if name.startswith("/"):
            name = name[1:]

        keep = (name in allow) if (allow is not None) else (name not in deny)

        if keep:
            result.append("{" + text + "}")

    return "".join(result)
def filter_alt_text(s):
    """
    Returns a copy of `s` with the contents of text tags that shouldn't be in
    alt text filtered. This returns just the text to say, with no text tags
    at all in it.
    """

    tokens = textsupport.tokenize(str(s))

    # Expand custom tags first, so their expansions are filtered like any
    # other tokens.
    needs_custom = (
        renpy.config.custom_text_tags
        or renpy.config.self_closing_custom_text_tags
        or (renpy.config.replace_text is not None))

    if needs_custom:
        tokens = renpy.text.text.Text.apply_custom_tags(tokens)

    pieces = [ ]

    # The set of currently-open tags whose contents should not be spoken.
    suppressing = set()

    for tokentype, text in tokens:

        if tokentype == PARAGRAPH:
            pieces.append("\n")
            continue

        if tokentype != TAG:
            # Plain text is kept unless it is inside a filtered tag.
            if not suppressing:
                pieces.append(text)
            continue

        name = text.partition("=")[0]

        closing = name.startswith("/")
        if closing:
            name = name[1:]

        # Only tags in tts_filter_tags affect the output; all other tags
        # are silently dropped from the spoken text.
        if name in renpy.config.tts_filter_tags:
            if closing:
                suppressing.discard(name)
            else:
                suppressing.add(name)

    return "".join(pieces)