def testharness():
    """Tokenize the N3 file named on the command line and resolve its names.

    Reads ``sys.argv[1]``, tokenizes it, expands keywords via
    ``dekeywordizer`` and prefixes via ``deprefixizer``, and returns the
    deprefixized token stream.

    Relies on module-level helpers ``deleteme`` and ``tokenize`` that are
    defined elsewhere in this file.
    """
    import sys
    import re   # fix: `re` was used below but never imported
    import dekeywordizer
    import deprefixizer

    # Regex fragments for each token class of the N3 grammar.
    explicitURI = "<[^>]*>"
    comment = '#[^\\n]*'
    numericLiteral = """[-+]?[0-9]+(\\.[0-9]+)?(e[-+]?[0-9]+)?"""
    bareName = "[a-zA-Z_][a-zA-Z0-9_]*"
    # NOTE(review): original comment here said "This is totally wrong" —
    # presumably because this pattern is unanchored at the start and the
    # character class excludes legal N3 name characters; verify against the
    # N3 grammar before relying on it.
    bareNameOnly = bareName + '$'
    qName = "(" + bareName + ")?:(" + bareName + ")?"
    variable = '\\?' + bareName
    langcode = "[a-z]+(-[a-z0-9]+)*"
    # Double- or triple-quoted string literal with backslash escapes.
    string = "(\"\"\"[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\")|(\"[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\")"
    equals = "="
    implies = "=>"
    backwards_implies = "<="
    carrot = "\\^"
    double_carrot = "\\^\\^"

    singleChars = ';,.()[]{}!'
    whiteSpace = ' \t\n'

    # Order matters: longer/more specific patterns should win in the tokenizer.
    argList = [numericLiteral, explicitURI, comment, bareName, qName,
               variable, langcode, string, equals, implies,
               backwards_implies, carrot, double_carrot]
    realArgList = [deleteme(x) for x in argList]

    # fix: `file()` was the Python-2-only builtin; `open()` is equivalent
    # and portable.
    z = tokenize(open(sys.argv[1]), singleChars, whiteSpace, realArgList)
    # fix: regex flags are bitmasks — combine with `|`, not `+`.
    y = dekeywordizer.dekeywordize(z, re.compile(bareNameOnly, re.S | re.U))
    prefixMap = {}
    return deprefixizer.deprefixize(y, prefixMap,
                                    re.compile(qName, re.S | re.U),
                                    re.compile(explicitURI, re.S | re.U))
def testharness():
    """Command-line test driver: tokenize, dekeywordize, and deprefixize
    the N3 source file given as ``sys.argv[1]``.

    NOTE(review): this is a duplicate definition of ``testharness`` — at
    import time it shadows the earlier one; consider deleting one copy.

    Depends on ``deleteme`` and ``tokenize`` defined elsewhere in this file.
    """
    import sys
    import re   # fix: `re.compile` was called below without importing `re`
    import dekeywordizer
    import deprefixizer

    # One regex per lexical token class.
    explicitURI = "<[^>]*>"
    comment = '#[^\\n]*'
    numericLiteral = """[-+]?[0-9]+(\\.[0-9]+)?(e[-+]?[0-9]+)?"""
    bareName = "[a-zA-Z_][a-zA-Z0-9_]*"
    # NOTE(review): the original flagged this area as "totally wrong" —
    # the pattern is unanchored at the start; confirm against the grammar.
    bareNameOnly = bareName + '$'
    qName = "(" + bareName + ")?:(" + bareName + ")?"
    variable = '\\?' + bareName
    langcode = "[a-z]+(-[a-z0-9]+)*"
    string = "(\"\"\"[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\")|(\"[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\")"
    equals = "="
    implies = "=>"
    backwards_implies = "<="
    carrot = "\\^"
    double_carrot = "\\^\\^"

    singleChars = ';,.()[]{}!'
    whiteSpace = ' \t\n'

    argList = [numericLiteral, explicitURI, comment, bareName, qName,
               variable, langcode, string, equals, implies,
               backwards_implies, carrot, double_carrot]
    realArgList = [deleteme(x) for x in argList]

    # fix: replaced Python-2-only `file()` with the portable `open()`.
    z = tokenize(open(sys.argv[1]), singleChars, whiteSpace, realArgList)
    # fix: combine regex flags with `|` (bitwise or), not arithmetic `+`.
    y = dekeywordizer.dekeywordize(z, re.compile(bareNameOnly, re.S | re.U))
    prefixMap = {}
    return deprefixizer.deprefixize(y, prefixMap,
                                    re.compile(qName, re.S | re.U),
                                    re.compile(explicitURI, re.S | re.U))
def feed(self, octets):
    """Parse one chunk of UTF-8 encoded N3 input.

    Decodes *octets*, runs the module-level tokenizer pipeline
    (tokenize -> dekeywordize -> deprefixize), and hands the resulting
    token stream to ``self._parse``.
    """
    text = octets.decode('utf_8')
    tokens = tokenizer.tokenize(text, singleChars, whiteSpace, realArgList)
    dekeyworded = dekeywordizer.dekeywordize(tokens, bareNameOnly)
    resolved = deprefixizer.deprefixize(dekeyworded, self._baseURI,
                                        self._bindings, qName, explicitURI,
                                        self._tripleMaker.bind)
    self._parse(resolved)