Example #1
def processUpdate(graph, updateString, initBindings={}, initNs={}, base=None):
    """
    Process a SPARQL Update Request
    returns Nothing on success or raises Exceptions on error
    """
    evalUpdate(graph, translateUpdate(parseUpdate(updateString), base, initNs),
               initBindings)
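
A minimal usage sketch of the helper above (assuming rdflib is installed and processUpdate is defined alongside the usual rdflib.plugins.sparql imports shown here):

from rdflib import Graph, URIRef

g = Graph()
processUpdate(g, "INSERT DATA { <urn:s> <urn:p> <urn:o> }")
# the inserted triple should now be in the graph
assert (URIRef("urn:s"), URIRef("urn:p"), URIRef("urn:o")) in g
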
Example #2
def processUpdate(graph, updateString, initBindings={}, initNs={}, base=None):
    """
    Process a SPARQL Update Request
    returns Nothing on success or raises Exceptions on error
    """
    evalUpdate(graph, translateUpdate(
        parseUpdate(updateString), base, initNs), initBindings)
Example #3
    def testUpdateWith(self):
        parsed_query = configure_update_dataset(parseUpdate(self.update_with), [], [])
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 3)
        self.assertTrue('withClause' in parsed_query.request[0])
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])
        self.assertEqual(parsed_query.request[0]['withClause'], URIRef('http://example.org/with/'))

        self.assertRaises(SparqlProtocolError,
                          configure_update_dataset, parseUpdate(self.update_with), [], ['urn:named'])

        self.assertRaises(SparqlProtocolError,
                          configure_update_dataset, parseUpdate(self.update_with), ['urn:default'], [])

        self.assertRaises(SparqlProtocolError,
                          configure_update_dataset, parseUpdate(self.update_with), ['urn:default'], ['urn:named'])
Example #4
 def expanded():
     template = template_func()
     updateString = template.format(csv = CSV,
                                    xsd = XSD,
                                    rdf = RDF, 
                                    rdfs = RDFS, 
                                    prov = PROV, 
                                    foaf = FOAF,
                                    sepake = SEPAKE)
     return translateUpdate(parseUpdate(updateString), None, {})
Example #5
    def testUpdateUsingNamed(self):
        parsed_query = configure_update_dataset(parseUpdate(self.update_named), [], [])
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 3)
        self.assertTrue('using' in parsed_query.request[0])
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])
        self.assertEqual(len(parsed_query.request[0]['using']), 1)
        self.assertEqual(
            parsed_query.request[0]['using'][0]['named'], URIRef('http://example.org/named/'))

        self.assertRaises(SparqlProtocolError,
                          configure_update_dataset, parseUpdate(self.update_named), [], ['urn:named'])

        self.assertRaises(SparqlProtocolError,
                          configure_update_dataset, parseUpdate(self.update_named), ['urn:default'], [])

        self.assertRaises(SparqlProtocolError,
                          configure_update_dataset, parseUpdate(self.update_named), ['urn:default'], ['urn:named'])
Example #6
 def __init__(self,query):
     self.query = query
     with open(query, 'r') as myfile:
       self.text = myfile.read()
     self.text = tokenSubstitute(self.text)
     self.update = False
     if re.search('INSERT', self.text, re.IGNORECASE) or re.search('DELETE', self.text, re.IGNORECASE):
         self.update = True
         self.compiled = translateUpdate(parseUpdate(self.text))
     else:
         self.compiled = prepareQuery(self.text)
     QUERYDEFS[self.query] = self
Example #7
    def testUpdate(self):
        parsed_query = configure_update_dataset(parseUpdate(self.update), [], [])
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 2)
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])

        parsed_query = configure_update_dataset(parseUpdate(self.update), None, None)
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 2)
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])

        parsed_query = configure_update_dataset(parseUpdate(self.update), [], None)
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 2)
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])

        parsed_query = configure_update_dataset(parseUpdate(self.update), None, [])
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 2)
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])

        parsed_query = configure_update_dataset(parseUpdate(self.update), ['urn:default'], [])
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 3)
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])
        self.assertTrue('using' in parsed_query.request[0])
        self.assertEqual(len(parsed_query.request[0]['using']), 1)
        self.assertEqual(parsed_query.request[0]['using'][0]['default'], URIRef('urn:default'))

        parsed_query = configure_update_dataset(parseUpdate(self.update), [], ['urn:named'])
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 3)
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])
        self.assertTrue('using' in parsed_query.request[0])
        self.assertEqual(len(parsed_query.request[0]['using']), 1)
        self.assertEqual(parsed_query.request[0]['using'][0]['named'], URIRef('urn:named'))

        parsed_query = configure_update_dataset(parseUpdate(self.update), ['urn:default'], ['urn:named'])
        self.assertEqual(len(parsed_query.request), 1)
        self.assertEqual(len(parsed_query.request[0]), 3)
        self.assertTrue('insert' in parsed_query.request[0])
        self.assertTrue('where' in parsed_query.request[0])
        self.assertTrue('using' in parsed_query.request[0])
        self.assertEqual(len(parsed_query.request[0]['using']), 2)
        self.assertEqual(parsed_query.request[0]['using'][0]['default'], URIRef('urn:default'))
        self.assertEqual(parsed_query.request[0]['using'][1]['named'], URIRef('urn:named'))
Example #8
def parse_update_type(query, base=None, default_graph=[], named_graph=[]):
    """Parse an update and add default and named graph uri if possible."""
    try:
        parsed_update = parseUpdate(query)
        parsed_update = configure_update_dataset(parsed_update, default_graph, named_graph)
        translated_update = translateUpdate(parsed_update, base=base)
    except ParseException:
        raise UnSupportedQuery()
    except SparqlProtocolError as e:
        raise e

    if base is not None and not isAbsoluteUri(base):
        raise NonAbsoluteBaseError()

    if not is_valid_update_base(parsed_update):
        raise NonAbsoluteBaseError()

    return parsed_update.request[0].name, translated_update
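
The first element of the returned tuple is the operation name taken straight from rdflib's parse tree; a small hedged check with plain rdflib (none of the surrounding helpers are needed):

from rdflib.plugins.sparql.parser import parseUpdate

parsed = parseUpdate("INSERT DATA { <urn:s> <urn:p> <urn:o> }")
print(parsed.request[0].name)  # expected: InsertData
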
Example #9
def parse_body(body):
    subjects = []
    try:
        for block in translateUpdate(parseUpdate(body)):
            for key in block.keys():
                if key in ['delete', 'insert']:
                    subjects += get_subjects_from_quads(block[key]['quads'])
                    subjects += get_subjects_from_triples(
                        block[key]['triples'])
                if key in ['quads']:
                    subjects += get_subjects_from_quads(block['quads'])
                if key in ['triples']:
                    subjects += get_subjects_from_triples(block['triples'])
    except (RecursionError, ParseException):
        # Swallow a parse error, since the SPARQL made it to Neptune
        logger.error("SPARQL ERROR PARSING: %s", body)

    return subjects
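
The block structure walked by parse_body can be inspected directly; a sketch assuming an rdflib version in which translateUpdate returns a plain list of operations, as the function above expects:

from rdflib.plugins.sparql.algebra import translateUpdate
from rdflib.plugins.sparql.parser import parseUpdate

ops = translateUpdate(parseUpdate("INSERT DATA { <urn:s> <urn:p> <urn:o> }"))
for block in ops:
    # e.g. an InsertData block carrying 'triples' and 'quads' keys
    print(block.name, sorted(block.keys()))
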
Example #10
def parse_update_type(query, base=None, default_graph=[], named_graph=[]):
    """Parse an update and add default and named graph uri if possible."""
    try:
        parsed_update = parseUpdate(query)
        parsed_update = configure_update_dataset(parsed_update, default_graph,
                                                 named_graph)
        translated_update = translateUpdate(parsed_update, base=base)
    except ParseException:
        raise UnSupportedQuery()
    except SparqlProtocolError as e:
        raise e

    if base is not None and not isAbsoluteUri(base):
        raise NonAbsoluteBaseError()

    if not is_valid_update_base(parsed_update):
        raise NonAbsoluteBaseError()

    return parsed_update.request[0].name, translated_update
Example #11
def parse_query(query):

    query = query.replace('INSERT', 'INSERT DATA', 1)
    query = query.replace('insert', 'INSERT DATA', 1)

    parsed = parser.parseUpdate(query)
    prefix_list = _construct_prefix_list(parsed['prologue'][0])

    parsed_triplets = []

    for triple_set in parsed['request'][0]['quads']['triples']:
        triples = []
        for triple_unit in triple_set:
            value = _get_value(triple_unit, prefix_list)
            triples.append(value)
        parsed_triplets += [
            triples[i:i + 3] for i in xrange(0, len(triples), 3)
        ]

    return parsed_triplets
Example #12
    def local():
        from rdflib.plugins.sparql.parser import parseUpdate
        from rdflib.plugins.sparql.algebra import translateUpdate

        graph = update_host
        update_str = q

        parsetree = parseUpdate(update_str)
        query = translateUpdate(parsetree)
        try:
            context_aware = list(query[0].get('delete', {}).get('quads', {}).keys())
        except AttributeError:
            context_aware = None
        if context_aware:
            update_str = re.sub(r'{(.*)GRAPH(.*)WHERE', 'WHERE', update_str)
            delete_graph = graph.get_context(URIRef(context_aware[0]))
        else:
            delete_graph = graph

        delete_graph.update(update_str)
        if not len(delete_graph):
            graph.remove_context(delete_graph)
Example #13
def parse_query_type(query, base=None):
    try:
        parsedQuery = parseQuery(query)
        translatedQuery = translateQuery(parsedQuery, base=base)
        # Check if BASE is absolute http(s) URI
        for value in parsedQuery[0]:
            if value.name == 'Base' and not isAbsoluteUri(value.iri):
                raise UnSupportedQueryType()
        return translatedQuery.algebra.name, translatedQuery
    except ParseException:
        pass

    try:
        parsedUpdate = parseUpdate(query)
        translatedUpdate = translateUpdate(parsedUpdate, base=base)
        # Check if BASE is absolute http(s) URI
        for value in parsedUpdate.prologue[0]:
            if value.name == 'Base' and not isAbsoluteUri(value.iri):
                raise UnSupportedQueryType()
        return parsedUpdate.request[0].name, translatedUpdate
    except ParseException:
        raise UnSupportedQueryType
Example #14
def forwardAndVerifyStores(repo, store, updateStrings):

    try:
        parsedUpdate = parser.parseUpdate("".join(updateStrings))
        query = algebra.translateUpdate(parsedUpdate)
        before = len(store)
        store.update(query)
        after = len(store)
        #print ("Store had", before, "statements and has", after, "statements")
        if before != after:
            f = store.serialize(format="nquads").decode("utf-8")

            print("currently on commit", repo.head.commit)
            nextcommit = getNextCommit()
            print("checking out", nextcommit)
            repo.git.checkout(nextcommit)

            graphFile = open(args.quitrepo + "/graph.nq", 'r')
            left = set(filter(lambda line: line, f.split("\n")))
            right = set(
                filter(lambda line: line,
                       set(line.strip() for line in set(graphFile))))
            graphFile.close()

            if not compareSets(right, left):
                print("update query was: \"{}\"".format(
                    "".join(updateStrings)))
                return nextcommit

    except Exception as e:
        print('Something went wrong in the function:', e)
        import traceback
        traceback.print_tb(e.__traceback__, limit=20)
        #print ("".join(updateStrings))
        exit(1)

    return None
Example #15
            value = item.values()[0].decode()
    elif class_name == 'Literal':
        value = item.decode()

    return value


def construct_prefix_list(data):
    prefixes = {}

    for item in data:
        prefix = item.prefix
        iri = item.iri.decode()
        prefixes[prefix] = iri

    return prefixes


p = parser.parseUpdate(queries['with_prefix'])
prefix_list = construct_prefix_list(p['prologue'][0])

all_triplets = []

for triple in p['request'][0]['quads']['triples']:
    triples = []
    for triple_unit in triple:
        value = get_value(triple_unit, prefix_list)
        triples.append(value)
    all_triplets += [triples[i:i + 3] for i in xrange(0, len(triples), 3)]
print all_triplets
Example #16
 def prepareUpdate(self, updateString, initNs={}, base=None):
     """Parse and translate a SPARQL Query."""
     parsedUpdate = parser.parseUpdate(str(updateString))
     return algebra.translateUpdate(parsedUpdate, base, initNs)
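
A hedged sketch of how the prepared update might be reused: translate once, then run it against several graphs with evalUpdate (the same entry point used in the examples above); only rdflib is assumed:

from rdflib import Graph
from rdflib.plugins.sparql import algebra, parser
from rdflib.plugins.sparql.update import evalUpdate

prepared = algebra.translateUpdate(
    parser.parseUpdate("INSERT DATA { <urn:s> <urn:p> <urn:o> }"), None, {})
g1, g2 = Graph(), Graph()
evalUpdate(g1, prepared)  # reuse the same prepared update on both graphs
evalUpdate(g2, prepared)
assert len(g1) == len(g2) == 1
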
Example #17
def _parseUpdate(sparql):
    return parseUpdate(sparql)
Example #18
    def update(self, strOrQuery, initBindings={}, initNs={}):
        if isinstance(strOrQuery, str):
            strOrQuery = translateUpdate(parseUpdate(strOrQuery),
                                         initNs=initNs)

        return evalUpdate(self.graph, strOrQuery, initBindings)
Example #19
def explain(query, file, config_file, graph_uri, indentnb, update, parse):
    coloredlogs.install(level='INFO',
                        fmt='%(asctime)s - %(levelname)s %(message)s')
    logger = logging.getLogger(__name__)

    if query is None and file is None:
        print(
            "Error: you must specify a query to execute, either with --query or --file. See sage-query --help for more information."
        )
        exit(1)

    # load query from file if required
    if file is not None:
        with open(file) as query_file:
            query = query_file.read()

    dataset = load_config(config_file)
    if dataset is None:
        print("config file {config_file} not found")
        exit(1)

    graph = dataset.get_graph(graph_uri)
    if graph is None:
        print("RDF Graph  not found:" + graph_uri)
        exit(1)

    engine = SageEngine()
    pp = pprint.PrettyPrinter(indent=indentnb)

    if query is None:
        exit(1)

    print("------------")
    print("Query")
    print("------------")
    print(query)

    if update:
        pq = parseUpdate(query)
    else:
        pq = parseQuery(query)

    if pq is None:
        exit(1)

    if parse:
        print("------------")
        print("Parsed Query")
        print("------------")
        pp.pprint(pq)
        print(prettify_parsetree(pq))

    if update:
        tq = translateUpdate(pq)
    else:
        tq = translateQuery(pq)
    print("------------")
    print("Algebra")
    print("------------")
    pprintAlgebra(tq)

    #logical_plan = tq.algebra
    cards = list()

    iterator, cards = parse_query(query, dataset, graph_uri)

    print("-----------------")
    print("Iterator pipeline")
    print("-----------------")
    print(iterator)
    print("-----------------")
    print("Cardinalities")
    print("-----------------")
    pp.pprint(cards)
Example #20
def parse_update(
        query: str,
        dataset: Dataset,
        default_graph: str,
        as_of: Optional[datetime] = None) -> Tuple[PreemptableIterator, dict]:
    """Parse a SPARQL UPDATE query into a physical query execution plan.

    For parsing a classic SPARQL query, please refer to the `parse_query` method.

    Args:
      * query: SPARQL query to parse.
      * dataset: RDF dataset on which the query is executed.
      * default_graph: URI of the default graph.
      * as_of: A timestamp used to perform all reads against a consistent version of the dataset. If `None`, use the latest version of the dataset, which does not guarantee snapshot isolation.

    Returns: A tuple (`iterator`, `cardinalities`) where:
      * `iterator` is the root of a pipeline of iterators used to execute the query.
      * `cardinalities` is the list of estimated cardinalities of all triple patterns in the query.

    Throws: `UnsupportedSPARQL` if the SPARQL query contains features not supported by the SaGe query engine.
    """
    # TODO change that, only used for testing
    consistency_level = "serializable"
    # consistency_level = dataset._config["consistency"] if "consistency" in dataset._config else "atomic_per_row"
    operations = translateUpdate(parseUpdate(query))
    if len(operations) > 1:
        raise UnsupportedSPARQL(
            "Only a single INSERT DATA/DELETE DATA is permitted per query. Consider splitting your query into multiple SPARQL queries."
        )
    operation = operations[0]
    if operation.name == 'InsertData' or operation.name == 'DeleteData':
        # create RDF quads to insert/delete into/from the default graph
        quads = get_quads_from_update(operation, default_graph)
        # build the preemptable update operator used to insert/delete RDF triples
        if operation.name == 'InsertData':
            return InsertOperator(quads, dataset), dict()
        else:
            return DeleteOperator(quads, dataset), dict()
    elif operation.name == 'Modify':
        where_root = operation.where
        # unwrap a Join with an empty BGP on either side
        if where_root.name == 'Join':
            if where_root.p1.name == 'BGP' and len(where_root.p1.triples) == 0:
                where_root = where_root.p2
            elif where_root.p2.name == 'BGP' and len(
                    where_root.p2.triples) == 0:
                where_root = where_root.p1

        # for consistency = serializable, use a SerializableUpdate iterator
        if consistency_level == "serializable":
            # build the read iterator
            cardinalities = list()
            read_iterator = parse_query_alt(where_root,
                                            dataset, [default_graph],
                                            cardinalities,
                                            as_of=as_of)
            # get the delete and/or insert templates
            #print("read iterator:"+str(read_iterator))
            delete_templates = list()
            insert_templates = list()
            if operation.delete is not None:
                delete_templates = get_quads_from_update(
                    operation.delete, default_graph)
            if operation.insert is not None:
                insert_templates = get_quads_from_update(
                    operation.insert, default_graph)

            # build the SerializableUpdate iterator
            return SerializableUpdate(dataset, read_iterator, delete_templates,
                                      insert_templates), cardinalities
        else:
            # Build the IF EXISTS style query from an UPDATE query with bound RDF triples
            # in the WHERE, INSERT and DELETE clauses.

            # assert that all RDF triples from the WHERE clause are bound
            if_exists_quads = where_root.triples
            for s, p, o in if_exists_quads:
                if type(s) is Variable or type(s) is BNode or type(
                        p) is Variable or type(p) is BNode or type(
                            o) is Variable or type(o) is BNode:
                    raise UnsupportedSPARQL(
                        "Only INSERT DATA and DELETE DATA queries are supported by the SaGe server. For evaluating other type of SPARQL UPDATE queries, please use a Sage Smart Client."
                    )
            # localize all triples in the default graph
            if_exists_quads = list(
                localize_triples(where_root.triples, [default_graph]))

            # get the delete and/or insert triples
            delete_quads = list()
            insert_quads = list()
            if operation.delete is not None:
                delete_quads = get_quads_from_update(operation.delete,
                                                     default_graph)
            if operation.insert is not None:
                insert_quads = get_quads_from_update(operation.insert,
                                                     default_graph)

            # build the UpdateSequenceOperator operator
            if_exists_op = IfExistsOperator(if_exists_quads, dataset, as_of)
            delete_op = DeleteOperator(delete_quads, dataset)
            insert_op = InsertOperator(insert_quads, dataset)
            return UpdateSequenceOperator(if_exists_op, delete_op,
                                          insert_op), dict()
    else:
        raise UnsupportedSPARQL(
            "Only INSERT DATA and DELETE DATA queries are supported by the SaGe server. For evaluating other type of SPARQL UPDATE queries, please use a Sage Smart Client."
        )
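
The branching above is driven by the .name of each translated operation; a small hedged illustration with plain rdflib, again assuming a version where translateUpdate returns the list of operations directly, as this function does:

from rdflib.plugins.sparql.algebra import translateUpdate
from rdflib.plugins.sparql.parser import parseUpdate

for q in (
        "INSERT DATA { <urn:s> <urn:p> <urn:o> }",
        "DELETE DATA { <urn:s> <urn:p> <urn:o> }",
        "DELETE { ?s ?p ?o } WHERE { ?s ?p ?o }"):
    operation = translateUpdate(parseUpdate(q))[0]
    print(operation.name)  # InsertData, DeleteData, Modify
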
Example #21
 def prepareUpdate(self, updateString, initNs={}, base=None):
     """Parse and translate a SPARQL Query."""
     parsedUpdate = parser.parseUpdate(str(updateString))
     return algebra.translateUpdate(parsedUpdate, base, initNs)
Example #22
def update_test(t):

    # the update-eval tests refer to graphs on http://example.org
    rdflib_sparql_module.SPARQL_LOAD_GRAPHS = False

    uri, name, comment, data, graphdata, query, res, syntax = t

    if uri in skiptests:
        raise SkipTest()

    try:
        g = Dataset()

        if not res:
            if syntax:
                with bopen(query[7:]) as f:
                    translateUpdate(parseUpdate(f))
            else:
                try:
                    with bopen(query[7:]) as f:
                        translateUpdate(parseUpdate(f))
                    raise AssertionError("Query shouldn't have parsed!")
                except:
                    pass  # negative syntax test
            return

        resdata, resgraphdata = res

        # read input graphs
        if data:
            g.default_context.load(data, format=_fmt(data))

        if graphdata:
            for x, l in graphdata:
                g.load(x, publicID=URIRef(l), format=_fmt(x))

        with bopen(query[7:]) as f:
            req = translateUpdate(parseUpdate(f))
        evalUpdate(g, req)

        # read expected results
        resg = Dataset()
        if resdata:
            resg.default_context.load(resdata, format=_fmt(resdata))

        if resgraphdata:
            for x, l in resgraphdata:
                resg.load(x, publicID=URIRef(l), format=_fmt(x))

        eq(
            set(x.identifier for x in g.contexts() if x != g.default_context),
            set(x.identifier for x in resg.contexts()
                if x != resg.default_context),
            "named graphs in datasets do not match",
        )
        assert isomorphic(
            g.default_context,
            resg.default_context), "Default graphs are not isomorphic"

        for x in g.contexts():
            if x == g.default_context:
                continue
            assert isomorphic(x, resg.get_context(
                x.identifier)), ("Graphs with ID %s are not isomorphic" %
                                 x.identifier)

    except Exception as e:

        if isinstance(e, AssertionError):
            failed_tests.append(uri)
            fails[str(e)] += 1
        else:
            error_tests.append(uri)
            errors[str(e)] += 1

        if DEBUG_ERROR and not isinstance(e, AssertionError) or DEBUG_FAIL:
            print("======================================")
            print(uri)
            print(name)
            print(comment)

            if not res:
                if syntax:
                    print("Positive syntax test")
                else:
                    print("Negative syntax test")

            if data:
                print("----------------- DATA --------------------")
                print(">>>", data)
                print(bopen_read_close(data[7:]))
            if graphdata:
                print("----------------- GRAPHDATA --------------------")
                for x, l in graphdata:
                    print(">>>", x, l)
                    print(bopen_read_close(x[7:]))

            print("----------------- Request -------------------")
            print(">>>", query)
            print(bopen_read_close(query[7:]))

            if res:
                if resdata:
                    print("----------------- RES DATA --------------------")
                    print(">>>", resdata)
                    print(bopen_read_close(resdata[7:]))
                if resgraphdata:
                    print(
                        "----------------- RES GRAPHDATA -------------------")
                    for x, l in resgraphdata:
                        print(">>>", x, l)
                        print(bopen_read_close(x[7:]))

            print("------------- MY RESULT ----------")
            print(g.serialize(format="trig"))

            try:
                pq = translateUpdate(parseUpdate(bopen_read_close(query[7:])))
                print("----------------- Parsed ------------------")
                pprintAlgebra(pq)
                # print pq
            except:
                print("(parser error)")

            print(decodeStringEscape(str(e)))

            import pdb

            pdb.post_mortem(sys.exc_info()[2])
        raise
Example #23
def update_test(t):

    # the update-eval tests refer to graphs on http://example.org
    rdflib_sparql_module.SPARQL_LOAD_GRAPHS = False

    uri, name, comment, data, graphdata, query, res, syntax = t

    if uri in skiptests:
        raise SkipTest()

    try:
        g = Dataset()

        if not res:
            if syntax:
                translateUpdate(parseUpdate(open(query[7:])))
            else:
                try:
                    translateUpdate(parseUpdate(open(query[7:])))
                    raise AssertionError("Query shouldn't have parsed!")
                except:
                    pass  # negative syntax test
            return

        resdata, resgraphdata = res

        # read input graphs
        if data:
            g.default_context.load(data, format=_fmt(data))

        if graphdata:
            for x, l in graphdata:
                g.load(x, publicID=URIRef(l), format=_fmt(x))

        req = translateUpdate(parseUpdate(open(query[7:])))
        evalUpdate(g, req)

        # read expected results
        resg = Dataset()
        if resdata:
            resg.default_context.load(resdata, format=_fmt(resdata))

        if resgraphdata:
            for x, l in resgraphdata:
                resg.load(x, publicID=URIRef(l), format=_fmt(x))

        eq(set(x.identifier for x in g.contexts() if x != g.default_context),
           set(x.identifier for x in resg.contexts()
               if x != resg.default_context), 'named graphs in datasets do not match')
        assert isomorphic(g.default_context, resg.default_context), \
            'Default graphs are not isomorphic'

        for x in g.contexts():
            if x == g.default_context:
                continue
            assert isomorphic(x, resg.get_context(x.identifier)), \
                "Graphs with ID %s are not isomorphic" % x.identifier

    except Exception, e:

        if isinstance(e, AssertionError):
            failed_tests.append(uri)
            fails[str(e)] += 1
        else:
            error_tests.append(uri)
            errors[str(e)] += 1

        if DEBUG_ERROR and not isinstance(e, AssertionError) or DEBUG_FAIL:
            print "======================================"
            print uri
            print name
            print comment

            if not res:
                if syntax:
                    print "Positive syntax test"
                else:
                    print "Negative syntax test"

            if data:
                print "----------------- DATA --------------------"
                print ">>>", data
                print open(data[7:]).read()
            if graphdata:
                print "----------------- GRAPHDATA --------------------"
                for x, l in graphdata:
                    print ">>>", x, l
                    print open(x[7:]).read()

            print "----------------- Request -------------------"
            print ">>>", query
            print open(query[7:]).read()

            if res:
                if resdata:
                    print "----------------- RES DATA --------------------"
                    print ">>>", resdata
                    print open(resdata[7:]).read()
                if resgraphdata:
                    print "----------------- RES GRAPHDATA -------------------"
                    for x, l in resgraphdata:
                        print ">>>", x, l
                        print open(x[7:]).read()

            print "------------- MY RESULT ----------"
            print g.serialize(format='trig')

            try:
                pq = translateUpdate(parseUpdate(open(query[7:]).read()))
                print "----------------- Parsed ------------------"
                pprintAlgebra(pq)
                # print pq
            except:
                print "(parser error)"

            print decodeStringEscape(unicode(e))

            import pdb
            pdb.post_mortem(sys.exc_info()[2])
        raise
Example #24
def _parseUpdate(sparql):
    return parseUpdate(sparql)
Example #25
    def update(self, strOrQuery, initBindings={}, initNs={}):
        if isinstance(strOrQuery, str): 
            strOrQuery=translateUpdate(parseUpdate(strOrQuery), initNs=initNs)

        return evalUpdate(self.graph, strOrQuery, initBindings)
Example #26
def update_test(t):

    # the update-eval tests refer to graphs on http://example.org
    rdflib_sparql_module.SPARQL_LOAD_GRAPHS = False

    uri, name, comment, data, graphdata, query, res, syntax = t

    if uri in skiptests:
        raise SkipTest()

    try:
        g = ConjunctiveGraph()

        if not res:
            if syntax:
                translateUpdate(parseUpdate(open(query[7:])))
            else:
                try:
                    translateUpdate(parseUpdate(open(query[7:])))
                    raise AssertionError("Query shouldn't have parsed!")
                except:
                    pass  # negative syntax test
            return

        resdata, resgraphdata = res

        # read input graphs
        if data:
            g.default_context.load(data, format=_fmt(data))

        if graphdata:
            for x, l in graphdata:
                g.load(x, publicID=URIRef(l), format=_fmt(x))

        req = translateUpdate(parseUpdate(open(query[7:])))
        evalUpdate(g, req)

        # read expected results
        resg = ConjunctiveGraph()
        if resdata:
            resg.default_context.load(resdata, format=_fmt(resdata))

        if resgraphdata:
            for x, l in resgraphdata:
                resg.load(x, publicID=URIRef(l), format=_fmt(x))

        eq(set(x.identifier for x in g.contexts() if x != g.default_context),
           set(x.identifier for x in resg.contexts()
               if x != resg.default_context))
        assert isomorphic(g.default_context, resg.default_context), \
            'Default graphs are not isomorphic'

        for x in g.contexts():
            if x == g.default_context:
                continue
            assert isomorphic(x, resg.get_context(x.identifier)), \
                "Graphs with ID %s are not isomorphic" % x.identifier

    except Exception, e:

        if isinstance(e, AssertionError):
            failed_tests.append(uri)
            fails[str(e)] += 1
        else:
            error_tests.append(uri)
            errors[str(e)] += 1

        if DEBUG_ERROR and not isinstance(e, AssertionError) or DEBUG_FAIL:
            print "======================================"
            print uri
            print name
            print comment

            if not res:
                if syntax:
                    print "Positive syntax test"
                else:
                    print "Negative syntax test"

            if data:
                print "----------------- DATA --------------------"
                print ">>>", data
                print open(data[7:]).read()
            if graphdata:
                print "----------------- GRAPHDATA --------------------"
                for x, l in graphdata:
                    print ">>>", x, l
                    print open(x[7:]).read()

            print "----------------- Request -------------------"
            print ">>>", query
            print open(query[7:]).read()

            if res:
                if resdata:
                    print "----------------- RES DATA --------------------"
                    print ">>>", resdata
                    print open(resdata[7:]).read()
                if resgraphdata:
                    print "----------------- RES GRAPHDATA -------------------"
                    for x, l in resgraphdata:
                        print ">>>", x, l
                        print open(x[7:]).read()

            print "------------- MY RESULT ----------"
            print g.serialize(format='trig')

            try:
                pq = translateUpdate(parseUpdate(open(query[7:]).read()))
                print "----------------- Parsed ------------------"
                pprintAlgebra(pq)
                # print pq
            except:
                print "(parser error)"

            print decodeStringEscape(unicode(e))

            import pdb
            pdb.post_mortem(sys.exc_info()[2])
        raise