Example #1
0
def getHostName(graph, ifUri):
    """
    Given a URI of an interface, return the name of the host
    If the host has a 'name' property, return that value.
    Otherwise, strip off the first part of the URI (until '#') and return the last part.
    """
    global rdf, rdfs, ndl, bindings, rdfcompatmode
    select = ("?hostUri", "?hostName")
    # Two alternative patterns: the host is named via ndl:name or rdfs:label.
    namePattern = GraphPattern([
        ("?hostUri", ndl["hasInterface"], ifUri),
        ("?hostUri", ndl["name"], "?hostName"),
    ])
    labelPattern = GraphPattern([
        ("?hostUri", ndl["hasInterface"], ifUri),
        ("?hostUri", rdfs["label"], "?hostName"),
    ])
    where = [namePattern, labelPattern]
    # Wrap the plain Graph so it can be queried with SPARQL.
    sparqlGrph = SPARQLGraph(graph)
    # Run the query through whichever API the compatibility mode requires.
    if rdfcompatmode:
        result = sparqlGrph.query(select, where)
    else:
        result = Query.query(sparqlGrph, select, where,
                             initialBindings=bindings)
    if not result:
        # No named host found: fall back to the URI fragment (slice keeps '#').
        return (ifUri[ifUri.find("#"):], "")
    hostUri, hostName = result[0]
    # The interface name is the interface URI with the "<hostUri>:" prefix removed.
    ifName = ifUri.replace(hostUri + ":", "")
    return (hostName, ifName)
Example #2
0
def getLocations(graph):
    """
    Return the host-name query results for each ndl:Location in *graph*.

    For every subject typed ndl:Location, a SPARQL query selects the names
    of hosts located there (via ndl:name or, alternatively, rdfs:label);
    each non-empty result is appended to the returned list.

    @param graph: an rdflib Graph holding NDL triples
    @return: list of per-location query results (empty if no location matched)
    """
    global rdf, rdfs, ndl, bindings, rdfcompatmode
    locations = []
    # Create a SPARQLGraph wrapper object out of the normal Graph
    sparqlGrph = SPARQLGraph(graph)
    # ("?hostName") in the original was just a parenthesized string, not a
    # 1-tuple; use the bare string (same value) and hoist it out of the loop
    # since it does not depend on the location.
    select = "?hostName"
    for loc in graph.subjects(predicate=rdf["type"], object=ndl["Location"]):
        # Two alternative patterns: hosts named via ndl:name or rdfs:label.
        where = [
            GraphPattern([
                ("?hostUri", ndl["locatedAt"], loc),
                ("?hostUri", ndl["name"], "?hostName"),
            ]),
            GraphPattern([
                ("?hostUri", ndl["locatedAt"], loc),
                ("?hostUri", rdfs["label"], "?hostName"),
            ]),
        ]
        # Make the query through whichever API the compatibility mode requires.
        if rdfcompatmode:
            result = sparqlGrph.query(select, where)
        else:
            result = Query.query(sparqlGrph, select, where,
                                 initialBindings=bindings)
        if result:
            locations.append(result)
    return locations
Example #3
0
def getHostName(graph, ifUri):
    """
    Given a URI of an interface, return the name of the host
    If the host has a 'name' property, return that value.
    Otherwise, strip off the first part of the URI (until '#') and return the last part.

    @param graph: an rdflib Graph holding NDL triples
    @param ifUri: URI of the interface whose host is looked up
    @return: (hostName, interfaceName) on a match, else (URI fragment, "")
    """
    global rdf, rdfs, ndl, bindings, rdfcompatmode
    select = ("?hostUri", "?hostName")
    # Two alternative patterns: the host is named via ndl:name or rdfs:label.
    where = [
        GraphPattern([
            ("?hostUri", ndl["hasInterface"], ifUri),
            ("?hostUri", ndl["name"], "?hostName"),
        ]),
        GraphPattern([
            ("?hostUri", ndl["hasInterface"], ifUri),
            ("?hostUri", rdfs["label"], "?hostName"),
        ])
    ]
    # Create a SPARQLGraph wrapper object out of the normal Graph
    sparqlGrph = SPARQLGraph(graph)
    # Make the query
    if rdfcompatmode:
        result = sparqlGrph.query(select, where)
    else:
        result = Query.query(sparqlGrph,
                             select,
                             where,
                             initialBindings=bindings)
    if result:
        # Only the first match is used.
        hostUri, hostName = result[0]
        # Interface name = interface URI with the "<hostUri>:" prefix removed.
        ifName = ifUri.replace(hostUri + ":", "")
        return (hostName, ifName)
    else:
        # NOTE(review): this slice starts AT the '#', so the fragment keeps
        # the leading '#' — the docstring says "the last part"; confirm which
        # is intended before changing.
        return (ifUri[ifUri.find("#"):], "")
Example #4
0
    def describe(self, selection, forward=True, backward=True):
        """
        Temporary implementation of the SPARQL DESCRIBE form.

        The DESCRIBE form in the SPARQL draft is still in a state of
        flux, so this is just a wrapper around L{cluster} and its
        directional variants; it may not correspond to the final
        version of DESCRIBE (if it stays in the draft at all).

        @param selection: a selection to define the seeds for
        clustering via the selection; the result of select used for
        the clustering seed

        @param forward: cluster forward yes or no
        @type forward: Boolean
        @param backward: cluster backward yes or no
        @type backward: Boolean
        """
        # Dispatch on the requested directions; both set means full cluster.
        if forward:
            if backward:
                return self.cluster(selection)
            return self.clusterForward(selection)
        if backward:
            return self.clusterBackward(selection)
        # Neither direction requested: return an empty graph.
        return SPARQLGraph()
Example #5
0
    def clusterForward(self, selection):
        """
        Forward clustering, using all the results of the query as
        seeds (when appropriate). It is based on the usage of the
        L{cluster forward<rdflib.sparql.sparqlGraph.clusterForward>}
        method for triple store.

        @param selection: a selection to define the seeds for
        clustering via the selection; the result of select used for
        the clustering seed

        @return: a new triple store
        @rtype: L{sparqlGraph<rdflib.sparql.sparqlGraph>}
        """
        # With two parents, cluster each side and merge the resulting graphs.
        if self.parent1 is not None and self.parent2 is not None:
            return self.parent1.clusterForward(
                selection) + self.parent2.clusterForward(selection)
        else:
            clusterF = SPARQLGraph()
            # Flatten the select result rows into one sequence of seed nodes.
            for r in reduce(lambda x, y: list(x) + list(y),
                            self.select(selection), ()):
                try:
                    check_subject(r)
                    self.triples.clusterForward(r, clusterF)
                except Exception:
                    # Was a bare "except:", which also swallows
                    # KeyboardInterrupt/SystemExit. A seed that is a literal
                    # cannot be a subject — just skip it.
                    continue
            return clusterF
Example #6
0
def getConnections(graph):
    """
    Given a NDL triplet graph, return lists of external and
    internal connections.

    The method runs a SPARQL query on the graph.
    Next step is to filter out the equivalent connections, which is done using a
    stack, because lists cannot be altered while iterating over them.

    Difference between internal and external is currently based on whether the
    symmetric connectedTo property is present in the graph.

    The results are beautified using the getHostName method.
    """
    global rdf, rdfs, ndl, bindings, rdfcompatmode
    select = ("?ifA", "?ifB")
    # Pattern for a symmetric connectedTo pair of interfaces.
    where = GraphPattern([
        ("?ifA", ndl["connectedTo"], "?ifB"),
        ("?ifB", ndl["connectedTo"], "?ifA"),
    ])
    # Create a SPARQLGraph wrapper object out of the normal Graph
    sparqlGrph = SPARQLGraph(graph)
    # Make the query through whichever API the compatibility mode requires.
    if rdfcompatmode:
        result = sparqlGrph.query(select, where)
    else:
        result = Query.query(sparqlGrph, select, where,
                             initialBindings=bindings)
    internalConnections = []
    externalConnections = []
    # Treat the result as a stack: pop one pair at a time; if its mirror
    # image is still pending, the link is symmetric (internal), otherwise
    # it is external.
    while len(result) > 0:
        ifA, ifB = result.pop()
        connection = (getHostName(graph, ifA), getHostName(graph, ifB))
        if (ifB, ifA) in result:
            result.remove((ifB, ifA))
            internalConnections.append(connection)
        else:
            externalConnections.append(connection)
    locations = getLocations(graph)
    return internalConnections, externalConnections, locations
Example #7
0
def getConnections(graph):
    """
    Given a NDL triplet graph, return lists of external and
    internal connections.

    The method runs a SPARQL query on the graph.
    Next step is to filter out the equivalent connections, which is done using a
    stack, because lists cannot be altered while iterating over them.

    Difference between internal and external is currently based on whether the
    symmetric connectedTo property is present in the graph.

    The results are beautified using the getHostName method.
    """
    global rdf,rdfs,ndl,bindings,rdfcompatmode
    select = ("?ifA","?ifB")
    # Pattern for a symmetric connectedTo pair of interfaces.
    where = GraphPattern([
            ("?ifA", ndl["connectedTo"], "?ifB"),
            ("?ifB", ndl["connectedTo"], "?ifA"),
            ])
    # Create a SPARQLGraph wrapper object out of the normal Graph
    sparqlGrph = SPARQLGraph(graph)
    # Make the query
    if rdfcompatmode:
        result = sparqlGrph.query(select, where)
    else:
        result = Query.query(sparqlGrph, select, where, initialBindings=bindings)
    #print "Found %d connections" % len(result)
    internalConnections = []
    externalConnections = []
    # Pop one pair at a time; if its mirror image (ifB, ifA) is still in the
    # stack, the link is symmetric and counted as internal (the mirror is
    # removed so the pair is reported only once); otherwise it is external.
    while len(result) > 0:
        ifA,ifB = result.pop()
        if (ifB,ifA) in result:
            result.remove((ifB,ifA))
            internalConnections.append((getHostName(graph,ifA), getHostName(graph,ifB)))
        else:
            externalConnections.append((getHostName(graph,ifA), getHostName(graph,ifB)))
    locations = getLocations(graph)
    return internalConnections, externalConnections, locations
Example #8
0
def getLocations(graph):
    """
    Return the host-name query results for each ndl:Location in *graph*.

    For every subject typed ndl:Location, a SPARQL query selects the names
    of hosts located there (via ndl:name or, alternatively, rdfs:label);
    each non-empty result is appended to the returned list.
    """
    locations = []
    global rdf,rdfs,ndl,bindings,rdfcompatmode
    sparqlGrph = SPARQLGraph(graph)
    for loc in graph.subjects(predicate=rdf["type"], object=ndl["Location"]):
        # NOTE(review): ("?hostName") is a parenthesized string, NOT a
        # 1-tuple — presumably the query API accepts a bare variable name;
        # confirm before "fixing" the missing comma.
        select = ("?hostName")
        # Two alternative patterns: hosts named via ndl:name or rdfs:label.
        where = [GraphPattern([
                ("?hostUri", ndl["locatedAt"], loc),
                ("?hostUri", ndl["name"], "?hostName"),
                ]),
            GraphPattern([
                ("?hostUri", ndl["locatedAt"], loc),
                ("?hostUri", rdfs["label"], "?hostName"),
                ])]
        # Create a SPARQLGraph wrapper object out of the normal Graph
        # Make the query
        if rdfcompatmode:
            result = sparqlGrph.query(select, where)
        else:
            result = Query.query(sparqlGrph, select, where, initialBindings=bindings)
        if result:
            locations.append(result)
    return locations
Example #9
0
    def query(self):
        """
        Make a SPARQL query

        @return: posts result
        """
        sparqlGr = SPARQLGraph(self.graph)
        # Variables projected out of the query.
        select = ('?post', '?postTitle', '?date', '?userName', '?content',
                  '?parent')
        # Mandatory pattern: a SIOC post with title, creation date, content,
        # and a creator whose name is bound.
        mandatoryTriples = [
            ('?post', RDF['type'], SIOC['Post']),
            ('?post', DC['title'], '?postTitle'),
            ('?post', DCTERMS['created'], '?date'),
            ('?post', SIOC['content'], '?content'),
            ('?post', SIOC['has_creator'], '?user'),
            ('?user', SIOC['name'], '?userName'),
        ]
        where = GraphPattern(mandatoryTriples)
        # Optional pattern: the post may be a reply to a parent post.
        opt = GraphPattern([('?post', SIOC['reply_of'], '?parent')])
        posts = Query.query(sparqlGr, select, where, opt)
        return self.orderByDate(posts)
Example #10
0
    def construct(self, pattern=None):
        """
        Expand the subgraph based on the pattern or, if None, the
        internal bindings.

        In the former case the binding is used to instantiate the
        triplets in the patterns; in the latter, the original
        statements are used as patterns.

        The result is a separate triple store containing the subgraph.

        @param pattern: a L{GraphPattern<rdflib.sparql.graphPattern.GraphPattern>} instance or None
        @return: a new triple store
        @rtype: L{sparqlGraph<rdflib.sparql.sparqlGraph>}
        """
        # Idiomatic identity test for None instead of "!= None".
        # With two parents, construct each side and merge the graphs.
        if self.parent1 is not None and self.parent2 is not None:
            return self.parent1.construct(pattern) + self.parent2.construct(
                pattern)
        # Single result: let the top node expand into a fresh graph.
        subgraph = SPARQLGraph()
        self.top.expandSubgraph(subgraph, pattern)
        return subgraph
Example #11
0
    def clusterBackward(self, selection):
        """
        Backward clustering, using all the results of the query as
        seeds (when appropriate). It is based on the usage of the
        L{cluster backward<rdflib.sparql.sparqlGraph.clusterBackward>}
        method for triple store.

        @param selection: a selection to define the seeds for
        clustering via the selection; the result of select used for
        the clustering seed

        @return: a new triple store
        @rtype: L{sparqlGraph<rdflib.sparql.sparqlGraph>}
        """
        # Idiomatic identity test for None instead of "!= None".
        # With two parents, cluster each side and merge the resulting graphs.
        if self.parent1 is not None and self.parent2 is not None:
            return self.parent1.clusterBackward(
                selection) + self.parent2.clusterBackward(selection)
        else:
            clusterB = SPARQLGraph()
            # to be on the safe side, see if the query has been properly finished
            # Flatten the select result rows into one sequence of seed nodes.
            for r in reduce(lambda x, y: list(x) + list(y),
                            self.select(selection), ()):
                self.triples.clusterBackward(r, clusterB)
            return clusterB