Example no. 1
def work(identification, graph_name, step_to_do, redis_fn):
    global redis_connection, strict_redis_connection, sparql_server, step, step_graph
    step = step_to_do

    log('work ' + '[' + str(step) + ']')

    #for Collections
    step_graph = ConjunctiveGraph(sparqlstore.SPARQLStore(sparql_uri),
                                  graph_name)

    sparql_server = sparql.SPARQLServer(sparql_uri)
    redis_connection = redislite.Redis(redis_fn)
    strict_redis_connection = redislite.StrictRedis(redis_fn)

    gv_output_file_name = identification + '_' + str(step).zfill(7) + '.gv'

    if not list(subjects(RDF.type, kbdbg.frame)):
        log('no frames.' + '[' + str(step) + ']')
        put_last_bindings(step, [])
        return

    if (step == global_start - 1):
        gv_output_file_name = 'dummy'
    try:
        os.unlink(gv_output_file_name)
    except FileNotFoundError:
        pass

    gv_output_file = open(gv_output_file_name, 'w')
    e = Emitter(gv_output_file, step)
    e.generate_gv_image()
    gv_output_file.close()

    if (step == global_start - 1):
        return

    log('convert..' + '[' + str(step) + ']')
    #cmd, args = subprocess.check_output, ("convert", '-regard-warnings', "-extent", '6000x3000',  gv_output_file_name, '-gravity', 'NorthWest', '-background', 'white', gv_output_file_name + '.svg')
    cmd, args = subprocess.check_output, ("dot", '-Tsvg', gv_output_file_name,
                                          '-O')
    try:
        r = cmd(args, stderr=subprocess.STDOUT)
        if r != b"":
            raise RuntimeError('[' + str(step) + '] ' + str(r))
    except subprocess.CalledProcessError as e:
        log('[' + str(step) + '] ' + e.output.decode(errors='replace'))
    log('convert done.' + '[' + str(step) + ']')

    if len(stats):
        print('stats:')
        for i in stats:
            print(i)
        #stats.clear()

    redis_connection._cleanup()
    strict_redis_connection._cleanup()
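
The snippet above relies on module-level state that is not shown (sparql_uri, the kbdbg namespace, log, subjects, stats, global_start, plus the Emitter and put_last_bindings helpers, which are omitted here). A minimal sketch of that context, with placeholder values chosen purely for illustration, might look like this:

import os
import subprocess
import redislite
from rdflib import ConjunctiveGraph, Namespace, RDF
from rdflib.plugins.stores import sparqlstore
from pymantic import sparql  # assumed source of sparql.SPARQLServer

sparql_uri = 'http://localhost:8890/sparql'      # placeholder endpoint
kbdbg = Namespace('http://example.org/kbdbg#')   # placeholder namespace URI
stats = []          # timing/statistics lines printed at the end of work()
global_start = 1    # first step to render, normally set by run()

def log(message):
    # Minimal stand-in for the project's logger.
    print(message)

def subjects(predicate, obj):
    # Convenience wrapper over the per-step graph created in work().
    return step_graph.subjects(predicate, obj)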
Example no. 2
def run(start, end, workers):
    global global_start, graph_name_start
    global_start = start

    redis_fn = redislite.Redis().db
    if workers:
        worker_pool = ProcessPoolExecutor(max_workers=workers)

    runs_graph = Graph(sparqlstore.SPARQLStore(sparql_uri), default_graph)
    graph_name_start = runs_graph.value(kbdbg.latest, kbdbg['is'],
                                        any=False).toPython()
    identification = fix_up_identification(graph_name_start)

    step_to_submit = -1

    for step_graph_uri in profile(
            list, (Collection(runs_graph, URIRef(graph_name_start)), )):
        step_to_submit += 1
        if step_to_submit < start - 1:
            log("skipping [" + str(step_to_submit) + ']')
            continue
        if step_to_submit > end and end != -1:
            log("ending")
            break

        args = (identification, step_graph_uri, step_to_submit, redis_fn)
        if not workers:
            work(*args)
        else:
            log('submit ' + '[' + str(step_to_submit) + ']' +
                ' (queue size: ' + str(len(futures)) + ')')
            if len(futures) > workers:
                time.sleep(len(futures) - workers)
            fut = worker_pool.submit(work, *args)
            fut.step = step_to_submit
            futures.append(fut)
            log('submitted ')
            check_futures()
        log('loop ')

    if workers:
        worker_pool.shutdown()
    check_futures()
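
run() also depends on a module-level futures list and a check_futures() helper that are not part of the snippet (profile, fix_up_identification and default_graph are likewise defined elsewhere in the project). A minimal sketch, assuming check_futures() simply drains completed futures and re-raises any worker exception:

futures = []

def check_futures():
    # Drop finished futures from the queue and surface worker exceptions.
    for fut in futures[:]:
        if fut.done():
            exc = fut.exception()
            if exc is not None:
                raise RuntimeError('step %s failed' % fut.step) from exc
            futures.remove(fut)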
Example no. 3
def main(test):
    number_of_passed = 0
    number_of_failed = 0
    number_of_failed_with_exception = 0

    graph = Graph(
        sparqlstore.SPARQLStore(config.sparqlstore['sesame_url'],
                                context_aware=False))
    graph.bind('foaf', FOAF)
    graph.bind('xsd', XSD)
    graph.bind('swrc', SWRC)
    graph.bind('swc', SWC)
    graph.bind('skos', SKOS)
    graph.bind('bibo', BIBO)
    graph.bind('dcterms', DCTERMS)
    graph.bind('dc', DC)
    graph.bind('timeline', TIMELINE)

    for f in os.listdir(TESTS_ROOT_DIR):
        if (test is not None and f == test) or test is None:
            print "========================="
            print "[TEST %s] Running..." % f
            try:
                passed = True
                with open('%s/%s/%s' %
                          (TESTS_ROOT_DIR, f, QUERY_FILENAME)) as query_file:
                    result = graph.query(query_file.read())

                    results = []
                    for r in result:
                        result_line = [
                            normalize(graph.namespace_manager, x) for x in r
                        ]
                        results.append(result_line)

                    print "[TEST %s] Number of results: %s" % (f, len(results))

                    with open(
                            '%s/%s/%s' %
                        (TESTS_ROOT_DIR, f, EXPECTED_FILENAME),
                            'rb') as expected_file:
                        expected = read_csv(expected_file)
                        print "[TEST %s] Number of expected results: %s" % (
                            f, len(expected))
                        if len(results) != len(expected):
                            passed = False
                        else:
                            for row in expected:
                                if row not in results:
                                    print "[TEST %s] [%s] not found!" % (
                                        f, ', '.join(map(repr, row)))
                                    print "[TEST %s] Query results:" % f
                                    print_list(results)
                                    passed = False
                                    break

                    if passed:
                        print "[TEST %s] Passed!" % f
                        number_of_passed += 1
                    else:
                        print "[TEST %s] Failed!" % f
                        number_of_failed += 1

            except:
                print "[TEST %s] Failed with exception!" % f
                number_of_failed_with_exception += 1
                traceback.print_exc()

    print "\n======RESULTS======\n"
    print "Passed: %s" % number_of_passed
    print "Failed: %s" % number_of_failed
    print "Failed with exception: %s" % number_of_failed_with_exception
Example no. 4
# actors = pd.read_csv('actors.csv')

### Get actor names etc. from triple store
actorquery = """
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX momaf: <http://momaf-data.utu.fi/>
PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
SELECT * WHERE {
  ?person_uri a momaf:Person ;
    momaf:elonet_ID ?elonet_id ;
    momaf:elonet_person_ID ?elonet_person_id ;
    skos:prefLabel ?name .
}
"""

store = sparqlstore.SPARQLStore(QSERVICE)
ares = store.query(actorquery)

act = {}
for r in ares:
    if args.debug: print("%s %s %s %s" % r)
    act[int(str(r.elonet_person_id))] = str(r.name)

#if args.debug: print (act)

#print(labels.index)
#print(labels.columns)


def xsdtime(s, fps):
    t = datetime.timedelta(seconds=s / fps)
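
This fragment is cut off mid-function and also leaves QSERVICE, args and the datetime import to the surrounding script. A sketch of the assumed setup (the endpoint URL here is a placeholder, not the project's actual service):

import argparse
import datetime
from rdflib.plugins.stores import sparqlstore

QSERVICE = 'https://example.org/momaf/sparql'  # placeholder SPARQL endpoint

parser = argparse.ArgumentParser()
parser.add_argument('--debug', action='store_true')
args = parser.parse_args()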
Example no. 5
#! /usr/bin/env python

from rdflib import Graph, URIRef
from rdflib.plugins.stores import sparqlstore

endpoint = 'http://<IP>:7200/repositories/SciGraph'
store = sparqlstore.SPARQLStore()
store.open(endpoint)

graph_name_ref = URIRef(u'http://www.springernature.com/scigraph/graphs/articles.dds')
ng = Graph(store, identifier=graph_name_ref)
store.bind('sg', 'http://www.springernature.com/scigraph/ontologies/core/')


q = 'select ?s ?t  where { ?s a sg:Article . ?s sg:title ?t  } limit 10 '
print(q)

for s, o in ng.query(q):
    print('article Id: ' + s + '\tarticle Title: ' + o)
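
A small variation on the same query: instead of binding the sg prefix on the store, the namespace can be passed at query time through initNs, which rdflib's Graph.query accepts. This is a sketch under the same placeholder endpoint as above:

from rdflib import Namespace

SG = Namespace('http://www.springernature.com/scigraph/ontologies/core/')
q = 'SELECT ?s ?t WHERE { ?s a sg:Article . ?s sg:title ?t } LIMIT 10'
for s, t in ng.query(q, initNs={'sg': SG}):
    print('article Id: %s\tarticle Title: %s' % (s, t))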