def __init__(self, folder, dbfile, limit):
    """Open (or build) the SQLite store of Brick triples and timeseries data.

    If *dbfile* does not exist yet (or is the transient ``':memory:'``
    database), the schema is created and the store is populated from the
    ``*.ttl`` model files in *folder* and the ``*/*.csv`` data files one
    directory below it. Otherwise the persisted triples are read back
    into an in-memory graph.

    Args:
        folder: directory containing ``*.ttl`` Brick model files and
            ``*/*.csv`` timeseries files.
        dbfile: path to the SQLite database file, or ``':memory:'``.
        limit: maximum number of CSV files to ingest (applied after
            sorting the file list).

    Side effects: sets ``self.g`` (the loaded/rehydrated graph) and
    ``self.conn`` (an open :mod:`sqlite3` connection with
    ``sqlite3.Row`` as row factory).
    """
    if not os.path.exists(dbfile) or dbfile == ':memory:':
        ttlfiles = glob.glob(f"{folder}/*.ttl")
        csvfiles = glob.glob(f"{folder}/*/*.csv")
        conn = sqlite3.connect(dbfile)
        conn.row_factory = sqlite3.Row
        # executescript runs the whole multi-statement DDL safely; the
        # previous split-on-';' loop re-appended ';' to every fragment
        # (including the empty trailing one) and would break on any
        # semicolon embedded inside a statement.
        conn.executescript(schema)
        conn.commit()

        # Load the Brick model and materialize inferred triples before
        # persisting them.
        g = Graph()
        bind_prefixes(g)
        for ttl in ttlfiles:
            print(f"Loading TTL file {ttl}")
            g.load_file(ttl)
        g = BrickInferenceSession().expand(g)
        conn.executemany(
            """INSERT OR IGNORE INTO triples(subject, predicate, object) \
            VALUES(?, ?, ?)""", list(g.g))
        conn.commit()

        # Ingest at most `limit` CSV files of timeseries data.
        for csvf in sorted(csvfiles)[:limit]:
            print(f"Loading CSV file {csvf}")
            rows = csv2rows(csvf)
            conn.executemany(
                """INSERT OR IGNORE INTO data(timestamp, uuid, value) \
                VALUES(?, ?, ?)""", rows)
            conn.commit()
    else:
        conn = sqlite3.connect(dbfile)
        conn.row_factory = sqlite3.Row
        # Rehydrate the graph from the persisted triples; stored values
        # are serialized URIs, so parse each column back before adding.
        g = Graph()
        bind_prefixes(g)
        triples = conn.execute(
            "SELECT subject, predicate, object FROM triples")
        for t in triples:
            g.add((parse_uri(t[0]), parse_uri(t[1]), parse_uri(t[2])))
    self.g = g
    self.conn = conn
def graph_from_triples(triples):
    """Build a Brick-loaded graph from *triples* and return its expansion.

    Args:
        triples: iterable of ``(subject, predicate, object)`` triples to
            add to a fresh ``Graph(load_brick=True)``.

    Returns:
        The graph produced by ``OWLRLAllegroInferenceSession.expand`` —
        the input triples plus everything the reasoner infers.
    """
    g = Graph(load_brick=True)
    g.add(*triples)
    # NOTE(review): this path uses the Allegro OWL-RL reasoner while
    # __init__ uses BrickInferenceSession — confirm the difference in
    # inference backends is intentional.
    sess = OWLRLAllegroInferenceSession()
    return sess.expand(g)