def __init__(self, folder, dbfile, limit):
        if not os.path.exists(dbfile) or dbfile == ':memory:':
            ttlfiles = glob.glob(f"{folder}/*.ttl")
            csvfiles = glob.glob(f"{folder}/*/*.csv")

            conn = sqlite3.connect(dbfile)
            conn.row_factory = sqlite3.Row
            for stmt in schema.split(';'):
                stmt += ';'
                conn.execute(stmt)
            conn.commit()

            # load in ttl
            g = Graph()
            bind_prefixes(g)
            for ttl in ttlfiles:
                print(f"Loading TTL file {ttl}")
                g.load_file(ttl)
                # values = list(map(tuple, g))
                # conn.executemany("""INSERT OR IGNORE INTO triples(subject, predicate, object) \
                #                     VALUES(?, ?, ?)""", values)
                # conn.commit()
            g = BrickInferenceSession().expand(g)
            triples = list(g.g)
            conn.executemany(
                """INSERT OR IGNORE INTO triples(subject, predicate, object) \
                                VALUES(?, ?, ?)""", triples)
            conn.commit()

            # load in data
            for csvf in sorted(csvfiles)[:limit]:
                print(f"Loading CSV file {csvf}")
                rows = csv2rows(csvf)
                #with open(csvf) as f:
                #    rdr = csv.reader(f)
                #    next(rdr) # consume header
                #    vals = list(rdr)
                conn.executemany(
                    """INSERT OR IGNORE INTO data(timestamp, uuid, value) \
                                    VALUES(?, ?, ?)""", rows)
                conn.commit()
        else:
            conn = sqlite3.connect(dbfile)
            conn.row_factory = sqlite3.Row
            # rebuild the graph from the triples already stored in the database
            g = Graph()
            bind_prefixes(g)
            triples = conn.execute(
                "SELECT subject, predicate, object FROM triples")
            for t in triples:
                t = (
                    parse_uri(t[0]),
                    parse_uri(t[1]),
                    parse_uri(t[2]),
                )
                g.add(t)
        self.g = g
        self.conn = conn
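The enclosing class for this __init__ is not shown in the excerpt. A minimal usage sketch, assuming a hypothetical class name DataStore and a folder containing .ttl and .csv files:

# Hypothetical usage; "DataStore" stands in for the (unnamed) enclosing class.
store = DataStore("building_data", "building.db", limit=5)
n = store.conn.execute("SELECT COUNT(*) FROM triples").fetchone()[0]
print(f"{n} Brick triples loaded into SQLite")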
Example #2
def test_tagset_inference():

    g = Graph(load_brick=False)
    data = pkgutil.get_data(__name__, "data/tags.ttl").decode()
    g.load_file(source=io.StringIO(data))
    g.expand(profile="tag")

    afs1 = g.query("SELECT ?x WHERE { ?x rdf:type brick:Air_Flow_Sensor }")
    assert len(afs1) == 1
    afsp1 = g.query("SELECT ?x WHERE { ?x rdf:type brick:Air_Flow_Setpoint }")
    assert len(afsp1) == 1
    mafs1 = g.query(
        "SELECT ?x WHERE { ?x rdf:type brick:Max_Air_Flow_Setpoint_Limit }")
    assert len(mafs1) == 1
Example #3
def test_tagset_inference():
    session = TagInferenceSession(approximate=False)
    assert session is not None
    g = Graph(load_brick=False)
    data = pkgutil.get_data(__name__, "data/tags.ttl").decode()
    g.load_file(source=io.StringIO(data))
    g = session.expand(g)

    afs1 = g.query("SELECT ?x WHERE { ?x rdf:type brick:Air_Flow_Sensor }")
    assert len(afs1) == 1
    afsp1 = g.query("SELECT ?x WHERE { ?x rdf:type brick:Air_Flow_Setpoint }")
    assert len(afsp1) == 1
    mafs1 = g.query(
        "SELECT ?x WHERE { ?x rdf:type brick:Max_Air_Flow_Setpoint_Limit }")
    assert len(mafs1) == 1
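The data/tags.ttl fixture used by the two tests above is not included in this listing. A plausible minimal input for tag inference, assuming Brick 1.2 namespace URIs, tags a single entity and lets the session infer its class:

import io
from brickschema.graph import Graph
from brickschema.inference import TagInferenceSession

# Hypothetical stand-in for data/tags.ttl: one entity carrying Air/Flow/Sensor tags.
ttl = """
@prefix brick: <https://brickschema.org/schema/Brick#> .
@prefix tag: <https://brickschema.org/schema/BrickTag#> .
@prefix ex: <http://example.com/building#> .

ex:afs1 brick:hasTag tag:Air, tag:Flow, tag:Sensor .
"""
g = Graph(load_brick=False)
g.load_file(source=io.StringIO(ttl))
g = TagInferenceSession(approximate=False).expand(g)
# ex:afs1 should now also be typed as brick:Air_Flow_Sensor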
Example #4
def test_brick_inference():
    g = Graph(load_brick=True)
    data = pkgutil.get_data(__name__, "data/brick_inference_test.ttl").decode()
    g.load_file(source=io.StringIO(data))
    g.expand(profile="owlrl")
    g.expand(profile="tag")

    r = g.query("SELECT ?x WHERE { ?x rdf:type brick:Air_Temperature_Sensor }")
    # assert len(r) == 5
    urls = set([str(row[0]) for row in r])
    real_sensors = set([
        "http://example.com/mybuilding#sensor1",
        "http://example.com/mybuilding#sensor2",
        "http://example.com/mybuilding#sensor3",
        "http://example.com/mybuilding#sensor4",
        "http://example.com/mybuilding#sensor5",
    ])
    assert urls == real_sensors
Example #5
def test_simplify():
    g = Graph(load_brick=True)
    data = pkgutil.get_data(__name__, "data/test.ttl").decode()
    g.load_file(source=io.StringIO(data))

    g.expand("brick", simplify=False, backend="owlrl")
    g.serialize("/tmp/test.ttl", format="ttl")

    q = "SELECT ?type WHERE { bldg:VAV2-4.ZN_T a ?type }"
    rows = list(g.query(q))
    bnodes = [r[0] for r in rows if isinstance(r[0], rdflib.BNode)]
    assert len(bnodes) > 0

    g.simplify()

    rows = list(g.query(q))
    bnodes = [r[0] for r in rows if isinstance(r[0], rdflib.BNode)]
    assert len(bnodes) == 0
Example #6
def test_brick_to_vbis_inference_with_owlrl():
    ALIGN = Namespace("https://brickschema.org/schema/Brick/alignments/vbis#")

    # input brick model; instances should have appropriate VBIS tags
    g = Graph(load_brick=True)
    data = pkgutil.get_data(__name__, "data/vbis_inference_test.ttl").decode()
    g.load_file(source=io.StringIO(data))
    g.expand(profile="owlrl")
    g.expand(profile="vbis")

    test_cases = [
        ("http://bldg#f1", "ME-Fa"),
        ("http://bldg#rtu1", "ME-ACU"),
    ]
    for (entity, vbistag) in test_cases:
        query = f"SELECT ?tag WHERE {{ <{entity}> <{ALIGN.hasVBISTag}> ?tag }}"
        res = list(g.query(query))
        assert len(res) == 1
        assert str(res[0][0]) == vbistag

    conforms, _, results = g.validate()
    assert conforms, results
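The data/vbis_inference_test.ttl fixture is likewise not shown. A plausible minimal input would simply type the two entities with Brick classes that the VBIS alignment maps onto the expected tags (the class choices below are an assumption about the fixture, not taken from it):

# Hypothetical stand-in for data/vbis_inference_test.ttl.
ttl = """
@prefix brick: <https://brickschema.org/schema/Brick#> .
@prefix bldg: <http://bldg#> .

bldg:f1   a brick:Fan .
bldg:rtu1 a brick:Rooftop_Unit .
"""
# Loading this with g.load_file(source=io.StringIO(ttl)) before the two expand()
# calls should produce the ME-Fa and ME-ACU VBIS tags checked above.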
Example #7
def test_orm():
    g = Graph(load_brick=True)
    data = pkgutil.get_data(__name__, "data/test.ttl").decode()
    g.load_file(source=io.StringIO(data))
    orm = SQLORM(g, connection_string="sqlite:///:memory:")

    equips = orm.session.query(Equipment).all()
    assert len(equips) == 5

    points = orm.session.query(Point).all()
    assert len(points) == 3

    locs = orm.session.query(Location).all()
    assert len(locs) == 4

    hvac_zones = (
        orm.session.query(Location).filter(Location.type == BRICK.HVAC_Zone).all()
    )
    assert len(hvac_zones) == 1

    # test relationships
    BLDG = Namespace("http://example.com/mybuilding#")
    vav2_4 = orm.session.query(Equipment).filter(Equipment.name == BLDG["VAV2-4"]).one()
    assert vav2_4.type == str(BRICK.Variable_Air_Volume_Box)
    assert len(vav2_4.points) == 2

    vav2_4_dpr = (
        orm.session.query(Equipment).filter(Equipment.name == BLDG["VAV2-4.DPR"]).one()
    )
    assert vav2_4_dpr.type == str(BRICK.Damper)
    assert len(vav2_4_dpr.points) == 1

    tstat = orm.session.query(Equipment).filter(Equipment.name == BLDG["tstat1"]).one()
    room_410 = (
        orm.session.query(Location).filter(Location.name == BLDG["Room-410"]).one()
    )
    assert tstat.location == room_410
    assert tstat in room_410.equipment
Example #8
from brickschema.graph import Graph
from brickschema.inference import BrickInferenceSession
from rdflib import Literal, URIRef
from .namespaces import SKOS, OWL, RDFS, BRICK, QUDTQK, QUDTDV, QUDT, UNIT


g = Graph()
g.load_file("support/VOCAB_QUDT-QUANTITY-KINDS-ALL-v2.1.ttl")
g.load_file("support/VOCAB_QUDT-UNITS-ALL-v2.1.ttl")
g.g.bind("qudt", QUDT)
g.g.bind("qudtqk", QUDTQK)
sess = BrickInferenceSession()
g = sess.expand(g)


def get_units(brick_quantity):
    """
    Fetches the QUDT unit and symbol (as a Literal) from the QUDT ontology so
    in order to avoid having to pull the full QUDT ontology into Brick
    """
    res = g.query(
        f"""SELECT ?unit ?symbol WHERE {{
                    <{brick_quantity}> qudt:applicableUnit ?unit .
                    ?unit qudt:symbol ?symbol .
                    FILTER(isLiteral(?symbol))
                    }}"""
    )
    for r in res:
        yield r
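A short usage sketch for get_units; brick:Temperature is assumed here purely as an illustrative quantity kind (BRICK is the namespace already imported above):

# Iterate over the (unit, symbol) pairs yielded for an example quantity kind.
for unit, symbol in get_units(BRICK.Temperature):
    print(f"{unit} ({symbol})")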