def test_entity_property_validation():
    """Entity-property shapes: a permitted buildingPrimaryFunction value
    passes SHACL validation and a non-permitted one fails."""
    EX = Namespace("urn:ex#")

    # Success case: "Aquarium" is an allowed primary-function value.
    g = brickschema.Graph()
    g.load_file("Brick.ttl")
    g.add((EX["bldg"], A, BRICK.Building))
    g.add(
        (
            EX["bldg"],
            BRICK.buildingPrimaryFunction,
            [(BRICK.value, Literal("Aquarium", datatype=XSD.string))],
        )
    )
    g.expand("brick")
    valid, _, report = g.validate()
    assert valid, report

    # Failure case: "AquariumFail" is not in the allowed value set.
    g = brickschema.Graph()
    g.load_file("Brick.ttl")
    g.add((EX["bldg"], A, BRICK.Building))
    g.add(
        (
            EX["bldg"],
            BRICK.buildingPrimaryFunction,
            [(BRICK.value, Literal("AquariumFail", datatype=XSD.string))],
        )
    )
    g.expand("brick", backend="owlrl")
    valid, _, report = g.validate()
    assert not valid, "'AquariumFail' should have thrown a validation error"
def load_brick_file(cls, filepath):
    """Load the Brick model at *filepath* and merge it with the Brick schema.

    The schema graph is loaded once and cached on ``BrickStore.schema``.
    """
    if not BrickStore.schema:
        here = os.path.dirname(os.path.realpath(__file__))
        schema_file = os.path.join(here, "..", "bim", "schema", "Brick.ttl")
        BrickStore.schema = brickschema.Graph()
        BrickStore.schema.load_file(schema_file)
    model = brickschema.Graph().load_file(filepath)
    BrickStore.graph = model + BrickStore.schema
    BrickStore.path = filepath
def new_brick_file(cls):
    """Start a fresh in-memory Brick model pre-merged with the Brick schema."""
    if not BrickStore.schema:
        here = os.path.dirname(os.path.realpath(__file__))
        schema_file = os.path.join(here, "..", "bim", "schema", "Brick.ttl")
        BrickStore.schema = brickschema.Graph()
        BrickStore.schema.load_file(schema_file)
    BrickStore.graph = brickschema.Graph() + BrickStore.schema
    # Register the prefixes used when serializing the model.
    for prefix, uri in (
        ("digitaltwin", "https://example.org/digitaltwin#"),
        ("brick", "https://brickschema.org/schema/Brick#"),
        ("rdfs", "http://www.w3.org/2000/01/rdf-schema#"),
    ):
        BrickStore.graph.bind(prefix, Namespace(uri))
def test_equip():
    """brick:hasLocation must point at a Location, never at a Point."""
    ok_data = base_data + """
:equip brick:hasLocation :loc.
"""
    ok_graph = brickschema.Graph().parse(data=ok_data, format='turtle')
    conforms, _, _ = ok_graph.validate([schema_g])
    assert conforms

    bad_data = base_data + """
:equip brick:hasLocation :point.
"""
    bad_graph = brickschema.Graph().parse(data=bad_data, format='turtle')
    conforms, _, _ = bad_graph.validate([schema_g])
    assert not conforms
def test_run(self):
    """remove_brick deletes every triple whose subject is the new entity."""
    BrickStore.graph = brickschema.Graph()
    result = subject.add_brick(
        "http://example.org/digitaltwin#",
        "https://brickschema.org/schema/Brick#Equipment",
    )
    subject.remove_brick(result)
    leftovers = list(BrickStore.graph.triples((URIRef(result), None, None)))
    assert not leftovers
def test_type():
    """An entity typed as both Location and Point must fail validation."""
    bad_data = base_data + """
:loc a brick:Point.
"""
    bad_graph = brickschema.Graph().parse(data=bad_data, format='turtle')
    conforms, _, _ = bad_graph.validate([schema_g])
    assert not conforms
def test_run(self):
    """add_brickifc_project mints a Project entity mirroring the IFC project."""
    ifc = ifcopenshell.file()
    tool.Ifc.set(ifc)
    project = ifc.createIfcProject(ifcopenshell.guid.new())
    project.Name = "My Project"
    BrickStore.graph = brickschema.Graph()

    result = subject.add_brickifc_project("http://example.org/digitaltwin#")
    assert result == f"http://example.org/digitaltwin#{project.GlobalId}"

    brick = URIRef(result)
    # Every (predicate, object) pair the new entity must carry.
    expected = [
        (RDF.type, URIRef("https://brickschema.org/extension/ifc#Project")),
        (
            URIRef("http://www.w3.org/2000/01/rdf-schema#label"),
            Literal("My Project"),
        ),
        (
            URIRef("https://brickschema.org/extension/ifc#projectID"),
            Literal(project.GlobalId),
        ),
        (
            URIRef("https://brickschema.org/extension/ifc#fileLocation"),
            Literal(bpy.context.scene.BIMProperties.ifc_file),
        ),
    ]
    for predicate, obj in expected:
        assert list(BrickStore.graph.triples((brick, predicate, obj)))
def test_example_file_with_reasoning(filename):
    """Each example file must still validate after Brick OWL-RL expansion."""
    g = brickschema.Graph()
    for path in ("Brick.ttl", filename):
        g.load_file(path)
    g.expand("brick", backend="owlrl")
    valid, _, report = g.validate()
    assert valid, report
def test_run(self):
    """add_feed records a brick:feeds triple from source to destination."""
    BrickStore.graph = brickschema.Graph()
    subject.add_feed(
        "http://example.org/digitaltwin#source",
        "http://example.org/digitaltwin#destination",
    )
    expected = (
        URIRef("http://example.org/digitaltwin#source"),
        URIRef("https://brickschema.org/schema/Brick#feeds"),
        URIRef("http://example.org/digitaltwin#destination"),
    )
    assert list(BrickStore.graph.triples(expected))
def test_run(self):
    """load_brick_file populates BrickStore.graph from a Turtle file."""
    # Stub the schema with a small file so the test runs quickly.
    here = os.path.dirname(os.path.realpath(__file__))
    BrickStore.schema = brickschema.Graph()
    BrickStore.schema.load_file(os.path.join(here, "..", "files", "BrickStub.ttl"))

    # The actual test: load a model and check the merged graph exists.
    subject.load_brick_file(os.path.join(here, "..", "files", "spaces.ttl"))
    assert BrickStore.graph
def build(self, filename, delimiter=',', has_header=False):
    """Build an RDF graph from the CSV rows in *filename*.

    Each row is stripped of surrounding whitespace and handed to
    ``self.get_triples`` to produce the triples added to the graph.
    """
    graph = brickschema.Graph(load_brick=False)
    for prefix, namespace in self.prefixes.items():
        graph.bind(prefix, namespace)
    with open(filename) as handle:
        reader = csv.reader(handle, delimiter=delimiter)
        if has_header:
            next(reader)  # discard the header row
        for raw_row in reader:
            cells = [cell.strip() for cell in raw_row]
            for triple in self.get_triples(cells):
                graph.add(triple)
    return graph
def test_run(self):
    """add_brick creates a new Equipment entity with a default label."""
    BrickStore.graph = brickschema.Graph()
    result = subject.add_brick(
        "https://example.org/digitaltwin#",
        "https://brickschema.org/schema/Brick#Equipment",
    )
    assert "https://example.org/digitaltwin#" in result
    entity = URIRef(result)
    assert list(
        BrickStore.graph.triples(
            (
                entity,
                RDF.type,
                URIRef("https://brickschema.org/schema/Brick#Equipment"),
            )
        )
    )
    assert list(
        BrickStore.graph.triples(
            (
                entity,
                URIRef("http://www.w3.org/2000/01/rdf-schema#label"),
                Literal("Unnamed"),
            )
        )
    )
def test_run(self):
    """new_brick_file creates a fresh graph with the expected prefixes."""
    # Stub the schema with a small file so the test runs quickly.
    here = os.path.dirname(os.path.realpath(__file__))
    BrickStore.schema = brickschema.Graph()
    BrickStore.schema.load_file(os.path.join(here, "..", "files", "BrickStub.ttl"))

    subject.new_brick_file()
    assert BrickStore.graph
    bound = [(prefix, uri.toPython()) for prefix, uri in BrickStore.graph.namespaces()]
    assert ("digitaltwin", "https://example.org/digitaltwin#") in bound
    assert ("brick", "https://brickschema.org/schema/Brick#") in bound
    assert ("rdfs", "http://www.w3.org/2000/01/rdf-schema#") in bound
def test_run(self):
    """add_brick_from_element derives the entity URI from the IFC GlobalId."""
    ifc = ifcopenshell.file()
    element = ifc.createIfcChiller()
    element.Name = "Chiller"
    element.GlobalId = ifcopenshell.guid.new()
    BrickStore.graph = brickschema.Graph()

    result = subject.add_brick_from_element(
        element,
        "http://example.org/digitaltwin#",
        "https://brickschema.org/schema/Brick#Equipment",
    )
    uri = f"http://example.org/digitaltwin#{element.GlobalId}"
    assert result == uri
    entity = URIRef(uri)
    assert list(
        BrickStore.graph.triples(
            (
                entity,
                RDF.type,
                URIRef("https://brickschema.org/schema/Brick#Equipment"),
            )
        )
    )
    assert list(
        BrickStore.graph.triples(
            (
                entity,
                URIRef("http://www.w3.org/2000/01/rdf-schema#label"),
                Literal("Chiller"),
            )
        )
    )
import sys

from bricksrc.namespaces import A, OWL, RDFS, SKOS, BRICK, SH, BSH, bind_prefixes

import brickschema

# SHACL shapes shared by every test below.
schema_g = brickschema.Graph().load_file('shacl/BrickShape.ttl')
bind_prefixes(schema_g)

prefixes = """
@prefix brick: <https://brickschema.org/schema/Brick#> .
@prefix : <http://example.com#> .
"""

# A minimal model: one equipment, one point, one location.
base_data = prefixes + """
:equip a brick:Equipment.
:point a brick:Point.
:loc a brick:Location.
"""


def test_no_relations():
    """A model with no relationships conforms trivially."""
    graph = brickschema.Graph().parse(data=base_data, format='turtle')
    conforms, _, _ = graph.validate([schema_g])
    assert conforms


def test_equip():
    """Equipment located in a Location conforms to the shapes."""
    ok_data = base_data + """
:equip brick:hasLocation :loc.
"""
    ok_graph = brickschema.Graph().parse(data=ok_data, format='turtle')
    conforms, _, _ = ok_graph.validate([schema_g])
    assert conforms
def test_measures_infers():
    """For every class with a brick:measures restriction, instantiate an
    entity carrying that restriction and check OWL-RL inference recovers
    exactly the class's superclass closure."""
    g = brickschema.Graph()
    g.load_file("Brick.ttl")

    # Pull each class's owl:hasValue restriction on brick:measures.
    qstr = """select ?class ?o where {
      ?class rdfs:subClassOf+ brick:Class.
      ?class owl:equivalentClass ?restrictions.
      ?restrictions owl:intersectionOf ?inter.
      ?inter rdf:rest*/rdf:first ?node.
      ?node owl:onProperty brick:measures .
      ?node owl:hasValue ?o.
    }
    """
    for row in g.query(qstr):
        klass = row[0]
        entity = klass + "_entity"  # Define an entity for the class
        # Associate the entity with the measured substance/quantity.
        g.add((entity, BRICK.measures, row[1]))

    # Apply the OWL-RL reasoner to infer entity classes.
    g.expand(profile="owlrl")

    qstr = """select ?instance ?class where {
      ?instance a ?class.
      ?class rdfs:subClassOf* brick:Class.
    }
    """
    inferred_klasses = defaultdict(set)
    for row in g.query(qstr):
        entity, klass = row[0], row[1]
        if BRICK in klass:  # Filter out non-Brick classes such as Restrictions
            inferred_klasses[entity].add(klass)

    over_inferences = {}  # Inferred classes that should not have been inferred.
    under_inferences = {}  # Classes that should have been inferred but were not.
    wrong_inferences = {}  # Any other mismatched inference.
    for entity, inferred_parents in inferred_klasses.items():
        if entity[-7:] != "_entity":
            continue
        # Recover the class from the "<class>_entity" naming scheme above.
        true_class = URIRef(entity[0:-7])
        # Collect the true superclass closure from the original hierarchy.
        qstr = """select ?parent where {{
          <{0}> rdfs:subClassOf* ?parent.
          ?parent rdfs:subClassOf* brick:Class.
        }}
        """.format(true_class)
        res = g.query(qstr)
        true_parents = [row[0] for row in res]
        true_parents = set(filter(lambda parent: BRICK in parent, true_parents))
        serialized = {
            "inferred_parents": list(inferred_parents),
            "true_parents": list(true_parents),
        }
        if inferred_parents > true_parents:
            over_inferences[entity] = serialized
        elif inferred_parents < true_parents:
            under_inferences[entity] = serialized
        elif inferred_parents != true_parents:
            wrong_inferences[entity] = serialized

    # NOTE(review): the "wrong_inferencers" key differs from the variable
    # name (wrong_inferences) — possibly a typo; preserved as-is.
    with open("tests/test_measures_inference.json", "w") as fp:
        json.dump(
            {
                "over_inferences": over_inferences,
                "under_inferences": under_inferences,
                "wrong_inferencers": wrong_inferences,
            },
            fp,
            indent=2,
        )

    assert not over_inferences, "There are {0} classes that are over-inferred".format(
        len(over_inferences)
    )
    assert not under_inferences, "There are {0} classes that are under-inferred".format(
        len(under_inferences)
    )
    assert (
        not wrong_inferences
    ), "There are {0} classes that are inferred incorrectly in other ways".format(
        len(wrong_inferences)
    )
import brickschema
from brickschema.namespaces import A, OWL, BRICK, UNIT
from rdflib import Namespace, Literal

# our entities will live in this namespace
BLDG = Namespace("urn:example#")

# load brick into a graph so we can query it
brick = brickschema.Graph().load_file("../../Brick.ttl")

# create a graph for our model
g = brickschema.Graph()
g.bind("bldg", BLDG)

# We model a location hierarchy (site > building > floor > room) that will
# hold a collection of air-quality sensors measuring the room's air.
# The generated Turtle should look like this:
#
#   bldg:deployment_site a brick:Site ;
#       brick:hasPart bldg:building_1 .
#   bldg:building_1 a brick:Building ;
#       brick:hasPart bldg:floor_1 .
#   bldg:floor_1 a brick:Floor ;
#       brick:hasPart bldg:room_1 .
#   bldg:room_1 a brick:Room .
g.add((BLDG.deployment_site, A, BRICK.Site))
g.add((BLDG.building_1, A, BRICK.Building))
def test_hierarchyinference():
    """Tag-based SHACL inference must reproduce the Brick class hierarchy
    (up to owl:equivalentClass) exactly — no over/under/wrong inferences."""
    # Load the schema.
    g = brickschema.Graph()
    g.load_file("Brick.ttl")

    # Fetch every class together with its associated tags.
    qstr = """
    select ?class ?tag where {
      ?class rdfs:subClassOf+ brick:Class.
      ?class brick:hasAssociatedTag ?tag
    }
    """
    start_time = time.time()
    for row in tqdm(g.query(qstr)):
        klass = row[0]
        entity = klass + entity_postfix  # Define an entity for the class
        # Attach the class's tags to the entity.
        g.add((entity, BRICK.hasTag, row[1]))
    end_time = time.time()
    print("Instantiation took {0} seconds".format(int(end_time - start_time)))

    # Infer entity classes via the SHACL tag-inference extension.
    g.serialize("test.ttl", format="ttl")
    g.load_file("extensions/brick_extension_shacl_tag_inference.ttl")
    g.expand(profile="brick")
    g.serialize(inference_file, format="turtle")  # Store the inferred graph.

    # Collect every instance with its inferred parent classes.
    qstr = """
    select ?instance ?class where {
      ?instance a ?class.
      ?class rdfs:subClassOf* brick:Class.
    }
    """
    inferred_klasses = defaultdict(set)
    for row in tqdm(g.query(qstr)):
        entity, klass = row[0], row[1]
        if BRICK in klass:  # Filter out non-Brick classes such as Restrictions
            inferred_klasses[entity].add(klass)

    # Map each class to its owl:equivalentClass partners, both directions.
    equivalent_classes = defaultdict(set)
    res = g.query(
        """SELECT ?c1 ?c2 WHERE { ?c1 owl:equivalentClass ?c2 }"""
    )
    for c1, c2 in res:
        equivalent_classes[c1].add(c2)
        equivalent_classes[c2].add(c1)

    over_inferences = {}  # Inferred classes that should not have been inferred.
    under_inferences = {}  # Classes that should have been inferred but were not.
    wrong_inferences = {}  # Any other mismatched inference.
    for entity, inferred_parents in inferred_klasses.items():
        if entity[-2:] != entity_postfix:
            continue
        # Recover the class from the "<class><entity_postfix>" naming scheme.
        true_class = URIRef(entity[0:-2])
        # Gather the true superclass closure from the original hierarchy.
        qstr = """
        select ?parent where {{
          <{0}> rdfs:subClassOf* ?parent.
          ?parent rdfs:subClassOf* brick:Class.
        }}""".format(
            true_class
        )
        res = g.query(qstr)
        true_parents = [row[0] for row in res]
        # Fold in equivalent classes of every true parent.
        for tp in true_parents[:]:
            true_parents.extend(equivalent_classes.get(tp, []))
        true_parents = set(filter(lambda parent: BRICK in parent, true_parents))
        serialized = {
            "inferred_parents": list(inferred_parents),
            "true_parents": list(true_parents),
        }
        if inferred_parents > true_parents:
            over_inferences[entity] = serialized
            diff = set(inferred_parents).difference(set(true_parents))
            print(
                f"Tags for {true_class.split('#')[-1]} imply extra classes: {make_readable([diff])}"
            )
        elif inferred_parents < true_parents:
            under_inferences[entity] = serialized
            diff = set(true_parents).difference(set(inferred_parents))
            print(
                f"Tags for {true_class.split('#')[-1]} do not imply classes, but should: {make_readable([diff])}"
            )
        elif inferred_parents != true_parents:
            wrong_inferences[entity] = serialized

    # NOTE(review): the "wrong_inferencers" key differs from the variable
    # name (wrong_inferences) — possibly a typo; preserved as-is.
    with open("tests/test_hierarchy_inference.json", "w") as fp:
        json.dump(
            {
                "over_inferences": over_inferences,
                "under_inferences": under_inferences,
                "wrong_inferencers": wrong_inferences,
            },
            fp,
            indent=2,
        )

    assert not over_inferences, "There are {0} classes that are over-inferred".format(
        len(over_inferences)
    )
    assert not under_inferences, "There are {0} classes that are under-inferred".format(
        len(under_inferences)
    )
    assert (
        not wrong_inferences
    ), "There are {0} classes that are inferred incorrectly in other ways".format(
        len(wrong_inferences)
    )
from rdflib import RDF, RDFS, OWL, Namespace
import pytest

import brickschema
from .util import make_readable
import sys

sys.path.append("..")  # must precede the bricksrc import below
from bricksrc.namespaces import BRICK, TAG, A, SKOS  # noqa: E402

BLDG = Namespace("https://brickschema.org/schema/ExampleBuilding#")

# Shared fixture graph: the Brick schema plus a handful of example instances.
g = brickschema.Graph()
g.parse("Brick.ttl", format="turtle")

# One coil typed explicitly...
g.add((BLDG.Coil_1, A, BRICK.Heating_Coil))
# ...and one described only by its tags.
g.add((BLDG.Coil_2, BRICK.hasTag, TAG.Equipment))
g.add((BLDG.Coil_2, BRICK.hasTag, TAG.Coil))
g.add((BLDG.Coil_2, BRICK.hasTag, TAG.Heat))

g.add((BLDG.AHU1, A, BRICK.AHU))
g.add((BLDG.VAV1, A, BRICK.VAV))
g.add((BLDG.AHU1, BRICK.feedsAir, BLDG.VAV1))
g.add((BLDG.CH1, A, BRICK.Chiller))

# This gets inferred as an air temperature sensor
g.add((BLDG.TS1, A, BRICK.Temperature_Sensor))
g.add((BLDG.TS1, BRICK.measures, BRICK.Air))
g.add((BLDG.TS2, A, BRICK.Air_Temperature_Sensor))
def test_no_relations():
    """The base model alone (no relationships) passes SHACL validation."""
    graph = brickschema.Graph().parse(data=base_data, format='turtle')
    conforms, _, _ = graph.validate([schema_g])
    assert conforms