async def handle_message(self, msg: core_pb2.CoreLocation) -> DataWrapper[core_pb2.Response]:
    """Apply a director core-location update to the DAP store.

    Builds a one-row DapUpdate from the incoming CoreLocation message and
    hands it to the DapManager. Failures in either stage are logged and
    recorded in the response narrative instead of being raised.

    :param msg: incoming location update carrying ``core_key`` and ``location``.
    :return: DataWrapper around a core_pb2.Response; ``success`` mirrors the
             DapManager update status and narratives carry any error text.
    """
    response = DataWrapper(False, "location", core_pb2.Response())
    update = dap_update_pb2.DapUpdate()
    try:
        upd = update.update.add()
        upd.tablename = self._db_structure["table"]
        upd.fieldname = self._db_structure["field"]
        upd.key.core = msg.core_key
        upd.value.type = 9  # value-type code 9 == location (see sibling geo updates)
        upd.value.l.CopyFrom(msg.location)
    except Exception as e:
        # BUG FIX: the original joined the message with a comma, producing a
        # tuple instead of a string, and rebound the `msg` parameter in the
        # process. Concatenate properly and use a dedicated local.
        error_text = "Failed to create dap_update proto from director location update: " + str(e)
        self.warning(error_text)
        response.error_code = 500
        response.add_narrative(error_text)
    try:
        status = self._dap_manager.update(update)
        response.success = status
    except Exception as e:
        error_text = "DapManager: failed update: " + str(e)
        response.add_narrative(error_text)
        self.warning(error_text)
    return response
def _createUpdate(self):
    """Return a one-row DapUpdate fixture: wibbles/wibble = "moo" for agent 007."""
    update = dap_update_pb2.DapUpdate()
    row = update.update.add()
    row.tablename = "wibbles"
    row.fieldname = "wibble"
    row.value.type = 2  # string value
    row.value.s = "moo"
    row.key.agent = b"007/James/Bond"
    return update
def _createUpdate(self, agent_name):
    """Return a one-row DapUpdate fixture keyed to *agent_name* on localhost.

    :param agent_name: agent identifier (str); encoded to UTF-8 for the key.
    """
    update = dap_update_pb2.DapUpdate()
    row = update.update.add()
    row.tablename = "wibbles"
    row.fieldname = "wibble"
    row.value.type = 2  # string value
    row.value.s = "moo"
    row.key.agent = agent_name.encode("utf-8")
    row.key.core = b"localhost"
    return update
def _createUpdate(self):
    """Return a one-row DapUpdate fixture with fixed agent and core keys."""
    update = dap_update_pb2.DapUpdate()
    row = update.update.add()
    row.tablename = "wibbles"
    row.fieldname = "wibble"
    row.value.type = 2  # string value
    row.value.s = "moo"
    row.key.agent = b"007/James/Bond"
    row.key.core = b"localhost:10000"
    return update
def _setupAgents(cls):
    """Build the demo data models, print them, and store the combined update.

    Creates three Query.DataModel fixtures (weather data shared by two
    agents, plus two book stores), prints each under its banner, then folds
    one row per (agent, model) pair into a single DapUpdate saved as
    ``cls.update``.
    """
    def make_model(name, description, attributes):
        # Local builder so each fixture reads as data rather than plumbing.
        model = query_pb2.Query.DataModel()
        model.name = name
        model.description = description
        model.attributes.extend(attributes)
        return model

    dm1 = make_model("weather_data", "All possible weather data.", [
        get_attr_b("wind_speed", "Provides wind speed measurements.", 0),
        get_attr_b("temperature", "Provides wind speed measurements.", 1),
        get_attr_b("air_pressure", "Provides wind speed measurements.", 2),
    ])
    dm2 = make_model("book_data", "Book store data", [
        get_attr_b("title", "The title of the book", 1),
        get_attr_b("author", "The author of the book", 3),
        get_attr_b("release_year", "Release year of the book in the UK", 4),
        get_attr_b("introduction", "Short introduction by the author.", 3),
        get_attr_b("rating", "Summary rating of the book given by us.", 0),
    ])
    dm3 = make_model("book_store_new", "Other bookstore. Focuses on novels.", [
        get_attr_b("title", "The title of the book", 1),
        get_attr_b("author", "The author of the book", 3),
        get_attr_b("ISBN", "That code thing", 4),
        get_attr_b("price", "We will need a lot of money", 3),
        get_attr_b("count", "How many do we have", 0),
        get_attr_b("condition", "Our books are in the best condition", 0),
    ])

    print(
        "======================================WEATHER STATION======================================"
    )
    print(dm1)
    print(
        "======================================BOOK STORE======================================"
    )
    print(dm2)
    print(
        "======================================OTHER BOOK STORE======================================"
    )
    print(dm3)

    update = dap_update_pb2.DapUpdate()
    agent_models = [
        ("007/James/Bond/Weather", dm1),
        ("White/Spy/Book", dm2),
        ("Black/Spy/BookMoreDataNovel", dm3),
        ("86/Maxwell/Smart/Weather", dm1),
    ]
    for agent_name, model in agent_models:
        cls._addUpdate(update, agent_name, model)
    cls.update = update
def removeAll(self, key):
    """Remove every record stored under *key* from all DAP instances.

    :param key: update-key protobuf message identifying the records to remove
                (sibling code sets ``key.agent`` / ``key.core``, so this is a
                composite field).
    :return: True only if every instance reported a successful removal.
    """
    success = True
    update = dap_update_pb2.DapUpdate()
    # BUG FIX: `key` is a composite protobuf message field; direct assignment
    # (`.key = key`) raises AttributeError in the protobuf Python API.
    # Copy the message contents instead.
    update.update.add().key.CopyFrom(key)
    for instance in self.instances:
        r = instance.remove(update)
        if not r.success:
            for m in r.narrative:
                self.log.error(m)
        success &= r.success
    return success
def DoFieldsUpdateForAgent(self, agent, list_of_field_val_tuples):
    """Write each (field, value) pair to the "ratings" table for *agent*.

    Sends one DapUpdate per tuple via the DapManager; the result of each
    update is logged through self.warning.
    """
    agent_key = agent.encode("utf-8")
    for field_name, field_value in list_of_field_val_tuples:
        update = dap_update_pb2.DapUpdate()
        entry = update.update.add()
        entry.tablename = "ratings"
        ProtoHelpers.populateUpdateTFV(entry, field_name, field_value)
        entry.key.agent = agent_key
        entry.key.core = b'localhost'
        result = self.dapManager.update(update)
        self.warning("doing update", list_of_field_val_tuples, agent, result)
def _setupAgents(self):
    """Seed the DapManager's location table with four demo agents.

    Each agent gets one geo row (value.type 9 == location) keyed by
    agent name and core 'localhost'.
    """
    agent_locations = [
        ("007/James/Bond", (51.477, -0.461)),    # LHR
        ("White/Spy", (53.354, -2.275)),         # MANCHESTER
        ("Black/Spy", (50.734, -3.414)),         # EXETER
        ("86/Maxwell/Smart", (40.640, -73.779)), # JFK
    ]
    for agent, (lat, lon) in agent_locations:
        update = dap_update_pb2.DapUpdate()
        entry = update.update.add()
        entry.tablename = "location"
        entry.fieldname = "location.update"
        entry.key.core = b'localhost'
        entry.key.agent = agent.encode("utf-8")
        entry.value.l.lat = lat
        entry.value.l.lon = lon
        entry.value.type = 9  # location value
        self.dapManager.update(update)
def _createUpdate(self, agent_name, fieldname, typename, data):
    """Build a one-row DapUpdate with a typed value.

    :param agent_name: "core agent" string; split on the first space into
                       core key and agent key.
    :param fieldname: target field name for the row.
    :param typename: 'string' or 'dm' — selects value type code 2 or 6.
    :param data: the value payload (str, or a DataModel proto for 'dm').
    :return: the populated DapUpdate.
    :raises KeyError: if *typename* is not 'string' or 'dm'.
    """
    type_codes = {'string': 2, 'dm': 6}
    update = dap_update_pb2.DapUpdate()
    entry = update.update.add()
    entry.fieldname = fieldname
    core_part, _, agent_part = agent_name.partition(' ')
    entry.value.type = type_codes[typename]
    if typename == "string":
        entry.value.s = data
    elif typename == "dm":
        entry.value.dm.CopyFrom(data)
    entry.key.agent = agent_part.encode("utf-8")
    entry.key.core = core_part.encode("utf-8")
    return update
def toDapUpdate(self) -> update_pb2.Update:
    """Convert self.origin into a DapUpdate proto.

    ``self.origin`` may be either a wrapper exposing ``.list`` (a batch of
    origins) or a single origin; each origin contributes rows for its
    data model(s), data-model values, and attributes.

    :return: a dap_update_pb2.DapUpdate containing all generated rows.
    :raises InvalidAttribute: if an origin's key field is empty.
    """
    updates = []
    try:
        updates = self.origin.list
    except AttributeError:
        # BUG FIX: was a bare `except:`, which swallowed every error.
        # Only the missing-attribute case (single origin, no `.list`)
        # should fall through to the single-item path.
        updates = [self.origin]
    upd_list = []
    for origin in updates:
        key = origin.key
        if len(key) < 1:
            raise InvalidAttribute("Key", "OEFCorePublicKey", "bytes",
                                   "Required key field not set!")
        if origin.HasField("data_model"):
            upd_list.append(self.updFromDataModel(key, origin.data_model))
        for dm_instance in origin.data_models:
            upd_list.append(
                self.updFromDataModel(key, dm_instance.model, dm_instance.key))
            if len(dm_instance.values) > 0:
                # Instance carries values: store the model under the
                # dedicated table, the values, and the derived attribute rows.
                upd_list.append(
                    self.updFromDataModel(key, dm_instance.model,
                                          dm_instance.key, "dm_instance_model"))
                upd_list.append(
                    self.updFromDataModelValues(key, dm_instance.values,
                                                dm_instance.key))
                upd_list.extend(
                    self.attrUpdFromDataModel(key, dm_instance.key,
                                              dm_instance.model,
                                              dm_instance.values))
            else:
                upd_list.extend(
                    self.attrUpdFromDataModel(key, dm_instance.key,
                                              dm_instance.model, []))
        for attr in origin.attributes:
            upd_list.append(self.updFromAttribute(key, attr))
    upd = dap_update_pb2.DapUpdate()
    upd.update.extend(upd_list)
    return upd
def testFieldOptionsWork(self):
    """End-to-end check: geo constraint plus data-model embedding search
    returns only the weather agents within 150 km of Birmingham (BHX)."""
    dapManager = DapManager.DapManager()
    self.dapManager = dapManager
    # Two DAPs: an embedding-based data-model searcher and a plane-geo store.
    dapManagerConfig = {
        "data_model_searcher": {
            "class": "SearchEngine",
            "config": {
                "structure": {
                    "data_model_table": {
                        "data_model": "embedding"
                    },
                },
            },
        },
        "geo": {
            "class": "DapGeo",
            "config": {
                "structure": {
                    "location": {
                        "location": {
                            "type": "location",
                            "options": [
                                "plane",
                            ]
                        }
                    },
                },
            },
        },
    }
    dapManager.setup(
        sys.modules[__name__],
        dapManagerConfig)
    dapManager.setDataModelEmbedder('data_model_searcher', 'data_model_table', 'data_model')
    # Three data-model fixtures: two weather variants and one book store.
    dm1 = query_pb2.Query.DataModel()
    dm1.name = "weather_data"
    dm1.description = "All possible weather data."
    dm1.attributes.extend([
        get_attr_b("wind_speed", "Provides wind speed measurements.", 0),
        get_attr_b("temperature", "Provides wind speed measurements.", 1),
        get_attr_b("air_pressure", "Provides wind speed measurements.", 2)
    ])
    dm2 = query_pb2.Query.DataModel()
    dm2.name = "more weather"
    dm2.description = "More weather"
    dm2.attributes.extend([
        get_attr_b("wind_speed", "Provides wind speed measurements.", 0),
        get_attr_b("temperature", "Provides wind speed measurements.", 1),
        get_attr_b("rain_fall", "Provides wind speed measurements.", 2)
    ])
    dm3 = query_pb2.Query.DataModel()
    dm3.name = "book_store_new"
    dm3.description = "Other bookstore"
    dm3.attributes.extend([
        get_attr_b("title", "The title of the book", 1),
        get_attr_b("author", "The author of the book", 3),
        get_attr_b("ISBN", "That code thing", 4),
        get_attr_b("price", "We will need a lot of money", 3),
        get_attr_b("count", "How many do we have", 0),
        get_attr_b("condition", "Our books are in the best condition", 0)
    ])
    engine = dapManager.getInstance('data_model_searcher')
    # NOTE(review): embed1-3 and lat/lon/count below are never used —
    # presumably leftovers from an earlier version of the test.
    embed1 = engine._dm_to_vec(dm1)
    embed2 = engine._dm_to_vec(dm2)
    embed3 = engine._dm_to_vec(dm3)
    lat = 0
    lon = 0
    count = 1
    # Register each agent twice: once with its location, once with its model.
    for agent, loc, model in [
        ("007/James/Bond", (51.477, -0.461), dm1),     # LHR, WEATHER
        ("White/Spy", (53.354, -2.275), dm2),          # MANCHESTER, WEATHER
        ("Black/Spy", (50.734, -3.414), dm3),          # EXETER, BOOKSHOP
        ("86/Maxwell/Smart", (40.640, -73.779), dm1),  # JFK, WEATHER
    ]:
        update = dap_update_pb2.DapUpdate()
        update.update.add()
        update.update[0].tablename = "location"
        update.update[0].fieldname = "location.update"
        update.update[0].key.core = b'localhost'
        update.update[0].key.agent = agent.encode("utf-8")
        update.update[0].value.l.coordinate_system = "latlon"
        update.update[0].value.l.unit = "deg"
        update.update[0].value.l.v.extend(loc)
        update.update[0].value.type = 9  # location value
        dapManager.update(update)
        # _createUpdate already adds a first row; this appends a second and
        # then overwrites row [0] with the data-model entry.
        update = self._createUpdate(agent)
        update.update.add()
        update.update[0].tablename = "data_model_table"
        update.update[0].fieldname = "data_model"
        update.update[0].value.type = 6  # data-model value
        update.update[0].value.dm.CopyFrom(model)
        self.dapManager.update(update)
    # Query: location within 150 km of BHX, model matching weather_data.
    qm = query_pb2.Query.Model()
    qAnd = qm.constraints.add()
    q1 = qAnd.and_.expr.add()
    q1.constraint.attribute_name = "location"
    q1.constraint.relation.op = 0
    q1.constraint.distance.center.lat = 52.454
    q1.constraint.distance.center.lon = -1.748  # BHX
    q1.constraint.distance.distance = 150 * 1000
    qm.model.CopyFrom(create_dm("weather_data", "All possible weather data.", [
        get_attr_b("wind_speed", "Provides wind speed measurements.", 0),
        get_attr_b("temperature", "Provides wind speed measurements.", 0),
        get_attr_b("air_pressure", "Provides wind speed measurements.", 0)
    ]))
    dapQuery = self.dapManager.makeQuery(qm)
    output = self.dapManager.execute(dapQuery)
    agents = sorted([
        result.agent
        for result in output.identifiers
    ])
    self.log.warning("agents={}".format(agents))
    # Only the two weather agents in range should match (not EXETER's
    # bookshop, not JFK which is out of range).
    assert(agents == [
        b"007/James/Bond",
        b"White/Spy",
    ])
def _createUpdate(self):
    """Return a minimal DapUpdate fixture: one row keyed only by core."""
    update = dap_update_pb2.DapUpdate()
    entry = update.update.add()
    entry.key.core = b"localhost"
    return update
def testFieldOptionsWork(self):
    """Check the 'plane' geo option: per-agent locations plus a core-level
    centroid row, then verify describe() exposes the option and
    getPlaneInformation returns the centroid."""
    dapManager = DapManager.DapManager()
    # Single geo DAP whose "wobbles" table stores plane locations.
    dapManagerConfig = {
        "geo": {
            "class": "DapGeo",
            "config": {
                "structure": {
                    "wobbles": {
                        "wobbles": {
                            "type": "location",
                            "options": [
                                "plane",
                            ]
                        }
                    },
                },
            },
        },
    }
    dapManager.setup(sys.modules[__name__], dapManagerConfig)
    lat = 0
    lon = 0
    count = 1
    # One location row per agent; lat/lon accumulate for the centroid below.
    for agent, loc in [
        ("007/James/Bond", (51.477, -0.461)),    # LHR
        ("White/Spy", (53.354, -2.275)),         # MANCHESTER
        ("Black/Spy", (50.734, -3.414)),         # EXETER
        ("86/Maxwell/Smart", (40.640, -73.779)), # JFK
    ]:
        update = dap_update_pb2.DapUpdate()
        update.update.add()
        update.update[0].tablename = "wobbles"
        update.update[0].fieldname = "location.update"
        update.update[0].key.core = b'localhost'
        update.update[0].key.agent = agent.encode("utf-8")
        update.update[0].value.l.coordinate_system = "latlon"
        update.update[0].value.l.unit = "deg"
        update.update[0].value.l.v.extend(loc)
        update.update[0].value.type = 9  # location value
        dapManager.update(update)
        lat += loc[0]
        lon += loc[1]
        count += 1
    # NOTE(review): count starts at 1, so this divides by 5 for 4 agents —
    # the expected (39, -15) assertion below bakes that in.
    lat /= count
    lon /= count
    # Core-level row (no agent key) holding the averaged position.
    update = dap_update_pb2.DapUpdate()
    update.update.add()
    update.update[0].tablename = "wobbles"
    update.update[0].fieldname = "wobbles.update"
    update.update[0].key.core = b'localhost'
    update.update[0].value.l.coordinate_system = "latlon"
    update.update[0].value.l.unit = "deg"
    update.update[0].value.l.v.extend([lat, lon])
    update.update[0].value.type = 9  # location value
    dapManager.update(update)
    # describe() must report the plane option on the location field.
    r = dapManager.getInstance("geo").describe()
    nameToField = {}
    for f in r.table[0].field:
        nameToField[f.name] = {
            "type": f.type,
            "options": f.options,
        }
    assert 'plane' in nameToField["wobbles.location"]["options"]
    assert nameToField["wobbles.location"]["type"] == "location"
    # Plane information should contain exactly the centroid row.
    r = dapManager.getPlaneInformation('location')
    assert r['field_name'] == 'wobbles.location'
    assert len(r['values']) == 1
    print(r['values'][0].value.l)
    assert r['values'][0].key.core == b'localhost'
    assert int(r['values'][0].value.l.v[1]) == -15
    assert int(r['values'][0].value.l.v[0]) == 39
def testFieldOptionsWork(self):
    """Check update routing: 'id' (declared on the annotations DAP) must not
    land in the lazy attribute store, while the undeclared 'foo' must."""
    dapManager = DapManager.DapManager()
    # Three DAPs: geo, a lazy catch-all attribute store, and an in-memory
    # table that explicitly declares 'name' and 'id'.
    dapManagerConfig = {
        "geo": {
            "class": "DapGeo",
            "config": {
                "structure": {
                    "wobbles": {
                        "location": {
                            "type": "location",
                            "options": [
                                "plane",
                            ]
                        }
                    },
                },
            },
        },
        "attrs": {
            "class": "DapAttributeStore",
            "config": {
                "structure": {},
            },
            "options": [
                "lazy",
            ],
        },
        "annotations": {
            "class": "InMemoryDap",
            "config": {
                "structure": {
                    "annotations": {
                        "name": "string",
                        "id": "int64",
                    },
                },
            },
        },
    }
    dapManager.setup(sys.modules[__name__], dapManagerConfig)
    # Per agent: location (unused here) and two attribute tuples; the first
    # ('id') should route to 'annotations', the second ('foo') to 'attrs'.
    DATA = [
        (
            "007/James/Bond",
            (51.477, -0.461),
            [('id', 1), ('foo', 1)],
        ),  # LHR
        (
            "White/Spy",
            (53.354, -2.275),
            [('id', 2), ('foo', 2)],
        ),  # MANCHESTER
        (
            "Black/Spy",
            (50.734, -3.414),
            [('id', 3), ('foo', 3)],
        ),  # EXETER
        (
            "86/Maxwell/Smart",
            (40.640, -73.779),
            [('id', 4), ('foo', 4)],
        ),  # JFK
    ]
    # NOTE(review): lat/lon/count and the commented block below are unused
    # leftovers from the geo variant of this test.
    lat = 0
    lon = 0
    count = 1
    #update = dap_update_pb2.DapUpdate()
    #update.update.add()
    #update.update[0].fieldname = "location"
    #update.update[0].key.core = b'localhost'
    #update.update[0].key.agent = agent.encode("utf-8")
    #update.update[0].value.l.lat = loc[0]
    #update.update[0].value.l.lon = loc[1]
    #update.update[0].value.type = 9
    #dapManager.update(update)
    # Send only the first tuple ('id', ...) for the first agent.
    for agent, loc, attrs in DATA:
        update = dap_update_pb2.DapUpdate()
        for (k, v) in attrs[0:1]:
            u = update.update.add()
            u.key.core = b'localhost'
            u.key.agent = agent.encode("utf-8")
            ProtoHelpers.populateUpdateTFV(u, k, v)
        dapManager.update(update)
        break
    # we're going to check that when the assignments to the "id"
    # field were routed, they did not go to the attribute store.
    assert dapManager.getInstance("attrs").table == {}
    # Now send the remaining tuple ('foo', ...) for the first agent.
    for agent, loc, attrs in DATA:
        update = dap_update_pb2.DapUpdate()
        for (k, v) in attrs[1:]:
            u = update.update.add()
            u.key.core = b'localhost'
            u.key.agent = agent.encode("utf-8")
            ProtoHelpers.populateUpdateTFV(u, k, v)
        dapManager.update(update)
        break
    # But "foo" should have been...
    assert dapManager.getInstance("attrs").table != {}