def testPublishSubscribe(self):
    """Publish each test message asynchronously and wait for its response."""
    for message in MESSAGES:
        payload = PayloadObject((64, 0, 0, 0), None, message)
        self.bw_client.asyncPublish(
            URI, self.onPublishResponse, payload_objects=(payload, ))
        # Block until onPublishResponse releases the semaphore.
        self.semaphore.acquire()
def query(self, query, archiver="", timeout=DEFAULT_TIMEOUT):
    """
    Runs the given pundat query and returns the results as a Python object.

    Arguments:
    [query]: the query string
    [archiver]: if specified, this is the archiver to use. Else, it will run
        on the first archiver passed into the constructor for the client
    [timeout]: time in seconds to wait for a response from the archiver

    Raises:
    TimeoutException: if no response arrives within [timeout] seconds
    """
    if archiver == "":
        archiver = self.archivers[0]
    nonce = random.randint(0, 2**32)
    ev = threading.Event()
    response = {}

    def _handleresult(msg):
        # decode, throw away if not correct nonce
        got_response = False
        error = getError(nonce, msg)
        if error is not None:
            got_response = True
            response["error"] = error
        metadata = getMetadata(nonce, msg)
        if metadata is not None:
            got_response = True
            response["metadata"] = metadata
        timeseries = getTimeseries(nonce, msg)
        if timeseries is not None:
            got_response = True
            response["timeseries"] = timeseries
        if got_response:
            ev.set()

    vk = self.vk[:-1]  # remove last part of VK because archiver doesn't expect it
    # set up receiving; keep the handle so the subscription can be torn down
    handle = self.c.subscribe(
        "{0}/s.giles/_/i.archiver/signal/{1},queries".format(archiver, vk),
        _handleresult)
    # execute query
    q_struct = msgpack.packb({"Query": query, "Nonce": nonce})
    po = PayloadObject((2, 0, 8, 1), None, q_struct)
    self.c.publish("{0}/s.giles/_/i.archiver/slot/query".format(archiver),
                   payload_objects=(po, ))
    ev.wait(timeout)
    # BUGFIX: the subscription was never unsubscribed, leaking one handler
    # per call (the sibling do_query implementations all unsubscribe).
    self.c.unsubscribe(handle)
    if len(response) == 0:  # no results
        raise TimeoutException("Query of {0} timed out".format(query))
    return response
def send_message(msg):
    """
    msg has keys:
        current_demand
        current_price
        current_tier
        current_summation_delivered
        current_summation_received
    """
    packed = msgpack.packb(msg)
    payload = PayloadObject(PONUM, None, packed)
    c.publish(signaluri, payload_objects=(payload, ))
def cb(msg): po = msgpack.unpackb(msg.payload_objects[0].content) if not isinstance(po, dict): return client_id = msg.uri.split('/')[2] start = po.get('predstart') start = parse(start) if start else get_today() end = po.get('predend') end = parse(end) if end else get_today() + datetime.timedelta( days=1) resolution = po.get('resolution', '1h') result = prediction_fxn(start, end, resolution) po = PayloadObject((2, 0, 0, 0), None, msgpack.packb(result)) publish = '{0}/s.predictions/{1}/i.{2}/signal/response'.format( namespace, client_id, prediction_type) print "Respond on", publish c.publish(publish, payload_objects=(po, ))
def testListQuery(self):
    """Persist one probe per planet, then exercise async query and list."""
    for planet, probe in PERSISTED_DATA.items():
        payload = PayloadObject((64, 0, 0, 0), None, probe)
        target = BASE_URI + "/persisted/" + planet
        self.bw_client.asyncPublish(target, self.assertOkay,
                                    payload_objects=(payload, ),
                                    persist=True)
    self.bw_client.asyncQuery(BASE_URI + "/persisted/+", self.assertOkay,
                              self.onMessage)
    self.semaphore.acquire()
    self.counter = 0
    self.bw_client.asyncList(BASE_URI + "/persisted", self.assertOkay,
                             self.onListResult)
    self.semaphore.acquire()
def testListQuery(self):
    """Persist one probe per planet, then verify query and list results."""
    for planet, probe in PERSISTED_DATA.items():
        payload = PayloadObject((64, 0, 0, 0), None, probe)
        target = BASE_URI + "/persisted/" + planet
        self.bw_client.publish(target, payload_objects=(payload, ),
                               persist=True)

    results = self.bw_client.query(BASE_URI + "/persisted/+")
    self.assertEquals(len(results), len(PERSISTED_DATA))
    probes = [r.payload_objects[0].content for r in results]
    self.assertTrue(all(p in PERSISTED_DATA.values() for p in probes))

    children = self.bw_client.list(BASE_URI + "/persisted")
    self.assertEquals(len(children), len(PERSISTED_DATA))
    planets = [c[c.rfind("/") + 1:] for c in children]
    self.assertTrue(all(p in PERSISTED_DATA.keys() for p in planets))
def do_query(self, query, timeout=DEFAULT_TIMEOUT, values_only=True):
    """
    Runs the given HodDB query and returns the decoded result dict.

    Arguments:
    [query]: the query string
    [timeout]: seconds to wait for a response
    [values_only]: if True, flatten each result row to its "Value" fields

    Raises:
    Exception: if the server reports an error for this query
    TimeoutException: if no response arrives within [timeout] seconds
    """
    nonce = str(random.randint(0, 2**32))
    ev = threading.Event()
    response = {}

    def _handleresult(msg):
        got_response = False
        for po in msg.payload_objects:
            if po.type_dotted != (2, 0, 10, 2):
                continue
            data = msgpack.unpackb(po.content)
            # decode, throw away if not correct nonce
            if data["Nonce"] != nonce:
                continue
            data.pop("Nonce")
            err = data.pop("Error", None)
            if err is not None and len(err) > 0:
                # BUGFIX: the original raised here, on the subscription
                # callback thread, so the caller never saw the error and
                # `ev` was never set (misreported as a timeout). Record
                # the error and re-raise from the calling thread below.
                response["error"] = err
            else:
                for k, v in data.items():
                    response[k] = v
                if values_only and response.get("Count", 0) > 0:
                    response["Rows"] = [{k: v["Value"] for k, v in r.items()}
                                        for r in response["Rows"]]
            got_response = True
        if got_response:
            ev.set()

    h = self.c.subscribe(
        "{0}/s.hod/_/i.hod/signal/result".format(self.url), _handleresult)
    q_struct = msgpack.packb({"Query": query, "Nonce": nonce})
    po = PayloadObject((2, 0, 10, 1), None, q_struct)
    self.c.publish("{0}/s.hod/_/i.hod/slot/query".format(self.url),
                   payload_objects=(po, ))
    ev.wait(timeout)
    self.c.unsubscribe(h)
    if "error" in response:
        raise Exception(response["error"])
    if len(response) == 0:  # no results
        raise TimeoutException("Query of {0} timed out".format(query))
    return response
def publish_schedule(self, heating_setpt, cooling_setpt, override, mode, fan): assert isinstance(heating_setpt, float) assert isinstance(cooling_setpt, float) assert isinstance(override, bool) assert isinstance(mode, int) assert isinstance(fan, bool) t = { 'heating_setpoint': heating_setpt, 'cooling_setpoint': cooling_setpt, 'override': override, 'mode': mode, 'fan': fan } po = PayloadObject((2, 1, 1, 0), None, msgpack.packb(t)) print t self.bw_client.publish(self.signal, payload_objects=(po, ))
def do_query(self, query, timeout=DEFAULT_TIMEOUT, namespace="scratch.ns"):
    """
    Sends an MDAL query and decodes the response into pandas structures.

    Arguments:
    [query]: the MDAL query structure (msgpack-serializable dict)
    [timeout]: seconds to wait for a response
    [namespace]: BOSSWAVE namespace hosting the MDAL service. GENERALIZED:
        this was previously hard-coded to "scratch.ns"; the default keeps
        existing callers working.

    Returns a dict: {'df': DataFrame} when the response carries a shared
    time index, otherwise {uuid: Series, ...}. Returns an empty dict if no
    response arrives before the timeout.
    """
    # NOTE(review): no nonce is attached to the request (see the disabled
    # line below), so any concurrent response on this signal is accepted —
    # confirm whether the MDAL service echoes nonces before re-enabling.
    # nonce = str(random.randint(0, 2**32))
    ev = threading.Event()
    response = {}

    def _handleresult(msg):
        got_response = False
        for po in msg.payload_objects:
            if po.type_dotted != (2, 0, 10, 4):
                continue
            data = msgpack.unpackb(po.content)
            uuids = [uuid.UUID(bytes=x) for x in data['Rows']]
            data = data_capnp.StreamCollection.from_bytes_packed(data['Data'])
            if hasattr(data, 'times'):
                # All streams share one time index: build a single DataFrame.
                times = list(data.times)
                df = pd.DataFrame(index=pd.to_datetime(times, unit='ns'))
                for idx, s in enumerate(data.streams):
                    df[uuids[idx]] = s.values
                response['df'] = df
                got_response = True
            else:
                # Each stream carries (or lacks) its own time index.
                for idx, s in enumerate(data.streams):
                    if hasattr(s, 'times'):
                        series = pd.Series(
                            s.values, pd.to_datetime(s.times, unit='ns'))
                    else:
                        series = pd.Series(s.values)
                    response[uuids[idx]] = series
                    got_response = True
        if got_response:
            ev.set()

    h = self.c.subscribe(
        "{0}/s.mdal/_/i.mdal/signal/{1}".format(namespace, self.vk[:-1]),
        _handleresult)
    po = PayloadObject((2, 0, 10, 3), None, msgpack.packb(query))
    self.c.publish("{0}/s.mdal/_/i.mdal/slot/query".format(namespace),
                   payload_objects=(po, ))
    ev.wait(timeout)
    self.c.unsubscribe(h)
    return response
def write(self, state):
    """Serialize [state] with msgpack and publish it on the state slot."""
    packed = msgpack.packb(state)
    payload = PayloadObject((2, 1, 2, 1), None, packed)
    uri = '{0}/slot/state'.format(self._uri)
    self.client.publish(uri, payload_objects=(payload,))
def testPublishFailure(self):
    """Publishing to a URI without permissions must raise RuntimeError."""
    with self.assertRaises(RuntimeError):
        # Unit test key should not have permissions on this URI
        payload = PayloadObject(ponames.PODFText, None, "Hello, World!")
        self.bw_client.publish("jkolb/test", payload_objects=(payload, ))
def test_file(filename): obj_name = os.path.basename(filename).split(".")[0] img = Image.open(filename) width, height = img.size if height/width == 480/640: width, height = 640, 480 elif width/height == 480/640: width, height = 480, 640 else: print "wrong image aspect ratio" return RESULT_BAD_FORMAT, 0 for orientation in ExifTags.TAGS.keys(): if ExifTags.TAGS[orientation]=='Orientation': break if not img._getexif(): print "No EXIF" return RESULT_BAD_FORMAT, 0 exif = dict(img._getexif().items()) if exif[orientation] == 3: img = img.rotate(180, expand=True) elif exif[orientation] == 6: img = img.rotate(270, expand=True) width, height = height, width elif exif[orientation] == 8: img = img.rotate(90, expand=True) width, height = height, width img.thumbnail((width, height), Image.ANTIALIAS) img = img.convert('L') # convert to grayscale img = np.array(img) # convert from PIL image to OpenCV image _, jpg = cv2.imencode('.jpg', img) print filename, 'width:', width, 'height:', height #modification from here identity = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(IDENTITY_LENGTH)) done = [] def onMessage(bw_message): done += [1] assert(len(bw_message.payload_objects)==1) for po in bw_message.payload_objects: if po.type_dotted == ponames.PODFText: done += [po.content] bw_client.subscribe(DEFAULT_CHANNEL + "/" + identity, onMessage) contents = jpg.tostring() width = str(width) height = str(height) fx = str(562.25) fy = str(562.25) cx = str(240) cy = str(320) po_header = PayloadObject(ponames.PODFText, None, "Cellmate Image") po_identity = PayloadObject(ponames.PODFText, None, identity) po_contents = PayloadObject(ponames.PODFText, None, contents) po_height = PayloadObject(ponames.PODFText, None, height) po_width = PayloadObject(ponames.PODFText, None, width) po_fx = PayloadObject(ponames.PODFText, None, fx) po_fy = PayloadObject(ponames.PODFText, None, fy) po_cx = PayloadObject(ponames.PODFText, None, cx) po_cy = PayloadObject(ponames.PODFText, 
None, cy) t0 = time.time() bw_client.publish("scratch.ns/tongli", payload_objects=(po_header,po_identity ,po_contents,po_height,po_width,po_fx,po_fy,po_cx,po_cy)) while len(done)==0: time.sleep(10) t1 = time.time() elapsed_time = round((t1 - t0)*1000, 2) assert(len(done) == 2) if done[1] != obj_name: text = "test failed. response = {0}, obj = {1}, elapsed time = {2} milliseconds".format(r.text, obj_name, elapsed_time) print text return RESULT_FAIL, elapsed_time else: print "test passed. response = {0}, obj = {1}, elapsed time = {2} milliseconds".format(r.text, obj_name, elapsed_time) return RESULT_PASS, elapsed_time return RESULT_PASS, elapsed_time
def publish(self, uri, ponum, msg):
    """Marshal [msg] for payload type [ponum] and publish it on [uri]."""
    payload = PayloadObject(ponum, None, marshal(ponum, msg))
    super(Client, self).publish(uri, payload_objects=(payload, ))
def do_query(self, query, timeout=DEFAULT_TIMEOUT, tz=pytz.timezone("US/Pacific")):
    """
    Sends an MDAL query, waits for the reply, and returns {'df': DataFrame}.

    Arguments:
    [query]: MDAL query dict (see structure below); a 'Nonce' key is added
        in place so replies can be matched to this request.
    [timeout]: seconds to wait for a response before giving up.
    [tz]: timezone the returned DataFrame index is converted to.

    Raises:
    Exception: if the MDAL service reports an error for this query.

    Query structure is as follows:
    query = {
        # We bind UUIDs found as the result of a Brick query to a variable name
        # that we can use later. Each variable definition has the following:
        # - name: how we will refer to this group of UUIDs
        # - definition: a Brick query. The SELECT clause should return variables
        #   that end in '_uuid', which can be found as the object of a 'bf:uuid'
        #   relationship
        # - units: what units we want to retrieve this stream as. Currently
        #   supports W/kW, Wh/kWh, F/C, Lux
        "Variables": [
            {"Name": "meter",
             "Definition": "SELECT ?meter_uuid WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Electric_Meter . ?meter bf:uuid ?meter_uuid . };",
             "Units": "kW",
            },
            {"Name": "temp",
             "Definition": "SELECT ?temp_uuid WHERE { ?temp rdf:type/rdfs:subClassOf* brick:Temperature_Sensor . ?temp bf:uuid ?temp_uuid . };",
             "Units": "F",
            },
        ],
        # this is the composition of the data matrix we are returning. Below,
        # all the uuids for the "meter" variable will be placed before all of
        # the uuids for the "temp" variable. We cannot guarantee order of uuids
        # within those groups, but the ordering of the groups will be preserved.
        # Explicit UUIDs can also be used here
        "Composition": ["meter", "temp"],
        # If we are retrieving statistical data, then we need to say which
        # statistical elements we want to download. The options are RAW, MEAN,
        # MIN, MAX and COUNT. To query multiple, you can OR them together
        # (e.g. MEAN|MAX). This maps 1-1 to the "Composition" field
        "Selectors": [MEAN, MEAN],
        # Temporal parameters for the query. Retrieves data in the range
        # [T0, T1]. By convention, T0 < T1, but MDAL will take care of it if
        # this is reversed. WindowSize is the size of the resample window in
        # nanoseconds. If Aligned is true, then MDAL will snap all data to the
        # beginning of the window (e.g. if 5min window + Aligned=true, then all
        # timestamps will be on 00:05:00, 00:10:00, 00:15:00, etc)
        "Time": {
            "T0": "2017-08-01 00:00:00",
            "T1": "2017-08-08 00:00:00",
            "WindowSize": '2h',
            "Aligned": True,
        },
    }
    """
    # Random nonce stored in the query itself; replies echo it back.
    nonce = str(random.randint(0, 2**32))
    query['Nonce'] = nonce
    ev = threading.Event()
    response = {}

    def _handleresult(msg):
        # Runs on the subscription thread; fills `response` and sets `ev`.
        got_response = False
        for po in msg.payload_objects:
            # (2, 0, 10, 4) is the MDAL result payload type.
            if po.type_dotted != (2, 0, 10, 4):
                continue
            data = msgpack.unpackb(po.content)
            # Skip replies to other requests.
            if data['Nonce'] != query['Nonce']:
                continue
            if 'error' in data:
                # Record the server-side error; raised by the caller below.
                response['error'] = data['error']
                response['df'] = None
                got_response = True
                continue
            # Column names are the stringified stream UUIDs, in row order.
            uuids = [str(uuid.UUID(bytes=x)) for x in data['Rows']]
            data = data_capnp.StreamCollection.from_bytes_packed(
                data['Data'])
            if hasattr(data, 'times') and len(data.times):
                # All streams share one time index.
                times = list(data.times)
                if len(times) == 0:
                    # NOTE(review): unreachable given the len(data.times)
                    # check above; kept as a defensive guard.
                    response['df'] = pd.DataFrame(columns=uuids)
                    got_response = True
                    break
                df = pd.DataFrame(
                    index=pd.to_datetime(times, unit='ns', utc=False))
                for idx, s in enumerate(data.streams):
                    df[uuids[idx]] = s.values
                # Timestamps arrive as UTC nanoseconds; convert to [tz].
                df.index = df.index.tz_localize(pytz.utc).tz_convert(tz)
                response['df'] = df
                got_response = True
            else:
                # Each stream carries its own time index; outer-join them
                # into one frame column by column.
                df = pd.DataFrame()
                for idx, s in enumerate(data.streams):
                    if hasattr(s, 'times'):
                        newdf = pd.DataFrame(list(s.values),
                                             index=list(s.times),
                                             columns=[uuids[idx]])
                        newdf.index = pd.to_datetime(
                            newdf.index,
                            unit='ns').tz_localize(pytz.utc).tz_convert(tz)
                        df = df.join(newdf, how='outer')
                    else:
                        raise Exception("Does this ever happen? Tell gabe!")
                response['df'] = df
                got_response = True
            # Drop duplicate timestamps, keeping the first occurrence.
            df = response.get('df')
            if df is not None:
                response['df'] = df[~df.index.duplicated(keep='first')]
        if got_response:
            ev.set()

    h = self.c.subscribe(
        "{0}/s.mdal/_/i.mdal/signal/{1}".format(self.url, self.vk[:-1]),
        _handleresult)
    # (2, 0, 10, 3) is the MDAL query payload type.
    po = PayloadObject((2, 0, 10, 3), None, msgpack.packb(query))
    self.c.publish("{0}/s.mdal/_/i.mdal/slot/query".format(self.url),
                   payload_objects=(po, ))
    ev.wait(timeout)
    self.c.unsubscribe(h)
    if 'error' in response:
        raise Exception(response['error'])
    return response