def format_multientry_response(self, location, response, service, timezone):
    """
    Used to extract the data not used by the RPC method, and store it in
    the cache, helping to limit the number of API calls used to obtain data
    :param location: location dictionary to include with cached data
    :param response: Darksky forecast response
    :param service: weather agent service endpoint
    :param timezone: timezone string extracted from Darksky response
    :return: formatted response data by service
    """
    data = []
    generation_time = self.get_generation_time_for_service(service)
    for entry in response['data']:
        entry_time = datetime.datetime.fromtimestamp(
            entry['time'], pytz.timezone(timezone))
        entry_time = entry_time.astimezone(pytz.utc)
        if entry_time > utils.get_aware_utc_now():
            if SERVICES_MAPPING[service]['type'] == 'forecast':
                data.append([jsonapi.dumps(location), generation_time,
                             entry_time, jsonapi.dumps(entry)])
            else:
                data.append([jsonapi.dumps(location), entry_time,
                             jsonapi.dumps(entry)])
    return data

def update_status(self, status, context=None):
    """
    Updates the internal state of the `Status` object.

    Raises ValueError if the context is not JSON serializable or if the
    status parameter is not within the ACCEPTABLE_STATUS tuple.

    :param status: new status value; must be one of ACCEPTABLE_STATUS
    :param context: JSON-serializable context stored alongside the status
    :return:
    """
    if status not in ACCEPTABLE_STATUS:
        raise ValueError('Invalid status value {}'.format(status))
    try:
        jsonapi.dumps(context)
    except TypeError:
        raise ValueError('Context must be JSON serializable.')

    status_changed = status != self._status
    self._status = status
    self._context = context
    self._last_updated = format_timestamp(get_aware_utc_now())

    if status_changed and self._status_changed_callback:
        self._status_changed_callback()

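
# Hypothetical, standalone sketch of the validation pattern used by
# update_status() above: a membership check against an accepted tuple plus a
# JSON-serializability probe before any state is mutated. The status values
# and the stdlib `json` module are assumptions for illustration; the real
# agent uses its own ACCEPTABLE_STATUS tuple and the platform jsonapi wrapper.
import json

ACCEPTABLE_STATUS_EXAMPLE = ('GOOD', 'BAD', 'UNKNOWN')  # assumed values


def validate_status_update(status, context=None):
    if status not in ACCEPTABLE_STATUS_EXAMPLE:
        raise ValueError('Invalid status value {}'.format(status))
    try:
        json.dumps(context)
    except TypeError:
        raise ValueError('Context must be JSON serializable.')
    return status, context

# Usage:
#   validate_status_update('GOOD', {'note': 'all publishes healthy'})  # ok
#   validate_status_update('GOOD', {'bad': object()})  # raises ValueError
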
def t_heat(self, data):
    ''' Sets heating setpoint'''
    msg = {"tmode": 1, "t_heat": data}
    value = jsonapi.dumps(msg).encode()
    try:
        mode = urllib.request.urlopen(self.urladdress, value)
        parsed = jsonapi.loadb(mode.read())
        return jsonapi.dumps(parsed)
    except Exception as parsed:
        return parsed

def hold(self, data):
    ''' Sets hold controls'''
    msg = {"hold": data}
    value = jsonapi.dumps(msg).encode()
    try:
        mode = urllib.request.urlopen(self.urladdress, value)
        parsed = jsonapi.loadb(mode.read())
        return jsonapi.dumps(parsed)
    except Exception as parsed:
        return parsed

def _handle_external_rpc_subsystem(self, message):
    ret_msg = dict()
    operation = message.args[0]
    rpc_msg = message.args[1]  # jsonapi.loads(message.args[1])
    try:
        method_args = rpc_msg["args"]
        # message.args = [method_args]
        message.args = method_args
        for idx, msg in enumerate(message.args):
            if isinstance(msg, str):
                message.args[idx] = jsonapi.loads(msg)
        dispatch = self._dispatcher.dispatch
        # _log.debug("External RPC IN message args {}".format(message))
        responses = [
            response
            for response in (dispatch(msg, message) for msg in message.args)
            if response
        ]
        # _log.debug("External RPC Responses {}".format(responses))
        if responses:
            message.user = ""
            try:
                message.peer = ""
                message.subsystem = "external_rpc"
                frames = []
                operation = "send_platform"
                frames.append(operation)
                msg = jsonapi.dumps(
                    dict(
                        to_platform=rpc_msg["from_platform"],
                        to_peer=rpc_msg["from_peer"],
                        from_platform=rpc_msg["to_platform"],
                        from_peer=rpc_msg["to_peer"],
                        args=responses,
                    ))
                frames.append(msg)
            except KeyError:
                _log.error("External RPC message did not contain "
                           "proper message format")
            message.args = jsonapi.dumps(ret_msg)
            try:
                self.core().connection.send_vip(
                    peer="",
                    subsystem="external_rpc",
                    args=frames,
                    msg_id=message.id,
                    user=message.user,
                    copy=False,
                )
            except ZMQError as ex:
                _log.error("ZMQ error: {}".format(ex))
                pass
    except KeyError:
        pass

def mode(self, data):
    ''' Sets operating mode'''
    msg = {"tmode": data}
    value = jsonapi.dumps(msg).encode()
    try:
        mode = urllib.request.urlopen(self.urladdress, value)
        parsed = jsonapi.loadb(mode.read())
        return jsonapi.dumps(parsed)
    except Exception as parsed:
        return parsed

def energy_led(self, data):
    ''' Controls energy led, possible values: 0,1,2,4'''
    url = self.urladdress + "/led"
    msg = {"energy_led": int(data)}
    value = jsonapi.dumps(msg).encode()
    try:
        mode = urllib.request.urlopen(url, value)
        parsed = jsonapi.loadb(mode.read())
        return jsonapi.dumps(parsed)
    except Exception as parsed:
        return parsed

def create_forecast_entry(self, service, location, timestamp, forecast_start):
    """
    Helper method used for removing extraneous data from a forecast request
    response based on request time
    :param service: weather agent service endpoint
    :param location: request location dictionary
    :param timestamp: timestamp for the forecast request. If None, the
    default forecast results are returned - a minute-by-minute forecast for
    the next hour (where available), an hour-by-hour forecast for the next
    48 hours, or a day-by-day forecast for the next week
    :param forecast_start: earliest timestamp for which forecast entries
    should be kept
    :return: (the last time stamp for which forecast is returned, filtered
    Dark Sky forecast response)
    """
    darksky_response = self.get_darksky_data(service, location, timestamp)
    forecast_response = darksky_response.pop(
        SERVICES_MAPPING[service]['json_name'])
    forecast_data = []
    last_entry_time = None
    for entry in forecast_response['data']:
        entry_time = datetime.datetime.fromtimestamp(
            entry['time'], pytz.timezone(darksky_response['timezone']))
        entry_time = entry_time.astimezone(pytz.utc)
        if entry_time < forecast_start:
            continue
        if timestamp and entry_time < timestamp:
            continue
        else:
            # Darksky required attribution
            entry["attribution"] = "Powered by Dark Sky"
            forecast_data.append([format_timestamp(entry_time), entry])
            last_entry_time = entry_time
    if not self.performance_mode:
        # if performance mode isn't running we'll be receiving extra data
        # that we can store to help with conserving daily api calls
        for service_code in SERVICES_MAPPING:
            if service_code != service and \
                    SERVICES_MAPPING[service_code]['json_name'] in \
                    darksky_response:
                service_response = darksky_response.pop(
                    SERVICES_MAPPING[service_code]['json_name'])
                if SERVICES_MAPPING[service_code]['type'] != 'current':
                    service_data = self.format_multientry_response(
                        location, service_response, service_code,
                        darksky_response['timezone'])
                else:
                    service_data = \
                        [jsonapi.dumps(location),
                         datetime.datetime.fromtimestamp(
                             service_response['time'],
                             pytz.timezone(darksky_response['timezone'])),
                         jsonapi.dumps(service_response)]
                self.store_weather_records(service_code, service_data)
    return last_entry_time, forecast_data

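
# Hypothetical, stdlib-only sketch of the filtering step in
# create_forecast_entry() above: convert each entry's epoch timestamp to an
# aware UTC datetime and keep only entries at or after forecast_start. The
# field names mirror the Dark Sky response; pytz is replaced here by
# datetime.timezone.utc purely for illustration.
from datetime import datetime, timedelta, timezone


def filter_forecast(entries, forecast_start):
    kept = []
    for entry in entries:
        entry_time = datetime.fromtimestamp(entry['time'], tz=timezone.utc)
        if entry_time < forecast_start:
            continue
        kept.append((entry_time.isoformat(), entry))
    return kept

# Usage:
now = datetime.now(tz=timezone.utc)
sample = [{'time': int((now + timedelta(hours=h)).timestamp()),
           'temperature': 70 + h} for h in (-2, -1, 1, 2)]
print(filter_forecast(sample, forecast_start=now))  # drops the two past entries
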
def t_setpoint(self, data, point, tmode=''):
    ''' Sets a temperature setpoint (heating or cooling) for the given point'''
    if tmode == '':
        msg = {point: data}
    else:
        msg = {"tmode": tmode, point: data}
    value = jsonapi.dumps(msg).encode()
    try:
        mode = urllib.request.urlopen(self.urladdress, value)
        parsed = jsonapi.loadb(mode.read())
        return jsonapi.dumps(parsed)
    except Exception as parsed:
        return parsed

def _handle_external_rpc_subsystem(self, message):
    ret_msg = dict()
    # _log.debug("EXT_RPC subsystem handler IN message {0}".format(message))
    op = message.args[0]
    rpc_msg = message.args[1]  # jsonapi.loads(message.args[1])
    try:
        # _log.debug("EXT_RPC subsystem handler IN message {0}, {1}".format(message.peer, rpc_msg))
        method_args = rpc_msg['args']
        # message.args = [method_args]
        message.args = method_args
        for idx, msg in enumerate(message.args):
            if isinstance(msg, str):
                message.args[idx] = jsonapi.loads(msg)
        dispatch = self._dispatcher.dispatch
        # _log.debug("External RPC IN message args {}".format(message))
        responses = [response for response in
                     (dispatch(msg, message) for msg in message.args)
                     if response]
        # _log.debug("External RPC Responses {}".format(responses))
        if responses:
            message.user = ''
            try:
                message.peer = ''
                message.subsystem = 'external_rpc'
                frames = []
                op = 'send_platform'
                frames.append(op)
                msg = jsonapi.dumps(dict(to_platform=rpc_msg['from_platform'],
                                         to_peer=rpc_msg['from_peer'],
                                         from_platform=rpc_msg['to_platform'],
                                         from_peer=rpc_msg['to_peer'],
                                         args=responses))
                frames.append(msg)
            except KeyError:
                _log.error("External RPC message did not contain proper "
                           "message format")
            message.args = jsonapi.dumps(ret_msg)
            # _log.debug("EXT_RPC subsystem handler OUT message {}".format(message))
            try:
                self.core().connection.send_vip(peer='',
                                                subsystem='external_rpc',
                                                args=frames,
                                                msg_id=message.id,
                                                user=message.user,
                                                copy=False)
            except ZMQError as ex:
                _log.error("ZMQ error: {}".format(ex))
                pass
    except KeyError:
        pass

def _send_to_platform(self, frames):
    """
    Send frames to external platform
    :param frames: frames following VIP format
    :return:
    """
    try:
        # Extract the frames and reorganize to add external platform and
        # peer information
        sender, recipient, proto, usr_id, msg_id, subsystem, op, msg = \
            frames[:8]
        # msg_data = jsonapi.loads(msg)
        msg_data = msg
        to_platform = msg_data['to_platform']

        msg_data['from_platform'] = self._ext_router.my_instance_name()
        msg_data['from_peer'] = sender
        msg = jsonapi.dumps(msg_data)

        op = 'send_peer'
        frames = ['', proto, usr_id, msg_id, subsystem, op, msg]
        # _log.debug("ROUTER: Sending external RPC message to: {}".format(to_platform))
        # Use external socket to send the message
        self._ext_router.send_external(to_platform, frames)
        return False
    except KeyError as exc:
        _log.error(
            "Missing instance name in external RPC message: {}".format(exc))
    except IndexError:
        _log.error("Invalid EXT RPC message")

def serialize_frames(data: List[Any]) -> List[Frame]:
    frames = []
    for x in data:
        try:
            if isinstance(x, list) or isinstance(x, dict):
                # Containers are JSON encoded before framing
                frames.append(Frame(jsonapi.dumps(x).encode(ENCODE_FORMAT)))
            elif isinstance(x, Frame):
                frames.append(x)
            elif isinstance(x, bytes):
                frames.append(Frame(x))
            elif isinstance(x, bool):
                frames.append(struct.pack("?", x))
            elif isinstance(x, int):
                frames.append(struct.pack("I", x))
            elif isinstance(x, float):
                frames.append(struct.pack("f", x))
            elif x is None:
                frames.append(Frame(x))
            else:
                # Fall back to treating the value as a string
                frames.append(Frame(x.encode(ENCODE_FORMAT)))
        except TypeError:
            import sys
            sys.exit(0)
        except AttributeError:
            import sys
            sys.exit(0)
    return frames

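
# Hypothetical, stdlib-only sketch of the per-type encoding used by
# serialize_frames() above: containers become JSON bytes, while bools, ints
# and floats are packed with struct. zmq.Frame is omitted here; plain bytes
# stand in for frames purely for illustration.
import json
import struct


def encode_value(x) -> bytes:
    if isinstance(x, (list, dict)):
        return json.dumps(x).encode('utf-8')
    if isinstance(x, bytes):
        return x
    if isinstance(x, bool):          # bool must be checked before int
        return struct.pack("?", x)   # 1 byte
    if isinstance(x, int):
        return struct.pack("I", x)   # 4 bytes, unsigned
    if isinstance(x, float):
        return struct.pack("f", x)   # 4 bytes, single precision
    return str(x).encode('utf-8')

# Usage:
print([encode_value(v) for v in [{'a': 1}, True, 42, 3.14, 'hello']])
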
def test_upgrade_file_verison_0_to_1_2_minimum_entries(tmpdir_factory):
    """The only required field in 'version 0' was credentials"""
    mechanism = "CURVE"
    publickey = "A" * 43
    version0 = {
        "allow": [{"credentials": mechanism + ":" + publickey}],
    }
    filename = str(tmpdir_factory.mktemp('auth_test').join('auth.json'))
    with open(filename, 'w') as fp:
        fp.write(jsonapi.dumps(version0, indent=2))

    upgraded = AuthFile(filename)
    entries = upgraded.read()[0]
    assert len(entries) == 1
    assert entries[0].user_id is not None

    expected = version0['allow'][0]
    expected["credentials"] = publickey
    expected["mechanism"] = mechanism
    expected["domain"] = None
    expected["address"] = None
    expected["user_id"] = entries[0].user_id  # this will be a UUID
    expected["enabled"] = True
    expected["comments"] = None
    expected["capabilities"] = {
        'edit_config_store': {'identity': entries[0].user_id}
    }
    expected["roles"] = []
    expected["groups"] = []
    assert_auth_entries_same(expected, vars(entries[0]))

def test_store_delete_configuration(auto_registered_local):
    data = dict(bim=50, baz="foo", bar="lambda")
    str_data = jsonapi.dumps(data)
    identity = "foo.bar"
    config_name = "fuzzywidgets"
    webapi = auto_registered_local

    gevent.sleep(5)
    platforms = webapi.list_platforms()
    platform_uuid = platforms[0]["uuid"]

    resp = webapi.store_agent_config(platform_uuid, identity, config_name,
                                     str_data)
    assert resp is None

    resp = webapi.list_agent_configs(platform_uuid, identity)
    assert config_name == resp[0]

    resp = webapi.get_agent_config(platform_uuid, identity, config_name)
    assert str_data == resp

    resp = webapi.delete_agent_config(platform_uuid, identity, config_name)
    assert '' == resp

    resp = webapi.list_agent_configs(platform_uuid, identity)
    for res in resp:
        assert config_name != res

def _on_platform_message(self, peer, sender, bus, topic, headers, message):
    """
    Callback function for vcp agent to publish to.

    Platforms that are being managed should publish to this topic with
    the agent_list and other interesting things that volttron central
    would want to know.
    """
    self._log.debug('ON PLATFORM MESSAGE! {}'.format(message))
    expected_prefix = "platforms/{}/".format(self.vip_identity)

    if not topic.startswith(expected_prefix):
        self._log.warn(
            "Unexpected topic published to stats function: {}".format(topic))
        return

    self._log.debug("TOPIC WAS: {}".format(topic))
    self._log.debug("MESSAGE WAS: {}".format(message))
    self._log.debug("Expected topic: {}".format(expected_prefix))
    self._log.debug("Are Equal: {}".format(
        topic.startswith(expected_prefix)))
    self._log.debug("topic type: {} prefix_type: {}".format(
        type(topic), type(expected_prefix)))

    # Pull off the "real" topic from the prefix
    # topic = topic[len(expected_prefix):]

    topicsplit = topic.split('/')
    if len(topicsplit) < 2:
        self._log.error('Invalid topic length published to volttron central')
        return

    # Topic is platforms/<platform_uuid>/otherdata
    topicsplit = topic.split('/')

    if len(topicsplit) < 3:
        self._log.warn("Invalid topic length no operation or datatype.")
        self._log.warn("Topic was {}".format(topic))
        return

    _, platform_uuid, op_or_datatype, other = topicsplit[0], \
        topicsplit[1], topicsplit[2], topicsplit[3:]

    if op_or_datatype in ('iam', 'configure'):
        if not other:
            self._log.error(
                "Invalid response to iam or configure endpoint")
            self._log.error(
                "the session token was not included in response from vcp.")
            return

        ws_endpoint = "/vc/ws/{}/{}".format(other[0], op_or_datatype)
        self._log.debug('SENDING MESSAGE TO {}'.format(ws_endpoint))
        self._vc.vip.web.send(ws_endpoint, jsonapi.dumps(message))
    else:
        self._log.debug("OP WAS: {}".format(op_or_datatype))

def store_config(self, identity, config_name, contents,
                 trigger_callback=False, send_update=True):
    config_type = None
    raw_data = None
    if isinstance(contents, (dict, list)):
        config_type = 'json'
        raw_data = jsonapi.dumps(contents)
    elif isinstance(contents, str):
        config_type = 'raw'
        raw_data = contents
    else:
        raise ValueError(
            "Unsupported configuration content type: {}".format(
                str(type(contents))))

    self._add_config_to_store(identity, config_name, raw_data, contents,
                              config_type,
                              trigger_callback=trigger_callback,
                              send_update=send_update)

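
# Hypothetical, standalone sketch of the content-type detection performed by
# store_config() above: dicts and lists are serialized to JSON and stored as
# type 'json', strings pass through as type 'raw', and anything else is
# rejected. The stdlib json module stands in for the platform jsonapi wrapper.
import json


def classify_config(contents):
    if isinstance(contents, (dict, list)):
        return 'json', json.dumps(contents)
    if isinstance(contents, str):
        return 'raw', contents
    raise ValueError(
        "Unsupported configuration content type: {}".format(type(contents)))

# Usage:
print(classify_config({'interval': 60}))   # ('json', '{"interval": 60}')
print(classify_config("campus/building"))  # ('raw', 'campus/building')
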
def send_management_message(self, type, data=None):
    """
    Send a message to any socket that has connected to the management
    socket.

    The payload sent to the client is like the following::

        {
            "type": "UPDATE_DEVICE_STATUS",
            "data": "this is data that was passed"
        }

    :param type: A string defining a unique type for sending to the
        websockets.
    :param data: An object that str can be called on.

    :type type: str
    :type data: serializable
    """
    management_sockets = [s for s in self._websocket_endpoints
                          if s.endswith("management")]
    # Nothing to send if we don't have any management sockets open.
    if len(management_sockets) <= 0:
        return

    if data is None:
        data = {}

    payload = dict(type=type, data=str(data))
    payload = jsonapi.dumps(payload)
    for s in management_sockets:
        self.vip.web.send(s, payload)

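
# Hypothetical, standalone sketch showing the payload shape documented for
# send_management_message() above: a JSON object with "type" and a
# stringified "data" field. stdlib json stands in for jsonapi; the type and
# data values are made up.
import json

payload = json.dumps(dict(type="UPDATE_DEVICE_STATUS",
                          data=str({"status": "GOOD"})))
print(payload)  # {"type": "UPDATE_DEVICE_STATUS", "data": "{'status': 'GOOD'}"}
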
def send_vip_object_via_proxy(self, vip_object):
    """
    Send the VIP object to proxy router agent
    :param vip_object: VIP message
    :return:
    """
    rkey = self._instance_name + '.proxy.router.zmq.outbound.subsystem'
    msg_id = getattr(vip_object, 'id', '')
    user = getattr(vip_object, 'user', '')

    # Reformat the message into ZMQ VIP message frames
    # VIP format - [TO, FROM, PROTO, USER_ID, MSG_ID, SUBSYS, ARGS...]
    frames = [vip_object.peer, self._identity, 'VIP1', user, msg_id,
              vip_object.subsystem]
    for arg in vip_object.args:
        frames.append(arg)

    try:
        # Publish to proxy router agent
        self.channel.basic_publish(exchange=self.exchange,
                                   routing_key=rkey,
                                   body=jsonapi.dumps(frames))
    except (pika.exceptions.AMQPConnectionError,
            pika.exceptions.AMQPChannelError) as exc:
        raise Unreachable(errno.EHOSTUNREACH,
                          "Connection to RabbitMQ is lost",
                          'rabbitmq broker', 'rmq_connection')

def send_via_proxy(self, peer, subsystem, args=None, msg_id='', user='',
                   via=None, flags=0, copy=False, track=False):
    rkey = self._instance_name + '.proxy.router.zmq.outbound.subsystem'
    # Reformat the message into ZMQ VIP message frames
    # VIP format - [TO, FROM, PROTO, USER_ID, MSG_ID, SUBSYS, ARGS...]
    frames = [peer, self._identity, 'VIP1', user, msg_id, subsystem]
    for arg in args:
        frames.append(arg)

    try:
        # Publish to proxy router agent
        self.channel.basic_publish(exchange=self.exchange,
                                   routing_key=rkey,
                                   body=jsonapi.dumps(frames))
    except (pika.exceptions.AMQPConnectionError,
            pika.exceptions.AMQPChannelError) as exc:
        raise Unreachable(errno.EHOSTUNREACH,
                          "Connection to RabbitMQ is lost",
                          'rabbitmq broker', 'rmq_connection')

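
# Hypothetical, stdlib-only sketch of the ZMQ VIP frame layout that
# send_via_proxy() above publishes to the proxy router over RabbitMQ:
# [TO, FROM, PROTO, USER_ID, MSG_ID, SUBSYS, ARGS...], serialized as a JSON
# array for the message body. The peer and identity names are made up.
import json


def build_vip_frames(peer, identity, subsystem, args, msg_id='', user=''):
    frames = [peer, identity, 'VIP1', user, msg_id, subsystem]
    frames.extend(args)
    return json.dumps(frames)

# Usage:
body = build_vip_frames(peer='platform.driver', identity='proxy.agent',
                        subsystem='RPC', args=['{"method": "get_point"}'])
print(body)
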
def _set_override_off(self, pattern):
    """Turn off override condition on all devices matching the pattern.

    It removes the pattern from the override patterns set, clears the list
    of overridden devices and reevaluates the state of devices. It then
    cancels the pending override event and removes pattern from the config
    store.

    :param pattern: Override pattern to be removed.
    :type pattern: str
    """
    # If pattern exactly matches
    if pattern in self._override_patterns:
        self._override_patterns.discard(pattern)
        # Cancel any pending override events
        self._cancel_override_events(pattern)
        self._override_devices.clear()
        patterns = dict()
        # Build override devices list again
        for pat in self._override_patterns:
            for device in self.instances:
                if fnmatch.fnmatch(device, pat):
                    self._override_devices.add(device)

            if self._override_interval_events[pat] is None:
                patterns[pat] = str(0.0)
            else:
                evt, end_time = self._override_interval_events[pat]
                patterns[pat] = utils.format_timestamp(end_time)

        self.vip.config.set("override_patterns", jsonapi.dumps(patterns))
    else:
        _log.error("Override Pattern did not match!")
        raise OverrideError(
            "Pattern {} does not exist in list of override patterns".format(
                pattern))

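
# Hypothetical, stdlib-only sketch of the bookkeeping performed by
# _set_override_off() above: rebuild the overridden-device set from the
# remaining patterns with fnmatch and serialize the pattern-to-end-time map
# to JSON for the config store. The device topics and pattern are made up.
import fnmatch
import json


def rebuild_overrides(patterns, devices):
    overridden = set()
    pattern_map = {}
    for pat in patterns:
        overridden.update(d for d in devices if fnmatch.fnmatch(d, pat))
        pattern_map[pat] = str(0.0)  # 0.0 stands in for an indefinite override
    return overridden, json.dumps(pattern_map)

# Usage:
devices = ['campus/building/ahu1', 'campus/building/ahu2', 'campus/lab/vav1']
print(rebuild_overrides({'campus/building/*'}, devices))
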
def save_agent_remote_info(self, directory, local_keyname, remote_cert_name,
                           remote_cert, remote_ca_name, remote_ca_cert):
    """
    Save the remote info file, remote certificates and remote ca to the
    proper place in the remote_certificate directory.

    :param directory: directory in which the remote certificates and
        metadata are stored
    :param local_keyname: identity of the local agent connected to the
        local message bus
    :param remote_cert_name: identity of the dynamic agent connected to
        the remote message bus
    :param remote_cert: certificate returned from the remote instance
    :param remote_ca_name: name of the remote ca
    :param remote_ca_cert: certificate of the remote ca
    """
    try:
        self.save_remote_cert(remote_cert_name, remote_cert, directory)
        self.save_remote_cert(remote_ca_name, remote_ca_cert, directory)
        self.create_requests_ca_bundle(directory)

        metadata = dict(remote_ca_name=remote_ca_name,
                        local_keyname=local_keyname)
        metafile = os.path.join(directory, remote_cert_name + ".json")
        with open(metafile, 'w') as fp:
            fp.write(jsonapi.dumps(metadata))

        # Change group+other permissions to read only
        for root, dirs, files in os.walk(directory):
            for f in files:
                os.chmod(os.path.join(root, f), 0o644)
    except Exception as e:
        _log.error(f"Error saving agent remote cert info. Exception:{e}")
        raise e

def serialize_frames(data: List[Any]) -> List[Frame]:
    frames = []
    # _log.info("Serializing: {}".format(data))
    for x in data:
        try:
            if isinstance(x, list) or isinstance(x, dict):
                frames.append(Frame(jsonapi.dumps(x).encode('utf-8')))
            elif isinstance(x, Frame):
                frames.append(x)
            elif isinstance(x, bytes):
                frames.append(Frame(x))
            elif isinstance(x, bool):
                frames.append(struct.pack("?", x))
            elif isinstance(x, int):
                frames.append(struct.pack("I", x))
            elif isinstance(x, float):
                frames.append(struct.pack("f", x))
            elif x is None:
                frames.append(Frame(x))
            else:
                # _log.info("serialize_frames:{}".format(x))
                frames.append(Frame(x.encode('utf-8')))
        except TypeError:
            import sys
            sys.exit(0)
        except AttributeError:
            import sys
            sys.exit(0)
    return frames

def get_thermostat_data(self, refresh=False):
    """
    Collects most up to date thermostat object data for the configured
    Ecobee thermostat ID
    :param refresh: whether or not to force obtaining new data from the
    remote Ecobee API
    """
    params = {
        "json": jsonapi.dumps({
            "selection": {
                "selectionType": "thermostats",
                "selectionMatch": self.ecobee_id,
                "includeSensors": True,
                "includeRuntime": True,
                "includeEvents": True,
                "includeEquipmentStatus": True,
                "includeSettings": True
            }
        })
    }
    headers = populate_thermostat_headers(self.access_token)
    self.thermostat_data = self.get_ecobee_data("GET", THERMOSTAT_URL, 180,
                                                refresh=refresh,
                                                headers=headers,
                                                params=params)

def multi_messagebus_vc_vcp(volttron_multi_messagebus):
    vcp_instance, vc_instance = volttron_multi_messagebus()
    assert vcp_instance.instance_name != vc_instance.instance_name
    # Handles both connections to zmq as well as connections to rmq bus.
    vc_instance.allow_all_connections()

    vcp_uuid = add_volttron_central_platform(vcp_instance)
    vc_uuid = add_volttron_central(vc_instance)
    assert vcp_uuid
    assert vc_uuid
    print("VC LIST AGENTS: {}".format(vc_instance.list_agents()))
    print("VCP LIST AGENTS: {}".format(vcp_instance.list_agents()))

    # Update vcp_config store to add the volttron-central-address from vc
    # to the config store
    config = jsonapi.dumps(
        {'volttron-central-address': vc_instance.bind_web_address})
    # capabilities = {'edit_config_store': {'identity': VOLTTRON_CENTRAL_PLATFORM}}
    # vcp_instance.add_capabilities(vcp_instance.dynamic_agent.core.publickey, capabilities)
    vcp_instance.dynamic_agent.vip.rpc.call(CONFIGURATION_STORE,
                                            "manage_store",
                                            VOLTTRON_CENTRAL_PLATFORM,
                                            "config",
                                            config,
                                            "json").get()
    # "manage_store", opts.identity, opts.name, file_contents,
    # config_type = opts.config_type

    yield vcp_instance, vc_instance, vcp_uuid

    vcp_instance.remove_agent(vcp_uuid)
    vc_instance.remove_agent(vc_uuid)

def set_heat_pgm(self, schedules, day=''):
    """
    Set the heat program for a week or a specific day.
    day = {'mon','tue','wed','thu','fri','sat','sun'}

    For a specific day, say 'thu':

    .. code-block:: python

        t.set_heat_pgm('360,80,480,80,1080,80,1320,80', 'thu')

    For a week:

    .. code-block:: python

        t.set_heat_pgm('{
            "0": [360, 66, 480, 58, 1080, 66, 1320, 58],
            "1": [360, 70, 480, 70, 1080, 70, 1320, 70],
            "2": [360, 66, 480, 58, 1080, 66, 1320, 58],
            "3": [360, 66, 480, 58, 1080, 66, 1320, 58],
            "4": [360, 66, 480, 58, 1080, 66, 1320, 58],
            "5": [360, 66, 480, 58, 1080, 66, 1320, 58],
            "6": [360, 66, 480, 58, 1080, 66, 1320, 58]
        }')
    """
    schedule = str(schedules)
    if day == '':
        url = self.urladdress + "/program/heat"
        try:
            mode = urllib.request.urlopen(
                url, jsonapi.dumps(schedules).encode())
            parsed = jsonapi.loadb(mode.read())
            return jsonapi.dumps(parsed)
        except Exception as parsed:
            return parsed
    else:
        url = self.urladdress + "/program/heat/" + str(day)
        try:
            schedule_str = {
                str(self.day_num[day]): [int(e) if e.isdigit() else e
                                         for e in schedule.split(',')]}
            mode = urllib.request.urlopen(
                url, jsonapi.dumps(schedule_str).encode())
            parsed = jsonapi.loadb(mode.read())
            return jsonapi.dumps(parsed)
        except Exception as parsed:
            return parsed

def _http_put_request(self, url_suffix, body=None, ssl_auth=True):
    if body:
        return self._call_grequest('put', url_suffix, ssl_auth,
                                   data=jsonapi.dumps(body))
    else:
        return self._call_grequest('put', url_suffix, ssl_auth)

def format(self, record):
    dct = record.__dict__.copy()
    dct["msg"] = record.getMessage()
    dct.pop('args')
    exc_info = dct.pop('exc_info', None)
    if exc_info:
        dct['exc_text'] = ''.join(traceback.format_exception(*exc_info))
    return jsonapi.dumps(dct)

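
# Hypothetical, stdlib-only sketch showing the JSON log formatter above in
# use: the same format() body is dropped into a logging.Formatter subclass
# and attached to a stream handler, so every record is emitted as one JSON
# object. stdlib json stands in for the platform jsonapi wrapper.
import json
import logging
import traceback


class JsonFormatter(logging.Formatter):
    def format(self, record):
        dct = record.__dict__.copy()
        dct["msg"] = record.getMessage()
        dct.pop('args')
        exc_info = dct.pop('exc_info', None)
        if exc_info:
            dct['exc_text'] = ''.join(traceback.format_exception(*exc_info))
        return json.dumps(dct, default=str)

# Usage:
handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter())
log = logging.getLogger("json_demo")
log.addHandler(handler)
log.setLevel(logging.INFO)
log.info("agent %s started", "listener")  # emitted as a single JSON object
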
def forward(self, peer, sender, bus, topic, headers, message):
    headers = Headers(headers)
    headers['VIP.peer'] = encode_peer(peer)
    headers['VIP.sender'] = encode_peer(sender)
    headers['VIP.bus'] = bus
    parts = [topic]
    if message is not None:
        if 'Content-Type' in headers:
            if isinstance(message, list):
                parts.extend(message)
            else:
                parts.append(message)
        else:
            parts.append(jsonapi.dumps(message))
            headers['Content-Type'] = 'application/json'
    parts.insert(1, jsonapi.dumps(headers.dict))
    self.out_sock.send_multipart(parts)

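
# Hypothetical, stdlib-only sketch of how forward() above assembles the
# multipart message: topic first, then the JSON-encoded headers inserted at
# position 1, then the JSON-encoded payload. The branch that passes
# pre-encoded payloads through when Content-Type is already set is omitted
# here; header keys mirror the VIP.* names above and the values are made up.
import json


def build_parts(topic, headers, message):
    parts = [topic]
    if message is not None:
        parts.append(json.dumps(message))
        headers['Content-Type'] = 'application/json'
    parts.insert(1, json.dumps(headers))
    return parts

# Usage:
print(build_parts('devices/campus/building/all',
                  {'VIP.peer': 'pubsub', 'VIP.bus': ''},
                  [{'temp': 72.5}, {'temp': {'units': 'F'}}]))
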
def insert_topic(self, topic, **kwargs):
    meta = kwargs.get('metadata')
    with self.cursor() as cursor:
        if self.meta_table == self.topics_table and topic and meta:
            cursor.execute(self.insert_topic_and_meta_query(),
                           (topic, jsonapi.dumps(meta)))
        else:
            cursor.execute(self.insert_topic_query(), {'topic': topic})
        return cursor.fetchone()[0]

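
# Hypothetical, stdlib-only sketch of the topic insert above using an
# in-memory sqlite3 database: when metadata is present it is stored as a
# JSON string alongside the topic, and the new row id is returned. The table
# and column names are made up for illustration.
import json
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE topics "
             "(topic_id INTEGER PRIMARY KEY, topic_name TEXT, metadata TEXT)")


def insert_topic_example(topic, metadata=None):
    cur = conn.execute(
        "INSERT INTO topics (topic_name, metadata) VALUES (?, ?)",
        (topic, json.dumps(metadata) if metadata else None))
    return cur.lastrowid

# Usage:
print(insert_topic_example("devices/campus/building/all", {"units": "F"}))  # 1
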
def model(self):
    ''' Returns device model'''
    address = self.address + "/model"
    try:
        mode = urllib.request.urlopen(address)
        parsed = jsonapi.loadb(mode.read())
        return jsonapi.dumps(parsed)
    except Exception as parsed:
        return parsed