def test_commands():
    # An empty registry starts with no mappings.
    commands = Commands()
    assert len(commands.mapping) == 0

    # Round-trip the built-in commands through JSON serialization.
    commands = Commands.fromJSON(builtin_commands.toJSON())
    assert len(commands.mapping) == len(builtin_commands.mapping)
    for k, v in commands.mapping.items():
        assert k == v.name
        other = builtin_commands.mapping[k]
        assert not (v < other) and not (other < v)
    assert isinstance(commands.toJSON(), str)
    json.loads(commands.toJSON())

    # The sorted list view must agree with the mapping.
    sorted_commands = commands.asSortedList()
    assert len(sorted_commands) == commands.size()
    for c in sorted_commands:
        other = commands.mapping[c.name]
        assert not (c < other) and not (other < c)

    # Adding, duplicate detection, removal, and the registry size limit.
    assert commands.get('g') is not None
    assert commands.get('name') is None
    commands.add(get_dummy_command())
    assert commands.get('name') is not None
    with pytest.raises(ParamException):
        commands.add(get_dummy_command())
    with pytest.raises(ParamException):
        other_commands = Commands()
        for i in range(Commands.limit + 1):
            other_commands.add(get_dummy_command(name=str(i)))
    commands.remove('name')
    assert commands.get('name') is None

    # Query validation and redirect resolution.
    with pytest.raises(MalformedQueryException):
        commands.getRedirectUrl('')
    with pytest.raises(MalformedQueryException):
        commands.getRedirectUrl(' ')
    with pytest.raises(UnknownCommandException):
        commands.getRedirectUrl('unknown_command')
    with pytest.raises(UnknownDefaultCommandException):
        commands.getRedirectUrl('unknown_command', 'unknown_default_command')
    with pytest.raises(NotEnoughArgumentsException):
        commands.getRedirectUrl('cron *')

    # An explicit command wins over the default; the default is used when
    # the query names no known command.
    g_out = 'https://www.google.com/search?q=hello%20world'
    assert commands.getRedirectUrl('g hello world') == g_out
    assert commands.getRedirectUrl('g hello world', 'b') == g_out
    b_out = 'https://www.bing.com/search?q=hello%20world'
    assert commands.getRedirectUrl('hello world', 'b') == b_out
def write_influxdb_inverter_data(time, data):
    time = dates.from_string(time)
    data = json.loads(data)
    periodic_log('influxdb', 'writing inverter data...', 60)
    points = []
    active_count = 0
    found_producing = False
    for panel in data['inverters']:
        serial = panel['serialNumber']
        field_values = {}
        # Watts reported by the inverter, when present.
        if 'lastReportWatts' in panel:
            field_values['active_power'] = panel['lastReportWatts']
            field_values['active_power_max'] = panel['maxReportWatts']
            field_values['report_time'] = panel['lastReportDate']
        # Status flags only appear in inventory-style payloads.
        if 'producing' in panel:
            found_producing = True
            field_values['producing'] = (1 if panel['producing'] else 0)
            field_values['communicating'] = (1 if panel['communicating'] else 0)
            field_values['operating'] = (1 if panel['operating'] else 0)
            if panel['producing']:
                active_count += 1
        points.append(influxdb.point(INFLUXDB_INVERTER_MEAS, {'device': serial}, field_values, time))
    # Cache the active-inverter count so the meter task can correct readings.
    if found_producing:
        redis.set_state(redis.REDIS_ACTIVE_INVERTERS, active_count)
    if points:
        influxdb.write(INFLUXDB_INVERTER_MEAS, points)
    return True
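# A hedged sketch of the payload this task consumes, inferred from the keys
# read above; the serial number and wattage values are illustrative
# assumptions, not captured Envoy output.
def example_inverter_payload():
    return json.dumps({'inverters': [{
        'serialNumber': '121707050000',  # hypothetical serial
        'lastReportDate': 1585000000,    # epoch seconds
        'lastReportWatts': 215,
        'maxReportWatts': 290,
        'producing': True,
        'communicating': True,
        'operating': True,
    }]})

# write_influxdb_inverter_data(dates.to_string(dates.utcnow()), example_inverter_payload())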
def api_helper(url, method, params=None, data=None, headers=None, field=None):
    ret = None
    try:
        if params is not None:
            url = '%s?%s' % (url, urlencode(params))
        payload = None
        if method == POST and data is not None:
            payload = urlencode(data)
        if headers is None:
            headers = {}
        ret = fetch(
            url,
            method=method,
            payload=payload,
            headers=headers,
            validate_certificate=True,
        )
        if ret.status_code == 200:
            content = json.loads(ret.content)
            # Return a single field when requested, the full payload otherwise.
            if field is not None:
                return content.get(field)
            return content
        msg = api_error_msg(url, method, ret)
        logging.error('OAuth API non-200 response\n%s' % msg)
    except Exception:
        msg = api_error_msg(url, method, ret)
        logging.exception('OAuth API error\n%s' % msg)
    return None
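# A hedged usage sketch of api_helper. The endpoint URL, parameter names,
# and the 'access_token' field are illustrative assumptions; POST is
# presumed to be the urlfetch method constant used by fetch() above.
#
# token = api_helper(
#     'https://example.com/oauth/token',  # hypothetical endpoint
#     POST,
#     data={'grant_type': 'authorization_code', 'code': code},
#     headers={'Accept': 'application/json'},
#     field='access_token',
# )
#
# On any non-200 response or exception the helper logs and returns None,
# so callers only need a None check.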
def get_inverters():
    try:
        # The inventory endpoint carries per-device status flags.
        url = ENVOY_URL + '/inventory.json'
        logger.info('Getting data from [%s]...' % url)
        resp = requests.get(url, auth=HTTPDigestAuth(ENVOY_USERNAME, ENVOY_PASSWORD), timeout=9)
        devices = False
        if resp.status_code == 200:
            #Count active inverters
            devices = json.loads('{ "inverters": ' + resp.text + ' }')
        else:
            logger.error('Failed to get data from [%s]. Error code [%i]' % (url, resp.status_code))
        # The production endpoint carries the per-inverter power readings.
        url = ENVOY_URL + '/api/v1/production/inverters'
        logger.info('Getting data from [%s]...' % url)
        resp = requests.get(url, auth=HTTPDigestAuth(ENVOY_USERNAME, ENVOY_PASSWORD), timeout=9)
        if resp.status_code == 200:
            readings = json.loads('{ "readings": ' + resp.text + ' }')
            if devices:
                for device in devices['inverters'][0]['devices']:
                    #Match reading by serial number
                    for reading in readings['readings']:
                        if str(reading['serialNumber']) == device['serial_num']:
                            device['serialNumber'] = reading['serialNumber']
                            device['lastReportDate'] = reading['lastReportDate']
                            device['lastReportWatts'] = reading['lastReportWatts']
                            device['maxReportWatts'] = reading['maxReportWatts']
                            break
                inverters = devices['inverters'][0]['devices']
            else:
                # Inventory was unavailable; fall back to the raw readings,
                # which carry the serial and wattage fields the writer reads.
                inverters = readings['readings']
            data = '{ "inverters": %s }' % json.dumps(inverters)
            redis.set_state(REDIS_INVERTER_DATA, data)
            write_influxdb_inverter_data.delay(dates.to_string(dates.utcnow()), data)
        else:
            logger.error('Failed to insert new data. Error code [%i]' % resp.status_code)
    except Exception:
        logger.error('Failed to insert new data.')
        logger.exception('message')
def write_influxdb_meter_data(time, data):
    try:
        time = dates.from_string(time)
        data = json.loads(data)
        periodic_log('influxdb', 'writing meter data...', 60)
        points = []
        # One point per (element, phase) pair, e.g. production / ph-a.
        for rkey, root_ele in ELEMENTS.items():
            for phase_ele in PHASES:
                pkey = 'ph-%s' % phase_ele
                field_values = {}
                for mkey, meas_ele in MEASUREMENT_MAPPING.items():
                    field_values[meas_ele] = data[rkey][pkey][mkey]
                points.append(influxdb.point(INFLUXDB_METER_MEAS, {'device': 'house', 'phase': phase_ele, 'type': root_ele}, field_values, time))
        if points:
            #Write determined net values across all phases
            net_values = {}
            production_values = {}
            consumption_values = {}
            for mkey in ['p', 'q', 's']:
                meas_ele = MEASUREMENT_MAPPING[mkey]
                production = sum_val(data, 'production', mkey)
                consumption = sum_val(data, 'total-consumption', mkey)
                if mkey == 'p':
                    #Ignore error in meter readings when inverters aren't generating
                    active_inverters = redis.get_int(redis.REDIS_ACTIVE_INVERTERS, 1)
                    if active_inverters == 0 and production < 5.0:
                        production = 0.0
                net_values[meas_ele] = production - consumption
                production_values[meas_ele] = production
                consumption_values[meas_ele] = consumption
            points.append(influxdb.point(INFLUXDB_METER_MEAS, {'device': 'house', 'type': 'production', 'phase': 'all'}, production_values, time))
            points.append(influxdb.point(INFLUXDB_METER_MEAS, {'device': 'house', 'type': 'consumption', 'phase': 'all'}, consumption_values, time))
            points.append(influxdb.point(INFLUXDB_METER_MEAS, {'device': 'house', 'type': 'net', 'phase': 'all'}, net_values, time))
            influxdb.write(INFLUXDB_METER_MEAS, points)
    except Exception:
        logger.error('Failed to write meter data.')
        logger.exception('message')
        return False
    return True
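# sum_val is not shown in this section. A minimal sketch of what it
# presumably does, assuming the same data[rkey]['ph-<phase>'][mkey] layout
# the loop above reads (an assumption, not the actual helper):
def sum_val_sketch(data, rkey, mkey):
    # Sum one measurement (e.g. 'p' for active power) across all phases
    # of one element (e.g. 'production' or 'total-consumption').
    return sum(data[rkey]['ph-%s' % phase][mkey] for phase in PHASES)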
def wrapper(*args, **kwargs):
    dataset = func(*args, **kwargs)
    raw_dataframe = dataset['raw_dataframe']
    # Normalize the dataframe stats through a JSON round-trip.
    dataset['dataframe_info'] = json.loads(
        json.dumps(stats.extract_dataframe_info(raw_dataframe)))
    if 'feature_column_labels' not in dataset:
        dataset['feature_column_labels'] = raw_dataframe.columns.drop(
            dataset['target_column_label'])
    if 'MSE_baseline' not in dataset:
        # Baseline MSE of always predicting the mean of the target column.
        y = raw_dataframe[dataset['target_column_label']]
        dataset['MSE_baseline'] = ((y - y.mean())**2).mean().compute()
    per_column_statistic = dataset['dataframe_info']['per_column_statistic']
    # Assign random (but sorted) synthetic column ids and build per-column
    # metadata objects keyed by those ids.
    dataset['columns_info'] = {
        column_id: cloudsml.models.BaseDataTransformationColumn(
            id=column_id,
            name=column_name,
            statistics=per_column_statistic[column_name],
            data_type=per_column_statistic[column_name]['type'],
            data_format=per_column_statistic[column_name]['format'])
        for column_id, column_name in zip(
            sorted(random.sample(range(100000), len(raw_dataframe.columns))),
            dataset['dataframe_info']['columns'])
    }
    dataset['columns_info_by_name'] = {
        column.name: column for column in dataset['columns_info'].values()
    }
    # Rename dataframe columns from their labels to the synthetic ids.
    dataframe = raw_dataframe.rename(columns={
        column.name: column.id for column in dataset['columns_info'].values()
    })
    dataset['dataframe'] = dataframe
    dataset['target_column_id'] = dataset['columns_info_by_name'][
        dataset['target_column_label']].id
    dataset['feature_column_ids'] = dataset['dataframe'].columns.drop(
        dataset['target_column_id']).values.tolist()
    return dataset
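# wrapper reads like the inner function of a dataset-preparing decorator;
# a minimal sketch of the presumed surrounding shape. The decorator name
# and the example dataset function are assumptions, not the actual code.
import functools

def dataset_decorator(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        dataset = func(*args, **kwargs)
        # ... enrichment body as shown above ...
        return dataset
    return wrapper

# @dataset_decorator
# def my_dataset():
#     return {'raw_dataframe': load_dask_dataframe(),  # hypothetical loader
#             'target_column_label': 'target'}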
def _on_message(self, unused_channel, basic_deliver, properties, body):
    """
    Invoked by pika when a message is delivered from RabbitMQ. The channel
    is passed in. The basic_deliver object that is passed in carries the
    exchange, routing key, delivery tag, and a redelivered flag for the
    message. The properties passed in are an instance of BasicProperties
    with the message properties, and the body is the message that was sent.

    :param pika.channel.Channel unused_channel: The channel object
    :param pika.Spec.Basic.Deliver: basic_deliver method
    :param pika.Spec.BasicProperties: properties
    :param str|unicode body: The message body
    """
    print("== New Message ==")
    pprint.pprint(json.loads(body))

    # NOTE Rejecting the message:
    #LOGGER.info("REJECTING MESSAGE %s", basic_deliver.delivery_tag)
    #self._non_acknowledge_message(basic_deliver.delivery_tag)

    # NOTE Acknowledging the message:
    self._acknowledge_message(basic_deliver.delivery_tag)
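# A hedged sketch of how _on_message is typically registered as the
# consumer callback. This uses the pika >= 1.0 basic_consume signature;
# older pika releases took (consumer_callback, queue) instead. The queue
# name is an illustrative assumption.
#
# self._consumer_tag = self._channel.basic_consume(
#     queue='example_queue',
#     on_message_callback=self._on_message)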
def fromJSON(json_str):
    return Commands(Command(**d) for d in json.loads(json_str))
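# A small round-trip sketch matching the test above; the JSON field names
# ("name", "url") are illustrative assumptions about Command's kwargs:
#
# json_str = '[{"name": "g", "url": "https://www.google.com/search?q={}"}]'
# commands = Commands.fromJSON(json_str)
# assert commands.get('g') is not None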