def _probe(self) -> Optional[List[ValueSet]]:
    """Download yesterday's AppStore sales report and convert it to metrics.

    :return: None when the report is not yet published (the API has ~1 day
        latency) or the report has no data rows; otherwise a list with the
        per-SKU unit counts and a meta ValueSet carrying the report timestamp.
    """
    self.log.info('Downloading AppStore report...')
    report_file = os.path.join(self._db_dir, 'report_' + self._vendor + '.csv')

    # The latest report is always 1 day old
    report_date = datetime.today() - timedelta(days=1)
    api = Api(self._key_id, self._key_file, self._issuer_id)
    try:
        api.download_sales_and_trends_reports(filters={
            'vendorNumber': self._vendor,
            'frequency': 'DAILY',
            'reportDate': report_date.strftime('%Y-%m-%d')
        }, save_to=report_file)
    except APIError as e:
        if 'is not available yet' in str(e):
            # Ignore as the report is simply not yet available
            return None
        # Bare re-raise keeps the original traceback intact
        raise

    with open(report_file, encoding='utf-8') as file:
        lines = file.readlines()
    if len(lines) < 2:
        # Header only (or empty file) - no sales data
        return None

    sku_map = {}  # type: Dict[str, SkuMetrics]
    headline = lines[0].split('\t')
    sku_col = self._find_column('SKU', headline)
    prod_type_col = self._find_column('Product Type Identifier', headline)
    units_col = self._find_column('Units', headline)
    # Currently unused, but the lookup validates the report format
    dev_proceeds_col = self._find_column('Developer Proceeds', headline)
    for line in lines[1:]:
        row = line.split('\t')
        sku = row[sku_col]
        if sku not in sku_map:
            sku_map[sku] = SkuMetrics(sku)
        sku_metrics = sku_map[sku]
        units = int(row[units_col])
        if ProdTypeIds.is_download(row[prod_type_col]) or \
                ProdTypeIds.is_iap(row[prod_type_col]):
            # Only increment the unit counter if the
            # item was a download or IAP
            sku_metrics.units += units

    # Now create the metrics
    data = ValueSet(labels=['sku'])
    for sku, item in sku_map.items():
        data.add(Value(item.units, label_values=[sku], name='units'))

    meta_data = ValueSet()
    meta_data.add(Value(int(report_date.timestamp()), name='latestUpdate'))
    return [data, meta_data]
def _probe(self):
    """Fetch the latest crash CSVs and emit per-app crash/ANR counts.

    Returns the crash data plus a meta ValueSet with the newest entry time.
    """
    self._gcs.download_latest()
    crash_reports = self._gcs.get_latest_crashes()

    data = ValueSet()
    data.labels = ['versionCode', 'type']

    # The API has ~1 day latency - if no data has been found for > 3 days we
    # assume there were no crashes since google does not add new entries to
    # the csv if there are no crashes
    stale_cutoff = datetime.today() - timedelta(days=3)
    newest_ts = 0
    for app_name, entries in crash_reports.items():
        for entry in entries:
            if entry.date < stale_cutoff:
                entry.crashes = 0
                entry.anrs = 0
            newest_ts = max(newest_ts, entry.date.timestamp())
            version = str(entry.app_version)
            data.add(Value(entry.crashes,
                           label_values=[version, 'crash'],
                           name=app_name))
            data.add(Value(entry.anrs,
                           label_values=[version, 'anr'],
                           name=app_name))

    meta_data = ValueSet()
    meta_data.add(Value(int(newest_ts), name='latestUpdate'))
    return [data, meta_data]
def _probe(self) -> List[ValueSet]:
    """Read inverter base registers and per-phase AC voltages.

    :return: A base ValueSet (temperature, frequency, power, DC values) and
        a phase-labelled ValueSet with the AC voltage of phases 1-3.
    """
    base_set = ValueSet()
    # Table-driven reads instead of five copy-pasted add() calls
    base_registers = (
        ('temp', SmaRegisters.REG_TEMP),
        ('frequency', SmaRegisters.REG_FREQUENCY),
        ('power', SmaRegisters.REG_POWER_EFFECTIVE_SUM),
        ('dc_voltage', SmaRegisters.REG_DC_INPUT_VOLTAGE),
        ('dc_current', SmaRegisters.REG_DC_INPUT_CURRENT),
    )
    for metric_name, register in base_registers:
        base_set.add(
            Value(self._sma.read(register).get_as_base_unit(),
                  name=metric_name))

    phase_set = ValueSet(labels=['phase'])
    phase_voltages = [
        SmaRegisters.REG_VOLTAGE_L1,
        SmaRegisters.REG_VOLTAGE_L2,
        SmaRegisters.REG_VOLTAGE_L3
    ]
    # enumerate(..., start=1) yields the 1-based phase number directly
    for phase, register in enumerate(phase_voltages, start=1):
        phase_set.add(
            Value(self._sma.read(register).get_as_base_unit(),
                  name='ac_voltage',
                  label_values=[str(phase)]))
    return [base_set, phase_set]
def _probe(self):
    """Probe all configured metrics.

    Plain metrics report the sum of their values. Metrics with
    ``mode == 'rate'`` report a per-second delta; the first run only records
    a baseline and emits nothing.
    """
    data = ValueSet()
    for metric in self.metrics:
        name = metric['name']
        values = self._probe_metric(metric)
        if metric.get('mode') != 'rate':
            # Regular value - just blindly sum the values
            data.add(Value(sum(value.value for value in values), name=name))
            continue

        # Sample the clock once so the delta computation and the baseline
        # refresh use the exact same timestamp (the original re-sampled,
        # skewing subsequent rates)
        now = time.time()
        last_probe = self._last_probe.get(name)
        if last_probe is None:
            # 1st run - remember the baseline, nothing to report yet
            self._last_probe[name] = ProbeValue(now, values)
            continue

        # > 1st run - create a rate value for each value and sum them afterwards
        # This is required to handle the overflow of each value correctly
        time_delta = now - last_probe.time
        delta_sum = sum(value.get_delta(last.value)
                        for value, last in zip(values, last_probe.data))
        if time_delta > 0:
            # Guard against a zero interval (would raise ZeroDivisionError)
            data.add(Value(delta_sum / time_delta, name=name))

        # Refresh the baseline
        last_probe.time = now
        last_probe.data = values
    return data
def _probe(self) -> Optional[ValueSet] or List[ValueSet]:
    """Collect temperature/humidity readings for every META (room) group."""
    values = ValueSet(labels=['room'])
    self._home.get_current_state()
    for group in self._home.groups:
        if group.groupType != 'META':
            continue
        room_name = self._escape_labels(group.label)
        for device in group.devices:
            if not isinstance(device, TemperatureHumiditySensorDisplay):
                continue
            # device.lastStatusUpdate would be available here as well
            readings = (
                ('temperature_target', device.setPointTemperature),
                ('temperature', device.actualTemperature),
                ('humidity', device.humidity),
            )
            for metric_name, reading in readings:
                values.add(
                    Value(reading,
                          label_values=[room_name],
                          name=metric_name))
    return values
def _probe(self) -> Optional[ValueSet] or List[ValueSet]:
    """Report the client count and per-wifi traffic stats of the first AP."""
    ap_data = self._get('/data/monitor.ap.aplist.json?operation=load')
    first_ap = ap_data['data'][0]
    mac = first_ap['MAC']

    clients_set = ValueSet()
    clients_set.add(Value(first_ap['StaNum'], name='total_clients'))

    wifi_set = ValueSet(labels=['wifi', 'direction'])
    # The stats endpoint expects the MAC with URL-encoded colons
    url_safe_mac = mac.replace(':', '%3A')
    self._update_wifi_stats(wifi_set, '2ghz', 'WIFI0', url_safe_mac)
    self._update_wifi_stats(wifi_set, '5ghz', 'WIFI1', url_safe_mac)
    return [clients_set, wifi_set]
def _probe(self) -> Optional[ValueSet] or List[ValueSet]:
    """Flush the cached meter readings and normalize their metric names."""
    value_set = ValueSet(labels=['phase'])
    flushed = self._cache.flush_values()
    value_set.values.extend(flushed)
    if not flushed:
        self.log.warning('No data received from meter')
    # Lowercase and strip spaces so the names are valid metric identifiers
    for value in value_set.values:
        value.name = value.name.lower().replace(' ', '')
    return value_set
def _probe(self):
    """Report the 1/5/15 minute load averages (linux only, else None)."""
    if not OSEnv.is_linux():
        return None
    data = ValueSet(labels=['time'])
    for label, load in zip(('short', 'mid', 'long'), os.getloadavg()):
        data.add(Value(load, label_values=[label]))
    return data
def _probe(self):
    """Report virtual memory usage (total/available/used, plus cached on linux)."""
    memory = psutil.virtual_memory()
    data = ValueSet(labels=['type'])
    metrics = [
        ('total', memory.total),
        ('available', memory.available),
        ('used', memory.used),
    ]
    if OSEnv.is_linux():
        # The "cached" counter is only available on linux
        metrics.append(('cached', memory.cached))
    for label, amount in metrics:
        data.add(Value(amount, label_values=[label]))
    return data
def _probe(self):
    """Collect stats for every matching process plus the total match count."""
    data = ValueSet(labels=['pidx'])
    match_count = 0
    for proc in psutil.process_iter():
        if self._matches(proc):
            self._collect(match_count, proc, data)
            match_count += 1
    data.add(Value(match_count, label_values=['all'], name='process_count'))
    return data
def _probe(self):
    """Measure the HTTP response time of self.url in milliseconds.

    On any failure the configured timeout is reported instead, converted
    to milliseconds so it uses the same unit as the measured duration
    (consistent with the TCP probe, which reports ``timeout * 1000``).
    """
    data = ValueSet()
    try:
        start = time.time() * 1000
        Helper.get_url(self.url,
                       timeout=self.timeout,
                       expected_status=self.status_code)
        end = time.time() * 1000
        data.add(Value(int(end - start)))
    except Exception as e:
        self.log.error('Could not probe ' + str(e))
        # self.timeout is in seconds - convert to ms to match the success path
        data.add(Value(self.timeout * 1000))
    return data
def _probe(self):
    """Log in to the Audi connect API and report status values for self.vin.

    :return: A ValueSet for the matching vehicle, or None when the VIN is
        not among the account's vehicles.
    """
    api = API()
    logon_service = LogonService(api)
    if not logon_service.restore_token():
        # We need to login - read credentials with an explicit encoding so
        # the result does not depend on the system locale
        with open(self.cred_file, encoding='utf-8') as data_file:
            login_data = json.load(data_file)
        logon_service.login(login_data['user'], login_data['pass'])

    car_service = CarService(api)
    vehicles_response = car_service.get_vehicles()
    data = ValueSet()
    for vehicle in vehicles_response.vehicles:
        if self.vin != vehicle.vin:
            continue
        status_report_service = VehicleStatusReportService(api, vehicle)
        report = status_report_service.get_stored_vehicle_data()
        assert isinstance(report, VehicleDataResponse)
        for field in report.data_fields:
            if field.name == 'UTC_TIME_AND_KILOMETER_STATUS':
                data.add(Value(int(field.value), name='kilometers'))  # int in km
                continue
            if field.name == 'TEMPERATURE_OUTSIDE':
                # deci-Kelvin -> degrees Celsius
                # NOTE(review): 'temp.outsite' looks like a typo of
                # 'temp.outside' but is kept for metric-name compatibility
                data.add(
                    Value(int(int(field.value) / 10 - 273),
                          name='temp.outsite'))
                continue
            if field.name == 'TOTAL_RANGE':
                data.add(Value(int(field.value), name='range.total'))  # int in km
                continue
            if field.name == 'TANK_LEVEL_IN_PERCENTAGE':
                data.add(Value(int(field.value), name='tankLevel'))  # int in %
                continue
            if field.name == 'OIL_LEVEL_DIPSTICKS_PERCENTAGE':
                data.add(Value(float(field.value), name='oilLevelPercent'))  # in %
                continue
            if field.name == 'ADBLUE_RANGE':
                data.add(Value(int(field.value), name='range.adblue'))  # int in km
                continue
        return data
    print('VIN not found')
    return None
def _probe(self):
    """Report total/used/free bytes for every configured disk."""
    data = ValueSet(['disk', 'type'])
    for disk in self.disks:
        disk_name = self.sanitize_disk_name(disk)
        try:
            usage = shutil.disk_usage(disk)
        except FileNotFoundError:
            # Mount point is gone - skip it instead of failing the probe
            continue
        for kind, amount in (('total', usage.total),
                             ('used', usage.used),
                             ('free', usage.free)):
            data.add(Value(amount, label_values=[disk_name, kind]))
    return data
def _probe(self):
    """Measure the TCP connect time to host:port in milliseconds.

    Reports ``timeout * 1000`` when the connection attempt times out.
    """
    data = ValueSet()
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.sock.settimeout(self.timeout)
    try:
        start = time.time() * 1000
        self.sock.connect((self.host, self.port))
        end = time.time() * 1000
        self.sock.shutdown(socket.SHUT_RDWR)
        data.add(Value(int(end - start)))
    except socket.timeout:
        data.add(Value(self.timeout * 1000))
    finally:
        # Always release the socket - the previous version leaked it on
        # timeout (and on any unexpected exception)
        self.sock.close()
    return data
def probe(self):
    """
    Probes the data from the probe_call and returns the mapped data.

    For every accepted interface each mapped source counter is reported as
    a raw total and - from the second run onwards - as a per-second rate
    derived from the previous probe's counters.

    :return: Mapped data
    :rtype: dict(str, int)
    """
    probe_data = self._probe_call()
    data = ValueSet(labels=[self._key_name])
    for if_name, stats in probe_data.items():
        # Apply the include/exclude filters before doing any work
        if not Helper.accept(self.include, self.exclude, if_name):
            continue
        # Convert namedtuple to dict
        stats = stats._asdict()
        if_name = if_name.replace('.', '_')
        # Swap the cached counters: remember last run, store current run
        last_stats = self._stats.get(if_name)
        self._stats[if_name] = stats
        for source, dest in self._data_map.items():
            # Destination names may be overridden per source; otherwise
            # fall back to the 'total_<source>' / '<source>_sec' defaults
            if 'total' in dest:
                total_name = dest['total']
            else:
                total_name = 'total_' + source
            if 'drv' in dest:
                derive_name = dest['drv']
            else:
                derive_name = source + '_sec'
            if total_name is not None:
                # Total counters might be disabled by setting it to None
                data.add(
                    Value(stats[source],
                          name=total_name,
                          label_values=[if_name]))
            if last_stats is not None and derive_name is not None:
                # Rate = counter delta divided by seconds since last probe
                time_delta = int(time.time() - self._last_time)
                data.add(
                    Value(
                        (stats[source] - last_stats[source]) / time_delta,
                        name=derive_name,
                        label_values=[if_name]))
    self._last_time = time.time()
    return data
def _probe(self):
    """Collect S.M.A.R.T. attribute values for all configured devices.

    Runs ``smartctl -a`` per device and parses the attribute table that
    follows the ATTRIBUTE_NAME header line, keeping only the attributes
    listed in ``self.attributes``.
    """
    data = ValueSet(labels=['dev'])
    # Countdown of attributes still to be found (shared across devices)
    required_attributes = len(self.attributes)
    for dev in self.devices:
        column_count = 0
        lines = subprocess.check_output(['smartctl', '/dev/' + dev, '-a'
                                         ]).decode('utf-8').splitlines()
        # Ugly parsing - meh, it does the job (so far)
        found_head = False
        for line in lines:
            line = line.strip()
            if found_head:
                columns = self.space_matcher.split(line)
                if len(columns) < 2:
                    # Not a table row (e.g. blank line after the table)
                    continue
                attribute_name = columns[1]
                if attribute_name not in self.attributes:
                    continue
                # "column_count-1" is used instead of "-1" as the actual values might contain more
                # columns at the end which we need to ignore
                value = self.number_matcher.search(columns[column_count - 1])
                if not value:
                    continue
                required_attributes -= 1
                data.add(
                    Value(int(value.group(0)),
                          label_values=[dev],
                          name=attribute_name))
                if required_attributes == 0:
                    # We already got all the attributes - return
                    break
                continue
            if 'ATTRIBUTE_NAME' in line:
                # Header row found - remember its column count for the
                # value extraction above
                found_head = True
                column_count = len(self.space_matcher.split(line))
                continue
    # Some attributes are missing - return all matches so far
    return data
def _probe(self):
    """Parse ``sensors`` output into values, filtered by chip include/exclude.

    The output is a sequence of chip blocks: a headline (the chip name),
    an 'Adapter:' line, then 'key: value' readings until a blank/unmatched
    line ends the block.
    """
    data = ValueSet(labels=['name'])
    lines = subprocess.check_output(['sensors'
                                     ]).decode('utf-8').splitlines()
    chip = None  # chip block currently being parsed
    skip_chip = False  # True while the current chip is filtered out
    for line in lines:
        line = line.lower().strip()
        if not line:
            continue
        parts = line.split(':')
        if len(parts) == 1:
            # Headline -> Chip name
            chip = line
            skip_chip = False
            if not Helper.accept(self.include, self.exclude, chip):
                skip_chip = True
                continue
            continue
        if parts[0] == 'adapter':
            continue
        match = self.value_match.match(line)
        if match is None:
            # End of value block
            chip = None
            continue
        if skip_chip:
            continue
        key = match.group(1).replace(' ', '_')
        value = float(match.group(2))
        # NOTE(review): the sensor key is emitted as the label value and the
        # chip as the metric name - looks intentional, but worth confirming
        data.add(Value(value, label_values=[key], name=chip))
    return data
def _probe(self) -> Optional[ValueSet]:
    """Read total WAN byte counters from a FritzBox and derive per-second rates.

    The first call only primes the counter cache and returns an empty set.
    """
    connection = FritzConnection(address=self._address,
                                 password=self._pass,
                                 timeout=10)
    new_data = {}
    service_name = 'WANCommonInterfaceConfig:1'
    if service_name not in connection.services:
        # Use legacy fallback
        service_name = 'WANCommonIFC1'
    output = connection.call_action(service_name, 'GetTotalBytesReceived')
    new_data['recv_bytes_sec'] = output['NewTotalBytesReceived']
    output = connection.call_action(service_name, 'GetTotalBytesSent')
    new_data['sent_bytes_sec'] = output['NewTotalBytesSent']

    data = ValueSet()
    for key, value in new_data.items():
        last_stats = self._stats.get(key)
        self._stats[key] = value
        if last_stats is not None:
            # Use the float delta: the old int() truncation yielded 0 for
            # probe intervals below one second and raised ZeroDivisionError
            time_delta = time.time() - self._last_time
            if time_delta > 0:
                # max() guards against counter resets (e.g. after reconnect)
                data.add(Value(max(0, (value - last_stats) / time_delta),
                               name=key))
    self._last_time = time.time()
    return data
def _probe(self) -> Optional[ValueSet] or List[ValueSet]:
    """Convert openHAB Number and Switch item states into metric values."""
    reply = requests.get(self._url + '/rest/items')
    items = reply.json()
    value_set = ValueSet(labels=['name', 'group'])
    for item in items:
        label = item.get('label')
        if label is None:
            continue
        # Sanitize label name
        label = re.sub(r'[^a-zA-Z0-9_:]+', '_', label)
        item_type = item.get('type')
        group_names = ','.join(item.get('groupNames', []))
        state = item.get('state')
        if item_type.startswith('Number'):
            # Number might be formatted, try a simple split by space
            # to get the number without unit
            try:
                number = float(state.split(' ')[0])
            except ValueError:
                # Value might be null
                continue
            value_set.add(
                Value(number,
                      label_values=[item['name'], group_names],
                      name=label))
        elif item_type == 'Switch':
            value_set.add(
                Value(state == 'ON',
                      label_values=[item['name'], group_names],
                      name=label))
    return value_set
def _probe(self) -> Optional[ValueSet]:
    """Return the configured static value, optionally after a delay."""
    if self.sleep > 0:
        sleep(self.sleep)
    result = ValueSet()
    result.add(Value(self.value))
    return result
def _probe(self) -> List[ValueSet]:
    """Collect heat-pump metrics from the Viessmann cloud API.

    :return: A list with the main metric ValueSet and a phase-labelled
        ValueSet describing the current compressor phase.
    """
    try:
        self._auth.get_token()
    except ValueError:
        self.log.warning(
            'Did not find any auth token, starting manually authorization flow'
        )
        self._auth.authorize()

    main_set = ValueSet()
    installations = self.api.get_installations()
    install_id = installations[0].id
    gateway = installations[0].gateways[0]
    gateway_serial = gateway.serial

    device_id = '0'
    # Search for correct device id
    for dev in gateway.devices:
        if dev.device_type != Device.TYPE_VITOCONNECT:
            device_id = dev.id
            break

    features = self.api.get_features(install_id, gateway_serial, device_id)

    def read(feature, prop='value'):
        # Shortcut for the very common "one property of one feature" lookup
        return features.get_feature(feature).get_property_value(prop)

    # Return flow temperature (hydraulic separator)
    main_set.add(Value(read('heating.sensors.temperature.return'),
                       name='return_temperature'))
    # Outside temperature
    main_set.add(Value(read('heating.sensors.temperature.outside'),
                       name='outside_temperature'))
    main_set.add(Value(read('heating.dhw.sensors.temperature.hotWaterStorage.top'),
                       name='hot_water_storage_top'))
    main_set.add(Value(read('heating.dhw.sensors.temperature.hotWaterStorage'),
                       name='hot_water_storage'))
    # Supply (flow) temperature
    main_set.add(Value(read('heating.circuits.0.sensors.temperature.supply'),
                       name='supply_temp'))
    main_set.add(Value(read('heating.secondaryCircuit.sensors.temperature.return'),
                       name='secondary_return_temp'))
    main_set.add(Value(read('heating.secondaryCircuit.sensors.temperature.supply'),
                       name='secondary_supply_temp'))

    compressor_phase = read('heating.compressors.0', 'phase')
    main_set.add(
        Value(compressor_phase != 'off' and compressor_phase != 'pause',
              name='compressor_active'))

    # One boolean per known phase so dashboards can plot the active one
    compressor_phase_set = ValueSet(labels=['phase'])
    for phase in ('cooling', 'heating', 'pause'):
        compressor_phase_set.add(
            Value(compressor_phase == phase,
                  name='compressor_phase',
                  label_values=[phase]))

    compressor_stats = features.get_feature('heating.compressors.0.statistics')
    main_set.add(Value(compressor_stats.get_property_value('starts'),
                       name='compressor_stats_starts'))
    main_set.add(Value(compressor_stats.get_property_value('hours'),
                       name='compressor_stats_hours'))
    load_class_props = ('hoursLoadClassOne', 'hoursLoadClassTwo',
                        'hoursLoadClassThree', 'hoursLoadClassFour',
                        'hoursLoadClassFive')
    for idx, prop in enumerate(load_class_props, start=1):
        main_set.add(Value(compressor_stats.get_property_value(prop),
                           name='compressor_stats_hours_class_' + str(idx)))

    # Heating rod metrics - not available on all installations, kept for
    # reference:
    # heating_rod = features.get_feature('heating.heatingRod.status')
    # heating_rod_on = heating_rod.get_property_value('overall')
    # main_set.add(Value(heating_rod_on, name='heating_rod_active'))
    # heating_rod_on_level1 = heating_rod.get_property_value('level1')
    # main_set.add(Value(heating_rod_on_level1, name='heating_rod_active_level_1'))
    # heating_rod_on_level2 = heating_rod.get_property_value('level2')
    # main_set.add(Value(heating_rod_on_level2, name='heating_rod_active_level_2'))
    # heating_rod_on_level3 = heating_rod.get_property_value('level3')
    # main_set.add(Value(heating_rod_on_level3, name='heating_rod_active_level_3'))

    main_set.add(Value(read('heating.dhw.charging', 'active'),
                       name='hot_water_charging'))
    main_set.add(Value(read('heating.circuits.0.circulation.pump', 'status') != 'off',
                       name='heating_circulation_pump'))
    main_set.add(Value(read('heating.dhw.pumps.circulation', 'status') != 'off',
                       name='hot_water_circulation_pump'))
    # Hot water storage pump
    main_set.add(Value(read('heating.dhw.pumps.primary', 'status') != 'off',
                       name='hot_water_primary_pump'))
    # Settings: Hot water target temperature
    main_set.add(Value(read('heating.dhw.temperature.main'),
                       name='hot_water_target_temp'))
    # Temperature profiles
    main_set.add(Value(read('heating.circuits.0.operating.programs.normal', 'temperature'),
                       name='program_normal_temp'))

    return [main_set, compressor_phase_set]
def _probe(self):
    """Collect Plex library sizes and currently active stream counts."""
    stream_movies = 0
    stream_shows = 0
    stream_music = 0
    movies = 0
    shows = 0  # was erroneously initialised to 1, inflating the show count
    shows_episodes = 0
    shows_seasons = 0
    music_albums = 0
    music_songs = 0
    data = ValueSet()

    sections = ElementTree.fromstring(
        Helper.get_url(self.url + '/library/sections'))
    for section in sections:
        sec_id = section.attrib['key']
        lib_type = section.attrib['type']
        if lib_type == 'movie':
            library = ElementTree.fromstring(
                Helper.get_url(self.url + '/library/sections/' + sec_id +
                               '/all'))
            # len(Element) counts the direct children; Element.getchildren()
            # was removed in Python 3.9
            movies += len(library)
            continue
        if lib_type == 'show':
            library = ElementTree.fromstring(
                Helper.get_url(self.url + '/library/sections/' + sec_id +
                               '/all'))
            shows += len(library)
            for show in library:
                # Every entry is a show
                shows_episodes += int(show.attrib['leafCount'])
                shows_seasons += int(show.attrib['childCount'])
            continue
        if lib_type == 'artist':
            library = ElementTree.fromstring(
                Helper.get_url(self.url + '/library/sections/' + sec_id +
                               '/albums'))
            music_albums += len(library)
            for album in library:
                # Every entry is an album
                music_songs += int(album.attrib['leafCount'])
            continue
        print('Unknown lib type: ' + lib_type)

    streams = ElementTree.fromstring(
        Helper.get_url(self.url + '/status/sessions'))
    for stream in streams:
        stream_type = stream.attrib.get('type')
        if stream_type == 'movie':
            stream_movies += 1
        elif stream_type == 'episode':
            stream_shows += 1
        elif stream_type == 'track':
            stream_music += 1

    data.add(Value(stream_movies, name='streams.movies'))
    data.add(Value(stream_shows, name='streams.shows'))
    data.add(Value(stream_music, name='streams.musics'))
    data.add(Value(movies, name='lib.movies'))
    data.add(Value(shows, name='lib.shows'))
    data.add(Value(shows_episodes, name='lib.shows.episodes'))
    data.add(Value(shows_seasons, name='lib.shows.seasons'))
    data.add(Value(music_albums, name='lib.music.albums'))
    data.add(Value(music_songs, name='lib.music.songs'))
    return data
def _probe(self):
    """Parse bind statistics XML into cache/query metrics.

    Absolute counters are cached between runs and reported as per-second
    rates from the second probe onwards. Returns None when the statistics
    endpoint is unreachable.
    """
    counter_data = {}
    try:
        xml_data = Helper.get_url(self.url)
    except URLError as e:
        self.log.error('Could not connect to bind statistics: ' + str(e))
        return None
    file = XmlFile(xml_data)

    # Server-wide per-query-type counters ('|' separates name and label)
    qtype_counters = file.get_elem('.//counters', {'type': 'qtype'})
    for counter in qtype_counters:
        key = counter.attrib['name'].lower()
        counter_data['server.queries|' + key] = int(counter.text)

    data = ValueSet(labels=['queryType'])
    for view_name in self.views:
        low_view_name = view_name.lower()
        view = file.get_elem('.//view', {'name': view_name})
        resolver_stats = file.get_elem('.//counters', {'type': 'resstats'},
                                       view)
        queries = file.get_elem('.//counter', {'name': 'Queryv4'},
                                resolver_stats)
        counter_data[low_view_name + '.queries|v4'] = int(queries.text)
        queries = file.get_elem('.//counter', {'name': 'Queryv6'},
                                resolver_stats)
        counter_data[low_view_name + '.queries|v6'] = int(queries.text)
        cache_stats = file.get_elem('.//counters', {'type': 'cachestats'},
                                    view)
        queries = file.get_elem('.//counter', {'name': 'CacheHits'},
                                cache_stats)
        counter_data[low_view_name + '.cache|hits'] = int(queries.text)
        queries = file.get_elem('.//counter', {'name': 'CacheMisses'},
                                cache_stats)
        counter_data[low_view_name + '.cache|misses'] = int(queries.text)

        # RRset cache content is a gauge, reported directly
        cache = file.get_elem('.//cache', {'name': '_default'}, view)
        for rrset in cache:
            name = file.get_elem('.//name', root=rrset).text.replace('!', '')
            value = file.get_elem('.//counter', root=rrset).text
            data.add(
                Value(int(value),
                      name=low_view_name + '.cache.rrsets',
                      label_values=[name.lower()]))

    if self._last_time is not None:
        # Calculate delta for counter values; float delta plus guard avoids
        # the ZeroDivisionError the old int() truncation caused for
        # sub-second probe intervals
        time_delta = time.time() - self._last_time
        if time_delta > 0:
            for key, value in counter_data.items():
                metric_name, query_type = key.split('|', 1)
                if key in self._last_counters:
                    data.add(
                        Value((value - self._last_counters[key]) / time_delta,
                              name=metric_name + '_per_sec',
                              label_values=[query_type]))
    self._last_time = time.time()
    self._last_counters = counter_data
    return [data]