class TestStatsManager(unittest.TestCase):
    """Test to verify StatsManager methods work as expected.

    StatsManager should collect raw data, calculate their statistics, and
    save them in expected format.
    """

    def setUp(self):
        """Set up data and create a temporary directory to save data and stats."""
        self.tempdir = tempfile.mkdtemp()
        self.data = StatsManager()
        self.data.AddValue('A', 99999.5)
        self.data.AddValue('A', 100000.5)
        self.data.AddValue('A', 'ERROR')
        self.data.SetUnit('A', 'uW')
        self.data.SetUnit('A', 'mW')
        self.data.AddValue('B', 1.5)
        self.data.AddValue('B', 2.5)
        self.data.AddValue('B', 3.5)
        self.data.SetUnit('B', 'mV')
        self.data.CalculateStats()

    def tearDown(self):
        """Delete the temporary directory and its content."""
        shutil.rmtree(self.tempdir)

    def test_GetRawData(self):
        raw_data = self.data.GetRawData()
        self.assertListEqual([99999.5, 100000.5], raw_data['A'])
        self.assertListEqual([1.5, 2.5, 3.5], raw_data['B'])

    def test_GetSummary(self):
        summary = self.data.GetSummary()
        self.assertEqual(2, summary['A']['count'])
        self.assertAlmostEqual(100000.5, summary['A']['max'])
        self.assertAlmostEqual(99999.5, summary['A']['min'])
        self.assertAlmostEqual(0.5, summary['A']['stddev'])
        self.assertAlmostEqual(100000.0, summary['A']['mean'])
        self.assertEqual(3, summary['B']['count'])
        self.assertAlmostEqual(3.5, summary['B']['max'])
        self.assertAlmostEqual(1.5, summary['B']['min'])
        self.assertAlmostEqual(0.81649658092773, summary['B']['stddev'])
        self.assertAlmostEqual(2.5, summary['B']['mean'])

    def test_SaveRawData(self):
        dirname = 'unittest_raw_data'
        self.data.SaveRawData(self.tempdir, dirname)
        dirname = os.path.join(self.tempdir, dirname)
        fileA = os.path.join(dirname, 'A_mW.txt')
        fileB = os.path.join(dirname, 'B_mV.txt')
        with open(fileA, 'r') as fA:
            self.assertEqual('99999.50', fA.readline().strip())
            self.assertEqual('100000.50', fA.readline().strip())
        with open(fileB, 'r') as fB:
            self.assertEqual('1.50', fB.readline().strip())
            self.assertEqual('2.50', fB.readline().strip())
            self.assertEqual('3.50', fB.readline().strip())

    def test_SaveSummary(self):
        fname = 'unittest_summary.txt'
        self.data.SaveSummary(self.tempdir, fname)
        fname = os.path.join(self.tempdir, fname)
        with open(fname, 'r') as f:
            self.assertEqual(
                '@@ NAME COUNT MEAN STDDEV MAX MIN\n',
                f.readline())
            self.assertEqual(
                '@@ A_mW 2 100000.00 0.50 100000.50 99999.50\n',
                f.readline())
            self.assertEqual(
                '@@ B_mV 3 2.50 0.82 3.50 1.50\n',
                f.readline())

    def test_SaveSummaryJSON(self):
        fname = 'unittest_summary.json'
        self.data.SaveSummaryJSON(self.tempdir, fname)
        fname = os.path.join(self.tempdir, fname)
        with open(fname, 'r') as f:
            summary = json.load(f)
        self.assertAlmostEqual(100000.0, summary['A']['mean'])
        self.assertEqual('milliwatt', summary['A']['unit'])
        self.assertAlmostEqual(2.5, summary['B']['mean'])
        self.assertEqual('millivolt', summary['B']['unit'])
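
# Hedged sketch, not part of the test class above: the conventional entry point so
# these tests can be run directly (e.g. `python stats_manager_unittest.py`, file
# name assumed). It relies only on the unittest module the tests already require.
if __name__ == '__main__':
    unittest.main()
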

class powerlog(object):
    """Power class to log aggregated power.

    Usage:
        obj = powerlog()

    Instance Variables:
        _data: a StatsManager object that records sweetberry readings and
            calculates statistics.
        _pwr[]: Spower objects for individual sweetberries.
    """

    def __init__(self, brdfile, cfgfile, serial_a=None, serial_b=None,
                 sync_date=False, use_ms=False, use_mW=False, print_stats=False,
                 stats_dir=None, stats_json_dir=None, print_raw_data=True,
                 raw_data_dir=None):
        """Init the powerlog class and set the variables.

        Args:
            brdfile: string name of json file containing board layout.
            cfgfile: string name of json containing list of rails to read.
            serial_a: serial number of sweetberry A.
            serial_b: serial number of sweetberry B.
            sync_date: report timestamps synced with host datetime.
            use_ms: report timestamps in ms rather than us.
            use_mW: report power as milliwatts, otherwise default to microwatts.
            print_stats: print statistics for sweetberry readings at the end.
            stats_dir: directory to save sweetberry readings statistics; if None
                then do not save the statistics.
            stats_json_dir: directory to save means of sweetberry readings in
                json format; if None then do not save the statistics.
            print_raw_data: print sweetberry readings raw data in real time,
                default is to print.
            raw_data_dir: directory to save sweetberry readings raw data; if
                None then do not save the raw data.
        """
        self._data = StatsManager()
        self._pwr = {}
        self._use_ms = use_ms
        self._use_mW = use_mW
        self._print_stats = print_stats
        self._stats_dir = stats_dir
        self._stats_json_dir = stats_json_dir
        self._print_raw_data = print_raw_data
        self._raw_data_dir = raw_data_dir

        if not serial_a and not serial_b:
            self._pwr['A'] = Spower('A')
        if serial_a:
            self._pwr['A'] = Spower('A', serialname=serial_a)
        if serial_b:
            self._pwr['B'] = Spower('B', serialname=serial_b)

        with open(process_filename(cfgfile)) as data_file:
            names = json.load(data_file)
        self._names = self.process_scenario(names)

        for key in self._pwr:
            self._pwr[key].load_board(brdfile)
            self._pwr[key].reset()

        # Allocate the rails to the appropriate boards.
        used_boards = []
        for name in self._names:
            success = False
            for key in self._pwr.keys():
                if self._pwr[key].add_ina_name(name):
                    success = True
                    if key not in used_boards:
                        used_boards.append(key)
            if not success:
                raise Exception("Failed to add %s (maybe missing "
                                "sweetberry, or bad board file?)" % name)

        # Evict unused boards. Copy the keys so the dict can be modified
        # while iterating.
        for key in list(self._pwr.keys()):
            if key not in used_boards:
                self._pwr.pop(key)

        for key in self._pwr.keys():
            if sync_date:
                self._pwr[key].set_time(time.time() * 1000000)
            else:
                self._pwr[key].set_time(0)

    def process_scenario(self, name_list):
        """Return list of tuples indicating name and type.

        Args:
            name_list: json originated list of names, or [name, type].

        Returns:
            list of tuples of (name, type) defaulting to type "POWER".

        Raises:
            Exception: invalid INA type.
        """
        names = []
        for entry in name_list:
            if isinstance(entry, list):
                name = entry[0]
                if entry[1] == "POWER":
                    ina_type = Spower.INA_POWER
                elif entry[1] == "BUSV":
                    ina_type = Spower.INA_BUSV
                elif entry[1] == "CURRENT":
                    ina_type = Spower.INA_CURRENT
                elif entry[1] == "SHUNTV":
                    ina_type = Spower.INA_SHUNTV
                else:
                    raise Exception("Invalid INA type",
                                    "Type of %s [%s] not recognized,"
                                    " try one of POWER, BUSV, CURRENT, SHUNTV" %
                                    (entry[0], entry[1]))
            else:
                name = entry
                ina_type = Spower.INA_POWER

            names.append((name, ina_type))
        return names

    def start(self, integration_us_request, seconds, sync_speed=.8):
        """Starts sampling.

        Args:
            integration_us_request: requested interval between sample values.
            seconds: time until exit, or None to run until cancel.
            sync_speed: A usb request is sent every [.8] * integration_us.
        """
        # We will get back the actual integration us.
        # It should be the same for all devices.
        integration_us = None
        for key in self._pwr:
            integration_us_new = self._pwr[key].start(integration_us_request)
            if integration_us:
                if integration_us != integration_us_new:
                    raise Exception(
                        "FAIL",
                        "Integration on A: %dus != integration on B %dus" %
                        (integration_us, integration_us_new))
            integration_us = integration_us_new

        # CSV header
        title = "ts:%dus" % integration_us
        for name_tuple in self._names:
            name, ina_type = name_tuple
            if ina_type == Spower.INA_POWER:
                unit = "mW" if self._use_mW else "uW"
            elif ina_type == Spower.INA_BUSV:
                unit = "mV"
            elif ina_type == Spower.INA_CURRENT:
                unit = "uA"
            elif ina_type == Spower.INA_SHUNTV:
                unit = "uV"
            title += ", %s %s" % (name, unit)
            name_type = name + Spower.INA_SUFFIX[ina_type]
            self._data.SetUnit(name_type, unit)
        title += ", status"
        if self._print_raw_data:
            logoutput(title)

        forever = False
        if not seconds:
            forever = True
            seconds = 0  # Guard against None so end_time stays a number.
        end_time = time.time() + seconds
        try:
            pending_records = []
            while forever or end_time > time.time():
                if integration_us > 5000:
                    time.sleep((integration_us / 1000000.) * sync_speed)
                for key in self._pwr:
                    records = self._pwr[key].read_line()
                    if not records:
                        continue
                    for record in records:
                        pending_records.append(record)

                pending_records.sort(key=lambda r: r['ts'])

                aggregate_record = {"boards": set()}
                for record in pending_records:
                    if record["berry"] not in aggregate_record["boards"]:
                        # First reading from this board for the current row.
                        for rkey in record.keys():
                            aggregate_record[rkey] = record[rkey]
                        aggregate_record["boards"].add(record["berry"])
                    else:
                        # Same board reported again before the row completed;
                        # wait for the other board(s) to report.
                        print("break %s, %s" % (record["berry"],
                                                aggregate_record["boards"]))
                        break

                    if aggregate_record["boards"] == set(self._pwr.keys()):
                        # One complete row: every connected board has reported.
                        csv = "%f" % aggregate_record["ts"]
                        for name in self._names:
                            if name in aggregate_record:
                                multiplier = 0.001 if (
                                    self._use_mW and
                                    name[1] == Spower.INA_POWER) else 1
                                value = aggregate_record[name] * multiplier
                                csv += ", %.2f" % value
                                name_type = name[0] + Spower.INA_SUFFIX[name[1]]
                                self._data.AddValue(name_type, value)
                            else:
                                csv += ", "
                        csv += ", %d" % aggregate_record["status"]
                        if self._print_raw_data:
                            logoutput(csv)

                        aggregate_record = {"boards": set()}
                        for r in range(0, len(self._pwr)):
                            pending_records.pop(0)

        except KeyboardInterrupt:
            print('\nCTRL+C caught.')

        finally:
            for key in self._pwr:
                self._pwr[key].stop()
            self._data.CalculateStats()
            if self._print_stats:
                self._data.PrintSummary()
            save_dir = 'sweetberry%s' % time.time()
            if self._stats_dir:
                stats_dir = os.path.join(self._stats_dir, save_dir)
                self._data.SaveSummary(stats_dir)
            if self._stats_json_dir:
                stats_json_dir = os.path.join(self._stats_json_dir, save_dir)
                self._data.SaveSummaryJSON(stats_json_dir)
            if self._raw_data_dir:
                raw_data_dir = os.path.join(self._raw_data_dir, save_dir)
                self._data.SaveRawData(raw_data_dir)
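
# Hedged usage sketch, not part of the class above: build a powerlog from a
# board-layout JSON and a rail-config JSON, then sample for ten seconds at a
# requested 100 ms integration time. 'sweetberry.board' and 'sweetberry.scenario'
# are hypothetical placeholder file names, and running this requires the
# sweetberry hardware that Spower talks to.
if __name__ == '__main__':
    pl = powerlog('sweetberry.board', 'sweetberry.scenario',
                  use_mW=True, print_stats=True)
    pl.start(integration_us_request=100000, seconds=10)
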

class powerlog(object):
    """Power class to log aggregated power.

    Usage:
        obj = powerlog()

    Instance Variables:
        _data: records sweetberry readings and calculates statistics.
        _pwr[]: Spower objects for individual sweetberries.
    """

    def __init__(self, brdfile, cfgfile, serial_a=None, serial_b=None,
                 sync_date=False, use_ms=False, use_mW=False, print_stats=False,
                 stats_dir=None, stats_json_dir=None, print_raw_data=True,
                 raw_data_dir=None):
        """Init the powerlog class and set the variables.

        Args:
            brdfile: string name of json file containing board layout.
            cfgfile: string name of json containing list of rails to read.
            serial_a: serial number of sweetberry A.
            serial_b: serial number of sweetberry B.
            sync_date: report timestamps synced with host datetime.
            use_ms: report timestamps in ms rather than us.
            use_mW: report power as milliwatts, otherwise default to microwatts.
            print_stats: print statistics for sweetberry readings at the end.
            stats_dir: directory to save sweetberry readings statistics; if None
                then do not save the statistics.
            stats_json_dir: directory to save means of sweetberry readings in
                json format; if None then do not save the statistics.
            print_raw_data: print sweetberry readings raw data in real time,
                default is to print.
            raw_data_dir: directory to save sweetberry readings raw data; if
                None then do not save the raw data.
        """
        self._data = StatsManager()
        self._pwr = {}
        self._use_ms = use_ms
        self._use_mW = use_mW
        self._print_stats = print_stats
        self._stats_dir = stats_dir
        self._stats_json_dir = stats_json_dir
        self._print_raw_data = print_raw_data
        self._raw_data_dir = raw_data_dir

        if not serial_a and not serial_b:
            self._pwr['A'] = Spower('A')
        if serial_a:
            self._pwr['A'] = Spower('A', serialname=serial_a)
        if serial_b:
            self._pwr['B'] = Spower('B', serialname=serial_b)

        with open(cfgfile) as data_file:
            names = json.load(data_file)
        self._names = names

        for key in self._pwr:
            self._pwr[key].load_board(brdfile)
            self._pwr[key].reset()

        # Allocate the rails to the appropriate boards.
        used_boards = []
        for name in self._names:
            success = False
            for key in self._pwr.keys():
                if self._pwr[key].add_ina_name(name):
                    success = True
                    if key not in used_boards:
                        used_boards.append(key)
            if not success:
                raise Exception("Failed to add %s (maybe missing "
                                "sweetberry, or bad board file?)" % name)

        # Evict unused boards. Copy the keys so the dict can be modified
        # while iterating.
        for key in list(self._pwr.keys()):
            if key not in used_boards:
                self._pwr.pop(key)

        for key in self._pwr.keys():
            if sync_date:
                self._pwr[key].set_time(time.time() * 1000000)
            else:
                self._pwr[key].set_time(0)

    def start(self, integration_us_request, seconds, sync_speed=.8):
        """Starts sampling.

        Args:
            integration_us_request: requested interval between sample values.
            seconds: time until exit, or None to run until cancel.
            sync_speed: A usb request is sent every [.8] * integration_us.
        """
        # We will get back the actual integration us.
        # It should be the same for all devices.
        integration_us = None
        for key in self._pwr:
            integration_us_new = self._pwr[key].start(integration_us_request)
            if integration_us:
                if integration_us != integration_us_new:
                    raise Exception(
                        "FAIL",
                        "Integration on A: %dus != integration on B %dus" %
                        (integration_us, integration_us_new))
            integration_us = integration_us_new

        # CSV header
        if self._print_raw_data:
            title = "ts:%dus" % integration_us
            for name in self._names:
                unit = "mW" if self._use_mW else "uW"
                title += ", %s %s" % (name, unit)
            title += ", status"
            logoutput(title)

        forever = False
        if not seconds:
            forever = True
            seconds = 0  # Guard against None so end_time stays a number.
        end_time = time.time() + seconds
        try:
            pending_records = []
            while forever or end_time > time.time():
                if integration_us > 5000:
                    time.sleep((integration_us / 1000000.) * sync_speed)
                for key in self._pwr:
                    records = self._pwr[key].read_line()
                    if not records:
                        continue
                    for record in records:
                        pending_records.append(record)

                pending_records.sort(key=lambda r: r['ts'])

                aggregate_record = {"boards": set()}
                for record in pending_records:
                    if record["berry"] not in aggregate_record["boards"]:
                        # First reading from this board for the current row.
                        for rkey in record.keys():
                            aggregate_record[rkey] = record[rkey]
                        aggregate_record["boards"].add(record["berry"])
                    else:
                        # Same board reported again before the row completed;
                        # wait for the other board(s) to report.
                        print("break %s, %s" % (record["berry"],
                                                aggregate_record["boards"]))
                        break

                    if aggregate_record["boards"] == set(self._pwr.keys()):
                        # One complete row: every connected board has reported.
                        csv = "%f" % aggregate_record["ts"]
                        for name in self._names:
                            if name in aggregate_record:
                                multiplier = 0.001 if self._use_mW else 1
                                power = aggregate_record[name] * multiplier
                                csv += ", %.2f" % power
                                self._data.AddValue(name, power)
                            else:
                                csv += ", "
                        csv += ", %d" % aggregate_record["status"]
                        if self._print_raw_data:
                            logoutput(csv)

                        aggregate_record = {"boards": set()}
                        for r in range(0, len(self._pwr)):
                            pending_records.pop(0)

        except KeyboardInterrupt:
            print('\nCTRL+C caught.')

        finally:
            for key in self._pwr:
                self._pwr[key].stop()
            self._data.CalculateStats()
            if self._print_stats:
                self._data.PrintSummary()
            save_dir = datetime.datetime.now().strftime(
                'sweetberry%Y%m%d%H%M%S.%f')
            if self._stats_dir:
                stats_dir = os.path.join(self._stats_dir, save_dir)
                self._data.SaveSummary(stats_dir)
            if self._stats_json_dir:
                stats_json_dir = os.path.join(self._stats_json_dir, save_dir)
                self._data.SaveSummaryJSON(stats_json_dir)
            if self._raw_data_dir:
                raw_data_dir = os.path.join(self._raw_data_dir, save_dir)
                self._data.SaveRawData(raw_data_dir)
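
# Hedged illustration, not part of the classes above, of the record-merging idea
# used in start(): readings from each sweetberry arrive as separate dicts, and one
# output row is emitted only once every connected board has contributed to the
# aggregate. The record layout ('ts', 'berry', 'status', rail values) mirrors the
# fields the loop above reads; merge_records and the sample rail names below
# (VBAT, PP3300) are invented for illustration only.
def merge_records(pending_records, boards):
    """Merge time-sorted per-board records into rows covering every board."""
    rows = []
    aggregate = {'boards': set()}
    for record in sorted(pending_records, key=lambda r: r['ts']):
        if record['berry'] in aggregate['boards']:
            # This board already contributed; stop and wait for the missing one.
            break
        aggregate.update(record)
        aggregate['boards'].add(record['berry'])
        if aggregate['boards'] == set(boards):
            rows.append(dict(aggregate))
            aggregate = {'boards': set()}
    return rows

# Example:
#   merge_records([{'ts': 1.0, 'berry': 'A', 'status': 0, 'VBAT': 1200.0},
#                  {'ts': 1.1, 'berry': 'B', 'status': 0, 'PP3300': 3300.0}],
#                 ['A', 'B'])
# yields a single row containing both rails' readings.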