def run_once(self):
    """Test body.

    Verifies that, in a clean conducted RF environment, the DUT's rate
    controller converges on the highest MCS index on both a 2.4 GHz
    channel (1) and a 5 GHz channel (157) while streaming UDP traffic.

    @raises error.TestNAError: if the DUT is a lab machine not known to
            have a conducted (cabled) connection to the AP.
    @raises error.TestError: if packet capture did not produce exactly
            one capture file.
    """
    if utils.host_could_be_in_afe(self.context.client.host.hostname):
        # Just abort the test if we're in the lab and not on a
        # machine known to be conducted.  The performance
        # requirements of this test are hard to meet, without
        # strong multi-path effects.  (Our conducted setups are
        # designed to provide strong multi-path.)
        if not self.context.client.conductive:
            raise error.TestNAError(
                    'This test requires a great RF environment.')
    else:
        logging.error('Unable to determine if DUT has conducted '
                      'connection to AP. Treat any TestFail with '
                      'skepticism.')

    caps = [hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
            hostap_config.HostapConfig.N_CAPABILITY_HT40]
    mode_11n = hostap_config.HostapConfig.MODE_11N_PURE

    # PEP 8: use a def, not a lambda bound to a name.
    def get_config(channel):
        """Build an 802.11n-only HT40 greenfield AP config for |channel|."""
        return hostap_config.HostapConfig(
                channel=channel, mode=mode_11n, n_capabilities=caps)

    netperf_config = netperf_runner.NetperfConfig(
            netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM)
    # One 2.4 GHz channel and one 5 GHz channel.  (The previous
    # enumerate() index was unused and has been dropped.)
    for ap_config in (get_config(1), get_config(157)):
        # Set up the router and associate the client with it.
        self.context.configure(ap_config)
        self.context.capture_host.start_capture(
                ap_config.frequency,
                ht_type=ap_config.ht_packet_capture_mode,
                snaplen=self.TEST_SNAPLEN)
        assoc_params = xmlrpc_datatypes.AssociationParameters(
                ssid=self.context.router.get_ssid())
        self.context.assert_connect_wifi(assoc_params)
        # Generate load so the rate controller has traffic to adapt on.
        with netperf_runner.NetperfRunner(self.context.client,
                                          self.context.router,
                                          netperf_config) as runner:
            runner.run()
        results = self.context.capture_host.stop_capture()
        if len(results) != 1:
            raise error.TestError('Expected to generate one packet '
                                  'capture but got %d instead.' %
                                  len(results))

        # The device should sense that it is in a clean RF environment and
        # use the highest index to achieve maximal throughput.
        max_mcs_index = self.get_highest_mcs_rate(ap_config.frequency)
        self.check_bitrates_in_capture(results[0], max_mcs_index)
        # Clean up router and client state for the next run.
        self.context.client.shill.disconnect(
                self.context.router.get_ssid())
        self.context.router.deconfig()
def warmup_wifi_part(self, warmup_client=True):
    """Warm up a rate controller on the client or server.

    WiFi "warms up" in that rate controllers dynamically adjust to
    environmental conditions by increasing symbol rates until loss is
    observed.  This manifests as initially slow data transfer rates that
    get better over time.

    We'll say that a rate controller is warmed up if a small sample of
    WARMUP_WINDOW_SIZE throughput measurements has an average throughput
    within a standard deviation of the previous WARMUP_WINDOW_SIZE samples.

    @param warmup_client: bool True iff we should warmup the client rate
            controller.  Otherwise we warm up the server rate controller.

    """
    if warmup_client:
        # We say a station is warm if the TX throughput is maximized.
        # Each station only controls its own transmission TX rate.
        logging.info('Warming up the client WiFi rate controller.')
        test_type = netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM
    else:
        logging.info('Warming up the server WiFi rate controller.')
        test_type = netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS
    config = netperf_runner.NetperfConfig(
            test_type, test_time=self.WARMUP_SAMPLE_TIME_SECONDS)
    warmup_history = []
    with netperf_runner.NetperfRunner(self._client_proxy,
                                      self._server_proxy,
                                      config) as runner:
        while len(warmup_history) < self.WARMUP_MAX_SAMPLES:
            warmup_history.append(runner.run())
            # Need strictly more than two full windows before comparing.
            if len(warmup_history) > 2 * self.WARMUP_WINDOW_SIZE:
                # Grab 2 * WARMUP_WINDOW_SIZE samples, divided into the most
                # recent chunk and the chunk before that.
                start = -2 * self.WARMUP_WINDOW_SIZE
                middle = -self.WARMUP_WINDOW_SIZE
                past_result = self._from_samples(
                        warmup_history[start:middle])
                recent_result = self._from_samples(warmup_history[middle:])
                # Warm when the recent window's average throughput has
                # stopped improving by more than one standard deviation
                # over the previous window.
                if recent_result.throughput < (past_result.throughput +
                                               past_result.throughput_dev):
                    logging.info('Rate controller is warmed.')
                    return
        else:
            # while-else: reached only when WARMUP_MAX_SAMPLES measurements
            # were taken without the convergence check above returning.
            logging.warning('Did not completely warmup the WiFi part.')
class network_WiFi_BluetoothStreamPerf(wifi_cell_test_base.WiFiCellTestBase):
    """Test maximal achievable bandwidth on several channels per band.

    Conducts a performance test for a set of specified router configurations
    and reports results as keyval pairs.  Each netperf flavor is measured in
    four Bluetooth states per AP configuration: disconnected (baseline),
    connected but idle, streaming audio, and disconnected again.

    """

    version = 1

    # One netperf flavor per direction/protocol combination; each is run for
    # every AP configuration and every Bluetooth state.
    NETPERF_CONFIGS = [
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS),
    ]

    def parse_additional_arguments(self, commandline_args, additional_params):
        """Hook into super class to take control files parameters.

        @param commandline_args dict of parsed parameters from the autotest.
        @param additional_params list of HostapConfig objects.

        """
        self._ap_configs = additional_params

    def test_one(self, session, config, ap_config_tag, bt_tag):
        """Run one iteration of wifi testing.

        @param session NetperfSession session
        @param config NetperfConfig config
        @param ap_config_tag string for AP configuration
        @param bt_tag string for BT operation

        """
        get_ping_config = lambda period: ping_runner.PingConfig(
                self.context.get_wifi_addr(), interval=1, count=period)

        logging.info('testing config %s, ap_config %s, BT:%s',
                     config.tag, ap_config_tag, bt_tag)
        test_str = '_'.join([ap_config_tag, bt_tag])
        # Brief settle period before sampling the signal level.
        time.sleep(1)

        # Record the signal level.
        signal_level = self.context.client.wifi_signal_level
        signal_description = '_'.join(['signal', test_str])
        self.write_perf_keyval({signal_description: signal_level})

        # Run netperf and log the results.
        results = session.run(config)
        if not results:
            logging.error('Failed to take measurement for %s', config.tag)
            return
        values = [result.throughput for result in results]
        self.output_perf_value(config.tag + '_' + bt_tag, values, units='Mbps',
                               higher_is_better=True, graph=ap_config_tag)
        result = netperf_runner.NetperfResult.from_samples(results)
        self.write_perf_keyval(
                result.get_keyval(prefix='_'.join([config.tag, test_str])))

        # Log the drop in throughput compared with the 'BT_disconnected'
        # baseline.  Only positive values are valid.  Report the drop as a
        # whole integer percentage of (base_through-through)/base_through.
        if bt_tag == 'BT_disconnected':
            # Baseline pass for this config; later BT states compare
            # against this throughput.
            self.base_through = result.throughput
        elif self.base_through > 0:
            drop = int((self.base_through - result.throughput) * 100 /
                       self.base_through)
            self.output_perf_value(config.tag + '_' + bt_tag + '_drop',
                                   drop, units='percent_drop',
                                   higher_is_better=False,
                                   graph=ap_config_tag + '_drop')
            self.write_perf_keyval(
                    {'_'.join([config.tag, test_str, 'drop']): drop})
            logging.info('logging drop value as %d%%', drop)

        # Test latency with ping.
        result_ping = self.context.client.ping(get_ping_config(3))
        self.write_perf_keyval(
                {'_'.join(['ping', test_str]): result_ping.avg_latency})
        logging.info('Ping statistics with %s: %r', bt_tag, result_ping)

    def run_once(self, host):
        """Test body.

        @param host: DUT host object; provides the chameleon board used as
                the Bluetooth audio peripheral.

        """
        start_time = time.time()

        # Setup Bluetooth widgets and their binder, but do not yet connect.
        audio_test.audio_test_requirement()
        factory = remote_facade_factory.RemoteFacadeFactory(
                host, results_dir=self.resultsdir)
        chameleon_board = host.chameleon
        chameleon_board.setup_and_reset(self.outputdir)
        widget_factory = chameleon_audio_helper.AudioWidgetFactory(
                factory, host)
        source = widget_factory.create_widget(
                chameleon_audio_ids.CrosIds.BLUETOOTH_HEADPHONE)
        bluetooth_widget = widget_factory.create_widget(
                chameleon_audio_ids.PeripheralIds.BLUETOOTH_DATA_RX)
        binder = widget_factory.create_binder(source, bluetooth_widget)
        audio_test_file = 'http://commondatastorage.googleapis.com/' \
                          'chromiumos-test-assets-public/audio_test/' \
                          'chameleon/Headphone/test_256_16.mp3'

        for ap_config in self._ap_configs:
            # Set up the router and associate the client with it.
            self.context.configure(ap_config)
            if ap_config.is_11ac and not self.context.client.is_vht_supported():
                raise error.TestNAError('Client does not have AC support')
            assoc_params = xmlrpc_datatypes.AssociationParameters(
                    ssid=self.context.router.get_ssid(),
                    security_config=ap_config.security_config)
            self.context.assert_connect_wifi(assoc_params)
            session = netperf_session.NetperfSession(self.context.client,
                                                     self.context.router)
            # NOTE(review): `6.` is a float literal — presumably the int 6
            # was intended.  The sample-count comparison still works with a
            # float, but confirm and drop the trailing dot.
            session.MEASUREMENT_MAX_SAMPLES = 6.

            # Warmup the wifi path and measure signal.
            session.warmup_stations()
            ap_config_tag = ap_config.perf_loggable_description

            for config in self.NETPERF_CONFIGS:
                # Reset the baseline; test_one() records it on the
                # 'BT_disconnected' pass below.
                self.base_through = 0
                self.test_one(session, config, ap_config_tag,
                              'BT_disconnected')
                with chameleon_audio_helper.bind_widgets(binder):
                    self.test_one(session, config, ap_config_tag,
                                  'BT_connected_but_not_streaming')
                    logging.info('Playing an audio test file')
                    browser_facade = factory.create_browser_facade()
                    browser_facade.new_tab(audio_test_file)
                    self.test_one(session, config, ap_config_tag,
                                  'BT_streaming_audiofile')
                self.test_one(session, config, ap_config_tag,
                              'BT_disconnected_again')

            # Clean up router and client state for the next run.
            self.context.client.shill.disconnect(
                    self.context.router.get_ssid())
            self.context.router.deconfig()

        end_time = time.time()
        logging.info('Running time %0.1f seconds.', end_time - start_time)
class network_WiFi_Perf(wifi_cell_test_base.WiFiCellTestBase):
    """Test maximal achievable bandwidth on several channels per band.

    Conducts a performance test for a set of specified router configurations
    and reports results as keyval pairs.  Optionally pins the CPU frequency
    governor on both client and router for the duration of each run.

    """

    version = 1

    # One netperf flavor per direction/protocol combination.
    NETPERF_CONFIGS = [
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS),
    ]

    def parse_additional_arguments(self, commandline_args, additional_params):
        """Hook into super class to take control files parameters.

        @param commandline_args dict of parsed parameters from the autotest.
        @param additional_params list of HostapConfig objects.

        """
        if 'governor' in commandline_args:
            self._governor = commandline_args['governor']
            # validate governor string. Not all machines will support all of
            # these governors, but this at least ensures that a potentially
            # valid governor was passed in
            if self._governor not in ('performance', 'powersave', 'userspace',
                                      'ondemand', 'conservative', 'schedutil'):
                # Use logging's lazy %-args (the original eagerly
                # interpolated with the % operator).
                logging.warning(
                        'Unrecognized CPU governor "%s". Running test '
                        'without setting CPU governor...', self._governor)
                self._governor = None
        else:
            self._governor = None
        self._ap_configs = additional_params

    def do_run(self, ap_config, session, power_save, governor):
        """Run a single set of perf tests, for a given AP and DUT config.

        @param ap_config: the AP configuration that is being used
        @param session: a netperf session instance
        @param power_save: whether or not to use power-save mode on the DUT
                (boolean)
        @param governor: CPU governor name to pin on both client and router
                for this run, or None to keep the machines' current
                governors.

        """
        def get_current_governor(host):
            """
            @return the CPU governor name used on a machine. If cannot find
                    the governor info of the host, or if there are multiple
                    different governors being used on different cores,
                    return 'default'.

            """
            try:
                governors = set(utils.get_scaling_governor_states(host))
                if len(governors) != 1:
                    return 'default'
                return next(iter(governors))
            except Exception:
                # Narrowed from a bare `except:` so SystemExit and
                # KeyboardInterrupt are not swallowed.
                return 'default'

        if governor:
            # Remember current governors so they can be restored afterwards.
            client_governor = utils.get_scaling_governor_states(
                    self.context.client.host)
            router_governor = utils.get_scaling_governor_states(
                    self.context.router.host)
            utils.set_scaling_governors(governor, self.context.client.host)
            utils.set_scaling_governors(governor, self.context.router.host)
            governor_name = governor
        else:
            # try to get machine's current governor
            governor_name = get_current_governor(self.context.client.host)
            if governor_name != get_current_governor(self.context.router.host):
                governor_name = 'default'
            if governor_name == self._governor:
                # The machines already run the explicitly-requested
                # governor, so this default-governor pass would duplicate
                # the explicit one; skip it.  (This check belongs inside
                # the else-branch only — at the outer level it would skip
                # every explicit-governor run.)
                return

        self.context.client.powersave_switch(power_save)
        session.warmup_stations()
        ps_tag = 'PS%s' % ('on' if power_save else 'off')
        governor_tag = 'governor-%s' % governor_name
        ap_config_tag = '_'.join([ap_config.perf_loggable_description,
                                  ps_tag, governor_tag])
        signal_level = self.context.client.wifi_signal_level
        signal_description = '_'.join([ap_config_tag, 'signal'])
        self.write_perf_keyval({signal_description: signal_level})
        for config in self.NETPERF_CONFIGS:
            results = session.run(config)
            if not results:
                logging.error('Failed to take measurement for %s',
                              config.tag)
                continue
            values = [result.throughput for result in results]
            self.output_perf_value(config.tag, values, units='Mbps',
                                   higher_is_better=True,
                                   graph=ap_config_tag)
            result = netperf_runner.NetperfResult.from_samples(results)
            self.write_perf_keyval(
                    result.get_keyval(
                            prefix='_'.join([ap_config_tag, config.tag])))
        if governor:
            # Restore whatever governors were active before this run.
            utils.restore_scaling_governor_states(client_governor,
                                                  self.context.client.host)
            utils.restore_scaling_governor_states(router_governor,
                                                  self.context.router.host)

    def run_once(self):
        """Test body."""
        start_time = time.time()
        for ap_config in self._ap_configs:
            # Set up the router and associate the client with it.
            self.context.configure(ap_config)
            # self.context.configure has a similar check - but that one only
            # errors out if the AP *requires* VHT i.e. AP is requesting
            # MODE_11AC_PURE and the client does not support it.
            # For wifi_perf, we don't want to run MODE_11AC_MIXED on the AP
            # if the client does not support VHT, as we are guaranteed to
            # get the same results at 802.11n/HT40 in that case.
            if ap_config.is_11ac and not self.context.client.is_vht_supported():
                raise error.TestNAError('Client does not have AC support')
            assoc_params = xmlrpc_datatypes.AssociationParameters(
                    ssid=self.context.router.get_ssid(),
                    security_config=ap_config.security_config)
            self.context.assert_connect_wifi(assoc_params)
            session = netperf_session.NetperfSession(self.context.client,
                                                     self.context.router)
            # Flag a test error if we disconnect for any reason.
            with self.context.client.assert_no_disconnects():
                # Conduct the performance tests while toggling powersave
                # mode.  The previous sorted(set([None, self._governor]))
                # raised TypeError on Python 3 when a governor string was
                # supplied (None and str do not compare); build the
                # deduplicated, None-first list explicitly instead.
                governors = [None]
                if self._governor is not None:
                    governors.append(self._governor)
                for power_save in (True, False):
                    for governor in governors:
                        self.do_run(ap_config, session, power_save, governor)
            # Clean up router and client state for the next run.
            self.context.client.shill.disconnect(
                    self.context.router.get_ssid())
            self.context.router.deconfig()
        end_time = time.time()
        logging.info('Running time %0.1f seconds.', end_time - start_time)
class network_WiFi_Perf(wifi_cell_test_base.WiFiCellTestBase):
    """Test maximal achievable bandwidth on several channels per band.

    Conducts a performance test for a set of specified router configurations
    and reports results as keyval pairs.

    NOTE(review): this chunk also contains a governor-aware class with the
    same name — presumably two separate files concatenated for review.
    Confirm they do not live in the same module (the later definition would
    shadow the earlier one).

    """

    version = 1

    # One netperf flavor per direction/protocol combination.
    NETPERF_CONFIGS = [
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS),
    ]

    def parse_additional_arguments(self, commandline_args, additional_params):
        """Hook into super class to take control files parameters.

        @param commandline_args dict of parsed parameters from the autotest.
        @param additional_params list of HostapConfig objects.

        """
        self._ap_configs = additional_params

    def do_run(self, ap_config, session, power_save):
        """Run a single set of perf tests, for a given AP and DUT config.

        @param ap_config: the AP configuration that is being used
        @param session: a netperf session instance
        @param power_save: whether or not to use power-save mode on the DUT
                (boolean)

        """
        self.context.client.powersave_switch(power_save)
        # Let the rate controllers stabilize before measuring.
        session.warmup_stations()
        ps_tag = 'PS%s' % ('on' if power_save else 'off')
        ap_config_tag = '_'.join([ap_config.perf_loggable_description,
                                  ps_tag])
        signal_level = self.context.client.wifi_signal_level
        signal_description = '_'.join([ap_config_tag, 'signal'])
        self.write_perf_keyval({signal_description: signal_level})
        for config in self.NETPERF_CONFIGS:
            results = session.run(config)
            if not results:
                # Best-effort: skip this flavor, keep measuring the rest.
                logging.error('Failed to take measurement for %s',
                              config.tag)
                continue
            values = [result.throughput for result in results]
            self.output_perf_value(config.tag, values, units='Mbps',
                                   higher_is_better=True,
                                   graph=ap_config_tag)
            result = netperf_runner.NetperfResult.from_samples(results)
            self.write_perf_keyval(
                    result.get_keyval(
                            prefix='_'.join([ap_config_tag, config.tag])))

    def run_once(self):
        """Test body."""
        start_time = time.time()
        for ap_config in self._ap_configs:
            # Set up the router and associate the client with it.
            self.context.configure(ap_config)
            if ap_config.is_11ac and not self.context.client.is_vht_supported():
                raise error.TestNAError('Client does not have AC support')
            assoc_params = xmlrpc_datatypes.AssociationParameters(
                    ssid=self.context.router.get_ssid(),
                    security_config=ap_config.security_config)
            self.context.assert_connect_wifi(assoc_params)
            session = netperf_session.NetperfSession(self.context.client,
                                                     self.context.router)
            # Flag a test error if we disconnect for any reason.
            with self.context.client.assert_no_disconnects():
                # Conduct the performance tests while toggling powersave mode.
                for power_save in (True, False):
                    self.do_run(ap_config, session, power_save)
            # Clean up router and client state for the next run.
            self.context.client.shill.disconnect(
                    self.context.router.get_ssid())
            self.context.router.deconfig()
        end_time = time.time()
        logging.info('Running time %0.1f seconds.', end_time - start_time)
class network_WiFi_BluetoothScanPerf(wifi_cell_test_base.WiFiCellTestBase):
    """Test the effect of bluetooth scanning on wifi performance.

    Conducts a performance test for a set of specified router configurations
    while scanning for bluetooth devices and reports results as keyval pairs.
    Each netperf flavor is measured with BT quiet, BT scanning, and BT quiet
    again.

    """

    version = 1

    # One netperf flavor per direction/protocol combination.
    NETPERF_CONFIGS = [
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS),
    ]

    def parse_additional_arguments(self, commandline_args, additional_params):
        """Hook into super class to take control files parameters.

        @param commandline_args dict of parsed parameters from the autotest.
        @param additional_params list of HostapConfig objects.

        """
        self._ap_configs = additional_params

    def test_one(self, session, config, ap_config_tag, bt_tag):
        """Run one iteration of wifi testing.

        @param session NetperfSession session
        @param config NetperfConfig config
        @param ap_config_tag string for AP configuration
        @param bt_tag string for BT operation

        """
        get_ping_config = lambda period: ping_runner.PingConfig(
                self.context.get_wifi_addr(), interval=1, count=period)

        logging.info('testing config %s, ap_config %s, BT:%s',
                     config.tag, ap_config_tag, bt_tag)
        test_str = '_'.join([ap_config_tag, bt_tag])
        # Brief settle period before sampling the signal level.
        time.sleep(1)

        # Record the signal level.
        signal_level = self.context.client.wifi_signal_level
        signal_description = '_'.join(['signal', test_str])
        self.write_perf_keyval({signal_description: signal_level})

        # Run netperf and log the results.
        results = session.run(config)
        if not results:
            logging.error('Failed to take measurement for %s', config.tag)
            return
        values = [result.throughput for result in results]
        # NOTE(review): the sibling BluetoothStreamPerf test joins these
        # with '_'; the space here looks inconsistent and may not be a
        # valid perf-value description — confirm against output_perf_value
        # requirements.
        self.output_perf_value(config.tag + ' ' + bt_tag, values, units='Mbps',
                               higher_is_better=True, graph=ap_config_tag)
        result = netperf_runner.NetperfResult.from_samples(results)
        self.write_perf_keyval(
                result.get_keyval(prefix='_'.join([config.tag, test_str])))

        # Test latency with ping.
        result_ping = self.context.client.ping(get_ping_config(3))
        self.write_perf_keyval(
                {'_'.join(['ping', test_str]): result_ping.avg_latency})
        logging.info('Ping statistics with %s: %r', bt_tag, result_ping)

    def run_once(self, host):
        """Test body.

        @param host: DUT host object, used to reach its Bluetooth adapter.

        """
        start_time = time.time()

        # Prepare Bluetooth to scan, but do not start yet.
        bt_device = bluetooth_device.BluetoothDevice(host)
        if not bt_device.reset_on():
            raise error.TestFail('DUT could not be reset to initial state')

        for ap_config in self._ap_configs:
            # Set up the router and associate the client with it.
            self.context.configure(ap_config)
            if ap_config.is_11ac and not self.context.client.is_vht_supported():
                raise error.TestNAError('Client does not have AC support')
            assoc_params = xmlrpc_datatypes.AssociationParameters(
                    ssid=self.context.router.get_ssid(),
                    security_config=ap_config.security_config)
            self.context.assert_connect_wifi(assoc_params)
            session = netperf_session.NetperfSession(self.context.client,
                                                     self.context.router)

            # Warmup the wifi path and measure signal.
            session.warmup_stations()
            ap_config_tag = ap_config.perf_loggable_description

            for config in self.NETPERF_CONFIGS:
                self.test_one(session, config, ap_config_tag, 'BT_quiet')
                if not bt_device.start_discovery():
                    raise error.TestFail('Could not start discovery on DUT')
                try:
                    self.test_one(session, config, ap_config_tag,
                                  'BT_scanning')
                finally:
                    # Always stop discovery, even if the measurement failed.
                    if not bt_device.stop_discovery():
                        logging.warning('Failed to stop discovery on DUT')
                self.test_one(session, config, ap_config_tag,
                              'BT_quiet_again')

            # Clean up router and client state for the next run.
            self.context.client.shill.disconnect(
                    self.context.router.get_ssid())
            self.context.router.deconfig()

        end_time = time.time()
        logging.info('Running time %0.1f seconds.', end_time - start_time)
class network_WiFi_AttenuatedPerf(wifi_cell_test_base.WiFiCellTestBase):
    """Test maximal achievable bandwidth while varying attenuation.

    Performs a performance test for a specified router configuration as
    signal attentuation increases (a rate-vs-range, "RvR", sweep).

    """

    version = 1

    # Control-file key for an optional note embedded in the .tsv series
    # label.
    CMDLINE_SERIES_NOTE = 'series_note'

    # One netperf flavor per direction/protocol combination.
    NETPERF_CONFIGS = [
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM),
            netperf_runner.NetperfConfig(
                    netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS),
    ]

    # Sweep granularity and upper bound, in dB.
    ATTENUATION_STEP = 4
    FINAL_ATTENUATION = 100

    TSV_OUTPUT_DIR = 'tsvs'

    # One measurement row: attenuation (dB), mean throughput (Mbps), its
    # deviation, client-reported signal (dBm), and the netperf flavor tag.
    DataPoint = collections.namedtuple('DataPoint',
                                       ['attenuation', 'throughput',
                                        'variance', 'signal', 'test_type'])

    def parse_additional_arguments(self, commandline_args, additional_params):
        """Hook into super class to take control files parameters.

        @param commandline_args dict of parsed parameters from the autotest.
        @param additional_params list of dicts describing router configs.

        """
        self._ap_config = additional_params
        self.series_note = None
        if self.CMDLINE_SERIES_NOTE in commandline_args:
            self.series_note = commandline_args[self.CMDLINE_SERIES_NOTE]

    def run_once(self):
        """Run test."""
        start_time = time.time()
        throughput_data = []
        # (attenuation, signal) of the deepest level that produced at least
        # one successful measurement; None until the first success.
        max_atten = None
        self.context.client.host.get_file('/etc/lsb-release', self.resultsdir)
        # Set up the router and associate the client with it.
        self.context.configure(self._ap_config)
        assoc_params = xmlrpc_datatypes.AssociationParameters(
                ssid=self.context.router.get_ssid(),
                security_config=self._ap_config.security_config)
        self.context.assert_connect_wifi(assoc_params)

        # Conduct the performance tests.  Ignore failures, since
        # at high attenuations, sometimes the control connection
        # is unable to terminate the test properly.
        session = netperf_session.NetperfSession(self.context.client,
                                                 self.context.router,
                                                 ignore_failures=True)
        session.warmup_stations()
        start_atten = self.context.attenuator.get_minimal_total_attenuation()
        for atten in range(start_atten, self.FINAL_ATTENUATION,
                           self.ATTENUATION_STEP):
            atten_tag = 'atten%03d' % atten
            self.context.attenuator.set_total_attenuation(
                    atten, self._ap_config.frequency)
            logging.info('RvR test: current attenuation = %d dB', atten)

            # Give this attenuation level a quick sanity test. If we can't
            # stay associated and handle a few pings, we probably won't get
            # meaningful results out of netperf.
            try:
                self.context.wait_for_connection(
                        self.context.router.get_ssid())
            except error.TestFail as e:
                logging.warning('Could not establish connection at %d dB (%s)',
                                atten, str(e))
                break

            for config in self.NETPERF_CONFIGS:
                results = session.run(config)
                if not results:
                    logging.warning('Unable to take measurement for %s; '
                                    'aborting', config.human_readable_tag)
                    break
                graph_name = '.'.join(
                        [self._ap_config.perf_loggable_description,
                         config.tag])
                values = [result.throughput for result in results]
                # If no signal is detected with client.wifi_signal_level,
                # set signal_level to -100 to indicate weak signal.
                signal_level = (self.context.client.wifi_signal_level if
                                self.context.client.wifi_signal_level
                                else -100)
                self.output_perf_value(atten_tag, values, units='Mbps',
                                       higher_is_better=True,
                                       graph=graph_name)
                self.output_perf_value('_'.join([atten_tag, 'signal']),
                                       signal_level, units='dBm',
                                       higher_is_better=True,
                                       graph=graph_name)
                result = netperf_runner.NetperfResult.from_samples(results)
                throughput_data.append(self.DataPoint(
                        atten, result.throughput, result.throughput_dev,
                        signal_level, config.tag))
                keyval_prefix = '_'.join(
                        [self._ap_config.perf_loggable_description,
                         config.tag, atten_tag])
                self.write_perf_keyval(result.get_keyval(
                        prefix=keyval_prefix))
                # Reported at least one successful result at this
                # attenuation.
                max_atten = (atten, signal_level)

            signal_level = self.context.client.wifi_signal_level
            self.write_perf_keyval(
                    {'_'.join([atten_tag, 'signal']): signal_level})

            # `results` is the last measurement of the inner loop; empty
            # means the loop above broke out early.  (NETPERF_CONFIGS is
            # non-empty, so `results` is always bound here.)
            if not results:
                logging.warning('No results for atten %d dB; terminating',
                                atten)
        # Clean up router and client state.
        self.context.client.shill.disconnect(assoc_params.ssid)
        self.context.router.deconfig()
        end_time = time.time()
        logging.info('Running time %0.1f seconds.', end_time - start_time)
        if max_atten is None:
            raise error.TestFail('Did not succeed at any atten level')
        logging.info('Reached attenuation of: %d dB (signal %d)' % max_atten)
        self.write_perf_keyval(
                {'ch%03d_max_atten' % self._ap_config.channel: max_atten[0]})
        self.write_perf_keyval(
                {'ch%03d_min_signal' % self._ap_config.channel: max_atten[1]})
        self.write_throughput_tsv_files(throughput_data)

    def write_throughput_tsv_files(self, throughput_data):
        """Write out .tsv files with plotable data from |throughput_data|.

        Each .tsv file starts with a label for the series that can be
        customized with a short note passed in from the command line.  It
        then has column headers and fields separated by tabs.  This format
        is easy to parse and also works well with spreadsheet programs for
        custom report generation.

        @param throughput_data a list of Datapoint namedtuples gathered from
                tests.

        """
        logging.info('Writing .tsv files.')
        os.mkdir(os.path.join(self.resultsdir, self.TSV_OUTPUT_DIR))
        series_label_parts = [self.context.client.board,
                              'ch%03d' % self._ap_config.channel]
        if self.series_note:
            series_label_parts.insert(1, '(%s)' % self.series_note)
        header_parts = ['Attenuation', 'Throughput(Mbps)', 'StdDev(Mbps)',
                        'Client Reported Signal']
        mode = self._ap_config.printable_mode
        # Sanitize '+'/'-' so the mode is filename-safe.
        mode = mode.replace('+', 'p').replace('-', 'm').lower()
        result_file_prefix = '%s_ch%03d' % (mode, self._ap_config.channel)
        # One .tsv per netperf flavor present in the data.
        for test_type in set([data.test_type for data in throughput_data]):
            result_file = os.path.join(
                    self.resultsdir, self.TSV_OUTPUT_DIR,
                    '%s_%s.tsv' % (result_file_prefix, test_type))
            lines = [' '.join(series_label_parts),
                     '\t'.join(header_parts)]
            # Sorting the namedtuples orders rows by attenuation (the
            # first field); only the first four fields are emitted.
            for result in sorted([datum for datum in throughput_data
                                  if datum.test_type == test_type]):
                lines.append('\t'.join(map(str, result[0:4])))
            with open(result_file, 'w') as f:
                f.writelines(['%s\n' % line for line in lines])