def _do_output(self, output_config, result):
    """Flush *result* through every configured dispatcher except Influxdb.

    Influxdb output is handled separately (per-record upload), so it is
    skipped here to avoid double reporting.
    """
    for dispatcher in DispatcherBase.get(output_config):
        if dispatcher.__dispatcher_type__ != 'Influxdb':
            dispatcher.flush_result_data(result)
def _output_to_influxdb(self, record):
    """Upload a single *record* to the configured Influxdb dispatcher.

    The record is tagged with the scenario's case name and task id.

    Raises:
        RuntimeError: if no Influxdb dispatcher is present in the
            output configuration.
    """
    dispatchers = DispatcherBase.get(self.config['output_config'])
    dispatcher = next(
        (d for d in dispatchers if d.__dispatcher_type__ == 'Influxdb'),
        None)
    if dispatcher is None:
        # The original bare next() raised an opaque StopIteration when
        # no Influxdb dispatcher was configured; fail explicitly instead.
        raise RuntimeError('no Influxdb dispatcher configured')
    dispatcher.upload_one_record(record, self.case_name, '',
                                 task_id=self.task_id)
def _output_serializer_main(filename, queue):
    '''entrypoint for the singleton subprocess writing to outfile

    Use of this process enables multiple instances of a scenario
    without messing up the output file.
    '''
    config = {
        "type": CONF.dispatcher.capitalize(),
        "file_path": filename,
    }
    dispatcher = DispatcherBase.get(config)
    while True:
        record = queue.get()  # blocks until data becomes available
        if record == '_TERMINATE_':
            dispatcher.flush_result_data()
            break
        dispatcher.record_result_data(record)
def _output_serializer_main(filename, queue, config):
    """entrypoint for the singleton subprocess writing to outfile

    Use of this process enables multiple instances of a scenario
    without messing up the output file.
    """
    # Dispatcher type precedence: yardstick config, then the DISPATCHER
    # environment variable, then the 'file' default. The try wraps both
    # lookups: 'yardstick' or 'dispatcher' may be absent.
    try:
        out_type = config['yardstick'].get('DEFAULT', {})['dispatcher']
    except KeyError:
        out_type = os.environ.get('DISPATCHER', 'file')

    dispatcher = DispatcherBase.get(
        {'type': out_type.capitalize(), 'file_path': filename}, config)

    while True:
        record = queue.get()  # blocks until data becomes available
        if record == '_TERMINATE_':
            dispatcher.flush_result_data()
            break
        dispatcher.record_result_data(record)
def _do_output(self, output_config, result):
    """Flush *result* through every dispatcher built from *output_config*."""
    for dispatcher in DispatcherBase.get(output_config):
        dispatcher.flush_result_data(result)