Code example #1
    def test_bza_py3_unicode_token(self):
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/web/version': {"result": {}},
        })

        user = User()
        mock.apply(user)
        user.token = str("something:something")
        user.ping()
Code example #2
File: test_bza.py Project: keithmork/taurus
    def test_bza_py3_unicode_token(self):
        mock = BZMock()
        mock.mock_get.update({
            'https://a.blazemeter.com/api/v4/web/version': {"result": {}},
        })

        user = User()
        mock.apply(user)
        user.token = text_type("something:something")
        user.ping()
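
The two tests above differ only in how the token string is built: str(...) versus text_type(...). As a point of reference (an assumption about the helper, not part of the snippets themselves), text_type is the usual six-style compatibility alias that resolves to unicode on Python 2 and str on Python 3, so both variants exercise the same code path on Python 3:

# Hedged sketch of the assumed text_type alias; not taken from the Taurus source.
import sys

if sys.version_info[0] >= 3:
    text_type = str
else:
    text_type = unicode  # noqa: F821 -- only defined on Python 2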
Code example #3
def __init__(self):
    super(CloudProvisioning, self).__init__()
    self.results_url = None
    self.results_reader = None
    self.user = User()
    self.__last_master_status = None
    self.browser_open = 'start'
    self.widget = None
    self.detach = False
    self.router = None
    self.test_ended = False
    self.check_interval = 5.0
    self._last_check_time = None
    self.public_report = False
    self.report_name = None
    self._workspaces = []
    self.launch_existing_test = None
    self.disallow_empty_execution = False
Code example #4
def __init__(self):
    super(BlazeMeterUploader, self).__init__()
    self.browser_open = 'start'
    self.kpi_buffer = []
    self.send_interval = 30
    self._last_status_check = time.time()
    self.send_data = True
    self.upload_artifacts = True
    self.send_monitoring = True
    self.monitoring_buffer = None
    self.public_report = False
    self.last_dispatch = 0
    self.results_url = None
    self._user = User()
    self._test = None
    self._master = None
    self._session = None
    self.first_ts = sys.maxsize
    self.last_ts = 0
    self.report_name = None
    self._dpoint_serializer = DatapointSerializer(self)
Code example #5
def test_flow():
    user = User()
    user.token = get_token()
    user.logger_limit = sys.maxsize

    user.fetch()
    accounts = user.accounts()
    workspaces = accounts.workspaces()
    for wsp in workspaces:
        wsp.fetch()
Code example #6
def test_flow():
    user = User()
    user.token = get_token()
    user.logger_limit = sys.maxsize

    user.fetch()
    accounts = user.accounts()
    workspaces = accounts.workspaces()
    tests = workspaces.multi_tests(ident=10005302)
    tests.delete()
    print(tests)
Code example #7
File: bza_client_example.py Project: yilongyu/taurus
def test_flow():
    user = User()
    user.token = get_token()
    user.logger_limit = sys.maxsize

    user.fetch()
    accounts = user.accounts()
    workspaces = accounts.workspaces()
    # opls = workspaces.private_locations()
    # sel_test = workspaces.tests(name='Selenium')
    projects = workspaces.projects()
    tests = projects.multi_tests()
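
Examples #5 through #7 all walk the same drill-down chain: User -> accounts() -> workspaces() -> projects()/tests(). The docstring in the next example types _workspaces as bzt.bza.BZAObjectsList[bzt.bza.Workspace], which suggests each call returns a list-like collection whose query methods fan out over its elements. A purely illustrative sketch of that idea, with made-up names rather than the actual bzt.bza implementation:

# Hypothetical illustration of a chainable, list-like collection; NOT bzt.bza code.
class ObjectsList(list):
    def flat_map(self, method_name):
        # Call the named method on every element and collect the results.
        result = ObjectsList()
        for obj in self:
            result.extend(getattr(obj, method_name)())
        return result


class Account(object):
    def __init__(self, workspaces):
        self._workspaces = workspaces

    def workspaces(self):
        return ObjectsList(self._workspaces)


accounts = ObjectsList([Account(["wsp-1", "wsp-2"]), Account(["wsp-3"])])
print(accounts.flat_map("workspaces"))  # ['wsp-1', 'wsp-2', 'wsp-3']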
Code example #8
class CloudProvisioning(MasterProvisioning, WidgetProvider):
    """
    :type user: bzt.bza.User
    :type router: BaseCloudTest
    :type _workspaces: bzt.bza.BZAObjectsList[bzt.bza.Workspace]
    """
    def __init__(self):
        super(CloudProvisioning, self).__init__()
        self.results_url = None
        self.results_reader = None
        self.user = User()
        self.__last_master_status = None
        self.browser_open = 'start'
        self.widget = None
        self.detach = False
        self.router = None
        self.test_ended = False
        self.check_interval = 5.0
        self._last_check_time = None
        self.public_report = False
        self.report_name = None
        self._workspaces = []
        self.launch_existing_test = None
        self.disallow_empty_execution = False

    @staticmethod
    def merge_with_blazemeter_config(module):
        if 'blazemeter' not in module.engine.config.get('modules'):
            module.log.debug("Module 'blazemeter' wasn't found in base config")
            return
        bm_mod = module.engine.instantiate_module('blazemeter')
        bm_settings = copy.deepcopy(bm_mod.settings)
        bm_settings.update(module.settings)
        module.settings = bm_settings

    def prepare(self):
        reporting = self.engine.config.get(Reporter.REP)

        CloudProvisioning.merge_with_blazemeter_config(self)
        CloudProvisioning.configure_client(self)
        self._workspaces = self.user.accounts().workspaces()
        if not self._workspaces:
            raise TaurusNetworkError(
                "Your account has no active workspaces, please contact BlazeMeter support"
            )

        self.__dump_locations_if_needed()

        super(CloudProvisioning, self).prepare()
        self.browser_open = self.settings.get("browser-open",
                                              self.browser_open)
        self.detach = self.settings.get("detach", self.detach)
        self.check_interval = dehumanize_time(
            self.settings.get("check-interval", self.check_interval))
        self.public_report = self.settings.get("public-report",
                                               self.public_report)
        is_execution_empty = not self.engine.config.get("execution")
        self.launch_existing_test = self.settings.get("launch-existing-test",
                                                      is_execution_empty,
                                                      force_set=True)

        if not self.launch_existing_test:
            self._filter_reporting()

        finder = ProjectFinder(self.parameters, self.settings, self.user,
                               self._workspaces, self.log)
        finder.default_test_name = "Taurus Cloud Test"

        test_type = self.settings.get(
            "test-type")  # user test type. should we mention it in doc?
        if not test_type:
            func_mode = self.engine.is_functional_mode()
            gui_mode = func_mode and (
                (len(self.executors) == 1)
                and isinstance(self.executors[0], SeleniumExecutor))

            if func_mode:
                if gui_mode:
                    test_type = FUNC_GUI_TEST_TYPE
                else:
                    test_type = FUNC_API_TEST_TYPE
            else:
                test_type = TAURUS_TEST_TYPE

        finder.test_type = test_type

        self.router = finder.get_test_router()

        if not self.launch_existing_test:
            self.router.prepare_locations(self.executors, self.engine.config)

            res_files = self.get_rfiles()
            files_for_cloud = self._fix_filenames(res_files)

            config_for_cloud = self.prepare_cloud_config()
            config_for_cloud.dump(self.engine.create_artifact("cloud", ""))
            del_files = self.settings.get("delete-test-files", True)
            self.router.resolve_test(config_for_cloud, files_for_cloud,
                                     del_files)

        self.router.sanitize_test()

        self.report_name = self.settings.get("report-name", self.report_name)
        if self.report_name == 'ask' and sys.stdin.isatty():
            self.report_name = input("Please enter report-name: ")

        self.widget = self.get_widget()

        if self.engine.is_functional_mode():
            self.results_reader = FunctionalBZAReader(self.log)
            self.engine.aggregator.add_underling(self.results_reader)
        else:
            self.results_reader = ResultsFromBZA()
            self.results_reader.log = self.log
            self.engine.aggregator.add_underling(self.results_reader)

        validate_passfail = any(
            reporter.get('module') == 'passfail' for reporter in reporting)

        if validate_passfail:
            if self.router._test.started_passfail_validation():
                timeout = 100
                for i in range(timeout):
                    if self.router._test.get_passfail_validation():
                        return
                    self.log.warning(
                        f"Unsuccessful Passfail validation attempt [{i+1}]. Retrying..."
                    )
                    if not i % 10:
                        self.log.warning(
                            "Please keep in mind that validation can take time."
                        )
                    sleep(1)
                self.log.error("Unable get Passfail validation!")
            else:
                self.log.error("Unable to validate Passfail configuration!")

    @staticmethod
    def _get_other_modules(config):
        used_classes = LocalClient.__name__, BlazeMeterUploader.__name__
        used_modules = []

        for module in config.get("modules"):
            class_name = config.get("modules").get(module).get("class")
            if class_name and (class_name.split('.')[-1] in used_classes):
                used_modules.append(module)
        return used_modules

    def _get_executors(self):
        executors = []
        for executor in self.executors:
            executors.append(executor.execution.get("executor"))
            if isinstance(executor, SeleniumExecutor):
                executors.append(executor.runner.execution.get("executor"))

        return executors

    def _filter_unused_modules(self, config, provisioning):
        services = [
            service.get("module") for service in config.get(Service.SERV)
        ]
        reporters = [
            reporter.get("module") for reporter in config.get(Reporter.REP)
        ]
        consolidator = config.get(SETTINGS).get("aggregator")

        used_modules = self._get_executors() + self._get_other_modules(config)
        used_modules += services + reporters + [consolidator, provisioning]

        modules = set(config.get("modules").keys())
        for module in modules:
            if config.get("modules")[module].get("send-to-blazemeter"):
                continue
            if module not in used_modules:
                del config.get("modules")[module]
            elif config.get("modules")[module].get("class"):
                del config.get("modules")[module]["class"]

    def prepare_cloud_config(self):
        # expand concurrency and throughput
        for executor in self.executors:
            executor.get_load()

        config = copy.deepcopy(self.engine.config)

        provisioning = config.get(Provisioning.PROV)
        self._filter_unused_modules(config, provisioning)

        # todo: should we remove config['version'] before sending to cloud?
        config['local-bzt-version'] = config.get('version', 'N/A')

        config.filter(CLOUD_CONFIG_BLACK_LIST, black_list=True)

        for execution in config[EXEC]:
            if execution.get("files") == []:
                del execution["files"]

            for param in (ScenarioExecutor.CONCURR, ScenarioExecutor.THRPT):
                param_value = execution.get(param).get(provisioning, None)
                if param_value is None:
                    del execution[param]
                else:
                    execution[param] = param_value

        if self.router.dedicated_ips:
            config[DEDICATED_IPS] = True

        assert isinstance(config, Configuration)
        return config

    def __dump_locations_if_needed(self):
        if self.settings.get("dump-locations", False):
            locations = {}
            for loc in self._workspaces.locations(include_private=True):
                locations[loc['id']] = loc

            data = [("ID", "Name")]
            for location_id in sorted(locations):
                location = locations[location_id]
                data.append((location_id, location['title']))
            table = SingleTable(data) if sys.stdout and sys.stdout.isatty() else AsciiTable(data)
            self.log.warning(
                "Dumping available locations instead of running the test:\n%s",
                table.table)
            raise NormalShutdown("Done listing locations")

    def _filter_reporting(self):
        reporting = self.engine.config.get(Reporter.REP, [])
        new_reporting = []
        for index, reporter in enumerate(reporting):
            exc = TaurusConfigError("'module' attribute not found in %s" %
                                    reporter)
            cls = reporter.get('module', exc)
            if cls == "blazemeter":
                self.log.warning(
                    "Explicit blazemeter reporting is skipped for cloud")
            else:
                new_reporting.append(reporter)

        self.engine.config[Reporter.REP] = new_reporting

    @staticmethod
    def configure_client(module):
        module.user.log = module.log
        module.user.logger_limit = module.settings.get(
            "request-logging-limit", module.user.logger_limit)
        module.user.address = module.settings.get("address",
                                                  module.user.address)
        module.user.token = module.settings.get("token", module.user.token)
        module.user.timeout = dehumanize_time(
            module.settings.get("timeout", module.user.timeout))
        if isinstance(module.user.http_session, requests.Session):
            module.log.debug("Installing http client")
            module.user.http_session = module.engine.get_http_client()
            module.user.http_request = module.user.http_session.request
        if not module.user.token:
            raise TaurusConfigError(
                "You must provide API token to use cloud provisioning")

    def startup(self):
        super(CloudProvisioning, self).startup()
        self.results_url = self.router.launch_test()
        self.log.info("Started cloud test: %s", self.results_url)
        if self.results_url:
            if self.browser_open in ('start', 'both'):
                open_browser(self.results_url)

        if self.user.token and self.public_report:
            public_link = self.router.master.make_report_public()
            self.log.info("Public report link: %s", public_link)

        if self.report_name:
            self.router.master.set({"name": str(self.report_name)})

    def _should_skip_check(self):
        now = time.time()
        if self._last_check_time is None:
            return False
        elif now >= self._last_check_time + self.check_interval:
            return False
        else:
            return True

    def check(self):
        if self.detach:
            self.log.warning('Detaching Taurus from started test...')
            return True

        if self._should_skip_check():
            self.log.debug("Skipping cloud status check")
            return False

        self._last_check_time = time.time()

        master = self._check_master_status()
        status = master.get('status')
        progress = master.get(
            'progress')  # number value of status, see BZA API

        if status != self.__last_master_status:
            self.__last_master_status = status
            self.log.info("Cloud test status: %s", status)

        if self.results_reader and progress and progress >= BZA_TEST_DATA_RECEIVED:
            self.results_reader.master = self.router.master

        if progress == ENDED:
            self.log.info("Test was stopped in the cloud: %s", status)
            self.test_ended = True
            return True

        self.router.start_if_ready()

        self.widget.update()
        return super(CloudProvisioning, self).check()

    @get_with_retry
    def _check_master_status(self):
        return self.router.get_master_status()

    def post_process(self):
        self.log.warning(
            'Part of result data might be missed here due to BM API specifics')

        if not self.detach and self.router and not self.test_ended:
            self.router.stop_test()

        if self.results_url:
            if self.browser_open in ('end', 'both'):
                open_browser(self.results_url)

        if self.router and self.router.master:
            full = self.router.master.get_full()
            if 'note' in full and full['note']:
                self.log.warning(
                    "Cloud test has probably failed with message: %s",
                    full['note'])

            for session in full.get('sessions', ()):
                for error in session.get("errors", ()):
                    raise TaurusException(to_json(error))

            if "hasThresholds" in full and full["hasThresholds"]:
                thresholds = self.router.master.get_thresholds()
                for item in thresholds.get('data', []):
                    if item.get('success', None) is False:
                        reason = None
                        for assertion in item.get('assertions', []):
                            if assertion.get('success', None) is False:
                                criterion = assertion.get('field', '')
                                label = assertion.get('label', '')
                                reason = "Cloud failure criterion %r (on label %r) was met" % (
                                    criterion, label)
                                break
                        if reason is None:
                            reason = "Cloud tests failed because failure criteria were met"
                        self.log.warning(reason)
                        raise AutomatedShutdown(reason)

            # if we have captured HARs, let's download them
            for service in self.engine.config.get(Service.SERV, []):
                mod = service.get(
                    'module',
                    TaurusConfigError("No 'module' specified for service"))
                assert isinstance(mod, str), mod
                module = self.engine.instantiate_module(mod)
                if isinstance(module, ServiceStubCaptureHAR):
                    self._download_logs()
                    break

            if "functionalSummary" in full:
                summary = full["functionalSummary"]
                if summary is None or summary.get("isFailed", False):
                    raise AutomatedShutdown("Cloud tests failed")

    def _download_logs(self):
        for session in self.router.master.sessions():
            assert isinstance(session, Session)
            for log in session.get_logs():
                self.log.info("Downloading %s from the cloud", log['filename'])
                cloud_dir = os.path.join(self.engine.artifacts_dir,
                                         'cloud-artifacts')
                if not os.path.exists(cloud_dir):
                    os.makedirs(cloud_dir)
                dest = os.path.join(cloud_dir, log['filename'])
                dwn = ExceptionalDownloader(self.engine.get_http_client())
                with ProgressBarContext() as pbar:
                    try:
                        dwn.get(log['dataUrl'],
                                dest,
                                reporthook=pbar.download_callback)
                    except BaseException:
                        self.log.debug("Error is: %s", traceback.format_exc())
                        self.log.warning("Failed to download from %s",
                                         log['dataUrl'])
                        continue

                    if log['filename'].startswith(
                            'artifacts') and log['filename'].endswith('.zip'):
                        with zipfile.ZipFile(dest) as zipf:
                            for name in zipf.namelist():
                                ext = name.split('.')[-1].lower()
                                if ext in ('har', 'jpg', 'js', 'html', 'css'):
                                    self.log.debug("Extracting %s to %s", name,
                                                   cloud_dir)
                                    zipf.extract(name, cloud_dir)

    def get_widget(self):
        if not self.widget:
            self.widget = CloudProvWidget(self.router)
        return self.widget
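
CloudProvisioning.check() above throttles its BlazeMeter status polling through _should_skip_check() and check_interval. A minimal, self-contained sketch of that throttling pattern (hypothetical class name, not the Taurus implementation):

import time


class IntervalThrottle(object):
    """Skip work until `interval` seconds have passed since the last run."""

    def __init__(self, interval=5.0):
        self.interval = interval
        self._last_check_time = None

    def should_skip(self):
        if self._last_check_time is None:
            return False  # never checked yet, so do the work now
        return time.time() < self._last_check_time + self.interval

    def check(self):
        if self.should_skip():
            return False  # too early, nothing to do
        self._last_check_time = time.time()
        return True  # caller performs the expensive status poll here


throttle = IntervalThrottle(interval=5.0)
if throttle.check():
    pass  # e.g. query the master status, as CloudProvisioning.check() does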
Code example #9
def test_ping(self):
    obj = User()
    obj.ping()
Code example #10
class BlazeMeterUploader(Reporter, AggregatorListener, MonitoringListener, Singletone):
    """
    Reporter class

    :type _test: bzt.bza.Test
    :type _master: bzt.bza.Master
    :type _session: bzt.bza.Session
    """

    def __init__(self):
        super(BlazeMeterUploader, self).__init__()
        self.browser_open = 'start'
        self.kpi_buffer = []
        self.send_interval = 30
        self._last_status_check = time.time()
        self.send_data = True
        self.upload_artifacts = True
        self.send_monitoring = True
        self.monitoring_buffer = None
        self.public_report = False
        self.last_dispatch = 0
        self.results_url = None
        self._user = User()
        self._test = None
        self._master = None
        self._session = None
        self.first_ts = sys.maxsize
        self.last_ts = 0
        self.report_name = None
        self._dpoint_serializer = DatapointSerializer(self)

    def prepare(self):
        """
        Read options for uploading, check that they're sane
        """
        super(BlazeMeterUploader, self).prepare()
        self.send_interval = dehumanize_time(self.settings.get("send-interval", self.send_interval))
        self.send_monitoring = self.settings.get("send-monitoring", self.send_monitoring)
        monitoring_buffer_limit = self.settings.get("monitoring-buffer-limit", 500)
        self.monitoring_buffer = MonitoringBuffer(monitoring_buffer_limit, self.log)
        self.browser_open = self.settings.get("browser-open", self.browser_open)
        self.public_report = self.settings.get("public-report", self.public_report)
        self.upload_artifacts = self.parameters.get("upload-artifacts", self.upload_artifacts)
        self._dpoint_serializer.multi = self.settings.get("report-times-multiplier", self._dpoint_serializer.multi)
        token = self.settings.get("token", "")
        if not token:
            self.log.warning("No BlazeMeter API key provided, will upload anonymously")
        self._user.token = token

        # usual fields
        self._user.logger_limit = self.settings.get("request-logging-limit", self._user.logger_limit)
        self._user.address = self.settings.get("address", self._user.address).rstrip("/")
        self._user.data_address = self.settings.get("data-address", self._user.data_address).rstrip("/")
        self._user.timeout = dehumanize_time(self.settings.get("timeout", self._user.timeout))
        if isinstance(self._user.http_session, requests.Session):
            self.log.debug("Installing http client")
            self._user.http_session = self.engine.get_http_client()
            self._user.http_request = self._user.http_session.request

        # direct data feeding case
        sess_id = self.parameters.get("session-id")
        if sess_id:
            self._session = Session(self._user, {'id': sess_id})
            self._session['userId'] = self.parameters.get("user-id", None)
            self._session['testId'] = self.parameters.get("test-id", None)
            self._test = Test(self._user, {'id': self._session['testId']})
            exc = TaurusConfigError("Need signature for session")
            self._session.data_signature = self.parameters.get("signature", exc)
            self._session.kpi_target = self.parameters.get("kpi-target", self._session.kpi_target)
            self.send_data = self.parameters.get("send-data", self.send_data)
        else:
            try:
                self._user.ping()  # to check connectivity and auth
            except HTTPError:
                self.log.error("Cannot reach online results storage, maybe the address/token is wrong")
                raise

            if token:
                wsp = self._user.accounts().workspaces()
                if not wsp:
                    raise TaurusNetworkError("Your account has no active workspaces, please contact BlazeMeter support")
                finder = ProjectFinder(self.parameters, self.settings, self._user, wsp, self.log)
                self._test = finder.resolve_external_test()
            else:
                self._test = Test(self._user, {'id': None})

        self.report_name = self.parameters.get("report-name", self.settings.get("report-name", self.report_name))
        if self.report_name == 'ask' and sys.stdin.isatty():
            self.report_name = input("Please enter report-name: ")

        if isinstance(self.engine.aggregator, ResultsProvider):
            self.engine.aggregator.add_listener(self)

        for service in self.engine.services:
            if isinstance(service, Monitoring):
                service.add_listener(self)

    def startup(self):
        """
        Initiate online test
        """
        super(BlazeMeterUploader, self).startup()
        self._user.log = self.log.getChild(self.__class__.__name__)

        if not self._session:
            url = self._start_online()
            self.log.info("Started data feeding: %s", url)
            if self.browser_open in ('start', 'both'):
                open_browser(url)

            if self._user.token and self.public_report:
                report_link = self._master.make_report_public()
                self.log.info("Public report link: %s", report_link)

    def _start_online(self):
        """
        Start online test

        """
        self.log.info("Initiating data feeding...")

        if self._test['id']:
            self._session, self._master = self._test.start_external()
        else:
            self._session, self._master, self.results_url = self._test.start_anonymous_external_test()
            self._test['id'] = self._session['testId']

        if self._test.token:
            self.results_url = self._master.address + '/app/#/masters/%s' % self._master['id']
            if self.report_name:
                self._session.set({"name": str(self.report_name)})

        return self.results_url

    def __get_jtls_and_more(self):
        """
        Compress all files in artifacts dir to single zipfile
        :rtype: (io.BytesIO,dict)
        """
        mfile = BytesIO()
        listing = {}

        logs = set()
        for handler in self.engine.log.parent.handlers:
            if isinstance(handler, logging.FileHandler):
                logs.add(handler.baseFilename)

        max_file_size = self.settings.get('artifact-upload-size-limit', 10) * 1024 * 1024  # 10MB
        with zipfile.ZipFile(mfile, mode='w', compression=zipfile.ZIP_DEFLATED, allowZip64=True) as zfh:
            for root, _, files in os.walk(self.engine.artifacts_dir):
                for filename in files:
                    full_path = os.path.join(root, filename)
                    if full_path in logs:
                        logs.remove(full_path)

                    fsize = os.path.getsize(full_path)
                    if fsize <= max_file_size:
                        zfh.write(full_path, os.path.join(os.path.relpath(root, self.engine.artifacts_dir), filename))
                        listing[full_path] = fsize
                    else:
                        msg = "File %s exceeds maximum size quota of %s and won't be included into upload"
                        self.log.warning(msg, filename, max_file_size)

            for filename in logs:  # upload logs unconditionally
                zfh.write(filename, os.path.basename(filename))
                listing[filename] = os.path.getsize(filename)
        return mfile, listing

    def __upload_artifacts(self):
        """
        If token provided, upload artifacts folder contents and bzt.log
        """
        if not self._session.token:
            return

        worker_index = self.engine.config.get('modules').get('shellexec').get('env').get('TAURUS_INDEX_ALL')
        if worker_index:
            suffix = '-%s' % worker_index
        else:
            suffix = ''
        artifacts_zip = "artifacts%s.zip" % suffix
        mfile, zip_listing = self.__get_jtls_and_more()
        self.log.info("Uploading all artifacts as %s ...", artifacts_zip)
        self._session.upload_file(artifacts_zip, mfile.getvalue())
        self._session.upload_file(artifacts_zip + '.tail.bz', self.__format_listing(zip_listing))

        handlers = self.engine.log.parent.handlers
        for handler in handlers:
            if isinstance(handler, logging.FileHandler):
                fname = handler.baseFilename
                self.log.info("Uploading %s", fname)
                fhead, ftail = os.path.splitext(os.path.split(fname)[-1])
                modified_name = fhead + suffix + ftail
                with open(fname, 'rb') as _file:
                    self._session.upload_file(modified_name, _file.read())
                    _file.seek(-4096, 2)
                    tail = _file.read()
                    tail = tail[tail.index(b("\n")) + 1:]
                    self._session.upload_file(modified_name + ".tail.bz", tail)

    def post_process(self):
        """
        Upload results if possible
        """
        if not self._session:
            self.log.debug("No feeding session obtained, nothing to finalize")
            return

        self.log.debug("KPI bulk buffer len in post-proc: %s", len(self.kpi_buffer))
        try:
            self.log.info("Sending remaining KPI data to server...")
            if self.send_data:
                self.__send_data(self.kpi_buffer, False, True)
                self.kpi_buffer = []

            if self.send_monitoring:
                self.__send_monitoring()
        finally:
            self._postproc_phase2()

        if self.results_url:
            if self.browser_open in ('end', 'both'):
                open_browser(self.results_url)
            self.log.info("Online report link: %s", self.results_url)

    def _postproc_phase2(self):
        try:
            if self.upload_artifacts:
                self.__upload_artifacts()
        except (IOError, TaurusNetworkError):
            self.log.warning("Failed artifact upload: %s", traceback.format_exc())
        finally:
            self._last_status_check = self.parameters.get('forced-last-check', self._last_status_check)
            self.log.debug("Set last check time to: %s", self._last_status_check)

            tries = self.send_interval  # NOTE: you dirty one...
            while not self._last_status_check and tries > 0:
                self.log.info("Waiting for ping...")
                time.sleep(self.send_interval)
                tries -= 1

            self._postproc_phase3()

    def _postproc_phase3(self):
        try:
            if self.send_data:
                self.end_online()

            if self._user.token and self.engine.stopping_reason:
                exc_class = self.engine.stopping_reason.__class__.__name__
                note = "%s: %s" % (exc_class, str(self.engine.stopping_reason))
                self.append_note_to_session(note)
                if self._master:
                    self.append_note_to_master(note)

        except KeyboardInterrupt:
            raise
        except BaseException as exc:
            self.log.debug("Failed to finish online: %s", traceback.format_exc())
            self.log.warning("Failed to finish online: %s", exc)

    def end_online(self):
        """
        Finish online test
        """
        if not self._session:
            self.log.debug("Feeding not started, so not stopping")
        else:
            self.log.info("Ending data feeding...")
            if self._user.token:
                self._session.stop()
            else:
                self._session.stop_anonymous()

    def append_note_to_session(self, note):
        self._session.fetch()
        if 'note' in self._session:
            note = self._session['note'] + '\n' + note
        note = note.strip()
        if note:
            self._session.set({'note': note[:NOTE_SIZE_LIMIT]})

    def append_note_to_master(self, note):
        self._master.fetch()
        if 'note' in self._master:
            note = self._master['note'] + '\n' + note
        note = note.strip()
        if note:
            self._master.set({'note': note[:NOTE_SIZE_LIMIT]})

    def check(self):
        """
        Send data if any in buffer
        """
        self.log.debug("KPI bulk buffer len: %s", len(self.kpi_buffer))
        if self.last_dispatch < (time.time() - self.send_interval):
            self.last_dispatch = time.time()
            if self.send_data and len(self.kpi_buffer):
                self.__send_data(self.kpi_buffer)
                self.kpi_buffer = []

            if self.send_monitoring:
                self.__send_monitoring()
        return super(BlazeMeterUploader, self).check()

    @send_with_retry
    def __send_data(self, data, do_check=True, is_final=False):
        """
        :type data: list[bzt.modules.aggregator.DataPoint]
        """
        if not self._session:
            return

        serialized = self._dpoint_serializer.get_kpi_body(data, is_final)
        self._session.send_kpi_data(serialized, do_check)

    def aggregated_second(self, data):
        """
        Send online data
        :param data: DataPoint
        """
        if self.send_data:
            self.kpi_buffer.append(data)

    def monitoring_data(self, data):
        if self.send_monitoring:
            self.monitoring_buffer.record_data(data)

    @send_with_retry
    def __send_monitoring(self):
        engine_id = self.engine.config.get('modules').get('shellexec').get('env').get('TAURUS_INDEX_ALL', '')
        if not engine_id:
            engine_id = "0"
        data = self.monitoring_buffer.get_monitoring_json(self._session)
        self._session.send_monitoring_data(engine_id, data)

    def __format_listing(self, zip_listing):
        lines = []
        for fname in sorted(zip_listing.keys()):
            bytestr = humanize_bytes(zip_listing[fname])
            if fname.startswith(self.engine.artifacts_dir):
                fname = fname[len(self.engine.artifacts_dir) + 1:]
            lines.append(bytestr + " " + fname)
        return "\n".join(lines)
Code example #11
File: test_blazemeter.py Project: keithmork/taurus
def test_ping(self):
    obj = User()
    obj.ping()