Example #1
    def __init__(self, cluster_spec: ClusterSpec, test_config: TestConfig,
                 verbose: bool):
        self.cluster_spec = cluster_spec
        self.test_config = test_config

        self.target_iterator = TargetIterator(cluster_spec, test_config)

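        # Helper objects for cluster management, KV access, monitoring,
        # REST/remote execution and profiling.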
        self.cluster = ClusterManager(cluster_spec, test_config)
        self.memcached = MemcachedHelper(test_config)
        self.monitor = Monitor(cluster_spec, test_config, verbose)
        self.rest = RestHelper(cluster_spec)
        self.remote = RemoteHelper(cluster_spec, verbose)
        self.profiler = Profiler(cluster_spec, test_config)

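        # Resolve the first master node and the server build running on it.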
        self.master_node = next(cluster_spec.masters)
        self.build = self.rest.get_version(self.master_node)

        self.metrics = MetricHelper(self)
        self.reporter = ShowFastReporter(cluster_spec, test_config, self.build)

        self.cbmonitor_snapshots = []
        self.cbmonitor_clusters = []

        if self.test_config.test_case.use_workers:
            self.worker_manager = WorkerManager(cluster_spec, test_config,
                                                verbose)
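
One detail worth pausing on: cluster_spec.masters is consumed with next(...), so it must be an iterator or generator, and only the first master it yields is queried for the build version. A minimal, self-contained illustration of that pattern (the generator below is a hypothetical stand-in, not perfrunner code):

    def masters():
        # Hypothetical stand-in for cluster_spec.masters.
        yield from ('10.1.0.1', '10.1.0.2')

    master_node = next(masters())  # only the first yielded master is used
    assert master_node == '10.1.0.1'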
Example #2
    def run(self):
        self.extract_tools()

        if self.test_config.backup_settings.use_tls or \
                self.test_config.restore_settings.use_tls:
            self.download_certificate()

        self.get_tool_versions()

        self.load()
        self.wait_for_persistence()
        self.check_num_items()
        self.compact_bucket(wait=True)
        self.backup()

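        # Snapshot the repository size right after the initial full backup.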
        initial_backup_size = local.calc_backup_size(self.cluster_spec, rounded=False)

        self.access()
        self.wait_for_persistence()

        # Define a secondary, incremental load. Its item count comes from
        # the dedicated 'additional_items' setting.
        inc_load = self.test_config.load_settings.additional_items
        workers = self.test_config.load_settings.workers
        size = self.test_config.load_settings.size

        # New key prefix needed to create incremental dataset.
        self.load(
            settings=LoadSettings({"items": inc_load, "workers": workers, "size": size}),
            target_iterator=TargetIterator(self.cluster_spec, self.test_config, prefix='inc-')
        )
        self.wait_for_persistence()

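        # Collect logs even if the incremental backup fails.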
        try:
            inc_backup_time = self.backup_with_stats(mode=True)
            total_backup_size = local.calc_backup_size(self.cluster_spec, rounded=False)
            inc_backup_size = round(total_backup_size - initial_backup_size, 2)
        finally:
            self.collectlogs()

        self._report_kpi(inc_backup_time, inc_backup_size)
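
The incremental backup size is never measured directly: the repository size is sampled once after the initial full backup and again after the incremental one, and the reported KPI is the rounded difference. A minimal sketch of that arithmetic (function name hypothetical):

    def incremental_size(total_size, initial_size):
        # Delta between cumulative repository sizes, rounded to two
        # decimals as in the test above.
        return round(total_size - initial_size, 2)

    assert incremental_size(12.5, 10.0) == 2.5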
Example #3
    def run(self):
        self.extract_tools()

        self.load()
        self.wait_for_persistence()
        self.backup()

        initial_backup_size = local.calc_backup_size(self.cluster_spec,
                                                     rounded=False)

        self.access()
        self.wait_for_persistence()

        # Define a secondary load. For this we borrow the 'creates' field,
        # since load doesn't normally use this anyway.
        inc_load = self.test_config.load_settings.creates
        workers = self.test_config.load_settings.workers
        size = self.test_config.load_settings.size

        # New key prefix needed to create incremental dataset.
        self.load(
            settings=LoadSettings({
                "items": inc_load,
                "workers": workers,
                "size": size,
            }),
            target_iterator=TargetIterator(self.cluster_spec,
                                           self.test_config,
                                           prefix='inc-'),
        )
        self.wait_for_persistence()

        inc_backup_time = self.backup_with_stats(mode=True)
        total_backup_size = local.calc_backup_size(self.cluster_spec,
                                                   rounded=False)
        inc_backup_size = round(total_backup_size - initial_backup_size, 2)

        self._report_kpi(inc_backup_time, inc_backup_size)
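
As in Example #2, the fresh 'inc-' key prefix guarantees that the secondary load creates new documents instead of mutating the initial dataset, which is what makes the second backup genuinely incremental. A quick self-contained check of that property (key formats hypothetical):

    initial_keys = {'doc-%06d' % i for i in range(1000)}
    incremental_keys = {'inc-%06d' % i for i in range(1000)}
    # Disjoint key spaces: the incremental load only adds items.
    assert initial_keys.isdisjoint(incremental_keys)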