Example #1
    def codespeed_push(self):
        """
        Push the results to codespeed - for random tests only.

        """

        # do not push results in case of development run
        if dev_mode:
            return

        # if the io pattern is sequential - do nothing
        if self.results["io_pattern"] == "sequential":
            return

        # in case of random test - push the results
        reads = self.all_results["4KiB"]["randread"]["IOPS"]
        writes = self.all_results["4KiB"]["randwrite"]["IOPS"]
        r_bw = self.all_results["1024KiB"]["randread"]["IOPS"]
        w_bw = self.all_results["1024KiB"]["randwrite"]["IOPS"]

        # Pushing the results into codespeed
        log.info(f"Pushing to codespeed : Read={reads} ; Write={writes} ; "
                 f"R-BW={r_bw} ; W-BW={w_bw}")
        push_perf_dashboard(self.results["storageclass"], reads, writes, r_bw,
                            w_bw)
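
For context, codespeed_push() above indexes into self.all_results by block size, then operation, then metric. A minimal, hypothetical sketch of that structure (placeholder values, not real measurements):

# Hypothetical shape of self.all_results consumed by codespeed_push();
# block size -> operation -> metric. All values are placeholders.
all_results = {
    "4KiB": {
        "randread": {"IOPS": 25000.0},
        "randwrite": {"IOPS": 8000.0},
    },
    "1024KiB": {
        # at a 1M block size the IOPS figure is used as bandwidth
        "randread": {"IOPS": 1500.0},
        "randwrite": {"IOPS": 900.0},
    },
}
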
Example #2
    def test_run_io(self, size, io_direction, jobs, runtime, depth):
        """
        Run FIO on the pod and push the IOPS / bandwidth results to the performance dashboard
        """
        logging.info(f"Running FIO with:\nsize: {size}\njobs: {jobs}\n"
                     f"runtime: {runtime}\nIO depth: {depth}\n")
        self.pod_obj.run_io(
            "fs",
            size=size,
            io_direction=io_direction,
            jobs=jobs,
            runtime=runtime,
            depth=depth,
        )
        logging.info("Waiting for results")
        fio_result = self.pod_obj.get_fio_results()
        logging.info("IOPs after FIO:")
        reads = fio_result.get("jobs")[0].get("read").get("iops")
        writes = fio_result.get("jobs")[0].get("write").get("iops")
        w_bw = fio_result.get("jobs")[0].get("write").get("bw")
        r_bw = fio_result.get("jobs")[0].get("read").get("bw")
        logging.info(f"Read: {reads}")
        logging.info(f"Write: {writes}")

        push_perf_dashboard(self.interface, reads, writes, r_bw, w_bw)
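
The parsing above assumes get_fio_results() returns fio's JSON output. A trimmed, hypothetical sample showing only the fields the test reads (placeholder values):

# Hypothetical, trimmed fio JSON result as parsed by test_run_io() above.
fio_result = {
    "jobs": [
        {
            "read": {"iops": 25000.0, "bw": 100000},   # 'bw' is fio's bandwidth counter; units depend on the fio version
            "write": {"iops": 8000.0, "bw": 32000},
        }
    ]
}
reads = fio_result.get("jobs")[0].get("read").get("iops")   # -> 25000.0
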
Example #3
def analyze_regression(io_pattern, storage_class, es_username):
    """
    Analyzes the FIO result for variance and regression
    The test fails if the test run has more than 5% regression

    Args:
        io_pattern (str): 'sequential' or 'random' workload
        storage_class (str): The storage class used in the test, reported to the dashboard
        es_username (str): ocs_build used in the CR object

    """
    es = Elasticsearch([{'host': constants.ES_SERVER_IP, 'port': constants.ES_SERVER_PORT}])
    # Todo: Fetch benchmark values for FIO, which
    #  Will be implemented after finalizing on h/w
    # fetch results for the current run with unique es_username
    fio_analyzed_result = es.search(index='ripsaw-fio-analyzed-result',
                                    body={"query": {"match": {'user': es_username}}})
    assert fio_analyzed_result['hits']['hits'], 'Results not found in Elasticsearch'
    # Initialize variables for codespeed results
    reads = 0
    writes = 0
    r_bw = 0
    w_bw = 0
    for result in fio_analyzed_result['hits']['hits']:
        test_data = result['_source']['ceph_benchmark_test']['test_data']
        object_size = test_data['object_size']
        operation = test_data['operation']
        total_iops = test_data['total-iops']
        log.info(
            f"io_pattern: {io_pattern}\n"
            f"block_size: {object_size}\n"
            f"operation: {operation}\n"
            f"total_iops: {total_iops}\n"
        )
        # Fail test if std deviation is above 5%
        # Todo: Remove the below skip for random workload once
        #  https://github.com/cloud-bulldozer/snafu/issues/180 is fixed
        if io_pattern == 'sequential':
            std_dev = 'std-dev-' + object_size
            variance = test_data[std_dev]
            log.info(f'variance - {variance}')
        # Todo: Fail test if 5% deviation from benchmark value

        # Extracting results for code speed
        if operation == "randread":
            if object_size == "4KiB":
                reads = total_iops
            if object_size == "1024KiB":  # if BS is 1M, then IOPS == Bandwidth
                r_bw = total_iops
        if operation == "randwrite":
            if object_size == "4KiB":
                writes = total_iops
            if object_size == "1024KiB":  # if BS is 1M, then IOPS == Bandwidth
                w_bw = total_iops
    # Pushing the results into codespeed
    push_perf_dashboard(storage_class, reads, writes, r_bw, w_bw)
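
The loop above expects each Elasticsearch hit to carry a ceph_benchmark_test.test_data document. A hypothetical _source for one hit, showing only the fields analyze_regression() reads (placeholder values):

# Hypothetical '_source' of a single 'ripsaw-fio-analyzed-result' hit.
hit_source = {
    'ceph_benchmark_test': {
        'test_data': {
            'object_size': '4KiB',
            'operation': 'randread',
            'total-iops': 25000.0,
            'std-dev-4KiB': 2.3,  # deviation key is 'std-dev-' + object_size
        }
    }
}
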
Example #4
    def codespeed_push(self):
        """
        Push the results to codespeed - for random tests only.

        """

        # if the io pattern is sequential - do nothing
        if self.results['io_pattern'] == 'sequential':
            return

        # in case of random test - push the results
        reads = self.all_results['4KiB']['randread']['IOPS']
        writes = self.all_results['4KiB']['randwrite']['IOPS']
        r_bw = self.all_results['1024KiB']['randread']['IOPS']
        w_bw = self.all_results['1024KiB']['randwrite']['IOPS']

        # Pushing the results into codespeed
        log.info(f'Pushing to codespeed : Read={reads} ; Write={writes} ; '
                 f'R-BW={r_bw} ; W-BW={w_bw}')
        push_perf_dashboard(self.results['storageclass'], reads, writes, r_bw,
                            w_bw)
Example #5
def analyze_regression(io_pattern, storage_class, es_username):
    """
    Analyzes the FIO result for variance and regression
    The test fails if the test run has more than 5% regression

    Args:
        io_pattern (str): 'sequential' or 'random' workload
        storage_class (str): The storage class used in the test, reported to the dashboard
        es_username (str): ocs_build used in the CR object

    """
    def _copy(es):
        """
        Copy All data from the internal ES server to the main ES

        Args:
            es (obj): elasticsearch object which connected to the main ES

        """

        # connecting to the internal ES via the local_server
        try:
            int_es = Elasticsearch([{'host': 'localhost', 'port': '9200'}])
        except ESExp.ConnectionError:
            log.error('Can not connect to the internal elastic-search server')
            return

        query = {'size': 10000, 'query': {'match_all': {}}}
        for ind in [
                'ripsaw-fio-logs', 'ripsaw-fio-results',
                'ripsaw-fio-analyzed-result'
        ]:
            log.info(f'Reading {ind} from internal ES server')
            try:
                result = int_es.search(index=ind, body=query)
            except ESExp.NotFoundError:
                log.warning(f'{ind} Not found in the Internal ES.')
                continue

            log.debug(f'The results from internal ES for {ind} are :{result}')
            log.info(f'Writing {ind} into main ES server')
            for doc in result['hits']['hits']:
                log.debug(f'Going to write : {doc}')
                es.index(index=ind, doc_type='_doc', body=doc['_source'])

    es = Elasticsearch([{
        'host': constants.ES_SERVER_IP,
        'port': constants.ES_SERVER_PORT
    }])
    _copy(es)
    # Todo: Fetch benchmark values for FIO, which
    #  Will be implemented after finalizing on h/w
    # fetch results for the current run with unique es_username
    fio_analyzed_result = es.search(index='ripsaw-fio-analyzed-result',
                                    body={'query': {'match': {'user': es_username}}})
    assert fio_analyzed_result['hits']['hits'], 'Results not found in Elasticsearch'
    # Initialize variables for codespeed results
    reads = 0
    writes = 0
    r_bw = 0
    w_bw = 0
    for result in fio_analyzed_result['hits']['hits']:
        test_data = result['_source']['ceph_benchmark_test']['test_data']
        object_size = test_data['object_size']
        operation = test_data['operation']
        total_iops = test_data['total-iops']
        log.info(f"io_pattern: {io_pattern}\n"
                 f"block_size: {object_size}\n"
                 f"operation: {operation}\n"
                 f"total_iops: {total_iops}\n")
        # Fail test if std deviation is above 5%
        # Todo: Remove the below skip for random workload once
        #  https://github.com/cloud-bulldozer/snafu/issues/180 is fixed
        if io_pattern == 'sequential':
            std_dev = 'std-dev-' + object_size
            variance = test_data[std_dev]
            log.info(f'variance - {variance}')
        # Todo: Fail test if 5% deviation from benchmark value

        # Extracting results for code speed
        if operation == "randread":
            if object_size == "4KiB":
                reads = total_iops
            if object_size == "1024KiB":  # if BS is 1M, then IOPS == Bandwidth
                r_bw = total_iops
        if operation == "randwrite":
            if object_size == "4KiB":
                writes = total_iops
            if object_size == "1024KiB":  # if BS is 1M, then IOPS == Bandwidth
                w_bw = total_iops
    # Pushing the results into codespeed
    push_perf_dashboard(storage_class, reads, writes, r_bw, w_bw)
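
The _copy() helper above indexes documents one at a time. For larger result sets the same copy could be expressed with the Python client's bulk helper; a sketch under that assumption (same three indices, using elasticsearch.helpers.bulk from the official client):

from elasticsearch import helpers

def _bulk_copy(int_es, es):
    # Sketch only: stream each internal index into the main ES server in one bulk call.
    for ind in ['ripsaw-fio-logs', 'ripsaw-fio-results', 'ripsaw-fio-analyzed-result']:
        result = int_es.search(index=ind, body={'size': 10000, 'query': {'match_all': {}}})
        actions = [{'_index': ind, '_source': doc['_source']}
                   for doc in result['hits']['hits']]
        if actions:
            helpers.bulk(es, actions)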