Example #1
def test_Statistics_measure_enabled_nested_cm():
    """
    Test measuring time with enabled statistics, via nested context managers.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 0.1
    inner_count = 3

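    # Nested timers: the outer operation and each inner operation are
    # measured independently.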
    with statistics('compile_schema_classes'):
        for _ in range(0, inner_count):
            with statistics('compile_mof_string'):
                time.sleep(duration)

    stats_dict = dict(statistics.snapshot())
    assert len(stats_dict) == 2

    assert 'compile_schema_classes' in stats_dict
    stats = stats_dict['compile_schema_classes']
    assert stats.count == 1

    assert 'compile_mof_string' in stats_dict
    stats = stats_dict['compile_mof_string']
    assert stats.count == inner_count
Example #2
    def test_print_stats_svrtime(self):  # pylint: disable=no-self-use
        """Simply print repr() and formatted() for a small statistics."""

        statistics = Statistics()
        statistics.enable()

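        # Each start_timer()/stop_timer() pair records one measurement; the
        # stop_timer() arguments are request length, reply length, and
        # server response time (in seconds).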
        stats = statistics.start_timer('EnumerateInstanceNames')
        time.sleep(0.1)
        stats.stop_timer(1200, 22000, 0.1)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.1)
        stats.stop_timer(1000, 20000, 0.1)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.2)
        stats.stop_timer(1500, 25000, 0.2)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.4)
        stats.stop_timer(1200, 35000, 0.4)

        print("\n\nTest print of repr() for a small statistics with server time:")
        print("================")
        print(repr(statistics))
        print("================")
        print("\nTest print of formatted() for the same statistics:")
        print("================")
        print(statistics.formatted())
        print("================")
Example #3
def test_Statistics_reset():
    """
    Test resetting statistics.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 1.0

    stats = statistics.start_timer('GetInstance')
    # test that reset fails because a timer is still running
    assert statistics.reset() is False
    time.sleep(duration)
    stats.stop_timer(100, 200)

    # take a snapshot
    snapshot = statistics.snapshot()

    # verify that only the first set of data is in the snapshot
    for _, stats in snapshot:
        assert stats.count == 1
        assert_time_range(stats.avg_time, duration)
        assert_time_range(stats.min_time, duration)
        assert_time_range(stats.max_time, duration)

    assert statistics.reset() is True

    # take another snapshot. This snapshot should be empty
    snapshot = statistics.snapshot()
    assert not snapshot
Example #4
def test_Statistics_enable(statistics_enable):
    # pylint: disable=redefined-outer-name
    """
    Test function for Statistics.enable()
    """

    statistics = Statistics(enable=statistics_enable)

    # The code to be tested
    statistics.enable()

    assert statistics.enabled is True
Example #5
def test_Statistics_measure_disabled_cm():
    """
    Test measuring time with disabled statistics, via context manager.
    """

    statistics = Statistics()

    duration = 0.1

    with statistics('EnumerateInstances'):
        time.sleep(duration)

    stats_list = statistics.snapshot()
    assert len(stats_list) == 0
Example #6
def test_Statistics_measure_exception():
    """
    Test measuring time with enabled statistics.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 1.0

    # Allowable delta in seconds between expected and actual duration.
    # Notes:
    # * Windows has only a precision of 1/60 sec.
    # * In CI environments, the tests sometimes run slow.
    delta = 0.5

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(duration)
    stats.stop_timer(100, 200)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(duration)
    stats.stop_timer(200, 400)

    for _, stats in statistics.snapshot():
        assert stats.count == 2
        assert time_abs_delta(stats.avg_time, duration) < delta
        assert time_abs_delta(stats.min_time, duration) < delta
        assert time_abs_delta(stats.max_time, duration) < delta
        assert stats.max_request_len == 200
        assert stats.min_request_len == 100
        assert stats.avg_request_len == 150
        assert stats.max_reply_len == 400
        assert stats.min_reply_len == 200
        assert stats.avg_reply_len == 300
Example #7
    def test_measure_exception(self):
        """Test measuring time with enabled statistics."""

        statistics = Statistics()
        statistics.enable()

        duration = 0.4
        # Windows has only a precision of 1/60 sec:
        delta = 0.04

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(duration)
        stats.stop_timer(100, 200)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(duration)
        stats.stop_timer(200, 400)

        for _, stats in statistics.snapshot():
            self.assertEqual(stats.count, 2)
            self.assertTrue(time_abs_delta(stats.avg_time, duration) < delta)
            self.assertTrue(time_abs_delta(stats.min_time, duration) < delta)
            self.assertTrue(time_abs_delta(stats.max_time, duration) < delta)
            self.assertEqual(stats.max_request_len, 200)
            self.assertEqual(stats.min_request_len, 100)
            self.assertEqual(stats.avg_request_len, 150)
            self.assertEqual(stats.max_reply_len, 400)
            self.assertEqual(stats.min_reply_len, 200)
            self.assertEqual(stats.avg_reply_len, 300)
Example #8
def test_Statistics_measure_exception():
    """
    Test measuring time with enabled statistics.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 1.0

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(duration)
    stats.stop_timer(100, 200)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(duration)
    stats.stop_timer(200, 400)

    for _, stats in statistics.snapshot():
        assert stats.count == 2
        assert_time_range(stats.avg_time, duration)
        assert_time_range(stats.min_time, duration)
        assert_time_range(stats.max_time, duration)
        assert stats.max_request_len == 200
        assert stats.min_request_len == 100
        assert stats.avg_request_len == 150
        assert stats.max_reply_len == 400
        assert stats.min_reply_len == 200
        assert stats.avg_reply_len == 300
Example #9
    def test_measure_disabled(self):
        """Test measuring time with disabled statistics."""

        statistics = Statistics()

        duration = 0.2

        stats = statistics.get_op_statistic('GetClass')
        self.assertEqual(stats.name, 'disabled')

        stats.start_timer()
        time.sleep(duration)
        stats.stop_timer(100, 200)

        for _, stats in statistics.snapshot():
            self.assertEqual(stats.count, 0)
            self.assertEqual(stats.avg_time, 0)
            self.assertEqual(stats.min_time, float('inf'))
            self.assertEqual(stats.max_time, 0)
Example #10
def test_Statistics_measure_enabled_cm():
    """
    Test measuring time with enabled statistics, via context manager.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 0.1

    with statistics('EnumerateInstances'):
        time.sleep(duration)

    stats_dict = dict(statistics.snapshot())
    assert len(stats_dict) == 1

    assert 'EnumerateInstances' in stats_dict
    stats = stats_dict['EnumerateInstances']
    assert stats.count == 1
Example #11
def test_Statistics_get_op_statistic(testcase, init_enable, method_name,
                                     exp_snapshot_len, exp_op_stats_attrs):
    # pylint: disable=unused-argument
    """
    Test function for Statistics.get_op_statistic()
    """

    statistics = Statistics(enable=init_enable)

    # The code to be tested
    op_stats = statistics.get_op_statistic(method_name)

    snapshot_length = len(statistics.snapshot())
    assert snapshot_length == exp_snapshot_len

    for attr_name in exp_op_stats_attrs:
        exp_attr_value = exp_op_stats_attrs[attr_name]
        attr_value = getattr(op_stats, attr_name)
        assert attr_value == exp_attr_value, \
            "Unexpected op_stats attribute '{}'".format(attr_name)
Example #12
def test_Statistics_measure_disabled():
    """
    Test measuring time with disabled statistics.
    """

    statistics = Statistics()

    duration = 0.2

    stats = statistics.get_op_statistic('GetClass')
    assert stats.name == 'disabled'

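    # With statistics disabled, the timer calls are no-ops and nothing is
    # recorded.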
    stats.start_timer()
    time.sleep(duration)
    stats.stop_timer(100, 200)

    for _, stats in statistics.snapshot():
        assert stats.count == 0
        assert stats.avg_time == 0
        assert stats.min_time == float('inf')
        assert stats.max_time == 0
Example #13
def test_Statistics_snapshot():
    """
    Test that snapshot() takes a stable snapshot.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 1.0

    stats = statistics.start_timer('GetInstance')
    time.sleep(duration)
    stats.stop_timer(100, 200)

    # take the snapshot
    snapshot = statistics.snapshot()

    # keep producing statistics data
    stats.start_timer()
    time.sleep(duration)
    stats.stop_timer(100, 200)

    # verify that only the first set of data is in the snapshot
    for _, stats in snapshot:
        assert stats.count == 1
        assert_time_range(stats.avg_time, duration)
        assert_time_range(stats.min_time, duration)
        assert_time_range(stats.max_time, duration)
Example #14
def test_Statistics_measure_enabled_with_servertime():
    # pylint: disable=invalid-name
    """
    Test measuring time with enabled statistics.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 1.0

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(duration)
    stats.stop_timer(1000, 2000, duration)

    for _, stats in statistics.snapshot():
        assert stats.count == 1
        assert_time_range(stats.avg_time, duration)
        assert_time_range(stats.min_time, duration)
        assert_time_range(stats.max_time, duration)
        assert_time_range(stats.avg_server_time, duration)
        assert_time_range(stats.min_server_time, duration)
        assert_time_range(stats.max_server_time, duration)
        assert stats.max_request_len == 1000
        assert stats.min_request_len == 1000
        assert stats.avg_request_len == 1000
        assert stats.max_reply_len == 2000
        assert stats.min_reply_len == 2000
        assert stats.avg_reply_len == 2000

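    # Resetting the individual operation statistic clears its counters.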
    stats.reset()
    assert stats.count == 0
    assert stats.avg_time == 0
    assert stats.min_time == float('inf')
    assert stats.max_time == 0
Example #15
    def test_snapshot(self):
        """Test that snapshot() takes a stable snapshot."""

        statistics = Statistics()
        statistics.enable()

        duration = 0.4
        # Windows has only a precision of 1/60 sec:
        delta = 0.04

        stats = statistics.start_timer('GetInstance')
        time.sleep(duration)
        stats.stop_timer(100, 200)

        # take the snapshot
        snapshot = statistics.snapshot()

        # keep producing statistics data
        stats.start_timer()
        time.sleep(duration)
        stats.stop_timer(100, 200)

        # verify that only the first set of data is in the snapshot
        for _, stats in snapshot:
            self.assertEqual(stats.count, 1)
            self.assertTrue(time_abs_delta(stats.avg_time, duration) < delta)
            self.assertTrue(time_abs_delta(stats.min_time, duration) < delta)
            self.assertTrue(time_abs_delta(stats.max_time, duration) < delta)
Example #16
    def test_snapshot(self):
        """Test that snapshot() takes a stable snapshot."""

        statistics = Statistics()
        statistics.enable()

        duration = 1.0

        # Allowable delta in seconds between expected and actual duration.
        # Notes:
        # * Windows has only a precision of 1/60 sec.
        # * In CI environments, the tests sometimes run slow.
        delta = 0.5

        stats = statistics.start_timer('GetInstance')
        time.sleep(duration)
        stats.stop_timer(100, 200)

        # take the snapshot
        snapshot = statistics.snapshot()

        # keep producing statistics data
        stats.start_timer()
        time.sleep(duration)
        stats.stop_timer(100, 200)

        # verify that only the first set of data is in the snapshot
        for _, stats in snapshot:
            self.assertEqual(stats.count, 1)
            self.assertTrue(time_abs_delta(stats.avg_time, duration) < delta,
                            "actual avg duration: %r" % stats.avg_time)
            self.assertTrue(time_abs_delta(stats.min_time, duration) < delta,
                            "actual min duration: %r" % stats.min_time)
            self.assertTrue(time_abs_delta(stats.max_time, duration) < delta,
                            "actual max duration: %r" % stats.max_time)
Example #17
    def test_enabling(self):
        """Test enabling and disabling."""

        statistics = Statistics()

        self.assertFalse(statistics.enabled,
                         "Error: initial state is not disabled")

        statistics.disable()
        self.assertFalse(statistics.enabled,
                         "Error: disabling a disabled statistics works")

        statistics.enable()
        self.assertTrue(statistics.enabled,
                        "Error: enabling a disabled statistics works")

        statistics.enable()
        self.assertTrue(statistics.enabled,
                        "Error: enabling an enabled statistics works")

        statistics.disable()
        self.assertFalse(statistics.enabled,
                         "Error: disabling an enabled statistics works")
Example #18
    def test_reset(self):
        """Test resetting statistics."""

        statistics = Statistics()
        statistics.enable()

        duration = 1.0

        # Allowable delta in seconds between expected and actual duration.
        # Notes:
        # * Windows has only a precision of 1/60 sec.
        # * In CI environments, the tests sometimes run slow.
        delta = 0.5

        stats = statistics.start_timer('GetInstance')
        # test that reset fails because a timer is still running
        self.assertFalse(statistics.reset())
        time.sleep(duration)
        stats.stop_timer(100, 200)

        # take a snapshot
        snapshot = statistics.snapshot()

        # verify that only the first set of data is in the snapshot
        for _, stats in snapshot:
            self.assertEqual(stats.count, 1)
            self.assertTrue(
                time_abs_delta(stats.avg_time, duration) < delta,
                "actual avg duration: %r" % stats.avg_time)
            self.assertTrue(
                time_abs_delta(stats.min_time, duration) < delta,
                "actual min duration: %r" % stats.min_time)
            self.assertTrue(
                time_abs_delta(stats.max_time, duration) < delta,
                "actual max duration: %r" % stats.max_time)

        self.assertTrue(statistics.reset())

        # take another snapshot. This snapshot should be empty
        snapshot = statistics.snapshot()
        self.assertTrue(len(snapshot) == 0)
Example #19
def test_Statistics_init(testcase, init_args, init_kwargs, exp_attrs):
    """
    Test function for Statistics.__init__()
    """

    # The code to be tested
    statistics = Statistics(*init_args, **init_kwargs)

    # Ensure that exceptions raised in the remainder of this function
    # are not mistaken as expected exceptions
    assert testcase.exp_exc_types is None

    exp_enabled = exp_attrs['enabled']
    assert statistics.enabled == exp_enabled
    assert isinstance(statistics.enabled, type(exp_enabled))
Example #20
    def test_measure_enabled_with_servertime(self):
        # pylint: disable=invalid-name
        """Test measuring time with enabled statistics."""

        statistics = Statistics()
        statistics.enable()

        duration = 1.0

        # Allowable delta in seconds between expected and actual duration.
        # Notes:
        # * Windows has only a precision of 1/60 sec.
        # * In CI environments, the tests sometimes run slow.
        delta = 0.5

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(duration)
        stats.stop_timer(1000, 2000, duration)

        for _, stats in statistics.snapshot():
            self.assertEqual(stats.count, 1)
            self.assertTrue(
                time_abs_delta(stats.avg_time, duration) <= delta,
                "actual avg duration: %r" % stats.avg_time)
            self.assertTrue(
                time_abs_delta(stats.min_time, duration) <= delta,
                "actual min duration: %r" % stats.min_time)
            self.assertTrue(
                time_abs_delta(stats.max_time, duration) <= delta,
                "actual max duration: %r" % stats.max_time)

            self.assertTrue(
                time_abs_delta(stats.avg_server_time, duration) <= delta,
                "actual avg server duration: %r" % stats.avg_server_time)
            self.assertTrue(
                time_abs_delta(stats.min_server_time, duration) <= delta,
                "actual min server duration: %r" % stats.min_server_time)
            self.assertTrue(
                time_abs_delta(stats.max_server_time, duration) <= delta,
                "actual max server duration: %r" % stats.max_server_time)

            self.assertEqual(stats.max_request_len, 1000)
            self.assertEqual(stats.min_request_len, 1000)
            self.assertEqual(stats.avg_request_len, 1000)
            self.assertEqual(stats.max_reply_len, 2000)
            self.assertEqual(stats.min_reply_len, 2000)
            self.assertEqual(stats.avg_reply_len, 2000)

        stats.reset()
        self.assertEqual(stats.count, 0)
        self.assertEqual(stats.avg_time, 0)
        self.assertEqual(stats.min_time, float('inf'))
        self.assertEqual(stats.max_time, 0)
Example #21
    def test_measure_exception(self):
        """Test measuring time with enabled statistics."""

        statistics = Statistics()
        statistics.enable()

        duration = 1.0

        # Allowable delta in seconds between expected and actual duration.
        # Notes:
        # * Windows has only a precision of 1/60 sec.
        # * In CI environments, the tests sometimes run slow.
        delta = 0.5

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(duration)
        stats.stop_timer(100, 200)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(duration)
        stats.stop_timer(200, 400)

        for _, stats in statistics.snapshot():
            self.assertEqual(stats.count, 2)
            self.assertTrue(
                time_abs_delta(stats.avg_time, duration) < delta,
                "actual avg duration: %r" % stats.avg_time)
            self.assertTrue(
                time_abs_delta(stats.min_time, duration) < delta,
                "actual min duration: %r" % stats.min_time)
            self.assertTrue(
                time_abs_delta(stats.max_time, duration) < delta,
                "actual max duration: %r" % stats.max_time)
            self.assertEqual(stats.max_request_len, 200)
            self.assertEqual(stats.min_request_len, 100)
            self.assertEqual(stats.avg_request_len, 150)
            self.assertEqual(stats.max_reply_len, 400)
            self.assertEqual(stats.min_reply_len, 200)
            self.assertEqual(stats.avg_reply_len, 300)
Example #22
def test_Statistics_measure_enabled_with_servertime():
    # pylint: disable=invalid-name
    """
    Test measuring time with enabled statistics.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 1.0

    # Allowable delta in seconds between expected and actual duration.
    # Notes:
    # * Windows has only a precision of 1/60 sec.
    # * In CI environments, the tests sometimes run slow.
    delta = 0.5

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(duration)
    stats.stop_timer(1000, 2000, duration)

    for _, stats in statistics.snapshot():
        assert stats.count == 1
        assert time_abs_delta(stats.avg_time, duration) <= delta
        assert time_abs_delta(stats.min_time, duration) <= delta
        assert time_abs_delta(stats.max_time, duration) <= delta

        assert time_abs_delta(stats.avg_server_time, duration) <= delta
        assert time_abs_delta(stats.min_server_time, duration) <= delta
        assert time_abs_delta(stats.max_server_time, duration) <= delta

        assert stats.max_request_len == 1000
        assert stats.min_request_len == 1000
        assert stats.avg_request_len == 1000
        assert stats.max_reply_len == 2000
        assert stats.min_reply_len == 2000
        assert stats.avg_reply_len == 2000

    stats.reset()
    assert stats.count == 0
    assert stats.avg_time == 0
    assert stats.min_time == float('inf')
    assert stats.max_time == 0
Example #23
    def test_measure_enabled_with_servertime(self):
        # pylint: disable=invalid-name
        """Test measuring time with enabled statistics."""

        statistics = Statistics()
        statistics.enable()

        duration = 0.4
        # Windows has only a precision of 1/60 sec:
        delta = 0.04

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(duration)
        stats.stop_timer(1000, 2000, duration)

        for _, stats in statistics.snapshot():
            self.assertEqual(stats.count, 1)
            self.assertTrue(time_abs_delta(stats.avg_time, duration) <= delta)
            self.assertTrue(time_abs_delta(stats.min_time, duration) <= delta)
            self.assertTrue(time_abs_delta(stats.max_time, duration) <= delta)

            self.assertTrue(
                time_abs_delta(stats.avg_server_time, duration) <= delta)
            self.assertTrue(
                time_abs_delta(stats.min_server_time, duration) <= delta)
            self.assertTrue(
                time_abs_delta(stats.max_server_time, duration) <= delta)

            self.assertEqual(stats.max_request_len, 1000)
            self.assertEqual(stats.min_request_len, 1000)
            self.assertEqual(stats.avg_request_len, 1000)
            self.assertEqual(stats.max_reply_len, 2000)
            self.assertEqual(stats.min_reply_len, 2000)
            self.assertEqual(stats.avg_reply_len, 2000)

        stats.reset()
        self.assertEqual(stats.count, 0)
        self.assertEqual(stats.avg_time, 0)
        self.assertEqual(stats.min_time, float('inf'))
        self.assertEqual(stats.max_time, 0)
Example #24
def test_leaks_Statistics_minimal():
    """
    Test function with a minimal Statistics object.
    """
    _ = Statistics()
Example #25
    def test_get(self):
        """Test getting statistics."""

        statistics = Statistics()
        snapshot_length = len(statistics.snapshot())
        self.assertEqual(
            snapshot_length, 0,
            "Error: initial state has no time statistics. "
            "Actual number = %d" % snapshot_length)

        stats = statistics.get_op_statistic('EnumerateInstances')
        snapshot_length = len(statistics.snapshot())
        self.assertEqual(
            snapshot_length, 0, "Error: getting a new stats with a disabled "
            "statistics results in no time statistics. "
            "Actual number = %d" % snapshot_length)
        self.assertEqual(stats.container, statistics)
        self.assertEqual(stats.name, "disabled")
        self.assertEqual(stats.count, 0)
        self.assertEqual(stats.avg_time, 0)
        self.assertEqual(stats.min_time, float('inf'))
        self.assertEqual(stats.max_time, 0)

        self.assertEqual(stats.avg_request_len, 0)
        self.assertEqual(stats.min_request_len, float('inf'))
        self.assertEqual(stats.max_request_len, 0)

        statistics.enable()

        method_name = 'OpenEnumerateInstances'

        stats = statistics.get_op_statistic(method_name)
        snapshot_length = len(statistics.snapshot())
        self.assertEqual(
            snapshot_length, 1, "Error: getting a new stats with an enabled "
            "statistics results in one time statistics. "
            "Actual number = %d" % snapshot_length)

        self.assertEqual(stats.container, statistics)
        self.assertEqual(stats.name, method_name)
        self.assertEqual(stats.count, 0)
        self.assertEqual(stats.avg_time, 0)
        self.assertEqual(stats.min_time, float('inf'))
        self.assertEqual(stats.max_time, 0)

        statistics.get_op_statistic(method_name)
        snapshot_length = len(statistics.snapshot())
        self.assertEqual(
            snapshot_length, 1, "Error: getting an existing stats with an "
            "enabled statistics results in the same number of "
            "statistics. "
            "Actual number = %d" % snapshot_length)
Example #26
    def test_print_stats_svrtime(self):  # pylint: disable=no-self-use
        """Test repr() and formatted() for a small statistics."""

        statistics = Statistics()
        statistics.enable()

        stats = statistics.start_timer('EnumerateInstanceNames')
        time.sleep(0.1)
        stats.stop_timer(1200, 22000, 0.1)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.1)
        stats.stop_timer(1000, 20000, 0.1)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.2)
        stats.stop_timer(1500, 25000, 0.2)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.4)
        stats.stop_timer(1200, 35000, 0.4)

        # test repr output
        stat_repr = repr(statistics)

        self.assert_regexp_matches(stat_repr, r'Statistics\(')

        self.assert_regexp_contains(
            stat_repr,
            r"OperationStatistic\(name='EnumerateInstanceNames', count=1,"
            r" exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
            r"max_time=[.0-9]+, avg_server_time=[.0-9]+, "
            r"min_server_time=[.0-9]+, "
            r"max_server_time=[.0-9]+, avg_request_len=[.0-9]+, "
            r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
            r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5},"
            r" max_reply_len=[0-9]{5}")

        self.assert_regexp_contains(
            stat_repr,
            r"OperationStatistic\(name='EnumerateInstances', count=3, "
            r"exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
            r"max_time=[.0-9]+, avg_server_time=[.0-9]+, "
            r"min_server_time=[.0-9]+, "
            r"max_server_time=[.0-9]+, avg_request_len=[.0-9]+, "
            r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
            r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5}, "
            r"max_reply_len=[0-9]{5}")

        self.assert_regexp_contains(
            stat_repr,
            r"OperationStatistic\(name='EnumerateInstances', count=3, "
            r"exception_count=0, avg_time=.+min_time=.+max_time=.+"
            r"avg_server_time=.+min_server_time.+max_server_time=.+"
            r"max_reply_len=[0-9]{5}")

        self.assert_regexp_contains(
            stat_repr,
            r"OperationStatistic\(name='EnumerateInstanceNames', count=1, "
            r"exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
            r"max_time=[.0-9]+, avg_server_time=[.0-9]+, min_server_time="
            r"[.0-9]+.+max_server_time=[.0-9]+, avg_request_len=[.0-9]+"
            r".+max_reply_len=[0-9]{5}")

        # test formatted output

        report = statistics.formatted()

        self.assert_regexp_contains(
            report,
            r'Count Excep +Time +ServerTime +RequestLen +ReplyLen +Operation')

        self.assert_regexp_contains(
            report, r'Cnt +Avg +Min +Max +Avg +Min +Max +Avg +Min +Max')

        self.assert_regexp_contains(
            report,
            r"3     0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
            r"[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]{5} "
            r"EnumerateInstances")

        self.assert_regexp_contains(
            report,
            r"1     0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
            r"[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]{5} "
            r"EnumerateInstanceNames")
Example #27
    def test_print_statistics(self):  # pylint: disable=no-self-use
        """Test repr() and formatted() for a small statistics."""

        statistics = Statistics()
        statistics.enable()

        stats = statistics.start_timer('EnumerateInstanceNames')
        time.sleep(0.1)
        stats.stop_timer(1200, 22000)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.1)
        stats.stop_timer(1000, 20000)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.2)
        stats.stop_timer(1500, 25000)

        stats = statistics.start_timer('EnumerateInstances')
        time.sleep(0.4)
        stats.stop_timer(1200, 35000)

        # test repr output
        stat_repr = repr(statistics)

        self.assert_regexp_matches(stat_repr, r'Statistics\(')

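        # No server time was passed to stop_timer(), so the server time
        # fields keep their initial values (avg/max 0.0, min inf).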
        self.assert_regexp_contains(
            stat_repr,
            r"OperationStatistic\(name='EnumerateInstanceNames', count=1,"
            r" exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
            r"max_time=[.0-9]+, avg_server_time=0.0, min_server_time=inf, "
            r"max_server_time=0.0, avg_request_len=[.0-9]+, "
            r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
            r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5},"
            r" max_reply_len=[0-9]{5}")

        self.assert_regexp_contains(
            stat_repr,
            r"OperationStatistic\(name='EnumerateInstances', count=3, "
            r"exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
            r"max_time=[.0-9]+, avg_server_time=0.0, min_server_time=inf, "
            r"max_server_time=0.0, avg_request_len=[.0-9]+, "
            r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
            r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5}, "
            r"max_reply_len=[0-9]{5}")

        # Test statistics report output

        report = statistics.formatted()

        self.assert_regexp_matches(
            report, r'Statistics \(times in seconds, lengths in Bytes\)')

        self.assert_regexp_contains(
            report, r"Count Excep *Time *RequestLen *ReplyLen *Operation")

        self.assert_regexp_contains(
            report, r" +3 +0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
            r"[.0-9]+ +[0-9]{4} +[0-9]{4} +"
            r"[.0-9]+ +[0-9]{5} +[0-9]{5} EnumerateInstances")

        self.assert_regexp_contains(
            report, r" +1 +0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
            r"[.0-9]+ +[0-9]{4} +[0-9]{4} +"
            r"[.0-9]+ +[0-9]{5} +[0-9]{5} EnumerateInstanceNames")
Example #28
def test_Statistics_server_time_suspension():
    """
    Test suspending server time and resetting.

    Server time suspension occurs at the level of an operation (i.e. its
    OperationStatistic object) if not all executions of the operation return
    the server response time. Once it has occurred, the server time values
    are reset and this condition is remembered until the OperationStatistic
    object's reset() is called (which normally is not used and is not tested
    here) or the parent Statistics object's reset() is called.
    """

    statistics = Statistics()
    statistics.enable()

    duration = 1.0
    server_resp_time = 0.8

    # Allowable delta in seconds between expected and actual duration.
    # Notes:
    # * Windows has only a precision of 1/60 sec.
    # * In CI environments, the tests sometimes run slow.
    delta = 0.5

    stats = statistics.start_timer('GetInstance')
    time.sleep(duration)
    stats.stop_timer(100, 200, server_resp_time)

    # verify server time after server response time was provided
    snapshot = statistics.snapshot()
    for _, stats in snapshot:
        assert stats.count == 1
        assert time_abs_delta(stats.avg_time, duration) < delta
        assert time_abs_delta(stats.min_time, duration) < delta
        assert time_abs_delta(stats.max_time, duration) < delta
        assert time_abs_delta(stats.avg_server_time, server_resp_time) < delta
        assert time_abs_delta(stats.min_server_time, server_resp_time) < delta
        assert time_abs_delta(stats.max_server_time, server_resp_time) < delta

    stats = statistics.start_timer('GetInstance')
    time.sleep(duration)
    stats.stop_timer(120, 250, None)

    # verify server time (and only that) is suspended when server response
    # time was not provided
    snapshot = statistics.snapshot()
    for _, stats in snapshot:
        assert stats.count == 2
        assert time_abs_delta(stats.avg_time, duration) < delta
        assert time_abs_delta(stats.min_time, duration) < delta
        assert time_abs_delta(stats.max_time, duration) < delta
        assert stats.avg_server_time == float(0)
        assert stats.min_server_time == float('inf')
        assert stats.max_server_time == float(0)

    stats = statistics.start_timer('GetInstance')
    time.sleep(duration)
    stats.stop_timer(120, 250, server_resp_time)

    # verify that server time suspension is sticky once it occurred, even if
    # server response time is provided again
    snapshot = statistics.snapshot()
    for _, stats in snapshot:
        assert stats.count == 3
        assert time_abs_delta(stats.avg_time, duration) < delta
        assert time_abs_delta(stats.min_time, duration) < delta
        assert time_abs_delta(stats.max_time, duration) < delta
        assert stats.avg_server_time == float(0)
        assert stats.min_server_time == float('inf')
        assert stats.max_server_time == float(0)

    assert statistics.reset() is True

    stats = statistics.start_timer('GetInstance')
    time.sleep(duration)
    stats.stop_timer(100, 200, server_resp_time)

    # verify that reset() also resets server time suspension
    snapshot = statistics.snapshot()
    for _, stats in snapshot:
        assert stats.count == 1
        assert time_abs_delta(stats.avg_time, duration) < delta
        assert time_abs_delta(stats.min_time, duration) < delta
        assert time_abs_delta(stats.max_time, duration) < delta
        assert time_abs_delta(stats.avg_server_time, server_resp_time) < delta
        assert time_abs_delta(stats.min_server_time, server_resp_time) < delta
        assert time_abs_delta(stats.max_server_time, server_resp_time) < delta
Example #29
def test_Statistics_print_statistics():
    """
    Test repr() and formatted() for a small statistics.
    """

    statistics = Statistics()
    statistics.enable()

    stats = statistics.start_timer('EnumerateInstanceNames')
    time.sleep(0.1)
    stats.stop_timer(1200, 22000)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(0.1)
    stats.stop_timer(1000, 20000)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(0.2)
    stats.stop_timer(1500, 25000)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(0.4)
    stats.stop_timer(1200, 35000)

    # test repr output
    stat_repr = repr(statistics)

    assert re.match(r'Statistics\(', stat_repr)

    assert re.search(
        r"OperationStatistic\(name='EnumerateInstanceNames', count=1,"
        r" exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
        r"max_time=[.0-9]+, avg_server_time=0.0, min_server_time=inf, "
        r"max_server_time=0.0, avg_request_len=[.0-9]+, "
        r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
        r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5},"
        r" max_reply_len=[0-9]{5}", stat_repr)

    assert re.search(
        r"OperationStatistic\(name='EnumerateInstances', count=3, "
        r"exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
        r"max_time=[.0-9]+, avg_server_time=0.0, min_server_time=inf, "
        r"max_server_time=0.0, avg_request_len=[.0-9]+, "
        r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
        r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5}, "
        r"max_reply_len=[0-9]{5}", stat_repr)

    # Test statistics report output

    report = statistics.formatted()

    assert re.match(r'Statistics \(times in seconds, lengths in Bytes\)',
                    report)

    assert re.search(
        r"Count Excep *ClientTime *RequestLen *ReplyLen *Operation", report)

    assert re.search(
        r" +3 +0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
        r"[.0-9]+ +[0-9]{4} +[0-9]{4} +"
        r"[.0-9]+ +[0-9]{5} +[0-9]{5} EnumerateInstances", report)

    assert re.search(
        r" +1 +0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
        r"[.0-9]+ +[0-9]{4} +[0-9]{4} +"
        r"[.0-9]+ +[0-9]{5} +[0-9]{5} EnumerateInstanceNames", report)
Example #30
def test_Statistics_print_stats_svrtime():
    """
    Test repr() and formatted() for a small statistics.
    """

    statistics = Statistics()
    statistics.enable()

    stats = statistics.start_timer('EnumerateInstanceNames')
    time.sleep(0.1)
    stats.stop_timer(1200, 22000, 0.1)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(0.1)
    stats.stop_timer(1000, 20000, 0.1)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(0.2)
    stats.stop_timer(1500, 25000, 0.2)

    stats = statistics.start_timer('EnumerateInstances')
    time.sleep(0.4)
    stats.stop_timer(1200, 35000, 0.4)

    # test repr output
    stat_repr = repr(statistics)

    assert re.match(r'Statistics\(', stat_repr)

    assert re.search(
        r"OperationStatistic\(name='EnumerateInstanceNames', count=1,"
        r" exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
        r"max_time=[.0-9]+, avg_server_time=[.0-9]+, "
        r"min_server_time=[.0-9]+, "
        r"max_server_time=[.0-9]+, avg_request_len=[.0-9]+, "
        r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
        r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5},"
        r" max_reply_len=[0-9]{5}", stat_repr)

    assert re.search(
        r"OperationStatistic\(name='EnumerateInstances', count=3, "
        r"exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
        r"max_time=[.0-9]+, avg_server_time=[.0-9]+, "
        r"min_server_time=[.0-9]+, "
        r"max_server_time=[.0-9]+, avg_request_len=[.0-9]+, "
        r"min_request_len=[0-9]{4}, max_request_len=[0-9]{4}, "
        r"avg_reply_len=[.0-9]+, min_reply_len=[0-9]{5}, "
        r"max_reply_len=[0-9]{5}", stat_repr)

    assert re.search(
        r"OperationStatistic\(name='EnumerateInstances', count=3, "
        r"exception_count=0, avg_time=.+min_time=.+max_time=.+"
        r"avg_server_time=.+min_server_time.+max_server_time=.+"
        r"max_reply_len=[0-9]{5}", stat_repr)

    assert re.search(
        r"OperationStatistic\(name='EnumerateInstanceNames', count=1, "
        r"exception_count=0, avg_time=[.0-9]+, min_time=[.0-9]+, "
        r"max_time=[.0-9]+, avg_server_time=[.0-9]+, min_server_time="
        r"[.0-9]+.+max_server_time=[.0-9]+, avg_request_len=[.0-9]+"
        r".+max_reply_len=[0-9]{5}", stat_repr)

    # test formatted output

    report = statistics.formatted()

    assert re.search(
        r'Count Excep +ClientTime +ServerTime +RequestLen +ReplyLen +'
        r'Operation', report)

    assert re.search(r'Cnt +Avg +Min +Max +Avg +Min +Max +Avg +Min +Max',
                     report)

    assert re.search(
        r"3     0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
        r"[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]{5} "
        r"EnumerateInstances", report)

    assert re.search(
        r"1     0 +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +[.0-9]+ +"
        r"[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]+ +[0-9]{5} "
        r"EnumerateInstanceNames", report)