def test_init_with_valid_parameters(self):
    """
    Simply assert that the constructor is exception free.

    Also tests the '_add_system_info_properties' method which gets
    called in the constructor.
    """
    try:
        # Construction without any extra properties.
        EventMetric("App", "Test Log Metric without additional properties")

        # Construction with a mix of property value types, including
        # a nested dictionary.
        extra_properties = {
            "IntProp": 2,
            "BoolProp": True,
            "StringProp": "This is a test string",
            "DictProp": {"Key1": "value1", "Key2": "Value2"},
        }
        EventMetric("App",
                    "Test Log Metric with additional properties",
                    properties=extra_properties)
    except Exception as e:
        self.fail(
            "Creating an instance of 'EventMetric' failed unexpectedly: %s" % (e))
def test_maximum_queue_size(self):
    """
    Test that the dispatcher has a maximum queue size to prevent a
    memory leak if the worker thread is not started.

    This test requires overriding what's being done in class setUp and
    deliberately stopping the dispatcher worker thread.
    """
    # Stop the dispatcher worker thread.
    self._destroy_engine()

    capacity = MetricsQueueSingleton.MAXIMUM_QUEUE_SIZE
    total_logged = 10 * capacity
    for index in range(total_logged):
        EventMetric.log("App",
                        "Testing maximum queue size %d" % (index),
                        properties={"Metric id": index})

    queue = MetricsQueueSingleton()._queue
    self.assertTrue(len(queue) <= capacity)

    # The oldest surviving item should be N items past the originally
    # queued ones, where N is total_logged minus the queue capacity.
    oldest = queue.popleft()
    self.assertEqual(oldest.data["event_properties"]["Metric id"],
                     total_logged - capacity)

    # Finally, the newest item should be the very last one logged.
    newest = queue.pop()
    self.assertEqual(newest.data["event_properties"]["Metric id"],
                     total_logged - 1)
def test_not_logging_older_tookit(self):
    """
    Test that logging metrics is not possible from an older version of
    toolkit: it can't even pass the metric version check and therefore
    won't trigger the urllib2.urlopen mock calls.
    """
    class server_capsMock:
        # Server caps mock defined locally since it only applies to
        # this particular test.
        def __init__(self):
            self.version = (6, 3, 11)

    # Setup test fixture, engine and context with the old server caps.
    self._setup_shotgun(server_capsMock())

    # Make at least one metric related call.
    EventMetric.log("App", "Test Log Metric with old server")

    # Because we are testing for the ABSENCE of a request, we do have
    # to wait longer than usual for the test to be valid.
    deadline = time.time() + (4 * MetricsDispatchWorkerThread.DISPATCH_INTERVAL)
    while time.time() < deadline:
        time.sleep(TestMetricsDispatchWorkerThread.SLEEP_INTERVAL)
        for _ in self._get_metrics():
            self.fail(
                "Was not expecting any request mock calls since code in metrics.py "
                "should have been filtered out based on server caps. version."
            )
def test_log_event_metric(self):
    """
    Self-test that the mock setup is correct by exercising a
    non-deprecated method.
    """
    EventMetric.log("App", "Testing Own Test Mock")

    # The queue singleton's 'log' method is mocked in setUp; logging
    # through the public API must have routed through it.
    # (Message text preserved verbatim from the original assertion.)
    failure_message = ("Was expecting a call to the "
                      "`MetricsQueueSingleton.log`"
                      "method from the non-deprecated "
                      "`log_event_metric` method.")
    self.assertTrue(self._mocked_method.called, failure_message)
def test_maximum_batch_size(self):
    """
    Test that the dispatcher worker thread is not sending all queued
    metrics at once but rather sends them in batches that can be
    handled by the server.
    """
    # Setup test fixture, engine and context with newer server caps
    #
    # Define a local server caps mock locally since it only
    # applies to this particular test
    class server_capsMock:
        def __init__(self):
            self.version = (7, 4, 0)

    self._setup_shotgun(server_capsMock())

    # The '_setup_shotgun' helper method is setting up an 'urlopen' mock.
    # For this test, we need to override that to something more specific:
    # a side effect that counts over-sized batches.
    self._urlopen_mock.stop()
    self._urlopen_mock = None
    self._urlopen_mock = patch(
        "urllib2.urlopen",
        side_effect=TestMetricsDispatchWorkerThread.
        _mocked_urlopen_for_test_maximum_batch_size)
    self._mocked_method = self._urlopen_mock.start()

    # We add 10 times the maximum number of events in the queue (+ extra).
    TEST_SIZE = 7 + (10 * MetricsQueueSingleton.MAXIMUM_QUEUE_SIZE)
    for i in range(TEST_SIZE):
        EventMetric.log("App",
                        "Testing maximum queue size %d" % (i),
                        properties={"Metric id": i})

    queue = MetricsQueueSingleton()._queue
    TIMEOUT_SECONDS = 40 * MetricsDispatchWorkerThread.DISPATCH_INTERVAL

    # Wait for the first events to show up in the queue.
    timeout = time.time() + TIMEOUT_SECONDS
    length = len(queue)
    while (length == 0) and (time.time() < timeout):
        time.sleep(TestMetricsDispatchWorkerThread.SLEEP_INTERVAL)
        length = len(queue)

    # Wait for the queue to be emptied.
    length = len(queue)
    timeout = time.time() + TIMEOUT_SECONDS
    while (length > 0) and (time.time() < timeout):
        time.sleep(TestMetricsDispatchWorkerThread.SLEEP_INTERVAL)
        length = len(queue)

    # FIX: 'assertEquals' is a deprecated alias of 'assertEqual';
    # use the canonical method name.
    self.assertEqual(self.batch_size_too_large_failure_count, 0)
def test_usage_of_extra_properties(self):
    """
    Simply assert usage of the properties parameter is exception free.
    """
    # Explicitly passing None as the properties argument.
    EventMetric("App", "Test add_event_properties", None)

    # A mix of property value types, including nested containers.
    extra_properties = {
        "IntProp": 2,
        "BoolProp": True,
        "StringProp": "This is a test string",
        "DictProp": {"Key1": "value1", "Key2": "Value2"},
        "ListProp": [1, 2, 3, 4, 5],
    }
    EventMetric("App", "Test add_event_properties",
                properties=extra_properties)
def test_data_property(self):
    """Object has a data dictionary that matches args."""
    obj = EventMetric("App", "Testing Data Property")
    self.assertTrue(hasattr(obj, 'data'))
    self.assertIsInstance(obj.data, dict)
    metric = obj.data
    # FIX: use 'assertIn' instead of 'assertTrue(key in dict)' —
    # same check, but failures report the missing key and the
    # container contents instead of just "False is not true".
    self.assertIn("event_group", metric)
    self.assertIn("event_name", metric)
    self.assertIn("event_properties", metric)
def test_log_event_metric_with_good_metrics(self):
    """Make sure there are no exceptions on good metrics."""
    try:
        # Without additional properties.
        EventMetric.log(
            "App", "Testing Log Metric without additional properties")

        # With a mix of additional property types.
        EventMetric.log("App",
                        "Testing Log Metric with additional properties",
                        properties={
                            "IntProp": 2,
                            "BoolProp": True,
                            "StringProp": "This is a test string",
                            "DictProp": {
                                "Key1": "value1",
                                "Key2": "Value2"
                            }
                        })
    except Exception as e:
        # BUG FIX: 'TestCase.fail' accepts a single message argument.
        # The original passed the format string and the value as two
        # positional arguments, which raises TypeError instead of
        # producing the intended failure message.
        self.fail(
            "EventMetric.log() failed unexpectedly on good metric: %s" % (e))
def test_init_with_invalid_parameters(self):
    """
    Simply assert that the constructor is exception free and is able
    to deal with invalid parameters and various types of extra
    properties.

    Also tests the '_add_system_info_properties' method which gets
    called in the constructor.
    """
    try:
        # FIX: removed the stray trailing comma after each statement —
        # it wrapped every result into a throwaway 1-tuple.
        EventMetric(None, "Testing No event group")
        EventMetric("No event name", None)
        EventMetric("No event name", None, None)
        EventMetric("No event name", None, {})
        EventMetric(None, None)
        EventMetric({}, {})
        EventMetric([], [])
    except Exception as e:
        # BUG FIX: 'TestCase.fail' accepts a single message argument;
        # format the exception into the message with '%' instead of
        # passing it as a second positional argument (TypeError).
        self.fail(
            "Creating an instance of 'EventMetric' failed unexpectedly: %s" % (e))
def test_log_event_metric_with_bad_metrics(self):
    """Make sure there are no exceptions on bad metrics."""
    try:
        # FIX: removed the stray trailing comma after each statement —
        # it wrapped every result into a throwaway 1-tuple.
        EventMetric.log(None, "No event group")
        EventMetric.log("No event name", None)
        EventMetric.log("No event name", "Using should causes test to fail")
        EventMetric.log(None, None)
        EventMetric.log({}, {})
        EventMetric.log([], [])
    except Exception as e:
        # BUG FIX: 'TestCase.fail' accepts a single message argument;
        # format the exception into the message with '%' instead of
        # passing it as a second positional argument (TypeError).
        self.fail("log_metric() failed unexpectedly on bad metric: %s" % (e))
def test_batch_interval(self):
    """
    Test that the dispatcher attempts emptying the queue on each cycle
    rather than sending a single batch per cycle.

    The older code was sending a single batch of metrics per cycle of
    5 seconds; with each batch being 10 metrics, the dispatcher could
    then handle only 2 metrics per second. A higher rate would cause
    metrics to accumulate in the dispatcher queue.

    Because we're dealing with another thread, there are timing issues
    and context switches which are difficult to account for. Because
    we're using a very small dispatcher cycle time, some cycles might
    actually go beyond the normal cycle period. For that reason we
    won't inspect individual cycles but the average cycle time over 10
    or more cycles.
    """
    class server_capsMock:
        # Local server caps mock; only applies to this particular test.
        def __init__(self):
            self.version = (7, 4, 0)

    # Setup test fixture, engine and context with newer server caps.
    self._setup_shotgun(server_capsMock())

    # '_setup_shotgun' installs a generic 'urlopen' mock; replace it
    # with the more specific one this test needs.
    self._urlopen_mock.stop()
    self._urlopen_mock = None
    self._urlopen_mock = patch(
        "urllib2.urlopen",
        side_effect=TestMetricsDispatchWorkerThread.
        _mocked_urlopen_for_test_maximum_batch_size)
    self._mocked_method = self._urlopen_mock.start()

    # Queue ten times the maximum number of events, plus some extra.
    event_count = 7 + (10 * MetricsQueueSingleton.MAXIMUM_QUEUE_SIZE)
    for index in range(event_count):
        EventMetric.log(EventMetric.GROUP_TOOLKIT,
                        "Testing maximum queue size %d" % (index),
                        properties={"Metric id": index})

    queue = MetricsQueueSingleton()._queue
    wait_budget = 40 * MetricsDispatchWorkerThread.DISPATCH_INTERVAL

    # Wait for the first events to show up in the queue.
    deadline = time.time() + wait_budget
    while (len(queue) == 0) and (time.time() < deadline):
        time.sleep(TestMetricsDispatchWorkerThread.SLEEP_INTERVAL)

    # Then wait for the queue to be emptied.
    deadline = time.time() + wait_budget
    while (len(queue) > 0) and (time.time() < deadline):
        time.sleep(TestMetricsDispatchWorkerThread.SLEEP_INTERVAL)

    # Check the overall average cycle time, NOT individual cycle times.
    max_interval_ms = MetricsDispatchWorkerThread.DISPATCH_INTERVAL * 1000
    call_count = len(self.mock_calls_timestamp)
    first_ms = self.mock_calls_timestamp[0]
    last_ms = self.mock_calls_timestamp[call_count - 1]
    # NOTE(review): dividing the elapsed span by the call count (rather
    # than by the number of intervals, call_count - 1) slightly
    # under-estimates the average cycle time; kept as-is to preserve
    # the original behavior — confirm intent with the test author.
    avg_time_ms = (last_ms - first_ms) / call_count
    self.assertTrue(avg_time_ms < max_interval_ms)
def _helper_test_end_to_end(self, group, name, properties):
    """
    Helper method for the test_end_to_end_* tests. Allows a deeper and
    more complete test cycle of creating, submitting and receiving a
    mocked-server response.
    """
    class server_capsMock:
        # Local server caps mock; only applies to this particular test.
        def __init__(self):
            self.version = (7, 4, 0)

    # Setup test fixture, engine and context with newer server caps.
    self._setup_shotgun(server_capsMock())

    # Save the name we expect to see on the other side: events that are
    # not in the supported list come back as "Unknown Event".
    if name in MetricsDispatchWorkerThread.SUPPORTED_EVENTS:
        expected_event_name = name
    else:
        expected_event_name = "Unknown Event"

    # Make at least one metric related call!
    EventMetric.log(group, name, properties)

    deadline = time.time() + (4 * MetricsDispatchWorkerThread.DISPATCH_INTERVAL)

    # Simple flag just to differentiate between two timeout conditions:
    # a) didn't even find a mocked request call
    # b) found request calls, but not the expected metric
    saw_request_call = False
    while time.time() < deadline:
        time.sleep(TestMetricsDispatchWorkerThread.SLEEP_INTERVAL)
        for request in self._get_urllib2_request_calls():
            saw_request_call = True
            payload = json.loads(request.get_data())
            # Traverse the metrics to find the one we've logged above.
            if "metrics" not in payload:
                continue
            # At this point we found Request calls with 'metrics' data.
            # Although we've not found our particular metric yet, we can
            # already verify that it was logged using the right URL.
            url = request.get_full_url()
            self.assertTrue(
                TestMetricsDispatchWorkerThread.METRIC_ENDPOINT in url,
                "Not using the latest metric '%s' endpoint" % (
                    TestMetricsDispatchWorkerThread.METRIC_ENDPOINT))
            for metric in payload["metrics"]:
                if ("event_name" in metric) and \
                        (metric["event_name"] == expected_event_name):
                    # Found it: return the received metric (which went
                    # through two conversions) so callers can test its
                    # properties, bypassing the timeout failures below.
                    return metric

    if saw_request_call:
        self.fail("Timed out waiting for expected metric.")
    else:
        self.fail("Timed out waiting for a mocked urlopen request call.")