class TestDatabaseNotFoundException(unittest.TestCase):
    """Verify that fetching a non-existent event id raises
    DatabaseNotFoundException."""

    # Upper bound on how many mock events a run may log.
    MAX_EVENTS_LOGGED = 1000

    def setUp(self) -> None:
        """Create every default event type, build random mock events for
        each, then log the batch belonging to one randomly chosen type."""
        self._db = Database(logger, True)
        self._mock_data = dict()
        total_events = random.randint(0, self.MAX_EVENTS_LOGGED)
        for event_name in DEFAULT_EVENTS:
            type_id = self._db.create_event_type(event_name)
            self.assertGreater(type_id, 0, msg="Failed to create event type")
            created_at = datetime.now()
            # Every mock event carries a fresh random payload.
            self._mock_data[event_name] = [
                Event(
                    0,
                    type_id,
                    event_name,
                    {
                        "floatMock": random.random(),
                        "datetimeMock": random_later_datetime().isoformat(),
                        "integerMock": random.randint(0, 1500000),
                        "stringMock": "a" * random.randint(0, 1024)
                    },
                    0,
                    1,
                    created_at)
                for _ in range(total_events)
            ]
        chosen_type = random.choice(DEFAULT_EVENTS)
        chosen_mocks = self._mock_data[chosen_type]
        chosen_type_id = None
        for mock in chosen_mocks:
            if chosen_type_id is None:
                chosen_type_id = mock.event_type_id
            logged_id = self._db.log_event(mock.event_data,
                                           mock.event_type_id,
                                           mock.node_id,
                                           mock.user_id)
            self.assertGreater(logged_id, 0, msg="Failed to log event.")
            mock.event_id = logged_id
        logged_count = self._db.get_event_count(event_type_id=chosen_type_id)
        self.assertEqual(logged_count, len(chosen_mocks))
        logger.info("Events logged: {0} ({1})".format(logged_count,
                                                      chosen_type))

    def test_lookupFailed(self):
        """An id beyond anything that could have been logged must raise."""
        with self.assertRaises(DatabaseNotFoundException):
            self._db.get_event_data(self.MAX_EVENTS_LOGGED +
                                    self.MAX_EVENTS_LOGGED)
class EventLog:
    """High-level event log API.

    Persists events through ``Database`` and keeps a Redis read-through
    cache of serialized events, the event type list, and user objects.
    Database failures surface to callers as ``EventLogException`` (or one
    of the ``APIKey*`` exceptions during API key validation).
    """

    def __init__(self, db=None, logger=None):
        """Connect to Redis, attach (or create) the database handle, and
        ensure every entry in DEFAULT_EVENTS exists as an event type.

        :param db: optional pre-built Database handle (dependency injection).
        :param logger: optional logger, also passed to a new Database.
        """
        self._cache = redis.Redis()
        if db:
            self._db = db
        else:
            self._db = Database(logger)
        self._logger = logger
        # Type rows are (event_type_id, event_type_name, ...).
        known_type_names = [each[1] for each in self.list_event_types()]
        # Create any default event types missing from the database.
        missing_events = set(DEFAULT_EVENTS) - set(known_type_names)
        for new_event in missing_events:
            self._db.create_event_type(new_event)

    def deserialize_user_object(self, event: Event):
        """Rebuild the cached user object that produced *event*.

        :returns: an AnalystUser or DjangoUser on a cache hit, or None
            (implicitly) on a cache miss.
        """
        if event.is_analyst():
            cached_user_data = self._cache.get("analyst:{0}".format(
                event.user_id))
            if cached_user_data:
                # BUG FIX: the original called json.dumps() here, which
                # re-serializes the cached bytes instead of decoding them;
                # json.loads() is required to get a dict back.
                user_data = json.loads(cached_user_data)
                return AnalystUser(user_data["user_id"],
                                   user_data["email_address"],
                                   user_data["session_token"],
                                   user_data["full_name"],
                                   user_data["permissions"],
                                   user_data["view_event_type_ids"])
        else:
            cached_user_data = self._cache.get("django:{0}".format(
                event.user_id))
            if cached_user_data:
                # BUG FIX: json.loads, not json.dumps (see above).
                user_data = json.loads(cached_user_data)
                return DjangoUser(
                    user_data["user_id"], user_data["username"],
                    user_data["first_name"], user_data["last_name"],
                    user_data["email_address"],
                    datetime.datetime.strptime(user_data["created"],
                                               "%Y-%m-%dT%H:%M:%S%z"),
                    datetime.datetime.strptime(user_data["last_logged_in"],
                                               "%Y-%m-%dT%H:%M:%S%z"))

    def get_api_key_object(self, api_key: str) -> APIKey:
        """Validate *api_key* and return its APIKey record.

        Resets the hourly quota window when the previous one has lapsed.

        :raises APIKeySuspended: the key is suspended.
        :raises APIKeyRateLimited: the key exhausted its quota.
        :raises APIKeyInvalid: the key failed database validation.
        """
        try:
            node_data = self._db.validate_api_key(api_key)
            node = APIKey(node_data[0], api_key, node_data[1], node_data[2],
                          node_data[3], node_data[4], node_data[5])
            if node.suspended_event:
                raise APIKeySuspended
            # Start a fresh one-hour quota window if the old one expired.
            if node.next_reset is None or node.next_reset < \
                    datetime.datetime.now():
                next_reset = datetime.datetime.now() + datetime.timedelta(
                    hours=1)
                node.next_reset = next_reset
                node.events_posted = 0
                self._db.reset_quota(node.node_id, next_reset)
            if node.events_posted >= node.quota:
                raise APIKeyRateLimited
            return node
        except DatabaseException:
            raise APIKeyInvalid

    def list_api_keys(self) -> list:
        """Return every stored API key as an APIKey object.

        :raises EventLogException: on any database failure.
        """
        try:
            all_api_keys = self._db.list_api_keys()
        except DatabaseException:
            raise EventLogException
        return [APIKey(row[0], row[1], row[2], row[3], row[4], row[5], row[6])
                for row in all_api_keys]

    def add_event_type(self, new_event_type):
        """Create a new event type and invalidate the cached type list.

        :returns: the new event type's id (> 0).
        :raises EventLogException: when the database rejects the insert.
        """
        self._cache.delete("event_types")
        new_event_type_id = self._db.create_event_type(new_event_type)
        if new_event_type_id > 0:
            return new_event_type_id
        raise EventLogException

    @property
    def event_type_ids_as_set(self) -> set:
        """All known event type ids as a set.

        :raises EventLogException: when the type list cannot be fetched
            (propagated from list_event_types; the original's
            catch-and-re-raise of the same exception type was a no-op).
        """
        return {each[0] for each in self.list_event_types()}

    def list_event_types(self) -> list:
        """Return the event type rows, serving from the Redis cache when
        possible.

        :raises EventLogException: when the database reports an error,
            signalled by a single sentinel row whose id is -1.
        """
        event_types_cached = self._cache.get("event_types")
        if event_types_cached:
            event_types = json.loads(event_types_cached)
        else:
            event_types = self._db.list_event_types()
            # BUG FIX: only cache a good result — the original cached the
            # DB error sentinel, poisoning Redis after a transient failure.
            if not (len(event_types) == 1 and event_types[0][0] == -1):
                self._cache.set("event_types", json.dumps(event_types))
        if len(event_types) == 1 and event_types[0][0] == -1:
            raise EventLogException
        return event_types

    def get_event_count(self, user_id=None, event_type_id=None, since=None,
                        until=None):
        """Proxy to Database.get_event_count with optional filters."""
        return self._db.get_event_count(user_id, event_type_id, since, until)

    def log_event(self, event_data: dict, event_type_id: int,
                  user_id: int = None, node_id: int = None):
        """Persist an event and cache its serialized form.

        :returns: the newly built Event.
        :raises EventLogException: when the insert fails or the event
            type id is unknown.
        """
        event_id = self._db.log_event(event_data, event_type_id, user_id,
                                      node_id)
        if not event_id:
            raise EventLogException
        event_type = None
        for event in self.list_event_types():
            if event[0] == event_type_id:
                event_type = event[1]
                break
        if not event_type:
            raise EventLogException
        # NOTE(review): user_id/node_id positional ordering here disagrees
        # with get_event's cache branch (node_id before user_id); one of
        # the call sites is swapped — confirm against Event's constructor.
        new_event = Event(event_id, event_type_id, event_type, event_data,
                          user_id, node_id,
                          datetime.datetime.now().isoformat())
        self._cache.set("event_id:{0}".format(event_id), str(new_event))
        return new_event

    def get_event(self, event_id: int) -> Event:
        """Fetch one event by id, preferring the Redis cache.

        :raises EventLogException: when the id is unknown.
        """
        cached_event_data = self._cache.get("event_id:{0}".format(event_id))
        if cached_event_data:
            cached_obj = json.loads(cached_event_data)
            return Event(
                cached_obj["event_id"], cached_obj["event_type_id"],
                cached_obj["event_type"], cached_obj["event_data"],
                cached_obj["node_id"], cached_obj["user_id"],
                datetime.datetime.strptime(cached_obj["created"],
                                           "%Y-%m-%dT%H:%M:%S%z"))
        db_event_record = self._db.get_event_data(event_id)
        if db_event_record:
            # DB row: event_id, event_type_id, node_id, user_id, event_data,
            # created, event_type.event_type
            # BUG FIX: the event_type argument was db_event_record[2]
            # (node_id per the row layout above); it must be column 6, as
            # retrieve_events already does.
            return Event(db_event_record[0], db_event_record[1],
                         db_event_record[6],
                         json.loads(db_event_record[4]),
                         db_event_record[2], db_event_record[3],
                         db_event_record[5])
        raise EventLogException

    def retrieve_events(self, user_id: int = None, since: datetime = None,
                        until: datetime = None, event_type_id: int = None,
                        node_id: int = None, limit: int = 100):
        """Return up to *limit* matching events as dicts, filling the
        Redis cache for any rows not already cached.

        :raises EventLogException: on any database failure.
        """
        try:
            meta_data = self._db.retrieve_events(user_id, event_type_id,
                                                 node_id, since, until, limit)
            # Meta rows: event_id, event_type_id, node_id, user_id, created
            output = []
            for row in meta_data:
                event_id = row[0]
                cache_key = "event_id:{0}".format(event_id)
                cached = self._cache.get(cache_key)
                if cached is None:
                    record = self._db.get_event_data(event_id)
                    # NOTE(review): positions 5/6 pass user_id before
                    # node_id, the opposite of get_event — confirm against
                    # Event's constructor.
                    new_event = Event(record[0], record[1], record[6],
                                      json.loads(record[4]), record[3],
                                      record[2], record[5])
                    self._cache.set(cache_key, str(new_event))
                    output.append(json.loads(str(new_event)))
                else:
                    output.append(json.loads(cached))
            return output
        except DatabaseException:
            raise EventLogException

    def update_quota(self, node: APIKey):
        """Best-effort quota update for *node*; logs instead of raising
        on database failure (deliberate — callers don't handle it)."""
        try:
            self._db.update_quota(node.node_id)
        except DatabaseException:
            if self._logger:
                self._logger.error(
                    "Failed to update API rate quota for node id: {0}".format(
                        node.node_id))
class TestEventTypeIdLogging(unittest.TestCase):
    """Exercise logging and retrieval of events filtered by event type id."""

    def setUp(self):
        """Create the default event types and build 10-50 unlogged mock
        events (random payloads) for each of them."""
        self._db = Database(logger, True)
        self._mock_data = dict()
        now = datetime.now()
        for each in DEFAULT_EVENTS:
            event_type_id = self._db.create_event_type(each)
            self.assertGreater(event_type_id, 0,
                               msg="Failed to create event type")
            mock_events = []
            for _ in range(random.randint(10, 50)):
                mock_event_data = {
                    "floatMock": random.random(),
                    "datetimeMock": random_later_datetime().isoformat(),
                    "integerMock": random.randint(0, 1500000),
                    "stringMock": "a" * random.randint(0, 1024)
                }
                mock_events.append(
                    Event(0, event_type_id, each, mock_event_data, 0, 1, now))
            self._mock_data[each] = mock_events

    def test_logging(self):
        """Log all mocks for one random type, then verify per-event
        lookups, the bulk type query, and the overall event count."""
        total_events_logged = 0
        event_type = random.choice(DEFAULT_EVENTS)
        mock_events = self._mock_data[event_type]
        event_type_id = None
        for each_mock in mock_events:
            if event_type_id is None:
                event_type_id = each_mock.event_type_id
            event_id = self._db.log_event(each_mock.event_data,
                                          each_mock.event_type_id,
                                          each_mock.node_id,
                                          each_mock.user_id)
            self.assertGreater(event_id, 0, msg="Failed to log event.")
            each_mock.event_id = event_id
        event_type_count = self._db.get_event_count(
            event_type_id=event_type_id)
        self.assertEqual(event_type_count, len(mock_events))
        logger.info("Events logged: {0} ({1})".format(event_type_count,
                                                      event_type))
        logger.info("Fetching events individually...")
        total_events_logged += event_type_count
        for each_mock in mock_events:
            db_event_data = self._db.get_event_data(each_mock.event_id)
            # DB row: event_id, event_type_id, node_id, user_id, event_data,
            # created, event_type.event_type
            self.assertEqual(db_event_data[1], each_mock.event_type_id)
            self.assertEqual(db_event_data[2], each_mock.node_id)
            self.assertEqual(db_event_data[3], each_mock.user_id)
        logger.info("Retrieving all events of type in single query...")
        all_events_of_type = self._db.retrieve_events(
            event_type_id=event_type_id)
        # Length check first so a row-count mismatch fails cleanly instead
        # of silently truncating the element-wise comparison below.
        self.assertEqual(len(all_events_of_type), event_type_count)
        # BUG FIX: build a reversed copy — the original aliased mock_events
        # and called .reverse(), mutating the fixture list stored in
        # self._mock_data in place.
        mock_events_reversed = list(reversed(mock_events))
        # Query rows come back newest-first:
        # event_id, event_type_id, node_id, user_id, created
        for row, mock in zip(all_events_of_type, mock_events_reversed):
            self.assertEqual(row[0], mock.event_id)
            self.assertEqual(row[1], mock.event_type_id)
            self.assertEqual(row[2], mock.node_id)
            self.assertEqual(row[3], mock.user_id)
        db_total_event_count = self._db.get_event_count()
        self.assertEqual(db_total_event_count, total_events_logged)