def resync(self, subscribers):
    """Replace the store's contents with ``subscribers``.

    Transient per-subscriber state already recorded in the store
    survives the resync: it is snapshotted before the table is wiped
    and copied back onto any incoming subscriber with a matching id.

    Args:
        subscribers - list of subscribers to be in the store.
    """
    with self.conn:
        # Snapshot each existing subscriber's state, keyed by id.
        preserved_state = {}
        rows = self.conn.execute(
            "SELECT subscriber_id, data FROM subscriberdb")
        for sub_id, blob in rows:
            existing = SubscriberData()
            existing.ParseFromString(blob)
            preserved_state[sub_id] = existing.state

        # Wipe the table, then repopulate from the new list,
        # grafting the preserved state back on where ids match.
        self.conn.execute("DELETE FROM subscriberdb")
        for sub in subscribers:
            sid = SIDUtils.to_str(sub.sid)
            if sid in preserved_state:
                sub.state.CopyFrom(preserved_state[sid])
            serialized = sub.SerializeToString()
            self.conn.execute(
                "INSERT INTO subscriberdb(subscriber_id, data) "
                "VALUES (?, ?)", (sid, serialized))
    self._on_ready.resync(subscribers)
def get_subscriber_data(self, subscriber_id):
    """Fetch and return the SubscriberData proto for ``subscriber_id``.

    Raises:
        SubscriberNotFoundError: no row exists for the id.
        SubscriberServerTooBusy: the backing sqlite db was locked; the
            process holding the lock is logged (via fuser) as a
            diagnostic aid before raising.
    """
    db_location = self._db_locations[self._sid2bucket(subscriber_id)]
    conn = sqlite3.connect(db_location, uri=True)
    try:
        with conn:
            row = conn.execute(
                "SELECT data FROM subscriberdb WHERE subscriber_id = ?",
                (subscriber_id, ),
            ).fetchone()
            if not row:
                raise SubscriberNotFoundError(subscriber_id)
    except sqlite3.OperationalError:
        # Best-effort diagnostics: report which process holds the
        # database lock.  db_location is a URI; strip the scheme and
        # any query string to recover the filesystem path.
        db_parts = db_location.split(":", 1)
        if (len(db_parts) == 2) and db_parts[1]:
            path_str = db_parts[1].split("?")
            proc = subprocess.Popen(
                ["/usr/bin/fuser", "-uv", path_str[0]],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            logging.info(proc.communicate())
        raise SubscriberServerTooBusy(subscriber_id)
    finally:
        conn.close()
    subscriber_data = SubscriberData()
    subscriber_data.ParseFromString(row[0])
    return subscriber_data
def edit_subscriber(self, subscriber_id):
    """Context manager that yields the subscriber proto for mutation.

    On normal exit of the caller's block the (possibly mutated) proto
    is serialized and written back inside the same transaction; if the
    block raises, the connection context manager rolls back.

    Raises:
        SubscriberNotFoundError: no row exists for the id.
    """
    db_location = self._db_locations[self._sid2bucket(subscriber_id)]
    conn = sqlite3.connect(db_location, uri=True)
    try:
        with conn:
            row = conn.execute(
                "SELECT data FROM subscriberdb WHERE subscriber_id = ?",
                (subscriber_id, ),
            ).fetchone()
            if not row:
                raise SubscriberNotFoundError(subscriber_id)
            subscriber_data = SubscriberData()
            subscriber_data.ParseFromString(row[0])
            # Hand the proto to the caller; edits made inside the
            # `with` block are persisted below.
            yield subscriber_data
            conn.execute(
                "UPDATE subscriberdb SET data = ? WHERE subscriber_id = ?",
                (subscriber_data.SerializeToString(), subscriber_id),
            )
    finally:
        conn.close()
def process_update(self, stream_name, updates, resync):
    """Apply a stream of subscriber updates from the cloud.

    The cloud streams ALL subscribers registered, both active and
    inactive.  Since we don't have a good way of knowing whether a
    detach succeeds or fails to update the local database correctly,
    we have to send down all subscribers to keep trying to delete
    inactive subscribers.

    TODO we can optimize a bit on the MME side to not detach already
    detached subscribers.
    """
    logging.info("Processing %d subscriber updates (resync=%s)",
                 len(updates), resync)
    if not resync:
        # TODO: implement updates
        return
    # TODO:
    # - handle database exceptions
    keys = []
    subscribers = []
    active_subscriber_ids = []
    for update in updates:
        sub = SubscriberData()
        sub.ParseFromString(update.value)
        subscribers.append(sub)
        keys.append(update.key)
        if sub.lte.state == LTESubscription.ACTIVE:
            active_subscriber_ids.append(update.key)
    old_sub_ids = self._store.list_subscribers()
    # Only compare active subscribers against the database to decide
    # what to detach.
    self.detach_deleted_subscribers(old_sub_ids, active_subscriber_ids)
    logging.debug("Resync with subscribers: %s", ','.join(keys))
    self._store.resync(subscribers)
def edit_subscriber(self, subscriber_id):
    """Context manager to modify the subscriber data.

    Instrumented: logs both the thread CPU time and the wall-clock
    (monotonic) time spent across the whole edit, including the
    caller's block.

    Raises:
        SubscriberNotFoundError: no row exists for the id.
    """
    cpu_start = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
    wall_start = time.clock_gettime(time.CLOCK_MONOTONIC)
    with self.conn:
        row = self.conn.execute(
            "SELECT data FROM subscriberdb WHERE subscriber_id = ?",
            (subscriber_id, ),
        ).fetchone()
        if not row:
            raise SubscriberNotFoundError(subscriber_id)
        subscriber_data = SubscriberData()
        subscriber_data.ParseFromString(row[0])
        # Caller mutates the proto inside its `with` block.
        yield subscriber_data
        self.conn.execute(
            "UPDATE subscriberdb SET data = ? WHERE subscriber_id = ?",
            (subscriber_data.SerializeToString(), subscriber_id),
        )
    cpu_end = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
    wall_end = time.clock_gettime(time.CLOCK_MONOTONIC)
    logging.warning('LTE edit sub spends: {} ms'.format(
        (cpu_end - cpu_start) * 1e3))
    logging.warning('LTE edit sub takes: {} ms'.format(
        (wall_end - wall_start) * 1e3))
def get_subscriber_data(self, subscriber_id):
    """Return the SubscriberData proto stored for ``subscriber_id``.

    Raises:
        SubscriberNotFoundError: no row exists for the id.
    """
    with self.conn:
        row = self.conn.execute(
            "SELECT data FROM subscriberdb WHERE subscriber_id = ?",
            (subscriber_id, ),
        ).fetchone()
    if not row:
        raise SubscriberNotFoundError(subscriber_id)
    subscriber_data = SubscriberData()
    subscriber_data.ParseFromString(row[0])
    return subscriber_data
def edit_subscriber(self, subscriber_id):
    """Context manager to modify the subscriber data.

    Yields the proto for in-place mutation; on normal exit the updated
    proto is written back inside the same transaction, and on error the
    connection context manager rolls back.

    Raises:
        SubscriberNotFoundError: no row exists for the id.
    """
    with self.conn:
        row = self.conn.execute(
            "SELECT data FROM subscriberdb WHERE subscriber_id = ?",
            (subscriber_id, ),
        ).fetchone()
        if not row:
            raise SubscriberNotFoundError(subscriber_id)
        subscriber_data = SubscriberData()
        subscriber_data.ParseFromString(row[0])
        # Caller mutates the proto inside its `with` block.
        yield subscriber_data
        self.conn.execute(
            "UPDATE subscriberdb SET data = ? WHERE subscriber_id = ?",
            (subscriber_data.SerializeToString(), subscriber_id),
        )
def resync(self, subscribers):
    """Replace the sharded store's contents with ``subscribers``.

    Subscribers are grouped into their hash buckets and each bucket's
    database is rewritten independently.  Transient per-subscriber
    state already recorded in a bucket survives the resync: it is
    snapshotted before the wipe and copied back onto any incoming
    subscriber with a matching id.

    Args:
        subscribers - list of subscribers to be in the store.
    """
    # Partition the incoming subscribers by destination bucket.
    bucket_subs = defaultdict(list)
    for sub in subscribers:
        bucket_subs[self._sid2bucket(SIDUtils.to_str(sub.sid))].append(sub)

    for bucket, db_location in enumerate(self._db_locations):
        conn = sqlite3.connect(db_location, uri=True)
        try:
            with conn:
                # Snapshot each existing subscriber's state, keyed by id.
                preserved_state = {}
                rows = conn.execute(
                    "SELECT subscriber_id, data FROM subscriberdb",
                )
                for sub_id, blob in rows:
                    existing = SubscriberData()
                    existing.ParseFromString(blob)
                    preserved_state[sub_id] = existing.state

                # Wipe this bucket, then repopulate it, grafting the
                # preserved state back on where ids match.
                conn.execute("DELETE FROM subscriberdb")
                for sub in bucket_subs[bucket]:
                    sid = SIDUtils.to_str(sub.sid)
                    if sid in preserved_state:
                        sub.state.CopyFrom(preserved_state[sid])
                    serialized = sub.SerializeToString()
                    conn.execute(
                        "INSERT INTO subscriberdb(subscriber_id, data) "
                        "VALUES (?, ?)",
                        (sid, serialized),
                    )
        finally:
            conn.close()
    self._on_ready.resync(subscribers)
def get_subscriber_data(self, subscriber_id):
    """Return the SubscriberData proto stored for ``subscriber_id``.

    Opens the bucket database that owns the id, reads the row, and
    always closes the connection.

    Raises:
        SubscriberNotFoundError: no row exists for the id.
    """
    db_location = self._db_locations[self._sid2bucket(subscriber_id)]
    conn = sqlite3.connect(db_location, uri=True)
    try:
        with conn:
            row = conn.execute(
                "SELECT data FROM subscriberdb WHERE subscriber_id = ?",
                (subscriber_id, ),
            ).fetchone()
            if not row:
                raise SubscriberNotFoundError(subscriber_id)
    finally:
        conn.close()
    subscriber_data = SubscriberData()
    subscriber_data.ParseFromString(row[0])
    return subscriber_data
def process_update(self, stream_name, updates, resync):
    """Apply a stream of subscriber updates from the cloud.

    On a resync, deserializes every update into a SubscriberData proto,
    detaches subscribers that disappeared from the stream, and replaces
    the store's contents.  Incremental (non-resync) updates are not yet
    implemented.
    """
    logging.info("Processing %d subscriber updates (resync=%s)",
                 len(updates), resync)
    if not resync:
        # TODO: implement updates
        return
    # TODO:
    # - handle database exceptions
    keys = []
    subscribers = []
    for update in updates:
        sub = SubscriberData()
        sub.ParseFromString(update.value)
        subscribers.append(sub)
        keys.append(update.key)
    old_sub_ids = self._store.list_subscribers()
    self.detach_deleted_subscribers(old_sub_ids, keys)
    logging.debug("Resync with subscribers: %s", ','.join(keys))
    self._store.resync(subscribers)