def iter_users(self, page=None, prefix=None, prefix_limit=None):
    """Yield User objects for this organization in sorted order.

    Exactly one of three modes runs, checked in this order:
      * page is not None   -- yield only the users on that page, using the
        precomputed page index stored in the cache.
      * prefix is not None -- yield users whose trie key matches prefix,
        optionally capped at prefix_limit client certs.
      * neither            -- yield every user in sorted order.

    Side effect: in prefix mode, stores the number of matching CERT_CLIENT
    entries on self._last_prefix_count.

    In prefix mode with a prefix_limit, a trailing ``None`` is yielded as a
    sentinel when the result was NOT truncated (i.e. there are no more
    matches to search for) -- callers must be prepared for it.
    """
    # Ensure the users_sorted / users_page_index / users_trie cache
    # structures are populated before reading them.
    self._cache_users()
    if page is not None:
        page_total = self.page_total
        # Clamp so requests past the end return the last page.
        page = min(page, page_total)
        # Start offset: page 0 begins at element 0; otherwise look up the
        # precomputed start index for this page number.
        if page == 0:
            page_index_s = 0
        else:
            page_index_s = int(cache_db.dict_get(self.get_cache_key(
                'users_page_index'), str(page)))
        # End offset: the last page runs to the end of the list (None);
        # otherwise it ends where the next page starts.
        if page == page_total:
            page_index_e = None
        else:
            page_index_e = int(cache_db.dict_get(self.get_cache_key(
                'users_page_index'), str(page + 1)))
        for user_id in cache_db.list_iter_range(
                self.get_cache_key('users_sorted'),
                page_index_s, page_index_e):
            user = User.get_user(self, id=user_id)
            # Skip ids whose backing user has disappeared since caching.
            if user:
                yield user
    elif prefix is not None:
        users_dict = {}
        users_sort = []
        prefix_count = 0
        users_trie = CacheTrie(self.get_cache_key('users_trie'))
        # Trie entries are '-'-joined triples; split with maxsplit=2 so
        # user names containing '-' stay intact.
        for user_data in users_trie.iter_prefix(prefix):
            user_id, user_type, user_name = user_data.split('-', 2)
            if user_type == CERT_CLIENT:
                prefix_count += 1
            # Sort key is name first, id as tiebreaker.
            name_id = user_name + '_' + user_id
            users_dict[name_id] = (user_id, user_type, user_name)
            users_sort.append(name_id)
        # Expose the total client-cert match count to the caller.
        self._last_prefix_count = prefix_count
        user_count = 0
        search_more = False
        for name_id in sorted(users_sort):
            user_id, user_type, user_name = users_dict[name_id]
            user = User.get_user(self, id=user_id)
            if not user:
                continue
            yield user
            # Only CERT_CLIENT users count toward the limit.
            if prefix_limit and user_type == CERT_CLIENT:
                user_count += 1
                if user_count >= prefix_limit:
                    search_more = True
                    break
        # Sentinel: limit requested but never reached -- no more results.
        if prefix_limit and not search_more:
            yield None
    else:
        # Unpaged, unfiltered: walk the full sorted id list.
        for user_id in cache_db.list_iter(
                self.get_cache_key('users_sorted')):
            user = User.get_user(self, id=user_id)
            if user:
                yield user
def __getattr__(self, name):
    """Resolve dynamic attributes backed by the cache database.

    * status      -- True iff the cached flag is the string 't'.
    * call_buffer -- this server's entry in the module-level
                     _call_buffers map, or None.
    Any other name raises AttributeError (the __dict__ fallback is kept
    for direct callers, though normal attribute lookup never reaches
    __getattr__ for names already present in __dict__).
    """
    if name == 'status':
        # Return the comparison directly instead of if/return True/False;
        # matches the sibling __getattr__ implementations in this file.
        return cache_db.dict_get(self.get_cache_key(), name) == 't'
    elif name == 'call_buffer':
        return _call_buffers.get(self.id)
    elif name not in self.__dict__:
        raise AttributeError('Server instance has no attribute %r' % name)
    return self.__dict__[name]
def __getattr__(self, name):
    """Lazily resolve cached server attributes; defer unknown names to Config.

    status     -- True iff the cached flag equals 't'.
    uptime     -- seconds since cached start_time while running, else None.
    clients    -- decoded JSON client map while running, else {}.
    output     -- cached log lines joined with newlines.
    user_count / org_count -- delegated to the private counters.
    """
    if name == 'status':
        return cache_db.dict_get(self.get_cache_key(), name) == 't'
    if name == 'uptime':
        if not self.status:
            return None
        started = int(cache_db.dict_get(self.get_cache_key(), 'start_time'))
        return int(time.time()) - started
    if name == 'clients':
        raw_clients = cache_db.dict_get(self.get_cache_key(), name)
        if self.status and raw_clients:
            return json.loads(raw_clients)
        return {}
    if name == 'output':
        output_lines = cache_db.list_elements(self.get_cache_key('output'))
        return '\n'.join(output_lines)
    if name == 'user_count':
        return self._get_user_count()
    if name == 'org_count':
        return self._get_org_count()
    # Fall through to the Config machinery for everything else.
    return Config.__getattr__(self, name)
def __getattr__(self, name):
    """Lazily resolve cached attributes; defer unknown names to MongoObject.

    clients -- decoded JSON client map while the server is running, else {}.
    output  -- cached log lines joined with newlines.
    user_count / org_count -- delegated to the private counters.
    """
    if name == 'clients':
        raw_clients = cache_db.dict_get(self.get_cache_key(), name)
        if self.status and raw_clients:
            return json.loads(raw_clients)
        return {}
    if name == 'output':
        output_lines = cache_db.list_elements(self.get_cache_key('output'))
        return '\n'.join(output_lines)
    if name == 'user_count':
        return self._get_user_count()
    if name == 'org_count':
        return self._get_org_count()
    # Fall through to the MongoObject machinery for everything else.
    return MongoObject.__getattr__(self, name)
def update_ip_pool(self):
    """Reconcile the cached IP-pair pool with the current set of client users.

    Removes pool entries for users that no longer exist and allocates a
    (local, remote) address pair for each new CERT_CLIENT user. Runs under
    a cache lock keyed on the pool; commits and fires USERS_UPDATED events
    even when the pool is exhausted mid-allocation.
    """
    cache_key = self.get_cache_key('ip_pool')
    set_cache_key = self.get_cache_key('ip_pool_set')
    # Serialize concurrent pool updates across processes.
    cache_db.lock_acquire(cache_key)
    try:
        # Python 2 style generator over hosts in the server network.
        ip_pool = ipaddress.IPv4Network(self.network).iterhosts()
        # Discard the first host address (reserved -- presumably for the
        # server itself; confirm against the server setup code).
        ip_pool.next()
        users = set()
        for org in self.iter_orgs():
            for user in org.iter_users():
                if user.type == CERT_CLIENT:
                    # Pool entries are keyed 'orgid-userid'.
                    users.add(org.id + '-' + user.id)
        # Release address pairs belonging to users that no longer exist.
        for user_id in cache_db.dict_keys(cache_key) - users:
            ip_set = cache_db.dict_get(cache_key, user_id)
            local_ip_addr, remote_ip_addr = ip_set.split('-')
            cache_db.set_remove(set_cache_key, local_ip_addr)
            cache_db.set_remove(set_cache_key, remote_ip_addr)
            cache_db.dict_remove(cache_key, user_id)
        try:
            # Allocate pairs for users that do not yet have one.
            for user_id in users - cache_db.dict_keys(cache_key):
                while True:
                    remote_ip_addr = str(ip_pool.next())
                    ip_addr_endpoint = remote_ip_addr.split('.')[-1]
                    # Remote address must land on a valid last octet
                    # (OpenVPN point-to-point endpoint constraint).
                    if ip_addr_endpoint not in VALID_IP_ENDPOINTS:
                        continue
                    local_ip_addr = str(ip_pool.next())
                    # Only claim the pair if neither address is in use.
                    if not cache_db.set_exists(set_cache_key,
                            local_ip_addr) and not cache_db.set_exists(
                            set_cache_key, remote_ip_addr):
                        cache_db.set_add(set_cache_key, local_ip_addr)
                        cache_db.set_add(set_cache_key, remote_ip_addr)
                        break
                cache_db.dict_set(cache_key, user_id,
                    local_ip_addr + '-' + remote_ip_addr)
        except StopIteration:
            # Network exhausted -- remaining users simply get no pair.
            pass
        finally:
            # Persist whatever was allocated and notify listeners, even
            # on exhaustion.
            self._commit_ip_pool()
            for org in self.iter_orgs():
                Event(type=USERS_UPDATED, resource_id=org.id)
    finally:
        cache_db.lock_release(cache_key)
def _update_clients_bandwidth(self, clients):
    """Update per-client byte counters and the aggregated bandwidth history.

    clients maps client_id -> {'bytes_received': int, 'bytes_sent': int}
    with cumulative counters as reported by the VPN process.

    Two phases: sync the per-client counter cache (computing the delta
    since the last report), then fold any non-zero delta into the
    persisted per-period bandwidth buckets.
    """
    bytes_recv_t, bytes_sent_t = self._sync_client_bytes(clients)
    # Skip the persistence work entirely when nothing changed.
    if bytes_recv_t != 0 or bytes_sent_t != 0:
        self._store_bandwidth(bytes_recv_t, bytes_sent_t)

def _sync_client_bytes(self, clients):
    """Sync the cached per-client counters; return total (recv, sent) delta."""
    # Hoisted: key is loop-invariant.
    clients_key = self.get_cache_key('clients')
    # Drop cache entries for clients that are no longer connected.
    for client_id in cache_db.dict_keys(clients_key):
        if client_id not in clients:
            cache_db.dict_remove(clients_key, client_id)
    bytes_recv_t = 0
    bytes_sent_t = 0
    for client_id in clients:
        bytes_recv = clients[client_id]['bytes_received']
        bytes_sent = clients[client_id]['bytes_sent']
        prev_bytes_recv = 0
        prev_bytes_sent = 0
        client_prev = cache_db.dict_get(clients_key, client_id)
        cache_db.dict_set(clients_key, client_id,
            '%s,%s' % (bytes_recv, bytes_sent))
        if client_prev:
            prev_recv_s, prev_sent_s = client_prev.split(',')
            prev_bytes_recv = int(prev_recv_s)
            prev_bytes_sent = int(prev_sent_s)
            # Counters went backwards: the VPN process restarted and reset
            # its counters, so treat the current values as a fresh delta.
            if prev_bytes_recv > bytes_recv or prev_bytes_sent > bytes_sent:
                prev_bytes_recv = 0
                prev_bytes_sent = 0
        bytes_recv_t += bytes_recv - prev_bytes_recv
        bytes_sent_t += bytes_sent - prev_bytes_sent
    return bytes_recv_t, bytes_sent_t

def _store_bandwidth(self, bytes_recv_t, bytes_sent_t):
    """Fold a (recv, sent) delta into each time-period bucket and prune."""
    # Truncate to the current minute.
    date = datetime.datetime.utcnow()
    date -= datetime.timedelta(microseconds=date.microsecond,
        seconds=date.second)
    # Align the minute down to each period boundary.
    date_5m = date - datetime.timedelta(minutes=date.minute % 5)
    date_30m = date - datetime.timedelta(minutes=date.minute % 30)
    date_2h = date - datetime.timedelta(
        hours=date.hour % 2, minutes=date.minute)
    date_1d = date - datetime.timedelta(
        hours=date.hour, minutes=date.minute)
    # (period label, aligned bucket time, retention window)
    for period, period_date, retention in (
            ('1m', date, datetime.timedelta(hours=6)),
            ('5m', date_5m, datetime.timedelta(days=1)),
            ('30m', date_30m, datetime.timedelta(days=7)),
            ('2h', date_2h, datetime.timedelta(days=30)),
            ('1d', date_1d, datetime.timedelta(days=365)),
            ):
        # NOTE(review): '%s' strftime (epoch seconds) is a platform
        # extension, not standard Python -- works on Linux glibc.
        timestamp = period_date.strftime('%s')
        timestamp_min = int((period_date - retention).strftime('%s'))
        # Hoisted: key is invariant within this period's iteration.
        period_key = self.get_cache_key('bandwidth-%s' % period)
        bytes_recv = bytes_recv_t
        bytes_sent = bytes_sent_t
        # Accumulate onto any existing value for this bucket.
        prev_bandwidth = persist_db.dict_get(period_key, timestamp)
        if prev_bandwidth:
            prev_recv_s, prev_sent_s = prev_bandwidth.split(',')
            bytes_recv += int(prev_recv_s)
            bytes_sent += int(prev_sent_s)
        persist_db.dict_set(period_key, timestamp,
            '%s,%s' % (bytes_recv, bytes_sent))
        # Prune buckets older than the retention window.
        for timestamp_p in persist_db.dict_keys(period_key):
            if int(timestamp_p) <= timestamp_min:
                persist_db.dict_remove(period_key, timestamp_p)
def get_ip_set(self, org_id, user_id):
    """Return the [local_ip, remote_ip] pair cached for a user.

    Returns (None, None) when the user has no pool entry.
    """
    pool_key = self.get_cache_key('ip_pool')
    stored = cache_db.dict_get(pool_key, org_id + '-' + user_id)
    if not stored:
        return None, None
    # Stored as 'local-remote'; split yields the two addresses.
    return stored.split('-')
def get_cache(self):
    """Load the cached response fields onto this instance.

    Populates data, mime_type, last_modified and etag from the cache
    database; each is None when not cached.
    """
    for field in ('data', 'mime_type', 'last_modified', 'etag'):
        setattr(self, field, cache_db.dict_get(self.get_cache_key(), field))