def clear_cache(self):
    """Remove this server's cache entries and persisted bandwidth data."""
    server_key = '%s_%s' % (self.id, self.type)
    cache_db.set_remove('servers', server_key)
    cache_db.list_remove('servers_sorted', server_key)
    cache_db.remove(self.get_cache_key('clients'))
    # Drop the persisted bandwidth samples for every aggregation period
    for interval in ('1m', '5m', '30m', '2h', '1d'):
        persist_db.remove(self.get_cache_key('bandwidth-%s' % interval))
    Config.clear_cache(self)
def clear_cache(self):
    """Drop this organization's cache entries and its users name trie."""
    cache_db.set_remove("orgs", self.id)
    cache_db.list_remove("orgs_sorted", self.id)
    cache_db.decrement("org_count")
    for suffix in ("users_cached", "users"):
        cache_db.remove(self.get_cache_key(suffix))
    CacheTrie(self.get_cache_key("users_trie")).clear_cache()
    Config.clear_cache(self)
def _cache_orgs(cls):
    """Populate the 'orgs' cache set from the orgs directory, once."""
    if cache_db.get('orgs_cached') == 't':
        # Cache already current; nothing to rebuild.
        return
    cache_db.remove('orgs')
    orgs_path = os.path.join(app_server.data_path, ORGS_DIR)
    if os.path.isdir(orgs_path):
        for org_id in os.listdir(orgs_path):
            cache_db.set_add('orgs', org_id)
    cls.sort_orgs_cache()
    cache_db.set('orgs_cached', 't')
def _cache_orgs(cls):
    """Build the organizations cache set if it is not already cached."""
    if cache_db.get("orgs_cached") == "t":
        return
    cache_db.remove("orgs")
    orgs_dir = os.path.join(app_server.data_path, ORGS_DIR)
    if os.path.isdir(orgs_dir):
        # Each directory entry is an organization id
        for entry in os.listdir(orgs_dir):
            cache_db.set_add("orgs", entry)
    cls.sort_orgs_cache()
    cache_db.set("orgs_cached", "t")
def _run_thread(self):
    """Launch the openvpn process and pump its output until it exits.

    Starts helper threads for management/status polling, publishes
    'started'/'stopped' events, and logs an unexpected termination when
    the process dies without a stop having been requested.
    """
    logger.debug('Starting ovpn process. %r' % {
        'server_id': self.id,
    })
    self._interrupt = False
    self._state = True
    try:
        try:
            # BUGFIX: the original used stderr=subprocess.PIPE but never
            # read from it; once the stderr pipe buffer filled, openvpn
            # would block forever. Merge stderr into stdout so the read
            # loop below drains all output.
            process = subprocess.Popen(['openvpn', self.ovpn_conf_path],
                stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except OSError:
            self.push_output(traceback.format_exc())
            logger.exception('Failed to start ovpn process. %r' % {
                'server_id': self.id,
            })
            self.publish('stopped')
            return
        # Record start time (minus one second so uptime is never zero)
        cache_db.dict_set(self.get_cache_key(),
            'start_time', str(int(time.time() - 1)))
        sub_thread = threading.Thread(target=self._sub_thread,
            args=(process,))
        sub_thread.start()
        status_thread = threading.Thread(target=self._status_thread)
        status_thread.start()
        self.status = True
        self.publish('started')

        # Pump output until the process terminates
        while True:
            line = process.stdout.readline()
            if not line:
                if process.poll() is not None:
                    break
                continue
            self.push_output(line)

        self._interrupt = True
        status_thread.join()

        cache_db.remove(self.get_cache_key('clients'))
        cache_db.dict_remove(self.get_cache_key(), 'clients')

        self.status = False
        self.publish('stopped')
        self.update_clients({}, force=True)
        if self._state:
            # Process ended without an explicit stop request
            Event(type=SERVERS_UPDATED)
            LogEntry(message='Server stopped unexpectedly "%s".' % (
                self.name))

        logger.debug('Ovpn process has ended. %r' % {
            'server_id': self.id,
        })
    except:
        # Make sure listeners see a stop before propagating the error
        self._interrupt = True
        self.publish('stopped')
        raise
def commit(self):
    """Persist config changes, rebuilding derived state where flagged."""
    if self._rebuild_dh_params:
        self._generate_dh_param()
    if self._reset_ip_pool:
        # Invalidate both pool structures before regenerating addresses
        for suffix in ('ip_pool', 'ip_pool_set'):
            cache_db.remove(self.get_cache_key(suffix))
        self.update_ip_pool()
    Config.commit(self)
    self.sort_servers_cache()
    Event(type=SERVERS_UPDATED)
def sort_users_cache(self):
    """Rebuild the sorted users list and pagination index for this org.

    Builds into uniquely-named temp keys, then atomically renames them
    into place under the 'sort' lock so concurrent sorters cannot
    interleave partial results. Also refreshes 'user_count' and
    'users_page_total'. On any failure the temp keys are removed and the
    exception is re-raised.
    """
    user_count = 0
    users_dict = {}
    users_sort = []
    # Create temp uuid key to prevent multiple threads modifying same key
    temp_suffix = 'temp_' + uuid.uuid4().hex
    temp_users_sorted_key = 'users_sorted_' + temp_suffix
    users_page_index_key = 'users_page_index_' + temp_suffix
    try:
        for user_id in cache_db.set_elements(self.get_cache_key('users')):
            user = User.get_user(self, id=user_id)
            if not user:
                # User removed since the set was built; skip it
                continue
            name_id = '%s_%s' % (user.name, user_id)
            if user.type == CERT_CLIENT:
                user_count += 1
            users_dict[name_id] = (user_id, user.type)
            users_sort.append(name_id)
        cache_db.set(self.get_cache_key('user_count'), str(user_count))
        cur_page = 0
        user_count = 0
        client_count = 0
        # Walk users in name order; only client certs advance the paging
        for name_id in sorted(users_sort):
            if users_dict[name_id][1] == CERT_CLIENT:
                # Integer division (Python 2): page number for this client
                page = client_count / USER_PAGE_COUNT
                if page != cur_page:
                    cur_page = page
                    # Map page number -> overall index where it starts
                    cache_db.dict_set(
                        self.get_cache_key(users_page_index_key),
                        str(cur_page), str(user_count))
                client_count += 1
            user_count += 1
            cache_db.list_rpush(
                self.get_cache_key(temp_users_sorted_key),
                users_dict[name_id][0])
        cache_db.lock_acquire(self.get_cache_key('sort'))
        try:
            # Swap the temp keys into place atomically under the lock
            cache_db.rename(self.get_cache_key(users_page_index_key),
                self.get_cache_key('users_page_index'))
            cache_db.rename(self.get_cache_key(temp_users_sorted_key),
                self.get_cache_key('users_sorted'))
            cache_db.set(self.get_cache_key('users_page_total'),
                str(cur_page))
        finally:
            cache_db.lock_release(self.get_cache_key('sort'))
    except:
        # Clean up the temp keys on any failure before re-raising
        cache_db.remove(self.get_cache_key(users_page_index_key))
        cache_db.remove(self.get_cache_key(temp_users_sorted_key))
        raise
def clear_cache(self, org_data=True):
    """Remove this user's cached data; optionally org-level entries too."""
    if org_data:
        org_key = self.org.get_cache_key
        if self.type == CERT_CLIENT:
            cache_db.decrement(org_key('user_count'))
        # CA certs are never tracked in the org user listings
        if self.type != CERT_CA:
            cache_db.set_remove(org_key('users'), self.id)
            cache_db.list_remove(org_key('users_sorted'), self.id)
            self._remove_cache_trie_key()
    cache_db.remove(self.get_cache_key('otp'))
    cache_db.remove(self.get_cache_key('otp_cache'))
    Config.clear_cache(self)
def _cache_servers(cls):
    """Populate the 'servers' cache set from the servers directory."""
    if cache_db.get('servers_cached') == 't':
        return
    cache_db.remove('servers')
    servers_path = os.path.join(app_server.data_path, SERVERS_DIR)
    if os.path.isdir(servers_path):
        for entry in os.listdir(servers_path):
            # Node servers are marked by a NODE_SERVER file in their dir
            if os.path.isfile(os.path.join(servers_path, entry,
                    NODE_SERVER)):
                suffix = NODE_SERVER
            else:
                suffix = SERVER
            cache_db.set_add('servers', entry + '_' + suffix)
    cls.sort_servers_cache()
    cache_db.set('servers_cached', 't')
def remove_key(self, key, value):
    """Walk the trie along key's path, removing value and pruning
    any nodes whose child sets become empty."""
    prefix = self.name + '_'
    parent_key = self.key
    child_key = parent_key
    for char in key.lower():
        child_key += char
        node_key = prefix + parent_key
        cache_db.set_remove(node_key, child_key)
        # Prune the node entirely once its child set is empty
        if not cache_db.set_length(node_key):
            cache_db.remove(node_key)
        parent_key = child_key
    leaf_key = prefix + parent_key + '_values'
    cache_db.set_remove(leaf_key, value)
    if not cache_db.set_length(leaf_key):
        cache_db.remove(leaf_key)
def _cache_users(self):
    """Populate the users cache set from certificate files on disk."""
    if cache_db.get(self.get_cache_key('users_cached')) == 't':
        return
    cache_db.remove(self.get_cache_key('users'))
    certs_path = os.path.join(self.path, CERTS_DIR)
    if os.path.isdir(certs_path):
        for cert_name in os.listdir(certs_path):
            user_id = cert_name.replace('.crt', '')
            if user_id == CA_CERT_ID:
                # The CA certificate is not a user
                continue
            user = User.get_user(self, id=user_id)
            if not user:
                continue
            user._add_cache_trie_key()
            cache_db.set_add(self.get_cache_key('users'), user_id)
    self.sort_users_cache()
    cache_db.set(self.get_cache_key('users_cached'), 't')
def clear_cache(self):
    """Remove list caches, client/ip-pool entries and bandwidth data."""
    self._clear_list_cache()
    for suffix in ('clients', 'ip_pool', 'ip_pool_set', 'ip_pool_cached'):
        cache_db.remove(self.get_cache_key(suffix))
    # Persisted bandwidth samples for every aggregation period
    for period in ('1m', '5m', '30m', '2h', '1d'):
        persist_db.remove(self.get_cache_key('bandwidth-%s' % period))
    Config.clear_cache(self)
def verify_otp_code(self, code, remote_ip=None):
    """Validate a TOTP code for this user.

    Accepts codes from the previous, current and next 30 second
    windows, rejects codes replayed within a two minute window and,
    when a remote ip is supplied, caches a successful (code, ip) pair
    so the same client can re-authenticate without burning a new code.

    Returns True if the code is valid, False otherwise.
    """
    if remote_ip:
        otp_cache = cache_db.get(self.get_cache_key('otp_cache'))
        if otp_cache:
            cur_code, cur_remote_ip = otp_cache.split(',')
            if cur_code == code and cur_remote_ip == remote_ip:
                # Same client re-sent the cached code; refresh the ttl
                cache_db.expire(self.get_cache_key('otp_cache'),
                    OTP_CACHE_TTL)
                return True
            else:
                cache_db.remove(self.get_cache_key('otp_cache'))

    otp_secret = self.otp_secret
    # base32 input must be padded to a multiple of 8 characters
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
    otp_secret = base64.b32decode(otp_secret.upper())

    valid_codes = []
    epoch = int(time.time() / 30)
    for epoch_offset in range(-1, 2):
        value = struct.pack('>q', epoch + epoch_offset)
        hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
        # RFC 4226 dynamic truncation of the hmac digest
        offset = ord(hmac_hash[-1]) & 0x0F
        truncated_hash = hmac_hash[offset:offset + 4]
        truncated_hash = struct.unpack('>L', truncated_hash)[0]
        truncated_hash &= 0x7FFFFFFF
        truncated_hash %= 1000000
        valid_codes.append('%06d' % truncated_hash)

    if code not in valid_codes:
        return False

    # Reject codes already used within the last two minutes,
    # expiring older entries as we scan
    used_codes = cache_db.dict_get_all(self.get_cache_key('otp'))
    for auth_time, used_code in used_codes.items():
        if int(time.time()) - int(auth_time) > 120:
            cache_db.dict_remove(self.get_cache_key('otp'), auth_time)
        if used_code == code:
            return False

    cache_db.dict_set(self.get_cache_key('otp'),
        str(int(time.time())), code)
    if remote_ip:
        # BUGFIX: only cache the (code, ip) pair when an ip was given;
        # the original joined with None and raised TypeError when
        # remote_ip was omitted.
        cache_db.expire(self.get_cache_key('otp_cache'), OTP_CACHE_TTL)
        cache_db.set(self.get_cache_key('otp_cache'),
            ','.join((code, remote_ip)))
    return True
def sort_servers_cache(cls):
    """Rebuild the 'servers_sorted' list ordered by server name."""
    # Build into a uuid-suffixed temp key so concurrent sorters never
    # write into the same list
    temp_sorted_key = 'servers_sorted_temp_' + uuid.uuid4().hex
    try:
        by_name = {}
        for server_id_type in cache_db.set_elements('servers'):
            server_id, server_type = server_id_type.split('_', 1)
            server = Server.get_server(id=server_id, type=server_type)
            if not server:
                # Server removed since the set was built; skip it
                continue
            by_name['%s_%s' % (server.name, server_id)] = server_id_type
        for name_id in sorted(by_name):
            cache_db.list_rpush(temp_sorted_key, by_name[name_id])
        # Atomically replace the live sorted list
        cache_db.rename(temp_sorted_key, 'servers_sorted')
    except:
        cache_db.remove(temp_sorted_key)
        raise
def sort_orgs_cache(cls):
    """Rebuild 'orgs_sorted' ordered by org name and refresh org_count."""
    # Create temp uuid key to prevent multiple threads modifying same key
    temp_key = 'orgs_sorted_temp_' + uuid.uuid4().hex
    try:
        by_name = {}
        for org_id in cache_db.set_elements('orgs'):
            org = Organization.get_org(id=org_id)
            if not org:
                continue
            by_name['%s_%s' % (org.name, org_id)] = org_id
        # One entry per valid org, so the dict size is the org count
        cache_db.set('org_count', str(len(by_name)))
        for name_id in sorted(by_name):
            cache_db.list_rpush(temp_key, by_name[name_id])
        cache_db.rename(temp_key, 'orgs_sorted')
    except:
        cache_db.remove(temp_key)
        raise
def clear_cache(self):
    """Drop the 'cached' flag and the primary cache entry."""
    for cache_key in (self.get_cache_key('cached'), self.get_cache_key()):
        cache_db.remove(cache_key)
def clear_output(self):
    """Erase the stored ovpn output and publish an update event."""
    output_key = self.get_cache_key('output')
    cache_db.remove(output_key)
    self._event_delay(type=SERVER_OUTPUT_UPDATED, resource_id=self.id)
def clear_cache(self):
    """Clear cached data for this org, its users and its CA cert."""
    for user in self.iter_users():
        user.clear_cache(org_data=False)
    self.ca_cert.clear_cache(org_data=False)
    cache_db.set_remove('orgs', self.id)
    cache_db.list_remove('orgs_sorted', self.id)
    cache_db.decrement('org_count')
    # Every per-org user listing and paging key
    for suffix in ('users_cached', 'users', 'user_count', 'users_sorted',
            'users_page_index', 'users_page_total'):
        cache_db.remove(self.get_cache_key(suffix))
    CacheTrie(self.get_cache_key('users_trie')).clear_cache()
    Config.clear_cache(self)
def clear_cache(self):
    """Remove this object's cache entry and its values set."""
    for cache_key in (self.get_cache_key(), self.get_cache_key('values')):
        cache_db.remove(cache_key)