def _load_ip_pool(self):
    if cache_db.get(self.get_cache_key('ip_pool_cached')) == 't':
        return
    reset = False
    if os.path.exists(self.ip_pool_path):
        with open(self.ip_pool_path, 'r') as ip_pool_file:
            pool = json.loads(ip_pool_file.read())
        network = pool.pop('network', None)
        if network == self.network:
            cache_key = self.get_cache_key('ip_pool')
            set_cache_key = self.get_cache_key('ip_pool_set')
            for key, value in pool.iteritems():
                cache_db.dict_set(cache_key, key, value)
                local_ip_addr, remote_ip_addr = value.split('-')
                cache_db.set_add(set_cache_key, local_ip_addr)
                cache_db.set_add(set_cache_key, remote_ip_addr)
        else:
            reset = True
    cache_db.set(self.get_cache_key('ip_pool_cached'), 't')
    if reset:
        self.update_ip_pool()
def commit(self):
    logger.debug('Committing config.')
    if not self._loaded:
        self.load(True)
    try:
        temp_conf_path = self._conf_path + CONF_TEMP_EXT
        with open(temp_conf_path, 'w') as config:
            if self.chmod_mode:
                os.chmod(temp_conf_path, self.chmod_mode)
            for name in self.all_options:
                if name not in self.__dict__:
                    continue
                value = self.__dict__[name]
                if value is None:
                    continue
                if self.cached:
                    cache_db.dict_set(self.get_cache_key(), name, value)
                config.write(self._encode_line(name, value))
        # Rename so the config file is replaced atomically
        os.rename(temp_conf_path, self._conf_path)
    except:
        try:
            os.remove(temp_conf_path)
        except OSError:
            pass
        raise
    self.set_state(SAVED)
def verify_otp_code(self, code):
    otp_secret = self.otp_secret
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
    otp_secret = base64.b32decode(otp_secret.upper())

    # Accept codes from the previous, current and next 30 second
    # time step to allow for clock drift
    valid_codes = []
    epoch = int(time.time() / 30)
    for epoch_offset in range(-1, 2):
        value = struct.pack('>q', epoch + epoch_offset)
        hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
        offset = ord(hmac_hash[-1]) & 0x0F
        truncated_hash = hmac_hash[offset:offset + 4]
        truncated_hash = struct.unpack('>L', truncated_hash)[0]
        truncated_hash &= 0x7FFFFFFF
        truncated_hash %= 1000000
        valid_codes.append('%06d' % truncated_hash)
    if code not in valid_codes:
        return False

    # Reject codes already used within the valid window to prevent replay
    used_codes = cache_db.dict_get_all(self.get_cache_key('otp'))
    for auth_time, used_code in used_codes.items():
        if int(time.time()) - int(auth_time) > 120:
            cache_db.dict_remove(self.get_cache_key('otp'), auth_time)
        if used_code == code:
            return False
    cache_db.dict_set(self.get_cache_key('otp'),
        str(int(time.time())), code)
    return True
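# For reference, a minimal standalone sketch of the code generation side
# that verify_otp_code() above checks against: RFC 6238 TOTP with SHA-1,
# 30 second time steps and 6 digits. The function name and arguments are
# illustrative, not part of the codebase.
import base64
import hashlib
import hmac
import struct
import time

def generate_totp_code(otp_secret, timestamp=None):
    # Pad the base32 secret to a multiple of 8 characters
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret += '=' * padding
    key = base64.b32decode(otp_secret.upper())
    epoch = int((timestamp or time.time()) / 30)
    hmac_hash = hmac.new(key, struct.pack('>q', epoch),
        hashlib.sha1).digest()
    # Dynamic truncation per RFC 4226
    offset = ord(hmac_hash[-1]) & 0x0F
    code = struct.unpack('>L', hmac_hash[offset:offset + 4])[0]
    return '%06d' % ((code & 0x7FFFFFFF) % 1000000)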
def _set_value(self, name, value, merge=False):
    if merge and name in self.__dict__:
        return
    self.__dict__[name] = value
    if self.cached:
        cache_db.dict_set(self.get_cache_key(), name,
            self._encode_value(name, value))
def __setattr__(self, name, value):
    if name == 'status':
        if value:
            cache_db.dict_set(self.get_cache_key(), name, 't')
        else:
            cache_db.dict_set(self.get_cache_key(), name, 'f')
    else:
        self.__dict__[name] = value
def _run_thread(self):
    logger.debug('Starting ovpn process. %r' % {
        'server_id': self.id,
    })
    self._interrupt = False
    self._state = True
    try:
        try:
            process = subprocess.Popen(['openvpn', self.ovpn_conf_path],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError:
            self.push_output(traceback.format_exc())
            logger.exception('Failed to start ovpn process. %r' % {
                'server_id': self.id,
            })
            self.publish('stopped')
            return
        cache_db.dict_set(self.get_cache_key(), 'start_time',
            str(int(time.time() - 1)))
        sub_thread = threading.Thread(target=self._sub_thread,
            args=(process,))
        sub_thread.start()
        status_thread = threading.Thread(target=self._status_thread)
        status_thread.start()
        self.status = True
        self.publish('started')

        # Drain process output until the openvpn process exits
        while True:
            line = process.stdout.readline()
            if not line:
                if process.poll() is not None:
                    break
                else:
                    continue
            self.push_output(line)

        self._interrupt = True
        status_thread.join()
        cache_db.remove(self.get_cache_key('clients'))
        cache_db.dict_remove(self.get_cache_key(), 'clients')
        self.status = False
        self.publish('stopped')
        self.update_clients({}, force=True)
        if self._state:
            Event(type=SERVERS_UPDATED)
            LogEntry(message='Server stopped unexpectedly "%s".' % (
                self.name))
        logger.debug('Ovpn process has ended. %r' % {
            'server_id': self.id,
        })
    except:
        self._interrupt = True
        self.publish('stopped')
        raise
def load(self, merge=False):
    logger.debug('Loading config. %r' % {
        'path': self._conf_path,
    })
    self._loaded = True
    if self.cached:
        if not hasattr(self, 'id'):
            raise ValueError('Object ID is required for caching')
        if cache_db.get(self.get_cache_key('cached')) == 't':
            if merge:
                for name, value in cache_db.dict_get_all(
                        self.get_cache_key()).iteritems():
                    if name in self.__dict__:
                        continue
                    self.__dict__[name] = value
            else:
                self.__dict__.update(cache_db.dict_get_all(
                    self.get_cache_key()))
            return
    try:
        with open(self._conf_path) as config:
            for line in config:
                line = line.rstrip('\n')
                if line.strip() == '':
                    continue
                elif line[0] == '#':
                    continue
                elif '=' in line:
                    pass
                else:
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
                    continue
                try:
                    name, value = self._decode_line(line)
                    if merge and name in self.__dict__:
                        continue
                    self.__dict__[name] = value
                    if self.cached:
                        cache_db.dict_set(self.get_cache_key(), name, value)
                except ValueError:
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
    except IOError:
        if not merge:
            raise
    if self.cached:
        cache_db.set(self.get_cache_key('cached'), 't')
def start(self, silent=False):
    cache_db.lock_acquire(self.get_cache_key('op_lock'))
    try:
        if self.status:
            return
        if not self.org_count:
            raise ServerMissingOrg(
                'Server cannot be started without any organizations', {
                    'server_id': self.id,
                })
        logger.debug('Starting node server. %r' % {
            'server_id': self.id,
        })
        ovpn_conf = self._generate_ovpn_conf()
        try:
            response = self._request('post', json_data={
                'interface': self.interface,
                'network': self.network,
                'local_networks': self.local_networks,
                'ovpn_conf': ovpn_conf,
                'server_ver': NODE_SERVER_VER,
            })
        except httplib.HTTPException:
            raise NodeConnectionError('Failed to connect to node server', {
                'server_id': self.id,
            })
        if response.status_code == 401:
            raise InvalidNodeAPIKey('Invalid node server api key', {
                'server_id': self.id,
                'status_code': response.status_code,
                'reason': response.reason,
            })
        elif response.status_code != 200:
            raise ServerStartError('Failed to start node server', {
                'server_id': self.id,
                'status_code': response.status_code,
                'reason': response.reason,
            })
        cache_db.dict_set(self.get_cache_key(), 'start_time',
            str(int(time.time() - 1)))
        self.clear_output()
        self._interrupt = False
        self.status = True
        self._start_server_threads()
        if not silent:
            Event(type=SERVERS_UPDATED)
            LogEntry(message='Started server "%s".' % self.name)
    finally:
        cache_db.lock_release(self.get_cache_key('op_lock'))
def sort_users_cache(self):
    user_count = 0
    users_dict = {}
    users_sort = []
    # Create temp uuid key to prevent multiple threads modifying same key
    temp_suffix = 'temp_' + uuid.uuid4().hex
    temp_users_sorted_key = 'users_sorted_' + temp_suffix
    users_page_index_key = 'users_page_index_' + temp_suffix
    try:
        for user_id in cache_db.set_elements(self.get_cache_key('users')):
            user = User.get_user(self, id=user_id)
            if not user:
                continue
            name_id = '%s_%s' % (user.name, user_id)
            if user.type == CERT_CLIENT:
                user_count += 1
            users_dict[name_id] = (user_id, user.type)
            users_sort.append(name_id)
        cache_db.set(self.get_cache_key('user_count'), str(user_count))

        # Build the sorted user list and map each page number to the
        # offset of its first user in the sorted list
        cur_page = 0
        user_count = 0
        client_count = 0
        for name_id in sorted(users_sort):
            if users_dict[name_id][1] == CERT_CLIENT:
                page = client_count / USER_PAGE_COUNT
                if page != cur_page:
                    cur_page = page
                    cache_db.dict_set(
                        self.get_cache_key(users_page_index_key),
                        str(cur_page), str(user_count))
                client_count += 1
            user_count += 1
            cache_db.list_rpush(self.get_cache_key(temp_users_sorted_key),
                users_dict[name_id][0])

        # Swap the temp keys in under the sort lock
        cache_db.lock_acquire(self.get_cache_key('sort'))
        try:
            cache_db.rename(self.get_cache_key(users_page_index_key),
                self.get_cache_key('users_page_index'))
            cache_db.rename(self.get_cache_key(temp_users_sorted_key),
                self.get_cache_key('users_sorted'))
            cache_db.set(self.get_cache_key('users_page_total'),
                str(cur_page))
        finally:
            cache_db.lock_release(self.get_cache_key('sort'))
    except:
        cache_db.remove(self.get_cache_key(users_page_index_key))
        cache_db.remove(self.get_cache_key(temp_users_sorted_key))
        raise
def __setattr__(self, name, value):
    if name == 'status':
        cache_db.dict_set(self.get_cache_key(), name, 't' if value else 'f')
        return
    elif name == 'clients':
        cache_db.dict_set(self.get_cache_key(), name, json.dumps(value))
        return
    elif name == 'dh_param_bits':
        if not self._loaded or self.dh_param_bits != value:
            self._rebuild_dh_params = True
    elif name == 'network':
        self._reset_ip_pool = self._loaded and self.network != value
    Config.__setattr__(self, name, value)
def set_cache(self):
    cache_db.dict_set(self.get_cache_key(), 'data', self.data or '')
    cache_db.dict_set(self.get_cache_key(), 'mime_type',
        self.mime_type or '')
    cache_db.dict_set(self.get_cache_key(), 'last_modified',
        self.last_modified)
    cache_db.dict_set(self.get_cache_key(), 'etag', self.etag or '')
def update_ip_pool(self):
    cache_key = self.get_cache_key('ip_pool')
    set_cache_key = self.get_cache_key('ip_pool_set')
    cache_db.lock_acquire(cache_key)
    try:
        ip_pool = ipaddress.IPv4Network(self.network).iterhosts()
        ip_pool.next()

        users = set()
        for org in self.iter_orgs():
            for user in org.iter_users():
                if user.type == CERT_CLIENT:
                    users.add(org.id + '-' + user.id)

        # Release address pairs held by users that no longer exist
        for user_id in cache_db.dict_keys(cache_key) - users:
            ip_set = cache_db.dict_get(cache_key, user_id)
            local_ip_addr, remote_ip_addr = ip_set.split('-')
            cache_db.set_remove(set_cache_key, local_ip_addr)
            cache_db.set_remove(set_cache_key, remote_ip_addr)
            cache_db.dict_remove(cache_key, user_id)

        # Assign an unused local-remote address pair to each new user
        try:
            for user_id in users - cache_db.dict_keys(cache_key):
                while True:
                    remote_ip_addr = str(ip_pool.next())
                    ip_addr_endpoint = remote_ip_addr.split('.')[-1]
                    if ip_addr_endpoint not in VALID_IP_ENDPOINTS:
                        continue
                    local_ip_addr = str(ip_pool.next())
                    if not cache_db.set_exists(set_cache_key,
                            local_ip_addr) and not cache_db.set_exists(
                            set_cache_key, remote_ip_addr):
                        cache_db.set_add(set_cache_key, local_ip_addr)
                        cache_db.set_add(set_cache_key, remote_ip_addr)
                        break
                cache_db.dict_set(cache_key, user_id,
                    local_ip_addr + '-' + remote_ip_addr)
        except StopIteration:
            pass
        finally:
            self._commit_ip_pool()
            for org in self.iter_orgs():
                Event(type=USERS_UPDATED, resource_id=org.id)
    finally:
        cache_db.lock_release(cache_key)
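# A minimal sketch of how update_ip_pool() above walks the host iterator:
# addresses are consumed in remote-local pairs, skipping remote candidates
# whose last octet is not an allowed endpoint. The endpoints argument and
# function name are hypothetical stand-ins; the real filter is the
# VALID_IP_ENDPOINTS constant.
import ipaddress

def iter_ip_pairs(network, endpoints, count=4):
    ip_pool = ipaddress.IPv4Network(network).iterhosts()
    ip_pool.next()  # First host skipped, as in update_ip_pool()
    pairs = []
    while len(pairs) < count:
        remote_ip_addr = str(ip_pool.next())
        if remote_ip_addr.split('.')[-1] not in endpoints:
            continue
        local_ip_addr = str(ip_pool.next())
        pairs.append((local_ip_addr, remote_ip_addr))
    return pairs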
def __init__(self):
    self._orgs_users_thread = None
    self._servers_thread = None
    self.dh_pool_path = os.path.join(app_server.data_path, DH_POOL_DIR)
    if not os.path.exists(self.dh_pool_path):
        os.makedirs(self.dh_pool_path)
    for dh_param in os.listdir(self.dh_pool_path):
        dh_param_path = os.path.join(self.dh_pool_path, dh_param)
        dh_param_split = dh_param.split('_')
        if len(dh_param_split) != 2:
            logger.warning('Invalid dh param name in pool, skipping... %r' % {
                'path': dh_param_path,
            })
            continue
        dh_param_bits, dh_param_id = dh_param_split
        try:
            dh_param_bits = int(dh_param_bits)
        except ValueError:
            logger.warning('Invalid dh param size in pool, skipping... %r' % {
                'path': dh_param_path,
            })
            continue
        if dh_param_bits not in VALID_DH_PARAM_BITS:
            logger.warning('Unknown dh param size in pool, skipping... %r' % {
                'path': dh_param_path,
            })
            continue
        if len(dh_param_id) != 32 or not dh_param_id.isalnum() or \
                not dh_param_id.islower():
            logger.warning('Invalid dh param id in pool, skipping... %r' % {
                'path': dh_param_path,
            })
            continue
        cache_db.dict_set('dh_pool_state', dh_param_path, COMPLETE)
        cache_db.set_add('dh_pool_%s' % dh_param_bits, dh_param_path)
def _gen_dh_params(self, dh_param_bits):
    exit_code = None
    cache_key = 'dh_pool_%s' % dh_param_bits
    dh_param_path = os.path.join(self.dh_pool_path,
        '%s_%s' % (dh_param_bits, uuid.uuid4().hex))
    try:
        cache_db.dict_set('dh_pool_state', dh_param_path, PENDING)
        cache_db.set_add(cache_key, dh_param_path)
        args = [
            'openssl', 'dhparam',
            '-out', dh_param_path,
            str(dh_param_bits),
        ]
        process = subprocess.Popen(args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        listener_thread = threading.Thread(
            target=self._fill_servers_pool_listener,
            args=(cache_key, dh_param_path, process),
        )
        listener_thread.daemon = True
        listener_thread.start()
        exit_code = process.wait()
    finally:
        if exit_code == 0:
            cache_db.dict_set('dh_pool_state', dh_param_path, COMPLETE)
        else:
            cache_db.set_remove(cache_key, dh_param_path)
            cache_db.dict_remove('dh_pool_state', dh_param_path)
            try:
                os.remove(dh_param_path)
            except OSError:
                pass
            logger.error('Openssl dhparam returned '
                'error exit code %r.' % exit_code)
        cache_db.publish('pooler', 'update')
def create_user_key_link(self, user_id):
    key_id = uuid.uuid4().hex
    key_id_key = 'key_token-%s' % key_id

    # Generate a unique short id for the view link, then for the uri link
    view_id = None
    uri_id = None
    for i in xrange(2):
        for i in xrange(2048):
            temp_id = ''.join(random.sample(SHORT_URL_CHARS, SHORT_URL_LEN))
            if not view_id:
                if not cache_db.exists('view_token-%s' % temp_id):
                    view_id = temp_id
                    break
            else:
                if not cache_db.exists('uri_token-%s' % temp_id):
                    uri_id = temp_id
                    break
    if not view_id or not uri_id:
        raise KeyLinkError('Failed to generate random id')
    view_id_key = 'view_token-%s' % view_id
    uri_id_key = 'uri_token-%s' % uri_id

    cache_db.expire(key_id_key, KEY_LINK_TIMEOUT)
    cache_db.dict_set(key_id_key, 'org_id', self.id)
    cache_db.dict_set(key_id_key, 'user_id', user_id)
    cache_db.dict_set(key_id_key, 'view_id', view_id)
    cache_db.dict_set(key_id_key, 'uri_id', uri_id)

    conf_urls = []
    if app_server.inline_certs:
        for server in self.iter_servers():
            conf_id = uuid.uuid4().hex
            conf_id_key = 'conf_token-%s' % conf_id
            cache_db.expire(conf_id_key, KEY_LINK_TIMEOUT)
            cache_db.dict_set(conf_id_key, 'org_id', self.id)
            cache_db.dict_set(conf_id_key, 'user_id', user_id)
            cache_db.dict_set(conf_id_key, 'server_id', server.id)
            conf_urls.append({
                'id': conf_id,
                'server_name': server.name,
                'url': '/key/%s.ovpn' % conf_id,
            })

    cache_db.expire(view_id_key, KEY_LINK_TIMEOUT)
    cache_db.dict_set(view_id_key, 'org_id', self.id)
    cache_db.dict_set(view_id_key, 'user_id', user_id)
    cache_db.dict_set(view_id_key, 'key_id', key_id)
    cache_db.dict_set(view_id_key, 'uri_id', uri_id)
    cache_db.dict_set(view_id_key, 'conf_urls', json.dumps(conf_urls))

    cache_db.expire(uri_id_key, KEY_LINK_TIMEOUT)
    cache_db.dict_set(uri_id_key, 'org_id', self.id)
    cache_db.dict_set(uri_id_key, 'user_id', user_id)

    return {
        'id': key_id,
        'key_url': '/key/%s.tar' % key_id,
        'view_url': '/k/%s' % view_id,
        'uri_url': '/ku/%s' % uri_id,
    }
def __setattr__(self, name, value):
    if name == 'clients':
        cache_db.dict_set(self.get_cache_key(), name, json.dumps(value))
        return
    MongoObject.__setattr__(self, name, value)
def _update_clients_bandwidth(self, clients):
    # Remove clients no longer connected
    for client_id in cache_db.dict_keys(self.get_cache_key('clients')):
        if client_id not in clients:
            cache_db.dict_remove(self.get_cache_key('clients'), client_id)

    # Get total bytes sent and received for all clients
    bytes_recv_t = 0
    bytes_sent_t = 0
    for client_id in clients:
        bytes_recv = clients[client_id]['bytes_received']
        bytes_sent = clients[client_id]['bytes_sent']
        prev_bytes_recv = 0
        prev_bytes_sent = 0
        client_prev = cache_db.dict_get(self.get_cache_key('clients'),
            client_id)
        cache_db.dict_set(self.get_cache_key('clients'), client_id,
            '%s,%s' % (bytes_recv, bytes_sent))
        if client_prev:
            client_prev = client_prev.split(',')
            prev_bytes_recv = int(client_prev[0])
            prev_bytes_sent = int(client_prev[1])
            # Counters reset when a client reconnects
            if prev_bytes_recv > bytes_recv or prev_bytes_sent > bytes_sent:
                prev_bytes_recv = 0
                prev_bytes_sent = 0
        bytes_recv_t += bytes_recv - prev_bytes_recv
        bytes_sent_t += bytes_sent - prev_bytes_sent

    # Store bytes sent and received into time periods
    if bytes_recv_t != 0 or bytes_sent_t != 0:
        date = datetime.datetime.utcnow()
        date -= datetime.timedelta(microseconds=date.microsecond,
            seconds=date.second)
        timestamp_1m = date.strftime('%s')
        timestamp_1m_min = int((date - datetime.timedelta(
            hours=6)).strftime('%s'))

        date_5m = date - datetime.timedelta(minutes=date.minute % 5)
        timestamp_5m = date_5m.strftime('%s')
        timestamp_5m_min = int((date_5m - datetime.timedelta(
            days=1)).strftime('%s'))

        date_30m = date - datetime.timedelta(minutes=date.minute % 30)
        timestamp_30m = date_30m.strftime('%s')
        timestamp_30m_min = int((date_30m - datetime.timedelta(
            days=7)).strftime('%s'))

        date_2h = date - datetime.timedelta(hours=date.hour % 2,
            minutes=date.minute)
        timestamp_2h = date_2h.strftime('%s')
        timestamp_2h_min = int((date_2h - datetime.timedelta(
            days=30)).strftime('%s'))

        date_1d = date - datetime.timedelta(hours=date.hour,
            minutes=date.minute)
        timestamp_1d = date_1d.strftime('%s')
        timestamp_1d_min = int((date_1d - datetime.timedelta(
            days=365)).strftime('%s'))

        for period, timestamp, timestamp_min in (
                    ('1m', timestamp_1m, timestamp_1m_min),
                    ('5m', timestamp_5m, timestamp_5m_min),
                    ('30m', timestamp_30m, timestamp_30m_min),
                    ('2h', timestamp_2h, timestamp_2h_min),
                    ('1d', timestamp_1d, timestamp_1d_min),
                ):
            bytes_recv = bytes_recv_t
            bytes_sent = bytes_sent_t
            prev_bandwidth = persist_db.dict_get(
                self.get_cache_key('bandwidth-%s' % period), timestamp)
            if prev_bandwidth:
                prev_bandwidth = prev_bandwidth.split(',')
                bytes_recv += int(prev_bandwidth[0])
                bytes_sent += int(prev_bandwidth[1])
            persist_db.dict_set(self.get_cache_key(
                'bandwidth-%s' % period), timestamp,
                '%s,%s' % (bytes_recv, bytes_sent))

            # Drop samples older than the retention window for this period
            for timestamp_p in persist_db.dict_keys(self.get_cache_key(
                    'bandwidth-%s' % period)):
                if int(timestamp_p) <= timestamp_min:
                    persist_db.dict_remove(self.get_cache_key(
                        'bandwidth-%s' % period), timestamp_p)
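# The five bandwidth periods above are all produced the same way: the
# current UTC time is floored to the period boundary. A standalone sketch
# of that bucketing (the function name is illustrative, not part of the
# codebase); it matches the 1m, 5m, 30m, 2h and 1d cases above.
import datetime

def floor_to_period(date, period_minutes):
    # Drop seconds and microseconds, then floor to the period boundary
    date -= datetime.timedelta(microseconds=date.microsecond,
        seconds=date.second)
    total_minutes = date.hour * 60 + date.minute
    return date - datetime.timedelta(minutes=total_minutes % period_minutes)

# floor_to_period(datetime.datetime(2014, 1, 2, 13, 47), 30)
# -> datetime.datetime(2014, 1, 2, 13, 30)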