def commit(self):
    logger.debug('Committing config.')
    if not self._loaded:
        self.load(True)

    try:
        temp_conf_path = self._conf_path + CONF_TEMP_EXT
        with open(temp_conf_path, 'w') as config:
            if self.chmod_mode:
                os.chmod(temp_conf_path, self.chmod_mode)

            for name in self.all_options:
                if name not in self.__dict__:
                    continue
                value = self.__dict__[name]

                if value is None or value == []:
                    if self.cached:
                        cache_db.dict_remove(self.get_cache_key(), name)
                else:
                    if self.cached:
                        cache_db.dict_set(self.get_cache_key(), name,
                            self._encode_value(name, value))
                    config.write(self._encode_line(name, value))

        os.rename(temp_conf_path, self._conf_path)
    except:
        try:
            os.remove(temp_conf_path)
        except OSError:
            pass
        raise

    self.set_state(SAVED)

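# Sketch (not part of the class above): commit() writes all options to a
# temporary file and then renames it over the real config, so a failure
# mid-write never leaves a truncated config behind. A minimal standalone
# version of that pattern, assuming a hypothetical flat key=value format
# and a '.tmp' suffix:
import os

def _write_config_atomic(conf_path, options, chmod_mode=None):
    temp_path = conf_path + '.tmp'
    try:
        with open(temp_path, 'w') as conf_file:
            if chmod_mode:
                # Restrict permissions before any data is written
                os.chmod(temp_path, chmod_mode)
            for name, value in options.items():
                conf_file.write('%s=%s\n' % (name, value))
        # rename() is atomic when both paths are on the same filesystem
        os.rename(temp_path, conf_path)
    except:
        # Clean up the partial temp file, then re-raise
        try:
            os.remove(temp_path)
        except OSError:
            pass
        raise
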
def verify_otp_code(self, code):
    otp_secret = self.otp_secret
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
    otp_secret = base64.b32decode(otp_secret.upper())

    # Accept codes from the previous, current and next 30 second window
    valid_codes = []
    epoch = int(time.time() / 30)
    for epoch_offset in range(-1, 2):
        value = struct.pack('>q', epoch + epoch_offset)
        hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
        offset = ord(hmac_hash[-1]) & 0x0F
        truncated_hash = hmac_hash[offset:offset + 4]
        truncated_hash = struct.unpack('>L', truncated_hash)[0]
        truncated_hash &= 0x7FFFFFFF
        truncated_hash %= 1000000
        valid_codes.append('%06d' % truncated_hash)

    if code not in valid_codes:
        return False

    # Reject codes already used within the replay window, expire old entries
    used_codes = cache_db.dict_get_all(self.get_cache_key('otp'))
    for auth_time, used_code in used_codes.items():
        if int(time.time()) - int(auth_time) > 120:
            cache_db.dict_remove(self.get_cache_key('otp'), auth_time)
        if used_code == code:
            return False

    cache_db.dict_set(self.get_cache_key('otp'),
        str(int(time.time())), code)
    return True

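# Sketch (not part of the class above): verify_otp_code() implements standard
# TOTP (RFC 6238, HMAC-SHA1, 30 second step, 6 digits), accepting the
# previous, current and next window. The matching client-side code
# generation, written in the same Python 2 style as this module and assuming
# a base32-encoded otp_secret:
import base64
import hashlib
import hmac
import struct
import time

def _generate_otp_code(otp_secret):
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
    key = base64.b32decode(otp_secret.upper())
    # Pack the current 30 second epoch counter as a big-endian 64 bit int
    value = struct.pack('>q', int(time.time() / 30))
    hmac_hash = hmac.new(key, value, hashlib.sha1).digest()
    # Dynamic truncation per RFC 4226
    offset = ord(hmac_hash[-1]) & 0x0F
    code = struct.unpack('>L', hmac_hash[offset:offset + 4])[0]
    code &= 0x7FFFFFFF
    code %= 1000000
    return '%06d' % code
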
def _run_thread(self):
    logger.debug('Starting ovpn process. %r' % {
        'server_id': self.id,
    })
    self._interrupt = False
    self._state = True
    try:
        try:
            process = subprocess.Popen(['openvpn', self.ovpn_conf_path],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError:
            self.push_output(traceback.format_exc())
            logger.exception('Failed to start ovpn process. %r' % {
                'server_id': self.id,
            })
            self.publish('stopped')
            return

        cache_db.dict_set(self.get_cache_key(),
            'start_time', str(int(time.time() - 1)))
        sub_thread = threading.Thread(target=self._sub_thread,
            args=(process,))
        sub_thread.start()
        status_thread = threading.Thread(target=self._status_thread)
        status_thread.start()
        self.status = True
        self.publish('started')

        while True:
            line = process.stdout.readline()
            if not line:
                if process.poll() is not None:
                    break
                else:
                    continue
            self.push_output(line)

        self._interrupt = True
        status_thread.join()

        cache_db.remove(self.get_cache_key('clients'))
        cache_db.dict_remove(self.get_cache_key(), 'clients')

        self.status = False
        self.publish('stopped')
        self.update_clients({}, force=True)
        if self._state:
            Event(type=SERVERS_UPDATED)
            LogEntry(message='Server stopped unexpectedly "%s".' % (
                self.name))

        logger.debug('Ovpn process has ended. %r' % {
            'server_id': self.id,
        })
    except:
        self._interrupt = True
        self.publish('stopped')
        raise

def update_ip_pool(self):
    cache_key = self.get_cache_key('ip_pool')
    set_cache_key = self.get_cache_key('ip_pool_set')
    cache_db.lock_acquire(cache_key)
    try:
        ip_pool = ipaddress.IPv4Network(self.network).iterhosts()
        ip_pool.next()

        users = set()
        for org in self.iter_orgs():
            for user in org.iter_users():
                if user.type == CERT_CLIENT:
                    users.add(org.id + '-' + user.id)

        # Release address pairs for users that no longer exist
        for user_id in cache_db.dict_keys(cache_key) - users:
            ip_set = cache_db.dict_get(cache_key, user_id)
            local_ip_addr, remote_ip_addr = ip_set.split('-')
            cache_db.set_remove(set_cache_key, local_ip_addr)
            cache_db.set_remove(set_cache_key, remote_ip_addr)
            cache_db.dict_remove(cache_key, user_id)

        # Assign address pairs to users that do not have one yet
        try:
            for user_id in users - cache_db.dict_keys(cache_key):
                while True:
                    remote_ip_addr = str(ip_pool.next())
                    ip_addr_endpoint = remote_ip_addr.split('.')[-1]
                    if ip_addr_endpoint not in VALID_IP_ENDPOINTS:
                        continue
                    local_ip_addr = str(ip_pool.next())
                    if not cache_db.set_exists(set_cache_key,
                            local_ip_addr) and not cache_db.set_exists(
                            set_cache_key, remote_ip_addr):
                        cache_db.set_add(set_cache_key, local_ip_addr)
                        cache_db.set_add(set_cache_key, remote_ip_addr)
                        break
                cache_db.dict_set(cache_key, user_id,
                    local_ip_addr + '-' + remote_ip_addr)
        except StopIteration:
            pass
        finally:
            self._commit_ip_pool()
            for org in self.iter_orgs():
                Event(type=USERS_UPDATED, resource_id=org.id)
    finally:
        cache_db.lock_release(cache_key)

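# Sketch (not part of the class above): update_ip_pool() hands each client a
# 'local-remote' pair of consecutive host addresses from the server network,
# skipping the first host and only accepting a remote address whose last
# octet is in VALID_IP_ENDPOINTS (defined elsewhere in this project). A
# simplified standalone pairing loop, with a hypothetical endpoint set
# standing in for VALID_IP_ENDPOINTS and the stdlib ipaddress hosts()
# iterator standing in for iterhosts():
import ipaddress

def _pair_client_addresses(network, user_ids,
        valid_endpoints=frozenset(('6', '10', '14'))):
    ip_pool = ipaddress.ip_network(network).hosts()
    next(ip_pool)  # First host is skipped, as in the method above
    pairs = {}
    for user_id in user_ids:
        while True:
            remote_ip_addr = str(next(ip_pool))
            if remote_ip_addr.split('.')[-1] not in valid_endpoints:
                continue
            local_ip_addr = str(next(ip_pool))
            break
        pairs[user_id] = local_ip_addr + '-' + remote_ip_addr
    return pairs

# Example: _pair_client_addresses(u'10.139.0.0/24', ['org1-user1'])
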
def _gen_dh_params(self, dh_param_bits):
    exit_code = None
    cache_key = 'dh_pool_%s' % dh_param_bits
    dh_param_path = os.path.join(self.dh_pool_path,
        '%s_%s' % (dh_param_bits, uuid.uuid4().hex))
    try:
        cache_db.dict_set('dh_pool_state', dh_param_path, PENDING)
        cache_db.set_add(cache_key, dh_param_path)

        args = [
            'openssl', 'dhparam',
            '-out', dh_param_path,
            str(dh_param_bits),
        ]
        process = subprocess.Popen(args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        listener_thread = threading.Thread(
            target=self._fill_servers_pool_listener,
            args=(cache_key, dh_param_path, process),
        )
        listener_thread.daemon = True
        listener_thread.start()
        exit_code = process.wait()
    finally:
        if exit_code == 0:
            cache_db.dict_set('dh_pool_state', dh_param_path, COMPLETE)
        else:
            cache_db.set_remove(cache_key, dh_param_path)
            cache_db.dict_remove('dh_pool_state', dh_param_path)
            try:
                os.remove(dh_param_path)
            except OSError:
                pass
            logger.error('Openssl dhparam returned ' +
                'error exit code %r.' % exit_code)
        cache_db.publish('pooler', 'update')

def _update_clients_bandwidth(self, clients):
    # Remove clients that are no longer connected
    for client_id in cache_db.dict_keys(self.get_cache_key('clients')):
        if client_id not in clients:
            cache_db.dict_remove(self.get_cache_key('clients'), client_id)

    # Get total bytes sent and received across all clients since last update
    bytes_recv_t = 0
    bytes_sent_t = 0
    for client_id in clients:
        bytes_recv = clients[client_id]['bytes_received']
        bytes_sent = clients[client_id]['bytes_sent']
        prev_bytes_recv = 0
        prev_bytes_sent = 0
        client_prev = cache_db.dict_get(self.get_cache_key('clients'),
            client_id)
        cache_db.dict_set(self.get_cache_key('clients'), client_id,
            '%s,%s' % (bytes_recv, bytes_sent))

        if client_prev:
            client_prev = client_prev.split(',')
            prev_bytes_recv = int(client_prev[0])
            prev_bytes_sent = int(client_prev[1])
            # Counters went backwards, client reconnected, start from zero
            if prev_bytes_recv > bytes_recv or prev_bytes_sent > bytes_sent:
                prev_bytes_recv = 0
                prev_bytes_sent = 0

        bytes_recv_t += bytes_recv - prev_bytes_recv
        bytes_sent_t += bytes_sent - prev_bytes_sent

    # Store bytes sent and received into time period buckets
    if bytes_recv_t != 0 or bytes_sent_t != 0:
        date = datetime.datetime.utcnow()
        date -= datetime.timedelta(microseconds=date.microsecond,
            seconds=date.second)
        timestamp_1m = date.strftime('%s')
        timestamp_1m_min = int((date - datetime.timedelta(
            hours=6)).strftime('%s'))

        date_5m = date - datetime.timedelta(minutes=date.minute % 5)
        timestamp_5m = date_5m.strftime('%s')
        timestamp_5m_min = int((date_5m - datetime.timedelta(
            days=1)).strftime('%s'))

        date_30m = date - datetime.timedelta(minutes=date.minute % 30)
        timestamp_30m = date_30m.strftime('%s')
        timestamp_30m_min = int((date_30m - datetime.timedelta(
            days=7)).strftime('%s'))

        date_2h = date - datetime.timedelta(
            hours=date.hour % 2, minutes=date.minute)
        timestamp_2h = date_2h.strftime('%s')
        timestamp_2h_min = int((date_2h - datetime.timedelta(
            days=30)).strftime('%s'))

        date_1d = date - datetime.timedelta(
            hours=date.hour, minutes=date.minute)
        timestamp_1d = date_1d.strftime('%s')
        timestamp_1d_min = int((date_1d - datetime.timedelta(
            days=365)).strftime('%s'))

        for period, timestamp, timestamp_min in (
                    ('1m', timestamp_1m, timestamp_1m_min),
                    ('5m', timestamp_5m, timestamp_5m_min),
                    ('30m', timestamp_30m, timestamp_30m_min),
                    ('2h', timestamp_2h, timestamp_2h_min),
                    ('1d', timestamp_1d, timestamp_1d_min),
                ):
            bytes_recv = bytes_recv_t
            bytes_sent = bytes_sent_t

            prev_bandwidth = persist_db.dict_get(
                self.get_cache_key('bandwidth-%s' % period), timestamp)
            if prev_bandwidth:
                prev_bandwidth = prev_bandwidth.split(',')
                bytes_recv += int(prev_bandwidth[0])
                bytes_sent += int(prev_bandwidth[1])
            persist_db.dict_set(self.get_cache_key(
                'bandwidth-%s' % period), timestamp,
                '%s,%s' % (bytes_recv, bytes_sent))

            # Expire entries older than the retention window for this period
            for timestamp_p in persist_db.dict_keys(self.get_cache_key(
                    'bandwidth-%s' % period)):
                if int(timestamp_p) <= timestamp_min:
                    persist_db.dict_remove(self.get_cache_key(
                        'bandwidth-%s' % period), timestamp_p)

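# Sketch (not part of the class above): the period bucketing in
# _update_clients_bandwidth() rounds the current UTC time down to the start
# of its 1m/5m/30m/2h/1d bucket and uses that rounded time, formatted with
# strftime('%s'), as the storage key. The rounding on its own, using the
# same arithmetic as above:
import datetime

def _period_starts(now=None):
    date = now or datetime.datetime.utcnow()
    # Drop seconds and microseconds to get the start of the current minute
    date -= datetime.timedelta(microseconds=date.microsecond,
        seconds=date.second)
    return {
        '1m': date,
        '5m': date - datetime.timedelta(minutes=date.minute % 5),
        '30m': date - datetime.timedelta(minutes=date.minute % 30),
        '2h': date - datetime.timedelta(hours=date.hour % 2,
            minutes=date.minute),
        '1d': date - datetime.timedelta(hours=date.hour,
            minutes=date.minute),
    }

# Example: for 14:37:21 UTC this yields 14:37 (1m), 14:35 (5m), 14:30 (30m),
# 14:00 (2h) and 00:00 (1d).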