def check_auth(username, password, remote_addr=None):
    """Authenticate an administrator by username and password.

    When remote_addr is given, a per-address rate limiter is enforced
    first: each attempt increments a counter document and the request is
    rejected with HTTP 403 once the counter exceeds
    settings.app.auth_limiter_count_max within a one minute window.

    Returns the administrator object on success, None on bad
    credentials; aborts with 403 when rate limited.
    """
    if remote_addr:
        # Atomically bump the attempt counter for this address,
        # creating the document (with its window start time) on first
        # attempt.
        doc = Administrator.limiter_collection.find_and_modify({
            '_id': remote_addr,
        }, {
            '$inc': {'count': 1},
            '$setOnInsert': {'timestamp': utils.now()},
        }, new=True, upsert=True)

        # Window expired: reset the counter for this address.
        if utils.now() > doc['timestamp'] + datetime.timedelta(minutes=1):
            doc = {
                'count': 1,
                'timestamp': utils.now(),
            }
            Administrator.limiter_collection.update({
                '_id': remote_addr,
            }, doc, upsert=True)

        if doc['count'] > settings.app.auth_limiter_count_max:
            # flask.abort raises werkzeug's HTTPException itself; the
            # previous `raise flask.abort(403)` applied `raise` to a
            # call that never returns.
            flask.abort(403)

    administrator = find_user(username=username)
    if not administrator:
        return
    if not administrator.test_password(password):
        return
    return administrator
def init():
    """Initialize the local host document and publish its state.

    Loads any existing host record, marks it online, captures public
    and local addresses (auto-detected or from the configured
    interface), records instance id and local networks, then commits
    and emits HOSTS_UPDATED. Address lookup failures are logged and the
    corresponding field is cleared rather than aborting startup.
    """
    settings.local.host = Host()
    try:
        settings.local.host.load()
    except NotFound:
        # First run on this host; start from a fresh document.
        pass

    settings.local.host.status = ONLINE
    settings.local.host.users_online = 0
    settings.local.host.start_timestamp = utils.now()
    settings.local.host.ping_timestamp = utils.now()

    if settings.local.public_ip:
        settings.local.host.auto_public_address = settings.local.public_ip
    if settings.local.public_ip6:
        settings.local.host.auto_public_address6 = settings.local.public_ip6

    try:
        settings.local.host.hostname = socket.gethostname()
    except:
        logger.exception('Failed to get hostname', 'host')
        settings.local.host.hostname = None

    if settings.conf.local_address_interface == 'auto':
        # Auto-detect local IPv4/IPv6 addresses.
        try:
            settings.local.host.auto_local_address = \
                utils.get_local_address()
        except:
            logger.exception('Failed to get auto_local_address', 'host')
            settings.local.host.local_address = None
        try:
            settings.local.host.auto_local_address6 = \
                utils.get_local_address6()
        except:
            logger.exception('Failed to get auto_local_address6', 'host')
            settings.local.host.local_address6 = None
    else:
        # Use the explicitly configured interface.
        try:
            settings.local.host.auto_local_address = \
                utils.get_interface_address(
                    str(settings.conf.local_address_interface))
        except:
            logger.exception('Failed to get auto_local_address', 'host',
                interface=settings.conf.local_address_interface)
            settings.local.host.auto_local_address = None
        try:
            settings.local.host.auto_local_address6 = \
                utils.get_interface_address6(
                    str(settings.conf.local_address_interface))
        except:
            logger.exception('Failed to get auto_local_address6', 'host',
                interface=settings.conf.local_address_interface)
            settings.local.host.auto_local_address6 = None

    settings.local.host.auto_instance_id = utils.get_instance_id()
    settings.local.host.local_networks = utils.get_local_networks()

    settings.local.host.commit()
    event.Event(type=HOSTS_UPDATED)
def reserve_route_advertisement(self, vpc_region, vpc_id, network):
    """Claim advertisement of a VPC route for this server instance.

    Attempts to take (or steal, when the previous holder's ping is
    older than route_ping_ttl) the reservation document for this
    server/vpc/network, then installs the route in AWS and remembers
    the reservation id locally.
    """
    ra_id = '%s_%s_%s' % (self.server.id, vpc_id, network)
    # Only claim a reservation whose last update is older than the TTL.
    timestamp_spec = utils.now() - datetime.timedelta(
        seconds=settings.vpn.route_ping_ttl)

    try:
        self.routes_collection.update_one({
            '_id': ra_id,
            'timestamp': {'$lt': timestamp_spec},
        }, {'$set': {
            'instance_id': self.id,
            'server_id': self.server.id,
            'vpc_region': vpc_region,
            'vpc_id': vpc_id,
            'network': network,
            'timestamp': utils.now(),
        }}, upsert=True)
        utils.add_vpc_route(vpc_region, vpc_id, network,
            settings.local.host.aws_id)
        self.route_advertisements.add(ra_id)
    except pymongo.errors.DuplicateKeyError:
        # Another instance still holds a live reservation; nothing to
        # do here.
        return
    except:
        logger.exception('Failed to add vpc route', 'server',
            server_id=self.server.id,
            instance_id=self.id,
            vpc_region=vpc_region,
            vpc_id=vpc_id,
            network=network,
        )
def _watch_thread(self):
    """Generator thread that prunes stale per-client byte counters and
    periodically flushes the accumulated bandwidth totals to the
    server's bandwidth store.

    Runs until the instance socket is interrupted; any unexpected error
    is logged and stops the server process.
    """
    try:
        while True:
            self.cur_timestamp = utils.now()
            timestamp_ttl = self.cur_timestamp - datetime.timedelta(
                seconds=180)

            # Iterate a snapshot so entries can be popped while
            # scanning (on Python 2 items() already copies; list()
            # also makes this safe under Python 3 dict views).
            for client_id, (timestamp, _, _) in list(
                    self.client_bytes.items()):
                if timestamp < timestamp_ttl:
                    self.client_bytes.pop(client_id, None)

            # Atomically read-and-reset the shared byte counters;
            # release the lock even if an error occurs in between.
            self.bytes_lock.acquire()
            try:
                bytes_recv = self.bytes_recv
                bytes_sent = self.bytes_sent
                self.bytes_recv = 0
                self.bytes_sent = 0
            finally:
                self.bytes_lock.release()

            if bytes_recv != 0 or bytes_sent != 0:
                self.server.bandwidth.add_data(
                    utils.now(), bytes_recv, bytes_sent)

            yield interrupter_sleep(self.bandwidth_rate)
            if self.instance.sock_interrupt:
                return
    except GeneratorExit:
        raise
    except:
        self.push_output('ERROR Management thread error')
        logger.exception('Error in management watch thread', 'server',
            server_id=self.server.id,
            instance_id=self.instance.id,
        )
        self.instance.stop_process()
def claim_commit(self, fields=None):
    """Attempt to claim this task document for the current runner.

    Writes the commit doc with PENDING state and a fresh TTL; the
    update filter only matches when the task is not COMPLETE and is
    either unclaimed or already owned by this runner. Returns True when
    the claim succeeded, False otherwise (including a concurrent upsert
    racing us into a DuplicateKeyError).
    """
    doc = self.get_commit_doc(fields=fields)

    doc['state'] = PENDING
    doc['attempts'] = self.attempts
    doc['runner_id'] = self.runner_id
    doc['ttl_timestamp'] = utils.now() + \
        datetime.timedelta(seconds=self.ttl)
    doc['timestamp'] = utils.now()

    try:
        response = self.collection.update({
            '_id': self.id,
            '$and': [
                {'$or': [
                    {'state': {'$ne': COMPLETE}},
                    {'state': {'$exists': False}},
                ]},
                {'$or': [
                    {'runner_id': self.runner_id},
                    {'runner_id': {'$exists': False}},
                ]},
            ],
        }, {
            '$set': doc,
        }, upsert=True)
        # Claimed either by updating the existing doc or by inserting
        # a new one.
        claimed = bool(response.get('updatedExisting') or response.get(
            'upserted'))
    except pymongo.errors.DuplicateKeyError:
        # Another runner upserted the same _id first.
        claimed = False

    self.claimed = claimed
    return claimed
def _keep_alive_thread():
    """Generator thread keeping the host document alive.

    Once per wall-clock minute it samples CPU/memory usage and records
    a usage period; every (host_ttl - 10) seconds it refreshes the host
    ping timestamp and public addresses in the hosts collection,
    emitting HOSTS_UPDATED when the public address changed. On
    GeneratorExit the host is deinitialized.
    """
    last_update = None
    proc_stat = None
    settings.local.host_ping_timestamp = utils.now()

    while True:
        try:
            timestamp = utils.now()
            # Truncate to the start of the minute so usage is sampled
            # once per minute.
            timestamp -= datetime.timedelta(
                microseconds=timestamp.microsecond,
                seconds=timestamp.second,
            )
            if timestamp != last_update:
                last_update = timestamp

                last_proc_stat = proc_stat
                proc_stat = host.usage_utils.get_proc_stat()

                # Need two consecutive samples to compute a delta.
                if last_proc_stat and proc_stat:
                    cpu_usage = host.usage_utils.calc_cpu_usage(
                        last_proc_stat, proc_stat)
                    mem_usage = host.usage_utils.get_mem_usage()
                    settings.local.host.usage.add_period(timestamp,
                        cpu_usage, mem_usage)

            yield interrupter_sleep(settings.app.host_ttl - 10)

            ping_timestamp = utils.now()
            settings.local.host.collection.update({
                '_id': settings.local.host.id,
            }, {'$set': {
                'status': ONLINE,
                'ping_timestamp': utils.now(),
                'auto_public_address': settings.local.public_ip,
                'auto_public_address6': settings.local.public_ip6,
            }})

            # Propagate a changed public address to listeners.
            if settings.local.host.auto_public_address != \
                    settings.local.public_ip or \
                    settings.local.host.auto_public_address6 != \
                    settings.local.public_ip6:
                settings.local.host.auto_public_address = \
                    settings.local.public_ip
                settings.local.host.auto_public_address6 = \
                    settings.local.public_ip6
                event.Event(type=HOSTS_UPDATED)

            settings.local.host_ping_timestamp = ping_timestamp
        except GeneratorExit:
            host.deinit()
            raise
        except:
            logger.exception('Error in host keep alive update', 'runners',
                host_id=settings.local.host.id,
                host_name=settings.local.host.name,
            )
            time.sleep(0.5)
def _keep_alive_thread(self):
    """Generator thread refreshing this instance's ping timestamp.

    Updates the embedded instance document's ping_timestamp each cycle;
    if the instance document disappeared (server rebalanced or removed)
    the server process is stopped. Two consecutive update failures also
    stop the server. On GeneratorExit the process is stopped.
    """
    try:
        error_count = 0

        while not self.interrupt:
            try:
                doc = self.collection.find_and_modify({
                    '_id': self.server.id,
                    # Only match while this host's availability group
                    # still owns the server.
                    'availability_group': \
                        settings.local.host.availability_group,
                    'instances.instance_id': self.id,
                }, {'$set': {
                    'instances.$.ping_timestamp': utils.now(),
                }}, fields={
                    '_id': False,
                    'instances': True,
                }, new=True)
                yield

                if not doc:
                    logger.error(
                        'Instance doc lost, stopping server', 'server',
                        server_id=self.server.id,
                        instance_id=self.id,
                        cur_timestamp=utils.now(),
                    )

                    if self.stop_process():
                        break
                    else:
                        # Stop already in progress; retry shortly.
                        time.sleep(0.1)
                        continue
                else:
                    error_count = 0

                yield
            except:
                error_count += 1
                # Give up after two consecutive failures if the
                # process can be stopped.
                if error_count >= 2 and self.stop_process():
                    logger.exception(
                        'Failed to update server ping, stopping server',
                        'server',
                        server_id=self.server.id,
                    )
                    break
                logger.exception('Failed to update server ping',
                    'server',
                    server_id=self.server.id,
                )
                time.sleep(2)

            yield interrupter_sleep(settings.vpn.server_ping)
    except GeneratorExit:
        self.stop_process()
def verify_otp_code(self, code, remote_ip=None):
    """Verify a TOTP code for this user, with optional per-IP caching.

    When caching is enabled and a remote_ip is given, a salted SHA-512
    hash of code+ip is compared against a cached hash so a client may
    reuse its last accepted code from the same address. Otherwise the
    code is checked against the current 30-second epoch and its two
    neighbors (RFC 6238 style), and recorded in otp_collection so each
    code is single-use. Returns True when the code is accepted.
    """
    if remote_ip and settings.vpn.cache_otp_codes:
        doc = self.otp_cache_collection.find_one({"_id": self.id})
        if doc:
            # Stored format: "<version>$<b64 salt>$<b64 hash>".
            _, hash_salt, cur_otp_hash = doc["otp_hash"].split("$")
            hash_salt = base64.b64decode(hash_salt)
        else:
            hash_salt = os.urandom(8)
            cur_otp_hash = None
        # NOTE(review): code and ip are concatenated without a
        # separator before hashing — assumed acceptable here since
        # codes are fixed-width digits; confirm.
        otp_hash = hashlib.sha512()
        otp_hash.update(code + remote_ip)
        otp_hash.update(hash_salt)
        otp_hash = base64.b64encode(otp_hash.digest())
        if otp_hash == cur_otp_hash:
            # Cache hit: refresh the timestamp and accept.
            self.otp_cache_collection.update({"_id": self.id},
                {"$set": {"timestamp": utils.now()}})
            return True
        otp_hash = "$".join(("1", base64.b64encode(hash_salt), otp_hash))

    # Pad the base32 secret to a multiple of 8 before decoding.
    otp_secret = self.otp_secret
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, "=")
    otp_secret = base64.b32decode(otp_secret.upper())

    # Accept the current epoch plus one step of clock skew either way.
    valid_codes = []
    epoch = int(utils.time_now() / 30)
    for epoch_offset in range(-1, 2):
        value = struct.pack(">q", epoch + epoch_offset)
        hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
        # Dynamic truncation per RFC 4226.
        offset = ord(hmac_hash[-1]) & 0x0F
        truncated_hash = hmac_hash[offset : offset + 4]
        truncated_hash = struct.unpack(">L", truncated_hash)[0]
        truncated_hash &= 0x7FFFFFFF
        truncated_hash %= 1000000
        valid_codes.append("%06d" % truncated_hash)

    if code not in valid_codes:
        return False

    # Record usage; an existing record means the code was replayed.
    response = self.otp_collection.update(
        {"_id": {"user_id": self.id, "code": code}},
        {"$set": {"timestamp": utils.now()}}, upsert=True
    )
    if response["updatedExisting"]:
        return False

    if remote_ip and settings.vpn.cache_otp_codes:
        self.otp_cache_collection.update(
            {"_id": self.id},
            {"$set": {"otp_hash": otp_hash, "timestamp": utils.now()}},
            upsert=True
        )

    return True
def _watch_thread(self):
    """Generator thread that prunes stale client byte counters, emits
    monitoring points and flushes bandwidth totals.

    Like the plain watch thread but additionally publishes
    server_bandwidth and server device-count points to monitoring each
    cycle. Errors are logged and stop the server process.
    """
    try:
        while True:
            self.cur_timestamp = utils.now()
            timestamp_ttl = self.cur_timestamp - datetime.timedelta(
                seconds=180)

            # Drop counters not updated within the TTL window.
            for client_id, (timestamp, _, _) in self.client_bytes.items():
                if timestamp < timestamp_ttl:
                    self.client_bytes.pop(client_id, None)

            # Read-and-reset the shared byte counters under the lock.
            self.bytes_lock.acquire()
            bytes_recv = self.bytes_recv
            bytes_sent = self.bytes_sent
            self.bytes_recv = 0
            self.bytes_sent = 0
            self.bytes_lock.release()

            monitoring.insert_point('server_bandwidth', {
                'host': settings.local.host.name,
                'server': self.server.name,
            }, {
                'bytes_sent': bytes_sent,
                'bytes_recv': bytes_recv,
            })

            monitoring.insert_point('server', {
                'host': settings.local.host.name,
                'server': self.server.name,
            }, {
                'device_count': self.clients.clients.count({}),
            })

            if bytes_recv != 0 or bytes_sent != 0:
                self.server.bandwidth.add_data(
                    utils.now(), bytes_recv, bytes_sent)

            yield interrupter_sleep(self.bandwidth_rate)
            if self.instance.sock_interrupt:
                return
    except GeneratorExit:
        raise
    except:
        try:
            self.push_output('ERROR Management rate thread error')
        except:
            # Output channel may already be gone during shutdown.
            pass
        logger.exception('Error in management rate thread', 'server',
            server_id=self.server.id,
            instance_id=self.instance.id,
        )
        self.instance.stop_process()
def _check_thread():
    """Generator thread retrying transactions whose TTL expired.

    Scans the transaction collection for documents past their
    ttl_timestamp (in priority order) and re-runs each; failures are
    logged per transaction so one bad doc cannot stall the rest.
    """
    collection = mongo.get_collection('transaction')

    while True:
        try:
            spec = {
                'ttl_timestamp': {'$lt': utils.now()},
            }

            for doc in collection.find(spec).sort('priority'):
                logger.info('Transaction timeout retrying...', 'runners',
                    doc=doc,
                )

                try:
                    tran = transaction.Transaction(doc=doc)
                    tran.run()
                except:
                    logger.exception('Failed to run transaction',
                        'runners',
                        transaction_id=doc['_id'],
                    )

            yield interrupter_sleep(settings.mongo.tran_ttl)
        except GeneratorExit:
            raise
        except:
            logger.exception('Error in transaction runner thread',
                'runners')
            time.sleep(0.5)
def run(self, send_events=False):
    """Register this instance on the server document and start it.

    Atomically pushes this instance into the server's instances array,
    but only while the server is ONLINE and below its replica count
    (replica_count < 1 means effectively unlimited). If the push did
    not match — server offline or already fully replicated — the call
    is a no-op; otherwise the run thread is started.
    """
    replica_count = self.server.replica_count
    if replica_count < 1:
        # Treat non-positive replica counts as unbounded.
        replica_count = 10000

    response = self.collection.update({
        '_id': self.server.id,
        'status': ONLINE,
        'instances_count': {'$lt': replica_count},
    }, {
        '$push': {
            'instances': {
                'instance_id': self.id,
                'host_id': settings.local.host_id,
                'ping_timestamp': utils.now(),
                'clients': [],
                'clients_active': 0,
            },
        },
        '$inc': {
            'instances_count': 1,
        },
    })

    if not response['updatedExisting']:
        # Lost the race for a replica slot; do not start.
        return

    threading.Thread(target=self._run_thread,
        args=(send_events,)).start()
def task(self):
    """Release server network locks whose TTL has expired."""
    expired_spec = {
        'network_lock_ttl': {'$lt': utils.now()},
    }
    release_doc = {'$unset': {
        'network_lock': '',
        'network_lock_ttl': '',
    }}
    self.server_collection.update_many(expired_spec, release_doc)
def _logger_runner_thread():
    """Generator thread flushing queued log messages to MongoDB.

    Drains the in-memory log queue into timestamped documents, bulk
    inserts them and emits SYSTEM_LOG_UPDATED, then sleeps for the
    configured delay. Errors are logged and retried.
    """
    log_queue = logger.log_queue
    collection = mongo.get_collection('logs')

    while True:
        try:
            msg_docs = []

            # Drain everything currently queued.
            while True:
                try:
                    msg = log_queue.popleft()
                    msg_docs.append({
                        'timestamp': utils.now(),
                        'message': msg,
                    })
                except IndexError:
                    break

            if msg_docs:
                yield
                collection.insert(msg_docs)
                event.Event(type=SYSTEM_LOG_UPDATED)

            yield interrupter_sleep(settings.app.log_db_delay)
        except GeneratorExit:
            raise
        except:
            logger.exception('Error in log runner thread', 'runners')
            time.sleep(0.5)
def _connected(self, client_id):
    """Finalize a client connection: firewall rules + connection doc.

    Looks up the pending client, applies its iptables/ip6tables rules,
    inserts the connection document (with an anonymized domain hash on
    enterprise subscriptions) and queues the client. Unknown clients
    and insert failures are killed.
    """
    client = self.clients.find_id(client_id)
    if not client:
        self.instance_com.push_output(
            'ERROR Unknown client connected client_id=%s' % client_id)
        self.instance_com.client_kill(client_id)
        return

    self.set_iptables_rules(
        client['iptables_rules'],
        client['ip6tables_rules'],
    )

    timestamp = utils.now()
    doc = {
        'user_id': client['user_id'],
        'server_id': self.server.id,
        'host_id': settings.local.host_id,
        'timestamp': timestamp,
        'platform': client['platform'],
        'type': client['user_type'],
        'device_name': client['device_name'],
        'mac_addr': client['mac_addr'],
        'network': self.server.network,
        'real_address': client['real_address'],
        'virt_address': client['virt_address'],
        'virt_address6': client['virt_address6'],
        'dns_servers': client['dns_servers'],
        'dns_suffix': client['dns_suffix'],
        # NOTE(review): strftime('%s') is a platform-specific epoch
        # extension (glibc); assumed to run on Linux — confirm.
        'connected_since': int(timestamp.strftime('%s')),
    }

    if settings.local.sub_active and \
            settings.local.sub_plan == 'enterprise':
        # Store an MD5 of "user.org" rather than the identity itself.
        domain_hash = hashlib.md5()
        domain_hash.update((client['user_name'].split('@')[0] + '.' +
            client['org_name']).lower())
        domain_hash = bson.binary.Binary(domain_hash.digest(),
            subtype=bson.binary.MD5_SUBTYPE)
        doc['domain'] = domain_hash

    try:
        doc_id = self.collection.insert(doc)
    except:
        logger.exception('Error adding client', 'server',
            server_id=self.server.id,
        )
        self.instance_com.client_kill(client_id)
        return

    self.clients.update_id(client_id, {
        'doc_id': doc_id,
        'timestamp': time.time(),
    })

    self.clients_queue.append(client_id)

    self.instance_com.push_output(
        'User connected user_id=%s' % client['user_id'])
    self.send_event()
def key_sync_get(org_id, user_id, server_id, key_hash):
    """Serve a synced key configuration to an authenticated client.

    The request is authenticated with an HMAC-SHA256 signature over the
    auth token, timestamp, nonce, method, path and body, using the
    user's sync secret. The timestamp must be inside the configured
    window and the (token, nonce) pair must be unused, preventing
    replay. Returns the key conf on success, empty response or a 4xx
    abort otherwise.
    """
    utils.rand_sleep()

    if not settings.local.sub_active:
        return utils.response('', status_code=480)

    auth_token = flask.request.headers.get('Auth-Token', None)
    auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
    auth_nonce = flask.request.headers.get('Auth-Nonce', None)
    auth_signature = flask.request.headers.get('Auth-Signature', None)
    if not auth_token or not auth_timestamp or not auth_nonce or \
            not auth_signature:
        return flask.abort(401)
    auth_nonce = auth_nonce[:32]

    try:
        # Reject requests outside the allowed clock-skew window.
        if abs(int(auth_timestamp) - int(utils.time_now())) > \
                settings.app.auth_time_window:
            return flask.abort(401)
    except ValueError:
        return flask.abort(401)

    org = organization.get_by_id(org_id)
    if not org:
        return flask.abort(404)

    user = org.get_user(id=user_id)
    if not user:
        return flask.abort(404)
    elif not user.sync_secret:
        return flask.abort(404)

    auth_string = '&'.join([
        auth_token, auth_timestamp, auth_nonce, flask.request.method,
        flask.request.path] +
        ([flask.request.data] if flask.request.data else []))

    if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
        return flask.abort(401)

    auth_test_signature = base64.b64encode(hmac.new(
        user.sync_secret.encode(), auth_string,
        hashlib.sha256).digest())
    # Constant-time comparison, consistent with key_wg_post, to avoid
    # leaking signature bytes through timing.
    if not utils.const_compare(auth_signature, auth_test_signature):
        return flask.abort(401)

    nonces_collection = mongo.get_collection('auth_nonces')
    try:
        # Acknowledged write: with w=0 the duplicate-key error was
        # never reported, so nonce replay protection never fired.
        nonces_collection.insert({
            'token': auth_token,
            'nonce': auth_nonce,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        return flask.abort(401)

    key_conf = user.sync_conf(server_id, key_hash)
    if key_conf:
        return utils.response(key_conf['conf'])

    return utils.response('')
def claim_commit(self, fields=None):
    """Attempt to claim this queue item for the current runner.

    Writes the commit doc with a fresh TTL; the filter only matches
    when the item is unclaimed or already owned by this runner. On
    success the keep-alive is started. Returns True when claimed.
    """
    doc = self.get_commit_doc(fields=fields)

    doc['runner_id'] = self.runner_id
    doc['ttl_timestamp'] = utils.now() + \
        datetime.timedelta(seconds=self.ttl)

    response = self.collection.update({
        '_id': self.id,
        '$or': [
            {'runner_id': self.runner_id},
            {'runner_id': {'$exists': False}},
        ],
    }, {
        '$set': doc,
    })

    self.claimed = response['updatedExisting']

    if self.claimed:
        self.keep_alive()
        logger.debug('Queue claimed', 'queue',
            queue_id=self.id,
            queue_type=self.type,
        )

    return response['updatedExisting']
def _keep_alive_thread(self):
    """Generator thread refreshing this instance's ping timestamp.

    Updates the embedded instance doc each cycle; a missing doc means
    the server lost this instance, so the process is stopped (retrying
    briefly if a stop is already in progress). Update failures are
    logged and retried.
    """
    while not self.interrupt:
        try:
            doc = self.collection.find_and_modify({
                '_id': self.server.id,
                'instances.instance_id': self.id,
            }, {'$set': {
                'instances.$.ping_timestamp': utils.now(),
            }}, fields={
                '_id': False,
                'instances': True,
            }, new=True)
            yield

            if not doc:
                if self.stop_process():
                    break
                else:
                    # Stop already in progress; retry shortly.
                    time.sleep(0.1)
                    continue
        except:
            logger.exception('Failed to update server ping', 'server',
                server_id=self.server.id,
            )

        yield interrupter_sleep(settings.vpn.server_ping)
def audit_event(self, event_type, event_msg, remote_addr=None):
    """Record an audit entry for this user and notify plugins.

    No-op unless full auditing is enabled in the app settings.
    """
    if settings.app.auditing != ALL:
        return

    now = utils.now()
    entry = {
        'user_id': self.id,
        'org_id': self.org_id,
        'timestamp': now,
        'type': event_type,
        'remote_addr': remote_addr,
        'message': event_msg,
    }

    self.audit_collection.insert(entry)

    # Forward the same fields to the plugin hook, tagged with the
    # local host identity.
    plugins.event(
        'audit_event',
        host_id=settings.local.host_id,
        host_name=settings.local.host.name,
        **entry
    )
def _logger_runner_thread():
    """Generator thread flushing queued log messages to MongoDB.

    Variant that marks this process as the logger runner and does not
    emit a system-log event after inserting. Drains the in-memory
    queue, bulk inserts the documents, then sleeps for the configured
    delay; errors are logged and retried.
    """
    log_queue = logger.log_queue
    collection = mongo.get_collection("logs")
    settings.local.logger_runner = True

    while True:
        try:
            msg_docs = []

            # Drain everything currently queued.
            while True:
                try:
                    msg = log_queue.popleft()
                    msg_docs.append({"timestamp": utils.now(),
                        "message": msg})
                except IndexError:
                    break

            if msg_docs:
                yield
                collection.insert(msg_docs)

            yield interrupter_sleep(settings.app.log_db_delay)
        except GeneratorExit:
            raise
        except:
            logger.exception("Error in log runner thread", "runners")
            time.sleep(0.5)
def verify_otp_code(self, code):
    """Verify a single-use TOTP code for this user.

    Computes the RFC 6238 codes for the current 30-second period and
    one period of skew on either side, checks membership, then records
    the code in otp_collection so a replayed code is rejected. Returns
    True only for a fresh, valid code.
    """
    # Base32 input must be padded to a multiple of 8 characters.
    secret = self.otp_secret
    pad_len = 8 - len(secret) % 8
    if pad_len != 8:
        secret = secret + '=' * pad_len
    key = base64.b32decode(secret.upper())

    period = int(utils.time_now() / 30)
    accepted = []
    for counter in (period - 1, period, period + 1):
        digest = hmac.new(key, struct.pack('>q', counter),
            hashlib.sha1).digest()
        # Dynamic truncation per RFC 4226.
        start = ord(digest[-1]) & 0x0f
        token = struct.unpack('>L', digest[start:start + 4])[0]
        token &= 0x7fffffff
        accepted.append('%06d' % (token % 1000000))

    if code not in accepted:
        return False

    # A duplicate (user_id, code) insert means the code was replayed.
    try:
        self.otp_collection.insert({
            '_id': {
                'user_id': self.id,
                'code': code,
            },
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        return False

    return True
def rollback_actions(self):
    """Roll back a failed transaction's actions.

    Extends the transaction TTL while in ROLLBACK state (a non-match
    means another runner owns the rollback, so return), runs the
    rollback actions, then removes the transaction document. Rollback
    errors are logged and re-raised, leaving the doc for retry.
    """
    logger.warning('Transaction failed rolling back...', 'transaction',
        actions=self.action_sets,
    )

    response = self.transaction_collection.update({
        '_id': self.id,
        'state': ROLLBACK,
    }, {
        '$set': {
            'ttl_timestamp': utils.now() + \
                datetime.timedelta(seconds=self.ttl),
        },
    })
    if not response['updatedExisting']:
        # Another runner already took over this rollback.
        return

    try:
        self._rollback_actions()
    except:
        logger.exception('Error occurred rolling back ' +
            'transaction actions', 'transaction',
            transaction_id=self.id,
        )
        raise

    self.transaction_collection.remove(self.id)
def run_thread():
    """Generator thread dispatching scheduled tasks.

    Runs on-start tasks once, then each second (deduplicated via
    last_run) fires every task registered for the current hour, minute
    and second, including the 'all' wildcard buckets. Errors are
    logged and the loop continues.
    """
    last_run = None

    try:
        for task_cls in task.tasks_on_start:
            run_task(task_cls())
    except:
        logger.exception('Error running on start tasks', 'runners')

    while True:
        try:
            cur_time = utils.now()
            # Only dispatch once per wall-clock second.
            if int(time.mktime(cur_time.timetuple())) != last_run:
                last_run = int(time.mktime(cur_time.timetuple()))

                for hour in ('all', cur_time.hour):
                    for minute in ('all', cur_time.minute):
                        for second in ('all', cur_time.second):
                            for task_cls in task.tasks[hour][
                                    minute][second]:
                                run_task(task_cls())
        except:
            logger.exception('Error in tasks run thread', 'runners')

        time.sleep(0.5)
        yield
def _host_check_thread():
    """Generator thread marking unresponsive hosts offline.

    Finds hosts whose ping_timestamp is older than host_ttl and flips
    them to OFFLINE. The TTL filter is repeated in the update so a host
    that pinged between the find and the update is left alone; each
    actual transition emits HOSTS_UPDATED.
    """
    collection = mongo.get_collection('hosts')

    while True:
        try:
            ttl_timestamp = {'$lt': utils.now() -
                datetime.timedelta(seconds=settings.app.host_ttl)}

            cursor = collection.find({
                'ping_timestamp': ttl_timestamp,
            }, {
                '_id': True,
            })

            for doc in cursor:
                response = collection.update({
                    '_id': doc['_id'],
                    # Re-check so a freshly-pinged host is not
                    # clobbered.
                    'ping_timestamp': ttl_timestamp,
                }, {'$set': {
                    'status': OFFLINE,
                    'ping_timestamp': None,
                }})
                if response['updatedExisting']:
                    event.Event(type=HOSTS_UPDATED)
        except GeneratorExit:
            raise
        except:
            logger.exception('Error checking host status', 'runners')

        yield interrupter_sleep(settings.app.host_ttl)
def create_user_key_link(self, user_id, one_time=False):
    """Create (or refresh) download links for a user's keys.

    Generates a random key id and short url id, retrying on short-id
    collisions up to 256 times, and upserts the link document for this
    (org, user). Returns the set of URLs derived from the generated
    ids.

    Raises KeyLinkError when no unique short id could be generated.
    """
    success = False
    for _ in xrange(256):
        key_id = uuid.uuid4().hex
        short_id = ''.join(random.sample(
            SHORT_URL_CHARS, settings.app.short_url_length))

        try:
            self.key_link_collection.update({
                'org_id': self.id,
                'user_id': user_id,
            }, {'$set': {
                'key_id': key_id,
                'short_id': short_id,
                'one_time': one_time,
                'timestamp': utils.now(),
            }}, upsert=True)
        except pymongo.errors.DuplicateKeyError:
            # short_id collided with another user's link; retry.
            continue

        success = True
        break

    if not success:
        raise KeyLinkError('Failed to generate random key short id')

    return {
        'id': key_id,
        'key_url': '/key/%s.tar' % key_id,
        'key_zip_url': '/key/%s.zip' % key_id,
        'key_onc_url': '/key_onc/%s.zip' % key_id,
        'view_url': '/k/%s' % short_id,
        'uri_url': '/ku/%s' % short_id,
    }
def run(self, send_events=False):
    """Register this instance on the server document and start it.

    Availability-group aware variant: the push only matches when the
    server is ONLINE, below its replica count and either unassigned or
    already assigned to this host's availability group, which it then
    claims. A non-match means no replica slot was won and the call is
    a no-op.
    """
    availability_group = settings.local.host.availability_group

    response = self.collection.update({
        '_id': self.server.id,
        'status': ONLINE,
        'instances_count': {'$lt': self.server.replica_count},
        '$or': [
            {'availability_group': None},
            {'availability_group': {'$exists': False}},
            {'availability_group': availability_group},
        ],
    }, {
        '$set': {
            'availability_group': availability_group,
        },
        '$push': {
            'instances': {
                'instance_id': self.id,
                'host_id': settings.local.host_id,
                'ping_timestamp': utils.now(),
            },
        },
        '$inc': {
            'instances_count': 1,
        },
    })

    if not response['updatedExisting']:
        # Lost the race for a replica slot; do not start.
        return

    threading.Thread(target=self._run_thread,
        args=(send_events,)).start()
def update_clients_bandwidth(self, clients, rem_clients):
    """Accumulate per-client byte deltas into the server bandwidth.

    Drops counters for disconnected clients, computes the change in
    received/sent bytes per remaining client since the last sample
    (treating a counter that went backwards as a fresh start), and
    records any non-zero totals.
    """
    # Forget counters for clients that are no longer connected.
    for gone_id in rem_clients:
        self.clients.pop(gone_id, None)

    total_recv = 0
    total_sent = 0
    for client in clients:
        cur_recv = client['bytes_received']
        cur_sent = client['bytes_sent']

        last_recv, last_sent = self.clients.get(client['id'], (0, 0))
        self.clients[client['id']] = (cur_recv, cur_sent)

        # Counter reset (e.g. reconnect): count from zero instead of
        # producing a negative delta.
        if last_recv > cur_recv or last_sent > cur_sent:
            last_recv, last_sent = 0, 0

        total_recv += cur_recv - last_recv
        total_sent += cur_sent - last_sent

    if total_recv != 0 or total_sent != 0:
        self.server.bandwidth.add_data(
            utils.now(), total_recv, total_sent)
def check_thread():
    """Generator thread re-running tasks whose TTL expired.

    Finds incomplete tasks past their ttl_timestamp, strips the stale
    runner claim (re-checking state and TTL in the update filter so
    only a still-stale task is taken), and re-runs each reclaimed
    task. Errors are logged and the loop continues.
    """
    while True:
        try:
            cur_timestamp = utils.now()
            spec = {
                'ttl_timestamp': {'$lt': cur_timestamp},
                'state': {'$ne': COMPLETE},
            }

            for task_item in task.iter_tasks(spec):
                # Spread reclaim attempts across runners.
                random_sleep()

                response = task.Task.collection.update({
                    '_id': task_item.id,
                    'state': {'$ne': COMPLETE},
                    'ttl_timestamp': {'$lt': cur_timestamp},
                }, {'$unset': {
                    'runner_id': '',
                }})
                if response['updatedExisting']:
                    run_task(task_item)
        except:
            logger.exception('Error in task check thread', 'runners')

        yield interrupter_sleep(settings.mongo.task_ttl)
def _keep_alive_thread(self):
    """Keep-alive loop extending this queue item's reservation TTL.

    Sleeps just under the TTL, then renews ttl_timestamp while this
    runner still owns the item and publishes an UPDATE. If the renewal
    does not match, the reservation was lost: the queue state is set
    to STOPPED and an error is logged. Exits once the queue completes
    or stops.
    """
    while True:
        # Renew comfortably before the TTL elapses.
        time.sleep(self.ttl - 6)
        if self.queue_com.state in (COMPLETE, STOPPED):
            break

        response = self.collection.update({
            '_id': self.id,
            'runner_id': self.runner_id,
        }, {'$set': {
            'ttl_timestamp': utils.now() + \
                datetime.timedelta(seconds=self.ttl),
        }})
        if response['updatedExisting']:
            messenger.publish('queue', [UPDATE, self.id])
        else:
            # Another runner reclaimed the item; stop this one.
            self.queue_com.state_lock.acquire()
            try:
                self.queue_com.state = STOPPED
            finally:
                self.queue_com.state_lock.release()

            logger.error('Lost reserve, queue stopped', 'queue',
                queue_id=self.id,
                queue_type=self.type,
            )
def sso_request_get():
    """Start a Google SSO flow via the external auth server.

    Requires an active subscription. Registers a random state/secret
    pair with the auth server and stores them locally for the callback
    to verify, then redirects the browser to the returned SSO URL.
    Aborts 405 without a subscription or valid license, 500 on other
    auth-server errors.
    """
    state = utils.rand_str(64)
    secret = utils.rand_str(64)
    callback = flask.request.url_root + 'sso/callback'

    if not settings.local.sub_active:
        return flask.abort(405)

    resp = utils.request.post(AUTH_SERVER + '/request/google',
        json_data={
            'license': settings.app.license,
            'callback': callback,
            'state': state,
            'secret': secret,
        }, headers={
            'Content-Type': 'application/json',
        })

    if resp.status_code != 200:
        if resp.status_code == 401:
            # License rejected by the auth server.
            return flask.abort(405)
        return flask.abort(500)

    # Persist state/secret so the callback can validate the response.
    tokens_collection = mongo.get_collection('sso_tokens')
    tokens_collection.insert({
        '_id': state,
        'secret': secret,
        'timestamp': utils.now(),
    })

    data = resp.json()
    return flask.redirect(data['url'])
def task(self):
    """Generator task re-advertising stale VPC routes.

    Finds route documents whose timestamp is older than route_ping_ttl
    and asks instances (via the messenger) to re-claim and re-install
    each route. Errors are logged; the task then sleeps for the server
    ping interval.
    """
    try:
        timestamp_spec = utils.now() - datetime.timedelta(
            seconds=settings.vpn.route_ping_ttl)

        docs = self.routes_collection.find({
            'timestamp': {'$lt': timestamp_spec},
        })
        yield

        for doc in docs:
            server_id = doc['server_id']
            vpc_region = doc['vpc_region']
            vpc_id = doc['vpc_id']
            network = doc['network']

            messenger.publish('instance', ['route_advertisement',
                server_id, vpc_region, vpc_id, network])
    except GeneratorExit:
        raise
    except:
        logger.exception('Error checking route states', 'tasks')

    yield interrupter_sleep(settings.vpn.server_ping)
def run_post_actions(self):
    """Run a committed transaction's post actions.

    Extends the transaction TTL while in COMMITTED state (a non-match
    means another runner owns it, so return), runs the post actions,
    then removes the transaction document. Errors are logged and
    re-raised, leaving the doc for retry.
    """
    response = self.transaction_collection.update({
        '_id': self.id,
        'state': COMMITTED,
    }, {
        '$set': {
            'ttl_timestamp': utils.now() + \
                datetime.timedelta(seconds=self.ttl),
        },
    })
    if not response['updatedExisting']:
        # Another runner already owns the post-commit phase.
        return

    try:
        self._run_post_actions()
    except:
        logger.exception('Error occurred running ' +
            'transaction post actions', 'transaction',
            transaction_id=self.id,
        )
        raise

    self.transaction_collection.remove(self.id)
def run_timeout_queues():
    """Requeue queue items whose reservation TTL has expired.

    Strips the stale runner claim from each expired item (re-checking
    the TTL in the update filter so a freshly-renewed item is left
    alone) and pushes reclaimed items onto the appropriate runner
    queue.
    """
    now = utils.now()
    expired_spec = {
        'ttl_timestamp': {'$lt': now},
    }

    for item in queue.iter_queues(expired_spec):
        response = queue.Queue.collection.update({
            '_id': item.id,
            'ttl_timestamp': {'$lt': now},
        }, {'$unset': {
            'runner_id': '',
        }})

        if response['updatedExisting']:
            # Lower number runs first; invert the 0-4 priority scale.
            runner_queues[item.cpu_type].put(
                (abs(item.priority - 4), item))
def task(self):
    """Generator task checking and recovering server instance state.

    Three phases: (1) remove instance docs whose ping timed out,
    (2) build a map of online hosts to availability groups, and
    (3) for online servers missing replicas, pick the availability
    group with the most eligible hosts (rebalancing the server to it
    when that differs from the current group) and ask up to three
    servers per cycle to start on preferred hosts. No-op in demo mode.
    """
    if settings.app.demo_mode:
        return

    try:
        timestamp = utils.now()
        timestamp_spec = timestamp - datetime.timedelta(
            seconds=settings.vpn.server_ping_ttl)

        # Phase 1: prune instances whose ping is past the TTL.
        docs = self.server_collection.find({
            'instances.ping_timestamp': {'$lt': timestamp_spec},
        }, {
            '_id': True,
            'instances': True,
        })
        yield

        for doc in docs:
            for instance in doc['instances']:
                if instance['ping_timestamp'] < timestamp_spec:
                    logger.warning(
                        'Removing instance doc', 'server',
                        server_id=doc['_id'],
                        instance_id=instance['instance_id'],
                        cur_timestamp=timestamp,
                        ttl_timestamp=timestamp_spec,
                        ping_timestamp=instance['ping_timestamp'],
                    )

                    self.server_collection.update({
                        '_id': doc['_id'],
                        'instances.instance_id':
                            instance['instance_id'],
                    }, {
                        '$pull': {
                            'instances': {
                                'instance_id':
                                    instance['instance_id'],
                            },
                        },
                        '$inc': {
                            'instances_count': -1,
                        },
                    })
        yield

        # Phase 2: map online hosts to their availability groups.
        docs = self.host_collection.find({
            'status': ONLINE,
        }, {
            '_id': True,
            'availability_group': True,
        })
        yield

        hosts_group = {}
        for doc in docs:
            hosts_group[doc['_id']] = doc.get(
                'availability_group', DEFAULT)
        yield

        # Phase 3: find online servers that have been up long enough
        # but are short of replicas.
        response = self.server_collection.aggregate([
            {'$match': {
                'status': ONLINE,
                'start_timestamp': {'$lt': timestamp_spec},
            }},
            {'$project': {
                '_id': True,
                'hosts': True,
                'instances': True,
                'replica_count': True,
                'availability_group': True,
                'offline_instances_count': {
                    '$subtract': [
                        '$replica_count',
                        '$instances_count',
                    ],
                },
            }},
            {'$match': {
                'offline_instances_count': {'$gt': 0},
            }},
        ])
        yield

        recover_count = 0

        for doc in response:
            cur_avail_group = doc.get('availability_group', DEFAULT)

            hosts_set = set(doc['hosts'])
            # Choose the availability group containing the most of
            # this server's hosts; ties prefer the current group.
            group_best = None
            group_len_max = 0
            server_groups = collections.defaultdict(set)
            for hst in hosts_set:
                avail_zone = hosts_group.get(hst)
                if not avail_zone:
                    continue
                server_groups[avail_zone].add(hst)
                group_len = len(server_groups[avail_zone])

                if group_len > group_len_max:
                    group_len_max = group_len
                    group_best = avail_zone
                elif group_len == group_len_max and \
                        avail_zone == cur_avail_group:
                    group_best = avail_zone

            if group_best and cur_avail_group != group_best:
                logger.info(
                    'Rebalancing server availability group', 'server',
                    server_id=doc['_id'],
                    current_availability_group=cur_avail_group,
                    new_availability_group=group_best,
                )

                # Reset instances and move the server to the better
                # group, then notify running instances.
                self.server_collection.update({
                    '_id': doc['_id'],
                    'status': ONLINE,
                }, {'$set': {
                    'instances': [],
                    'instances_count': 0,
                    'availability_group': group_best,
                }})

                messenger.publish('servers', 'rebalance', extra={
                    'server_id': doc['_id'],
                    'availability_group': group_best,
                })

                prefered_hosts = server_groups[group_best]
            else:
                prefered_hosts = server_groups[cur_avail_group]

            active_hosts = set(
                [x['host_id'] for x in doc['instances']])
            prefered_hosts = list(prefered_hosts - active_hosts)
            if not prefered_hosts:
                continue

            # Limit recoveries per cycle to avoid a start storm.
            if recover_count >= 3:
                continue
            recover_count += 1

            logger.info(
                'Recovering server state', 'server',
                server_id=doc['_id'],
                prefered_hosts=prefered_hosts,
            )

            messenger.publish('servers', 'start', extra={
                'server_id': doc['_id'],
                'send_events': True,
                'prefered_hosts': host.get_prefered_hosts(
                    prefered_hosts, doc['replica_count'])
            })
    except GeneratorExit:
        raise
    except:
        logger.exception('Error checking server states', 'tasks')
def start(self, timeout=None):
    """Start this server's instances across its hosts.

    Validates preconditions (offline, has dh params and at least one
    organization), atomically flips the server to ONLINE, then
    publishes a start message and waits for replica instances to
    report 'started' or 'error' within the timeout. On failure or
    timeout everything is force-stopped and the server is reset to
    OFFLINE before raising.

    Raises ServerMissingOrg, ServerInstanceSet or ServerStartError.
    """
    timeout = timeout or settings.vpn.op_timeout
    # Capture the cursor before publishing so no messages are missed.
    cursor_id = self.get_cursor_id()

    if self.status != OFFLINE:
        return

    if not self.dh_params:
        # DH param generation continues the start asynchronously.
        self.generate_dh_param()
        return

    if not self.organizations:
        raise ServerMissingOrg('Server cannot be started ' + \
            'without any organizations', {
                'server_id': self.id,
            })

    self.pre_start_check()

    start_timestamp = utils.now()
    response = self.collection.update({
        '_id': self.id,
        'status': OFFLINE,
        'instances_count': 0,
    }, {'$set': {
        'status': ONLINE,
        'pool_cursor': None,
        'start_timestamp': start_timestamp,
        'availability_group': self.get_best_availability_group(),
    }})

    if not response['updatedExisting']:
        raise ServerInstanceSet('Server instances already running. %r', {
                'server_id': self.id,
            })

    self.clients_pool_collection.remove({
        'server_id': self.id,
    })

    self.status = ONLINE
    self.start_timestamp = start_timestamp

    replica_count = min(self.replica_count, len(self.hosts))
    started_count = 0
    error_count = 0

    try:
        self.publish('start', extra={
            'prefered_hosts': host.get_prefered_hosts(
                self.hosts, replica_count),
        })

        # First a short wait, then the full timeout, counting
        # started/error responses from instances.
        for x_timeout in (4, timeout):
            for msg in self.subscribe(cursor_id=cursor_id,
                    timeout=x_timeout):
                message = msg['message']
                if message == 'started':
                    started_count += 1
                    if started_count + error_count >= replica_count:
                        break
                elif message == 'error':
                    error_count += 1
                    if started_count + error_count >= replica_count:
                        break

            if started_count:
                break

        if not started_count:
            if error_count:
                raise ServerStartError('Server failed to start', {
                    'server_id': self.id,
                })
            else:
                raise ServerStartError('Server start timed out', {
                    'server_id': self.id,
                })
    except:
        # Roll back to a clean offline state on any failure.
        self.publish('force_stop')
        self.collection.update({
            '_id': self.id,
        }, {'$set': {
            'status': OFFLINE,
            'instances': [],
            'instances_count': 0,
        }})
        self.status = OFFLINE
        self.instances = []
        self.instances_count = 0
        raise
def verify_otp_code(self, code, remote_ip=None):
    """Verify a TOTP code for this user, with optional per-IP caching.

    When caching is enabled and a remote_ip is given, a salted SHA-512
    hash of code+ip is compared against a cached hash so a client may
    reuse its last accepted code from the same address. Otherwise the
    code is checked against the current 30-second epoch and its two
    neighbors (RFC 6238 style), and recorded in otp_collection so each
    code is single-use. Returns True when the code is accepted.
    """
    if remote_ip and settings.vpn.cache_otp_codes:
        doc = self.otp_cache_collection.find_one({
            '_id': self.id,
        })

        if doc:
            # Stored format: "<version>$<b64 salt>$<b64 hash>".
            _, hash_salt, cur_otp_hash = doc['otp_hash'].split('$')
            hash_salt = base64.b64decode(hash_salt)
        else:
            hash_salt = os.urandom(8)
            cur_otp_hash = None

        # NOTE(review): code and ip are concatenated without a
        # separator before hashing — assumed acceptable since codes
        # are fixed-width digits; confirm.
        otp_hash = hashlib.sha512()
        otp_hash.update(code + remote_ip)
        otp_hash.update(hash_salt)
        otp_hash = base64.b64encode(otp_hash.digest())

        if otp_hash == cur_otp_hash:
            # Cache hit: refresh the timestamp and accept.
            self.otp_cache_collection.update({
                '_id': self.id,
            }, {'$set': {
                'timestamp': utils.now(),
            }})
            return True

        otp_hash = '$'.join((
            '1',
            base64.b64encode(hash_salt),
            otp_hash,
        ))

    # Pad the base32 secret to a multiple of 8 before decoding.
    otp_secret = self.otp_secret
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
    otp_secret = base64.b32decode(otp_secret.upper())

    # Accept the current epoch plus one step of clock skew either way.
    valid_codes = []
    epoch = int(utils.time_now() / 30)
    for epoch_offset in range(-1, 2):
        value = struct.pack('>q', epoch + epoch_offset)
        hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
        # Dynamic truncation per RFC 4226.
        offset = ord(hmac_hash[-1]) & 0x0F
        truncated_hash = hmac_hash[offset:offset + 4]
        truncated_hash = struct.unpack('>L', truncated_hash)[0]
        truncated_hash &= 0x7FFFFFFF
        truncated_hash %= 1000000
        valid_codes.append('%06d' % truncated_hash)

    if code not in valid_codes:
        return False

    # Record usage; an existing record means the code was replayed.
    response = self.otp_collection.update({
        '_id': {
            'user_id': self.id,
            'code': code,
        },
    }, {'$set': {
        'timestamp': utils.now(),
    }}, upsert=True)

    if response['updatedExisting']:
        return False

    if remote_ip and settings.vpn.cache_otp_codes:
        self.otp_cache_collection.update({
            '_id': self.id,
        }, {'$set': {
            'otp_hash': otp_hash,
            'timestamp': utils.now(),
        }}, upsert=True)

    return True
def key_wg_post(org_id, user_id, server_id):
    """Handle a WireGuard connect request from a pritunl client.

    The request carries an HMAC-signed header set, an RSA-signed and
    NaCl-boxed payload containing the client's WireGuard public key and
    auth data. On success the connection result is boxed back to the
    client. Every failure path records a journal entry and aborts with a
    distinct HTTP status so the client can diagnose the step that
    failed.
    """
    org_id = org_id
    user_id = user_id
    server_id = server_id
    remote_addr = utils.get_remote_addr()

    auth_token = flask.request.headers.get('Auth-Token', None)
    auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
    auth_nonce = flask.request.headers.get('Auth-Nonce', None)
    auth_signature = flask.request.headers.get('Auth-Signature', None)
    if not auth_token or not auth_timestamp or not auth_nonce or \
            not auth_signature:
        journal.entry(
            journal.USER_WG_FAILURE,
            remote_address=remote_addr,
            event_long='Missing auth header',
        )
        return flask.abort(406)
    # Clamp header lengths before using them in hashes/DB writes.
    auth_token = auth_token[:256]
    auth_timestamp = auth_timestamp[:64]
    auth_nonce = auth_nonce[:32]
    auth_signature = auth_signature[:512]

    try:
        # Reject requests outside the allowed clock-skew window.
        if abs(int(auth_timestamp) - int(utils.time_now())) > \
                settings.app.auth_time_window:
            journal.entry(
                journal.USER_WG_FAILURE,
                remote_address=remote_addr,
                event_long='Expired auth timestamp',
            )
            return flask.abort(408)
    except ValueError:
        journal.entry(
            journal.USER_WG_FAILURE,
            remote_address=remote_addr,
            event_long='Invalid auth timestamp',
        )
        return flask.abort(405)

    org = organization.get_by_id(org_id)
    if not org:
        journal.entry(
            journal.USER_WG_FAILURE,
            remote_address=remote_addr,
            event_long='Organization not found',
        )
        return flask.abort(404)

    usr = org.get_user(id=user_id)
    if not usr:
        journal.entry(
            journal.USER_WG_FAILURE,
            remote_address=remote_addr,
            event_long='User not found',
        )
        return flask.abort(404)
    elif not usr.sync_secret:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='User missing sync secret',
        )
        return flask.abort(410)

    if auth_token != usr.sync_token:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Sync token mismatch',
        )
        return flask.abort(411)

    if usr.disabled:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='User disabled',
        )
        return flask.abort(403)

    # NOTE(review): missing JSON fields would be None here and make the
    # join below raise TypeError (HTTP 500) — presumably acceptable for
    # malformed client requests; verify against the client protocol.
    cipher_data64 = flask.request.json.get('data')
    box_nonce64 = flask.request.json.get('nonce')
    public_key64 = flask.request.json.get('public_key')
    signature64 = flask.request.json.get('signature')

    # The HMAC covers the sync token, headers, method, path and the
    # encrypted payload fields.
    auth_string = '&'.join([
        usr.sync_token, auth_timestamp, auth_nonce, flask.request.method,
        flask.request.path, cipher_data64, box_nonce64, public_key64,
        signature64])

    if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Auth string len limit exceeded',
        )
        return flask.abort(414)

    auth_test_signature = base64.b64encode(hmac.new(
        usr.sync_secret.encode(), auth_string,
        hashlib.sha512).digest())
    # Constant-time compare to avoid timing side channels.
    if not utils.const_compare(auth_signature, auth_test_signature):
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Auth signature mismatch',
        )
        return flask.abort(401)

    # Replay protection: the (token, nonce) pair must be unique.
    nonces_collection = mongo.get_collection('auth_nonces')
    try:
        nonces_collection.insert({
            'token': auth_token,
            'nonce': auth_nonce,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Duplicate nonce',
        )
        return flask.abort(409)

    # The payload is additionally RSA-signed by the user's key.
    data_hash = hashlib.sha512(
        '&'.join([cipher_data64, box_nonce64, public_key64])).digest()
    try:
        usr.verify_sig(
            data_hash,
            base64.b64decode(signature64),
        )
    except InvalidSignature:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Invalid rsa signature',
        )
        return flask.abort(412)

    svr = usr.get_server(server_id)

    # Decrypt the NaCl box with the server's auth key and the client's
    # ephemeral public key.
    sender_pub_key = nacl.public.PublicKey(
        base64.b64decode(public_key64))
    box_nonce = base64.b64decode(box_nonce64)
    priv_key = nacl.public.PrivateKey(
        base64.b64decode(svr.auth_box_private_key))
    cipher_data = base64.b64decode(cipher_data64)

    nacl_box = nacl.public.Box(priv_key, sender_pub_key)
    plaintext = nacl_box.decrypt(cipher_data, box_nonce).decode('utf-8')

    # Second replay check on the box nonce itself.
    try:
        nonces_collection.insert({
            'token': auth_token,
            'nonce': box_nonce64,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Duplicate secondary nonce',
        )
        return flask.abort(415)

    key_data = json.loads(plaintext)

    client_platform = utils.filter_str_uni(key_data['platform'])
    client_device_id = utils.filter_str_uni(key_data['device_id'])
    client_device_name = utils.filter_str_uni(key_data['device_name'])
    client_mac_addr = utils.filter_str_uni(key_data['mac_addr'])
    client_mac_addrs = key_data['mac_addrs']
    if client_mac_addrs:
        client_mac_addrs = [utils.filter_str_uni(x)
            for x in client_mac_addrs]
    else:
        client_mac_addrs = None
    client_auth_token = key_data['token'].decode('utf-8')
    client_auth_nonce = utils.filter_str_uni(key_data['nonce'])
    client_auth_password = key_data['password'].decode('utf-8')
    client_auth_timestamp = int(key_data['timestamp'])
    client_wg_public_key = key_data['wg_public_key'].decode('utf-8')
    if len(client_wg_public_key) < 32:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Public key too short',
        )
        return flask.abort(416)
    # Normalize the key: must decode to exactly 32 raw bytes.
    try:
        client_wg_public_key = base64.b64decode(client_wg_public_key)
        if len(client_wg_public_key) != 32:
            raise ValueError('Invalid length')
        client_wg_public_key = base64.b64encode(client_wg_public_key)
    except:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            event_long='Public key invalid',
        )
        return flask.abort(417)

    instance = server.get_instance(server_id)
    if not instance or instance.state != 'running':
        return flask.abort(429)

    if not instance.server.wg:
        return flask.abort(429)

    # A WireGuard public key may only ever be registered once.
    wg_keys_collection = mongo.get_collection('wg_keys')
    try:
        wg_keys_collection.insert({
            '_id': client_wg_public_key,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        journal.entry(
            journal.USER_WG_FAILURE,
            usr.journal_data,
            remote_address=remote_addr,
            wg_public_key=client_wg_public_key,
            event_long='Duplicate wg public key',
        )
        return flask.abort(413)

    clients = instance.instance_com.clients

    # connect_wg completes asynchronously; block on the callback event.
    event = threading.Event()
    send_data = {
        'allow': None,
        'configuration': None,
        'reason': None,
    }

    def callback(allow, data):
        send_data['allow'] = allow
        if allow:
            send_data['configuration'] = data
        else:
            send_data['reason'] = data
        event.set()

    clients.connect_wg(
        user=usr,
        org=org,
        wg_public_key=client_wg_public_key,
        auth_password=client_auth_password,
        auth_token=client_auth_token,
        auth_nonce=client_auth_nonce,
        auth_timestamp=client_auth_timestamp,
        platform=client_platform,
        device_id=client_device_id,
        device_name=client_device_name,
        mac_addr=client_mac_addr,
        mac_addrs=client_mac_addrs,
        remote_ip=remote_addr,
        connect_callback=callback,
    )

    event.wait()

    # Box the response back to the client; strip the nonce prefix since
    # it is sent separately.
    send_nonce = nacl.utils.random(nacl.public.Box.NONCE_SIZE)
    nacl_box = nacl.public.Box(priv_key, sender_pub_key)
    send_cipher_data = nacl_box.encrypt(
        json.dumps(send_data), send_nonce)
    send_cipher_data = send_cipher_data[nacl.public.Box.NONCE_SIZE:]

    send_nonce64 = base64.b64encode(send_nonce)
    send_cipher_data64 = base64.b64encode(send_cipher_data)

    usr.audit_event(
        'user_profile',
        'User retrieved wg public key from pritunl client',
        remote_addr=remote_addr,
    )

    journal.entry(
        journal.USER_WG_SUCCESS,
        usr.journal_data,
        remote_address=remote_addr,
        event_long='User retrieved wg public key from pritunl client',
    )

    # Sign the response so the client can verify it before decrypting.
    sync_signature = base64.b64encode(hmac.new(
        usr.sync_secret.encode(),
        send_cipher_data64 + '&' + send_nonce64,
        hashlib.sha512).digest())

    return utils.jsonify({
        'data': send_cipher_data64,
        'nonce': send_nonce64,
        'signature': sync_signature,
    })
def _auth_push_thread(self):
    """Run push authentication (Duo or Okta) for this connection.

    Builds a human-readable info dict for the push prompt, sends the
    push via the configured provider and, if denied, records an audit
    event and raises AuthError. A successful push is cached (when
    ``settings.app.sso_cache`` is enabled) keyed by the full device
    identity so repeat connections skip the push.

    :raises ValueError: unsupported ``self.push_type``.
    :raises AuthError: user denied or failed the push.
    """
    info = {
        'Server': self.server.name,
    }

    platform_name = None
    if self.platform == 'linux':
        platform_name = 'Linux'
    elif self.platform == 'mac' or self.platform == 'ios':
        platform_name = 'Apple'
    elif self.platform == 'win':
        platform_name = 'Windows'
    elif self.platform == 'chrome':
        platform_name = 'Chrome OS'

    if self.device_name:
        info['Device'] = '%s (%s)' % (self.device_name, platform_name)

    if self.push_type == DUO_AUTH:
        allow, _ = sso.auth_duo(
            self.user.name,
            ipaddr=self.remote_ip,
            type='Connection',
            info=info,
        )
    elif self.push_type == SAML_OKTA_AUTH:
        allow = sso.auth_okta_push(
            self.user.name,
            ipaddr=self.remote_ip,
            type='Connection',
            info=info,
        )
    else:
        # Fix: corrected typo in error message ("Unkown" -> "Unknown").
        raise ValueError('Unknown push auth type')

    if not allow:
        self.user.audit_event(
            'user_connection',
            ('User connection to "%s" denied. ' +
                'Push authentication failed') % (self.server.name),
            remote_addr=self.remote_ip,
        )
        raise AuthError('User failed push authentication')

    if settings.app.sso_cache:
        # Upsert keyed on the full device identity; a later lookup with
        # the same fields treats the push as already satisfied.
        self.sso_cache_collection.update({
            'user_id': self.user.id,
            'server_id': self.server.id,
            'remote_ip': self.remote_ip,
            'mac_addr': self.mac_addr,
            'platform': self.platform,
            'device_id': self.device_id,
            'device_name': self.device_name,
        }, {
            'user_id': self.user.id,
            'server_id': self.server.id,
            'remote_ip': self.remote_ip,
            'mac_addr': self.mac_addr,
            'platform': self.platform,
            'device_id': self.device_id,
            'device_name': self.device_name,
            'timestamp': utils.now(),
        }, upsert=True)
def ping_thread(self):
    """Generator that keeps connected clients' timestamps fresh.

    Cycles through ``clients_queue``, refreshing each client's local
    and database timestamp shortly before ``settings.vpn.client_ttl``
    expires. Yields between items so the caller's scheduler can
    interleave work; ``interrupter_sleep`` returning True (or
    ``sock_interrupt``) means the instance is shutting down. On exit the
    ``finally`` block removes all of this instance's client documents.
    """
    try:
        while True:
            try:
                try:
                    client_id = self.clients_queue.popleft()
                except IndexError:
                    # Queue empty: idle for 10s, bail if interrupted.
                    if self.interrupter_sleep(10):
                        return
                    continue

                client = self.clients.find_id(client_id)
                if not client:
                    # Client disconnected since it was queued.
                    continue

                # Seconds until this client's record needs refreshing,
                # refreshed 150s before the TTL actually expires.
                diff = settings.vpn.client_ttl - 150 - \
                    (time.time() - client['timestamp'])

                if diff > settings.vpn.client_ttl:
                    # Timestamp is in the future — clock problem.
                    logger.error(
                        'Client ping time diff out of range',
                        'server',
                        time_diff=diff,
                        server_id=self.server.id,
                        instance_id=self.instance.id,
                    )
                    if self.interrupter_sleep(10):
                        return
                elif diff > 1:
                    # Sleep until the refresh is actually due.
                    if self.interrupter_sleep(diff):
                        return

                if self.instance.sock_interrupt:
                    return

                try:
                    updated = self.clients.update_id(client_id, {
                        'timestamp': time.time(),
                    })
                    if not updated:
                        continue

                    response = self.collection.update({
                        '_id': client['doc_id'],
                    }, {
                        '$set': {
                            'timestamp': utils.now(),
                        },
                    })
                    if not response['updatedExisting']:
                        # DB document vanished: another process killed
                        # the client, so kill it locally too.
                        logger.error(
                            'Client lost unexpectedly', 'server',
                            server_id=self.server.id,
                            instance_id=self.instance.id,
                        )
                        self.instance_com.client_kill(client_id)
                        continue
                except:
                    # Re-queue and retry later on transient DB errors.
                    self.clients_queue.append(client_id)
                    logger.exception(
                        'Failed to update client', 'server',
                        server_id=self.server.id,
                        instance_id=self.instance.id,
                    )
                    yield interrupter_sleep(1)
                    continue

                # Refreshed successfully: back to the end of the queue.
                self.clients_queue.append(client_id)

                yield
                if self.instance.sock_interrupt:
                    return
            except GeneratorExit:
                raise
            except:
                logger.exception(
                    'Error in client thread', 'server',
                    server_id=self.server.id,
                    instance_id=self.instance.id,
                )
                yield interrupter_sleep(3)
                if self.instance.sock_interrupt:
                    return
    finally:
        # Best-effort removal of all client documents owned by this
        # instance so other hosts see them as disconnected.
        doc_ids = []
        for client in self.clients.find_all():
            doc_id = client.get('doc_id')
            if doc_id:
                doc_ids.append(doc_id)

        try:
            self.collection.remove({
                '_id': {'$in': doc_ids},
            })
        except:
            logger.exception(
                'Error removing client', 'server',
                server_id=self.server.id,
            )
def update_available(self, available_hosts):
    """Mark this host UNAVAILABLE when it has stopped responding.

    Two-stage demotion guarded by compare-and-set updates on the host
    document (matching the in-memory ``ping_timestamp_ttl`` so a
    concurrent ping refresh wins): if another host in
    ``available_hosts`` is still available, this host is fully demoted
    (``active`` cleared); otherwise only its status is flipped so the
    last remaining host stays active.
    """
    if self.is_available:
        return

    # Already fully demoted; nothing further to do.
    if self.status == UNAVAILABLE and not self.active:
        return

    has_available = False
    for hst in available_hosts:
        if hst.id == self.id:
            continue
        if hst.is_available:
            has_available = True
            break

    cur_timestamp = utils.now()

    if has_available:
        # Another host can take over: demote fully. The query only
        # matches if the host is still active or not yet UNAVAILABLE,
        # and the ttl matches what we last read (optimistic lock).
        response = self.collection.update({
            '_id': self.id,
            'ping_timestamp_ttl': self.ping_timestamp_ttl,
            '$or': [
                {'active': True},
                {'status': {'$ne': UNAVAILABLE}},
            ],
        }, {'$set': {
            'status': UNAVAILABLE,
            'active': False,
            'backoff_timestamp': cur_timestamp,
            'ping_timestamp_ttl': None,
        }})

        if response['updatedExisting']:
            # Mirror the database change locally.
            self.active = False
            self.status = UNAVAILABLE
            self.backoff_timestamp = cur_timestamp
            self.ping_timestamp_ttl = None

        return

    if self.status == UNAVAILABLE:
        return

    # No other host available: flip status only, keep `active` so the
    # link is not left with zero active hosts.
    response = self.collection.update({
        '_id': self.id,
        'ping_timestamp_ttl': self.ping_timestamp_ttl,
        'status': {'$ne': UNAVAILABLE},
    }, {'$set': {
        'status': UNAVAILABLE,
        'backoff_timestamp': cur_timestamp,
        'ping_timestamp_ttl': None,
    }})

    if response['updatedExisting']:
        self.status = UNAVAILABLE
        self.backoff_timestamp = cur_timestamp
        self.ping_timestamp_ttl = None
def _keep_alive_thread():
    """Generator runner that publishes this host's liveness and usage.

    Each cycle (paced by ``settings.app.host_ping``): records per-minute
    CPU/memory usage periods, refreshes the Route 53 DNS record when the
    public address or zone settings change, updates the host document
    with status/usage counters, emits HOSTS_UPDATED events on changes
    and writes a monitoring point. Exits (and deinits the host) on
    GeneratorExit; all other errors are logged and retried.
    """
    host_event = False
    last_update = None
    proc_stat = None
    settings.local.host_ping_timestamp = utils.now()

    # Cached values used to detect changes that require a DNS update.
    cur_public_ip = None
    cur_public_ip6 = None
    cur_host_name = settings.local.host.name
    cur_route53_region = settings.app.route53_region
    cur_route53_zone = settings.app.route53_zone
    auto_public_host = settings.local.host.auto_public_host
    auto_public_host6 = settings.local.host.auto_public_host6

    while True:
        try:
            # Truncate to the minute so usage periods are recorded at
            # most once per minute.
            timestamp = utils.now()
            timestamp -= datetime.timedelta(
                microseconds=timestamp.microsecond,
                seconds=timestamp.second,
            )
            if timestamp != last_update:
                last_update = timestamp

                last_proc_stat = proc_stat
                proc_stat = host.usage_utils.get_proc_stat()

                # CPU usage needs two samples; skip the first minute.
                if last_proc_stat and proc_stat:
                    cpu_usage = host.usage_utils.calc_cpu_usage(
                        last_proc_stat, proc_stat)
                    mem_usage = host.usage_utils.get_mem_usage()
                    settings.local.host.usage.add_period(
                        timestamp, cpu_usage, mem_usage)

            yield interrupter_sleep(settings.app.host_ping)

            ping_timestamp = utils.now()

            try:
                # Linux-specific: count of open file descriptors.
                open_file_count = len(os.listdir('/proc/self/fd'))
            except:
                open_file_count = 0

            cpu_usage = None
            mem_usage = None
            thread_count = threading.active_count()
            server_count = len(host.global_servers)
            device_count = host.global_clients.count({})

            try:
                cpu_usage, mem_usage = utils.get_process_cpu_mem()
            except:
                logger.exception(
                    'Failed to get process cpu and mem usage',
                    'runners',
                    host_id=settings.local.host.id,
                    host_name=settings.local.host.name,
                )

            host_name = settings.local.host.name
            route53_region = settings.app.route53_region
            route53_zone = settings.app.route53_zone

            if route53_region and route53_zone:
                # Only touch Route 53 when something relevant changed.
                if cur_public_ip != settings.local.public_ip or \
                        cur_public_ip6 != settings.local.public_ip6 or \
                        cur_host_name != host_name or \
                        cur_route53_region != route53_region or \
                        cur_route53_zone != route53_zone:
                    cur_host_name = host_name
                    cur_public_ip = settings.local.public_ip
                    cur_public_ip6 = settings.local.public_ip6
                    cur_route53_region = route53_region
                    cur_route53_zone = route53_zone

                    auto_public_host, auto_public_host6 = \
                        utils.set_zone_record(
                            route53_region,
                            route53_zone,
                            host_name,
                            cur_public_ip,
                            cur_public_ip6,
                        )

                    settings.local.host.auto_public_host = \
                        auto_public_host
                    settings.local.host.auto_public_host6 = \
                        auto_public_host6
                    host_event = True
            else:
                auto_public_host = None
                auto_public_host6 = None

            if settings.local.host.auto_public_address != \
                    settings.local.public_ip or \
                    settings.local.host.auto_public_address6 != \
                    settings.local.public_ip6:
                settings.local.host.auto_public_address = \
                    settings.local.public_ip
                settings.local.host.auto_public_address6 = \
                    settings.local.public_ip6
                host_event = True

            # Publish the heartbeat document.
            settings.local.host.collection.update({
                '_id': settings.local.host.id,
            }, {'$set': {
                'server_count': server_count,
                'device_count': device_count,
                'cpu_usage': cpu_usage,
                'mem_usage': mem_usage,
                'thread_count': thread_count,
                'open_file_count': open_file_count,
                'status': ONLINE,
                'ping_timestamp': utils.now(),
                'auto_public_address': settings.local.public_ip,
                'auto_public_address6': settings.local.public_ip6,
                'auto_public_host': auto_public_host,
                'auto_public_host6': auto_public_host6,
            }})

            if host_event:
                host_event = False
                event.Event(type=HOSTS_UPDATED)

            monitoring.insert_point('system', {
                'host': settings.local.host.name,
            }, {
                'cpu_usage': cpu_usage,
                'mem_usage': mem_usage,
                'thread_count': thread_count,
                'open_file_count': open_file_count,
            })

            settings.local.host_ping_timestamp = ping_timestamp
        except GeneratorExit:
            # Runner is shutting down: mark the host offline.
            host.deinit()
            raise
        except:
            logger.exception('Error in host keep alive update',
                'runners',
                host_id=settings.local.host.id,
                host_name=settings.local.host.name,
            )
            time.sleep(0.5)
def sso_callback_get():
    """Handle the single sign-on callback from an external provider.

    Verifies the signed callback (one-time state token + HMAC over the
    query string), then branches on the provider type (SAML, Slack,
    Google, Azure) to resolve the username, organization and group set.
    If the configured mode requires a second factor, a Duo or Yubico
    challenge page is returned instead of finishing; otherwise the user
    is validated and logged in via ``_validate_user``.
    """
    sso_mode = settings.app.sso

    if sso_mode not in (AZURE_AUTH, AZURE_DUO_AUTH, AZURE_YUBICO_AUTH,
            GOOGLE_AUTH, GOOGLE_DUO_AUTH, GOOGLE_YUBICO_AUTH,
            SLACK_AUTH, SLACK_DUO_AUTH, SLACK_YUBICO_AUTH, SAML_AUTH,
            SAML_DUO_AUTH, SAML_YUBICO_AUTH, SAML_OKTA_AUTH,
            SAML_OKTA_DUO_AUTH, SAML_OKTA_YUBICO_AUTH,
            SAML_ONELOGIN_AUTH, SAML_ONELOGIN_DUO_AUTH,
            SAML_ONELOGIN_YUBICO_AUTH):
        return flask.abort(405)

    state = flask.request.args.get('state')
    sig = flask.request.args.get('sig')

    # The state token is single use: find_and_modify removes it.
    tokens_collection = mongo.get_collection('sso_tokens')
    doc = tokens_collection.find_and_modify(query={
        '_id': state,
    }, remove=True)
    if not doc:
        return flask.abort(404)

    # Verify the HMAC over everything before '&sig=' with the secret
    # stored alongside the state token; constant-time compare.
    query = flask.request.query_string.split('&sig=')[0]
    test_sig = base64.urlsafe_b64encode(hmac.new(str(doc['secret']),
        query, hashlib.sha512).digest())
    if not utils.const_compare(sig, test_sig):
        return flask.abort(401)

    params = urlparse.parse_qs(query)

    if doc.get('type') == SAML_AUTH:
        username = params.get('username')[0]
        email = params.get('email', [None])[0]

        # Org names may be ';' or ',' separated.
        org_names = []
        if params.get('org'):
            org_names_param = params.get('org')[0]
            if ';' in org_names_param:
                org_names = org_names_param.split(';')
            else:
                org_names = org_names_param.split(',')
            org_names = [x for x in org_names if x]
        org_names = sorted(org_names)

        groups = []
        if params.get('groups'):
            groups_param = params.get('groups')[0]
            if ';' in groups_param:
                groups = groups_param.split(';')
            else:
                groups = groups_param.split(',')
            groups = [x for x in groups if x]
        groups = set(groups)

        if not username:
            return flask.abort(406)

        # First matching org wins; fall back to the configured default.
        org_id = settings.app.sso_org
        if org_names:
            not_found = False
            for org_name in org_names:
                # NOTE(review): fields=('_id') is a plain string, not a
                # tuple — presumably accepted downstream; verify.
                org = organization.get_by_name(
                    utils.filter_unicode(org_name),
                    fields=('_id'),
                )
                if org:
                    not_found = False
                    org_id = org.id
                    break
                else:
                    not_found = True

            if not_found:
                logger.warning('Supplied org names do not exists',
                    'sso',
                    sso_type=doc.get('type'),
                    user_name=username,
                    user_email=email,
                    org_names=org_names,
                )

        valid, org_id_new, groups2 = sso.plugin_sso_authenticate(
            sso_type='saml',
            user_name=username,
            user_email=email,
            remote_ip=utils.get_remote_addr(),
            sso_org_names=org_names,
        )
        if valid:
            org_id = org_id_new or org_id
        else:
            logger.error('Saml plugin authentication not valid', 'sso',
                username=username,
            )
            return flask.abort(401)
        # Merge plugin-provided groups into the SAML groups.
        groups = groups | set(groups2 or [])
    elif doc.get('type') == SLACK_AUTH:
        username = params.get('username')[0]
        email = None
        user_team = params.get('team')[0]
        org_names = params.get('orgs', [''])[0]
        org_names = sorted(org_names.split(','))

        # The Slack team must match the configured match list.
        if user_team != settings.app.sso_match[0]:
            return flask.abort(401)

        not_found = False
        org_id = settings.app.sso_org
        for org_name in org_names:
            org = organization.get_by_name(
                utils.filter_unicode(org_name),
                fields=('_id'),
            )
            if org:
                not_found = False
                org_id = org.id
                break
            else:
                not_found = True

        if not_found:
            logger.warning('Supplied org names do not exists',
                'sso',
                sso_type=doc.get('type'),
                user_name=username,
                user_email=email,
                org_names=org_names,
            )

        valid, org_id_new, groups = sso.plugin_sso_authenticate(
            sso_type='slack',
            user_name=username,
            user_email=email,
            remote_ip=utils.get_remote_addr(),
            sso_org_names=org_names,
        )
        if valid:
            org_id = org_id_new or org_id
        else:
            logger.error('Slack plugin authentication not valid', 'sso',
                username=username,
            )
            return flask.abort(401)
        groups = set(groups or [])
    elif doc.get('type') == GOOGLE_AUTH:
        username = params.get('username')[0]
        email = username

        valid, google_groups = sso.verify_google(username)
        if not valid:
            return flask.abort(401)

        org_id = settings.app.sso_org

        valid, org_id_new, groups = sso.plugin_sso_authenticate(
            sso_type='google',
            user_name=username,
            user_email=email,
            remote_ip=utils.get_remote_addr(),
        )
        if valid:
            org_id = org_id_new or org_id
        else:
            logger.error('Google plugin authentication not valid',
                'sso',
                username=username,
            )
            return flask.abort(401)
        groups = set(groups or [])

        # Google groups either map to pritunl groups or to org names,
        # depending on the configured mode.
        if settings.app.sso_google_mode == 'groups':
            groups = groups | set(google_groups)
        else:
            not_found = False
            google_groups = sorted(google_groups)
            for org_name in google_groups:
                org = organization.get_by_name(
                    utils.filter_unicode(org_name),
                    fields=('_id'),
                )
                if org:
                    not_found = False
                    org_id = org.id
                    break
                else:
                    not_found = True

            if not_found:
                logger.warning('Supplied org names do not exists',
                    'sso',
                    sso_type=doc.get('type'),
                    user_name=username,
                    user_email=email,
                    org_names=google_groups,
                )
    elif doc.get('type') == AZURE_AUTH:
        username = params.get('username')[0]
        email = None

        # Username is '<tenant>/<name>'; the tenant must match the
        # configured directory ID.
        tenant, username = username.split('/', 2)
        if tenant != settings.app.sso_azure_directory_id:
            logger.error('Azure directory ID mismatch', 'sso',
                username=username,
            )
            return flask.abort(401)

        valid, azure_groups = sso.verify_azure(username)
        if not valid:
            return flask.abort(401)

        org_id = settings.app.sso_org

        valid, org_id_new, groups = sso.plugin_sso_authenticate(
            sso_type='azure',
            user_name=username,
            user_email=email,
            remote_ip=utils.get_remote_addr(),
        )
        if valid:
            org_id = org_id_new or org_id
        else:
            logger.error('Azure plugin authentication not valid', 'sso',
                username=username,
            )
            return flask.abort(401)
        groups = set(groups or [])

        if settings.app.sso_azure_mode == 'groups':
            groups = groups | set(azure_groups)
        else:
            not_found = False
            azure_groups = sorted(azure_groups)
            for org_name in azure_groups:
                org = organization.get_by_name(
                    utils.filter_unicode(org_name),
                    fields=('_id'),
                )
                if org:
                    not_found = False
                    org_id = org.id
                    break
                else:
                    not_found = True

            if not_found:
                logger.warning('Supplied org names do not exists',
                    'sso',
                    sso_type=doc.get('type'),
                    user_name=username,
                    user_email=email,
                    org_names=azure_groups,
                )
    else:
        logger.error('Unknown sso type', 'sso',
            sso_type=doc.get('type'),
        )
        return flask.abort(401)

    # Second factor: Duo challenge page.
    if DUO_AUTH in sso_mode:
        token = utils.generate_secret()

        tokens_collection = mongo.get_collection('sso_tokens')
        tokens_collection.insert({
            '_id': token,
            'type': DUO_AUTH,
            'username': username,
            'email': email,
            'org_id': org_id,
            'groups': list(groups) if groups else None,
            'timestamp': utils.now(),
        })

        duo_page = static.StaticFile(settings.conf.www_path,
            'duo.html', cache=False, gzip=False)

        sso_duo_mode = settings.app.sso_duo_mode
        if sso_duo_mode == 'passcode':
            duo_mode = 'passcode'
        elif sso_duo_mode == 'phone':
            duo_mode = 'phone'
        else:
            duo_mode = 'push'

        body_class = duo_mode
        if settings.app.theme == 'dark':
            body_class += ' dark'

        # Template substitution on the static page.
        duo_page.data = duo_page.data.replace('<%= body_class %>',
            body_class)
        duo_page.data = duo_page.data.replace('<%= token %>', token)
        duo_page.data = duo_page.data.replace('<%= duo_mode %>',
            duo_mode)

        return duo_page.get_response()

    # Second factor: Yubico challenge page.
    if YUBICO_AUTH in sso_mode:
        token = utils.generate_secret()

        tokens_collection = mongo.get_collection('sso_tokens')
        tokens_collection.insert({
            '_id': token,
            'type': YUBICO_AUTH,
            'username': username,
            'email': email,
            'org_id': org_id,
            'groups': list(groups) if groups else None,
            'timestamp': utils.now(),
        })

        yubico_page = static.StaticFile(settings.conf.www_path,
            'yubico.html', cache=False, gzip=False)

        if settings.app.theme == 'dark':
            yubico_page.data = yubico_page.data.replace(
                '<body>', '<body class="dark">')
        yubico_page.data = yubico_page.data.replace('<%= token %>',
            token)

        return yubico_page.get_response()

    return _validate_user(username, email, sso_mode, org_id, groups,
        http_redirect=True)
def _run_thread(self, send_events):
    """Main lifecycle thread for a server instance.

    Acquires resources, sets up networking (ip forwarding, bridge,
    optional vxlan, iptables, route advertisements), starts the OpenVPN
    process and supporting threads, announces 'started', then blocks in
    ``openvpn_watch`` until the process exits. Cleanup is layered in
    try/finally blocks so rules, threads, the instance document entry
    and resource locks are released even on error.

    :param send_events: emit SERVERS/HOSTS/USERS updated events once
        the instance is running.
    """
    from pritunl.server.utils import get_by_id

    logger.info('Starting vpn server', 'server',
        server_id=self.server.id,
        instance_id=self.id,
        network=self.server.network,
        network6=self.server.network6,
        host_address=settings.local.host.local_addr,
        host_address6=settings.local.host.local_addr6,
        host_networks=settings.local.host.local_networks,
        cur_timestamp=utils.now(),
    )

    self.resources_acquire()

    try:
        cursor_id = self.get_cursor_id()

        os.makedirs(self._temp_path)

        self.enable_ip_forwarding()
        self.bridge_start()

        # vxlan is best-effort: a failure is logged, not fatal.
        if self.server.replicating and self.server.vxlan:
            try:
                self.vxlan = vxlan.get_vxlan(self.server.id)
                self.vxlan.start()
            except:
                logger.exception('Failed to setup server vxlan',
                    'vxlan',
                    server_id=self.server.id,
                    instance_id=self.id,
                )

        self.generate_ovpn_conf()

        self.generate_iptables_rules()
        self.iptables.upsert_rules()

        self.init_route_advertisements()

        self.process = self.openvpn_start()
        self.start_threads(cursor_id)

        self.instance_com = ServerInstanceCom(self.server, self)
        self.instance_com.start()

        self.publish('started')

        if send_events:
            event.Event(type=SERVERS_UPDATED)
            event.Event(type=SERVER_HOSTS_UPDATED,
                resource_id=self.server.id)
            for org_id in self.server.organizations:
                event.Event(type=USERS_UPDATED, resource_id=org_id)

        # Only the side with the larger id initiates each link so a
        # link is not started twice.
        for link_doc in self.server.links:
            if self.server.id > link_doc['server_id']:
                instance_link = ServerInstanceLink(
                    server=self.server,
                    linked_server=get_by_id(link_doc['server_id']),
                )
                self.server_links.append(instance_link)
                instance_link.start()

        plugins.caller(
            'server_start',
            host_id=settings.local.host_id,
            host_name=settings.local.host.name,
            server_id=self.server.id,
            server_name=self.server.name,
            port=self.server.port,
            protocol=self.server.protocol,
            ipv6=self.server.ipv6,
            ipv6_firewall=self.server.ipv6_firewall,
            network=self.server.network,
            network6=self.server.network6,
            network_mode=self.server.network_mode,
            network_start=self.server.network_start,
            network_stop=self.server.network_end,
            restrict_routes=self.server.restrict_routes,
            bind_address=self.server.bind_address,
            onc_hostname=self.server.onc_hostname,
            dh_param_bits=self.server.dh_param_bits,
            multi_device=self.server.multi_device,
            dns_servers=self.server.dns_servers,
            search_domain=self.server.search_domain,
            otp_auth=self.server.otp_auth,
            cipher=self.server.cipher,
            hash=self.server.hash,
            inter_client=self.server.inter_client,
            ping_interval=self.server.ping_interval,
            ping_timeout=self.server.ping_timeout,
            link_ping_interval=self.server.link_ping_interval,
            link_ping_timeout=self.server.link_ping_timeout,
            allowed_devices=self.server.allowed_devices,
            max_clients=self.server.max_clients,
            replica_count=self.server.replica_count,
            dns_mapping=self.server.dns_mapping,
            debug=self.server.debug,
            interface=self.interface,
            bridge_interface=self.bridge_interface,
            vxlan=self.vxlan,
        )

        try:
            # Blocks until the OpenVPN process exits.
            self.openvpn_watch()
        finally:
            # Always notify plugins of the stop, mirroring the start
            # call's arguments.
            plugins.caller(
                'server_stop',
                host_id=settings.local.host_id,
                host_name=settings.local.host.name,
                server_id=self.server.id,
                server_name=self.server.name,
                port=self.server.port,
                protocol=self.server.protocol,
                ipv6=self.server.ipv6,
                ipv6_firewall=self.server.ipv6_firewall,
                network=self.server.network,
                network6=self.server.network6,
                network_mode=self.server.network_mode,
                network_start=self.server.network_start,
                network_stop=self.server.network_end,
                restrict_routes=self.server.restrict_routes,
                bind_address=self.server.bind_address,
                onc_hostname=self.server.onc_hostname,
                dh_param_bits=self.server.dh_param_bits,
                multi_device=self.server.multi_device,
                dns_servers=self.server.dns_servers,
                search_domain=self.server.search_domain,
                otp_auth=self.server.otp_auth,
                cipher=self.server.cipher,
                hash=self.server.hash,
                inter_client=self.server.inter_client,
                ping_interval=self.server.ping_interval,
                ping_timeout=self.server.ping_timeout,
                link_ping_interval=self.server.link_ping_interval,
                link_ping_timeout=self.server.link_ping_timeout,
                allowed_devices=self.server.allowed_devices,
                max_clients=self.server.max_clients,
                replica_count=self.server.replica_count,
                dns_mapping=self.server.dns_mapping,
                debug=self.server.debug,
                interface=self.interface,
                bridge_interface=self.bridge_interface,
                vxlan=self.vxlan,
            )

        self.interrupt = True
        self.bridge_stop()
        self.iptables.clear_rules()

        if not self.clean_exit:
            event.Event(type=SERVERS_UPDATED)
            self.server.send_link_events()
            logger.LogEntry(
                message='Server stopped unexpectedly "%s".' % (
                    self.server.name))
    except:
        try:
            self.interrupt = True
            self.stop_process()
        except:
            logger.exception('Server stop error', 'server',
                server_id=self.server.id,
                instance_id=self.id,
            )

        logger.exception('Server error occurred while running',
            'server',
            server_id=self.server.id,
            instance_id=self.id,
        )
    finally:
        # Each cleanup stage is isolated so one failure does not stop
        # the rest.
        try:
            if self.resource_lock:
                self.bridge_stop()
                self.iptables.clear_rules()
        except:
            logger.exception('Server resource error', 'server',
                server_id=self.server.id,
                instance_id=self.id,
            )

        try:
            self.stop_threads()
            # Remove this instance from the server document and
            # decrement the running-instance counter.
            self.collection.update({
                '_id': self.server.id,
                'instances.instance_id': self.id,
            }, {
                '$pull': {
                    'instances': {
                        'instance_id': self.id,
                    },
                },
                '$inc': {
                    'instances_count': -1,
                },
            })
            utils.rmtree(self._temp_path)
        except:
            logger.exception('Server clean up error', 'server',
                server_id=self.server.id,
                instance_id=self.id,
            )

        try:
            self.resources_release()
        except:
            logger.exception('Failed to release resources', 'server',
                server_id=self.server.id,
                instance_id=self.id,
            )
def link_state_put():
    """Handle a state update from a link host and return its config.

    The request is authenticated with an HMAC over the auth headers,
    method and path using the host's shared secret, with nonce-based
    replay protection. The host's reported addresses are stored, then
    its link state is serialized, AES-256-CBC encrypted with a key
    derived from the shared secret and returned base64-encoded with the
    IV and an HMAC signature in response headers.
    """
    if settings.app.demo_mode:
        return utils.demo_blocked()

    auth_token = flask.request.headers.get('Auth-Token', None)
    auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
    auth_nonce = flask.request.headers.get('Auth-Nonce', None)
    auth_signature = flask.request.headers.get('Auth-Signature', None)
    if not auth_token or not auth_timestamp or not auth_nonce or \
            not auth_signature:
        return flask.abort(406)
    auth_nonce = auth_nonce[:32]

    try:
        # Reject requests outside the allowed clock-skew window.
        if abs(int(auth_timestamp) - int(utils.time_now())) > \
                settings.app.auth_time_window:
            return flask.abort(408)
    except ValueError:
        return flask.abort(405)

    # The auth token is the link host's ObjectId.
    host = link.get_host(utils.ObjectId(auth_token))
    if not host:
        return flask.abort(404)

    auth_string = '&'.join([
        auth_token, auth_timestamp, auth_nonce, flask.request.method,
        flask.request.path,
    ])

    if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
        return flask.abort(413)

    auth_test_signature = base64.b64encode(hmac.new(
        host.secret.encode(), auth_string,
        hashlib.sha512).digest())
    # Constant-time compare to avoid timing side channels.
    if not utils.const_compare(auth_signature, auth_test_signature):
        return flask.abort(401)

    # Replay protection: (token, nonce) must be unique.
    nonces_collection = mongo.get_collection('auth_nonces')
    try:
        nonces_collection.insert({
            'token': auth_token,
            'nonce': auth_nonce,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        return flask.abort(409)

    host.load_link()

    host.version = flask.request.json.get('version')
    host.public_address = flask.request.json.get('public_address')
    host.local_address = flask.request.json.get('local_address')
    host.address6 = flask.request.json.get('address6')

    data = json.dumps(host.get_state(), default=lambda x: str(x))
    # Manual zero padding to the AES block size.
    # NOTE(review): receiver presumably strips trailing NULs — verify
    # against the link client's decrypt path.
    data += (16 - len(data) % 16) * '\x00'

    iv = os.urandom(16)
    # AES-256 key derived from the shared secret.
    key = hashlib.sha256(host.secret).digest()
    cipher = Cipher(algorithms.AES(key),
        modes.CBC(iv), backend=default_backend()).encryptor()
    enc_data = base64.b64encode(cipher.update(data) + cipher.finalize())

    # Sign the ciphertext so the client can authenticate it.
    enc_signature = base64.b64encode(hmac.new(
        host.secret.encode(), enc_data,
        hashlib.sha512).digest())

    resp = flask.Response(response=enc_data,
        mimetype='application/base64')
    resp.headers.add('Cache-Control',
        'no-cache, no-store, must-revalidate')
    resp.headers.add('Pragma', 'no-cache')
    resp.headers.add('Expires', 0)
    resp.headers.add('Cipher-IV', base64.b64encode(iv))
    resp.headers.add('Cipher-Signature', enc_signature)

    return resp
def sso_callback_get():
    """Single sign-on callback endpoint.

    Validates the signed callback from the auth server, runs the
    provider-specific verification (SAML / Slack / Google), optionally
    chains a Duo or Yubico second factor, then finds or creates the
    user and redirects to a one-time profile key link.
    """
    sso_mode = settings.app.sso

    if sso_mode not in (GOOGLE_AUTH, GOOGLE_DUO_AUTH, GOOGLE_YUBICO_AUTH,
            SLACK_AUTH, SLACK_DUO_AUTH, SLACK_YUBICO_AUTH, SAML_AUTH,
            SAML_DUO_AUTH, SAML_YUBICO_AUTH, SAML_OKTA_AUTH,
            SAML_OKTA_DUO_AUTH, SAML_OKTA_YUBICO_AUTH, SAML_ONELOGIN_AUTH,
            SAML_ONELOGIN_DUO_AUTH, SAML_ONELOGIN_YUBICO_AUTH):
        return flask.abort(405)

    state = flask.request.args.get('state')
    sig = flask.request.args.get('sig')

    # Consume the one-time state token; remove=True makes replaying the
    # callback fail with 404.
    tokens_collection = mongo.get_collection('sso_tokens')
    doc = tokens_collection.find_and_modify(query={
        '_id': state,
    }, remove=True)

    if not doc:
        return flask.abort(404)

    # Verify the callback query string (everything before '&sig=')
    # against the secret stored with the state token.
    query = flask.request.query_string.split('&sig=')[0]
    test_sig = base64.urlsafe_b64encode(hmac.new(str(doc['secret']),
        query, hashlib.sha512).digest())
    # NOTE(review): plain != is not a constant-time comparison; other
    # endpoints in this file use utils.const_compare — confirm intent.
    if sig != test_sig:
        return flask.abort(401)

    params = urlparse.parse_qs(query)

    if doc.get('type') == SAML_AUTH:
        username = params.get('username')[0]
        email = params.get('email', [None])[0]
        org_name = params.get('org', [None])[0]

        if not username:
            return flask.abort(406)

        valid, org_name = sso.verify_saml(username, email, org_name)
        if not valid:
            return flask.abort(401)

        # Default org unless the provider named one that exists.
        org_id = settings.app.sso_org
        if org_name:
            org = organization.get_by_name(org_name, fields=('_id'))
            if org:
                org_id = org.id

        # Plugins may override the org and supply groups.
        valid, org_id_new, groups = sso.plugin_sso_authenticate(
            sso_type='saml',
            user_name=username,
            user_email=email,
            remote_ip=utils.get_remote_addr(),
            sso_org_names=[org_name],
        )
        if valid:
            org_id = org_id_new or org_id
        else:
            logger.error('Saml plugin authentication not valid', 'sso',
                username=username,
            )
            return flask.abort(401)
    elif doc.get('type') == SLACK_AUTH:
        username = params.get('username')[0]
        email = None
        user_team = params.get('team')[0]
        org_names = params.get('orgs', [''])[0]
        org_names = org_names.split(',')

        valid, org_name = sso.verify_slack(username, user_team, org_names)
        if not valid:
            return flask.abort(401)

        if org_name:
            org_names = [org_name]

        # First matching org name wins; otherwise the configured
        # default org.
        org_id = settings.app.sso_org
        for org_name in org_names:
            org = organization.get_by_name(org_name, fields=('_id'))
            if org:
                org_id = org.id
                break

        valid, org_id_new, groups = sso.plugin_sso_authenticate(
            sso_type='slack',
            user_name=username,
            user_email=email,
            remote_ip=utils.get_remote_addr(),
            sso_org_names=org_names,
        )
        if valid:
            org_id = org_id_new or org_id
        else:
            logger.error('Slack plugin authentication not valid', 'sso',
                username=username,
            )
            return flask.abort(401)
    else:
        # Google (and google-based modes).
        username = params.get('username')[0]
        email = username

        valid, org_name = sso.verify_google(username)
        if not valid:
            return flask.abort(401)

        org_id = settings.app.sso_org
        if org_name:
            org = organization.get_by_name(org_name, fields=('_id'))
            if org:
                org_id = org.id

        valid, org_id_new, groups = sso.plugin_sso_authenticate(
            sso_type='google',
            user_name=username,
            user_email=email,
            remote_ip=utils.get_remote_addr(),
        )
        if valid:
            org_id = org_id_new or org_id
        else:
            logger.error('Google plugin authentication not valid', 'sso',
                username=username,
            )
            return flask.abort(401)

    if DUO_AUTH in sso_mode:
        if settings.app.sso_duo_mode == 'passcode':
            # Passcode mode: stash the auth state and serve the Duo
            # passcode page; the flow resumes from that page's token.
            token = utils.generate_secret()

            tokens_collection = mongo.get_collection('sso_tokens')
            tokens_collection.insert({
                '_id': token,
                'type': DUO_AUTH,
                'username': username,
                'email': email,
                'org_id': org_id,
                'groups': groups,
                'timestamp': utils.now(),
            })

            duo_page = static.StaticFile(settings.conf.www_path,
                'duo.html', cache=False, gzip=False)

            if settings.app.theme == 'dark':
                duo_page.data = duo_page.data.replace(
                    '<body>', '<body class="dark">')
            duo_page.data = duo_page.data.replace('<%= token %>', token)

            return duo_page.get_response()
        else:
            # Push/phone mode: authenticate inline with Duo now.
            duo_auth = sso.Duo(
                username=username,
                factor=settings.app.sso_duo_mode,
                remote_ip=utils.get_remote_addr(),
                auth_type='Key',
            )
            valid = duo_auth.authenticate()
            if valid:
                valid, org_id_new, groups2 = sso.plugin_sso_authenticate(
                    sso_type='duo',
                    user_name=username,
                    user_email=email,
                    remote_ip=utils.get_remote_addr(),
                )
                if valid:
                    org_id = org_id_new or org_id
                else:
                    logger.error('Duo plugin authentication not valid',
                        'sso',
                        username=username,
                    )
                    return flask.abort(401)

                # Merge provider and Duo plugin groups; None when both
                # are empty.
                groups = ((groups or set()) | (groups2 or set())) or None
            else:
                logger.error('Duo authentication not valid', 'sso',
                    username=username,
                )
                return flask.abort(401)

    if YUBICO_AUTH in sso_mode:
        # Yubico second factor: stash state and serve the key page.
        token = utils.generate_secret()

        tokens_collection = mongo.get_collection('sso_tokens')
        tokens_collection.insert({
            '_id': token,
            'type': YUBICO_AUTH,
            'username': username,
            'email': email,
            'org_id': org_id,
            'groups': groups,
            'timestamp': utils.now(),
        })

        yubico_page = static.StaticFile(settings.conf.www_path,
            'yubico.html', cache=False, gzip=False)

        if settings.app.theme == 'dark':
            yubico_page.data = yubico_page.data.replace(
                '<body>', '<body class="dark">')
        yubico_page.data = yubico_page.data.replace('<%= token %>', token)

        return yubico_page.get_response()

    org = organization.get_by_id(org_id)
    if not org:
        return flask.abort(405)

    usr = org.find_user(name=username)
    if not usr:
        usr = org.new_user(name=username, email=email, type=CERT_CLIENT,
            auth_type=sso_mode,
            groups=list(groups) if groups else None)
        usr.audit_event('user_created',
            'User created with single sign-on',
            remote_addr=utils.get_remote_addr())

        event.Event(type=ORGS_UPDATED)
        event.Event(type=USERS_UPDATED, resource_id=org.id)
        event.Event(type=SERVERS_UPDATED)
    else:
        if usr.disabled:
            return flask.abort(403)

        # Union any newly reported groups into the existing user.
        if groups and groups - set(usr.groups or []):
            usr.groups = list(set(usr.groups or []) | groups)
            usr.commit('groups')

        if usr.auth_type != sso_mode:
            usr.auth_type = sso_mode
            usr.commit('auth_type')

    key_link = org.create_user_key_link(usr.id, one_time=True)

    usr.audit_event('user_profile',
        'User profile viewed from single sign-on',
        remote_addr=utils.get_remote_addr(),
    )

    return utils.redirect(utils.get_url_root() + key_link['view_url'])
def key_sync_get(org_id, user_id, server_id, key_hash):
    """Signed profile-sync endpoint for the pritunl client.

    Verifies HMAC-SHA512 signed request headers against the user's sync
    secret, applies nonce replay protection, and returns the signed
    profile configuration as JSON. All auth failures respond 401 to
    avoid leaking which check failed.
    """
    if not settings.user.conf_sync:
        return utils.jsonify({})

    if not settings.local.sub_active:
        # 480: subscription inactive.
        return utils.jsonify({}, status_code=480)

    # Random delay to blunt timing probes against the checks below.
    utils.rand_sleep()

    auth_token = flask.request.headers.get('Auth-Token', None)
    auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
    auth_nonce = flask.request.headers.get('Auth-Nonce', None)
    auth_signature = flask.request.headers.get('Auth-Signature', None)
    if not auth_token or not auth_timestamp or not auth_nonce or \
            not auth_signature:
        return flask.abort(401)
    auth_nonce = auth_nonce[:32]

    # Reject requests outside the allowed clock-skew window.
    try:
        if abs(int(auth_timestamp) - int(utils.time_now())) > \
                settings.app.auth_time_window:
            return flask.abort(401)
    except ValueError:
        return flask.abort(401)

    org = organization.get_by_id(org_id)
    if not org:
        return flask.abort(401)

    user = org.get_user(id=user_id)
    if not user:
        return flask.abort(401)
    elif not user.sync_secret:
        return flask.abort(401)

    if auth_token != user.sync_token:
        return flask.abort(401)

    if user.disabled:
        return flask.abort(403)

    auth_string = '&'.join([
        user.sync_token, auth_timestamp, auth_nonce,
        flask.request.method, flask.request.path])

    if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
        return flask.abort(401)

    auth_test_signature = base64.b64encode(hmac.new(
        user.sync_secret.encode(), auth_string,
        hashlib.sha512).digest())
    # Fix: constant-time signature comparison (was a plain != which
    # leaks how many leading bytes match via response timing) —
    # consistent with the const_compare use in the other signed
    # endpoints in this file.
    if not utils.const_compare(auth_signature, auth_test_signature):
        return flask.abort(401)

    # Replay protection: duplicate (token, nonce) insert fails.
    nonces_collection = mongo.get_collection('auth_nonces')
    try:
        nonces_collection.insert({
            'token': auth_token,
            'nonce': auth_nonce,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        return flask.abort(401)

    key_conf = user.sync_conf(server_id, key_hash)
    if key_conf:
        user.audit_event('user_profile',
            'User profile synced from pritunl client',
            remote_addr=utils.get_remote_addr(),
        )

        # Sign the returned conf so the client can verify integrity.
        sync_signature = base64.b64encode(hmac.new(
            user.sync_secret.encode(), key_conf['conf'],
            hashlib.sha512).digest())

        return utils.jsonify({
            'signature': sync_signature,
            'conf': key_conf['conf'],
        })

    return utils.jsonify({})
def get_period(self, period):
    """Return zero-filled cpu/mem usage time series for this host.

    Args:
        period: bucket size, one of '1m', '5m', '30m', '2h', '1d'.

    Returns:
        dict with 'cpu' and 'mem' lists of (unix_timestamp, value)
        pairs; gaps in the stored samples are padded with zeros.

    Raises:
        ValueError: when period is not a recognized bucket size.
    """
    # Lookback span and sample step per period bucket.
    periods = {
        '1m': (datetime.timedelta(hours=6), datetime.timedelta(minutes=1)),
        '5m': (datetime.timedelta(days=1), datetime.timedelta(minutes=5)),
        '30m': (datetime.timedelta(days=7),
            datetime.timedelta(minutes=30)),
        '2h': (datetime.timedelta(days=30), datetime.timedelta(hours=2)),
        '1d': (datetime.timedelta(days=365), datetime.timedelta(days=1)),
    }
    try:
        date_range, date_step = periods[period]
    except KeyError:
        # Fix: an unrecognized period previously fell through the
        # if/elif chain and crashed with NameError on date_start; fail
        # explicitly instead.
        raise ValueError('Unknown usage period %r' % period)

    date_end = usage_utils.get_period_timestamp(period, utils.now())
    date_start = date_end - date_range
    date_cur = date_start

    data = {
        'cpu': [],
        'mem': [],
    }

    # Average each stored bucket (cpu/count, mem/count) and iterate in
    # ascending time order.
    results = self.collection.aggregate([{
        '$match': {
            'host_id': self.host_id,
            'period': period,
        }
    }, {
        '$project': {
            'timestamp': True,
            'cpu': {
                '$divide': ['$cpu', '$count']
            },
            'mem': {
                '$divide': ['$mem', '$count']
            },
        }
    }, {
        '$sort': {
            'timestamp': pymongo.ASCENDING,
        }
    }])

    for doc in results:
        if date_cur > doc['timestamp']:
            # Sample predates the window; skip it.
            continue

        # Zero-fill any missing buckets before this sample.
        while date_cur < doc['timestamp']:
            timestamp = int(date_cur.strftime('%s'))
            data['cpu'].append((timestamp, 0))
            data['mem'].append((timestamp, 0))
            date_cur += date_step

        timestamp = int(doc['timestamp'].strftime('%s'))
        data['cpu'].append((timestamp, doc['cpu']))
        data['mem'].append((timestamp, doc['mem']))
        date_cur += date_step

    # Zero-fill out to the end of the window.
    while date_cur <= date_end:
        timestamp = int(date_cur.strftime('%s'))
        data['cpu'].append((timestamp, 0))
        data['mem'].append((timestamp, 0))
        date_cur += date_step

    return data
def key_sync_get(org_id, user_id, server_id, key_hash):
    """Signed profile-sync endpoint (plain-response variant).

    Verifies HMAC-SHA256 signed request headers against the user's sync
    secret (signing the request body as well when present), applies
    nonce replay protection, and returns the raw profile configuration.
    """
    # Random delay to blunt timing probes against the checks below.
    utils.rand_sleep()

    if not settings.local.sub_active:
        # 480: subscription inactive.
        return utils.response('', status_code=480)

    auth_token = flask.request.headers.get('Auth-Token', None)
    auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
    auth_nonce = flask.request.headers.get('Auth-Nonce', None)
    auth_signature = flask.request.headers.get('Auth-Signature', None)
    if not auth_token or not auth_timestamp or not auth_nonce or \
            not auth_signature:
        return flask.abort(401)
    auth_nonce = auth_nonce[:32]

    # Reject requests outside the allowed clock-skew window.
    try:
        if abs(int(auth_timestamp) - int(utils.time_now())) > \
                settings.app.auth_time_window:
            return flask.abort(401)
    except ValueError:
        return flask.abort(401)

    org = organization.get_by_id(org_id)
    if not org:
        return flask.abort(404)

    user = org.get_user(id=user_id)
    if not user:
        return flask.abort(404)
    elif not user.sync_secret:
        return flask.abort(404)

    # Request body (when present) is included in the signed string.
    auth_string = '&'.join([
        auth_token, auth_timestamp, auth_nonce, flask.request.method,
        flask.request.path] +
        ([flask.request.data] if flask.request.data else []))

    if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
        return flask.abort(401)

    auth_test_signature = base64.b64encode(hmac.new(
        user.sync_secret.encode(), auth_string,
        hashlib.sha256).digest())
    # Fix: constant-time signature comparison (was a plain != which
    # leaks how many leading bytes match via response timing) —
    # consistent with the const_compare use elsewhere in this file.
    if not utils.const_compare(auth_signature, auth_test_signature):
        return flask.abort(401)

    # Replay protection; w=0 keeps the insert fire-and-forget.
    nonces_collection = mongo.get_collection('auth_nonces')
    try:
        nonces_collection.insert({
            'token': auth_token,
            'nonce': auth_nonce,
            'timestamp': utils.now(),
        }, w=0)
    except pymongo.errors.DuplicateKeyError:
        return flask.abort(401)

    key_conf = user.sync_conf(server_id, key_hash)
    if key_conf:
        return utils.response(key_conf['conf'])
    return utils.response('')
def auth_user_post():
    """Token-authenticated user provisioning endpoint.

    Verifies an HMAC-SHA256 signed request against the organization's
    auth secret, replaces any existing user with the given name, adds
    optional network links and returns the key configuration for every
    server in the organization. All auth failures return the same
    generic 401 payload to avoid leaking which check failed.
    """
    if settings.app.demo_mode:
        return utils.demo_blocked()

    auth_token = flask.request.headers.get('Auth-Token', None)
    auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
    auth_nonce = flask.request.headers.get('Auth-Nonce', None)
    auth_signature = flask.request.headers.get('Auth-Signature', None)
    if not auth_token or not auth_timestamp or not auth_nonce or \
            not auth_signature:
        return utils.jsonify({
            'error': AUTH_INVALID,
            'error_msg': AUTH_INVALID_MSG,
        }, 401)
    auth_nonce = auth_nonce[:32]

    # Reject requests outside the allowed clock-skew window.
    try:
        if abs(int(auth_timestamp) - int(utils.time_now())) > \
                settings.app.auth_time_window:
            return utils.jsonify({
                'error': AUTH_INVALID,
                'error_msg': AUTH_INVALID_MSG,
            }, 401)
    except ValueError:
        return utils.jsonify({
            'error': AUTH_INVALID,
            'error_msg': AUTH_INVALID_MSG,
        }, 401)

    org = organization.get_by_token(auth_token)
    if not org:
        return utils.jsonify({
            'error': AUTH_INVALID,
            'error_msg': AUTH_INVALID_MSG,
        }, 401)

    # Request body (when present) is included in the signed string.
    auth_string = '&'.join([
        auth_token, auth_timestamp, auth_nonce, flask.request.method,
        flask.request.path] +
        ([flask.request.data] if flask.request.data else []))

    if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
        return utils.jsonify({
            'error': AUTH_INVALID,
            'error_msg': AUTH_INVALID_MSG,
        }, 401)

    # Refuse trivially weak org secrets outright.
    if not org.auth_secret or len(org.auth_secret) < 8:
        return utils.jsonify({
            'error': AUTH_INVALID,
            'error_msg': AUTH_INVALID_MSG,
        }, 401)

    auth_test_signature = base64.b64encode(hmac.new(
        org.auth_secret.encode(), auth_string,
        hashlib.sha256).digest())
    # Fix: constant-time signature comparison (was a plain != which
    # leaks how many leading bytes match via response timing) —
    # consistent with the const_compare use elsewhere in this file.
    if not utils.const_compare(auth_signature, auth_test_signature):
        return utils.jsonify({
            'error': AUTH_INVALID,
            'error_msg': AUTH_INVALID_MSG,
        }, 401)

    # Replay protection: duplicate (token, nonce) insert fails.
    try:
        org.nonces_collection.insert({
            'token': auth_token,
            'nonce': auth_nonce,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        return utils.jsonify({
            'error': AUTH_INVALID,
            'error_msg': AUTH_INVALID_MSG,
        }, 401)

    username = flask.request.json['username']
    network_links = flask.request.json.get('network_links')

    # Replace any existing user of the same name.
    usr = org.find_user(name=username)
    if usr:
        usr.remove()

    usr = org.new_user(name=username, type=CERT_CLIENT)
    usr.audit_event('user_created',
        'User created with authentication token',
        remote_addr=utils.get_remote_addr())

    if network_links:
        for network_link in network_links:
            try:
                usr.add_network_link(network_link, force=True)
            except (ipaddress.AddressValueError, ValueError):
                return _network_link_invalid()

    event.Event(type=ORGS_UPDATED)
    event.Event(type=USERS_UPDATED, resource_id=org.id)
    event.Event(type=SERVERS_UPDATED)

    # Build and return the key conf for every server in the org.
    keys = {}
    for svr in org.iter_servers():
        key = usr.build_key_conf(svr.id)
        keys[key['name']] = key['conf']

    return utils.jsonify(keys)
def _connected(self, client_id):
    """Finalize a client connection after OpenVPN reports it connected.

    Applies the client's firewall rules, records the connection document
    in the clients collection, optionally announces the route to other
    hosts, and queues the client for maintenance.
    """
    client = self.clients.find_id(client_id)
    if not client:
        # Connection event for a client we never registered; kill it.
        self.instance_com.push_output(
            'ERROR Unknown client connected client_id=%s' % client_id)
        self.instance_com.client_kill(client_id)
        return

    self.set_iptables_rules(
        client['iptables_rules'],
        client['ip6tables_rules'],
    )

    timestamp = utils.now()
    doc = {
        'user_id': client['user_id'],
        'server_id': self.server.id,
        'host_id': settings.local.host_id,
        'timestamp': timestamp,
        'platform': client['platform'],
        'type': client['user_type'],
        'device_name': client['device_name'],
        'mac_addr': client['mac_addr'],
        'network': self.server.network,
        'real_address': client['real_address'],
        'virt_address': client['virt_address'],
        'virt_address6': client['virt_address6'],
        'host_address': settings.local.host.local_addr,
        'host_address6': settings.local.host.local_addr6,
        'dns_servers': client['dns_servers'],
        'dns_suffix': client['dns_suffix'],
        'connected_since': int(timestamp.strftime('%s')),
    }

    if settings.local.sub_active and \
            settings.local.sub_plan == 'enterprise':
        # MD5 of "<user>.<org>" stored as a BSON MD5 binary; used as an
        # identifier, not for security.
        domain_hash = hashlib.md5()
        domain_hash.update(
            (client['user_name'].split('@')[0] + '.' +
                client['org_name']).lower())
        domain_hash = bson.binary.Binary(domain_hash.digest(),
            subtype=bson.binary.MD5_SUBTYPE)
        doc['domain'] = domain_hash

    try:
        doc_id = self.collection.insert(doc)
        if self.route_clients:
            # Announce the client's addresses so other hosts can route
            # to it through this host.
            messenger.publish('client', {
                'state': True,
                'virt_address': client['virt_address'],
                'virt_address6': client['virt_address6'],
                'host_address': settings.local.host.local_addr,
                'host_address6': settings.local.host.local_addr6,
            })
    except:
        # Best-effort: on any failure recording the connection, kill
        # the client rather than leave it half-tracked.
        logger.exception('Error adding client', 'server',
            server_id=self.server.id,
        )
        self.instance_com.client_kill(client_id)
        return

    self.clients.update_id(client_id, {
        'doc_id': doc_id,
        'timestamp': time.time(),
    })

    self.clients_queue.append(client_id)

    self.instance_com.push_output(
        'User connected user_id=%s' % client['user_id'])
    self.send_event()
def key_sync_get(org_id, user_id, server_id, key_hash):
    """Signed profile-sync endpoint used by the pritunl client.

    Verifies HMAC-SHA512 signed request headers against the user's sync
    secret, guards against replay with a nonce collection and returns
    the signed profile configuration. Each failure class is recorded in
    the journal and mapped to a distinct HTTP status code.
    """
    remote_addr = utils.get_remote_addr()

    if not settings.user.conf_sync:
        return utils.jsonify({})

    if not settings.local.sub_active:
        # 480: subscription inactive.
        return utils.jsonify({}, status_code=480)

    # Random delay to blunt timing probes against the checks below.
    utils.rand_sleep()

    auth_token = flask.request.headers.get('Auth-Token', None)
    auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
    auth_nonce = flask.request.headers.get('Auth-Nonce', None)
    auth_signature = flask.request.headers.get('Auth-Signature', None)
    if not auth_token or not auth_timestamp or not auth_nonce or \
            not auth_signature:
        journal.entry(
            journal.USER_SYNC_FAILURE,
            remote_address=remote_addr,
            event_long='Missing auth header',
        )
        return flask.abort(406)
    auth_nonce = auth_nonce[:32]

    # Reject requests outside the allowed clock-skew window.
    try:
        if abs(int(auth_timestamp) - int(utils.time_now())) > \
                settings.app.auth_time_window:
            journal.entry(
                journal.USER_SYNC_FAILURE,
                remote_address=remote_addr,
                event_long='Expired auth timestamp',
            )
            return flask.abort(408)
    except ValueError:
        journal.entry(
            journal.USER_SYNC_FAILURE,
            remote_address=remote_addr,
            event_long='Invalid auth timestamp',
        )
        return flask.abort(405)

    org = organization.get_by_id(org_id)
    if not org:
        journal.entry(
            journal.USER_SYNC_FAILURE,
            remote_address=remote_addr,
            event_long='Organization not found',
        )
        return flask.abort(404)

    usr = org.get_user(id=user_id)
    if not usr:
        journal.entry(
            journal.USER_SYNC_FAILURE,
            remote_address=remote_addr,
            event_long='User not found',
        )
        return flask.abort(404)
    elif not usr.sync_secret:
        journal.entry(
            journal.USER_SYNC_FAILURE, usr.journal_data,
            remote_address=remote_addr,
            event_long='User missing sync secret',
        )
        return flask.abort(410)

    if auth_token != usr.sync_token:
        journal.entry(
            journal.USER_SYNC_FAILURE, usr.journal_data,
            remote_address=remote_addr,
            event_long='Sync token mismatch',
        )
        return flask.abort(410)

    if usr.disabled:
        journal.entry(
            journal.USER_SYNC_FAILURE, usr.journal_data,
            remote_address=remote_addr,
            event_long='User disabled',
        )
        return flask.abort(403)

    auth_string = '&'.join([
        usr.sync_token, auth_timestamp, auth_nonce,
        flask.request.method, flask.request.path])

    if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
        journal.entry(
            journal.USER_SYNC_FAILURE, usr.journal_data,
            remote_address=remote_addr,
            event_long='Auth string len limit exceeded',
        )
        return flask.abort(413)

    auth_test_signature = base64.b64encode(hmac.new(
        usr.sync_secret.encode(), auth_string,
        hashlib.sha512).digest())
    # Constant-time comparison so signature bytes cannot be probed via
    # response timing.
    if not utils.const_compare(auth_signature, auth_test_signature):
        journal.entry(
            journal.USER_SYNC_FAILURE, usr.journal_data,
            remote_address=remote_addr,
            event_long='Sync signature mismatch',
        )
        return flask.abort(401)

    # Replay protection: duplicate (token, nonce) insert fails.
    nonces_collection = mongo.get_collection('auth_nonces')
    try:
        nonces_collection.insert({
            'token': auth_token,
            'nonce': auth_nonce,
            'timestamp': utils.now(),
        })
    except pymongo.errors.DuplicateKeyError:
        journal.entry(
            journal.USER_SYNC_FAILURE, usr.journal_data,
            remote_address=remote_addr,
            event_long='Duplicate key',
        )
        return flask.abort(409)

    key_conf = usr.sync_conf(server_id, key_hash)
    if key_conf:
        usr.audit_event('user_profile',
            'User profile synced from pritunl client',
            remote_addr=remote_addr,
        )
        journal.entry(
            journal.USER_SYNC_SUCCESS, usr.journal_data,
            remote_address=remote_addr,
            event_long='User profile synced from pritunl client',
        )

        # Sign the returned conf so the client can verify integrity.
        sync_signature = base64.b64encode(hmac.new(
            usr.sync_secret.encode(), key_conf['conf'],
            hashlib.sha512).digest())

        return utils.jsonify({
            'signature': sync_signature,
            'conf': key_conf['conf'],
        })

    return utils.jsonify({})
def is_available(self):
    # Available means: marked AVAILABLE, a ping TTL has been recorded,
    # and that TTL has not yet expired.
    if self.status != AVAILABLE:
        return False
    ttl = self.ping_timestamp_ttl
    if not ttl:
        return False
    return utils.now() <= ttl
def check_session():
    """Authenticate the current request as an administrator.

    Two modes: signed API requests carrying Auth-Token/Auth-Timestamp/
    Auth-Nonce/Auth-Signature headers, or a browser cookie session.
    On success sets flask.g.administrator and returns True; otherwise
    returns False.
    """
    auth_token = flask.request.headers.get('Auth-Token', None)
    if auth_token:
        auth_timestamp = flask.request.headers.get('Auth-Timestamp', None)
        auth_nonce = flask.request.headers.get('Auth-Nonce', None)
        auth_signature = flask.request.headers.get('Auth-Signature', None)
        if not auth_token or not auth_timestamp or not auth_nonce or \
                not auth_signature:
            return False
        auth_nonce = auth_nonce[:32]

        # Reject requests outside the allowed clock-skew window.
        try:
            if abs(int(auth_timestamp) - int(utils.time_now())) > \
                    settings.app.auth_time_window:
                return False
        except ValueError:
            return False

        administrator = find_user(token=auth_token)
        if not administrator:
            return False

        # Request body (when present) is included in the signed string.
        auth_string = '&'.join([
            auth_token, auth_timestamp, auth_nonce, flask.request.method,
            flask.request.path] +
            ([flask.request.data] if flask.request.data else []))

        if len(auth_string) > AUTH_SIG_STRING_MAX_LEN:
            return False

        auth_test_signature = base64.b64encode(hmac.new(
            administrator.secret.encode(), auth_string,
            hashlib.sha256).digest())
        # Fix: constant-time signature comparison (was a plain != which
        # leaks how many leading bytes match via response timing) —
        # consistent with the const_compare use elsewhere in this file.
        if not utils.const_compare(auth_signature, auth_test_signature):
            return False

        # Replay protection; w=0 keeps the insert fire-and-forget.
        try:
            Administrator.nonces_collection.insert({
                'token': auth_token,
                'nonce': auth_nonce,
                'timestamp': utils.now(),
            }, w=0)
        except pymongo.errors.DuplicateKeyError:
            return False
    else:
        if not flask.session:
            return False

        admin_id = flask.session.get('admin_id')
        if not admin_id:
            return False
        admin_id = bson.ObjectId(admin_id)

        session_id = flask.session.get('session_id')

        administrator = get_user(admin_id, session_id)
        if not administrator:
            return False

        # Without SSL, pin the session to the source address it was
        # created from.
        if not settings.conf.ssl and flask.session.get(
                'source') != utils.get_remote_addr():
            flask.session.clear()
            return False

        # Enforce idle timeout, then refresh the activity timestamp.
        session_timeout = settings.app.session_timeout
        if session_timeout and int(utils.time_now()) - \
                flask.session['timestamp'] > session_timeout:
            flask.session.clear()
            return False

        flask.session['timestamp'] = int(utils.time_now())

    flask.g.administrator = administrator
    return True
def get_state(self):
    """Build the IPsec state document for this link host.

    Marks the host available, refreshes its ping TTL, then assembles
    the set of tunnels (links) and peer hosts this host should have
    based on the link topology and exclude lists.

    Returns:
        (state, active) tuple where active indicates this host is the
        location's active host. NOTE(review): when the link key is
        missing, the key is generated and a bare ``return`` yields
        None — callers unpacking two values would raise on that path;
        confirm intended.
    """
    self.status = AVAILABLE
    self.timestamp = utils.now()
    self.ping_timestamp_ttl = utils.now() + datetime.timedelta(
        seconds=self.timeout or settings.vpn.link_timeout)
    self.commit(('public_address', 'address6', 'local_address',
        'version', 'status', 'timestamp', 'hosts', 'hosts_hist',
        'ping_timestamp_ttl'))

    if not self.link.key:
        # First contact: generate the link pre-shared key and bail out.
        self.link.generate_key()
        self.link.commit('key')
        return

    links = []
    hosts = {}
    state = {
        'id': self.id,
        'ipv6': self.link.ipv6,
        'action': self.link.action,
        'type': self.location.type,
        'links': links,
        'hosts': hosts,
        'preferred_ike': self.link.preferred_ike,
        'preferred_esp': self.link.preferred_esp,
    }

    active_host = self.location.get_active_host()
    active = active_host and active_host.id == self.id

    loc_transit_excludes = set(self.location.transit_excludes)
    locations, locations_id, loc_excludes = self.get_state_locations()

    # Only the active host of a location gets tunnels, and only while
    # the link is online.
    if self.link.status == ONLINE and active_host and active:
        if self.link.type == DIRECT:
            # Direct link: exactly one tunnel to the active host of the
            # opposite-type location (last match wins).
            other_location = None
            for location in locations:
                if location.id == self.location.id:
                    continue
                if location.type != self.location.type:
                    other_location = location

            active_host = other_location.get_active_host()
            if active_host:
                # NOTE(review): both branches below are identical;
                # presumably the DIRECT_SERVER case was meant to
                # differ — confirm.
                if self.location.type == DIRECT_SERVER:
                    left_subnets = ['%s/32' % self.local_address]
                    right_subnets = ['%s/32' % active_host.local_address]
                else:
                    left_subnets = ['%s/32' % self.local_address]
                    right_subnets = ['%s/32' % active_host.local_address]

                links.append({
                    'id': other_location.id,
                    'static': active_host.static,
                    'pre_shared_key': self.link.key,
                    'right': active_host.address6 \
                        if self.link.ipv6 else \
                        active_host.public_address,
                    'left_subnets': left_subnets,
                    'right_subnets': right_subnets,
                })
        else:
            # Mesh link: one tunnel per non-excluded remote location
            # that has an active host.
            for location in locations:
                if location.id in loc_excludes or \
                        location.id == self.location.id:
                    continue

                active_host = location.get_active_host()
                if not active_host:
                    continue

                # Location ids excluded when paired with this remote
                # location; exclude pairs are unordered 2-tuples.
                excludes = set()
                transit_excludes = set(self.location.transit_excludes)
                for exclude in self.link.excludes:
                    if location.id not in exclude:
                        continue
                    if exclude[0] == location.id:
                        exclude_id = exclude[1]
                    else:
                        exclude_id = exclude[0]
                    excludes.add(exclude_id)

                # Left side: this location's routes, deduplicated.
                left_subnets = []
                for route in list(self.location.routes.values()):
                    if route['network'] not in left_subnets:
                        left_subnets.append(route['network'])

                # Plus routes transited through this location.
                for transit_id in self.location.transits:
                    if transit_id != self.id and \
                            transit_id in excludes and \
                            transit_id in locations_id and \
                            transit_id not in loc_transit_excludes:
                        transit_loc = locations_id[transit_id]
                        for route in list(transit_loc.routes.values()):
                            if route['network'] not in left_subnets:
                                left_subnets.append(route['network'])

                # Right side: remote location's routes.
                right_subnets = []
                for route in list(location.routes.values()):
                    right_subnets.append(route['network'])

                for transit_id in location.transits:
                    if transit_id != self.id and \
                            transit_id in loc_excludes and \
                            transit_id in locations_id and \
                            transit_id not in transit_excludes:
                        transit_loc = locations_id[transit_id]
                        for route in list(transit_loc.routes.values()):
                            # NOTE(review): membership is tested against
                            # left_subnets but the append goes to
                            # right_subnets — looks like a copy-paste
                            # bug; confirm intent before changing.
                            if route['network'] not in left_subnets:
                                right_subnets.append(route['network'])

                links.append({
                    'id': location.id,
                    'static': active_host.static,
                    'pre_shared_key': self.link.key,
                    'right': active_host.address6 \
                        if self.link.ipv6 else \
                        active_host.public_address,
                    'left_subnets': left_subnets,
                    'right_subnets': right_subnets,
                })

        if self.link.type != DIRECT:
            # Peer host address map for every non-excluded remote host.
            for location in locations:
                if location.id in loc_excludes or \
                        location.id == self.location.id:
                    continue

                for host in location.iter_hosts():
                    hosts[str(host.id)] = host.address6 if \
                        host.link.ipv6 else host.public_address

    # Stable content hashes (sorted-key JSON) used for change
    # detection on each tunnel and on the whole state.
    for lnk in links:
        link_hash = hashlib.md5(json.dumps(
            lnk,
            sort_keys=True,
            default=lambda x: str(x),
        ).encode()).hexdigest()
        lnk['hash'] = link_hash

    state['hash'] = hashlib.md5(json.dumps(
        state,
        sort_keys=True,
        default=lambda x: str(x),
    ).encode()).hexdigest()

    return state, active
def _sso_server_request(sso_type, label, token_type, callback, state,
        secret, extra_json=None):
    """POST an SSO start request to the Pritunl auth server.

    Logs and aborts (405 on auth-server 401, otherwise 500) on failure;
    on success stores the one-time state token for the callback to
    consume and returns the auth server response.
    """
    json_data = {
        'license': settings.app.license,
        'callback': callback,
        'state': state,
        'secret': secret,
    }
    if extra_json:
        json_data.update(extra_json)

    resp = requests.post(AUTH_SERVER + '/v1/request/' + sso_type,
        headers={
            'Content-Type': 'application/json',
        },
        json=json_data,
    )

    if resp.status_code != 200:
        logger.error(label + ' auth server error', 'sso',
            status_code=resp.status_code,
            content=resp.content,
        )
        if resp.status_code == 401:
            flask.abort(405)
        flask.abort(500)

    # Persist the state token so the callback can validate and consume
    # it exactly once.
    tokens_collection = mongo.get_collection('sso_tokens')
    tokens_collection.insert({
        '_id': state,
        'type': token_type,
        'secret': secret,
        'timestamp': utils.now(),
    })

    return resp

def sso_request_get():
    """Start a single sign-on flow with the configured provider.

    Refactored: the three near-identical auth-server request blocks
    (google/slack/saml) are deduplicated into _sso_server_request;
    behavior, log messages and response shapes are unchanged.
    """
    sso_mode = settings.app.sso

    if sso_mode not in (GOOGLE_AUTH, GOOGLE_DUO_AUTH, SLACK_AUTH,
            SLACK_DUO_AUTH, SAML_AUTH, SAML_DUO_AUTH, SAML_OKTA_AUTH,
            SAML_OKTA_DUO_AUTH, SAML_ONELOGIN_AUTH,
            SAML_ONELOGIN_DUO_AUTH):
        return flask.abort(405)

    state = utils.rand_str(64)
    secret = utils.rand_str(64)
    callback = utils.get_url_root() + 'sso/callback'

    if not settings.local.sub_active:
        logger.error('Subscription must be active for sso', 'sso')
        return flask.abort(405)

    # Provider checks ordered to match the original elif chain.
    if GOOGLE_AUTH in sso_mode:
        resp = _sso_server_request('google', 'Google', GOOGLE_AUTH,
            callback, state, secret)
        return utils.redirect(resp.json()['url'])
    elif SLACK_AUTH in sso_mode:
        resp = _sso_server_request('slack', 'Slack', SLACK_AUTH,
            callback, state, secret)
        return utils.redirect(resp.json()['url'])
    elif SAML_AUTH in sso_mode:
        resp = _sso_server_request('saml', 'Saml', SAML_AUTH,
            callback, state, secret, extra_json={
                'sso_url': settings.app.sso_saml_url,
                'issuer_url': settings.app.sso_saml_issuer_url,
                'cert': settings.app.sso_saml_cert,
            })
        # SAML returns an HTML document (auto-submitting form) rather
        # than a redirect URL.
        return flask.Response(
            status=200,
            response=resp.content,
            content_type="text/html;charset=utf-8",
        )
def uptime(self):
    """Return host uptime in whole seconds (minimum 1), or None when
    the host is not online or has no start timestamp."""
    if self.status != ONLINE or not self.start_timestamp:
        return
    # Fix: use total_seconds() — timedelta.seconds only carries the
    # sub-day component, so uptimes past 24 hours wrapped back toward
    # zero.
    return max(int((utils.now() - self.start_timestamp).total_seconds()), 1)
def _check_password(self):
    """Run the password/secondary-factor checks for this connection.

    Depending on configuration this verifies, in order: a Duo /
    OneLogin / Okta passcode, a YubiKey OTP, the server OTP code, and
    finally the user's pin. Successful secondary checks may be cached
    (sso_cache) so reconnects skip the provider round-trip. Raises
    AuthError when a check fails or a client challenge round is
    required; returns None when the connection may proceed.
    """
    # Stress tests and link-server connections bypass all checks.
    if settings.vpn.stress_test or self.user.link_server_id:
        return

    if self.user.bypass_secondary:
        logger.info(
            'Bypass secondary enabled, skipping password', 'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    if self.has_token:
        logger.info(
            'Client authentication cached, skipping password', 'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    if self.whitelisted:
        logger.info(
            'Client network whitelisted, skipping password', 'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    # Rate limit authentication attempts per user id.
    if not limiter.auth_check(self.user.id):
        self.user.audit_event(
            'user_connection',
            ('User connection to "%s" denied. Too many ' +
                'authentication attempts') % (self.server.name),
            remote_addr=self.remote_ip,
        )
        raise AuthError('Too many authentication attempts')

    sso_mode = settings.app.sso or ''
    duo_mode = settings.app.sso_duo_mode
    onelogin_mode = utils.get_onelogin_mode()
    okta_mode = utils.get_okta_mode()
    auth_type = self.user.auth_type or ''

    # Which passcode-style secondary factor (if any) applies to this
    # user under the current SSO configuration.
    has_duo_passcode = DUO_AUTH in sso_mode and \
        DUO_AUTH in auth_type and duo_mode == 'passcode'
    has_onelogin_passcode = SAML_ONELOGIN_AUTH == sso_mode and \
        SAML_ONELOGIN_AUTH in auth_type and onelogin_mode == 'passcode'
    has_okta_passcode = SAML_OKTA_AUTH == sso_mode and \
        SAML_OKTA_AUTH in auth_type and okta_mode == 'passcode'

    if has_duo_passcode or has_onelogin_passcode or has_okta_passcode:
        # An empty password on a challenge round when the user has a
        # pin means the client must first be challenged for the pin.
        if not self.password and self.has_challenge() and \
                self.user.has_pin():
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            self.set_challenge(None, 'Enter Pin', False)
            raise AuthError('Challenge pin')

        # A stored challenge value is prepended to the raw password.
        challenge = self.get_challenge()
        if challenge:
            self.password = challenge + self.password

        # The passcode is the trailing N characters; the remainder is
        # left in self.password for the pin check at the end.
        passcode_len = settings.app.sso_duo_passcode_length
        orig_password = self.password
        passcode = self.password[-passcode_len:]
        self.password = self.password[:-passcode_len]

        allow = False
        if settings.app.sso_cache and not self.server_auth_token:
            # Look for a cached identical passcode from the same
            # device/address; a hit refreshes the timestamp and skips
            # the provider round-trip.
            doc = self.sso_passcode_cache_collection.find_one({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': passcode,
            })
            if doc:
                self.sso_passcode_cache_collection.update(
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': passcode,
                    },
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': passcode,
                        'timestamp': utils.now(),
                    })
                allow = True
                logger.info(
                    'Authentication cached, skipping secondary passcode',
                    'sso',
                    user_name=self.user.name,
                    org_name=self.user.org.name,
                    server_name=self.server.name,
                )

        if not allow:
            # Verify the passcode with the configured provider.
            if DUO_AUTH in sso_mode:
                label = 'Duo'
                duo_auth = sso.Duo(
                    username=self.user.name,
                    factor=duo_mode,
                    remote_ip=self.remote_ip,
                    auth_type='Connection',
                    passcode=passcode,
                )
                allow = duo_auth.authenticate()
            elif SAML_ONELOGIN_AUTH == sso_mode:
                label = 'OneLogin'
                allow = sso.auth_onelogin_secondary(
                    username=self.user.name,
                    passcode=passcode,
                    remote_ip=self.remote_ip,
                    onelogin_mode=onelogin_mode,
                )
            elif SAML_OKTA_AUTH == sso_mode:
                label = 'Okta'
                allow = sso.auth_okta_secondary(
                    username=self.user.name,
                    passcode=passcode,
                    remote_ip=self.remote_ip,
                    okta_mode=okta_mode,
                )
            else:
                raise AuthError('Unknown secondary passcode challenge')

            if not allow:
                self.user.audit_event(
                    'user_connection',
                    ('User connection to "%s" denied. ' +
                        'User failed %s passcode authentication') % (
                        self.server.name, label),
                    remote_addr=self.remote_ip,
                )
                # Challenge-capable clients are re-prompted for the
                # passcode; others simply fail.
                if self.has_challenge():
                    if self.user.has_password(self.server):
                        self.set_challenge(orig_password,
                            'Enter %s Passcode' % label, True)
                    else:
                        self.set_challenge(None,
                            'Enter %s Passcode' % label, True)
                    raise AuthError('Challenge secondary passcode')
                raise AuthError('Invalid secondary passcode')

        # Record (or refresh) the successful passcode in the cache,
        # keyed on user/server/device.
        if settings.app.sso_cache and not self.server_auth_token:
            self.sso_passcode_cache_collection.update(
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'mac_addr': self.mac_addr,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                },
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'remote_ip': self.remote_ip,
                    'mac_addr': self.mac_addr,
                    'platform': self.platform,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                    'passcode': passcode,
                    'timestamp': utils.now(),
                }, upsert=True)
    elif YUBICO_AUTH in sso_mode and YUBICO_AUTH in auth_type:
        if not self.password and self.has_challenge() and \
                self.user.has_pin():
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            self.set_challenge(None, 'Enter Pin', False)
            raise AuthError('Challenge pin')

        challenge = self.get_challenge()
        if challenge:
            self.password = challenge + self.password

        # A YubiKey OTP is always the trailing 44 characters.
        orig_password = self.password
        yubikey = self.password[-44:]
        self.password = self.password[:-44]

        # Only a hash of the OTP is stored in the cache.
        yubikey_hash = hashlib.sha512()
        yubikey_hash.update(yubikey)
        yubikey_hash = base64.b64encode(yubikey_hash.digest())

        allow = False
        if settings.app.sso_cache and not self.server_auth_token:
            doc = self.sso_passcode_cache_collection.find_one({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': yubikey_hash,
            })
            if doc:
                self.sso_passcode_cache_collection.update(
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': yubikey_hash,
                    },
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': yubikey_hash,
                        'timestamp': utils.now(),
                    })
                allow = True
                logger.info(
                    'Authentication cached, skipping Yubikey', 'sso',
                    user_name=self.user.name,
                    org_name=self.user.org.name,
                    server_name=self.server.name,
                )

        if not allow:
            # The OTP must validate AND belong to this user's
            # registered YubiKey id.
            valid, yubico_id = sso.auth_yubico(yubikey)
            if yubico_id != self.user.yubico_id:
                valid = False

            if not valid:
                self.user.audit_event(
                    'user_connection',
                    ('User connection to "%s" denied. ' +
                        'User failed Yubico authentication') % (
                        self.server.name),
                    remote_addr=self.remote_ip,
                )
                if self.has_challenge():
                    if self.user.has_password(self.server):
                        self.set_challenge(orig_password, 'YubiKey', True)
                    else:
                        self.set_challenge(None, 'YubiKey', True)
                    raise AuthError('Challenge YubiKey')
                raise AuthError('Invalid YubiKey')

        if settings.app.sso_cache and not self.server_auth_token:
            self.sso_passcode_cache_collection.update(
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'mac_addr': self.mac_addr,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                },
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'remote_ip': self.remote_ip,
                    'mac_addr': self.mac_addr,
                    'platform': self.platform,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                    'passcode': yubikey_hash,
                    'timestamp': utils.now(),
                }, upsert=True)
    elif self.server.otp_auth and self.user.type == CERT_CLIENT:
        if not self.password and self.has_challenge() and \
                self.user.has_pin():
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            self.set_challenge(None, 'Enter Pin', False)
            raise AuthError('Challenge pin')

        challenge = self.get_challenge()
        if challenge:
            self.password = challenge + self.password

        # The OTP code is the trailing six digits of the password.
        orig_password = self.password
        otp_code = self.password[-6:]
        self.password = self.password[:-6]

        allow = False
        if settings.app.sso_cache and not self.server_auth_token:
            doc = self.otp_cache_collection.find_one({
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'passcode': otp_code,
            })
            if doc:
                self.otp_cache_collection.update(
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': otp_code,
                    },
                    {
                        'user_id': self.user.id,
                        'server_id': self.server.id,
                        'remote_ip': self.remote_ip,
                        'mac_addr': self.mac_addr,
                        'platform': self.platform,
                        'device_id': self.device_id,
                        'device_name': self.device_name,
                        'passcode': otp_code,
                        'timestamp': utils.now(),
                    })
                allow = True
                logger.info(
                    'Authentication cached, skipping OTP', 'sso',
                    user_name=self.user.name,
                    org_name=self.user.org.name,
                    server_name=self.server.name,
                )

        if not allow:
            if not self.user.verify_otp_code(otp_code):
                self.user.audit_event(
                    'user_connection',
                    ('User connection to "%s" denied. ' +
                        'User failed two-step authentication') % (
                        self.server.name),
                    remote_addr=self.remote_ip,
                )
                if self.has_challenge():
                    if self.user.has_password(self.server):
                        self.set_challenge(orig_password,
                            'Enter OTP Code', True)
                    else:
                        self.set_challenge(None, 'Enter OTP Code', True)
                    raise AuthError('Challenge OTP code')
                raise AuthError('Invalid OTP code')

        if settings.app.sso_cache and not self.server_auth_token:
            self.otp_cache_collection.update(
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'mac_addr': self.mac_addr,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                },
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'remote_ip': self.remote_ip,
                    'mac_addr': self.mac_addr,
                    'platform': self.platform,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                    'passcode': otp_code,
                    'timestamp': utils.now(),
                }, upsert=True)

    # Final check: whatever remains in self.password after passcode
    # stripping must match the user's pin (when one is set).
    if self.user.has_pin():
        if not self.user.check_pin(self.password):
            self.user.audit_event(
                'user_connection',
                ('User connection to "%s" denied. ' +
                    'User failed pin authentication') % (self.server.name),
                remote_addr=self.remote_ip,
            )
            if self.has_challenge():
                self.set_challenge(None, 'Enter Pin', False)
                raise AuthError('Challenge pin')
            raise AuthError('Invalid pin')
    elif settings.user.pin_mode == PIN_REQUIRED:
        self.user.audit_event(
            'user_connection',
            ('User connection to "%s" denied. ' +
                'User does not have a pin set') % (self.server.name),
            remote_addr=self.remote_ip,
        )
        raise AuthError('User does not have a pin set')
def _auth_push_thread(self):
    """Perform a push-based secondary authentication request.

    Sends a push approval to the configured provider (Duo, OneLogin or
    Okta) including server and device details, then caches a successful
    approval so immediate reconnects skip the push.

    Raises:
        AuthError: the user denied the push or the provider rejected it.
        ValueError: self.push_type is not a supported push auth type.

    Bug fix: corrected the typo 'Unkown' in the unsupported-type error
    message.
    """
    info = {
        'Server': self.server.name,
    }

    # Map the client platform identifier to a display name for the
    # push notification; unrecognized platforms map to None.
    platform_name = {
        'linux': 'Linux',
        'mac': 'macOS',
        'ios': 'iOS',
        'win': 'Windows',
        'chrome': 'Chrome OS',
    }.get(self.platform)

    if self.device_name:
        info['Device'] = '%s (%s)' % (self.device_name, platform_name)

    onelogin_mode = utils.get_onelogin_mode()
    okta_mode = utils.get_okta_mode()

    if self.push_type == DUO_AUTH:
        duo_auth = sso.Duo(
            username=self.user.name,
            factor=settings.app.sso_duo_mode,
            remote_ip=self.remote_ip,
            auth_type='Connection',
            info=info,
        )
        allow = duo_auth.authenticate()
    elif self.push_type == SAML_ONELOGIN_AUTH:
        allow = sso.auth_onelogin_secondary(
            username=self.user.name,
            passcode=None,
            remote_ip=self.remote_ip,
            onelogin_mode=onelogin_mode,
        )
    elif self.push_type == SAML_OKTA_AUTH:
        allow = sso.auth_okta_secondary(
            username=self.user.name,
            passcode=None,
            remote_ip=self.remote_ip,
            okta_mode=okta_mode,
        )
    else:
        raise ValueError('Unknown push auth type')

    if not allow:
        self.user.audit_event(
            'user_connection',
            ('User connection to "%s" denied. ' +
                'Push authentication failed') % (self.server.name),
            remote_addr=self.remote_ip,
        )
        raise AuthError('User failed push authentication')

    # Cache the successful push keyed on user/server/device so a quick
    # reconnect does not trigger another push.
    if settings.app.sso_cache and not self.server_auth_token:
        self.sso_push_cache_collection.update(
            {
                'user_id': self.user.id,
                'server_id': self.server.id,
                'mac_addr': self.mac_addr,
                'device_id': self.device_id,
                'device_name': self.device_name,
            },
            {
                'user_id': self.user.id,
                'server_id': self.server.id,
                'remote_ip': self.remote_ip,
                'mac_addr': self.mac_addr,
                'platform': self.platform,
                'device_id': self.device_id,
                'device_name': self.device_name,
                'timestamp': utils.now(),
            }, upsert=True)
def uptime(self):
    """Return host uptime as whole seconds, never less than 1.

    Gives None when the host is not online or no start timestamp has
    been recorded.
    """
    if self.status != ONLINE or not self.start_timestamp:
        return

    elapsed = utils.now() - self.start_timestamp
    seconds = int(elapsed.total_seconds())
    # Report at least one second of uptime for a freshly started host.
    return seconds if seconds > 1 else 1
def _check_push(self):
    """Run push-based secondary authentication for this connection.

    Returns immediately when push auth does not apply (no push type,
    stress test, bypass, cached token, whitelisted network, or a valid
    sso_cache entry). Otherwise the provider call is dispatched to a
    background daemon thread (_auth_push_thread) and AuthForked is
    raised so the caller knows the verdict will arrive asynchronously
    via self._callback.
    """
    self.push_type = self.user.get_push_type()
    if not self.push_type:
        return

    if settings.vpn.stress_test:
        return

    if self.user.bypass_secondary:
        logger.info(
            'Bypass secondary enabled, skipping push', 'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    if self.has_token:
        logger.info(
            'Client authentication cached, skipping push', 'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    if self.whitelisted:
        logger.info(
            'Client network whitelisted, skipping push', 'sso',
            user_name=self.user.name,
            org_name=self.user.org.name,
            server_name=self.server.name,
        )
        return

    if settings.app.sso_cache and not self.server_auth_token:
        # A cache hit for this exact user/server/device/address means a
        # recent push already succeeded; refresh it and skip the push.
        doc = self.sso_push_cache_collection.find_one({
            'user_id': self.user.id,
            'server_id': self.server.id,
            'remote_ip': self.remote_ip,
            'mac_addr': self.mac_addr,
            'platform': self.platform,
            'device_id': self.device_id,
            'device_name': self.device_name,
        })
        if doc:
            self.sso_push_cache_collection.update(
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'mac_addr': self.mac_addr,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                },
                {
                    'user_id': self.user.id,
                    'server_id': self.server.id,
                    'remote_ip': self.remote_ip,
                    'mac_addr': self.mac_addr,
                    'platform': self.platform,
                    'device_id': self.device_id,
                    'device_name': self.device_name,
                    'timestamp': utils.now(),
                })
            logger.info(
                'Authentication cached, skipping push', 'sso',
                user_name=self.user.name,
                org_name=self.user.org.name,
                server_name=self.server.name,
            )
            return

    def thread_func():
        # Errors are swallowed here; _check_call/_callback are
        # responsible for reporting the outcome to the client.
        try:
            self._check_call(self._auth_push_thread)
            self._callback(True)
        except:
            pass

    thread = threading.Thread(target=thread_func)
    thread.daemon = True
    thread.start()

    # Signal the caller that authentication continues asynchronously.
    raise AuthForked()