def _get_user_count(self):
    """Return this org's user count, rebuilding the user cache on a miss.

    A cache miss surfaces as cache_db.get() returning None, making
    int(None) raise TypeError, which triggers the rebuild-and-retry.
    """
    count_key = self.get_cache_key('user_count')
    try:
        return int(cache_db.get(count_key))
    except TypeError:
        # Cache was cold; populate it and read again.
        self._cache_users()
        return int(cache_db.get(count_key))
def get_org_count(cls):
    """Return the total number of organizations.

    A cache miss surfaces as cache_db.get() returning None, making
    int(None) raise TypeError, which triggers a rebuild of the org
    cache before retrying.
    """
    try:
        org_count = int(cache_db.get('org_count'))
    except TypeError:
        # Bug fix: this is a classmethod receiving `cls`; the original
        # called self._cache_orgs(), and `self` is undefined here
        # (NameError on every cache miss).
        cls._cache_orgs()
        org_count = int(cache_db.get('org_count'))
    return org_count
def _load_ip_pool(self):
    # Load the persisted ip pool file into cache_db. Runs at most once
    # per process: the 'ip_pool_cached' flag short-circuits repeats.
    if cache_db.get(self.get_cache_key('ip_pool_cached')) == 't':
        return
    reset = False
    if os.path.exists(self.ip_pool_path):
        with open(self.ip_pool_path, 'r') as ip_pool_file:
            pool = json.loads(ip_pool_file.read())
            # 'network' is stored alongside the pool entries; pop it so
            # the remaining keys are pure pool assignments.
            network = pool.pop('network', None)
            if network == self.network:
                cache_key = self.get_cache_key('ip_pool')
                set_cache_key = self.get_cache_key('ip_pool_set')
                # Each value is a 'local-remote' ip pair; record the
                # mapping and mark both addresses as in use.
                for key, value in pool.iteritems():
                    cache_db.dict_set(cache_key, key, value)
                    local_ip_addr, remote_ip_addr = value.split('-')
                    cache_db.set_add(set_cache_key, local_ip_addr)
                    cache_db.set_add(set_cache_key, remote_ip_addr)
            else:
                # Server network changed since the pool was saved; the
                # stored assignments are stale and must be rebuilt.
                reset = True
    cache_db.set(self.get_cache_key('ip_pool_cached'), 't')
    if reset:
        self.update_ip_pool()
def __getattr__(self, name):
    # Computed attributes; anything unhandled falls back to the parent
    # Config lookup at the bottom.
    if name == 'web_protocol':
        if not self.ssl:
            return 'http'
        return 'https'
    elif name == 'password_data':
        # '1$' prefix marks the v1 salted format '1$<salt>$<hash>';
        # anything else is treated as the legacy v0 format.
        if self.password[:2] == '1$':
            pass_split = self.password.split('$')
            return (1, pass_split[1], pass_split[2])
        else:
            return (0, PASSWORD_SALT_V0, self.password)
    elif name == 'ssl':
        # Debug mode forces ssl off; otherwise this deliberately falls
        # through to the Config lookup below for the configured value.
        if self.debug:
            return False
    elif name == 'localhost_ip':
        localhost_ip = cache_db.get('localhost_ip')
        if not localhost_ip:
            try:
                # NOTE(review): bare except — any failure (not just DNS
                # errors) falls back to 127.0.0.1.
                localhost_ip = socket.gethostbyname('localhost')
            except:
                localhost_ip = '127.0.0.1'
            # NOTE(review): expire is issued before set — confirm
            # cache_db applies the TTL to a key set afterwards.
            cache_db.expire('localhost_ip', LOCALHOST_IP_TTL)
            cache_db.set('localhost_ip', localhost_ip)
        return localhost_ip
    return Config.__getattr__(self, name)
def _cache_orgs(cls):
    """Build the cached set of org ids from the data directory.

    Skips all work when the 'orgs_cached' flag is already set.
    """
    if cache_db.get('orgs_cached') == 't':
        return
    cache_db.remove('orgs')
    orgs_path = os.path.join(app_server.data_path, ORGS_DIR)
    if os.path.isdir(orgs_path):
        # One directory entry per organization.
        for org_id in os.listdir(orgs_path):
            cache_db.set_add('orgs', org_id)
    cls.sort_orgs_cache()
    cache_db.set('orgs_cached', 't')
def _cache_orgs(cls):
    """Populate the cached org id set if it has not been built yet."""
    if cache_db.get("orgs_cached") != "t":
        # Rebuild from scratch: clear any partial set first.
        cache_db.remove("orgs")
        data_dir = os.path.join(app_server.data_path, ORGS_DIR)
        if os.path.isdir(data_dir):
            for entry in os.listdir(data_dir):
                cache_db.set_add("orgs", entry)
        cls.sort_orgs_cache()
        # Flag set last so a crash mid-build leaves the cache cold.
        cache_db.set("orgs_cached", "t")
def load(self, merge=False):
    # Load key=value config from self._conf_path into self.__dict__,
    # preferring the cache_db copy when it is marked hot. With
    # merge=True, existing attributes are never overwritten and a
    # missing file is tolerated.
    logger.debug('Loading config. %r' % {
        'path': self._conf_path,
    })
    self._loaded = True
    if self.cached:
        # Caching is keyed on the object id.
        if not hasattr(self, 'id'):
            raise ValueError('Object ID is required for caching')
        if cache_db.get(self.get_cache_key('cached')) == 't':
            if merge:
                # Only fill in attributes not already present.
                for name, value in cache_db.dict_get_all(
                        self.get_cache_key()).iteritems():
                    if name in self.__dict__:
                        continue
                    self.__dict__[name] = value
            else:
                self.__dict__.update(cache_db.dict_get_all(
                    self.get_cache_key()))
            return
    try:
        with open(self._conf_path) as config:
            for line in config:
                line = line.rstrip('\n')
                # Skip blanks and '#' comments; anything without '=' is
                # logged and ignored.
                if line.strip() == '':
                    continue
                elif line[0] == '#':
                    continue
                elif '=' in line:
                    pass
                else:
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
                    continue
                try:
                    name, value = self._decode_line(line)
                    if merge and name in self.__dict__:
                        continue
                    self.__dict__[name] = value
                    if self.cached:
                        cache_db.dict_set(self.get_cache_key(),
                            name, value)
                except ValueError:
                    # _decode_line rejected the line; skip it.
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
    except IOError:
        # A missing file is only an error on a non-merge load.
        if not merge:
            raise
    if self.cached:
        # Mark the cache hot only after a full successful pass.
        cache_db.set(self.get_cache_key('cached'), 't')
def load(self, merge=False):
    # Load key=value config from self._conf_path into self.__dict__,
    # short-circuiting to the cache_db copy when it is marked hot.
    # merge=True preserves existing attributes and tolerates a missing
    # config file.
    logger.debug('Loading config. %r' % {
        'path': self._conf_path,
    })
    self._loaded = True
    if self.cached:
        # The cache key is derived from the object id.
        if not hasattr(self, 'id'):
            raise ValueError('Object ID is required for caching')
        if cache_db.get(self.get_cache_key('cached')) == 't':
            if merge:
                # Existing attributes win over cached values.
                for name, value in cache_db.dict_get_all(
                        self.get_cache_key()).iteritems():
                    if name in self.__dict__:
                        continue
                    self.__dict__[name] = value
            else:
                self.__dict__.update(
                    cache_db.dict_get_all(self.get_cache_key()))
            return
    try:
        with open(self._conf_path) as config:
            for line in config:
                line = line.rstrip('\n')
                # Blank lines and '#' comments are skipped silently;
                # lines without '=' are logged and skipped.
                if line.strip() == '':
                    continue
                elif line[0] == '#':
                    continue
                elif '=' in line:
                    pass
                else:
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
                    continue
                try:
                    name, value = self._decode_line(line)
                    if merge and name in self.__dict__:
                        continue
                    self.__dict__[name] = value
                    if self.cached:
                        cache_db.dict_set(self.get_cache_key(),
                            name, value)
                except ValueError:
                    # _decode_line could not parse the line.
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
    except IOError:
        # Missing file is acceptable when merging.
        if not merge:
            raise
    if self.cached:
        # Only flag the cache hot after the whole file was processed.
        cache_db.set(self.get_cache_key('cached'), 't')
def __getattr__(self, name):
    """Resolve computed org attributes, deferring unknown names to Config."""
    if name == 'otp_auth':
        # True as soon as any attached server requires otp auth.
        return any(server.otp_auth for server in self.iter_servers())
    if name == 'user_count':
        return self._get_user_count()
    if name == 'page_total':
        return int(cache_db.get(self.get_cache_key('users_page_total')))
    return Config.__getattr__(self, name)
def __getattr__(self, name):
    """Computed attribute lookup; unknown names go to the Config base."""
    if name == "otp_auth":
        # Scan attached servers, stopping at the first with otp on.
        for srv in self.iter_servers():
            if srv.otp_auth:
                return True
        return False
    elif name == "user_count":
        return self._get_user_count()
    elif name == "page_total":
        page_total = cache_db.get(self.get_cache_key("users_page_total"))
        return int(page_total)
    return Config.__getattr__(self, name)
def _cache_servers(cls):
    """Build the cached set of server ids, tagging each with its type.

    Each id is stored as '<id>_<type>' where type is NODE_SERVER when a
    node-server marker file exists in the server directory, else SERVER.
    """
    if cache_db.get('servers_cached') == 't':
        return
    cache_db.remove('servers')
    servers_path = os.path.join(app_server.data_path, SERVERS_DIR)
    if os.path.isdir(servers_path):
        for server_id in os.listdir(servers_path):
            node_marker = os.path.join(servers_path, server_id,
                NODE_SERVER)
            suffix = NODE_SERVER if os.path.isfile(node_marker) else SERVER
            cache_db.set_add('servers', server_id + '_' + suffix)
    cls.sort_servers_cache()
    cache_db.set('servers_cached', 't')
def _cache_users(self):
    """Build this org's cached user id set from the certs directory.

    Skips the CA certificate and any cert with no matching user.
    """
    if cache_db.get(self.get_cache_key('users_cached')) == 't':
        return
    cache_db.remove(self.get_cache_key('users'))
    certs_path = os.path.join(self.path, CERTS_DIR)
    if os.path.isdir(certs_path):
        for cert_name in os.listdir(certs_path):
            user_id = cert_name.replace('.crt', '')
            # The CA cert lives alongside user certs but is not a user.
            if user_id == CA_CERT_ID:
                continue
            user = User.get_user(self, id=user_id)
            if not user:
                continue
            user._add_cache_trie_key()
            cache_db.set_add(self.get_cache_key('users'), user_id)
    self.sort_users_cache()
    cache_db.set(self.get_cache_key('users_cached'), 't')
def verify_otp_code(self, code, remote_ip=None):
    """Verify a TOTP code for this user.

    Accepts codes from the previous, current, and next 30-second epoch
    (RFC 6238 with a +/-1 window). A code already used within the last
    120 seconds is rejected to prevent replay. When remote_ip is given,
    a successful code/ip pair is cached so the same client can re-auth
    without burning a fresh code.

    Returns True when the code is valid and unused, False otherwise.
    """
    if remote_ip:
        otp_cache = cache_db.get(self.get_cache_key('otp_cache'))
        if otp_cache:
            cur_code, cur_remote_ip = otp_cache.split(',')
            if cur_code == code and cur_remote_ip == remote_ip:
                # Same client, same code: refresh the cache ttl.
                cache_db.expire(self.get_cache_key('otp_cache'),
                    OTP_CACHE_TTL)
                return True
            else:
                cache_db.remove(self.get_cache_key('otp_cache'))

    # base32 requires padding to a multiple of 8 chars.
    otp_secret = self.otp_secret
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
    otp_secret = base64.b32decode(otp_secret.upper())

    # Compute the three acceptable codes (previous/current/next epoch)
    # per RFC 6238 dynamic truncation.
    valid_codes = []
    epoch = int(time.time() / 30)
    for epoch_offset in range(-1, 2):
        value = struct.pack('>q', epoch + epoch_offset)
        hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
        offset = ord(hmac_hash[-1]) & 0x0F
        truncated_hash = hmac_hash[offset:offset + 4]
        truncated_hash = struct.unpack('>L', truncated_hash)[0]
        truncated_hash &= 0x7FFFFFFF
        truncated_hash %= 1000000
        valid_codes.append('%06d' % truncated_hash)

    if code not in valid_codes:
        return False

    # Replay protection: prune stale entries and reject reused codes.
    used_codes = cache_db.dict_get_all(self.get_cache_key('otp'))
    for auth_time, used_code in used_codes.items():
        if int(time.time()) - int(auth_time) > 120:
            cache_db.dict_remove(self.get_cache_key('otp'), auth_time)
        if used_code == code:
            return False

    cache_db.dict_set(self.get_cache_key('otp'),
        str(int(time.time())), code)
    if remote_ip:
        # Bug fix: only cache the code/ip pair when a remote_ip was
        # supplied; ','.join((code, None)) raises TypeError.
        cache_db.expire(self.get_cache_key('otp_cache'), OTP_CACHE_TTL)
        cache_db.set(self.get_cache_key('otp_cache'),
            ','.join((code, remote_ip)))
    return True