def load(self, merge=False):
    """Load configuration values into this object's __dict__.

    Values come from the cache (when ``self.cached`` and the cache is
    marked warm) or are parsed from the ``name=value`` lines of the file
    at ``self._conf_path``.

    :param merge: when True, keep attributes already set on the object
        (file/cache values only fill in missing names) and tolerate a
        missing config file instead of raising IOError.
    :raises ValueError: if caching is enabled but the object has no id.
    :raises IOError: if the config file is missing and merge is False.

    NOTE(review): an identical ``load`` definition appears again below in
    this file; the later definition wins at class-body execution — confirm
    the duplication is intentional.
    """
    logger.debug('Loading config. %r' % {
        'path': self._conf_path,
    })
    self._loaded = True

    if self.cached:
        # Caching requires a stable identity to build cache keys from.
        if not hasattr(self, 'id'):
            raise ValueError('Object ID is required for caching')
        # 't' flag marks the cache as fully populated for this object;
        # in that case skip the file read entirely.
        if cache_db.get(self.get_cache_key('cached')) == 't':
            if merge:
                # Only fill in names not already set on the instance.
                for name, value in cache_db.dict_get_all(
                        self.get_cache_key()).iteritems():
                    if name in self.__dict__:
                        continue
                    self.__dict__[name] = value
            else:
                self.__dict__.update(cache_db.dict_get_all(
                    self.get_cache_key()))
            return

    try:
        with open(self._conf_path) as config:
            for line in config:
                line = line.rstrip('\n')
                # Skip blanks and '#' comments; anything else without an
                # '=' separator is logged and ignored.
                if line.strip() == '':
                    continue
                elif line[0] == '#':
                    continue
                elif '=' in line:
                    pass
                else:
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
                    continue
                try:
                    name, value = self._decode_line(line)
                    if merge and name in self.__dict__:
                        continue
                    self.__dict__[name] = value
                    if self.cached:
                        # Mirror each parsed value into the cache dict.
                        cache_db.dict_set(self.get_cache_key(), name, value)
                except ValueError:
                    # _decode_line rejects malformed lines; skip them.
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': line,
                    })
    except IOError:
        # A missing config file is acceptable only when merging.
        if not merge:
            raise

    if self.cached:
        # Mark the cache warm so subsequent loads can skip the file.
        cache_db.set(self.get_cache_key('cached'), 't')
def load(self, merge=False):
    """Populate this object's attributes from cache or the config file.

    When caching is active and the cache is marked warm, attributes come
    straight from ``cache_db``; otherwise the ``name=value`` lines of
    ``self._conf_path`` are parsed. With ``merge=True``, attributes that
    are already set are preserved and a missing config file is tolerated.

    :raises ValueError: caching enabled without an object id.
    :raises IOError: config file missing while merge is False.
    """
    logger.debug('Loading config. %r' % {
        'path': self._conf_path,
    })
    self._loaded = True

    if self.cached:
        if not hasattr(self, 'id'):
            raise ValueError('Object ID is required for caching')
        # Warm cache: short-circuit without touching the file.
        if cache_db.get(self.get_cache_key('cached')) == 't':
            cached_values = cache_db.dict_get_all(self.get_cache_key())
            if merge:
                for attr_name, attr_value in cached_values.iteritems():
                    if attr_name not in self.__dict__:
                        self.__dict__[attr_name] = attr_value
            else:
                self.__dict__.update(cached_values)
            return

    try:
        with open(self._conf_path) as conf_file:
            for raw_line in conf_file:
                entry = raw_line.rstrip('\n')
                # Blank lines and '#' comments are silently skipped.
                if entry.strip() == '' or entry[0] == '#':
                    continue
                # A data line must contain an '=' separator.
                if '=' not in entry:
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': entry,
                    })
                    continue
                try:
                    attr_name, attr_value = self._decode_line(entry)
                    if merge and attr_name in self.__dict__:
                        continue
                    self.__dict__[attr_name] = attr_value
                    if self.cached:
                        cache_db.dict_set(self.get_cache_key(),
                            attr_name, attr_value)
                except ValueError:
                    logger.warning('Ignoring invalid line. %r' % {
                        'line': entry,
                    })
    except IOError:
        # Missing file is only an error outside of merge mode.
        if not merge:
            raise

    if self.cached:
        # Flag the cache as warm for future loads.
        cache_db.set(self.get_cache_key('cached'), 't')
def verify_otp_code(self, code):
    """Validate a time-based one-time password (TOTP, RFC 6238 style).

    Accepts the code for the previous, current, or next 30-second time
    step, then rejects codes that were already used recently (replay
    protection backed by ``cache_db``).

    :param code: six-digit code string supplied by the user.
    :return: True if the code is valid and unused, False otherwise.
    """
    otp_secret = self.otp_secret
    # Base32 input must be padded to a multiple of 8 chars with '='.
    padding = 8 - len(otp_secret) % 8
    if padding != 8:
        otp_secret = otp_secret.ljust(len(otp_secret) + padding, '=')
    otp_secret = base64.b32decode(otp_secret.upper())
    valid_codes = []
    # 30-second time step; accept one step of clock skew either way.
    epoch = int(time.time() / 30)
    for epoch_offset in range(-1, 2):
        # HOTP/TOTP: HMAC-SHA1 over the big-endian 8-byte counter,
        # then dynamic truncation (low nibble of last byte picks the
        # 4-byte window), mask the sign bit, take 6 decimal digits.
        value = struct.pack('>q', epoch + epoch_offset)
        hmac_hash = hmac.new(otp_secret, value, hashlib.sha1).digest()
        offset = ord(hmac_hash[-1]) & 0x0F
        truncated_hash = hmac_hash[offset:offset + 4]
        truncated_hash = struct.unpack('>L', truncated_hash)[0]
        truncated_hash &= 0x7FFFFFFF
        truncated_hash %= 1000000
        valid_codes.append('%06d' % truncated_hash)
    if code not in valid_codes:
        return False
    # Replay protection: scan recently used codes, expiring entries
    # older than 120 seconds as we go. A matching used code (even one
    # just expired on this pass) is rejected.
    used_codes = cache_db.dict_get_all(self.get_cache_key('otp'))
    for auth_time, used_code in used_codes.items():
        if int(time.time()) - int(auth_time) > 120:
            cache_db.dict_remove(self.get_cache_key('otp'), auth_time)
        if used_code == code:
            return False
    # Record this code as used, keyed by the current unix timestamp.
    cache_db.dict_set(self.get_cache_key('otp'),
        str(int(time.time())), code)
    return True
def _commit_ip_pool(self):
    """Persist the cached ip pool (plus the network) to the pool file
    as JSON."""
    with open(self.ip_pool_path, 'w') as pool_file:
        pool_data = cache_db.dict_get_all(self.get_cache_key('ip_pool'))
        pool_data['network'] = self.network
        json.dump(pool_data, pool_file)