def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    """loads a signed data dump"""
    data = to_bytes(data)
    components = data.count(b':')
    if components == 1:
        return secure_loads_deprecated(data, encryption_key, hash_key, compression_level)
    if components != 2:
        return None
    version, signature, encrypted_data = data.split(b':', 2)
    if version != b'hmac256':
        return None
    encryption_key = to_bytes(encryption_key)
    if not hash_key:
        hash_key = hashlib.sha256(encryption_key).digest()
    actual_signature = hmac.new(to_bytes(hash_key), encrypted_data, hashlib.sha256).hexdigest()
    if not compare(to_native(signature), actual_signature):
        return None
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(pad(encryption_key)[:32], IV=IV)
    try:
        data = unpad(AES_dec(cipher, encrypted_data))
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except Exception:
        return None
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    data = to_bytes(data)
    components = data.count(b':')
    if components == 1:
        return secure_loads_deprecated(data, encryption_key, hash_key, compression_level)
    if components != 2:
        return None
    version, signature, encrypted_data = data.split(b':', 2)
    if version != b'hmac256':
        return None
    encryption_key = to_bytes(encryption_key)
    if not hash_key:
        hash_key = hashlib.sha256(encryption_key).digest()
    actual_signature = hmac.new(to_bytes(hash_key), encrypted_data, hashlib.sha256).hexdigest()
    if not compare(to_native(signature), actual_signature):
        return None
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(pad(encryption_key)[:32], IV=IV)
    try:
        data = unpad(AES_dec(cipher, encrypted_data))
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except Exception as e:
        return None
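# Hedged usage sketch (illustrative, not part of the module above): assuming the
# companion secure_dumps() in gluon.utils produces the 'hmac256:<signature>:<payload>'
# tokens that secure_loads() parses, a round trip would look like this; the key
# strings and payload below are placeholders.
from gluon.utils import secure_dumps, secure_loads

token = secure_dumps({'user_id': 42}, 'my-encryption-key', compression_level=9)
restored = secure_loads(token, 'my-encryption-key', compression_level=9)
assert restored == {'user_id': 42}
# a tampered token or a wrong key fails the HMAC comparison and returns None
assert secure_loads(token, 'wrong-key') is None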
def secure_loads_deprecated(data, encryption_key, hash_key=None, compression_level=None):
    """loads signed data (deprecated because of incorrect padding)"""
    encryption_key = to_bytes(encryption_key)
    data = to_native(data)
    if ':' not in data:
        return None
    if not hash_key:
        hash_key = hashlib.sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    encrypted_data = to_bytes(encrypted_data)
    actual_signature = hmac.new(to_bytes(hash_key), encrypted_data, hashlib.md5).hexdigest()
    if not compare(signature, actual_signature):
        return None
    key = __pad_deprecated(encryption_key)[:32]
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = AES_dec(cipher, encrypted_data)
        data = data.rstrip(b' ')
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except Exception:
        return None
def secure_loads_deprecated(data, encryption_key, hash_key=None, compression_level=None):
    encryption_key = to_bytes(encryption_key)
    data = to_native(data)
    if ':' not in data:
        return None
    if not hash_key:
        hash_key = sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    encrypted_data = to_bytes(encrypted_data)
    actual_signature = hmac.new(to_bytes(hash_key), encrypted_data, hashlib.md5).hexdigest()
    if not compare(signature, actual_signature):
        return None
    key = __pad_deprecated(encryption_key)[:32]
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = AES_dec(cipher, encrypted_data)
        data = data.rstrip(b' ')
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except Exception as e:
        return None
def testSerialization(self):
    from gluon._compat import pickle
    db = DAL(check_reserved=['all'])
    db.define_table('t_a', Field('f_a'))
    db.t_a.insert(f_a='test')
    a = db(db.t_a.id > 0).select(cacheable=True)
    s = pickle.dumps(a)
    b = pickle.loads(s)
    self.assertEqual(a.db, b.db)
    db.t_a.drop()
    db.close()
def load(self, request, app, ticket_id):
    if not self.db:
        try:
            ef = self._error_file(request, ticket_id, 'rb', app)
        except IOError:
            return {}
        try:
            return pickle.load(ef)
        finally:
            ef.close()
    else:
        table = self._get_table(self.db, self.tablename, app)
        rows = self.db(table.ticket_id == ticket_id).select()
        return pickle.loads(rows[0].ticket_data) if rows else {}
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    encryption_key = to_bytes(encryption_key)
    data = to_native(data)
    if ':' not in data:
        return None
    if not hash_key:
        hash_key = sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    encrypted_data = to_bytes(encrypted_data)
    actual_signature = hmac.new(to_bytes(hash_key), encrypted_data).hexdigest()
    if not compare(signature, actual_signature):
        return None
    key = pad(encryption_key)[:32]
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = cipher.decrypt(encrypted_data)
        data = data.rstrip(b' ')
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except Exception as e:
        return None
def globals_dict(self):
    """Returns a dictionary view of the globals."""
    return dict((name, pickle.loads(val))
                for name, val in zip(self.global_names, self.globals))
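# Hedged companion sketch: globals_dict() above assumes two parallel attributes,
# self.global_names and self.globals, where every entry of self.globals holds a
# pickled value. The helper below (a hypothetical name, not taken from the source)
# shows one way such parallel lists could be built.
import pickle

def pack_globals(namespace):
    """Return (names, pickled_values) in the shape globals_dict() expects."""
    names = list(namespace)
    pickled_values = [pickle.dumps(namespace[name]) for name in names]
    return names, pickled_values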
def connect(self, request=None, response=None, db=None,
            tablename='web2py_session', masterapp=None, migrate=True,
            separate=None, check_client=False, cookie_key=None,
            cookie_expires=None, compression_level=None):
    """
    Used in models, allows to customize Session handling

    Args:
        request: the request object
        response: the response object
        db: to store/retrieve sessions in db (a table is created)
        tablename(str): table name
        masterapp(str): points to another app's sessions. This enables a
            "SSO" environment among apps
        migrate: passed to the underlying db
        separate: with True, creates a folder with the 2 initials of the
            session id. Can also be a function, e.g. ::

                separate=lambda session_name: session_name[-2:]

        check_client: if True, sessions can only come from the same ip
        cookie_key(str): secret for cookie encryption
        cookie_expires: sets the expiration of the cookie
        compression_level(int): 0-9, sets zlib compression on the data
            before the encryption
    """
    from gluon.dal import Field
    request = request or current.request
    response = response or current.response
    masterapp = masterapp or request.application
    cookies = request.cookies

    self._unlock(response)

    response.session_masterapp = masterapp
    response.session_id_name = 'session_id_%s' % masterapp.lower()
    response.session_data_name = 'session_data_%s' % masterapp.lower()
    response.session_cookie_expires = cookie_expires
    response.session_client = str(request.client).replace(':', '.')
    current._session_cookie_key = cookie_key
    response.session_cookie_compression_level = compression_level

    # check if there is a session_id in cookies
    try:
        old_session_id = cookies[response.session_id_name].value
    except KeyError:
        old_session_id = None
    response.session_id = old_session_id

    # if we are supposed to use cookie based session data
    if cookie_key:
        response.session_storage_type = 'cookie'
    elif db:
        response.session_storage_type = 'db'
    else:
        response.session_storage_type = 'file'

    # why do we do this?
    # because connect may be called twice, by web2py and in models.
    # the first time there is no db yet so it should do nothing
    if (global_settings.db_sessions is True
            or masterapp in global_settings.db_sessions):
        return

    if response.session_storage_type == 'cookie':
        # check if there is session data in cookies
        if response.session_data_name in cookies:
            session_cookie_data = cookies[response.session_data_name].value
        else:
            session_cookie_data = None
        if session_cookie_data:
            data = secure_loads(session_cookie_data, cookie_key,
                                compression_level=compression_level)
            if data:
                self.update(data)
        response.session_id = True

    # else if we are supposed to use file based sessions
    elif response.session_storage_type == 'file':
        response.session_new = False
        response.session_file = None
        # check if the session_id points to a valid session filename
        if response.session_id:
            if not regex_session_id.match(response.session_id):
                response.session_id = None
            else:
                response.session_filename = \
                    os.path.join(up(request.folder), masterapp,
                                 'sessions', response.session_id)
                try:
                    response.session_file = \
                        recfile.open(response.session_filename, 'rb+')
                    portalocker.lock(response.session_file,
                                     portalocker.LOCK_EX)
                    response.session_locked = True
                    self.update(pickle.load(response.session_file))
                    response.session_file.seek(0)
                    oc = response.session_filename.split('/')[-1].split('-')[0]
                    if check_client and response.session_client != oc:
                        raise Exception("cookie attack")
                except:
                    response.session_id = None
        if not response.session_id:
            uuid = web2py_uuid()
            response.session_id = '%s-%s' % (response.session_client, uuid)
            separate = separate and (lambda session_name: session_name[-2:])
            if separate:
                prefix = separate(response.session_id)
                response.session_id = '%s/%s' % (prefix, response.session_id)
            response.session_filename = \
                os.path.join(up(request.folder), masterapp,
                             'sessions', response.session_id)
            response.session_new = True

    # else the session goes in db
    elif response.session_storage_type == 'db':
        if global_settings.db_sessions is not True:
            global_settings.db_sessions.add(masterapp)
        # if we had a session on file already, close it (yes, can happen)
        if response.session_file:
            self._close(response)
        # if on GAE tickets go also in DB
        if settings.global_settings.web2py_runtime_gae:
            request.tickets_db = db
        if masterapp == request.application:
            table_migrate = migrate
        else:
            table_migrate = False
        tname = tablename + '_' + masterapp
        table = db.get(tname, None)
        # Field = db.Field
        if table is None:
            db.define_table(
                tname,
                Field('locked', 'boolean', default=False),
                Field('client_ip', length=64),
                Field('created_datetime', 'datetime', default=request.now),
                Field('modified_datetime', 'datetime'),
                Field('unique_key', length=64),
                Field('session_data', 'blob'),
                migrate=table_migrate,
            )
            table = db[tname]  # to allow for lazy table
        response.session_db_table = table
        if response.session_id:
            # Get session data out of the database
            try:
                (record_id, unique_key) = response.session_id.split(':')
                record_id = long(record_id)
            except (TypeError, ValueError):
                record_id = None

            # Select from database
            if record_id:
                row = table(record_id, unique_key=unique_key)
                # Make sure the session data exists in the database
                if row:
                    # rows[0].update_record(locked=True)

                    # Unpickle the data
                    session_data = pickle.loads(row.session_data)
                    self.update(session_data)
                    response.session_new = False
                else:
                    record_id = None

            if record_id:
                response.session_id = '%s:%s' % (record_id, unique_key)
                response.session_db_unique_key = unique_key
                response.session_db_record_id = record_id
            else:
                response.session_id = None
                response.session_new = True
        # if there is no session id yet, we'll need to create a new session
        else:
            response.session_new = True

    # set the cookie now if you know the session_id so user can set
    # cookie attributes in controllers/models; the cookie may be reset later
    # Removed comparison between old and new session ids - should send
    # the cookie all the time
    if isinstance(response.session_id, str):
        response.cookies[response.session_id_name] = response.session_id
        response.cookies[response.session_id_name]['path'] = '/'
        if cookie_expires:
            response.cookies[response.session_id_name]['expires'] = \
                cookie_expires.strftime(FMT)

    session_pickled = pickle.dumps(self, pickle.HIGHEST_PROTOCOL)
    response.session_hash = hashlib.md5(session_pickled).hexdigest()

    if self.flash:
        (response.flash, self.flash) = (self.flash, None)
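# Hedged usage sketch: in a web2py model, connect() picks the session backend.
# The lines below mirror the documented calling patterns; 'yoursecret' is a
# placeholder secret and session/request/response/db are the objects web2py
# injects into the model environment.
session.connect(request, response, cookie_key='yoursecret', compression_level=9)  # cookie-based sessions
# session.connect(request, response, db=db)           # database-backed sessions
# session.connect(request, response, separate=True)   # file sessions split into sub-folders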
def test_pickling(self):
    """ Test storage pickling """
    s = Storage(a=1)
    sd = pickle.dumps(s, pickle.HIGHEST_PROTOCOL)
    news = pickle.loads(sd)
    self.assertEqual(news.a, 1)
def get(self):
    session = Storage()
    session.update(pickle.loads(self.row.session_data))
    return session