def safe_apply(self, key, function, default_value=None):
    """
    Safely apply a function to the value of a key in storage and set
    the return value of the function to it. Return the result of
    applying the function.
    """
    key = self.key_filter_in(key)
    exists = True
    try:
        val_file = recfile.open(key, mode='r+b', path=self.folder)
    except IOError:
        exists = False
        val_file = recfile.open(key, mode='wb', path=self.folder)
    self.wait_portalock(val_file)
    if exists:
        timestamp, value = pickle.load(val_file)
    else:
        value = default_value
    new_value = function(value)
    val_file.seek(0)
    pickle.dump((time.time(), new_value), val_file, pickle.HIGHEST_PROTOCOL)
    val_file.truncate()
    val_file.close()
    return new_value
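# Illustrative sketch, not part of the original source: how safe_apply()
# might be used for an atomic read-modify-write on disk. `disk_cache`
# stands for any object exposing safe_apply() (e.g. a CacheOnDisk-style
# instance); the key name 'page_hits' is hypothetical.
def _example_increment_hits(disk_cache):
    # the increment runs while safe_apply() holds the file lock, so
    # concurrent callers cannot lose updates
    return disk_cache.safe_apply('page_hits',
                                 lambda v: (v or 0) + 1,
                                 default_value=0)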
def load_storage(filename):
    fp = None
    try:
        fp = portalocker.LockedFile(filename, 'rb')
        storage = pickle.load(fp)
    finally:
        if fp:
            fp.close()
    return Storage(storage)
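# Illustrative sketch, not part of the original source: load_storage()
# takes a portalocker lock, unpickles the file and wraps the result in a
# Storage for attribute-style access. The filename below is hypothetical.
def _example_read_settings(filename='private/settings.pickle'):
    settings = load_storage(filename)
    # Storage allows both settings['title'] and settings.title
    return settings.get('title')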
def __getitem__(self, key):
    key = self.key_filter_in(key)
    try:
        val_file = recfile.open(key, mode='rb', path=self.folder)
    except IOError:
        raise KeyError
    self.wait_portalock(val_file)
    value = pickle.load(val_file)
    val_file.close()
    return value
def load(self, request, app, ticket_id):
    if not self.db:
        try:
            ef = self._error_file(request, ticket_id, 'rb', app)
        except IOError:
            return {}
        try:
            return pickle.load(ef)
        finally:
            ef.close()
    else:
        table = self._get_table(self.db, self.tablename, app)
        rows = self.db(table.ticket_id == ticket_id).select()
        return pickle.loads(rows[0].ticket_data) if rows else {}
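# Illustrative sketch, not part of the original source: reading a stored
# error ticket back as a dict. `ticket_storage` stands for a
# TicketStorage-style instance; `request`, `app` and `ticket_id` are
# assumed to come from the surrounding web2py environment, and the
# 'traceback' key reflects the usual ticket layout.
def _example_read_ticket(ticket_storage, request, app, ticket_id):
    ticket = ticket_storage.load(request, app, ticket_id)
    # an empty dict means the ticket file/record was not found
    return ticket.get('traceback', '')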
def load(self, filename):
    with open(filename, 'rb') as fp:
        self.loadd(pickle.load(fp))
def connect(self,
            request=None,
            response=None,
            db=None,
            tablename='web2py_session',
            masterapp=None,
            migrate=True,
            separate=None,
            check_client=False,
            cookie_key=None,
            cookie_expires=None,
            compression_level=None):
    """
    Used in models; allows customizing Session handling

    Args:
        request: the request object
        response: the response object
        db: to store/retrieve sessions in db (a table is created)
        tablename(str): table name
        masterapp(str): points to another app's sessions. This enables a
            "SSO" environment among apps
        migrate: passed to the underlying db
        separate: with True, creates a folder with the 2 initials of the
            session id. Can also be a function, e.g. ::

                separate=lambda session_name: session_name[-2:]

        check_client: if True, sessions can only come from the same ip
        cookie_key(str): secret for cookie encryption
        cookie_expires: sets the expiration of the cookie
        compression_level(int): 0-9, sets zlib compression on the data
            before the encryption
    """
    from gluon.dal import Field
    request = request or current.request
    response = response or current.response
    masterapp = masterapp or request.application
    cookies = request.cookies

    self._unlock(response)

    response.session_masterapp = masterapp
    response.session_id_name = 'session_id_%s' % masterapp.lower()
    response.session_data_name = 'session_data_%s' % masterapp.lower()
    response.session_cookie_expires = cookie_expires
    response.session_client = str(request.client).replace(':', '.')
    current._session_cookie_key = cookie_key
    response.session_cookie_compression_level = compression_level

    # check if there is a session_id in cookies
    try:
        old_session_id = cookies[response.session_id_name].value
    except KeyError:
        old_session_id = None
    response.session_id = old_session_id

    # if we are supposed to use cookie based session data
    if cookie_key:
        response.session_storage_type = 'cookie'
    elif db:
        response.session_storage_type = 'db'
    else:
        response.session_storage_type = 'file'

    # why do we do this? because connect may be called twice, by web2py and
    # in models; the first time there is no db yet, so it should do nothing
    if (global_settings.db_sessions is True
            or masterapp in global_settings.db_sessions):
        return

    if response.session_storage_type == 'cookie':
        # check if there is session data in cookies
        if response.session_data_name in cookies:
            session_cookie_data = cookies[response.session_data_name].value
        else:
            session_cookie_data = None
        if session_cookie_data:
            data = secure_loads(session_cookie_data, cookie_key,
                                compression_level=compression_level)
            if data:
                self.update(data)
        response.session_id = True

    # else if we are supposed to use file based sessions
    elif response.session_storage_type == 'file':
        response.session_new = False
        response.session_file = None
        # check if the session_id points to a valid session filename
        if response.session_id:
            if not regex_session_id.match(response.session_id):
                response.session_id = None
            else:
                response.session_filename = \
                    os.path.join(up(request.folder), masterapp,
                                 'sessions', response.session_id)
                try:
                    response.session_file = \
                        recfile.open(response.session_filename, 'rb+')
                    portalocker.lock(response.session_file,
                                     portalocker.LOCK_EX)
                    response.session_locked = True
                    self.update(pickle.load(response.session_file))
                    response.session_file.seek(0)
                    oc = response.session_filename.split('/')[-1].split('-')[0]
                    if check_client and response.session_client != oc:
                        raise Exception("cookie attack")
                except:
                    response.session_id = None
        if not response.session_id:
            uuid = web2py_uuid()
            response.session_id = '%s-%s' % (response.session_client, uuid)
            separate = separate and (lambda session_name: session_name[-2:])
            if separate:
                prefix = separate(response.session_id)
                response.session_id = '%s/%s' % (prefix, response.session_id)
            response.session_filename = \
                os.path.join(up(request.folder), masterapp,
                             'sessions', response.session_id)
            response.session_new = True

    # else the session goes in db
    elif response.session_storage_type == 'db':
        if global_settings.db_sessions is not True:
            global_settings.db_sessions.add(masterapp)
        # if we had a session on file already, close it (yes, it can happen)
        if response.session_file:
            self._close(response)
        # if on GAE, tickets go in the DB as well
        if settings.global_settings.web2py_runtime_gae:
            request.tickets_db = db
        if masterapp == request.application:
            table_migrate = migrate
        else:
            table_migrate = False
        tname = tablename + '_' + masterapp
        table = db.get(tname, None)
        # Field = db.Field
        if table is None:
            db.define_table(
                tname,
                Field('locked', 'boolean', default=False),
                Field('client_ip', length=64),
                Field('created_datetime', 'datetime', default=request.now),
                Field('modified_datetime', 'datetime'),
                Field('unique_key', length=64),
                Field('session_data', 'blob'),
                migrate=table_migrate,
            )
            table = db[tname]  # to allow for lazy table
        response.session_db_table = table
        if response.session_id:
            # get session data out of the database
            try:
                (record_id, unique_key) = response.session_id.split(':')
                record_id = long(record_id)
            except (TypeError, ValueError):
                record_id = None

            # select from database
            if record_id:
                row = table(record_id, unique_key=unique_key)
                # make sure the session data exists in the database
                if row:
                    # rows[0].update_record(locked=True)
                    # unpickle the data
                    session_data = pickle.loads(row.session_data)
                    self.update(session_data)
                    response.session_new = False
                else:
                    record_id = None
            if record_id:
                response.session_id = '%s:%s' % (record_id, unique_key)
                response.session_db_unique_key = unique_key
                response.session_db_record_id = record_id
            else:
                response.session_id = None
                response.session_new = True
        else:
            # if there is no session id yet, we'll need to create a
            # new session
            response.session_new = True

    # set the cookie now if we know the session_id so the user can set
    # cookie attributes in controllers/models (the cookie may still be
    # reset later); the comparison between old and new session ids was
    # removed - the cookie should be sent all the time
    if isinstance(response.session_id, str):
        response.cookies[response.session_id_name] = response.session_id
        response.cookies[response.session_id_name]['path'] = '/'
        if cookie_expires:
            response.cookies[response.session_id_name]['expires'] = \
                cookie_expires.strftime(FMT)

    session_pickled = pickle.dumps(self, pickle.HIGHEST_PROTOCOL)
    response.session_hash = hashlib.md5(session_pickled).hexdigest()

    if self.flash:
        (response.flash, self.flash) = (self.flash, None)
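# Illustrative sketch, not part of the original source: the two common ways
# a model file might call connect(). `session`, `request`, `response` and
# `db` are the usual web2py objects; the secret string is a placeholder.
def _example_connect(session, request, response, db=None, secret=None):
    if db is not None:
        # store sessions in a database table (defined on demand)
        session.connect(request, response, db=db)
    elif secret:
        # keep the whole session in an encrypted, zlib-compressed cookie
        session.connect(request, response,
                        cookie_key=secret,
                        compression_level=5)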