def expire_secure_flow_state(self):
    key = self.get_secure_flow_key()
    try:
        cache.delete(key)
    except Exception:
        pass
    self.set_login_session_id()
def get_active_sessions(sessions_key=None):
    portal = api.portal.get()
    portal_path = '-'.join(portal.getPhysicalPath()[1:])
    if not sessions_key:
        sessions_key = '%s-session-*' % portal_path
    cclient = cache.get_client()
    sessions = []
    try:
        keys = cclient.client.keys(sessions_key)
        if keys:
            for session in cclient.client.mget(keys):
                try:
                    session = cPickle.loads(session)
                except Exception:
                    # skip entries that can not be unpickled
                    continue
                sessions.append(session)
        # most recently updated sessions first
        sessions = sorted(sessions, key=lambda x: x['updated'], reverse=True)
    except AttributeError:
        sessions = []

    # we'll also squash and clean up duplicate sessions here: sessions
    # sharing the same user agent, IP and userid are treated as one,
    # and the stale copies are deleted from the cache
    found = []
    filtered = []
    for session in sessions:
        compare_key = '%s%s%s' % (
            session.get('ua'), session.get('ip'), session.get('userid'))
        if compare_key in found:
            cache_key = '%s-session-%s-%s' % (
                portal_path, session['userid'], session['id'])
            try:
                cache.delete(cache_key)
            except Exception:
                pass
        else:
            try:
                session['user'] = api.user.get(session['userid'])
            except Exception:
                pass
            found.append(compare_key)
            filtered.append(session)
    return filtered
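# A minimal usage sketch (not part of the module): rendering the
# deduplicated session list returned above. The field names ('userid',
# 'ip', 'ua', 'updated') are taken from the keys get_active_sessions()
# reads and writes; the helper itself is hypothetical.
def list_session_summaries():
    summaries = []
    for session in get_active_sessions():
        summaries.append('%s from %s (%s), last seen %s' % (
            session.get('userid'),
            session.get('ip'),
            session.get('ua'),
            session.get('updated')))
    return summaries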
def chunk_upload(self):
    chunk = int(self.request.form['chunk'])
    chunk_size = int(self.request.form['chunkSize'])
    total_size = int(self.request.form['totalSize'])
    total_chunks = int(math.ceil(float(total_size) / float(chunk_size)))
    _id = self.request.form.get('id')
    existing_id = self.request.form.get('content', None)
    field_name = self.request.form.get('field', None)
    if chunk > total_chunks:
        raise Exception("More chunks than what should be possible")

    cache_key_prefix = '%s-uploads-' % '/'.join(
        self.context.getPhysicalPath()[1:])
    if chunk == 1:
        # initializing chunk upload
        _id = utils.get_random_string(50)
        filename = self.request.form['name']
        tmp_dir = tempfile.mkdtemp()
        tmp_filename = os.path.join(tmp_dir, filename)
        info = {
            'last_chunk': 1,
            'total_size': total_size,
            'chunk_size': chunk_size,
            'tmp_file': tmp_filename,
            'name': filename
        }
    else:
        info = cache.get(cache_key_prefix + _id)
        # check things are matching up
        if info['last_chunk'] != chunk - 1:
            raise Exception('Invalid chunk sequence')
        if info['total_size'] != total_size:
            raise Exception('Invalid total size')
        if info['chunk_size'] != chunk_size:
            raise Exception('Inconsistent chunk size')
        info['last_chunk'] = chunk

    mode = 'wb'
    if chunk > 1:
        # appending to file now
        mode = 'ab+'
        if not os.path.exists(info['tmp_file']):
            raise Exception('No tmp upload file found')
    fi = open(info['tmp_file'], mode)
    while True:
        data = self.request.form['file'].read(2 << 16)
        if not data:
            break
        fi.write(data)
    fi.close()

    if chunk == total_chunks:
        # finish upload
        dup = False
        if not existing_id:
            try:
                obj = self.create_file_content(info)
            except duplicates.DuplicateException as ex:
                obj = ex.obj
                dup = True
        else:
            try:
                info['existing_id'] = existing_id
                info['field_name'] = field_name
                obj, success, msg = self.update_file_content(info)
                if not success:
                    self._clean_tmp(info)
                    return json.dumps({
                        'success': False,
                        'id': _id,
                        'reason': msg
                    })
            except Exception:
                logger.warning(
                    'Failed to update content.', exc_info=True)
                self._clean_tmp(info)
                return json.dumps({
                    'success': False,
                    'id': _id
                })
        if not (info.get('field_name') or '').startswith('tmp_'):
            # tmp files need to stick around and be managed later...
            self._clean_tmp(info)
        cache.delete(cache_key_prefix + _id)
        return dump_object_data(obj, dup)
    else:
        cache.set(cache_key_prefix + _id, info)
        # verify the cache write landed before acknowledging the chunk
        check_put = None
        while check_put is None:
            try:
                check_put = cache.get(cache_key_prefix + _id)
            except Exception:
                cache.set(cache_key_prefix + _id, info)
        return json.dumps({
            'success': True,
            'id': _id
        })
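# A hedged client-side sketch of the protocol chunk_upload() expects:
# 1-based 'chunk' numbers, a constant 'chunkSize', the overall
# 'totalSize', 'name' on the first chunk, and the server-issued 'id'
# echoed back on every subsequent chunk. The endpoint URL, the use of
# the requests library, and this helper's name are assumptions, not
# part of the module above.
import io

import requests


def upload_in_chunks(url, filename, data, chunk_size=2 << 16):
    # assumes `data` is the whole payload as bytes
    total_size = len(data)
    total_chunks = -(-total_size // chunk_size)  # ceiling division
    upload_id = None
    for chunk in range(1, total_chunks + 1):
        start = (chunk - 1) * chunk_size
        form = {
            'chunk': str(chunk),
            'chunkSize': str(chunk_size),
            'totalSize': str(total_size),
        }
        if chunk == 1:
            # first chunk carries the filename; the server mints the id
            form['name'] = filename
        else:
            form['id'] = upload_id
        resp = requests.post(
            url, data=form,
            files={'file': io.BytesIO(data[start:start + chunk_size])})
        result = resp.json()
        # the final chunk returns object data instead of {'id': ...},
        # so keep the last id we saw
        upload_id = result.get('id', upload_id)
    return result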
def delete(self):
    try:
        cache.delete(self.cache_key)
    except Exception:
        pass
def chunk_upload(self):
    chunk = int(self.request.form['chunk'])
    chunk_size = int(self.request.form['chunkSize'])
    total_size = int(self.request.form['totalSize'])
    total_chunks = int(math.ceil(float(total_size) / float(chunk_size)))
    _id = self.request.form.get('id')
    if chunk > total_chunks:
        raise Exception("More chunks than what should be possible")

    cache_key_prefix = '%s-uploads-' % '/'.join(
        self.context.getPhysicalPath()[1:])
    if chunk == 1:
        # initializing chunk upload
        _id = utils.get_random_string(50)
        filename = self.request.form['name']
        tmp_dir = tempfile.mkdtemp()
        tmp_filename = os.path.join(tmp_dir, filename)
        info = {
            'last_chunk': 1,
            'total_size': total_size,
            'chunk_size': chunk_size,
            'tmp_file': tmp_filename,
            'name': filename
        }
    else:
        info = cache.get(cache_key_prefix + _id)
        # check things are matching up
        if info['last_chunk'] != chunk - 1:
            raise Exception('Invalid chunk sequence')
        if info['total_size'] != total_size:
            raise Exception('Invalid total size')
        if info['chunk_size'] != chunk_size:
            raise Exception('Inconsistent chunk size')
        info['last_chunk'] = chunk

    mode = 'wb'
    if chunk > 1:
        # appending to file now
        mode = 'ab+'
        if not os.path.exists(info['tmp_file']):
            raise Exception('No tmp upload file found')
    fi = open(info['tmp_file'], mode)
    while True:
        data = self.request.form['file'].read(2 << 16)
        if not data:
            break
        fi.write(data)
    fi.close()

    if chunk == total_chunks:
        # finish upload
        dup = False
        try:
            obj = self.create_file_content(info)
        except duplicates.DuplicateException as ex:
            obj = ex.obj
            dup = True
        # remove the whole temporary upload directory, not just the file
        tmp_dir = os.path.dirname(info['tmp_file'])
        shutil.rmtree(tmp_dir)
        cache.delete(cache_key_prefix + _id)
        return dump_object_data(obj, dup)
    else:
        cache.set(cache_key_prefix + _id, info)
        # verify the cache write landed before acknowledging the chunk
        check_put = None
        while check_put is None:
            try:
                check_put = cache.get(cache_key_prefix + _id)
            except Exception:
                cache.set(cache_key_prefix + _id, info)
        return json.dumps({'success': True, 'id': _id})
def expire_secure_flow_state(self):
    key = self.get_secure_flow_key()
    try:
        cache.delete(key)
    except Exception:
        pass