def expire(self, session=None):
    """Flag a session as expired and write it back to the cache.

    Falls back to the current session (``self.get()``) when none is
    given; does nothing if no session exists.
    """
    if session is None:
        session = self.get()
    if not session:
        return
    session.update({'expired': True})
    # re-cache with the standard 5 hour session lifetime
    cache.set(self.cache_key, session, expire=60 * 60 * 5)
def MosaicRegistry_parseRegistry(self):
    """Patched ``parseRegistry`` that caches the parsed result for 2 minutes.

    When the TinyMCE spellchecker setting is 'AtD', the After-the-Deadline
    toolbar button is injected into the mosaic rich text configuration.

    Returns the parsed mosaic registry structure (a nested dict).
    """
    cache_key = '%s-mosaic-registry' % '/'.join(
        api.portal.get().getPhysicalPath()[1:])
    try:
        result = cache.get(cache_key)
    except KeyError:
        result = self._old_parseRegistry()
        cache.set(cache_key, result, 60 * 2)  # cache for 2 minutes
    registry = getUtility(IRegistry)
    settings = registry.forInterface(
        ITinyMCESchema, prefix="plone", check=False)
    if settings.libraries_spellchecker_choice != 'AtD':
        return result
    # add atd config to toolbar dynamically
    mos_settings = result['plone']['app']['mosaic']
    mos_settings['richtext_toolbar']['AtD'] = {
        'category': u'actions',
        'name': u'toolbar-AtD',
        'weight': 0,
        'favorite': False,
        'label': u'After the deadline',
        'action': u'AtD',
        'icon': False
    }
    # Guard every append: on a cache hit ``result`` may be the same mutable
    # structure mutated by an earlier call (depending on the cache backend),
    # so an unconditional append would add a duplicate 'toolbar-AtD' entry
    # on every request.
    for widget_type in _rich_text_widget_types:
        actions = mos_settings['widget_actions'][widget_type]['actions']
        if 'toolbar-AtD' not in actions:
            actions.append('toolbar-AtD')
    text_actions = mos_settings['structure_tiles']['text']['available_actions']
    if 'toolbar-AtD' not in text_actions:
        text_actions.append('toolbar-AtD')
    rawhtml_actions = mos_settings['app_tiles'][
        'plone_app_standardtiles_rawhtml']['available_actions']  # noqa
    if 'toolbar-AtD' not in rawhtml_actions:
        rawhtml_actions.append('toolbar-AtD')
    return result
def get_cachable_config_data(self):
    """Build the slot-tile / youtube config data attributes, cached 10 min.

    Returns a dict of ``data-*`` attribute values ready to render.
    """
    cache_key = '%s-config-data' % '/'.join(self.site.getPhysicalPath()[1:])
    try:
        return cache.get(cache_key)
    except Exception:
        pass
    slot_tiles = self.registry.get('castle.slot_tiles')
    if not slot_tiles:
        slot_tiles = {
            'Structure': ['plone.app.standardtiles.rawhtml']
        }
    # copy before mutating — otherwise, you're editing the value in the DB!!!!
    slot_tiles = slot_tiles.copy()
    for group_name, tile_ids in slot_tiles.items():
        slot_tiles[group_name] = [
            {
                'id': tile_id,
                'label': getUtility(ITileType, name=tile_id).title
            }
            for tile_id in tile_ids
        ]
    data = {
        'data-available-slots': json.dumps(slot_tiles),
        'data-youtube-enabled': str(
            youtube.get_oauth_token() is not None).lower()
    }
    cache.set(cache_key, data, 600)
    return data
def add(self, text, type=u'info'):
    """Append a status message to this user's cached message queue.

    Anonymous users fall through to the stock status-message behavior.
    Messages are capped at ``self.max_messages`` and kept for one hour.
    """
    if self.anon:
        return super(CastleStatusMessage, self).add(text, type)
    try:
        text = translate(text)
    except Exception:
        pass
    cache_key = self.get_cache_key()
    try:
        queue = cache.get(cache_key)
    except KeyError:
        queue = []
    site_path = context_path = '/'.join(self.site_path)
    context = get_context_from_request(self.context)
    if context:
        try:
            context_path = '/'.join(context.getPhysicalPath())
        except AttributeError:
            pass
    queue.append({
        'text': text,
        'type': type,
        'timestamp': time.time(),
        # store the path relative to the site root
        'context': context_path[len(site_path):]
    })
    # keep only the most recent messages
    queue = queue[-self.max_messages:]
    # cache for 1 hour, should it be longer? shorter?
    cache.set(cache_key, queue, 1 * 60 * 60)
def get_popular_tags(self, limit=20):
    """Return up to ``limit`` subject tags ordered by usage, cached 5 min.

    Each entry is ``{'name': ..., 'count': ...}``.
    """
    site_prefix = '-'.join(self.site.getPhysicalPath()[1:])
    cache_key = site_prefix + '-popular-tags-' + str(limit)
    try:
        cached = cache.get(cache_key)
    except Exception:
        cached = None
    if cached is not None:
        return cached
    catalog = api.portal.get_tool('portal_catalog')
    subject_index = catalog._catalog.getIndex('Subject')
    tags = []
    for tag_name in subject_index._index.keys():
        try:
            count = len(subject_index._index[tag_name])
        except TypeError:
            # entries without len() are skipped
            continue
        tags.append({'name': tag_name, 'count': count})
    # reversed(sorted(...)) preserves the original implementation's
    # tie-ordering; do not replace with sorted(..., reverse=True)
    popular = list(reversed(sorted(
        tags, key=lambda tag: tag['count'])))[:limit]
    cache.set(cache_key, popular, 60 * 5)
    return popular
def __call__(self):
    """Grant a country login exception when a valid userid/code pair is posted.

    The code must match the cached exception record and be no more than
    12 hours old. Any failure (missing cache entry, bad data shape, etc.)
    is treated as "no grant" and the page is simply re-rendered.
    """
    auth = self.authenticator = getMultiAdapter(
        (self.context, self.request), IAuthenticator)
    userid = self.request.form.get('userid')
    code = self.request.form.get('code')
    if userid and code:
        exc_key = auth.get_country_exception_cache_key(userid)
        try:
            data = cache.get(exc_key)
            # constant-time comparison to avoid timing attacks
            if not strings_differ(data['code'], code):
                timestamp = data.get('timestamp')
                # the exception code is only valid for 12 hours
                if timestamp and (
                        time.time() < (timestamp + (12 * 60 * 60))):
                    user = api.user.get(data['userid'])
                    self.message = 'Successfully issued country login exception for {}({}).'.format(  # noqa
                        user.getProperty('fullname') or user.getUserName(),
                        user.getUserName())
                    self.success = True
                    data['granted'] = True
                    data['timestamp'] = time.time()
                    cache.set(exc_key, data, 12 * 60 * 60)
                    self.send_email(data)
        except Exception:
            # narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed; a missing or
            # malformed cache entry just means no grant (best effort)
            pass
    return self.index()
def finish(self):
    """Serialize cut-object monikers into the ``__cp`` clipboard cookie.

    Locked or immovable objects are skipped with a translated error.
    Selections larger than 20 objects are stashed in redis (when
    available) and the cookie only carries a pointer key.
    """
    monikers = []
    for ob in self.oblist:
        if ob.wl_isLocked():
            self.errors.append(
                _(u'${title} is being edited and cannot be cut.',
                  mapping={u'title': self.objectTitle(ob)}))
            continue
        if not ob.cb_isMoveable():
            self.errors.append(
                _(u'${title} is being edited and can not be cut.',
                  mapping={u'title': self.objectTitle(ob)}))
            continue
        monikers.append(Moniker(ob).dump())
    if len(monikers) > 20 and cache.redis_installed():
        # large selections would overflow the cookie; park them in the
        # cache for a day and reference them indirectly
        cache_key = str(uuid4())
        cache.set(cache_key, monikers, expire=60 * 60 * 24)
        cp = (1, [['cache:' + cache_key]])
    else:
        cp = (1, monikers)
    cp = _cb_encode(cp)
    response = self.request.response
    response.setCookie('__cp', cp, path='%s' % cookie_path(self.request))
    self.request['__cp'] = cp
def set_secure_flow_state(self, state=None):
    """Persist the secure-login flow state with a fresh timestamp.

    Returns False (without caching anything) for unrecognized states,
    True otherwise.
    """
    if state not in self.valid_flow_states:
        return False
    cache.set(
        self.get_secure_flow_key(),
        {'state': state, 'timestamp': time.time()},
        expire=self.expire)
    return True
def issue_2factor_code(self, username):
    """Generate, cache, and return a fresh two-factor code for a user."""
    code = get_random_string(8).upper()
    # store code (with issue time) to check again later
    entry = {
        'code': code,
        'timestamp': time.time()
    }
    cache.set(self.get_2factor_code_key(username), entry)
    return code
def MosaicRegistry_parseRegistry(self):
    """Cached wrapper around the original ``parseRegistry`` (2 minute TTL)."""
    cache_key = '%s-mosaic-registry' % '/'.join(
        api.portal.get().getPhysicalPath()[1:])
    try:
        return cache.get(cache_key)
    except KeyError:
        parsed = self._old_parseRegistry()
        cache.set(cache_key, parsed, 60 * 2)  # cache for 2 minutes
        return parsed
def get_ga_profile(self, service):
    """Return the Google Analytics profile id, cached for one hour.

    A cached ``None`` (or a cache miss) triggers a fresh lookup via
    the analytics helper.
    """
    cache_key = '%s-ga-profile' % '-'.join(
        api.portal.get().getPhysicalPath()[1:])
    try:
        profile = cache.get(cache_key)
    except Exception:
        profile = None
    if profile is not None:
        return profile
    profile = analytics.get_ga_profile(service)
    cache.set(cache_key, profile, 60 * 60 * 1)
    return profile
def log(self, session):
    """Stamp the session with request metadata and write it to the cache."""
    meta = {
        'updated': datetime.utcnow().isoformat(),
        'ua': self.request.environ.get('HTTP_USER_AGENT', 'unknown'),
        'ip': get_ip(self.request),
        'userid': self.user.getId(),
        'id': self.session_id
    }
    session.update(meta)
    # default of 5 hr sessions; ideally this matches the cookie timeout
    cache.set(self.cache_key, session, expire=60 * 60 * 5)
def ga_api_call(self, paths):
    """Proxy a Google Analytics query for the given page paths.

    Reads the query parameters (JSON) from the request, serves a cached
    result when one exists, otherwise executes the query against either
    the realtime or the standard GA endpoint and optionally caches the
    result for ``cache_duration`` seconds.

    Returns the GA result dict, or ``{'error': ...}`` on failure.
    """
    params = json.loads(self.request.get('params'))
    # cache key combines the site path with every query parameter
    cache_key = '-'.join(api.portal.get().getPhysicalPath()[1:])
    for key, value in params.items():
        cache_key += '%s=%s' % (key, value)
    try:
        result = cache.get(cache_key)
    except Exception:
        result = None
    if result is None:
        service = analytics.get_ga_service()
        if not service:
            return {'error': 'Could not get GA Service'}
        profile = self.get_ga_profile(service)
        if not profile:
            return {'error': 'Could not get GA Profile'}
        if self.request.get('type') == 'realtime':
            ga = service.data().realtime()
            # a truthy 'global' param means site-wide stats (no path filter)
            if not params.pop('global', False):
                # need to restrict by filters
                path_query = ','.join(
                    ['rt:pagePath==%s' % p for p in paths])
                params['filters'] = path_query
        else:
            if not params.pop('global', False):
                # need to restrict by filters
                path_query = ','.join(
                    ['ga:pagePath==%s' % p for p in paths])
                params['filters'] = path_query
            ga = service.data().ga()
        query = ga.get(ids='ga:' + profile, **params)
        result = query.execute()
        if result:
            # only cache when the caller asked for it
            cache_duration = self.request.get('cache_duration')
            if cache_duration:
                cache.set(cache_key, result, int(cache_duration))
        else:
            result = {'error': 'GA query execution yielded no result.'}
    return result
def test_authorize_code_does_not_work_out_of_time(self):
    """A 2FA code whose issue time is pushed past the 5-minute window fails."""
    self.request.form.update({
        'apiMethod': 'authorize_code',
        'username': TEST_USER_NAME
    })
    view = SecureLoginView(self.portal, self.request)
    issued_code = view.auth.issue_2factor_code(TEST_USER_NAME)
    self.request.form.update({'code': issued_code})
    # set the code's timestamp back so it appears expired
    key = view.auth.get_2factor_code_key(TEST_USER_NAME)
    stored = cache.get(key)
    stored['timestamp'] -= (5 * 60) + 1
    cache.set(key, stored)
    outcome = json.loads(view())
    self.assertFalse(outcome['success'])
def issue_country_exception(self, user, country):
    """Create and cache a country login exception record for a user.

    The record captures request metadata and a random confirmation
    code; it is cached for 12 hours and returned to the caller.
    """
    # capture information about the request
    data = {
        'referrer': self.request.get_header('REFERER'),
        'user_agent': self.request.get_header('USER_AGENT'),
        'ip': get_ip(self.request),
        'username': user.getUserName(),
        'userid': user.getId(),
        'country': country,
        'timestamp': time.time(),
        'code': get_random_string(50),
        'granted': False
    }
    cache.set(self.get_country_exception_cache_key(user.getId()),
              data, 12 * 60 * 60)  # valid for 12 hours
    return data
def test_authorize_code_does_not_work_out_of_time(self):
    """Full login flow: an expired 2FA code must be rejected."""
    view = SecureLoginView(self.portal, self.request)
    view()  # REQUESTING_AUTH_CODE state
    self.request.form.update({'username': TEST_USER_NAME})
    self.request.REQUEST_METHOD = 'POST'
    view()  # CHECK_CREDENTIALS state
    key = view.auth.get_2factor_code_key(TEST_USER_NAME)
    issued_code = cache.get(key)['code']
    self.request.form.update({
        'username': TEST_USER_NAME,
        'password': TEST_USER_PASSWORD,
        'code': issued_code
    })
    # set the code's timestamp back so it appears expired
    stored = cache.get(key)
    stored['timestamp'] -= (5 * 60) + 1
    cache.set(key, stored)
    self.request.REQUEST_METHOD = 'POST'
    outcome = json.loads(view())
    self.assertFalse(outcome['success'])
def get_available_slot_tiles(self):
    """Return slot tiles grouped for the UI, cached for 10 minutes.

    Each group maps to a list of ``{'id': ..., 'label': ...}`` entries
    resolved from the registered ITileType utilities.
    """
    cache_key = '%s-slot-tiles' % '/'.join(self.site.getPhysicalPath()[1:])
    try:
        return cache.get(cache_key)
    except Exception:
        # narrowed from a bare ``except:`` — a cache miss or backend
        # error falls through to rebuilding; SystemExit and
        # KeyboardInterrupt are no longer swallowed
        pass
    available_tiles = self.registry.get('castle.slot_tiles')
    if not available_tiles:
        available_tiles = {
            'Structure': ['plone.app.standardtiles.rawhtml']
        }
    # copy before mutating — otherwise, you're editing the value in the DB!!!!
    available_tiles = available_tiles.copy()
    for group_name, tile_ids in available_tiles.items():
        group = []
        for tile_id in tile_ids:
            tile = getUtility(ITileType, name=tile_id)
            group.append({'id': tile_id, 'label': tile.title})
        available_tiles[group_name] = group
    cache.set(cache_key, available_tiles, 600)
    return available_tiles
def parseRegistry(self):
    """Parse the mosaic registry, adding dynamic tiles and (optionally) AtD.

    Outside debug mode the fully-augmented result is served from / written
    to the cache. Dynamic tiles from the tile manager are injected under
    their categories, and when the TinyMCE spellchecker setting is 'AtD'
    the After-the-Deadline toolbar button is wired into the rich text
    configuration before caching.
    """
    cache_key = '%s-mosaic-registry' % '/'.join(
        api.portal.get().getPhysicalPath()[1:])
    if not api.env.debug_mode():
        try:
            return cache.get(cache_key)
        except KeyError:
            result = super(CastleMosaicRegistry, self).parseRegistry()
    else:
        # debug mode: always re-parse
        result = super(CastleMosaicRegistry, self).parseRegistry()
    mng = get_tile_manager()
    for tile in mng.get_tiles():
        if tile.get('hidden'):
            continue
        key = 'castle_cms_dynamic_{}'.format(tile['id'])
        category = tile.get('category') or 'advanced'
        category_id = category.replace(' ', '_').lower()
        # register the category the first time a tile references it
        if category_id not in result['plone']['app']['mosaic'][
                'tiles_categories']:
            result['plone']['app']['mosaic']['tiles_categories'][
                category_id] = {
                'label': category,
                'name': category_id,
                'weight': 100
            }
        result['plone']['app']['mosaic']['app_tiles'][key] = {
            'category': category_id,
            'default_value': None,
            'favorite': False,
            'label': tile['title'],
            'name': tile['name'],
            'tile_type_id': u'castle.cms.dynamic',
            'read_only': False,
            'rich_text': False,
            'settings': True,
            'tile_type': u'app',
            'weight': tile['weight']
        }
    registry = getUtility(IRegistry)
    settings = registry.forInterface(ITinyMCESchema, prefix="plone",
                                     check=False)
    if settings.libraries_spellchecker_choice != 'AtD':
        # no AtD: cache and return what we have
        cache.set(cache_key, result, MOSAIC_CACHE_DURATION)
        return result
    # add atd config to toolbar dynamically
    mos_settings = result['plone']['app']['mosaic']
    mos_settings['richtext_toolbar']['AtD'] = {
        'category': u'actions',
        'name': u'toolbar-AtD',
        'weight': 0,
        'favorite': False,
        'label': u'After the deadline',
        'action': u'AtD',
        'icon': False
    }
    for widget_type in _rich_text_widget_types:
        mos_settings['widget_actions'][widget_type]['actions'].append(
            'toolbar-AtD')  # noqa
    mos_settings['structure_tiles']['text']['available_actions'].append(
        'toolbar-AtD')  # noqa
    mos_settings['app_tiles']['plone_app_standardtiles_rawhtml'][
        'available_actions'].append('toolbar-AtD')  # noqa
    cache.set(cache_key, result, MOSAIC_CACHE_DURATION)
    return result
def chunk_upload(self):
    """Handle one chunk of a chunked file upload.

    Chunk 1 initializes a temp file and cache record keyed by a random
    id; later chunks are validated against that record and appended.
    The final chunk creates the content object and cleans up.

    Returns serialized object data on completion, otherwise a JSON
    ``{'success': True, 'id': ...}`` acknowledgment.

    Raises Exception for out-of-sequence or inconsistent chunks.
    """
    chunk = int(self.request.form['chunk'])
    chunk_size = int(self.request.form['chunkSize'])
    total_size = int(self.request.form['totalSize'])
    total_chunks = int(math.ceil(float(total_size) / float(chunk_size)))
    _id = self.request.form.get('id')
    if chunk > total_chunks:
        raise Exception("More chunks than what should be possible")
    cache_key_prefix = '%s-uploads-' % '/'.join(
        self.context.getPhysicalPath()[1:])
    if chunk == 1:
        # initializing chunk upload
        _id = utils.get_random_string(50)
        filename = self.request.form['name']
        tmp_dir = tempfile.mkdtemp()
        tmp_filename = os.path.join(tmp_dir, filename)
        info = {
            'last_chunk': 1,
            'total_size': total_size,
            'chunk_size': chunk_size,
            'tmp_file': tmp_filename,
            'name': filename
        }
    else:
        info = cache.get(cache_key_prefix + _id)
        # check things are matching up
        if info['last_chunk'] != chunk - 1:
            raise Exception('Invalid chunk sequence')
        if info['total_size'] != total_size:
            raise Exception('Invalid total size')
        if info['chunk_size'] != chunk_size:
            raise Exception('Inconsistent chunk size')
        info['last_chunk'] = chunk
    mode = 'wb'
    if chunk > 1:
        # appending to file now
        mode = 'ab+'
        if not os.path.exists(info['tmp_file']):
            raise Exception('No tmp upload file found')
    # ``with`` ensures the temp file is closed even if a read/write
    # raises (the original leaked the handle on error)
    with open(info['tmp_file'], mode) as fi:
        while True:
            data = self.request.form['file'].read(2 << 16)
            if not data:
                break
            fi.write(data)
    if chunk == total_chunks:
        # finish upload
        dup = False
        try:
            obj = self.create_file_content(info)
        except duplicates.DuplicateException as ex:
            obj = ex.obj
            dup = True
        tmp_dir = '/'.join(info['tmp_file'].split('/')[:-1])
        shutil.rmtree(tmp_dir)
        cache.delete(cache_key_prefix + _id)
        return dump_object_data(obj, dup)
    cache.set(cache_key_prefix + _id, info)
    # verify the write landed; narrowed from a bare ``except:`` so
    # SystemExit/KeyboardInterrupt can still break the retry loop
    check_put = None
    while check_put is None:
        try:
            check_put = cache.get(cache_key_prefix + _id)
        except Exception:
            cache.set(cache_key_prefix + _id, info)
    return json.dumps({'success': True, 'id': _id})
def chunk_upload(self):
    """Handle one chunk of a chunked upload, creating or updating content.

    Chunk 1 initializes a temp file and cache record keyed by a random
    id; later chunks are validated against that record and appended.
    On the final chunk, new content is created unless the request names
    an existing content id ('content') and field ('field'), in which
    case that object's field is updated instead.

    Returns serialized object data on completion, otherwise a JSON
    status payload.

    Raises Exception for out-of-sequence or inconsistent chunks.
    """
    chunk = int(self.request.form['chunk'])
    chunk_size = int(self.request.form['chunkSize'])
    total_size = int(self.request.form['totalSize'])
    total_chunks = int(math.ceil(float(total_size) / float(chunk_size)))
    _id = self.request.form.get('id')
    existing_id = self.request.form.get('content', None)
    field_name = self.request.form.get('field', None)
    if chunk > total_chunks:
        raise Exception("More chunks than what should be possible")
    cache_key_prefix = '%s-uploads-' % '/'.join(
        self.context.getPhysicalPath()[1:])
    if chunk == 1:
        # initializing chunk upload
        _id = utils.get_random_string(50)
        filename = self.request.form['name']
        tmp_dir = tempfile.mkdtemp()
        tmp_filename = os.path.join(tmp_dir, filename)
        info = {
            'last_chunk': 1,
            'total_size': total_size,
            'chunk_size': chunk_size,
            'tmp_file': tmp_filename,
            'name': filename
        }
    else:
        info = cache.get(cache_key_prefix + _id)
        # check things are matching up
        if info['last_chunk'] != chunk - 1:
            raise Exception('Invalid chunk sequence')
        if info['total_size'] != total_size:
            raise Exception('Invalid total size')
        if info['chunk_size'] != chunk_size:
            raise Exception('Inconsistent chunk size')
        info['last_chunk'] = chunk
    mode = 'wb'
    if chunk > 1:
        # appending to file now
        mode = 'ab+'
        if not os.path.exists(info['tmp_file']):
            raise Exception('No tmp upload file found')
    fi = open(info['tmp_file'], mode)
    while True:
        data = self.request.form['file'].read(2 << 16)
        if not data:
            break
        fi.write(data)
    fi.close()
    if chunk == total_chunks:
        # finish upload
        dup = False
        if not existing_id:
            try:
                obj = self.create_file_content(info)
            except duplicates.DuplicateException as ex:
                obj = ex.obj
                dup = True
        else:
            try:
                info['existing_id'] = existing_id
                info['field_name'] = field_name
                obj, success, msg = self.update_file_content(info)
                if not success:
                    # NOTE(review): this second call looks like a
                    # redundant retry (or leftover) — confirm intent
                    self.update_file_content(info)
                    self._clean_tmp(info)
                    return json.dumps({
                        'success': False,
                        'id': _id,
                        'reason': msg
                    })
            except Exception:
                logger.warning(
                    'Failed to update content.', exc_info=True)
                self._clean_tmp(info)
                return json.dumps({
                    'success': False,
                    'id': _id
                })
        if not info.get('field_name', '').startswith('tmp_'):
            # tmp files need to stick around and be managed later...
            self._clean_tmp(info)
        cache.delete(cache_key_prefix + _id)
        return dump_object_data(obj, dup)
    else:
        cache.set(cache_key_prefix + _id, info)
        # verify the cache write landed before acknowledging the chunk
        check_put = None
        while check_put is None:
            try:
                check_put = cache.get(cache_key_prefix + _id)
            except Exception:
                cache.set(cache_key_prefix + _id, info)
        return json.dumps({
            'success': True,
            'id': _id
        })
def save(self):
    """Write the tracked attempts back under this instance's cache key."""
    key, attempts = self.cache_key, self.attempts
    cache.set(key, attempts)