def auth_url(self, key, secret):
    """ Returns the URL used to retrieve a Google OAuth2 token. """
    safeWrite(self)
    auth_url = None
    if key and secret:
        next = '%s/analytics-auth' % api.portal.get().absolute_url()
        oauth2_token = gdata.gauth.OAuth2Token(
            client_id=key,
            client_secret=secret,
            scope="https://www.googleapis.com/auth/analytics",
            user_agent='collective-googleanalytics',
        )
        logger.debug(u"Created new OAuth2 token with id: '%s' and secret:"
                     u" '%s'" % (key, secret))
        oauth2_token.redirect_uri = next
        self._auth_token = oauth2_token
        auth_url = oauth2_token.generate_authorize_url(
            redirect_uri=next, approval_prompt='force')
        logger.debug(u"Auth URL: %s" % auth_url)
    return auth_url
def unprotected_write(obj):
    """Marks ``obj`` so that it does not trigger plone.protect's
    write protection for GET requests.

    The flag is not applied recursively.

    This currently delegates most of the work to safeWrite(), but we can't
    quite drop it yet, because:
    - safeWrite() doesn't return the object, which makes it more awkward
      to use
    - safeWrite() doesn't unwrap non-persistent attribute annotations

    TODO: Possibly move this functionality upstream (into plone.protect)
    """
    if obj is None:
        return obj

    # Unwrap non-persistent AttributeAnnotations
    if isinstance(obj, AttributeAnnotations):
        unprotected_write(getattr(obj.obj, '__annotations__', None))
        return obj

    # safeWrite all buckets of a BTree
    if getattr(obj, '_firstbucket', None):
        for bucket in get_buckets_for_btree(obj):
            safeWrite(bucket)

    safeWrite(obj)
    return obj
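A hedged usage sketch, not taken from the original source: because unprotected_write() returns its argument, it can wrap an annotation lookup inline; the `context` variable and the annotation key are illustrative assumptions.

from zope.annotation.interfaces import IAnnotations

# unprotected_write() marks the annotations storage as a safe write and
# hands it back, so it can be updated during a GET without tripping
# plone.protect's auto CSRF protection.
annotations = unprotected_write(IAnnotations(context))
annotations['example.counter'] = annotations.get('example.counter', 0) + 1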
def lock_info(self):
    info = []
    rtokens = dict([(v['token'], v['type'])
                    for v in self._locks(False).values()])

    jar = self.context._p_jar
    if jar is not None:
        isReadOnly = jar.isReadOnly()
    else:
        isReadOnly = False
    lock_mapping = self.context.wl_lockmapping(not isReadOnly)
    safeWrite(lock_mapping)
    for lock in lock_mapping.values():
        if not lock.isValid():
            continue  # Skip invalid/expired locks
        token = lock.getLockToken()
        creator = lock.getCreator()
        # creator can be None when locked by an anonymous user
        if creator is not None:
            creator = creator[1]
        info.append({
            'creator': creator,
            'time': lock.getModifiedTime(),
            'token': token,
            'type': rtokens.get(token, None),
        })
    return info
def set_token(self, code, state):
    safeWrite(self)
    logger.debug("Received callback from Google with code '%s'" % code)
    flow = google_auth_oauthlib.flow.Flow.from_client_config(
        dict(web=self._state), scopes=SCOPES, state=state)
    flow.redirect_uri = '%s/analytics-auth' % api.portal.get(
    ).absolute_url()
    try:
        flow.fetch_token(code=code)
    except InvalidGrantError:
        logger.debug("Code was invalid, could not get tokens")
        message = _(u'Authorization failed. Google Analytics for '
                    u'Plone received an invalid token.')
        return message
    except Warning as e:
        # Probably due to: Warning: Scope has changed from
        # "https://www.googleapis.com/auth/analytics.readonly" to
        # "https://www.googleapis.com/auth/analytics
        # https://www.googleapis.com/auth/analytics.readonly".
        # This most likely happens because the user already has a token,
        # but for the old version, which was more permissive.
        logger.info("Warning on fetch_token: %s" % str(e))

    logger.debug("Code was valid, got '%s' as access_token and '%s' as "
                 "refresh_token. Token will expire on '%s'" %
                 (flow.credentials.token, flow.credentials.refresh_token,
                  flow.credentials.expiry))
    self._update_credentials(flow.credentials)
    message = _(u'Authorization succeeded. You may now configure '
                u'Google Analytics for Plone.')
    return message
def __call__(self):
    """ """
    request = self.context.REQUEST

    if request.get('save_json') is not None:
        if self.context.portal_type not in ['SchemaFormFolder']:
            api.portal.show_message(
                message='Cannot add a schema in this context!',
                request=request)
            return request.RESPONSE.redirect(
                self.context.absolute_url() + '/formbuilder')
        if str(request.get('schema_json')).strip() in ['None', '']:
            api.portal.show_message(message='Form cannot be empty',
                                    request=request)
            return request.RESPONSE.redirect(
                self.context.absolute_url() + '/formbuilder')

        obj = self.context
        safeWrite(obj, request)
        raw_json = request.get('schema_json')
        schema_json = raw_json  # json.loads(raw_json)
        obj.schema_json = schema_json
        obj.reindexObject()
        api.portal.show_message(message='Saved form successfully!',
                                request=request)

    return request.RESPONSE.redirect(self.context.absolute_url())
def wl_lockmapping(self, killinvalids=0, create=0):
    has_write_locks = hasattr(self, '_dav_writelocks')
    locks = self._old_wl_lockmapping(killinvalids=killinvalids,
                                     create=create)
    try:
        safeWrite(locks)
        if not has_write_locks and create:
            # first time writing to object, need to mark it safe
            safeWrite(self)
    except AttributeError:
        # not a persistent class, ignore
        pass
    return locks
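The `_old_wl_lockmapping` attribute suggests this wrapper is installed as a monkey patch over the original method. A minimal sketch of how such a patch could be applied, assuming a Zope 2 setup where `webdav.Lockable.LockableItem` provides the original `wl_lockmapping`; the aliasing shown here is an assumption, not taken from the source.

from webdav.Lockable import LockableItem

# Stash the original implementation once, then swap in the wrapper above so
# every lock-mapping access is marked as a safe write.
if not hasattr(LockableItem, '_old_wl_lockmapping'):
    LockableItem._old_wl_lockmapping = LockableItem.wl_lockmapping
    LockableItem.wl_lockmapping = wl_lockmapping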
def _fixup(self):
    # Due to compatibility reasons this method fixes the data structure
    # for old Taxonomy instances.
    # XXX: remove this in version 2.0 to prevent write on read
    if self.order is None:
        safeWrite(self, getRequest())
        self.order = PersistentDict()
        self.count = PersistentDict()
    if self.version is None:
        safeWrite(self, getRequest())
        self.version = PersistentDict()
def ruleAssignmentManagerAdapterFactory(context):
    """When adapting an IRuleAssignable, get an IRuleAssignmentManager by
    finding one in the object's annotations. The container will be created
    if necessary.
    """
    annotations = IAnnotations(context)
    manager = annotations.get(KEY, None)
    if manager is None:
        annotations[KEY] = RuleAssignmentManager()
        manager = annotations[KEY]
        # protect both context and its annotations from a write-on-read error
        safeWrite(context)
        safeWrite(context.__annotations__)
    return manager
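For orientation, a hedged sketch of how this annotation-backed factory is typically consumed; the import path follows plone.contentrules conventions and is an assumption here, as is the `context` variable.

from plone.contentrules.engine.interfaces import IRuleAssignmentManager

# Adapting a rule-assignable object returns the stored (or newly created)
# assignment manager; the two safeWrite() calls above keep the lazy creation
# from tripping plone.protect's write-on-read check during a GET.
assignments = IRuleAssignmentManager(context)
print(list(assignments.keys()))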
def ensure_plone_protect_changes_marked_as_save(request):
    """Event handler for marking all objects touched on this request as
    safe writes.
    """
    if not is_plone_protect_autocsrf_enabled():
        return

    from plone.protect.auto import safeWrite
    from plone.transformchain.interfaces import ITransform

    transform = getMultiAdapter((None, request), ITransform,
                                name='plone.protect.autocsrf')
    for obj in transform._registered_objects():
        safeWrite(obj, request)
def __init__(self, *args, **kwargs):
    super(FilteredResults, self).__init__(*args, **kwargs)
    self.site = getSite()
    self.analytics_tool = getToolByName(self.site, 'portal_analytics')
    self.utility = getUtility(IAnalyticsModerationUtility)

    analytics_tool = aq_base(self.analytics_tool)
    self.cached_results = getattr(analytics_tool, '_cached_results', None)
    if not self.cached_results:
        if HAS_AUTO_PROTECT:
            safeWrite(analytics_tool)
        self.cached_results = analytics_tool._cached_results = \
            PersistentMapping()
        if HAS_AUTO_PROTECT:
            safeWrite(self.cached_results)
def cached_tools_json(context, request):
    safeWrite(context, request)
    now = datetime.now()
    short_cache = now + timedelta(minutes=5)
    long_cache = now + timedelta(minutes=15)

    if request.get('invalidate-cache'):
        mtool = api.portal.get_tool('portal_membership')
        if mtool.checkPermission('cmf.ModifyPortalContent', context):
            context.cache_until = now

    if not hasattr(context, 'json'):
        try:
            context.json = get_json()
            context.cache_until = long_cache
        except (ValueError, AttributeError), err:
            log.error('Failed to retrieve tools JSON from {}: {}'.format(
                get_json_url(), err))
            return []
def _locks(self, create=True):
    if self.__locks is not None:
        return self.__locks

    annotations = IAnnotations(self.context)
    locks = annotations.get(ANNOTATION_KEY, None)
    if locks is None and create:
        locks = annotations.setdefault(ANNOTATION_KEY, PersistentDict())
        try:
            safeWrite(annotations.obj.__annotations__)
        except AttributeError:
            pass

    if locks is not None:
        self.__locks = locks
        return self.__locks
    else:
        return {}
def cached_json(self):
    from plone.protect.auto import safeWrite
    safeWrite(self.context, self.request)
    now = datetime.now()
    short_cache = now + timedelta(minutes=5)
    long_cache = now + timedelta(minutes=15)

    if self.request.get('invalidate-cache'):
        mtool = api.portal.get_tool('portal_membership')
        if mtool.checkPermission('cmf.ModifyPortalContent', self.context):
            self.context.cache_until = now

    if not hasattr(self.context, 'json'):
        try:
            self.context.json = self.get_json()
            self.context.cache_until = long_cache
        except (ValueError, AttributeError), err:
            log.error('Failed to retrieve tools JSON from {}: {}'.format(
                self.json_url, err))
            return []
def lock(self, lock_type=STEALABLE_LOCK, children=False):
    settings = queryAdapter(self.context, ILockSettings)
    if settings is None:
        registry = getUtility(IRegistry)
        settings = registry.forInterface(IEditingSchema, prefix='plone')
    if settings is not None and settings.lock_on_ttw_edit is False:
        return

    if not self.locked():
        user = getSecurityManager().getUser()
        depth = children and 'infinity' or 0
        lock = LockItem(user, depth=depth, timeout=lock_type.timeout * 60L)
        token = lock.getLockToken()
        self.context._v_safe_write = True
        self.context.wl_setLock(token, lock)

        locks = self._locks()
        locks[lock_type.__name__] = dict(type=lock_type, token=token)
        safeWrite(self.context)
def handlePubAfterTraversal(self, event):
    """Support plone.protect's auto CSRF protection as well as possible.

    The problem is that we use the same connection and transaction for
    preparation as for performing a request with ftw.testbrowser.
    This means that we may already have changed objects on the connection,
    but the change is not from within the request.

    We fix that by marking all objects which are already marked as changed
    on the current connection as safe for CSRF.
    This also means that the auto protection no longer triggers within the
    test for subsequent requests.
    """
    transform = getMultiAdapter((self['portal'], event.request), ITransform,
                                name='plone.protect.autocsrf')
    for obj in transform._registered_objects():
        safeWrite(obj, event.request)
def auth_url(self, key=None, secret=None):
    safeWrite(self)
    # Special case to wipe our stored secrets
    if key == '' or secret == '':
        self._state = {}
        return
    elif self._state is None:
        return
    elif key is None or secret is None:
        key = self._state.get('client_id', None)
        secret = self._state.get('client_secret', None)
        if not key or not secret:
            return

    client_config = dict(
        client_id=key,
        client_secret=secret,
        auth_uri="https://accounts.google.com/o/oauth2/auth",
        token_uri="https://accounts.google.com/o/oauth2/token")
    flow = google_auth_oauthlib.flow.Flow.from_client_config(
        dict(web=client_config), scopes=SCOPES)

    # The URI created here must exactly match one of the authorized redirect
    # URIs for the OAuth 2.0 client, which you configured in the API Console.
    # If this value doesn't match an authorized URI, you will get a
    # 'redirect_uri_mismatch' error.
    flow.redirect_uri = '%s/analytics-auth' % api.portal.get(
    ).absolute_url()

    authorization_url, state = flow.authorization_url(
        # Enable offline access so that you can refresh an access token
        # without re-prompting the user for permission. Recommended for
        # web server apps.
        access_type='offline',
        # Enable incremental authorization. Recommended as a best practice.
        include_granted_scopes='true')

    # Store the state so the callback can verify the auth server response.
    self._state = client_config
    return authorization_url
def render_attachment_preview(self, attachment):
    sm = getSecurityManager()
    if not sm.checkPermission(permissions.View, self.context):
        raise Unauthorized

    r = self.request.response

    # avoid the long dreaded CSRF error
    annotations = IAnnotations(attachment)
    if not annotations.get('collective.documentviewer', None):
        safeWrite(attachment)

    settings = Settings(attachment)  # possibly creates the annotation

    if self.preview_type not in ('large', 'normal', 'small'):
        self.preview_type = 'small'
    if self.page is None:
        self.page = 1

    filepath = u'%s/dump_%s.%s' % (self.preview_type, self.page,
                                   settings.pdf_image_format)
    try:
        blob = settings.blob_files[filepath]
    except TypeError:
        # 'NoneType' object has no attribute '__getitem__'
        # happens e.g. when the preview for a stream attachment is missing
        return

    blobfi = openBlob(blob)
    length = os.fstat(blobfi.fileno()).st_size
    blobfi.close()

    ext = os.path.splitext(os.path.normcase(filepath))[1][1:]
    if ext == 'txt':
        ct = 'text/plain'
    else:
        ct = 'image/%s' % ext

    r.setHeader('Content-Type', ct)
    r.setHeader('Last-Modified', rfc1123_date(self.context._p_mtime))
    r.setHeader('Accept-Ranges', 'bytes')
    r.setHeader("Content-Length", length)
    request_range = handleRequestRange(self.context, length, self.request,
                                       self.request.response)
    return BlobStreamIterator(blob, **request_range)
def lock(self, lock_type=STEALABLE_LOCK, children=False):
    settings = queryAdapter(self.context, ILockSettings)
    if settings is None:
        # No context specific adapter, is this a Plone site?
        pprops = getToolByName(self.context, "portal_properties", None)
        if pprops is not None and "site_properties" in pprops.objectIds():
            settings = pprops.site_properties
    if settings is not None and settings.lock_on_ttw_edit is False:
        return

    if not self.locked():
        user = getSecurityManager().getUser()
        depth = children and "infinity" or 0
        lock = LockItem(user, depth=depth, timeout=lock_type.timeout * 60L)
        token = lock.getLockToken()
        self.context._v_safe_write = True
        self.context.wl_setLock(token, lock)

        locks = self._locks()
        locks[lock_type.__name__] = dict(type=lock_type, token=token)
        safeWrite(self.context)
def test_safe_write_empty_returns_true(self):
    safeWrite(self.portal, self.request)
    transform = ProtectTransform(self.portal, self.request)
    transform._registered_objects = lambda: [self.portal]
    self.assertTrue(transform._check())
def makeClientRequest(self, api_request, **query_args):
    """Get the authenticated client object and make the specified request.
    We need this wrapper method so that we can intelligently handle errors.
    """
    safeWrite(self)
    # Workaround for the lack of timeout handling in gdata. This approach
    # comes from collective.twitterportlet. See:
    # https://svn.plone.org/svn/collective/collective.twitterportlet/
    # timeout = socket.getdefaulttimeout()

    # If the current timeout is set to GOOGLE_REQUEST_TIMEOUT, then another
    # thread has called this method before we had a chance to reset the
    # default timeout. In that case, we fall back to the system default
    # timeout value.
    # if timeout == GOOGLE_REQUEST_TIMEOUT:
    #     timeout = DEFAULT_TIMEOUT
    #     logger.warning('Conflict while setting socket timeout.')

    creds, service = self._getService()
    try:
        # socket.setdefaulttimeout(GOOGLE_REQUEST_TIMEOUT)
        try:
            result = None
            if api_request == 'webproperties':
                result = service.management().webproperties().list(
                    accountId='~all').execute()
            elif api_request == 'accounts':
                result = service.management().accounts().list().execute()
            elif api_request == 'profiles':
                result = service.management().profiles().list(
                    accountId='~all', webPropertyId='~all').execute()
            elif api_request == 'data':
                result = service.data().ga().get(**query_args).execute()
            else:
                raise ValueError("Unsupported API request")
            self._update_credentials(creds)
            return result
        except RefreshError, e:
            reason = e.message
            if any([r in reason for r in [
                    'Token invalid', 'Forbidden', 'Unauthorized',
                    'invalid_grant']]):
                # Reset the stored auth token.
                self._state['token'] = None
                settings = self.get_settings()
                settings.reports_profile = None
                raise error.BadAuthenticationError, \
                    'You need to authorize with Google'
            else:
                raise
    except (socket.sslerror, socket.timeout):
        raise error.RequestTimedOutError, 'The request to Google timed out'
    except (socket.gaierror, ResponseNotReady):
        raise error.RequestTimedOutError, \
            'You may not have internet access. Please try again later.'
    except urllib2.HTTPError as e:
        raise error.InvalidRequestMethodError, str(e)
def project_added(obj, event):
    request = getRequest()
    safeWrite(obj, request)
    obj.allow_discussion = True
def __call__(self):
    self.context.foo = 'bar'
    safeWrite(self)
    return 'done'
def transform(self, result, encoding):
    site_url = 'foobar'
    if self.site:
        site_url = self.site.absolute_url()

    registered = self._registered_objects()
    if len(registered) > 0 and \
            not IDisableCSRFProtection.providedBy(self.request):
        # in Plone 4, we need to do some more trickery to
        # prevent write-on-read errors
        annotation_keys = (
            'plone.contentrules.localassignments',
            'syndication_settings',
            'plone.portlets.contextassignments')
        for obj in registered:
            if isinstance(obj, OOBTree):
                safe = False
                for key in annotation_keys:
                    try:
                        if key in obj:
                            safe = True
                            break
                    except TypeError:
                        pass
                if safe:
                    safeWrite(obj)
            elif isinstance(obj, ATBlob):
                # writing scales is fine
                safeWrite(obj)

        # check referrer/origin header as a backstop to check
        # against false positives for write-on-read errors
        referrer = self.request.environ.get('HTTP_REFERER')
        if referrer:
            if referrer.startswith(site_url + '/'):
                alsoProvides(self.request, IDisableCSRFProtection)
        else:
            origin = self.request.environ.get('HTTP_ORIGIN')
            if origin and origin == site_url:
                alsoProvides(self.request, IDisableCSRFProtection)

    result = self.parseTree(result, encoding)
    if result is None:
        return None

    root = result.tree.getroot()
    try:
        token = createToken(manager=self.key_manager)
    except ComponentLookupError:
        return

    if self.site is not None:
        body = root.cssselect('body')[0]
        protect_script = etree.Element("script")
        protect_script.attrib.update({
            'type': "application/javascript",
            'src': "%s/++resource++protect.js" % site_url,
            'data-site-url': site_url,
            'data-token': token,
            'id': 'protect-script'
        })
        body.append(protect_script)

    # guess zmi, if it is, rewrite all links
    last_path = self.request.URL.split('/')[-1]
    if last_path == 'manage' or last_path.startswith('manage_'):
        root.make_links_absolute(self.request.URL)

        def rewrite_func(url):
            return addTokenToUrl(
                url, self.request, manager=self.key_manager)

        root.rewrite_links(rewrite_func)

    # Links to add token to so we don't trigger the CSRF warnings
    for anchor in root.cssselect(_add_rule_token_selector):
        url = anchor.attrib.get('href')
        # addTokenToUrl only converts urls on the same site
        anchor.attrib['href'] = addTokenToUrl(
            url, self.request, manager=self.key_manager)

    return result
def deleteText(self, obj):
    """ """
    safeWrite(obj, self.REQUEST)
    obj.text = RichTextValue(u"", 'text/plain', 'text/html')
def makeClientRequest(self, feed, *args, **kwargs):
    """Get the authenticated client object and make the specified request.
    We need this wrapper method so that we can intelligently handle errors.
    """
    safeWrite(self)
    # XXX: Have to use v2.4. gdata 2.0.18 doesn't yet support v3 for
    # analytics
    feed_url = 'https://www.googleapis.com/analytics/v2.4/' + feed
    client = self._getAuthenticatedClient()

    if feed.startswith('management'):
        query_method = client.get_management_feed
    if feed.startswith('data'):
        query_method = client.get_data_feed

    # Workaround for the lack of timeout handling in gdata. This approach
    # comes from collective.twitterportlet. See:
    # https://svn.plone.org/svn/collective/collective.twitterportlet/
    timeout = socket.getdefaulttimeout()

    # If the current timeout is set to GOOGLE_REQUEST_TIMEOUT, then another
    # thread has called this method before we had a chance to reset the
    # default timeout. In that case, we fall back to the system default
    # timeout value.
    if timeout == GOOGLE_REQUEST_TIMEOUT:
        timeout = DEFAULT_TIMEOUT
        logger.warning('Conflict while setting socket timeout.')

    try:
        socket.setdefaulttimeout(GOOGLE_REQUEST_TIMEOUT)
        try:
            expired = False
            # The token gets refreshed when a new request is made to Google,
            # so check before
            if self._auth_token.token_expiry < datetime.now():
                logger.debug("This access token expired, will try to "
                             "refresh it.")
                expired = True
            result = query_method(feed_url, *args, **kwargs)
            if expired:
                logger.debug("Token was refreshed successfully. New expiry "
                             "date: %s" % self._auth_token.token_expiry)
            return result
        except (Unauthorized, RequestError), e:
            if hasattr(e, 'reason'):
                reason = e.reason
            else:
                reason = e[0]['reason']
            if 'Token invalid' in reason or reason in ('Forbidden',
                                                       'Unauthorized'):
                # Reset the stored auth token.
                self._auth_token = None
                settings = self.get_settings()
                settings.reports_profile = None
                raise error.BadAuthenticationError, \
                    'You need to authorize with Google'
            else:
                raise
    except (socket.sslerror, socket.timeout):
        raise error.RequestTimedOutError, 'The request to Google timed out'
    except socket.gaierror:
        raise error.RequestTimedOutError, \
            'You may not have internet access. Please try again later.'
def transform(self, result, encoding):
    site_url = 'foobar'
    if self.site:
        site_url = self.site.absolute_url()

    registered = self._registered_objects()
    if len(registered) > 0 and \
            not IDisableCSRFProtection.providedBy(self.request):
        # in Plone 4, we need to do some more trickery to
        # prevent write-on-read errors
        annotation_keys = ('plone.contentrules.localassignments',
                           'syndication_settings',
                           'plone.portlets.contextassignments')
        for obj in registered:
            if isinstance(obj, OOBTree):
                safe = False
                for key in annotation_keys:
                    if key in obj:
                        safe = True
                        break
                if safe:
                    safeWrite(obj)
            elif isinstance(obj, ATBlob):
                # writing scales is fine
                safeWrite(obj)

        # check referrer/origin header as a backstop to check
        # against false positives for write-on-read errors
        referrer = self.request.environ.get('HTTP_REFERER')
        if referrer:
            if referrer.startswith(site_url):
                alsoProvides(self.request, IDisableCSRFProtection)
        else:
            origin = self.request.environ.get('HTTP_ORIGIN')
            if origin and origin == site_url:
                alsoProvides(self.request, IDisableCSRFProtection)

    result = self.parseTree(result, encoding)
    if result is None:
        return None

    root = result.tree.getroot()
    try:
        token = createToken(manager=self.key_manager)
    except ComponentLookupError:
        return

    if self.site is not None:
        body = root.cssselect('body')[0]
        protect_script = etree.Element("script")
        protect_script.attrib.update({
            'type': "text/javascript",
            'src': "%s/++resource++protect.js" % site_url,
            'data-site-url': site_url,
            'data-token': token,
            'id': 'protect-script'
        })
        body.append(protect_script)

    # guess zmi, if it is, rewrite all links
    last_path = self.request.URL.split('/')[-1]
    if last_path == 'manage' or last_path.startswith('manage_'):
        root.make_links_absolute(self.request.URL)

        def rewrite_func(url):
            return addTokenToUrl(url, self.request,
                                 manager=self.key_manager)

        root.rewrite_links(rewrite_func)

    # Links to add token to so we don't trigger the CSRF warnings
    for anchor in root.cssselect(_add_rule_token_selector):
        url = anchor.attrib.get('href')
        # addTokenToUrl only converts urls on the same site
        anchor.attrib['href'] = addTokenToUrl(url, self.request,
                                              manager=self.key_manager)

    return result
def unlock(self, lock_type=STEALABLE_LOCK, stealable_only=True):
    self.clear_locks()
    locks = self._locks()
    safeWrite(locks)
    safeWrite(self.context)