def confirm_email_get(token, auth=None, **kwargs):
    """
    View for email confirmation links. Authenticates and redirects to user
    settings page if confirmation is successful, otherwise shows an
    "Expired Link" error.
    HTTP Method: GET

    :param str token: email confirmation token from the link
    :param Auth auth: optional authentication context of the current visitor
    :raises: HTTPError(404) when no user matches ``kwargs['uid']``
    :raises: HTTPError(400) when the token is invalid or expired
    """
    user = User.load(kwargs['uid'])
    # Bug fix: check existence BEFORE touching user attributes. The original
    # read `user.date_confirmed` first, so an unknown uid raised
    # AttributeError (HTTP 500) instead of the intended 404.
    if user is None:
        raise HTTPError(http.NOT_FOUND)

    is_merge = 'confirm_merge' in request.args
    is_initial_confirmation = not user.date_confirmed
    log_out = request.args.get('logout', None)

    # if the user is merging or adding an email (they already are an osf user)
    if log_out:
        return auth_email_logout(token, user)

    # Bug fix: guard `user.merged_by` — it is None for users that were never
    # merged; the original dereferenced `user.merged_by._id` whenever a
    # *different* user was logged in, raising AttributeError.
    if auth and auth.user and (
            auth.user._id == user._id or
            (user.merged_by is not None and auth.user._id == user.merged_by._id)):
        if not is_merge:
            # determine if the user registered through a campaign
            campaign = campaigns.campaign_for_user(user)
            if campaign:
                return redirect(campaigns.campaign_url_for(campaign))
            # go to home page with push notification
            if len(auth.user.emails) == 1 and len(auth.user.email_verifications) == 0:
                status.push_status_message(language.WELCOME_MESSAGE, kind='default', jumbotron=True, trust=True)
            if token in auth.user.email_verifications:
                status.push_status_message(language.CONFIRM_ALTERNATE_EMAIL_ERROR, kind='danger', trust=True)
            return redirect(web_url_for('index'))

        status.push_status_message(language.MERGE_COMPLETE, kind='success', trust=False)
        return redirect(web_url_for('user_account'))

    try:
        user.confirm_email(token, merge=is_merge)
    except exceptions.EmailConfirmTokenError as e:
        raise HTTPError(http.BAD_REQUEST, data={
            'message_short': e.message_short,
            'message_long': e.message_long
        })

    if is_initial_confirmation:
        user.update_date_last_login()
        user.save()
        # send out our welcome message
        mails.send_mail(to_addr=user.username, mail=mails.WELCOME, mimetype='html', user=user)

    # new random verification key, allows CAS to authenticate the user w/o password one-time only.
    user.verification_key = generate_verification_key()
    user.save()
    # redirect to CAS and authenticate the user with a verification key.
    return redirect(cas.get_login_url(request.url, username=user.username, verification_key=user.verification_key))
def gitlab_set_config(auth, **kwargs):
    """Link a GitLab repository to a node's GitLab addon settings.

    Reads ``gitlab_user``, ``gitlab_repo`` and ``gitlab_repo_id`` from the
    JSON request body, verifies the repo is reachable with the node's
    external account, and — when the selection changed — re-creates the
    webhook, updates the node settings and logs the change on the node.

    :raises: HTTPError(400) when addon settings are missing or request
        fields are empty
    :return: ``{}`` on success, or ``({'message': ...}, 400)`` when the
        repo cannot be accessed
    """
    node_settings = kwargs.get('node_addon', None)
    node = kwargs.get('node', None)
    user_settings = kwargs.get('user_addon', None)

    try:
        # Fall back to the objects attached to the addon settings.
        if not node:
            node = node_settings.owner
        if not user_settings:
            user_settings = node_settings.user_settings
    except AttributeError:
        # node_settings itself was None
        raise HTTPError(http.BAD_REQUEST)

    # Parse request
    gitlab_user_name = request.json.get('gitlab_user', '')
    gitlab_repo_name = request.json.get('gitlab_repo', '')
    gitlab_repo_id = request.json.get('gitlab_repo_id', '')

    if not gitlab_user_name or not gitlab_repo_name or not gitlab_repo_id:
        raise HTTPError(http.BAD_REQUEST)

    # Verify that repo exists and that user can access
    connection = GitLabClient(external_account=node_settings.external_account)
    repo = connection.repo(gitlab_repo_id)
    if repo is None:
        # Only reveal the permission hint when the caller has user settings.
        if user_settings:
            message = ('Cannot access repo. Either the repo does not exist '
                       'or your account does not have permission to view it.')
        else:
            message = ('Cannot access repo.')
        return {'message': message}, http.BAD_REQUEST

    changed = (
        gitlab_user_name != node_settings.user or
        gitlab_repo_name != node_settings.repo or
        gitlab_repo_id != node_settings.repo_id
    )

    # Update hooks
    if changed:
        # Delete existing hook, if any
        node_settings.delete_hook()

        # Update node settings
        node_settings.user = gitlab_user_name
        node_settings.repo = gitlab_repo_name
        node_settings.repo_id = gitlab_repo_id

        # Log repo select
        node.add_log(
            action='gitlab_repo_linked',
            params={
                'project': node.parent_id,
                'node': node._id,
                'gitlab': {
                    'user': gitlab_user_name,
                    'repo': gitlab_repo_name,
                    'repo_id': gitlab_repo_id,
                }
            },
            auth=auth,
        )

        # Add new hook
        if node_settings.user and node_settings.repo:
            node_settings.add_hook(save=False)

        node_settings.save()

    return {}
def verify_user_match(auth, **kwargs):
    """Raise HTTP 403 when a ``uid`` kwarg is supplied and differs from the
    authenticated user's id; a missing or empty ``uid`` is accepted."""
    requested_uid = kwargs.get('uid')
    if not requested_uid:
        return
    if requested_uid != auth.user._id:
        raise HTTPError(http.FORBIDDEN)
def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 3600,
                     email_template='default'):
    """
    Unregistered user claiming a user account as an contributor to a project.
    Send an email for claiming the account. Either sends to the given email
    or the referrer's email, depending on the email address provided.

    :param str email: The address given in the claim user form
    :param User unclaimed_user: The User record to claim.
    :param Node node: The node where the user claimed their account.
    :param bool notify: If True and an email is sent to the referrer, an
        email will also be sent to the invited user about their pending
        verification.
    :param int throttle: Time period (in seconds) after the referrer is
        emailed during which the referrer will not be emailed again.
    :param str email_template: the email template to use
    :return: the address the claim email was sent to
    :raise: HTTPError(http.BAD_REQUEST) when the referrer throttle has not
        expired yet
    """
    claimer_email = email.lower().strip()
    unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key)
    referrer = User.load(unclaimed_record['referrer_id'])
    claim_url = unclaimed_user.get_claim_url(node._primary_key, external=True)

    # Option 1:
    # When adding the contributor, the referrer provides both name and email.
    # The given email is the same provided by user, just send to that email.
    preprint_provider = None
    if unclaimed_record.get('email') == claimer_email:
        # check email template for branded preprints
        if email_template == 'preprint':
            email_template, preprint_provider = find_preprint_provider(node)
            # bail out silently when no branded template/provider is found
            if not email_template or not preprint_provider:
                return
            mail_tpl = getattr(mails, 'INVITE_PREPRINT')(email_template, preprint_provider)
        else:
            # NOTE(review): the `.format()` call has no placeholder, so this
            # always resolves to `mails.INVITE_DEFAULT` regardless of
            # `email_template` — presumably `'INVITE_{}'` was intended;
            # confirm before changing.
            mail_tpl = getattr(mails, 'INVITE_DEFAULT'.format(email_template.upper()))

        to_addr = claimer_email
        unclaimed_record['claimer_email'] = claimer_email
        unclaimed_user.save()
    # Option 2:
    # TODO: [new improvement ticket] this option is disabled from preprint but still available on the project page
    # When adding the contributor, the referred only provides the name.
    # The account is later claimed by some one who provides the email.
    # Send email to the referrer and ask her/him to forward the email to the user.
    else:
        # check throttle
        timestamp = unclaimed_record.get('last_sent')
        if not throttle_period_expired(timestamp, throttle):
            raise HTTPError(http.BAD_REQUEST, data=dict(
                message_long='User account can only be claimed with an existing user once every 24 hours'
            ))
        # roll the valid token for each email, thus user cannot change email and approve a different email address
        verification_key = generate_verification_key(verification_type='claim')
        unclaimed_record['last_sent'] = get_timestamp()
        unclaimed_record['token'] = verification_key['token']
        unclaimed_record['expires'] = verification_key['expires']
        unclaimed_record['claimer_email'] = claimer_email
        unclaimed_user.save()

        # regenerate after the token roll so the link carries the new token
        claim_url = unclaimed_user.get_claim_url(node._primary_key, external=True)
        # send an email to the invited user without `claim_url`
        if notify:
            pending_mail = mails.PENDING_VERIFICATION
            mails.send_mail(
                claimer_email,
                pending_mail,
                user=unclaimed_user,
                referrer=referrer,
                fullname=unclaimed_record['name'],
                node=node
            )
        mail_tpl = mails.FORWARD_INVITE
        to_addr = referrer.username

    # Send an email to the claimer (Option 1) or to the referrer (Option 2) with `claim_url`
    mails.send_mail(
        to_addr,
        mail_tpl,
        user=unclaimed_user,
        referrer=referrer,
        node=node,
        claim_url=claim_url,
        email=claimer_email,
        fullname=unclaimed_record['name'],
        branded_service_name=preprint_provider
    )

    return to_addr
def claim_user_form(auth, **kwargs):
    """
    View for rendering the set password page for a claimed user.
    Must have ``token`` as a querystring argument.
    Renders the set password form, validates it, and sets the user's password.
    HTTP Method: GET, POST

    :raises: HTTPError(400) when the unregistered user does not exist, the
        token is invalid/expired, or no email is associated with the account
    """
    uid, pid = kwargs['uid'], kwargs['pid']
    # token may arrive via the POSTed form or via the querystring
    token = request.form.get('token') or request.args.get('token')
    user = User.load(uid)

    # If unregistered user is not in database, or url bears an invalid token raise HTTP 400 error
    if not user or not verify_claim_token(user, token, pid):
        error_data = {
            'message_short': 'Invalid url.',
            'message_long': 'Claim user does not exists, the token in the URL is invalid or has expired.'
        }
        raise HTTPError(http.BAD_REQUEST, data=error_data)

    # If user is logged in, redirect to 're-enter password' page
    if auth.logged_in:
        return redirect(web_url_for('claim_user_registered', uid=uid, pid=pid, token=token))

    unclaimed_record = user.unclaimed_records[pid]
    user.fullname = unclaimed_record['name']
    user.update_guessed_names()
    # The email can be the original referrer email if no claimer email has been specified.
    claimer_email = unclaimed_record.get('claimer_email') or unclaimed_record.get('email')

    # If there is a registered user with this email, redirect to 're-enter password' page
    try:
        user_from_email = User.objects.get(emails__icontains=claimer_email) if claimer_email else None
    except User.DoesNotExist:
        user_from_email = None
    if user_from_email and user_from_email.is_registered:
        return redirect(web_url_for('claim_user_registered', uid=uid, pid=pid, token=token))

    form = SetEmailAndPasswordForm(request.form, token=token)
    if request.method == 'POST':
        if not form.validate():
            forms.push_errors_to_status(form.errors)
        elif settings.RECAPTCHA_SITE_KEY and not validate_recaptcha(
                request.form.get('g-recaptcha-response'), remote_ip=request.remote_addr):
            status.push_status_message('Invalid captcha supplied.', kind='error')
        else:
            username, password = claimer_email, form.password.data
            if not username:
                raise HTTPError(http.BAD_REQUEST, data=dict(
                    message_long='No email associated with this account. Please claim this '
                    'account on the project to which you were invited.'
                ))

            user.register(username=username, password=password)
            # Clear unclaimed records
            user.unclaimed_records = {}
            user.verification_key = generate_verification_key()
            user.save()
            # Authenticate user and redirect to project page
            status.push_status_message(language.CLAIMED_CONTRIBUTOR, kind='success', trust=True)
            # Redirect to CAS and authenticate the user with a verification key.
            return redirect(cas.get_login_url(
                web_url_for('view_project', pid=pid, _absolute=True),
                username=user.username,
                verification_key=user.verification_key
            ))

    # GET (or failed POST): render the set-password form context
    return {
        'firstname': user.given_name,
        'email': claimer_email if claimer_email else '',
        'fullname': user.fullname,
        'form': forms.utils.jsonify(form) if is_json_request() else form,
    }
def view_institution(inst_id, **kwargs):
    """Serialize a single, non-deleted institution; raise 404 when absent."""
    lookup = Q('_id', 'eq', inst_id) & Q('is_deleted', 'ne', True)
    try:
        institution = Institution.find_one(lookup)
    except NoResultsFound:
        raise HTTPError(http.NOT_FOUND)
    return serialize_institution(institution)
def project_wiki_view(auth, wname, path=None, **kwargs):
    """Render the wiki page view for a node.

    Builds the template context: page content, version pickers, which panels
    (view/edit/compare/menu) are shown, sharejs collaboration ids, and URLs.

    :param Auth auth: authentication context
    :param str wname: wiki page name from the URL
    :param path: unused here; accepted for route-signature compatibility
    :return: template context dict
    :raises: wiki invalid-version / page-not-found errors, HTTPError(401/403)
        when editing is requested without permission
    """
    node = kwargs['node'] or kwargs['project']
    anonymous = has_anonymous_link(node, auth)
    wiki_name = (wname or '').strip()
    wiki_key = to_mongo_key(wiki_name)
    wiki_page = node.get_wiki_page(wiki_name)
    wiki_settings = node.get_addon('wiki')
    # Registrations are read-only; otherwise write permission or a
    # publicly-editable wiki grants editing.
    can_edit = (
        auth.logged_in and not
        node.is_registration and (
            node.has_permission(auth.user, 'write') or
            wiki_settings.is_publicly_editable
        )
    )
    versions = _get_wiki_versions(node, wiki_name, anonymous=anonymous)

    # Determine panels used in view
    panels = {'view', 'edit', 'compare', 'menu'}
    if request.args and set(request.args).intersection(panels):
        panels_used = [panel for panel in request.args if panel in panels]
        num_columns = len(set(panels_used).intersection({'view', 'edit', 'compare'}))
        if num_columns == 0:
            panels_used.append('view')
            num_columns = 1
    else:
        panels_used = ['view', 'menu']
        num_columns = 1

    try:
        view = wiki_utils.format_wiki_version(
            version=request.args.get('view'),
            num_versions=len(versions),
            allow_preview=True,
        )
        compare = wiki_utils.format_wiki_version(
            version=request.args.get('compare'),
            num_versions=len(versions),
            allow_preview=False,
        )
    except InvalidVersionError:
        raise WIKI_INVALID_VERSION_ERROR

    # ensure home is always lower case since it cannot be renamed
    if wiki_name.lower() == 'home':
        wiki_name = 'home'

    if wiki_page:
        version = wiki_page.version
        is_current = wiki_page.is_current
        content = wiki_page.html(node)
        rendered_before_update = wiki_page.rendered_before_update
    else:
        # placeholder values for a page that does not exist yet
        version = 'NA'
        is_current = False
        content = ''
        rendered_before_update = False

    if can_edit:
        # lazily create the private uuid used for sharejs collaboration
        if wiki_key not in node.wiki_private_uuids:
            wiki_utils.generate_private_uuid(node, wiki_name)
        sharejs_uuid = wiki_utils.get_sharejs_uuid(node, wiki_name)
    else:
        if wiki_key not in node.wiki_pages_current and wiki_key != 'home':
            raise WIKI_PAGE_NOT_FOUND_ERROR
        if 'edit' in request.args:
            # 401 prompts login on publicly-editable wikis; otherwise 403
            if wiki_settings.is_publicly_editable:
                raise HTTPError(http.UNAUTHORIZED)
            raise HTTPError(http.FORBIDDEN)
        sharejs_uuid = None

    # Opens 'edit' panel when home wiki is empty
    if not content and can_edit and wiki_name == 'home':
        panels_used.append('edit')

    # Default versions for view and compare
    version_settings = {
        'view': view or ('preview' if 'edit' in panels_used else 'current'),
        'compare': compare or 'previous',
    }

    ret = {
        'wiki_id': wiki_page._primary_key if wiki_page else None,
        'wiki_name': wiki_page.page_name if wiki_page else wiki_name,
        'wiki_content': content,
        'rendered_before_update': rendered_before_update,
        'page': wiki_page,
        'version': version,
        'versions': versions,
        'sharejs_uuid': sharejs_uuid or '',
        'sharejs_url': settings.SHAREJS_URL,
        'is_current': is_current,
        'version_settings': version_settings,
        'pages_current': _get_wiki_pages_current(node),
        'category': node.category,
        'panels_used': panels_used,
        'num_columns': num_columns,
        'urls': {
            'api': _get_wiki_api_urls(node, wiki_name, {
                'content': node.api_url_for('wiki_page_content', wname=wiki_name),
                'draft': node.api_url_for('wiki_page_draft', wname=wiki_name),
            }),
            'web': _get_wiki_web_urls(node, wiki_name),
            'gravatar': get_gravatar(auth.user, 25),
        },
    }
    ret.update(_view_project(node, auth, primary=True))
    ret['user']['can_edit_wiki_body'] = can_edit
    return ret
def get_auth(auth, **kwargs):
    """Authenticate a WaterButler request and return signed addon credentials.

    Accepts either an already-authenticated ``auth``, a CAS Bearer token in
    the ``Authorization`` header, or an OSF cookie embedded in the JWE/JWT
    ``payload`` query param. Verifies that the requested action is permitted
    on the node, then returns the addon's credentials and settings wrapped
    in an encrypted JWT for WaterButler.

    :raises: HTTPError(403) when the payload cannot be decoded,
        400 on missing fields/addon, 404 when the node does not exist
    :return: ``{'payload': <JWE-encrypted JWT string>}``
    """
    cas_resp = None
    if not auth.user:
        # Central Authentication Server OAuth Bearer Token
        authorization = request.headers.get('Authorization')
        if authorization and authorization.startswith('Bearer '):
            client = cas.get_client()
            try:
                access_token = cas.parse_auth_header(authorization)
                cas_resp = client.profile(access_token)
            except cas.CasError as err:
                sentry.log_exception()
                # NOTE: We assume that the request is an AJAX request
                return json_renderer(err)
            if cas_resp.authenticated:
                auth.user = OSFUser.load(cas_resp.user)

    try:
        # decrypt (JWE) then verify/decode (JWT) the request payload
        data = jwt.decode(
            jwe.decrypt(request.args.get('payload', '').encode('utf-8'), WATERBUTLER_JWE_KEY),
            settings.WATERBUTLER_JWT_SECRET,
            options={'require_exp': True},
            algorithm=settings.WATERBUTLER_JWT_ALGORITHM
        )['data']
    except (jwt.InvalidTokenError, KeyError) as err:
        sentry.log_message(str(err))
        raise HTTPError(httplib.FORBIDDEN)

    if not auth.user:
        # last resort: authenticate via the OSF cookie forwarded in the payload
        auth.user = OSFUser.from_cookie(data.get('cookie', ''))

    try:
        action = data['action']
        node_id = data['nid']
        provider_name = data['provider']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)

    node = AbstractNode.load(node_id)
    if not node:
        raise HTTPError(httplib.NOT_FOUND)

    # raises on insufficient permission for the requested action
    check_access(node, auth, action, cas_resp)

    provider_settings = node.get_addon(provider_name)
    if not provider_settings:
        raise HTTPError(httplib.BAD_REQUEST)

    try:
        credentials = provider_settings.serialize_waterbutler_credentials()
        waterbutler_settings = provider_settings.serialize_waterbutler_settings()
    except exceptions.AddonError:
        log_exception()
        raise HTTPError(httplib.BAD_REQUEST)

    return {
        'payload': jwe.encrypt(
            jwt.encode(
                {
                    'exp': timezone.now() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
                    'data': {
                        # A waterbutler auth dict not an Auth object
                        'auth': make_auth(auth.user),
                        'credentials': credentials,
                        'settings': waterbutler_settings,
                        'callback_url': node.api_url_for(
                            ('create_waterbutler_log' if not node.is_registration else 'registration_callbacks'),
                            _absolute=True,
                            _internal=True
                        ),
                    }
                },
                settings.WATERBUTLER_JWT_SECRET,
                algorithm=settings.WATERBUTLER_JWT_ALGORITHM
            ),
            WATERBUTLER_JWE_KEY
        )
    }
def create_waterbutler_log(payload, **kwargs):
    """Record a WaterButler file action in the node's log and notify the user.

    For move/copy actions the payload must carry complete ``source`` and
    ``destination`` bundles; a move that only changes the name within the
    same folder/provider/node is downgraded to a rename. Other actions are
    delegated to the addon's own ``create_waterbutler_log``. Finally the
    ``file_updated`` signal is fired in a separate transaction.

    :param dict payload: WaterButler callback payload
    :return: ``{'status': 'success'}``
    :raises: HTTPError(400) for malformed payloads, unknown users or addons
    """
    with transaction.atomic():
        try:
            auth = payload['auth']
            action = LOG_ACTION_MAP[payload['action']]
        except KeyError:
            raise HTTPError(httplib.BAD_REQUEST)

        user = OSFUser.load(auth['id'])
        if user is None:
            raise HTTPError(httplib.BAD_REQUEST)

        auth = Auth(user=user)
        node = kwargs['node'] or kwargs['project']

        if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):
            # move/copy payloads must be complete on both ends
            for bundle in ('source', 'destination'):
                for key in ('provider', 'materialized', 'name', 'nid'):
                    if key not in payload[bundle]:
                        raise HTTPError(httplib.BAD_REQUEST)

            dest = payload['destination']
            src = payload['source']

            if src is not None and dest is not None:
                dest_path = dest['materialized']
                src_path = src['materialized']
                if dest_path.endswith('/') and src_path.endswith('/'):
                    dest_path = os.path.dirname(dest_path)
                    src_path = os.path.dirname(src_path)
                # A "move" that stays in the same parent folder, provider and
                # node and only changes the name is really a rename.
                if (os.path.split(dest_path)[0] == os.path.split(src_path)[0] and
                        dest['provider'] == src['provider'] and
                        dest['nid'] == src['nid'] and
                        dest['name'] != src['name']):
                    action = LOG_ACTION_MAP['rename']

            destination_node = node  # For clarity
            source_node = AbstractNode.load(payload['source']['nid'])

            source = source_node.get_addon(payload['source']['provider'])
            destination = node.get_addon(payload['destination']['provider'])

            payload['source'].update({
                'materialized': payload['source']['materialized'].lstrip('/'),
                'addon': source.config.full_name,
                'url': source_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['source']['path'].lstrip('/'),
                    provider=payload['source']['provider']
                ),
                'node': {
                    '_id': source_node._id,
                    'url': source_node.url,
                    'title': source_node.title,
                }
            })

            payload['destination'].update({
                'materialized': payload['destination']['materialized'].lstrip('/'),
                'addon': destination.config.full_name,
                'url': destination_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['destination']['path'].lstrip('/'),
                    provider=payload['destination']['provider']
                ),
                'node': {
                    '_id': destination_node._id,
                    'url': destination_node.url,
                    'title': destination_node.title,
                }
            })

            payload.update({
                'node': destination_node._id,
                'project': destination_node.parent_id,
            })

            if not payload.get('errors'):
                destination_node.add_log(action=action, auth=auth, params=payload)

            if payload.get('email') is True or payload.get('errors'):
                mails.send_mail(
                    user.username,
                    mails.FILE_OPERATION_FAILED if payload.get('errors') else mails.FILE_OPERATION_SUCCESS,
                    action=payload['action'],
                    source_node=source_node,
                    destination_node=destination_node,
                    source_path=payload['source']['materialized'],
                    # Bug fix: the notification previously passed the *source*
                    # materialized path as destination_path.
                    destination_path=payload['destination']['materialized'],
                    source_addon=payload['source']['addon'],
                    destination_addon=payload['destination']['addon'],
                    osf_support_email=settings.OSF_SUPPORT_EMAIL
                )

            if payload.get('errors'):
                # Action failed but our function succeeded
                # Bail out to avoid file_signals
                return {'status': 'success'}
        else:
            try:
                metadata = payload['metadata']
                node_addon = node.get_addon(payload['provider'])
            except KeyError:
                raise HTTPError(httplib.BAD_REQUEST)
            if node_addon is None:
                raise HTTPError(httplib.BAD_REQUEST)
            metadata['path'] = metadata['path'].lstrip('/')
            node_addon.create_waterbutler_log(auth, action, metadata)

    # Fire the update signal outside the logging transaction.
    with transaction.atomic():
        file_signals.file_updated.send(node=node, user=user, event_type=action, payload=payload)

    return {'status': 'success'}
def resolve_guid(guid, suffix=None):
    """Load GUID by primary key, look up the corresponding view function in the
    routing table, and return the return value of the view function without
    changing the URL.

    :param str guid: GUID primary key
    :param str suffix: Remainder of URL after the GUID
    :return: Return value of proxied view function
    """
    try:
        # Look up
        guid_object = Guid.load(guid)
    except KeyError as e:
        if e.message == 'osfstorageguidfile':
            # Used when an old detached OsfStorageGuidFile object is accessed
            raise HTTPError(http.NOT_FOUND)
        else:
            raise e

    if guid_object:
        # verify that the object implements a GuidStoredObject-like interface. If a model
        # was once GuidStoredObject-like but that relationship has changed, it's
        # possible to have referents that are instances of classes that don't
        # have a deep_url attribute or otherwise don't behave as
        # expected.
        if not hasattr(guid_object.referent, 'deep_url'):
            sentry.log_message('Guid resolved to an object with no deep_url', dict(guid=guid))
            raise HTTPError(http.NOT_FOUND)
        referent = guid_object.referent
        if referent is None:
            logger.error('Referent of GUID {0} not found'.format(guid))
            raise HTTPError(http.NOT_FOUND)
        if not referent.deep_url:
            raise HTTPError(http.NOT_FOUND)

        # Handle file `/download` shortcut with supported types.
        if suffix and suffix.rstrip('/').lower() == 'download':
            file_referent = None
            if isinstance(referent, Preprint) and referent.primary_file:
                file_referent = referent.primary_file
            elif isinstance(referent, BaseFileNode) and referent.is_file:
                file_referent = referent

            if file_referent:
                if isinstance(file_referent.target, Preprint) and not file_referent.target.is_published:
                    # TODO: Ideally, permissions wouldn't be checked here.
                    # This is necessary to prevent a logical inconsistency with
                    # the routing scheme - if a preprint is not published, only
                    # admins and moderators should be able to know it exists.
                    auth = Auth.from_kwargs(request.args.to_dict(), {})
                    # Check if user isn't a nonetype or that the user has admin/moderator/superuser permissions
                    if auth.user is None or not (
                            auth.user.has_perm('view_submissions', file_referent.target.provider) or
                            file_referent.target.has_permission(auth.user, permissions.ADMIN)):
                        raise HTTPError(http.NOT_FOUND)

                # Extend `request.args` adding `action=download`.
                request.args = request.args.copy()
                request.args.update({'action': 'download'})
                # Do not include the `download` suffix in the url rebuild.
                url = _build_guid_url(urllib.unquote(file_referent.deep_url))
                return proxy_url(url)

        # Handle Ember Applications
        if isinstance(referent, Preprint):
            if referent.provider.domain_redirect_enabled:
                # This route should always be intercepted by nginx for the branded domain,
                # w/ the exception of `<guid>/download` handled above.
                return redirect(referent.absolute_url, http.MOVED_PERMANENTLY)

            if PROXY_EMBER_APPS:
                # stream the page from the ember dev server
                resp = requests.get(EXTERNAL_EMBER_APPS['preprints']['server'], stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT)
                return Response(stream_with_context(resp.iter_content()), resp.status_code)

            return send_from_directory(preprints_dir, 'index.html')

        if isinstance(referent, BaseFileNode) and referent.is_file and (getattr(referent.target, 'is_quickfiles', False)):
            if referent.is_deleted:
                raise HTTPError(http.GONE)
            if PROXY_EMBER_APPS:
                resp = requests.get(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT)
                return Response(stream_with_context(resp.iter_content()), resp.status_code)

            return send_from_directory(ember_osf_web_dir, 'index.html')

        if isinstance(referent, Registration) and not suffix:
            if waffle.flag_is_active(request, features.EMBER_REGISTRIES_DETAIL_PAGE):
                # Route only the base detail view to ember
                if PROXY_EMBER_APPS:
                    resp = requests.get(EXTERNAL_EMBER_APPS['registries']['server'], stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT)
                    return Response(stream_with_context(resp.iter_content()), resp.status_code)

                return send_from_directory(registries_dir, 'index.html')

        # default: proxy to the legacy view behind the referent's deep_url
        url = _build_guid_url(urllib.unquote(referent.deep_url), suffix)
        return proxy_url(url)

    # GUID not found; try lower-cased and redirect if exists
    guid_object_lower = Guid.load(guid.lower())
    if guid_object_lower:
        return redirect(_build_guid_url(guid.lower(), suffix))

    # GUID not found
    raise HTTPError(http.NOT_FOUND)
def check_access(node, auth, action, cas_resp):
    """Verify that user can perform requested action on resource. Raise appropriate
    error code if action cannot proceed.

    :param node: the node the action targets
    :param Auth auth: authentication context
    :param str action: WaterButler action name (mapped via permission_map)
    :param cas_resp: CAS profile response, or None when not token-authenticated
    :return: True when access is granted
    :raises: HTTPError(400) for unknown actions; 403/401 otherwise
    """
    permission = permission_map.get(action, None)
    if permission is None:
        raise HTTPError(httplib.BAD_REQUEST)

    if cas_resp:
        if permission == 'read':
            # public nodes are readable regardless of token scopes
            if node.is_public:
                return True
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_READ
        else:
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_WRITE
        # the OAuth token must carry the scope matching the requested action
        if not cas_resp.authenticated \
           or required_scope not in oauth_scopes.normalize_scopes(cas_resp.attributes['accessTokenScope']):
            raise HTTPError(httplib.FORBIDDEN)

    if permission == 'read':
        if node.can_view(auth):
            return True
        # The user may have admin privileges on a parent node, in which
        # case they should have read permissions
        if node.is_registration and node.registered_from.can_view(auth):
            return True
    if permission == 'write' and node.can_edit(auth):
        return True

    # Users attempting to register projects with components might not have
    # `write` permissions for all components. This will result in a 403 for
    # all `copyto` actions as well as `copyfrom` actions if the component
    # in question is not public. To get around this, we have to recursively
    # check the node's parent node to determine if they have `write`
    # permissions up the stack.
    # TODO(hrybacki): is there a way to tell if this is for a registration?
    # All nodes being registered that receive the `copyto` action will have
    # `node.is_registration` == True. However, we have no way of telling if
    # `copyfrom` actions are originating from a node being registered.
    # TODO This is raise UNAUTHORIZED for registrations that have not been archived yet
    if action == 'copyfrom' or (action == 'copyto' and node.is_registration):
        parent = node.parent_node
        while parent:
            if parent.can_edit(auth):
                return True
            parent = parent.parent_node

    # Users with the prereg admin permission should be allowed to download files
    # from prereg challenge draft registrations.
    try:
        prereg_schema = MetaSchema.objects.get(name='Prereg Challenge', schema_version=2)
        allowed_nodes = [node] + node.parents
        prereg_draft_registration = DraftRegistration.objects.filter(
            branched_from__in=allowed_nodes,
            registration_schema=prereg_schema
        )
        if action == 'download' and \
                auth.user is not None and \
                prereg_draft_registration.count() > 0 and \
                auth.user.has_perm('osf.administer_prereg'):
            return True
    except MetaSchema.DoesNotExist:
        # environment has no prereg schema; fall through to the final raise
        pass

    # 403 for authenticated users, 401 (prompting login) for anonymous ones
    raise HTTPError(httplib.FORBIDDEN if auth.user else httplib.UNAUTHORIZED)
def validate_page_num(page, pages):
    """Raise HTTP 400 when *page* falls outside the valid range [0, pages)."""
    out_of_range = page < 0 or (pages and page >= pages)
    if out_of_range:
        raise HTTPError(
            http.BAD_REQUEST,
            data=dict(message_long='Invalid value for "page".')
        )
def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 3600):
    """
    A registered user claiming the unclaimed user account as an contributor
    to a project. Send an email for claiming the account to the referrer and
    notify the claimer.

    :param claimer: the claimer
    :param unclaimed_user: the user account to claim
    :param node: the project node where the user account is claimed
    :param throttle: the time period in seconds before another claim for the
        account can be made
    :return:
    :raise: HTTPError(http.BAD_REQUEST) when the throttle has not expired
    """
    unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key)

    # check throttle
    timestamp = unclaimed_record.get('last_sent')
    if not throttle_period_expired(timestamp, throttle):
        raise HTTPError(
            http.BAD_REQUEST,
            data=dict(
                message_long='User account can only be claimed with an existing user once every 24 hours'
            )
        )

    # roll the valid token for each email, thus user cannot change email and approve a different email address
    verification_key = generate_verification_key(verification_type='claim')
    unclaimed_record['token'] = verification_key['token']
    unclaimed_record['expires'] = verification_key['expires']
    unclaimed_record['claimer_email'] = claimer.username
    unclaimed_user.save()

    referrer = OSFUser.load(unclaimed_record['referrer_id'])
    claim_url = web_url_for(
        'claim_user_registered',
        uid=unclaimed_user._primary_key,
        pid=node._primary_key,
        token=unclaimed_record['token'],
        _external=True,
    )

    # Send mail to referrer, telling them to forward verification link to claimer
    mails.send_mail(
        referrer.username,
        mails.FORWARD_INVITE_REGISTERED,
        user=unclaimed_user,
        referrer=referrer,
        node=node,
        claim_url=claim_url,
        fullname=unclaimed_record['name'],
        osf_contact_email=settings.OSF_CONTACT_EMAIL,
    )
    # record the send time only after the mail call succeeds
    unclaimed_record['last_sent'] = get_timestamp()
    unclaimed_user.save()

    # Send mail to claimer, telling them to wait for referrer
    mails.send_mail(
        claimer.username,
        mails.PENDING_VERIFICATION_REGISTERED,
        fullname=claimer.fullname,
        referrer=referrer,
        node=node,
        osf_contact_email=settings.OSF_CONTACT_EMAIL,
    )
def external_login_email_post():
    """
    View to handle email submission for first-time oauth-login user.
    HTTP Method: POST

    Resolves the campaign destination from the session's service url,
    then either links the external identity to an existing user (pending
    'LINK' status) or creates a new unconfirmed user ('CREATE' status),
    sending a confirmation email in both cases.

    :raises: HTTPError(401) when the session is not an external first login;
        HTTPError(403) when the external identity is already claimed
    """
    form = ResendConfirmationForm(request.form)
    session = get_session()
    if not session.is_external_first_login:
        raise HTTPError(http.UNAUTHORIZED)

    external_id_provider = session.data['auth_user_external_id_provider']
    external_id = session.data['auth_user_external_id']
    fullname = session.data['auth_user_fullname']
    service_url = session.data['service_url']

    # TODO: @cslzchen use user tags instead of destination
    destination = 'dashboard'
    for campaign in campaigns.get_campaigns():
        if campaign != 'institution':
            # Handle different url encoding schemes between `furl` and `urlparse/urllib`.
            # OSF use `furl` to parse service url during service validation with CAS. However, `web_url_for()` uses
            # `urlparse/urllib` to generate service url. `furl` handles `urlparser/urllib` generated urls while ` but
            # not vice versa.
            campaign_url = furl.furl(campaigns.campaign_url_for(campaign)).url
            external_campaign_url = furl.furl(campaigns.external_campaign_url_for(campaign)).url
            if campaigns.is_proxy_login(campaign):
                # proxy campaigns: OSF Preprints and branded ones
                if check_service_url_with_proxy_campaign(str(service_url), campaign_url, external_campaign_url):
                    destination = campaign
                    # continue to check branded preprints even service url matches osf preprints
                    if campaign != 'osf-preprints':
                        break
            elif service_url.startswith(campaign_url):
                # osf campaigns: OSF Prereg and ERPC
                destination = campaign
                break

    if form.validate():
        clean_email = form.email.data
        user = get_user(email=clean_email)
        external_identity = {
            external_id_provider: {
                external_id: None,
            },
        }
        try:
            ensure_external_identity_uniqueness(external_id_provider, external_id, user)
        except ValidationError as e:
            raise HTTPError(http.FORBIDDEN, e.message)
        if user:
            # 1. update user oauth, with pending status
            external_identity[external_id_provider][external_id] = 'LINK'
            if external_id_provider in user.external_identity:
                user.external_identity[external_id_provider].update(external_identity[external_id_provider])
            else:
                user.external_identity.update(external_identity)
            # 2. add unconfirmed email and send confirmation email
            user.add_unconfirmed_email(clean_email, external_identity=external_identity)
            user.save()
            send_confirm_email(
                user,
                clean_email,
                external_id_provider=external_id_provider,
                external_id=external_id,
                destination=destination
            )
            # 3. notify user
            message = language.EXTERNAL_LOGIN_EMAIL_LINK_SUCCESS.format(
                external_id_provider=external_id_provider,
                email=user.username
            )
            kind = 'success'
            # 4. remove session and osf cookie
            remove_session(session)
        else:
            # 1. create unconfirmed user with pending status
            external_identity[external_id_provider][external_id] = 'CREATE'
            user = User.create_unconfirmed(
                username=clean_email,
                password=None,
                fullname=fullname,
                external_identity=external_identity,
                campaign=None
            )
            # TODO: [#OSF-6934] update social fields, verified social fields cannot be modified
            user.save()
            # 3. send confirmation email
            send_confirm_email(
                user,
                user.username,
                external_id_provider=external_id_provider,
                external_id=external_id,
                destination=destination
            )
            # 4. notify user
            message = language.EXTERNAL_LOGIN_EMAIL_CREATE_SUCCESS.format(
                external_id_provider=external_id_provider,
                email=user.username
            )
            kind = 'success'
            # 5. remove session
            remove_session(session)
        status.push_status_message(message, kind=kind, trust=False)
    else:
        forms.push_errors_to_status(form.errors)

    # Don't go anywhere
    return {'form': form, 'external_id_provider': external_id_provider}
def get_or_http_error(Model, pk_or_query, allow_deleted=False, display_name=None):
    """Load an instance of Model by primary key or query. Raise an appropriate
    HTTPError if no record is found or if the query fails to find a unique record
    :param type Model: StoredObject subclass to query
    :param pk_or_query:
    :type pk_or_query: either
      - a <basestring> representation of the record's primary key, e.g. 'abcdef'
      - a <QueryBase> subclass query to uniquely select a record, e.g.
      Q('title', 'eq', 'Entitled') & Q('version', 'eq', 1)
    :param bool allow_deleted: allow deleleted records?
    :param basestring display_name:
    :raises: HTTPError(404) if the record does not exist
    :raises: HTTPError(400) if no unique record is found
    :raises: HTTPError(410) if the resource is deleted and allow_deleted = False
    :return: Model instance
    """
    display_name = display_name or ''
    # FIXME: Not everything that uses this decorator needs to be markupsafe, but OsfWebRenderer error.mako does...
    safe_name = markupsafe.escape(display_name)
    # whether this request should take a row lock (SELECT ... FOR UPDATE)
    select_for_update = check_select_for_update(request)

    if isinstance(pk_or_query, Q):
        # query path: must match exactly one record
        try:
            instance = Model.objects.filter(pk_or_query).select_for_update().get() if select_for_update else Model.objects.get(pk_or_query)
        except Model.DoesNotExist:
            raise HTTPError(http.NOT_FOUND, data=dict(
                message_long='No {name} record matching that query could be found'.format(name=safe_name)
            ))
        except Model.MultipleObjectsReturned:
            raise HTTPError(http.BAD_REQUEST, data=dict(
                message_long='The query must match exactly one {name} record'.format(name=safe_name)
            ))
    else:
        # primary-key path: Model.load returns None when missing
        instance = Model.load(pk_or_query, select_for_update=select_for_update)
        if not instance:
            raise HTTPError(http.NOT_FOUND, data=dict(
                message_long='No {name} record with that primary key could be found'.format(name=safe_name)
            ))

    # deleted AND suspended records get 451; suspended-but-not-deleted ones fall through
    if getattr(instance, 'is_deleted', False) and getattr(instance, 'suspended', False):
        raise HTTPError(451, data=dict(  # 451 - Unavailable For Legal Reasons
            message_short='Content removed',
            message_long='This content has been removed'
        ))
    if not allow_deleted and getattr(instance, 'is_deleted', False):
        raise HTTPError(http.GONE)
    return instance
def addon_view_or_download_file(auth, path, provider, **kwargs):
    """Dispatch a file request to view, download, HEAD-resolve, or GUID-redirect.

    Validates the node's storage addon, resolves ``path`` to a file record
    (touching waterbutler to confirm the file exists), then branches on the
    ``action`` query parameter ('view' by default, 'download', 'get_guid')
    and the HTTP method.

    :param Auth auth: auth context (decorator-injected)
    :param str path: provider-specific file path
    :param str provider: addon short name, e.g. 'osfstorage'
    :raises: HTTPError(400/401) when the addon is missing, unauthorized,
        or unconfigured; HTTPError(404) when the file cannot be found
    """
    extras = request.args.to_dict()
    extras.pop('_', None)  # Clean up our url params a bit
    action = extras.get('action', 'view')
    node = kwargs.get('node') or kwargs['project']

    node_addon = node.get_addon(provider)

    # escaped copies for safe interpolation into HTML error messages
    provider_safe = markupsafe.escape(provider)
    path_safe = markupsafe.escape(path)
    project_safe = markupsafe.escape(node.project_or_component)

    if not path:
        raise HTTPError(httplib.BAD_REQUEST)

    if not isinstance(node_addon, BaseStorageAddon):
        raise HTTPError(httplib.BAD_REQUEST, data={
            'message_short': 'Bad Request',
            'message_long': 'The {} add-on containing {} is no longer connected to {}.'.format(provider_safe, path_safe, project_safe)
        })

    if not node_addon.has_auth:
        raise HTTPError(httplib.UNAUTHORIZED, data={
            'message_short': 'Unauthorized',
            'message_long': 'The {} add-on containing {} is no longer authorized.'.format(provider_safe, path_safe)
        })

    if not node_addon.complete:
        raise HTTPError(httplib.BAD_REQUEST, data={
            'message_short': 'Bad Request',
            'message_long': 'The {} add-on containing {} is no longer configured.'.format(provider_safe, path_safe)
        })

    # savepoint so the get_or_create below can be rolled back if the
    # provider reports the file missing
    savepoint_id = transaction.savepoint()
    file_node = BaseFileNode.resolve_class(provider, BaseFileNode.FILE).get_or_create(node, path)

    # Note: Cookie is provided for authentication to waterbutler
    # it is overriden to force authentication as the current user
    # the auth header is also pass to support basic auth
    version = file_node.touch(
        request.headers.get('Authorization'),
        **dict(extras, cookie=request.cookies.get(settings.COOKIE_NAME))
    )
    if version is None:
        # File is either deleted or unable to be found in the provider location
        # Rollback the insertion of the file_node
        transaction.savepoint_rollback(savepoint_id)
        if not file_node.pk:
            redirect_file_node = BaseFileNode.load(path)
            # Allow osfstorage to redirect if the deep url can be used to find a valid file_node
            if redirect_file_node and redirect_file_node.provider == 'osfstorage' and not redirect_file_node.is_deleted:
                return redirect(redirect_file_node.node.web_url_for('addon_view_or_download_file',
                    path=redirect_file_node._id, provider=redirect_file_node.provider))
            raise HTTPError(httplib.NOT_FOUND, data={
                'message_short': 'File Not Found',
                'message_long': 'The requested file could not be found.'
            })
        # file record exists but provider says deleted: render deleted-file page
        return addon_deleted_file(file_node=file_node, path=path, **kwargs)
    else:
        transaction.savepoint_commit(savepoint_id)

    # TODO clean up these urls and unify what is used as a version identifier
    if request.method == 'HEAD':
        # HEAD requests get a 302 to the waterbutler URL without a body
        return make_response(('', httplib.FOUND, {
            'Location': file_node.generate_waterbutler_url(**dict(extras, direct=None, version=version.identifier, _internal=extras.get('mode') == 'render'))
        }))

    if action == 'download':
        format = extras.get('format')
        _, extension = os.path.splitext(file_node.name)
        # avoid rendering files with the same format type.
        if format and '.{}'.format(format) != extension:
            # requested format differs from the file's own: export through MFR
            return redirect('{}/export?format={}&url={}'.format(MFR_SERVER_URL, format, urllib.quote(file_node.generate_waterbutler_url(
                **dict(extras, direct=None, version=version.identifier, _internal=extras.get('mode') == 'render')
            ))))
        return redirect(file_node.generate_waterbutler_url(**dict(extras, direct=None, version=version.identifier, _internal=extras.get('mode') == 'render')))

    if action == 'get_guid':
        # only draft registrations that are not yet approved may mint a GUID this way
        draft_id = extras.get('draft')
        draft = DraftRegistration.load(draft_id)
        if draft is None or draft.is_approved:
            raise HTTPError(httplib.BAD_REQUEST, data={
                'message_short': 'Bad Request',
                'message_long': 'File not associated with required object.'
            })
        guid = file_node.get_guid(create=True)
        guid.referent.save()
        return dict(guid=guid._id)

    # deep (multi-segment) path: canonicalize to the short /<guid>/ URL
    if len(request.path.strip('/').split('/')) > 1:
        guid = file_node.get_guid(create=True)
        return redirect(furl.furl('/{}/'.format(guid._id)).set(args=extras).url)
    return addon_view_file(auth, node, file_node, version)
def get_folders(self, folder_id=None, **kwargs):
    """Get list of folders underneath the folder with id ``folder_id``.

    If ``folder_id`` is ``None``, return a single entry representing the
    root folder. In OneDrive, the root folder has a unique id, so fetch
    that and return it.

    This method returns a list of dicts with metadata about each folder
    under ``folder_id``. These dicts have the following properties::

        {
            'addon': 'onedrive',          # short name of the addon
            'id': folder_id,              # id of the folder. root may need special casing
            'path': '/',                  # human-readable path of the folder
            'kind': 'folder',             # always 'folder'
            'name': '/ (Full OneDrive)',  # human readable name of the folder. root may need special casing
            'urls': {                     # urls to fetch information about the folder
                'folders': api_v2_url(    # url to get subfolders of this folder.
                    'nodes/{}/addons/onedrive/folders/'.format(self.owner._id),
                    params={'id': folder_id}
                ),
            }
        }

    Some providers include additional information::

        * figshare includes ``permissions``, ``hasChildren``
        * googledrive includes ``urls.fetch``

    :param str folder_id: the id of the folder to fetch subfolders of.
        Defaults to ``None``
    :rtype: list
    :return: a list of dicts with metadata about the subfolder of ``folder_id``.
    """
    def _entry(entry_id, entry_name, entry_path):
        # Build one serialized folder dict in the shape the settings UI expects.
        return {
            'addon': 'onedrive',
            'kind': 'folder',
            'id': entry_id,
            'name': entry_name,
            'path': entry_path,
            'urls': {
                'folders': api_v2_url(
                    'nodes/{}/addons/onedrive/folders/'.format(self.owner._id),
                    params={'id': entry_id}
                ),
            }
        }

    if folder_id is None:
        # Root folder is addressed by a fixed well-known id.
        return [_entry(DEFAULT_ROOT_ID, '/ (Full OneDrive)', '/')]

    try:
        access_token = self.fetch_access_token()
    except exceptions.InvalidAuthError:
        raise HTTPError(403)

    onedrive = OneDriveClient(access_token)
    return [
        _entry(item['id'], item['name'], item['name'])
        for item in onedrive.folders(folder_id)
    ]
def node_fork_page(auth, node, **kwargs):
    """Fork ``node`` on behalf of the requesting user and return the fork's URL.

    :param Auth auth: auth context of the requesting user
    :param Node node: the node to fork
    :return: URL of the newly created fork
    :raises: HTTPError(403), redirecting back to the node, when the user
        lacks permission to fork
    """
    try:
        forked_node = node.fork_node(auth)
    except PermissionsError:
        raise HTTPError(http.FORBIDDEN, redirect_url=node.url)
    return forked_node.url
from .exceptions import ( NameEmptyError, NameInvalidError, NameMaximumLengthError, PageCannotRenameError, PageConflictError, PageNotFoundError, InvalidVersionError, ) from .models import NodeWikiPage logger = logging.getLogger(__name__) WIKI_NAME_EMPTY_ERROR = HTTPError( http.BAD_REQUEST, data=dict(message_short='Invalid request', message_long='The wiki page name cannot be empty.')) WIKI_NAME_MAXIMUM_LENGTH_ERROR = HTTPError( http.BAD_REQUEST, data=dict( message_short='Invalid request', message_long='The wiki page name cannot be more than 100 characters.')) WIKI_PAGE_CANNOT_RENAME_ERROR = HTTPError( http.BAD_REQUEST, data=dict(message_short='Invalid request', message_long='The wiki page cannot be renamed.')) WIKI_PAGE_CONFLICT_ERROR = HTTPError( http.CONFLICT, data=dict(message_short='Page conflict', message_long='A wiki page with that name already exists.')) WIKI_PAGE_NOT_FOUND_ERROR = HTTPError(
def project_statistics(auth, node, **kwargs):
    """Render the project statistics page.

    Accessible when the node is public or the requesting user can edit it;
    otherwise responds with 403.
    """
    viewable = node.is_public or node.can_edit(auth)
    if not viewable:
        raise HTTPError(http.FORBIDDEN)
    return _view_project(node, auth, primary=True)
def view_institution(inst_id, **kwargs):
    """Serialize a single (non-deleted) institution by its GUID.

    :param str inst_id: institution ``_id``
    :return: serialized institution dict
    :raises: HTTPError(404) when no matching, non-deleted institution exists
    """
    try:
        institution = Institution.objects.get(_id=inst_id, is_deleted=False)
    except Institution.DoesNotExist:
        raise HTTPError(http_status.HTTP_404_NOT_FOUND)
    return serialize_institution(institution)
node_settings, cache_file, start_render=True, file_path=file_path, file_content=None, download_path=download_url, ) rv = { 'file_name': file_name, 'render_url': render_url, 'rendered': rendered, 'info_url': info_url, } rv.update(_view_project(node, auth)) return rv FILE_NOT_FOUND_ERROR = HTTPError(http.NOT_FOUND, data=dict( message_short='File not found', message_long='The file you requested could not be found.' )) @must_be_valid_project # injects project @must_be_contributor_or_public # injects user, project def download_file(fid, **kwargs): node = kwargs['node'] or kwargs['project'] try: vid = get_latest_version_number(fid, node) + 1 except FileNotFoundError: raise FILE_NOT_FOUND_ERROR redirect_url = node.web_url_for('download_file_by_version', fid=fid, vid=vid) return redirect(redirect_url)
def claim_user_registered(auth, node, **kwargs):
    """
    View that prompts user to enter their password in order to claim being a contributor on a project.
    A user must be logged in.

    On GET, renders the password form. On POST, verifies the password and
    replaces the unregistered contributor placeholder with the logged-in
    user on the node.

    :param Auth auth: auth context of the logged-in user
    :param Node node: the project the claim is for
    :raises: HTTPError(400) if the user is already a contributor or the
        claim token is invalid/expired
    """
    current_user = auth.user

    sign_out_url = web_url_for('auth_register', logout=True, next=request.url)
    if not current_user:
        return redirect(sign_out_url)

    # Logged in user should not be a contributor the project
    if node.is_contributor(current_user):
        logout_url = web_url_for('auth_logout', redirect_url=request.url)
        data = {
            'message_short': 'Already a contributor',
            'message_long': ('The logged-in user is already a contributor to this '
                'project. Would you like to <a href="{}">log out</a>?').format(logout_url)
        }
        raise HTTPError(http.BAD_REQUEST, data=data)

    uid, pid, token = kwargs['uid'], kwargs['pid'], kwargs['token']
    unreg_user = User.load(uid)
    # the token must match the one stored for this unregistered user / project pair
    if not verify_claim_token(unreg_user, token, pid=node._primary_key):
        error_data = {
            'message_short': 'Invalid url.',
            'message_long': 'The token in the URL is invalid or has expired.'
        }
        raise HTTPError(http.BAD_REQUEST, data=error_data)

    # Store the unreg_user data on the session in case the user registers
    # a new account
    session.data['unreg_user'] = {
        'uid': uid, 'pid': pid, 'token': token
    }

    form = PasswordForm(request.form)
    if request.method == 'POST':
        if form.validate():
            if current_user.check_password(form.password.data):
                # swap the placeholder contributor for the real, logged-in user
                node.replace_contributor(old=unreg_user, new=current_user)
                node.save()
                status.push_status_message(
                    'You are now a contributor to this project.',
                    kind='success',
                    trust=False
                )
                return redirect(node.url)
            else:
                status.push_status_message(language.LOGIN_FAILED, kind='warning', trust=False)
        else:
            forms.push_errors_to_status(form.errors)

    # serialize differently for JSON (AJAX) vs. template rendering
    if is_json_request():
        form_ret = forms.utils.jsonify(form)
        user_ret = profile_utils.serialize_user(current_user, full=False)
    else:
        form_ret = form
        user_ret = current_user

    return {
        'form': form_ret,
        'user': user_ret,
        'signOutUrl': sign_out_url
    }
def upload_file_public(auth, node_addon, **kwargs):
    """Handle a public file upload to a node (legacy osffiles addon).

    Validates the upload size against the addon's limit, stores the file on
    the node, and returns serialized file metadata (or redirects back to the
    referring page when the form asked for a redirect).

    :param Auth auth: auth context of the uploader
    :param node_addon: the node's files addon settings (decorator-injected)
    :return: (file_info dict, 201) on success, or a redirect response
    :raises: HTTPError(400) when the file exceeds the size limit
    """
    node = kwargs['node'] or kwargs['project']

    do_redirect = request.form.get('redirect', False)

    name, content, content_type, size = prepare_file(request.files['file'])

    if size > (node_addon.config.max_file_size * MEGABYTE):
        raise HTTPError(
            http.BAD_REQUEST,
            data={
                'message_short': 'File too large.',
                'message_long': 'The file you are trying to upload exceeds '
                'the maximum file size limit.',
            },
        )

    try:
        fobj = node.add_file(
            auth,
            name,
            content,
            size,
            content_type
        )
    except FileNotModified:
        # re-upload of identical content is a no-op, not an error
        return {
            'actionTaken': None,
            'name': name,
        }

    # existing file was updated?
    # (add_file appends a log entry; its action distinguishes add vs update)
    was_updated = node.logs[-1].action == NodeLog.FILE_UPDATED
    unique, total = get_basic_counters(
        'download:{0}:{1}'.format(
            node._id,
            fobj.path.replace('.', '_')
        )
    )

    file_info = {
        'name': name,
        'size': [
            float(size),
            rubeus.format_filesize(size),
        ],

        # URLs
        'urls': {
            'view': fobj.url(node),
            'download': fobj.download_url(node),
            'delete': fobj.api_url(node),
        },

        rubeus.KIND: rubeus.FILE,
        'permissions': {
            'view': True,
            'edit': True,
        },

        'dates': {
            'uploaded': [
                time.mktime(fobj.date_uploaded.timetuple()),
                fobj.date_uploaded.strftime('%Y/%m/%d %I:%M %p'),
            ],
        },

        'downloads': total or 0,
        'actionTaken': NodeLog.FILE_UPDATED if was_updated else NodeLog.FILE_ADDED,
    }

    if do_redirect:
        return redirect(request.referrer)

    return file_info, 201
def osfstorage_create_child(file_node, payload, **kwargs):
    """Create a child file or folder under ``file_node`` (waterbutler callback).

    :param file_node: the parent osfstorage record
    :param dict payload: waterbutler payload; expects 'name', 'user', 'kind',
        and for files also 'settings', 'worker', 'metadata', 'hashes'
    :return: (serialized result dict, 201 CREATED or 200 OK if the child
        already existed)
    :raises: HTTPError(400) for registrations, bad names, or malformed payloads;
        HTTPError(409) on folder name conflicts; HTTPError(403) when the file
        is checked out by another user
    """
    parent = file_node  # Just for clarity
    name = payload.get('name')
    user = OSFUser.load(payload.get('user'))
    is_folder = payload.get('kind') == 'folder'

    # registrations are immutable unless currently being archived
    if getattr(file_node.target, 'is_registration', False) and not getattr(file_node.target, 'archiving', False):
        raise HTTPError(httplib.BAD_REQUEST, data={
            'message_short': 'Registered Nodes are immutable',
            'message_long': "The operation you're trying to do cannot be applied to registered Nodes, which are immutable",
        })

    # NOTE(review): this only rejects when BOTH name and user are missing;
    # a present name with a missing user slips through and `user._id` /
    # `create_version(user, ...)` below would then misbehave. Presumably the
    # intent was `not (name and user)` -- confirm against callers before changing.
    if not (name or user) or '/' in name:
        raise HTTPError(httplib.BAD_REQUEST)

    if getattr(file_node.target, 'is_quickfiles', False) and is_folder:
        raise HTTPError(httplib.BAD_REQUEST, data={'message_long': 'You may not create a folder for QuickFiles'})

    try:
        # Create a save point so that we can rollback and unlock
        # the parent record
        with transaction.atomic():
            if is_folder:
                created, file_node = True, parent.append_folder(name)
            else:
                created, file_node = True, parent.append_file(name)
    except (ValidationError, IntegrityError):
        # name collision: fall back to the existing child of the same kind
        created, file_node = False, parent.find_child_by_name(name, kind=int(not is_folder))

    if not created and is_folder:
        raise HTTPError(httplib.CONFLICT, data={
            'message_long': 'Cannot create folder "{name}" because a file or folder already exists at path "{path}"'.format(
                name=file_node.name,
                path=file_node.materialized_path,
            )
        })

    if not is_folder:
        try:
            # only the checkout holder (or nobody) may add a new version
            if file_node.checkout is None or file_node.checkout._id == user._id:
                version = file_node.create_version(
                    user,
                    dict(payload['settings'], **dict(
                        payload['worker'], **{
                            'object': payload['metadata']['name'],
                            'service': payload['metadata']['provider'],
                        })
                    ),
                    dict(payload['metadata'], **payload['hashes'])
                )
                version_id = version._id
                archive_exists = version.archive is not None
            else:
                raise HTTPError(httplib.FORBIDDEN, data={
                    'message_long': 'File cannot be updated due to checkout status.'
                })
        except KeyError:
            # payload missing one of settings/worker/metadata/hashes keys
            raise HTTPError(httplib.BAD_REQUEST)
    else:
        version_id = None
        archive_exists = False

    return {
        'status': 'success',
        'archive': not archive_exists,  # Should waterbutler also archive this file
        'data': file_node.serialize(),
        'version': version_id,
    }, httplib.CREATED if created else httplib.OK
def view_file(auth, **kwargs):
    """Render the legacy file-view page for an osffiles file on a node.

    Looks up (or mints) the file's GUID, locates the latest version in the
    node's ``files_versions``, verifies the file exists on disk, and returns
    the rendered-file context merged with the project view context.

    :param Auth auth: auth context (decorator-injected)
    :raises: HTTPError(404) when the file is missing from files_versions
        or from disk
    """
    node_settings = kwargs['node_addon']
    node = kwargs['node'] or kwargs['project']
    file_name = kwargs['fid']
    # files_versions keys use '_' in place of '.'
    file_name_clean = file_name.replace('.', '_')

    # NOTE(review): bare except -- presumably meant to catch the modular-odm
    # "no results" exception from find_one; narrow it if/when that import is
    # available here, since this also swallows unrelated errors.
    try:
        guid = OsfGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('name', 'eq', file_name)
        )
    except:
        guid = OsfGuidFile(
            node=node,
            name=file_name,
        )
        guid.save()

    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)

    # Throw 404 and log error if file not found in files_versions
    try:
        file_id = node.files_versions[file_name_clean][-1]
    except KeyError:
        logger.error('File {} not found in files_versions of component {}.'.format(
            file_name_clean, node._id
        ))
        raise HTTPError(http.NOT_FOUND)

    file_object = NodeFile.load(file_id)

    # Ensure NodeFile is attached to Node; should be fixed by actions or
    # improved data modeling in future
    if not file_object.node:
        file_object.node = node
        file_object.save()

    download_url = file_object.download_url(node)
    render_url = file_object.render_url(node)
    info_url = file_object.info_url(node)

    file_path = os.path.join(
        settings.UPLOADS_PATH,
        node._primary_key,
        file_name
    )

    # Throw 404 and log error if file not found on disk
    if not os.path.isfile(file_path):
        logger.error('File {} not found on disk.'.format(file_path))
        raise HTTPError(http.NOT_FOUND)

    _, file_ext = os.path.splitext(file_path.lower())

    # Get or create rendered file
    cache_file = get_cache_file(
        file_object.filename,
        file_object.latest_version_number(node)
    )
    rendered = get_cache_content(
        node_settings, cache_file, start_render=True,
        file_path=file_path,
        file_content=None,
        download_path=download_url,
    )

    rv = {
        'file_name': file_name,
        'render_url': render_url,
        'rendered': rendered,
        'info_url': info_url,
    }
    rv.update(_view_project(node, auth))
    return rv
def user_notifications(auth, **kwargs):
    """Return the logged-in user's mailing-list subscription data.

    :raises: HTTPError(400) when email subscriptions are disabled site-wide
    """
    if settings.ENABLE_EMAIL_SUBSCRIPTIONS:
        return {'mailing_lists': auth.user.mailing_lists}
    raise HTTPError(http.BAD_REQUEST)
def update_user(auth):
    """Update the logged-in user's profile from the JSON request body.

    Handles three groups of attributes: emails (removals, additions with
    confirmation emails), username (primary email) change, and
    timezone/locale. Finally re-syncs mailchimp subscriptions.

    :param Auth auth: auth context of the logged-in user (decorator-injected)
    :return: serialized profile dict from ``_profile_view``
    :raises: HTTPError(403) on attempts to drop or spoof the primary email
    :raises: HTTPError(400) on invalid/blacklisted emails or throttled sends
    """
    # trust the decorator to handle auth
    user = auth.user
    data = request.get_json()

    validate_user(data, user)

    # TODO: Expand this to support other user attributes

    ##########
    # Emails #
    ##########

    if 'emails' in data:
        emails_list = [x['address'].strip().lower() for x in data['emails']]

        # the submitted list must always include the current username
        if user.username.strip().lower() not in emails_list:
            raise HTTPError(httplib.FORBIDDEN)

        available_emails = [
            each.strip().lower() for each in
            list(user.emails.values_list('address', flat=True)) + user.unconfirmed_emails
        ]
        # removals: emails the user currently has but omitted from the payload
        removed_emails = [
            each.strip().lower()
            for each in available_emails
            if each not in emails_list
        ]

        # the primary email (username) can never be removed
        if user.username.strip().lower() in removed_emails:
            raise HTTPError(httplib.FORBIDDEN)

        for address in removed_emails:
            if user.emails.filter(address=address):
                try:
                    user.remove_email(address)
                except PermissionsError as e:
                    raise HTTPError(httplib.FORBIDDEN, str(e))
            # also drop any pending (unconfirmed) entry for this address
            user.remove_unconfirmed_email(address)

        # additions: submitted addresses the user does not have yet
        added_emails = [
            each['address'].strip().lower()
            for each in data['emails']
            if each['address'].strip().lower() not in available_emails
        ]

        for address in added_emails:
            try:
                user.add_unconfirmed_email(address)
            except (ValidationError, ValueError):
                raise HTTPError(http.BAD_REQUEST, data=dict(
                    message_long='Invalid Email'
                ))
            except BlacklistedEmailError:
                sentry.log_message(
                    'User attempted to add a blacklisted email',
                    extra_data={
                        'user_id': user.id,
                        'address': address,
                    }
                )
                raise HTTPError(http.BAD_REQUEST, data=dict(
                    message_long=language.BLACKLISTED_EMAIL
                ))

            # TODO: This setting is now named incorrectly.
            if settings.CONFIRM_REGISTRATIONS_BY_EMAIL:
                # rate-limit confirmation emails per user
                if not throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE):
                    raise HTTPError(httplib.BAD_REQUEST, data={
                        'message_long': 'Too many requests. Please wait a while before adding an email to your account.'
                    })
                send_confirm_email(user, email=address)

        ############
        # Username #
        ############

        # FIX: initialize so the `if username ...` check below cannot hit
        # UnboundLocalError when no primary email is found.
        username = None
        # get the first email that is set to primary and has an address.
        # FIX: supply a default so next() returns None instead of raising
        # StopIteration when the payload has no confirmed primary email.
        primary_email = next((
            each for each in data['emails']
            # email is primary
            if each.get('primary') and each.get('confirmed')
            # an address is specified (can't trust those sneaky users!)
            and each.get('address')
        ), None)

        if primary_email:
            primary_email_address = primary_email['address'].strip().lower()
            # the new primary email must already be confirmed on this account
            if primary_email_address not in [each.strip().lower() for each in user.emails.values_list('address', flat=True)]:
                raise HTTPError(httplib.FORBIDDEN)
            username = primary_email_address

        # make sure the new username has already been confirmed
        if username and username != user.username and user.emails.filter(address=username).exists():
            mails.send_mail(
                user.username,
                mails.PRIMARY_EMAIL_CHANGED,
                user=user,
                new_address=username,
                can_change_preferences=False,
                osf_contact_email=settings.OSF_CONTACT_EMAIL
            )

            # Remove old primary email from subscribed mailing lists
            for list_name, subscription in user.mailchimp_mailing_lists.items():
                if subscription:
                    mailchimp_utils.unsubscribe_mailchimp_async(list_name, user._id, username=user.username)
            user.username = username

    ###################
    # Timezone/Locale #
    ###################

    if 'locale' in data:
        if data['locale']:
            locale = data['locale'].replace('-', '_')
            user.locale = locale
    # TODO: Refactor to something like:
    #   user.timezone = data.get('timezone', user.timezone)
    if 'timezone' in data:
        if data['timezone']:
            user.timezone = data['timezone']

    user.save()

    # Update subscribed mailing lists with new primary email
    # TODO: move to user.save()
    for list_name, subscription in user.mailchimp_mailing_lists.items():
        if subscription:
            mailchimp_utils.subscribe_mailchimp(list_name, user._id)

    return _profile_view(user, is_profile=True)
def get_target_user(auth, uid=None):
    """Resolve the user a request targets.

    :param Auth auth: auth context of the requester
    :param str uid: optional user id; when falsy, the requester themselves
        is the target
    :return: the target User
    :raises: HTTPError(404) when ``uid`` does not resolve to a user
    """
    if uid:
        target = User.load(uid)
    else:
        target = auth.user
    if target is None:
        raise HTTPError(http.NOT_FOUND)
    return target
def external_login_confirm_email_get(auth, uid, token):
    """
    View for email confirmation links when user first login through external identity provider.
    HTTP Method: GET

    When users click the confirm link, they are expected not to be logged in. If not, they will be logged out first and
    redirected back to this view. After OSF verifies the link and performs all actions, they will be automatically
    logged in through CAS and redirected back to this view again being authenticated.

    :param auth: the auth context
    :param uid: the user's primary key
    :param token: the verification token
    """
    user = User.load(uid)
    if not user:
        raise HTTPError(http.BAD_REQUEST)

    destination = request.args.get('destination')
    if not destination:
        raise HTTPError(http.BAD_REQUEST)

    # if user is already logged in
    if auth and auth.user:
        # if it is a wrong user
        if auth.user._id != user._id:
            return auth_logout(redirect_url=request.url)
        # if it is the expected user
        new = request.args.get('new', None)
        if destination in campaigns.get_campaigns():
            # external domain takes priority
            campaign_url = campaigns.external_campaign_url_for(destination)
            if not campaign_url:
                campaign_url = campaigns.campaign_url_for(destination)
            return redirect(campaign_url)
        if new:
            status.push_status_message(language.WELCOME_MESSAGE, kind='default', jumbotron=True, trust=True)
        return redirect(web_url_for('dashboard'))

    # token is invalid
    if token not in user.email_verifications:
        raise HTTPError(http.BAD_REQUEST)
    verification = user.email_verifications[token]
    email = verification['email']
    # NOTE(review): `.keys()[0]` is Python 2 only (py3 returns a view);
    # the verification record holds a single provider/id pair here.
    provider = verification['external_identity'].keys()[0]
    provider_id = verification['external_identity'][provider].keys()[0]
    # wrong provider
    if provider not in user.external_identity:
        raise HTTPError(http.BAD_REQUEST)
    external_status = user.external_identity[provider][provider_id]

    try:
        ensure_external_identity_uniqueness(provider, provider_id, user)
    except ValidationError as e:
        raise HTTPError(http.FORBIDDEN, e.message)

    if not user.is_registered:
        user.register(email)

    if email.lower() not in user.emails:
        user.emails.append(email.lower())

    user.date_last_logged_in = timezone.now()
    # mark the pending identity ('CREATE' or 'LINK') as verified
    user.external_identity[provider][provider_id] = 'VERIFIED'
    user.social[provider.lower()] = provider_id
    del user.email_verifications[token]
    # new one-time verification key lets CAS authenticate without a password
    user.verification_key = generate_verification_key()
    user.save()

    service_url = request.url

    if external_status == 'CREATE':
        mails.send_mail(to_addr=user.username, mail=mails.WELCOME, mimetype='html', user=user)
        # request.url already carries a query string (destination), so '&' is safe here
        service_url += '&{}'.format(urllib.urlencode({'new': 'true'}))
    elif external_status == 'LINK':
        mails.send_mail(
            user=user,
            to_addr=user.username,
            mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS,
            external_id_provider=provider,
        )

    # redirect to CAS and authenticate the user with the verification key
    return redirect(cas.get_login_url(
        service_url,
        username=user.username,
        verification_key=user.verification_key
    ))