def _process(self):
    """Store an uploaded image as the user's profile picture.

    The image is validated, squared, downscaled to at most 256x256 and
    saved as PNG together with metadata (hash, size, filename, type).
    """
    uploaded = request.files['picture']
    try:
        image = Image.open(uploaded)
    except IOError:
        raise UserValueError(_('You cannot upload this file as profile picture.'))
    if image.format.lower() not in {'jpeg', 'png', 'gif'}:
        raise UserValueError(_('The file has an invalid format ({format}).').format(format=image.format))
    if image.mode not in {'RGB', 'RGBA'}:
        image = image.convert('RGB')
    # normalize to a square, then cap the size at 256px
    image = square(image)
    if image.height > 256:
        image = image.resize((256, 256), resample=Image.BICUBIC)
    buf = BytesIO()
    image.save(buf, 'PNG')
    data = buf.getvalue()
    self.user.picture = data
    self.user.picture_metadata = {
        'hash': crc32(data),
        'size': len(data),
        'filename': os.path.splitext(secure_filename(uploaded.filename, 'user'))[0] + '.png',
        'content_type': 'image/png'
    }
    flash(_('Profile picture uploaded'), 'success')
    logger.info('Profile picture of user %s uploaded by %s', self.user, session.user)
    return jsonify_data(content=get_picture_data(self.user))
def _process(self):
    """Remove the user's stored profile picture and its metadata."""
    self.user.picture = None
    self.user.picture_metadata = None
    logger.info('Profile picture of user %s deleted by %s', self.user, session.user)
    flash(_('Profile picture deleted'), 'success')
    return jsonify_data(content=None)
def _original_user(self):
    """Resolve this legacy ID to a real user object.

    Numeric IDs map directly to a SQLAlchemy ``User``.  Legacy
    ``LDAP:<identifier>:<email>`` and ``Nice:<email>`` style IDs are
    resolved via identity/email lookup, and on success the stored ID is
    rewritten to the matched user's numeric ID (the old value is kept
    in ``self._old_id``).  Returns ``None`` for unknown ID formats.
    """
    # A proper user, with an id that can be mapped directly to sqlalchemy
    if isinstance(self.id, int) or self.id.isdigit():
        return User.get(int(self.id))
    # A user who had no real indico account but an ldap identifier/email.
    # In this case we try to find his real user and replace the ID of this object
    # with that user's ID.
    data = self.id.split(':')
    # TODO: Once everything is in SQLAlchemy this whole thing needs to go away!
    user = None
    if data[0] == 'LDAP':
        # NOTE(review): assumes the legacy id has the form LDAP:<identifier>:<email>;
        # a malformed id would raise IndexError here - confirm upstream guarantees
        identifier = data[1]
        email = data[2]
        # You better have only one ldap provider or at least different identifiers ;)
        identity = Identity.find_first(Identity.provider != 'indico', Identity.identifier == identifier)
        if identity:
            user = identity.user
    elif data[0] == 'Nice':
        email = data[1]
    else:
        return None
    # fall back to an email-based lookup if no identity matched
    if not user:
        user = User.find_first(User.all_emails.contains(email))
    if user:
        self._old_id = self.id
        self.id = str(user.id)
        logger.info("Updated legacy user id (%s => %s)", self._old_id, self.id)
    return user
def _process(self):
    """Confirm a pending email token and attach the address to the user.

    If the address belongs to a pending user, that user is merged into
    the current one first.  Always redirects back to the emails page.
    """
    token = request.view_args['token']
    data = self.token_storage.get(token)
    valid, existing = self._validate(data)
    if not valid:
        return redirect(url_for('.user_emails'))
    self.token_storage.delete(token)
    if existing and existing.is_pending:
        logger.info("Found pending user %s to be merged into %s", existing, self.user)
        # If the pending user has missing names, copy them from the active one
        # to allow it to be marked as not pending and deleted during the merge.
        existing.first_name = existing.first_name or self.user.first_name
        existing.last_name = existing.last_name or self.user.last_name
        merge_users(existing, self.user)
        flash(_("Merged data from existing '{}' identity").format(existing.email))
        existing.is_pending = False
    self.user.secondary_emails.add(data['email'])
    signals.users.email_added.send(self.user, email=data['email'])
    flash(_('The email address {email} has been added to your account.').format(email=data['email']), 'success')
    return redirect(url_for('.user_emails'))
def _process_PUT(self):
    """Block a user account; blocking yourself is forbidden."""
    if self.user == session.user:
        raise Forbidden(_('You cannot block yourself'))
    self.user.is_blocked = True
    logger.info('User %s blocked %s', session.user, self.user)
    flash(_('{name} has been blocked.').format(name=self.user.name), 'success')
    return jsonify(success=True)
def _process(self, source):
    """Switch the user's profile picture source and refresh the avatar.

    ``standard`` clears any stored picture; ``custom`` stores an
    uploaded image (squared, capped at 256px, saved as PNG); otherwise
    a gravatar/identicon is fetched and stored.
    """
    self.user.picture_source = source
    if source == ProfilePictureSource.standard:
        # default avatar needs no stored picture
        self.user.picture = None
        self.user.picture_metadata = None
        logger.info('Profile picture of user %s removed by %s', self.user, session.user)
        return '', 204
    if source == ProfilePictureSource.custom:
        uploaded = request.files['picture']
        try:
            image = Image.open(uploaded)
        except OSError:
            raise UserValueError(_('You cannot upload this file as profile picture.'))
        if image.format.lower() not in {'jpeg', 'png', 'gif', 'webp'}:
            raise UserValueError(_('The file has an invalid format ({format}).').format(format=image.format))
        if image.mode not in ('RGB', 'RGBA'):
            image = image.convert('RGB')
        image = square(image)
        if image.height > 256:
            image = image.resize((256, 256), resample=Image.BICUBIC)
        buf = BytesIO()
        image.save(buf, 'PNG')
        set_user_avatar(self.user, buf.getvalue(), uploaded.filename)
    else:
        content, lastmod = get_gravatar_for_user(self.user, source == ProfilePictureSource.identicon, 256)
        set_user_avatar(self.user, content, source.name, lastmod)
    logger.info('Profile picture of user %s updated by %s', self.user, session.user)
    return '', 204
def _process_DELETE(self):
    """Unblock a previously blocked user account."""
    self.user.is_blocked = False
    logger.info('User %s unblocked %s', session.user, self.user)
    flash(_('{name} has been unblocked.').format(name=self.user.name), 'success')
    return jsonify(success=True)
def _process(self):
    """Render the user-merge form and perform the merge on submission.

    On a valid submission the source user is merged into the target and
    the admin is redirected to the target's profile page.
    """
    form = MergeForm()
    if form.validate_on_submit():
        source = form['source_user'].data
        target = form['target_user'].data
        # Use lazy %-style arguments (not str.format) so the message is only
        # built if the record is actually emitted - matches the rest of the file.
        logger.info("User %s initiated merge of %s into %s", session.user, source, target)
        merge_users(source, target)
        flash(_('The users have been successfully merged.'), 'success')
        return redirect(url_for('.user_profile', user_id=target.id))
    return WPUsersAdmin.render_template('users_merge.html', form=form)
def synchronize_data(self, refresh=False, silent=False):
    """Synchronize the fields of the user from the sync identity.

    This will take only into account :attr:`synced_fields`.

    :param refresh: bool -- Whether to refresh the synced identity with
                    the sync provider before instead of using the
                    stored data. (Only if the sync provider supports
                    refresh.)
    :param silent: bool -- Whether to just synchronize but not flash
                   any messages
    """
    from indico.modules.users import logger
    identity = self._get_synced_identity(refresh=refresh)
    if identity is None:
        return
    if not any(identity.data.values()):
        # refuse to sync with empty identities, just in case - if there is no
        # data at all there's a good chance something is wrong!
        return
    affiliation_data = identity.data.get('affiliation_data')
    for field in self.synced_fields:
        old_value = getattr(self, field)
        # missing values come through as '' so comparisons below are uniform
        new_value = identity.data.get(field) or ''
        if field == 'email':
            new_value = new_value.lower()
        if field in ('first_name', 'last_name', 'email') and not new_value:
            # never wipe these essential fields with an empty synced value
            continue
        if field == 'affiliation':
            # keep the predefined-affiliation link in sync even if the plain
            # text value below turns out to be unchanged
            if affiliation_data:
                self._affiliation.affiliation_link = Affiliation.get_or_create_from_data(affiliation_data)
            else:
                self._affiliation.affiliation_link = None  # clear link to predefined affiliation
        if old_value == new_value:
            continue
        logger.info('Syncing %s for %r from %r to %r', field, self, old_value, new_value)
        if field == 'email':
            # email changes go through a dedicated path; skip the flash below
            # if that path reports the change was not applied
            if not self._synchronize_email(new_value, silent=silent):
                continue
        else:
            setattr(self, field, new_value)
        if not silent:
            flash(_("Your {field_name} has been synchronized from '{old_value}' to '{new_value}'.")
                  .format(field_name=syncable_fields[field], old_value=old_value, new_value=new_value))
def _process(self):
    """Verify a secondary-email confirmation token and add the address.

    A pending user owning the address is merged into the current user
    first.  Always redirects back to the emails page.
    """
    token = request.view_args['token']
    data = self.token_storage.get(token)
    valid, existing = self._validate(data)
    if valid:
        self.token_storage.delete(token)
        if existing and existing.is_pending:
            # Lazy %-style logging args instead of str.format: the message is
            # only built if emitted, consistent with the rest of the module.
            logger.info("Found pending user %s to be merged into %s", existing, self.user)
            merge_users(existing, self.user)
            flash(_("Merged data from existing '{}' identity").format(existing.email))
            existing.is_pending = False
        self.user.secondary_emails.add(data['email'])
        flash(_('The email address {email} has been added to your account.').format(email=data['email']), 'success')
    return redirect(url_for('.user_emails'))
def _process(self, changes):
    """Apply submitted profile changes for the user.

    ``changes`` maps field names to new values; the special keys
    ``synced_fields`` and ``affiliation_data`` are popped and handled
    here before the remaining fields are applied.
    """
    logger.info('Profile of user %r updated by %r: %r', self.user, session.user, changes)
    synced_fields = set(changes.pop('synced_fields', self.user.synced_fields))
    # fields that either have a synced value or may always be synced
    # (anything except the two name fields)
    syncable_fields = {k for k, v in self.user.synced_values.items() if v or k not in ('first_name', 'last_name')}
    # we set this first so these fields are skipped below and only
    # get updated in synchronize_data which will flash a message
    # informing the user about the changes made by the sync
    self.user.synced_fields = synced_fields & syncable_fields
    if (affiliation_data := changes.pop('affiliation_data', None)) and 'affiliation' not in self.user.synced_fields:
        if affiliation_data['affiliation_id']:
            # link to a predefined affiliation and mirror its canonical name
            self.user._affiliation.affiliation_link = Affiliation.get_or_404(affiliation_data['affiliation_id'],
                                                                             is_deleted=False)
            self.user._affiliation.name = self.user._affiliation.affiliation_link.name
        else:
            # free-text affiliation: drop any link to a predefined one
            self.user._affiliation.affiliation_link = None
            self.user._affiliation.name = affiliation_data['name']
def _process(self):
    """Show the user-merge form and run the merge after sanity checks."""
    form = MergeForm()
    if not form.validate_on_submit():
        return WPUsersAdmin.render_template('users_merge.html', 'users', form=form)
    source = form.source_user.data
    target = form.target_user.data
    errors, warnings = _get_merge_problems(source, target)
    if errors:
        raise BadRequest(_('Merge aborted due to failed sanity check'))
    if warnings:
        logger.info("User %s initiated merge of %s into %s (with %d warnings)",
                    session.user, source, target, len(warnings))
    else:
        logger.info("User %s initiated merge of %s into %s", session.user, source, target)
    merge_users(source, target)
    flash(_('The users have been successfully merged.'), 'success')
    return redirect(url_for('.user_profile', user_id=target.id))
def update_gravatars(user=None):
    """Refresh stored gravatar profile pictures.

    :param user: if given, only this user is refreshed (explicitly
                 scheduled, e.g. after an email change); otherwise all
                 non-deleted users whose picture source is gravatar are
                 processed
    """
    if user is not None:
        # explicitly scheduled update (after an email change)
        if user.picture_source not in (ProfilePictureSource.gravatar, ProfilePictureSource.identicon):
            return
        users = [user]
    else:
        users = User.query.filter(~User.is_deleted, User.picture_source == ProfilePictureSource.gravatar).all()
    for user in committing_iterator(users):
        gravatar, lastmod = get_gravatar_for_user(user, identicon=False, lastmod=user.picture_metadata['lastmod'])
        if gravatar is None:
            # the helper reported no change since `lastmod` (not modified)
            logger.debug('Gravatar for %r did not change (not modified)', user)
            continue
        if crc32(gravatar) == user.picture_metadata['hash']:
            logger.debug('Gravatar for %r did not change (same hash)', user)
            # record the fresh last-modified value; flag_modified is required
            # because mutating the dict in place is not tracked by sqlalchemy
            user.picture_metadata['lastmod'] = lastmod
            flag_modified(user, 'picture_metadata')
            continue
        set_user_avatar(user, gravatar, 'gravatar', lastmod)
        logger.info('Gravatar of user %s updated', user)
def _process(self):
    """Handle confirmation of a pending secondary email address."""
    token = request.view_args['token']
    payload = self.token_storage.get(token)
    valid, pending = self._validate(payload)
    if valid:
        self.token_storage.delete(token)
        email = payload['email']
        if pending and pending.is_pending:
            logger.info("Found pending user %s to be merged into %s", pending, self.user)
            # If the pending user has missing names, copy them from the active one
            # to allow it to be marked as not pending and deleted during the merge.
            if not pending.first_name:
                pending.first_name = self.user.first_name
            if not pending.last_name:
                pending.last_name = self.user.last_name
            merge_users(pending, self.user)
            flash(_("Merged data from existing '{}' identity").format(pending.email))
            pending.is_pending = False
        self.user.secondary_emails.add(email)
        signals.users.email_added.send(self.user, email=email)
        flash(_('The email address {email} has been added to your account.').format(email=email), 'success')
    return redirect(url_for('.user_emails'))
def _process(self):
    """Verify an email-confirmation token and register the new address.

    Merges a pending user owning the address into the current user
    before adding it.  Always redirects back to the emails page.
    """
    token = request.view_args['token']
    data = self.token_storage.get(token)
    valid, existing = self._validate(data)
    if valid:
        self.token_storage.delete(token)
        if existing and existing.is_pending:
            # Lazy %-style logging args instead of str.format, so the message
            # is built only when emitted - consistent with the module style.
            logger.info("Found pending user %s to be merged into %s", existing, self.user)
            merge_users(existing, self.user)
            flash(_("Merged data from existing '{}' identity").format(existing.email))
            existing.is_pending = False
        self.user.secondary_emails.add(data['email'])
        signals.users.email_added.send(self.user, email=data['email'])
        flash(_('The email address {email} has been added to your account.').format(email=data['email']), 'success')
    return redirect(url_for('.user_emails'))
def merge_users(source, target, force=False):
    """Merge two users together, unifying all related data

    :param source: source user (will be set as deleted)
    :param target: target user (final)
    :param force: whether to merge even if the source user is already
                  marked as deleted
    """
    if source.is_deleted and not force:
        raise ValueError('Source user {} has been deleted. Merge aborted.'.format(source))
    if target.is_deleted:
        raise ValueError('Target user {} has been deleted. Merge aborted.'.format(target))
    # Move emails to the target user
    primary_source_email = source.email
    logger.info("Target %s initial emails: %s", target, ', '.join(target.all_emails))
    logger.info("Source %s emails to be linked to target %s: %s", source, target, ', '.join(source.all_emails))
    # bulk update: all source emails become secondary emails of the target
    UserEmail.query.filter_by(user_id=source.id).update({
        UserEmail.user_id: target.id,
        UserEmail.is_primary: False
    })
    # Make sure we don't have stale data after the bulk update we just performed
    db.session.expire_all()
    # Update favorites
    target.favorite_users |= source.favorite_users
    target.favorite_of |= source.favorite_of
    target.favorite_categories |= source.favorite_categories
    # Update category suggestions
    SuggestedCategory.merge_users(target, source)
    # Merge identities
    for identity in set(source.identities):
        identity.user = target
    # Notify signal listeners about the merge
    signals.users.merged.send(target, source=source)
    db.session.flush()
    # Mark source as merged
    source.merged_into_user = target
    source.is_deleted = True
    db.session.flush()
    # Restore the source user's primary email
    # NOTE(review): presumably re-flags the corresponding UserEmail row that the
    # bulk update above demoted to secondary - confirm against the email setter
    source.email = primary_source_email
    db.session.flush()
    logger.info("Successfully merged %s into %s", source, target)
def merge_users(source, target, force=False):
    """Merge two users together, unifying all related data

    :param source: source user (will be set as deleted)
    :param target: target user (final)
    :param force: whether to merge even if the source user is already
                  marked as deleted
    """
    if source.is_deleted and not force:
        raise ValueError('Source user {} has been deleted. Merge aborted.'.format(source))
    if target.is_deleted:
        raise ValueError('Target user {} has been deleted. Merge aborted.'.format(target))
    # Move emails to the target user
    primary_source_email = source.email
    logger.info("Target %s initial emails: %s", target, ', '.join(target.all_emails))
    logger.info("Source %s emails to be linked to target %s: %s", source, target, ', '.join(source.all_emails))
    # bulk update: all source emails become secondary emails of the target
    UserEmail.find(user_id=source.id).update({
        UserEmail.user_id: target.id,
        UserEmail.is_primary: False
    })
    # Make sure we don't have stale data after the bulk update we just performed
    db.session.expire_all()
    # Update favorites
    target.favorite_users |= source.favorite_users
    target.favorite_of |= source.favorite_of
    target.favorite_categories |= source.favorite_categories
    # Update category suggestions
    SuggestedCategory.merge_users(target, source)
    # Merge identities
    for identity in set(source.identities):
        identity.user = target
    # Notify signal listeners about the merge
    signals.users.merged.send(target, source=source)
    db.session.flush()
    # Mark source as merged
    source.merged_into_user = target
    source.is_deleted = True
    db.session.flush()
    # Restore the source user's primary email
    # NOTE(review): presumably re-flags the UserEmail row demoted by the bulk
    # update above - confirm against the email setter
    source.email = primary_source_email
    db.session.flush()
    logger.info("Successfully merged %s into %s", source, target)
def merge_users(source, target, force=False):
    """Merge two users together, unifying all related data

    :param source: source user (will be set as deleted)
    :param target: target user (final)
    :param force: whether to merge even if the source user is already
                  marked as deleted
    """
    if source.is_deleted and not force:
        raise ValueError('Source user {} has been deleted. Merge aborted.'.format(source))
    if target.is_deleted:
        raise ValueError('Target user {} has been deleted. Merge aborted.'.format(target))
    # Merge links
    for link in source.linked_objects:
        if link.object is None:
            # remove link if object does no longer exist
            db.session.delete(link)
        else:
            link.user = target
    # De-duplicate links: keep one link per (object, role) pair
    unique_links = {(link.object, link.role): link for link in target.linked_objects}
    to_delete = set(target.linked_objects) - set(unique_links.viewvalues())
    for link in to_delete:
        db.session.delete(link)
    # Move emails to the target user
    primary_source_email = source.email
    logger.info("Target %s initial emails: %s", target, ', '.join(target.all_emails))
    logger.info("Source %s emails to be linked to target %s: %s", source, target, ', '.join(source.all_emails))
    # bulk update: all source emails become secondary emails of the target
    UserEmail.find(user_id=source.id).update({
        UserEmail.user_id: target.id,
        UserEmail.is_primary: False
    })
    # Make sure we don't have stale data after the bulk update we just performed
    db.session.expire_all()
    # Update favorites
    target.favorite_users |= source.favorite_users
    target.favorite_of |= source.favorite_of
    target.favorite_categories |= source.favorite_categories
    # Merge avatars in redis
    if redis_write_client:
        avatar_links.merge_avatars(target, source)
        suggestions.merge_avatars(target, source)
    # Merge identities
    for identity in set(source.identities):
        identity.user = target
    # Notify signal listeners about the merge
    signals.users.merged.send(target, source=source)
    db.session.flush()
    # Mark source as merged
    source.merged_into_user = target
    source.is_deleted = True
    db.session.flush()
    # Restore the source user's primary email
    source.email = primary_source_email
    db.session.flush()
    # fixed: this format string was broken across two physical lines in the
    # extracted source; reconstructed as a single one-line literal
    logger.info("Successfully merged %s into %s", source, target)