def post(self):
    """Tag the given document as todo for the authenticated user.

    Request:
        `POST` `/tags/add`

    Request body:
        {'document_id': @document_id@}
    """
    request = self.request
    doc_id = request.validated['document_id']
    uid = request.authenticated_userid

    # refuse duplicate tags
    if get_tag_relation(uid, doc_id):
        raise HTTPBadRequest('This document is already tagged.')

    # shared column values for the relation and its log entry
    common = dict(
        user_id=uid, document_id=doc_id, document_type=ROUTE_TYPE)
    DBSession.add(DocumentTag(**common))
    DBSession.add(DocumentTagLog(is_creation=True, **common))

    notify_es_syncer(request.registry.queue_config)
    return {}
def post(self):
    """Untag the given document for the authenticated user.

    Request:
        `POST` `/tags/remove`

    Request body:
        {'document_id': @document_id@}
    """
    request = self.request
    doc_id = request.validated['document_id']
    uid = request.authenticated_userid

    relation = get_tag_relation(uid, doc_id)
    if not relation:
        # nothing to remove: report and reject
        log.warning(
            'tried to delete not existing tag relation '
            '({0}, {1})'.format(uid, doc_id))
        raise HTTPBadRequest('This document has no such tag.')

    DBSession.delete(relation)
    # keep a log entry recording the removal
    DBSession.add(DocumentTagLog(
        user_id=uid, document_id=doc_id,
        document_type=ROUTE_TYPE, is_creation=False))

    notify_es_syncer(request.registry.queue_config)
    return {}
def _collection_post(
        self, schema, before_add=None, after_add=None):
    """Create a new document from the validated request body.

    Runs the optional hooks, records the first version, refreshes area
    links, creates requested associations and notifies the search
    syncer. Returns ``{'document_id': ...}`` for the new document.
    """
    uid = self.request.authenticated_userid
    payload = self.request.validated

    document = schema.objectify(payload)
    document.document_id = None  # let the database assign the id

    if before_add:
        before_add(document, user_id=uid)

    DBSession.add(document)
    DBSession.flush()  # assigns document.document_id
    DocumentRest.create_new_version(document, uid)

    if document.type != AREA_TYPE:
        update_areas_for_document(document, reset=False)

    if after_add:
        after_add(document, user_id=uid)

    if payload.get('associations', None):
        create_associations(document, payload['associations'], uid)

    notify_es_syncer(self.request.registry.queue_config)
    return {'document_id': document.document_id}
def test_process_task(self):
    """Tests that the syncer listens to messages and sends changes to
    ElasticSearch.

    End-to-end check: create a waypoint with a French locale, record a
    first version, notify the queue in its own transaction, let the
    sync worker consume exactly one message, then verify the indexed
    document in ElasticSearch.
    """
    waypoint = Waypoint(
        document_id=51251, waypoint_type='summit', elevation=2000,
        geometry=DocumentGeometry(
            geom='SRID=3857;POINT(635956 5723604)'),
        locales=[
            WaypointLocale(
                lang='fr', title='Mont Granier', description='...',
                summary='Le Mont [b]Granier[/b]')
        ])
    self.session.add(waypoint)
    self.session.flush()
    user_id = self.global_userids['contributor']
    DocumentRest.create_new_version(waypoint, user_id)
    self.session.flush()

    # send the notification in a separate transaction so the syncer
    # can see the committed changes
    t = transaction.begin()
    notify_es_syncer(self.queue_config)
    t.commit()

    syncer = SyncWorker(
        self.queue_config.connection, self.queue_config.queue,
        session=self.session)
    # process exactly one message from the queue
    next(syncer.consume(limit=1))

    index = elasticsearch_config['index']
    doc = SearchWaypoint.get(id=51251, index=index)
    self.assertEqual(doc['title_fr'], 'Mont Granier')
    # BBCode markup ([b]...[/b]) is stripped when indexing; the
    # trailing space in the expected value is intentional
    self.assertEqual(doc['summary_fr'], 'Le Mont Granier ')
    self.assertEqual(doc['doc_type'], 'w')
def _collection_post(self, schema, before_add=None, after_add=None):
    """Persist a new document and all its bookkeeping.

    Objectifies the validated input, stores it, creates the initial
    version, refreshes area links, runs the hooks, creates any
    requested associations and pings the ES syncer.
    """
    request = self.request
    user_id = request.authenticated_userid
    data = request.validated

    new_doc = schema.objectify(data)
    new_doc.document_id = None  # force a fresh id on insert

    if before_add:
        before_add(new_doc, user_id=user_id)

    DBSession.add(new_doc)
    DBSession.flush()
    DocumentRest.create_new_version(new_doc, user_id)

    if new_doc.type != AREA_TYPE:
        update_areas_for_document(new_doc, reset=False)

    if after_add:
        after_add(new_doc, user_id=user_id)

    if data.get('associations', None):
        create_associations(new_doc, data['associations'], user_id)

    notify_es_syncer(request.registry.queue_config)
    return {'document_id': new_doc.document_id}
def delete(self):
    """Delete a document locale.

    Request:
        `DELETE` `/documents/delete/{id}/{lang}`
    """
    validated = self.request.validated
    document_id = validated['id']
    document_type = validated['document_type']
    lang = validated['lang']

    # If an error occurs while deleting, SQLAlchemy will automatically
    # cancel all DB changes.

    # Removing the last remaining locale means removing the whole
    # document.
    if validated['is_only_locale']:
        return self._delete(document_id, document_type)

    clazz, clazz_locale, archive_clazz, archive_clazz_locale = \
        _get_models(document_type)

    _remove_locale_versions(document_id, lang)
    _remove_archive_locale(archive_clazz_locale, document_id, lang)
    _remove_locale(clazz_locale, document_id, lang)
    update_langs_of_changes(document_id)

    update_cache_version_full(document_id, document_type)
    update_deleted_locales_list(document_id, document_type, lang)
    notify_es_syncer(self.request.registry.queue_config)

    return {}
def test_process_task(self):
    """Tests that the syncer listens to messages and sends changes to
    ElasticSearch.

    Same pipeline as the basic variant, but constructs the SyncWorker
    with an extra positional argument (1000 — presumably a batch size;
    TODO confirm against the SyncWorker signature).
    """
    waypoint = Waypoint(
        document_id=51251, waypoint_type='summit', elevation=2000,
        geometry=DocumentGeometry(
            geom='SRID=3857;POINT(635956 5723604)'),
        locales=[
            WaypointLocale(
                lang='fr', title='Mont Granier', description='...',
                summary='Le Mont [b]Granier[/b]')
        ])
    self.session.add(waypoint)
    self.session.flush()
    user_id = self.global_userids['contributor']
    DocumentRest.create_new_version(waypoint, user_id)
    self.session.flush()

    # the notification must be committed in its own transaction so the
    # worker sees the changes
    t = transaction.begin()
    notify_es_syncer(self.queue_config)
    t.commit()

    syncer = SyncWorker(
        self.queue_config.connection, self.queue_config.queue, 1000,
        session=self.session)
    # consume exactly one message
    next(syncer.consume(limit=1))

    index = elasticsearch_config['index']
    doc = SearchWaypoint.get(id=51251, index=index)
    self.assertEqual(doc['title_fr'], 'Mont Granier')
    # NOTE(review): the summary assertion is disabled; presumably the
    # BBCode-stripping expectation changed — confirm before re-enabling
    # self.assertEqual(doc['summary_fr'], 'Le Mont Granier ')
    self.assertEqual(doc['doc_type'], 'w')
def merge_user_accounts(source_user_id, target_user_id, queue_config):
    """Fold the source user account into the target account.

    Transfers associations, tags, feed entries and contribution
    history to the target user, removes the source account, refreshes
    caches and tells ElasticSearch the source account is gone. Each
    step prints a progress message before running.
    """
    def _refresh_caches():
        # update the cache version for the source and target accounts
        update_cache_version_direct(source_user_id)
        update_cache_version_full(target_user_id, USERPROFILE_TYPE)

    steps = [
        ('Removing from cache...',
         lambda: remove_from_cache(source_user_id)),
        ('Removing geo associations...',
         lambda: _remove_geo_associations(source_user_id)),
        ('Transfering associations...',
         lambda: _transfer_associations(source_user_id, target_user_id)),
        ('Transfering tags...',
         lambda: _transfer_tags(source_user_id, target_user_id)),
        ('Updating feed entries...',
         lambda: _update_feed_entries(source_user_id, target_user_id)),
        ('Updating contributions versions and histories...',
         lambda: _update_history_metadata(source_user_id, target_user_id)),
        ('Unregistering from mailing lists...',
         lambda: _unregister_from_mailinglists(source_user_id)),
        ('Removing profile and user account...',
         lambda: _remove_user_account(source_user_id)),
        ('Updating associated documents cache...',
         _refresh_caches),
        # notify ES the source account no longer exists
        ('Notifying Elastic Search...',
         lambda: notify_es_syncer(queue_config)),
    ]

    for message, action in steps:
        print(message)
        action()
def _put(
        self, clazz, schema, clazz_locale=None,
        before_update=None, after_update=None):
    """Update an existing document from the validated request body.

    Rejects merged (redirected) and protected documents, detects what
    changed by comparing version counters before/after the flush,
    creates a new version when needed, refreshes area links on
    geometry changes and synchronizes associations.
    """
    user_id = self.request.authenticated_userid
    id = self.request.validated['id']
    document_in = \
        schema.objectify(self.request.validated['document'])
    self._check_document_id(id, document_in.document_id)

    # get the current version of the document
    document = self._get_document(clazz, id, clazz_locale=clazz_locale)

    if document.redirects_to:
        raise HTTPBadRequest('can not update merged document')
    if document.protected and not self.request.has_permission('moderator'):
        raise HTTPForbidden('No permission to change a protected document')

    self._check_versions(document, document_in)

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    # update the document with the input document
    document.update(document_in)

    if before_update:
        before_update(document, document_in, user_id=user_id)

    try:
        DBSession.flush()
    except StaleDataError:
        # version counters moved under us: someone else saved first
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_types, changed_langs) = document.get_update_type(old_versions)
    if update_types:
        # A new version needs to be created and persisted
        self._update_version(
            document, user_id, self.request.validated['message'],
            update_types, changed_langs)

        if document.type != AREA_TYPE and UpdateType.GEOM in update_types:
            update_areas_for_document(document, reset=True)

        if after_update:
            after_update(document, update_types, user_id=user_id)

        # And the search updated
        # NOTE(review): reconstructed nesting — the ES notification
        # appears to run only when a new version was created; confirm
        # against version-control history.
        notify_es_syncer(self.request.registry.queue_config)

    associations = self.request.validated.get('associations', None)
    if associations:
        synchronize_associations(document, associations, user_id)

    return {}
def post(self):
    """Merges a document into another document.

    - Associations and tags of the source document are transferred
      to the target document.
    - The association log entries are rewritten to the target document.
    - The time of the log entries is updated, so that the ES syncer
      will pick up the new associations of the target document.
    - The attribute `redirects_to` of the source document is set.
    - A new version is created for the source document. This makes
      sure that the ES syncer removes the document from ES index.
    - Update the cache version of the source document.
    - Update the cache version of the target document and its
      associations.
    - Removes the feed entries of the source document.

    Request:
        `POST` `/documents/merge`

    Request body:
        {
            'source_document_id': @document_id@,
            'target_document_id': @document_id@
        }
    """
    source_document_id = self.request.validated['source_document_id']
    target_document_id = self.request.validated['target_document_id']
    source_doc = DBSession.query(Document).get(source_document_id)

    # transfer associations from source to target
    transfer_associations(source_document_id, target_document_id)

    # transfer tags from source to target
    transfer_tags(source_document_id, target_document_id)

    # if waypoint, update main waypoint of routes
    if source_doc.type == WAYPOINT_TYPE:
        _transfer_main_waypoint(source_document_id, target_document_id)

    # set redirection and create a new version
    source_doc.redirects_to = target_document_id
    DocumentRest.update_version(
        source_doc, self.request.authenticated_userid,
        'merged with {}'.format(target_document_id),
        [UpdateType.FIGURES], [])

    # update the cache version for the source and target document
    update_cache_version_direct(source_document_id)
    update_cache_version_full(target_document_id, source_doc.type)

    _remove_feed_entry(source_document_id)

    if source_doc.type == IMAGE_TYPE:
        # image files are cleaned up only if the transaction succeeds
        delete_all_files_for_image(source_document_id, self.request)

    notify_es_syncer(self.request.registry.queue_config)

    return {}
def _delete(self, document_id, document_type):
    """Remove a document entirely, record it as deleted and notify the
    search syncer so it drops the document from the index.
    """
    is_image = document_type == IMAGE_TYPE
    if is_image:
        # Files are actually removed only if the transaction succeeds
        delete_all_files_for_image(document_id, self.request)

    self._delete_document(document_id, document_type)
    update_deleted_documents_list(document_id, document_type)
    notify_es_syncer(self.request.registry.queue_config)

    return {}
def _create_document(self, document_in, schema, before_add=None,
                     after_add=None, allow_anonymous=False):
    """Create a document with full bookkeeping.

    Optionally attributes the document to the configured anonymous
    user, stores it with an initial version, refreshes area/map links,
    creates permission-checked associations, adds a feed entry and
    notifies the search syncer. Returns the new document.
    """
    # attribute to the anonymous account only when the caller allows
    # it, the client asked for it and such an account is configured
    use_anonymous = (
        allow_anonymous and
        document_in.get('anonymous') and
        self.request.registry.anonymous_user_id)
    if use_anonymous:
        user_id = self.request.registry.anonymous_user_id
    else:
        user_id = self.request.authenticated_userid

    document = schema.objectify(document_in)
    document.document_id = None

    if before_add:
        before_add(document, user_id)

    DBSession.add(document)
    DBSession.flush()
    DocumentRest.create_new_version(document, user_id)

    if document.type != AREA_TYPE:
        update_areas_for_document(document, reset=False)
    if document.type != MAP_TYPE:
        update_maps_for_document(document, reset=False)

    if after_add:
        after_add(document, user_id=user_id)

    associations_in = document_in.get('associations', None)
    if associations_in:
        # outings have their own association permission rules
        check_association = association_permission_checker(
            self.request,
            skip_outing_check=document.type == OUTING_TYPE)

        added_associations = create_associations(
            document, associations_in, user_id,
            check_association=check_association)
        update_cache_version_associations(
            added_associations, [], document.document_id)

    update_feed_document_create(document, user_id)
    notify_es_syncer(self.request.registry.queue_config)

    return document
def post(self):
    """Confirm a user's email address via their validation nonce.

    Marks the email as validated, schedules the profile for search
    indexing, logs the user in and prepares a Discourse SSO redirect.
    Returns the login token response, or sets a 403 error when the
    login fails.

    Fix: the two bare ``except:`` clauses were narrowed to
    ``except Exception:`` so that ``SystemExit``/``KeyboardInterrupt``
    are no longer swallowed (consistent with the other copy of this
    handler in the codebase).
    """
    request = self.request
    user = request.validated['user']
    user.clear_validation_nonce()
    user.email_validated = True

    # the user profile can be indexed once the account is confirmed
    notify_es_syncer(self.request.registry.queue_config)

    # Synchronizing to Discourse is unnecessary as it will be done
    # during the redirect_without_nonce call below.

    # The user was validated by the nonce so we can log in
    token = log_validated_user_i_know_what_i_do(user, request)

    if token:
        response = token_to_response(user, token, request)
        settings = request.registry.settings
        try:
            client = get_discourse_client(settings)
            r = client.redirect_without_nonce(user)
            response['redirect_internal'] = r
        except Exception:
            # Any error with discourse must prevent login and validation
            log.error('Error logging into discourse for %d', user.id,
                      exc_info=True)
            raise HTTPInternalServerError('Error with Discourse')
        try:
            DBSession.flush()
        except Exception:
            log.warning('Error persisting user', exc_info=True)
            raise HTTPInternalServerError('Error persisting user')
        return response
    else:
        request.errors.status = 403
        request.errors.add('body', 'user', 'Login failed')
        return None
def post(self):
    """Confirm a user's email address via their validation nonce.

    Marks the email as validated, schedules the profile for search
    indexing, logs the user in and prepares a Discourse SSO redirect.
    Returns the login token response, or sets a 403 error when the
    login fails.

    Fix: bare ``except:`` clauses narrowed to ``except Exception:`` —
    a bare except also traps ``SystemExit`` and ``KeyboardInterrupt``,
    which should never be converted into HTTP 500s.
    """
    request = self.request
    user = request.validated['user']
    user.clear_validation_nonce()
    user.email_validated = True

    # the user profile can be indexed once the account is confirmed
    notify_es_syncer(self.request.registry.queue_config)

    # Synchronizing to Discourse is unnecessary as it will be done
    # during the redirect_without_nonce call below.

    # The user was validated by the nonce so we can log in
    token = log_validated_user_i_know_what_i_do(user, request)

    if token:
        response = token_to_response(user, token, request)
        settings = request.registry.settings
        try:
            client = get_discourse_client(settings)
            r = client.redirect_without_nonce(user)
            response['redirect_internal'] = r
        except Exception:
            # Any error with discourse must prevent login and validation
            log.error(
                'Error logging into discourse for %d', user.id,
                exc_info=True)
            raise HTTPInternalServerError('Error with Discourse')
        try:
            DBSession.flush()
        except Exception:
            log.warning('Error persisting user', exc_info=True)
            raise HTTPInternalServerError('Error persisting user')
        return response
    else:
        request.errors.status = 403
        request.errors.add('body', 'user', 'Login failed')
        return None
def delete(self):
    """Delete a document.

    Request:
        `DELETE` `/documents/delete/{id}`
    """
    validated = self.request.validated
    document_id = validated['id']
    document_type = validated['document_type']

    # Should anything fail while deleting, SQLAlchemy automatically
    # cancels all DB changes.
    if document_type == IMAGE_TYPE:
        # Files are actually removed only if the transaction succeeds
        delete_all_files_for_image(document_id, self.request)

    self._delete_document(document_id, document_type)
    _update_deleted_documents_list(document_id, document_type)
    notify_es_syncer(self.request.registry.queue_config)

    return {}
def _create_document(
        self, document_in, schema, before_add=None, after_add=None):
    """Create a new document for the authenticated user.

    Stores the objectified input with an initial version, refreshes
    area/map links, runs hooks, creates permission-checked
    associations, adds a feed entry and pings the search syncer.
    Returns the new document.
    """
    user_id = self.request.authenticated_userid

    doc = schema.objectify(document_in)
    doc.document_id = None  # force insert with a fresh id

    if before_add:
        before_add(doc, user_id)

    DBSession.add(doc)
    DBSession.flush()
    DocumentRest.create_new_version(doc, user_id)

    if doc.type != AREA_TYPE:
        update_areas_for_document(doc, reset=False)
    if doc.type != MAP_TYPE:
        update_maps_for_document(doc, reset=False)

    if after_add:
        after_add(doc, user_id=user_id)

    if document_in.get('associations', None):
        # outings have a dedicated association permission rule
        checker = association_permission_checker(
            self.request,
            skip_outing_check=doc.type == OUTING_TYPE)
        new_links = create_associations(
            doc, document_in['associations'], user_id,
            check_association=checker)
        update_cache_version_associations(
            new_links, [], doc.document_id)

    update_feed_document_create(doc, user_id)
    notify_es_syncer(self.request.registry.queue_config)

    return doc
def update_document(document, document_in, request, before_update=None,
                    after_update=None, manage_versions=None):
    """Apply `document_in` to `document`, versioning what changed.

    Runs the optional hooks, detects change types by comparing version
    counters across the flush, refreshes area/map links on geometry
    changes, synchronizes permission-checked associations, updates the
    feed and notifies the search syncer. Returns the detected update
    types (falsy when nothing changed).
    """
    user_id = request.authenticated_userid

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    if before_update:
        before_update(document, document_in)

    # update the document with the input document
    document.update(document_in)

    if manage_versions:
        manage_versions(document, old_versions)

    try:
        DBSession.flush()
    except StaleDataError:
        # version counters changed underneath us
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_types, changed_langs) = document.get_update_type(old_versions)

    if update_types:
        # A new version needs to be created and persisted
        DocumentRest.update_version(
            document, user_id, request.validated['message'],
            update_types, changed_langs)

        if document.type != AREA_TYPE and UpdateType.GEOM in update_types:
            update_areas_for_document(document, reset=True)
        if document.type != MAP_TYPE and UpdateType.GEOM in update_types:
            update_maps_for_document(document, reset=True)

        if after_update:
            after_update(document, update_types, user_id=user_id)

        update_cache_version(document)

    # association handling runs even when the document itself is
    # unchanged
    associations = request.validated.get('associations', None)
    if associations:
        check_association_add = \
            association_permission_checker(request)
        check_association_remove = \
            association_permission_removal_checker(request)

        added_associations, removed_associations = \
            synchronize_associations(
                document, associations, user_id,
                check_association_add=check_association_add,
                check_association_remove=check_association_remove)

    if update_types or associations:
        # update search index
        notify_es_syncer(request.registry.queue_config)
        update_feed_document_update(document, user_id, update_types)

    # short-circuit keeps this safe: removed/added_associations are
    # only bound when `associations` is truthy
    if associations and (removed_associations or added_associations):
        update_cache_version_associations(
            added_associations, removed_associations)

    return update_types
def notify_es_syncer_if_needed(association, request):
    """Notify the ES syncer only for association changes that actually
    affect the search index.
    """
    if not sync.requires_updates(association):
        return
    notify_es_syncer(request.registry.queue_config)
def post(self):
    """Update the authenticated user's account preferences.

    Requires the current password. May update the password, start an
    email-change validation flow, change name / forum_username /
    profile visibility, mirror the relevant changes to Discourse via
    SSO, and refresh the search index when display names changed.

    Fix: the two bare ``except:`` clauses were narrowed to
    ``except Exception:`` — consistent with the other copy of this
    handler, and so that ``SystemExit``/``KeyboardInterrupt`` are not
    converted into HTTP 500s.
    """
    user = self.get_user()
    request = self.request
    validated = request.validated
    result = {}

    # Before all, check whether the user knows the current password
    current_password = validated['currentpassword']
    if not user.validate_password(current_password):
        request.errors.add('body', 'currentpassword', 'Invalid password')
        return

    sync_sso = False

    # update password if a new password is provided
    if 'newpassword' in validated:
        user.password = validated['newpassword']

    # start email validation procedure if a new email is provided
    email_link = None
    if 'email' in validated and validated['email'] != user.email:
        user.email_to_validate = validated['email']
        user.update_validation_nonce(
            Purpose.change_email, VALIDATION_EXPIRE_DAYS)
        email_service = get_email_service(self.request)
        nonce = user.validation_nonce
        settings = request.registry.settings
        link = settings['mail.validate_change_email_url_template'].format(
            '#', nonce)
        email_link = link
        result['email'] = validated['email']
        result['sent_email'] = True
        sync_sso = True

    update_search_index = False
    if 'name' in validated:
        user.name = validated['name']
        result['name'] = user.name
        update_search_index = True
        sync_sso = True

    if 'forum_username' in validated:
        user.forum_username = validated['forum_username']
        result['forum_username'] = user.forum_username
        update_search_index = True
        sync_sso = True

    if 'is_profile_public' in validated:
        user.is_profile_public = validated['is_profile_public']

    # Synchronize everything except the new email (still stored
    # in the email_to_validate attribute while validation is pending).
    if sync_sso:
        try:
            client = get_discourse_client(request.registry.settings)
            client.sync_sso(user)
        except Exception:
            log.error('Error syncing with discourse', exc_info=True)
            raise HTTPInternalServerError('Error with Discourse')

    try:
        DBSession.flush()
    except Exception:
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    # send the confirmation only after the changes are persisted
    if email_link:
        email_service.send_change_email_confirmation(user, link)

    if update_search_index:
        # when user name changes, the search index has to be updated
        notify_es_syncer(self.request.registry.queue_config)

        # also update the cache version of the user profile
        update_cache_version(user.profile)

    return result
def post(self):
    """Update the authenticated user's account preferences.

    Requires the current password. May update the password, start an
    email-change validation flow, change name / forum_username /
    profile visibility, mirror the relevant changes to Discourse via
    SSO, and refresh the search index when display names changed.
    Returns a dict describing what was changed.
    """
    user = self.get_user()
    request = self.request
    validated = request.validated
    result = {}

    # Before all, check whether the user knows the current password
    current_password = validated['currentpassword']
    if not user.validate_password(current_password):
        request.errors.add('body', 'currentpassword', 'Invalid password')
        return

    sync_sso = False

    # update password if a new password is provided
    if 'newpassword' in validated:
        user.password = validated['newpassword']

    # start email validation procedure if a new email is provided
    email_link = None
    if 'email' in validated and validated['email'] != user.email:
        user.email_to_validate = validated['email']
        user.update_validation_nonce(
            Purpose.change_email, VALIDATION_EXPIRE_DAYS)
        email_service = get_email_service(self.request)
        nonce = user.validation_nonce
        settings = request.registry.settings
        link = settings['mail.validate_change_email_url_template'].format(
            '#', nonce)
        email_link = link
        result['email'] = validated['email']
        result['sent_email'] = True
        sync_sso = True

    update_search_index = False
    if 'name' in validated:
        user.name = validated['name']
        result['name'] = user.name
        update_search_index = True
        sync_sso = True

    if 'forum_username' in validated:
        user.forum_username = validated['forum_username']
        result['forum_username'] = user.forum_username
        update_search_index = True
        sync_sso = True

    if 'is_profile_public' in validated:
        user.is_profile_public = validated['is_profile_public']

    # Synchronize everything except the new email (still stored
    # in the email_to_validate attribute while validation is pending).
    if sync_sso:
        try:
            client = get_discourse_client(request.registry.settings)
            client.sync_sso(user)
        except Exception:
            log.error('Error syncing with discourse', exc_info=True)
            raise HTTPInternalServerError('Error with Discourse')

    # NOTE(review): reconstructed nesting — the flush is taken to run
    # regardless of `sync_sso`, since changes like is_profile_public
    # must persist too; confirm against version-control history.
    try:
        DBSession.flush()
    except Exception:
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    if email_link:
        # confirmation mail is sent only after the changes persisted
        email_service.send_change_email_confirmation(user, link)

    if update_search_index:
        # when user name changes, the search index has to be updated
        notify_es_syncer(self.request.registry.queue_config)

        # also update the cache version of the user profile
        update_cache_version(user.profile)

    return result
def _put(
        self, clazz, schema, clazz_locale=None,
        before_update=None, after_update=None):
    """Update an existing document from the validated request body.

    Rejects merged (redirected) and protected documents, detects what
    changed by comparing version counters across the flush, creates a
    new version when needed, refreshes area/map links on geometry
    changes, synchronizes permission-checked associations, updates the
    feed and notifies the search syncer.
    """
    user_id = self.request.authenticated_userid
    id = self.request.validated['id']
    document_in = \
        schema.objectify(self.request.validated['document'])
    self._check_document_id(id, document_in.document_id)

    # get the current version of the document
    document = self._get_document(clazz, id, clazz_locale=clazz_locale)

    if document.redirects_to:
        raise HTTPBadRequest('can not update merged document')
    if document.protected and not self.request.has_permission('moderator'):
        raise HTTPForbidden('No permission to change a protected document')

    self._check_versions(document, document_in)

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    # update the document with the input document
    document.update(document_in)

    if before_update:
        before_update(document, document_in, user_id=user_id)

    try:
        DBSession.flush()
    except StaleDataError:
        # version counters moved under us: someone else saved first
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_types, changed_langs) = document.get_update_type(old_versions)
    if update_types:
        # A new version needs to be created and persisted
        DocumentRest.update_version(
            document, user_id, self.request.validated['message'],
            update_types, changed_langs)

        if document.type != AREA_TYPE and UpdateType.GEOM in update_types:
            update_areas_for_document(document, reset=True)
        if document.type != MAP_TYPE and UpdateType.GEOM in update_types:
            update_maps_for_document(document, reset=True)

        if after_update:
            after_update(document, update_types, user_id=user_id)

        update_cache_version(document)

    # association handling runs even when the document is unchanged
    associations = self.request.validated.get('associations', None)
    if associations:
        check_association_add = \
            association_permission_checker(self.request)
        check_association_remove = \
            association_permission_removal_checker(self.request)

        added_associations, removed_associations = \
            synchronize_associations(
                document, associations, user_id,
                check_association_add=check_association_add,
                check_association_remove=check_association_remove)

    if update_types or associations:
        # update search index
        notify_es_syncer(self.request.registry.queue_config)
        update_feed_document_update(document, user_id, update_types)

    # short-circuit keeps this safe: removed/added_associations are
    # only bound when `associations` is truthy
    if associations and (removed_associations or added_associations):
        update_cache_version_associations(
            added_associations, removed_associations)

    return {}