def collection_post(self):
    """Create a new association between two documents.

    Rejects the request when the association (or its back-link) is
    already present; otherwise persists the association plus a creation
    log entry, invalidates the cached versions of both documents,
    notifies the search-index syncer and updates the feed.
    """
    validated = self.request.validated
    user_id = self.request.authenticated_userid

    link = schema_association.objectify(validated)
    link.parent_document_type = validated['parent_document_type']
    link.child_document_type = validated['child_document_type']

    if exists_already(link):
        raise HTTPBadRequest(
            'association (or its back-link) exists already')

    DBSession.add(link)
    DBSession.add(link.get_log(user_id))

    update_cache_version_associations(
        [{'parent_id': link.parent_document_id,
          'parent_type': link.parent_document_type,
          'child_id': link.child_document_id,
          'child_type': link.child_document_type}], [])

    notify_es_syncer_if_needed(link, self.request)
    update_feed_association_update(
        link.parent_document_id, link.parent_document_type,
        link.child_document_id, link.child_document_type,
        user_id)

    return {}
def collection_delete(self):
    """Delete an association between two documents.

    The association may be given with parent and child swapped (the
    back-link); both orientations are looked up.  Deleting the link
    between a route and its main waypoint is refused.
    """
    requested = schema_association.objectify(self.request.validated)

    association = self._load(requested)
    if association is None:
        # also accept {parent_document_id: y, child_document_id: x} when
        # for an association {parent_document_id: x, child_document_id: x}
        swapped = Association(
            parent_document_id=requested.child_document_id,
            child_document_id=requested.parent_document_id)
        association = self._load(swapped)
        if association is None:
            raise HTTPBadRequest('association does not exist')

    if is_main_waypoint_association(association):
        raise HTTPBadRequest(
            'as the main waypoint of the route, this waypoint can not '
            'be disassociated')

    deletion_log = association.get_log(
        self.request.authenticated_userid, is_creation=False)
    DBSession.delete(association)
    DBSession.add(deletion_log)

    return {}
def update_version(document, user_id, comment, update_types, changed_langs):
    """Store a new version of ``document`` in the history tables.

    Creates archives for the document, its geometry and every language
    that needs updating, links them all to one shared history-metadata
    record, and flushes the session.
    """
    assert user_id
    assert update_types

    meta_data = HistoryMetaData(comment=comment, user_id=user_id)
    archive = DocumentRest._get_document_archive(document, update_types)
    geometry_archive = DocumentRest._get_geometry_archive(
        document, update_types)
    langs = DocumentRest._get_langs_to_update(
        document, update_types, changed_langs)

    def _version_for(lang):
        # one DocumentVersion row per updated locale
        locale = document.get_locale(lang)
        locale_archive = DocumentRest._get_locale_archive(
            locale, changed_langs)
        return DocumentVersion(
            document_id=document.document_id,
            lang=locale.lang,
            document_archive=archive,
            document_geometry_archive=geometry_archive,
            document_locales_archive=locale_archive,
            history_metadata=meta_data)

    locale_versions = [_version_for(lang) for lang in langs]

    DBSession.add(archive)
    DBSession.add(meta_data)
    DBSession.add_all(locale_versions)
    DBSession.flush()
def collection_delete(self):
    """Remove an existing association, accepted in either direction.

    Removing the association to a route's main waypoint is rejected.
    A deletion log entry is stored alongside the removal.
    """
    given = schema_association.objectify(self.request.validated)
    association = self._load(given)

    if association is None:
        # also accept {parent_document_id: y, child_document_id: x} when
        # for an association {parent_document_id: x, child_document_id: x}
        association = self._load(Association(
            parent_document_id=given.child_document_id,
            child_document_id=given.parent_document_id))

    if association is None:
        raise HTTPBadRequest('association does not exist')

    if is_main_waypoint_association(association):
        raise HTTPBadRequest(
            'as the main waypoint of the route, this waypoint can not '
            'be disassociated')

    removal_log = association.get_log(
        self.request.authenticated_userid, is_creation=False)
    DBSession.delete(association)
    DBSession.add(removal_log)

    return {}
def post(self):
    """Register a new user account.

    Creates the user with a registration-validation nonce and an empty
    user profile document (whose document id is the user id), stores
    the profile's first version, then sends a confirmation e-mail with
    the validation link.

    :raises HTTPInternalServerError: if persisting the user fails.
    """
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']
    user.update_validation_nonce(
        Purpose.registration, VALIDATION_EXPIRE_DAYS)

    # directly create the user profile, the document id of the profile
    # is the user id
    lang = user.lang
    user.profile = UserProfile(
        categories=['amateur'],
        locales=[DocumentLocale(lang=lang, title='')])

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        # was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception instead
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    # also create a version for the profile
    DocumentRest.create_new_version(user.profile, user.id)

    # The user needs validation
    email_service = get_email_service(self.request)
    nonce = user.validation_nonce
    settings = self.request.registry.settings
    link = settings['mail.validate_register_url_template'] % nonce
    email_service.send_registration_confirmation(user, link)

    return to_json_dict(user, schema_user)
def _update_version(self, document, user_id, comment, update_types,
                    changed_langs):
    """Persist a new history version of ``document``.

    Builds archives for the document, its geometry and each culture to
    update, ties them to one history-metadata record and flushes.
    """
    assert user_id
    assert update_types

    meta_data = HistoryMetaData(comment=comment, user_id=user_id)
    archive = self._get_document_archive(document, update_types)
    geometry_archive = self._get_geometry_archive(document, update_types)
    cultures = self._get_cultures_to_update(
        document, update_types, changed_langs)

    locale_versions = []
    for culture in cultures:
        locale = document.get_locale(culture)
        locale_archive = self._get_locale_archive(locale, changed_langs)
        locale_versions.append(DocumentVersion(
            document_id=document.document_id,
            culture=locale.culture,
            document_archive=archive,
            document_geometry_archive=geometry_archive,
            document_locales_archive=locale_archive,
            history_metadata=meta_data))

    DBSession.add(archive)
    DBSession.add(meta_data)
    DBSession.add_all(locale_versions)
    DBSession.flush()
def post(self):
    """Register a new user account.

    Creates the user with a registration-validation nonce and an empty
    user profile document (whose document id is the user id), stores
    the profile's first version, then sends a confirmation e-mail with
    the validation link.

    :raises HTTPInternalServerError: if persisting the user fails.
    """
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']
    user.update_validation_nonce(
        Purpose.registration, VALIDATION_EXPIRE_DAYS)

    # directly create the user profile, the document id of the profile
    # is the user id
    lang = user.lang
    user.profile = UserProfile(
        categories=['amateur'],
        locales=[DocumentLocale(lang=lang, title='')]
    )

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        # was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception instead
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    # also create a version for the profile
    DocumentRest.create_new_version(user.profile, user.id)

    # The user needs validation
    email_service = get_email_service(self.request)
    nonce = user.validation_nonce
    settings = self.request.registry.settings
    link = settings['mail.validate_register_url_template'] % nonce
    email_service.send_registration_confirmation(user, link)

    return to_json_dict(user, schema_user)
def remove_association(parent_document_id, parent_document_type,
                       child_document_id, child_document_type, user_id,
                       check_first=False, check_association=None):
    """Delete the association between the two documents and record the
    removal in the association history table under ``user_id``.

    With ``check_first`` the call is a no-op when no such association
    (or back-link) exists.  ``check_association``, when given, is
    invoked with the association before the deletion happens.
    """
    association = Association(
        parent_document_id=parent_document_id,
        parent_document_type=parent_document_type,
        child_document_id=child_document_id,
        child_document_type=child_document_type)

    if check_first and not exists_already(association):
        return
    if check_association:
        check_association(association)

    DBSession.query(Association) \
        .filter_by(parent_document_id=parent_document_id,
                   child_document_id=child_document_id) \
        .delete()
    DBSession.add(association.get_log(user_id, is_creation=False))
def update_feed_document_create(document, user_id):
    """Add a 'created' entry to the feed for a newly created document.

    Redirected documents and document types excluded from the feed are
    skipped.  For outings, all participants are included in the entry's
    user ids.
    """
    if document.redirects_to or document.type in NO_FEED_DOCUMENT_TYPES:
        return

    # make sure all updates are written to the database, so that areas and
    # users can be queried
    DBSession.flush()

    if document.type in [ARTICLE_TYPE, OUTING_TYPE, ROUTE_TYPE]:
        activities = document.activities
    else:
        activities = []

    user_ids = [user_id]
    if document.type == OUTING_TYPE:
        participant_ids = _get_participants_of_outing(document.document_id)
        user_ids = list(set(user_ids).union(participant_ids))

    area_ids = [] if document.type == ARTICLE_TYPE \
        else _get_area_ids(document)

    DBSession.add(DocumentChange(
        user_id=user_id,
        change_type='created',
        document_id=document.document_id,
        document_type=document.type,
        activities=activities,
        area_ids=area_ids,
        user_ids=user_ids))
    DBSession.flush()
def collection_post(self):
    """Create an association and all derived bookkeeping records.

    Fails when the association (or its reverse link) already exists.
    On success the association and a creation log entry are stored,
    the cached versions of both documents are invalidated, the search
    syncer is notified and the feed is updated.
    """
    request = self.request
    association = schema_association.objectify(request.validated)
    association.parent_document_type = \
        request.validated['parent_document_type']
    association.child_document_type = \
        request.validated['child_document_type']

    if exists_already(association):
        raise HTTPBadRequest(
            'association (or its back-link) exists already')

    DBSession.add(association)
    DBSession.add(association.get_log(request.authenticated_userid))

    added = {
        'parent_id': association.parent_document_id,
        'parent_type': association.parent_document_type,
        'child_id': association.child_document_id,
        'child_type': association.child_document_type
    }
    update_cache_version_associations([added], [])

    notify_es_syncer_if_needed(association, request)
    update_feed_association_update(association.parent_document_id,
                                   association.parent_document_type,
                                   association.child_document_id,
                                   association.child_document_type,
                                   request.authenticated_userid)

    return {}
def _collection_post(
        self, schema, before_add=None, after_add=None):
    """Create a document from the validated request body.

    Runs the optional ``before_add``/``after_add`` hooks, stores the
    first version, updates area links (except for areas themselves),
    creates requested associations and notifies the search-index
    syncer.
    """
    user_id = self.request.authenticated_userid
    document_in = self.request.validated

    document = schema.objectify(document_in)
    document.document_id = None

    if before_add:
        before_add(document, user_id=user_id)

    DBSession.add(document)
    DBSession.flush()
    DocumentRest.create_new_version(document, user_id)

    if document.type != AREA_TYPE:
        update_areas_for_document(document, reset=False)

    if after_add:
        after_add(document, user_id=user_id)

    associations = document_in.get('associations', None)
    if associations:
        create_associations(document, associations, user_id)

    notify_es_syncer(self.request.registry.queue_config)

    return {'document_id': document.document_id}
def _update_version(self, document, comment, update_types, changed_langs):
    """Store a new version of ``document`` for every culture that needs
    updating, sharing one metadata record between the version rows.
    """
    assert update_types

    meta_data = HistoryMetaData(comment=comment)
    archive = self._get_document_archive(document, update_types)
    geometry_archive = self._get_geometry_archive(document, update_types)
    cultures = self._get_cultures_to_update(
        document, update_types, changed_langs)

    versions = []
    for culture in cultures:
        locale = document.get_locale(culture)
        versions.append(DocumentVersion(
            document_id=document.document_id,
            culture=locale.culture,
            document_archive=archive,
            document_geometry_archive=geometry_archive,
            document_locales_archive=self._get_locale_archive(
                locale, changed_langs),
            history_metadata=meta_data))

    DBSession.add(archive)
    DBSession.add(meta_data)
    DBSession.add_all(versions)
    DBSession.flush()
def _collection_post(self, schema, before_add=None, after_add=None):
    """Create a new document from the validated payload and run the
    standard post-create steps (versioning, area update, associations,
    search-sync notification).
    """
    request = self.request
    user_id = request.authenticated_userid
    document_in = request.validated

    document = schema.objectify(document_in)
    document.document_id = None

    if before_add:
        before_add(document, user_id=user_id)

    DBSession.add(document)
    DBSession.flush()
    DocumentRest.create_new_version(document, user_id)

    if document.type != AREA_TYPE:
        update_areas_for_document(document, reset=False)

    if after_add:
        after_add(document, user_id=user_id)

    if document_in.get('associations', None):
        create_associations(
            document, document_in['associations'], user_id)

    notify_es_syncer(request.registry.queue_config)

    return {'document_id': document.document_id}
def create_new_version(document, user_id, written_at=None):
    """Write the initial 'creation' version of ``document`` to the
    history tables, one version row per archived locale.

    ``written_at`` is forwarded to the history metadata record.
    """
    assert user_id

    archive = document.to_archive()
    archive_locales = document.get_archive_locales()
    archive_geometry = document.get_archive_geometry()
    meta_data = HistoryMetaData(
        comment='creation', user_id=user_id, written_at=written_at)

    versions = [
        DocumentVersion(
            document_id=document.document_id,
            lang=locale.lang,
            document_archive=archive,
            document_locales_archive=locale,
            document_geometry_archive=archive_geometry,
            history_metadata=meta_data)
        for locale in archive_locales
    ]

    DBSession.add(archive)
    DBSession.add_all(archive_locales)
    DBSession.add(meta_data)
    DBSession.add_all(versions)
    DBSession.flush()
def add_or_retrieve_token(value, expire, userid):
    """Return the existing token matching ``value`` and ``userid``,
    creating (and flushing) a new one when none exists.
    """
    # BUG FIX: the original used `Token.value == value and User.id == userid`.
    # Python's `and` does not build a SQL conjunction: it evaluates the
    # truthiness of the first clause and returns one operand, so only a
    # single criterion reached the query.  Passing both criteria to
    # filter() ANDs them in SQL, as the later revision of this function
    # (see the comma form elsewhere in this file's history) intends.
    token = DBSession.query(Token).filter(
        Token.value == value, User.id == userid).first()

    if not token:
        token = Token(value=value, expire=expire, userid=userid)
        DBSession.add(token)
        DBSession.flush()

    return token
def add_or_retrieve_token(value, expire, userid):
    """Fetch the token for ``value``/``userid``; create it if missing."""
    query = DBSession.query(Token).filter(
        Token.value == value, User.id == userid)
    token = query.first()

    if not token:
        token = Token(value=value, expire=expire, userid=userid)
        DBSession.add(token)
        DBSession.flush()

    return token
def collection_post(self):
    """Create a new association together with its creation log entry.

    Rejects the request when the association or its back-link already
    exists.
    """
    new_link = schema_association.objectify(self.request.validated)

    if exists_already(new_link):
        raise HTTPBadRequest(
            'association (or its back-link) exists already')

    creation_log = new_link.get_log(self.request.authenticated_userid)
    DBSession.add(new_link)
    DBSession.add(creation_log)

    return {}
def collection_post(self):
    """Persist a new association unless it (or its reverse) exists."""
    association = schema_association.objectify(self.request.validated)

    if exists_already(association):
        raise HTTPBadRequest(
            'association (or its back-link) exists already')

    for obj in (association,
                association.get_log(self.request.authenticated_userid)):
        DBSession.add(obj)

    return {}
def post(self):
    """Create a new user from the validated request payload.

    :raises HTTPInternalServerError: if persisting the user fails.
    """
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        # was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; catch Exception instead
        # TODO: log the error for debugging
        raise HTTPInternalServerError('Error persisting user')

    return to_json_dict(user, schema_user)
def _collection_post(self, clazz, schema):
    """Create a new document from the validated request body and store
    its first version, returning its JSON representation.
    """
    new_document = schema.objectify(self.request.validated)
    new_document.document_id = None

    # TODO additional validation: at least one culture, only one instance
    # for each culture, geometry
    DBSession.add(new_document)
    DBSession.flush()

    self._create_new_version(new_document)

    return to_json_dict(new_document, schema)
def _collection_post(self, clazz, schema):
    """Create a document, version it, and push it to the search index."""
    new_document = schema.objectify(self.request.validated)
    new_document.document_id = None

    DBSession.add(new_document)
    DBSession.flush()

    author_id = self.request.authenticated_userid
    self._create_new_version(new_document, author_id)
    sync_search_index(new_document)

    return to_json_dict(new_document, schema)
def add_association(
        parent_document_id, child_document_id, user_id, check_first=False):
    """Link two documents and record the creation in the association
    history table under ``user_id``.

    With ``check_first`` the call is a no-op when the association (or
    its back-link) already exists.
    """
    link = Association(
        parent_document_id=parent_document_id,
        child_document_id=child_document_id)

    if check_first and exists_already(link):
        return

    DBSession.add(link)
    DBSession.add(link.get_log(user_id, is_creation=True))
def update_feed_images_upload(images, images_in, user_id):
    """When uploading a set of images, create a feed entry for the document
    the images are linked to.

    If the uploader already has a feed entry for the document it is
    reused/updated; otherwise the latest entry is copied into a new
    'added_photos' entry for the uploader.
    """
    if not images or not images_in:
        return
    assert len(images) == len(images_in)

    # get the document that the images were uploaded to
    document_id, document_type = get_linked_document(images_in)
    if not document_id or not document_type:
        return

    image1_id, image2_id, image3_id, more_images = get_images(
        images, images_in, document_id, document_type)
    if not image1_id:
        return

    # load the feed entry for the images
    change = get_existing_change(document_id)
    if not change:
        # FIX: `Logger.warn` is a deprecated alias of `Logger.warning`
        log.warning('no feed change for document {}'.format(document_id))
        return

    if change.user_id == user_id:
        # if the same user, only update time and change_type.
        # this avoids that multiple entries are shown in the feed for the same
        # document.
        change.change_type = 'updated'
        change.time = func.now()
    else:
        # if different user: first try to get an existing feed entry of the
        # user for the document
        change_by_user = get_existing_change_for_user(document_id, user_id)
        if change_by_user:
            change = change_by_user
            change.change_type = 'added_photos'
            change.time = func.now()
        else:
            change = change.copy()
            change.change_type = 'added_photos'
            change.user_id = user_id
            change.user_ids = list(set(change.user_ids).union([user_id]))

    _update_images(change, image1_id, image2_id, image3_id, more_images)

    DBSession.add(change)
    DBSession.flush()
def add_association(parent_document_id, child_document_id, user_id,
                    check_first=False):
    """Create a parent/child association and its history log entry.

    Does nothing when ``check_first`` is set and an equivalent
    association (in either direction) is already stored.
    """
    association = Association(parent_document_id=parent_document_id,
                              child_document_id=child_document_id)

    if check_first and exists_already(association):
        return

    creation_log = association.get_log(user_id, is_creation=True)
    DBSession.add(association)
    DBSession.add(creation_log)
def remove_association(
        parent_document_id, child_document_id, user_id, check_first=False):
    """Delete the association between the two documents and log the
    removal under ``user_id``.

    With ``check_first`` the call silently returns when no such
    association (or back-link) exists.
    """
    association = Association(
        parent_document_id=parent_document_id,
        child_document_id=child_document_id)

    if check_first and not exists_already(association):
        return

    DBSession.query(Association) \
        .filter_by(parent_document_id=parent_document_id,
                   child_document_id=child_document_id) \
        .delete()
    DBSession.add(association.get_log(user_id, is_creation=False))
def _create_document(self, document_in, schema, before_add=None,
                     after_add=None, allow_anonymous=False):
    """Create a document from ``document_in`` and run all side effects.

    When ``allow_anonymous`` is set and the payload requests anonymity,
    the configured anonymous user becomes the author.  Stores the first
    version, refreshes area/map links, creates associations (with
    permission checks), updates caches and the feed, and notifies the
    search-index syncer.
    """
    if allow_anonymous and document_in.get('anonymous') and \
            self.request.registry.anonymous_user_id:
        user_id = self.request.registry.anonymous_user_id
    else:
        user_id = self.request.authenticated_userid

    document = schema.objectify(document_in)
    document.document_id = None

    if before_add:
        before_add(document, user_id)

    DBSession.add(document)
    DBSession.flush()
    DocumentRest.create_new_version(document, user_id)

    if document.type != AREA_TYPE:
        update_areas_for_document(document, reset=False)
    if document.type != MAP_TYPE:
        update_maps_for_document(document, reset=False)

    if after_add:
        after_add(document, user_id=user_id)

    if document_in.get('associations', None):
        checker = association_permission_checker(
            self.request,
            skip_outing_check=document.type == OUTING_TYPE)
        added_associations = create_associations(
            document, document_in['associations'], user_id,
            check_association=checker)
        update_cache_version_associations(
            added_associations, [], document.document_id)

    update_feed_document_create(document, user_id)
    notify_es_syncer(self.request.registry.queue_config)

    return document
def update_feed_images_upload(images, images_in, user_id):
    """When uploading a set of images, create a feed entry for the document
    the images are linked to.

    If the upload comes from the same user as the latest feed entry the
    entry is updated in place; otherwise a copy is created for the
    uploading user.
    """
    if not images or not images_in:
        return
    assert len(images) == len(images_in)

    # get the document that the images were uploaded to
    document_id, document_type = get_linked_document(images_in)
    if not document_id or not document_type:
        return

    image1_id, image2_id, image3_id, more_images = get_images(
        images, images_in, document_id, document_type)
    if not image1_id:
        return

    # load the feed entry for the images
    change = get_existing_change(document_id)
    if not change:
        # FIX: `Logger.warn` is a deprecated alias of `Logger.warning`
        log.warning('no feed change for document {}'.format(document_id))
        return

    if change.user_id == user_id:
        # if the same user, only update time and change_type.
        # this avoids that multiple entries are shown in the feed for the same
        # document.
        change.change_type = 'updated'
        change.time = func.now()
    else:
        # if different user: copy the feed entry
        change = change.copy()
        change.change_type = 'added_photos'
        change.user_id = user_id
        change.user_ids = list(set(change.user_ids).union([user_id]))

    change.image1_id = image1_id
    change.image2_id = image2_id
    change.image3_id = image3_id
    change.more_images = more_images

    DBSession.add(change)
    DBSession.flush()
def collection_delete(self):
    """Delete an association (accepted in either orientation) and update
    all dependent bookkeeping.

    Required-association and removal-permission checks run first.  On
    success a deletion log entry is stored, the cached versions of both
    documents are invalidated, the search syncer is notified and the
    feed is updated.
    """
    request = self.request
    given = schema_association.objectify(request.validated)

    association = self._load(given)
    if association is None:
        # also accept {parent_document_id: y, child_document_id: x} when
        # for an association {parent_document_id: x, child_document_id: x}
        association = self._load(Association(
            parent_document_id=given.child_document_id,
            child_document_id=given.parent_document_id))

    if association is None:
        raise HTTPBadRequest('association does not exist')

    _check_required_associations(association)
    check_permission_for_association_removal(request, association)

    log = association.get_log(request.authenticated_userid,
                              is_creation=False)
    DBSession.delete(association)
    DBSession.add(log)

    removed = {
        'parent_id': association.parent_document_id,
        'parent_type': association.parent_document_type,
        'child_id': association.child_document_id,
        'child_type': association.child_document_type
    }
    update_cache_version_associations([], [removed])

    notify_es_syncer_if_needed(association, request)
    update_feed_association_update(association.parent_document_id,
                                   association.parent_document_type,
                                   association.child_document_id,
                                   association.child_document_type,
                                   request.authenticated_userid)

    return {}
def _create_new_version(self, document):
    """Archive ``document`` and record a 'creation' history version for
    each of its locales.
    """
    archive = document.to_archive()
    archive_locales = document.get_archive_locales()
    archive_geometry = document.get_archive_geometry()
    meta_data = HistoryMetaData(comment='creation')

    versions = [
        DocumentVersion(
            document_id=document.document_id,
            culture=locale.culture,
            document_archive=archive,
            document_locales_archive=locale,
            document_geometry_archive=archive_geometry,
            history_metadata=meta_data)
        for locale in archive_locales
    ]

    DBSession.add(archive)
    DBSession.add_all(archive_locales)
    DBSession.add(meta_data)
    DBSession.add_all(versions)
    DBSession.flush()
def _create_new_version(self, document):
    """Store the initial archives and version rows for ``document``."""
    doc_archive = document.to_archive()
    locale_archives = document.get_archive_locales()
    geometry_archive = document.get_archive_geometry()
    history = HistoryMetaData(comment='creation')

    version_rows = []
    for locale_archive in locale_archives:
        version_rows.append(DocumentVersion(
            document_id=document.document_id,
            culture=locale_archive.culture,
            document_archive=doc_archive,
            document_locales_archive=locale_archive,
            document_geometry_archive=geometry_archive,
            history_metadata=history))

    DBSession.add(doc_archive)
    DBSession.add_all(locale_archives)
    DBSession.add(history)
    DBSession.add_all(version_rows)
    DBSession.flush()
def collection_delete(self):
    """Delete an association given in either direction, after checking
    required-association and permission constraints, then refresh
    caches, the search-sync queue and the feed.
    """
    association_in = schema_association.objectify(self.request.validated)
    association = self._load(association_in)

    if association is None:
        # also accept {parent_document_id: y, child_document_id: x} when
        # for an association {parent_document_id: x, child_document_id: x}
        reversed_in = Association(
            parent_document_id=association_in.child_document_id,
            child_document_id=association_in.parent_document_id)
        association = self._load(reversed_in)
        if association is None:
            raise HTTPBadRequest('association does not exist')

    _check_required_associations(association)
    check_permission_for_association_removal(self.request, association)

    user_id = self.request.authenticated_userid
    deletion_log = association.get_log(user_id, is_creation=False)
    DBSession.delete(association)
    DBSession.add(deletion_log)

    update_cache_version_associations(
        [],
        [{'parent_id': association.parent_document_id,
          'parent_type': association.parent_document_type,
          'child_id': association.child_document_id,
          'child_type': association.child_document_type}])

    notify_es_syncer_if_needed(association, self.request)
    update_feed_association_update(
        association.parent_document_id,
        association.parent_document_type,
        association.child_document_id,
        association.child_document_type,
        user_id)

    return {}
def _create_document(
        self, document_in, schema, before_add=None, after_add=None):
    """Create a document and perform all follow-up bookkeeping:
    versioning, area/map links, associations (with permission checks),
    cache invalidation, feed entry and search-sync notification.
    """
    user_id = self.request.authenticated_userid

    document = schema.objectify(document_in)
    document.document_id = None

    if before_add:
        before_add(document, user_id)

    DBSession.add(document)
    DBSession.flush()
    DocumentRest.create_new_version(document, user_id)

    if document.type != AREA_TYPE:
        update_areas_for_document(document, reset=False)
    if document.type != MAP_TYPE:
        update_maps_for_document(document, reset=False)

    if after_add:
        after_add(document, user_id=user_id)

    if document_in.get('associations', None):
        checker = association_permission_checker(
            self.request,
            skip_outing_check=document.type == OUTING_TYPE)
        added = create_associations(
            document, document_in['associations'], user_id,
            check_association=checker)
        update_cache_version_associations(added, [], document.document_id)

    update_feed_document_create(document, user_id)
    notify_es_syncer(self.request.registry.queue_config)

    return document
def create_new_version(document, user_id):
    """Record the 'creation' version of ``document``: archives the
    document, its geometry and every locale under one metadata record.
    """
    assert user_id

    doc_archive = document.to_archive()
    locale_archives = document.get_archive_locales()
    geometry_archive = document.get_archive_geometry()
    meta_data = HistoryMetaData(comment='creation', user_id=user_id)

    versions = []
    for locale_archive in locale_archives:
        versions.append(DocumentVersion(
            document_id=document.document_id,
            lang=locale_archive.lang,
            document_archive=doc_archive,
            document_locales_archive=locale_archive,
            document_geometry_archive=geometry_archive,
            history_metadata=meta_data))

    DBSession.add(doc_archive)
    DBSession.add_all(locale_archives)
    DBSession.add(meta_data)
    DBSession.add_all(versions)
    DBSession.flush()
def update_deleted_locales_list(document_id, document_type, lang):
    """Record a deleted document locale in the ``ESDeletedLocale`` table.

    NOTE(review): presumably consumed by the search-index syncer to drop
    the locale from the index — confirm against the syncer code.
    """
    deleted_locale = ESDeletedLocale(
        document_id=document_id, type=document_type, lang=lang)
    DBSession.add(deleted_locale)
def _update_deleted_documents_list(document_id, document_type):
    """Record a deleted document in the ``ESDeletedDocument`` table.

    NOTE(review): presumably consumed by the search-index syncer to drop
    the document from the index — confirm against the syncer code.
    """
    deleted_document = ESDeletedDocument(
        document_id=document_id, type=document_type)
    DBSession.add(deleted_document)
def post(self):
    """
    Synchronize user details and return authentication url.

    Important: Email addresses need to be validated by external site.
    """
    request = self.request
    sso_key = request.validated['sso_key']
    sso_external_id = request.validated['sso_external_id']
    user = request.validated['sso_user']

    if user is None:
        # create new user
        user = User(
            username=request.validated['username'],
            name=request.validated['name'],
            forum_username=request.validated['forum_username'],
            email=request.validated['email'],
            email_validated=True,  # MUST be validated by external site
            lang=request.validated['lang'],
            password=generate_token()  # random password
        )

        # directly create the user profile, the document id of the profile
        # is the user id
        lang = user.lang
        user.profile = UserProfile(
            categories=['amateur'],
            locales=[DocumentLocale(lang=lang, title='')],
        )

        DBSession.add(user)
        DBSession.flush()

    if sso_external_id is None:
        sso_external_id = SsoExternalId(
            domain=sso_key.domain,
            external_id=request.validated['external_id'],
            user=user,
        )
        DBSession.add(sso_external_id)

    sso_external_id.token = generate_token()
    sso_external_id.expire = sso_expire_from_now()

    client = get_discourse_client(request.registry.settings)
    discourse_userid = call_discourse(get_discourse_userid,
                                      client, user.id)
    if discourse_userid is None:
        call_discourse(client.sync_sso, user)
        discourse_userid = client.get_userid(user.id)  # From cache

    # Groups are added to discourse, not removed
    group_ids = []
    discourse_groups = None  # fetched lazily, only if a group is requested
    groups = request.validated['groups'] or ''
    for group_name in groups.split(','):
        if group_name == '':
            continue

        if discourse_groups is None:
            discourse_groups = call_discourse(client.client.groups)

        # FIX: the original assigned `group_id = None` twice and used an
        # empty `if group_id is None: pass / else:` branch; simplified
        # without changing behavior (the last matching group still wins).
        group_id = None
        for discourse_group in discourse_groups:
            if discourse_group['name'] == group_name:
                group_id = discourse_group['id']

        if group_id is not None:
            group_ids.append(group_id)
        # If group is not found, we ignore it as we want to return
        # a valid token for user authentication

    for group_id in group_ids:
        call_discourse(client.client.add_user_to_group,
                       group_id, discourse_userid)

    return {
        'url': '{}/sso-login?no_redirect&{}'.format(
            request.registry.settings['ui.url'],
            urlencode({'token': sso_external_id.token}))
    }