def validate_association(request):
    """Check if the given documents exist and if an association between the
    two document types is valid.

    Errors are reported through ``request.errors``; nothing is returned.
    """
    parent_document_id = request.validated.get('parent_document_id')
    child_document_id = request.validated.get('child_document_id')

    parent_document_type = None
    if parent_document_id:
        # `scalar()` returns None when no document with that id exists
        parent_document_type = DBSession.query(Document.type). \
            filter(Document.document_id == parent_document_id).scalar()
        if not parent_document_type:
            request.errors.add(
                'body', 'parent_document_id', 'parent document does not exist')

    child_document_type = None
    if child_document_id:
        child_document_type = DBSession.query(Document.type). \
            filter(Document.document_id == child_document_id).scalar()
        if not child_document_type:
            request.errors.add(
                'body', 'child_document_id', 'child document does not exist')

    if parent_document_type and child_document_type:
        # only type pairs listed in `valid_associations` may be linked
        association_type = (parent_document_type, child_document_type)
        if association_type not in valid_associations:
            request.errors.add(
                'body', 'association', 'invalid association type')
def _transfer_associations(source_user_id, target_user_id):
    """Move all associations of the source user to the target user.

    Delegates the transfer of the associations themselves, then rewrites
    the recorded author of the matching association log entries.
    """
    transfer_associations(source_user_id, target_user_id)

    # The association history must also point to the target user.
    log_query = DBSession.query(AssociationLog).filter(
        AssociationLog.user_id == source_user_id)
    log_query.update({AssociationLog.user_id: target_user_id})
def remove_association(parent_document_id, parent_document_type,
                       child_document_id, child_document_type,
                       user_id, check_first=False, check_association=None):
    """Remove an association between the two documents and create a log
    entry in the association history table with the given user id.

    :param check_first: when True, do nothing if the association does not
        currently exist.
    :param check_association: optional callable invoked with the
        association before deletion (e.g. to veto the removal).
    """
    association = Association(parent_document_id=parent_document_id,
                              parent_document_type=parent_document_type,
                              child_document_id=child_document_id,
                              child_document_type=child_document_type)

    # optionally skip the delete if the association is already gone
    if check_first and not exists_already(association):
        return

    if check_association:
        check_association(association)

    DBSession.query(Association).filter_by(
        parent_document_id=parent_document_id,
        child_document_id=child_document_id).delete()
    # is_creation=False marks this log entry as a removal
    DBSession.add(association.get_log(user_id, is_creation=False))
def _get_documents(
        self, clazz, schema, clazz_locale, adapt_schema,
        custom_filter, include_areas, set_custom_fields, meta_params,
        load_documents):
    """Build and execute the listing queries for a document type and
    return the serialized documents plus the total count.

    :param clazz: document model class to list.
    :param schema: serialization schema (possibly adapted per document).
    :param custom_filter: optional callable applied to both queries.
    :param load_documents: callable executing the two queries and
        returning ``(documents, total)``.
    """
    # redirected (merged) documents are never listed
    base_query = DBSession.query(clazz).\
        filter(getattr(clazz, 'redirects_to').is_(None))
    base_total_query = DBSession.query(getattr(clazz, 'document_id')).\
        filter(getattr(clazz, 'redirects_to').is_(None))

    if custom_filter:
        base_query = custom_filter(base_query)
        base_total_query = custom_filter(base_total_query)

    base_query = add_load_for_locales(base_query, clazz, clazz_locale)
    base_query = base_query.options(joinedload(getattr(clazz, 'geometry')))

    if clazz == Outing:
        # outings are sorted by date first, newest first
        base_query = base_query. \
            order_by(clazz.date_end.desc()). \
            order_by(clazz.document_id.desc())
    else:
        base_query = base_query.order_by(clazz.document_id.desc())

    base_query = add_load_for_profiles(base_query, clazz)
    base_total_query = add_profile_filter(base_total_query, clazz)

    if include_areas:
        # eagerly load the areas with a minimal set of columns
        base_query = base_query. \
            options(
                joinedload(getattr(clazz, '_areas')).
                load_only(
                    'document_id', 'area_type', 'version', 'protected',
                    'type').
                joinedload('locales').
                load_only(
                    'lang', 'title',
                    'version')
            )

    documents, total = load_documents(base_query, base_total_query)
    set_available_langs(documents, loaded=True)

    lang = meta_params['lang']
    if lang is not None:
        # pick the best matching locale for the requested language
        set_best_locale(documents, lang)

    if include_areas:
        self._set_areas_for_documents(documents, lang)

    if set_custom_fields:
        set_custom_fields(documents, lang)

    return {
        'documents': [
            to_json_dict(
                doc,
                schema if not adapt_schema else adapt_schema(schema, doc)
            ) for doc in documents
        ],
        'total': total
    }
def set_linked_routes(waypoint, lang):
    """
    Set associated routes for the given waypoint including associated
    routes of child and grandchild waypoints.
    Note that this function returns a dict and not a list!
    """
    with_query_waypoints = _get_select_children(waypoint)

    # newest routes first, capped at NUM_ROUTES
    route_ids = get_first_column(
        DBSession.query(
            Route.document_id).select_from(with_query_waypoints).join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id).join(
            Route,
            Association.child_document_id == Route.document_id).filter(
            Route.redirects_to.is_(None)).order_by(
            with_query_waypoints.c.priority.desc(),
            Route.document_id.desc()).limit(NUM_ROUTES).all())

    # total number of associated routes, without the limit
    total = DBSession.query(Route.document_id). \
        select_from(with_query_waypoints). \
        join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id). \
        join(
            Route,
            Association.child_document_id == Route.document_id). \
        filter(Route.redirects_to.is_(None)). \
        count()

    waypoint.associations['all_routes'] = get_documents_for_ids(
        route_ids, lang, route_documents_config, total)
def get_creators(document_ids):
    """ Get the creator for the list of given document ids.

    Returns a dict mapping each document id to
    ``{'name': ..., 'user_id': ...}`` of the user that wrote version 1
    of the document (rank 1 when ordered by history metadata id).
    """
    # rank the version-1 authors per document by history metadata id
    t = DBSession.query(
        ArchiveDocument.document_id.label('document_id'),
        User.id.label('user_id'),
        User.name.label('name'),
        over(
            func.rank(), partition_by=ArchiveDocument.document_id,
            order_by=HistoryMetaData.id).label('rank')). \
        select_from(ArchiveDocument). \
        join(
            DocumentVersion,
            and_(
                ArchiveDocument.document_id == DocumentVersion.document_id,
                ArchiveDocument.version == 1)). \
        join(HistoryMetaData,
             DocumentVersion.history_metadata_id == HistoryMetaData.id). \
        join(User,
             HistoryMetaData.user_id == User.id). \
        filter(ArchiveDocument.document_id.in_(document_ids)). \
        subquery('t')

    # keep only the earliest (rank 1) entry per document
    query = DBSession.query(
        t.c.document_id, t.c.user_id, t.c.name). \
        filter(t.c.rank == 1)

    return {
        document_id: {
            'name': name,
            'user_id': user_id
        } for document_id, user_id, name in query
    }
def _get_history(self, document_id, lang):
    """Return the title and all versions of a document locale.

    Raises ``HTTPNotFound`` when the document has no locale in ``lang``.
    """
    # FIXME conditional permission check (when outings implemented)
    # is_outing = DBSession.query(Outing) \
    #   .filter(Outing.document_id == document_id).count()
    # if is_outing > 0:
    #   # validate permission (authenticated + associated)
    #   # return 403 if not correct
    title_row = DBSession.query(DocumentLocale.title) \
        .filter(DocumentLocale.document_id == document_id) \
        .filter(DocumentLocale.lang == lang) \
        .first()
    if not title_row:
        raise HTTPNotFound('no locale document for "{0}"'.format(lang))

    version_query = DBSession.query(DocumentVersion) \
        .options(joinedload('history_metadata').joinedload('user')) \
        .filter(DocumentVersion.document_id == document_id) \
        .filter(DocumentVersion.lang == lang) \
        .order_by(DocumentVersion.id)

    return {
        'title': title_row.title,
        'versions': [serialize_version(v) for v in version_query.all()]
    }
def validate_topic_create(request, **kwargs):
    """Validate that the document locale exists and has no forum topic yet.

    On success ``locale`` and ``document`` are stored in
    ``request.validated``; otherwise an error is registered.
    """
    document_id = request.validated['document_id']
    lang = request.validated['lang']
    doc_ref = '{}/{}'.format(document_id, lang)

    locale = DBSession.query(DocumentLocale) \
        .filter(DocumentLocale.document_id == document_id) \
        .filter(DocumentLocale.lang == lang) \
        .one_or_none()
    if locale is None:
        request.errors.add('body', doc_ref, 'Document not found')
        return
    request.validated['locale'] = locale

    document = DBSession.query(Document) \
        .filter(Document.document_id == document_id) \
        .one_or_none()
    if document is None:
        request.errors.add('body', doc_ref, 'Document not found')
        return
    request.validated['document'] = document

    # a discussion topic may only be created once per locale
    if locale.topic_id is not None:
        request.errors.add('body', '{}_{}'.format(document_id, lang),
                           'Topic already exists',
                           topic_id=locale.topic_id)
def validate_association(request):
    """Validate that both documents of an association exist and that
    their type combination is an allowed association.
    """
    types = {}
    for role in ('parent', 'child'):
        doc_id = request.validated.get(role + '_document_id')
        doc_type = None
        if doc_id:
            doc_type = DBSession.query(Document.type). \
                filter(Document.document_id == doc_id).scalar()
            if not doc_type:
                request.errors.add('body',
                                   role + '_document_id',
                                   role + ' document does not exist')
        types[role] = doc_type

    if types['parent'] and types['child']:
        # only whitelisted (parent, child) type pairs may be associated
        if (types['parent'], types['child']) not in valid_associations:
            request.errors.add('body',
                               'association',
                               'invalid association type')
def _remove_user_account(user_id):
    """Delete a user's login tokens, account row and profile document."""
    for model, id_column in ((Token, Token.userid), (User, User.id)):
        DBSession.query(model).filter(id_column == user_id).delete()

    # The profile document and its archives go away with the account.
    remove_whole_document(
        user_id, UserProfile, None, ArchiveUserProfile, None)
    update_deleted_documents_list(user_id, USERPROFILE_TYPE)
def get(self):
    """Return the title and all versions of a document locale.

    NOTE(review): this variant still uses the legacy `culture` column
    name (elsewhere called `lang`) — presumably pre-rename code.
    """
    id = self.request.validated['id']
    lang = self.request.validated['lang']

    # FIXME conditional permission check (when outings implemented)
    # is_outing = DBSession.query(Outing) \
    #   .filter(Outing.document_id == id).count()
    # if is_outing > 0:
    #   # validate permission (authenticated + associated)
    #   # return 403 if not correct

    title = DBSession.query(DocumentLocale.title) \
        .filter(DocumentLocale.document_id == id) \
        .filter(DocumentLocale.culture == lang) \
        .first()
    if not title:
        raise HTTPNotFound('no locale document for ' + lang)

    versions = DBSession.query(DocumentVersion) \
        .options(joinedload('history_metadata').joinedload('user')) \
        .filter(DocumentVersion.document_id == id) \
        .filter(DocumentVersion.culture == lang) \
        .order_by(DocumentVersion.id) \
        .all()

    return {
        'title': title.title,
        'versions': [self._serialize_version(v) for v in versions]
    }
def set_linked_routes(waypoint, lang):
    """
    Set associated routes for the given waypoint including associated
    routes of child and grandchild waypoints.
    Note that this function returns a dict and not a list!
    """
    with_query_waypoints = _get_select_children(waypoint)

    # newest routes first, capped at NUM_ROUTES
    route_ids = get_first_column(
        DBSession.query(Route.document_id)
        .select_from(with_query_waypoints)
        .join(Association,
              with_query_waypoints.c.document_id ==
              Association.parent_document_id)
        .join(Route,
              Association.child_document_id == Route.document_id)
        .filter(Route.redirects_to.is_(None))
        .order_by(with_query_waypoints.c.priority.desc(),
                  Route.document_id.desc())
        .limit(NUM_ROUTES)
        .all()
    )

    # total number of associated routes, without the limit
    total = (
        DBSession.query(Route.document_id)
        .select_from(with_query_waypoints)
        .join(Association,
              with_query_waypoints.c.document_id ==
              Association.parent_document_id)
        .join(Route,
              Association.child_document_id == Route.document_id)
        .filter(Route.redirects_to.is_(None))
        .count()
    )

    waypoint.associations["all_routes"] = get_documents_for_ids(
        route_ids, lang, route_documents_config, total)
def main(argv=sys.argv):
    """Merge one user account into another (CLI entry point).

    Parses source and target user ids, asks for confirmation unless
    ``--force`` is given, and runs the merge inside a transaction.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("src_id", help="Source user identifier", type=int)
    parser.add_argument("tgt_id", help="Target user identifier", type=int)
    parser.add_argument(
        "-f", "--force", action="store_true",
        help="Merge user without confirmation"
    )
    args = parser.parse_args()
    source_user_id = args.src_id
    target_user_id = args.tgt_id
    if source_user_id == target_user_id:
        exit('ERROR: source and target user accounts cannot be the same')

    # NOTE(review): settings file is resolved relative to this module
    # (three directories up) — confirm if the module is ever moved
    settings_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), '../../../production.ini')
    settings = get_appsettings(settings_file)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    queue_config = get_queue_config(settings)

    logging.basicConfig()
    # silence per-statement SQL logging during the merge
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)

    source_user = DBSession.query(User).get(source_user_id)
    if not source_user:
        exit('ERROR: source user account (id {}) does not exist'.format(
            source_user_id))

    target_user = DBSession.query(User).get(target_user_id)
    if not target_user:
        exit('ERROR: target user account (id {}) does not exist'.format(
            target_user_id))

    if not args.force:
        sys.stdout.write(
            '\n'
            'Are you sure you want to merge the following accounts? [y/N]\n'
            'source: id {}: {}/{}\n'
            'target: id {}: {}/{}\n'.format(
                source_user.id,
                source_user.name,
                source_user.forum_username,
                target_user.id,
                target_user.name,
                target_user.forum_username))
        if input().lower()[:1] != 'y':
            exit('ABORTED: User accounts merging has been aborted')

    print('Merging user account {} to user account {} in progress.\n'
          'Please wait...'.format(source_user_id, target_user_id))

    # the merge is committed atomically
    with transaction.manager:
        merge_user_accounts(source_user_id, target_user_id, queue_config)

    print('SUCCESS: User account {} has been merged to user account {}'.format(
        source_user_id, target_user_id))
def _remove_history_metadata(document_id):
    """Delete all versions of a document and their history metadata.

    The metadata ids must be collected before the `DocumentVersion`
    rows (which reference them) are deleted.
    """
    # `.all()` returns 1-tuples; unwrap to plain ids so the `IN` clause
    # below receives scalars instead of row tuples (previously the raw
    # rows were passed straight into `in_()`).
    history_metadata_ids = [
        h_id for (h_id,) in
        DBSession.
        query(DocumentVersion.history_metadata_id).
        filter(DocumentVersion.document_id == document_id).
        all()
    ]

    DBSession.query(DocumentVersion). \
        filter(DocumentVersion.document_id == document_id). \
        delete()

    DBSession.execute(HistoryMetaData.__table__.delete().where(
        HistoryMetaData.id.in_(history_metadata_ids)))
def get_changes_of_profile_feed(user_id, token_id, token_time, limit):
    """Return the feed changes for a single user's profile feed.

    Raises ``HTTPNotFound`` when the user does not exist.
    """
    exists_clause = DBSession.query(User).filter(User.id == user_id).exists()
    if not DBSession.query(exists_clause).scalar():
        raise HTTPNotFound("user not found")

    # restrict the feed to changes that involve the requested user
    user_filter = DocumentChange.user_ids.op("&&")([user_id])
    return get_changes_of_feed(token_id, token_time, limit, user_filter)
def _remove_archive(archive_clazz, document_id):
    """Delete the type-specific and generic archive rows of a document."""
    archive_ids = DBSession.query(ArchiveDocument.id). \
        filter(ArchiveDocument.document_id == document_id). \
        subquery()

    # child-table rows first: they reference the ArchiveDocument rows
    archive_table = archive_clazz.__table__
    DBSession.execute(archive_table.delete().where(
        getattr(archive_clazz, 'id').in_(archive_ids)))

    DBSession.query(ArchiveDocument). \
        filter(ArchiveDocument.document_id == document_id). \
        delete()
def sso_sync_validator(request, **kwargs):
    """Validate an SSO sync request: check the SSO key, resolve the user
    by external id or email, and validate new-user fields when the user
    does not exist yet.

    The resolved objects are stored in ``request.validated`` under
    ``sso_key``, ``sso_external_id`` and ``sso_user``.
    """
    if 'sso_key' not in request.validated:
        return  # validated by colander schema
    sso_key = DBSession.query(SsoKey). \
        filter(SsoKey.key == request.validated['sso_key']). \
        one_or_none()
    if sso_key is None:
        # log the client address so repeated abuse can be traced
        log.warning('Attempt to use sso_sync with bad key from {}'.format(
            request.client_addr))
        request.errors.status = 403
        request.errors.add('body', 'sso_key', 'Invalid')
        return

    user = None

    # search user by external_id
    if 'external_id' not in request.validated:
        return  # validated by colander schema
    sso_external_id = DBSession.query(SsoExternalId). \
        filter(SsoExternalId.domain == sso_key.domain). \
        filter(SsoExternalId.external_id ==
               request.validated['external_id']). \
        one_or_none()
    if sso_external_id is not None:
        user = sso_external_id.user

    if user is None:
        # search user by email
        if request.validated['email'] is None:
            request.errors.add('body', 'email', 'Required')
            return
        user = DBSession.query(User). \
            filter(User.email == request.validated['email']). \
            one_or_none()

    if user is None:
        # no existing user: the request must carry enough data to
        # create a new account
        username = request.validated['username']
        if username is None:
            request.errors.add('body', 'username', 'Required')
        # name and forum_username fall back to the username
        request.validated['name'] = request.validated['name'] or username
        request.validated['forum_username'] = \
            request.validated['forum_username'] or username
        if request.validated['lang'] is None:
            request.errors.add('body', 'lang', 'Required')
        validate_unique_attribute('email', request, **kwargs)
        validate_unique_attribute('username', request, **kwargs)
        validate_forum_username(request, **kwargs)
        validate_unique_attribute('forum_username',
                                  request,
                                  lowercase=True,
                                  **kwargs)

    request.validated['sso_key'] = sso_key
    request.validated['sso_external_id'] = sso_external_id
    request.validated['sso_user'] = user
def validate_body_user_id(request, **kwargs):
    """ Check that the user exists.
    """
    user_id = request.validated['user_id']

    exists_clause = DBSession.query(User). \
        filter(User.id == user_id). \
        exists()
    if not DBSession.query(exists_clause).scalar():
        request.errors.add(
            'body', 'user_id', 'user {0} does not exist'.format(user_id))
def get_changes_of_profile_feed(user_id, token_id, token_time, limit):
    """Return the feed changes for a single user's profile feed.

    Raises ``HTTPNotFound`` when the user does not exist.
    """
    user_exists_query = DBSession.query(User). \
        filter(User.id == user_id). \
        exists()
    user_exists = DBSession.query(user_exists_query).scalar()

    if not user_exists:
        raise HTTPNotFound('user not found')

    # `&&` is the Postgres array-overlap operator: keep only changes
    # whose user_ids array contains the requested user
    user_filter = DocumentChange.user_ids.op('&&')([user_id])

    return get_changes_of_feed(token_id, token_time, limit, user_filter)
def _remove_archive_locale(archive_clazz_locale, document_id):
    """Delete the archived locale rows of a document.

    ``archive_clazz_locale`` may be falsy for document types without a
    type-specific locale archive; nothing is deleted in that case.
    """
    if not archive_clazz_locale:
        return

    locale_ids = DBSession. \
        query(ArchiveDocumentLocale.id). \
        filter(ArchiveDocumentLocale.document_id == document_id). \
        subquery()

    # type-specific rows first: they reference ArchiveDocumentLocale
    DBSession.execute(archive_clazz_locale.__table__.delete().where(
        getattr(archive_clazz_locale, 'id').in_(locale_ids)))

    DBSession.query(ArchiveDocumentLocale). \
        filter(ArchiveDocumentLocale.document_id == document_id). \
        delete()
def main(argv=sys.argv):
    """Merge one user account into another (CLI entry point).

    Expects two positional arguments (source and target user ids),
    asks for confirmation and runs the merge inside a transaction.
    """
    if len(argv) < 3:
        # NOTE(review): `usage` is presumably expected to exit the
        # process; confirm, otherwise the int() below raises IndexError
        usage(argv)

    source_user_id = int(argv[1])
    target_user_id = int(argv[2])
    if source_user_id == target_user_id:
        exit('ERROR: source and target user accounts cannot be the same')

    settings_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), '../../../production.ini')
    settings = get_appsettings(settings_file)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    queue_config = get_queue_config(settings)

    logging.basicConfig()
    # silence per-statement SQL logging during the merge
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)

    source_user = DBSession.query(User).get(source_user_id)
    if not source_user:
        exit('ERROR: source user account (id {}) does not exist'.format(
            source_user_id))

    target_user = DBSession.query(User).get(target_user_id)
    if not target_user:
        exit('ERROR: target user account (id {}) does not exist'.format(
            target_user_id))

    sys.stdout.write(
        '\n'
        'Are you sure you want to merge the following user accounts? [y/N]\n'
        'source: id {}: {}/{}\n'
        'target: id {}: {}/{}\n'.format(
            source_user.id,
            source_user.name,
            source_user.forum_username,
            target_user.id,
            target_user.name,
            target_user.forum_username))
    if input().lower()[:1] != 'y':
        exit('ABORTED: User accounts merging has been aborted')

    print('Merging user account {} to user account {} in progress.\n'
          'Please wait...'.format(source_user_id, target_user_id))

    # the merge is committed atomically
    with transaction.manager:
        merge_user_accounts(source_user_id, target_user_id, queue_config)

    print('SUCCESS: User account {} has been merged to user account {}'.format(
        source_user_id, target_user_id))
def _remove_locale(clazz_locale, document_id):
    """Delete all locale rows of a document.

    Links between locales and forum topics are removed too; the forum
    comments themselves are kept.
    """
    locale_ids = DBSession.query(DocumentLocale.id). \
        filter(DocumentLocale.document_id == document_id). \
        subquery()

    # Remove links to comments (comments themselves are not removed)
    DBSession.execute(DocumentTopic.__table__.delete().where(
        DocumentTopic.document_locale_id.in_(locale_ids)))

    if clazz_locale:
        # type-specific locale rows reference DocumentLocale, so they
        # have to go first
        DBSession.execute(clazz_locale.__table__.delete().where(
            getattr(clazz_locale, 'id').in_(locale_ids)))

    DBSession.query(DocumentLocale). \
        filter(DocumentLocale.document_id == document_id). \
        delete()
def _is_only_waypoint_of_route(document_id):
    """Return True if at least one route associated to the given waypoint
    has this waypoint as its only associated waypoint (so deleting the
    waypoint would leave that route without any waypoint).
    """
    # routes that have this waypoint as parent
    routes = DBSession.query(Association.child_document_id). \
        filter(and_(
            Association.parent_document_id == document_id,
            Association.child_document_type == ROUTE_TYPE,
        )).subquery()

    # among those routes, is there one with exactly one waypoint parent?
    only_waypoint = DBSession.query(Association). \
        filter(and_(
            Association.child_document_id == routes.c.child_document_id,
            Association.parent_document_type == WAYPOINT_TYPE
        )). \
        group_by(Association.child_document_id). \
        having(func.count('*') == 1). \
        exists()

    return DBSession.query(only_waypoint).scalar()
def _is_only_route_of_outing(document_id):
    """Return True if at least one outing associated to the given route
    has this route as its only associated route (so deleting the route
    would leave that outing without any route).
    """
    # outings that have this route as parent
    outings = DBSession.query(Association.child_document_id). \
        filter(and_(
            Association.parent_document_id == document_id,
            Association.child_document_type == OUTING_TYPE,
        )).subquery()

    # among those outings, is there one with exactly one route parent?
    only_route = DBSession.query(Association). \
        filter(and_(
            Association.child_document_id == outings.c.child_document_id,
            Association.parent_document_type == ROUTE_TYPE
        )). \
        group_by(Association.child_document_id). \
        having(func.count('*') == 1). \
        exists()

    return DBSession.query(only_route).scalar()
def set_linked_routes(waypoint, lang):
    """
    Set associated routes for the given waypoint including associated
    routes of child and grandchild waypoints.
    Note that this function returns a dict and not a list!
    """
    with_query_waypoints = _get_select_children(waypoint)

    # total number of associated routes, without the limit
    total = DBSession.query(Route.document_id). \
        select_from(with_query_waypoints). \
        join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id). \
        join(
            Route,
            Association.child_document_id == Route.document_id). \
        filter(Route.redirects_to.is_(None)). \
        count()

    # newest routes first, with a reduced set of columns, capped at
    # NUM_ROUTES
    routes = limit_route_fields(
        DBSession.query(Route).
        select_from(with_query_waypoints).
        join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id).
        join(
            Route,
            Association.child_document_id == Route.document_id).
        filter(Route.redirects_to.is_(None)).
        order_by(
            with_query_waypoints.c.priority.desc(),
            Route.document_id.desc()).
        limit(NUM_ROUTES)
    ). \
        all()

    if lang is not None:
        set_best_locale(routes, lang)

    waypoint.associations['all_routes'] = {
        'total': total,
        'routes': [
            to_json_dict(route, schema_association_route)
            for route in routes
        ]
    }
def set_recent_outings(waypoint, lang):
    """Set last 10 outings on routes associated to the given waypoint.
    """
    # two association hops: outing -> route and route -> waypoint
    t_outing_route = aliased(Association, name='a1')
    t_route_wp = aliased(Association, name='a2')
    with_query_waypoints = _get_select_children(waypoint)

    # latest outings on routes of this waypoint (or its children),
    # capped at NUM_RECENT_OUTINGS
    recent_outing_ids = get_first_column(
        DBSession.query(Outing.document_id).
        filter(Outing.redirects_to.is_(None)).
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id).
        join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id ==
                t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            )).
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id ==
            t_route_wp.parent_document_id
        ).
        order_by(Outing.date_end.desc()).
        limit(NUM_RECENT_OUTINGS).
        all())

    # total number of such outings, without the limit
    total = DBSession.query(Outing.document_id). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id). \
        join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id ==
                t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            )). \
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id ==
            t_route_wp.parent_document_id
        ). \
        count()

    waypoint.associations['recent_outings'] = get_documents_for_ids(
        recent_outing_ids, lang, outing_documents_config, total)
def get(self):
    """Get the user profile feed for a user.

    Request:
        `GET` `/profile-feed?u={user_id}[&pl=...][&limit=...][&token=...]`

    Parameters:
        `u={user_id}` (required)
        The id of the user whose profile feed is requested.

    For the other parameters, see above for '/feed'.
    """
    lang, token_id, token_time, limit = get_params(self.request)

    # load the requested user
    requested_user_id = self.request.validated['u']
    requested_user = DBSession.query(User). \
        filter(User.id == requested_user_id). \
        filter(User.email_validated). \
        options(load_only(User.id, User.is_profile_public)). \
        first()

    if not requested_user:
        raise HTTPNotFound('user not found')

    # only return the feed if authenticated or if the user marked
    # the profile as public
    if requested_user.is_profile_public or \
            self.request.has_permission('authenticated'):
        changes = get_changes_of_profile_feed(
            requested_user_id, token_id, token_time, limit)
        return load_feed(changes, lang)

    raise HTTPForbidden('no permission to see the feed')
def set_available_langs(documents, loaded=False):
    """Load and set the available langs for the given documents.
    """
    if not documents:
        return

    if loaded:
        # the locales are already in memory, just collect their langs
        for doc in documents:
            doc.available_langs = [locale.lang for locale in doc.locales]
        return

    docs_by_id = {doc.document_id: doc for doc in documents}

    # aggregate the langs per document into an array
    lang_agg = func.array_agg(
        DocumentLocale.lang,
        type_=postgresql.ARRAY(String)).label('langs')
    rows = DBSession.query(DocumentLocale.document_id, lang_agg). \
        filter(DocumentLocale.document_id.in_(list(docs_by_id))). \
        group_by(DocumentLocale.document_id). \
        all()

    for document_id, langs in rows:
        docs_by_id.get(document_id).available_langs = langs
def get_associated_user_ids(xreport_id):
    """Return the ids of the users associated to the given xreport."""
    query = DBSession.query(User.id). \
        join(Association, Association.parent_document_id == User.id). \
        filter(Association.child_document_id == xreport_id). \
        group_by(User.id)
    return get_first_column(query.all())
def post(self):
    """Log a user in and optionally prepare the Discourse SSO redirect.

    Returns the token response on success; registers a 403 error and
    returns None when the login fails.
    """
    request = self.request
    username = request.validated['username']
    password = request.validated['password']
    user = DBSession.query(User). \
        filter(User.username == username).first()
    token = try_login(user, password, request) if user else None
    if token:
        response = token_to_response(user, token, request)
        if 'discourse' in request.json:
            settings = request.registry.settings
            client = get_discourse_client(settings)
            try:
                if 'sso' in request.json and 'sig' in request.json:
                    sso = request.json['sso']
                    sig = request.json['sig']
                    redirect = client.redirect(user, sso, sig)
                    response['redirect'] = redirect
                else:
                    r = client.redirect_without_nonce(user)
                    response['redirect_internal'] = r
            except Exception:
                # Any error with discourse should not prevent login.
                # Catch `Exception` (not a bare `except:`) so that
                # SystemExit/KeyboardInterrupt still propagate.
                log.warning('Error logging into discourse for %d',
                            user.id, exc_info=True)
        return response
    else:
        request.errors.status = 403
        request.errors.add('body', 'user', 'Login failed')
        return None
def _paginate_offset(self, clazz, schema, adapt_schema):
    """Return a batch of documents with the given `offset` and `limit`.
    """
    validated = self.request.validated
    offset = validated['offset'] if 'offset' in validated else 0
    # the requested limit is capped at LIMIT_MAX
    limit = min(
        validated['limit'] if 'limit' in validated else LIMIT_DEFAULT,
        LIMIT_MAX)

    base_query = DBSession.query(clazz)

    documents = base_query. \
        options(joinedload(getattr(clazz, 'locales'))). \
        options(joinedload(getattr(clazz, 'geometry'))). \
        order_by(clazz.document_id.desc()). \
        slice(offset, offset + limit). \
        all()
    # NOTE(review): uses the legacy `set_available_cultures` helper —
    # presumably pre-rename equivalent of `set_available_langs`
    set_available_cultures(documents, loaded=True)

    if validated.get('lang') is not None:
        set_best_locale(documents, validated.get('lang'))

    total = base_query.count()

    return {
        'documents': [
            to_json_dict(
                doc,
                schema if not adapt_schema else adapt_schema(schema, doc)
            ) for doc in documents
        ],
        'total': total
    }
def get(self):
    """Return the profile of the requested user.

    The full profile document is only returned when the requester is
    authenticated or the profile owner made it public; otherwise only
    the user name is exposed.
    """
    requested_user_id = self.request.validated['id']
    requested_user = DBSession.query(User). \
        filter(User.id == requested_user_id). \
        filter(User.email_validated). \
        options(load_only(
            User.id, User.is_profile_public, User.name)). \
        first()

    if not requested_user:
        raise HTTPNotFound('user not found')

    if requested_user.is_profile_public or \
            self.request.has_permission('authenticated'):
        return self._get(user_profile_documents_config, schema_user_profile)

    # not authorized: expose the user name only
    return {
        'not_authorized': True,
        'document_id': requested_user.id,
        'name': requested_user.name
    }
def get(self):
    """Get the user profile feed for a user.

    Request:
        `GET` `/profile-feed?u={user_id}[&pl=...][&limit=...][&token=...]`

    Parameters:
        `u={user_id}` (required)
        The id of the user whose profile feed is requested.

    For the other parameters, see above for '/feed'.
    """
    lang, token_id, token_time, limit = get_params(self.request)

    # load the requested user
    requested_user_id = self.request.validated["u"]
    requested_user = (
        DBSession.query(User)
        .filter(User.id == requested_user_id)
        .filter(User.email_validated)
        .options(load_only(User.id, User.is_profile_public))
        .first()
    )

    if not requested_user:
        raise HTTPNotFound("user not found")
    elif requested_user.is_profile_public or self.request.has_permission("authenticated"):
        # only return the feed if authenticated or if the user marked
        # the profile as public
        changes = get_changes_of_profile_feed(requested_user_id, token_id, token_time, limit)
        return load_feed(changes, lang)
    else:
        raise HTTPForbidden("no permission to see the feed")
def _load_version(self, document_id, lang, version_id, clazz,
                  locale_clazz, schema, adapt_schema):
    """Load a specific archived version of a document and serialize it.

    Raises ``HTTPNotFound`` when no version matches the given
    document/lang/version ids.
    """
    # eagerly load the archived document, locale and geometry rows,
    # plus the author of the version
    version = DBSession.query(DocumentVersion) \
        .options(joinedload('history_metadata').joinedload('user').
                 load_only(User.id, User.name)) \
        .options(joinedload(
            DocumentVersion.document_archive.of_type(clazz))) \
        .options(joinedload(
            DocumentVersion.document_locales_archive.of_type(
                locale_clazz))) \
        .options(joinedload(DocumentVersion.document_geometry_archive)) \
        .filter(DocumentVersion.id == version_id) \
        .filter(DocumentVersion.document_id == document_id) \
        .filter(DocumentVersion.lang == lang) \
        .first()
    if version is None:
        raise HTTPNotFound('invalid version')

    # assemble the archived document like a regular document so the
    # normal serialization schema can be applied
    archive_document = version.document_archive
    archive_document.geometry = version.document_geometry_archive
    archive_document.locales = [version.document_locales_archive]

    if adapt_schema:
        schema = adapt_schema(schema, archive_document)

    previous_version_id, next_version_id = get_neighbour_version_ids(
        version_id, document_id, lang)

    return {
        'document': to_json_dict(archive_document, schema),
        'version': serialize_version(version),
        'previous_version_id': previous_version_id,
        'next_version_id': next_version_id,
    }
def post(self):
    """Log a user in and optionally prepare the Discourse SSO redirect.

    Returns the token response on success; registers a 403 error and
    returns None when the login fails.
    """
    request = self.request
    username = request.validated["username"]
    password = request.validated["password"]
    user = DBSession.query(User).filter(User.username == username).first()
    token = try_login(user, password, request) if user else None
    if token:
        response = token_to_response(user, token, request)
        if "discourse" in request.json:
            settings = request.registry.settings
            client = get_discourse_client(settings)
            try:
                if "sso" in request.json and "sig" in request.json:
                    sso = request.json["sso"]
                    sig = request.json["sig"]
                    redirect = client.redirect(user, sso, sig)
                    response["redirect"] = redirect
                else:
                    r = client.redirect_without_nonce(user)
                    response["redirect_internal"] = r
            except Exception:
                # Any error with discourse should not prevent login.
                # Catch `Exception` (not a bare `except:`) so that
                # SystemExit/KeyboardInterrupt still propagate.
                log.warning("Error logging into discourse for %d",
                            user.id, exc_info=True)
        return response
    else:
        request.errors.status = 403
        request.errors.add("body", "user", "Login failed")
        return None
def update_linked_route_titles(waypoint, update_types, user_id):
    """When a waypoint is the main waypoint of a route, the field
    `title_prefix`, which caches the waypoint name, has to be updated.
    This method takes care of updating all routes, that the waypoint is
    "main waypoint" of.

    :param update_types: the kinds of change made to the waypoint;
        only locale (LANG) updates trigger a refresh here.
    :param user_id: unused in this function; kept for the caller's
        uniform signature.
    """
    if UpdateType.LANG not in update_types:
        # if the locales did not change, no need to continue
        # NOTE(review): assumes any title change implies a LANG update —
        # confirm against how update_types is populated by the caller
        return

    # load only what set_route_title_prefix needs
    linked_routes = DBSession.query(Route). \
        filter(Route.main_waypoint_id == waypoint.document_id). \
        options(joinedload(Route.locales).load_only(
            RouteLocale.lang, RouteLocale.id)). \
        options(load_only(Route.document_id)). \
        all()

    if linked_routes:
        waypoint_locales = waypoint.locales
        waypoint_locales_index = {
            locale.lang: locale for locale in waypoint_locales}

        for route in linked_routes:
            set_route_title_prefix(
                route, waypoint_locales, waypoint_locales_index)
def get_linked_xreports(document, lang):
    """Return the xreports associated to the given document.

    The direction of the association depends on the document type:
    waypoints/profiles/articles/images are parents of xreports,
    routes/outings are children.

    Raises ``ValueError`` for an unsupported document type (previously
    this fell through and crashed with an obscure ``NameError`` on
    ``condition``).
    """
    condition_as_child = and_(
        Association.child_document_id == Xreport.document_id,
        Association.parent_document_id == document.document_id
    )
    condition_as_parent = and_(
        Association.child_document_id == document.document_id,
        Association.parent_document_id == Xreport.document_id
    )

    if document.type in [WAYPOINT_TYPE, USERPROFILE_TYPE,
                         ARTICLE_TYPE, IMAGE_TYPE]:
        condition = condition_as_parent
    elif document.type in [ROUTE_TYPE, OUTING_TYPE]:
        condition = condition_as_child
    else:
        raise ValueError(
            'unsupported document type for xreport associations: '
            '{}'.format(document.type))

    xreport_ids = get_first_column(
        DBSession.query(Xreport.document_id).
        filter(Xreport.redirects_to.is_(None)).
        join(
            Association, condition).
        group_by(Xreport.document_id).
        all())

    return get_documents_for_ids(
        xreport_ids, lang, xreport_documents_config).get('documents')
def delete_all_files_for_image(document_id, request):
    """ When the current database transaction is committed successfully,
    delete all files of the given image document by making a request
    to the image service. Note that no error is raised if this
    requests fails.
    """
    # collect the distinct filenames of all archived versions
    filenames_result = DBSession.query(ArchiveImage.filename). \
        filter(ArchiveImage.document_id == document_id). \
        group_by(ArchiveImage.filename). \
        all()
    filenames = [f for (f, ) in filenames_result]

    settings = request.registry.settings
    url = '{}/{}'.format(settings['image_backend.url'], 'delete')

    def send_delete_request():
        # runs only after the transaction commits (see below)
        response = requests.post(url, data={
            'secret': settings['image_backend.secret_key'],
            'filenames': filenames
        })
        if response.status_code != 200:
            raise HTTPInternalServerError(
                'Deleting image files failed: {} {}'.format(
                    response.status_code, response.reason))

    run_on_successful_transaction(send_delete_request)
def set_available_langs(documents, loaded=False):
    """Load and set the available langs for the given documents.

    With `loaded=True` the locales are assumed to be loaded on the
    documents already and are simply read; otherwise the langs are
    aggregated per document with a single grouped query.
    """
    if not documents:
        return

    if loaded:
        # All locales are already loaded, simply read them off.
        for doc in documents:
            doc.available_langs = [locale.lang for locale in doc.locales]
        return

    by_id = {doc.document_id: doc for doc in documents}

    # Aggregate the langs per document into an array.
    langs_agg = func.array_agg(
        DocumentLocale.lang,
        type_=postgresql.ARRAY(String)).label('langs')
    rows = DBSession.query(DocumentLocale.document_id, langs_agg). \
        filter(DocumentLocale.document_id.in_(list(by_id.keys()))). \
        group_by(DocumentLocale.document_id). \
        all()

    for doc_id, langs in rows:
        by_id.get(doc_id).available_langs = langs
def get_linked_articles(document, lang):
    """Return the articles associated to the given document."""
    as_child = and_(
        Association.child_document_id == Article.document_id,
        Association.parent_document_id == document.document_id)
    as_parent = and_(
        Association.child_document_id == document.document_id,
        Association.parent_document_id == Article.document_id)

    if document.type == IMAGE_TYPE:
        condition = as_parent
    elif document.type == ARTICLE_TYPE:
        # Articles can be linked to articles in both directions.
        condition = or_(as_child, as_parent)
    elif document.type in (WAYPOINT_TYPE, OUTING_TYPE, ROUTE_TYPE,
                           BOOK_TYPE, XREPORT_TYPE, USERPROFILE_TYPE):
        condition = as_child

    article_ids = get_first_column(
        DBSession.query(Article.document_id).
        filter(Article.redirects_to.is_(None)).
        join(Association, condition).
        group_by(Article.document_id).
        all())

    docs = get_documents_for_ids(
        article_ids, lang, article_documents_config)
    return docs.get('documents')
def _paginate_after(self, clazz, schema, adapt_schema):
    """ Returns all documents for which `document_id` is smaller than the
    given id in `after` (keyset pagination; `total` is not computed and
    is reported as -1).

    NOTE(review): this calls `set_available_cultures` while sibling code
    uses `set_available_langs` — confirm both helpers exist.
    """
    validated = self.request.validated
    after = validated['after']
    limit = validated['limit']
    limit = min(LIMIT_DEFAULT if limit is None else limit, LIMIT_MAX)

    documents = DBSession.query(clazz). \
        options(joinedload(getattr(clazz, 'locales'))). \
        order_by(clazz.document_id.desc()). \
        filter(clazz.document_id < after). \
        limit(limit). \
        all()
    set_available_cultures(documents)

    docs_json = []
    for doc in documents:
        doc_schema = schema if not adapt_schema \
            else adapt_schema(schema, doc)
        docs_json.append(to_json_dict(doc, doc_schema))

    return {
        'documents': docs_json,
        'total': -1
    }
def get_linked_articles(document, lang):
    """Return the articles associated to the given document."""
    child_cond = and_(
        Association.child_document_id == Article.document_id,
        Association.parent_document_id == document.document_id)
    parent_cond = and_(
        Association.child_document_id == document.document_id,
        Association.parent_document_id == Article.document_id)

    doc_type = document.type
    if doc_type == IMAGE_TYPE:
        join_condition = parent_cond
    elif doc_type in [WAYPOINT_TYPE, OUTING_TYPE, ROUTE_TYPE,
                      BOOK_TYPE, XREPORT_TYPE, USERPROFILE_TYPE]:
        join_condition = child_cond
    elif doc_type == ARTICLE_TYPE:
        # Articles may appear on either side of the association.
        join_condition = or_(child_cond, parent_cond)

    id_query = DBSession.query(Article.document_id). \
        filter(Article.redirects_to.is_(None)). \
        join(Association, join_condition). \
        group_by(Article.document_id)
    article_ids = get_first_column(id_query.all())

    return get_documents_for_ids(
        article_ids, lang, article_documents_config).get('documents')
def get_linked_routes(document, lang):
    """Return the routes associated to the given document."""
    child_cond = and_(
        Association.child_document_id == Route.document_id,
        Association.parent_document_id == document.document_id)
    parent_cond = and_(
        Association.child_document_id == document.document_id,
        Association.parent_document_id == Route.document_id)

    doc_type = document.type
    if doc_type == WAYPOINT_TYPE:
        join_condition = child_cond
    elif doc_type in (OUTING_TYPE, IMAGE_TYPE, ARTICLE_TYPE,
                      XREPORT_TYPE):
        join_condition = parent_cond
    else:
        # Other types may be linked in either direction.
        join_condition = or_(child_cond, parent_cond)

    id_query = DBSession.query(Route.document_id). \
        filter(Route.redirects_to.is_(None)). \
        join(Association, join_condition). \
        group_by(Route.document_id)
    route_ids = get_first_column(id_query.all())

    return get_documents_for_ids(
        route_ids, lang, route_documents_config).get('documents')
def _transfer_main_waypoint(source_document_id, target_document_id):
    """Repoint all routes whose main waypoint is the source waypoint to
    the target waypoint, then refresh their cached title prefixes.
    """
    new_main_waypoint = DBSession.query(Waypoint).get(target_document_id)

    DBSession.execute(
        Route.__table__.update().
        where(Route.main_waypoint_id == source_document_id).
        values(main_waypoint_id=target_document_id))

    # UpdateType.LANG forces the title-prefix refresh; no user id is
    # recorded for this system-initiated update.
    update_linked_route_titles(new_main_waypoint, [UpdateType.LANG], None)
def post(self):
    """Log a user in.

    Validates the credentials and, on success, returns a token response.
    When the request body contains 'discourse', additionally performs the
    Discourse hand-off ('sso'/'sig' redirect, or a nonce-less redirect);
    any error during that hand-off is logged but does not prevent login.
    On bad credentials, sets a 403 with a 'Login failed' body error and
    returns None.
    """
    request = self.request
    username = request.validated['username']
    password = request.validated['password']
    user = DBSession.query(User). \
        filter(User.username == username).first()

    token = try_login(user, password, request) if user else None
    if not token:
        request.errors.status = 403
        request.errors.add('body', 'user', 'Login failed')
        return None

    response = token_to_response(user, token, request)
    if 'discourse' in request.json:
        settings = request.registry.settings
        client = get_discourse_client(settings)
        try:
            if 'sso' in request.json and 'sig' in request.json:
                sso = request.json['sso']
                sig = request.json['sig']
                response['redirect'] = client.redirect(user, sso, sig)
            else:
                r = client.redirect_without_nonce(user)
                response['redirect_internal'] = r
        except Exception:
            # Fix: was a bare `except:`, which would also swallow
            # SystemExit/KeyboardInterrupt. Any error with discourse
            # should not prevent login.
            log.warning(
                'Error logging into discourse for %d',
                user.id, exc_info=True)
    return response
def remove_association(
        parent_document_id, child_document_id, user_id, check_first=False):
    """Remove an association between the two documents and create a log
    entry in the association history table with the given user id.
    """
    link = Association(
        parent_document_id=parent_document_id,
        child_document_id=child_document_id)

    # Optionally skip the delete when no such association exists.
    if check_first and not exists_already(link):
        return

    DBSession.query(Association).filter_by(
        parent_document_id=parent_document_id,
        child_document_id=child_document_id).delete()
    DBSession.add(link.get_log(user_id, is_creation=False))
def is_unused_user_attribute(attrname, value, lowercase=False):
    """Return True if no user has the given value for the given
    attribute (optionally compared case-insensitively).
    """
    column = getattr(User, attrname)
    if lowercase:
        condition = func.lower(column) == value.lower()
    else:
        condition = column == value
    return DBSession.query(User).filter(condition).count() == 0
def add_or_retrieve_token(value, expire, userid):
    """Return the existing token with the given value for the given user,
    or create, persist and return a new one.

    Bug fix: the lookup previously used the Python `and` operator inside
    `filter()` (`Token.value == value and User.id == userid`). `and`
    evaluates the truthiness of the first SQLAlchemy clause instead of
    building a SQL conjunction, so at most one of the two conditions was
    emitted — and the second one referenced the unrelated, un-joined
    `User` table. Both conditions now filter on `Token` and are combined
    as a real SQL AND by passing them as separate `filter()` arguments.
    """
    token = DBSession.query(Token).filter(
        Token.value == value,
        Token.userid == userid).first()

    if not token:
        token = Token(value=value, expire=expire, userid=userid)
        DBSession.add(token)
        # Flush so the new token gets persisted/visible immediately.
        DBSession.flush()

    return token
def get_linked_waypoint_children(document):
    """Return the waypoints that are association children of the given
    document (redirected waypoints excluded, fields limited).
    """
    children_query = DBSession.query(Waypoint). \
        filter(Waypoint.redirects_to.is_(None)). \
        join(
            Association,
            Association.child_document_id == Waypoint.document_id). \
        filter(Association.parent_document_id == document.document_id)
    return _limit_waypoint_fields(children_query).all()
def put(self):
    """Update a route.

    The previous main waypoint id is captured before the update so the
    default geometry can be re-derived, and the title prefix is refreshed
    afterwards.
    """
    route_id = self.request.validated['id']
    old_main_waypoint_id = DBSession.query(Route.main_waypoint_id). \
        filter(Route.document_id == route_id). \
        scalar()
    return self._put(
        Route,
        schema_route,
        before_update=functools.partial(
            update_default_geometry, old_main_waypoint_id),
        after_update=update_title_prefix)
def post(self):
    """Renew the authentication token of the currently logged-in user."""
    request = self.request
    user = DBSession.query(User). \
        filter(User.id == request.authenticated_userid). \
        first()
    token = renew_token(user, request)
    if not token:
        raise HTTPInternalServerError('Error renewing token')
    return token_to_response(user, token, request)
def exists_already(link):
    """ Checks if the given association exists already. For example, for
    two given documents D1 and D2, it checks if there is no association
    D1 -> D2 or D2 -> D1.
    """
    forward = and_(
        Association.parent_document_id == link.parent_document_id,
        Association.child_document_id == link.child_document_id)
    backward = and_(
        Association.child_document_id == link.parent_document_id,
        Association.parent_document_id == link.child_document_id)

    exists_query = DBSession.query(Association). \
        filter(or_(forward, backward)). \
        exists()
    return DBSession.query(exists_query).scalar()
def validate_required_user_from_email(request, **kwargs):
    """Check that the request body contains the email of an existing user
    and store that user in `request.validated`.
    """
    validate_required_json_string("email", request)
    if len(request.errors) != 0:
        return

    matching_user = DBSession.query(User). \
        filter(User.email == request.validated["email"]). \
        first()
    if not matching_user:
        request.errors.add("body", "email", "No user with this email")
    else:
        request.validated["user"] = matching_user
def get_linked_users(document, lang):
    """Return the user profiles associated as parents of the given
    document.
    """
    id_rows = DBSession.query(User.id). \
        join(Association, Association.parent_document_id == User.id). \
        filter(Association.child_document_id == document.document_id). \
        group_by(User.id). \
        all()
    user_ids = get_first_column(id_rows)

    return get_documents_for_ids(
        user_ids, lang, user_profile_documents_config).get('documents')
def validate_required_user_from_email(request):
    """Validate that the posted email belongs to an existing user and
    put that user into `request.validated`.
    """
    validate_required_json_string("email", request)
    if len(request.errors) != 0:
        return

    email = request.validated['email']
    matching_user = DBSession.query(User). \
        filter(User.email == email). \
        first()
    if matching_user is None:
        request.errors.add('body', 'email', 'No user with this email')
        return
    request.validated['user'] = matching_user
def _has_permission(self, user_id, outing_id):
    """Check if the user with the given id has permission to change an
    outing. That is only users that are currently assigned to the outing
    can modify it.
    """
    is_assigned = exists().where(and_(
        Association.parent_document_id == user_id,
        Association.child_document_id == outing_id))
    return DBSession.query(is_assigned).scalar()
def get_documents(documents_config, meta_params, search_documents):
    """Return a listing of documents of the configured type.

    `search_documents(base_query, base_total_query)` is expected to
    return `(document_ids, total)`; the per-document JSON is then fetched
    from the dogpile cache (or built from the database on cache misses).
    Returns `{'documents': [...], 'total': int}`.
    """
    lang = meta_params['lang']

    # Base queries exclude redirected documents.
    base_query = DBSession.query(documents_config.clazz). \
        filter(getattr(documents_config.clazz, 'redirects_to').is_(None))
    base_total_query = DBSession. \
        query(getattr(documents_config.clazz, 'document_id')). \
        filter(getattr(documents_config.clazz, 'redirects_to').is_(None))

    base_total_query = add_profile_filter(
        base_total_query, documents_config.clazz)
    base_query = add_load_for_profiles(base_query, documents_config.clazz)

    # Outings and xreports are sorted by their date first; everything
    # falls back to descending document id as tie-breaker / default.
    if documents_config.clazz == Outing:
        base_query = base_query. \
            order_by(documents_config.clazz.date_end.desc()). \
            order_by(documents_config.clazz.document_id.desc())
    elif documents_config.clazz == Xreport:
        base_query = base_query. \
            order_by(documents_config.clazz.date.desc()). \
            order_by(documents_config.clazz.document_id.desc())
    else:
        base_query = base_query.order_by(
            documents_config.clazz.document_id.desc())

    document_ids, total = search_documents(base_query, base_total_query)
    cache_keys = get_cache_keys(
        document_ids, lang, documents_config.document_type)

    def get_documents_from_cache_keys(*cache_keys):
        """ This method is called from dogpile.cache with the cache keys
        for the documents that are not cached yet.
        """
        ids = [get_document_id(cache_key) for cache_key in cache_keys]
        docs = _get_documents_from_ids(
            ids, base_query, documents_config, lang)
        # dogpile requires one value per requested key, in order.
        assert len(cache_keys) == len(docs), \
            'the number of returned documents must match ' + \
            'the number of keys'
        return docs

    # get the documents from the cache or from the database
    documents = get_or_create_multi(
        cache_document_listing, cache_keys,
        get_documents_from_cache_keys,
        should_cache_fn=lambda v: v is not None)

    # Drop documents that could not be loaded (cached `None` markers).
    documents = [doc for doc in documents if doc]
    # `total` may be None when the search backend does not count; fall
    # back to the number of returned documents.
    total = total if total is not None else len(documents)

    return {
        'documents': documents,
        'total': total
    }