def _collection_get(self, clazz, schema, limit=30):
    """Return a list of at most `limit` documents of type `clazz`,
    serialized with `schema`.

    The locales are eagerly loaded (joinedload) to avoid one query per
    document. The previous hard-coded page size of 30 is kept as the
    default value of the new `limit` parameter, so existing callers are
    unaffected.
    """
    documents = DBSession. \
        query(clazz). \
        options(joinedload(getattr(clazz, 'locales'))). \
        limit(limit)
    return [to_json_dict(doc, schema) for doc in documents]
def search_for_type(
        search_term, document_type, model, schema, adapt_schema,
        limit, lang):
    """Search ElasticSearch for documents of a single type and return a
    json dict with `count`, `total` and the serialized `documents`.

    Only the document ids are requested from ES; the documents
    themselves are then loaded from the database so the returned data
    is always current.
    """
    # search in all title* (title_en, title_fr, ...), summary* and
    # description* fields. "boost" title fields and summary fields.
    search_query = MultiMatch(
        query=search_term,
        fields=['title*^3', 'summary*^2', 'description*']
    )

    # filter on the document_type
    type_query = Term(doc_type=document_type)

    # `fields([])` requests no stored fields: only the document ids
    # are needed from ES
    search = create_search().\
        query(search_query).\
        filter(type_query).\
        fields([]).\
        extra(from_=0, size=limit)

    response = search.execute()
    document_ids = [int(doc.meta.id) for doc in response]

    # then load the documents for the returned ids
    documents = get_documents(document_ids, model, lang)

    count = len(documents)
    total = response.hits.total

    return {
        'count': count,
        'total': total,
        'documents': [
            to_json_dict(doc, adapt_schema(schema, doc))
            for doc in documents
        ]
    }
def _get_in_lang(self, id, lang, clazz, schema, editing_view,
                 clazz_locale=None, adapt_schema=None,
                 include_maps=True, include_areas=True,
                 set_custom_associations=None, set_custom_fields=None):
    """Load the document `id` restricted to the locale `lang` and
    return it as a json dict.

    If the document is a redirection, only the redirection target and
    the available langs of that target are returned.
    """
    document = self._get_document(
        clazz, id, clazz_locale=clazz_locale, lang=lang)

    if document.redirects_to:
        # do not load associations/maps/areas for redirected documents
        return {
            'redirects_to': document.redirects_to,
            'available_langs': get_available_langs(document.redirects_to)
        }

    set_available_langs([document])
    self._set_associations(document, lang, editing_view)

    # custom associations and areas are only relevant when viewing,
    # not when the document is loaded for editing
    if not editing_view and set_custom_associations:
        set_custom_associations(document, lang)

    if not editing_view and include_areas:
        self._set_areas(document, lang)

    if include_maps:
        self._set_maps(document, lang)

    if set_custom_fields:
        set_custom_fields(document)

    if adapt_schema:
        schema = adapt_schema(schema, document)

    return to_json_dict(document, schema, with_special_locales_attrs=True)
def get(self):
    """Get the filter preferences of the authenticated user.

    Request:
        `GET` `/users/preferences[?lang=...]`

    Parameters:
        `lang=...` (optional)
        When set only the given locale will be included (if available).
        Otherwise the default locale of the user will be used.

    NOTE(review): the previous docstring advertised a `pl` parameter,
    but the code reads the validated `lang` value — confirm the public
    query-parameter name.
    """
    user = self.get_user()

    # fall back to the user's own language when no explicit lang given
    lang = self.request.validated.get("lang")
    if not lang:
        lang = user.lang

    areas = user.feed_filter_areas
    if lang is not None:
        set_best_locale(areas, lang)

    return {
        "followed_only": user.feed_followed_only,
        "activities": user.feed_filter_activities,
        "areas": [to_json_dict(a, schema_listing_area) for a in areas],
    }
def _load_version(self, document_id, lang, version_id, clazz,
                  locale_clazz, schema, adapt_schema):
    """Load a single archived version of a document and return it as a
    json dict together with the ids of the neighbouring versions.

    Raises HTTPNotFound when no version matches the given
    (document_id, lang, version_id) triple.
    """
    version = DBSession.query(DocumentVersion) \
        .options(joinedload('history_metadata').joinedload('user').
                 load_only(User.id, User.name)) \
        .options(joinedload(
            DocumentVersion.document_archive.of_type(clazz))) \
        .options(joinedload(
            DocumentVersion.document_locales_archive.of_type(
                locale_clazz))) \
        .options(joinedload(DocumentVersion.document_geometry_archive)) \
        .filter(DocumentVersion.id == version_id) \
        .filter(DocumentVersion.document_id == document_id) \
        .filter(DocumentVersion.lang == lang) \
        .first()

    if version is None:
        raise HTTPNotFound('invalid version')

    # reassemble a document-like object from the archived parts so it
    # can be serialized with the regular document schema
    archive_document = version.document_archive
    archive_document.geometry = version.document_geometry_archive
    archive_document.locales = [version.document_locales_archive]

    if adapt_schema:
        schema = adapt_schema(schema, archive_document)

    previous_version_id, next_version_id = get_neighbour_version_ids(
        version_id, document_id, lang)

    return {
        'document': to_json_dict(archive_document, schema),
        'version': serialize_version(version),
        'previous_version_id': previous_version_id,
        'next_version_id': next_version_id,
    }
def _put(self, clazz, schema):
    """Update an existing document with the submitted document and
    create a new version entry describing what changed.

    Raises HTTPConflict when the stored version numbers indicate a
    concurrent modification.
    """
    id = self.request.validated['id']
    document_in = \
        schema.objectify(self.request.validated['document'])
    self._check_document_id(id, document_in.document_id)

    # get the current version of the document
    document = self._get_document(clazz, id)
    self._check_versions(document, document_in)

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    # update the document with the input document
    document.update(document_in)

    try:
        DBSession.flush()
    except StaleDataError:
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_type, changed_langs) = \
        self._check_update_type(document, old_versions)
    self._update_version(
        document, self.request.validated['message'], update_type,
        changed_langs)

    return to_json_dict(document, schema)
def get(self):
    """Get the filter preferences of the authenticated user.

    Request:
        `GET` `/users/preferences[?lang=...]`

    Parameters:
        `lang=...` (optional)
        When set only the given locale will be included (if available).
        Otherwise the default locale of the user will be used.

    NOTE(review): the previous docstring advertised a `pl` parameter,
    but the code reads the validated `lang` value — confirm the public
    query-parameter name.
    """
    user = self.get_user()

    # fall back to the user's own language when no explicit lang given
    lang = self.request.validated.get('lang')
    if not lang:
        lang = user.lang

    areas = user.feed_filter_areas
    if lang is not None:
        set_best_locale(areas, lang)

    return {
        'followed_only': user.feed_followed_only,
        'activities': user.feed_filter_activities,
        'langs': user.feed_filter_langs,
        'areas': [to_json_dict(a, schema_listing_area) for a in areas]
    }
def _get_documents(
        self, clazz, schema, clazz_locale, adapt_schema, custom_filter,
        include_areas, set_custom_fields, meta_params, load_documents):
    """Build the listing queries for documents of type `clazz`, execute
    them through `load_documents` and return the serialized documents
    and total count as a json dict.

    Redirected documents are always excluded from the listing.
    """
    base_query = DBSession.query(clazz).\
        filter(getattr(clazz, 'redirects_to').is_(None))
    base_total_query = DBSession.query(getattr(clazz, 'document_id')).\
        filter(getattr(clazz, 'redirects_to').is_(None))

    if custom_filter:
        base_query = custom_filter(base_query)
        base_total_query = custom_filter(base_total_query)

    base_query = add_load_for_locales(base_query, clazz, clazz_locale)
    base_query = base_query.options(joinedload(getattr(clazz, 'geometry')))

    if clazz == Outing:
        # outings are ordered by date first, newest first
        base_query = base_query. \
            order_by(clazz.date_end.desc()). \
            order_by(clazz.document_id.desc())
    else:
        base_query = base_query.order_by(clazz.document_id.desc())

    base_query = add_load_for_profiles(base_query, clazz)
    base_total_query = add_profile_filter(base_total_query, clazz)

    if include_areas:
        # eagerly load areas restricted to the fields needed for listing
        base_query = base_query. \
            options(
                joinedload(getattr(clazz, '_areas')).
                load_only(
                    'document_id', 'area_type', 'version', 'protected',
                    'type').
                joinedload('locales').
                load_only(
                    'lang', 'title', 'version')
            )

    documents, total = load_documents(base_query, base_total_query)
    set_available_langs(documents, loaded=True)

    lang = meta_params['lang']
    if lang is not None:
        set_best_locale(documents, lang)

    if include_areas:
        self._set_areas_for_documents(documents, lang)

    if set_custom_fields:
        set_custom_fields(documents, lang)

    return {
        'documents': [
            to_json_dict(
                doc,
                schema if not adapt_schema else adapt_schema(schema, doc)
            ) for doc in documents
        ],
        'total': total
    }
def _put(self, clazz, schema):
    """Update an existing document with the submitted document and
    create a new version entry describing what changed.

    Raises HTTPConflict when the stored version numbers indicate a
    concurrent modification.
    """
    id = self.request.validated['id']
    document_in = \
        schema.objectify(self.request.validated['document'])
    self._check_document_id(id, document_in.document_id)

    # get the current version of the document
    document = self._get_document(clazz, id)
    self._check_versions(document, document_in)

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    # update the document with the input document
    document.update(document_in)

    try:
        DBSession.flush()
    except StaleDataError:
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_type, changed_langs) = \
        self._check_update_type(document, old_versions)
    self._update_version(document, self.request.validated['message'],
                         update_type, changed_langs)

    return to_json_dict(document, schema)
def _paginate_after(self, clazz, schema, adapt_schema):
    """
    Returns all documents for which `document_id` is smaller than the
    given id in `after`.
    """
    validated = self.request.validated
    after = validated['after']

    requested_limit = validated['limit']
    if requested_limit is None:
        requested_limit = LIMIT_DEFAULT
    effective_limit = min(requested_limit, LIMIT_MAX)

    documents = (
        DBSession.query(clazz)
        .options(joinedload(getattr(clazz, 'locales')))
        .order_by(clazz.document_id.desc())
        .filter(clazz.document_id < after)
        .limit(effective_limit)
        .all()
    )
    set_available_cultures(documents)

    serialized = []
    for doc in documents:
        doc_schema = adapt_schema(schema, doc) if adapt_schema else schema
        serialized.append(to_json_dict(doc, doc_schema))

    # the total is unknown when paginating with a keyset, hence -1
    return {
        'documents': serialized,
        'total': -1
    }
def post(self):
    """Register a new user and send a confirmation email.

    Creates the user together with an (empty) user profile document —
    the document id of the profile is the user id — records an initial
    version for the profile, and emails a validation link built from
    the registration nonce.
    """
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']
    user.update_validation_nonce(
        Purpose.registration,
        VALIDATION_EXPIRE_DAYS)

    # directly create the user profile, the document id of the profile
    # is the user id
    lang = user.lang
    user.profile = UserProfile(
        categories=['amateur'],
        locales=[DocumentLocale(lang=lang, title='')]
    )

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    # also create a version for the profile
    DocumentRest.create_new_version(user.profile, user.id)

    # The user needs validation
    email_service = get_email_service(self.request)
    nonce = user.validation_nonce
    settings = self.request.registry.settings
    link = settings['mail.validate_register_url_template'] % nonce
    email_service.send_registration_confirmation(user, link)

    return to_json_dict(user, schema_user)
def _get_in_lang(self, id, lang, clazz, schema,
                 clazz_locale=None, adapt_schema=None,
                 include_maps=True, include_areas=True,
                 set_custom_associations=None):
    """Load the document `id` restricted to the locale `lang` and
    return it as a json dict.

    If the document is a redirection, only the redirection target and
    the available langs of that target are returned.
    """
    document = self._get_document(
        clazz, id, clazz_locale=clazz_locale, lang=lang)

    if document.redirects_to:
        return {
            'redirects_to': document.redirects_to,
            'available_langs': get_available_langs(document.redirects_to)
        }

    set_available_langs([document])

    # the editing view (`?e=1`) only needs the bare associations
    editing_view = self.request.GET.get('e', '0') != '0'
    self._set_associations(document, lang, editing_view)

    if not editing_view and set_custom_associations:
        set_custom_associations(document, lang)

    if not editing_view and include_areas:
        self._set_areas(document, lang)

    if include_maps:
        self._set_maps(document, lang)

    if adapt_schema:
        schema = adapt_schema(schema, document)

    return to_json_dict(document, schema)
def _paginate_offset(self, clazz, schema, adapt_schema):
    """Return a batch of documents with the given `offset` and `limit`.
    """
    validated = self.request.validated
    offset = validated.get('offset', 0)
    limit = min(validated.get('limit', LIMIT_DEFAULT), LIMIT_MAX)

    base_query = DBSession.query(clazz)
    documents = (
        base_query
        .options(joinedload(getattr(clazz, 'locales')))
        .options(joinedload(getattr(clazz, 'geometry')))
        .order_by(clazz.document_id.desc())
        .slice(offset, offset + limit)
        .all()
    )
    set_available_cultures(documents, loaded=True)

    lang = validated.get('lang')
    if lang is not None:
        set_best_locale(documents, lang)

    total = base_query.count()

    serialized = []
    for doc in documents:
        doc_schema = adapt_schema(schema, doc) if adapt_schema else schema
        serialized.append(to_json_dict(doc, doc_schema))

    return {
        'documents': serialized,
        'total': total
    }
def post(self):
    """Register a new user and send a confirmation email.

    Creates the user together with an (empty) user profile document —
    the document id of the profile is the user id — records an initial
    version for the profile, and emails a validation link built from
    the registration nonce.
    """
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']
    user.update_validation_nonce(Purpose.registration,
                                 VALIDATION_EXPIRE_DAYS)

    # directly create the user profile, the document id of the profile
    # is the user id
    lang = user.lang
    user.profile = UserProfile(
        categories=['amateur'],
        locales=[DocumentLocale(lang=lang, title='')])

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    # also create a version for the profile
    DocumentRest.create_new_version(user.profile, user.id)

    # The user needs validation
    email_service = get_email_service(self.request)
    nonce = user.validation_nonce
    settings = self.request.registry.settings
    link = settings['mail.validate_register_url_template'] % nonce
    email_service.send_registration_confirmation(user, link)

    return to_json_dict(user, schema_user)
def get(self):
    """Return the user with the validated `id` as a json dict."""
    requested_id = self.request.validated['id']
    user = (
        DBSession.query(User)
        .filter(User.id == requested_id)
        .first()
    )
    # NOTE(review): `first()` returns None for an unknown id — confirm
    # that validation guarantees the user exists before serializing.
    return to_json_dict(user, schema_user)
def _get_in_lang(self, id, lang, clazz, schema, adapt_schema=None):
    """Load the document `id` restricted to the culture `lang` and
    return it as a json dict, optionally adapting the schema to the
    loaded document."""
    document = self._get_document(clazz, id, lang)
    set_available_cultures([document])

    effective_schema = \
        adapt_schema(schema, document) if adapt_schema else schema
    return to_json_dict(document, effective_schema)
def _get_documents_from_ids(document_ids, base_query, documents_config, lang):
    """ Load the documents for the ids and return them as json dict. The
    returned list contains None values for documents that could not be
    loaded, and the list has the same order has the document id list.
    """
    base_query = base_query.options(
        load_only(*documents_config.get_load_only_fields()))

    base_query = add_load_for_locales(
        base_query, documents_config.clazz, documents_config.clazz_locale,
        documents_config.get_load_only_fields_locales())

    if len(documents_config.get_load_only_fields_geometry()) > 1:
        # only load the geometry if the fields list contains other columns
        # than 'version'
        base_query = base_query.options(
            joinedload(getattr(documents_config.clazz, 'geometry')).load_only(
                *documents_config.get_load_only_fields_geometry()))

    if documents_config.include_areas:
        # eagerly load areas restricted to the fields needed for listing
        base_query = base_query. \
            options(
                joinedload(getattr(documents_config.clazz, '_areas')).
                load_only(
                    'document_id', 'area_type', 'version', 'protected',
                    'type').
                joinedload('locales').
                load_only(
                    'lang', 'title', 'version')
            )

    documents = _load_documents(
        document_ids, documents_config.clazz, base_query)
    set_available_langs(documents, loaded=True)

    if lang is not None:
        set_best_locale(documents, lang)

    if documents_config.include_areas:
        _set_areas_for_documents(documents, lang)

    if documents_config.include_img_count:
        _set_img_count_for_documents(documents, document_ids)

    if documents_config.set_custom_fields:
        documents_config.set_custom_fields(documents, lang)

    # make sure the documents are returned in the same order
    document_index = {doc.document_id: doc for doc in documents}
    documents = [document_index.get(id) for id in document_ids]

    return [
        to_json_dict(
            doc,
            documents_config.schema if not documents_config.adapt_schema
            else documents_config.adapt_schema(documents_config.schema, doc),
            with_special_geometry_attrs=True)
        if doc else None
        for doc in documents
    ]
def _get_documents_from_ids(
        document_ids, base_query, documents_config, lang):
    """ Load the documents for the ids and return them as json dict. The
    returned list contains None values for documents that could not be
    loaded, and the list has the same order has the document id list.
    """
    base_query = base_query.options(
        load_only(*documents_config.get_load_only_fields())
    )

    base_query = add_load_for_locales(
        base_query, documents_config.clazz, documents_config.clazz_locale,
        documents_config.get_load_only_fields_locales())

    if len(documents_config.get_load_only_fields_geometry()) > 1:
        # only load the geometry if the fields list contains other columns
        # than 'version'
        base_query = base_query.options(
            joinedload(getattr(documents_config.clazz, 'geometry')).
            load_only(*documents_config.get_load_only_fields_geometry())
        )

    if documents_config.include_areas:
        # eagerly load areas restricted to the fields needed for listing
        base_query = base_query. \
            options(
                joinedload(getattr(documents_config.clazz, '_areas')).
                load_only(
                    'document_id', 'area_type', 'version', 'protected',
                    'type').
                joinedload('locales').
                load_only(
                    'lang', 'title', 'version')
            )

    documents = _load_documents(
        document_ids, documents_config.clazz, base_query)
    set_available_langs(documents, loaded=True)

    if lang is not None:
        set_best_locale(documents, lang)

    if documents_config.include_areas:
        _set_areas_for_documents(documents, lang)

    if documents_config.set_custom_fields:
        documents_config.set_custom_fields(documents, lang)

    # make sure the documents are returned in the same order
    document_index = {doc.document_id: doc for doc in documents}
    documents = [document_index.get(id) for id in document_ids]

    return [
        to_json_dict(
            doc,
            documents_config.schema if not documents_config.adapt_schema
            else documents_config.adapt_schema(
                documents_config.schema, doc)
        ) if doc else None
        for doc in documents
    ]
def _set_associations(self, document, lang, editing_view):
    """Serialize the documents linked to `document`, grouped by type,
    and attach them as `document.associations`."""
    linked_docs = get_associations(document, lang, editing_view)
    document.associations = {
        doc_type: [
            to_json_dict(linked, association_schemas[doc_type])
            for linked in docs
        ]
        for doc_type, docs in linked_docs.items()
    }
def _set_areas_for_documents(documents, lang):
    """Serialize the eagerly-loaded `_areas` of each document into
    `document.areas` using the area listing schema."""
    for document in documents:
        # expunge is set to False because the parent document of the areas
        # was already disconnected from the session at this point
        set_best_locale(document._areas, lang, expunge=False)

        serialized_areas = []
        for area in document._areas:
            serialized_areas.append(to_json_dict(area, schema_listing_area))
        document.areas = serialized_areas
def _set_associations(self, document, lang, editing_view):
    """Group the linked documents by type, serialize each group with
    its association schema and store the result on
    `document.associations`."""
    linked_docs = get_associations(document, lang, editing_view)
    document.associations = {
        typ: [to_json_dict(d, association_schemas[typ]) for d in docs]
        for typ, docs in linked_docs.items()
    }
def post(self):
    """Create a new user from the validated request body and return it
    as a json dict.

    Raises HTTPInternalServerError when the user cannot be persisted.
    """
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        # was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        # TODO: log the error for debugging
        raise HTTPInternalServerError('Error persisting user')

    return to_json_dict(user, schema_user)
def _collection_post(self, clazz, schema):
    """Persist a new document, record its initial version and push it
    to the search index; return the serialized document."""
    document = schema.objectify(self.request.validated)
    # clear any submitted id so the database assigns a fresh one
    document.document_id = None

    DBSession.add(document)
    DBSession.flush()

    user_id = self.request.authenticated_userid
    self._create_new_version(document, user_id)

    sync_search_index(document)
    return to_json_dict(document, schema)
def _collection_post(self, clazz, schema):
    """Persist a new document and record its first version; return the
    serialized document."""
    document = schema.objectify(self.request.validated)
    document.document_id = None  # let the database assign the id

    # TODO additional validation: at least one culture, only one instance
    # for each culture, geometry
    DBSession.add(document)
    DBSession.flush()

    self._create_new_version(document)
    return to_json_dict(document, schema)
def _put(self, clazz, schema):
    """Update an existing document; when anything actually changed,
    create a new version and sync the search index.

    Raises HTTPConflict when the stored version numbers indicate a
    concurrent modification. When the request omitted the geometry,
    the geometry is also omitted from the response to save bandwidth.
    """
    user_id = self.request.authenticated_userid
    id = self.request.validated['id']
    document_in = \
        schema.objectify(self.request.validated['document'])
    self._check_document_id(id, document_in.document_id)

    # get the current version of the document
    document = self._get_document(clazz, id)
    self._check_versions(document, document_in)

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    # find out whether the update of the geometry should be skipped.
    skip_geometry_update = document_in.geometry is None

    # update the document with the input document
    document.update(document_in)

    try:
        DBSession.flush()
    except StaleDataError:
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_type, changed_langs) = document.get_update_type(old_versions)
    if update_type:
        # A new version needs to be created and persisted
        self._update_version(
            document, user_id, self.request.validated['message'],
            update_type, changed_langs)

        # And the search updated
        sync_search_index(document)

    json_dict = to_json_dict(document, schema)

    if skip_geometry_update:
        # Optimization: the geometry is not sent back if the client
        # requested to skip the geometry update. Geometries may be very
        # huge; this optimization should speed the data transfer.
        json_dict['geometry'] = None

    return json_dict
def set_linked_routes(waypoint, lang):
    """
    Set associated routes for the given waypoint including associated routes
    of child and grandchild waypoints.
    Note that this function returns a dict and not a list!
    """
    with_query_waypoints = _get_select_children(waypoint)

    # total number of routes reachable from the waypoint (sub)tree,
    # excluding redirected routes
    total = DBSession.query(Route.document_id). \
        select_from(with_query_waypoints). \
        join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id). \
        join(
            Route,
            Association.child_document_id == Route.document_id). \
        filter(Route.redirects_to.is_(None)). \
        count()

    # the NUM_ROUTES highest-priority routes, restricted to listing
    # fields by `limit_route_fields`
    routes = limit_route_fields(
        DBSession.query(Route).
        select_from(with_query_waypoints).
        join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id).
        join(
            Route,
            Association.child_document_id == Route.document_id).
        filter(Route.redirects_to.is_(None)).
        order_by(
            with_query_waypoints.c.priority.desc(),
            Route.document_id.desc()).
        limit(NUM_ROUTES)
    ). \
        all()

    if lang is not None:
        set_best_locale(routes, lang)

    waypoint.associations['all_routes'] = {
        'total': total,
        'routes': [
            to_json_dict(route, schema_association_route)
            for route in routes
        ]
    }
def set_linked_routes(waypoint, lang):
    """
    Set associated routes for the given waypoint including associated routes
    of child and grandchild waypoints.
    Note that this function returns a dict and not a list!
    """
    with_query_waypoints = _get_select_children(waypoint)

    # total number of routes reachable from the waypoint (sub)tree,
    # excluding redirected routes
    total = DBSession.query(Route.document_id). \
        select_from(with_query_waypoints). \
        join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id). \
        join(
            Route,
            Association.child_document_id == Route.document_id). \
        filter(Route.redirects_to.is_(None)). \
        count()

    # the NUM_ROUTES highest-priority routes, restricted to listing
    # fields by `limit_route_fields`
    routes = limit_route_fields(
        DBSession.query(Route).
        select_from(with_query_waypoints).
        join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id).
        join(
            Route,
            Association.child_document_id == Route.document_id).
        filter(Route.redirects_to.is_(None)).
        order_by(
            with_query_waypoints.c.priority.desc(),
            Route.document_id.desc()).
        limit(NUM_ROUTES)
    ). \
        all()

    if lang is not None:
        set_best_locale(routes, lang)

    waypoint.associations['all_routes'] = {
        'total': total,
        'routes': [to_json_dict(route, schema_association_route)
                   for route in routes]
    }
def search_for_types(search_types, search_term, limit, lang):
    """Get results for all given types.
    """
    if not search_types:
        return {}

    document_id = try_to_parse_document_id(search_term)
    if document_id is not None:
        # the search term is a document id: skip ElasticSearch and use
        # that id for every requested type (no total available)
        results_for_type = [([document_id], None)] * len(search_types)
    else:
        # search in ElasticSearch
        results_for_type = do_multi_search_for_types(search_types,
                                                     search_term, limit)

    results = {}
    for search_type, result_for_type in zip(search_types, results_for_type):
        key, document_type, model, locale_model, schema, adapt_schema = \
            search_type
        document_ids, total = result_for_type

        # load the documents using the ids returned from the search
        documents = get_documents(document_ids, model, locale_model, lang)
        count = len(documents)

        serialized = []
        for doc in documents:
            doc_schema = \
                adapt_schema(schema, doc) if adapt_schema else schema
            serialized.append(to_json_dict(doc, doc_schema))

        results[key] = {
            'count': count,
            'total': total if total is not None else count,
            'documents': serialized
        }

    return results
def _get_in_lang(self, id, lang, clazz, schema,
                 clazz_locale=None, adapt_schema=None,
                 include_maps=True, include_areas=True,
                 set_custom_associations=None):
    """Load the document `id` restricted to the locale `lang` and
    return it as a json dict.

    If the document is a redirection, only the redirection target and
    the available langs of that target are returned.
    """
    document = self._get_document(clazz, id, clazz_locale=clazz_locale,
                                  lang=lang)

    if document.redirects_to:
        return {
            'redirects_to': document.redirects_to,
            'available_langs': get_available_langs(document.redirects_to)
        }

    set_available_langs([document])

    # the editing view (`?e=1`) only needs the bare associations
    editing_view = self.request.GET.get('e', '0') != '0'
    self._set_associations(document, lang, editing_view)

    if not editing_view and set_custom_associations:
        set_custom_associations(document, lang)

    if not editing_view and include_areas:
        self._set_areas(document, lang)

    if include_maps:
        self._set_maps(document, lang)

    if adapt_schema:
        schema = adapt_schema(schema, document)

    return to_json_dict(document, schema)
def search_for_types(search_types, search_term, limit, lang):
    """Get results for all given types.
    """
    if not search_types:
        return {}

    document_id = try_to_parse_document_id(search_term)

    if document_id is not None:
        # search by document id for every type
        results_for_type = [([document_id], None)] * len(search_types)
    else:
        # search in ElasticSearch
        results_for_type = do_multi_search_for_types(
            search_types, search_term, limit)

    # load the documents using the document ids returned from the search
    results = {}
    for search_type, result_for_type in zip(search_types, results_for_type):
        (key, document_type, model, locale_model, schema, adapt_schema) = \
            search_type
        (document_ids, total) = result_for_type

        documents = get_documents(document_ids, model, locale_model, lang)
        count = len(documents)
        # no ES total is available when searching by id; use the count
        total = total if total is not None else count

        results[key] = {
            'count': count,
            'total': total,
            'documents': [
                to_json_dict(
                    doc,
                    schema if not adapt_schema else adapt_schema(schema, doc)
                ) for doc in documents
            ]
        }

    return results
def set_recent_outings(route, lang):
    """Set last 10 outings on the given route.
    """
    recent_outings = DBSession.query(Outing). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            Association,
            Outing.document_id == Association.child_document_id). \
        filter(Association.parent_document_id == route.document_id). \
        options(load_only(
            Outing.document_id, Outing.activities, Outing.date_start,
            Outing.date_end, Outing.version, Outing.protected)). \
        options(joinedload(Outing.locales).load_only(
            DocumentLocale.lang, DocumentLocale.title,
            DocumentLocale.version)). \
        order_by(Outing.date_end.desc()). \
        limit(NUM_RECENT_OUTINGS). \
        all()

    set_author(recent_outings, None)
    if lang is not None:
        set_best_locale(recent_outings, lang)

    # total number of outings associated to the route, not just the
    # NUM_RECENT_OUTINGS loaded above
    total = DBSession.query(Outing.document_id). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            Association,
            Outing.document_id == Association.child_document_id). \
        filter(Association.parent_document_id == route.document_id). \
        count()

    route.associations['recent_outings'] = {
        'outings': [
            # NOTE(review): the loop variable `user` actually holds
            # Outing rows here
            to_json_dict(user, schema_association_outing)
            for user in recent_outings
        ],
        'total': total
    }
def _get_version(self, clazz, locale_clazz, schema, adapt_schema=None):
    """Load a single archived version of a document and return it as a
    json dict together with the ids of the neighbouring versions.

    Raises HTTPNotFound when no version matches the validated
    (id, lang, version_id) triple. Note: the lang filter uses the
    `culture` column of DocumentVersion.
    """
    id = self.request.validated['id']
    lang = self.request.validated['lang']
    version_id = self.request.validated['version_id']

    version = DBSession.query(DocumentVersion) \
        .options(joinedload('history_metadata').joinedload('user')) \
        .options(joinedload(
            DocumentVersion.document_archive.of_type(clazz))) \
        .options(joinedload(
            DocumentVersion.document_locales_archive.of_type(
                locale_clazz))) \
        .options(joinedload(DocumentVersion.document_geometry_archive)) \
        .filter(DocumentVersion.id == version_id) \
        .filter(DocumentVersion.document_id == id) \
        .filter(DocumentVersion.culture == lang) \
        .first()

    if version is None:
        raise HTTPNotFound('invalid version')

    # reassemble a document-like object from the archived parts so it
    # can be serialized with the regular document schema
    archive_document = version.document_archive
    archive_document.geometry = version.document_geometry_archive
    archive_document.locales = [version.document_locales_archive]

    if adapt_schema:
        schema = adapt_schema(schema, archive_document)

    previous_version_id, next_version_id = get_neighbour_version_ids(
        version_id, id, lang
    )

    return {
        'document': to_json_dict(archive_document, schema),
        'version': self._serialize_version(version),
        'previous_version_id': previous_version_id,
        'next_version_id': next_version_id,
    }
def _get_documents(self, clazz, schema, clazz_locale, adapt_schema,
                   custom_filter, include_areas, set_custom_fields,
                   meta_params, load_documents):
    """Build the listing queries for documents of type `clazz`, execute
    them through `load_documents` and return the serialized documents
    and total count as a json dict.

    Redirected documents are always excluded from the listing.
    """
    base_query = DBSession.query(clazz).\
        filter(getattr(clazz, 'redirects_to').is_(None))
    base_total_query = DBSession.query(getattr(clazz, 'document_id')).\
        filter(getattr(clazz, 'redirects_to').is_(None))

    if custom_filter:
        base_query = custom_filter(base_query)
        base_total_query = custom_filter(base_total_query)

    base_query = add_load_for_locales(base_query, clazz, clazz_locale)
    base_query = base_query.options(joinedload(getattr(clazz, 'geometry')))

    if clazz == Outing:
        # outings are ordered by date first, newest first
        base_query = base_query. \
            order_by(clazz.date_end.desc()). \
            order_by(clazz.document_id.desc())
    else:
        base_query = base_query.order_by(clazz.document_id.desc())

    base_query = add_load_for_profiles(base_query, clazz)
    base_total_query = add_profile_filter(base_total_query, clazz)

    if include_areas:
        # eagerly load areas restricted to the fields needed for listing
        base_query = base_query. \
            options(
                joinedload(getattr(clazz, '_areas')).
                load_only(
                    'document_id', 'area_type', 'version', 'protected',
                    'type').
                joinedload('locales').
                load_only(
                    'lang', 'title', 'version')
            )

    documents, total = load_documents(base_query, base_total_query)
    set_available_langs(documents, loaded=True)

    lang = meta_params['lang']
    if lang is not None:
        set_best_locale(documents, lang)

    if include_areas:
        self._set_areas_for_documents(documents, lang)

    if set_custom_fields:
        set_custom_fields(documents, lang)

    return {
        'documents': [
            to_json_dict(
                doc,
                schema if not adapt_schema else adapt_schema(schema, doc))
            for doc in documents
        ],
        'total': total
    }
def _set_maps(self, document, lang):
    """Serialize the topo maps covering `document` into
    `document.maps`."""
    document.maps = [
        to_json_dict(topo_map, schema_listing_topo_map)
        for topo_map in get_maps(document, lang)
    ]
def _set_areas(self, document, lang):
    """Serialize the areas containing `document` into
    `document.areas`."""
    document.areas = [
        to_json_dict(area, schema_listing_area)
        for area in get_areas(document, lang)
    ]
def set_recent_outings(waypoint, lang):
    """Set last 10 outings on routes associated to the given waypoint.
    """
    # aliases: a1 links outing->route, a2 links route->waypoint,
    # r constrains the middle document to be a route
    t_outing_route = aliased(Association, name='a1')
    t_route_wp = aliased(Association, name='a2')
    t_route = aliased(Document, name='r')

    # includes child and grandchild waypoints
    with_query_waypoints = _get_select_children(waypoint)

    recent_outings = DBSession.query(Outing). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id). \
        join(
            t_route,
            and_(
                t_outing_route.parent_document_id == t_route.document_id,
                t_route.type == ROUTE_TYPE)). \
        join(
            t_route_wp,
            t_route_wp.child_document_id == t_route.document_id). \
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id ==
            t_route_wp.parent_document_id
        ). \
        options(load_only(
            Outing.document_id, Outing.activities, Outing.date_start,
            Outing.date_end, Outing.version, Outing.protected)). \
        options(joinedload(Outing.locales).load_only(
            DocumentLocale.lang, DocumentLocale.title,
            DocumentLocale.version)). \
        order_by(Outing.date_end.desc()). \
        limit(NUM_RECENT_OUTINGS). \
        all()

    set_author(recent_outings, None)
    if lang is not None:
        set_best_locale(recent_outings, lang)

    # total number of outings on the waypoint's routes, not just the
    # NUM_RECENT_OUTINGS loaded above
    total = DBSession.query(Outing.document_id). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id). \
        join(
            t_route,
            and_(
                t_outing_route.parent_document_id == t_route.document_id,
                t_route.type == ROUTE_TYPE)). \
        join(
            t_route_wp,
            t_route_wp.child_document_id == t_route.document_id). \
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id ==
            t_route_wp.parent_document_id
        ). \
        count()

    waypoint.associations['recent_outings'] = {
        'total': total,
        'outings': [
            to_json_dict(outing, schema_association_outing)
            for outing in recent_outings
        ]
    }
def _get(self, clazz, schema):
    """Load the requested document, optionally restricted to the
    culture given by the `l` query parameter, and serialize it."""
    document_id = self.request.validated['id']
    culture = self.request.GET.get('l')
    document = self._get_document(clazz, document_id, culture)
    return to_json_dict(document, schema)
def _set_areas(self, document, lang):
    """Attach the serialized areas of `document` as
    `document.areas`."""
    containing_areas = get_areas(document, lang)
    document.areas = [
        to_json_dict(area, schema_listing_area)
        for area in containing_areas
    ]