def _collection_post(
        self, schema, before_add=None, after_add=None):
    user_id = self.request.authenticated_userid
    document_in = self.request.validated
    document = schema.objectify(document_in)
    document.document_id = None

    if before_add:
        before_add(document, user_id=user_id)

    DBSession.add(document)
    DBSession.flush()
    DocumentRest.create_new_version(document, user_id)

    if document.type != AREA_TYPE:
        update_areas_for_document(document, reset=False)

    if after_add:
        after_add(document, user_id=user_id)

    if document_in.get('associations', None):
        create_associations(document, document_in['associations'], user_id)

    notify_es_syncer(self.request.registry.queue_config)

    return {'document_id': document.document_id}

def _get_history(self, document_id, lang):
    # FIXME conditional permission check (when outings implemented)
    # is_outing = DBSession.query(Outing) \
    #   .filter(Outing.document_id == document_id).count()
    # if is_outing > 0:
    #     # validate permission (authenticated + associated)
    #     # return 403 if not correct

    title = DBSession.query(DocumentLocale.title) \
        .filter(DocumentLocale.document_id == document_id) \
        .filter(DocumentLocale.lang == lang) \
        .first()

    if not title:
        raise HTTPNotFound('no locale document for "{0}"'.format(lang))

    versions = DBSession.query(DocumentVersion) \
        .options(joinedload('history_metadata').joinedload('user')) \
        .filter(DocumentVersion.document_id == document_id) \
        .filter(DocumentVersion.lang == lang) \
        .order_by(DocumentVersion.id) \
        .all()

    return {
        'title': title.title,
        'versions': [serialize_version(v) for v in versions]
    }

def set_linked_routes(waypoint, lang):
    """
    Set associated routes for the given waypoint including associated
    routes of child and grandchild waypoints.
    Note that the value stored under `all_routes` is a dict (documents
    plus total), not a plain list!
    """
    with_query_waypoints = _get_select_children(waypoint)

    route_ids = get_first_column(
        DBSession.query(Route.document_id)
        .select_from(with_query_waypoints)
        .join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id)
        .join(Route, Association.child_document_id == Route.document_id)
        .filter(Route.redirects_to.is_(None))
        .order_by(
            with_query_waypoints.c.priority.desc(),
            Route.document_id.desc())
        .limit(NUM_ROUTES)
        .all())

    total = (
        DBSession.query(Route.document_id)
        .select_from(with_query_waypoints)
        .join(
            Association,
            with_query_waypoints.c.document_id ==
            Association.parent_document_id)
        .join(Route, Association.child_document_id == Route.document_id)
        .filter(Route.redirects_to.is_(None))
        .count())

    waypoint.associations["all_routes"] = get_documents_for_ids(
        route_ids, lang, route_documents_config, total)

def collection_post(self):
    association = schema_association.objectify(self.request.validated)
    association.parent_document_type = \
        self.request.validated['parent_document_type']
    association.child_document_type = \
        self.request.validated['child_document_type']

    if exists_already(association):
        raise HTTPBadRequest(
            'association (or its back-link) exists already')

    DBSession.add(association)
    DBSession.add(
        association.get_log(self.request.authenticated_userid))

    update_cache_version_associations(
        [{'parent_id': association.parent_document_id,
          'parent_type': association.parent_document_type,
          'child_id': association.child_document_id,
          'child_type': association.child_document_type}], [])

    notify_es_syncer_if_needed(association, self.request)
    update_feed_association_update(
        association.parent_document_id, association.parent_document_type,
        association.child_document_id, association.child_document_type,
        self.request.authenticated_userid)

    return {}

def post(self):
    request = self.request
    user = request.validated['user']
    user.password = request.validated['password']

    # The user was validated by the nonce so we can log in
    token = log_validated_user_i_know_what_i_do(user, request)

    if token:
        settings = request.registry.settings
        response = token_to_response(user, token, request)
        try:
            client = get_discourse_client(settings)
            r = client.redirect_without_nonce(user)
            response['redirect_internal'] = r
        except Exception:
            # Since only the password is changed, any error with discourse
            # must not prevent login and validation.
            log.error(
                'Error logging into discourse for %d', user.id,
                exc_info=True)

        user.clear_validation_nonce()
        try:
            DBSession.flush()
        except Exception:
            log.warning('Error persisting user', exc_info=True)
            raise HTTPInternalServerError('Error persisting user')

        return response
    else:
        request.errors.status = 403
        request.errors.add('body', 'user', 'Login failed')
        return None

def _get_documents(
        self, clazz, schema, clazz_locale, adapt_schema, custom_filter,
        include_areas, set_custom_fields, meta_params, load_documents):
    base_query = DBSession.query(clazz). \
        filter(getattr(clazz, 'redirects_to').is_(None))
    base_total_query = DBSession.query(getattr(clazz, 'document_id')). \
        filter(getattr(clazz, 'redirects_to').is_(None))

    if custom_filter:
        base_query = custom_filter(base_query)
        base_total_query = custom_filter(base_total_query)

    base_query = add_load_for_locales(base_query, clazz, clazz_locale)
    base_query = base_query.options(joinedload(getattr(clazz, 'geometry')))

    if clazz == Outing:
        base_query = base_query. \
            order_by(clazz.date_end.desc()). \
            order_by(clazz.document_id.desc())
    else:
        base_query = base_query.order_by(clazz.document_id.desc())

    base_query = add_load_for_profiles(base_query, clazz)
    base_total_query = add_profile_filter(base_total_query, clazz)

    if include_areas:
        base_query = base_query. \
            options(
                joinedload(getattr(clazz, '_areas')).
                load_only(
                    'document_id', 'area_type', 'version', 'protected',
                    'type').
                joinedload('locales').
                load_only(
                    'lang', 'title', 'version')
            )

    documents, total = load_documents(base_query, base_total_query)
    set_available_langs(documents, loaded=True)

    lang = meta_params['lang']
    if lang is not None:
        set_best_locale(documents, lang)

    if include_areas:
        self._set_areas_for_documents(documents, lang)

    if set_custom_fields:
        set_custom_fields(documents, lang)

    return {
        'documents': [
            to_json_dict(
                doc,
                schema if not adapt_schema else adapt_schema(schema, doc)
            ) for doc in documents
        ],
        'total': total
    }

def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    # Configure SQLAlchemy
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

    # Configure ElasticSearch
    configure_es_from_config(settings)

    config = Configurator(settings=settings)
    config.include('cornice')
    config.registry.queue_config = get_queue_config(settings)

    bypass_auth = False
    if 'noauthorization' in settings:
        bypass_auth = asbool(settings['noauthorization'])

    if not bypass_auth:
        config.include("pyramid_jwtauth")
        # Intercept request handling to validate token against the database
        config.add_tween('c2corg_api.jwt_database_validation_tween_factory')
        # Inject ACLs
        config.set_root_factory(RootFactory)
    else:
        log.warning('Bypassing authorization')

    # Scan MUST be the last call otherwise ACLs will not be set
    # and the permissions would be bypassed
    config.scan(ignore='c2corg_api.tests')
    return config.make_wsgi_app()

def validate_association(request):
    """Check if the given documents exist and if an association between
    the two document types is valid.
    """
    parent_document_id = request.validated.get('parent_document_id')
    child_document_id = request.validated.get('child_document_id')

    parent_document_type = None
    if parent_document_id:
        parent_document_type = DBSession.query(Document.type). \
            filter(Document.document_id == parent_document_id).scalar()
        if not parent_document_type:
            request.errors.add(
                'body', 'parent_document_id',
                'parent document does not exist')

    child_document_type = None
    if child_document_id:
        child_document_type = DBSession.query(Document.type). \
            filter(Document.document_id == child_document_id).scalar()
        if not child_document_type:
            request.errors.add(
                'body', 'child_document_id',
                'child document does not exist')

    if parent_document_type and child_document_type:
        association_type = (parent_document_type, child_document_type)
        if association_type not in valid_associations:
            request.errors.add(
                'body', 'association', 'invalid association type')

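# Wiring sketch (hypothetical service name, not part of the original module):
# cornice validators like `validate_association` are attached to a view via
# `validators=`; any error added to `request.errors` makes cornice answer
# with a 400 before the view body runs.
#
# association_service = Service(name='associations', path='/associations')
#
# @association_service.post(validators=[validate_association])
# def collection_post(request):
#     ...
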
def remove_token(token):
    now = datetime.datetime.utcnow()
    # combine the clauses with sqlalchemy's `and_`; the Python `and`
    # operator would silently discard the first condition
    condition = and_(Token.value == token, Token.expire > now)

    result = DBSession.execute(Token.__table__.delete().where(condition))
    if result.rowcount == 0:
        log.debug("Failed to remove token %s", token)

    DBSession.flush()

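# Usage sketch (hypothetical token value, not part of the original module):
# expired tokens are not matched by the delete condition above, so removing
# them is a silent no-op that only logs at debug level.
def _example_remove_token():
    remove_token('0123456789abcdef')
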
def collection_post(self):
    settings = self.request.registry.settings
    locale = self.request.validated['locale']

    title = "{}_{}".format(locale.document_id, locale.lang)
    content = '<a href="{}">{}</a>'.format(
        self.request.referer, locale.title)

    category = settings['discourse.category']
    # category could be id or name
    try:
        category = int(category)
    except ValueError:
        pass

    client = get_discourse_client(settings)
    try:
        response = client.client.create_post(
            content, title=title, category=category)
    except Exception:
        raise HTTPInternalServerError('Error with Discourse')

    if "topic_id" in response:
        document_topic = DocumentTopic(topic_id=response['topic_id'])
        locale.document_topic = document_topic
        update_cache_version_direct(locale.document_id)
        DBSession.flush()

    return response

def get_creators(document_ids):
    """ Get the creator for the list of given document ids.
    """
    t = DBSession.query(
        ArchiveDocument.document_id.label('document_id'),
        User.id.label('user_id'),
        User.name.label('name'),
        over(
            func.rank(),
            partition_by=ArchiveDocument.document_id,
            order_by=HistoryMetaData.id).label('rank')). \
        select_from(ArchiveDocument). \
        join(
            DocumentVersion,
            and_(
                ArchiveDocument.document_id == DocumentVersion.document_id,
                ArchiveDocument.version == 1)). \
        join(
            HistoryMetaData,
            DocumentVersion.history_metadata_id == HistoryMetaData.id). \
        join(User, HistoryMetaData.user_id == User.id). \
        filter(ArchiveDocument.document_id.in_(document_ids)). \
        subquery('t')
    query = DBSession.query(t.c.document_id, t.c.user_id, t.c.name). \
        filter(t.c.rank == 1)

    return {
        document_id: {
            'name': name,
            'user_id': user_id
        } for document_id, user_id, name in query
    }

def setUp(self):  # noqa
    self.app = TestApp(self.app)

    registry = self.app.app.registry
    self.mailer = get_mailer(registry)
    self.email_service = EmailService(self.mailer, settings)
    EmailService.instance = None

    self.config = testing.setUp()

    self.connection = self.engine.connect()

    # begin a non-ORM transaction
    self.trans = self.connection.begin()

    # DBSession is the scoped session manager used in the views,
    # reconfigure it to use this test's connection
    DBSession.configure(bind=self.connection)

    # create a session bound to the connection, this session is the one
    # used in the test code
    self.session = self.Session(bind=self.connection)

    self.queue_config = registry.queue_config
    reset_queue(self.queue_config)
    reset_cache_key()

def update_feed_document_update(document, user_id, update_types):
    """Update the feed entry for a document:

    - update `area_ids` if the geometry has changed.
    - update `activities` if figures have changed.
    - update `user_ids` if the document is an outing and the participants
      have changed.

    Only when updating `user_ids`, the "actor" of the feed entry is changed.
    And only then the time is updated and the `change_type` set to `updated`
    to push the entry to the top of the feed.
    """
    if document.redirects_to:
        # TODO delete existing feed entry?
        # see https://github.com/c2corg/v6_api/issues/386
        return

    if document.type in [IMAGE_TYPE, USERPROFILE_TYPE, AREA_TYPE]:
        return

    DBSession.flush()

    # update areas
    if UpdateType.GEOM in update_types:
        update_areas_of_changes(document)

    # update activities
    if document.type in [ARTICLE_TYPE, OUTING_TYPE, ROUTE_TYPE] and \
            UpdateType.FIGURES in update_types:
        update_activities_of_changes(document)

    # update user_ids/participants (only for outings)
    if document.type != OUTING_TYPE:
        return
    update_participants_of_outing(document.document_id, user_id)

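# Call sketch (illustrative only): after persisting a geometry change of an
# outing, refresh its feed entry; `UpdateType.GEOM` triggers the area update
# branch above.
def _example_feed_update(document, user_id):
    update_feed_document_update(document, user_id, [UpdateType.GEOM])
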
def get(self):
    id = self.request.validated['id']
    lang = self.request.validated['lang']

    # FIXME conditional permission check (when outings implemented)
    # is_outing = DBSession.query(Outing) \
    #   .filter(Outing.document_id == id).count()
    # if is_outing > 0:
    #     # validate permission (authenticated + associated)
    #     # return 403 if not correct

    title = DBSession.query(DocumentLocale.title) \
        .filter(DocumentLocale.document_id == id) \
        .filter(DocumentLocale.culture == lang) \
        .first()

    if not title:
        raise HTTPNotFound('no locale document for ' + lang)

    versions = DBSession.query(DocumentVersion) \
        .options(joinedload('history_metadata').joinedload('user')) \
        .filter(DocumentVersion.document_id == id) \
        .filter(DocumentVersion.culture == lang) \
        .order_by(DocumentVersion.id) \
        .all()

    return {
        'title': title.title,
        'versions': [self._serialize_version(v) for v in versions]
    }

def _put(self, clazz, schema):
    id = self.request.validated['id']
    document_in = \
        schema.objectify(self.request.validated['document'])
    self._check_document_id(id, document_in.document_id)

    # get the current version of the document
    document = self._get_document(clazz, id)
    self._check_versions(document, document_in)

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    # update the document with the input document
    document.update(document_in)

    try:
        DBSession.flush()
    except StaleDataError:
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_type, changed_langs) = \
        self._check_update_type(document, old_versions)
    self._update_version(
        document, self.request.validated['message'], update_type,
        changed_langs)

    return to_json_dict(document, schema)

def collection_delete(self):
    association_in = schema_association.objectify(self.request.validated)

    association = self._load(association_in)
    if association is None:
        # also accept {parent_document_id: y, child_document_id: x} for
        # an existing association {parent_document_id: x,
        # child_document_id: y}
        association_in = Association(
            parent_document_id=association_in.child_document_id,
            child_document_id=association_in.parent_document_id)
        association = self._load(association_in)
        if association is None:
            raise HTTPBadRequest('association does not exist')

    if is_main_waypoint_association(association):
        raise HTTPBadRequest(
            'as the main waypoint of the route, this waypoint can not '
            'be disassociated')

    log = association.get_log(
        self.request.authenticated_userid, is_creation=False)

    DBSession.delete(association)
    DBSession.add(log)

    return {}

def post(self):
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']
    user.update_validation_nonce(
        Purpose.registration,
        VALIDATION_EXPIRE_DAYS)

    # directly create the user profile, the document id of the profile
    # is the user id
    lang = user.lang
    user.profile = UserProfile(
        categories=['amateur'],
        locales=[DocumentLocale(lang=lang, title='')]
    )

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    # also create a version for the profile
    DocumentRest.create_new_version(user.profile, user.id)

    # The user needs validation
    email_service = get_email_service(self.request)
    nonce = user.validation_nonce
    settings = self.request.registry.settings
    link = settings['mail.validate_register_url_template'] % nonce
    email_service.send_registration_confirmation(user, link)

    return to_json_dict(user, schema_user)

def _put(
        self, clazz, schema, clazz_locale=None, before_update=None,
        after_update=None):
    user_id = self.request.authenticated_userid
    id = self.request.validated['id']
    document_in = \
        schema.objectify(self.request.validated['document'])
    self._check_document_id(id, document_in.document_id)

    # get the current version of the document
    document = self._get_document(clazz, id, clazz_locale=clazz_locale)

    if document.redirects_to:
        raise HTTPBadRequest('can not update merged document')
    if document.protected and not self.request.has_permission('moderator'):
        raise HTTPForbidden('No permission to change a protected document')

    self._check_versions(document, document_in)

    # remember the current version numbers of the document
    old_versions = document.get_versions()

    # update the document with the input document
    document.update(document_in)

    if before_update:
        before_update(document, document_in, user_id=user_id)

    try:
        DBSession.flush()
    except StaleDataError:
        raise HTTPConflict('concurrent modification')

    # when flushing the session, SQLAlchemy automatically updates the
    # version numbers in case attributes have changed. by comparing with
    # the old version numbers, we can check if only figures or only locales
    # have changed.
    (update_types, changed_langs) = document.get_update_type(old_versions)

    if update_types:
        # a new version needs to be created and persisted
        self._update_version(
            document, user_id, self.request.validated['message'],
            update_types, changed_langs)

        if document.type != AREA_TYPE and UpdateType.GEOM in update_types:
            update_areas_for_document(document, reset=True)

        if after_update:
            after_update(document, update_types, user_id=user_id)

        # and the search index is updated
        notify_es_syncer(self.request.registry.queue_config)

    associations = self.request.validated.get('associations', None)
    if associations:
        synchronize_associations(document, associations, user_id)

    return {}

def update_cache_version_direct(document_id):
    """ Update the cache version for the document with the given id
    without updating any dependencies.
    """
    DBSession.execute(
        text('SELECT guidebook.increment_cache_version(:document_id)'),
        {'document_id': document_id}
    )

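# Usage sketch (hypothetical document id): bump the cache version of a single
# document, leaving the cache versions of its associated documents untouched.
def _example_invalidate_one_document():
    update_cache_version_direct(123456)
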
def update_activities_of_changes(document):
    """Update the activities of all feed entries of the given document.
    """
    DBSession.execute(
        DocumentChange.__table__.update().
        where(DocumentChange.document_id == document.document_id).
        values(activities=document.activities)
    )

def add_or_retrieve_token(value, expire, userid):
    # combine the clauses with sqlalchemy's `and_` (the Python `and`
    # operator would discard the first clause) and filter on the Token's
    # own `userid` column instead of `User.id`
    token = DBSession.query(Token).filter(
        and_(Token.value == value, Token.userid == userid)).first()

    if not token:
        token = Token(value=value, expire=expire, userid=userid)
        DBSession.add(token)
        DBSession.flush()
    return token

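# Usage sketch (hypothetical values): requesting the same token value for the
# same user twice yields the already persisted row instead of a duplicate.
def _example_token_roundtrip(expire, userid):
    first = add_or_retrieve_token('abc123', expire, userid)
    again = add_or_retrieve_token('abc123', expire, userid)
    assert again.value == first.value
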
def get_changes_of_profile_feed(user_id, token_id, token_time, limit):
    user_exists_query = DBSession.query(User). \
        filter(User.id == user_id). \
        exists()
    user_exists = DBSession.query(user_exists_query).scalar()

    if not user_exists:
        raise HTTPNotFound("user not found")

    user_filter = DocumentChange.user_ids.op("&&")([user_id])

    return get_changes_of_feed(token_id, token_time, limit, user_filter)

def update_cache_version_for_map(topo_map):
    """ Invalidate the cache keys of all documents that are currently
    associated to the given map.
    Note that the cache key of the map itself is not changed when calling
    this function.
    """
    DBSession.execute(
        text('SELECT guidebook.update_cache_version_for_map(:document_id)'),
        {'document_id': topo_map.document_id}
    )

def _get_document(self, clazz, id, clazz_locale=None, lang=None):
    """Get a document with either a single locale (if `lang` is given)
    or with all locales.
    If no document exists for the given id, a `HTTPNotFound` exception
    is raised.
    """
    if not lang:
        document_query = DBSession. \
            query(clazz). \
            filter(getattr(clazz, 'document_id') == id). \
            options(joinedload('geometry'))
        document_query = add_load_for_locales(
            document_query, clazz, clazz_locale)
        document_query = add_load_for_profiles(document_query, clazz)
        document = document_query.first()
    else:
        locales_type = with_polymorphic(DocumentLocale, clazz_locale) \
            if clazz_locale else DocumentLocale
        locales_attr = getattr(clazz, 'locales')
        locales_type_eager = locales_attr.of_type(clazz_locale) \
            if clazz_locale else locales_attr

        document_query = DBSession. \
            query(clazz). \
            join(locales_type). \
            filter(getattr(clazz, 'document_id') == id). \
            filter(DocumentLocale.lang == lang). \
            options(joinedload('geometry')). \
            options(contains_eager(locales_type_eager, alias=locales_type))
        document_query = add_load_for_profiles(document_query, clazz)
        document = document_query.first()

        if not document:
            # the requested locale might not be available, try to get the
            # document without locales
            document_query = DBSession. \
                query(clazz). \
                filter(getattr(clazz, 'document_id') == id). \
                options(joinedload('geometry'))
            document_query = add_load_for_profiles(document_query, clazz)
            document = document_query.first()

            if document:
                # explicitly set `locales` to an empty list so that they
                # are not lazy loaded
                document.locales = []
                # also detach the document from the session, so that the
                # empty list is not persisted
                DBSession.expunge(document)

    if not document:
        raise HTTPNotFound('document not found')

    return document

def tearDown(self):  # noqa
    # rollback - everything that happened with the Session above
    # (including calls to commit()) is rolled back.
    testing.tearDown()
    if not keep:
        self.trans.rollback()
    else:
        self.trans.commit()
    DBSession.remove()
    self.session.close()
    self.connection.close()

def main(argv=sys.argv):
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    alembic_config = Config(alembic_configfile)
    setup_db(alembic_config, DBSession)

def post(self):
    user = schema_create_user.objectify(self.request.validated)
    user.password = self.request.validated['password']

    DBSession.add(user)
    try:
        DBSession.flush()
    except Exception:
        # TODO: log the error for debugging
        raise HTTPInternalServerError('Error persisting user')

    return to_json_dict(user, schema_user)

def collection_post(self):
    association = schema_association.objectify(self.request.validated)

    if exists_already(association):
        raise HTTPBadRequest(
            'association (or its back-link) exists already')

    DBSession.add(association)
    DBSession.add(
        association.get_log(self.request.authenticated_userid))

    return {}

def _collection_post(self, clazz, schema):
    document = schema.objectify(self.request.validated)
    document.document_id = None

    # TODO additional validation: at least one culture, only one instance
    # for each culture, geometry
    DBSession.add(document)
    DBSession.flush()

    self._create_new_version(document)

    return to_json_dict(document, schema)

def _collection_post(self, clazz, schema):
    document = schema.objectify(self.request.validated)
    document.document_id = None

    DBSession.add(document)
    DBSession.flush()

    user_id = self.request.authenticated_userid
    self._create_new_version(document, user_id)
    sync_search_index(document)

    return to_json_dict(document, schema)

def post(self):
    """ Merges a document into another document.

    - Associations and tags of the source document are transferred to
      the target document.
    - The association log entries are rewritten to the target document.
    - The time of the log entries is updated, so that the ES syncer will
      pick up the new associations of the target document.
    - The attribute `redirects_to` of the source document is set.
    - A new version is created for the source document. This makes sure
      that the ES syncer removes the document from the ES index.
    - The cache version of the source document is updated.
    - The cache version of the target document and its associations is
      updated.
    - The feed entries of the source document are removed.

    Request:
        `POST` `/documents/merge`

    Request body:
        {
            'source_document_id': @document_id@,
            'target_document_id': @document_id@
        }
    """
    source_document_id = self.request.validated['source_document_id']
    target_document_id = self.request.validated['target_document_id']
    source_doc = DBSession.query(Document).get(source_document_id)

    # transfer associations from source to target
    transfer_associations(source_document_id, target_document_id)

    # transfer tags from source to target
    transfer_tags(source_document_id, target_document_id)

    # if waypoint, update main waypoint of routes
    if source_doc.type == WAYPOINT_TYPE:
        _transfer_main_waypoint(source_document_id, target_document_id)

    # set redirection and create a new version
    source_doc.redirects_to = target_document_id
    DocumentRest.update_version(
        source_doc, self.request.authenticated_userid,
        'merged with {}'.format(target_document_id),
        [UpdateType.FIGURES], [])

    # update the cache version for the source and target document
    update_cache_version_direct(source_document_id)
    update_cache_version_full(target_document_id, source_doc.type)

    _remove_feed_entry(source_document_id)

    if source_doc.type == IMAGE_TYPE:
        delete_all_files_for_image(source_document_id, self.request)

    notify_es_syncer(self.request.registry.queue_config)

    return {}

def remove_association(
        parent_document_id, child_document_id, user_id, check_first=False):
    """Remove an association between the two documents and create a log
    entry in the association history table with the given user id.
    """
    association = Association(
        parent_document_id=parent_document_id,
        child_document_id=child_document_id)

    if check_first and not exists_already(association):
        return

    DBSession.query(Association).filter_by(
        parent_document_id=parent_document_id,
        child_document_id=child_document_id).delete()
    DBSession.add(association.get_log(user_id, is_creation=False))

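# Usage sketch (hypothetical ids): with `check_first=True` the call silently
# does nothing when neither the association nor its back-link exists.
def _example_remove_association(user_id):
    remove_association(
        parent_document_id=111, child_document_id=222,
        user_id=user_id, check_first=True)
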
def _has_permission(self, user_id, outing_id):
    """Check if the user with the given id has permission to change an
    outing. That is, only users that are currently associated to the
    outing can modify it.
    """
    return DBSession.query(exists().where(
        and_(
            Association.parent_document_id == user_id,
            Association.child_document_id == outing_id))).scalar()

def is_unused_user_attribute(attrname, value, lowercase=False):
    attr = getattr(User, attrname)
    query = DBSession.query(User)
    if lowercase:
        query = query.filter(func.lower(attr) == value.lower())
    else:
        query = query.filter(attr == value)
    return query.count() == 0

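# Usage sketch (hypothetical values): user names are typically compared
# case-insensitively, emails exactly; both attribute names are columns of
# `User` used elsewhere in this code base.
def _example_uniqueness_checks():
    username_free = is_unused_user_attribute(
        'username', 'alice', lowercase=True)
    email_free = is_unused_user_attribute('email', 'alice@example.com')
    return username_free and email_free
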
def update_areas_of_changes(document):
    """Update the area ids of all feed entries of the given document.
    """
    areas_select = select(
        [
            # concatenate with empty array to avoid null values
            # select ARRAY[]::integer[] || array_agg(area_id)
            literal_column('ARRAY[]::integer[]').op('||')(
                func.array_agg(
                    AreaAssociation.area_id,
                    type_=postgresql.ARRAY(Integer)))
        ]). \
        where(AreaAssociation.document_id == document.document_id)

    DBSession.execute(
        DocumentChange.__table__.update().
        where(DocumentChange.document_id == document.document_id).
        values(area_ids=areas_select.as_scalar())
    )

def put(self):
    old_main_waypoint_id = DBSession.query(Route.main_waypoint_id). \
        filter(Route.document_id == self.request.validated['id']).scalar()
    return self._put(
        Route, schema_route,
        before_update=functools.partial(
            update_default_geometry, old_main_waypoint_id),
        after_update=update_title_prefix)

def get_linked_waypoint_children(document):
    return _limit_waypoint_fields(
        DBSession.query(Waypoint).
        filter(Waypoint.redirects_to.is_(None)).
        join(
            Association,
            Association.child_document_id == Waypoint.document_id).
        filter(Association.parent_document_id == document.document_id)). \
        all()

def get_linked_users(document, lang):
    user_ids = get_first_column(
        DBSession.query(User.id).
        join(Association, Association.parent_document_id == User.id).
        filter(Association.child_document_id == document.document_id).
        group_by(User.id).
        all())
    return get_documents_for_ids(
        user_ids, lang, user_profile_documents_config).get('documents')

def post(self):
    request = self.request
    userid = request.authenticated_userid
    user = DBSession.query(User).filter(User.id == userid).first()

    token = renew_token(user, request)
    if token:
        return token_to_response(user, token, request)
    else:
        raise HTTPInternalServerError('Error renewing token')

def exists_already(link):
    """ Checks if the given association exists already. For example, for
    two given documents D1 and D2, it checks whether there is an
    association D1 -> D2 or D2 -> D1.
    """
    associations_exists = DBSession.query(Association). \
        filter(or_(
            and_(
                Association.parent_document_id == link.parent_document_id,
                Association.child_document_id == link.child_document_id
            ),
            and_(
                Association.child_document_id == link.parent_document_id,
                Association.parent_document_id == link.child_document_id
            )
        )). \
        exists()
    return DBSession.query(associations_exists).scalar()

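# Usage sketch (hypothetical ids): the check is symmetric, so the reversed
# link is always reported the same way as the original one.
def _example_exists_already():
    link = Association(parent_document_id=111, child_document_id=222)
    back_link = Association(parent_document_id=222, child_document_id=111)
    assert exists_already(link) == exists_already(back_link)
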
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    # Configure SQLAlchemy
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

    # Configure ElasticSearch
    configure_es_from_config(settings)

    config = Configurator(settings=settings)
    config.include('cornice')
    config.registry.queue_config = get_queue_config(settings)

    # FIXME? Make sure this tween is run after the JWT validation
    # Using an explicit ordering in config files might be needed.
    config.add_tween(
        'c2corg_api.tweens.rate_limiting.rate_limiting_tween_factory',
        under='pyramid_tm.tm_tween_factory')

    bypass_auth = False
    if 'noauthorization' in settings:
        bypass_auth = asbool(settings['noauthorization'])

    if not bypass_auth:
        config.include("pyramid_jwtauth")
        # Intercept request handling to validate token against the database
        config.add_tween(
            'c2corg_api.tweens.jwt_database_validation.'
            'jwt_database_validation_tween_factory')
        # Inject ACLs
        config.set_root_factory(RootFactory)
    else:
        log.warning('Bypassing authorization')

    configure_caches(settings)
    configure_feed(settings, config)
    configure_anonymous(settings, config)

    # Scan MUST be the last call otherwise ACLs will not be set
    # and the permissions would be bypassed
    config.scan(ignore='c2corg_api.tests')
    return config.make_wsgi_app()

def collection_post(self):
    settings = self.request.registry.settings
    locale = self.request.validated['locale']

    title = "{}_{}".format(locale.document_id, locale.lang)
    content = '<a href="{}">{}</a>'.format(
        self.request.referer,
        locale.title or parse_url(self.request.referer).path)

    category = settings['discourse.category']
    # category could be id or name
    try:
        category = int(category)
    except ValueError:
        pass

    client = get_discourse_client(settings)
    try:
        response = client.client.create_post(
            content, title=title, category=category)
    except Exception as e:
        log.error('Error with Discourse: {}'.format(str(e)), exc_info=True)
        raise HTTPInternalServerError('Error with Discourse')

    if "topic_id" in response:
        topic_id = response['topic_id']
        document_topic = DocumentTopic(topic_id=topic_id)
        locale.document_topic = document_topic
        update_cache_version_direct(locale.document_id)
        DBSession.flush()

        if locale.type == document_types.OUTING_TYPE:
            try:
                self.invite_participants(client, locale, topic_id)
            except Exception:
                log.error(
                    'Inviting participants of outing {} failed'.format(
                        locale.document_id),
                    exc_info=True)

    return response

def set_recent_outings(waypoint, lang):
    """Set last 10 outings on routes associated to the given waypoint.
    """
    t_outing_route = aliased(Association, name='a1')
    t_route_wp = aliased(Association, name='a2')
    with_query_waypoints = _get_select_children(waypoint)

    recent_outing_ids = get_first_column(
        DBSession.query(Outing.document_id).
        filter(Outing.redirects_to.is_(None)).
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id).
        join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id ==
                t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            )).
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id ==
            t_route_wp.parent_document_id).
        order_by(Outing.date_end.desc()).
        limit(NUM_RECENT_OUTINGS).
        all())

    total = DBSession.query(Outing.document_id). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id). \
        join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id ==
                t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            )). \
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id ==
            t_route_wp.parent_document_id). \
        count()

    waypoint.associations['recent_outings'] = get_documents_for_ids(
        recent_outing_ids, lang, outing_documents_config, total)

def validate_required_user_from_email(request):
    validate_required_json_string("email", request)
    if len(request.errors) != 0:
        return
    email = request.validated['email']
    user = DBSession.query(User).filter(User.email == email).first()
    if user:
        request.validated['user'] = user
    else:
        request.errors.add('body', 'email', 'No user with this email')

def set_author(outings, lang):
    """Set the author (the user who created an outing) on a list of
    outings.
    """
    if not outings:
        return
    outing_ids = [o.document_id for o in outings]

    t = DBSession.query(
        ArchiveDocument.document_id.label('document_id'),
        User.id.label('user_id'),
        User.username.label('username'),
        User.name.label('name'),
        over(
            func.rank(),
            partition_by=ArchiveDocument.document_id,
            order_by=HistoryMetaData.id).label('rank')). \
        select_from(ArchiveDocument). \
        join(
            DocumentVersion,
            and_(
                ArchiveDocument.document_id == DocumentVersion.document_id,
                ArchiveDocument.version == 1)). \
        join(
            HistoryMetaData,
            DocumentVersion.history_metadata_id == HistoryMetaData.id). \
        join(User, HistoryMetaData.user_id == User.id). \
        filter(ArchiveDocument.document_id.in_(outing_ids)). \
        subquery('t')
    query = DBSession.query(
        t.c.document_id, t.c.user_id, t.c.username, t.c.name). \
        filter(t.c.rank == 1)

    author_for_outings = {
        document_id: {
            'username': username,
            'name': name,
            'user_id': user_id
        } for document_id, user_id, username, name in query
    }

    for outing in outings:
        outing.author = author_for_outings.get(outing.document_id)

def update_map(topo_map, reset=False):
    """Create associations for the given map with all intersecting
    documents.
    If `reset` is True, all possible existing associations to this map
    are dropped before creating new associations.
    """
    if reset:
        DBSession.execute(
            TopoMapAssociation.__table__.delete().where(
                TopoMapAssociation.topo_map_id == topo_map.document_id)
        )

    if topo_map.redirects_to:
        # ignore forwarded maps
        return

    map_geom = select([DocumentGeometry.geom_detail]). \
        where(DocumentGeometry.document_id == topo_map.document_id)
    intersecting_documents = DBSession. \
        query(
            DocumentGeometry.document_id,  # id of a document
            literal_column(str(topo_map.document_id))). \
        join(
            Document,
            and_(
                Document.document_id == DocumentGeometry.document_id,
                Document.type != MAP_TYPE)). \
        filter(Document.redirects_to.is_(None)). \
        filter(
            or_(
                DocumentGeometry.geom.ST_Intersects(
                    map_geom.label('t1')),
                DocumentGeometry.geom_detail.ST_Intersects(
                    map_geom.label('t2'))
            ))

    DBSession.execute(
        TopoMapAssociation.__table__.insert().from_select(
            [TopoMapAssociation.document_id,
             TopoMapAssociation.topo_map_id],
            intersecting_documents))

    # update the cache key of the documents that are now associated
    # to the map
    update_cache_version_for_map(topo_map)

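# Usage sketch: after the geometry of a map was edited, recreate its
# associations from scratch; `reset=True` first drops the stale links.
def _example_refresh_map(topo_map):
    update_map(topo_map, reset=True)
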
def post(self):
    request = self.request
    user = request.validated['user']
    user.update_validation_nonce(
        Purpose.new_password,
        VALIDATION_EXPIRE_DAYS)

    try:
        DBSession.flush()
    except Exception:
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

    email_service = get_email_service(request)
    nonce = user.validation_nonce
    settings = request.registry.settings
    link = settings['mail.request_password_change_url_template'] % nonce
    email_service.send_request_change_password(user, link)

    return {}

def setup_package():
    # set up database
    engine = get_engine()
    DBSession.configure(bind=engine)

    alembic_config = _get_alembic_config()
    downgrade(alembic_config, 'base')
    initializedb.setup_db(alembic_config, DBSession)

    # set up ElasticSearch
    configure_es_from_config(settings)
    initializees.drop_index()
    initializees.setup_es()

    # Add test data needed for all tests
    with transaction.manager:
        _add_global_test_data(DBSession)
        fill_index(DBSession)
    DBSession.remove()

def get_documents(documents_config, meta_params, search_documents):
    lang = meta_params['lang']

    base_query = DBSession.query(documents_config.clazz). \
        filter(getattr(documents_config.clazz, 'redirects_to').is_(None))
    base_total_query = DBSession. \
        query(getattr(documents_config.clazz, 'document_id')). \
        filter(getattr(documents_config.clazz, 'redirects_to').is_(None))

    base_total_query = add_profile_filter(
        base_total_query, documents_config.clazz)
    base_query = add_load_for_profiles(base_query, documents_config.clazz)

    if documents_config.clazz == Outing:
        base_query = base_query. \
            order_by(documents_config.clazz.date_end.desc()). \
            order_by(documents_config.clazz.document_id.desc())
    elif documents_config.clazz == Xreport:
        base_query = base_query. \
            order_by(documents_config.clazz.date.desc()). \
            order_by(documents_config.clazz.document_id.desc())
    else:
        base_query = base_query.order_by(
            documents_config.clazz.document_id.desc())

    document_ids, total = search_documents(base_query, base_total_query)
    cache_keys = get_cache_keys(
        document_ids, lang, documents_config.document_type)

    def get_documents_from_cache_keys(*cache_keys):
        """ This method is called from dogpile.cache with the cache keys
        for the documents that are not cached yet.
        """
        ids = [get_document_id(cache_key) for cache_key in cache_keys]
        docs = _get_documents_from_ids(
            ids, base_query, documents_config, lang)

        assert len(cache_keys) == len(docs), \
            'the number of returned documents must match ' \
            'the number of keys'

        return docs

    # get the documents from the cache or from the database
    documents = get_or_create_multi(
        cache_document_listing, cache_keys, get_documents_from_cache_keys,
        should_cache_fn=lambda v: v is not None)

    documents = [doc for doc in documents if doc]
    total = total if total is not None else len(documents)

    return {
        'documents': documents,
        'total': total
    }

def get_linked_books(document, lang):
    book_ids = get_first_column(
        DBSession.query(Book.document_id).
        filter(Book.redirects_to.is_(None)).
        join(
            Association,
            and_(
                Association.child_document_id == document.document_id,
                Association.parent_document_id == Book.document_id)).
        group_by(Book.document_id).
        all())
    return get_documents_for_ids(
        book_ids, lang, book_documents_config).get('documents')

def get_linked_outings(document, lang):
    outing_ids = get_first_column(
        DBSession.query(Outing.document_id).
        filter(Outing.redirects_to.is_(None)).
        join(
            Association,
            and_(
                Association.parent_document_id == Outing.document_id,
                Association.child_document_id == document.document_id)).
        group_by(Outing.document_id).
        all())
    return get_documents_for_ids(
        outing_ids, lang, outing_documents_config).get('documents')

def post(self):
    request = self.request
    user = request.validated['user']
    user.clear_validation_nonce()
    user.email = user.email_to_validate
    user.email_to_validate = None

    # Synchronize the new email (and other parameters)
    try:
        client = get_discourse_client(request.registry.settings)
        client.sync_sso(user)
    except Exception:
        log.error('Error syncing email with discourse', exc_info=True)
        raise HTTPInternalServerError('Error with Discourse')

    try:
        DBSession.flush()
    except Exception:
        log.warning('Error persisting user', exc_info=True)
        raise HTTPInternalServerError('Error persisting user')

def get_linked_waypoint_parents(document, lang):
    waypoint_ids = get_first_column(
        DBSession.query(Waypoint.document_id).
        filter(Waypoint.redirects_to.is_(None)).
        join(
            Association,
            Association.parent_document_id == Waypoint.document_id).
        filter(Association.child_document_id == document.document_id).
        group_by(Waypoint.document_id).
        all())
    return get_documents_for_ids(
        waypoint_ids, lang, waypoint_documents_config).get('documents')

def check_filename_unique(image, request, updating):
    """Checks that the filename is unique.
    """
    if 'filename' not in image:
        return
    sql = DBSession.query(Image) \
        .filter(Image.filename == image['filename'])
    if updating:
        sql = sql.filter(Image.document_id != image['document_id'])
    if sql.count() > 0:
        request.errors.add('body', 'filename', 'Unique')

def set_best_locale(documents, preferred_lang, expunge=True):
    """Sets the "best" locale on the given documents. The "best" locale
    is the locale in the given "preferred language" if available.
    Otherwise it is the "most relevant" translation according to
    `langs_priority`.
    """
    if preferred_lang is None:
        return

    for document in documents:
        # need to detach the document from the session, so that the
        # following change to `document.locales` is not persisted
        if expunge and not inspect(document).detached:
            DBSession.expunge(document)

        if document.locales:
            available_locales = {
                locale.lang: locale for locale in document.locales}
            best_locale = get_best_locale(available_locales, preferred_lang)
            if best_locale:
                document.locales = [best_locale]

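# Usage sketch (hypothetical language code): after the call each document
# carries at most one locale, the 'fr' one if present, else the best
# fallback according to `langs_priority`.
def _example_best_locale(documents):
    set_best_locale(documents, 'fr')
    return [d.locales[0].lang for d in documents if d.locales]
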
def _set_img_count_for_documents(documents, document_ids):
    res = DBSession.query(
        Association.parent_document_id, func.count('*')). \
        filter(and_(
            Association.parent_document_id.in_(document_ids),
            Association.child_document_type == IMAGE_TYPE)). \
        group_by(Association.parent_document_id).all()
    counts = {document_id: count for (document_id, count) in res}
    for document in documents:
        document.img_count = counts.get(document.document_id, 0)

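# Call sketch (illustrative only): annotate loaded documents with the number
# of directly associated images; documents without any get an `img_count`
# of 0.
def _example_img_counts(documents):
    _set_img_count_for_documents(
        documents, [d.document_id for d in documents])
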
def get_cache_key(document_id, lang, document_type, custom_cache_key=None):
    """ Returns an identifier which reflects the version of a document
    and all its associated documents. This identifier is used as cache
    key and as ETag value.
    """
    version = DBSession.query(CacheVersion.version). \
        filter(CacheVersion.document_id == document_id). \
        first()

    return _format_cache_key(
        document_id, lang, version[0] if version else None, document_type,
        custom_cache_key)

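# Usage sketch (hypothetical id, lang and document type code): the returned
# key doubles as the ETag value, so clients can revalidate cached responses
# with `If-None-Match`.
def _example_cache_key():
    return get_cache_key(123456, 'fr', 'w')
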
def _get_document_archive(document, update_types):
    if UpdateType.FIGURES in update_types:
        # the document has changed, create a new archive version
        archive = document.to_archive()
    else:
        # the document has not changed, load the previous archive version
        archive = DBSession.query(ArchiveDocument). \
            filter(
                ArchiveDocument.version == document.version,
                ArchiveDocument.document_id == document.document_id). \
            one()
    return archive

def get_linked_images(document, lang):
    image_ids = get_first_column(
        DBSession.query(Image.document_id).
        filter(Image.redirects_to.is_(None)).
        join(
            Association,
            and_(
                Association.child_document_id == Image.document_id,
                Association.parent_document_id == document.document_id)).
        order_by(asc(Image.date_time)).
        group_by(Image.document_id).
        all())
    return get_documents_for_ids(
        image_ids, lang, image_documents_config).get('documents')

def is_less_than_24h_old(document_id):
    """Check that the first version of this document was created less
    than 24h ago.
    """
    written_at = DBSession.query(
        HistoryMetaData.written_at.label('written_at')). \
        select_from(HistoryMetaData). \
        join(
            DocumentVersion,
            DocumentVersion.history_metadata_id == HistoryMetaData.id). \
        filter(DocumentVersion.document_id == document_id). \
        order_by(asc(HistoryMetaData.written_at)). \
        limit(1). \
        scalar()

    return datetime.now(timezone.utc) - written_at <= timedelta(hours=24)

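# Usage sketch (hypothetical id), e.g. to decide whether an edit window is
# still open. Note that the comparison assumes the stored `written_at`
# timestamps are timezone-aware.
def _example_recent_document():
    return is_less_than_24h_old(123456)
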