def update_cache_version_direct(document_id):
    """Bump the cache version of a single document, without touching any
    of its dependencies.
    """
    stmt = text('SELECT guidebook.increment_cache_version(:document_id)')
    DBSession.execute(stmt, {'document_id': document_id})
def update_cache_version_full(document_id, type):
    """Update the cache version of the given document and of all documents
    associated to it.
    """
    # NOTE: `type` shadows the builtin, but it is part of the public
    # signature (callers pass it by keyword), so it must keep its name.
    params = {'document_id': document_id, 'type': type}
    DBSession.execute(
        text('SELECT guidebook.update_cache_version(:document_id, :type)'),
        params)
def _transfer_main_waypoint(source_document_id, target_document_id):
    """Re-point routes whose main waypoint is the source document to the
    target document, then refresh the linked route titles.
    """
    target_waypoint = DBSession.query(Waypoint).get(target_document_id)
    stmt = Route.__table__.update(). \
        where(Route.main_waypoint_id == source_document_id). \
        values(main_waypoint_id=target_document_id)
    DBSession.execute(stmt)
    update_linked_route_titles(target_waypoint, [UpdateType.LANG], None)
def update_activities_of_changes(document):
    """Propagate the document's current activities to all of its feed
    entries.
    """
    stmt = DocumentChange.__table__.update(). \
        where(DocumentChange.document_id == document.document_id). \
        values(activities=document.activities)
    DBSession.execute(stmt)
def update_cache_version_direct(document_id):
    """Increment the cache version of the given document only; associated
    documents are deliberately left untouched.
    """
    params = {'document_id': document_id}
    DBSession.execute(
        text('SELECT guidebook.increment_cache_version(:document_id)'),
        params)
def update_cache_version_for_map(topo_map):
    """Invalidate the cache keys of every document currently associated
    to the given map. The map's own cache key is not changed here.
    """
    params = {'document_id': topo_map.document_id}
    DBSession.execute(
        text('SELECT guidebook.update_cache_version_for_map(:document_id)'),
        params)
def _remove_archive(archive_clazz, document_id):
    """Delete all archive rows of a document: first the type-specific rows
    of `archive_clazz`, then the generic `ArchiveDocument` rows.
    """
    ids_subquery = DBSession.query(ArchiveDocument.id). \
        filter(ArchiveDocument.document_id == document_id). \
        subquery()

    # type-specific rows reference the generic ids, so they go first
    DBSession.execute(archive_clazz.__table__.delete().where(
        getattr(archive_clazz, 'id').in_(ids_subquery)))

    DBSession.query(ArchiveDocument). \
        filter(ArchiveDocument.document_id == document_id). \
        delete()
def _remove_history_metadata(document_id):
    """Delete a document's versions together with the history metadata
    rows they reference.
    """
    metadata_ids = DBSession. \
        query(DocumentVersion.history_metadata_id). \
        filter(DocumentVersion.document_id == document_id). \
        all()

    # the versions reference the metadata, so delete them first
    DBSession.query(DocumentVersion). \
        filter(DocumentVersion.document_id == document_id). \
        delete()

    DBSession.execute(HistoryMetaData.__table__.delete().where(
        HistoryMetaData.id.in_(metadata_ids)))
def update_cache_version_for_map(topo_map):
    """Invalidate the cache keys of all documents associated to `topo_map`.

    Note that the cache key of the map itself is not changed by this call.
    """
    stmt = text(
        'SELECT guidebook.update_cache_version_for_map(:document_id)')
    DBSession.execute(stmt, {'document_id': topo_map.document_id})
def update_langs_of_changes(document_id):
    """Refresh the `langs` array of all feed entries of the document from
    its current set of locales.
    """
    # aggregate the document's locale languages into a single array value
    langs_subquery = DBSession. \
        query(cast(
            func.array_agg(DocumentLocale.lang),
            ArrayOfEnum(enums.lang))). \
        filter(DocumentLocale.document_id == document_id). \
        group_by(DocumentLocale.document_id). \
        subquery('langs')
    DBSession.execute(
        DocumentChange.__table__.update().
        where(DocumentChange.document_id == document_id).
        values(langs=langs_subquery.select()))
def _remove_archive_locale(archive_clazz_locale, document_id):
    """Delete the archived locales of a document: the type-specific rows
    (when the document type has its own archive locale class) and the
    generic `ArchiveDocumentLocale` rows.
    """
    if archive_clazz_locale:
        ids_subquery = DBSession. \
            query(ArchiveDocumentLocale.id). \
            filter(ArchiveDocumentLocale.document_id == document_id). \
            subquery()
        # type-specific rows reference the generic ids, so they go first
        DBSession.execute(archive_clazz_locale.__table__.delete().where(
            getattr(archive_clazz_locale, 'id').in_(ids_subquery)))
    DBSession.query(ArchiveDocumentLocale). \
        filter(ArchiveDocumentLocale.document_id == document_id). \
        delete()
def transfer_tags(source_document_id, target_document_id):
    """Move document tags and their log entries from the source document
    to the target document, skipping users that already tagged the target.
    """
    # users that have already tagged the target document
    rows = DBSession. \
        query(DocumentTag.user_id). \
        filter(DocumentTag.document_id == target_document_id). \
        all()
    already_tagged_user_ids = [user_id for (user_id, ) in rows]

    # move tags of all other users over to the target document
    DBSession.execute(DocumentTag.__table__.update().where(
        _and_in(
            DocumentTag.document_id == source_document_id,
            DocumentTag.user_id,
            already_tagged_user_ids)).values(
        document_id=target_document_id))
    # drop the tags that could not be moved
    DBSession.execute(DocumentTag.__table__.delete().where(
        DocumentTag.document_id == source_document_id))

    # same treatment for the tag log entries
    DBSession.execute(DocumentTagLog.__table__.update().where(
        _and_in(
            DocumentTagLog.document_id == source_document_id,
            DocumentTagLog.user_id,
            already_tagged_user_ids)).values(
        document_id=target_document_id, written_at=func.now()))
    DBSession.execute(DocumentTagLog.__table__.delete().where(
        DocumentTagLog.document_id == source_document_id))
def _remove_locale(clazz_locale, document_id):
    """Delete the locales of a document along with their links to forum
    topics (the comments themselves are kept).
    """
    locale_ids = DBSession.query(DocumentLocale.id). \
        filter(DocumentLocale.document_id == document_id). \
        subquery()

    # only the links are dropped; the forum topics survive
    DBSession.execute(DocumentTopic.__table__.delete().where(
        DocumentTopic.document_locale_id.in_(locale_ids)))

    if clazz_locale:
        # type-specific locale rows, when the document type has any
        DBSession.execute(clazz_locale.__table__.delete().where(
            getattr(clazz_locale, 'id').in_(locale_ids)))

    DBSession.query(DocumentLocale). \
        filter(DocumentLocale.document_id == document_id). \
        delete()
def _remove_archive_locale(archive_clazz_locale, document_id, lang=None):
    """Delete the archived locales of a document, optionally restricted to
    a single language.
    """
    if archive_clazz_locale:
        id_query = DBSession.query(ArchiveDocumentLocale.id). \
            filter(ArchiveDocumentLocale.document_id == document_id)
        if lang:
            id_query = id_query.filter(ArchiveDocumentLocale.lang == lang)
        # type-specific rows reference the generic ids, so they go first
        DBSession.execute(archive_clazz_locale.__table__.delete().where(
            getattr(archive_clazz_locale, 'id').in_(id_query.subquery())))
    delete_query = DBSession.query(ArchiveDocumentLocale). \
        filter(ArchiveDocumentLocale.document_id == document_id)
    if lang:
        delete_query = delete_query.filter(
            ArchiveDocumentLocale.lang == lang)
    delete_query.delete()
def remove_token(token):
    """Delete the given token if it has not expired yet.

    Fix: the original combined the two clauses with the Python `and`
    operator (`Token.value == token and Token.expire > now`). `and` does
    not build a SQL conjunction — it evaluates the truthiness of the first
    comparison (falsy for an eq-expression between distinct objects), so
    the expiry check was silently dropped and expired tokens could be
    deleted too. `and_()` builds the intended
    `WHERE value = :token AND expire > :now`.
    """
    now = datetime.datetime.utcnow()
    condition = and_(Token.value == token, Token.expire > now)
    result = DBSession.execute(Token.__table__.delete().where(condition))
    if result.rowcount == 0:
        log.debug('Failed to remove token %s' % token)
    DBSession.flush()
def remove_token(token):
    """Delete the given, still-valid token.

    Fix: `cond_a and cond_b` on two SQLAlchemy expressions is a Python
    boolean operation, not a SQL one — the result kept only a single
    condition, so the `expire > now` clause never reached the database.
    The explicit `and_()` conjunction restores the intended filter.
    """
    now = datetime.datetime.utcnow()
    condition = and_(Token.value == token, Token.expire > now)
    result = DBSession.execute(Token.__table__.delete().where(condition))
    if result.rowcount == 0:
        log.debug("Failed to remove token %s" % token)
    DBSession.flush()
def update_areas_of_changes(document):
    """Refresh the `area_ids` array of all feed entries of the document."""
    # `ARRAY[]::integer[] || array_agg(area_id)` turns a NULL aggregate
    # (document in no area) into an empty array
    areas_select = select(
        [
            literal_column('ARRAY[]::integer[]').op('||')(
                func.array_agg(
                    AreaAssociation.area_id,
                    type_=postgresql.ARRAY(Integer)))
        ]). \
        where(AreaAssociation.document_id == document.document_id)
    DBSession.execute(
        DocumentChange.__table__.update().
        where(DocumentChange.document_id == document.document_id).
        values(area_ids=areas_select.as_scalar()))
def update_map(topo_map, reset=False):
    """Associate the given map with every document whose geometry
    intersects it.

    If `reset` is True, all existing associations to this map are dropped
    before the new ones are created.
    """
    if reset:
        DBSession.execute(
            TopoMapAssociation.__table__.delete().where(
                TopoMapAssociation.topo_map_id == topo_map.document_id))

    if topo_map.redirects_to:
        # forwarded maps get no associations
        return

    map_geom = select([DocumentGeometry.geom_detail]). \
        where(DocumentGeometry.document_id == topo_map.document_id)

    intersecting_docs = DBSession. \
        query(
            DocumentGeometry.document_id,  # id of a document
            literal_column(str(topo_map.document_id))). \
        join(
            Document,
            and_(
                Document.document_id == DocumentGeometry.document_id,
                Document.type != MAP_TYPE)). \
        filter(Document.redirects_to.is_(None)). \
        filter(
            or_(
                DocumentGeometry.geom.ST_Intersects(
                    map_geom.label('t1')),
                DocumentGeometry.geom_detail.ST_Intersects(
                    map_geom.label('t2'))))

    DBSession.execute(
        TopoMapAssociation.__table__.insert().from_select(
            [TopoMapAssociation.document_id,
             TopoMapAssociation.topo_map_id],
            intersecting_docs))

    # invalidate the cache of the documents that are now linked to the map
    update_cache_version_for_map(topo_map)
def update_areas_of_changes(document):
    """Refresh the `area_ids` of all feed entries of the given document."""
    # concatenating onto ARRAY[]::integer[] avoids a NULL result when the
    # document is associated to no area at all
    empty_int_array = literal_column('ARRAY[]::integer[]')
    aggregated_area_ids = func.array_agg(
        AreaAssociation.area_id, type_=postgresql.ARRAY(Integer))
    areas_select = select([empty_int_array.op('||')(aggregated_area_ids)]). \
        where(AreaAssociation.document_id == document.document_id)

    stmt = DocumentChange.__table__.update(). \
        where(DocumentChange.document_id == document.document_id). \
        values(area_ids=areas_select.as_scalar())
    DBSession.execute(stmt)
def update_area(area, reset=False):
    """Associate the given area with every document whose geometry
    intersects it.

    If `reset` is True, all existing associations to this area are dropped
    before the new ones are created.
    """
    if reset:
        DBSession.execute(
            AreaAssociation.__table__.delete().where(
                AreaAssociation.area_id == area.document_id))

    if area.redirects_to:
        # forwarded areas get no associations
        return

    area_geom = select([DocumentGeometry.geom_detail]). \
        where(DocumentGeometry.document_id == area.document_id)

    intersecting_docs = DBSession. \
        query(
            DocumentGeometry.document_id,  # id of a document
            literal_column(str(area.document_id))). \
        join(
            Document,
            and_(
                Document.document_id == DocumentGeometry.document_id,
                Document.type != AREA_TYPE)). \
        filter(Document.redirects_to.is_(None)). \
        filter(
            or_(
                DocumentGeometry.geom.ST_Intersects(
                    area_geom.label('t1')),
                DocumentGeometry.geom_detail.ST_Intersects(
                    area_geom.label('t2'))))

    DBSession.execute(
        AreaAssociation.__table__.insert().from_select(
            [AreaAssociation.document_id, AreaAssociation.area_id],
            intersecting_docs))
def update_maps_for_document(document, reset=False):
    """Associate the given document with all maps it intersects.

    If `reset` is True, all existing map associations of this document are
    dropped before the new ones are created.
    """
    if reset:
        DBSession.execute(
            TopoMapAssociation.__table__.delete().where(
                TopoMapAssociation.document_id == document.document_id))

    if document.redirects_to:
        # forwarded documents get no associations
        return

    document_geom = select([DocumentGeometry.geom]). \
        where(DocumentGeometry.document_id == document.document_id)
    document_geom_detail = select([DocumentGeometry.geom_detail]). \
        where(DocumentGeometry.document_id == document.document_id)

    intersecting_maps = DBSession. \
        query(
            DocumentGeometry.document_id,  # id of a map
            literal_column(str(document.document_id))). \
        join(
            TopoMap,
            TopoMap.document_id == DocumentGeometry.document_id). \
        filter(TopoMap.redirects_to.is_(None)). \
        filter(
            or_(
                DocumentGeometry.geom_detail.ST_Intersects(
                    document_geom.label('t1')),
                DocumentGeometry.geom_detail.ST_Intersects(
                    document_geom_detail.label('t2'))))

    DBSession.execute(
        TopoMapAssociation.__table__.insert().from_select(
            [TopoMapAssociation.topo_map_id,
             TopoMapAssociation.document_id],
            intersecting_maps))
def update_participants_of_outing(outing_id, user_id):
    """Keep the feed entries of an outing in sync with its participants.

    :param outing_id: id of the outing document.
    :param user_id: id of the user doing the current change.

    Fix: `log.warn` is a deprecated alias of `Logger.warning` and is
    replaced accordingly; behavior is otherwise unchanged.
    """
    existing_change = get_existing_change(outing_id)
    if not existing_change:
        log.warning('no feed change for document {}'.format(outing_id))
        return

    participant_ids = _get_participants_of_outing(outing_id)
    if set(existing_change.user_ids) == set(participant_ids):
        # participants have not changed, stop
        return
    existing_change.user_ids = participant_ids

    if existing_change.user_id != user_id:
        # a different user is doing this change, only set a different user
        # id if the user is one of the participants (to ignore moderator
        # edits)
        if user_id in participant_ids:
            existing_change.user_id = user_id

    existing_change.change_type = 'updated'
    existing_change.time = func.now()

    DBSession.flush()

    # now also update the participants of other feed entries of the outing:
    # set `user_ids` to the union of the participant ids and the `user_id`
    # of the entry
    participants_and_editor = text(
        'ARRAY(SELECT DISTINCT UNNEST(array_cat('
        ' ARRAY[guidebook.feed_document_changes.user_id], :participants)) '
        'ORDER BY 1)')
    DBSession.execute(
        DocumentChange.__table__.update().
        where(DocumentChange.document_id == outing_id).
        where(DocumentChange.change_id != existing_change.change_id).
        values(user_ids=participants_and_editor),
        {'participants': participant_ids}
    )
def _remove_locale_versions(document_id, lang):
    """Delete the versions of a single locale of a document, and the
    history metadata rows that no other locale shares.
    """
    # per history_metadata_id: its lang and how many locales reference it
    counts = DBSession.query(
        DocumentVersion.history_metadata_id,
        DocumentVersion.lang,
        over(
            func.count('*'),
            partition_by=DocumentVersion.history_metadata_id).label('cnt')). \
        filter(DocumentVersion.document_id == document_id). \
        subquery('t')

    # history metadata referenced only by the locale being removed
    metadata_ids = DBSession.query(counts.c.history_metadata_id). \
        filter(counts.c.lang == lang).filter(counts.c.cnt == 1).all()

    DBSession.query(DocumentVersion). \
        filter(DocumentVersion.document_id == document_id). \
        filter(DocumentVersion.lang == lang).delete()

    if len(metadata_ids):
        DBSession.execute(HistoryMetaData.__table__.delete().where(
            HistoryMetaData.id.in_(metadata_ids)))
def _update_feed_entries(source_user_id, target_user_id):
    """Move the feed history of the source user over to the target user
    and clean up the source user's feed-related data.
    """
    # documents for which BOTH users already have a feed entry; those
    # entries must not be transferred, they would be duplicates
    duplicated_doc_ids = DBSession.query(DocumentChange.document_id). \
        filter(DocumentChange.user_id == target_user_id). \
        intersect(
            DBSession.query(DocumentChange.document_id).
            filter(DocumentChange.user_id == source_user_id)
        ).subquery()

    DBSession.execute(
        DocumentChange.__table__.update().where(and_(
            DocumentChange.user_id == source_user_id,
            ~DocumentChange.document_id.in_(duplicated_doc_ids)
        )).values({
            DocumentChange.user_id: target_user_id
        })
    )

    # drop the remaining entries; the target user already has them
    DBSession.query(DocumentChange). \
        filter(DocumentChange.user_id == source_user_id).delete()

    # in `user_ids`: when the target user is already listed, just remove
    # the source user from the array ...
    DBSession.execute(
        DocumentChange.__table__.update().where(and_(
            any_(DocumentChange.user_ids) == source_user_id,
            any_(DocumentChange.user_ids) == target_user_id
        )).values({
            DocumentChange.user_ids: func.array_remove(
                DocumentChange.user_ids, source_user_id)
        })
    )
    # ... otherwise replace the source user with the target user
    DBSession.execute(
        DocumentChange.__table__.update().where(
            any_(DocumentChange.user_ids) == source_user_id
        ).values({
            DocumentChange.user_ids: func.array_replace(
                DocumentChange.user_ids, source_user_id, target_user_id)
        })
    )

    # subscriptions to and of the source user are dropped
    DBSession.query(FollowedUser). \
        filter(or_(
            FollowedUser.followed_user_id == source_user_id,
            FollowedUser.follower_user_id == source_user_id
        )).delete()

    # the source user's feed filter preferences are dropped as well
    DBSession.query(FilterArea). \
        filter(FilterArea.user_id == source_user_id).delete()
def update_cache_version_associations(added_associations, removed_associations,
                                      ignore_document_id=None):
    """Update the cache versions of all documents touched by association
    changes, including waypoint and route dependency chains.

    :param added_associations: list of association dicts that were added.
    :param removed_associations: list of association dicts that were
        removed.
    :param ignore_document_id: optional document id whose cache version
        should not be updated.

    Fix: `documents_to_update.remove(...)` raised `KeyError` when the
    ignored document was not part of any changed association;
    `set.discard` is a no-op in that case, which is the intent here.
    """
    changed_associations = added_associations + removed_associations
    if not changed_associations:
        return

    documents_to_update = set()
    waypoints_to_update = set()
    routes_to_update = set()

    for association in changed_associations:
        documents_to_update.add(association['parent_id'])
        documents_to_update.add(association['child_id'])

        if association['parent_type'] == WAYPOINT_TYPE and \
                association['child_type'] == ROUTE_TYPE:
            waypoints_to_update.add(association['parent_id'])
        elif association['parent_type'] == ROUTE_TYPE and \
                association['child_type'] == OUTING_TYPE:
            routes_to_update.add(association['parent_id'])

    if ignore_document_id is not None:
        documents_to_update.discard(ignore_document_id)

    if documents_to_update:
        # update the cache version of the documents of added and removed
        # associations
        DBSession.execute(
            text('SELECT guidebook.increment_cache_versions(:document_ids)'),
            {'document_ids': list(documents_to_update)})

    if waypoints_to_update:
        # if an association between waypoint and route was removed/added,
        # the waypoint parents and grand-parents have to be updated
        DBSession.execute(
            text(
                'SELECT guidebook.update_cache_version_of_waypoints(:waypoint_ids)'
            ),  # noqa: E501
            {'waypoint_ids': list(waypoints_to_update)})

    if routes_to_update:
        # if an association between route and outing was removed/added,
        # waypoints (and parents and grand-parents) associated to the route
        # have to be updated
        DBSession.execute(
            text('SELECT guidebook.update_cache_version_of_routes(:route_ids)'
                 ),  # noqa: E501
            {'route_ids': list(routes_to_update)})
def _transfer_tags(source_user_id, target_user_id):
    """Transfer document tags from the source to the target user and
    rebuild the tag log accordingly.
    """
    # documents the target user has already tagged
    rows = DBSession. \
        query(DocumentTag.document_id). \
        filter(DocumentTag.user_id == target_user_id). \
        all()
    already_tagged_ids = [document_id for (document_id,) in rows]

    # move tags for documents the target user has not tagged yet and
    # collect the ids of the documents that were actually transferred
    moved_rows = DBSession.execute(
        DocumentTag.__table__.update().
        where(_and_in(
            DocumentTag.user_id == source_user_id,
            DocumentTag.document_id, already_tagged_ids
        )).
        values(user_id=target_user_id).
        returning(DocumentTag.document_id)
    )

    # drop the tags that could not be moved
    DBSession.execute(
        DocumentTag.__table__.delete().
        where(DocumentTag.user_id == source_user_id)
    )

    # the source user's log entries are replaced by fresh creation entries
    DBSession.execute(
        DocumentTagLog.__table__.delete().
        where(DocumentTagLog.user_id == source_user_id)
    )

    new_log_entries = [{
        'document_id': document_id,
        'user_id': target_user_id,
        # FIXME OK as long as tags are only used for routes:
        'document_type': ROUTE_TYPE,
        'is_creation': True,
        'written_at': func.now(),
    } for (document_id,) in moved_rows]
    if len(new_log_entries):
        DBSession.execute(
            DocumentTagLog.__table__.insert().values(new_log_entries))
def update_cache_version_associations(
        added_associations, removed_associations, ignore_document_id=None):
    """Update the cache versions of every document affected by the given
    association changes (including waypoint/route dependency chains).

    :param added_associations: list of association dicts that were added.
    :param removed_associations: list of association dicts that were
        removed.
    :param ignore_document_id: optional document id to exclude from the
        cache version update.

    Fix: use `set.discard` instead of `set.remove`, which raised
    `KeyError` when `ignore_document_id` did not appear in any of the
    changed associations.
    """
    changed_associations = added_associations + removed_associations
    if not changed_associations:
        return

    documents_to_update = set()
    waypoints_to_update = set()
    routes_to_update = set()

    for association in changed_associations:
        documents_to_update.add(association['parent_id'])
        documents_to_update.add(association['child_id'])

        if association['parent_type'] == WAYPOINT_TYPE and \
                association['child_type'] == ROUTE_TYPE:
            waypoints_to_update.add(association['parent_id'])
        elif association['parent_type'] == ROUTE_TYPE and \
                association['child_type'] == OUTING_TYPE:
            routes_to_update.add(association['parent_id'])

    if ignore_document_id is not None:
        documents_to_update.discard(ignore_document_id)

    if documents_to_update:
        # update the cache version of the documents of added and removed
        # associations
        DBSession.execute(
            text('SELECT guidebook.increment_cache_versions(:document_ids)'),
            {'document_ids': list(documents_to_update)}
        )

    if waypoints_to_update:
        # if an association between waypoint and route was removed/added,
        # the waypoint parents and grand-parents have to be updated
        DBSession.execute(
            text('SELECT guidebook.update_cache_version_of_waypoints(:waypoint_ids)'),  # noqa: E501
            {'waypoint_ids': list(waypoints_to_update)}
        )

    if routes_to_update:
        # if an association between route and outing was removed/added,
        # waypoints (and parents and grand-parents) associated to the route
        # have to be updated
        DBSession.execute(
            text('SELECT guidebook.update_cache_version_of_routes(:route_ids)'),  # noqa: E501
            {'route_ids': list(routes_to_update)}
        )
def update_cache_version(document):
    """Update the cache version of `document` and the documents associated
    to it.
    """
    stmt = text('SELECT guidebook.update_cache_version(:document_id, :type)')
    DBSession.execute(
        stmt, {'document_id': document.document_id, 'type': document.type})
def transfer_associations(source_document_id, target_document_id):
    """Re-point all associations (and their log entries) from the source
    document to the target document. Associations that would duplicate an
    existing association of the target are dropped instead of moved.
    """
    # documents the target is already associated with, as children ...
    child_rows = DBSession. \
        query(Association.child_document_id). \
        filter(Association.parent_document_id == target_document_id). \
        all()
    existing_child_ids = [child_id for (child_id, ) in child_rows]

    # ... and as parents
    parent_rows = DBSession. \
        query(Association.parent_document_id). \
        filter(Association.child_document_id == target_document_id). \
        all()
    existing_parent_ids = [
        parent_id for (parent_id, ) in parent_rows
    ]

    # re-point the associations of the source document, unless the target
    # already has an equivalent association
    DBSession.execute(Association.__table__.update().where(
        _and_in(
            Association.parent_document_id == source_document_id,
            Association.child_document_id,
            existing_child_ids)).values(
        parent_document_id=target_document_id))
    DBSession.execute(Association.__table__.update().where(
        _and_in(
            Association.child_document_id == source_document_id,
            Association.parent_document_id,
            existing_parent_ids)).values(
        child_document_id=target_document_id))

    # drop the associations that could not be moved
    DBSession.execute(Association.__table__.delete().where(
        or_(Association.child_document_id == source_document_id,
            Association.parent_document_id == source_document_id)))

    # same treatment for the association log entries
    DBSession.execute(AssociationLog.__table__.update().where(
        _and_in(
            AssociationLog.parent_document_id == source_document_id,
            AssociationLog.child_document_id,
            existing_child_ids)).values(
        parent_document_id=target_document_id,
        written_at=func.now()))
    DBSession.execute(AssociationLog.__table__.update().where(
        _and_in(
            AssociationLog.child_document_id == source_document_id,
            AssociationLog.parent_document_id,
            existing_parent_ids)).values(
        child_document_id=target_document_id,
        written_at=func.now()))
    DBSession.execute(AssociationLog.__table__.delete().where(
        or_(AssociationLog.child_document_id == source_document_id,
            AssociationLog.parent_document_id == source_document_id)))
def _unregister_from_mailinglists(user_id):
    """Remove every mailing-list subscription of the given user."""
    # scalar subquery resolving the user's e-mail address
    user_email = DBSession.query(User.email). \
        filter(User.id == user_id).subquery()
    stmt = Mailinglist.__table__.delete().where(
        Mailinglist.email == user_email)
    DBSession.execute(stmt)