def _flush(self, parent_relationship): """Manually INSERT generated automappings.""" if not self.auto_mappings: return with benchmark("Automapping flush"): current_user_id = login.get_current_user_id() automapping_result = db.session.execute( Automapping.__table__.insert().values( relationship_id=parent_relationship.id, source_id=parent_relationship.source_id, source_type=parent_relationship.source_type, destination_id=parent_relationship.destination_id, destination_type=parent_relationship.destination_type, ) ) automapping_id = automapping_result.inserted_primary_key now = datetime.now() # We are doing an INSERT IGNORE INTO here to mitigate a race condition # that happens when multiple simultaneous requests create the same # automapping. If a relationship object fails our unique constraint # it means that the mapping was already created by another request # and we can safely ignore it. inserter = Relationship.__table__.insert().prefix_with("IGNORE") original = self.order(Stub.from_source(parent_relationship), Stub.from_destination(parent_relationship)) db.session.execute(inserter.values([{ "id": None, "modified_by_id": current_user_id, "created_at": now, "updated_at": now, "source_id": src.id, "source_type": src.type, "destination_id": dst.id, "destination_type": dst.type, "context_id": None, "status": None, "parent_id": parent_relationship.id, "automapping_id": automapping_id} for src, dst in self.auto_mappings if (src, dst) != original])) # (src, dst) is sorted self._set_audit_id_for_issues(automapping_id) cache = get_cache(create=True) if cache: # Add inserted relationships into new objects collection of the cache, # so that they will be logged within event and appropriate revisions # will be created. cache.new.update( (relationship, relationship.log_json()) for relationship in Relationship.query.filter_by( automapping_id=automapping_id, ) )
def _flush(self, parent_relationship):
  """Flush generated automappings through an ORM-created Automapping row."""
  if not self.auto_mappings:
    return
  with self.benchmark("Automapping flush"):
    current_user = get_current_user()
    automapping = Automapping(parent_relationship)
    db.session.add(automapping)
    db.session.flush()  # assign automapping.id before inserting relationships
    now = datetime.now()
    # We are doing an INSERT IGNORE INTO here to mitigate a race condition
    # that happens when multiple simultaneous requests create the same
    # automapping. If a relationship object fails our unique constraint
    # it means that the mapping was already created by another request
    # and we can safely ignore it.
    inserter = Relationship.__table__.insert().prefix_with("IGNORE")
    original = self.relate(Stub.from_source(parent_relationship),
                           Stub.from_destination(parent_relationship))
    db.session.execute(inserter.values([{
        "id": None,
        "modified_by_id": current_user.id,
        "created_at": now,
        "updated_at": now,
        "source_id": src.id,
        "source_type": src.type,
        "destination_id": dst.id,
        "destination_type": dst.type,
        "context_id": None,
        "status": None,
        "parent_id": parent_relationship.id,
        "automapping_id": automapping.id,
    } for src, dst in self.auto_mappings
      if (src, dst) != original]))  # (src, dst) is sorted
    cache = get_cache(create=True)
    if cache:
      # Add inserted relationships into the new-objects collection of the
      # cache, so that they will be logged within the event and appropriate
      # revisions will be created.
      cache.new.update(
          (relationship, relationship.log_json())
          for relationship in Relationship.query.filter_by(
              parent_id=parent_relationship.id,
              modified_by_id=current_user.id,
              created_at=now,
              updated_at=now,
          )
      )
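# Both _flush variants above rely on a Stub helper and a pair-normalizing
# function (`order` in the first variant, `relate` in the second). A minimal
# sketch of what they are assumed to look like; the real definitions live
# elsewhere in the codebase, and the free-function form of `order` here is an
# illustration of the method used above.
from collections import namedtuple


class Stub(namedtuple("Stub", ["type", "id"])):
  """Lightweight, hashable (type, id) reference to a mapped object."""

  @classmethod
  def from_source(cls, relationship):
    return cls(relationship.source_type, relationship.source_id)

  @classmethod
  def from_destination(cls, relationship):
    return cls(relationship.destination_type, relationship.destination_id)


def order(src, dst):
  """Normalize a stub pair so (a, b) and (b, a) produce the same key."""
  return (src, dst) if src < dst else (dst, src)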
def clear_cache(session):
  """Drop the request-scoped cache, if one exists."""
  cache = get_cache()
  if cache:
    cache.clear()
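# `get_cache` is assumed to hand out one Cache per request. A minimal sketch,
# assuming flask.g as the request-scoped storage and a hypothetical Cache
# class with the new/dirty/deleted collections used elsewhere in this module;
# it must run inside a Flask app/request context.
import flask


class Cache(object):
  """Hypothetical request cache tracking new, dirty and deleted objects."""

  def __init__(self):
    self.new = {}
    self.dirty = {}
    self.deleted = {}

  def clear(self):
    self.new.clear()
    self.dirty.clear()
    self.deleted.clear()

  def copy(self):
    snapshot = Cache()
    snapshot.new = dict(self.new)
    snapshot.dirty = dict(self.dirty)
    snapshot.deleted = dict(self.deleted)
    return snapshot


def get_cache(create=False):
  """Return the request-scoped cache, creating it on demand if asked."""
  cache = getattr(flask.g, "cache", None)
  if cache is None and create:
    cache = Cache()
    flask.g.cache = cache
  return cache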
def update_cache_after_flush(session, flush_context):
  """Propagate post-flush session state into an existing cache."""
  cache = get_cache(create=False)
  if cache:
    cache.update_after_flush(session, flush_context)
def update_cache_before_flush(session, flush_context, objects):
  """Record pending session changes in the cache before the flush runs.

  `objects` is part of the before_flush event signature and is unused here.
  """
  cache = get_cache(create=True)
  if cache:
    cache.update_before_flush(session, flush_context)
def update_cache(session, flush_context):
  """Refresh the cache from the session, creating the cache if missing."""
  cache = get_cache(create=True)
  if cache:
    cache.update(session)
def get_modified_objects(session):
  """Flush pending changes and return a snapshot of the request cache."""
  session.flush()
  cache = get_cache()
  if cache:
    return cache.copy()
  return None
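# A minimal sketch of how these hooks would be wired up, assuming SQLAlchemy's
# session event API; where this registration actually lives in the application
# is an assumption, as is the choice of after_commit for clear_cache.
from sqlalchemy import event
from sqlalchemy.orm import Session

# before_flush delivers (session, flush_context, instances) and after_flush
# delivers (session, flush_context), matching the hook signatures above;
# after_commit delivers only the session.
event.listen(Session, "before_flush", update_cache_before_flush)
event.listen(Session, "after_flush", update_cache_after_flush)
event.listen(Session, "after_commit", clear_cache)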