def upgrade(migrate_engine):
    """Migrate dataset 'members' tag values onto each dataset's own values
    collection, then remove the now-redundant 'members' tags."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta = MetaData()
    meta.bind = migrate_engine
    maptables(meta)
    DBSession.configure(bind=migrate_engine)
    # FIX: use print() (works on both Python 2 and 3); the original used the
    # Python-2-only print statement.
    print("Mapping dataset members to values")
    for ds in DBSession.query(Taggable).filter(
            Taggable.resource_type == 'dataset'):
        values = []
        members = ds.findtag('members')
        if members:
            for x in members.values:
                # detach so the value can be re-attached under the dataset
                make_transient(x)
                values.append(x)
        ds.values = values
        for x in ds.values:
            DBSession.add(x)
        if ds.resource_user_type == 'datasets':
            ds.resource_user_type = None
    transaction.commit()
    # second pass: drop the obsolete 'members' tag from each dataset
    for ds in DBSession.query(Taggable).filter(
            Taggable.resource_type == 'dataset'):
        members = ds.findtag('members')
        if members:
            ds.children.remove(members)
    print("removed tags")
    transaction.commit()
def test_deleted_flag(self):
    """A deleted instance leaves the session on flush, cannot be re-added
    while deleted, and can rejoin once made transient."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    sess = sessionmaker()()

    user = User(name='u1')
    sess.add(user)
    sess.commit()

    # flushing a delete removes the instance from the session
    sess.delete(user)
    sess.flush()
    assert user not in sess
    assert_raises(sa.exc.InvalidRequestError, sess.add, user)

    # rollback restores session membership
    sess.rollback()
    assert user in sess

    # a committed delete also blocks re-adding...
    sess.delete(user)
    sess.commit()
    assert user not in sess
    assert_raises(sa.exc.InvalidRequestError, sess.add, user)

    # ...until the instance is made transient again
    make_transient(user)
    sess.add(user)
    sess.commit()
    eq_(sess.query(User).count(), 1)
def detach(self):
    """Remove this instance from the global session, clear its primary key,
    and return it as a transient object ready for re-insertion."""
    if self in g.db:
        g.db.expunge(self)
    make_transient(self)
    # drop the PK so a later add() INSERTs a new row
    self.id = None
    return self
def new_version(self, session):
    """Turn this persistent instance into a brand-new row (INSERT),
    rebuilding its 'elements' collection without change history."""
    # convert to an INSERT
    make_transient(self)
    self.id = None

    # history of the 'elements' collection.
    # this is a tuple of groups: (added, unchanged, deleted)
    hist = attributes.get_history(self, "elements")

    # rewrite the 'elements' collection
    # from scratch, removing all history
    attributes.set_committed_value(self, "elements", {})

    # new elements in the "added" group
    # are moved to our new collection.
    for elem in hist.added:
        self.elements[elem.name] = elem

    # copy elements in the 'unchanged' group.
    # the new ones associate with the new ConfigData,
    # the old ones stay associated with the old ConfigData
    for elem in hist.unchanged:
        self.elements[elem.name] = ConfigValueAssociation(
            elem.config_value
        )
def save(self):
    """Persist this model in a short-lived session, leaving the instance
    detached (not transient) so it stays usable in other threads.

    Raises DbException on unexpected commit failures.
    """
    self.logger.debug(".save()")
    db_session = DbSession()
    # Prevent expiration after the session is closed or object is made transient or disconnected
    db_session.expire_on_commit = False
    try:
        # No need to 'add', committing this class
        db_session.add(self)
        db_session.commit()
        # Keep it detached: transient first (drops the session identity),
        # then back to detached so loaded attributes remain accessible
        make_transient(self)
        make_transient_to_detached(self)
    except InvalidRequestError as e:
        # session is in an unusable state; delegate cleanup
        self.logger.error(
            ".save() - Could not commit to {} table in database".format(
                self.__tablename__),
            exc_info=True)
        self.__cleanupDbSession(db_session, self.__class__.__name__)
    except Exception as e:
        db_session.rollback()
        self.logger.error(
            ".save() - Could not commit to {} table in database".format(
                self.__tablename__),
            exc_info=True)
        raise DbException(
            "Could not commit to {} table in database".format(
                self.__tablename__))
def check_pdf_urls(pdf_urls):
    """Re-check the HTTP status of each PdfUrl in a request pool, then bulk
    update the rows.

    The instances are made transient first so they stay usable while the DB
    connection is released during network I/O.
    """
    for url in pdf_urls:
        make_transient(url)

    # free up the connection while doing net IO
    safe_commit(db)
    db.engine.dispose()

    req_pool = get_request_pool()
    checked_pdf_urls = req_pool.map(get_pdf_url_status, pdf_urls, chunksize=1)
    req_pool.close()
    req_pool.join()

    # strip SQLAlchemy instrumentation before handing plain dicts to
    # bulk_update_mappings
    row_dicts = [x.__dict__ for x in checked_pdf_urls]
    for row_dict in row_dicts:
        row_dict.pop('_sa_instance_state')

    db.session.bulk_update_mappings(PdfUrl, row_dicts)

    start_time = time()
    commit_success = safe_commit(db)
    if not commit_success:
        logger.info(u"COMMIT fail")
    logger.info(u"commit took {} seconds".format(elapsed(start_time, 2)))
def get_config():
    """Return the most recently modified ChargerConfigModel detached from
    the session, or None when no configuration row exists.

    Raises DbException on unexpected query failures.
    """
    db_session = DbSession()
    # Prevent expiration after the session is closed or object is made transient or disconnected
    db_session.expire_on_commit = False
    ccm = None
    try:
        # Should be only one, return last modified
        ccm = db_session.query(ChargerConfigModel) \
            .order_by(desc(ChargerConfigModel.modified_at)) \
            .first()
        # BUGFIX: .first() returns None on an empty table; only detach when
        # a row was actually found, otherwise make_transient(None) raises.
        if ccm is not None:
            # Detach (not transient) from database, allows saving in other Threads
            # https://docs.sqlalchemy.org/en/14/orm/session_api.html#sqlalchemy.orm.make_transient_to_detached
            make_transient(ccm)
            make_transient_to_detached(ccm)
    except InvalidRequestError as e:
        ChargerConfigModel.__cleanupDbSession(db_session,
                                              ChargerConfigModel.__class__)
    except Exception as e:
        # Nothing to roll back
        ChargerConfigModel.logger.error(
            "Could not query from {} table in database".format(
                ChargerConfigModel.__tablename__),
            exc_info=True)
        raise DbException(
            "Could not query from {} table in database".format(
                ChargerConfigModel.__tablename__))
    return ccm
def copy(self):
    """Return this dimension as a detached, transient copy with a fresh
    guid, copied chart/table, and its classification links re-attached."""
    # get a list of classification_links from this dimension before we make any changes
    # TODO: In reality there will only ever be one of these. We should refactor the model to reflect this.
    links = []
    for link in self.classification_links:
        db.session.expunge(link)
        make_transient(link)
        links.append(link)

    # get the existing chart and table before we lift from session
    chart_object = self.dimension_chart
    table_object = self.dimension_table

    # lift dimension from session
    db.session.expunge(self)
    make_transient(self)

    # update disassociated dimension
    self.guid = create_guid(self.title)

    if chart_object:
        self.dimension_chart = chart_object.copy()
    if table_object:
        self.dimension_table = table_object.copy()

    # re-attach the detached links to the (now transient) dimension
    for dc in links:
        self.classification_links.append(dc)

    return self
def login(self, username, password):
    """Return the enabled ``Usuario`` matching the given credentials.

    The password is SHA-512 hashed before comparison.  The returned user is
    detached from the session (transient) and has its role modules ordered.

    Returns ``None`` when no matching user is found.
    """
    password = hashlib.sha512(password.encode()).hexdigest()
    with transaction() as session:
        query = (
            session.query(Usuario)
            .options(joinedload('rol').
                     joinedload('modulos').
                     joinedload('children'))
            .filter(Usuario.username == username)
            .filter(Usuario.password == password)
            .filter(Usuario.enabled)
        )
        usuario = query.first()
        if not usuario:
            return None
        # detach so the user object outlives the transaction
        session.expunge(usuario)
        make_transient(usuario)
        usuario.rol.modulos = self.order_modules(usuario.rol.modulos)
        return usuario
def copy_course(course_id):
    """Clone an EduQACourse (with its instructors and sessions) as a new
    record, then redirect to the new course's detail page.

    The copy gets '(copy)' appended to its names, no academic year, and the
    current user as creator/updater.
    """
    course = EduQACourse.query.get(course_id)
    db.session.expunge(course)
    make_transient(course)
    course.th_name = course.th_name + '(copy)'
    course.th_code = course.th_code + '(copy)'
    course.academic_year = None
    course.creator = current_user
    course.created_at = localtz.localize(datetime.now())
    course.updater = current_user
    course.updated_at = localtz.localize(datetime.now())
    course.id = None

    # re-fetch the original to copy its relationships
    the_course = EduQACourse.query.get(course_id)
    for instructor in the_course.instructors:
        course.instructors.append(instructor)
    for ss in the_course.sessions:
        s = EduQACourseSession(
            start=ss.start,
            end=ss.end,
            course=course,
            type_=ss.type_,
            desc=ss.desc,
        )
        for instructor in ss.instructors:
            s.instructors.append(instructor)
        course.sessions.append(s)
    try:
        db.session.add(course)
        db.session.commit()
    except Exception:
        # BUGFIX: was a bare 'except:' (which also swallows SystemExit /
        # KeyboardInterrupt) and never rolled back the failed transaction.
        db.session.rollback()
        flash(u'ไม่สามารถคัดลอกรายวิชาได้', 'warning')
    else:
        flash(u'รายวิชาได้รับการคัดลอกเรียบร้อยแล้ว', 'success')
    return redirect(url_for('eduqa.show_course_detail', course_id=course.id))
def scrape_pages(pages):
    """Scrape each PageNew in a worker pool and bulk-update the results.

    Instances are made transient first so they remain usable after the DB
    connection is released for the duration of the network I/O.

    Returns the ids of the pages that were successfully scraped.
    """
    for page in pages:
        make_transient(page)

    # free up the connection while doing net IO
    db.session.close()
    db.engine.dispose()

    pool = get_worker_pool()
    map_results = pool.map(scrape_page, pages, chunksize=1)
    # drop pages whose scrape returned a falsy result
    scraped_pages = [p for p in map_results if p]
    logger.info('finished scraping all pages')
    pool.close()
    pool.join()

    logger.info('preparing update records')
    # strip SQLAlchemy instrumentation before bulk_update_mappings
    row_dicts = [x.__dict__ for x in scraped_pages]
    for row_dict in row_dicts:
        row_dict.pop('_sa_instance_state')

    logger.info('saving update records')
    db.session.bulk_update_mappings(PageNew, row_dicts)

    for scraped_page in scraped_pages:
        scraped_page.save_first_version_availability()

    scraped_page_ids = [p.id for p in scraped_pages]
    return scraped_page_ids
def get_stock_info_by_symbol(self, symbol):
    """Return the StockInfo row for *symbol*, detached from its session,
    or None when the symbol is unknown."""
    Session = self.conn.get_sessionmake()
    session = Session()
    try:
        stock_info = session.query(StockInfo).filter(
            StockInfo.symbol == symbol).scalar()
        if stock_info is not None:
            # detach so the caller can use it after the session is closed
            make_transient(stock_info)
    finally:
        # BUGFIX: close the session even when the query raises, so the
        # connection is never leaked.
        session.close()
    return stock_info
def get_copy(cls, id):
    """Fetch the instance with *id* and return it as a transient copy with
    no primary key; falls through with the falsy lookup result when the
    instance does not exist."""
    inst = cls.get(id)
    if inst:
        DBSession.expunge(inst)
        make_transient(inst)
        # clear the PK so a later add() INSERTs a new row
        inst.id = None
    return inst
def undo(self):
    """Revert the addition of ``self.database_entry`` to the session."""
    try:
        self.session.delete(self.database_entry)
    except InvalidRequestError:
        # database entry cannot be removed because the last call was not
        # followed by a commit -> use make_transient to revert putting the
        # entry into the pending state
        make_transient(self.database_entry)
def __call__(self):
    """Attach ``self.tag`` to the database entry, reviving the tag if it
    was just deleted in this session."""
    try:
        self.database_entry.tags.append(self.tag)
    except InvalidRequestError:
        # self.tag cannot be added because it was just removed
        # -> put it back to transient state
        make_transient(self.tag)
        self.database_entry.tags.append(self.tag)
def undo(self):
    """Re-attach ``self.tag`` to the database entry, reviving the tag if it
    was just deleted in this session."""
    try:
        self.database_entry.tags.append(self.tag)
    except InvalidRequestError:
        # self.tag cannot be added because it was just removed
        # -> put it back to transient state
        make_transient(self.tag)
        self.database_entry.tags.append(self.tag)
def get_active_filters(self):
    """Load every ActiveItemFilter as a detached object, decoding its
    JSON-encoded ``values`` column into Python objects."""
    session = self.session_maker()
    active = []
    for item_filter in session.query(ActiveItemFilter):
        # detach so the filters outlive the session
        make_transient(item_filter)
        item_filter.values = json.loads(item_filter.values)
        active.append(item_filter)
    return active
def new_version(self, session):
    """Prepare this instance to be INSERTed as a new row on the next
    flush, rather than UPDATEing the existing one."""
    # make us transient (removes persistent
    # identity).
    make_transient(self)

    # set 'id' to None.
    # a new PK will be generated on INSERT.
    self.id = None
def copy_project_sample_annotations(cls, psa):
    """Return *psa* as a transient copy (no PK) with freshly copied locus
    annotations."""
    # FIX: materialize map() into a list — on Python 3 map() is a lazy
    # iterator, which is unreliable when assigned to a relationship
    # collection; the sibling copy_* helpers already do list(map(...)).
    locus_annotations = list(map(
        SampleLocusAnnotation.copy_sample_locus_annotation,
        psa.locus_annotations))
    db.session.expunge(psa)
    make_transient(psa)
    psa.id = None
    psa.locus_annotations = locus_annotations
    return psa
def update_org_id(old_id, new_id, new_parent_id=None, new_name=None):
    """Re-key organization *old_id* to *new_id*, optionally re-parenting
    and renaming it, then repoint all dependent rows and delete the old row.

    Raises ValueError if the organization still has children (it must be a
    leaf to be moved).
    """
    bind = op.get_bind()
    session = Session(bind=bind)
    org = session.query(Organization).get(old_id)
    if not org:
        return
    OT = OrgTree()
    if len(OT.here_and_below_id(old_id)) > 1:
        raise ValueError('organization `{}` cannot be moved '
                         'before children'.format(old_id))
    # detach and re-insert under the new primary key
    make_transient(org)
    org.id = new_id
    if new_parent_id:
        org.partOf_id = new_parent_id
    if new_name:
        org.name = new_name
    session.add(org)
    session.commit()

    # update the related fields to point at new org
    for qb in session.query(QuestionnaireBank).filter_by(
            organization_id=old_id):
        qb.organization_id = new_id
    session.commit()
    for uc in session.query(UserConsent).filter_by(organization_id=old_id):
        uc.organization_id = new_id
    session.commit()
    for uo in session.query(UserOrganization).filter_by(
            organization_id=old_id):
        uo.organization_id = new_id
    session.commit()
    for tou in session.query(ToU).filter_by(organization_id=old_id):
        tou.organization_id = new_id
    session.commit()
    for oi in session.query(OrganizationIdentifier).filter_by(
            organization_id=old_id):
        oi.organization_id = new_id
    session.commit()
    for ol in session.query(OrganizationLocale).filter_by(
            organization_id=old_id):
        ol.organization_id = new_id
    session.commit()
    for oa in session.query(OrganizationAddress).filter_by(
            organization_id=old_id):
        oa.organization_id = new_id
    session.commit()
    # FIX: use a bound parameter instead of interpolating the id into the
    # SQL string.
    session.execute('DELETE FROM organizations WHERE id = :id',
                    {'id': old_id})
def __call__(self):
    """Add ``self.database_entry`` (back) to the session, reviving it from
    the deleted state if necessary."""
    try:
        self.session.add(self.database_entry)
    except InvalidRequestError:
        # database entry cannot be added because it was removed from the
        # database -> use make_transient to send this object back to
        # the transient state
        make_transient(self.database_entry)
        self.session.add(self.database_entry)
def transient_copy(session, inst):
    """Copy a sample, this approach forces a bunch of flushes.

    Detaches *inst*, clears its primary key, and re-adds it so the next
    flush INSERTs a new row.  Returns the same (now re-added) instance.
    """
    from sqlalchemy.orm import make_transient

    session.expunge(inst)
    make_transient(inst)
    # clear the PK so the flush below INSERTs rather than UPDATEs
    inst.id = None
    session.add(inst)
    session.flush()
    return inst
def clone_row(row, event_id=None):
    """Detach *row* and persist it as a brand-new record, optionally
    re-homed under *event_id*.  Returns the same (re-saved) instance."""
    db.session.expunge(row)
    make_transient(row)
    # clear the PK so save_to_db INSERTs a new row
    row.id = None
    if event_id:
        row.event_id = event_id
    save_to_db(row)
    db.session.flush()
    return row
def clone_dataset(dataset_id, **kwargs):
    """Clone dataset *dataset_id* on behalf of the requesting user.

    The clone gets "(Clone)" appended to its name, fresh clone_of/cloned_by
    metadata, and a recomputed hash.  Raises HydraError when the dataset
    does not exist and PermissionError when the user may not clone it.
    """
    user_id = int(kwargs.get('user_id'))

    if dataset_id is None:
        return None

    dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset_id).options(
            joinedload_all('metadata')).first()

    if dataset is None:
        raise HydraError("Dataset %s does not exist." % (dataset_id))

    if dataset is not None and dataset.created_by != user_id:
        owner = DBSession.query(DatasetOwner).filter(
            DatasetOwner.dataset_id == Dataset.dataset_id,
            DatasetOwner.user_id == user_id).first()
        if owner is None:
            raise PermissionError(
                "User %s is not an owner of dataset %s and therefore cannot clone it."
                % (user_id, dataset_id))

    DBSession.expunge(dataset)
    make_transient(dataset)
    dataset.data_name = dataset.data_name + "(Clone)"
    dataset.dataset_id = None
    dataset.cr_date = None

    #Try to avoid duplicate metadata entries if the entry has been cloned previously
    # BUGFIX: the original 'del(m)' only unbound the loop variable and never
    # removed anything; actually remove stale clone_of/cloned_by entries,
    # iterating over a copy so removal during iteration is safe.
    for m in list(dataset.metadata):
        if m.metadata_name in ("clone_of", "cloned_by"):
            dataset.metadata.remove(m)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "clone_of"
    cloned_meta.metadata_val = str(dataset_id)
    dataset.metadata.append(cloned_meta)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "cloned_by"
    cloned_meta.metadata_val = str(user_id)
    dataset.metadata.append(cloned_meta)

    dataset.set_hash()
    DBSession.add(dataset)
    DBSession.flush()

    cloned_dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset.dataset_id).first()

    return cloned_dataset
def clone(model):
    """Detach *model* from the session, make it transient, and strip its
    primary key so it can be saved as a new row.

    :param model: the mapped instance to clone
    :return: the same instance, now transient with ``id`` cleared
    """
    db.session.expunge(model)
    make_transient(model)
    model.id = None
    return model
def run(self):
    """Run data loader: stream every configured entity from the source
    session into the target session as detached, transient copies."""
    source = self.source_session
    target = self.target_session
    for entity in self.entities:
        LOGGER.info('Loading entity {0!r}'.format(entity))
        for record in source.query(entity):
            # detach from the source before handing to the target
            source.expunge(record)
            make_transient(record)
            target.add(record)
def copy_locus_bin_set(cls, lbs):
    """Return *lbs* as a transient copy (no PK) whose bins are themselves
    fresh copies."""
    copied_bins = [Bin.copy_bin(b) for b in lbs.bins]
    db.session.expunge(lbs)
    make_transient(lbs)
    lbs.id = None
    lbs.bins = copied_bins
    return lbs
def test_calculate_amount(self):
    """calculate_amount() on a transient transaction should offset the
    amount by the account group's committed balance (500 - 100 = 400)."""
    account = Tag(user=self.users[0], name="account")
    account_group = TagGroup(user=self.users[0], tags=[account])
    db.add(Transaction(user=self.users[0], amount=500,
                       incomeTagGroup=account_group))
    db.add(Transaction(user=self.users[0], amount=-100,
                       incomeTagGroup=account_group))
    db.flush()
    t = Transaction(user=self.users[0], amount=33,
                    incomeTagGroup=account_group)
    make_transient(t)
    t.calculate_amount()
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(t.amount, 33 - 400)
def copy_artifact_estimator(cls, ae):
    """Return *ae* as a transient copy (no PK) with freshly copied artifact
    equations."""
    assert isinstance(ae, cls)
    equations = [ArtifactEquation.copy_artifact_equation(eq)
                 for eq in ae.artifact_equations]
    db.session.expunge(ae)
    make_transient(ae)
    ae.id = None
    ae.artifact_equations = equations
    return ae
def update_org_id(old_id, new_id, new_parent_id=None, new_name=None):
    """Re-key organization *old_id* to *new_id*, optionally re-parenting
    and renaming it, then repoint all dependent rows and delete the old row.

    Raises ValueError if the organization still has children (it must be a
    leaf to be moved).
    """
    bind = op.get_bind()
    session = Session(bind=bind)
    org = session.query(Organization).get(old_id)
    if not org:
        return
    OT = OrgTree()
    if len(OT.here_and_below_id(old_id)) > 1:
        raise ValueError('organization `{}` cannot be moved '
                         'before children'.format(old_id))
    # detach and re-insert under the new primary key
    make_transient(org)
    org.id = new_id
    if new_parent_id:
        org.partOf_id = new_parent_id
    if new_name:
        org.name = new_name
    session.add(org)
    session.commit()

    # update the related fields to point at new org
    for qb in session.query(QuestionnaireBank).filter_by(organization_id=old_id):
        qb.organization_id = new_id
    session.commit()
    for uc in session.query(UserConsent).filter_by(organization_id=old_id):
        uc.organization_id = new_id
    session.commit()
    for uo in session.query(UserOrganization).filter_by(organization_id=old_id):
        uo.organization_id = new_id
    session.commit()
    for tou in session.query(ToU).filter_by(organization_id=old_id):
        tou.organization_id = new_id
    session.commit()
    for oi in session.query(OrganizationIdentifier).filter_by(organization_id=old_id):
        oi.organization_id = new_id
    session.commit()
    for ol in session.query(OrganizationLocale).filter_by(organization_id=old_id):
        ol.organization_id = new_id
    session.commit()
    for oa in session.query(OrganizationAddress).filter_by(organization_id=old_id):
        oa.organization_id = new_id
    session.commit()
    # FIX: use a bound parameter instead of interpolating the id into the
    # SQL string.
    session.execute('DELETE FROM organizations WHERE id = :id',
                    {'id': old_id})
def clone_dataset(dataset_id, **kwargs):
    """Clone dataset *dataset_id* on behalf of the requesting user.

    The clone gets "(Clone)" appended to its name, fresh clone_of/cloned_by
    metadata, and a recomputed hash.  Raises HydraError when the dataset
    does not exist and PermissionError when the user may not clone it.
    """
    user_id = int(kwargs.get('user_id'))

    if dataset_id is None:
        return None

    dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset_id).options(
            joinedload_all('metadata')).first()

    if dataset is None:
        raise HydraError("Dataset %s does not exist." % (dataset_id))

    if dataset is not None and dataset.created_by != user_id:
        owner = DBSession.query(DatasetOwner).filter(
            DatasetOwner.dataset_id == Dataset.dataset_id,
            DatasetOwner.user_id == user_id).first()
        if owner is None:
            raise PermissionError(
                "User %s is not an owner of dataset %s and therefore cannot clone it."
                % (user_id, dataset_id))

    DBSession.expunge(dataset)
    make_transient(dataset)
    dataset.data_name = dataset.data_name + "(Clone)"
    dataset.dataset_id = None
    dataset.cr_date = None

    #Try to avoid duplicate metadata entries if the entry has been cloned previously
    # BUGFIX: the original 'del(m)' only unbound the loop variable and never
    # removed anything; actually remove stale clone_of/cloned_by entries,
    # iterating over a copy so removal during iteration is safe.
    for m in list(dataset.metadata):
        if m.metadata_name in ("clone_of", "cloned_by"):
            dataset.metadata.remove(m)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "clone_of"
    cloned_meta.metadata_val = str(dataset_id)
    dataset.metadata.append(cloned_meta)

    cloned_meta = Metadata()
    cloned_meta.metadata_name = "cloned_by"
    cloned_meta.metadata_val = str(user_id)
    dataset.metadata.append(cloned_meta)

    dataset.set_hash()
    DBSession.add(dataset)
    DBSession.flush()

    cloned_dataset = DBSession.query(Dataset).filter(
        Dataset.dataset_id == dataset.dataset_id).first()

    return cloned_dataset
def get_lowest_unit(self, stock_symbol):
    """Return the StockLowestUnit row for *stock_symbol*, detached from
    its session, or None when absent."""
    Session = self.conn.get_sessionmake()
    session = Session()
    try:
        stock_lowest_unit = session.query(StockLowestUnit).\
            filter(StockLowestUnit.symbol == stock_symbol).\
            scalar()
        if stock_lowest_unit is not None:
            # detach so the caller can use it after the session is closed
            make_transient(stock_lowest_unit)
    finally:
        # BUGFIX: close the session even when the query raises, so the
        # connection is never leaked.
        session.close()
    return stock_lowest_unit
def get_stock_transaction_by_trans_id(self, trans_id):
    """Return the StockTransaction row for *trans_id*, detached from its
    session, or None when absent."""
    Session = self.conn.get_sessionmake()
    session = Session()
    try:
        stock_transaction = session.query(StockTransaction).filter(
            StockTransaction.trans_id == trans_id).scalar()
        if stock_transaction is not None:
            # detach so the caller can use it after the session is closed
            make_transient(stock_transaction)
    finally:
        # BUGFIX: close the session even when the query raises, so the
        # connection is never leaked.
        session.close()
    return stock_transaction
def detach_from_session(session: Session, an_orm: M) -> M:
    """
    Detach from the session the given object.

    :param session: The session the instance may belong to.
    :param an_orm: A SQLAlchemy produced instance.
    :return: The same instance, detached from *session*.
    """
    if not inspect(an_orm).detached:
        session.expunge(an_orm)
    # NOTE(review): after expunge() above, the instance is normally no
    # longer "in" the session, so this branch looks unreachable in the
    # common path — confirm whether make_transient() is actually intended
    # here.
    if an_orm in session:
        make_transient(an_orm)
    return an_orm
def get_stock_stock_price_range_by_symbol(self, symbol):
    """Return the StockPriceRange row for *symbol*, detached from its
    session, or None when absent."""
    Session = self.conn.get_sessionmake()
    session = Session()
    try:
        stock_price_range = session.query(StockPriceRange).filter(
            StockPriceRange.symbol == symbol).scalar()
        if stock_price_range is not None:
            # detach so the caller can use it after the session is closed
            make_transient(stock_price_range)
    finally:
        # BUGFIX: close the session even when the query raises, so the
        # connection is never leaked.
        session.close()
    return stock_price_range
def create_copy(self, page_guid, page_version, version_type, created_by):
    """Create a new version (or an independent copy) of a measure page.

    ``version_type`` selects the next version number; "copy" additionally
    assigns a new guid/title/URI so the result is an independent page.
    Dimensions and uploads are duplicated onto the new version.

    Raises UpdateAlreadyExists when a non-copy update is already in flight.
    """
    page = self.get_page_with_version(page_guid, page_version)
    next_version = page.next_version_number_by_type(version_type)

    if version_type != "copy" and self.already_updating(
            page.guid, next_version):
        raise UpdateAlreadyExists()

    # capture related objects before the page is lifted from the session
    dimensions = [dimension for dimension in page.dimensions]
    uploads = [upload for upload in page.uploads]

    db.session.expunge(page)
    make_transient(page)

    original_guid = page.guid

    if version_type == "copy":
        page.guid = str(uuid.uuid4())
        page.title = f"COPY OF {page.title}"
        # Duplicate (URI + version) in the same subtopic would mean we can't resolve preview URLs to a single page
        while self.new_uri_invalid(page, page.uri):
            page.uri = f"{page.uri}-copy"

    # reset publication state for the new draft version
    page.version = next_version
    page.status = "DRAFT"
    page.created_by = created_by
    page.created_at = datetime.utcnow()
    page.publication_date = None
    page.published = False
    page.internal_edit_summary = None
    page.external_edit_summary = None
    page.latest = True

    for dimension in dimensions:
        page.dimensions.append(dimension.copy())

    for upload in uploads:
        file_name = upload.file_name
        db.session.expunge(upload)
        make_transient(upload)
        upload.guid = create_guid(file_name)
        page.uploads.append(upload)

    db.session.add(page)
    db.session.commit()

    # the previous version is no longer the latest
    previous_page = page.get_previous_version()
    if previous_page is not None:
        previous_page.latest = False
        db.session.add(previous_page)
        db.session.commit()

    upload_service.copy_uploads(page, page_version, original_guid)

    return page
def new_version(self, session):
    """Mark the current row as superseded and prepare this instance to be
    INSERTed as the next version on flush."""
    # optional - set previous version to have is_current_version=False
    old_id = self.id
    session.query(self.__class__).filter_by(id=old_id).update(
        values=dict(is_current_version=False),
        synchronize_session=False)

    # make us transient (removes persistent
    # identity).
    make_transient(self)

    # increment version_id, which means we have a new PK.
    self.version_id += 1
def test_make_transient_plus_rollback(self):
    """Rolling back after make_transient() on a flushed-deleted instance
    must not raise (regression test for [ticket:2182])."""
    mapper(User, users)
    session = Session()
    user = User(name='test')
    session.add(user)
    session.commit()

    session.delete(user)
    session.flush()
    make_transient(user)
    session.rollback()
def get_stock_cash_by_symbol(self, symbol):
    """Return the StockCash row for *symbol*, detached from its session,
    or None when absent."""
    Session = self.conn.get_sessionmake()
    session = Session()
    try:
        stock_cash = session.query(StockCash).filter(
            StockCash.symbol == symbol).scalar()
        if stock_cash is not None:
            # detach so the caller can use it after the session is closed
            make_transient(stock_cash)
    finally:
        # BUGFIX: close the session even when the query raises, so the
        # connection is never leaked.
        session.close()
    return stock_cash
def copy_folder(self, folder_id):
    """Copy the folder identified by *folder_id* into the current parent
    folder, record an ITEM_COPY event, and return the copy as JSON."""
    folder = get_folder_by_id(self._db_session, folder_id)
    parent_folder = self._get_parent()

    # detach and clear identifiers so a brand-new row is inserted
    self._db_session.expunge(folder)
    make_transient(folder)
    folder.id = None
    folder.folder_id = None
    folder.parent_id = parent_folder.id

    self._db_session.add(folder)
    self._db_session.commit()

    copy_event = EventModel(event_type="ITEM_COPY",
                            source_id=folder.folder_id,
                            source_type="folder")
    self._db_session.add(copy_event)
    self._db_session.commit()

    return json.dumps(folder)
def test_make_transient_plus_rollback(self):
    """Rolling back after make_transient() on a flushed-deleted instance
    must not raise (regression test for [ticket:2182])."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    session = Session()
    user = User(name="test")
    session.add(user)
    session.commit()

    session.delete(user)
    session.flush()
    make_transient(user)
    session.rollback()
def copy_file(self, file_id):
    """Copy the (unlocked) file identified by *file_id* into the current
    parent folder, record an ITEM_COPY event, and return the copy as JSON."""
    file_object = get_file_by_id(self._db_session, file_id)
    self._check_file_lock(file_object)
    parent_folder = self._get_parent()

    # detach and clear identifiers so a brand-new row is inserted
    self._db_session.expunge(file_object)
    make_transient(file_object)
    file_object.id = None
    file_object.file_id = None
    file_object.parent_id = parent_folder.id

    self._db_session.add(file_object)
    self._db_session.commit()

    copy_event = EventModel(event_type='ITEM_COPY',
                            source_id=file_object.file_id,
                            source_type='file')
    self._db_session.add(copy_event)
    self._db_session.commit()

    return json.dumps(file_object)
def create(cls, rapport):
    """Build a rapport from its soknad: copy the soknad's arrangement and
    okonomipost rows onto the rapport, persist it, and advance the soknad's
    state machine."""
    soknad = SoknadRepo.find_by_id(rapport.soknad_id)

    for arrangement in soknad.arrangement:
        make_transient(arrangement)
        rapport.arrangement.append(cls.copy_model_object(arrangement))

    for okonomipost in soknad.okonomipost:
        make_transient(okonomipost)
        rapport.okonomipost.append(cls.copy_model_object(okonomipost))

    rapport = cls.save(rapport)

    # mark the soknad as having its rapport started
    soknad.status = SoknadStateMachine.s_rapport_pabegynt.id
    SoknadRepo.save(soknad)
    return rapport
def test_make_transient(self):
    """make_transient() removes an instance from its session and lets it
    be re-added as a pending object, whether loaded or expunged first."""
    mapper(User, users)
    session = create_session()
    session.add(User(name='test'))
    session.flush()

    # transient after load: leaves the session, re-add lands in 'new'
    loaded = session.query(User).first()
    make_transient(loaded)
    assert loaded not in session
    session.add(loaded)
    assert loaded in session.new

    # same round trip when the instance is expunged explicitly first
    loaded = session.query(User).first()
    session.expunge(loaded)
    make_transient(loaded)
    session.add(loaded)
    assert loaded in session.new
def test_make_transient(self):
    """Exercise make_transient() across loaded, expunged, expired and
    deleted instance states."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    sess = create_session()
    sess.add(User(name='test'))
    sess.flush()

    # transient after load: leaves the session, re-add lands in 'new'
    u1 = sess.query(User).first()
    make_transient(u1)
    assert u1 not in sess
    sess.add(u1)
    assert u1 in sess.new

    # same round trip after an explicit expunge
    u1 = sess.query(User).first()
    sess.expunge(u1)
    make_transient(u1)
    sess.add(u1)
    assert u1 in sess.new

    # test expired attributes
    # get unexpired
    u1 = sess.query(User).first()
    sess.expire(u1)
    make_transient(u1)
    # expired attributes are wiped, not refreshed from the DB
    assert u1.id is None
    assert u1.name is None

    # works twice
    make_transient(u1)

    sess.close()

    u1.name = 'test2'
    sess.add(u1)
    sess.flush()
    assert u1 in sess
    sess.delete(u1)
    sess.flush()
    assert u1 not in sess

    # a deleted-and-flushed instance cannot be re-added...
    assert_raises(sa.exc.InvalidRequestError, sess.add, u1)
    # ...until it is made transient again
    make_transient(u1)
    sess.add(u1)
    sess.flush()
    assert u1 in sess
def get_all_exercises(self):
    """Return all exercises keyed by id as detached transient objects, or
    None when the query fails."""
    result = None
    try:
        exercises = self.session.query(Exercise).order_by(Exercise.id).all()
        result = dict()
        for exercise in exercises:
            # detach so the objects outlive the session
            self.session.expunge(exercise)
            make_transient(exercise)
            result[exercise.id] = exercise
    except Exception as e:
        log.error("---------------------------------------")
        log.error("Error - get_all_exercises(): %s", e)
    log.debug("get_all_exercises() ")
    return result
def copy(self):
    """Duplicate the map named by the 'map_id' matchdict entry for the
    current user, applying any overrides from the request params, and copy
    its features onto the new map.

    Returns a dict with the new map's uuid and a success flag, or an HTTP
    error response when unauthorized / not found.
    """
    id = self.request.matchdict.get("map_id")
    map = DBSession.query(Map).get(id)
    user = self.request.user
    if user is None:
        return HTTPUnauthorized()
    if map is None:
        return HTTPNotFound()

    # detach the loaded map so committing INSERTs a new row
    DBSession.expunge(map)
    make_transient(map)
    map.uuid = None
    params = self.request.params
    if 'title' in params:
        map.title = unicode(params.get('title'))
    if 'description' in params:
        map.description = unicode(params.get('description'))
    # copies start private and owned by the requesting user
    map.public = False
    map.user_login = user.username
    map.category_id = None
    if 'category_id' in params:
        cat = params.get('category_id')
        map.category_id = None if cat == '' else cat
    if 'public' in params:
        str = unicode(params.get('public'))
        if str.lower() == u'true':
            map.public = True
    map.create_date = None
    map.update_date = None
    DBSession.add(map)
    DBSession.commit()

    if map.uuid is not None:
        # copy the original map's features onto the new map
        features = DBSession.query(Feature).filter(
            Feature.map_id == id).all()
        for f in features:
            DBSession.expunge(f)
            make_transient(f)
            f.id = None
            map.features.append(f)
        DBSession.commit()

    return {'success': map.uuid is not None, 'uuid': map.uuid}
def index(self):
    """Demo route: clone the first user into a 'fooobar' account (removing
    any previous one) and render the registration page."""
    existing = User.query.filter_by(user_id='fooobar').first()
    if existing:
        db.session.delete(existing)
        db.session.commit()
    db.session.rollback()

    user = User.query.first()
    from sqlalchemy.orm import make_transient
    db.session.expunge(user)
    make_transient(user)

    # clear the PK and overwrite identifying fields for the clone
    user.id = None
    user.user_id = 'fooobar'
    user.update_password('fooobar')
    user.email = '*****@*****.**'
    db.session.add(user)
    db.session.commit()
    return render_template('register/index.html')
def copy_model(self, model, filterfunc=lambda q: q, batch_size=10000):
    """Copy every (filtered) row of *model* from the source session into
    the target, rewriting foreign keys through the already-copied id map.

    :param model: mapped class whose rows are copied
    :param filterfunc: callable applied to the source query to restrict rows
    :param batch_size: commit to the target every this many rows
    """
    fks = self.find_foreign_keys(model)
    i = 0
    with self.source.no_autoflush:
        for ref in filterfunc(self.source.query(model)).yield_per(1000):
            make_transient(ref)
            # repoint each FK column at the id of the already-copied parent
            for fk in fks:
                value = getattr(ref, fk.parent.name)
                setattr(ref, fk.parent.name,
                        self.look_up_reference(fk, value))
            old_id = ref.id
            ref.id = None
            self.target.add(ref)
            self.target.flush()
            assert ref.id is not None
            # remember old->new id so dependent rows copied later re-key
            self.set_reference(model, old_id, ref.id)
            i += 1
            if i % batch_size == 0:
                self.target.commit()
    self.target.commit()