def delete(self, current_user_id: int, prj_id: int, only_objects: bool) -> Tuple[int, int, int, int]:
    """
    Erase a project's objects, and optionally the project itself.

    :param current_user_id: The user asking for the deletion; must have
        ADMINISTRATE rights on the project (checked first, raises otherwise).
    :param prj_id: The project to empty or remove.
    :param only_objects: If True, keep the (now empty) project and refresh
        its statistics; if False, remove the project row as well.
    :return: 4-tuple (deleted object rows, 0, deleted image rows, removed image files).
    """
    # Security barrier: verify administration rights before touching anything.
    _user, _prj = RightsBO.user_wants(self.session, current_user_id, Action.ADMINISTRATE, prj_id)
    # Gather every object id of the project and wrap them into a set abstraction.
    object_ids = ProjectBO.get_all_object_ids(self.session, prj_id=prj_id)
    enumerated = EnumeratedObjectSet(self.session, object_ids)
    # Physical file removal runs in a parallel thread while DB queries proceed.
    file_remover = VaultRemover(self.link_src, logger).do_start()
    # Chunked deletion of object rows; image files are streamed to the remover.
    deleted_objs, image_rows, image_files = enumerated.delete(self.DELETE_CHUNK_SIZE,
                                                              file_remover.add_files)
    ProjectBO.delete_object_parents(self.session, prj_id)
    if not only_objects:
        # Full removal: drop the project itself too.
        ProjectBO.delete(self.session, prj_id)
    else:
        # Project survives: refresh taxonomy stats first, as plain stats depend on them.
        # All counts should now be 0.
        ProjectBO.update_taxo_stats(self.session, prj_id)
        ProjectBO.update_stats(self.session, prj_id)
    self.session.commit()
    # Block until every queued file was actually removed.
    file_remover.wait_for_done()
    return deleted_objs, 0, image_rows, len(image_files)
def _do_merge(self, dest_prj: Project):
    """
    Real merge operation: fold the source project's content into the
    destination (self) project, de-duplicating Samples and Acquisitions
    which share an orig_id on both sides, then erase the source project.
    """
    # Loop over involved tables and remap free columns
    for a_mapped_tbl in MAPPED_TABLES:
        remaps = self.remap_operations.get(a_mapped_tbl)
        # Do the remappings if any
        if remaps is not None:
            logger.info("Doing re-mapping in %s: %s", a_mapped_tbl.__tablename__, remaps)
            ProjectBO.remap(self.session, self.src_prj_id, a_mapped_tbl, remaps)
    # Collect orig_id of existing parents (Samples/Acquisitions) on both sides
    dest_parents = InBundle.fetch_existing_parents(self.ro_session, prj_id=self.prj_id)
    src_parents = InBundle.fetch_existing_parents(self.ro_session, prj_id=self.src_prj_id)
    # Compute needed projections in order to keep orig_id unicity.
    # From usage below: dicts keyed by src-side id, valued with the matching
    # dest-side id, for entities present (same orig_id) in both projects.
    common_samples = self.get_ids_for_common_orig_id(
        Sample, dest_parents, src_parents)
    common_acquisitions = self.get_ids_for_common_orig_id(
        Acquisition, dest_parents, src_parents)
    # Align foreign keys, to Project, Sample and Acquisition
    for a_fk_to_proj_tbl in [
        Sample, Acquisition, ObjectHeader, ParticleProject
    ]:
        upd: Query = self.session.query(a_fk_to_proj_tbl)
        if a_fk_to_proj_tbl == Sample:
            # Move (i.e. change project) samples which are 'new' from merged project,
            # so take all of them from src project...
            upd = upd.filter(
                a_fk_to_proj_tbl.projid == self.src_prj_id)  # type: ignore
            # ...but not the ones with same orig_id, which are presumably equal.
            upd = upd.filter(
                Sample.sampleid != all_(list(common_samples.keys())))
            # And update the column
            upd_values = {'projid': self.prj_id}
        elif a_fk_to_proj_tbl == Acquisition:
            # Acquisitions which were created, in source, under new samples, will 'follow'
            # them during above move, thanks to the FK on acq_sample_id.
            # BUT some acquisitions were potentially created in source project, inside
            # forked samples. They need to be attached to the dest (self) corresponding sample.
            if len(common_samples) > 0:
                # Build a CTE with values for the update: (src sample id, dst sample id) pairs.
                smp_cte = values_cte("upd_smp", ("src_id", "dst_id"),
                                     [(k, v) for k, v in common_samples.items()])
                # Correlated lookup: dst_id for the row's acq_sample_id, if present in the CTE.
                smp_subqry = self.session.query(smp_cte.c.column2).filter(
                    smp_cte.c.column1 == Acquisition.acq_sample_id)
                upd_values = {
                    'acq_sample_id':
                        func.coalesce(
                            smp_subqry.scalar_subquery(),  # type: ignore
                            Acquisition.acq_sample_id)
                }
                # Only touch acquisitions attached to one of the duplicated samples.
                upd = upd.filter(Acquisition.acq_sample_id == any_(
                    list(common_samples.keys())))
                # upd = upd.filter(Acquisition.acquisid != all_(list(common_acquisitions.keys())))
            if len(common_samples) == 0:
                # Nothing to do. There were only new samples, all of them moved to self.
                continue
        elif a_fk_to_proj_tbl == ObjectHeader:
            # Generated SQL looks like:
            # with upd_acq (src_id, dst_id) as (values (5,6), (7,8))
            # update obj_head
            # set acquisid = coalesce((select dst_id from upd_acq where acquisid=src_id), acquisid)
            # where acquisid in (select src_id from upd_acq)
            if len(common_acquisitions) > 0:
                # Object must follow its acquisition
                acq_cte = values_cte(
                    "upd_acq", ("src_id", "dst_id"),
                    [(k, v) for k, v in common_acquisitions.items()])
                acq_subqry = self.session.query(acq_cte.c.column2).filter(
                    acq_cte.c.column1 == ObjectHeader.acquisid)
                upd_values = {
                    'acquisid':
                        func.coalesce(
                            acq_subqry.scalar_subquery(),  # type:ignore
                            ObjectHeader.acquisid)
                }
                upd = upd.filter(ObjectHeader.acquisid == any_(
                    list(common_acquisitions.keys())))
            if len(common_acquisitions) == 0:
                # Nothing to do. There were only new acquisitions, all of them moved to self.
                continue
        else:
            # For Particle project
            upd = upd.filter(
                ParticleProject.projid == self.src_prj_id)  # type: ignore
            upd_values = {'projid': self.prj_id}
        rowcount = upd.update(values=upd_values, synchronize_session=False)
        table_name = a_fk_to_proj_tbl.__tablename__  # type: ignore
        logger.info("Update in %s: %s rows", table_name, rowcount)
    # Acquisition & twin Process have followed their enclosing Sample
    # Remove the parents which are duplicate from orig_id point of view
    for a_fk_to_proj_tbl in [Acquisition, Sample]:
        to_del: Query = self.session.query(a_fk_to_proj_tbl)
        if a_fk_to_proj_tbl == Acquisition:
            # Remove conflicting acquisitions, they should be empty?
            # NOTE(review): their objects were re-attached above, but this is not
            # re-verified here — confirm nothing can still reference them.
            to_del = to_del.filter(Acquisition.acquisid == any_(
                list(common_acquisitions.keys())))  # type: ignore
        elif a_fk_to_proj_tbl == Sample:
            # Remove conflicting samples
            to_del = to_del.filter(Sample.sampleid == any_(
                list(common_samples.keys())))  # type: ignore
        rowcount = to_del.delete(synchronize_session=False)
        table_name = a_fk_to_proj_tbl.__tablename__  # type: ignore
        logger.info("Delete in %s: %s rows", table_name, rowcount)
    # Persist the merged free-column mappings into the destination project row.
    self.dest_augmented_mappings.write_to_project(dest_prj)
    # Destination project receives the union of privileges from both projects.
    ProjectPrivilegeBO.generous_merge_into(self.session, self.prj_id,
                                           self.src_prj_id)
    # Completely erase the source project
    ProjectBO.delete(self.session, self.src_prj_id)