def delete(self, current_user_id: int, prj_id: int, only_objects: bool) -> Tuple[int, int, int, int]:
    """
    Erase all objects inside the given project, and the project itself unless
    only_objects is set.

    :param current_user_id: the user requesting the deletion, must administrate the project.
    :param prj_id: the project to erase.
    :param only_objects: when True, keep the (now empty) project row itself.
    :return: (nb deleted objects, 0, nb deleted image rows, nb removed image files).
    """
    # Security barrier: caller must be able to administrate the project
    _current_user, _project = RightsBO.user_wants(self.session, current_user_id, Action.ADMINISTRATE, prj_id)
    # Enumerate everything in the project and wrap it into a single big set
    doomed_ids = ProjectBO.get_all_object_ids(self.session, prj_id=prj_id)
    doomed = EnumeratedObjectSet(self.session, doomed_ids)
    # Physical file removal runs in a parallel thread while DB queries proceed
    file_remover = VaultRemover(self.link_src, logger).do_start()
    # DB-side deletion, streaming image files to the remover chunk by chunk
    nb_objs, nb_img_rows, img_files = doomed.delete(self.DELETE_CHUNK_SIZE, file_remover.add_files)
    ProjectBO.delete_object_parents(self.session, prj_id)
    if only_objects:
        # The project survives: refresh its stats, which should now all be 0.
        # Taxo stats go first since plain stats derive from them.
        ProjectBO.update_taxo_stats(self.session, prj_id)
        ProjectBO.update_stats(self.session, prj_id)
    else:
        ProjectBO.delete(self.session, prj_id)
    self.session.commit()
    # Don't return until every file was handled on disk as well
    file_remover.wait_for_done()
    return nb_objs, 0, nb_img_rows, len(img_files)
def delete(self, current_user_id: UserIDT, object_ids: ObjectIDListT) -> Tuple[int, int, int, int]:
    """
    Remove from DB all the objects with ID in given list.

    :param current_user_id: the requesting user, must administrate every impacted project.
    :param object_ids: the IDs of the objects to remove.
    :return: (nb deleted objects, 0, nb deleted image rows, nb removed image files).
    """
    target = EnumeratedObjectSet(self.session, object_ids)
    # Security check: the objects may span several projects, each must be administrable
    impacted_projects = target.get_projects_ids()
    for a_proj_id in impacted_projects:
        RightsBO.user_wants(self.session, current_user_id, Action.ADMINISTRATE, a_proj_id)
    # Physical file removal runs in a parallel thread while DB queries proceed
    file_remover = VaultRemover(self.link_src, logger).do_start()
    # DB-side deletion, streaming image files to the remover chunk by chunk
    nb_objs, nb_img_rows, img_files = target.delete(self.CHUNK_SIZE, file_remover.add_files)
    # Refresh aggregates on every touched project.
    # Taxo stats go first since plain stats derive from them.
    for a_proj_id in impacted_projects:
        ProjectBO.update_taxo_stats(self.session, a_proj_id)
        ProjectBO.update_stats(self.session, a_proj_id)
    self.session.commit()
    # Don't return until every file was handled on disk as well
    file_remover.wait_for_done()
    return nb_objs, 0, nb_img_rows, len(img_files)
def update_db_stats(self):
    """
    Refresh the database aggregates for every project inside the collection,
    then accumulate the collection-wide validated-object count.
    """
    proj_ids = [one_proj.projid for one_proj in self.collection.projects]
    for one_proj_id in proj_ids:
        # Taxonomy stats must be consistent before they are read back below
        ProjectBO.update_taxo_stats(self.session, projid=one_proj_id)
        # Geography is propagated upwards from objects, for each project in the collection
        Sample.propagate_geo(self.session, prj_id=one_proj_id)
    # Sum validated counts over all projects of the collection
    one_stat: ProjectTaxoStats
    for one_stat in ProjectBO.read_taxo_stats(self.session, proj_ids, []):
        self.validated_count += one_stat.nb_validated
def reset_to_predicted(self, current_user_id: UserIDT, proj_id: ProjectIDT, filters: ProjectFilters) -> None:
    """
    Query the given project with given filters, and flip the resulting objects
    back to the 'predicted' state.

    :param current_user_id: the requesting user, must administrate the project.
    :param proj_id: the project to operate on.
    :param filters: the object selection criteria.
    """
    # Security check: caller must be able to administrate the project
    RightsBO.user_wants(self.session, current_user_id, Action.ADMINISTRATE, proj_id)
    # Collect the IDs of the objects matching the filters
    matching_rows = self.query(current_user_id, proj_id, filters)[0]
    target_ids = [a_row[0] for a_row in matching_rows]
    EnumeratedObjectSet(self.session, target_ids).reset_to_predicted()
    # Refresh project aggregates; taxo stats go first since plain stats derive from them
    ProjectBO.update_taxo_stats(self.session, proj_id)
    ProjectBO.update_stats(self.session, proj_id)
    self.session.commit()
def revert_to_history(self, current_user_id: UserIDT, proj_id: ProjectIDT, filters: ProjectFilters,
                      dry_run: bool, target: Optional[int]) -> Tuple[List[HistoricalLastClassif], ClassifSetInfoT]:
    """
    Revert the filtered object set to its classification history; when dry_run,
    only simulate and report what would change.

    :param current_user_id: the requesting user, must administrate the project.
    :param proj_id: the project to operate on.
    :param filters: the object selection criteria.
    :param dry_run: when True, evaluate the impact without writing anything.
    :param target: optional specific history point to revert to.
    :return: the impacted historical classifications, plus display names for them
             (names are empty when not a dry run).
    """
    # Security check: caller must be able to administrate the project
    RightsBO.user_wants(self.session, current_user_id, Action.ADMINISTRATE, proj_id)
    # Collect the objects matching the filters
    matching_rows = self.query(current_user_id, proj_id, filters)[0]
    subject = EnumeratedObjectSet(self.session, [a_row[0] for a_row in matching_rows])
    # We don't revert to a previous version in history from same annotator
    skip_annotator: Optional[int] = None
    raw_annotator = filters.get('filt_last_annot', None)
    if raw_annotator is not None:
        try:
            skip_annotator = int(raw_annotator)
        except ValueError:
            # Best-effort parse: a non-numeric filter value is simply ignored
            pass
    if dry_run:
        # Simulation: compute what would be done
        impact = subject.evaluate_revert_to_history(target, skip_annotator)
        # And resolve names for display
        classifs = TaxonomyBO.names_with_parent_for(self.session, self.collect_classif(impact))
    else:
        # Do the real thing
        impact = subject.revert_to_history(target, skip_annotator)
        classifs = {}
    # Refresh project aggregates; taxo stats go first since plain stats derive from them
    ProjectBO.update_taxo_stats(self.session, proj_id)
    ProjectBO.update_stats(self.session, proj_id)
    self.session.commit()
    # Give feedback
    return impact, classifs