def update(self, session: Session, title: str, project_ids: ProjectIDListT,
           provider_user: Any, contact_user: Any,
           citation: str, abstract: str, description: str,
           creator_users: List[Any], associate_users: List[Any],
           creator_orgs: List[Any], associate_orgs: List[Any]):
    """
    Update the collection: plain fields, provider/contact users, and the
    per-role user & organisation associations. Commits the session.
    """
    project_ids.sort()
    # TODO: projects update using given list
    assert project_ids == self.project_ids, "Cannot update composing projects yet"
    # Redo sanity check & aggregation as underlying projects might have changed
    self._add_composing_projects(session, project_ids)
    the_coll = self._collection
    coll_id = the_coll.id
    # Simple fields update
    the_coll.title = title
    the_coll.citation = citation
    the_coll.abstract = abstract
    the_coll.description = description
    # Copy provider user id, if any
    if provider_user is not None:
        the_coll.provider_user_id = provider_user.id
    # Copy contact user id, if any
    if contact_user is not None:
        the_coll.contact_user_id = contact_user.id
    # Users per role: wipe existing associations then re-create from given lists,
    # which avoids any diff-ing logic.
    session.query(CollectionUserRole). \
        filter(CollectionUserRole.collection_id == coll_id).delete()
    users_per_role = {COLLECTION_ROLE_DATA_CREATOR: creator_users,
                      COLLECTION_ROLE_ASSOCIATED_PERSON: associate_users}
    for role, user_list in users_per_role.items():
        for usr in user_list:
            session.add(CollectionUserRole(collection_id=coll_id,
                                           user_id=usr.id,
                                           role=role))
    # Organisations per role: same wipe-and-recreate strategy
    session.query(CollectionOrgaRole). \
        filter(CollectionOrgaRole.collection_id == coll_id).delete()
    orgs_per_role = {COLLECTION_ROLE_DATA_CREATOR: creator_orgs,
                     COLLECTION_ROLE_ASSOCIATED_PERSON: associate_orgs}
    for role, org_list in orgs_per_role.items():
        for org in org_list:
            session.add(CollectionOrgaRole(collection_id=coll_id,
                                           organisation=org,
                                           role=role))
    session.commit()
def create_job(cls, session: Session, user_id: UserIDT, job_type: str, args: Dict) -> Job:
    """
    Create, persist and return a new Job in Pending state for given user.
    """
    new_job = Job()
    new_job.state = DBJobStateEnum.Pending
    new_job.progress_msg = cls.PENDING_MESSAGE
    # Both timestamps start equal at creation time
    new_job.creation_date = new_job.updated_on = datetime.now()
    new_job.type = job_type
    new_job.owner_id = user_id
    # Serialize launch arguments; internal state & reply start empty
    new_job.params = json_dumps(args)
    new_job.inside = new_job.reply = json_dumps({})
    new_job.messages = json_dumps([])
    session.add(new_job)
    session.commit()
    return new_job
def delete(session: Session, coll_id: CollectionIDT):
    """
    Completely remove the collection.
    Being just a set of project references, the pointed-at projects are not impacted.
    """
    # Wipe the association tables first, as they reference the collection
    for link_entity, link_col in ((CollectionProject, CollectionProject.collection_id),
                                  (CollectionUserRole, CollectionUserRole.collection_id),
                                  (CollectionOrgaRole, CollectionOrgaRole.collection_id)):
        session.query(link_entity).filter(link_col == coll_id).delete()
    # Then remove the collection row itself
    session.query(Collection).filter(Collection.id == coll_id).delete()
    session.commit()
def get_acquisitions(cls, session: Session, sample: Sample) -> List[Acquisition]:
    """ Return all acquisitions belonging to the given sample. """
    return session.query(Acquisition) \
        .join(Sample) \
        .filter(Sample.sampleid == sample.sampleid) \
        .all()
def _add_composing_projects(self, session: Session, project_ids: ProjectIDListT):
    """
    Add the given projects into DB, doing sanity checks.

    Side effects on self._collection: appends the projects and sets the license
    to the most restrictive one among composing projects.

    :raises AssertionError: if some project id is unknown (or, NOTE(review): has
        no sample at all, as the inner join drops sample-less projects), or if a
        sample orig_id appears in two different projects.
    """
    qry: Query = session.query(Project).filter(Project.projid.in_(project_ids))
    # Eager-load samples in the same query, they are all scanned below
    qry = qry.join(Sample, Project.all_samples).options(contains_eager(Project.all_samples))
    db_projects = qry.all()
    assert len(db_projects) == len(project_ids)
    # Loop on projects, adding them and collecting aggregated data
    prj_licenses: Set[LicenseEnum] = set()
    samples_per_project: Dict[str, Project] = {}
    problems: List[str] = []
    a_db_project: Project
    for a_db_project in db_projects:
        self._collection.projects.append(a_db_project)
        prj_licenses.add(cast(LicenseEnum, a_db_project.license))
        for a_sample in a_db_project.all_samples:
            sample_id = a_sample.orig_id
            # Sanity check: sample orig_id must be unique in the collection
            if sample_id in samples_per_project:
                problems.append("Sample with orig_id %s is in both '%s'(#%d) and '%s'(#%d)" %
                                (sample_id,
                                 samples_per_project[sample_id].title,
                                 samples_per_project[sample_id].projid,
                                 a_db_project.title, a_db_project.projid))
            else:
                samples_per_project[sample_id] = a_db_project
    # Set self to most restrictive of all licenses.
    # Guard: max() on an empty set raises ValueError when no project was given,
    # so keep the license untouched in that case.
    if prj_licenses:
        max_restrict = max(DataLicense.RESTRICTION[a_prj_lic] for a_prj_lic in prj_licenses)
        self._collection.license = DataLicense.BY_RESTRICTION[max_restrict]
    # TODO: Default creators using classification history in DB. Knowing that it's partial.
    # Report (brutally) problems
    assert len(problems) == 0, "\n".join(problems)
def __init__(self, session: Session, project_ids: ProjectIDListT):
    """
    Gather the sorted, distinct instrument names used in given projects
    (in all projects when the list is empty).
    """
    qry: Query = session.query(Acquisition.instrument).distinct()
    qry = qry.join(Sample).join(Project)
    if len(project_ids) > 0:
        qry = qry.filter(Project.projid.in_(project_ids))
    qry = qry.order_by(Acquisition.instrument)
    # Drop NULLs and empty strings while unpacking single-column rows
    self.instrument_names: List[InstrumentIDT] = [name for name, in qry.all() if name]
def get_one(session: Session, coll_id: CollectionIDT) -> Optional['CollectionBO']:
    """
    Find a Collection by its ID and return it, None if not found.
    """
    db_coll = session.query(Collection).get(coll_id)
    if db_coll is None:
        return None
    return CollectionBO(db_coll).enrich()
def get_all_object_ids(cls, session: Session, acquis_id: AcquisitionIDT,
                       classif_ids: Optional[ClassifIDListT] = None) \
        -> List[int]:
    """
    Return the object ids inside given acquisition, optionally restricted
    to objects classified with one of given classification ids.
    """
    qry: Query = session.query(ObjectHeader.objid)
    qry = qry.join(Acquisition, and_(ObjectHeader.acquisid == Acquisition.acquisid,
                                     Acquisition.acquisid == acquis_id))
    if classif_ids is not None:
        qry = qry.filter(ObjectHeader.classif_id.in_(classif_ids))
    # Unpack the single-column rows: without the trailing comma, each element
    # would be a 1-item row tuple, not the int promised by the signature.
    return [an_id for an_id, in qry.all()]
def get_sums_by_taxon(cls, session: Session, acquis_id: AcquisitionIDT) \
        -> Dict[ClassifIDT, int]:
    """
    Return the count of objects per classification id, inside given acquisition.
    """
    # Unclassified objects are excluded: a NULL classif_id would make the
    # int() conversion below fail with TypeError.
    res: ResultProxy = session.execute(
        "SELECT o.classif_id, count(1)"
        " FROM obj_head o "
        " WHERE o.acquisid = :acq"
        " AND o.classif_id IS NOT NULL"
        " GROUP BY o.classif_id",
        {"acq": acquis_id})
    return {int(classif_id): int(cnt) for (classif_id, cnt) in res.fetchall()}
def get_for_update(cls, session: Session, job_id: JobIDT) -> 'JobBO':
    """
    Return a single JobBO. If used in a 'with' context, the session will commit on context exit.
    Note: it's not only the Job which will be committed, but the _whole_ session.

    :raises ValueError: if no job exists with this id.
    """
    job = session.query(Job).get(job_id)
    if job is None:
        # Informative message instead of a bare ValueError; callers catching
        # ValueError are unaffected.
        raise ValueError("Job #%d not found" % job_id)
    # Touch the job so the update is visible even if the caller changes nothing else
    job.updated_on = datetime.now()
    ret = JobBO(job)
    ret._session = session
    return ret
def get_sums_by_taxon(cls, session: Session, acquis_id: AcquisitionIDT) \
        -> Dict[ClassifIDT, int]:
    """
    Count the Validated objects per classification id inside given acquisition.
    """
    sql = text("SELECT o.classif_id, count(1)"
               " FROM obj_head o "
               " WHERE o.acquisid = :acq "
               " AND o.classif_id IS NOT NULL "
               " AND o.classif_qual = 'V'"
               " GROUP BY o.classif_id")
    res: Result = session.execute(sql, {"acq": acquis_id})
    ret: Dict[ClassifIDT, int] = {}
    for classif_id, cnt in res.fetchall():
        ret[int(classif_id)] = int(cnt)
    return ret
def get_preferences_per_project(session: Session, user_id: int, project_id: int, key: str) -> Any:
    """
    Get a preference, for given project and user. Keys are not standardized (for now).
    Returns "" when the key, or the whole preference set, is absent.
    """
    the_user: User = session.query(User).get(user_id)
    proj_prefs: UserPreferences = the_user.preferences_for_projects.filter_by(
        project_id=project_id).first()
    if not proj_prefs:
        # No stored preferences at all for this (user, project)
        return ""
    # Preferences are stored as a JSON object
    return json.loads(proj_prefs.json_prefs).get(key, "")
def set_preferences_per_project(session: Session, user_id: int, project_id: int, key: str,
                                value: Any):
    """
    Set preference for a key, for given project and user. The key disappears if set to
    empty string.
    """
    the_user: User = session.query(User).get(user_id)
    proj_prefs: UserPreferences = the_user.preferences_for_projects.filter_by(
        project_id=project_id).first()
    if proj_prefs:
        prefs_dict = json.loads(proj_prefs.json_prefs)
    else:
        # First preference ever for this (user, project) couple
        proj_prefs = UserPreferences()
        proj_prefs.project_id = project_id
        proj_prefs.user_id = user_id
        session.add(proj_prefs)
        prefs_dict = {}
    if value == '':
        # Empty value means removal of the key
        prefs_dict.pop(key, None)
    else:
        prefs_dict[key] = value
    proj_prefs.json_prefs = json.dumps(prefs_dict)
    logger.info("for %s and %d: %s", the_user.name, project_id, proj_prefs.json_prefs)
    session.commit()
def create(session: Session, title: str, project_ids: ProjectIDListT) -> CollectionIDT:
    """ Create using minimum fields. """
    new_coll = Collection()
    new_coll.title = title
    session.add(new_coll)
    # Flush (not commit) so the generated collection ID becomes available
    session.flush()
    coll_bo = CollectionBO(new_coll)
    coll_bo.set_composing_projects(session, project_ids)
    session.commit()
    return coll_bo.id
def read_from_db(self, session: Session, prj_id: ProjectIDT):
    """
    Read the project topology from DB.
    """
    qry: Query = session.query(Sample) \
        .join(Acquisition).join(Process).join(ObjectHeader) \
        .filter(Sample.projid == prj_id) \
        .with_entities(Sample.orig_id, Acquisition.orig_id,
                       Process.orig_id, ObjectHeader.objid)
    for sam_orig_id, acq_orig_id, prc_orig_id, objid in qry.all():
        # Get/create acquisitions for this sample
        objs_for_acq = self.add_association(sam_orig_id, acq_orig_id)
        # Store twin process
        if prc_orig_id is not None:
            self.acquisition_child[acq_orig_id] = prc_orig_id
        # Store this object under its acquisition
        objs_for_acq.add(objid)
def __init__(self, session: Session, project_ids: ProjectIDListT):
    """
    Gather instrument names per project, and globally, for given projects
    (in all projects when the list is empty).
    """
    qry: Query = session.query(Acquisition.instrument)
    qry = qry.join(Sample).join(Project)
    # TODO: WTF WTF just for adding a column to the select
    qry = qry.add_columns(
        text(Project.__table__.name + "." + Project.__table__.c.projid.name))
    # Below SQLAlchemy complains
    # qry = qry.add_columns(Project.projid)
    if len(project_ids) > 0:
        qry = qry.filter(Project.projid.in_(project_ids))
    qry = qry.distinct()
    per_project: Dict[ProjectIDT, Set[InstrumentIDT]] = {}
    all_names: Set[InstrumentIDT] = set()
    for ins_name, projid in qry.all():
        if not ins_name:
            # Filter NULL & empty strings
            continue
        per_project.setdefault(projid, set()).add(ins_name)
        all_names.add(ins_name)
    self.by_project = per_project
    self.instrument_names = sorted(all_names)
def __init__(self, session: Session, acquisition_id: AcquisitionIDT):
    """ Build a BO around the Acquisition with given ID. """
    super().__init__(session)
    # NOTE: .get() returns None when the ID is unknown
    db_acquis = session.query(Acquisition).get(acquisition_id)
    self.acquis = db_acquis
def __init__(self, session: Session, sample_id: SampleIDT):
    """ Build a BO around the Sample with given ID. """
    super().__init__(session)
    # NOTE: .get() returns None when the ID is unknown
    db_sample = session.query(Sample).get(sample_id)
    self.sample = db_sample
def get_one(cls, session: Session, job_id: JobIDT) -> Optional['JobBO']:
    """ Return the JobBO with given ID, None if the job does not exist. """
    db_job = session.query(Job).get(job_id)
    return None if db_job is None else JobBO(db_job)
def __init__(self, session: Session, process_id: ProcessIDT):
    """ Build a BO around the Process with given ID. """
    super().__init__(session)
    # NOTE: .get() returns None when the ID is unknown
    db_process = session.query(Process).get(process_id)
    self.process = db_process