def user_delete(user_id):
    """
    Delete the user with the provided id in the database.
    """
    from core.model.project import Project
    u = User.from_id(user_id)
    if u:
        Project.delete(u.sandbox_id)
    Session().query(User).filter_by(id=user_id).delete(synchronize_session=False)

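# Usage sketch (hypothetical id; assumes the helper is bound onto the model,
# e.g. User.delete = user_delete, as elsewhere in the codebase): deleting a
# user also deletes their sandbox project.
#
#   User.delete(12)
#   assert User.from_id(12) is None
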
def analysis_get_fullpath(self):
    """
    Return the list of projects from the root to the last one
    where the analysis is stored.
    """
    from core.model import Project
    fullpath = []
    project = Project.from_id(self.project_id)
    # Walk up the project tree, prepending each ancestor
    while project is not None:
        fullpath.insert(0, {"id": project.id, "name": project.name})
        project = None if not project.parent_id else Project.from_id(project.parent_id)
    return fullpath

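# Usage sketch (hypothetical ids and names; assumes the helper is bound as
# Analysis.get_fullpath): the result is an ordered breadcrumb from the root
# project down to the project holding the analysis.
#
#   analysis = Analysis.from_id(42)
#   analysis.get_fullpath()
#   # => [{"id": 1, "name": "Exomes"}, {"id": 7, "name": "Family A"}]
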
def user_init(self, loading_depth=0, force=False):
    """
    Init properties of a user:
        - id           : int      : the unique id of the user in the database
        - login        : str      : the login of the user
        - firstname    : str      : firstname of the user
        - lastname     : str      : lastname of the user
        - email        : str      : email
        - function     : str      : the function of the user
        - location     : str      : the location of the user
        - is_admin     : bool     : is the user an admin or not
        - is_activated : bool     : is the user activated or not
        - sandbox_id   : int      : this id refers to the sandbox project of the user
        - update_date  : datetime : the last time the object has been updated
        - create_date  : datetime : the date when the object was created
    If loading_depth is > 0, the following properties will be loaded (max depth level is 2):
        - sandbox      : Project  : the sandbox project of the user
    """
    from core.model.project import Project
    # Avoid infinite recursion
    if hasattr(self, "loading_depth") and not force:
        return
    else:
        self.loading_depth = min(2, loading_depth)
    try:
        self.sandbox = None
        if self.loading_depth > 0:
            self.sandbox = Project.from_id(self.sandbox_id, self.loading_depth - 1)
    except Exception as ex:
        raise RegovarException("User data corrupted (id={}).".format(self.id), "", ex)

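# Usage sketch (hypothetical id; assumes User.from_id forwards loading_depth
# to user_init): loading_depth controls how deep related objects are loaded.
# It is capped at 2 and decremented at each level, which bounds the mutual
# loading between User and Project.
#
#   u = User.from_id(1)     # depth 0: u.sandbox is None
#   u = User.from_id(1, 1)  # depth 1: u.sandbox is a Project instance
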
def analysis_init(self, loading_depth=0, force_refresh=False):
    """
    Init properties of an analysis:
        - id                 : int      : the unique id of the analysis in the database
        - project_id         : int      : the id of the project that owns this analysis
        - name               : str      : the name of the analysis
        - comment            : str      : an optional comment
        - settings           : json     : parameters used to init the analysis
        - fields             : [str]    : the list of field ids to display
        - filter             : json     : the last filter applied
        - order              : [str]    : the list of field ids used to order results
        - selection          : [str]    : the list of ids of selected variants
        - create_date        : datetime : the date when the analysis was created
        - update_date        : datetime : the last time the analysis has been updated
        - total_variants     : int      : the total number of variants in this analysis
        - reference_id       : int      : refers to the id of the reference used for this analysis
        - computing_progress : json     : used while the working table is computed to store the current progress, errors, messages, ...
        - status             : enum     : the status of the analysis: 'empty', 'computing', 'ready', 'error'
        - filters_ids        : [int]    : the list of ids of filters saved for this analysis
        - samples_ids        : [int]    : the list of ids of samples used for the analysis
        - files_ids          : [int]    : the list of ids of files associated to the analysis (via the analysis_file table)
        - attributes         : json     : the list of attributes defined for this analysis
        - fullpath           : [json]   : the list of folders from the root to the analysis [{"id": int, "name": str}, ...]
        - statistics         : json     : statistics about the analysis
    If loading_depth is > 0, the following properties will be loaded (max depth level is 2):
        - project            : Project  : the project that owns the analysis
        - samples            : [Sample] : the list of samples owned by the analysis
        - filters            : [Filter] : the list of Filters created in the analysis
        - files              : [File]   : the list of Files associated to the analysis (via the analysis_file table)
    """
    from core.model.project import Project
    # With depth loading, sqlalchemy may return the same object several times.
    # Take care not to erase the good depth level, and avoid infinite recursion.
    if hasattr(self, "loading_depth") and not force_refresh and self.loading_depth >= loading_depth:
        return
    else:
        self.loading_depth = min(2, loading_depth)
    try:
        if not self.filter:
            self.filter = ANALYSIS_DEFAULT_FILTER
        self.filters_ids = self.get_filters_ids()
        self.samples_ids = AnalysisSample.get_samples_ids(self.id)
        self.attributes = self.get_attributes()
        self.files_ids = AnalysisFile.get_files_ids(self.id)
        self.fullpath = self.get_fullpath()
        self.project = None
        self.samples = []
        self.filters = []
        self.files = []
        self.panels = []
        if self.loading_depth > 0:
            self.project = Project.from_id(self.project_id, self.loading_depth - 1)
            self.samples = AnalysisSample.get_samples(self.id, self.loading_depth - 1)
            self.filters = self.get_filters(self.loading_depth - 1)
            self.files = AnalysisFile.get_files(self.id, self.loading_depth - 1)
    except Exception as ex:
        raise RegovarException("Analysis data corrupted (id={}).".format(self.id), "", ex)

def user_new(login=None):
    """
    Return a new user object.
    """
    from core.model.project import Project
    # Check login or create a fake one if not provided
    if not login:
        login = "******".format(User.count() + 1)
    # Create the sandbox project of the user
    sandbox = Project.new()
    sandbox.load({"comment": "My sandbox", "is_sandbox": True})
    u = User(login=login, sandbox_id=sandbox.id)
    try:
        u.save()
    except Exception as ex:
        raise RegovarException("Unable to create new user with provided information.", "", ex)
    u.init()
    return u

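# Usage sketch (assumes the helper is bound as User.new): a user and their
# sandbox project are always created together, so sandbox_id is set on the
# freshly saved user.
#
#   u = User.new(login="jdoe")
#   assert u.sandbox_id is not None
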
def subject_get_projects(self, loading_depth=0):
    """
    Return the list of projects linked to the subject.
    """
    from core.model.project import Project
    return Project.from_ids(self.get_projects_ids(), loading_depth)

def analysis_load(self, data):
    """
    Helper to update several parameters at the same time.
    Note that dynamic properties (project, samples, files, attributes)
    cannot be updated with this method; however, you can update project_id.
    To update samples you must use the dedicated model object: AnalysisSample.
    """
    from core.model.project import Project
    settings = False
    need_to_clean_db = False
    try:
        if "name" in data.keys():
            self.name = check_string(data['name'])
        if "project_id" in data.keys():
            self.project_id = check_int(data['project_id'])
        if "comment" in data.keys():
            self.comment = check_string(data['comment'])
        if "create_date" in data.keys():
            self.create_date = check_date(data['create_date'])
        if "update_date" in data.keys():
            self.update_date = check_date(data['update_date'])
        if "fields" in data.keys():
            self.fields = data["fields"]
        if "filter" in data.keys():
            self.filter = data["filter"]
        if "selection" in data.keys():
            self.selection = data["selection"]
        if "order" in data.keys():
            self.order = data["order"]
        if "total_variants" in data.keys():
            self.total_variants = check_int(data["total_variants"])
        if "reference_id" in data.keys():
            self.reference_id = check_int(data["reference_id"])
        if "computing_progress" in data.keys():
            self.computing_progress = check_float(data["computing_progress"])
        if "status" in data.keys():
            self.status = check_string(data["status"], "empty") if data["status"] else "empty"
        if "attributes" in data.keys():
            self.attributes = data["attributes"]
        if "statistics" in data.keys():
            self.statistics = data["statistics"]
        if "files_ids" in data.keys():
            # Remove old files (diff against the current list before overwriting it)
            for fid in self.files_ids:
                if fid not in data["files_ids"]:
                    AnalysisFile.delete(self.id, fid)
            # Add new files
            for fid in data["files_ids"]:
                if fid not in self.files_ids:
                    AnalysisFile.new(self.id, fid)
            self.files_ids = data["files_ids"]
        if "settings" in data.keys():
            # When settings change, the working table needs to be regenerated
            self.settings = data["settings"]
            self.status = "empty"
            self.computing_progress = None
            need_to_clean_db = True
        if "samples_ids" in data.keys():
            # Remove old samples
            for sid in self.samples_ids:
                if sid not in data["samples_ids"]:
                    AnalysisSample.delete(self.id, sid)
            # Add new samples
            for sid in data["samples_ids"]:
                if sid not in self.samples_ids:
                    AnalysisSample.new(self.id, sid)
            # When samples change, the working table also needs to be regenerated
            self.status = "empty"
            self.computing_progress = None
            need_to_clean_db = True
            # If settings are empty, init them with information from the samples
            if len(self.settings["annotations_db"]) == 0:
                settings = self.settings
                from core.model.sample import Sample
                dbuids = []
                for sid in data["samples_ids"]:
                    sample = Sample.from_id(sid)
                    if sample and sample.default_dbuid:
                        for dbuid in sample.default_dbuid:
                            if dbuid not in dbuids:
                                dbuids.append(dbuid)
                self.status = "empty"
                settings["annotations_db"] = dbuids
                self.settings = settings
        if need_to_clean_db:
            execute("DROP TABLE IF EXISTS wt_{0} CASCADE; DROP TABLE IF EXISTS wt_{0}_var CASCADE;".format(self.id))
        # Check to reload dynamic properties
        if self.loading_depth > 0:
            self.project = Project.from_id(self.project_id, self.loading_depth - 1)
            self.samples = AnalysisSample.get_samples(self.id, self.loading_depth - 1)
            self.filters = self.get_filters(self.loading_depth - 1)
            self.files = AnalysisFile.get_files(self.id, self.loading_depth - 1)
        self.save()
        # FIXME: why doesn't sqlalchemy persist the json settings the first time?
        if settings:
            self.settings = settings
            self.save()
        # END FIXME
    except Exception as ex:
        raise RegovarException('Invalid input data to load.', "", ex)
    return self

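# Usage sketch (hypothetical ids; assumes the helper is bound as
# Analysis.load): load() takes a plain dict and only updates the keys it
# recognises. Changing "settings" or "samples_ids" resets the status to
# "empty", clears computing_progress and drops the wt_<id> working tables so
# they will be regenerated.
#
#   analysis = Analysis.from_id(42)
#   analysis.load({"name": "Trio analysis", "samples_ids": [3, 4, 5]})
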