def update_project(self, database):
    """Fetch project info and update the project document in the database.

    Builds a ProjectDB object for this project, reuses an existing couchdb
    ``_id`` when the project is already present in the view, merges legacy
    status from google docs for projects opened on or before 2014-06-30,
    then either uploads the document or dumps it to a file depending on
    ``self.upload_data``.

    Returns a human-readable status string on success, or an error-message
    string on failure (this method never raises).
    """
    opened_after_140630 = comp_dates('2014-06-30', self.ordered_opened)
    try:
        LOG.info('Handeling {proj}'.format(proj=self.name))
        project = database.ProjectDB(lims, self.id, self.samp_db)
        key = find_proj_from_view(self.proj_db, self.name)
        project.obj['_id'] = find_or_make_key(key)
        if not opened_after_140630:
            # Legacy projects: status was tracked in google docs (20158)
            # before it was implemented in lims — merge it in here.
            project.obj = load_status_from_google_docs.get(self.name,
                                                           project.obj)
        if self.upload_data:
            info = save_couchdb_obj(self.proj_db, project.obj)
        else:
            info = self.print_couchdb_obj_to_file(project.obj)
        return "project {name} is handled and {info}: _id = {id}".format(
            name=self.name, info=info, id=project.obj['_id'])
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
        # propagate; log the traceback instead of silently discarding it.
        LOG.exception('Failed handling {name}'.format(name=self.name))
        return ('Issues geting info for {name}. The "Application" udf might'
                ' be missing'.format(name=self.name))
def update_project(self, database):
    """Fetch project info and update the project document in the database.

    NOTE(review): this appears to be a duplicate of an earlier
    ``update_project`` definition in the same scope — the later ``def``
    silently shadows the earlier one; confirm which copy is intended and
    delete the other.

    Returns a human-readable status string on success, or an error-message
    string on failure (this method never raises).
    """
    opened_after_140630 = comp_dates('2014-06-30', self.ordered_opened)
    try:
        LOG.info('Handeling {proj}'.format(proj=self.name))
        project = database.ProjectDB(lims, self.id, self.samp_db)
        key = find_proj_from_view(self.proj_db, self.name)
        project.obj['_id'] = find_or_make_key(key)
        if not opened_after_140630:
            # Projects opened on or before 2014-06-30 still carry status in
            # google docs — merge that legacy status into the document.
            project.obj = load_status_from_google_docs.get(
                self.name, project.obj)
        if self.upload_data:
            info = save_couchdb_obj(self.proj_db, project.obj)
        else:
            info = self.print_couchdb_obj_to_file(project.obj)
        return "project {name} is handled and {info}: _id = {id}".format(
            name=self.name, info=info, id=project.obj['_id'])
    except Exception:
        # Was a bare except: it hid every failure (even KeyboardInterrupt)
        # without a trace. Log the exception, keep the best-effort return.
        LOG.exception('Failed handling {name}'.format(name=self.name))
        return (
            'Issues geting info for {name}. The "Application" udf might'
            ' be missing'.format(name=self.name))
def __init__(self, lims_instance, project_id, samp_db):
    """Collect all lims info for one project into ``self.project``.

    Fetches the lims Project plus its aggregation/library-validation,
    sequencing and project-summary processes, builds the couchdb-style
    ``project_summary`` document, and attaches one entry per sample.

    Parameters:
        lims_instance -- genologics-style lims connection object
        project_id    -- lims id of the project to summarize
        samp_db       -- sample database handle, passed through to SampleDB
    """
    self.lims = lims_instance
    self.samp_db = samp_db
    self.lims_project = Project(self.lims, id=project_id)
    self.preps = ProcessInfo(
        self.lims,
        self.lims.get_processes(projectname=self.lims_project.name,
                                type=AGRLIBVAL.values()))
    runs = self.lims.get_processes(projectname=self.lims_project.name,
                                   type=SEQUENCING.values())
    self.runs = ProcessInfo(self.lims, runs)
    project_summary = self.lims.get_processes(
        projectname=self.lims_project.name, type=SUMMARY.values())
    self.project = {'source': 'lims',
                    'application': None,
                    'samples': {},
                    'open_date': self.lims_project.open_date,
                    'close_date': self.lims_project.close_date,
                    'entity_type': 'project_summary',
                    'contact': self.lims_project.researcher.email,
                    'project_name': self.lims_project.name,
                    'project_id': self.lims_project.id}
    self.project = get_udfs('details', self.project,
                            self.lims_project.udf.items(),
                            PROJ_UDF_EXCEPTIONS)
    # has_key() was removed in Python 3; build the udf dict once and use
    # the `in` operator instead of constructing it twice.
    lab_udfs = dict(self.lims_project.researcher.lab.udf.items())
    if 'Affiliation' in lab_udfs:
        self.project['affiliation'] = lab_udfs['Affiliation']
    if len(project_summary) == 1:
        self.project = get_udfs('project_summary', self.project,
                                project_summary[0].udf.items())
    elif len(project_summary) > 1:
        print('Warning. project summary process run more than once')
    ################# Temporary solution untill 20158 implemented in lims >>>
    ## can be removed when all projects opened before 2014-01-27 have been
    ## closed for more than 60 days; then we also need to block old projects
    ## so they are not overwritten if someone manually updates with -p
    opened = self.lims_project.open_date
    googledocs_status = {}
    if opened:
        try:
            googledocs_status = load_status_from_google_docs.get(
                self.lims_project.name)
        except Exception:
            # Best-effort only: missing 20158 status must not abort the
            # whole project build, but don't hide unrelated failures either.
            print('issues finding status from 20158')
    ## Finish Date = last seq date if proj closed.
    ## Will be removed and fetched from lims.
    seq_finished = None
    if self.lims_project.close_date and len(runs) > 0:
        latest = '2000-10-10'  # sentinel older than any real run date
        for run in runs:
            try:
                new_date = dict(run.udf.items())['Finish Date'].isoformat()
                if comp_dates(latest, new_date):
                    latest = new_date
                    seq_finished = latest
            except Exception:
                # Runs without a 'Finish Date' udf are simply skipped.
                pass
    self.project['sequencing_finished'] = seq_finished
    # Temporary solution untill 20158 implemented in lims <<<
    ## Getting sample info
    samples = self.lims.get_samples(projectlimsid=self.lims_project.id)
    self.project['no_of_samples'] = len(samples)
    if len(samples) > 0:
        processes_per_artifact = self.build_processes_per_artifact(
            self.lims, self.lims_project.name)
        # Sentinel later than any real date; replaced by the earliest
        # initial-qc start date found among the samples.
        self.project['first_initial_qc'] = '3000-10-10'
        for samp in samples:
            sampDB = SampleDB(
                self.lims, samp.id, self.project['project_name'],
                self.samp_db, self.project['application'],
                self.preps.info, self.runs.info,
                # googledocs_status: temporary until 20158 is in lims
                googledocs_status,
                processes_per_artifact=processes_per_artifact)
            self.project['samples'][sampDB.name] = sampDB.obj
            try:
                initial_qc_start_date = (
                    self.project['samples'][sampDB.name]
                    ['initial_qc']['start_date'])
                if comp_dates(initial_qc_start_date,
                              self.project['first_initial_qc']):
                    self.project['first_initial_qc'] = initial_qc_start_date
            except Exception:
                # Samples with no initial_qc info are simply skipped.
                pass
    self.project = delete_Nones(self.project)