def post(self, project):
    """Store a new link on the project's 'Links' UDF and return all links as JSON.

    Expects 'type' and 'title' request arguments (mandatory) plus optional
    'url' and 'desc'. Links are keyed by the current timestamp.
    """
    current_user = self.get_current_user()
    link_type = self.get_argument('type', '')
    link_title = self.get_argument('title', '')
    link_url = self.get_argument('url', '')
    link_desc = self.get_argument('desc', '')

    if not link_type or not link_title:
        # Both a type and a title are mandatory for a link entry.
        self.set_status(400)
        self.finish('<html><body>Link title and type is required</body></html>')
        return

    proj = Project(lims, id=project)
    proj.get(force=True)
    existing_links = json.loads(proj.udf['Links']) if 'Links' in proj.udf else {}
    existing_links[str(datetime.datetime.now())] = {
        'user': current_user.name,
        'email': current_user.email,
        'type': link_type,
        'title': link_title,
        'url': link_url,
        'desc': link_desc,
    }
    proj.udf['Links'] = json.dumps(existing_links)
    proj.put()
    self.set_status(200)
    #ajax cries if it does not get anything back
    self.set_header("Content-type", "application/json")
    self.finish(json.dumps(existing_links))
def main(args):
    """Flag statusdb bioinfo docs as closed for LIMS projects with a close date.

    Iterates over open project/sample rows in the bioinfo_analysis database,
    looks up each project's close date in the LIMS, and when the project is
    closed sets project_closed=True on the corresponding statusdb document.

    :param args: parsed CLI arguments providing ``logfile`` and ``conf``.
    """
    log = lutils.setupLog('bioinfologger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as conf_file:
        conf = yaml.safe_load(conf_file)
    bioinfodb = lutils.setupServer(conf)['bioinfo_analysis']
    open_projects = bioinfodb.view('latest_data/sample_id_open')

    for row in open_projects.rows:
        project_id = row.key[0]
        sample_id = row.key[3]
        close_date = None
        try:
            close_date = Project(lims=lims, id=project_id).close_date
        except HTTPError as e:
            # BUGFIX: exceptions have no .message attribute on Python 3 — use str(e).
            if '404: Project not found' in str(e):
                log.error('Project ' + project_id + ' not found in LIMS')
                continue
        if close_date is None:
            continue
        try:
            doc = bioinfodb.get(row.id)
        except Exception as e:
            # BUGFIX: an exception cannot be concatenated to str (TypeError),
            # and `doc` would be unbound below — log via format() and skip.
            log.error('{} in Project {} Sample {} while accessing doc from statusdb'.format(
                e, project_id, sample_id))
            continue
        doc['project_closed'] = True
        try:
            bioinfodb.save(doc)
            log.info('Updated Project ' + project_id + ' Sample ' + sample_id)
        except Exception as e:
            log.error('{} in Project {} Sample {} while saving to statusdb'.format(
                e, project_id, sample_id))
def main(args):
    """Set the 'Reads Min' UDF on recently modified LIMS projects.

    Looks up per-platform minimum yields in the expected_yields couch
    database, then for each project modified within the last ``args.hours``
    hours computes min_yield * lanes_ordered / non-aborted-sample-count.

    :param args: parsed CLI arguments providing ``conf`` and ``hours``.
    """
    lims_db = get_session()
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as cf:
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # unsafe on untrusted input.
        db_conf = yaml.safe_load(cf)
    couch = setupServer(db_conf)
    db = couch["expected_yields"]
    postgres_string = "{} hours".format(args.hours)
    project_ids = get_last_modified_projectids(lims_db, postgres_string)

    # Map "platform key" -> minimum yield value.
    min_yields = {}
    for row in db.view("yields/min_yield"):
        db_key = ' '.join(x if x else '' for x in row.key).strip()
        min_yields[db_key] = row.value

    for project in (Project(lims, id=x) for x in project_ids):
        # Count samples that were not manually aborted.
        samples_count = 0
        for sample in lims.get_samples(projectname=project.name):
            if not ("Status (manual)" in sample.udf
                    and sample.udf["Status (manual)"] == "Aborted"):
                samples_count += 1
        try:
            lanes_ordered = project.udf['Sequence units ordered (lanes)']
            key = project.udf['Sequencing platform']
        except KeyError:
            # Narrowed from a bare `except:` — missing UDFs mean nothing to compute.
            continue
        if key not in min_yields:
            continue
        value = min_yields[key]
        try:
            project.udf['Reads Min'] = float(value) * lanes_ordered / samples_count
            project.put()
        except ZeroDivisionError:
            # All samples aborted -> samples_count == 0; skip this project.
            pass
def make_project_running_note(application, project, note, category, user, email):
    """Create a running note on a LIMS project and mirror it into statusdb.

    Writes the note into the project's 'Running Notes' UDF (retrying once if
    the write did not stick), copies it into the genstat project document,
    and notifies any @-tagged users.

    :param application: tornado application (provides projects_db).
    :param project: LIMS project id.
    :param note: note text.
    :param category: note category label.
    :param user: display name of the author.
    :param email: author's email address.
    :returns: the note dict that was stored.
    :raises AssertionError: if LIMS still disagrees after the retry.
    """
    timestamp = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')
    newNote = {'user': user, 'email': email, 'note': note,
               'category': category, 'timestamp': timestamp}
    p = Project(lims, id=project)
    p.get(force=True)
    running_notes = json.loads(p.udf['Running Notes']) if 'Running Notes' in p.udf else {}
    running_notes.update({timestamp: newNote})
    # Saving running note in LIMS
    p.udf['Running Notes'] = json.dumps(running_notes)
    p.put()
    p.get(force=True)
    # Retry once more if the first write did not stick.
    if p.udf['Running Notes'] != json.dumps(running_notes):
        p.udf['Running Notes'] = json.dumps(running_notes)
        p.put()
        p.get(force=True)
    # In the rare case saving to LIMS does not work. BUGFIX: raise explicitly
    # instead of `assert`, which is silently stripped under `python -O`.
    if p.udf['Running Notes'] != json.dumps(running_notes):
        raise AssertionError("The Running note wasn't saved in LIMS!")
    # saving running notes directly in genstat, because reasons.
    v = application.projects_db.view("project/project_id")
    for row in v[project]:
        doc_id = row.value
        doc = application.projects_db.get(doc_id)
        doc['details']['running_notes'] = json.dumps(running_notes)
        application.projects_db.save(doc)
    #### Check and send mail to tagged users
    pattern = re.compile("(@)([a-zA-Z0-9.-]+)")
    userTags = pattern.findall(note)
    if userTags:
        RunningNotesDataHandler.notify_tagged_user(application, userTags, project,
                                                   note, category, user, timestamp)
    ####
    return newNote
def post(self, project):
    """Add a running note to a project in LIMS and mirror it into statusdb.

    Requires a 'note' request argument; author identity comes from secure
    cookies. Responds 201 with the new note as JSON.
    """
    note = self.get_argument('note', '')
    category = self.get_argument('category', '')
    user = self.get_secure_cookie('user')
    email = self.get_secure_cookie('email')
    timestamp = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')

    if not note:
        self.set_status(400)
        self.finish('<html><body>No project id or note parameters found</body></html>')
        return

    new_note = {'user': user, 'email': email, 'note': note,
                'category': category, 'timestamp': timestamp}
    proj = Project(lims, id=project)
    proj.get(force=True)
    notes = json.loads(proj.udf['Running Notes']) if 'Running Notes' in proj.udf else {}
    notes[timestamp] = new_note
    proj.udf['Running Notes'] = json.dumps(notes)
    proj.put()
    #saving running notes directly in genstat, because reasons.
    view = self.application.projects_db.view("project/project_id")
    for row in view[project]:
        doc = self.application.projects_db.get(row.value)
        doc['details']['running_notes'] = json.dumps(notes)
        self.application.projects_db.save(doc)
    self.set_status(201)
    self.write(json.dumps(new_note))
def main(lims, args):
    """EPP: write a 'Workset' running note to every project in this workset.

    Collects the projects of all input samples of the process, posts one
    running note per project to statusdb, writes an EPP_Notes.log file and
    attaches it to the 'RNotes Log' output artifact.

    :param lims: genologics Lims connection.
    :param args: parsed CLI arguments providing ``pid`` (process id).
    """
    p = Process(lims, id=args.pid)
    log = []
    datamap = {}
    wsname = None
    # BUGFIX: the original format string had no placeholders, so the
    # technician's name was never interpolated into the username.
    username = "{0} {1}".format(p.technician.first_name, p.technician.last_name)
    user_email = p.technician.email
    for art in p.all_inputs():
        if len(art.samples) != 1:
            log.append(
                "Warning : artifact {0} has more than one sample".format(art.id))
        for sample in art.samples:
            #take care of lamda DNA
            if sample.project:
                if sample.project.id not in datamap:
                    datamap[sample.project.id] = [sample.name]
                else:
                    datamap[sample.project.id].append(sample.name)
    # First output with a container location names the workset.
    for art in p.all_outputs():
        try:
            wsname = art.location[0].name
            break
        except Exception:
            # Narrowed from a bare `except:`; output without a location.
            pass
    now = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
    for pid in datamap:
        pj = Project(lims, id=pid)
        if len(datamap[pid]) > 1:
            rnt = "{0} samples planned for {1}".format(len(datamap[pid]), wsname)
        else:
            rnt = "{0} sample planned for {1}".format(len(datamap[pid]), wsname)
        running_note = {
            "note": rnt,
            "user": username,
            "email": user_email,
            "category": "Workset"
        }
        write_note_to_couch(pid, now, running_note, lims.get_uri())
        log.append(
            "Updated project {0} : {1}, {2} samples in this workset".format(
                pid, pj.name, len(datamap[pid])))
    with open("EPP_Notes.log", "w") as flog:
        flog.write("\n".join(log))
    for out in p.all_outputs():
        #attach the log file
        if out.name == "RNotes Log":
            attach_file(os.path.join(os.getcwd(), "EPP_Notes.log"), out)
    sys.stderr.write("Updated {0} projects successfully".format(len(datamap)))
def get(self, project):
    """Return the project's links as JSON, grouped by type, newest first."""
    self.set_header("Content-type", "application/json")
    proj = Project(lims, id=project)
    proj.get(force=True)
    links = json.loads(proj.udf['Links']) if 'Links' in proj.udf else {}
    #Sort by descending date, then hopefully have deviations on top
    by_date = OrderedDict(
        sorted(links.items(), key=lambda item: item[0], reverse=True))
    # Stable sort: within each type, descending-date order is preserved.
    by_type = OrderedDict(
        sorted(by_date.items(), key=lambda item: item[1]['type']))
    self.write(by_type)
def get(self, project_id):
    """Render the project summary page for the given project id."""
    project = Project(lims, id=project_id)
    processes = lims.get_processes(projectname=project.name,
                                   type='Project Summary 1.3')
    samples = lims.get_samples(projectname=project.name)
    self.getProjectSummaryFields(lims)
    template = self.application.loader.load("project_summary.html")
    self.write(template.generate(
        gs_globals=self.application.gs_globals,
        project_id=project_id,
        processes=processes,
        samples=samples,
        step_fields=self.step_fields,
        sample_fields=self.sample_fields,
    ))
def get(self, project):
    """Return the project's running notes as JSON, newest first.

    :raises tornado.web.HTTPError: 404 when the project cannot be fetched.
    """
    self.set_header("Content-type", "application/json")
    p = Project(lims, id=project)
    try:
        p.get(force=True)
    except Exception:
        # Narrowed from a bare `except:` (which would also swallow
        # SystemExit/KeyboardInterrupt); any fetch failure maps to 404.
        raise tornado.web.HTTPError(404, reason='Project not found: {}'.format(project))
    # Sorted running notes, by date
    running_notes = json.loads(p.udf['Running Notes']) if 'Running Notes' in p.udf else {}
    sorted_running_notes = OrderedDict()
    for k, v in sorted(running_notes.items(), key=lambda t: t[0], reverse=True):
        sorted_running_notes[k] = v
    self.write(sorted_running_notes)
def get(self):
    """Return, per library-prep method, the pools currently queued in LIMS."""
    limsg = lims.Lims(BASEURI, USERNAME, PASSWORD)
    # Queue ids per library-prep method.
    queue_ids = {
        'TruSeqRNAprep': '311',
        'TruSeqSmallRNA': '410',
        'TruSeqDNAPCR_free': '407',
        'ThruPlex': '451',
        'Genotyping': '901',
        'RadSeq': '1201',
        'SMARTerPicoRNA': '1551',
        'ChromiumGenomev2': '1801',
    }
    queues = {method: Queue(limsg, id=qid) for method, qid in queue_ids.items()}
    pools = {}
    for method in queues:
        pools[method] = {}
        for artifact in queues[method].artifacts:
            name = artifact.name
            # Pool artifacts are named "<project>_...".
            project = artifact.name.split('_')[0]
            if project in pools[method]:
                pools[method][project]['samples'].append(name)
                continue
            total_num_samples = limsg.get_sample_number(projectlimsid=project)
            proj = Project(limsg, id=project)
            try:
                date_queued = proj.udf['Queued'].strftime("%Y-%m-%d")
            except KeyError:
                # Queued should really be on a project at this point, but mistakes happen
                date_queued = None
            pools[method][project] = {
                'total_num_samples': total_num_samples,
                'queued_date': date_queued,
                'pname': proj.name,
                'samples': [name]
            }
    self.set_header("Content-type", "application/json")
    self.write(json.dumps(pools))
def test_create_entity(self):
    """Sample.create posts a samplecreation payload carrying name, project,
    container and well position."""
    with patch('genologics.lims.requests.post',
               return_value=Mock(content=self.sample_creation,
                                 status_code=201)) as patch_post:
        Sample.create(
            self.lims,
            project=Project(self.lims, uri='project'),
            container=Container(self.lims, uri='container'),
            position='1:1',
            name='s1',
        )
        expected = '''<?xml version=\'1.0\' encoding=\'utf-8\'?>
<smp:samplecreation xmlns:smp="http://genologics.com/ri/sample">
    <name>s1</name>
    <project uri="project" limsid="project" />
    <location>
        <container uri="container" />
        <value>1:1</value>
    </location>
</smp:samplecreation>'''
        posted = patch_post.call_args_list[0][1]['data']
        # Compare parsed trees, not raw strings, so formatting is irrelevant.
        assert elements_equal(ElementTree.fromstring(posted),
                              ElementTree.fromstring(expected))
def post(self, project_id):
    """Set the 'Internal Costs' UDF on a project and mirror it into statusdb.

    Request body is JSON with a 'text' field; echoes the body back on success.
    """
    try:
        payload = json.loads(self.request.body)
        text = payload.get('text', '')
        proj = Project(lims, id=project_id)
        proj.udf['Internal Costs'] = text
        proj.put()
        # Mirror the value into the genstat project document(s).
        view = self.application.projects_db.view("project/project_id")
        for row in view[project_id]:
            doc = self.application.projects_db.get(row.id)
            doc['details']['internal_costs'] = text
            self.application.projects_db.save(doc)
    except Exception as e:
        self.set_status(400)
        self.finish('<html><body><p>could not update Entity {} :</p><pre>{}</pre></body></html>'.format(
            project_id, e))
    else:
        self.set_status(200)
        self.set_header("Content-type", "application/json")
        self.write(self.request.body)
def update_project(self, lims_id: str, name: str = None) -> None:
    """Update information about a project.

    Only pushes to LIMS when a new name is supplied.
    """
    lims_project = Project(self, id=lims_id)
    if not name:
        return
    lims_project.name = name
    lims_project.put()
def test_project_example(self):
    """Fetching a project by id populates its attributes from the patched GET."""
    with patch("genologics.lims.Lims.get", side_effect=test_utils.patched_get):
        pj = Project(self.lims, id='BLA1')
        # BUGFIX: assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(pj.name, 'Test')