def processWSUL(options, queue, logqueue):
    """Worker-process entry point: consume workset ids from *queue* and
    sync each corresponding workset document into the 'worksets' CouchDB.

    :param options: parsed command-line options (not used directly here,
        kept for a uniform worker signature).
    :param queue: multiprocessing JoinableQueue of workset process ids.
    :param logqueue: queue that forwards log records to the main process.
    """
    mycouch = sdb.Couch()
    mycouch.set_db("worksets")
    mycouch.connect()
    view = mycouch.db.view('worksets/name')
    mylims = Lims(BASEURI, USERNAME, PASSWORD)
    work = True
    # Per-process logger; records are shipped through logqueue so the
    # main process can write a single coherent log.
    procName = mp.current_process().name
    proclog = logging.getLogger(procName)
    proclog.setLevel(level=logging.INFO)
    mfh = QueueHandler(logqueue)
    mft = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    proclog.addHandler(mfh)
    while work:
        # grabs project from queue
        try:
            ws_id = queue.get(block=True, timeout=3)
            proclog.info("Starting work on {}".format(ws_id))
        except Queue.Empty:
            # No work for 3 seconds: assume the master is done feeding us.
            work = False
            proclog.info("exiting gracefully")
            break
        else:
            try:
                wsp = Process(mylims, id=ws_id)
                if not wsp.date_run:
                    # Process never ran; nothing to record.
                    continue
                lc = lclasses.LimsCrawler(mylims, wsp)
                lc.crawl()
                try:
                    ws = lclasses.Workset(mylims, lc, proclog)
                except NameError:
                    # Workset could not determine a name; skip this item.
                    continue
                # If there is already a workset with that name in the DB
                rows = view[ws.obj['name']].rows
                if len(rows) == 1:
                    remote_doc = rows[0].value
                    # remove id and rev for comparison
                    doc_id = remote_doc.pop('_id')
                    doc_rev = remote_doc.pop('_rev')
                    if remote_doc != ws.obj:
                        # if they are different, though they have the same
                        # name, merge and upload the new one
                        ws.obj = lutils.merge(ws.obj, remote_doc)
                        ws.obj['_id'] = doc_id
                        ws.obj['_rev'] = doc_rev
                        mycouch.db[doc_id] = ws.obj
                        proclog.info("updating {0}".format(ws.obj['name']))
                    else:
                        proclog.info("not modifying {0}".format(ws.obj['name']))
                elif len(rows) == 0:
                    # it is a new doc, upload it
                    mycouch.save(ws.obj)
                    proclog.info("saving {0}".format(ws.obj['name']))
                else:
                    proclog.warning("more than one row with name {0} found".format(ws.obj['name']))
            finally:
                # Signal completion for EVERY dequeued item, including the
                # early 'continue' paths above -- otherwise queue.join() in
                # the master would block forever on skipped items.
                queue.task_done()
def main(args):
    """Sync workset documents into CouchDB.

    If ``args.ws`` names a single workset process, build its document and
    create/update it in the 'worksets' DB. Otherwise, collect workset
    processes modified in the last ``args.days`` days via the LIMS API and
    hand them to :func:`masterProcess`.
    """
    log = setupLog(args)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    # this will decide how far back we are looking
    if args.ws:
        wsp = Process(lims, id=args.ws)
        lc = LimsCrawler(lims, wsp)
        try:
            ws = Workset(lims, lc, log)
        except NameError:
            log.error("no name found for this workset")
            # Without a workset document there is nothing to compare or
            # upload; continuing would crash on the unbound 'ws'.
            return
        mycouch = sdb.Couch()
        mycouch.set_db("worksets")
        mycouch.connect()
        view = mycouch.db.view('worksets/name')
        # If there is already a workset with that name in the DB
        rows = view[ws.obj['name']].rows
        if len(rows) == 1:
            remote_doc = rows[0].value
            # remove id and rev for comparison
            doc_id = remote_doc.pop('_id')
            doc_rev = remote_doc.pop('_rev')
            if remote_doc != ws.obj:
                # if they are different, though they have the same name,
                # upload the new one
                ws.obj['_id'] = doc_id
                ws.obj['_rev'] = doc_rev
                mycouch.db[doc_id] = ws.obj
                log.info("updating {0}".format(ws.obj['name']))
        elif len(rows) == 0:
            # it is a new doc, upload it
            mycouch.save(ws.obj)
            log.info("saving {0}".format(ws.obj['name']))
        else:
            log.warning("more than one row with name {0} found".format(
                ws.obj['name']))
    else:
        yesterday = datetime.today() - timedelta(args.days)
        stryes = yesterday.strftime("%Y-%m-%dT%H:%M:%SZ")
        wsts = lims.get_processes(type=pc.WORKSET.values(),
                                  last_modified=stryes)
        masterProcess(args, wsts, lims, log)
def main(args):
    """Sync workset documents into CouchDB.

    If ``args.ws`` names a single workset process, build its document and
    create/update it in the 'worksets' DB. Otherwise compile the list of
    recently modified workset processes -- preferring a direct LIMS DB
    query (genologics_sql) and falling back to the REST API -- and hand
    the list to :func:`lpar.masterProcess`.
    """
    log = lutils.setupLog('worksetlogger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    # this will decide how far back we are looking
    if args.ws:
        wsp = Process(lims, id=args.ws)
        lc = lclasses.LimsCrawler(lims, wsp)
        lc.crawl()
        try:
            ws = lclasses.Workset(lims, lc, log)
        except NameError:
            log.error("no name found for this workset")
            # Without a workset document there is nothing to compare or
            # upload; continuing would crash on the unbound 'ws'.
            return
        mycouch = sdb.Couch()
        mycouch.set_db("worksets")
        mycouch.connect()
        view = mycouch.db.view('worksets/name')
        # If there is already a workset with that name in the DB
        rows = view[ws.obj['name']].rows
        if len(rows) == 1:
            remote_doc = rows[0].value
            # remove id and rev for comparison
            doc_id = remote_doc.pop('_id')
            doc_rev = remote_doc.pop('_rev')
            if remote_doc != ws.obj:
                # if they are different, though they have the same name,
                # merge and upload the new one
                ws.obj = lutils.merge(ws.obj, remote_doc)
                ws.obj['_id'] = doc_id
                ws.obj['_rev'] = doc_rev
                mycouch.db[doc_id] = ws.obj
                log.info("updating {0}".format(ws.obj['name']))
            else:
                log.info("not modifying {0}".format(ws.obj['name']))
        elif len(rows) == 0:
            # it is a new doc, upload it
            mycouch.save(ws.obj)
            log.info("saving {0}".format(ws.obj['name']))
        else:
            log.warning("more than one row with name {0} found".format(ws.obj['name']))
    else:
        try:
            from genologics_sql.queries import get_last_modified_processes, get_processes_in_history
            from genologics_sql.utils import get_session
            session = get_session()
            # Aggregate QC, Setup workset plate, or sequencing.
            recent_processes = get_last_modified_processes(session, [8, 204, 38, 714, 46])
            # Setup workset plate is 204
            processes_to_update = []
            for p in recent_processes:
                if p.typeid == 204:
                    processes_to_update.append(p)
                else:
                    # Walk back through the process history to find the
                    # originating 'Setup workset plate' (204) steps.
                    processes_to_update.extend(
                        get_processes_in_history(session, p.processid, [204]))
            # De-duplicate before hitting the API for each process.
            wsts = [Process(lims, id=p.luid) for p in set(processes_to_update)]
            log.info("list compiled via the DB")
        except ImportError:
            # genologics_sql not available: fall back to the (slower) API.
            starting_date = datetime.today() - timedelta(args.days)
            str_date = starting_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            wsts = lims.get_processes(type=pc.WORKSET.values(),
                                      last_modified=str_date)
            log.info("list compiled via the API")
        log.info("the following processes will be updated : {0}".format(wsts))
        lpar.masterProcess(args, wsts, lims, log)