Example #1
0
def get_sequencing_steps(session, interval="24 hours"):
    """Return sequencing processes modified within the given interval.

    Process type ids 38, 714 and 46 correspond to the HiSeq, HiSeq X and
    MiSeq sequencing steps, respectively.
    """
    sequencing_type_ids = [38, 714, 46]
    return get_last_modified_processes(session, sequencing_type_ids, interval)
Example #2
0
def get_sequencing_steps(session, interval="24 hours"):
    """Return sequencing processes modified within the given interval.

    Process type ids 38, 714, 1454 and 46 correspond to the HiSeq,
    HiSeq X, NovaSeq and MiSeq sequencing steps, respectively.
    """
    sequencing_type_ids = [38, 714, 1454, 46]
    return get_last_modified_processes(session, sequencing_type_ids, interval)
Example #3
0
def main(args):
    """Create or update workset documents in the 'worksets' CouchDB database.

    If ``args.ws`` names a single workset process, crawl it through the LIMS
    and create/update its CouchDB document. Otherwise, compile the list of
    recently modified workset processes (via the genologics database when
    available, falling back to the REST API) and hand it to the master
    process for updating.
    """
    log = lutils.setupLog('worksetlogger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    #this will decide how far back we are looking
    if args.ws:
        wsp = Process(lims, id=args.ws)
        lc = lclasses.LimsCrawler(lims, wsp)
        lc.crawl()
        try:
            ws = lclasses.Workset(lims, lc, log)
        except NameError:
            # Bail out: without a name there is no document to build, and
            # continuing would hit an undefined `ws` below (bug in the
            # original, which fell through after logging).
            log.error("no name found for this workset")
            return
        mycouch = sdb.Couch()
        mycouch.set_db("worksets")
        mycouch.connect()
        view = mycouch.db.view('worksets/name')
        #If there is already a workset with that name in the DB
        rows = view[ws.obj['name']].rows
        if len(rows) == 1:
            remote_doc = rows[0].value
            #remove id and rev for comparison
            doc_id = remote_doc.pop('_id')
            doc_rev = remote_doc.pop('_rev')
            if remote_doc != ws.obj:
                #if they are different, though they have the same name, upload the new one
                ws.obj = lutils.merge(ws.obj, remote_doc)
                ws.obj['_id'] = doc_id
                ws.obj['_rev'] = doc_rev
                mycouch.db[doc_id] = ws.obj
                log.info("updating {0}".format(ws.obj['name']))
            else:
                log.info("not modifying {0}".format(ws.obj['name']))
        elif len(rows) == 0:
            #it is a new doc, upload it
            mycouch.save(ws.obj)
            log.info("saving {0}".format(ws.obj['name']))
        else:
            # Logger.warn is a deprecated alias of Logger.warning
            log.warning("more than one row with name {0} found".format(ws.obj['name']))
    else:
        try:
            from genologics_sql.queries import get_last_modified_processes, get_processes_in_history
            from genologics_sql.utils import get_session
            session = get_session()
            #Aggregate QC, Setup workset plate, or sequencing.
            recent_processes = get_last_modified_processes(session, [8, 204, 38, 714, 46])
            #Setup workset plate is 204
            processes_to_update = []
            for p in recent_processes:
                if p.typeid == 204:
                    processes_to_update.append(p)
                else:
                    processes_to_update.extend(
                        get_processes_in_history(session, p.processid, [204]))
            wsts = [Process(lims, id=p.luid) for p in set(processes_to_update)]
            log.info("list compiled via the DB")
        except ImportError:
            # genologics_sql is not installed: fall back to the (slower) API
            starting_date = datetime.today() - timedelta(args.days)
            str_date = starting_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            wsts = lims.get_processes(type=pc.WORKSET.values(), last_modified=str_date)
            log.info("list compiled via the API")
        log.info("the following processes will be updated : {0}".format(wsts))
        lpar.masterProcess(args, wsts, lims, log)