Example #1
def main(args):

    log = lutils.setupLog('worksetlogger', args.logfile)
    session = get_session()
    if args.ws:
        step = session.query(Process).filter_by(luid=args.ws).one()
        ws = lclasses.Workset_SQL(session, log, step)
        with open(args.conf) as conf_file:
            conf = yaml.load(conf_file, Loader=yaml.SafeLoader)
        couch = lutils.setupServer(conf)
        db = couch["worksets"]
        doc = {}
        for row in db.view('worksets/lims_id')[ws.obj['id']]:
            doc = db.get(row.id)

        final_doc = lutils.merge(ws.obj, doc)

        db.save(final_doc)

    elif args.recent:
        recent_processes = get_last_modified_processes(
            session, [8, 46, 117, 204, 1454, 1908], args.interval)
        processes_to_update = set()
        for p in recent_processes:
            if p.typeid in [117, 204] and p.daterun:
                # will only catch finished setup workset plate
                processes_to_update.add(p)
            else:
                processes_to_update.update(
                    get_processes_in_history(session, p.processid, [117, 204]))

        log.info("the following processes will be updated : {0}".format(
            processes_to_update))
        lpar.masterProcessSQL(args, processes_to_update, log)
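
The main() entry points in these examples read a handful of attributes off args (ws, recent, interval, conf, logfile, days). A minimal argparse wiring consistent with that usage might look like the sketch below; the flag names are inferred from those attributes, while the help strings and defaults are assumptions rather than the project's actual CLI.

# A minimal sketch of the CLI wiring main() expects; flag names are
# inferred from the attributes read above, defaults are assumptions.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Sync LIMS worksets into CouchDB")
    parser.add_argument("--ws", help="luid of a single workset process to update")
    parser.add_argument("--recent", action="store_true",
                        help="only look at recently modified processes")
    parser.add_argument("--interval", default="2 hours",
                        help="look-back window for get_last_modified_processes (assumed format)")
    parser.add_argument("--conf", help="YAML file with CouchDB connection details")
    parser.add_argument("--logfile", default="workset_upload.log")
    parser.add_argument("--days", type=int, default=30,
                        help="look-back window for the API fallback")
    main(parser.parse_args())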
Example #2
File: parallel.py Project: espre05/LIMS2DB
def processWSUL(options, queue, logqueue):
    mycouch = sdb.Couch()
    mycouch.set_db("worksets")
    mycouch.connect()
    view = mycouch.db.view('worksets/name')
    mylims = Lims(BASEURI, USERNAME, PASSWORD)
    work = True
    procName = mp.current_process().name
    proclog = logging.getLogger(procName)
    proclog.setLevel(level=logging.INFO)
    mfh = QueueHandler(logqueue)
    mft = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    proclog.addHandler(mfh)

    while work:
        # grab a workset id from the queue; time out if it stays empty
        try:
            ws_id = queue.get(block=True, timeout=3)
            proclog.info("Starting work on {}".format(ws_id))
        except Queue.Empty:
            work = False
            proclog.info("exiting gracefully")
            break
        else:
            wsp = Process(mylims, id=ws_id)
            if not wsp.date_run:
                continue
            lc = lclasses.LimsCrawler(mylims, wsp)
            lc.crawl()
            try:
                ws = lclasses.Workset(mylims, lc, proclog)
            except NameError:
                continue

            # If there is already a workset with that name in the DB
            if len(view[ws.obj['name']].rows) == 1:
                remote_doc = view[ws.obj['name']].rows[0].value
                # remove id and rev for comparison
                doc_id = remote_doc.pop('_id')
                doc_rev = remote_doc.pop('_rev')
                if remote_doc != ws.obj:
                    # if they are different, though they have the same name, upload the new one
                    ws.obj = lutils.merge(ws.obj, remote_doc)
                    ws.obj['_id'] = doc_id
                    ws.obj['_rev'] = doc_rev
                    mycouch.db[doc_id] = ws.obj 
                    proclog.info("updating {0}".format(ws.obj['name']))
                else:
                    proclog.info("not modifying {0}".format(ws.obj['name']))
            elif len(view[ws.obj['name']].rows) == 0:
                # it is a new doc, upload it
                mycouch.save(ws.obj)
                proclog.info("saving {0}".format(ws.obj['name']))
            else:
                proclog.warning("more than one row with name {0} found".format(ws.obj['name']))
            # signal to the queue that this job is done
            queue.task_done()
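
processWSUL is written as a queue worker: it blocks on queue.get with a three-second timeout, treats Queue.Empty as the shutdown signal, and ships its log records through a QueueHandler. A driver along the following lines would fit that contract; the worker count and the listener wiring via logging.handlers.QueueListener are illustrative assumptions about how the calling code might set things up.

# A hedged sketch of a driver matching processWSUL's queue contract.
import logging
import logging.handlers
import multiprocessing as mp

def run_workers(options, ws_ids, nprocs=4):
    queue = mp.JoinableQueue()   # workers call task_done() on this
    logqueue = mp.Queue()        # drained by a single QueueListener
    handler = logging.FileHandler(options.logfile)  # options.logfile is assumed
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    listener = logging.handlers.QueueListener(logqueue, handler)
    listener.start()
    workers = [mp.Process(target=processWSUL, args=(options, queue, logqueue))
               for _ in range(nprocs)]
    for w in workers:
        w.start()
    for ws_id in ws_ids:
        queue.put(ws_id)
    queue.join()   # blocks until every task_done() has been called
    for w in workers:
        w.join()   # workers exit once the queue stays empty for 3 s
    listener.stop()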
Example #3
def main(args):
    log = lutils.setupLog('worksetlogger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    # args.days decides how far back we look when no single workset is given
    if args.ws:
        wsp = Process(lims, id=args.ws)
        lc = lclasses.LimsCrawler(lims, wsp)
        lc.crawl()
        try:
            ws = lclasses.Workset(lims, lc, log)
        except NameError:
            log.error("no name found for this workset")
            return
        mycouch = sdb.Couch()
        mycouch.set_db("worksets")
        mycouch.connect()
        view = mycouch.db.view('worksets/name')
        # If there is already a workset with that name in the DB
        if len(view[ws.obj['name']].rows) == 1:
            remote_doc = view[ws.obj['name']].rows[0].value
            # remove id and rev for comparison
            doc_id = remote_doc.pop('_id')
            doc_rev = remote_doc.pop('_rev')
            if remote_doc != ws.obj:
                # if they are different, though they have the same name, upload the new one
                ws.obj = lutils.merge(ws.obj, remote_doc)
                ws.obj['_id'] = doc_id
                ws.obj['_rev'] = doc_rev
                mycouch.db[doc_id] = ws.obj 
                log.info("updating {0}".format(ws.obj['name']))
            else:
                log.info("not modifying {0}".format(ws.obj['name']))

        elif len(view[ws.obj['name']].rows) == 0:
            # it is a new doc, upload it
            mycouch.save(ws.obj)
            log.info("saving {0}".format(ws.obj['name']))
        else:
            log.warning("more than one row with name {0} found".format(ws.obj['name']))
    else:
        starting_date = datetime.today() - timedelta(args.days)
        str_date = starting_date.strftime("%Y-%m-%dT%H:%M:%SZ")
        wsts = lims.get_processes(type=pc.WORKSET.values(), last_modified=str_date)
        log.info("the following processes will be updated: {0}".format(wsts))
        lpar.masterProcess(args, wsts, lims, log)
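
All of these examples hit CouchDB through the python-couchdb access pattern: indexing a view's ViewResults by key filters the rows, and each row's id points back at the full document. Stripped of the LIMS specifics, the lookup looks like the sketch below; the server URL and the key are placeholders, and 'worksets/name' is assumed to be a map view emitting doc['name'] as its key.

# The python-couchdb view-by-key pattern used above, in isolation.
import couchdb

couch = couchdb.Server("http://localhost:5984")   # URL is a placeholder
db = couch["worksets"]
rows = db.view("worksets/name")["P1234"].rows     # rows whose key matches
if len(rows) == 1:
    doc = db.get(rows[0].id)                      # fetch the full document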
Example #4
def processWSULSQL(args, queue, logqueue):
    work = True
    session = get_session()
    with open(args.conf) as conf_file:
        conf = yaml.load(conf_file, Loader=yaml.SafeLoader)
    couch = lutils.setupServer(conf)
    db = couch["worksets"]
    procName = mp.current_process().name
    proclog = logging.getLogger(procName)
    proclog.setLevel(level=logging.INFO)
    mfh = QueueHandler(logqueue)
    mft = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    proclog.addHandler(mfh)

    while work:
        # grab a workset id from the queue; time out if it stays empty
        try:
            ws_id = queue.get(block=True, timeout=3)
            proclog.info("Starting work on {}".format(ws_id))
        except Queue.Empty:
            work = False
            proclog.info("exiting gracefully")
            break
        else:
            step = session.query(gt.Process).filter(gt.Process.processid == int(ws_id)).one()
            ws = lclasses.Workset_SQL(session, proclog, step)
            doc = {}
            for row in db.view('worksets/lims_id')[ws.obj['id']]:
                doc = db.get(row.id)
            if doc:
                final_doc = lutils.merge(ws.obj, doc)
            else:
                final_doc = ws.obj
            # clean possible name duplicates
            for row in db.view('worksets/name')[ws.obj['name']]:
                doc = db.get(row.id)
                if doc['id'] != ws.obj['id']:
                    proclog.warning("Duplicate name {} for worksets {} and {}".format(doc['name'], doc['id'], final_doc['id']))
                    db.delete(doc)
            db.save(final_doc)
            proclog.info("updating {0}".format(ws.obj['name']))
            queue.task_done()
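
Both the API and the SQL paths reconcile the freshly built document with whatever is already stored via lutils.merge. Its source is not shown here, but the way it is used (values from the new document win, fields present only in the stored copy survive) suggests a recursive dictionary merge along these lines; treat this as an assumption about the helper, not its real implementation.

# A hedged reconstruction of what lutils.merge plausibly does here:
# keys from the freshly generated document (d1) take precedence, while
# keys that only exist in the stored copy (d2) are kept. Assumption only.
def merge(d1, d2):
    for key, value in d2.items():
        if key not in d1:
            d1[key] = value
        elif isinstance(d1[key], dict) and isinstance(value, dict):
            merge(d1[key], value)   # recurse into nested documents
    return d1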
Example #5
def main(args):
    log = lutils.setupLog('worksetlogger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    # args.days decides how far back we look when no single workset is given
    if args.ws:
        wsp = Process(lims, id=args.ws)
        lc = lclasses.LimsCrawler(lims, wsp)
        lc.crawl()
        try:
            ws = lclasses.Workset(lims, lc, log)
        except NameError:
            log.error("no name found for this workset")
            return
        mycouch = sdb.Couch()
        mycouch.set_db("worksets")
        mycouch.connect()
        view = mycouch.db.view('worksets/name')
        # If there is already a workset with that name in the DB
        if len(view[ws.obj['name']].rows) == 1:
            remote_doc = view[ws.obj['name']].rows[0].value
            # remove id and rev for comparison
            doc_id = remote_doc.pop('_id')
            doc_rev = remote_doc.pop('_rev')
            if remote_doc != ws.obj:
                # if they are different, though they have the same name, upload the new one
                ws.obj = lutils.merge(ws.obj, remote_doc)
                ws.obj['_id'] = doc_id
                ws.obj['_rev'] = doc_rev
                mycouch.db[doc_id] = ws.obj 
                log.info("updating {0}".format(ws.obj['name']))
            else:
                log.info("not modifying {0}".format(ws.obj['name']))

        elif len(view[ws.obj['name']].rows) == 0:
            # it is a new doc, upload it
            mycouch.save(ws.obj)
            log.info("saving {0}".format(ws.obj['name']))
        else:
            log.warning("more than one row with name {0} found".format(ws.obj['name']))
    else:
        try:
            from genologics_sql.queries import get_last_modified_processes, get_processes_in_history
            from genologics_sql.utils import get_session
            session = get_session()
            # Aggregate QC, Setup workset plate, or sequencing
            recent_processes = get_last_modified_processes(session, [8, 204, 38, 714, 46])
            # Setup workset plate is 204
            processes_to_update = []
            for p in recent_processes:
                if p.typeid == 204:
                    processes_to_update.append(p)
                else:
                    processes_to_update.extend(get_processes_in_history(session, p.processid, [204]))

            wsts = []
            for p in set(processes_to_update):
                wsts.append(Process(lims, id=p.luid))
            log.info("list compiled via the DB")

        except ImportError:
            starting_date = datetime.today() - timedelta(args.days)
            str_date = starting_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            wsts = lims.get_processes(type=pc.WORKSET.values(), last_modified=str_date)
            log.info("list compiled via the API")

        log.info("the following processes will be updated : {0}".format(wsts))
        lpar.masterProcess(args, wsts, lims, log)