Example 1
def main(args):

    with open(args.conf, 'r') as conf_file:
        conf = yaml.safe_load(conf_file)
    couch = lutils.setupServer(conf)

    with open(args.oconf, 'r') as ocf:
        oconf = yaml.safe_load(ocf)['order_portal']

    mainlog = get_logger('psullogger')

    lims_db = get_session()
    host = get_configuration()['url']

    if args.name:
        pj_id = lims_db.query(
            DBProject.luid).filter(DBProject.name == args.name).scalar()
    else:
        pj_id = args.pid

    P = ProjectSQL(lims_db, mainlog, pj_id, host, couch, oconf)

    if args.test:
        pprint.pprint(P.obj)
    else:
        P.save()
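
lutils.setupServer recurs throughout these examples and evidently turns the YAML config into a CouchDB server handle. A minimal sketch of what it might look like on top of the python-couchdb package (the 'statusdb' section and its key names are assumptions, not the project's actual config schema):

import couchdb

def setupServer(conf):
    # hypothetical config layout: conf['statusdb'] holds url, username,
    # password and port; the real lutils may read something different
    db_conf = conf['statusdb']
    url = 'https://{username}:{password}@{url}:{port}'.format(**db_conf)
    return couchdb.Server(url)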
Example 2
def main(args):
    log = lutils.setupLog('bioinfologger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as conf_file:
        conf = yaml.safe_load(conf_file)
    bioinfodb = lutils.setupServer(conf)['bioinfo_analysis']
    open_projects = bioinfodb.view('latest_data/sample_id_open')

    for row in open_projects.rows:
        project_id = row.key[0]
        sample_id = row.key[3]
        close_date = None
        try:
            close_date = Project(lims=lims, id=project_id).close_date
        except HTTPError as e:
            if '404: Project not found' in str(e):
                log.error('Project ' + project_id + ' not found in LIMS')
                continue
        if close_date is not None:
            try:
                doc = bioinfodb.get(row.id)
            except Exception as e:
                log.error('{} in Project {} Sample {} while accessing doc from statusdb'.format(
                    e, project_id, sample_id))
                continue
            doc['project_closed'] = True
            try:
                bioinfodb.save(doc)
                log.info('Updated Project ' + project_id + ' Sample ' + sample_id)
            except Exception as e:
                log.error('{} in Project {} Sample {} while saving to statusdb'.format(
                    e, project_id, sample_id))
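
All of these entry points receive a parsed-arguments namespace. As a usage illustration for the example above, a minimal launcher whose flag names mirror the attributes the function reads (args.conf, args.logfile); the help texts are assumptions:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Flag closed projects in the bioinfo_analysis database')
    parser.add_argument('--conf', required=True,
                        help='YAML config with CouchDB connection details')
    parser.add_argument('--logfile', required=True,
                        help='path to the log file')
    main(parser.parse_args())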
Example 3

def main(args):
    lims_db = get_session()
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as cf:
        db_conf = yaml.safe_load(cf)
        couch = setupServer(db_conf)
    db = couch["expected_yields"]
    postgres_string = "{} hours".format(args.hours)
    project_ids = get_last_modified_projectids(lims_db, postgres_string)

    for project in [Project(lims, id=x) for x in project_ids]:
        samples_count = 0
        samples = lims.get_samples(projectname=project.name)
        for sample in samples:
            if not ("Status (manual)" in sample.udf
                    and sample.udf["Status (manual)"] == "Aborted"):
                samples_count += 1
        try:
            lanes_ordered = project.udf['Sequence units ordered (lanes)']
            key = parse_sequencing_platform(project.udf['Sequencing platform'])
        except KeyError:
            # skip projects that lack the required UDFs
            continue
        for row in db.view("yields/min_yield"):
            db_key = [x.lower() if x else None for x in row.key]
            if db_key == key:
                try:
                    project.udf['Reads Min'] = float(
                        row.value) * lanes_ordered / samples_count
                    project.put()
                except ZeroDivisionError:
                    pass
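
parse_sequencing_platform is project-specific and its contract is not shown in these snippets. Purely to illustrate the comparison above, where each view key is lowercased element-wise with None preserved, one hypothetical shape for it (the UDF format and key length are guesses):

def parse_sequencing_platform(platform_udf):
    # hypothetical: lowercase the 'Sequencing platform' UDF parts and pad
    # with None so the result lines up with the yields/min_yield view keys
    parts = [p.strip().lower() for p in platform_udf.split(',')]
    return (parts + [None] * 3)[:3]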
Example 4

def main(args):

    with open(args.conf) as conf_file:
        conf = yaml.safe_load(conf_file)
    couch = lutils.setupServer(conf)

    mainlog = get_logger('fsullogger')
    db_session = get_session()
    query = "select distinct pro.* from container ct \
                inner join containerplacement cp on ct.containerid=cp.containerid \
                inner join processiotracker piot on piot.inputartifactid=cp.processartifactid \
                inner join process pro on pro.processid=piot.processid \
                where pro.typeid in ({seq_type_ids}) and ct.name='{ct_name}';".format(
        seq_type_ids=",".join(list(pc_cg.SEQUENCING.keys())),
        ct_name=args.flowcell)
    seq_steps = db_session.query(Process).from_statement(text(query)).all()

    for step in seq_steps:
        fcid = None
        for udf in step.udfs:
            if udf.udfname == "Run ID":
                fcid = udf.udfvalue
        if fcid is None:
            # skip steps without a Run ID to avoid reusing a stale value
            continue

        # generate the lims_data dict
        lims_data = create_lims_data_obj(db_session, step)
        if args.test:
            pprint.pprint(lims_data)
        else:
            mainlog.info("updating {}".format(fcid))
            # update the right couch document
            upload_to_couch(couch, fcid, lims_data)
Example 5
def main(args):

    log = lutils.setupLog('worksetlogger', args.logfile)
    session = get_session()
    if args.ws:
        step = session.query(Process).filter_by(luid=args.ws).one()
        ws = lclasses.Workset_SQL(session, log, step)
        with open(args.conf) as conf_file:
            conf = yaml.load(conf_file, Loader=yaml.SafeLoader)
        couch = lutils.setupServer(conf)
        db = couch["worksets"]
        doc = {}
        for row in db.view('worksets/lims_id')[ws.obj['id']]:
            doc = db.get(row.id)

        final_doc = lutils.merge(ws.obj, doc)

        db.save(final_doc)

    elif args.recent:
        recent_processes = get_last_modified_processes(
            session, [8, 46, 117, 204, 1454, 1908], args.interval)
        processes_to_update = set()
        for p in recent_processes:
            if p.typeid in [117, 204] and p.daterun:
                # will only catch finished setup workset plate steps
                processes_to_update.add(p)
                processes_to_update.add(p)
            else:
                processes_to_update.update(
                    get_processes_in_history(session, p.processid, [117, 204]))

        log.info("the following processes will be updated : {0}".format(
            processes_to_update))
        lpar.masterProcessSQL(args, processes_to_update, log)
Example 6

def main(args):
    lims_db = get_session()
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as cf:
        db_conf = yaml.safe_load(cf)
        couch = setupServer(db_conf)
    db = couch["expected_yields"]
    postgres_string = "{} hours".format(args.hours)
    project_ids = get_last_modified_projectids(lims_db, postgres_string)

    for project in [Project(lims, id=x) for x in project_ids]:
        samples_count = 0
        samples = lims.get_samples(projectname=project.name)
        for sample in samples:
            if not("Status (manual)" in sample.udf and sample.udf["Status (manual)"] == "Aborted"):
                samples_count +=1
        try:
            lanes_ordered = project.udf['Sequence units ordered (lanes)']
            key = parse_sequencing_platform(project.udf['Sequencing platform'])
        except KeyError:
            # skip projects that lack the required UDFs
            continue
        for row in db.view("yields/min_yield"):
            db_key = [x.lower() if x else None for x in row.key]
            if db_key == key:
                try:
                    project.udf['Reads Min'] = float(row.value) * lanes_ordered / samples_count
                    project.put()
                except ZeroDivisionError:
                    pass
Example 7
def main(args):
    # get the session with the lims db
    db_session = get_session()

    # set up a log
    mainlog = logging.getLogger('fsullogger')
    mainlog.setLevel(level=logging.INFO)
    mfh = logging.handlers.RotatingFileHandler(args.logfile, maxBytes=209715200, backupCount=5)
    mft = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    mainlog.addHandler(mfh)

    # read the configuration
    with open(args.conf) as conf_file:
        conf = yaml.safe_load(conf_file)

    couch = setupServer(conf)
    interval = "{} hours".format(args.hours)

    # list the right sequencing steps
    seq_steps = get_sequencing_steps(db_session, interval)

    for step in seq_steps:
        fcid = None
        for udf in step.udfs:
            if udf.udfname == "Run ID":
                fcid = udf.udfvalue
        if fcid is None:
            # skip steps without a Run ID to avoid reusing a stale value
            continue

        mainlog.info("updating {}".format(fcid))
        # generate the lims_data dict
        lims_data = create_lims_data_obj(db_session, step)
        # update the right couch document
        upload_to_couch(couch, fcid, lims_data)
Example 8
def main(args):
    # get the session with the lims db
    db_session = get_session()

    # set up a log
    mainlog = logging.getLogger('fsullogger')
    mainlog.setLevel(level=logging.INFO)
    mfh = logging.handlers.RotatingFileHandler(args.logfile,
                                               maxBytes=209715200,
                                               backupCount=5)
    mft = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    mainlog.addHandler(mfh)

    # read the configuration
    with open(args.conf) as conf_file:
        conf = yaml.load(conf_file, Loader=yaml.SafeLoader)

    couch = setupServer(conf)
    interval = "{} hours".format(args.hours)

    # list the right sequencing steps
    if args.flowcell:
        query = "select distinct pro.* from container ct \
                inner join containerplacement cp on ct.containerid=cp.containerid \
                inner join processiotracker piot on piot.inputartifactid=cp.processartifactid \
                inner join process pro on pro.processid=piot.processid \
                where pro.typeid in ({seq_type_ids}) and ct.name='{ct_name}';".format(
            seq_type_ids=",".join(list(pc_cg.SEQUENCING.keys())),
            ct_name=args.flowcell)
        seq_steps = db_session.query(Process).from_statement(text(query)).all()
    else:
        seq_steps = get_sequencing_steps(db_session, interval)

    for step in seq_steps:
        fcid = None
        for udf in step.udfs:
            if udf.udfname == "Run ID":
                fcid = udf.udfvalue
        if fcid is None:
            # skip steps without a Run ID to avoid reusing a stale value
            continue

        mainlog.info("updating {}".format(fcid))
        # generate the lims_data dict
        lims_data = create_lims_data_obj(db_session, step)
        # update the right couch document
        upload_to_couch(couch, fcid, lims_data)
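
Examples 4 and 8 interpolate args.flowcell directly into the SQL string. A sketch of the same query with SQLAlchemy bound parameters instead (expanding IN-lists need SQLAlchemy 1.2+; this assumes the pc_cg.SEQUENCING keys are the numeric type ids seen above):

from sqlalchemy import bindparam, text

query = text(
    "select distinct pro.* from container ct "
    "inner join containerplacement cp on ct.containerid=cp.containerid "
    "inner join processiotracker piot on piot.inputartifactid=cp.processartifactid "
    "inner join process pro on pro.processid=piot.processid "
    "where pro.typeid in :seq_type_ids and ct.name=:ct_name"
).bindparams(bindparam('seq_type_ids', expanding=True))

seq_steps = (db_session.query(Process)
             .from_statement(query)
             .params(seq_type_ids=list(pc_cg.SEQUENCING.keys()),
                     ct_name=args.flowcell)
             .all())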
Example 9
def processWSULSQL(args, queue, logqueue):
    work = True
    session = get_session()
    with open(args.conf) as conf_file:
        conf = yaml.load(conf_file, Loader=yaml.SafeLoader)
    couch = lutils.setupServer(conf)
    db = couch["worksets"]
    procName = mp.current_process().name
    proclog = logging.getLogger(procName)
    proclog.setLevel(level=logging.INFO)
    mfh = QueueHandler(logqueue)
    mft = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    proclog.addHandler(mfh)

    while work:
        # grab a workset id from the queue
        try:
            ws_id = queue.get(block=True, timeout=3)
            proclog.info("Starting work on {}".format(ws_id))
        except Queue.Empty:
            work = False
            proclog.info("exiting gracefully")
            break
        else:
            step = session.query(gt.Process).filter(
                gt.Process.processid == int(ws_id)).one()
            ws = lclasses.Workset_SQL(session, proclog, step)
            doc = {}
            for row in db.view('worksets/lims_id')[ws.obj['id']]:
                doc = db.get(row.id)
            if doc:
                final_doc = lutils.merge(ws.obj, doc)
            else:
                final_doc = ws.obj
            # clean up possible name duplicates
            for row in db.view('worksets/name')[ws.obj['name']]:
                doc = db.get(row.id)
                if doc['id'] != ws.obj['id']:
                    proclog.warning("Duplicate name {} for worksets {} and {}".format(
                        doc['name'], doc['id'], final_doc['id']))
                    db.delete(doc)
            db.save(final_doc)
            proclog.info("updating {0}".format(ws.obj['name']))
            queue.task_done()
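
processWSULSQL is written as a multiprocessing worker: it drains workset ids from a shared queue and logs through a QueueHandler. A minimal sketch of the launcher side, using only stdlib pieces; the process count and handler choice are assumptions:

import logging
import multiprocessing as mp
from logging.handlers import QueueListener

def run_workers(args, ws_ids, n_procs=4):
    work_queue = mp.JoinableQueue()
    log_queue = mp.Queue()
    # one listener drains the shared log queue into a real handler
    listener = QueueListener(log_queue, logging.StreamHandler())
    listener.start()
    for ws_id in ws_ids:
        work_queue.put(ws_id)
    procs = [mp.Process(target=processWSULSQL,
                        args=(args, work_queue, log_queue))
             for _ in range(n_procs)]
    for p in procs:
        p.start()
    work_queue.join()   # wait until every task_done() has been called
    for p in procs:
        p.join()        # workers exit on the queue-empty timeout
    listener.stop()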
Example 10
def main(args):

    with open(args.conf, 'r') as conf_file:
        conf = yaml.safe_load(conf_file)
    couch = lutils.setupServer(conf)
    db = couch['projects']
    view = db.view('samples/customer_names')
    d = None
    for row in view[args.project]:
        d = row.value

    if not d:
        print("Project not found")
        return 0

    print('<table class="table table-striped">')
    print('<tr><th>NGI Name</th><th>Customer Name</th></tr>')
    for sample in sorted(d.keys()):
        print("<tr><td>{}</td><td>{}</td></tr>".format(sample, d[sample]))

    print("</table>")