def main(args):
    log = lutils.setupLog('bioinfologger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as conf_file:
        conf = yaml.safe_load(conf_file)
    bioinfodb = lutils.setupServer(conf)['bioinfo_analysis']
    open_projects = bioinfodb.view('latest_data/sample_id_open')

    for row in open_projects.rows:
        project_id = row.key[0]
        sample_id = row.key[3]
        close_date = None
        try:
            close_date = Project(lims=lims, id=project_id).close_date
        except HTTPError as e:
            if '404: Project not found' in str(e):
                log.error('Project ' + project_id + ' not found in LIMS')
                continue
        if close_date is not None:
            try:
                doc = bioinfodb.get(row.id)
            except Exception as e:
                log.error(str(e) + ' in Project ' + project_id + ' Sample ' + sample_id
                          + ' while accessing doc from statusdb')
                continue
            doc['project_closed'] = True
            try:
                bioinfodb.save(doc)
                log.info('Updated Project ' + project_id + ' Sample ' + sample_id)
            except Exception as e:
                log.error(str(e) + ' in Project ' + project_id + ' Sample ' + sample_id
                          + ' while saving to statusdb')
def main(args):
    log = lutils.setupLog('worksetlogger', args.logfile)
    session = get_session()
    if args.ws:
        step = session.query(Process).filter_by(luid=args.ws).one()
        ws = lclasses.Workset_SQL(session, log, step)
        with open(args.conf) as conf_file:
            conf = yaml.load(conf_file, Loader=yaml.SafeLoader)
        couch = lutils.setupServer(conf)
        db = couch["worksets"]
        doc = {}
        for row in db.view('worksets/lims_id')[ws.obj['id']]:
            doc = db.get(row.id)
        final_doc = lutils.merge(ws.obj, doc)
        db.save(final_doc)
    elif args.recent:
        recent_processes = get_last_modified_processes(
            session, [8, 46, 117, 204, 1454, 1908], args.interval)
        processes_to_update = set()
        for p in recent_processes:
            # Will only catch finished "Setup Workset/Plate" processes
            if p.typeid in [117, 204] and p.daterun:
                processes_to_update.add(p)
            else:
                processes_to_update.update(
                    get_processes_in_history(session, p.processid, [117, 204]))
        log.info("the following processes will be updated : {0}".format(processes_to_update))
        lpar.masterProcessSQL(args, processes_to_update, log)
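# Several functions here rely on lutils.merge to fold the freshly built
# object into whatever is already stored in CouchDB. The helper itself is
# not shown in this file; below is a minimal sketch of the assumed
# semantics (a recursive dict merge where the first argument wins on
# conflicting keys) -- an illustration, not the project's implementation.
def merge(new, old):
    """Recursively fold `old` into `new`; values already in `new` take precedence."""
    for key, value in old.items():
        if key in new and isinstance(new[key], dict) and isinstance(value, dict):
            merge(new[key], value)
        elif key not in new:
            new[key] = value
    return new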
def diff_project_objects(pj_id, couch, logfile):
    proj_db = couch['projects']
    samp_db = couch['samples']
    log = setupLog('diff - {}'.format(pj_id), logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    view = proj_db.view('projects/lims_followed')
    try:
        old_project_couchid = view[pj_id].rows[0].value
    except (KeyError, IndexError):
        log.error("No such project {}".format(pj_id))
        return None
def diff_project_objects(pj_id, couch, logfile, new=True):
    proj_db = couch['projects']
    samp_db = couch['samples']
    log = setupLog('diff - {}'.format(pj_id), logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    view = proj_db.view('projects/lims_followed')
    try:
        old_project_couchid = view[pj_id].rows[0].value
    except (KeyError, IndexError):
        log.error("No such project {}".format(pj_id))
        return None
def main(args):
    log = lutils.setupLog('worksetlogger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    # args.ws decides how far back we are looking
    if args.ws:
        wsp = Process(lims, id=args.ws)
        lc = lclasses.LimsCrawler(lims, wsp)
        lc.crawl()
        try:
            ws = lclasses.Workset(lims, lc, log)
        except NameError:
            log.error("no name found for this workset")
            return
        mycouch = sdb.Couch()
        mycouch.set_db("worksets")
        mycouch.connect()
        view = mycouch.db.view('worksets/name')
        # If there is already a workset with that name in the DB
        if len(view[ws.obj['name']].rows) == 1:
            remote_doc = view[ws.obj['name']].rows[0].value
            # Remove id and rev for comparison
            doc_id = remote_doc.pop('_id')
            doc_rev = remote_doc.pop('_rev')
            if remote_doc != ws.obj:
                # Same name but different content: merge and upload the new version
                ws.obj = lutils.merge(ws.obj, remote_doc)
                ws.obj['_id'] = doc_id
                ws.obj['_rev'] = doc_rev
                mycouch.db[doc_id] = ws.obj
                log.info("updating {0}".format(ws.obj['name']))
            else:
                log.info("not modifying {0}".format(ws.obj['name']))
        elif len(view[ws.obj['name']].rows) == 0:
            # It is a new doc, upload it
            mycouch.save(ws.obj)
            log.info("saving {0}".format(ws.obj['name']))
        else:
            log.warning("more than one row with name {0} found".format(ws.obj['name']))
    else:
        starting_date = datetime.today() - timedelta(args.days)
        str_date = starting_date.strftime("%Y-%m-%dT%H:%M:%SZ")
        wsts = lims.get_processes(type=pc.WORKSET.values(), last_modified=str_date)
        log.info("the following processes will be updated : {0}".format(wsts))
        lpar.masterProcess(args, wsts, lims, log)
def main(ws_id):
    log = lutils.setupLog('worksetlogger', "out.log")
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    wsp = gent.Process(lims, id=ws_id)
    lc = lclasses.LimsCrawler(lims, wsp)
    lc.crawl()
    ws1 = lclasses.Workset(lims, lc, log)
    session = get_session()
    step = session.query(Process).filter_by(luid=ws_id).one()
    ws2 = lclasses.Workset_SQL(session, log, step)
    diffs = my_comp(ws1.obj, ws2.obj)
    if diffs:
        print("\n".join(diffs))
    else:
        print("no diff found")
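# my_comp is assumed here to be a small recursive comparator returning a
# human-readable line for every key on which the API-built and SQL-built
# workset objects disagree. A minimal sketch under that assumption; the
# function body is illustrative, not the one used in this repository.
def my_comp(obj1, obj2, path=''):
    diffs = []
    for key in set(obj1) | set(obj2):
        here = '{}/{}'.format(path, key)
        if key not in obj1:
            diffs.append('{} only in SQL object'.format(here))
        elif key not in obj2:
            diffs.append('{} only in API object'.format(here))
        elif isinstance(obj1[key], dict) and isinstance(obj2[key], dict):
            diffs.extend(my_comp(obj1[key], obj2[key], here))
        elif obj1[key] != obj2[key]:
            diffs.append('{}: {} != {}'.format(here, obj1[key], obj2[key]))
    return diffs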
def diff_project_objects(pj_id, couch, proj_db, logfile, oconf):
    # Import is put here to defer circular imports
    from LIMS2DB.classes import ProjectSQL

    log = setupLog('diff - {}'.format(pj_id), logfile)
    view = proj_db.view('projects/lims_followed')

    def fetch_project(pj_id):
        try:
            return view[pj_id].rows[0].value
        except (KeyError, IndexError):
            log.error("No such project {}".format(pj_id))
            return None

    try:
        old_project_couchid = fetch_project(pj_id)
    except http_client.BadStatusLine:
        log.error("BadStatusLine received after large project")
        # Retry once
        old_project_couchid = fetch_project(pj_id)

    if old_project_couchid is None:
        return None

    old_project = proj_db.get(old_project_couchid)
    # Strip fields that legitimately differ between the two documents
    old_project.pop('_id', None)
    old_project.pop('_rev', None)
    old_project.pop('modification_time', None)
    old_project.pop('creation_time', None)
    old_project['details'].pop('running_notes', None)
    old_project['details'].pop('snic_checked', None)

    session = get_session()
    host = get_configuration()['url']
    new_project = ProjectSQL(session, log, pj_id, host, couch, oconf)
    fediff = diff_objects(old_project, new_project.obj)
    return (fediff, old_project, new_project.obj)
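# Hypothetical wiring for a single diff run. The config path, log file and
# project id are placeholders, lutils.setupServer is assumed to return a
# CouchDB server handle as in the other scripts here, and oconf stands in
# for whatever order-portal config ProjectSQL expects.
import yaml

with open('statusdb.yaml') as conf_file:
    conf = yaml.safe_load(conf_file)
couch = lutils.setupServer(conf)
oconf = None  # placeholder for the order-portal config passed through to ProjectSQL
result = diff_project_objects('P1234', couch, couch['projects'], 'diff.log', oconf)
if result:
    fediff, old_doc, new_doc = result
    print(fediff)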
def main(args):
    log = lutils.setupLog('worksetlogger', args.logfile)
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    # args.ws decides how far back we are looking
    if args.ws:
        wsp = Process(lims, id=args.ws)
        lc = lclasses.LimsCrawler(lims, wsp)
        lc.crawl()
        try:
            ws = lclasses.Workset(lims, lc, log)
        except NameError:
            log.error("no name found for this workset")
            return
        mycouch = sdb.Couch()
        mycouch.set_db("worksets")
        mycouch.connect()
        view = mycouch.db.view('worksets/name')
        # If there is already a workset with that name in the DB
        if len(view[ws.obj['name']].rows) == 1:
            remote_doc = view[ws.obj['name']].rows[0].value
            # Remove id and rev for comparison
            doc_id = remote_doc.pop('_id')
            doc_rev = remote_doc.pop('_rev')
            if remote_doc != ws.obj:
                # Same name but different content: merge and upload the new version
                ws.obj = lutils.merge(ws.obj, remote_doc)
                ws.obj['_id'] = doc_id
                ws.obj['_rev'] = doc_rev
                mycouch.db[doc_id] = ws.obj
                log.info("updating {0}".format(ws.obj['name']))
            else:
                log.info("not modifying {0}".format(ws.obj['name']))
        elif len(view[ws.obj['name']].rows) == 0:
            # It is a new doc, upload it
            mycouch.save(ws.obj)
            log.info("saving {0}".format(ws.obj['name']))
        else:
            log.warning("more than one row with name {0} found".format(ws.obj['name']))
    else:
        try:
            from genologics_sql.queries import get_last_modified_processes, get_processes_in_history
            from genologics_sql.utils import get_session
            session = get_session()
            # Aggregate QC, Setup Workset/Plate, or sequencing
            recent_processes = get_last_modified_processes(session, [8, 204, 38, 714, 46])
            # Setup Workset/Plate is type 204
            processes_to_update = []
            for p in recent_processes:
                if p.typeid == 204:
                    processes_to_update.append(p)
                else:
                    processes_to_update.extend(
                        get_processes_in_history(session, p.processid, [204]))
            wsts = []
            for p in set(processes_to_update):
                wsts.append(Process(lims, id=p.luid))
            log.info("list compiled via the DB")
        except ImportError:
            starting_date = datetime.today() - timedelta(args.days)
            str_date = starting_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            wsts = lims.get_processes(type=pc.WORKSET.values(), last_modified=str_date)
            log.info("list compiled via the API")
        log.info("the following processes will be updated : {0}".format(wsts))
        lpar.masterProcess(args, wsts, lims, log)
    )
    parser.add_argument(
        '-l', '--log', dest='logfile',
        default=os.path.join(os.environ['HOME'], 'log/LIMS2DB', 'OrderPortal_update.log'),
        help='log file. Default: ~/log/LIMS2DB/OrderPortal_update.log')
    parser.add_argument(
        '-d', '--dryrun', action='store_true', dest='dryrun', default=False,
        help='dry run: no changes stored')
    args = parser.parse_args()

    log = lutils.setupLog('orderlogger', args.logfile)
    with open(args.config) as config_file:
        creds = json.load(config_file)
    # Base URL for your OrderPortal instance
    OP_BASE_URL = creds['OrderPortal'].get('URL')
    # API key for the user account
    API_KEY = creds['OrderPortal'].get('API_KEY')
    headers = {'X-OrderPortal-API-key': API_KEY}
    ord_port_apis = Order_Portal_APIs(OP_BASE_URL, headers, log)
    if args.option == 'OrderStatus':
        ord_port_apis.update_order_status(date.today(), args.dryrun)
    elif args.option == 'OrderInternalID':
        ord_port_apis.update_order_internal_id(date.today() - timedelta(days=1), args.dryrun)
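# Example invocations, assuming the truncated top of the parser defines an
# `option` positional and a `--config` flag, as the references to
# args.option and args.config above suggest; the script name is a
# placeholder.
#
#   python update_orderportal.py OrderStatus --config creds.json --dryrun
#   python update_orderportal.py OrderInternalID --config creds.json -l /tmp/op.log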