def main(args):
    """Build a ProjectSQL document for one project and either
    pretty-print it (``--test``) or save it to the couch database.

    The project is looked up by name (``args.name``) in the LIMS DB, or
    taken directly as an id (``args.pid``).
    """
    with open(args.conf, 'r') as conf_file:
        conf = yaml.safe_load(conf_file)
    couch = lutils.setupServer(conf)
    # safe_load: config files must never trigger arbitrary object
    # construction, and plain yaml.load without a Loader fails on PyYAML >= 6.
    with open(args.oconf, 'r') as ocf:
        oconf = yaml.safe_load(ocf)['order_portal']
    mainlog = get_logger('psullogger')
    lims_db = get_session()
    host = get_configuration()['url']
    if args.name:
        # Resolve the human-readable project name to its LIMS luid.
        pj_id = lims_db.query(DBProject.luid).filter(
            DBProject.name == args.name).scalar()
    else:
        pj_id = args.pid
    P = ProjectSQL(lims_db, mainlog, pj_id, host, couch, oconf)
    if args.test:
        # Dry run: show the generated object instead of uploading it.
        # (pprint.pprint returns None, so the old `pp = ...` binding was dead.)
        pprint.pprint(P.obj)
    else:
        P.save()
def main(options):
    """Upload project summary documents to couchdb.

    With ``--project_name`` a single project is processed (uploaded or,
    when not uploading, dumped as JSON to ``output_f``/stdout); otherwise
    a list of projects is built and dispatched to worker processes via
    ``masterProcess``.
    """
    conf = options.conf
    output_f = options.output_f
    couch = load_couch_server(conf)
    mainlims = Lims(BASEURI, USERNAME, PASSWORD)
    lims_db = get_session()

    # Rotating file log: 200 MB per file, 5 backups.
    mainlog = logging.getLogger('psullogger')
    mainlog.setLevel(level=logging.INFO)
    mfh = logging.handlers.RotatingFileHandler(options.logfile,
                                               maxBytes=209715200,
                                               backupCount=5)
    mft = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    mainlog.addHandler(mfh)

    # try getting orderportal config. It is optional: failure only disables
    # order-information updates, it must not abort the run.
    oconf = None
    try:
        with open(options.oconf, 'r') as ocf:
            oconf = yaml.load(ocf, Loader=yaml.SafeLoader)['order_portal']
    except Exception as e:
        # logger.warning, not the deprecated logger.warn alias.
        mainlog.warning(
            "Loading orderportal config {} failed due to {}, so order information "
            "for project will not be updated".format(options.oconf, e))

    if options.project_name:
        host = get_configuration()['url']
        pj_id = lims_db.query(DBProject.luid).filter(
            DBProject.name == options.project_name).scalar()
        if not pj_id:
            # Fall back to treating the given name as the id itself.
            pj_id = options.project_name
        P = ProjectSQL(lims_db, mainlog, pj_id, host, couch, oconf)
        if options.upload:
            P.save(
                update_modification_time=not options.no_new_modification_time)
        else:
            # Dry run: write the document as JSON instead of uploading.
            if output_f is not None:
                with open(output_f, 'w') as f:
                    f.write(json.dumps(P.obj))
            else:
                print(json.dumps(P.obj))
    else:
        projects = create_projects_list(options, lims_db, mainlims, mainlog)
        masterProcess(options, projects, mainlims, mainlog, oconf)
    lims_db.commit()
    lims_db.close()
def main(options):
    """Upload project summary documents to couchdb.

    Supports a legacy path (``--old``: PSUL against the REST LIMS) and the
    SQL path (ProjectSQL). With ``--project_name`` a single project is
    processed; otherwise a project list is dispatched to worker processes.
    """
    conf = options.conf
    upload_data = options.upload
    output_f = options.output_f
    couch = load_couch_server(conf)
    proj_db = couch['projects']
    samp_db = couch['samples']
    mainlims = Lims(BASEURI, USERNAME, PASSWORD)
    lims_db = get_session()

    # Rotating file log: 200 MB per file, 5 backups.
    mainlog = logging.getLogger('psullogger')
    mainlog.setLevel(level=logging.INFO)
    mfh = logging.handlers.RotatingFileHandler(options.logfile,
                                               maxBytes=209715200,
                                               backupCount=5)
    mft = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    mainlog.addHandler(mfh)

    # try getting orderportal config. Optional: failure only disables
    # order-information updates, it must not abort the run.
    oconf = None
    if not options.old:
        try:
            with open(options.oconf, 'r') as ocf:
                # SafeLoader: plain yaml.load without a Loader is unsafe and
                # raises TypeError on PyYAML >= 6.
                oconf = yaml.load(ocf, Loader=yaml.SafeLoader)['order_portal']
        except Exception as e:
            # logger.warning, not the deprecated logger.warn alias.
            mainlog.warning(
                "Loading orderportal config {} failed due to {}, so order information "
                "for project will not be updated".format(options.oconf, e))

    if options.project_name:
        if options.old:
            # Legacy REST-API path.
            proj = mainlims.get_projects(name=options.project_name)
            if not proj:
                mainlog.warning('No project named {man_name} in Lims'.format(
                    man_name=options.project_name))
            P = PSUL(proj[0], samp_db, proj_db, options.upload,
                     options.project_name, output_f, mainlog)
            P.handle_project()
        else:
            host = get_configuration()['url']
            pj_id = lims_db.query(DBProject.luid).filter(
                DBProject.name == options.project_name).scalar()
            if not pj_id:
                # Fall back to treating the given name as the id itself.
                pj_id = options.project_name
            P = ProjectSQL(lims_db, mainlog, pj_id, host, couch, oconf)
            if options.upload:
                P.save(
                    update_modification_time=not options.no_new_modification_time)
            else:
                # Dry run: pretty-print the document to a file or stdout.
                if output_f is not None:
                    with open(output_f, 'w') as f:
                        pprint(P.obj, stream=f)
                else:
                    pprint(P.obj)
    else:
        projects = create_projects_list(options, lims_db, mainlims, mainlog)
        masterProcess(options, projects, mainlims, mainlog, oconf)
    lims_db.commit()
    lims_db.close()
def main(options):
    """Upload project summary documents to couchdb.

    Supports a legacy path (``--old``: PSUL against the REST LIMS) and the
    SQL path (ProjectSQL). With ``--project_name`` a single project is
    processed; otherwise a project list is dispatched to worker processes.
    """
    conf = options.conf
    upload_data = options.upload
    output_f = options.output_f
    couch = load_couch_server(conf)
    proj_db = couch['projects']
    samp_db = couch['samples']
    mainlims = Lims(BASEURI, USERNAME, PASSWORD)
    lims_db = get_session()

    # Rotating file log: 200 MB per file, 5 backups.
    mainlog = logging.getLogger('psullogger')
    mainlog.setLevel(level=logging.INFO)
    mfh = logging.handlers.RotatingFileHandler(options.logfile,
                                               maxBytes=209715200,
                                               backupCount=5)
    mft = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    mainlog.addHandler(mfh)

    if options.project_name:
        if options.old:
            # Legacy REST-API path.
            proj = mainlims.get_projects(name=options.project_name)
            if not proj:
                # logger.warning, not the deprecated logger.warn alias.
                mainlog.warning('No project named {man_name} in Lims'.format(
                    man_name=options.project_name))
            P = PSUL(proj[0], samp_db, proj_db, options.upload,
                     options.project_name, output_f, mainlog)
            P.handle_project()
        else:
            host = get_configuration()['url']
            pj_id = lims_db.query(DBProject.luid).filter(
                DBProject.name == options.project_name).scalar()
            if not pj_id:
                # Fall back to treating the given name as the id itself.
                pj_id = options.project_name
            P = ProjectSQL(lims_db, mainlog, pj_id, host, couch)
            if options.upload:
                P.save()
            else:
                # Dry run: pretty-print instead of uploading.
                pprint(P.obj)
    else:
        projects = create_projects_list(options, lims_db, mainlims, mainlog)
        masterProcess(options, projects, mainlims, mainlog)
    lims_db.commit()
    lims_db.close()
def diff_project_objects(pj_id, couch, proj_db, logfile, oconf):
    """Diff the stored couch document for ``pj_id`` against a freshly
    built ProjectSQL object.

    Returns ``(diff, old_doc, new_doc)``, or ``None`` when the project is
    not present in the couch view.
    """
    # Import is put here to defer circular imports
    from LIMS2DB.classes import ProjectSQL

    log = setupLog('diff - {}'.format(pj_id), logfile)
    view = proj_db.view('projects/lims_followed')

    def _lookup_couchid(project_id):
        # Couch document id for the project, or None when the view has
        # no row for it.
        try:
            return view[project_id].rows[0].value
        except (KeyError, IndexError):
            log.error("No such project {}".format(project_id))
            return None

    try:
        doc_id = _lookup_couchid(pj_id)
    except http_client.BadStatusLine:
        # Large projects can make the server drop the connection; retry once.
        log.error("BadStatusLine received after large project")
        doc_id = _lookup_couchid(pj_id)

    if doc_id is None:
        return None

    old_project = proj_db.get(doc_id)
    # Strip volatile bookkeeping fields so they never show up as diffs.
    for transient in ('_id', '_rev', 'modification_time', 'creation_time'):
        old_project.pop(transient, None)
    for transient in ('running_notes', 'snic_checked'):
        old_project['details'].pop(transient, None)

    session = get_session()
    host = get_configuration()['url']
    new_project = ProjectSQL(session, log, pj_id, host, couch, oconf)
    fediff = diff_objects(old_project, new_project.obj)
    return (fediff, old_project, new_project.obj)
def processPSUL(options, queue, logqueue, oconf=None):
    """Worker-process body: pull project names off ``queue``, build and
    save a ProjectSQL document for each, logging through ``logqueue``.

    A per-project lock file in ``options.lockdir`` prevents two workers
    from updating the same project concurrently. Exits when the queue
    stays empty for 3 seconds.
    """
    couch = load_couch_server(options.conf)
    db_session = get_session()
    work = True
    procName = mp.current_process().name

    proclog = logging.getLogger(procName)
    proclog.setLevel(level=logging.INFO)
    mfh = QueueHandler(logqueue)
    mft = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    mfh.setFormatter(mft)
    proclog.addHandler(mfh)

    # Stagger worker start-up by the numeric suffix of the process name
    # (e.g. "Process-3" sleeps 3 s); fall back to 1 s when the suffix is
    # not an int. Only int() can raise here, so catch ValueError — the old
    # bare except also swallowed KeyboardInterrupt.
    try:
        time.sleep(int(procName[8:]))
    except ValueError:
        time.sleep(1)

    while work:
        # grabs project from queue
        try:
            projname = queue.get(block=True, timeout=3)
        except Queue.Empty:
            work = False
            proclog.info("exiting gracefully")
            break
        proclog.info("Starting work on {} ".format(projname))
        # qsize() raises NotImplementedError on some platforms (e.g. macOS).
        # Guard it separately: previously that exception aborted the whole
        # iteration, silently dropping the project and never calling
        # task_done().
        try:
            proclog.info("Approximately {} projects left in queue".format(
                queue.qsize()))
        except NotImplementedError:
            # qsize failed, no big deal
            pass

        # locks the project : cannot be updated more than once.
        lockfile = os.path.join(options.lockdir, projname)
        if not os.path.exists(lockfile):
            try:
                open(lockfile, 'w').close()
            except OSError:
                proclog.error("cannot create lockfile {}".format(lockfile))
            try:
                pj_id = db_session.query(DBProject.luid).filter(
                    DBProject.name == projname).scalar()
                host = get_configuration()['url']
                P = ProjectSQL(db_session, proclog, pj_id, host, couch, oconf)
                P.save()
            except Exception:
                # Log the full traceback but keep the worker alive for the
                # next project.
                error = sys.exc_info()
                stack = traceback.extract_tb(error[2])
                proclog.error("{0}:{1}\n{2}".format(
                    error[0], error[1], formatStack(stack)))
            try:
                os.remove(lockfile)
            except OSError:
                proclog.error("cannot remove lockfile {}".format(lockfile))
        else:
            proclog.info(
                "project {} is locked, skipping.".format(projname))
        # signals to queue job is done
        queue.task_done()
    db_session.commit()
    db_session.close()
try: old_project_couchid = view[pj_id].rows[0].value except KeyError, IndexError: log.error("No such project {}".format(pj_id)) old_project = proj_db.get(old_project_couchid) old_project.pop('_id', None) old_project.pop('_rev', None) old_project.pop('modification_time', None) old_project.pop('creation_time', None) if new: from LIMS2DB.classes import ProjectSQL session = get_session() host = get_configuration()['url'] new_project = ProjectSQL(session, log, pj_id, host, couch) else: import LIMS2DB.objectsDB.objectsDB as DB new_project = DB.ProjectDB(lims, pj_id, samp_db, log) fediff = diff_objects(old_project, new_project.obj) return (fediff, old_project, new_project.obj) def diff_objects(o1, o2, parent=''): diffs = {} for key in o1: if key in o2: if isinstance(o1[key], dict):