# Example 1
def findprojs(key, hours=240):
    """Return ``(name, id)`` tuples for LIMS projects matching *key*.

    :param key: ``'all'`` selects every project flagged as IGN re-seq or
        HiSeq X; ``'new'`` selects the subset of those modified recently;
        any other value is treated as an exact project name to look up.
    :param hours: for ``key == 'new'``, how far back a project's processes
        may have been modified and still count as recent. Only used by the
        REST-API fallback path; default 240 hours (10 days).
    """
    projects = set()
    if key in ('all', 'new'):
        # Candidate projects are selected by either of two UDF criteria.
        for udf in ({'Bioinformatic QC': 'WG re-seq (IGN)'},
                    {'Sequencing platform': 'HiSeq X'}):
            projects.update(lims.get_projects(udf=udf))
        if key == 'all':
            return [(p.name, p.id) for p in projects]
        # key == 'new': keep only recently-modified projects.
        try:
            from genologics_sql.queries import get_last_modified_projectids
            from genologics_sql.utils import get_session
            session = get_session()
            valid_pids = get_last_modified_projectids(session)
            ret = [x for x in projects if x.id in valid_pids]
        except ImportError:
            # Direct SQL helpers unavailable; fall back to the slower
            # REST API and filter on recent process modification times.
            logging.info("direct sql query did not work")
            ret = set()
            delta = datetime.timedelta(hours=hours)
            time_string_pc = (datetime.datetime.now() -
                              delta).strftime('%Y-%m-%dT%H:%M:%SZ')
            for p in projects:
                if (not p.close_date) and lims.get_processes(
                        projectname=p.name, last_modified=time_string_pc):
                    ret.add(p)
        return [(p.name, p.id) for p in ret]
    else:
        projects = lims.get_projects(name=key)
        return [(p.name, p.id) for p in projects]
# Example 2
def main(args):
    """Set the 'Reads Min' UDF on recently-modified LIMS projects.

    Looks up the minimum yield per sequencing platform in the
    "expected_yields" CouchDB database and writes
    ``min_yield * lanes_ordered / non_aborted_sample_count`` back to
    each project modified within the last ``args.hours`` hours.
    """
    lims_db = get_session()
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as cf:
        # safe_load: never execute arbitrary YAML tags from a config file.
        db_conf = yaml.safe_load(cf)
        couch = setupServer(db_conf)
    db = couch["expected_yields"]
    postgres_string = "{} hours".format(args.hours)
    project_ids = get_last_modified_projectids(lims_db, postgres_string)

    for project in [Project(lims, id=x) for x in project_ids]:
        # Count only samples that were not manually aborted.
        samples = lims.get_samples(projectname=project.name)
        samples_count = 0
        for sample in samples:
            if not ("Status (manual)" in sample.udf
                    and sample.udf["Status (manual)"] == "Aborted"):
                samples_count += 1
        try:
            lanes_ordered = project.udf['Sequence units ordered (lanes)']
            key = parse_sequencing_platform(project.udf['Sequencing platform'])
        except KeyError:
            # Projects missing either required UDF cannot be processed.
            continue
        for row in db.view("yields/min_yield"):
            db_key = [x.lower() if x else None for x in row.key]
            if db_key == key:
                try:
                    project.udf['Reads Min'] = float(
                        row.value) * lanes_ordered / samples_count
                    project.put()
                except ZeroDivisionError:
                    # Every sample was aborted; nothing to require.
                    pass
# Example 3
def findprojs(key):
    """Look up LIMS projects by *key* and return ``(name, id)`` pairs.

    ``'all'`` returns every IGN re-seq / HiSeq X project, ``'new'``
    restricts those to recently-modified ones, and any other key is
    treated as an exact project name.
    """
    if key == 'all':
        found = set()
        found.update(lims.get_projects(udf={'Bioinformatic QC': 'WG re-seq (IGN)'}))
        found.update(lims.get_projects(udf={'Sequencing platform': 'HiSeq X'}))
        return [(proj.name, proj.id) for proj in found]

    if key == 'new':
        found = set()
        found.update(lims.get_projects(udf={'Bioinformatic QC': 'WG re-seq (IGN)'}))
        found.update(lims.get_projects(udf={'Sequencing platform': 'HiSeq X'}))
        recent = set()
        try:
            from genologics_sql.queries import get_last_modified_projectids
            from genologics_sql.utils import get_session
            session = get_session()
            recent_pids = get_last_modified_projectids(session)
            recent = [proj for proj in found if proj.id in recent_pids]
        except ImportError:
            # No direct database access available; fall back to the REST
            # API, keeping open projects with processes in the last 10 days.
            logging.info("direct sql query did not work")
            cutoff = datetime.datetime.now() - datetime.timedelta(hours=240)
            stamp = cutoff.strftime('%Y-%m-%dT%H:%M:%SZ')
            for proj in found:
                if proj.close_date:
                    continue
                if lims.get_processes(projectname=proj.name, last_modified=stamp):
                    recent.add(proj)
        return [(proj.name, proj.id) for proj in recent]

    # Anything else is interpreted as an exact project name.
    return [(proj.name, proj.id) for proj in lims.get_projects(name=key)]
def create_projects_list(options, db_session, lims, log):
    """Build the list of projects to process, driven by *options*.

    With ``--all_projects`` the projects come from the LIMS database
    (optionally restricted to the last ``--hours``); with ``--input``
    they are read, one name per line, from the given file.
    """
    if options.all_projects:
        if options.hours:
            window = "{} hours".format(options.hours)
            recent_ids = get_last_modified_projectids(db_session, window)
            selected = (db_session.query(DBProject)
                        .filter(DBProject.luid.in_(recent_ids))
                        .all())
        else:
            selected = db_session.query(DBProject).all()
        log.info("project list : {0}".format(
            " ".join([p.luid for p in selected])))
        return selected

    elif options.input:
        chosen = []
        with open(options.input, "r") as handle:
            for line in handle:
                # Names not found in LIMS are silently skipped.
                try:
                    chosen.append(lims.get_projects(name=line.rstrip())[0])
                except IndexError:
                    pass
        return chosen
def main(args):
    """Update each recently-modified project's 'Reads Min' UDF.

    Fetches the per-platform minimum yield from the "expected_yields"
    CouchDB database and stores
    ``min_yield * lanes_ordered / non_aborted_sample_count`` on every
    LIMS project modified within the last ``args.hours`` hours.
    """
    lims_db = get_session()
    lims = Lims(BASEURI, USERNAME, PASSWORD)
    with open(args.conf) as cf:
        # safe_load: avoid executing arbitrary YAML tags from the config.
        db_conf = yaml.safe_load(cf)
        couch = setupServer(db_conf)
    db = couch["expected_yields"]
    postgres_string = "{} hours".format(args.hours)
    project_ids = get_last_modified_projectids(lims_db, postgres_string)

    for project in [Project(lims, id=x) for x in project_ids]:
        # Count only samples not manually marked as aborted.
        samples_count = 0
        samples = lims.get_samples(projectname=project.name)
        for sample in samples:
            if not ("Status (manual)" in sample.udf
                    and sample.udf["Status (manual)"] == "Aborted"):
                samples_count += 1
        try:
            lanes_ordered = project.udf['Sequence units ordered (lanes)']
            key = parse_sequencing_platform(project.udf['Sequencing platform'])
        except KeyError:
            # Skip projects missing either required UDF.
            continue
        for row in db.view("yields/min_yield"):
            db_key = [x.lower() if x else None for x in row.key]
            if db_key == key:
                try:
                    project.udf['Reads Min'] = float(row.value) * lanes_ordered / samples_count
                    project.put()
                except ZeroDivisionError:
                    # All samples aborted; leave the UDF untouched.
                    pass
# Example 6
    def update_order_status(self, to_date, dry_run):
        """Push LIMS project state changes to the order portal.

        For every project modified in LIMS during the last 24 hours,
        fetches its portal order (via the 'Portal ID' UDF, NGI-prefixed
        only) and advances the order status: accepted -> processing once
        the project is queued, and processing -> aborted/closed when the
        project was aborted or closed today or yesterday (relative to
        *to_date*). With *dry_run* set, only prints what would change.
        """
        lims_db = genologics_sql.utils.get_session()
        pjs = queries.get_last_modified_projectids(lims_db, '24 hours')
        yesterday = (to_date - timedelta(days=1)).strftime('%Y-%m-%d')
        today = to_date.strftime('%Y-%m-%d')
        for p in pjs:
            project = Project(self.lims, id=p)
            try:
                ORDER_ID = project.udf['Portal ID']
            except KeyError:
                # Project has no portal order attached; nothing to sync.
                continue
            if not ORDER_ID.startswith('NGI'):
                # Only NGI-prefixed portal IDs refer to real orders.
                continue
            url = '{base}/api/v1/order/{id}'.format(base=self.base_url,
                                                    id=ORDER_ID)
            response = requests.get(url, headers=self.headers)
            data = ''
            try:
                data = response.json()
            except ValueError:  #In case a portal id does not exit on lims, skip the proj
                continue
            url = ''
            status = ''

            # A non-empty url below marks that a transition should happen;
            # its value is the portal endpoint for the target status.
            if (data['status'] == 'accepted' and project.udf.get('Queued')
                    and not project.close_date):
                url = data['links']['processing']['href']
                status_set = 'processing'
            if data['status'] == 'processing':
                # Only transition if the abort/close happened within the
                # last day, so reruns don't repeat old notifications.
                if project.udf.get('Aborted') and (
                        project.udf.get('Aborted') == today
                        or project.udf.get('Aborted') == yesterday):
                    url = data['links']['aborted']['href']
                    status_set = 'aborted'
                elif project.close_date and (project.close_date == today or
                                             project.close_date == yesterday):
                    url = data['links']['closed']['href']
                    status_set = 'closed'
            if url:
                if not dry_run:
                    #Order portal sends a mail to user on status change
                    response = requests.post(url, headers=self.headers)
                    assert response.status_code == 200, (response.status_code,
                                                         response.reason)
                    self.log.info(
                        'Updated status for order {} from {} to {}'.format(
                            ORDER_ID, data['status'], status_set))
                else:
                    print(
                        'Dry run: {} Updated status for order {} from {} to {}'
                        .format(date.today(), ORDER_ID, data['status'],
                                status_set))
def create_projects_list(options, lims, log):
    """Assemble the list of LIMS projects to process from *options*.

    With ``options.all_projects``: returns every project, or — when
    ``options.hours`` is given — only projects modified within that many
    hours. With ``options.input``: returns the projects named (one per
    line) in that file; unknown names are skipped. Returns ``None`` when
    neither option is set (unchanged from the original behavior).
    """
    projects = []
    if options.all_projects:
        if options.hours:
            postgres_string = "{} hours".format(options.hours)
            db_session = get_session()
            project_ids = get_last_modified_projectids(db_session,
                                                       postgres_string)
            valid_projects = [Project(lims, id=x) for x in project_ids]
            log.info("project list : {0}".format(
                " ".join([p.id for p in valid_projects])))
            return valid_projects
        # Fetch the (large) full project list only when no time filter
        # applies; previously it was fetched unconditionally and then
        # discarded on the filtered path.
        projects = lims.get_projects()
        log.info("project list : {0}".format(
            " ".join([p.id for p in projects])))
        return projects

    elif options.input:
        with open(options.input, "r") as input_file:
            for pname in input_file:
                try:
                    projects.append(lims.get_projects(name=pname.rstrip())[0])
                except IndexError:
                    # Name not found in LIMS; skip it.
                    pass
        return projects