Example 1
    def register_algorithm(self):
        storage = QSAlgorithmSource(config)
        existing_folders = [tup[0] for tup in Session.query(ReprocessConfig.original_folder).all()]

        errors = dict()
        src_dir = self.form_result['src_dir']
        if src_dir in existing_folders:
            errors['src_dir'] = 'This algorithm has already been registered.'

        elif not storage.source_path_exists(src_dir):
            errors['src_dir'] = 'This algorithm is not accessible in the file system.'

        if self.form_result['peak_detection_version'] == (0,0):
            # this is arbitrary
            peak_detection_version = (0, QUANTASOFT_DIR_VERSION_RE.search(src_dir).group(1).split('_')[-1])
        else:
            peak_detection_version = self.form_result['peak_detection_version']

        if self.form_result['peak_quantitation_version'] == (0,0):
            peak_quantitation_version = (0, QUANTASOFT_DIR_VERSION_RE.search(src_dir).group(1).split('_')[-1])
        else:
            peak_quantitation_version = self.form_result['peak_quantitation_version']

        if errors:
            resp = self._algorithms_base()
            defaults = AlgorithmRegisterForm.from_python(self.form_result)
            return h.render_bootstrap_form(resp,
                defaults=defaults,
                errors=errors,
                error_formatters=h.tw_bootstrap_error_formatters)

        try:
            rp = ReprocessConfig(name=src_dir.split(os.path.sep)[0],
                                 code=self.form_result['code'],
                                 peak_detection_major=peak_detection_version[0],
                                 peak_detection_minor=peak_detection_version[1],
                                 peak_quant_major=peak_quantitation_version[0],
                                 peak_quant_minor=peak_quantitation_version[1],
                                 trigger_fixed_width=100,
                                 active=True,
                                 cluster_mode=ReprocessConfig.CLUSTER_MODE_CLUSTER,
                                 original_folder=src_dir)

            storage.add_reprocessor(src_dir, self.form_result['code'])
            Session.add(rp)
            Session.commit()
            session['flash'] = 'New algorithm reprocessor created.'
            session.save()
            return redirect(url(controller='product', action='algorithms'))

        except shutil.Error:
            session['flash'] = 'Could not copy source algorithm to destination.'
            session['flash_class'] = 'error'
            session.save()
            return redirect(url(controller='product', action='algorithms'))
        except IOError:
            session['flash'] = "Could not access the algorithm's file system."
            session['flash_class'] = 'error'
            session.save()
            return redirect(url(controller='product', action='algorithms'))
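
The flow above (validate, re-render the form with errors, otherwise build the model, Session.add, Session.commit, then flash and redirect) repeats throughout these examples. Below is a minimal, self-contained sketch of that flow outside of Pylons; AlgorithmRecord, register(), and the in-memory SQLite engine are stand-ins invented for the sketch, not part of the project above.

    # Minimal sketch of the validate -> add -> commit -> flash/redirect flow.
    # AlgorithmRecord, register(), and the in-memory SQLite engine are
    # stand-ins invented for this sketch.
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class AlgorithmRecord(Base):
        __tablename__ = 'algorithm_records'
        id = Column(Integer, primary_key=True)
        original_folder = Column(String, unique=True)
        code = Column(String)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)

    def register(session, src_dir, code, existing_folders):
        errors = {}
        if src_dir in existing_folders:
            errors['src_dir'] = 'This algorithm has already been registered.'
        if errors:
            return None, errors      # the controller would re-render the form
        rec = AlgorithmRecord(original_folder=src_dir, code=code)
        session.add(rec)
        session.commit()             # then set the flash message and redirect
        return rec, None

    session = Session()
    rec, errors = register(session, 'QuantaSoft_1_3_4', 'qs134', existing_folders=[])
    print(errors)                    # None -- the record was committed
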
Example 2
    def command(self):
        app = self.load_wsgi_app()
        root = app.config['qlb.dg_root']
        top_folders = app.config['qlb.top_dg_folders']
        source = DGLogSource(root, top_folders)

        min_file_dict = dict(Session.query(DropletGeneratorRun.dirname,
                                      func.max(DropletGeneratorRun.basename).label('last_file')).\
                                group_by(DropletGeneratorRun.dirname).all())
        
        min_file_prefix = '2011-03-21'

        dgs = Session.query(DropletGenerator).all()
        dg_ids = [dg.id for dg in dgs]

        for dirname, basename in source.path_iter(min_file_name=min_file_prefix, min_file_dict=min_file_dict):
            print dirname, basename
            dg_run = read_dg_log(source.full_path(dirname, basename))
            if not dg_run:
                continue
            dg_run.dirname = dirname
            dg_run.basename = basename
            if dg_run.droplet_generator_id in dg_ids:
                Session.add(dg_run)
                Session.commit()
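
Wrapping dict() around the two-column, grouped query is a compact way to build a {dirname: newest basename} lookup in a single round trip. A rough standalone illustration of the same idiom follows; the DGRun model and sample rows are invented for the sketch.

    # Sketch of building a {dirname: max(basename)} lookup from a grouped
    # two-column query, as done above.  DGRun is a made-up stand-in model.
    from sqlalchemy import create_engine, Column, Integer, String, func
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class DGRun(Base):
        __tablename__ = 'dg_runs'
        id = Column(Integer, primary_key=True)
        dirname = Column(String)
        basename = Column(String)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    session.add_all([DGRun(dirname='DG001', basename='2011-03-20.log'),
                     DGRun(dirname='DG001', basename='2011-03-22.log'),
                     DGRun(dirname='DG002', basename='2011-03-21.log')])
    session.commit()

    # each row is a (dirname, max_basename) pair, so dict() builds the lookup directly
    last_file = dict(session.query(DGRun.dirname,
                                   func.max(DGRun.basename)).group_by(DGRun.dirname).all())
    print(last_file)   # {'DG001': '2011-03-22.log', 'DG002': '2011-03-21.log'}
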
Example 3
    def batch_plate_do_upload(self, id=None):
        batch_plate = self.__load_batch_plate(id)
        if not batch_plate:
            abort(404)
        box2 = self.form_result['box2']
        plate = self.form_result['plate']
        plate_type = batch_plate.batch.plate_type
        if plate_type.code == 'fvtitr' and len(plate.analyzed_wells) == 4:
            # if four wells, it's really a MFGCC (FVTITR FAM+/VIC+ should have 2)
            plate_type = Session.query(PlateType).filter_by(code='mfgcc').one()

        plateobj = save_plate_from_upload_request(request.POST['plate'], plate, box2, plate_type_obj=plate_type)

        # I want to put this in the form validator, but it's field dependent, so not right now
        if plate_type.code in ('mfgcc', 'bcc'):
            ok, message = validate_colorcomp_plate(plate)
            if not ok:
                response = self._batch_plate_upload_base(id)
                Session.rollback()
                return h.render_bootstrap_form(response, errors={'plate': message})
        
        Session.add(plateobj)
        if batch_plate.batch.plate_type.code == plate_type.code:
            batch_plate.plate = plateobj
        else:
            batch_plate.secondary_plate = plateobj

        batch_plate.qc_plate = self.form_result['qc_plate']
        batch_plate.plate_notes = self.form_result['plate_notes']
        Session.commit()

        session['flash'] = 'Plate linked.'
        session.save()
        return redirect(url(controller='metrics', action='per_plate', id=plateobj.id))
Example 4
 def __update_batch_plate_record(self, record):
     record.dg_method = self.form_result['dg_method']
     record.qc_plate = bool(self.form_result['qc_plate'])
     record.plate_notes = self.form_result['plate_notes']
     record.thermal_cycler_id = self.form_result['thermal_cycler_id']
     Session.add(record)
     Session.commit()
Example 5
 def __form_to_model(self, form, model=None):
     if not model:
         model = SequenceGroupTag()
         Session.add(model)
     
     model.name  = form['name']
     model.notes = form['notes']
     model.owner_id = form['owner_id']
     # merge() returns the session-bound instance; return that one in case
     # a detached model was passed in
     model = Session.merge(model)
     return model
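
This helper calls both Session.add (for a brand-new tag) and Session.merge. merge() returns the instance that is actually attached to the session, which is the one worth keeping when a detached object is passed in. Below is a self-contained sketch of the difference, with a made-up Tag model and in-memory engine.

    # Sketch contrasting Session.add (new, transient objects) with
    # Session.merge (copy state from a detached object onto the persistent one).
    # The Tag model and in-memory engine are stand-ins for this sketch only.
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class Tag(Base):
        __tablename__ = 'tags'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)

    session = Session()
    tag = Tag(name='original')
    session.add(tag)          # add(): the object itself becomes persistent
    session.commit()
    tag_id = tag.id
    session.close()           # 'tag' is now detached

    session = Session()
    detached = Tag(id=tag_id, name='renamed')
    merged = session.merge(detached)   # merge() returns the attached copy
    session.commit()
    print(merged is detached)                    # False
    print(session.query(Tag).get(tag_id).name)   # 'renamed'

Capturing the return value of merge(), as in the rewrite above, ensures the caller gets the attached instance rather than the detached argument.
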
Example 6
    def update_reader(self):
        log_entry = self.__make_box2_log_entry(self.form_result)
        Session.add(log_entry)
        Session.commit()

        box2 = Session.query(Box2).get(self.form_result['box2_id'])
        session['flash'] = 'Configuration for %s updated.' % box2.name
        session.save()

        redirect(url(controller='admin', action='reader_history', id=box2.code))
Example 7
    def enzyme_conc_create(self):
        conc = EnzymeConcentration()

        for k, v in self.form_result.items():
            setattr(conc, k, v)

        Session.add(conc)
        Session.commit()

        redirect(url(controller="assay", action="view", id=self.form_result["assay_id"]))
Example 8
    def upload_file(self, id=None):
        self.__setup_box2_code_context(id)
        source = QLStorageSource(config)
        basename = upload_basename(self.form_result['file'].filename)
        errors = {}

        existing_path = self.__file_name_query(c.box2.id, basename)
        if existing_path and self.form_result['file_id'] != existing_path.id:
            # todo, if existing update path
            errors = dict(file='File with this name already exists for this reader.  Use the Update page.')

        path = "%s_%s" % (int(round(time.time())), basename)
        thefile = self.form_result['file'].file

        filerec = self.__file_id_query(c.box2.id, self.form_result['file_id'])
        new_record = False
        if not filerec:
            filerec = Box2File(box2_id=c.box2.id)
            new_record = True

        filerec.name = basename
        filerec.deleted = False
        filerec.path = path
        filerec.updated = datetime.datetime.now()
        filerec.description = self.form_result['description']
        filerec.mime_type = guess_type(basename)[0] or 'text/plain'


        if errors:
            response = self._upload_base(id)
            return h.render_bootstrap_form(response, errors=errors, error_formatters=h.tw_bootstrap_error_formatters)

        try:
            attachment_dir = self.__upload_file_dir(c.box2)
            if not os.path.exists(attachment_dir):
                os.mkdir(attachment_dir)

            permanent_path = self.__upload_file_path(c.box2, path)
            permanent_file = open(permanent_path, 'wb')
            shutil.copyfileobj(thefile, permanent_file)
            thefile.close()
            permanent_file.close()

            filerec.size = os.stat(permanent_path).st_size
            if new_record:
                Session.add(filerec)
            else:
                Session.merge(filerec)
            Session.commit()
            session['flash'] = 'File uploaded.'
            write_success = True
        except Exception, e:
            session['flash'] = 'Could not upload file: %s' % str(e)
            session['flash_class'] = 'error'
            write_success = False
Example 9
    def create(self):
        self.__load_context()
        plate_setup = PlateSetup()
        plate_setup.project_id = self.form_result['project_id']
        plate_setup.author_id = self.form_result['author_id']
        plate_setup.name = self.form_result['name']
        plate_setup.prefix = make_setup_name(plate_setup)

        Session.add(plate_setup)
        Session.commit()
        redirect(url(controller='setup', action='consumable', id=plate_setup.id, beta=c.beta))
Example 10
    def update_size(self, id=None):
        batch = self.__batch(id)
        if not batch:
            abort(404)

        batch_test = self.__batch_test(id)
        if not batch_test:
            batch_test = ConsumableBatchTest(consumable_batch_id=batch.id)
            Session.add(batch_test)

        batch_test.pixel_calibration = self.form_result["pixel_calibration"]

        garbage = []
        # check for cleared entities first
        for chan in batch_test.size_channels:
            thechip = [chip for chip in self.form_result["chips"] if chip["chip_num"] == chan.chip_num]
            if not thechip:
                garbage.append(chan)
                continue

            thechan = [c for c in thechip[0]["channels"] if c["channel_num"] == chan.channel_num]
            if not thechan:
                garbage.append(chan)
                continue

            if thechan[0]["droplet_count"] is None and thechan[0]["mean"] is None and thechan[0]["stdev"] is None:
                garbage.append(chan)

        for g in garbage:
            batch_test.size_channels.remove(g)
            Session.delete(g)

        # This is the case for a GAE-like Entity or a Mongo object or storing
        # JSON in a text column or whatever
        for chip in self.form_result["chips"]:
            for channel in chip["channels"]:
                if channel["droplet_count"] is not None or channel["mean"] is not None or channel["stdev"] is not None:
                    dbchan = batch_test.size_channel(chip["chip_num"], channel["channel_num"])
                    if not dbchan:
                        dbchan = ConsumableBatchSizeChannel(
                            chip_num=chip["chip_num"], channel_num=channel["channel_num"]
                        )
                        batch_test.size_channels.append(dbchan)

                    dbchan.size_mean = channel["mean"]
                    dbchan.size_stdev = channel["stdev"]
                    dbchan.droplet_count = channel["droplet_count"]

        Session.commit()
        session["flash"] = "Sizes updated."
        session.save()

        return redirect(url(controller="consumable", action="size", id=batch.id))
Example 11
 def command(self):
     self.load_wsgi_app()
     
     unknown_plates = Session.query(QLBPlate).filter(QLBPlate.plate == None)
     for qlbplate in unknown_plates:
         try:
             plate = plate_from_qlp(qlbplate)
             Session.add(plate)
             qlbplate.plate = plate
             Session.commit()
         except Exception:
             Session.rollback()
Example 12
 def create(self):
     batch = ConsumableBatch(
         manufacturer=self.form_result["manufacturer"],
         insert=self.form_result["insert"],
         consumable_molding_style_id=self.form_result["molding_style"],
         consumable_bonding_style_id=self.form_result["bonding_style"],
         bside=self.form_result["bside"],
         lot_num=self.form_result["lot_number"],
         manufacturing_date=self.form_result["manufacture_date"],
     )
     Session.add(batch)
     Session.commit()
     session["flash"] = "Created batch %s" % self.form_result["lot_number"]
     session.save()
     return redirect(url(controller="consumable", action="details", id=batch.id))
Example 13
def add_qlp_plate_record(qlplate, qlbfile):
    """
    Create a QLBPlate object based off a new QLBFile.  Adds it to
    the current SQLAlchemy Session object, but does not commit (will
    rollback, however, if there is a problem).  Returns a
    (QLBPlate, valid_plate) tuple.
    """
    valid_plate = True
    plate = None
    
    try:
        plate = QLBPlate()
        set_qlp_plate_record_attrs(plate, qlplate)
        plate.file = qlbfile
        Session.add(plate)
    except Exception, e:
        print e
        Session.rollback()
        valid_plate = False

    return plate, valid_plate
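
The docstring spells out the contract: the record is added to the session, the commit is left to the caller, and a rollback happens only if building the record fails (Example 24 consumes it exactly that way). Here is a stripped-down sketch of that add-without-commit convention; Record, build_record(), and scan() are hypothetical names for the sketch.

    # Sketch of the "add but let the caller commit" convention used above.
    # Record, build_record(), and scan() are hypothetical names.
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class Record(Base):
        __tablename__ = 'records'
        id = Column(Integer, primary_key=True)
        payload = Column(String)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    def build_record(session, payload):
        """Adds a Record to the session but does not commit; returns (record, ok)."""
        record = None
        try:
            record = Record(payload=payload)
            session.add(record)
            return record, True
        except Exception:
            session.rollback()
            return record, False

    def scan(session, payloads):
        for p in payloads:
            record, ok = build_record(session, p)
            if not ok:
                continue
            session.commit()   # the caller decides when the unit of work ends

    scan(session, ['a', 'b'])
    print(session.query(Record).count())   # 2
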
Example 14
 def cache(self):
     addresses = self.form_result['addresses']
     for a in addresses:
         if a['validated']:
             cache = MapCache(verified = a['validated'],
                              address = a['address'],
                              lat = a['lat'],
                              lon = a['lon'])
         else:
             cache = MapCache(verified = False,
                              address = a['address'])
         try:
             Session.add(cache)
             Session.commit()
         except Exception:
             # a duplicate address in the same update will fail the commit;
             # roll back so the session is usable again, then skip it
             Session.rollback()
             continue
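
Committing row by row and skipping failures tolerates duplicate addresses within one batch, but the session has to be rolled back before it can be reused, as in the rewrite above. A self-contained sketch of the same loop with an explicit unique constraint and IntegrityError follows; the CachedAddress model is a simplified stand-in for MapCache.

    # Sketch of per-row commit with duplicates skipped via rollback.
    # CachedAddress is a simplified stand-in model.
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.exc import IntegrityError
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class CachedAddress(Base):
        __tablename__ = 'cached_addresses'
        id = Column(Integer, primary_key=True)
        address = Column(String, unique=True)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    for address in ['1 Main St', '2 Oak Ave', '1 Main St']:   # last one is a duplicate
        session.add(CachedAddress(address=address))
        try:
            session.commit()
        except IntegrityError:
            session.rollback()   # required before the session can be reused
            continue

    print(session.query(CachedAddress).count())   # 2
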
Example 15
    def do_register_lab_reader(self):
        LAB_FILEROOT = 'main'
        storage = QLStorageSource(config)
        path = self.form_result['path']
        name = self.form_result['path'][3:] # sans DR
        new_reader_path = storage.real_path(LAB_FILEROOT, self.form_result['path'])

        new_reader = Box2(name=name,
                          code=name,
                          src_dir=path,
                          reader_type=Box2.READER_TYPE_WHOLE,
                          active=True,
                          fileroot=LAB_FILEROOT)
        Session.add(new_reader)
        Session.commit()
        session['flash'] = 'Reader %s added.' % name
        session.save()
        return redirect(url(controller='admin', action='register_lab'))
Example 16
 def tag(self):
     well = Session.query(QLBWell).get(self.form_result['well_id'])
     if not well:
         abort(500)
     
     tag_ids = [tag.id for tag in well.tags]
     new_id = self.form_result['tag_id']
     if new_id not in tag_ids:
         new_tag = Session.query(WellTag).get(new_id)
         if not new_tag:
             abort(500)
         tag = QLBWellTag(well=well, well_tag=new_tag, tagger_id=self.form_result['tagger_id'])
         Session.add(tag)
         Session.commit()
     
     if self.form_result['tagger_id']:
         session['person_id'] = self.form_result['tagger_id']
         session.save()
     return {'tag_id': new_id, 'tag_names': [tag.name for tag in well.tags]}
Example 17
    def add_reader_fix(self):
        box2_id = self.form_result['box2_id']
        box = Session.query(Box2).get(box2_id)
        if not box:
            abort(404)

        log = DRFixLog(box2_id=box.id,
                       problem=self.form_result['problem'],
                       root_cause=self.form_result['root_cause'],
                       fix=self.form_result['fix'],
                       time_effective=datetime.now(),
                       reporter_id=self.form_result['reporter_id'])
        
        Session.add(log)
        Session.commit()

        session['flash'] = 'Added fix for %s.' % box.name
        session.save()

        redirect(url(controller='admin', action='reader_history', id=box.code))
Example 18
 def save_circuit(self):
     log = Session.query(Box2Log).get(self.form_result['log_id'])
     if not log:
         abort(404)
     try:
         circ = Box2Circuit(name=self.form_result['name'],
                            log_template_id=self.form_result['log_id'])
         Session.add(circ)
         Session.commit()
         log.box2_circuit_id = circ.id
         Session.commit()
         session['flash'] = 'Configuration for %s updated and circuit "%s" created.' % (log.box2.name, circ.name)
         session.save()
         redirect(url(controller='admin', action='reader_history', id=log.box2.code))
     except exc.IntegrityError, e:
         Session.rollback()
         session['flash'] = 'There is already a circuit by that name.'
         session['flash_class'] = 'error'
         session.save()
         redirect(url(controller='admin', action='circuit', id=log.id))
Example 19
    def __update_batch_record(self, record=None):
        new_record = False
        if not record:
            new_record = True
            record = ManufacturingPlateBatch(name=self.form_result['name'],
                                             plate_type_id=self.form_result['plate_type'],
                                             creation_date=self.form_result['creation_date'],
                                             default_dg_method=self.form_result['dg_method'])
        
        record.creator_id = self.form_result['creator_id']
        record.notes = self.form_result['notes']
        record.fam_hi_size = self.form_result['fam_hi_size']
        record.vic_hi_size = self.form_result['vic_hi_size']
        record.hex_hi_size = self.form_result['hex_hi_size']

        try:
            Session.add(record)
            Session.commit()
        except IntegrityError:
            Session.rollback()
            raise
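
Here the IntegrityError is not swallowed: the session is rolled back so it stays usable and the exception is re-raised for the caller to report. A compact sketch of that rollback-and-reraise shape follows; BatchRecord is an invented model for the sketch.

    # Sketch of rollback-then-reraise around commit, with the caller turning
    # the failure into a user-facing message.  BatchRecord is invented.
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.exc import IntegrityError
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class BatchRecord(Base):
        __tablename__ = 'batch_records'
        id = Column(Integer, primary_key=True)
        name = Column(String, unique=True)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    def save_batch(session, name):
        try:
            session.add(BatchRecord(name=name))
            session.commit()
        except IntegrityError:
            session.rollback()   # leave the session clean, let the caller decide
            raise

    save_batch(session, 'Batch A')
    try:
        save_batch(session, 'Batch A')
    except IntegrityError:
        print('A batch with that name already exists.')
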
Example 20
    def update_reader_status(self):
        box2_id = self.form_result['box2_id']
        box = Session.query(Box2).get(box2_id)
        if not box:
            abort(404)
        
        box.status = self.form_result['status']
        box.status_comment = self.form_result['status_comment']

        log = DRStatusLog(box2_id=box.id,
                          status=box.status,
                          status_comment=box.status_comment,
                          time_effective=datetime.now(),
                          reporter_id=self.form_result['reporter_id'])
        Session.add(log)
        Session.commit()

        session['flash'] = 'Updated status for %s.' % box.name
        session.save()

        redirect(url(controller='admin', action='reader_history', id=box.code))
Example 21
	def __setup_plate(self, qlplate, path, name):
		dbfile = QLBFile(dirname=os.path.dirname(local(path)),
		                 basename=path,
		                 run_id=name,
		                 type='processed',
		                 read_status=1,
		                 mtime=datetime.now())
		
		# TODO add plate type
		plate = Plate(name=name,
		              run_time=datetime.strptime(qlplate.host_datetime, '%Y:%m:%d %H:%M:%S'))

		# TODO make this a general purpose function -- seems useful
		# (this will go into cron, I believe)
		qplate = QLBPlate(plate=plate,
		                  file=dbfile,
		                  host_datetime=qlplate.host_datetime)
		
		qlwells = sorted(qlplate.analyzed_wells.items())
		for name, well in qlwells:
			dbwell = QLBWell(file_id=-1,
			                 well_name=name,
			                 experiment_name=well.experiment_name,
			                 experiment_type=well.experiment_type,
			                 num_channels=well.num_channels,
			                 host_datetime=well.host_datetime)
			
			for idx, channel in enumerate(well.channels):
				dbchannel = QLBWellChannel(channel_num=idx,
				                           type=channel.type,
				                           target=channel.target)
				dbwell.channels.append(dbchannel)
			
			qplate.wells.append(dbwell)
		
		Session.add(plate)
		return plate, PlateMetric(plate=plate)
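
Only the Plate is added explicitly here; the QLBPlate, wells, and channels are picked up at flush time because they hang off relationship collections and SQLAlchemy's default save-update cascade follows them. A toy sketch of that cascade with placeholder Parent/Child models:

    # Sketch of relationship cascade: adding the parent is enough for the
    # children appended to its collection.  Parent/Child are placeholders.
    from sqlalchemy import create_engine, Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, sessionmaker

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parents'
        id = Column(Integer, primary_key=True)
        name = Column(String)
        children = relationship('Child', backref='parent')

    class Child(Base):
        __tablename__ = 'children'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parents.id'))
        name = Column(String)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    plate = Parent(name='plate')
    for well_name in ('A01', 'A02'):
        plate.children.append(Child(name=well_name))

    session.add(plate)       # the appended children are flushed too
    session.commit()
    print(session.query(Child).count())   # 2
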
Example 22
    def batch_size_upload(self, id=None):
        batch = self.__batch(id)
        if not batch:
            abort(404)

        batch_test = self.__batch_test(id)
        if not batch_test:
            batch_test = ConsumableBatchTest(consumable_batch_id=batch.id)
            Session.add(batch_test)

        batch_test.pixel_calibration = self.form_result["pixel_calibration"]
        for i in range(len(batch_test.size_channels)):
            sc = batch_test.size_channels.pop()
            Session.delete(sc)

        # place files in order
        chip_num = 0
        pc = batch_test.pixel_calibration
        for idx, channel in enumerate(sorted(self.form_result["sizes"], key=operator.itemgetter("file_num"))):
            if idx % 8 == 0:
                chip_num = chip_num + 1

            dbchan = ConsumableBatchSizeChannel(
                chip_num=chip_num,
                channel_num=(idx % 8) + 1,
                size_mean=channel["mean"] * pc,
                size_stdev=channel["stdev"] * pc,
                droplet_count=channel["droplet_count"],
            )
            batch_test.size_channels.append(dbchan)

        Session.commit()
        session["flash"] = "Sizes updated."
        session.save()

        return redirect(url(controller="consumable", action="size", id=batch.id))
Example 23
    def create(self):
        assay = Assay()
        for k, v in self.form_result.items():
            if k not in ("seq_padding_pos5", "seq_padding_pos3", "sequences"):
                setattr(assay, k, v)

        # avoids double lookup if lookup has already been done
        if self.form_result["sequences"]:
            # add 1000-padding sequence and snps to each.
            seq_source = UCSCSequenceSource()
            snp_source = HG19Source()

            for seq in self.form_result["sequences"]:
                # TODO: just call sequences_for_assay here?  logic the same.
                if not (seq["positive_sequence"] or seq["negative_sequence"]):
                    sequence = seq_source.sequence(
                        seq["chromosome"],
                        seq["start_pos"] - self.form_result["seq_padding_pos5"],
                        seq["end_pos"] + self.form_result["seq_padding_pos3"],
                    )

                    cache_rec = HG19AssayCache(
                        chromosome=sequence.chromosome,
                        start_pos=seq["start_pos"],
                        end_pos=seq["end_pos"],
                        seq_padding_pos5=self.form_result["seq_padding_pos5"],
                        seq_padding_pos3=self.form_result["seq_padding_pos3"],
                        positive_sequence=sequence.positive_strand_sequence,
                    )
                else:
                    cache_rec = HG19AssayCache(
                        chromosome=seq["chromosome"],
                        start_pos=seq["start_pos"],
                        end_pos=seq["end_pos"],
                        seq_padding_pos5=seq["padding_pos5"],
                        seq_padding_pos3=seq["padding_pos3"],
                        positive_sequence=seq["positive_sequence"] or None,
                        negative_sequence=seq["negative_sequence"] or None,
                    )

                for snp in seq["snps"]:
                    cache_rec.snps.append(
                        SNP131AssayCache(
                            bin=snp["bin"],
                            chrom=snp["chrom"],
                            chromStart=snp["chromStart"],
                            chromEnd=snp["chromEnd"],
                            name=snp["name"],
                            score=snp["score"],
                            strand=snp["strand"],
                            refNCBI=snp["refNCBI"],
                            refUCSC=snp["refUCSC"],
                            observed=snp["observed"],
                            molType=snp["molType"],
                            class_=snp["class_"],
                            valid=snp["valid"],
                            avHet=snp["avHet"],
                            avHetSE=snp["avHetSE"],
                            func=snp["func"],
                            locType=snp["locType"],
                            weight=snp["weight"],
                        )
                    )

                assay.cached_sequences.append(cache_rec)

        Session.add(assay)
        self.__update_tms(assay)

        Session.commit()
        redirect(url(controller="assay", action="view", id=assay.id))
Example 24
def __scan_plate(file_source, image_source, path_id, path, mtime_dict, plate_type=None, file_lists=None):
    """
    The method responsible for taking a QLP file on disk and creating
    thumbnails and adding/updating records in the database based off
    the contents of that file.

    This is a nasty abstraction, I apologize.

    TODO: make this more natural along add/update line, do not require use of
    mtime_dict or file_list (or return file_list as files touched)

    Returns the Plate object of the added/updated plate, or None if there was
    no touch/error.

    :param file_source: The source of the QLP files (QLStorageSource)
    :param image_source: The source/sink of thumbnails (QLStorageSource)
    :param path_id: The unique identifier of the plate file.  Computed by run_id()
    :param path: The actual file path of the QLP.
    :param mtime_dict: A mapping between plates and their last updated times.  This will
                       indicate whether or not a plate is 'dirty' with respect to the DB.
    :param plate_type: A plate type.  Supplying this will indicate that the special metrics
                       corresponding to that plate type should be computed during the scan.
    :param file_lists: A logging object used in the scan to record files that are missing,
                       poorly processed, etc.  Side-effected by this method.
    """
    if not file_lists:
        file_lists = defaultdict(list)
    
    # if the file is not being tracked, attempt to add it
    if path_id not in mtime_dict:
        print "Adding plate: %s" % path
        qlbfile, qlplate, valid_file = add_qlp_file_record(file_source, path)
        if not valid_file:
            print "Invalid file: %s" % path
            file_lists['invalid_plates'].append(path)
            return None
        elif path.endswith('HFE_Plate.qlp'):
            qlbfile.read_status = -7
            print "Ignoring HFE Plate: %s" % path
            Session.commit()
            return None
        elif qlbfile.version == 'Unknown':
            qlbfile.read_status = -8
            print "Ignoring plate run with unknown QS version: %s" % path
            Session.commit()
            return None

        if qlbfile.version_tuple < (0, 1, 1, 9):
            # we don't recognize the QLP file version, ditch
            qlbfile.read_status = -2
            Session.commit()
            return None

        qlbplate, valid_plate = add_qlp_plate_record(qlplate, qlbfile)
        if not valid_plate:
            # invalid plate
            print "Could not read plate: %s" % path
            qlbfile.read_status = -20
            Session.commit()
            file_lists['unreadable_plates'].append(path)
            return None

        for well_name, proc_qlwell in sorted(qlplate.analyzed_wells.items()):

            # remove empty/blank wells generated by eng group
            if well_name is None or well_name == '':
                del qlplate.analyzed_wells[well_name]
                continue

            raw_qlwell = None
            # TODO: abstract?
            well_loc = "%s_%s_RAW.qlb" % (path[:-4], well_name)
            # process QLP only
            if not os.path.isfile(well_loc):
                print "Could not find well file: %s" % well_loc
                file_lists['missing_wells'].append(well_loc)
                well_file = None
                # proceed, as file may just not have been delivered
                valid_file = True
            else:
                well_file, raw_qlwell, valid_file = add_qlb_file_record(file_source, well_loc)

            if not valid_file:
                print "Invalid well file: %s" % well_loc
                file_lists['invalid_wells'].append(well_loc)
                continue

            qlbwell, valid_well = add_qlb_well_record(well_file, well_name, proc_qlwell, raw_qlwell)
            if valid_well:
                qlbplate.wells.append(qlbwell)

        # bug 829: if there are invalid wells, do not process the plate;
        # wait for the well files to complete processing, get on next run
        if file_lists['invalid_wells']:
            print "Skipping plate processing (invalid well): %s" % path
            Session.rollback()
            return None # continue plate

        plate_meta = plate_from_qlp(qlbplate)
        Session.add(plate_meta)

        qlbplate.plate = plate_meta

        validation_test = get_product_validation_plate(qlplate, plate_meta)

        if not validation_test:
            if not apply_setup_to_plate(qlplate, plate_meta):
                apply_template_to_plate(qlplate, plate_meta)

        # OK, try it now
        try:
            for well in qlbplate.wells:
                if well.file_id != -1:
                    well.file.read_status = 1
            qlbplate.file.read_status = 1
            Session.commit()
            write_images_stats_for_plate(qlbplate, qlplate, image_source, override_plate_type=plate_type)
            Session.commit()
            qlbplate.plate.score = Plate.compute_score(qlbplate.plate)
            Session.commit()
            if validation_test:
                validation_test.plate_id = qlbplate.plate.id
                Session.add(validation_test)
                Session.commit()
            file_lists['added_plates'].append(path)
            return plate_meta
        except Exception, e:
            print e
            print "Could not process new plate: %s" % path
            file_lists['unwritable_plates'].append(path)
            Session.rollback()

    elif time_equals(mtime_dict[path_id][1], datetime.fromtimestamp(os.stat(path).st_mtime)):
        return None
    else:
        # strategy: reprocess the plate and update.
        qlbfile = Session.query(QLBFile).get(mtime_dict[path_id][0])
        if not qlbfile:
            print "No file for path: %s" % path
            return None
        elif path.endswith('HFE_Plate.qlp'):
            qlbfile.mtime = datetime.fromtimestamp(os.stat(path).st_mtime)
            Session.commit()
            return None

        qlbplates = Session.query(QLBPlate).filter_by(file_id=qlbfile.id).\
                            options(joinedload_all(QLBPlate.wells, QLBWell.channels)).all()
        if not qlbplates:
            print "No plate for read file: %s" % path
            return None

        qlbplate = qlbplates[0]
        if not qlbplate.plate_id:
            print "No plate for read file (plate deleted): %s" % path
            qlbfile.mtime = datetime.fromtimestamp(os.stat(path).st_mtime)
            Session.commit()
            return None

        print "Updating plate %s/%s: %s" % (qlbplate.plate_id, qlbplate.id, path)
        qlplate = get_plate(path)
        updated = update_qlp_plate_record(qlbplate, qlplate)
        if not updated:
            print "Could not read updated file"
            Session.rollback()
            qlbplate.file.read_status = -30
            Session.commit()
            file_lists['unreadable_plates'].append(path)
            return None

        # this is basically the same as on add -- abstract?
        #
        # TODO (GitHub Issue 30): handle case where a previously analyzed well is switched to 'Not Used'
        for well_name, proc_qlwell in sorted(qlplate.analyzed_wells.items()):
            raw_qlwell = None

            # TODO: abstract?
            well_loc = "%s_%s_RAW.qlb" % (path[:-4], well_name)
            qlbwells = [well for well in qlbplate.wells if well.well_name == well_name]
            if not qlbwells:
                # add qlb file record
                if not os.path.isfile(well_loc):
                    print "Could not find well file: %s" % well_loc
                    well_file = None
                    valid_file = True
                    file_lists['missing_wells'].append(well_loc)
                else:
                    well_file, raw_qlwell, valid_file = add_qlb_file_record(file_source, well_loc)

                if not valid_file:
                    print "Invalid well file: %s" % well_loc
                    file_lists['invalid_wells'].append(well_loc)
                    continue

                qlbwell, valid_well = add_qlb_well_record(well_file, well_name, proc_qlwell, raw_qlwell)
                if valid_well:
                    qlbplate.wells.append(qlbwell)
                else:
                    file_lists['invalid_wells'].append(well_loc)
                    print "Could not add well %s: %s" % (well_name, well_loc)
            else:
                qlbwell = qlbwells[0]

                if not os.path.isfile(well_loc):
                    print "Could not find well file to update: %s" % well_loc
                    file_lists['missing_wells'].append(well_loc)
                    update_qlb_well_record(qlbwell, well_name, proc_qlwell, None)
                else:
                    if qlbwell.file_id == -1:
                        well_file, raw_qlwell, valid_file = add_qlb_file_record(file_source, well_loc)
                        if valid_file:
                            qlbwell.file = well_file
                    update_qlb_well_record(qlbwell, well_name, proc_qlwell, raw_qlwell)

        # in lieu of updating plate meta (though it maybe should be done)
        qlbplate.plate.program_version = qlbplate.host_software

        try:
            for well in qlbplate.wells:
                if well.file_id != -1 and well.file:
                    well.file.read_status = 1
            qlbplate.file.read_status = 1
            qlbfile.mtime = datetime.fromtimestamp(os.stat(path).st_mtime)
            Session.commit()
            # this is where updating the dirty bits would come in handy
            write_images_stats_for_plate(qlbplate, qlplate, image_source, overwrite=True, override_plate_type=plate_type)
            Session.commit()
            qlbplate.plate.score = Plate.compute_score(qlbplate.plate)
            Session.commit()
            file_lists['updated_plates'].append(path)
            return qlbplate.plate
        except Exception, e:
            print e
            print "Could not update plate %s/%s: %s" % (qlbplate.plate_id, qlbplate.id, path)
            file_lists['unwritable_plates'].append(path)
            Session.rollback()
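
The update branch decides whether a plate is dirty by comparing the stored mtime against the file's current mtime. Below is a small standalone sketch of that check; time_equals in the project presumably allows some slop, so a plain tolerance comparison stands in for it here.

    # Sketch of mtime-based dirty checking, standing in for the time_equals()
    # comparison above.  The one-second tolerance is an assumption for the sketch.
    import os
    import tempfile
    from datetime import datetime, timedelta

    def time_equals(a, b, tolerance=timedelta(seconds=1)):
        return abs(a - b) <= tolerance

    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.close()
    path = tmp.name

    # pretend this came from the QLBFile row's mtime column
    stored_mtime = datetime.fromtimestamp(os.stat(path).st_mtime)

    current_mtime = datetime.fromtimestamp(os.stat(path).st_mtime)
    if time_equals(stored_mtime, current_mtime):
        print('unchanged -- skip the plate')
    else:
        print('modified -- reprocess and update the record')

    os.unlink(path)
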
Example 25
def write_images_stats_for_plate(dbplate, qlplate, image_source, overwrite=False, override_plate_type=None):
    """
    Write plate metrics to the database and thumbnails to local storage,
    as dictated by image_source.

    Metrics will be related to the supplied dbplate (Plate model)
    qlplate is a QLPlate object derived from reading the QLP file.
    """
    if image_source.subdir_exists(str(dbplate.id)):
        if not overwrite:
            return
    else:
        image_source.make_subdir(str(dbplate.id))
    
    max_amplitudes = (24000, 12000)
    show_only_gated = False # keep default behavior
    if qlplate:
        for well_name, qlwell in sorted(qlplate.analyzed_wells.items()):
            # TODO: common lib?
            if well_channel_automatic_classification(qlwell, 0):
                fig = plot_fam_peaks(qlwell.peaks,
                                     threshold=qlwell.channels[0].statistics.threshold,
                                     max_amplitude=max_amplitudes[0])
            else:
                fig = plot_fam_peaks(qlwell.peaks,
                                     threshold=qlwell.channels[0].statistics.threshold,
                                     threshold_color='red',
                                     max_amplitude=max_amplitudes[0],
                                     background_rgb=MANUAL_THRESHOLD_FAM_BGCOLOR)
            fig.savefig(image_source.get_path('%s/%s_%s.png' % (dbplate.id, well_name, 0)), format='png', dpi=72)
            plt_cleanup(fig)

            if well_channel_automatic_classification(qlwell, 1):
                fig = plot_vic_peaks(qlwell.peaks,
                                     threshold=qlwell.channels[1].statistics.threshold,
                                     max_amplitude=max_amplitudes[1])
            else:
                fig = plot_vic_peaks(qlwell.peaks,
                                     threshold=qlwell.channels[1].statistics.threshold,
                                     threshold_color='red',
                                     max_amplitude=max_amplitudes[1],
                                     background_rgb=MANUAL_THRESHOLD_VIC_BGCOLOR)
                
            fig.savefig(image_source.get_path('%s/%s_%s.png' % (dbplate.id, well_name, 1)), format='png', dpi=72)
            plt_cleanup(fig)

            if qlwell.clusters_defined:
                threshold_fallback = qlwell.clustering_method == QLWell.CLUSTERING_TYPE_THRESHOLD
                fig = plot_cluster_2d(qlwell.peaks,
                                      width=60,
                                      height=60,
                                      thresholds=[qlwell.channels[0].statistics.threshold,
                                                  qlwell.channels[1].statistics.threshold],
                                      boundaries=[0,0,12000,24000],
                                      show_axes=False,
                                      antialiased=True,
                                      unclassified_alpha=0.5,
                                      use_manual_clusters=not well_channel_automatic_classification(qlwell),
                                      highlight_thresholds=threshold_fallback)
                fig.savefig(image_source.get_path('%s/%s_2d.png' % (dbplate.id, well_name)), format='png', dpi=72)
                plt_cleanup(fig)
        
        pm = [pm for pm in dbplate.plate.metrics if pm.reprocess_config_id is None]
        for p in pm:
            Session.delete(p)

        plate = dbplate_tree(dbplate.plate.id)
        
        # override plate_type if supplied (another artifact of bad abstraction)
        if override_plate_type:
            plate.plate_type = override_plate_type

        # this relies on apply_template/apply_setup working correctly on plate addition
        # verify on DR 10005 plate that this works
        if plate.plate_type and plate.plate_type.code in beta_plate_types:
            plate_metrics = get_beta_plate_metrics(plate, qlplate)
        else:
            plate_metrics = process_plate(plate, qlplate)
        Session.add(plate_metrics)
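
Each well yields one or more matplotlib figures that are saved as PNG thumbnails and then released (plt_cleanup presumably closes the figure so a long scan does not accumulate memory). A stripped-down sketch of that save-and-release loop using stock matplotlib calls; plot_fam_peaks and the well data are project specifics, replaced here with placeholders.

    # Sketch of the save-then-release loop for per-well thumbnails.
    # Stock matplotlib calls stand in for the project's plotting helpers.
    import os
    import tempfile

    import matplotlib
    matplotlib.use('Agg')          # headless backend, as a cron job would need
    import matplotlib.pyplot as plt

    outdir = tempfile.mkdtemp()

    for well_name, peaks in [('A01', [1, 3, 2]), ('A02', [2, 2, 4])]:
        fig = plt.figure(figsize=(1.0, 1.0))     # small thumbnail-sized figure
        plt.plot(peaks)
        fig.savefig(os.path.join(outdir, '%s_0.png' % well_name), format='png', dpi=72)
        plt.close(fig)                            # release the figure's memory

    print(sorted(os.listdir(outdir)))   # ['A01_0.png', 'A02_0.png']
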
Example 26
 def command(self):
     self.load_wsgi_app()
     setup = generate_custom_setup(*self.args[:-1])
     Session.add(setup)
     Session.commit()
Example 27
    def update_reader_circuit(self):
        log_entry = self.__make_box2_log_entry(self.form_result)
        Session.add(log_entry)
        Session.commit()

        redirect(url(controller='admin', action='circuit', id=log_entry.id))
Example 28
    def __update_groove_record(self, record=None):

        new_record = False
        errors = {}
        if not record:
            new_record = True
            plate_type = Session.query(PlateType).get(self.form_result["plate_type"])
            person = Session.query(Person).get(self.form_result["droplet_maker"])
            if self.form_result["droplet_generator"]:
                dg = Session.query(DropletGenerator).get(self.form_result["droplet_generator"])
            else:
                dg = None
            if not plate_type:
                errors["plate_type"] = "Unknown plate type."
            else:
                qlt_paths = self.__get_layout_qlts(plate_type, self.form_result["runs"])
                if not qlt_paths:
                    errors["runs"] = "Invalid layout for this plate type."
                else:
                    # create the name
                    setup = []
                    for i, path in enumerate(qlt_paths):
                        name = self.__name_for_groove_plate(
                            plate_type,
                            self.form_result["droplet_type"],
                            person,
                            self.form_result["creation_date"],
                            self.form_result["identifier"],
                            i,
                        )
                        setup.append((name, path))

                    plate_setup = PlateSetup(
                        name=name[:-2],
                        prefix=name[:-2],
                        setup=json.dumps(setup),
                        time_updated=self.form_result["creation_date"],
                        project_id=Session.query(Project).filter_by(name="GrooveTest").first().id,
                        author_id=person.id,
                        droplet_maker_id=person.id,
                        dr_oil=22,
                        plate_type_id=plate_type.id,
                        droplet_generator_id=dg.id if dg else None,
                        notes=self.form_result["notes"],
                    )
                    dt = self.form_result["droplet_type"]
                    if dt == 1:  # TODO LITERALS
                        plate_setup.skin_type = Plate.SKIN_TYPE_SKINNED
                        plate_setup.dg_oil = 14
                        plate_setup.master_mix = 12
                    else:
                        plate_setup.skin_type = Plate.SKIN_TYPE_SKINLESS
                        plate_setup.dg_oil = 31

                    if dt == 2:
                        plate_setup.chemistry_type = Plate.CHEMISTRY_TYPE_GREEN
                        plate_setup.master_mix = 205
                    else:
                        plate_setup.chemistry_type = Plate.CHEMISTRY_TYPE_TAQMAN

                    if dt == 3:
                        plate_setup.master_mix = 501

                    Session.add(plate_setup)
                    Session.commit()
                    return plate_setup, None
        else:
            record.notes = self.form_result["notes"]
            record.droplet_generator_id = self.form_result["droplet_generator"]
            Session.commit()
            return record, None

        if errors:
            return None, errors
Example 29
    def create(self, id):
        """
        KIND OF UNSAFE.  Creates/registers a reader.
        """
        serial = self.form_result['serial']
        reader_type = self.form_result['reader_type']
        if reader_type == Box2.READER_TYPE_WHOLE:
            ## check name is in correct format...
            if not re.search( '^771BR\d{4}$', serial ):
                session['flash'] = 'New Reader name must follow "771BR####" convention'
                session['flash_class'] = 'error'
                session.save()
                return redirect(url(controller='admin', action='register', id=id))
            elif len(serial) > 15:
                session['flash'] = 'Reader name cannot contain more than 15 characters'
                session['flash_class'] = 'error'
                session.save()
                return redirect(url(controller='admin', action='register', id=id))

            code = 'p%s' % serial
        elif reader_type == Box2.READER_TYPE_FLUIDICS_MODULE:
            serial = serial.upper()

            if len(serial) > 15:
                session['flash'] = 'Fluidics name cannot contain more than 15 characters'
                session['flash_class'] = 'error'
                session.save()
                return redirect(url(controller='admin', action='register', id=id))

            code = 'f%s' % serial
        elif reader_type == Box2.READER_TYPE_DETECTOR_MODULE:
            ## check to make sure someone isn't accidentally adding a DR
            if re.search('^771BR\d*', serial):
                session['flash'] = 'New Detector module names do not contain "771BR" '
                session['flash_class'] = 'error'
                session.save()
                return redirect(url(controller='admin', action='register', id=id) )
            elif len(serial) > 15:
                session['flash'] = 'Detector name cannot contain more than 15 characters'
                session['flash_class'] = 'error'
                session.save()
                return redirect(url(controller='admin', action='register', id=id))

            serial = serial.upper()
            code = 'd%s' % serial

        # check and catch if reader already exists
        box2 = Session.query(Box2).filter_by(code=code).first()
        if box2:
            session['flash'] = 'Unit %s is already registered.' % self.form_result['serial']
            session['flash_class'] = 'error'
            session.save()
            return redirect(url(controller='admin', action='register', id=id))
        
        # if the unit does not exist yet, create it
        if reader_type == Box2.READER_TYPE_WHOLE:
            src_dir = "DR %s" % serial
            reader = Box2(name=u'Prod %s' % serial, code=code, src_dir=src_dir, \
                          reader_type = Box2.READER_TYPE_WHOLE, \
                          fileroot=config['qlb.fileroot.register_fileroot'], active=True)
        elif reader_type == Box2.READER_TYPE_FLUIDICS_MODULE:
            src_dir="FM %s" % serial
            reader = Box2(name=u'QL-FSA %s' % serial, code=code, src_dir=src_dir, \
                          reader_type = Box2.READER_TYPE_FLUIDICS_MODULE, \
                          fileroot=config['qlb.fileroot.register_fileroot'], active=True)
        elif reader_type == Box2.READER_TYPE_DETECTOR_MODULE:
            src_dir="DM %s" % serial
            reader = Box2(name=u'DET %s' % serial, code=code, src_dir=src_dir, \
                          reader_type = Box2.READER_TYPE_DETECTOR_MODULE, \
                          fileroot=config['qlb.fileroot.register_fileroot'], active=True)       

 
        Session.add(reader)
        reader.active = True

        local_plate_source = QLBPlateSource(config, [reader])
        dirname = local_plate_source.real_path(reader.fileroot, src_dir)
        try:
            os.mkdir(dirname)
            Session.commit()
            session['flash'] = 'Unit %s registered.' % serial
            session.save()
        except Exception, e:
            session['flash'] = 'Could not create a directory for unit %s' % serial
            session['flash_class'] = 'error'
            session.save()
            Session.rollback()
            return redirect(url(controller='admin', action='register', id=id))
Example 30
 def create_bonding(self):
     style = ConsumableBondingStyle(name=self.form_result["name"], description=self.form_result["description"])
     Session.add(style)
     Session.commit()
     session["flash"] = "Bonding style '%s' created." % style.name
     session.save()
     return redirect(url(controller="consumable", action="new"))