def save_results(conn, files, plate):
    """Merge per-file measurements and attach them to *plate* as an OMERO.table.

    :param conn: BlitzGateway connection (uses conn.c.sf for shared resources)
    :param files: list of pandas DataFrames to concatenate, one per input file
    :param plate: plate wrapper the resulting table annotation is linked to
    """
    # Upload the results as OMERO.table
    print("saving results...")
    nuclei = pandas.concat(files, ignore_index=True)
    summary = nuclei.groupby('Image').mean()
    # Free memory
    del nuclei

    # NOTE(review): after groupby('Image'), 'Image' becomes the index rather
    # than a column, so the ImageColumn branch below may never fire — confirm.
    cols = []
    for name in summary.columns:
        values = summary[name]
        if name == 'Image':
            cols.append(ImageColumn(name, '', values))
        elif name == 'Well':
            cols.append(WellColumn(name, '', values))
        elif values.dtype == 'int64':
            cols.append(LongColumn(name, '', values))
        elif values.dtype == 'float64':
            cols.append(DoubleColumn(name, '', values))

    resources = conn.c.sf.sharedResources()
    repository_id = resources.repositories().descriptions[0].getId().getValue()
    table = resources.newTable(repository_id, "idr0002_cellprofiler")
    table.initialize(cols)
    table.addData(cols)

    # Link the table to the plate
    orig_file = table.getOriginalFile()
    file_ann = FileAnnotationWrapper(conn)
    file_ann.setNs(NSBULKANNOTATIONS)
    file_ann._obj.file = OriginalFileI(orig_file.id.val, False)
    file_ann.save()
    plate.linkAnnotation(file_ann)
    table.close()
def get_empty_columns(self, n_columns):
    """
    Retrieves a set of empty OmeroTables columns for the analysis
    results prefixed by an ImageColumn and RoiColumn to handle
    these linked object indexes.
    """
    # Two fixed link columns, then n_columns unnamed measurement columns;
    # each column gets its own fresh (empty) value list.
    link_columns = [
        ImageColumn('Image', '', list()),
        RoiColumn('ROI', '', list()),
    ]
    measurement_columns = [
        DoubleColumn('', '', list()) for _ in range(n_columns)
    ]
    return link_columns + measurement_columns
def save_table(conn, images, image_data, script_params, project=None):
    """Summarise ROIs as Table (1 row per Image) linked to Project.

    :param conn: BlitzGateway connection
    :param images: image wrappers; one table row per image
    :param image_data: mapping of summary column name -> per-image values
    :param script_params: script parameters (unused here, kept for interface)
    :param project: project to link the table to; logs a message when None
    """
    resources = conn.c.sf.sharedResources()
    repository_id = resources.repositories().descriptions[0].getId().getValue()
    table = resources.newTable(repository_id, "batch_roi_export")

    # Create table: one Image link column plus one double column per summary key
    img_column = ImageColumn('Image', '', [i.id for i in images])
    value_columns = [
        DoubleColumn(name, '', image_data[name]) for name in SUMMARY_COL_NAMES
    ]
    data = [img_column] + value_columns
    table.initialize(data)
    table.addData(data)
    table.close()

    # NOTE(review): table is closed before link_table(conn, table, ...) is
    # called — verify link_table does not need the live table handle.
    if project is None:
        log("No Project found to link table")
    else:
        link_table(conn, table, project)
def parse(self):
    """Parse a CSV measurement file into Image/ROI/Type OMERO.tables columns.

    Each data row (header skipped) yields one Point ROI saved to the server;
    the returned result holds one row per parsed measurement.
    """
    provider = self.original_file_provider
    data = provider.get_original_file_data(self.original_file)
    try:
        rows = list(csv.reader(data, delimiter=","))
    finally:
        data.close()

    columns = [
        ImageColumn("Image", "", list()),
        RoiColumn("ROI", "", list()),
        StringColumn("Type", "", 12, list()),
    ]

    # Skip the header row; each remaining row is one point measurement.
    for record in rows[1:]:
        wellnumber = self.well_name_to_number(record[0])
        image = self.analysis_ctx.\
            image_from_wellnumber(wellnumber)
        # TODO: what to do with the field?!
        # field = int(record[1])
        # image = images[field]

        # Build a single-shape Point ROI from the row's coordinates and label.
        shape = PointI()
        shape.cx = rdouble(float(record[2]))
        shape.cy = rdouble(float(record[3]))
        shape.textValue = rstring(record[4])
        roi = RoiI()
        roi.addShape(shape)
        roi.image = image.proxy()
        rid = self.update_service\
            .saveAndReturnIds([roi])[0]

        columns[0].values.append(image.id.val)
        columns[1].values.append(rid)
        columns[2].values.append(record[4])

    return MeasurementParsingResult([columns])
def save_table(conn, images, export_data, script_params):
    """Summarise ROIs as Table (1 row per Image) linked to Project.

    Builds per-image summary columns, uploads them as an OMERO.table, then
    links the table to the first Project found among the images' parents.
    """
    resources = conn.c.sf.sharedResources()
    repository_id = resources.repositories().descriptions[0].getId().getValue()
    table = resources.newTable(repository_id, "batch_roi_export")

    # Summarise data into columns; images without data contribute 0s.
    image_ids = [i.id for i in images]
    col_data = defaultdict(list)
    for image in images:
        summary = get_summary_data_for_image(conn, image, export_data,
                                             script_params)
        for name in SUMMARY_COL_NAMES:
            col_data[name].append(summary[name] if summary is not None else 0)

    # Create table
    img_column = ImageColumn('Image', '', image_ids)
    value_columns = [
        DoubleColumn(name, '', col_data[name]) for name in SUMMARY_COL_NAMES
    ]
    data = [img_column] + value_columns
    table.initialize(data)
    table.addData(data)
    table.close()

    # Link Table to first Project we find
    project = None
    for image in images:
        project = image.getProject()
        if project is not None:
            break
    if project is None:
        log("No Project found to link table")
    else:
        link_table(conn, table, project)
def parse(self):
    """Parse a streamed XML measurement file into three column sets.

    Iterates WellData elements (skipping 'Summary' rows), collecting one row
    per cell into cells/nuclei/organelles column dicts; Measure elements are
    routed to the matching set(s) by their 'source' attribute. Returns a
    MeasurementParsingResult holding the three column collections.

    Fix: the inner `except:` was a bare except, which also swallows
    SystemExit/KeyboardInterrupt — narrowed to `except Exception:` (the
    existing log.exception call is kept).
    """
    log.info("Parsing: %s" % self.original_file.name.val)
    provider = self.original_file_provider
    data = provider.get_original_file_data(self.original_file)
    try:
        events = ('start', 'end')
        well_data = None
        n_roi = 0
        n_measurements = 0
        cells_columns = {'Image': ImageColumn('Image', '', list()),
                         'Cell': LongColumn('Cell', '', list()),
                         'ROI': RoiColumn('ROI', '', list())
                         }
        organelles_columns = {'Image': ImageColumn('Image', '', list()),
                              'Cell': LongColumn('Cell', '', list()),
                              }
        nuclei_columns = {'Image': ImageColumn('Image', '', list()),
                          'Cell': LongColumn('Cell', '', list()),
                          'ROI': RoiColumn('ROI', '', list())
                          }
        for event, element in iterparse(data, events=events):
            if event == 'start' and element.tag == 'WellData' \
                    and element.get('cell') != 'Summary':
                # XML attributes are 1-based; convert to 0-based indexes.
                row = int(element.get('row')) - 1
                col = int(element.get('col')) - 1
                i = int(element.get('field')) - 1
                try:
                    well, images = self.get_well_images(row, col)
                    if not images:
                        continue
                    image = images[i]
                except Exception:
                    # Narrowed from a bare except so KeyboardInterrupt and
                    # SystemExit still propagate.
                    log.exception("ERROR: Failed to get well images")
                    continue
                # Pad any columns the previous row did not touch before
                # starting a new row.
                self.check_sparse_data(cells_columns.values())
                self.check_sparse_data(nuclei_columns.values())
                self.check_sparse_data(organelles_columns.values())
                cell = long(element.get('cell'))
                cells_columns['Cell'].values.append(cell)
                nuclei_columns['Cell'].values.append(cell)
                organelles_columns['Cell'].values.append(cell)
                well_data = element
                cells_columns['Image'].values.append(image.id.val)
                nuclei_columns['Image'].values.append(image.id.val)
                organelles_columns['Image'].values.append(image.id.val)
            elif well_data is not None and event == 'start' \
                    and element.tag == 'Measure':
                source = element.get('source')
                key = element.get('key')
                value = float(element.get('value'))
                # Route the measurement to the matching column set(s);
                # unknown sources go to all three.
                if source == self.CELLS_SOURCE:
                    columns_list = [cells_columns]
                elif source == self.NUCLEI_SOURCE:
                    columns_list = [nuclei_columns]
                elif source == self.ORGANELLES_SOURCE:
                    columns_list = [organelles_columns]
                else:
                    columns_list = [cells_columns, nuclei_columns,
                                    organelles_columns]
                for columns in columns_list:
                    if key not in columns:
                        columns[key] = DoubleColumn(key, '', list())
                    columns[key].values.append(value)
                    n_measurements += 1
            elif event == 'end' and element.tag == 'WellData':
                if well_data is not None:
                    n_roi += 1
                    well_data.clear()
                    well_data = None
            else:
                # Free memory held by elements we are not interested in.
                element.clear()
        # Final row sparseness check
        self.check_sparse_data(cells_columns.values())
        self.check_sparse_data(nuclei_columns.values())
        self.check_sparse_data(organelles_columns.values())
        log.info("Total ROI: %d" % n_roi)
        log.info("Total measurements: %d" % n_measurements)
        sets_of_columns = [cells_columns.values(), nuclei_columns.values(),
                           organelles_columns.values()]
        return MeasurementParsingResult(sets_of_columns)
    finally:
        data.close()