def make_tifs(animal, channel, njobs):
    """
    Extract numbered tif files from czi files for one channel of a stack.

    This method will:
        1. Fetch the sections from the database
        2. Yank the tif out of the czi file according to the index and channel
           with the bioformats tool.
        3. Then update the database with updated meta information

    Args:
        animal: the prep id of the animal
        channel: the channel of the stack to process
        njobs: number of jobs for parallel computing

    Returns:
        nothing

    Note: no compression is applied here so jp2 files can be created for CSHL;
    the files get compressed with LZW when running create_preps.py.
    """
    # get_logger configures logging for this animal as a side effect; keep the call
    # even though the local name is unused.
    logger = get_logger(animal)
    fileLocationManager = FileLocationManager(animal)
    sqlController = SqlController(animal)
    INPUT = fileLocationManager.czi
    OUTPUT = fileLocationManager.tif
    os.makedirs(OUTPUT, exist_ok=True)

    sections = sqlController.get_distinct_section_filenames(animal, channel)
    sqlController.set_task(animal, QC_IS_DONE_ON_SLIDES_IN_WEB_ADMIN)
    sqlController.set_task(animal, CZI_FILES_ARE_CONVERTED_INTO_NUMBERED_TIFS_FOR_CHANNEL_1)

    commands = []
    for section in sections:
        input_path = os.path.join(INPUT, section.czi_file)
        output_path = os.path.join(OUTPUT, section.file_name)
        # Skip before building the command: source czi missing, or tif already extracted.
        if not os.path.exists(input_path):
            continue
        if os.path.exists(output_path):
            continue
        cmd = ['/usr/local/share/bftools/bfconvert', '-bigtiff', '-separate',
               '-series', str(section.scene_index),
               '-channel', str(section.channel_index),
               '-nooverwrite', input_path, output_path]
        commands.append(cmd)

    # Fan the bfconvert invocations out across njobs worker processes.
    with Pool(njobs) as p:
        p.map(workernoshell, commands)
import unittest, os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from unittest.mock import Mock, patch
from sqlalchemy.orm import sessionmaker
from utilities.logger import get_logger
from order_app.order_list import OrderList
from order_app.models import create_db_if_not_exists, db_connect, create_tables

# Shared logger for this test module.
logger = get_logger('test')


class PlaceOrderDBTestCase(unittest.TestCase):
    # Integration tests for placing orders against a dedicated test database.

    @classmethod
    def setUpClass(cls):
        """
        Define integration test DB connection string, and the database for
        test, followed by creating the TestDB and the related tables.
        Initialize args for PlaceOrder, with predefinitions in json file.
        Start all mocking necessary for testing.
        """
        cls.db_test_name = 'TestDB'
        # Connection parameters come from the environment so CI/local runs
        # can point at different Postgres instances.
        cls.global_db_config = {
            'drivername': 'postgresql',
            'host': os.getenv('DB_HOST'),
            'port': os.getenv('DB_PORT'),
            'username': os.getenv('DB_USERNAME'),
            'password': os.getenv('DB_PASSWORD'),
            'database': cls.db_test_name
        }
        # NOTE(review): setUpClass is truncated in this view — the DB/table
        # creation and mocking promised by the docstring are outside it.
# NOTE(review): the statements below reference names (INPUT, tifs,
# sqlController) that are not defined at this level — this appears to be the
# tail of update_tifs(animal, channel), invoked in the __main__ guard below;
# the `def` line is outside this view.
try:
    # Bail out early if the input directory is unreadable/missing.
    os.listdir(INPUT)
except OSError as e:
    print(e)
    sys.exit()

# Record the on-disk size of each numbered tif back into the database row.
for i, tif in enumerate(tqdm(tifs)):
    print(tif.file_name)
    input_path = os.path.join(INPUT, str(i).zfill(3) + '.tif')
    if os.path.exists(input_path):
        print(input_path)
        tif.file_size = os.path.getsize(input_path)
        sqlController.update_row(tif)


if __name__ == '__main__':
    # Command-line entry point: update tif metadata for one animal/channel.
    parser = argparse.ArgumentParser(description='Work on Animal')
    parser.add_argument('--animal', help='Enter the animal animal', required=True)
    parser.add_argument('--channel', help='Enter channel', required=True)
    args = parser.parse_args()
    animal = args.animal
    channel = int(args.channel)
    # TEST loggers
    logger = get_logger(animal)
    logger.info('Update channel {} tifs'.format(channel))
    update_tifs(animal, channel)
import json, requests
from flask import request
from utilities.logger import get_logger
from werkzeug import exceptions
from order_app.models import Order

# Module-level logger shared by the order-list endpoints.
logger = get_logger('flask_order_app')


class OrderList():
    """Paginated, read-only access to Order rows."""

    def __init__(self, page, limit):
        # page: page number requested by the caller; limit: rows per page.
        # (Both passed straight through to Flask-SQLAlchemy's paginate.)
        self.page = page
        self.limit = limit

    def query_paginated_orders(self):
        # Fetch one page of orders (id, distance, status), newest id first.
        order_items = None
        err_msg = None
        try:
            # only get columns: id, distance, status, and order by id descending.
            order_paged = Order.query.with_entities(
                Order.id, Order.distance, Order.status).order_by(
                    Order.id.desc()).paginate(self.page, self.limit)
            # Rows come back as tuples in the with_entities order above.
            order_items = [{
                'id': order[0],
                'distance': order[1],
                'status': order[2]
            } for order in order_paged.items]
            logger.info(
                "Successfully retrieved orders with pagination %s on page %s."
                % (self.limit, self.page))
            # NOTE(review): source is truncated mid-statement below — the
            # remainder of this method is outside this view.
            logger.info("Has previous page: %s, and has next page: %s." %
def make_combined(animal, channel):
    """
    This method takes all tif files by channel and creates a histogram of the
    combined image space.

    :param animal: the prep_id of the animal we are working with
    :param channel: the channel {1,2,3}
    :return: nothing
    """
    logger = get_logger(animal)
    fileLocationManager = FileLocationManager(animal)
    INPUT = os.path.join(fileLocationManager.prep, f'CH{channel}', 'thumbnail')
    MASK_INPUT = fileLocationManager.thumbnail_masked
    OUTPUT = os.path.join(fileLocationManager.histogram, f'CH{channel}')
    os.makedirs(OUTPUT, exist_ok=True)
    tifs = os.listdir(INPUT)
    # lfiles tracks how many files actually contributed, for averaging below.
    lfiles = len(tifs)
    hist_dict = Counter({})
    for i, tif in enumerate(tqdm(tifs)):
        filename = str(i).zfill(3) + '.tif'
        input_path = os.path.join(INPUT, filename)
        mask_path = os.path.join(MASK_INPUT, filename)
        try:
            img = io.imread(input_path)
        except Exception:
            # NOTE(review): a read failure aborts the whole loop (break) while
            # a mask failure only skips the file (continue) — preserved as-is,
            # but confirm the asymmetry is intentional.
            logger.error(f'Could not read {input_path}')
            lfiles -= 1
            break
        try:
            mask = io.imread(mask_path)
        except Exception:
            logger.warning(f'Could not open {mask_path}')
            continue
        # mask image
        img = cv2.bitwise_and(img, img, mask=mask)
        try:
            flat = img.flatten()
            del img  # release the full image before the counting work below
        except Exception:
            logger.error(f'Could not flatten file {input_path}')
            lfiles -= 1
            break
        try:
            img_counts = np.bincount(flat)
        except Exception:
            logger.error(f'Could not create counts {input_path}')
            lfiles -= 1
            break
        try:
            # Map each distinct pixel value to its count for this image.
            img_dict = Counter(
                dict(zip(np.unique(flat), img_counts[img_counts.nonzero()])))
        except Exception:
            logger.error(f'Could not create counter {input_path}')
            lfiles -= 1
            break
        try:
            # Counter addition accumulates per-value counts across images.
            hist_dict = hist_dict + img_dict
        except Exception:
            logger.error(f'Could not add files {input_path}')
            lfiles -= 1
            break
    hist_dict = dict(hist_dict)
    # Average counts over the number of contributing files.
    hist_values = [i / lfiles for i in hist_dict.values()]
    fig = plt.figure()
    plt.rcParams['figure.figsize'] = [10, 6]
    plt.bar(list(hist_dict.keys()), hist_values, color=COLORS[channel])
    plt.yscale('log')
    plt.grid(axis='y', alpha=0.75)
    plt.xlabel('Value')
    plt.ylabel('Frequency')
    plt.title(f'{animal} channel {channel} @16bit with {lfiles} tif files')
    outfile = f'{animal}.png'
    outpath = os.path.join(OUTPUT, outfile)
    fig.savefig(outpath, bbox_inches='tight')
    print('Finished')
def make_histogram(animal, channel):
    """
    This method creates an individual histogram for each tif file by channel.

    Args:
        animal: the prep id of the animal
        channel: the channel of the stack to process {1,2,3}

    Returns:
        nothing
    """
    logger = get_logger(animal)
    fileLocationManager = FileLocationManager(animal)
    sqlController = SqlController(animal)
    INPUT = os.path.join(fileLocationManager.prep, f'CH{channel}', 'thumbnail')
    MASK_INPUT = fileLocationManager.thumbnail_masked
    tifs = sqlController.get_sections(animal, channel)
    # Validate the input directory; test_dir returns an error string (empty on success).
    error = test_dir(animal, INPUT, downsample=True, same_size=False)
    if len(tifs) == 0:
        error += " No sections in the database"
    if len(error) > 0:
        print(error)
        sys.exit()
    ch_dir = f'CH{channel}'
    OUTPUT = os.path.join(fileLocationManager.histogram, ch_dir)
    os.makedirs(OUTPUT, exist_ok=True)
    progress_id = sqlController.get_progress_id(True, channel, 'HISTOGRAM')
    sqlController.set_task(animal, progress_id)
    for i, tif in enumerate(tqdm(tifs)):
        filename = str(i).zfill(3) + '.tif'
        input_path = os.path.join(INPUT, filename)
        mask_path = os.path.join(MASK_INPUT, filename)
        output_path = os.path.join(OUTPUT,
                                   os.path.splitext(tif.file_name)[0] + '.png')
        if not os.path.exists(input_path):
            print('Input tif does not exist', input_path)
            continue
        if os.path.exists(output_path):
            # Histogram already generated for this section.
            continue
        try:
            img = io.imread(input_path)
        except Exception:
            logger.warning(f'Could not open {input_path}')
            continue
        try:
            mask = io.imread(mask_path)
        except Exception:
            logger.warning(f'Could not open {mask_path}')
            continue
        # Zero out everything outside the mask before histogramming.
        img = cv2.bitwise_and(img, img, mask=mask)
        if img.shape[0] * img.shape[1] > 1000000000:
            # Very large image: take every 2nd pixel in both axes to halve each dimension.
            scale = 1 / float(2)
            img = img[::int(1. / scale), ::int(1. / scale)]
        try:
            flat = img.flatten()
        except Exception:
            logger.warning(f'Could not flat {input_path}')
            continue
        fig = plt.figure()
        plt.rcParams['figure.figsize'] = [10, 6]
        plt.hist(flat, flat.max(), [0, 10000], color=COLORS[channel])
        plt.style.use('ggplot')
        plt.yscale('log')
        plt.grid(axis='y', alpha=0.75)
        plt.xlabel('Value')
        plt.ylabel('Frequency')
        plt.title(f'{tif.file_name} @16bit')
        # Save first, then close the figure to release memory; the original
        # closed before saving, which relies on the closed figure still rendering.
        fig.savefig(output_path, bbox_inches='tight')
        plt.close(fig)
from sqlalchemy.ext.compiler import compiles from sqlalchemy.types import DateTime from sqlalchemy.exc import OperationalError from sqlalchemy.engine.url import URL from sqlalchemy import create_engine from flask_sqlalchemy import SQLAlchemy import datetime from order_app import settings from order_app.settings import app from utilities.logger import get_logger DeclarativeBase = declarative_base( ) #For SQLAlchemy (can make use of SQLAlchemy sessions, row lock etc.) logger = get_logger('database') global_db_config = settings.DATABASE db = SQLAlchemy(app) #For Flask def create_db_if_not_exists(db_config=None): """ Create Database if not exists, using postgres default user. """ try: #Test if connects to order_app-specific database successfully if not db_config: db_config = global_db_config.copy() engine = create_engine(URL(**db_config)) conn = engine.connect()