Example #1
# Imports assumed for this snippet (standard csbdeep test dependencies):
import numpy as np
from csbdeep.data import RawData, create_patches
from csbdeep.io import load_training_data
from csbdeep.utils import Path, backend_channels_last, move_image_axes


def test_create_save_and_load(tmpdir):
    rng = np.random.RandomState(42)
    tmpdir = Path(str(tmpdir))
    save_file = str(tmpdir / 'data.npz')

    n_images, n_patches_per_image = 2, 4

    def _create(img_size, img_axes, patch_size, patch_axes):
        U, V = (rng.uniform(size=(n_images, ) + img_size) for _ in range(2))
        X, Y, XYaxes = create_patches(raw_data=RawData.from_arrays(
            U, V, img_axes),
                                      patch_size=patch_size,
                                      patch_axes=patch_axes,
                                      n_patches_per_image=n_patches_per_image,
                                      save_file=save_file)
        (_X, _Y), val_data, _XYaxes = load_training_data(save_file,
                                                         verbose=True)
        assert val_data is None
        assert _XYaxes[-1 if backend_channels_last else 1] == 'C'
        _X, _Y = (move_image_axes(u, fr=_XYaxes, to=XYaxes) for u in (_X, _Y))
        assert np.allclose(X, _X, atol=1e-6)
        assert np.allclose(Y, _Y, atol=1e-6)
        assert set(XYaxes) == set(_XYaxes)
        assert load_training_data(save_file,
                                  validation_split=0.5)[2] is not None
        assert all(
            len(x) == 3 for x in load_training_data(save_file, n_images=3)[0])

    _create((64, 64), 'YX', (16, 16), None)
    _create((64, 64), 'YX', (16, 16), 'YX')
    _create((64, 64), 'YX', (16, 16, 1), 'YXC')
    _create((1, 64, 64), 'CYX', (16, 16), 'YX')
    _create((1, 64, 64), 'CYX', (1, 16, 16), None)
    _create((64, 3, 64), 'YCX', (3, 16, 16), 'CYX')
    _create((64, 3, 64), 'YCX', (16, 16, 3), 'YXC')
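Distilled from the test above, the end-to-end pattern it exercises (array sizes and file name are illustrative):

rng = np.random.RandomState(0)
U, V = (rng.uniform(size=(2, 64, 64)) for _ in range(2))  # source/target stacks
X, Y, XY_axes = create_patches(RawData.from_arrays(U, V, 'YX'),
                               patch_size=(16, 16),
                               n_patches_per_image=4,
                               save_file='data.npz')
(X_trn, Y_trn), (X_val, Y_val), axes = load_training_data('data.npz',
                                                          validation_split=0.1)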
Example #2
def main():
    tifs = []
    folders = []
    for root, dirs, files in os.walk(args.indir):
        for file in files:
            if file.endswith('tif') and ('mask' not in file):  # logical 'and', not bitwise '&'
                tifs.append(Path(root) / file)
                if Path(root) not in folders:
                    folders.append(Path(root))

    if not args.summarize_only:
        model = StarDist2D(None, name='gcamp-stardist', basedir='models')

        for tif in tifs:
            print(("Analyzing %s..." % str(tif.stem)), end='', flush=True)
            movie = imread(str(tif))
            num_frames, num_ch, dim_y, dim_x = get_movie_dims(movie)
            labels, df = analyze_gcamp(movie, model, num_frames, num_ch, dim_y,
                                       dim_x)
            savedir = tif.parent
            mask_file = savedir / (tif.stem + '_mask.tif')
            data_file = savedir / (tif.stem + '_analysis.csv')
            save_tiff_imagej_compatible(mask_file,
                                        labels.astype("uint8"),
                                        axes="TYX")
            df.to_csv(data_file)
            print("done!")

    for folder in folders:
        print(("Summarizing %s...") % str(folder), end='', flush=True)
        summary_dfs = summarize_folder(folder)
        savedir = folder.parent
        for summary, df in summary_dfs.items():
            df.to_csv(savedir / (folder.stem + '_' + summary + '.csv'))
        print('done!')

    print("Mischief managed :)")
Example #3
File: model.py Project: ml-lab/stardist
    def __init__(self, config=Config(), name=None, basedir='.'):
        """See class docstring."""

        # csbdeep-style inline validation: each check either passes or calls _raise(...)
        config is None or isinstance(config, Config) or _raise(
            ValueError('Invalid configuration: %s' % str(config)))
        # if config is not None and not config.is_valid():
        #     invalid_attr = config.is_valid(True)[1]
        #     raise ValueError('Invalid configuration attributes: ' + ', '.join(invalid_attr))

        name is None or isinstance(name, string_types) or _raise(ValueError())
        isinstance(basedir, (string_types, Path)) or _raise(ValueError())
        self.config = config
        self.basedir = Path(basedir)
        self.name = name
        self._set_logdir()
        self._model_prepared = False
        self.keras_model = self._build()
        if config is None:
            self._find_and_load_weights()
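The bare 'cond or _raise(exc)' expressions above are csbdeep's compact one-line asserts; _raise is essentially:

def _raise(e):
    raise e

# short-circuiting then turns a boolean check into an inline assertion:
isinstance(basedir, (str, Path)) or _raise(ValueError())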
Example #4
# Imports assumed for this snippet:
import numpy as np
from tifffile import imsave
from csbdeep.data import RawData
from csbdeep.utils import Path


def test_rawdata_from_folder(tmpdir):
    rng = np.random.RandomState(42)
    tmpdir = Path(str(tmpdir))

    n_images, img_size, img_axes = 3, (64,64), 'YX'
    data = {'X' : rng.uniform(size=(n_images,)+img_size).astype(np.float32),
            'Y' : rng.uniform(size=(n_images,)+img_size).astype(np.float32)}

    for name,images in data.items():
        (tmpdir/name).mkdir(exist_ok=True)
        for i,img in enumerate(images):
            imsave(str(tmpdir/name/('img_%02d.tif'%i)),img)

    raw_data = RawData.from_folder(str(tmpdir),['X'],'Y',img_axes)
    assert raw_data.size == n_images
    for i,(x,y,axes,mask) in enumerate(raw_data.generator()):
        assert mask is None
        assert axes == img_axes
        assert any(np.allclose(x,u) for u in data['X'])
        assert any(np.allclose(y,u) for u in data['Y'])
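Note: RawData.from_folder expects the base folder to contain one subfolder per source ('X' here) plus the target folder ('Y'), with corresponding images sharing the same file name, which is exactly the layout the test builds above.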
Example #5
import csbdeep

import numpy as np
import os
import glob

from tifffile import imread
from csbdeep.utils import Path, download_and_extract_zip_file, plot_some
from csbdeep.io import save_tiff_imagej_compatible
from csbdeep.models import CARE, ProjectionCARE
from helpers import save_8bit_tiff_imagej_compatible

try:
    from pathlib import Path
    Path().expanduser()
except (ImportError, AttributeError):
    from pathlib2 import Path

try:
    import tempfile
    tempfile.TemporaryDirectory
except (ImportError, AttributeError):
    from backports import tempfile

from skimage import exposure

import time

os.environ["CUDA_VISIBLE_DEVICES"] = "0"
import os
import glob
from csbdeep.utils.tf import limit_gpu_memory

from tifffile import imread
from csbdeep.utils import Path, download_and_extract_zip_file, plot_some
from csbdeep.io import save_tiff_imagej_compatible
from csbdeep.models import CARE

# In[3]:

basedirLow = '/local/u934/private/v_kapoor/ProjectionTraining/MasterLow/VeryLow/'
basedirResults3D = '/local/u934/private/v_kapoor/ProjectionTraining/MasterLow/NotsoLow/'
ModelName = 'BorialisS1S2FlorisMidNoiseModel'
BaseDir = '/data/u934/service_imagerie/v_kapoor/CurieDeepLearningModels/'
Path(basedirResults3D).mkdir(exist_ok=True)

# In[4]:

model = CARE(config=None, name=ModelName, basedir=BaseDir)

# In[6]:

Raw_path = os.path.join(basedirLow, '*tif')

axes = 'ZYX'
smallaxes = 'YX'
filesRaw = glob.glob(Raw_path)

filesRaw.sort()  # note the parentheses: a bare 'filesRaw.sort' is a no-op
print(len(filesRaw))
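The snippet ends before the listed files are processed; a plausible continuation, assuming csbdeep's standard prediction API (model.predict) and the axes defined above:

for fname in filesRaw:
    x = imread(fname)
    # restore the low-SNR stack with the pretrained CARE model
    restored = model.predict(x, axes)
    save_tiff_imagej_compatible(
        os.path.join(basedirResults3D, os.path.basename(fname)), restored, axes)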
Example #7
# In[4]:

with open(
        "/run/media/sancere/DATA1/Lucas_NextonCreated_npz/Parameters_Npz/ConfigNPZ_Training_CARE_restoration_SpinwideFRAP4_Bin2.txt",
        "w+") as ConfigNPZ:
    ConfigNPZ.write("patch_size = {} \n n_patches_per_image = {}".format(
        patch_size, n_patches_per_image))

# In[5]:

# assert X.shape == Y.shape
# print("shape of X,Y =", X.shape)
# print("axes  of X,Y =", XY_axes)

# # In[6]:

# for i in range(4):
#     plt.figure(figsize=(16,4))
#     sl = slice(8*i, 8*(i+1)), 0
#     plot_some(X[sl],Y[sl],title_list=[np.arange(sl[0].start,sl[0].stop)])
#     plt.show()
# None;

# In[7]:

from csbdeep.utils import Path

TriggerName = '/home/sancere/NextonDisk_1/TimeTrigger/TTGenDataDen1'
Path(TriggerName).mkdir(exist_ok=True)
Example #9
# Snippet begins mid-script; imports and parser setup below are assumed:
import sys
import argparse
import numpy as np
from glob import glob
from pathlib import Path
from tqdm import tqdm
from tifffile import imread
from csbdeep.utils import normalize
from stardist import fill_label_holes, random_label_cmap

my_parser = argparse.ArgumentParser()
my_parser.add_argument('--path',type=str,help='Folder containing images/ and masks/ subfolders')
# argparse's type=bool is a trap: bool('False') is True, so any value enables
# the option; a store_true switch is the reliable way to expose a boolean flag
my_parser.add_argument('--quick_demo',action='store_true',help='Run quick training demo instead of full training')
my_parser.add_argument('--epochs',type=int,default=400,help='Number of training epochs')
my_parser.add_argument('--steps_per_epoch',type=int,default=100,help='Steps per training epoch')


args = my_parser.parse_args()


trainpath=args.path
np.random.seed(42)
lbl_cmap = random_label_cmap()


X = sorted(glob(trainpath+'/images/*.tif'))
Y = sorted(glob(trainpath+'/masks/*.tif'))
assert all(Path(x).name==Path(y).name for x,y in zip(X,Y))

X = list(map(imread,X))
Y = list(map(imread,Y))
n_channel = 1 if X[0].ndim == 2 else X[0].shape[-1]



axis_norm = (0,1)   # normalize channels independently
# axis_norm = (0,1,2) # normalize channels jointly
if n_channel > 1:
    print("Normalizing image channels %s." % ('jointly' if axis_norm is None or 2 in axis_norm else 'independently'))
    sys.stdout.flush()

X = [normalize(x,1,99.8,axis=axis_norm) for x in tqdm(X)]
Y = [fill_label_holes(y) for y in tqdm(Y)]
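The snippet ends after preprocessing; a typical next step in stardist training scripts is a random train/validation split, sketched here (the 15% fraction is illustrative):

rng = np.random.RandomState(42)
ind = rng.permutation(len(X))
n_val = max(1, int(round(0.15 * len(ind))))
ind_train, ind_val = ind[:-n_val], ind[-n_val:]
X_val, Y_val = [X[i] for i in ind_val], [Y[i] for i in ind_val]
X_trn, Y_trn = [X[i] for i in ind_train], [Y[i] for i in ind_train]
print('number of images: %3d' % len(X))
print('- training:       %3d' % len(X_trn))
print('- validation:     %3d' % len(X_val))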
Example #10
def main():
    if not ('__file__' in locals() or '__file__' in globals()):
        print('running interactively, exiting.')
        sys.exit(0)

    # parse arguments
    parser, args = parse_args()
    args_dict = vars(args)

    # exit and show help if no arguments provided at all
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    # check for required arguments manually (because of argparse issue)
    required = ('--input-dir','--input-axes', '--norm-pmin', '--norm-pmax', '--model-basedir', '--model-name', '--output-dir')
    for r in required:
        dest = r[2:].replace('-','_')
        if args_dict[dest] is None:
            parser.print_usage(file=sys.stderr)
            print("%s: error: the following arguments are required: %s" % (parser.prog,r), file=sys.stderr)
            sys.exit(1)

    # show effective arguments (including defaults)
    if not args.quiet:
        print('Arguments')
        print('---------')
        pprint(args_dict)
        print()
        sys.stdout.flush()

    # logging function
    log = (lambda *a,**k: None) if args.quiet else tqdm.write

    # get list of input files and exit if there are none
    file_list = list(Path(args.input_dir).glob(args.input_pattern))
    if len(file_list) == 0:
        log("No files to process in '%s' with pattern '%s'." % (args.input_dir,args.input_pattern))
        sys.exit(0)

    # delay imports until after checking that all required arguments are provided
    from tifffile import imread, imsave
    import keras.backend as K
    from csbdeep.models import CARE
    from csbdeep.data import PercentileNormalizer
    sys.stdout.flush()
    sys.stderr.flush()

    # limit gpu memory
    if args.gpu_memory_limit is not None:
        from csbdeep.utils.tf import limit_gpu_memory
        limit_gpu_memory(args.gpu_memory_limit)

    # create CARE model and load weights, create normalizer
    K.clear_session()
    model = CARE(config=None, name=args.model_name, basedir=args.model_basedir)
    if args.model_weights is not None:
        print("Loading network weights from '%s'." % args.model_weights)
        model.load_weights(args.model_weights)
    normalizer = PercentileNormalizer(pmin=args.norm_pmin, pmax=args.norm_pmax, do_after=args.norm_undo)

    processed = []

    # process all files
    for file_in in tqdm(file_list, disable=args.quiet):
        # construct output file name
        file_out = Path(args.output_dir) / args.output_name.format(
            file_path = str(file_in.relative_to(args.input_dir).parent),
            file_name = file_in.stem, file_ext = file_in.suffix,
            model_name = args.model_name, model_weights = Path(args.model_weights).stem if args.model_weights is not None else None
        )

        # checks
        (file_in.suffix.lower()  in ('.tif','.tiff') and
         file_out.suffix.lower() in ('.tif','.tiff')) or _raise(ValueError('only tiff files supported.'))

        # load and predict restored image
        img = imread(str(file_in))
        restored = model.predict(img, axes=args.input_axes, normalizer=normalizer, n_tiles=args.n_tiles)

        # restored image could be multi-channel even if input image is not
        axes_out = axes_check_and_normalize(args.input_axes)
        if restored.ndim > img.ndim:
            assert restored.ndim == img.ndim + 1
            assert 'C' not in axes_out
            axes_out += 'C'

        # convert data type (if necessary)
        restored = restored.astype(np.dtype(args.output_dtype), copy=False)

        # save to disk
        if not args.dry_run:
            file_out.parent.mkdir(parents=True, exist_ok=True)
            if args.imagej_tiff:
                save_tiff_imagej_compatible(str(file_out), restored, axes_out)
            else:
                imsave(str(file_out), restored)

        processed.append((file_in,file_out))


    # print summary of processed files
    if not args.quiet:
        sys.stdout.flush()
        sys.stderr.flush()
        n_processed   = len(processed)
        len_processed = len(str(n_processed))
        log('Finished processing %d %s' % (n_processed, 'files' if n_processed > 1 else 'file'))
        log('-' * (26+len_processed if n_processed > 1 else 26))
        for i,(file_in,file_out) in enumerate(processed):
            len_file = max(len(str(file_in)),len(str(file_out)))
            log(('{:>%d}. in : {:>%d}'%(len_processed,len_file)).format(1+i,str(file_in)))
            log(('{:>%d}  out: {:>%d}'%(len_processed,len_file)).format('',str(file_out)))
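For reference, how the output_name template near the top of the loop expands; the template string here is illustrative, not the script's default:

output_name = '{file_path}/{model_name}/{file_name}{file_ext}'  # hypothetical template
print(output_name.format(file_path='a', file_name='b', file_ext='.tif',
                         model_name='m', model_weights=None))
# -> a/m/b.tif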
Example #11
# Imports assumed for this snippet:
import os
import numpy as np
import pandas as pd
from pathlib import Path


def summarize_folder(folder, min_frames=240, min_gcamp=130):
    """Collects analyzed dataframes within a folder and returns
    a simple summary of maximum responses across all cells given
    a set of filtering criteria"""

    csvs = []
    for file in os.listdir(folder):
        if file.endswith('.csv'):
            csvs.append(file)

    data = {}

    for csv in csvs:
        csv = Path(csv)
        dat = pd.read_csv(folder / csv, index_col=0)
        name = (csv.stem.replace('analysis', '').replace(
            'Gcamp6f', '').replace('PC12',
                                   '').replace('_', ' ').replace('  ',
                                                                 ' ').strip())
        # the date prefix lives on the folder name ('YYYY-MM-DD ...');
        # csv.parent would just be '.' here, since csvs holds bare file names
        date = str(Path(folder).name).split(" ")[0]
        dat['date'] = date
        data[name] = dat

    data = pd.concat(data, names=['condition']).reset_index(
        level=0).reset_index(drop=True)

    cells_in_conditions = np.unique(data[['condition',
                                          'cell']].to_records(index=False))

    for condition, cell in cells_in_conditions:
        num_frames = len(data.loc[(data['condition'] == condition) &
                                  (data['cell'] == cell), 'frame'])
        mean_gcamp = data.loc[(data['condition'] == condition) &
                              (data['cell'] == cell), 'primary_mean'].mean()
        if (num_frames <= min_frames) or (mean_gcamp < min_gcamp):
            data = data.drop(data[(data['condition'] == condition)
                                  & (data['cell'] == cell)].index)

    for normalization in [
            'primary_mean', 'primary_intden', 'secondary_mean',
            'secondary_intden'
    ]:
        data[normalization + "_normalized"] = 0
        for condition, cell in cells_in_conditions:
            baseline = data.loc[(data['condition'] == condition) &
                                (data['cell'] == cell) & (data['frame'] < 100),
                                normalization]
            baseline = np.mean(baseline)
            normalized = data.loc[(data['condition'] == condition) &
                                  (data['cell'] == cell),
                                  normalization].values / baseline
            data.loc[(data['condition'] == condition) & (data['cell'] == cell),
                     normalization + "_normalized"] = normalized

    output = {}

    output['intden_by_time'] = data.pivot_table(
        values='primary_intden_normalized',
        index='frame',
        columns=['condition', 'cell'])
    output['intden_by_time'].columns = output[
        'intden_by_time'].columns.droplevel(1)

    output['mean_by_time'] = data.pivot_table(values='primary_mean_normalized',
                                              index='frame',
                                              columns=['condition', 'cell'])
    output['mean_by_time'].columns = output['mean_by_time'].columns.droplevel(
        1)

    output['max_drug_mean'] = (data[(data['frame'] > 100)
                                    & (data['frame'] < 200)].pivot_table(
                                        values='primary_mean_normalized',
                                        index='cell',
                                        columns='condition',
                                        aggfunc=max))
    output['max_drug_intden'] = (data[(data['frame'] > 100)
                                      & (data['frame'] < 200)].pivot_table(
                                          values='primary_intden_normalized',
                                          index='cell',
                                          columns='condition',
                                          aggfunc=max))

    output['max_intden_kcl_spike'] = (
        data[(data['frame'] > 300) & (data['frame'] < 400)].pivot_table(
            values='primary_intden_normalized',
            index='cell',
            columns='condition',
            aggfunc=max))
    output['max_mean_kcl_spike'] = (data[(data['frame'] > 300)
                                         & (data['frame'] < 400)].pivot_table(
                                             values='primary_mean_normalized',
                                             index='cell',
                                             columns='condition',
                                             aggfunc=max))

    output['max_intden_kcl_plateau'] = (data[(
        data['frame'] > 430)].pivot_table(values='primary_intden_normalized',
                                          index='cell',
                                          columns='condition',
                                          aggfunc=max))
    output['max_mean_kcl_plateau'] = (data[(data['frame'] > 430)].pivot_table(
        values='primary_mean_normalized',
        index='cell',
        columns='condition',
        aggfunc=max))

    return output
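A minimal usage sketch, assuming a folder of *_analysis.csv files produced by the pipeline in Example #2 (the path is illustrative):

folder = Path('/data/2021-01-01 experiment')  # hypothetical folder of analysis CSVs
summaries = summarize_folder(folder)
for name, df in summaries.items():
    df.to_csv(folder.parent / (folder.stem + '_' + name + '.csv'))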