class TestImagingDataset(object):

    """Exercise ImagingDataset construction, export, ROI handling and
    signal extraction against the bundled HDF5 and TIFF example data."""

    def setup(self):
        global tmp_dir

        # One dataset backed by the example HDF5 file (two identical channels).
        h5_seq = Sequence.create('HDF5', example_hdf5(), 'yxt')
        self.filepath = os.path.join(tmp_dir, "test_imaging_dataset.sima")
        self.ds = ImagingDataset([h5_seq, h5_seq], self.filepath)
        self.rois = ROI.ROIList.load(example_imagej_rois(), fmt='ImageJ')

        # A second dataset backed by per-plane TIFF files (4 planes x 2 files).
        self.filepath_tiffs = os.path.join(tmp_dir, "test_dataset_tiffs.sima")
        tiff_seq = Sequence.create(
            'TIFFs',
            [[example_tiffs(), example_tiffs()] for _ in range(4)])
        self.ds_tiffs = ImagingDataset(
            [tiff_seq, tiff_seq], self.filepath_tiffs)

    def teardown(self):
        shutil.rmtree(self.filepath)
        shutil.rmtree(self.filepath_tiffs)

    def load_saved_tiffs_dataset(self):
        reloaded = ImagingDataset.load(self.filepath_tiffs)
        assert_equal(reloaded.sequences[0].shape, (3, 4, 173, 173, 2))

    def test_time_averages(self):
        # First access computes the averages...
        first = self.ds.time_averages
        assert_equal(self.ds.frame_shape, first.shape)
        # ...second access should come back from the saved pkl.
        second = self.ds.time_averages
        assert_equal(self.ds.frame_shape, second.shape)

    def test_export_averages_tiff16(self):
        out_path = os.path.join(self.filepath, 'time_avg_Ch2.tif')
        self.ds.export_averages([out_path], fmt='TIFF16', scale_values=False)
        expected = self.ds.time_averages[0, ..., 0].astype('uint16')
        assert_equal(expected, np.array(Image.open(out_path)))

    def test_export_averages_tiff8(self):
        out_path = os.path.join(self.filepath, 'time_avg_Ch2.tif')
        self.ds.export_averages([out_path], fmt='TIFF8', scale_values=False)
        expected = self.ds.time_averages[0, ..., 0].astype('uint8')
        assert_equal(expected, np.array(Image.open(out_path)))

    def test_export_averages_hdf5(self):
        out_path = os.path.join(self.filepath, 'time_avg.h5')
        self.ds.export_averages(out_path, fmt='HDF5', scale_values=False)
        h5_time_avg = h5py.File(out_path, 'r')['time_average']
        assert_equal(self.ds.time_averages.astype('uint16'), h5_time_avg)
        assert_equal(np.string_(self.ds.channel_names),
                     np.string_(h5_time_avg.attrs['channel_names']))
        assert_equal(['z', 'y', 'x', 'c'],
                     [dim.label for dim in h5_time_avg.dims])

    def test_add_and_delete_rois(self):
        self.ds.add_ROIs(self.rois, 'rois')
        assert_equal(len(self.ds.ROIs), 1)
        self.ds.add_ROIs(self.rois, 'rois2')
        assert_equal(len(self.ds.ROIs), 2)
        assert_equal(sorted(self.ds.ROIs.keys()), ['rois', 'rois2'])
        assert_equal(len(self.ds.ROIs['rois']), 2)

        # Deleting an unknown label is a silent no-op.
        self.ds.delete_ROIs('foo')

        self.ds.delete_ROIs('rois')
        assert_equal(len(self.ds.ROIs), 1)
        self.ds.delete_ROIs('rois2')
        assert_equal(len(self.ds.ROIs), 0)

        # Still a silent no-op once everything is gone.
        self.ds.delete_ROIs('foo')

    def test_rois(self):
        assert_equal(len(self.ds.ROIs), 0)

    def test_extract(self):
        extracted = self.ds.extract(self.rois, label='rois')
        assert_equal(len(self.ds.signals()), 1)
        assert_equal(extracted['raw'], self.ds.signals()['rois']['raw'])
        assert_equal(len(extracted['raw']), 2)
        assert_equal(len(extracted['raw'][0]), 2)

    @dec.skipif(not _has_picos)
    def test_infer_spikes(self):
        self.ds.extract(self.rois, label='rois')
        spikes, fits, parameters = self.ds.infer_spikes()
        signals = self.ds.signals()['rois']
        assert_equal(signals['spikes'], spikes)
        assert_equal(signals['spikes_fits'], fits)
        # assert_equal(signals['spikes_params'], parameters)
        assert_equal(len(spikes), 2)
        assert_equal(len(fits), 2)
        assert_equal(len(parameters), 2)
        assert_equal(spikes[0].shape, (2, 20))
        assert_equal(fits[0].shape, (2, 20))
def _load_version0(path):
    """Load a SIMA 0.x dataset and convert it to the current format.

    Parameters
    ----------
    path : str
        The path to the original saved dataset, ending in .sima

    Returns
    -------
    ds : sima.ImagingDataset
        The converted dataset (with displacements, trimming, and ROIs
        applied/attached when the corresponding pickle files exist).

    Examples
    --------
    >>> from sima.misc import example_data
    >>> from sima.misc.convert import _load_version0
    >>> ds = _load_version0(example_data())
    """

    def parse_channel(channel):
        """Parse an old format channel stored a dictionary

        Parameters
        ----------
        channel : dict

        Returns
        -------
        result : sima.Sequence
            A sequence equivalent to the old format channel.
        """
        _resolve_paths(channel, path)
        klass = channel.pop('__class__')
        if klass == 'sima.iterables.MultiPageTIFF':
            result = Sequence.create('TIFF', channel['path'])
            try:
                clip = channel['clip']
            except KeyError:
                pass
            else:
                if clip is not None:
                    # Old-format clip bounds of 0 mean "no clipping" on that
                    # edge; map them to None so the slice is open-ended.
                    # BUGFIX: was `x is 0`, an identity comparison that only
                    # works via CPython small-int caching; use equality.
                    s = (slice(None), slice(None)) + tuple(
                        slice(*[None if x == 0 else x for x in dim])
                        for dim in clip)
                    result = result[s]
            return result
        elif klass == 'sima.iterables.HDF5':
            raise Exception('TODO')
        else:
            raise Exception('Format not recognized.')

    def parse_sequence(sequence):
        # A version-0 "sequence" is a list of per-channel dicts; join the
        # parsed channels into a single Sequence.
        channels = [parse_channel(c) for c in sequence]
        return Sequence.join(channels)

    with open(os.path.join(path, 'dataset.pkl'), 'rb') as f:
        unpickler = Unpickler(f)
        dataset_dict = unpickler.load()
    iterables = dataset_dict.pop('iterables')
    sequences = [parse_sequence(seq) for seq in iterables]

    # Apply displacements if they exist
    try:
        with open(os.path.join(path, 'displacements.pkl'), 'rb') as f:
            displacements = pkl.load(f)
    except IOError:
        pass
    else:
        assert all(np.all(d >= 0) for d in displacements)
        # Pad the frame so the largest displacement still fits.
        max_disp = np.max(list(chain(*displacements)), axis=0)
        frame_shape = np.array(sequences[0].shape)[1:]
        frame_shape[1:3] += max_disp
        sequences = [
            s.apply_displacements(d.reshape(s.shape[:3] + (2,)), frame_shape)
            for s, d in zip(sequences, displacements)]
        # Trimming only applies after motion correction.
        # NOTE(review): indentation was lost in this file; this block is
        # assumed to be nested under the displacements branch — confirm.
        try:
            trim_coords = dataset_dict.pop('_lazy__trim_coords')
        except KeyError:
            try:
                trim_criterion = dataset_dict.pop('trim_criterion')
            except KeyError:
                pass
            else:
                raise Exception(
                    'Parsing of trim_criterion ' + str(trim_criterion) +
                    ' not yet implemented')
        else:
            sequences = [s[:, :, trim_coords[0][0]:trim_coords[1][0],
                           trim_coords[0][1]:trim_coords[1][1]]
                         for s in sequences]
    ds = ImagingDataset(sequences, None)
    ds.savedir = path

    # Add ROIs if they exist
    try:
        with open(os.path.join(path, 'rois.pkl'), 'rb') as f:
            rois = pkl.load(f)
    except IOError:
        pass
    else:
        roi_lists = {}
        # PORTABILITY: iteritems() is Python-2-only; items() works on both.
        for label, roi_list_dict in rois.items():
            roi_list = []
            for roi in roi_list_dict['rois']:
                mask = roi['mask']
                polygons = roi['polygons']
                # A stored mask takes precedence over polygons.
                if mask is not None:
                    new_roi = ROI(mask=mask)
                else:
                    new_roi = ROI(polygons=polygons)
                new_roi.id = roi['id']
                new_roi.label = roi['label']
                new_roi.tags = roi['tags']
                new_roi.im_shape = roi['im_shape']
                roi_list.append(new_roi)
            roi_lists[label] = ROIList(roi_list)
            roi_lists[label].timestamp = roi_list_dict['timestamp']
        for label, roi_list in roi_lists.items():
            ds.add_ROIs(roi_list, label=label)
    return ds
class TestImagingDataset(object):

    """Exercise ImagingDataset statistics, export, ROI handling and
    signal extraction against the bundled HDF5 and TIFF example data."""

    def setup(self):
        global tmp_dir

        # Dataset backed by the example HDF5 file (two identical channels).
        h5_seq = Sequence.create('HDF5', example_hdf5(), 'yxt')
        self.filepath = os.path.join(tmp_dir, "test_imaging_dataset.sima")
        self.ds = ImagingDataset([h5_seq, h5_seq], self.filepath)
        self.rois = ROI.ROIList.load(example_imagej_rois(), fmt='ImageJ')

        # Dataset backed by per-plane TIFF files (4 planes x 2 files).
        self.filepath_tiffs = os.path.join(tmp_dir, "test_dataset_tiffs.sima")
        tiff_seq = Sequence.create(
            'TIFFs',
            [[example_tiffs(), example_tiffs()] for _ in range(4)])
        self.ds_tiffs = ImagingDataset(
            [tiff_seq, tiff_seq], self.filepath_tiffs)

    def teardown(self):
        shutil.rmtree(self.filepath)
        shutil.rmtree(self.filepath_tiffs)

    def load_saved_tiffs_dataset(self):
        reloaded = ImagingDataset.load(self.filepath_tiffs)
        assert_equal(reloaded.sequences[0].shape, (3, 4, 173, 173, 2))

    def test_time_averages(self):
        # First access computes; second should come from the saved pkl.
        first = self.ds.time_averages
        assert_equal(self.ds.frame_shape, first.shape)
        second = self.ds.time_averages
        assert_equal(self.ds.frame_shape, second.shape)

    def test_time_std(self):
        # First access computes; second should come from the saved pkl.
        first = self.ds.time_std
        assert_equal(self.ds.frame_shape, first.shape)
        second = self.ds.time_std
        assert_equal(self.ds.frame_shape, second.shape)

    def test_time_kurtosis(self):
        # First access computes; second should come from the saved pkl.
        first = self.ds.time_kurtosis
        assert_equal(self.ds.frame_shape, first.shape)
        second = self.ds.time_kurtosis
        assert_equal(self.ds.frame_shape, second.shape)

    def test_export_averages_tiff16(self):
        out_path = os.path.join(self.filepath, 'time_avg_Ch2.tif')
        self.ds.export_averages([out_path], fmt='TIFF16', scale_values=False)
        expected = self.ds.time_averages[0, ..., 0].astype('uint16')
        assert_equal(expected, np.array(Image.open(out_path)))

    def test_export_averages_tiff8(self):
        out_path = os.path.join(self.filepath, 'time_avg_Ch2.tif')
        self.ds.export_averages([out_path], fmt='TIFF8', scale_values=False)
        expected = self.ds.time_averages[0, ..., 0].astype('uint8')
        assert_equal(expected, np.array(Image.open(out_path)))

    def test_export_averages_hdf5(self):
        out_path = os.path.join(self.filepath, 'time_avg.h5')
        self.ds.export_averages(out_path, fmt='HDF5', scale_values=False)
        h5_time_avg = h5py.File(out_path, 'r')['time_average']
        assert_equal(self.ds.time_averages.astype('uint16'), h5_time_avg)
        assert_equal(np.string_(self.ds.channel_names),
                     np.string_(h5_time_avg.attrs['channel_names']))
        assert_equal(['z', 'y', 'x', 'c'],
                     [dim.label for dim in h5_time_avg.dims])

    def test_add_and_delete_rois(self):
        self.ds.add_ROIs(self.rois, 'rois')
        assert_equal(len(self.ds.ROIs), 1)
        self.ds.add_ROIs(self.rois, 'rois2')
        assert_equal(len(self.ds.ROIs), 2)
        assert_equal(sorted(self.ds.ROIs.keys()), ['rois', 'rois2'])
        assert_equal(len(self.ds.ROIs['rois']), 2)

        # Deleting an unknown label is a silent no-op.
        self.ds.delete_ROIs('foo')

        self.ds.delete_ROIs('rois')
        assert_equal(len(self.ds.ROIs), 1)
        self.ds.delete_ROIs('rois2')
        assert_equal(len(self.ds.ROIs), 0)

        # Still a silent no-op once everything is gone.
        self.ds.delete_ROIs('foo')

    def test_rois(self):
        assert_equal(len(self.ds.ROIs), 0)

    def test_extract(self):
        extracted = self.ds.extract(self.rois, label='rois')
        assert_equal(len(self.ds.signals()), 1)
        assert_equal(extracted['raw'], self.ds.signals()['rois']['raw'])
        assert_equal(len(extracted['raw']), 2)
        assert_equal(len(extracted['raw'][0]), 2)

    # @dec.skipif(not _has_picos)
    @dec.knownfailureif(True)  # infer_spikes is crashing w/o mosek
    def test_infer_spikes(self):
        self.ds.extract(self.rois, label='rois')
        spikes, fits, parameters = self.ds.infer_spikes()
        signals = self.ds.signals()['rois']
        assert_equal(signals['spikes'], spikes)
        assert_equal(signals['spikes_fits'], fits)
        # assert_equal(signals['spikes_params'], parameters)
        assert_equal(len(spikes), 2)
        assert_equal(len(fits), 2)
        assert_equal(len(parameters), 2)
        assert_equal(spikes[0].shape, (2, 20))
        assert_equal(fits[0].shape, (2, 20))
class TestImagingDataset(object):

    """Exercise ImagingDataset construction, time averages, export and
    ROI management against the bundled HDF5 and TIFF example data."""

    def setup(self):
        global tmp_dir

        # Dataset backed by the example HDF5 file (two identical channels).
        h5_seq = Sequence.create('HDF5', example_hdf5(), 'yxt')
        self.filepath = os.path.join(tmp_dir, "test_imaging_dataset.sima")
        self.ds = ImagingDataset([h5_seq, h5_seq], self.filepath)

        # Dataset backed by per-plane TIFF files (4 planes x 2 files).
        self.filepath_tiffs = os.path.join(tmp_dir, "test_dataset_tiffs.sima")
        tiff_seq = Sequence.create(
            'TIFFs',
            [[example_tiffs(), example_tiffs()] for _ in range(4)])
        self.ds_tiffs = ImagingDataset(
            [tiff_seq, tiff_seq], self.filepath_tiffs)

    def teardown(self):
        shutil.rmtree(self.filepath)
        shutil.rmtree(self.filepath_tiffs)

    def load_saved_tiffs_dataset(self):
        reloaded = ImagingDataset.load(self.filepath_tiffs)
        assert_equal(reloaded.sequences[0].shape, (3, 4, 173, 173, 2))

    def test_time_averages(self):
        # First access computes the averages...
        first = self.ds.time_averages
        assert_equal(self.ds.frame_shape, first.shape)
        # ...second access should come back from the saved pkl.
        second = self.ds.time_averages
        assert_equal(self.ds.frame_shape, second.shape)

    def test_export_averages_tiff16(self):
        out_path = os.path.join(self.filepath, 'time_avg_Ch2.tif')
        self.ds.export_averages(
            [out_path], fmt='TIFF16', scale_values=False)
        expected = self.ds.time_averages[0, ..., 0].astype('uint16')
        assert_equal(expected, np.array(Image.open(out_path)))

    def test_export_averages_tiff8(self):
        out_path = os.path.join(self.filepath, 'time_avg_Ch2.tif')
        self.ds.export_averages(
            [out_path], fmt='TIFF8', scale_values=False)
        expected = self.ds.time_averages[0, ..., 0].astype('uint8')
        assert_equal(expected, np.array(Image.open(out_path)))

    def test_export_averages_hdf5(self):
        out_path = os.path.join(self.filepath, 'time_avg.h5')
        self.ds.export_averages(out_path, fmt='HDF5', scale_values=False)
        h5_time_avg = h5py.File(out_path, 'r')['time_average']
        assert_equal(self.ds.time_averages.astype('uint16'), h5_time_avg)
        assert_equal(np.string_(self.ds.channel_names),
                     np.string_(h5_time_avg.attrs['channel_names']))
        assert_equal(['z', 'y', 'x', 'c'],
                     [dim.label for dim in h5_time_avg.dims])

    def test_add_and_delete_rois(self):
        rois = ROI.ROIList.load(example_imagej_rois(), fmt='ImageJ')
        self.ds.add_ROIs(rois, 'rois')
        assert_equal(len(self.ds.ROIs), 1)
        self.ds.add_ROIs(rois, 'rois2')
        assert_equal(len(self.ds.ROIs), 2)
        assert_equal(sorted(self.ds.ROIs.keys()), ['rois', 'rois2'])
        assert_equal(len(self.ds.ROIs['rois']), 2)

        # Deleting an unknown label is a silent no-op.
        self.ds.delete_ROIs('foo')

        self.ds.delete_ROIs('rois')
        assert_equal(len(self.ds.ROIs), 1)
        self.ds.delete_ROIs('rois2')
        assert_equal(len(self.ds.ROIs), 0)

        # Still a silent no-op once everything is gone.
        self.ds.delete_ROIs('foo')

    def test_rois(self):
        assert_equal(len(self.ds.ROIs), 0)