def save_processed(self, name, threshold):
    """Write the processed imageseries as a frame-cache with omega metadata.

    Creates a directory ``<name>-fcache-dir`` containing the final yaml
    frame-cache file, the npz cache file, and the saved omega array.
    ``threshold`` is passed through to ``imageseries.write``.

    Raises OSError if the target directory already exists (os.mkdir).
    """
    dname = '%s-fcache-dir' % name
    tcname = '%s-fcache-tmp.yml' % name
    fcname = '%s-fcache.yml' % name
    cache = '%s-cachefile.npz' % name
    omname = '%s-omegas.npy' % name
    pname = lambda s: os.path.join(dname, s)  # prepend fc directory
    os.mkdir(dname)
    # Steps:
    # * write frame cache with no omegas to temporary file
    # * write omegas to file
    # * modify temporary file to include omegas
    imageseries.write(self.processed(), pname(tcname), self.PROCFMT,
                      threshold=threshold, cache_file=cache)
    self.save_omegas(pname(omname))
    # modify yaml: splice a load-numpy-array directive for the omegas
    # into the (previously empty) meta section of the temp file, then
    # write the result under the final name and drop the temp file.
    with open(pname(tcname), 'r') as f:
        s = f.read()
    m0 = 'meta: {}'
    m1 = 'meta:\n omega: ! load-numpy-array %s' % omname
    with open(pname(fcname), 'w') as f:
        f.write(s.replace(m0, m1))
    os.remove(pname(tcname))
def test_fmtfc(self):
    """save/load frame-cache format"""
    # Round-trip: write with thresholding, then reopen via the yml style.
    imageseries.write(
        self.is_a, self.fcfile, self.fmt,
        threshold=self.thresh, cache_file=self.cache_file,
    )
    reloaded = imageseries.open(self.fcfile, self.fmt, style='yml')
    # Frames must reconstruct exactly and metadata must survive.
    self.assertAlmostEqual(compare(self.is_a, reloaded), 0.,
                           "frame-cache reconstruction failed")
    self.assertTrue(compare_meta(self.is_a, reloaded))
def test_fmth5(self):
    """save/load HDF5 format"""
    # Write the series to HDF5 and read it back from the same path.
    imageseries.write(self.is_a, self.h5file, self.fmt, path=self.h5path)
    readback = imageseries.open(self.h5file, self.fmt, path=self.h5path)
    # Both pixel data and metadata must round-trip unchanged.
    self.assertAlmostEqual(compare(self.is_a, readback), 0.,
                           "h5 reconstruction failed")
    self.assertTrue(compare_meta(self.is_a, readback))
def test_fmth5_compress_err(self):
    """HDF5 options: compression level out of range"""
    # gzip compression levels run 0-9; 10 must be rejected with ValueError.
    with self.assertRaises(ValueError):
        imageseries.write(self.is_a, self.h5file, self.fmt,
                          path=self.h5path, gzip=10)
def test_fmth5_nocompress(self):
    """HDF5 options: no compression"""
    # gzip=0 turns compression off entirely; round trip must still be exact.
    imageseries.write(self.is_a, self.h5file, self.fmt,
                      path=self.h5path, gzip=0)
    readback = imageseries.open(self.h5file, self.fmt, path=self.h5path)
    self.assertAlmostEqual(compare(self.is_a, readback), 0.,
                           "h5 reconstruction failed")
    self.assertTrue(compare_meta(self.is_a, readback))
def test_fmth5_chunk(self):
    """HDF5 options: chunk size"""
    # chunk_rows=0 exercises the degenerate chunking option.
    imageseries.write(self.is_a, self.h5file, self.fmt,
                      path=self.h5path, chunk_rows=0)
    readback = imageseries.open(self.h5file, self.fmt, path=self.h5path)
    self.assertAlmostEqual(compare(self.is_a, readback), 0.,
                           "h5 reconstruction failed")
    self.assertTrue(compare_meta(self.is_a, readback))
def test_fmth5_nparray(self):
    """HDF5 format with numpy array metadata"""
    key = 'np-array'
    arr = np.array([0, 2.0, 1.3])
    # Attach an ndarray metadata entry, then verify it survives the
    # HDF5 round trip bit-for-bit (norm of the difference is zero).
    self.is_a.metadata[key] = arr
    imageseries.write(self.is_a, self.h5file, self.fmt, path=self.h5path)
    readback = imageseries.open(self.h5file, self.fmt, path=self.h5path)
    self.assertAlmostEqual(np.linalg.norm(readback.metadata[key] - arr),
                           0., "h5 numpy array metadata failed")
def test_fmth5_nparray(self):
    """HDF5 format with numpy array metadata"""
    # Store an ndarray under a metadata key and round-trip through HDF5.
    key = 'np-array'
    expected = np.array([0, 2.0, 1.3])
    self.is_a.metadata[key] = expected
    imageseries.write(self.is_a, self.h5file, self.fmt, path=self.h5path)
    is_h = imageseries.open(self.h5file, self.fmt, path=self.h5path)
    recovered = is_h.metadata[key]
    # Exact reconstruction: the difference vector must have zero norm.
    error = np.linalg.norm(recovered - expected)
    self.assertAlmostEqual(error, 0., "h5 numpy array metadata failed")
def test_fmtfc(self):
    """save/load frame-cache format"""
    # Write a thresholded frame cache, reopen it with default style.
    imageseries.write(
        self.is_a, self.fcfile, self.fmt,
        threshold=self.thresh, cache_file=self.cache_file,
    )
    readback = imageseries.open(self.fcfile, self.fmt)
    # Reconstruction must be exact and metadata preserved.
    self.assertAlmostEqual(compare(self.is_a, readback), 0.,
                           "frame-cache reconstruction failed")
    self.assertTrue(compare_meta(self.is_a, readback))
def test_fmtfc_nparray(self):
    """frame-cache format with numpy array metadata"""
    key = 'np-array'
    arr = np.array([0, 2.0, 1.3])
    # ndarray metadata must survive the frame-cache round trip.
    self.is_a.metadata[key] = arr
    imageseries.write(
        self.is_a, self.fcfile, self.fmt,
        threshold=self.thresh, cache_file=self.cache_file,
    )
    readback = imageseries.open(self.fcfile, self.fmt)
    self.assertAlmostEqual(np.linalg.norm(readback.metadata[key] - arr),
                           0., "frame-cache numpy array metadata failed")
def test_fmtfc_nparray(self):
    """frame-cache format with numpy array metadata"""
    # Attach an ndarray metadata entry before writing the frame cache.
    key = 'np-array'
    expected = np.array([0, 2.0, 1.3])
    self.is_a.metadata[key] = expected
    imageseries.write(self.is_a, self.fcfile, self.fmt,
                      threshold=self.thresh,
                      cache_file=self.cache_file)
    is_fc = imageseries.open(self.fcfile, self.fmt)
    recovered = is_fc.metadata[key]
    # Zero norm of the difference means exact metadata reconstruction.
    error = np.linalg.norm(recovered - expected)
    self.assertAlmostEqual(error, 0.,
                           "frame-cache numpy array metadata failed")
def save_processed(self, name, threshold, output_dir=None):
    """Save the processed imageseries as an npz frame-cache.

    Omega and panel-id metadata are attached to the processed series
    before writing.  ``output_dir`` defaults to the current working
    directory; otherwise the directory is created (os.mkdir raises if
    it already exists).
    """
    if output_dir is None:
        output_dir = os.getcwd()
    else:
        os.mkdir(output_dir)
    # NOTE(review): output_dir is never joined into the cache path below,
    # so the npz always lands relative to the CWD — confirm this is intended.
    # add omegas
    pims = self.processed()
    metad = pims.metadata
    metad['omega'] = self.omegas
    metad['panel_id'] = self.panel_id
    cache = '%s-cachefile.npz' % name
    # presumably the "dummy" filename is ignored for style="npz" and the
    # actual output is cache_file — verify against imageseries.write.
    imageseries.write(pims, "dummy", self.PROCFMT, style="npz",
                      threshold=threshold, cache_file=cache)
# generate omegas nf = len(ims) w = imageseries.omega.OmegaWedges(nf) w.addwedge(start_ome, stop_ome, nf) meta = ims.metadata meta['omega'] = w.omegas w.save_omegas('omegas_FF.npy') print(ims.metadata) # handle dark if dark_file is None: print("making dark image") dark = imageseries.stats.median(ims, nframes=120) np.save('background_%s-%s.npy' % (output_stem, det_id), dark) else: dark = fabio.open(dark_file).data # add flips pims = Pims(ims, [ ('dark', dark), ] + popts) # save as frame-cache print("writing frame cache") imageseries.write(pims, '%s-fc_%s.yml' % (output_stem, det_id), 'frame-cache', cache_file="%s-fc_%s.npz" % (output_stem, det_id), threshold=threshold, output_yaml=False)
# Build a per-detector max-frame image and save the stack as HDF5.
image_dir = os.path.join(os.getcwd(), 'imageseries')
samp_name = "Ruby1_hydra"
scan_number = 0

print("Making requested max frame...")
# NOTE(review): data_dir is not defined in this chunk — presumably set
# earlier in the script; confirm before running standalone.
max_frames_output_name = os.path.join(
    data_dir, "%s_%d-maxframes.hdf5" % (samp_name, scan_number)
)
if os.path.exists(max_frames_output_name):
    os.remove(max_frames_output_name)
max_frames = dict.fromkeys(cfg.instrument.hedm.detectors)
# FIX: dict.iterkeys() is Python 2 only and raises AttributeError on
# Python 3 (which this chunk otherwise targets — print() calls).
# Iterating the dict directly is equivalent in both versions.
for det_key in max_frames:
    # %%s leaves a literal %s placeholder for the detector key.
    fc_file = os.path.join(
        image_dir, "%s_%06d-fc_%%s.npz" % (samp_name, scan_number))
    ims = imageseries.open(fc_file % det_key, 'frame-cache')
    max_frames[det_key] = imageseries.stats.max(ims)

ims_out = imageseries.open(
    None, 'array',
    data=np.array([max_frames[i] for i in max_frames]),
    # FIX: materialize the keys; on Python 3 .keys() is a view, which
    # may not serialize cleanly as HDF5 metadata.
    meta={'panels': list(max_frames.keys())}
)
imageseries.write(
    ims_out, max_frames_output_name, 'hdf5', path='/imageseries'
)
print >> f, output_str # load basic imageseries: no flip, no omegas ims = imageseries.open(rawfname %det_key, 'image-files') if len(ims) != nf: import pbd; pdb.set_trace() # generate omegas w = imageseries.omega.OmegaWedges(nf) w.addwedge(*wedge_args) meta = ims.metadata meta['omega'] = w.omegas w.save_omegas('omegas_NF.npy') print ims.metadata # make dark print "making dark image" dark = imageseries.stats.median(ims, nframes=120) np.save('median_dark_%s.npy' %det_key, dark) # add processing opts pims = Pims(ims, [('dark', dark),]) # save as frame-cache print "writing frame cache" imageseries.write(pims, 'imageseries-fc_%s.yml' %det_key, 'frame-cache', cache_file="images-fc_%s.npz" %det_key, threshold=15)
) if os.path.exists(max_frames_output_name): os.remove(max_frames_output_name) max_frames = dict.fromkeys(det_keys) for det_key in det_keys: max_frames[det_key] = imageseries.stats.max(imsd[det_key]) ims_out = imageseries.open( None, 'array', data=np.array([max_frames[i] for i in max_frames]), meta={'panels': max_frames.keys()} ) imageseries.write( ims_out, max_frames_output_name, 'hdf5', path='/imageseries' ) #%% class GenerateEtaOmeMaps(object): """ eta-ome map class derived from new image_series and YAML config ...for now... must provide: self.dataStore self.planeData self.iHKLList self.etaEdges # IN RADIANS
def _load_images(self, ims):
    # load images from imageseries
    # ... add processing here
    #
    # Two modes (Python 2 code: print statements, dict.keys()[0]):
    # * self._make_images: ims is a dict of per-panel imageseries; build
    #   one max frame per panel from frames near predicted omegas, then
    #   wrap the stack in an array-based imageseries.
    # * otherwise: ims is already an imageseries with 'panels' metadata.
    # Either way, self.images ends up ordered by self.panel_ids.
    print "loading images"
    if self._make_images:
        assert isinstance(ims, dict), \
            "To make images, ims input must be a dictionary"
        print "making max frames to spec..."
        max_frames = []
        for panel_id in self.panel_ids:
            panel = self.instr.detectors[panel_id]
            oims = Oimgs(ims[panel_id])  # now have OmegaImageSeries
            del_ome = oims.omega[0, 1] - oims.omega[0, 0]  # degrees
            # simulate the rotation series to predict which omegas
            # (hence frames) contain diffraction from self.gpl
            simd = panel.simulate_rotation_series(
                self.planeData, self.gpl,
                ome_ranges=self.ome_ranges,
                chi=self.chi, tVec_s=self.tvec)
            pred_omes = np.degrees(np.vstack(simd[2])[:, 2])  # in DEGREES
            ndiv, tol_grid = make_tolerance_grid(
                del_ome, self._ome_tol, 1, adjust_window=True)
            # collect every frame index within tolerance of a predicted omega
            frame_indices = []
            for ome in pred_omes:
                expanded_omes = ome + tol_grid
                fidxs = oims.omegarange_to_frames(
                    expanded_omes[0], expanded_omes[-1])
                if len(fidxs) > 0:
                    frame_indices += fidxs
            if len(frame_indices) == 0:
                raise RuntimeError(
                    "no omegas in speficied imageseries range(s)")
            # pixelwise max over the selected frames for this panel
            max_frames.append(
                np.max(np.array([oims[k] for k in frame_indices]), axis=0))
            pass  # closes loop on panels
        # max array-based ims of max frames
        # NOTE: this assumes that the frames are all the same, which
        # is ok for NF detector at different L distances...
        ims = imageseries.open(None, 'array',
                               data=np.array(max_frames),
                               meta=dict(panels=self.panel_ids))
        # build "<id>-<id>-..." string from the grain ids for the filename
        gid_str = ''
        for s in ['%s-' % i for i in self._grain_ids]:
            gid_str += s
        # NOTE(review): save_max_frames is not defined in this method —
        # presumably a module-level flag; confirm it exists in scope.
        if save_max_frames:
            imageseries.write(
                ims, 'imageseries-max_grains_%s.h5' % gid_str[:-1],
                'hdf5', path='data')
        pass  # closes conditional on make_images
    # map panel id -> frame index within the (array-based) imageseries
    m = ims.metadata
    panel_ids = m['panels']
    d = dict(zip(panel_ids, range(len(panel_ids))))
    if 'process' in m:
        # apply any processing ops recorded in the metadata
        pspec = m['process']
        ops = []
        for p in pspec:
            k = p.keys()[0]
            ops.append((k, p[k]))
        pims = Pimgs(ims, ops)
    else:
        pims = ims
    # one image per panel, in self.panel_ids order
    self.images = []
    for panel_id in self.panel_ids:
        self.images.append(pims[d[panel_id]])