def test_PZFFormat_lossy_uint16_qs():
    """Round-trip uint16 data through sqrt quantization with scale 0.2.

    The decoded result must match the expected quantize/dequantize
    reconstruction computed directly in the sqrt domain.
    """
    from PYME.IO import PZFFormat

    original = np.random.poisson(100, 100).reshape(10, 10).astype('uint16')
    qs = .2

    packed = PZFFormat.dumps(original,
                             compression=PZFFormat.DATA_COMP_HUFFCODE,
                             quantization=PZFFormat.DATA_QUANT_SQRT,
                             quantizationOffset=0, quantizationScale=qs)
    recovered, header = PZFFormat.loads(packed)

    # expected result of quantizing (floor in sqrt space) then de-quantizing
    expected = ((np.floor(np.sqrt(original.astype('f') - .1) / qs).astype('i') * qs) ** 2).astype('i')

    print(original.min(), original.max(), recovered.min(), recovered.max(),
          expected.min(), expected.max())
    print(recovered.squeeze() - expected.squeeze())

    assert np.allclose(recovered.squeeze(), expected.squeeze())
def update_base_tile(self, tile_x, tile_y, data, weights, tile_offset=(0, 0), frame_offset=(0, 0), frame_shape=None):
    """
    Over-ridden version of update_base_tile which causes this to be called
    on the server rather than the client

    In practice it simply adds each chunk to a queue of chunks that get
    pushed asynchronously in multiple threads (one for each server).

    Parameters
    ----------
    tile_x, tile_y : int
        tile indices used to pick the server responsible for this chunk
    data : array-like
        the tile data, PZF-encoded before being queued
    weights : str
        must be the literal string 'auto'; anything else raises
    tile_offset, frame_offset : tuple of int
        offsets forwarded to the server as query parameters
    frame_shape : tuple of int
        frame extent forwarded to the server; NOTE(review): the default of
        None will raise a TypeError when indexed below — callers appear to
        always supply it, confirm.

    Raises
    ------
    RuntimeError
        if weights is anything other than 'auto'
    """
    server_idx = server_for_chunk(tile_x, tile_y,
                                  chunk_shape=self.chunk_shape,
                                  nr_servers=len(self.servers))

    # BUGFIX: was `weights is not 'auto'` — identity comparison with a string
    # literal is implementation-dependent (and a SyntaxWarning on modern
    # Python); equality is the correct check.
    if weights != 'auto':
        raise RuntimeError('Distributed pyramid only supports auto weights')

    fn = f'__pyramid_update_tile/{self.base_dir}?x={tile_x}&y={tile_y}&' + \
         f'tox={tile_offset[0]}&toy={tile_offset[1]}&fox={frame_offset[0]}&foy={frame_offset[1]}&' + \
         f'fsx={frame_shape[0]}&fsy={frame_shape[1]}'

    self._tile_spoolers[server_idx].put(fn, PZFFormat.dumps(data))
def _save(self, filename, data):
    """Write *data* (cast to float32) to *filename* as PZF, creating the
    containing directory if it does not yet exist."""
    from PYME.IO import PZFFormat

    target_dir = os.path.split(filename)[0]
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    with open(filename, 'wb') as out:
        out.write(PZFFormat.dumps(data.astype('float32')))
def getPZFFrame(self, frameNo):
    """Return frame *frameNo* as a PZF-encoded byte string.

    Raises IndexError if frameNo is beyond the last frame.
    """
    if frameNo >= self.dshape[2]:
        raise IndexError('Index out of bounds')

    if self.usePZFFormat:
        # frames are already stored PZF-compressed — return as-is
        return self.h5f.root.PZFImageData[frameNo]
    else:
        # raw storage: compress the requested frame on the fly
        return PZFFormat.dumps(self.h5f.root.ImageData[frameNo, :, :].squeeze(),
                               compression=self.PZFCompression)
def test_PZFFormat_raw_uint8():
    """uint8 data must survive an uncompressed dumps/loads round trip."""
    from PYME.IO import PZFFormat

    original = np.random.poisson(50, 100).reshape(10, 10).astype('uint8')
    recovered, header = PZFFormat.loads(PZFFormat.dumps(original))

    assert np.allclose(recovered.squeeze(), original.squeeze())
def test_PZFFormat_lossless_uint16():
    """Huffman-compressed uint16 data must round trip losslessly."""
    from PYME.IO import PZFFormat

    original = np.random.poisson(100, 10000).reshape(100, 100).astype('uint16')
    packed = PZFFormat.dumps(original, compression=PZFFormat.DATA_COMP_HUFFCODE)
    recovered, header = PZFFormat.loads(packed)

    assert np.allclose(recovered.squeeze(), original.squeeze())
def test_PZFFormat_raw_uint16():
    """uint16 data must survive an uncompressed dumps/loads round trip."""
    from PYME.IO import PZFFormat

    original = np.random.poisson(100, 10000).reshape(100, 100).astype('uint16')
    recovered, header = PZFFormat.loads(PZFFormat.dumps(original))

    print(original, recovered.squeeze())
    print(header, recovered.dtype)

    assert np.allclose(recovered.squeeze(), original.squeeze())
def save_tile(self, layer, x, y, data):
    """Persist a single pyramid tile into the per-layer SQLite table.

    Creates the layer table (and its (x, y) index) on first use, then
    inserts the tile as a float32 PZF blob.

    Parameters
    ----------
    layer : int
        pyramid level; selects/creates the table 'layer<layer>'
    x, y : int
        tile coordinates within the layer
    data : array-like
        tile data, cast to float32 and PZF-encoded before storage
    """
    from PYME.IO import PZFFormat

    table = 'layer%d' % layer

    # idiom fix: `table not in` rather than `not table in`
    if table not in self._known_tables:
        # table names cannot be bound as SQL parameters; %-formatting is
        # acceptable here because `layer` is an integer (no injection risk)
        self._cur.execute('CREATE TABLE %s (y INTEGER, x INTEGER, data BLOB)' % table)
        self._cur.execute('CREATE INDEX %s ON %s (x,y)' % ('idx_' + table, table))
        self._known_tables.append(table)

    # tile payload is parameterized as usual
    self._cur.execute('INSERT INTO %s VALUES (?,?,?)' % table,
                      (x, y, blob(PZFFormat.dumps(data.astype('float32')))))
def putFrame(self, frame):
    """Unpickle a single frame and append it to the image data store,
    bumping the frame count."""
    # NOTE(review): pickle.loads on data received from elsewhere is unsafe
    # for untrusted senders — acceptable only if the peer is trusted.
    f = pickle.loads(frame)

    self._checkCreateDataTable(f)

    if self.usePZFFormat:
        self.compImageData.append(PZFFormat.dumps(f.squeeze(),
                                                  compression=self.PZFCompression))
        self.compImageData.flush()
    else:
        self.imageData.append(f)
        self.imageData.flush()

    self.dshape[2] += 1
def get_frame(self, frame_num):
    """Return frame *frame_num* as PZF-compressed bytes.

    If a PZF index is present, the requested frame number is mapped to its
    on-disk position; otherwise frames are assumed to be stored in order.
    Frames stored as raw arrays are compressed on the fly; frames already
    stored PZF-compressed are returned unchanged.

    Raises
    ------
    IOError
        if frame_num is beyond the last frame
    """
    if frame_num >= self.n_frames:
        raise IOError('Frame num %d out of range' % frame_num)

    with h5rFile.tablesLock:
        # idiom fix: `is not None` rather than `not ... is None`
        if self.pzf_index is not None:
            # look up the stored position of this frame number
            idx = self.pzf_index['Position'][np.searchsorted(self.pzf_index['FrameNum'], frame_num)]
        else:
            idx = frame_num

        data = self.image_data[idx]

        if isinstance(data, np.ndarray):
            return PZFFormat.dumps(data.squeeze(), compression=self.PZFCompression)
        else:
            # already PZF compressed
            return data
def test_PZFFormat_lossy_uint16():
    """sqrt quantization with scale 1 must reconstruct as round(sqrt(x))**2."""
    from PYME.IO import PZFFormat

    original = np.random.poisson(100, 100).reshape(10, 10).astype('uint16')

    packed = PZFFormat.dumps(original,
                             compression=PZFFormat.DATA_COMP_HUFFCODE,
                             quantization=PZFFormat.DATA_QUANT_SQRT,
                             quantizationOffset=0, quantizationScale=1)
    recovered, header = PZFFormat.loads(packed)

    # expected quantize/dequantize reconstruction in the sqrt domain
    expected = (np.round(np.sqrt(original.astype('f'))).astype('i')) ** 2

    assert np.allclose(recovered.squeeze(), expected.squeeze())
def _queuePoll(self):
    """Worker-thread loop: drain batches of frames from the post queue and
    push them to the cluster as PZF files.

    Runs until self._dPoll goes falsy. Each queue item is an iterable of
    (frame_number, frame_data) pairs; every frame is PZF-encoded and the
    whole batch is uploaded in one put_files call. A thread-busy counter
    (self._numThreadsProcessing) is maintained under self._lock so other
    code can tell whether uploads are in flight.
    """
    while self._dPoll:
        try:
            # non-blocking get; Queue.Empty sends us to the sleep below
            data = self._postQueue.get_nowait()

            # mark this thread as busy for the duration of the upload
            with self._lock:
                self._numThreadsProcessing += 1

            try:
                files = []
                for imNum, frame in data:
                    if self._aggregate_h5:
                        # route through the server-side h5 aggregation endpoint
                        fn = '/'.join(['__aggregate_h5', self.seriesName,
                                       'frame%05d.pzf' % imNum])
                    else:
                        fn = '/'.join([self.seriesName,
                                       'frame%05d.pzf' % imNum])

                    pzf = PZFFormat.dumps(frame, sequenceID=self.sequenceID,
                                          frameNum=imNum, **self.compSettings)

                    files.append((fn, pzf))

                if len(files) > 0:
                    # single batched upload for the whole chunk
                    clusterIO.put_files(files, serverfilter=self.clusterFilter)

            except Exception as e:
                # record for the main thread, log with traceback, and re-raise
                self._last_thread_exception = e
                logging.exception('Exception whilst putting files')
                raise
            finally:
                # always release the busy count, even on failure
                with self._lock:
                    self._numThreadsProcessing -= 1

            time.sleep(.01)
            #print 't', len(data)
        except Queue.Empty:
            # nothing queued — back off briefly before polling again
            time.sleep(.01)
def get_frame_pzf(self):
    """
    Get a frame in PZF format (compressed, fast), uses long polling

    Returns
    -------

    """
    from PYME.IO import PZFFormat

    with self._new_frame_condition:
        # block until the producer publishes a frame
        while self._current_frame is None:
            self._new_frame_condition.wait()

        packed = PZFFormat.dumps(self._current_frame,
                                 compression=PZFFormat.DATA_COMP_RAW)

        # consume the frame so the next poll waits for a fresh one
        self._current_frame = None

    return packed
def putFrames(self, frames):
    """Unpickle a batch of frames and append each to the image data store.

    The frame count (self.dshape[2]) is incremented once per frame; the
    underlying table is flushed once after the whole batch.

    Parameters
    ----------
    frames : bytes
        a pickled sequence of frame arrays. NOTE(review): pickle on data
        received from elsewhere is unsafe for untrusted senders.
    """
    fs = pickle.loads(frames)

    # removed unused timing locals (t1/t2) that were never read

    self._checkCreateDataTable(fs[0])

    if self.usePZFFormat:
        for f in fs:
            self.compImageData.append(PZFFormat.dumps(f.squeeze(),
                                                      compression=self.PZFCompression))
            self.dshape[2] += 1
        self.compImageData.flush()
    else:
        for f in fs:
            self.imageData.append(f)
            self.dshape[2] += 1
        self.imageData.flush()
def _save(self, filename, data):
    """Push *data* (as a float32 PZF blob) to the cluster at *filename*."""
    from PYME.IO import clusterIO, PZFFormat

    payload = PZFFormat.dumps(data.astype('float32'))
    clusterIO.put_file(filename, payload)