def run(runtest,test_functions,repeat=1): l = [(t,repr(t.__doc__.split('\n')[1].strip())) for t in test_functions] #l = [(t,'') for t in test_functions] start_memusage = memusage() diff_memusage = None start_jiffies = jiffies() i = 0 while i<repeat: i += 1 for t,fname in l: runtest(t) if start_memusage is None: continue if diff_memusage is None: diff_memusage = memusage() - start_memusage else: diff_memusage2 = memusage() - start_memusage if diff_memusage2!=diff_memusage: print 'memory usage change at step %i:' % i,\ diff_memusage2-diff_memusage,\ fname diff_memusage = diff_memusage2 current_memusage = memusage() print 'run',repeat*len(test_functions),'tests',\ 'in %.2f seconds' % ((jiffies()-start_jiffies)/100.0) if start_memusage: print 'initial virtual memory size:',start_memusage,'bytes' print 'current virtual memory size:',current_memusage,'bytes'
def deconvolve(self):
    """ Execute deconvolution iteration and return estimate.

    Repeatedly calls ``self.compute_estimate`` until a stopping
    criterion fires (excessive photon leak, no non-converging photons
    left, iteration limit, or tau threshold), writing per-iteration
    statistics to ``deconvolve_data.txt`` in ``self.cache_dir`` and
    optionally saving intermediate TIFF results.

    Returns the final estimate array.
    """
    if VERBOSE > 9:
        print 'Entering %s.deconvolve' % (self.__class__.__name__)
    options = self.options
    # NOTE(review): options.get appears to use the keyword name as the
    # option key and its value as the default -- confirm against the
    # project's options implementation.
    save_intermediate_results = options.get(
        save_intermediate_results=False)
    # Column titles for the per-iteration data file; local variables with
    # exactly these names are created in the loop and collected via exec.
    data_to_save = ('count', 't', 'mn', 'mx', 'tau1', 'tau2', 'leak', 'e',
                    's', 'u', 'n', 'u_esu', 'mse', 'mem', 'klic')
    data_file_name = os.path.join(self.cache_dir, 'deconvolve_data.txt')
    input_data = numpy.array(self.data, dtype=self.float_dtype)
    count = -1  # iteration counter; -1 means no iteration has run yet
    append_data_file = False
    first_estimate = options.get(first_estimate='input image')
    if first_estimate == 'input image':
        estimate = input_data.copy()
    elif first_estimate == 'convolved input image':
        estimate = self.convolve(input_data)
    elif first_estimate == '2x convolved input image':
        estimate = self.convolve(self.convolve(input_data))
    elif first_estimate == 'last result':
        # Resume: find the newest result_<count>.tif that still exists.
        if os.path.isfile(data_file_name):
            data_file = RowFile(data_file_name)
            data, data_titles = data_file.read(with_titles=True)
            data_file.close()
            counts = map(int, data['count'])
            for count in reversed(counts):
                fn = os.path.join(self.cache_dir, 'result_%s.tif' % (count))
                if os.path.isfile(fn):
                    append_data_file = True
                    break
            if append_data_file:
                print 'Loading the last result from %r.' % (fn)
                stack = ImageStack.load(fn)
                # NOTE(review): dtype=self.float_type here but
                # dtype=self.float_dtype above -- looks inconsistent;
                # confirm both attributes exist and agree.
                estimate = numpy.array(stack.images, dtype=self.float_type)
                # Back up the full data file before possibly rewriting it.
                f = open(
                    os.path.join(
                        self.cache_dir,
                        'deconvolve_data_%s_%s.txt' % (counts[0], count)), 'w')
                fi = open(data_file_name)
                f.write(fi.read())
                fi.close()
                f.close()
                if count != counts[-1]:
                    # The data file has rows past the last saved result:
                    # truncate it to the resumed iteration.
                    print 'Expected result %s but got %s, fixing %r' % (
                        counts[-1], count, data_file_name)
                    data_file = RowFile(data_file_name, titles=data_titles)
                    for c in range(count + 1):
                        data_file.write(', '.join(
                            [str(data[t][c]) for t in data_titles]))
                    data_file.close()
        if not append_data_file:
            # Nothing to resume from; fall back to the input image.
            print 'Found no results in %r, using input image as estimate.' \
                % (self.cache_dir)
            count = -1
            estimate = input_data.copy()
    else:
        raise NotImplementedError(`first_estimate`)  # backticks: Python 2 repr
    prev_estimate = estimate.copy()
    initial_photon_count = input_data.sum()
    print 'Initial photon count: %.3f' % (initial_photon_count)
    print 'Initial minimum: %.3f' % (estimate.min())
    print 'Initial maximum: %.3f' % (estimate.max())
    max_count = options.get(max_nof_iterations=50)
    bar = ProgressBar(0, max_count, totalWidth=40, show_percentage=False)
    data_norm2 = (input_data**2).sum()
    if options.get(rltv_estimate_lambda=False) or options.get(
            rltv_compute_lambda_lsq=False):
        data_to_save += ('lambda_lsq', )
    if self.test_data is not None:
        # Ground-truth image available: also track MSE against it ('mseo').
        data_to_save += ('mseo', )
        test_data_norm2 = (self.test_data**2).sum()
    data_file = RowFile(data_file_name,
                        titles=data_to_save,
                        append=append_data_file)
    data_file.comment('DeconvolveSysArgv: %s' % (' '.join(map(str, sys.argv))))
    # Keep column layout compatible with an appended-to file that already
    # has an 'mseo' column.
    if 'mseo' in data_file.extra_titles and 'mseo' not in data_to_save:
        data_to_save += ('mseo', )
    stop_message = ''
    stop = count >= max_count  # a resumed run may already be finished
    if stop:
        stop_message = 'The number of iterations reached to maximal count: %s' % (
            max_count)
    else:
        if save_intermediate_results:
            self.save(estimate, 'result_%sm1.tif' % (count + 1))
    try:
        min_mse = 1e300
        min_mseo = 1e300
        min_tau = 1e300
        max_lambda = 0.0
        while not stop:
            count += 1
            self.count = count
            # Progress-bar fields: format string -> value(s).
            info_map = {}
            ittime = time.time()
            prev2_estimate = prev_estimate.copy()
            prev_estimate = estimate.copy()
            # compute_estimate updates `estimate` in place and returns
            # photon bookkeeping e, s, u plus n non-converging photons
            # -- TODO confirm exact semantics of e/s/u.
            e, s, u, n = self.compute_estimate(estimate)
            info_map['E/S/U/N=%s/%s/%s/%s'] = int(e), int(s), int(u), int(
                n)
            photon_leak = 1.0 - (e + s + u) / initial_photon_count
            info_map['LEAK=%s%%'] = 100 * photon_leak
            if 'leak' in data_to_save:
                leak = 100 * photon_leak
            if 'u_esu' in data_to_save:
                u_esu = u / (e + s + u)
                #info_map['U/ESU=%s'] = u_esu
            if 'mn' in data_to_save:
                mn, mx = estimate.min(), estimate.max()
            if 'mse' in data_to_save:
                # Re-blur the estimate and compare with the input.
                eh = self.convolve(estimate, inplace=False)
                mse = ((eh - input_data)**2).sum() / data_norm2
                info_map['MSE=%s'] = mse
            if 'klic' in data_to_save:
                # `eh` is only assigned in the 'mse' branch above, so
                # 'klic' requires 'mse' to also be in data_to_save.
                klic = ops_ext.kullback_leibler_divergence(
                    input_data, eh, 1.0)
                info_map['KLIC=%s'] = klic
            if 'mseo' in data_to_save:
                mseo = (
                    (estimate - self.test_data)**2).sum() / test_data_norm2
                info_map['MSEO=%s'] = mseo
            if 'tau1' in data_to_save:
                # Relative change vs previous (tau1) and two-back (tau2)
                # estimates.
                tau1 = abs(estimate - prev_estimate).sum() / abs(prev_estimate).sum()
                tau2 = abs(estimate - prev2_estimate).sum() / abs(
                    prev2_estimate).sum()
                info_map['TAU1/2=%s/%s'] = (tau1, tau2)
            if 'lambda_lsq' in data_to_save:
                lambda_lsq = self.lambda_lsq
                if lambda_lsq > max_lambda:
                    max_lambda = lambda_lsq
                info_map['LAM/MX=%s/%s'] = lambda_lsq, max_lambda
            if 'mem' in data_to_save:
                mem = int(numpy_utils.memusage() / 2**20)  # bytes -> MiB
                #info_map['MEM=%sMB'] = mem
            info_map['TIME=%ss'] = t = time.time() - ittime
            bar.updateComment(' ' + ', '.join(
                [k % (tostr(info_map[k])) for k in sorted(info_map)]))
            bar(count)
            if 'mse' in data_to_save and mse < min_mse:
                min_mse = mse
                #self.save(discretize(estimate), 'deconvolved_%s_min_mse.tif' % (count))
            if 'mseo' in data_to_save and mseo < min_mseo:
                min_mseo = mseo
                #self.save(discretize(estimate), 'deconvolved_%s_min_mseo.tif' % (count))
            if save_intermediate_results:
                self.save(estimate, 'result_%s.tif' % (count))
            # Stopping criteria:
            stop = True
            if abs(photon_leak) > 0.2:
                stop_message = 'Photons leak is too large: %.3f%%>20%%' % (
                    photon_leak * 100)
            elif not u and not int(n):
                stop_message = 'The number of non converging photons reached to zero.'
            elif count >= max_count:
                stop_message = 'The number of iterations reached to maximal count: %s' % (
                    max_count)
            elif 'tau1' in data_to_save and tau1 <= float(
                    options.get(rltv_stop_tau=0.0)):
                stop_message = 'Desired tau-threshold achieved'
            else:
                stop = False
            # NOTE(review): exec assembles the row from the local
            # variables named in data_to_save (all trusted locals); an
            # explicit argument list would be safer and clearer.
            exec 'data_file.write(%s)' % (', '.join(data_to_save))
            if not save_intermediate_results and stop:
                self.save(estimate, 'result_%s.tif' % (count))
    except KeyboardInterrupt:
        # User abort: keep the current estimate and report cleanly.
        stop_message = 'Iteration was interrupted by user.'
    print
    bar.updateComment(' ' + stop_message)
    bar(count)
    print
    data_file.close()
    return estimate
def show_memory(msg):
    """Write the current memory usage to stdout, prefixed by ``msg``.

    Does nothing unless the module-level VERBOSE flag is truthy.
    """
    if not VERBOSE:
        return
    usage = numpy_utils.memusage()
    stream = sys.stdout
    stream.write('%s: %s\n' % (msg, bytes2str(usage)))
    stream.flush()
def show_memory(msg):
    """Report current memory usage on stdout when VERBOSE is enabled.

    The line is flushed immediately so it interleaves correctly with
    other progress output.
    """
    if VERBOSE:
        report = "%s: %s\n" % (msg, bytes2str(numpy_utils.memusage()))
        sys.stdout.write(report)
        sys.stdout.flush()
def deconvolve(self):
    """ Execute deconvolution iteration and return estimate.

    Drives the iterative loop around ``self.compute_estimate``: seeds
    the estimate according to the ``first_estimate`` option (possibly
    resuming from a previously saved result), logs per-iteration
    statistics to ``deconvolve_data.txt`` in the cache directory, and
    stops on photon leak, convergence, the iteration limit, or a tau
    threshold.  Returns the final estimate array.
    """
    if VERBOSE>9:
        print 'Entering %s.deconvolve' % (self.__class__.__name__)
    options = self.options
    # NOTE(review): options.get seems to treat the keyword name as the
    # option key and the value as its default -- TODO confirm.
    save_intermediate_results = options.get(save_intermediate_results=False)
    # Titles of the statistics columns; matching local variables are
    # bound inside the loop and gathered by the exec call below.
    data_to_save = ('count', 't', 'mn', 'mx', 'tau1', 'tau2', 'leak',
                    'e', 's', 'u', 'n','u_esu', 'mse', 'mem', 'klic')
    data_file_name = os.path.join(self.cache_dir, 'deconvolve_data.txt')
    input_data = numpy.array(self.data, dtype=self.float_dtype)
    count = -1  # -1 until the first iteration runs
    append_data_file = False
    first_estimate = options.get(first_estimate='input image')
    if first_estimate=='input image':
        estimate = input_data.copy()
    elif first_estimate=='convolved input image':
        estimate = self.convolve(input_data)
    elif first_estimate=='2x convolved input image':
        estimate = self.convolve(self.convolve(input_data))
    elif first_estimate=='last result':
        # Try to resume from the most recent saved result.
        if os.path.isfile(data_file_name):
            data_file = RowFile(data_file_name)
            data,data_titles = data_file.read(with_titles=True)
            data_file.close()
            counts = map(int, data['count'])
            # Walk counts from newest to oldest until a saved TIFF exists.
            for count in reversed (counts):
                fn = os.path.join(self.cache_dir, 'result_%s.tif' % (count))
                if os.path.isfile(fn):
                    append_data_file = True
                    break
            if append_data_file:
                print 'Loading the last result from %r.' % (fn)
                stack = ImageStack.load(fn)
                # NOTE(review): self.float_type vs self.float_dtype used
                # above -- verify this is intentional.
                estimate = numpy.array(stack.images, dtype=self.float_type)
                # Preserve a copy of the data file before any fix-up.
                f = open(os.path.join(self.cache_dir,
                                      'deconvolve_data_%s_%s.txt'
                                      % (counts[0],count)), 'w')
                fi = open(data_file_name)
                f.write(fi.read())
                fi.close()
                f.close()
                if count != counts[-1]:
                    # Trailing rows have no matching result file: rewrite
                    # the data file up to the resumed count.
                    print 'Expected result %s but got %s, fixing %r' \
                        % (counts[-1], count, data_file_name)
                    data_file = RowFile(data_file_name, titles=data_titles)
                    for c in range(count+1):
                        data_file.write(', '.join([str(data[t][c])
                                                   for t in data_titles]))
                    data_file.close()
        if not append_data_file:
            print 'Found no results in %r, using input image as estimate.' \
                % (self.cache_dir)
            count = -1
            estimate = input_data.copy()
    else:
        raise NotImplementedError(`first_estimate`)  # Python 2 repr backticks
    prev_estimate = estimate.copy()
    initial_photon_count = input_data.sum()
    print 'Initial photon count: %.3f' % (initial_photon_count)
    print 'Initial minimum: %.3f' % (estimate.min())
    print 'Initial maximum: %.3f' % (estimate.max())
    max_count = options.get(max_nof_iterations=50)
    bar = ProgressBar(0, max_count, totalWidth=40, show_percentage=False)
    data_norm2 = (input_data**2).sum()
    if options.get(rltv_estimate_lambda=False) or options.get(rltv_compute_lambda_lsq=False):
        data_to_save += ('lambda_lsq',)
    if self.test_data is not None:
        # With a ground-truth image, also record MSE against it.
        data_to_save += ('mseo',)
        test_data_norm2 = (self.test_data**2).sum()
    data_file = RowFile(data_file_name, titles = data_to_save,
                        append = append_data_file)
    data_file.comment('DeconvolveSysArgv: %s' % (' '.join(map(str, sys.argv))))
    # Stay column-compatible with an existing file carrying 'mseo'.
    if 'mseo' in data_file.extra_titles and 'mseo' not in data_to_save:
        data_to_save += ('mseo',)
    stop_message = ''
    stop = count >= max_count  # resumed run may already be at the limit
    if stop:
        stop_message = 'The number of iterations reached to maximal count: %s' % (max_count)
    else:
        if save_intermediate_results:
            self.save(estimate, 'result_%sm1.tif' % (count+1))
    try:
        min_mse = 1e300
        min_mseo = 1e300
        min_tau = 1e300
        max_lambda = 0.0
        while not stop:
            count += 1
            self.count = count
            info_map = {}  # progress-bar fields: format -> value(s)
            ittime = time.time()
            prev2_estimate = prev_estimate.copy()
            prev_estimate = estimate.copy()
            # e, s, u photon sums plus n non-converging photons -- TODO
            # confirm the exact meaning of each component.
            e,s,u,n = self.compute_estimate(estimate)
            info_map['E/S/U/N=%s/%s/%s/%s'] = int(e), int(s), int(u), int(n)
            photon_leak = 1.0 - (e+s+u)/initial_photon_count
            info_map['LEAK=%s%%'] = 100*photon_leak
            if 'leak' in data_to_save:
                leak = 100*photon_leak
            if 'u_esu' in data_to_save:
                u_esu = u/(e+s+u)
                #info_map['U/ESU=%s'] = u_esu
            if 'mn' in data_to_save:
                mn, mx = estimate.min(), estimate.max()
            if 'mse' in data_to_save:
                # Re-convolve and compare with the measured data.
                eh = self.convolve(estimate, inplace=False)
                mse = ((eh - input_data)**2).sum() / data_norm2
                info_map['MSE=%s'] = mse
            if 'klic' in data_to_save:
                # Relies on `eh` from the 'mse' branch just above.
                klic = ops_ext.kullback_leibler_divergence(input_data, eh, 1.0)
                info_map['KLIC=%s'] = klic
            if 'mseo' in data_to_save:
                mseo = ((estimate - self.test_data)**2).sum() / test_data_norm2
                info_map['MSEO=%s'] = mseo
            if 'tau1' in data_to_save:
                # Relative step sizes against the last two estimates.
                tau1 = abs(estimate - prev_estimate).sum() / abs(prev_estimate).sum()
                tau2 = abs(estimate - prev2_estimate).sum() / abs(prev2_estimate).sum()
                info_map['TAU1/2=%s/%s'] = (tau1, tau2)
            if 'lambda_lsq' in data_to_save:
                lambda_lsq = self.lambda_lsq
                if lambda_lsq > max_lambda:
                    max_lambda = lambda_lsq
                info_map['LAM/MX=%s/%s'] = lambda_lsq, max_lambda
            if 'mem' in data_to_save:
                mem = int(numpy_utils.memusage()/2**20)  # MiB
                #info_map['MEM=%sMB'] = mem
            info_map['TIME=%ss'] = t = time.time() - ittime
            bar.updateComment(' '+', '.join([k%(tostr(info_map[k]))
                                             for k in sorted(info_map)]))
            bar(count)
            if 'mse' in data_to_save and mse < min_mse:
                min_mse = mse
                #self.save(discretize(estimate), 'deconvolved_%s_min_mse.tif' % (count))
            if 'mseo' in data_to_save and mseo < min_mseo:
                min_mseo = mseo
                #self.save(discretize(estimate), 'deconvolved_%s_min_mseo.tif' % (count))
            if save_intermediate_results:
                self.save(estimate, 'result_%s.tif' % (count))
            # Stopping criteria:
            stop = True
            if abs(photon_leak) > 0.2:
                stop_message = 'Photons leak is too large: %.3f%%>20%%' % (photon_leak*100)
            elif not u and not int (n):
                stop_message = 'The number of non converging photons reached to zero.'
            elif count >= max_count:
                stop_message = 'The number of iterations reached to maximal count: %s' % (max_count)
            elif 'tau1' in data_to_save and tau1 <= float(options.get(rltv_stop_tau=0.0)):
                stop_message = 'Desired tau-threshold achieved'
            else:
                stop = False
            # NOTE(review): exec collects the like-named locals into the
            # data-file row; arguments are trusted local names only.
            exec 'data_file.write(%s)' % (', '.join (data_to_save))
            if not save_intermediate_results and stop:
                self.save(estimate, 'result_%s.tif' % (count))
    except KeyboardInterrupt:
        # Ctrl-C ends the loop but still returns the current estimate.
        stop_message = 'Iteration was interrupted by user.'
    print
    bar.updateComment (' '+stop_message)
    bar(count)
    print
    data_file.close()
    return estimate