def plot_imgs(imgs, image_name=None, *argv, **kwargs):
    """Plot a stack of images on a square-ish subplot grid.

    NOT WORKING NOW: only the grid side length is computed; no figure is
    ever created, so calling this is currently a no-op.

    Parameters
    ----------
    imgs : sequence of 2-D arrays
        Images intended to be plotted.
    image_name : str, optional
        Unused placeholder (presumably a title/filename) — TODO confirm.
    *argv, **kwargs
        Unused; reserved for future plotting options.

    Returns
    -------
    None
    """
    total = len(imgs)
    # Side length of the intended square grid of subplots (unused for now).
    grid_side = np.ceil(np.sqrt(total))
def para_segment_compress_eigerdata(images, mask, md, filename, num_sub=100,
                                    bad_pixel_threshold=1e15,
                                    hot_pixel_threshold=2**30,
                                    bad_pixel_low_threshold=0,
                                    nobytes=4, bins=1, dtypes='images',
                                    reverse=True, num_max_para_process=50):
    '''Parallelly compress eiger data without header.

    Splits the frame series into chunks of ``num_sub`` (binned) frames and
    compresses each chunk in a separate worker process via
    ``segment_compress_eigerdata``, writing one '<filename>_temp-<i>.tmp'
    file per chunk.

    Parameters
    ----------
    images : array-like
        Frame series to compress (ignored when ``dtypes == 'uid'``).
    mask : 2-D array
        Pixel mask passed through to the segment compressor.
    md : dict
        Metadata; must contain 'uid' when ``dtypes == 'uid'``.
    filename : str
        Base name for the temporary per-chunk output files.
    num_sub : int
        Number of (binned) frames per chunk / temporary file.
    bad_pixel_threshold, bad_pixel_low_threshold : float
        Frame-intensity window; frames outside it are flagged bad downstream.
    hot_pixel_threshold : float
        Pixel values at/above this are masked out downstream.
    nobytes : int
        Bytes per pixel in the compressed output (2, 4 or 8).
    bins : int
        Frame-binning factor.
    dtypes : {'images', 'uid'}
        Whether ``images`` is given directly or loaded from the databroker.
    reverse : bool
        Passed to ``load_data`` (image orientation).
    num_max_para_process : int
        Maximum number of simultaneous worker processes per batch.

    Returns
    -------
    dict
        Maps chunk index -> multiprocessing ``AsyncResult``; each result
        yields (mask, avg_img, imgsum, bad_frame_list) for that chunk.
    '''
    if dtypes == 'uid':
        uid = md['uid']  # frame count is probed from the databroker instead
        detector = get_detector(db[uid])
        images_ = load_data(uid, detector, reverse=reverse)[:100]
        N = len(images_)
    else:
        N = len(images)
    # Raw frames consumed per chunk = num_sub binned frames.
    num_sub *= bins
    if N % num_sub:
        Nf = N // num_sub + 1
        print(
            'The average image intensity would be slightly not correct, about 1% error.'
        )
        print(
            'Please give a num_sub to make reminder of Num_images/num_sub =0 to get a correct avg_image'
        )
    else:
        Nf = N // num_sub
    print('It will create %i temporary files for parallel compression.' % Nf)
    if Nf > num_max_para_process:
        # BUGFIX: np.int was removed in NumPy 1.24 (deprecated since 1.20);
        # the builtin int is the documented replacement.
        N_runs = int(np.ceil(Nf / float(num_max_para_process)))
        print(
            'The parallel run number: %s is larger than num_max_para_process: %s'
            % (Nf, num_max_para_process))
    else:
        N_runs = 1
    result = {}
    for nr in range(N_runs):
        # Chunk indices handled by this batch of worker processes.
        if (nr + 1) * num_max_para_process > Nf:
            inputs = range(num_max_para_process * nr, Nf)
        else:
            inputs = range(num_max_para_process * nr,
                           num_max_para_process * (nr + 1))
        pool = Pool(processes=len(inputs))  #, maxtasksperchild=1000 )
        for i in inputs:
            print(i, num_sub, N, i * num_sub, (i + 1) * num_sub)
            if i * num_sub <= N:
                result[i] = pool.apply_async(segment_compress_eigerdata, [
                    images, mask, md, filename + '_temp-%i.tmp' % i,
                    bad_pixel_threshold, hot_pixel_threshold,
                    bad_pixel_low_threshold, nobytes, bins, i * num_sub,
                    (i + 1) * num_sub, dtypes, reverse
                ])
        pool.close()
        pool.join()
        pool.terminate()
    return result
def segment_compress_eigerdata(images, mask, md, filename,
                               bad_pixel_threshold=1e15,
                               hot_pixel_threshold=2**30,
                               bad_pixel_low_threshold=0,
                               nobytes=4, bins=1, N1=None, N2=None,
                               dtypes='images', reverse=True):
    '''Create a compressed eiger data without header, this function is for
    parallel compress; for parallel compress don't pass any non-scalar
    parameters.

    Compresses the frame segment [N1:N2) into a headerless sparse file.
    Each (binned) frame is written as: uint32 pixel count, then the flat
    pixel indices (int32) and the pixel values (type selected by nobytes).

    Parameters
    ----------
    images : array-like
        Frame series (replaced by a databroker load when dtypes == 'uid').
    mask : 2-D array
        Pixel mask; updated in place with the hot-pixel criterion.
    md : dict
        Metadata; must contain 'uid' when dtypes == 'uid'.
    filename : str
        Output file for this segment.
    bad_pixel_threshold, bad_pixel_low_threshold : float
        Frames with total intensity outside (low, high] are written empty.
    hot_pixel_threshold : float
        Pixels at/above this value are masked out for all later frames.
    nobytes : int
        Bytes per stored pixel value: 2 (int16), 4 (int32) or 8 (float64).
    bins : int
        Frame-binning factor; binned frames are averaged then stored.
    N1, N2 : int, optional
        Segment bounds, used only when dtypes == 'uid'.
    dtypes : {'images', 'uid'}
    reverse : bool
        Passed to load_data (image orientation).

    Returns
    -------
    mask : 2-D array
        Input mask further restricted by the hot-pixel threshold.
    avg_img : 2-D float array
        Average of the good frames. If no frame was good this is NaN/inf —
        the parallel caller relies on NaN as the 'bad segment' sentinel.
    imgsum : 1-D array
        Total masked intensity per (binned) frame.
    bad_frame_list : 1-D bool array
        True for frames outside the good-intensity window.
    '''
    if dtypes == 'uid':
        uid = md['uid']  # load this worker's segment from the databroker
        detector = get_detector(db[uid])
        images = load_data(uid, detector, reverse=reverse)[N1:N2]
        print(N1, N2)
    Nimg_ = len(images)
    M, N = images[0].shape
    # BUGFIX: np.float was removed in NumPy 1.24 (deprecated since 1.20);
    # the builtin float (== np.float64 here) is the documented replacement.
    avg_img = np.zeros([M, N], dtype=float)
    Nopix = float(avg_img.size)
    n = 0
    good_count = 0
    if nobytes == 2:
        dtype = np.int16
    elif nobytes == 4:
        dtype = np.int32
    elif nobytes == 8:
        dtype = np.float64
    else:
        print(
            "Wrong type of nobytes, only support 2 [np.int16] or 4 [np.int32]")
        dtype = np.int32
    Nimg = int(np.ceil(Nimg_ / bins))
    time_edge = np.array(
        create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bins))
    imgsum = np.zeros(Nimg)
    if bins != 1:
        print('The frames will be binned by %s' % bins)
    fp = open(filename, 'wb')
    for n in range(Nimg):
        t1, t2 = time_edge[n]
        if bins != 1:
            img = np.array(np.average(images[t1:t2], axis=0),
                           dtype=np.float64)
        else:
            img = np.array(images[t1], dtype=np.int32)
        # Permanently drop pixels that ever exceed the hot-pixel threshold.
        mask &= img < hot_pixel_threshold
        p = np.where(
            (np.ravel(img) > 0) * np.ravel(mask))[0]  #don't use masked data
        v = np.ravel(np.array(img, dtype=dtype))[p]
        dlen = len(p)
        imgsum[n] = v.sum()
        if (dlen == 0) or (imgsum[n] > bad_pixel_threshold) or (
                imgsum[n] <= bad_pixel_low_threshold):
            # Bad frame: write an empty record (count 0, no payload).
            dlen = 0
            fp.write(struct.pack('@I', dlen))
        else:
            np.ravel(avg_img)[p] += v
            good_count += 1
            fp.write(struct.pack('@I', dlen))
            fp.write(struct.pack('@{}i'.format(dlen), *p))
            if bins == 1:
                # 'h' (int16) when nobytes == 2, else 'i' (int32).
                fp.write(
                    struct.pack('@{}{}'.format(dlen, 'ih'[nobytes == 2]), *v))
            else:
                # Binned frames are always written as doubles.
                fp.write(
                    struct.pack('@{}{}'.format(dlen, 'dd'[nobytes == 2]), *v))
        del p, v, img
        fp.flush()
    fp.close()
    # NOTE: when good_count == 0 this yields NaN (0/0) with a warning; the
    # parallel caller checks np.isnan to skip all-bad segments, so do not
    # "fix" this with a guard.
    avg_img /= good_count
    bad_frame_list = (np.array(imgsum) > bad_pixel_threshold) | (
        np.array(imgsum) <= bad_pixel_low_threshold)
    # One '#' per finished segment as a minimal progress indicator.
    sys.stdout.write('#')
    sys.stdout.flush()
    return mask, avg_img, imgsum, bad_frame_list
def para_compress_eigerdata(images, mask, md, filename, num_sub=100,
                            bad_pixel_threshold=1e15,
                            hot_pixel_threshold=2**30,
                            bad_pixel_low_threshold=0, nobytes=4, bins=1,
                            dtypes='uid', reverse=True,
                            num_max_para_process=500, cpu_core_number=72,
                            with_pickle=True):
    '''Compress an eiger frame series in parallel and merge the per-chunk
    results into one compressed file (plus an optional pickle of results).

    Parameters
    ----------
    images : array-like
        Frame series (ignored when dtypes == 'uid').
    mask : 2-D array
        Initial pixel mask; replaced by the combined per-chunk masks.
    md : dict
        Metadata; must contain 'uid' when dtypes == 'uid'.
    filename : str
        Base name of the final compressed file; '<filename>-header' and
        '<filename>.pkl' are also written.
    num_sub : int
        Requested frames per chunk; may be enlarged so the chunk count does
        not exceed cpu_core_number.
    bad_pixel_threshold, bad_pixel_low_threshold, hot_pixel_threshold : float
        Passed through to the segment compressor.
    nobytes : int
        Bytes per stored pixel value (2, 4 or 8).
    bins : int
        Frame-binning factor.
    dtypes : {'uid', 'images'}
    reverse : bool
        Passed to load_data (image orientation).
    num_max_para_process : int
        Maximum simultaneous worker processes per batch.
    cpu_core_number : int
        Upper bound on the number of chunks (XF11ID server core count).
    with_pickle : bool
        If True, also dump the returned tuple to '<filename>.pkl'.

    Returns
    -------
    mask, avg_img, imgsum, bad_frame_list
        Combined pixel mask, average image, per-frame intensity sums, and
        the indices of bad frames.
    '''
    if dtypes == 'uid':
        uid = md['uid']  # frame count is probed from the databroker instead
        detector = get_detector(db[uid])
        images_ = load_data(uid, detector, reverse=reverse)[:100]
        N = len(images_)
    else:
        N = len(images)
    N = int(np.ceil(N / bins))
    Nf = int(np.ceil(N / num_sub))
    if Nf > cpu_core_number:
        print(
            "The process number is larger than %s (XF11ID server core number)"
            % cpu_core_number)
        num_sub_old = num_sub
        num_sub = int(np.ceil(N / cpu_core_number))
        Nf = int(np.ceil(N / num_sub))
        print("The sub compressed file number was changed from %s to %s" %
              (num_sub_old, num_sub))
    create_compress_header(md, filename + '-header', nobytes, bins)
    results = para_segment_compress_eigerdata(
        images=images,
        mask=mask,
        md=md,
        filename=filename,
        num_sub=num_sub,
        bad_pixel_threshold=bad_pixel_threshold,
        hot_pixel_threshold=hot_pixel_threshold,
        bad_pixel_low_threshold=bad_pixel_low_threshold,
        nobytes=nobytes,
        bins=bins,
        dtypes=dtypes,
        num_max_para_process=num_max_para_process)
    # Collect per-chunk (mask, avg_img, imgsum, bad_frame_list) in order.
    res_ = np.array([results[k].get() for k in list(sorted(results.keys()))])
    imgsum = np.zeros(N)
    bad_frame_list = np.zeros(N, dtype=bool)
    # BUGFIX: good_count previously started at 1, so avg_img was divided by
    # (number of good segments + 1); count from 0 instead.
    good_count = 0
    for i in range(Nf):
        mask_, avg_img_, imgsum_, bad_frame_list_ = res_[i]
        imgsum[i * num_sub:(i + 1) * num_sub] = imgsum_
        bad_frame_list[i * num_sub:(i + 1) * num_sub] = bad_frame_list_
        if i == 0:
            mask = mask_
            avg_img = np.zeros_like(avg_img_)
        else:
            mask *= mask_
        # A NaN-containing segment average is the 'all frames bad' sentinel
        # from segment_compress_eigerdata; skip it.
        if not np.sum(np.isnan(avg_img_)):
            avg_img += avg_img_
            good_count += 1
    bad_frame_list = np.where(bad_frame_list)[0]
    # Guard the degenerate all-bad case (good_count == 0): keep zeros
    # instead of dividing by zero.
    avg_img /= max(good_count, 1)
    if len(bad_frame_list):
        print('Bad frame list are: %s' % bad_frame_list)
    else:
        print('No bad frames are involved.')
    print('Combining the seperated compressed files together...')
    combine_compressed(filename, Nf, del_old=True)
    del results
    del res_
    if with_pickle:
        pkl.dump([mask, avg_img, imgsum, bad_frame_list],
                 open(filename + '.pkl', 'wb'))
    return mask, avg_img, imgsum, bad_frame_list