Example #1
def _get_mean_intensity_one_q(FD, sampling, labels):
    mi = np.zeros(int((FD.end - FD.beg) / sampling))
    n = 0
    qind, pixelist = roi.extract_label_indices(labels)
    # iterate over the (sub-sampled) frames and sum the ROI intensity per frame
    fra_pix = np.zeros_like(pixelist, dtype=np.float64)  # (not used below)
    # timg maps a raw detector pixel index to its 1-based position in pixelist
    timg = np.zeros(FD.md['ncols'] * FD.md['nrows'], dtype=np.int32)
    timg[pixelist] = np.arange(1, len(pixelist) + 1)
    for i in range(FD.beg, FD.end, sampling):
        (p, v) = FD.rdrawframe(i)
        w = np.where(timg[p])[0]     # which of the frame's listed pixels fall inside the ROI
        pxlist = timg[p[w]] - 1      # their positions within pixelist
        # single-q case: bin 0 is unused, take the summed intensity for label 1
        mi[n] = np.bincount(qind[pxlist], weights=v[w], minlength=2)[1]
        n += 1
    return mi
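For orientation, here is a minimal, self-contained sketch of the per-frame reduction used above; the toy qind and v arrays are made up for illustration and assume the single-q case the function is written for:

import numpy as np

qind = np.array([1, 1, 1, 1])        # ROI label (1-based) of each listed pixel
v = np.array([2.0, 0.0, 3.0, 1.0])   # photon counts of those pixels in one frame
# bincount sums the weights per label; minlength=2 guarantees a slot for label 1
# even if no pixel fired, and bin 0 stays unused
frame_intensity = np.bincount(qind, weights=v, minlength=2)[1]
print(frame_intensity)               # 6.0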
Example #2
def init_compress_eigerdata(images,
                            mask,
                            md,
                            filename,
                            bad_pixel_threshold=1e15,
                            hot_pixel_threshold=2**30,
                            bad_pixel_low_threshold=0,
                            nobytes=4,
                            bins=1,
                            with_pickle=True):
    '''
        Compress the Eiger data.

        Create a new mask by removing hot pixels
        Compute the average image
        Compute the sum of each image
        Find bad_frame_list where the image sum is above bad_pixel_threshold
            or at/below bad_pixel_low_threshold
        Generate a compressed data file with the given filename

        If bins != 1, the images will be binned in groups of `bins` frames.

        The 1024-byte header contains ['Magic value', 'beam_center_x', 'beam_center_y',
           'count_time', 'detector_distance', 'frame_time', 'incident_wavelength',
           'x_pixel_size', 'y_pixel_size',
           bytes per pixel (either 2 or 4 (default)),
           Nrows, Ncols, Rows_Begin, Rows_End, Cols_Begin, Cols_End]

        Returns
            mask
            avg_img
            imgsum
            bad_frame_list
    '''
    fp = open(filename, 'wb')
    #Make Header 1024 bytes
    #md = images.md
    if bins != 1:
        nobytes = 8

    Header = struct.pack('@16s8d7I916x', b'Version-COMP0001',
                         md['beam_center_x'], md['beam_center_y'],
                         md['count_time'], md['detector_distance'],
                         md['frame_time'], md['incident_wavelength'],
                         md['x_pixel_size'], md['y_pixel_size'], nobytes,
                         md['pixel_mask'].shape[1], md['pixel_mask'].shape[0],
                         0, md['pixel_mask'].shape[1], 0,
                         md['pixel_mask'].shape[0])

    fp.write(Header)

    Nimg_ = len(images)
    avg_img = np.zeros_like(images[0], dtype=np.float64)
    Nopix = float(avg_img.size)
    n = 0
    good_count = 0
    frac = 0.0
    if nobytes == 2:
        dtype = np.int16
    elif nobytes == 4:
        dtype = np.int32
    elif nobytes == 8:
        dtype = np.float64
    else:
        print(
            "Unsupported nobytes: only 2 [np.int16], 4 [np.int32] or 8 [np.float64] are supported; falling back to np.int32")
        dtype = np.int32

    Nimg = Nimg_ // bins
    time_edge = np.array(
        create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bins))

    imgsum = np.zeros(Nimg)
    if bins != 1:
        print('The frames will be binned by %s' % bins)

    for n in tqdm(range(Nimg)):
        t1, t2 = time_edge[n]
        img = np.average(images[t1:t2], axis=0)
        mask &= img < hot_pixel_threshold
        p = np.where((np.ravel(img) > 0)
                     & np.ravel(mask))[0]  #don't use masked data
        v = np.ravel(np.array(img, dtype=dtype))[p]
        dlen = len(p)
        imgsum[n] = v.sum()
        if (imgsum[n] > bad_pixel_threshold) or (imgsum[n] <=
                                                 bad_pixel_low_threshold):
            #if imgsum[n] >=bad_pixel_threshold :
            dlen = 0
            fp.write(struct.pack('@I', dlen))
        else:
            np.ravel(avg_img)[p] += v
            good_count += 1
            frac += dlen / Nopix
            #s_fmt ='@I{}i{}{}'.format( dlen,dlen,'ih'[nobytes==2])
            fp.write(struct.pack('@I', dlen))
            fp.write(struct.pack('@{}i'.format(dlen), *p))
            if bins == 1:
                # raw frames: 'h' (int16) if nobytes == 2, else 'i' (int32)
                fp.write(
                    struct.pack('@{}{}'.format(dlen, 'ih'[nobytes == 2]), *v))
            else:
                # binned frames are averaged, so store them as doubles
                fp.write(struct.pack('@{}d'.format(dlen), *v))
        #n +=1

    fp.close()
    frac /= good_count
    print("The fraction of pixel occupied by photon is %6.3f%% " %
          (100 * frac))
    avg_img /= good_count

    bad_frame_list = np.where((np.array(imgsum) > bad_pixel_threshold) | (
        np.array(imgsum) <= bad_pixel_low_threshold))[0]
    #bad_frame_list1 = np.where( np.array(imgsum) > bad_pixel_threshold  )[0]
    #bad_frame_list2 = np.where( np.array(imgsum) < bad_pixel_low_threshold  )[0]
    #bad_frame_list =   np.unique( np.concatenate( [bad_frame_list1, bad_frame_list2]) )

    if len(bad_frame_list):
        print('Bad frame list is: %s' % bad_frame_list)
    else:
        print('No bad frames were found.')
    if with_pickle:
        pkl.dump([mask, avg_img, imgsum, bad_frame_list],
                 open(filename + '.pkl', 'wb'))
    return mask, avg_img, imgsum, bad_frame_list
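For reference, a minimal sketch of reading the 1024-byte header back: the field names are taken from the docstring above and the layout from the struct.pack call, so treat this as an illustration rather than part of the original module:

import struct

def read_compress_header(filename):
    # Read the header written by init_compress_eigerdata (same '@16s8d7I916x' layout)
    with open(filename, 'rb') as fp:
        raw = fp.read(1024)
    fields = struct.unpack('@16s8d7I916x', raw)
    keys = ['magic', 'beam_center_x', 'beam_center_y', 'count_time',
            'detector_distance', 'frame_time', 'incident_wavelength',
            'x_pixel_size', 'y_pixel_size', 'bytes',
            'Nrows', 'Ncols', 'Rows_Begin', 'Rows_End', 'Cols_Begin', 'Cols_End']
    return dict(zip(keys, fields))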
Example #3
def para_compress_eigerdata(images,
                            mask,
                            md,
                            filename,
                            num_sub=100,
                            bad_pixel_threshold=1e15,
                            hot_pixel_threshold=2**30,
                            bad_pixel_low_threshold=0,
                            nobytes=4,
                            bins=1,
                            dtypes='uid',
                            reverse=True,
                            num_max_para_process=500,
                            cpu_core_number=72,
                            with_pickle=True):

    if dtypes == 'uid':
        uid = md['uid']  #images
        detector = get_detector(db[uid])
        images_ = load_data(uid, detector, reverse=reverse)[:100]
        N = len(images_)
    else:
        N = len(images)

    #print( N)

    N = int(np.ceil(N / bins))
    Nf = int(np.ceil(N / num_sub))
    if Nf > cpu_core_number:
        print(
            "The number of sub-processes is larger than %s (the XF11ID server core count)"
            % cpu_core_number)
        num_sub_old = num_sub
        num_sub = int(np.ceil(N / cpu_core_number))
        Nf = int(np.ceil(N / num_sub))
        print("The number of frames per sub compressed file was changed from %s to %s" %
              (num_sub_old, num_sub))
    create_compress_header(md, filename + '-header', nobytes, bins)
    #print( 'done for header here')
    results = para_segment_compress_eigerdata(
        images=images,
        mask=mask,
        md=md,
        filename=filename,
        num_sub=num_sub,
        bad_pixel_threshold=bad_pixel_threshold,
        hot_pixel_threshold=hot_pixel_threshold,
        bad_pixel_low_threshold=bad_pixel_low_threshold,
        nobytes=nobytes,
        bins=bins,
        dtypes=dtypes,
        num_max_para_process=num_max_para_process)

    res_ = np.array([results[k].get() for k in list(sorted(results.keys()))])
    imgsum = np.zeros(N)
    bad_frame_list = np.zeros(N, dtype=bool)
    good_count = 0  # number of sub-results actually summed into avg_img
    for i in range(Nf):
        mask_, avg_img_, imgsum_, bad_frame_list_ = res_[i]
        imgsum[i * num_sub:(i + 1) * num_sub] = imgsum_
        bad_frame_list[i * num_sub:(i + 1) * num_sub] = bad_frame_list_
        if i == 0:
            mask = mask_
            avg_img = np.zeros_like(avg_img_)
        else:
            mask *= mask_
        if not np.sum(np.isnan(avg_img_)):
            avg_img += avg_img_
            good_count += 1

    bad_frame_list = np.where(bad_frame_list)[0]
    avg_img /= good_count

    if len(bad_frame_list):
        print('Bad frame list is: %s' % bad_frame_list)
    else:
        print('No bad frames were found.')
    print('Combining the separate compressed files together...')
    combine_compressed(filename, Nf, del_old=True)

    del results
    del res_
    if with_pickle:
        pkl.dump([mask, avg_img, imgsum, bad_frame_list],
                 open(filename + '.pkl', 'wb'))
    return mask, avg_img, imgsum, bad_frame_list
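As a side note, the chunking arithmetic used at the top of this function can be summarised in a small stand-alone helper; plan_sub_files is a hypothetical name used only for this sketch and is not part of the original module:

import numpy as np

def plan_sub_files(N_frames, bins=1, num_sub=100, cpu_core_number=72):
    # How many frames per sub-file (num_sub) and how many sub-files (Nf) a run is split into
    N = int(np.ceil(N_frames / bins))    # frames after binning
    Nf = int(np.ceil(N / num_sub))       # candidate number of sub-files
    if Nf > cpu_core_number:             # cap the number of parallel jobs at the core count
        num_sub = int(np.ceil(N / cpu_core_number))
        Nf = int(np.ceil(N / num_sub))
    return num_sub, Nf

# e.g. 10000 frames, no binning: 10000/100 -> 100 sub-files > 72 cores,
# so num_sub grows to ceil(10000/72) = 139 and Nf drops to ceil(10000/139) = 72
print(plan_sub_files(10000))             # (139, 72)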