Code Example #1
File: bnl_1d-cwd.py  Project: eeedinar/Alzheimers
def load_all_cwd():
    # Batch-load every .h5 data file in the current working directory.
    # Relies on module-level `de` (h5exp) and `qgrid2` being defined.
    for file in [each for each in os.listdir(os.getcwd()) if each.endswith('.h5')]:
        try:
            print(f'Loading file {file}')
            dt = h5xs(file, [de.detectors, qgrid2])
            dt.load_data(N=8)
        except Exception as e:
            print(f'Loading failed {file}: {e}')
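
The function above relies on `de` (an h5exp instance) and `qgrid2` being defined at module level before it is called. A minimal usage sketch, assuming those names are set up in the same module; the paths are placeholders and the q-grid follows the pattern used in Examples #4 and #6:

import os
import numpy as np
from py4xs.hdf import h5xs, h5exp

# hypothetical setup; replace paths with your own exp.h5 and data directory
de = h5exp("/path/to/exp.h5")
qgrid2 = np.hstack([np.arange(0.005, 0.0499, 0.001),
                    np.arange(0.05, 0.099, 0.002),
                    np.arange(0.1, 3.2, 0.005)])

os.chdir("/path/to/h5-data-directory")
load_all_cwd()   # processes every .h5 file in the current directory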
Code Example #2
File: essential_func.py  Project: eeedinar/Alzheimers
def azimuthal_averaging(file, qgrid, n_proc=8, exp_folder=""):
    """
        azimuthal_averaging(masked_file, qgrid, n_proc=8)
        Circularly (azimuthally) average the 2D patterns in `file` onto `qgrid`,
        using the detector configuration stored in exp_folder/exp.h5.
    """
    de = h5exp(exp_folder + "exp.h5")

    dt = h5xs(file, [de.detectors, qgrid])
    tic = time.perf_counter()
    print('Circular averaging starts now ... ')
    dt.load_data(N=n_proc)
    print(f'{file} total 1-d averaging time {time.perf_counter() - tic} seconds')
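
A minimal usage sketch for this function, assuming a masked data file and an exp.h5 sitting in the given folder; the file name, folder, and q-grid below are placeholders modeled on the other examples (note that exp_folder must end with a trailing slash, since the function concatenates it with "exp.h5"):

import numpy as np

qgrid = np.hstack([np.arange(0.005, 0.0499, 0.001),
                   np.arange(0.05, 0.099, 0.002),
                   np.arange(0.1, 3.2, 0.005)])

azimuthal_averaging("2512_EC-roi0.h5", qgrid, n_proc=8,
                    exp_folder="/home/bashit.a/July-2021/")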
Code Example #3
def openFileNameDialog(self):
    # Method of a QWidget/QDialog subclass: lets the user pick either the
    # experiment configuration file ('exp') or a data file.
    options = QFileDialog.Options()
    options |= QFileDialog.DontUseNativeDialog
    if self.name == 'exp':
        filename, _ = QFileDialog.getOpenFileName(
            self,
            "Exp File",
            "",
            "All Files (*);;HDF Files (*.h5)",
            options=options)
        global dexp
        dexp = h5exp(filename)
    else:
        filename, _ = QFileDialog.getOpenFileName(
            self,
            "Data File",
            "",
            "All Files (*);;HDF Files (*.h5)",
            options=options)
        global data
        data = h5xs(filename, [dexp.detectors, dexp.qgrid])
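
This method is lifted from a PyQt5 widget; `self.name`, `dexp`, and `data` are defined elsewhere in that class/module. A minimal sketch of how such a dialog method could be wired into a runnable widget, assuming PyQt5 (the FilePicker class and button are hypothetical, and the sketch only prints the chosen filename instead of opening it with py4xs):

import sys
from PyQt5.QtWidgets import (QApplication, QWidget, QPushButton,
                             QVBoxLayout, QFileDialog)

class FilePicker(QWidget):
    # stand-in widget: one button that opens the file dialog
    def __init__(self, name='exp'):
        super().__init__()
        self.name = name
        btn = QPushButton("Open file", self)
        btn.clicked.connect(self.openFileNameDialog)
        layout = QVBoxLayout(self)
        layout.addWidget(btn)

    def openFileNameDialog(self):
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        caption = "Exp File" if self.name == 'exp' else "Data File"
        filename, _ = QFileDialog.getOpenFileName(
            self, caption, "", "All Files (*);;HDF Files (*.h5)",
            options=options)
        print("selected:", filename)

if __name__ == '__main__':
    app = QApplication(sys.argv)
    w = FilePicker()
    w.show()
    sys.exit(app.exec_())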
Code Example #4
from py4xs.hdf import h5xs,h5exp,lsh5
from py4xs.data2d import Data2d
import numpy as np
import pylab as plt
import os, time

exp_folder = '/home/bashit.a/July-2021/'
samples    = '/home/bashit.a/July-2021/sample-5/'

os.chdir(samples)

de = h5exp(exp_folder + "exp.h5")
#de.qgrid
print(de.detectors[1].extension)
qgrid2 = np.hstack([np.arange(0.005, 0.0499, 0.001), np.arange(0.05, 0.099, 0.002), np.arange(0.1, 3.2, 0.005)])
#print(qgrid2)

#load one file in a directory
dt  = h5xs("2512_EC-roi0.h5", [de.detectors, qgrid2])
tic = time.time()
print(f'Loading file at {tic}')
dt.load_data(N=8, debug=True)
tac = time.time()
print(f'total processing time {tac-tic} seconds')
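
The qgrid2 above stitches three np.arange segments together so that q is sampled most finely at low q and progressively more coarsely at higher q. A small standalone check of what that grid looks like (numpy only; the printout format is just for illustration):

import numpy as np

segments = [np.arange(0.005, 0.0499, 0.001),   # fine sampling at low q
            np.arange(0.05, 0.099, 0.002),     # intermediate
            np.arange(0.1, 3.2, 0.005)]        # coarse sampling at high q
qgrid2 = np.hstack(segments)

for seg in segments:
    print(f"{len(seg):4d} points from {seg[0]:.3f} to {seg[-1]:.3f}, step {seg[1] - seg[0]:.3f}")
print(f"total: {len(qgrid2)} q-points, strictly increasing: {bool(np.all(np.diff(qgrid2) > 0))}")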
Code Example #5
def pack_and_move(data_type, uid, dest_dir):
    # useful for moving files from RAM disk to GPFS during fly scans
    #
    # assume other type of data are saved on RAM disk as well (GPFS not working for WAXS2)
    # these data must be moved manually to GPFS
    global pilatus_trigger_mode  #,CBF_replace_data_path

    print(f"packing: {data_type}, {uid}, {dest_dir}")
    print(f"data source: {PilatusCBFHandler.froot}")
    t0 = time.time()
    # if the dest_dir contains exp.h5, read detectors/qgrid from it
    try:
        dt_exp = h5exp(dest_dir + '/exp.h5')
    except Exception:
        dt_exp = None

    dir_name = None

    if data_type in ["multi", "sol"]:
        uids = uid.split('|')
        if data_type == "sol":
            sb_dict = json.loads(uids.pop())
            pilatus_trigger_mode = triggerMode.fly_scan
        else:
            pilatus_trigger_mode = triggerMode.external_trigger
        ## assume that the meta data contains the holderName
        if 'holderName' not in list(db[uids[0]].start.keys()):
            print(
                "cannot find holderName from the header, using tmp.h5 as filename ..."
            )
            fh5_name = "tmp.h5"
        else:
            dir_name = db[uids[0]].start['holderName']
            fh5_name = dir_name + '.h5'
        fn = pack_h5_with_lock(uids, dest_dir, fn="tmp.h5")
        if fn is not None and dt_exp is not None:
            print('processing ...')
            if data_type == "sol":
                dt = h5sol_HT(fn, [dt_exp.detectors, dt_exp.qgrid])
                dt.assign_buffer(sb_dict)
                dt.process(filter_data=True, sc_factor=0.998, debug='quiet')
                dt.export_d1s(path=dest_dir + "/processed/")
            else:
                dt = h5xs(fn, [dt_exp.detectors, dt_exp.qgrid],
                          transField='em2_sum_all_mean_value')
                dt.load_data(debug="quiet")
            dt.fh5.close()
            if fh5_name != "tmp.h5":  # temporary fix, for some reason other processes cannot open the packed file
                os.system(f"cd {dest_dir} ; cp tmp.h5 {fh5_name} ; rm tmp.h5")
            del dt, dt_exp
    elif data_type == "HPLC":
        uids = [uid]
        if db[uid].start['plan_name'] == "hplc_scan":
            # this was software_trigger_single_frame when using the flyer-based hplc_scan
            pilatus_trigger_mode = triggerMode.software_trigger_single_frame
        else:
            # data collected using ct
            pilatus_trigger_mode = triggerMode.software_trigger_multi_frame
        fn = pack_h5_with_lock(uid, dest_dir=dest_dir, attach_uv_file=True)
        if fn is not None and dt_exp is not None:
            print('processing ...')
            dt = h5sol_HPLC(fn, [dt_exp.detectors, dt_exp.qgrid])
            dt.process(debug='quiet')
            dt.fh5.close()
            del dt, dt_exp
    elif data_type == "flyscan" or data_type == "scan":
        if data_type == "flyscan":
            pilatus_trigger_mode = triggerMode.fly_scan
        else:
            pilatus_trigger_mode = triggerMode.external_trigger
        uids = [uid]
        fn = pack_h5_with_lock(uid, dest_dir)
    else:
        print(f"invalid data type: {data_type} .")
        return

    if fn is None:
        return  # packing unsuccessful,
    print(
        f"{time.asctime()}: finished packing/processing, total time lapsed: {time.time()-t0:.1f} sec ..."
    )

    if PilatusCBFHandler.froot == data_file_path.gpfs:
        return

    for uid in uids:
        print(f'moving files for {uid} from RAMDISK to GPFS ...')
        h = db[uid]
        p1 = h.start['data_path']
        #p2 = p1.replace(default_data_path_root, '/ramdisk/')
        p2 = p1.replace(data_file_path.gpfs.value, '/ramdisk')
        sample_name = h.start['sample_name']
        if dir_name is not None:
            cmd = f"rsync -ahv --remove-source-files [email protected]:{p2}{dir_name} {p1}"
        elif os.system(
                f"ssh -q [email protected] [[ -d {p2}{sample_name} ]]") == 0:
            # if sample name is a directory on the RAMDISK, move the entire directory
            cmd = f"rsync -ahv --remove-source-files [email protected]:{p2}{sample_name} {p1}"
        else:
            cmd = f"rsync -ahv --remove-source-files [email protected]:{p2}{sample_name}_*.* {p1}"
            #os.system(f"rsync -ahv --remove-source-files [email protected]:{p2}{h.start['sample_name']}_*.log {p1}")
        print(cmd)
        os.system(cmd)
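
The final branch builds one of three rsync commands, depending on whether a holder directory was found or whether the sample has its own directory on the RAM disk. A standalone sketch of that command-construction logic, with the ssh existence check replaced by a boolean flag; the host string is copied from the example above, while the function name and example paths are hypothetical:

def build_move_cmd(p1, p2, sample_name, dir_name=None, sample_is_dir=False,
                   host="[email protected]"):
    # prefer the holder directory; otherwise move the sample's directory,
    # or fall back to the sample's individual files, as in pack_and_move()
    if dir_name is not None:
        return f"rsync -ahv --remove-source-files {host}:{p2}{dir_name} {p1}"
    if sample_is_dir:
        return f"rsync -ahv --remove-source-files {host}:{p2}{sample_name} {p1}"
    return f"rsync -ahv --remove-source-files {host}:{p2}{sample_name}_*.* {p1}"

# example: the command that would be issued for a holder named 'holder-A'
print(build_move_cmd("/gpfs/path/", "/ramdisk/path/",
                     sample_name="2512_EC", dir_name="holder-A"))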
Code Example #6
from py4xs.hdf import h5xs, h5exp, lsh5
from py4xs.data2d import Data2d
import numpy as np
import pylab as plt
import os, time

exp_folder = '/home/bashit.a/July-2021/'
samples = '/home/bashit.a/July-2021/sample-6/'
file = "2029_Dentate_gyrus-roi0.h5"

os.chdir(samples)

de = h5exp(exp_folder + "exp.h5")
#de.qgrid
print(de.detectors[1].extension)
qgrid2 = np.hstack([
    np.arange(0.005, 0.0499, 0.001),
    np.arange(0.05, 0.099, 0.002),
    np.arange(0.1, 3.2, 0.005)
])
#print(qgrid2)

#load one file in a directory
dt = h5xs(file, [de.detectors, qgrid2])
tic = time.time()
print(f'Loading file at {tic}')
dt.load_data(N=8, debug=True)
tac = time.time()
print(f'total processing time {tac-tic} seconds')