Example #1

def write_example(filename):

    # --- prepare data ---

    # Generate fake raw data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)
     
    # x, y and z ranges
    x = np.arange(256)
    y = np.arange(256)
    z = np.arange(180)
   
    # --- create file ---

    # Open DataExchange file
    f = DataExchangeFile(filename, mode='w')
    
    # Create core HDF5 dataset in exchange group for 180 deep stack of x,y
    # images /exchange/data
    data_en = DataExchangeEntry.data(data={'value': rawdata, 'units':'counts', 'description': 'Projection Data',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} })
    f.add_entry(data_en)

    # The default location for sample in DataExchangeEntry is /measurement/sample
    # To override the default, set e.g. 'root'='/measurement_4/sample'
    sample_en = DataExchangeEntry.sample(name={'value': 'Minivirus'}, temperature={'value': 200.0, 'units':'celsius',
                                'dataset_opts': {'dtype': 'd'}})
    f.add_entry(sample_en)

    # --- All done ---
    f.close()
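
A minimal usage sketch for the example above (the output file name is illustrative, and the dataset paths follow the defaults noted in the comments; assumes the data_exchange package and h5py are available):

import h5py

write_example('minimal_example.h5')

with h5py.File('minimal_example.h5', 'r') as f:
    print f['/exchange/data'].shape            # expected: (180, 256, 256)
    print f['/measurement/sample/name'][...]   # expected: 'Minivirus'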

Example #2

def main():

    file_name = '/local/data/esrf/scan.edf'
    dark_file_name = '/local/data/esrf/dark.edf'
    white_file_name = '/local/data/esrf/flat.edf'
    hdf5_file_name = '/local/data/esrf_test.h5'
    sample_name = 'esrf'

    verbose = True

    if verbose: print file_name
    if verbose: print white_file_name
    if verbose: print hdf5_file_name
#    if verbose: print log_file

    mydata = Convert()
    # Create minimal hdf5 file
    if verbose: print "Reading data ... "
    mydata.stack(file_name,
                   hdf5_file_name = hdf5_file_name,
                   white_file_name = white_file_name,
                   dark_file_name = dark_file_name,
                   projections_data_type = 'edf',
                   white_data_type = 'edf',
                   dark_data_type = 'edf',
                   sample_name = sample_name
                   )
    if verbose: print "Done reading data ... "

     
    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'ESRF'}) )

    # Create HDF5 subgroup
    # /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': 'ESRF'},
                                        date_time={'value': "2014-12-05T19:42:13+0100"},
                                        beamline={'value': "ID-19"},
                                        )
    )

    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Emmanuelle"},
                                                role={'value':"Project PI"},
                    )
        )

    f.close()
    if verbose: print "Done converting ", file_name

Example #3

def main():

    file_name = '/local/data/databank/Diamond/projections_13429.hdf'
    hdf5_file_name = '/local/data/databank/dataExchange/microCT/Diamond_2bin.h5'

    verbose = True

    print "Input files base name: ", file_name
    print "Output data exchange file name: ", hdf5_file_name

    mydata = Convert()
    # Create minimal hdf5 file
    if verbose: print "Reading data ... "
    mydata.nexus(file_name,
                        hdf5_file_name = hdf5_file_name,
                        projections_start=20,
                        projections_end=1820,
                        projections_step=2,
                        white_start=11,
                        white_end=20,
                        dark_start=1,
                        dark_end=3,
                        sample_name = 'unknown'
                   )
    if verbose: print "Done reading data ... "
    
    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'Diamond I12'}) )

    ### Create HDF5 subgroup
    ### /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': "Diamond Light Source"},
                                        date_time={'value': "2013-11-30T19:17:04+0100"},
                                        beamline={'value': "JEEP I12"},
                                        )
    )

    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Michael Drakopoulos"},
                                                role={'value':"Project PI"},
                    )
        )

    f.close()
    print "Done creating data exchange file: ", hdf5_file_name

Example #4

def main():

    file_name = '/local/data/databank/APS_13_BM/run2_soln1_2_.SPE'
    hdf5_file_name = '/local/data/databank/dataExchange/microCT/run2_soln1_2.h5'

    verbose = True

    if verbose: print "Input files base name: ", file_name
    if verbose: print "Output data exchange file name: ", hdf5_file_name

    mydata = Convert()
    # Create minimal hdf5 file
    if verbose: print "Reading data ... "
    mydata.multiple_stack(file_name,
                        hdf5_file_name = hdf5_file_name,
                        projections_start=2,
                        projections_end=7,
                        projections_step=2,
                        white_start=1,
                        white_end=8,
                        white_step=2,
                        sample_name = 'Stripe_Solder_Sample_Tip1'
                   )
    if verbose: print "Done reading data ... "
    
    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'APS 13-BM'}) )

    ### Create HDF5 subgroup
    ### /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': "Advanced Photon Source"},
                                        date_time={'value': "2013-11-30T19:17:04+0100"},
                                        beamline={'value': "13-BM"},
                                        )
    )

    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Mark Rivers"},
                                                role={'value':"Project PI"},
                    )
        )

    f.close()
    if verbose: print "Done creating data exchange file: ", hdf5_file_name

Example #5

def main():

    file_name = '/local/data/databank/Diamond/projections_13429.hdf'
    hdf5_file_name = '/local/data/databank/dataExchange/microCT/Diamond_2bin.h5'

    verbose = True

    mydata = Convert()
    # Create minimal hdf5 file
    if verbose: print "Reading data ... "
    mydata.nexus(file_name,
                        hdf5_file_name = hdf5_file_name,
                        projections_start=20,
                        projections_end=1820,
                        projections_step=2,
                        white_start=11,
                        white_end=20,
                        dark_start=1,
                        dark_end=3,
                        sample_name = 'unknown'
                   )
    
    # Add extra metadata if available / desired

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'Diamond I12'}) )

    ### Create HDF5 subgroup
    ### /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': "Diamond Light Source"},
                                        date_time={'value': "2013-11-30T19:17:04+0100"},
                                        beamline={'value': "JEEP I12"},
                                        )
    )

    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Michael Drakopoulos"},
                                                role={'value':"Project PI"},
                    )
        )

    f.close()
    print "Done creating data exchange file: ", hdf5_file_name

Example #6

def write_example(filename):

    # --- prepare data ---

    # Generate fake data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)
    rawdata_white = np.ones(2 * 256 * 256, np.uint16).reshape(2, 256, 256)
    rawdata_dark = np.zeros(10 * 256 * 256, np.uint16).reshape(10, 256, 256)
  
    # x, y and z ranges
    x = np.arange(256)
    y = np.arange(256)
    z = np.arange(180)
      
    # --- create file ---

    # Open DataExchange file
    f = DataExchangeFile(filename, mode='w')
    
    # Create core HDF5 dataset in exchange group for 180 deep stack
    # of x,y images /exchange/data
    f.add_entry([
            DataExchangeEntry.data(data={'value':rawdata, 'units':'counts'}),
            DataExchangeEntry.data(data_dark={'value':rawdata_dark, 'units':'counts'}),
            DataExchangeEntry.data(data_white={'value':rawdata_white, 'units':'counts'})
            ]
        )
                      
    # --- All done ---
    f.close()

Example #7

def write_example(filename):

    # --- prepare data ---

    # Generate fake data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)
  
    # x, y and z ranges
    x = np.arange(256)
    y = np.arange(256)
    z = np.arange(180)
       
    # --- create file ---

    # Open DataExchange file
    f = DataExchangeFile(filename, mode='w')
        
    # Create a DataExchangeEntry and add the entry to the data exchange file.
    f.add_entry(DataExchangeEntry.data(data={'value':rawdata, 'units':'counts'}))
                  
    # --- All done ---
    f.close()

Example #8

def write_example(filename):

    # --- prepare data ---

    # Generate fake data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)
    rawdata_white = np.ones(2 * 256 * 256, np.uint16).reshape(2, 256, 256)
    rawdata_dark = np.zeros(10 * 256 * 256, np.uint16).reshape(10, 256, 256)

    # x, y and z ranges
    x = np.arange(256)
    y = np.arange(256)
    z = np.arange(180)

    # Fabricated theta values
    theta = (z / float(180)) * 180.0
    theta_white = (0.0, 180.0)
    theta_dark = (0.0, 0.0, 0.0, 0.0, 0.0, 180.0, 180.0, 180.0, 180.0, 180.0)

    # --- create file ---

    # Open HDF5 file
    f = DataExchangeFile(filename, mode='w')

    #Create HDF5 dataset in exchange group for data, data_dark & data_white, theta, theta_dark, theta_white under /exchange
    f.add_entry([
        DataExchangeEntry.data(data={
            'value': rawdata,
            'units': 'counts',
            'axes': 'theta:y:x'
        }),
        DataExchangeEntry.data(data_dark={
            'value': rawdata_dark,
            'units': 'counts',
            'axes': 'theta:y:x'
        }),
        DataExchangeEntry.data(data_white={
            'value': rawdata_white,
            'units': 'counts',
            'axes': 'theta:y:x'
        }),
        DataExchangeEntry.data(theta={
            'value': theta,
            'units': 'degrees'
        }),
        DataExchangeEntry.data(theta_dark={
            'value': theta_dark,
            'units': 'degrees'
        }),
        DataExchangeEntry.data(theta_white={
            'value': theta_white,
            'units': 'degrees'
        })
    ])

    # --- All done ---
    f.close()
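
For reference, the fabricated theta above is simply z expressed in degrees (0.0, 1.0, ..., 179.0), while theta_white and theta_dark record the angles at which the white and dark frames would have been taken. A quick check (illustrative only):

import numpy as np

z = np.arange(180)
theta = (z / float(180)) * 180.0
print theta[0], theta[1], theta[-1]    # 0.0 1.0 179.0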

Example #9

def write_example(filename):

    # --- prepare data ---

    # Generate fake data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)
    rawdata_white = np.ones(2 * 256 * 256, np.uint16).reshape(2, 256, 256)
    rawdata_dark = np.zeros(10 * 256 * 256, np.uint16).reshape(10, 256, 256)
  
    # x, y and z ranges
    x = np.arange(256)
    y = np.arange(256)
    z = np.arange(180)
    
    # Fabricated theta values
    theta = (z / float(180)) * 180.0
    theta_white = (0.0, 180.0)
    theta_dark = (0.0, 0.0, 0.0, 0.0, 0.0, 180.0, 180.0, 180.0, 180.0, 180.0)
    
    # --- create file ---

    # Open HDF5 file
    f = DataExchangeFile(filename, mode='w')

    #Create HDF5 dataset in exchange group for data, data_dark & data_white, theta, theta_dark, theta_white under /exchange
    f.add_entry([
            DataExchangeEntry.data(data={'value':rawdata, 'units':'counts', 'axes': 'theta:y:x'}),
            DataExchangeEntry.data(data_dark={'value':rawdata_dark, 'units':'counts', 'axes': 'theta:y:x'}),
            DataExchangeEntry.data(data_white={'value':rawdata_white, 'units':'counts', 'axes': 'theta:y:x'}),
            DataExchangeEntry.data(theta={'value':theta, 'units':'degrees'}),
            DataExchangeEntry.data(theta_dark={'value':theta_dark, 'units':'degrees'}),
            DataExchangeEntry.data(theta_white={'value':theta_white, 'units':'degrees'})
        ])
                  
    # --- All done ---
    f.close()

Example #10

def main():

    ##file_name = '/local/data/databank/SLS_2011/Blakely_SLS/Blakely.tif'
    ##log_file = '/local/data/databank/SLS_2011/Blakely_SLS/Blakely.log'
    ##
    ##hdf5_file_name = '/local/data/databank/dataExchange/microCT/Blakely_SLS_2011.h5'

    file_name = '/local/data/databank/SLS_2011/Hornby_SLS/Hornby_b.tif'
    log_file = '/local/data/databank/SLS_2011/Hornby_SLS/Hornby.log'

    hdf5_file_name = '/local/data/databank/dataExchange/microCT/Hornby_SLS_2011.h5'

    verbose = True

    if verbose: print file_name
    if verbose: print log_file
    if verbose: print hdf5_file_name



    #Read input SLS data
    file = open(log_file, 'r')
    if verbose: print '###############################'
    for line in file:
        if 'Number of darks' in line:
            NumberOfDarks = re.findall(r'\d+', line)
            if verbose: print 'Number of Darks', NumberOfDarks[0]
        if 'Number of flats' in line:
            NumberOfFlats = re.findall(r'\d+', line)
            if verbose: print 'Number of Flats', NumberOfFlats[0]
        if 'Number of projections' in line:
            NumberOfProjections = re.findall(r'\d+', line)
            if verbose: print 'Number of Projections', NumberOfProjections[0]
        if 'Number of inter-flats' in line:
            NumberOfInterFlats = re.findall(r'\d+', line)
            if verbose: print 'Number of inter-flats', NumberOfInterFlats[0]
        if 'Inner scan flag' in line:
            InnerScanFlag = re.findall(r'\d+', line)
            if verbose: print 'Inner scan flag', InnerScanFlag[0]
        if 'Flat frequency' in line:
            FlatFrequency = re.findall(r'\d+', line)
            if verbose: print 'Flat frequency', FlatFrequency[0]
        if 'Rot Y min' in line:
            RotYmin = re.findall(r'\d+\.\d+', line)
            if verbose: print 'Rot Y min', RotYmin[0]
        if 'Rot Y max' in line:
            RotYmax = re.findall(r'\d+\.\d+', line)
            if verbose: print 'Rot Y max', RotYmax[0]
        if 'Angular step' in line:
            AngularStep = re.findall(r'\d+\.\d+', line)
            if verbose: print 'Angular step', AngularStep[0]
    if verbose: print '###############################'
    file.close()

    dark_start = 1
    dark_end = int(NumberOfDarks[0]) + 1
    white_start = dark_end
    white_end = white_start + int(NumberOfFlats[0])
    projections_start = white_end
    projections_end = projections_start + int(NumberOfProjections[0])

    if verbose: print dark_start, dark_end
    if verbose: print white_start, white_end
    if verbose: print projections_start, projections_end

    # Hard-coded ranges for this dataset (override the values parsed from the log)
    dark_start = 1
    dark_end = 21
    white_start = 21
    white_end = 221
    projections_start = 221
    projections_end = 1662

    ### if testing uncomment
    ##dark_end = 4
    ##white_end = 24
    ##projections_end = 224

    mydata = Convert()
    # Create minimal hdf5 file
    mydata.series_of_images(file_name,
                     hdf5_file_name,
                     projections_start,
                     projections_end,
                     white_start = white_start,
                     white_end = white_end,
                     dark_start = dark_start,
                     dark_end = dark_end,
                     verbose = False
                     )

     
    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'Tomcat'}) )

    # Create HDF5 subgroup
    # /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': 'Swiss Light Source'},
                                        date_time={'value': "2010-11-08T14:51:56+0100"},
                                        beamline={'value': "Tomcat"},
                                        current={'value': 401.96, 'units': 'mA', 'dataset_opts': {'dtype': 'd'}},
                                        )
    )

    # Create HDF5 subgroup
    # /measurement/instrument/monochromator
    f.add_entry( DataExchangeEntry.monochromator(type={'value': 'Multilayer'},
                                                energy={'value': 19.260, 'units': 'keV', 'dataset_opts': {'dtype': 'd'}},
                                                mono_stripe={'value': 'Ru/C'},
                                                )
        )

    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Federica Marone"},
                                                role={'value':"Project PI"},
                                                affiliation={'value':"Swiss Light Source"},
                                                phone={'value':"+41 56 310 5318"},
                                                email={'value':"*****@*****.**"},

                    )
        )

    # Create HDF5 subgroup
    # /measurement/instrument/detector
    f.add_entry( DataExchangeEntry.detector(manufacturer={'value':'CooKe Corporation'},
                                            model={'value': 'pco dimax'},
                                            serial_number={'value': '1234XW2'},
                                            bit_depth={'value': 12, 'dataset_opts':  {'dtype': 'd'}},
                                            x_pixel_size={'value': 6.7e-6, 'dataset_opts':  {'dtype': 'f'}},
                                            y_pixel_size={'value': 6.7e-6, 'dataset_opts':  {'dtype': 'f'}},
                                            x_dimensions={'value': 2048, 'dataset_opts':  {'dtype': 'i'}},
                                            y_dimensions={'value': 2048, 'dataset_opts':  {'dtype': 'i'}},
                                            x_binning={'value': 1, 'dataset_opts':  {'dtype': 'i'}},
                                            y_binning={'value': 1, 'dataset_opts':  {'dtype': 'i'}},
                                            operating_temperature={'value': 270, 'units':'K', 'dataset_opts':  {'dtype': 'f'}},
                                            exposure_time={'value': 170, 'units':'ms', 'dataset_opts':  {'dtype': 'd'}},
                                            frame_rate={'value': 3, 'dataset_opts':  {'dtype': 'i'}},
                                            output_data={'value':'/exchange'}
                                            )
        )

    f.add_entry(DataExchangeEntry.objective(magnification={'value':10, 'dataset_opts': {'dtype': 'd'}},
                                        )
        )

    f.add_entry(DataExchangeEntry.scintillator(name={'value':'LuAg '},
                                                type={'value':'LuAg'},
                                                scintillating_thickness={'value':20e-6, 'dataset_opts': {'dtype': 'd'}},
            )
        )

    # Create HDF5 subgroup
    # /measurement/experiment
    f.add_entry( DataExchangeEntry.experiment( proposal={'value':"e11218"},
                )
        )
    f.close()
    if verbose: print "Done converting ", file_name

Example #11

def pack_data_exchange():
    f = DataExchangeFile(CXP.io.data_exchange_filename, mode='w')
    sim = DataExchangeEntry.simulation(
        name={'value': 'Simulated Ptycho Data.'},
        energy={
            'value': CXP.experiment.energy,
            'units': 'keV'
        },
        focusing_optic={'value': CXP.experiment.optic},
        probe_modes={'value': CXP.reconstruction.probe_modes},
        noise_model={'value': CXP.simulation.noise_model},
        gaussian_noise_level={'value': CXP.simulation.gaussian_noise_level},
        total_photons={'value': CXP.simulation.total_photons},
        beam_stop={'value': CXP.simulation.beam_stop},
        beam_stop_size={'value': CXP.simulation.beam_stop_size},
        beam_stop_attenuation={'value': CXP.simulation.beam_stop_attenuation},
        defocus={'value': CXP.simulation.defocus},
        position_jitter={
            'value': CXP.reconstruction.initial_position_jitter_radius,
            'units': 'pixels'
        })
    f.add_entry(sim)
    sample = DataExchangeEntry.sample(
        root='/simulation',
        name={'value': 'ground truth sample complex amplitude'},
        data={
            'value': cxph.sample.data[0],
            'units': 'sqrt(counts)'
        },
    )
    f.add_entry(sample)
    probe = DataExchangeEntry.sample(
        root='/simulation',
        entry_name='probe',
    )
    for mode in range(CXP.reconstruction.probe_modes):
        setattr(probe, 'mode_{:d}'.format(mode), {
            'value': cxph.input_probe.modes[mode].data[0],
            'units': 'counts'
        })

    f.add_entry(probe)
    detector = DataExchangeEntry.detector(
        root='/simulation',
        x_pixel_size={'value': CXP.experiment.dx_d},
        y_pixel_size={'value': CXP.experiment.dx_d},
        x_dimension={'value': CXP.experiment.px},
        y_dimension={'value': CXP.experiment.py},
        distance={'value': CXP.experiment.z},
        basis_vectors={
            'value': [[0, -CXP.experiment.dx_d, 0],
                      [-CXP.experiment.dx_d, 0, 0]]
        },
        corner_position={'value': [0, 0, 0]})
    f.add_entry(detector)
    data = DataExchangeEntry.data(
        name={'value': 'simulated_data'},
        data={
            'value': sp.array(cxph.det_mod.data),
            'axes': 'translation:y:x',
            'units': 'counts',
            'dataset_opts': {
                'compression': 'gzip',
                'compression_opts': 4
            }
        },
        translation={'value': '/exchange/sample/geometry/translation'})
    f.add_entry(data)
    # Get scan positions into dex format
    pos = sp.zeros((cxph.positions.total, 3))
    y, x = cxph.positions.correct
    for i in range(cxph.positions.total):
        pos[i, 0], pos[i, 1] = x[i] * CXP.dx_s, y[i] * CXP.dx_s

    positions = DataExchangeEntry.translation(
        root='/exchange/sample/geometry',
        name={'value': 'ptychography scan positions'},
        scan_type={'value': CXP.measurement.ptycho_scan_type},
        data={
            'value': pos,
            'units': 'm'
        })

    f.add_entry(positions)
    f.close()

Example #12

def main():

    file_name = '/local/data/databank/TXM_26ID/Miller1/ABR_1SP_.tif'
    #dark_file_name = '/local/data/databank/AS/Mayo_tooth_AS/BG__AFTER_.tif'
    #white_file_name = '/local/data/databank/AS/Mayo_tooth_AS/BG__BEFORE_.tif'
    hdf5_file_name = '/local/data/databank/dataExchange/TXM/TXM_APS26IDMiller1.h5'
    sample_name = 'Teeth'

    projections_start = 0
    projections_end = 361
    white_start = 0
    white_end = 0
    white_step = 1
    dark_start = 0
    dark_end = 0
    dark_step = 1

    verbose = True

    if verbose: print "Input projection base name: ", file_name
    #if verbose: print "Input white base name: ", white_file_name
    #if verbose: print "Input dark base name: ", dark_file_name
    if verbose: print "Output data exchange file name: ", hdf5_file_name

    mydata = Convert()
    # Create minimal hdf5 file
    mydata.series_of_images(file_name,
                     hdf5_file_name,
                     projections_start,
                     projections_end,
                     #white_file_name = white_file_name,
                     white_start = white_start,
                     white_end = white_end,
                     white_step = white_step,
                     #dark_file_name = dark_file_name,
                     #dark_start = dark_start,
                     #dark_end = dark_end,
                     #dark_step = dark_step,
                     #sample_name = sample_name,
                     projections_digits = 4,
                     #white_digits = 2,
                     #dark_digits = 2,
                     projections_zeros = True,
                     verbose = False
                     )
    if verbose: print "Done reading data ... "
     
    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'Australian Synchrotron Facility'}) )

    # Create HDF5 subgroup
    # /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': 'Australian Synchrotron Facility'},
                                        date_time={'value': "2013-10-19T22:22:13+0100"},
                                        beamline={'value': "Tomography"},
                                        )
    )

    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Sherry Mayo"},
                                                role={'value':"Project PI"},
                    )
        )

    f.close()
    if verbose: print "Done creating data exchange file: ", hdf5_file_name

Example #13

def create_theta_stack(filenames, output_filename):

    shutil.copy(filenames[0], output_filename)
    f_tomo = DataExchangeFile(output_filename, mode='a')
    
    # Stackable datasets
    # Create a theta stack for every dataset whose root group is named 'exchange_N'
    with DataExchangeFile(filenames[0], mode='r') as f_dex:
        stackable_datasets = [dataset for dataset in f_dex.keys() if dataset.find('exchange_')>-1]

    print 'Found {:d} stackable datasets: '.format(len(stackable_datasets)), stackable_datasets

    for dataset in stackable_datasets:
        print 'Adding {:s}'.format(dataset)
        # loop through all files to determine the image sizes that will be stacked.
        angles = {}
        shapes = []
        for filename in filenames:
            f_dex = DataExchangeFile(filename, mode='r')
            angles[f_dex['/exchange_0/angle'].value] = (filename, f_dex['/'.join([dataset, 'data'])].shape)
            shapes.append(f_dex['/'.join([dataset, 'data'])].shape)
            f_dex.close()
        shapes = list(set(shapes))

        print 'Found {:d} different array shapes: '.format(len(shapes)), shapes
        # Find the max array size
        channels, xmin, ymin = shapes[0]
        if len(shapes) > 1:
            for shape in shapes:
                channels = max(channels, shape[0])
                xmin = max(xmin, shape[1])
                ymin = max(ymin, shape[2])
        array_shape = [len(filenames), channels, xmin, ymin]

        
        

        # Need to add the DataExchange entry in a non-standard way because the arrays are
        # too large to be held in memory simultaneously. So create the dataset with a single
        # array, resize it to the full stack shape, and then add the arrays individually.

    
        try:
            del f_tomo[dataset+'/data']
        except KeyError:
            pass
        if dataset == 'exchange_0':
            try:
                del f_tomo[dataset+'/angle']
            except KeyError:
                pass

        for i_theta, angle in enumerate(sorted(angles.keys(), key=float)):
            print 'Adding angle {:s}'.format(angle)
            with DataExchangeFile(angles[angle][0], mode='r') as f_dex:
                
                if i_theta==0:
                    ashape = list(f_dex[dataset+'/data'].shape)
                    ashape.insert(0,1)
                    entry = DataExchangeEntry.data(
                    root='/'+dataset,
                    data={
                        'value': sp.zeros(ashape), 
                        'units': f_dex['/'.join([dataset, 'data'])].attrs['units'], 
                        'description': f_dex['/'.join([dataset, 'data'])].attrs['description'],
                        'axes': 'angle:'+f_dex['/'.join([dataset, 'data'])].attrs['axes'],
                        'dataset_opts':  {
                                'compression': 'gzip', 
                                'compression_opts': 4,
                                'maxshape': (None, None, None, None)
                                } 
                            },
                    angles={
                        'value': sorted(angles.keys(), key=float),
                        'units': 'degrees',
                        'description': 'Angles at which each projection was acquired.'
                        }
                    )

                    
                    f_tomo.add_entry(entry)
                    f_tomo['/'.join([dataset, 'data'])].resize(tuple(array_shape))

                f_tomo['/'.join([dataset, 'data'])][i_theta,:,:,:] = f_dex[dataset+'/data'].value

    f_tomo.close()
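
The create-then-resize pattern described in the comments above, reduced to a self-contained h5py sketch (file name, group name and array sizes are illustrative):

import h5py
import numpy as np

with h5py.File('stack_sketch.h5', 'w') as f:
    dset = f.create_dataset('exchange_0/data',
                            shape=(1, 4, 64, 64),
                            maxshape=(None, None, None, None),
                            dtype='float32',
                            compression='gzip', compression_opts=4)
    dset.resize((10, 4, 64, 64))                              # grow to the final stack size
    for i_theta in range(10):
        dset[i_theta, :, :, :] = np.full((4, 64, 64), i_theta)  # add one projection at a time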

Example #14

def write_example(filename):

    # --- prepare data ---

    # Generate fake raw data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)
    rawdata_white = np.ones(2 * 256 * 256, np.uint16).reshape(2, 256, 256)
    rawdata_dark = np.zeros(10 * 256 * 256, np.uint16).reshape(10, 256, 256)

    # Generate fake normalized data
    normalizeddata = np.ones(180 * 256 * 256, \
                             np.float64).reshape(180, 256, 256)

    # Generate fake reconstructed data
    reconstructeddata = np.ones(256 * 256 * 256, \
                                np.float64).reshape(256, 256, 256)
     
    # x, y and z ranges
    x = np.arange(256)
    y = np.arange(256)
    z = np.arange(180)
    
    # Fabricated theta values
    theta = (z / float(180)) * 180.0
    theta_white = (0.0, 180.0)
    theta_dark = (0.0, 0.0, 0.0, 0.0, 0.0, 180.0, 180.0, 180.0, 180.0, 180.0)

    # Fabricated data_shift_x and data_shift_y value
    data_shift_x = np.random.randint(-100, 100, size=180) 
    data_shift_y = np.random.randint(-100, 100, size=180) 

    # --- create file ---

    print filename
    
    # Open DataExchange file
    f = DataExchangeFile(filename, mode='w') 
        
    
    # Create core HDF5 dataset in exchange group for 180 deep stack
    # of x,y images /exchange/data
    f.add_entry( DataExchangeEntry.data(data={'value': rawdata, 'units':'counts', 'description': 'transmission', 'axes':'theta:y:x',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} })
    )
    f.add_entry( DataExchangeEntry.data(title={'value': 'tomography_raw_projections'}))
    f.add_entry( DataExchangeEntry.data(data_dark={'value':rawdata_dark, 'units':'counts', 'axes':'theta_dark:y:x',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} })
    )
    f.add_entry( DataExchangeEntry.data(data_white={'value': rawdata_white, 'units':'counts', 'axes':'theta_white:y:x',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} })
    )
    f.add_entry( DataExchangeEntry.data(theta={'value': theta, 'units':'degrees'}))
    f.add_entry( DataExchangeEntry.data(theta_dark={'value': theta_dark, 'units':'degrees'}))
    f.add_entry( DataExchangeEntry.data(theta_white={'value': theta_white, 'units':'degrees'}))
    f.add_entry( DataExchangeEntry.data(data_shift_x={'value': data_shift_x}))
    f.add_entry( DataExchangeEntry.data(data_shift_y={'value': data_shift_y}))
                  
    # Exchange HDF5 group
    # /exchange_2
    # this will be the output of the normalization process
    f.add_entry( DataExchangeEntry.data(root='exchange_2', title={'value': 'tomography normalized projections'}) )
    f.add_entry( DataExchangeEntry.data(root='exchange_2', data={'value': normalizeddata, 'units':'counts', 'axes':'theta:y:x',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} })
    )
    f.add_entry( DataExchangeEntry.data(root='exchange_2', theta={'value': theta, 'units':'degrees'}))

    # Exchange HDF5 group
    # /exchange_3
    # this will be the output of the reconstruction process
    f.add_entry( DataExchangeEntry.data(root='exchange_3', title={'value': 'tomography reconstructions'}) )
    f.add_entry( DataExchangeEntry.data(root='exchange_3', data={'value': reconstructeddata, 'units':'density', 'axes':'z:y:x',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} })
    )

    # Create HDF5 group measurement
    # /measurement
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'APS 2-BM'}) )

    # Create HDF5 subgroup
    # /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': 'APS'}, 
                                        date_time={'value': "2012-07-31T21:15:23+0600"},
                                        beamline={'value': "2-BM"}, 
                                        current={'value': 101.199, 'units': 'mA', 'dataset_opts':  {'dtype': 'd'}},
                                        energy={'value': 7.0, 'units':'GeV', 'dataset_opts':  {'dtype': 'd'}},
                                        mode={'value':'TOPUP'}
                                        )
    )
    # Create HDF5 subgroup
    # /measurement/instrument/attenuator           
    f.add_entry( DataExchangeEntry.attenuator(thickness={'value': 1e-3, 'units': 'm', 'dataset_opts':  {'dtype': 'd'}},
                                            type={'value': 'Al'}
                                            )
        )

    # Create HDF5 subgroup
    # /measurement/instrument/monochromator
    f.add_entry( DataExchangeEntry.monochromator(type={'value': 'Multilayer'},
                                                energy={'value': 19.26, 'units': 'keV', 'dataset_opts':  {'dtype': 'd'}},
                                                energy_error={'value': 1e-3, 'units': 'keV', 'dataset_opts':  {'dtype': 'd'}},
                                                mono_stripe={'value': 'Ru/C'},
                                                )
        )

    # Create HDF5 subgroup
    # /measurement/instrument/detector
    f.add_entry( DataExchangeEntry.detector(manufacturer={'value':'CooKe Corporation'},
                                            model={'value': 'pco dimax'},
                                            serial_number={'value': '1234XW2'},
                                            bit_depth={'value': 12, 'dataset_opts':  {'dtype': 'd'}},
                                            x_pixel_size={'value': 6.7e-6, 'dataset_opts':  {'dtype': 'f'}},
                                            y_pixel_size={'value': 6.7e-6, 'dataset_opts':  {'dtype': 'f'}},
                                            x_dimensions={'value': 2048, 'dataset_opts':  {'dtype': 'i'}},
                                            y_dimensions={'value': 2048, 'dataset_opts':  {'dtype': 'i'}},
                                            x_binning={'value': 1, 'dataset_opts':  {'dtype': 'i'}},
                                            y_binning={'value': 1, 'dataset_opts':  {'dtype': 'i'}},
                                            operating_temperature={'value': 270, 'units':'K', 'dataset_opts':  {'dtype': 'f'}},
                                            exposure_time={'value': 170, 'units':'ms', 'dataset_opts':  {'dtype': 'd'}},
                                            frame_rate={'value': 3, 'dataset_opts':  {'dtype': 'i'}},
                                            output_data={'value':'/exchange'}
                                            )
        )

    f.add_entry( DataExchangeEntry.roi(name={'value':'Center Third'},
                                        x1={'value':256, 'dataset_opts':  {'dtype': 'i'}},
                                        x2={'value':1792, 'dataset_opts':  {'dtype': 'i'}},
                                        y1={'value':256, 'dataset_opts':  {'dtype': 'i'}},
                                        y2={'value':1792, 'dataset_opts':  {'dtype': 'i'}},
                                        )
        )

    f.add_entry(DataExchangeEntry.objective(manufacturer={'value':'Zeiss'},
                                            model={'value':'Plan-NEOFLUAR 1004-072'},
                                            magnification={'value':20, 'dataset_opts':  {'dtype': 'd'}},
                                            numerical_aperture={'value':0.5, 'dataset_opts':  {'dtype': 'd'}},
                                        )
        )

    f.add_entry(DataExchangeEntry.scintillator(manufacturer={'value':'Crytur'},
                                                serial_number={'value':'12'},
                                                name={'value':'YAG polished'},
                                                type={'value':'YAG on YAG'},
                                                scintillating_thickness={'value':5e-6, 'dataset_opts':  {'dtype': 'd'}},
                                                substrate_thickness={'value':1e-4, 'dataset_opts':  {'dtype': 'd'}},
            )
        )


    # Create HDF5 subgroup 
    # /measurement/sample
    f.add_entry( DataExchangeEntry.sample( name={'value':'Hornby_b'},
                                            description={'value':'test sample'},
                                            preparation_date={'value':'2011-07-31T21:15:23+0600'},
                                            chemical_formula={'value':'unknown'},
                                            mass={'value':0.25, 'units':'g', 'dataset_opts':  {'dtype': 'd'}},
                                            enviroment={'value':'air'},
                                            temperature={'value':120.0, 'units':'Celsius', 'dataset_opts':  {'dtype': 'd'}},
                                            temperature_set={'value':130.0, 'units':'Celsius', 'dataset_opts':  {'dtype': 'd'}},
            )
        )

    # Create HDF5 subgroup 
    # /measurement/sample/geometry/translation
    f.add_entry( DataExchangeEntry.translation(root='/measurement/sample/geometry',
                    distances={'value':[0,0,0],'axes':'z:y:x', 'units':'m', 'dataset_opts':  {'dtype': 'd'}}
                    )
        )
    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"John Doe"},
                                                role={'value':"Project PI"},
                                                affiliation={'value':"University of California, Berkeley"},
                                                address={'value':"EPS UC Berkeley CA 94720 4767 USA"},
                                                phone={'value':"+1 123 456 0000"},
                                                email={'value':"*****@*****.**"},
                                                facility_user_id={'value':"a123456"},

                    )
        )
    

    # Create HDF5 subgroup
    # /measurement/experiment
    f.add_entry( DataExchangeEntry.experiment(  proposal={'value':"1234"},
                                                activity={'value':"e11218"},
                                                safety={'value':"9876"},
                )
        )

    # --- All done ---
    f.close()
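
After running the example above, the resulting group layout can be inspected quickly with h5py (output file name is illustrative; the listed groups follow the comments above):

import h5py

write_example('full_example.h5')

def show(name):
    print name

with h5py.File('full_example.h5', 'r') as f:
    f.visit(show)   # expected to list exchange, exchange_2, exchange_3, measurement, ...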

Example #15

def write_example(filename):

    # --- prepare data ---

    # Generate fake raw data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)
     
    # x, y and z ranges
    x = np.arange(256)
    y = np.arange(256)
    z = np.arange(180)
   
    # --- create file ---

    # Open HDF5 file
    f = DataExchangeFile(filename, mode='w') 
    
    # Create core HDF5 dataset in exchange group for 180 deep stack of x,y
    # images /exchange/data
    f.add_entry( DataExchangeEntry.data(data={'value': rawdata, 'units':'counts', 'description': 'Projection Data',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} })
    )

    # Create HDF5 subgroup 
    # /measurement/sample
    f.add_entry( DataExchangeEntry.sample(name={'value': 'Minivirus'}, temperature={'value': 200.0, 'units':'celsius',
                                'dataset_opts': {'dtype': 'd'}})
    )

    # Create HDF5 subgroup 
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'APS 2-BM'}) )

    # Create HDF5 subgroup
    # /measurement/instrument/monochromator
    f.add_entry( DataExchangeEntry.monochromator(name={'value': 'DMM'}, 
                                                energy={'value': 10.00, 'units':'keV', 'dataset_opts': {'dtype':'d'}}))

    # --- All done ---
    f.close()

Example #16

def main():

    file_name = '/local/data/databank/TXM_26ID/20130731_004_Stripe_Solder_Sample_Tip1_TomoScript_181imgs_p1s_b1.txrm'
    white_file_name = '/local/data/databank/TXM_26ID/20130731_001_Background_Reference_20imgs_p5s_b1.xrm'
    hdf5_file_name = '/local/data/databank/dataExchange/TXM/20130731_004_Stripe_Solder_Sample_Tip1_nx.h5'
    log_file = '/local/data/databank/dataExchange/TXM/20130731_004_Stripe_Solder_Sample_Tip1.log'

    verbose = True

    mydata = Convert()
    # Create minimal hdf5 file
    if verbose: print "Reading data ... "
    mydata.stack(file_name,
                 hdf5_file_name=hdf5_file_name,
                 white_file_name=white_file_name,
                 sample_name='Stripe_Solder_Sample_Tip1')

    # Add extra metadata if available / desired

    reader = xradia.xrm()
    array = dstruct
    reader.read_txrm(file_name, array)

    # Read angles
    n_angles = np.shape(array.exchange.angles)
    if verbose: print "Done reading ", n_angles, " angles"
    theta = np.zeros(n_angles)
    theta = array.exchange.angles[:]

    # Save any other available metadata in a log file
    f = open(log_file, 'w')
    f.write('Data creation date: \n')
    f.write(str(array.information.file_creation_datetime))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Sample name: \n')
    f.write(str(array.information.sample.name))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Experimenter name: \n')
    f.write(str(array.information.experimenter.name))
    f.write('\n')
    f.write('=======================================\n')
    f.write('X-ray energy: \n')
    f.write(str(array.exchange.energy))
    f.write(str(array.exchange.energy_units))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Angles: \n')
    f.write(str(array.exchange.angles))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Data axes: \n')
    f.write(str(array.exchange.data_axes))
    f.write('\n')
    f.write('=======================================\n')
    f.write('x distance: \n')
    f.write(str(array.exchange.x))
    f.write('\n')
    f.write('=======================================\n')
    f.write('x units: \n')
    f.write(str(array.exchange.x_units))
    f.write('\n')
    f.write('=======================================\n')
    f.write('y distance: \n')
    f.write(str(array.exchange.y))
    f.write('\n')
    f.write('=======================================\n')
    f.write('y units: \n')
    f.write(str(array.exchange.y_units))
    f.write('\n')
    f.close()

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a')

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry(DataExchangeEntry.instrument(name={'value': 'APS-CNM 26-ID'}))

    ### Create HDF5 subgroup
    ### /measurement/instrument/source
    f.add_entry(
        DataExchangeEntry.source(
            name={'value': "Advanced Photon Source"},
            date_time={'value': "2013-07-31T19:42:13+0100"},
            beamline={'value': "26-ID"},
        ))

    # Create HDF5 subgroup
    # /measurement/instrument/monochromator
    f.add_entry(
        DataExchangeEntry.monochromator(
            type={'value': 'Unknown'},
            energy={
                'value': float(array.exchange.energy[0]),
                'units': 'keV',
                'dataset_opts': {
                    'dtype': 'd'
                }
            },
            mono_stripe={'value': 'Unknown'},
        ))

    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry(
        DataExchangeEntry.experimenter(
            name={'value': "Robert Winarski"},
            role={'value': "Project PI"},
        ))

    # Create HDF5 subgroup
    # /measurement/sample
    f.add_entry(
        DataExchangeEntry.data(theta={
            'value': theta,
            'units': 'degrees'
        }))

    f.close()
    print "Done creating data exchange file: ", hdf5_file_name

Example #17

def pack_data_exchange():
    f = DataExchangeFile(CXP.io.data_exchange_filename, mode='w')
    sim = DataExchangeEntry.simulation(
                                        name={'value': 'Simulated Ptycho Data.'},
                                        energy={'value': CXP.experiment.energy, 'units':'keV'},
                                        focusing_optic={'value': CXP.experiment.optic},
                                        probe_modes={'value':CXP.reconstruction.probe_modes},
                                        noise_model={'value': CXP.simulation.noise_model},
                                        gaussian_noise_level={'value': CXP.simulation.gaussian_noise_level},
                                        total_photons={'value': CXP.simulation.total_photons},
                                        beam_stop={'value': CXP.simulation.beam_stop},
                                        beam_stop_size={'value':CXP.simulation.beam_stop_size},
                                        beam_stop_attenuation={'value':CXP.simulation.beam_stop_attenuation},
                                        defocus = {'value':CXP.simulation.defocus},
                                        position_jitter={'value': CXP.reconstruction.initial_position_jitter_radius, 'units':'pixels'}
        )
    f.add_entry(sim)
    sample = DataExchangeEntry.sample(
                                            root='/simulation',
                                            name={'value':'ground truth sample complex amplitude'}, 
                                            data={'value': cxph.sample.data[0], 'units':'sqrt(counts)'},
        )
    f.add_entry(sample)
    probe = DataExchangeEntry.sample(
                                            root='/simulation',
                                            entry_name='probe',
        )
    for mode in range(CXP.reconstruction.probe_modes):
        setattr(probe, 'mode_{:d}'.format(mode), {'value': cxph.input_probe.modes[mode].data[0], 'units':'counts'})

    f.add_entry(probe)
    detector = DataExchangeEntry.detector(
                                            root='/simulation',
                                            x_pixel_size={'value': CXP.experiment.dx_d},
                                            y_pixel_size={'value': CXP.experiment.dx_d},
                                            x_dimension={'value': CXP.experiment.px},
                                            y_dimension={'value': CXP.experiment.py},
                                            distance={'value': CXP.experiment.z},
                                            basis_vectors={'value': [[0,-CXP.experiment.dx_d,0],[-CXP.experiment.dx_d,0,0]]},
                                            corner_position={'value': [0,0,0]}
        )
    f.add_entry(detector)
    data = DataExchangeEntry.data(
                                            name={'value': 'simulated_data'},
                                            data={'value': sp.array(cxph.det_mod.data), 'axes':'translation:y:x', 'units':'counts',
                                            'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4}},
                                            translation={'value':'/exchange/sample/geometry/translation'}          
                                )
    f.add_entry(data)
    # Get scan positions into dex format
    pos = sp.zeros((cxph.positions.total, 3))
    y, x = cxph.positions.correct
    for i in range(cxph.positions.total):
        pos[i,0], pos[i, 1] = x[i]*CXP.dx_s, y[i]*CXP.dx_s

    positions = DataExchangeEntry.translation(
                                            root='/exchange/sample/geometry',
                                            name={'value':'ptychography scan positions'},
                                            scan_type={'value': CXP.measurement.ptycho_scan_type},
                                            data={'value': pos, 'units': 'm'}
                                )

    
    f.add_entry(positions)
    f.close()

Example #18

def main():


    # Petra III collects data over 360 deg, but in these data sets the rotary stage
    # stopped moving after 180 deg, so only the first 180 deg are good to reconstruct.
    # The three blocks below load only the good 180 deg.

    ### ct2: pj: from 0 -> 3600; bf from 0 -> 20; df from 0 -> 20
    ##file_name = '/local/data/databank/PetraIII/ct2/ct2_.tif'
    ##dark_file_name = '/local/data/databank/PetraIII/ct2/df2b_.tif'
    ##white_file_name = '/local/data/databank/PetraIII/ct2/bf2b_.tif'
    ##hdf5_file_name = '/local/data/databank/dataExchange/microCT/PetraIII_ct2_180.h5'
    ##sample_name = 'ct2'
    ##
    ### ct2: Wheat root
    ### Sample measured at room temperature
    ##
    ##projections_start = 0
    ##projections_end = 1801
    ##white_start = 0
    ##white_end = 20
    ##white_step = 1
    ##dark_start = 0
    ##dark_end = 20
    ##dark_step = 1

    ### ct3: pj: from 0 -> 3601; bf from 20 -> 39; df from 0 -> 19
    ##file_name = '/local/data/databank/PetraIII/ct3/ct3_.tif'
    ##dark_file_name = '/local/data/databank/PetraIII/ct3/df_.tif'
    ##white_file_name = '/local/data/databank/PetraIII/ct3/bf_.tif'
    ##hdf5_file_name = '/local/data/databank/dataExchange/microCT/PetraIII_ct3_180.h5'
    ##sample_name = 'ct3'
    ##
    ### ct3: Wheat root
    ### Same sample as ct3 but measured at cryogenic condition
    ##
    ##projections_start = 0
    ##projections_end = 1801
    ##white_start = 20
    ##white_end = 40
    ##white_step = 1
    ##dark_start = 0
    ##dark_end = 20
    ##dark_step = 1

    # ct4: pj: from 0 -> 1199; bf from 1 -> 18; df from 0 -> 19
    file_name = '/local/data/databank/PetraIII/ct4/ct4_.tif'
    dark_file_name = '/local/data/databank/PetraIII/ct4/df_ct4_.tif'
    white_file_name = '/local/data/databank/PetraIII/ct4/bf_ct4_.tif'
    hdf5_file_name = '/local/data/databank/dataExchange/microCT/PetraIII_ct4_180.h5'
    sample_name = 'ct4'

    # ct4: Leaf of rice
    # Fresh sample measured at cryogenic condition

    projections_start = 0
    projections_end = 601
    white_start = 1
    white_end = 19
    white_step = 1
    dark_start = 0
    dark_end = 20
    dark_step = 1

    ##### if testing uncomment
    ##projections_start = 0
    ##projections_end = 5
    ##white_start = 0
    ##white_end = 5
    ##white_step = 1
    ##dark_start = 0
    ##dark_end = 5
    ##dark_step = 1


    verbose = True

    if verbose: print file_name
    if verbose: print hdf5_file_name
    if verbose: print sample_name


    if verbose: print "Dark start, end", dark_start, dark_end
    if verbose: print "White start, end", white_start, white_end
    if verbose: print "Projections start, end", projections_start, projections_end


    mydata = Convert()
    # Create minimal hdf5 file
    mydata.series_of_images(file_name,
                     hdf5_file_name,
                     projections_start,
                     projections_end,
                     # projections_angle_range=360,
                     white_file_name = white_file_name,
                     white_start = white_start,
                     white_end = white_end,
                     white_step = white_step,
                     dark_file_name = dark_file_name,
                     dark_start = dark_start,
                     dark_end = dark_end,
                     dark_step = dark_step,
                     sample_name = sample_name,
                     projections_digits = 5,
                     zeros = True,
                     verbose = False
                     )

     
    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'Petra III'}) )

    # Create HDF5 subgroup
    # /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': 'Petra III'},
                                        date_time={'value': "2011-25-05T19:42:13+0100"},
                                        beamline={'value': "P06"},
                                        )
    )

    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Walter Schroeder"},
                                                role={'value':"Project PI"},
                    )
        )

    f.close()
    if verbose: print "Done converting ", file_name

Example #19

def convert_to_SDE(filename):
    """
    ..function:: convert_to_SDE(filename)

        This function converts a single MAPS created HDF5 file to Scientific Data Exchange format.


        ..param:: filename - the filename of the MAPS hdf5 file to be converted to 
                             Scientific Data Exchange format.


    """
    
    f_maps = h5py.File(filename, mode='r')

    f_dex = DataExchangeFile('/'.join(filename.split('/')[:-1])+'/'+filename.split('/')[-1].split('.')[0]+'_SDE.h5', mode='w')

    for group in MAPS_to_SDE_mapping.keys():

        for entry_type in MAPS_to_SDE_mapping[group].keys():

            for entry in MAPS_to_SDE_mapping[group][entry_type].keys():
                d, kwargs = {}, {}
                # root
                root = MAPS_to_SDE_mapping[group][entry_type][entry][0]
                # value
                if type(MAPS_to_SDE_mapping[group][entry_type][entry][1])==h5val:
                    d['value'] = f_maps[MAPS_to_SDE_mapping[group][entry_type][entry][1].args[0]].value
                    if len(MAPS_to_SDE_mapping[group][entry_type][entry][1].args)>1:
                        d['value'] = d['value'][MAPS_to_SDE_mapping[group][entry_type][entry][1].args[1]]

                elif type(MAPS_to_SDE_mapping[group][entry_type][entry][1])==h5att:
                    d['value'] = f_maps[MAPS_to_SDE_mapping[group][entry_type][entry][1].args[0]].attrs[MAPS_to_SDE_mapping[group][entry_type][entry][1].args[1]]
                else:
                    d['value'] = MAPS_to_SDE_mapping[group][entry_type][entry][1]
                #units
                try:
                    if type(MAPS_to_SDE_mapping[group][entry_type][entry][2])==h5val:
                        d['units'] = f_maps[MAPS_to_SDE_mapping[group][entry_type][entry][2].args[0]].value
                    elif type(MAPS_to_SDE_mapping[group][entry_type][entry][2])==h5att:
                        d['units'] = f_maps[MAPS_to_SDE_mapping[group][entry_type][entry][2].args[0]].attrs[MAPS_to_SDE_mapping[group][entry_type][entry][2].args[1]]
                    else:
                        if MAPS_to_SDE_mapping[group][entry_type][entry][2]: # Could be None
                            d['units'] = MAPS_to_SDE_mapping[group][entry_type][entry][2]
                    #description
                    if type(MAPS_to_SDE_mapping[group][entry_type][entry][3])==h5val:
                        d['description'] = f_maps[MAPS_to_SDE_mapping[group][entry_type][entry][3].args[0]].value
                    elif type(MAPS_to_SDE_mapping[group][entry_type][entry][3])==h5att:
                        d['description'] = f_maps[MAPS_to_SDE_mapping[group][entry_type][entry][3].args[0]].attrs[MAPS_to_SDE_mapping[group][entry_type][entry][3].args[1]]
                    else:
                        d['description'] = f_maps[MAPS_to_SDE_mapping[group][entry_type][entry][3]]
                    #axes
                    d['axes'] = MAPS_to_SDE_mapping[group][entry_type][entry][4]
                    #dataset_opts
                    d['dataset_opts'] = MAPS_to_SDE_mapping[group][entry_type][entry][5]
                except IndexError:
                    pass
                
                if root is None:
                    f_dex.add_entry(
                        getattr(DataExchangeEntry, entry_type)(**{entry: d})
                    )

                else:
                    f_dex.add_entry(
                        getattr(DataExchangeEntry, entry_type)(**{'root': root, entry: d})
                    )
    f_dex.close()
    f_maps.close()
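
# A minimal usage sketch for convert_to_SDE (not part of the original code).
# The MAPS input path below is hypothetical; as implemented above, the output
# is written next to the input with an '_SDE.h5' suffix.
import os

maps_file = '/local/data/maps/scan_0001.h5'  # hypothetical path
if os.path.isfile(maps_file):
    convert_to_SDE(maps_file)
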
Example #20
0
    def xtomo_exchange(self,
                       data,
                       data_white=None,
                       data_dark=None,
                       theta=None,
                       sample_name=None,
                       data_exchange_type=None,
                       hdf5_file_name=None,
                       log='INFO'):
        """ 
        Write 3-D data to a data-exchange file.

        Parameters
        ----------            
        data : ndarray
            3-D X-ray absorption tomography raw data.
            Size of the dimensions should be:
            [projections, slices, pixels].
            
        data_white, data_dark : ndarray, optional
            3-D white-field/dark-field data. Multiple
            projections are stacked together to obtain
            a 3-D matrix. 2nd and 3rd dimensions should
            be the same as data: [shots, slices, pixels].
            
        theta : ndarray, optional
            Data acquisition angles corresponding
            to each projection.

        data_exchange_type : str
            label defining the type of data contained in the data exchange file;
            for raw tomography data use 'tomography_raw_projections'

        hdf5_file_name : str
            Output file.

        Notes
        -----
        If the output file already exists, nothing is written.

        Examples
        --------
        - Convert a tomographic projection series (raw, dark, white) of TIFF files into data exchange:
            
            >>> from dataexchange import xtomo_importer as dx
            >>> from dataexchange import xtomo_exporter as ex

            >>> file_name = '/local/dataraid/databank/Anka/radios/image_.tif'
            >>> dark_file_name = '/local/dataraid/databank/Anka/darks/image_.tif'
            >>> white_file_name = '/local/dataraid/databank/Anka/flats/image_.tif'

            >>> hdf5_file_name = '/local/dataraid/databank/dataExchange/tmp/Anka.h5'

            >>> projections_start = 0
            >>> projections_end = 3167
            >>> white_start = 0
            >>> white_end = 100
            >>> dark_start = 0
            >>> dark_end = 100

            >>> sample_name = 'Anka'
            >>> 
            >>> mydata = dx.Import()
            >>> # Read series of images
            >>> data, white, dark, theta = mydata.xtomo_raw(file_name,
            >>>                                                    projections_start = projections_start,
            >>>                                                    projections_end = projections_end,
            >>>                                                    white_file_name = white_file_name,
            >>>                                                    white_start = white_start,
            >>>                                                    white_end = white_end,
            >>>                                                    dark_file_name = dark_file_name,
            >>>                                                    dark_start = dark_start,
            >>>                                                    dark_end = dark_end,
            >>>                                                    projections_digits = 5,
            >>>                                                    log='INFO'
            >>>                                                    )

            >>> mydata = ex.Export()
            >>> # Create minimal data exchange hdf5 file
            >>> mydata.xtomo_exchange(data = data,
            >>>                       data_white = white,
            >>>                       data_dark = dark,
            >>>                       theta = theta,
            >>>                       hdf5_file_name = hdf5_file_name,
            >>>                       data_exchange_type = 'tomography_raw_projections',
            >>>                       sample_name = sample_name
            >>>                       )

        """

        if (hdf5_file_name != None):
            if os.path.isfile(hdf5_file_name):
                self.logger.error("Data Exchange file: [%s] already exists",
                                  hdf5_file_name)
            else:
                # Create new folder.
                dirPath = os.path.dirname(hdf5_file_name)
                if not os.path.exists(dirPath):
                    os.makedirs(dirPath)

                # Get the file_name in lower case.
                lFn = hdf5_file_name.lower()

                # Split the string with the delimeter '.'
                end = lFn.split('.')

                # Write the Data Exchange HDF5 file.
                # Open DataExchange file
                f = DataExchangeFile(hdf5_file_name, mode='w')

                self.logger.info("Creating Data Exchange File [%s]",
                                 hdf5_file_name)

                # Create core HDF5 dataset in exchange group for projections_theta_range
                # deep stack of x,y images /exchange/data
                self.logger.info(
                    "Adding projections to Data Exchange File [%s]",
                    hdf5_file_name)
                f.add_entry(
                    DataExchangeEntry.data(
                        data={
                            'value': data,
                            'units': 'counts',
                            'description': 'transmission',
                            'axes': 'theta:y:x'
                        }))
                #                f.add_entry( DataExchangeEntry.data(data={'value': data, 'units':'counts', 'description': 'transmission', 'axes':'theta:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                if (theta != None):
                    f.add_entry(
                        DataExchangeEntry.data(theta={
                            'value': theta,
                            'units': 'degrees'
                        }))
                    self.logger.info("Adding theta to Data Exchange File [%s]",
                                     hdf5_file_name)
                else:
                    self.logger.warning("theta is not defined")
                if (data_dark != None):
                    self.logger.info(
                        "Adding dark fields to  Data Exchange File [%s]",
                        hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            data_dark={
                                'value': data_dark,
                                'units': 'counts',
                                'axes': 'theta_dark:y:x'
                            }))
#                    f.add_entry( DataExchangeEntry.data(data_dark={'value': data_dark, 'units':'counts', 'axes':'theta_dark:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                else:
                    self.logger.warning("data dark is not defined")
                if (data_white != None):
                    self.logger.info(
                        "Adding white fields to  Data Exchange File [%s]",
                        hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            data_white={
                                'value': data_white,
                                'units': 'counts',
                                'axes': 'theta_white:y:x'
                            }))
#                    f.add_entry( DataExchangeEntry.data(data_white={'value': data_white, 'units':'counts', 'axes':'theta_white:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                else:
                    self.logger.warning("data white is not defined")
                if (data_exchange_type != None):
                    self.logger.info(
                        "Adding data type to  Data Exchange File [%s]",
                        hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            title={'value': data_exchange_type}))
                if (sample_name == None):
                    sample_name = end[0]
                    f.add_entry(
                        DataExchangeEntry.sample(
                            name={'value': sample_name},
                            description={
                                'value':
                                'Sample name was assigned by the HDF5 converter and based on the HDF5 file name'
                            }))
                else:
                    f.add_entry(
                        DataExchangeEntry.sample(
                            name={'value': sample_name},
                            description={
                                'value':
                                'Sample name was read from the user log file'
                            }))
                f.close()
                self.logger.info("DONE!!!!. Created Data Exchange File [%s]",
                                 hdf5_file_name)
        else:
            self.logger.warning("Nothing to do ...")
def main():

    ##file_name = '/local/data/databank/APS_2_BM/Sam18_hornby/raw/Hornby_19keV_10x_.hdf'
    ##log_file = '/local/data/databank/APS_2_BM/Sam18_hornby/raw/Hornby.log'
    ##
    ##hdf5_file_name = '/local/data/databank/dataExchange/microCT/Hornby_APS_2011.h5'

    file_name = '/local/data/databank/APS_2_BM/Sam19_blakely/raw/Blakely_19keV_10x_.hdf'
    log_file = '/local/data/databank/APS_2_BM/Sam19_blakely/raw/Blakely.log'

    hdf5_file_name = '/local/data/databank/dataExchange/microCT/Blakely_APS_2011.h5'

    verbose = True

    if verbose: print file_name
    if verbose: print log_file
    if verbose: print hdf5_file_name



    #Read input SLS data
    file = open(log_file, 'r')
    if verbose: print '###############################'
    for line in file:
        if 'Number of darks' in line:
            NumberOfDarks = re.findall(r'\d+', line)
            if verbose: print 'Number of Darks', NumberOfDarks[0]
        if 'Number of flats' in line:
            NumberOfFlats = re.findall(r'\d+', line)
            if verbose: print 'Number of Flats', NumberOfFlats[0]
        if 'Number of projections' in line:
            NumberOfProjections = re.findall(r'\d+', line)
            if verbose: print 'Number of Projections', NumberOfProjections[0]
        if 'Number of inter-flats' in line:
            NumberOfInterFlats = re.findall(r'\d+', line)
            if verbose: print 'Number of inter-flats', NumberOfInterFlats[0]
        if 'Inner scan flag' in line:
            InnerScanFlag = re.findall(r'\d+', line)
            if verbose: print 'Inner scan flag', InnerScanFlag[0]
        if 'Flat frequency' in line:
            FlatFrequency = re.findall(r'\d+', line)
            if verbose: print 'Flat frequency', FlatFrequency[0]
        if 'Rot Y min' in line:
            RotYmin = re.findall(r'\d+\.\d+', line)
            if verbose: print 'Rot Y min', RotYmin[0]
        if 'Rot Y max' in line:
            RotYmax = re.findall(r'\d+\.\d+', line)
            if verbose: print 'Rot Y max', RotYmax[0]
        if 'Angular step' in line:
            AngularStep = re.findall(r'\d+\.\d+', line)
            if verbose: print 'Angular step', AngularStep[0]
    if verbose: print '###############################'
    file.close()

    dark_start = 1
    dark_end = int(NumberOfDarks[0]) + 1
    white_start = dark_end
    white_end = white_start + int(NumberOfFlats[0])
    projections_start = white_end
    projections_end = projections_start + int(NumberOfProjections[0])

    if verbose: print dark_start, dark_end
    if verbose: print white_start, white_end
    if verbose: print projections_start, projections_end

    dark_start = 1504
    dark_end = 1505
    white_start = 1
    white_end = 2
    projections_start = 2
    projections_end = 1503

    ### if testing uncomment
    ##dark_start = 1
    ##dark_end = 3
    ##white_start = 10
    ##white_end = 12
    ##projections_start = 20
    ##projections_end = 23

    if verbose: print dark_start, dark_end
    if verbose: print white_start, white_end
    if verbose: print projections_start, projections_end

    mydata = Convert()
    # Create minimal hdf5 file
    mydata.series_of_images(file_name,
                     hdf5_file_name,
                     projections_start,
                     projections_end,
                     white_start = white_start,
                     white_end = white_end,
                     dark_start = dark_start,
                     dark_end = dark_end,
                     projections_digits = 5,
                     data_type = 'hdf4',
                     #verbose = False
                 )

     
    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'APS 2-BM'}) )

    f.add_entry( DataExchangeEntry.source(name={'value': 'Advanced Photon Source'},
                                        date_time={'value': "2012-07-31T21:15:23+0600"},
                                        beamline={'value': "2-BM"},
                                        current={'value': 101.199, 'units': 'mA', 'dataset_opts': {'dtype': 'd'}},
                                        energy={'value': 7.0, 'units':'GeV', 'dataset_opts': {'dtype': 'd'}},
                                        mode={'value':'TOPUP'}
                                        )
    )
    # Create HDF5 subgroup
    # /measurement/instrument/attenuator
    f.add_entry( DataExchangeEntry.attenuator(thickness={'value': 1e-3, 'units': 'm', 'dataset_opts': {'dtype': 'd'}},
                                            type={'value': 'Al'}
                                            )
        )

    # Create HDF5 subgroup
    # /measurement/instrument/monochromator
    f.add_entry( DataExchangeEntry.monochromator(type={'value': 'Multilayer'},
                                                energy={'value': 19.26, 'units': 'keV', 'dataset_opts': {'dtype': 'd'}},
                                                energy_error={'value': 1e-3, 'units': 'keV', 'dataset_opts': {'dtype': 'd'}},
                                                mono_stripe={'value': 'Ru/C'},
                                                )
        )


    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Jane Waruntorn"},
                                                role={'value':"Project PI"},
                                                affiliation={'value':"University of California"},
                                                facility_user_id={'value':"64924"},

                    )
        )

    f.add_entry(DataExchangeEntry.objective(manufacturer={'value':'Zeiss'},
                                            model={'value':'Plan-NEOFLUAR 1004-072'},
                                            magnification={'value':5, 'dataset_opts': {'dtype': 'd'}},
                                            numerical_aperture={'value':0.5, 'dataset_opts': {'dtype': 'd'}},
                                        )
        )

    f.add_entry(DataExchangeEntry.scintillator(manufacturer={'value':'Crytur'},
                                                serial_number={'value':'12'},
                                                name={'value':'LuAg '},
                                                type={'value':'LuAg'},
                                                scintillating_thickness={'value':50e-6, 'dataset_opts': {'dtype': 'd'}},
                                                substrate_thickness={'value':50e-6, 'dataset_opts': {'dtype': 'd'}},
            )
        )

    # Create HDF5 subgroup
    # /measurement/experiment
    f.add_entry( DataExchangeEntry.experiment( proposal={'value':"GUP-34353"},
                                                activity={'value':"32-IDBC-2013-106491"},
                                                safety={'value':"106491-49734"},
                )
        )


    f.close()
    if verbose: print "Done converting ", file_name
Example #22
0
def main():

    file_name = '/local/data/databank/TXM_26ID/Miller1/ABR_1SP_.tif'
    #dark_file_name = '/local/data/databank/AS/Mayo_tooth_AS/BG__AFTER_.tif'
    #white_file_name = '/local/data/databank/AS/Mayo_tooth_AS/BG__BEFORE_.tif'
    hdf5_file_name = '/local/data/databank/dataExchange/TXM/TXM_APS26IDMiller1.h5'
    sample_name = 'Teeth'

    projections_start = 0
    projections_end = 361
    white_start = 0
    white_end = 0
    white_step = 1
    dark_start = 0
    dark_end = 0
    dark_step = 1

    verbose = True

    if verbose: print "Input projection base name: ", file_name
    #if verbose: print "Input white base name: ", white_file_name
    #if verbose: print "Input dark base name: ", dark_file_name
    if verbose: print "Output data exchange file name: ", hdf5_file_name

    mydata = Convert()
    # Create minimal hdf5 file
    mydata.series_of_images(
        file_name,
        hdf5_file_name,
        projections_start,
        projections_end,
        #white_file_name = white_file_name,
        white_start=white_start,
        white_end=white_end,
        white_step=white_step,
        #dark_file_name = dark_file_name,
        #dark_start = dark_start,
        #dark_end = dark_end,
        #dark_step = dark_step,
        #sample_name = sample_name,
        projections_digits=4,
        #white_digits = 2,
        #dark_digits = 2,
        projections_zeros=True,
        verbose=False)
    if verbose: print "Done reading data ... "

    # Add extra metadata if available

    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a')

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry(
        DataExchangeEntry.instrument(
            name={'value': 'Australian Synchrotron Facility'}))

    # Create HDF5 subgroup
    # /measurement/instrument/source
    f.add_entry(
        DataExchangeEntry.source(
            name={'value': 'Australian Synchrotron Facility'},
            date_time={'value': "2013-10-19T22:22:13+0100"},
            beamline={'value': "Tomography"},
        ))

    # /measurement/experimenter
    f.add_entry(
        DataExchangeEntry.experimenter(
            name={'value': "Sherry Mayo"},
            role={'value': "Project PI"},
        ))

    f.close()
    if verbose: print "Done creating data exchange file: ", hdf5_file_name
Example #23
0
def main():

    file_name = '/local/data/databank/TXM_26ID/20130731_004_Stripe_Solder_Sample_Tip1_TomoScript_181imgs_p1s_b1.txrm'
    white_file_name = '/local/data/databank/TXM_26ID/20130731_001_Background_Reference_20imgs_p5s_b1.xrm'
    hdf5_file_name = '/local/data/databank/dataExchange/TXM/20130731_004_Stripe_Solder_Sample_Tip1_nx.h5'
    log_file = '/local/data/databank/dataExchange/TXM/20130731_004_Stripe_Solder_Sample_Tip1.log'

    verbose = True
    mydata = Convert()
    # Create minimal hdf5 file
    if verbose: print "Reading data ... "
    mydata.stack(file_name,
                   hdf5_file_name = hdf5_file_name,
                   white_file_name = white_file_name,
                   sample_name = 'Stripe_Solder_Sample_Tip1'
                   )
    
    # Add extra metadata if available / desired

    reader = xradia.xrm()
    array = dstruct
    reader.read_txrm(file_name,array)

    # Read angles
    n_angles = np.shape(array.exchange.angles)
    if verbose: print "Done reading ", n_angles, " angles"
    theta = np.zeros(n_angles)
    theta = array.exchange.angles[:]

    # Save any other available metadata in a log file
    f = open(log_file,'w')
    f.write('Data creation date: \n')
    f.write(str(array.information.file_creation_datetime))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Sample name: \n')
    f.write(str(array.information.sample.name))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Experimenter name: \n')
    f.write(str(array.information.experimenter.name))
    f.write('\n')
    f.write('=======================================\n')
    f.write('X-ray energy: \n')
    f.write(str(array.exchange.energy))
    f.write(str(array.exchange.energy_units))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Angles: \n')
    f.write(str(array.exchange.angles))
    f.write('\n')
    f.write('=======================================\n')
    f.write('Data axes: \n')
    f.write(str(array.exchange.data_axes))
    f.write('\n')
    f.write('=======================================\n')
    f.write('x distance: \n')
    f.write(str(array.exchange.x))
    f.write('\n')
    f.write('=======================================\n')
    f.write('x units: \n')
    f.write(str(array.exchange.x_units))
    f.write('\n')
    f.write('=======================================\n')
    f.write('y distance: \n')
    f.write(str(array.exchange.y))
    f.write('\n')
    f.write('=======================================\n')
    f.write('y units: \n')
    f.write(str(array.exchange.y_units))
    f.write('\n')
    f.close()
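
    # A more compact alternative sketch (not in the original): the repeated
    # f.write() calls above could be driven by a list of (label, value) pairs,
    # for example:
    #
    #     for label, value in [('Sample name', array.information.sample.name),
    #                          ('X-ray energy', array.exchange.energy),
    #                          ('Angles', array.exchange.angles)]:
    #         f.write('%s: \n%s\n%s\n' % (label, str(value), '=' * 39))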


    # Open DataExchange file
    f = DataExchangeFile(hdf5_file_name, mode='a') 

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry( DataExchangeEntry.instrument(name={'value': 'APS-CNM 26-ID'}) )

    ### Create HDF5 subgroup
    ### /measurement/instrument/source
    f.add_entry( DataExchangeEntry.source(name={'value': "Advanced Photon Source"},
                                        date_time={'value': "2013-07-31T19:42:13+0100"},
                                        beamline={'value': "26-ID"},
                                        )
    )

    # Create HDF5 subgroup
    # /measurement/instrument/monochromator
    f.add_entry( DataExchangeEntry.monochromator(type={'value': 'Unknown'},
                                                energy={'value': float(array.exchange.energy[0]), 'units': 'keV', 'dataset_opts': {'dtype': 'd'}},
                                                mono_stripe={'value': 'Unknown'},
                                                )
        )

    # Create HDF5 subgroup
    # /measurement/experimenter
    f.add_entry( DataExchangeEntry.experimenter(name={'value':"Robert Winarski"},
                                                role={'value':"Project PI"},
                    )
        )

    # Create HDF5 subgroup
    # /exchange/theta
    f.add_entry( DataExchangeEntry.data(theta={'value': theta, 'units':'degrees'}))

    f.close()
    print "Done creating data exchange file: ", hdf5_file_name
Example #24
0
    def xtomo_exchange(
        self,
        data,
        data_white=None,
        data_dark=None,
        theta=None,
        data_exchange_type=None,
        source_name=None,
        source_mode=None,
        source_datetime=None,
        beamline=None,
        energy=None,
        current=None,
        actual_pixel_size=None,
        experimenter_name=None,
        experimenter_affiliation=None,
        experimenter_email=None,
        instrument_comment=None,
        sample_name=None,
        sample_comment=None,
        acquisition_mode=None,
        acquisition_comment=None,
        sample_position_x=None,
        sample_position_y=None,
        sample_position_z=None,
        sample_image_shift_x=None,
        sample_image_shift_y=None,
        image_exposure_time=None,
        image_time=None,
        image_theta=None,
        hdf5_file_name=None,
        axes="theta:y:x",
        log="INFO",
    ):
        """ 
        Write 3-D data to a data-exchange file.

        Parameters
        ----------
        data : ndarray
            3-D X-ray absorption tomography raw data.
            Size of the dimensions should be:
            [projections, slices, pixels].
            
        data_white, data_dark : ndarray, optional
            3-D white-field/dark-field data. Multiple
            projections are stacked together to obtain
            a 3-D matrix. 2nd and 3rd dimensions should
            be the same as data: [shots, slices, pixels].
            
        theta : ndarray, optional
            Data acquisition angles corresponding
            to each projection.

        data_exchange_type : str, optional
            label defining the type of data contained in the data exchange file;
            for raw tomography data use 'tomography_raw_projections'

        source_name, source_mode, source_datetime : str, optional
            label defining the source name, operation mode and date/time when these values were taken

        beamline : str, optional
            label defining the beamline name

        energy, current : float, optional
            X-ray energy and beam current

        actual_pixel_size : float, optional
            pixel size on the sample plane
 
        experimenter_name, experimenter_affiliation, experimenter_email : str, optional
            user name, affiliation and e-mail address

        instrument_comment : str, optional
            instrument comment

        sample_name, sample_comment : str, optional
            sample name and comment
        
        acquisition_mode, acquisition_comment : str, optional
            acquisition mode and comment

        hdf5_file_name : str
            Output file.

        Notes
        -----
        If the output file already exists, nothing is written.

        Examples
        --------
        - Convert a tomographic projection series (raw, dark, white) of TIFF files into data exchange:
            
            >>> import dataexchange

            >>> file_name = '/local/dataraid/databank/Anka/radios/image_.tif'
            >>> dark_file_name = '/local/dataraid/databank/Anka/darks/image_.tif'
            >>> white_file_name = '/local/dataraid/databank/Anka/flats/image_.tif'

            >>> hdf5_file_name = '/local/dataraid/databank/dataExchange/tmp/Anka.h5'

            >>> projections_start = 0
            >>> projections_end = 3167
            >>> white_start = 0
            >>> white_end = 100
            >>> dark_start = 0
            >>> dark_end = 100

            >>> sample_name = 'Anka'
            >>> 
            >>> # Read raw data
            >>> read = dataexchange.Import()
            >>> data, white, dark, theta = read.xtomo_raw(file_name,
            >>>                                                    projections_start = projections_start,
            >>>                                                    projections_end = projections_end,
            >>>                                                    white_file_name = white_file_name,
            >>>                                                    white_start = white_start,
            >>>                                                    white_end = white_end,
            >>>                                                    dark_file_name = dark_file_name,
            >>>                                                    dark_start = dark_start,
            >>>                                                    dark_end = dark_end,
            >>>                                                    projections_digits = 5,
            >>>                                                    log='INFO'
            >>>                                                    )
            >>>
            >>> # Save data
            >>> write = dataexchange.Export()
            >>> write.xtomo_exchange(data = data,
            >>>                       data_white = white,
            >>>                       data_dark = dark,
            >>>                       theta = theta,
            >>>                       hdf5_file_name = hdf5_file_name,
            >>>                       data_exchange_type = 'tomography_raw_projections',
            >>>                       sample_name = sample_name
            >>>                       )

        """

        if hdf5_file_name != None:
            if os.path.isfile(hdf5_file_name):
                self.logger.error("Data Exchange file: [%s] already exists", hdf5_file_name)
            else:
                # Create new folder.
                dirPath = os.path.dirname(hdf5_file_name)
                if not os.path.exists(dirPath):
                    os.makedirs(dirPath)

                # Get the file_name in lower case.
                lFn = hdf5_file_name.lower()

                # Split the string with the delimeter '.'
                end = lFn.split(".")

                # Write the Data Exchange HDF5 file.
                # Open DataExchange file
                f = DataExchangeFile(hdf5_file_name, mode="w")

                self.logger.info("Creating Data Exchange File [%s]", hdf5_file_name)

                # Create core HDF5 dataset in exchange group for projections_theta_range
                # deep stack of x,y images /exchange/data
                self.logger.info("Adding projections to Data Exchange File [%s]", hdf5_file_name)
                f.add_entry(
                    DataExchangeEntry.data(
                        data={"value": data, "units": "counts", "description": "transmission", "axes": axes}
                    )
                )
                #                f.add_entry( DataExchangeEntry.data(data={'value': data, 'units':'counts', 'description': 'transmission', 'axes':'theta:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                if theta != None:
                    f.add_entry(DataExchangeEntry.data(theta={"value": theta, "units": "degrees"}))
                    self.logger.info("Adding theta to Data Exchange File [%s]", hdf5_file_name)
                else:
                    self.logger.warning("theta is not defined")
                if data_dark != None:
                    self.logger.info("Adding dark fields to  Data Exchange File [%s]", hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            data_dark={"value": data_dark, "units": "counts", "axes": "theta_dark:y:x"}
                        )
                    )
                #                    f.add_entry( DataExchangeEntry.data(data_dark={'value': data_dark, 'units':'counts', 'axes':'theta_dark:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                else:
                    self.logger.warning("data dark is not defined")
                if data_white != None:
                    self.logger.info("Adding white fields to  Data Exchange File [%s]", hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            data_white={"value": data_white, "units": "counts", "axes": "theta_white:y:x"}
                        )
                    )
                #                    f.add_entry( DataExchangeEntry.data(data_white={'value': data_white, 'units':'counts', 'axes':'theta_white:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                else:
                    self.logger.warning("data white is not defined")
                if data_exchange_type != None:
                    self.logger.info("Adding data type to  Data Exchange File [%s]", hdf5_file_name)
                    f.add_entry(DataExchangeEntry.data(title={"value": data_exchange_type}))

                if source_name != None:
                    f.add_entry(DataExchangeEntry.source(name={"value": source_name}))
                if source_mode != None:
                    f.add_entry(DataExchangeEntry.source(mode={"value": source_mode}))
                if source_datetime != None:
                    f.add_entry(DataExchangeEntry.source(datetime={"value": source_datetime}))

                if beamline != None:
                    f.add_entry(DataExchangeEntry.source(beamline={"value": beamline}))
                if energy != None:
                    f.add_entry(
                        DataExchangeEntry.monochromator(
                            energy={"value": energy, "units": "keV", "dataset_opts": {"dtype": "d"}}
                        )
                    )
                if current != None:
                    f.add_entry(
                        DataExchangeEntry.source(
                            current={"value": current, "units": "mA", "dataset_opts": {"dtype": "d"}}
                        )
                    )

                if actual_pixel_size != None:
                    f.add_entry(
                        DataExchangeEntry.detector(
                            actual_pixel_size_x={
                                "value": actual_pixel_size,
                                "units": "microns",
                                "dataset_opts": {"dtype": "d"},
                            },
                            actual_pixel_size_y={
                                "value": actual_pixel_size,
                                "units": "microns",
                                "dataset_opts": {"dtype": "d"},
                            },
                        )
                    )

                if experimenter_name != None:
                    f.add_entry(DataExchangeEntry.experimenter(name={"value": experimenter_name}))
                if experimenter_affiliation != None:
                    f.add_entry(DataExchangeEntry.experimenter(affiliation={"value": experimenter_affiliation}))
                if experimenter_email != None:
                    f.add_entry(DataExchangeEntry.experimenter(email={"value": experimenter_email}))

                if instrument_comment != None:
                    f.add_entry(DataExchangeEntry.instrument(comment={"value": instrument_comment}))
                if sample_name == None:
                    sample_name = end[0]
                    f.add_entry(
                        DataExchangeEntry.sample(
                            name={"value": sample_name},
                            description={
                                "value": "Sample name assigned by the HDF5 converter and based on the HDF5 file name"
                            },
                        )
                    )
                else:
                    f.add_entry(DataExchangeEntry.sample(name={"value": sample_name}))
                if sample_comment != None:
                    f.add_entry(DataExchangeEntry.sample(comment={"value": sample_comment}))

                if acquisition_mode != None:
                    f.add_entry(DataExchangeEntry.acquisition(mode={"value": acquisition_mode}))
                if acquisition_comment != None:
                    f.add_entry(DataExchangeEntry.acquisition(comment={"value": acquisition_comment}))

                if sample_position_x != None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_position_x={
                                "value": sample_position_x,
                                "units": "microns",
                                "dataset_opts": {"dtype": "d"},
                            }
                        )
                    )
                if sample_position_y != None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_position_y={
                                "value": sample_position_y,
                                "units": "microns",
                                "dataset_opts": {"dtype": "d"},
                            }
                        )
                    )
                if sample_position_z != None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_position_z={
                                "value": sample_position_z,
                                "units": "microns",
                                "dataset_opts": {"dtype": "d"},
                            }
                        )
                    )
                if sample_image_shift_x != None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_image_shift_x={
                                "value": sample_image_shift_x,
                                "units": "microns",
                                "dataset_opts": {"dtype": "d"},
                            }
                        )
                    )
                if sample_image_shift_y != None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_image_shift_y={
                                "value": sample_image_shift_y,
                                "units": "microns",
                                "dataset_opts": {"dtype": "d"},
                            }
                        )
                    )

                if image_exposure_time != None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            image_exposure_time={
                                "value": image_exposure_time,
                                "units": "s",
                                "dataset_opts": {"dtype": "d"},
                            }
                        )
                    )
                if image_time != None:
                    f.add_entry(DataExchangeEntry.acquisition(image_time={"value": image_time}))
                if image_theta != None:
                    f.add_entry(DataExchangeEntry.acquisition(image_theta={"value": image_theta, "units": "degrees"}))
                f.close()
                self.logger.info("DONE!!!!. Created Data Exchange File [%s]", hdf5_file_name)

        else:
            self.logger.warning("Nothing to do ...")
                 hdf5_file_name,
                 projections_start,
                 projections_end,
                 white_start = white_start,
                 white_end = white_end,
                 dark_start = dark_start,
                 dark_end = dark_end,
                 projections_digits = 6,
                 verbose = False
                 )

 
# Add extra metadata if available

# Open DataExchange file
f = DataExchangeFile(hdf5_file_name, mode='a') 

# Create HDF5 subgroup
# /measurement/instrument
f.add_entry( DataExchangeEntry.instrument(name={'value': 'APS 1-ID Tomography'}) )

# Create HDF5 subgroup
# /measurement/instrument/source
f.add_entry( DataExchangeEntry.source(name={'value': 'Advanced Photon Source'},
                                    date_time={'value': "2012-07-08T15:42:56+0100"},
                                    beamline={'value': "1-ID"},
                                    current={'value': 100.96, 'units': 'mA', 'dataset_opts': {'dtype': 'd'}},
                                    )
)

# Create HDF5 subgroup
Example #26
0
    def xtomo_exchange(xtomo, data, data_white=None, data_dark=None, theta=None, sample_name=None,
                       data_exchange_type=None,
                       hdf5_file_name=None,
                       log='INFO'
                       ):
        """ 
        Write 3-D data to a data-exchange file.

        Parameters
        ----------            
        data : ndarray
            3-D X-ray absorption tomography raw data.
            Size of the dimensions should be:
            [projections, slices, pixels].
            
        data_white, data_dark : ndarray, optional
            3-D white-field/dark-field data. Multiple
            projections are stacked together to obtain
            a 3-D matrix. 2nd and 3rd dimensions should
            be the same as data: [shots, slices, pixels].
            
        theta : ndarray, optional
            Data acquisition angles corresponding
            to each projection.

        data_exchange_type : str
            label defining the type of data contained in the data exchange file;
            for raw tomography data use 'tomography_raw_projections'

        hdf5_file_name : str
            Output file.

        Notes
        -----
        If the output file already exists, nothing is written.

        Examples
        --------
        - Convert a tomographic projection series (raw, dark, white) of TIFF files into data exchange:
            
            >>> from dataexchange import xtomo_importer as dx
            >>> from dataexchange import xtomo_exporter as ex

            >>> file_name = '/local/dataraid/databank/Anka/radios/image_.tif'
            >>> dark_file_name = '/local/dataraid/databank/Anka/darks/image_.tif'
            >>> white_file_name = '/local/dataraid/databank/Anka/flats/image_.tif'
            >>> 
            >>> hdf5_file_name = '/local/dataraid/databank/dataExchange/microCT/xx_yy_Anka.h5'
            >>> 
            >>> projections_start = 0
            >>> projections_end = 3167
            >>> white_start = 0
            >>> white_end = 100
            >>> dark_start = 0
            >>> dark_end = 100
            >>> 
            >>> sample_name = 'Anka'
            >>> 
            >>> mydata = dx.Import()
            >>> # Read series of images
            >>> data, white, dark, theta = mydata.series_of_images(file_name,
            >>>                                                    projections_start = projections_start,
            >>>                                                    projections_end = projections_end,
            >>>                                                    white_file_name = white_file_name,
            >>>                                                    white_start = white_start,
            >>>                                                    white_end = white_end,
            >>>                                                    dark_file_name = dark_file_name,
            >>>                                                    dark_start = dark_start,
            >>>                                                    dark_end = dark_end,
            >>>                                                    sample_name = sample_name,
            >>>                                                    projections_digits = 5,
            >>>                                                    log='INFO'
            >>>                                                    )
            >>> 
            >>> mydata = ex.Export()
            >>> # Create minimal data exchange hdf5 file
            >>> mydata.xtomo_exchange(data = data,
            >>>                       data_white = white,
            >>>                       data_dark = dark,
            >>>                       theta = theta,
            >>>                       hdf5_file_name = hdf5_file_name,
            >>>                       data_exchange_type = 'tomography_raw_projections'
            >>>                       )

        """
     
        if (hdf5_file_name != None):
            if os.path.isfile(hdf5_file_name):
                xtomo.logger.info("Data Exchange file already exists: [%s]. Next time use the Data Exchange reader instead", hdf5_file_name)
            else:
                # Create new folder.
                dirPath = os.path.dirname(hdf5_file_name)
                if not os.path.exists(dirPath):
                    os.makedirs(dirPath)

                # Get the file_name in lower case.
                lFn = hdf5_file_name.lower()

                # Split the string with the delimeter '.'
                end = lFn.split('.')

                # Write the Data Exchange HDF5 file.
                # Open DataExchange file
                f = DataExchangeFile(hdf5_file_name, mode='w') 

                xtomo.logger.info("Creating Data Exchange File [%s]", hdf5_file_name)

                # Create core HDF5 dataset in exchange group for projections_theta_range
                # deep stack of x,y images /exchange/data
                xtomo.logger.info("Adding projections to Data Exchange File [%s]", hdf5_file_name)
                f.add_entry( DataExchangeEntry.data(data={'value': data, 'units':'counts', 'description': 'transmission', 'axes':'theta:y:x' }))
#                f.add_entry( DataExchangeEntry.data(data={'value': data, 'units':'counts', 'description': 'transmission', 'axes':'theta:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                if (theta != None):
                    f.add_entry( DataExchangeEntry.data(theta={'value': theta, 'units':'degrees'}))
                    xtomo.logger.info("Adding theta to Data Exchange File [%s]", hdf5_file_name)
                if (data_dark != None):
                    xtomo.logger.info("Adding dark fields to  Data Exchange File [%s]", hdf5_file_name)
                    f.add_entry( DataExchangeEntry.data(data_dark={'value': data_dark, 'units':'counts', 'axes':'theta_dark:y:x' }))
#                    f.add_entry( DataExchangeEntry.data(data_dark={'value': data_dark, 'units':'counts', 'axes':'theta_dark:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                if (data_white != None):
                    xtomo.logger.info("Adding white fields to  Data Exchange File [%s]", hdf5_file_name)
                    f.add_entry( DataExchangeEntry.data(data_white={'value': data_white, 'units':'counts', 'axes':'theta_white:y:x' }))
#                    f.add_entry( DataExchangeEntry.data(data_white={'value': data_white, 'units':'counts', 'axes':'theta_white:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                if (data_exchange_type != None):
                    xtomo.logger.info("Adding data type to  Data Exchange File [%s]", hdf5_file_name)
                    f.add_entry( DataExchangeEntry.data(title={'value': data_exchange_type}))
                if (sample_name == None):
                    sample_name = end[0]
                    f.add_entry( DataExchangeEntry.sample( name={'value':sample_name}, description={'value':'Sample name was assigned by the HDF5 converter and based on the HDF5 file name'}))
                else:
                    f.add_entry( DataExchangeEntry.sample( name={'value':sample_name}, description={'value':'Sample name was read from the user log file'}))
                f.close()
                xtomo.logger.info("DONE!!!!. Created Data Exchange File [%s]", hdf5_file_name)
        else:
            xtomo.logger.info("Nothing to do ...")
    def xtomo_exchange(self,
                       data,
                       data_white=None,
                       data_dark=None,
                       theta=None,
                       data_exchange_type=None,
                       source_name=None,
                       source_mode=None,
                       source_datetime=None,
                       beamline=None,
                       energy=None,
                       current=None,
                       actual_pixel_size=None,
                       experimenter_name=None,
                       experimenter_affiliation=None,
                       experimenter_email=None,
                       instrument_comment=None,
                       sample_name=None,
                       sample_comment=None,
                       acquisition_mode=None,
                       acquisition_comment=None,
                       sample_position_x=None,
                       sample_position_y=None,
                       sample_position_z=None,
                       sample_image_shift_x=None,
                       sample_image_shift_y=None,
                       hdf5_file_name=None,
                       axes='theta:y:x',
                       log='INFO'):
        """ 
        Write 3-D data to a data-exchange file.

        Parameters
        ----------
        data : ndarray
            3-D X-ray absorption tomography raw data.
            Size of the dimensions should be:
            [projections, slices, pixels].
            
        data_white, data_dark : ndarray, optional
            3-D white-field/dark-field data. Multiple
            projections are stacked together to obtain
            a 3-D matrix. 2nd and 3rd dimensions should
            be the same as data: [shots, slices, pixels].
            
        theta : ndarray, optional
            Data acquisition angles corresponding
            to each projection.

        data_exchange_type : str, optional
            label defining the type of data contained in the data exchange file;
            for raw tomography data use 'tomography_raw_projections'

        source_name, source_mode, source_datetime : str, optional
            label defining the source name, operation mode and date/time when these values were taken

        beamline : str, optional
            label defining the beamline name

        energy, current : float, optional
            X-ray energy and beam current

        actual_pixel_size : float, optional
            pixel size on the sample plane
 
        experimenter_name, experimenter_affiliation, experimenter_email : str, optional
            user name, affiliation and e-mail address

        instrument_comment : str, optional
            instrument comment

        sample_name, sample_comment : str, optional
            sample name and comment
        
        acquisition_mode, acquisition_comment : str, optional
            acquisition mode and comment

        hdf5_file_name : str
            Output file.

        Notes
        -----
        If the output file already exists, nothing is written.

        Examples
        --------
        - Convert a tomographic projection series (raw, dark, white) of TIFF files into data exchange:
            
            >>> import dataexchange

            >>> file_name = '/local/dataraid/databank/Anka/radios/image_.tif'
            >>> dark_file_name = '/local/dataraid/databank/Anka/darks/image_.tif'
            >>> white_file_name = '/local/dataraid/databank/Anka/flats/image_.tif'

            >>> hdf5_file_name = '/local/dataraid/databank/dataExchange/tmp/Anka.h5'

            >>> projections_start = 0
            >>> projections_end = 3167
            >>> white_start = 0
            >>> white_end = 100
            >>> dark_start = 0
            >>> dark_end = 100

            >>> sample_name = 'Anka'
            >>> 
            >>> # Read raw data
            >>> read = dataexchange.Import()
            >>> data, white, dark, theta = read.xtomo_raw(file_name,
            >>>                                                    projections_start = projections_start,
            >>>                                                    projections_end = projections_end,
            >>>                                                    white_file_name = white_file_name,
            >>>                                                    white_start = white_start,
            >>>                                                    white_end = white_end,
            >>>                                                    dark_file_name = dark_file_name,
            >>>                                                    dark_start = dark_start,
            >>>                                                    dark_end = dark_end,
            >>>                                                    projections_digits = 5,
            >>>                                                    log='INFO'
            >>>                                                    )
            >>>
            >>> # Save data
            >>> write = dataexchange.Export()
            >>> write.xtomo_exchange(data = data,
            >>>                       data_white = white,
            >>>                       data_dark = dark,
            >>>                       theta = theta,
            >>>                       hdf5_file_name = hdf5_file_name,
            >>>                       data_exchange_type = 'tomography_raw_projections',
            >>>                       sample_name = sample_name
            >>>                       )

        """

        if (hdf5_file_name != None):
            if os.path.isfile(hdf5_file_name):
                self.logger.error("Data Exchange file: [%s] already exists",
                                  hdf5_file_name)
            else:
                # Create new folder.
                dirPath = os.path.dirname(hdf5_file_name)
                if not os.path.exists(dirPath):
                    os.makedirs(dirPath)

                # Get the file_name in lower case.
                lFn = hdf5_file_name.lower()

                # Split the string with the delimeter '.'
                end = lFn.split('.')

                # Write the Data Exchange HDF5 file.
                # Open DataExchange file
                f = DataExchangeFile(hdf5_file_name, mode='w')

                self.logger.info("Creating Data Exchange File [%s]",
                                 hdf5_file_name)

                # Create core HDF5 dataset in exchange group for projections_theta_range
                # deep stack of x,y images /exchange/data
                self.logger.info(
                    "Adding projections to Data Exchange File [%s]",
                    hdf5_file_name)
                f.add_entry(
                    DataExchangeEntry.data(
                        data={
                            'value': data,
                            'units': 'counts',
                            'description': 'transmission',
                            'axes': axes
                        }))
                #                f.add_entry( DataExchangeEntry.data(data={'value': data, 'units':'counts', 'description': 'transmission', 'axes':'theta:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                if (theta != None):
                    f.add_entry(
                        DataExchangeEntry.data(theta={
                            'value': theta,
                            'units': 'degrees'
                        }))
                    self.logger.info("Adding theta to Data Exchange File [%s]",
                                     hdf5_file_name)
                else:
                    self.logger.warning("theta is not defined")
                if (data_dark != None):
                    self.logger.info(
                        "Adding dark fields to  Data Exchange File [%s]",
                        hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            data_dark={
                                'value': data_dark,
                                'units': 'counts',
                                'axes': 'theta_dark:y:x'
                            }))
#                    f.add_entry( DataExchangeEntry.data(data_dark={'value': data_dark, 'units':'counts', 'axes':'theta_dark:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                else:
                    self.logger.warning("data dark is not defined")
                if (data_white != None):
                    self.logger.info(
                        "Adding white fields to  Data Exchange File [%s]",
                        hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            data_white={
                                'value': data_white,
                                'units': 'counts',
                                'axes': 'theta_white:y:x'
                            }))
#                    f.add_entry( DataExchangeEntry.data(data_white={'value': data_white, 'units':'counts', 'axes':'theta_white:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
                else:
                    self.logger.warning("data white is not defined")
                if data_exchange_type is not None:
                    self.logger.info(
                        "Adding data type to Data Exchange File [%s]",
                        hdf5_file_name)
                    f.add_entry(
                        DataExchangeEntry.data(
                            title={'value': data_exchange_type}))

                if source_name is not None:
                    f.add_entry(
                        DataExchangeEntry.source(name={'value': source_name}))
                if source_mode is not None:
                    f.add_entry(
                        DataExchangeEntry.source(mode={'value': source_mode}))
                if source_datetime is not None:
                    f.add_entry(
                        DataExchangeEntry.source(
                            datetime={'value': source_datetime}))

                if beamline is not None:
                    f.add_entry(
                        DataExchangeEntry.source(beamline={'value': beamline}))
                if energy is not None:
                    f.add_entry(
                        DataExchangeEntry.monochromator(
                            energy={
                                'value': energy,
                                'units': 'keV',
                                'dataset_opts': {
                                    'dtype': 'd'
                                }
                            }))
                if current is not None:
                    f.add_entry(
                        DataExchangeEntry.source(
                            current={
                                'value': current,
                                'units': 'mA',
                                'dataset_opts': {
                                    'dtype': 'd'
                                }
                            }))

                if actual_pixel_size is not None:
                    f.add_entry(
                        DataExchangeEntry.detector(
                            actual_pixel_size_x={
                                'value': actual_pixel_size,
                                'units': 'microns',
                                'dataset_opts': {'dtype': 'd'}
                            },
                            actual_pixel_size_y={
                                'value': actual_pixel_size,
                                'units': 'microns',
                                'dataset_opts': {'dtype': 'd'}
                            }))

                if experimenter_name is not None:
                    f.add_entry(
                        DataExchangeEntry.experimenter(
                            name={'value': experimenter_name}))
                if experimenter_affiliation is not None:
                    f.add_entry(
                        DataExchangeEntry.experimenter(
                            affiliation={'value': experimenter_affiliation}))
                if experimenter_email is not None:
                    f.add_entry(
                        DataExchangeEntry.experimenter(
                            email={'value': experimenter_email}))

                if instrument_comment is not None:
                    f.add_entry(
                        DataExchangeEntry.instrument(
                            comment={'value': instrument_comment}))
                if sample_name is None:
                    sample_name = end[0]
                    f.add_entry(
                        DataExchangeEntry.sample(
                            name={'value': sample_name},
                            description={
                                'value':
                                'Sample name assigned by the HDF5 converter and based on the HDF5 file name'
                            }))
                else:
                    f.add_entry(
                        DataExchangeEntry.sample(name={'value': sample_name}))
                if sample_comment is not None:
                    f.add_entry(
                        DataExchangeEntry.sample(
                            comment={'value': sample_comment}))

                if acquisition_mode is not None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            mode={'value': acquisition_mode}))
                if acquisition_comment is not None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            comment={'value': acquisition_comment}))

                if sample_position_x is not None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_position_x={
                                'value': sample_position_x,
                                'units': 'microns',
                                'dataset_opts': {
                                    'dtype': 'd'
                                }
                            }))
                if sample_position_y is not None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_position_y={
                                'value': sample_position_y,
                                'units': 'microns',
                                'dataset_opts': {
                                    'dtype': 'd'
                                }
                            }))
                if sample_position_z is not None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_position_z={
                                'value': sample_position_z,
                                'units': 'microns',
                                'dataset_opts': {
                                    'dtype': 'd'
                                }
                            }))

                if sample_image_shift_x is not None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_image_shift_x={
                                'value': sample_image_shift_x,
                                'units': 'microns',
                                'dataset_opts': {
                                    'dtype': 'd'
                                }
                            }))
                if sample_image_shift_y is not None:
                    f.add_entry(
                        DataExchangeEntry.acquisition(
                            sample_image_shift_y={
                                'value': sample_image_shift_y,
                                'units': 'microns',
                                'dataset_opts': {
                                    'dtype': 'd'
                                }
                            }))

                f.close()
                self.logger.info("DONE!!!!. Created Data Exchange File [%s]",
                                 hdf5_file_name)

        else:
            self.logger.warning("Nothing to do ...")
Example #28
def write_example(filename):

    # --- prepare data ---

    # Generate fake raw data
    rawdata = np.ones(180 * 256 * 256, np.uint16).reshape(180, 256, 256)

    # x, y and z ranges
    x = np.arange(128)
    y = np.arange(128)
    z = np.arange(180)

    # --- create file ---

    # Open HDF5 file
    f = DataExchangeFile(filename, mode='w')

    # Create core HDF5 dataset in exchange group for 180 deep stack of x,y
    # images /exchange/data
    f.add_entry(
        DataExchangeEntry.data(
            data={
                'value': rawdata,
                'units': 'counts',
                'description': 'Projection Data',
                'dataset_opts': {
                    'compression': 'gzip',
                    'compression_opts': 4
                }
            }))

    # Create HDF5 subgroup
    # /measurement/sample
    f.add_entry(
        DataExchangeEntry.sample(name={'value': 'Minivirus'},
                                 temperature={
                                     'value': 200.0,
                                     'units': 'celsius',
                                     'dataset_opts': {
                                         'dtype': 'd'
                                     }
                                 }))

    # Create HDF5 subgroup
    # /measurement/instrument
    f.add_entry(DataExchangeEntry.instrument(name={'value': 'APS 2-BM'}))

    # Create HDF5 subgroup
    # /measurement/instrument/monochromator
    f.add_entry(
        DataExchangeEntry.monochromator(name={'value': 'DMM'},
                                        energy={
                                            'value': 10.00,
                                            'units': 'keV',
                                            'dataset_opts': {
                                                'dtype': 'd'
                                            }
                                        }))

    # --- All done ---
    f.close()
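
A minimal way to exercise write_example might look like the sketch below; the output path is a placeholder, and the read-back check assumes the resulting file is an ordinary HDF5 file that h5py can open.

# Illustrative driver for write_example(); the output path is hypothetical.
if __name__ == '__main__':
    import h5py  # assumes h5py is available

    out_file = '/tmp/write_example.h5'
    write_example(out_file)

    # Read back the projection stack written to /exchange/data.
    with h5py.File(out_file, 'r') as check:
        print(check['/exchange/data'].shape)  # expected: (180, 256, 256)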
Example #29
    def nexus(self, file_name,
                  hdf5_file_name,
                  projections_start=None,
                  projections_end=None,
                  projections_step=None,
                  slices_start=None,
                  slices_end=None,
                  slices_step=None,
                  pixels_start=None,
                  pixels_end=None,
                  pixels_step=None,
                  white_start=None,
                  white_end=None,
                  dark_start=None,
                  dark_end=None,
                  array_name='entry/instrument/detector/data',
                  sample_name=None,
                  dtype='float32'):
        """ Read Data Exchange HDF5 file.

        Parameters
        ----------
        file_name : str
            Input NeXus HDF5 file.

        hdf5_file_name : str
            Output Data Exchange HDF5 file.

        projections_start, projections_end, projections_step : scalar, optional
            Values of the start, end and step of the projections to
            be used for slicing for the whole ndarray.

        slices_start, slices_end, slices_step : scalar, optional
            Values of the start, end and step of the slices to
            be used for slicing for the whole ndarray.

        pixels_start, pixels_end, pixels_step : scalar, optional
            Values of the start, end and step of the pixels to
            be used for slicing for the whole ndarray.

        white_start, white_end : scalar, optional
            Values of the start and end of the slicing
            for the whole white field shots.

        dark_start, dark_end : scalar, optional
            Values of the start and end of the slicing
            for the whole dark field shots.

        dtype : str, optional
            Desired output data type.
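
        Examples
        --------
        Illustrative call only; mydata stands for an instance of the
        converter class, and the file paths and ranges below are
        placeholders rather than values from a real dataset.

            >>> mydata.nexus('/local/data/raw_scan.hdf',
            >>>              hdf5_file_name='/local/data/raw_scan_exchange.h5',
            >>>              projections_start=0,
            >>>              projections_end=180,
            >>>              white_start=0,
            >>>              white_end=10,
            >>>              dark_start=0,
            >>>              dark_end=10,
            >>>              sample_name='test_sample'
            >>>              )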
        """
        print "Reading NeXus file ..."
        self.file_name = file_name

        # Initialize f to null.
        f = None

        # Get the file_name in lower case.
        lFn = file_name.lower()

        # Split the string with the delimiter '.'
        end = lFn.split('.')

        # If the string has an extension.
        if len(end) > 1:
            # Check for a supported HDF5/NeXus extension.
            if end[-1] in ('h5', 'hdf'):
                f = Hdf5()

        # If a reader was created, call read on it.
        if f is not None:
            # Read data from exchange group.
            self.data = f.read(file_name,
                                array_name=array_name,
                                x_start=projections_start,
                                x_end=projections_end,
                                x_step=projections_step,
                                y_start=slices_start,
                                y_end=slices_end,
                                y_step=slices_step,
                                z_start=pixels_start,
                                z_end=pixels_end,
                                z_step=pixels_step).astype(dtype)

            # Read white field data from exchange group.
            print white_start, white_end, slices_start, slices_end
            self.white = f.read(file_name,
                                array_name=array_name,
                                x_start=white_start,
                                x_end=white_end,
                                y_start=slices_start,
                                y_end=slices_end,
                                y_step=slices_step,
                                z_start=pixels_start,
                                z_end=pixels_end,
                                z_step=pixels_step).astype(dtype)

            # Read dark field data from exchange group.
            self.dark = f.read(file_name,
                                array_name=array_name,
                                x_start=dark_start,
                                x_end=dark_end,
                                y_start=slices_start,
                                y_end=slices_end,
                                y_step=slices_step,
                                z_start=pixels_start,
                                z_end=pixels_end,
                                z_step=pixels_step).astype(dtype)

            # Assign the rotation center.
            self.center = self.data.shape[2] / 2

            # Write HDF5 file.
            # Open DataExchange file
            f = DataExchangeFile(hdf5_file_name, mode='w') 

            logger.info("Writing the HDF5 file")
            # Create core HDF5 dataset in exchange group for projections_theta_range
            # deep stack of x,y images /exchange/data
            f.add_entry( DataExchangeEntry.data(data={'value': self.data, 'units':'counts', 'description': 'transmission', 'axes':'theta:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
            f.add_entry( DataExchangeEntry.data(theta={'value': self.theta, 'units':'degrees'}))
            f.add_entry( DataExchangeEntry.data(data_dark={'value': self.dark, 'units':'counts', 'axes':'theta_dark:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
            f.add_entry( DataExchangeEntry.data(data_white={'value': self.white, 'units':'counts', 'axes':'theta_white:y:x', 'dataset_opts':  {'compression': 'gzip', 'compression_opts': 4} }))
            f.add_entry( DataExchangeEntry.data(title={'value': 'tomography_raw_projections'}))
            logger.info("Sample name = %s", sample_name)
            if sample_name is None:
                sample_name = end[0]
                f.add_entry( DataExchangeEntry.sample( name={'value':sample_name}, description={'value':'Sample name was assigned by the HDF5 converter and based on the HDF5 file name'}))
                logger.info("Assigned default file name: %s", end[0])
            else:
                f.add_entry( DataExchangeEntry.sample( name={'value':sample_name}, description={'value':'Sample name was read from the user log file'}))
                logger.info("Assigned file name from user log")                    
            
            f.close()

        else:
            print 'Unsupported file.'
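
In nexus() above, the projections/slices/pixels ranges are handed to the reader as x/y/z start, end and step values. As a hedged sketch only (assuming the reader applies them as ordinary numpy-style slices on a projection-first array, which is what the parameter names and the shape[2]-based rotation center suggest), the effect of the projection range alone is equivalent to:

# Sketch only: the array shape and range values are hypothetical.
import numpy as np

raw = np.zeros((100, 64, 80), dtype='uint16')  # (projection, slice, pixel)
projections_start, projections_end, projections_step = 0, 90, 3
subset = raw[projections_start:projections_end:projections_step, :, :]
print(subset.shape)  # (30, 64, 80)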