Example 1
def thetaTwothetaToAlphaIAlphaF(data):
    """ Calculates the angle in (alpha_i) and angle out (alpha_f) of each datapoint
    and bins it accordingly.  The output grid is constructed to cover
    the whole angular range of the dataset.
    
    **Inputs**
    
    data (ospec2d): input data
    
    **Returns**
    
    output (ospec2d): output data rebinned into alpha_i, alpha_f
    
    2016-04-01 Brian Maranville
    """
    
    theta_axis = data._getAxis('theta')
    twotheta_axis = data._getAxis('twotheta')
 
    th_array = data.axisValues('theta').copy()
    twotheta_array = data.axisValues('twotheta').copy()
    
    two_theta_step = twotheta_array[1] - twotheta_array[0]
    theta_step = th_array[1] - th_array[0]
    
    af_max = (twotheta_array.max() - th_array.min())
    af_min = (twotheta_array.min() - th_array.max())
    alpha_i = th_array.copy()
    alpha_f = arange(af_min, af_max, two_theta_step)
    
    info = [{"name": "alpha_i", "units": "degrees", "values": th_array.copy() },
            {"name": "alpha_f", "units": "degrees", "values": alpha_f.copy() },]
    old_info = data.infoCopy()
    info.append(old_info[2]) # column information!
    info.append(old_info[3]) # creation story!
    output_grid = MetaArray(zeros((th_array.shape[0], alpha_f.shape[0], data.shape[-1])), info=info)
    
    if theta_axis < twotheta_axis: # then theta is first: add a dimension at the end
        alpha_i.shape = alpha_i.shape + (1,)
        ai_out = indices((th_array.shape[0], twotheta_array.shape[0]))[0]
        twotheta_array.shape = (1,) + twotheta_array.shape
    else:       
        alpha_i.shape = (1,) + alpha_i.shape
        ai_out = indices((twotheta_array.shape[0], th_array.shape[0]))[1]
        twotheta_array.shape = twotheta_array.shape + (1,)
    
    af_out = twotheta_array - alpha_i
    
    # getting values from output grid:
    outgrid_info = output_grid.infoCopy()
    numcols = len(outgrid_info[2]['cols'])
    #target_ai = ((ai_out - th_array[0]) / theta_step).flatten().astype(int).tolist()
    target_ai = ai_out.flatten().astype(int).tolist()
    #return target_qx, qxOut
    target_af = ((af_out - af_min) / two_theta_step).flatten().astype(int).tolist()
    
    for i, col in enumerate(outgrid_info[2]['cols']):
        values_to_bin = data[:,:,col['name']].view(ndarray).flatten().tolist()
        print len(target_ai), len(target_af), len(values_to_bin)
        outshape = (output_grid.shape[0], output_grid.shape[1])
        hist2d, xedges, yedges = histogram2d(target_ai, target_af, bins = (outshape[0],outshape[1]), range=((0,outshape[0]),(0,outshape[1])), weights=values_to_bin)
        output_grid[:,:,col['name']] += hist2d
 
    cols = outgrid_info[2]['cols']
    data_cols = [col['name'] for col in cols if col['name'].startswith('counts')]
    monitor_cols = [col['name'] for col in cols if col['name'].startswith('monitor')]
    # just take the first one...
    if len(monitor_cols) > 0:
        monitor_col = monitor_cols[0]
        data_missing_mask = (output_grid[:,:,monitor_col] == 0)
        for dc in data_cols:
            output_grid[:,:,dc].view(ndarray)[data_missing_mask] = NaN
    
    #extra info changed
    output_grid._info[-1] = data._info[-1].copy()
    return output_grid
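The essential step above is that alpha_i is just theta, alpha_f is twotheta - theta, and the rebinning is done by handing integer bin indices to numpy.histogram2d with the data values as weights. Below is a minimal, self-contained sketch of that step using plain NumPy arrays instead of MetaArray; all array names, sizes and values are illustrative, not taken from the code above.

import numpy as np

# illustrative (theta, twotheta) grid and counts -- not real instrument data
theta = np.linspace(0.0, 2.0, 41)                 # sample angle, degrees
twotheta = np.linspace(0.0, 4.0, 81)              # detector angle, degrees
counts = np.random.poisson(5.0, size=(theta.size, twotheta.size)).astype(float)

dtt = twotheta[1] - twotheta[0]
af_min = twotheta.min() - theta.max()             # smallest alpha_f = twotheta - theta
af_max = twotheta.max() - theta.min()             # largest alpha_f
alpha_f_axis = np.arange(af_min, af_max, dtt)

# integer output-bin coordinates for every input pixel
ai_index = np.indices((theta.size, twotheta.size))[0]              # alpha_i bin = theta index
af_index = ((twotheta[None, :] - theta[:, None]) - af_min) / dtt   # alpha_f bin (fractional)

rebinned, _, _ = np.histogram2d(
    ai_index.ravel(), af_index.astype(int).ravel(),
    bins=(theta.size, alpha_f_axis.size),
    range=((0, theta.size), (0, alpha_f_axis.size)),
    weights=counts.ravel())

# rebinned.shape == (len(theta), len(alpha_f_axis)); each cell holds the summed counts
# that landed in that (alpha_i, alpha_f) bin, like output_grid[:,:,'counts'] above.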
Example 2
def loadMAGIKPSD_helper(file_obj, name, path, collapse=True, collapse_axis='y', auto_PolState=False, PolState='', flip=True, transpose=True):
    lookup = {"DOWN_DOWN":"_down_down", "UP_DOWN":"_up_down", "DOWN_UP":"_down_up", "UP_UP":"_up_up", "entry": ""}
    #nx_entries = LoadMAGIKPSD.load_entries(name, fid, entries=entries)
    #fid.close()
    
    #if not (len(file_obj.detector.counts.shape) == 2):
        # not a 2D object!
    #    return
    for entryname, entry in file_obj.items():
        active_slice = slice(None, DETECTOR_ACTIVE[0], DETECTOR_ACTIVE[1])
        counts_value = entry['DAS_logs/areaDetector/counts'].value[:, 1:DETECTOR_ACTIVE[0]+1, :DETECTOR_ACTIVE[1]]
        dims = counts_value.shape
        ndims = len(dims)
        if auto_PolState:
            PolState = lookup.get(entryname, "")
        # force PolState to a regularized version:
        if not PolState in lookup.values():
            PolState = ''
        #datalen = file_obj.detector.counts.shape[0]
        if ndims == 2:
            if DEBUG: print "2d"
            ypixels = dims[0]
            xpixels = dims[1]
        elif ndims >= 3:
            if DEBUG: print "3d"
            frames = dims[0]
            xpixels = dims[1]
            ypixels = dims[2]
        

        # doesn't really matter; changing so that each keyword (whether it took the default value
        # provided or not) will be defined
        #    if not PolState == '':
        #        creation_story += ", PolState='{0}'".format(PolState)
        # creation_story += ")" 
    
    
        if ndims == 2: # one of the dimensions has been collapsed.
            info = []     
            info.append({"name": "xpixel", "units": "pixels", "values": arange(xpixels) }) # reverse order
            samp_angle = entry['DAS_logs/sampleAngle/softPosition'].value
            det_angle = entry['DAS_logs/detectorAngle/softPosition'].value
            if samp_angle.size > 1:
                yaxis = entry['DAS_logs/sampleAngle/softPosition']
                yaxisname = "theta"                    
            elif det_angle.size > 1:
                yaxis = entry['DAS_logs/detectorAngle/softPosition']
                yaxisname = "det_angle"
            else:
                # need to find the one that's moving...
                yaxis = entry['data/x']
                yaxisname = yaxis.path
            yaxisunits = yaxis.attrs['units']
            yaxisvalues = yaxis.value
            info.append({"name": yaxisname, "units": yaxisunits, "values": yaxisvalues})
            info.extend([
                    {"name": "Measurements", "cols": [
                            {"name": "counts"},
                            {"name": "pixels"},
                            {"name": "monitor"},
                            {"name": "count_time"}]},
                    {"PolState": PolState, "filename": name, "start_datetime": entry['start_time'].value[0], "friendly_name": entry['DAS_logs/sample/name'].value[0],
                     "entry": entryname, "path":path, "det_angle":entry['DAS_logs/detectorAngle/softPosition'].value,
                     "theta": entry['DAS_logs/sampleAngle/softPosition'].value}]
                )
            data_array = zeros((xpixels, ypixels, 4))
            mon =  entry['DAS_logs']['counter']['liveMonitor'].value
            count_time = entry['DAS_logs']['counter']['liveTime'].value
            if ndims == 2:
                mon.shape = (1,) + mon.shape # broadcast the monitor over the other dimension
                count_time.shape = (1,) + count_time.shape
            counts = counts_value
            if transpose == True: counts = counts.swapaxes(0,1)
            if flip == True: counts = flipud(counts)
            data_array[..., 0] = counts
            #data_array[..., 0] = file_obj.detector.counts
            data_array[..., 1] = 1
            data_array[..., 2] = mon
            data_array[..., 3] = count_time
            # data_array[:,:,4]... I wish!!!  Have to do by hand.
            data = MetaArray(data_array, dtype='float', info=info)
            data.friendly_name = name # goes away on dumps/loads... just for initial object.
            output = [data]
        
        elif ndims == 3: # then it's an unsummed collection of detector shots.  Should be one sample and detector angle per frame
            if collapse == True:
                info = []
                xaxis = "xpixel" if collapse_axis == 'y' else "ypixel"
                xdim = xpixels if collapse_axis == 'y' else ypixels
                xaxisvalues = arange(xdim)
                info.append({"name": xaxis, "units": "pixels", "values": xaxisvalues }) # reverse order
                samp_angle = entry['DAS_logs/sampleAngle/softPosition'].value
                det_angle = entry['DAS_logs/detectorAngle/softPosition'].value
                if samp_angle.size > 1:
                    yaxis = entry['DAS_logs/sampleAngle/softPosition']
                    yaxisname = "theta"                    
                elif det_angle.size > 1:
                    yaxis = entry['DAS_logs/detectorAngle/softPosition']
                    yaxisname = "det_angle"
                else:
                    # need to find the one that's moving...
                    yaxis = entry['data/x']
                    yaxisname = yaxis.path
                yaxisunits = yaxis.attrs['units']
                yaxisvalues = yaxis.value
                info.append({"name": yaxisname, "units": yaxisunits, "values": yaxisvalues})
                info.extend([
                        {"name": "Measurements", "cols": [
                                {"name": "counts"},
                                {"name": "pixels"},
                                {"name": "monitor"},
                                {"name": "count_time"}]},
                        {"PolState": PolState, "start_datetime": entry['start_time'].value[0], "path":path, 
                         "det_angle": det_angle.tolist(),
                         "theta": samp_angle.tolist(), 
                         "friendly_name": entry['DAS_logs/sample/name'].value[0], "entry": entryname}]
                    )
                data_array = zeros((xdim, frames, 4))
                mon =  entry['DAS_logs']['counter']['liveMonitor'].value
                count_time = entry['DAS_logs']['counter']['liveTime'].value
                if ndims == 3:
                    mon.shape = (1,) + mon.shape # broadcast the monitor over the other dimension
                    count_time.shape = (1,) + count_time.shape
                axis_to_sum = 2 if collapse_axis == 'y' else 1
                counts = numpy.sum(counts_value, axis=axis_to_sum)
                if transpose == True: counts = counts.swapaxes(0,1)
                if flip == True: counts = flipud(counts)
                data_array[..., 0] = counts
                #data_array[..., 0] = file_obj.detector.counts
                data_array[..., 1] = 1
                data_array[..., 2] = mon
                data_array[..., 3] = count_time
                # data_array[:,:,4]... I wish!!!  Have to do by hand.
                data = MetaArray(data_array, dtype='float', info=info)
                data.friendly_name = name # goes away on dumps/loads... just for initial object.
                output = [data]
            else: # make separate frames           
                infos = []
                data = []
                samp_angle =  entry['DAS_logs/sampleAngle/softPosition'].value.astype('float')
                if samp_angle.shape[0] == 1:
                    samp_angle = numpy.ones((frames,)) * samp_angle
                det_angle = entry['DAS_logs/detectorAngle/softPosition'].value.astype('float')
                if det_angle.shape[0] == 1:
                    det_angle = numpy.ones((frames,)) * det_angle
                count_time = entry['DAS_logs/counter/liveTime'].value
                if count_time.shape[0] == 1:
                    count_time = numpy.ones((frames,)) * count_time
                mon =  entry['DAS_logs/counter/liveMonitor'].value
                if mon.shape[0] == 1:
                    mon = numpy.ones((frames,)) * mon                
                for i in range(frames):
                    info = []
                    info.append({"name": "xpixel", "units": "pixels", "values": range(xpixels) })
                    info.append({"name": "ypixel", "units": "pixels", "values": range(ypixels) })
                    info.extend([
                        {"name": "Measurements", "cols": [
                                {"name": "counts"},
                                {"name": "pixels"},
                                {"name": "monitor"},
                                {"name": "count_time"}]},
                        {"PolState": PolState, "start_datetime": entry['start_time'].value[0], "friendly_name": entry['DAS_logs/sample/name'].value[0],
                         "entry": entryname, "path":path, "samp_angle": samp_angle[i], "det_angle": det_angle[i]}]
                    )
                    data_array = zeros((xpixels, ypixels, 4))
                    counts = counts_value[i]
                    if flip == True: counts = flipud(counts) 
                    data_array[..., 0] = counts
                    data_array[..., 1] = 1
                    data_array[..., 2] = mon[i]
                    data_array[..., 3] = count_time[i]
                    # data_array[:,:,4]... I wish!!!  Have to do by hand.
                    subdata = MetaArray(data_array, dtype='float', info=info)
                    subdata.friendly_name = name + ("_%d" % i) # goes away on dumps/loads... just for initial object.
                    data.append(subdata)
                    output = data
    return output
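The 3D branch above reduces a stack of detector frames to a 2D map by summing over one pixel axis and broadcasting the per-frame monitor and count-time vectors across the remaining pixel axis. Here is a minimal sketch of just that collapse-and-broadcast step, with made-up shapes (50 frames from a hypothetical 608 x 512 detector):

import numpy as np

frames, xpixels, ypixels = 50, 608, 512           # illustrative sizes only
counts_3d = np.random.poisson(1.0, size=(frames, xpixels, ypixels)).astype(float)
monitor = np.random.poisson(1e5, size=frames).astype(float)   # one monitor count per frame
count_time = np.full(frames, 2.0)                             # seconds per frame

collapsed = counts_3d.sum(axis=2)        # sum over y: shape (frames, xpixels)
collapsed = collapsed.swapaxes(0, 1)     # shape (xpixels, frames), matching the MetaArray layout

# four-column data block used by all the loaders in these examples
data_array = np.zeros((xpixels, frames, 4))
data_array[..., 0] = collapsed           # counts
data_array[..., 1] = 1                   # pixel contribution
data_array[..., 2] = monitor[None, :]    # monitor, broadcast over the pixel axis
data_array[..., 3] = count_time[None, :] # count time, broadcast over the pixel axis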
Example 3
def thetaTwothetaToQxQz(data, output_grid, wavelength=5.0, qxmin=-0.003, qxmax=0.003, qxbins=101, qzmin=0.0001, qzmax=0.1, qzbins=101):
    """ Calculates the Qx, Qz values of each datapoint
    and bins it accordingly.  If no output grid is specified,
    one is created that covers the whole Q range of the dataset.
    
    **Inputs**
    
    data (ospec2d): input data
    
    output_grid (ospec2d): empty data object with axes defined (optional)
    
    wavelength (float): override wavelength in data file
    
    qxmin (float): lower bound of Qx range in rebinning
    
    qxmax (float): upper bound of Qx range in rebinning
    
    qxbins (int): number of bins to subdivide the range between qxmin and qxmax
    
    qzmin (float): lower bound of Qz range in rebinning
    
    qzmax (float): upper bound of Qz range in rebinning
    
    qzbins (int): number of bins to subdivide the range between qzmin and qzmax
    
    **Returns**
    
    output (ospec2d): output data rebinned into Qx, Qz
    
    2016-04-01 Brian Maranville
    """
    print "output grid: ", output_grid
    if output_grid is None:
        info = [{"name": "qx", "units": "inv. Angstroms", "values": linspace(qxmin, qxmax, qxbins) },
            {"name": "qz", "units": "inv. Angstroms", "values": linspace(qzmin, qzmax, qzbins) },]
        old_info = data.infoCopy()
        info.append(old_info[2]) # column information!
        info.append(old_info[3]) # creation story!
        output_grid = MetaArray(zeros((qxbins, qzbins, data.shape[-1])), info=info)
    else:
        outgrid_info = deepcopy(output_grid._info) # take axes and creation story from emptyqxqz...
        outgrid_info[2] = deepcopy(data._info[2]) # take column number and names from dataset
        output_grid = MetaArray(zeros((output_grid.shape[0], output_grid.shape[1], data.shape[2])), info=outgrid_info)
    
    theta_axis = data._getAxis('theta')
    twotheta_axis = data._getAxis('twotheta')
    
    qLength = 2.0 * pi / wavelength
    th_array = data.axisValues('theta').copy()
    twotheta_array = data.axisValues('twotheta').copy()
    
    if theta_axis < twotheta_axis: # then theta is first: add a dimension at the end
        th_array.shape = th_array.shape + (1,)
        twotheta_array.shape = (1,) + twotheta_array.shape
    else:
        twotheta_array.shape = twotheta_array.shape + (1,)
        th_array.shape = (1,) + th_array.shape
        
    tilt_array = th_array - (twotheta_array / 2.0)
    qxOut = 2.0 * qLength * sin((pi / 180.0) * (twotheta_array / 2.0)) * sin(pi * tilt_array / 180.0)
    qzOut = 2.0 * qLength * sin((pi / 180.0) * (twotheta_array / 2.0)) * cos(pi * tilt_array / 180.0)
    
    # getting values from output grid:
    outgrid_info = output_grid.infoCopy()
    numcols = len(outgrid_info[2]['cols'])
    qx_array = output_grid.axisValues('qx')
    dqx = qx_array[1] - qx_array[0]
    qz_array = output_grid.axisValues('qz')
    dqz = qz_array[1] - qz_array[0]
    #framed_array = zeros((qz_array.shape[0] + 2, qx_array.shape[0] + 2, numcols))
    target_qx = ((qxOut - qx_array[0]) / dqx).astype(int)
    #return target_qx, qxOut
    target_qz = ((qzOut - qz_array[0]) / dqz).astype(int)
    
    target_mask = (target_qx >= 0) * (target_qx < qx_array.shape[0])
    target_mask *= (target_qz >= 0) * (target_qz < qz_array.shape[0])
    target_qx_list = target_qx[target_mask]
    target_qz_list = target_qz[target_mask]
    
    for i, col in enumerate(outgrid_info[2]['cols']):
        values_to_bin = data[:,:,col['name']].view(ndarray)[target_mask]
        outshape = (output_grid.shape[0], output_grid.shape[1])
        hist2d, xedges, yedges = histogram2d(target_qx_list,target_qz_list, \
            bins = (outshape[0],outshape[1]), range=((0,outshape[0]),(0,outshape[1])), weights=values_to_bin)
        output_grid[:,:,col['name']] += hist2d
        #framed_array[target_qz_list, target_qx_list, i] = data[:,:,col['name']][target_mask]
 
    cols = outgrid_info[2]['cols']
    data_cols = [col['name'] for col in cols if col['name'].startswith('counts')]
    monitor_cols = [col['name'] for col in cols if col['name'].startswith('monitor')]
    # just take the first one...
    if len(monitor_cols) > 0:
        monitor_col = monitor_cols[0]
        data_missing_mask = (output_grid[:,:,monitor_col] == 0)
        for dc in data_cols:
            output_grid[:,:,dc].view(ndarray)[data_missing_mask] = NaN
        
    #extra_info
    output_grid._info[-1] = data._info[-1].copy()
    print "output shape:", output_grid.shape
    return output_grid
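For reference, the mapping used above is: with k = 2*pi/wavelength and tilt = theta - twotheta/2, Qx = 2k sin(twotheta/2) sin(tilt) and Qz = 2k sin(twotheta/2) cos(tilt). The following is a small self-contained sketch of that mapping; the function name, wavelength and angles are illustrative only.

import numpy as np

def theta_twotheta_to_qxqz(theta, twotheta, wavelength=5.0):
    """Qx, Qz (inverse Angstroms) from scattering angles in degrees,
    using the same expressions as thetaTwothetaToQxQz above."""
    k = 2.0 * np.pi / wavelength
    tilt = np.radians(theta - twotheta / 2.0)
    half_twotheta = np.radians(twotheta / 2.0)
    qx = 2.0 * k * np.sin(half_twotheta) * np.sin(tilt)
    qz = 2.0 * k * np.sin(half_twotheta) * np.cos(tilt)
    return qx, qz

# at the specular condition (theta == twotheta/2) the tilt is zero, so Qx == 0:
qx, qz = theta_twotheta_to_qxqz(0.5, 1.0, wavelength=5.0)
# qx ~= 0.0, qz ~= (4*pi/5) * sin(0.5 degrees) ~= 0.0219 inv. Angstroms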
Example 4
def pixelsToTwotheta(data, params, pixels_per_degree=50.0, qzero_pixel=149.0, instr_resolution=1e-6, ax_name='xpixel'):
    """ The input array has axes theta and pixels;
    the output array has axes theta and twotheta.
    
    The pixel-to-angle conversion is linear (pixels-per-degree = constant);
    the output is rebinned to fit in a rectangular array if the detector angle
    is not fixed.
    
    **Inputs**

    data (ospec2d) : data in
    
    params (params): parameters override the field values
    
    pixels_per_degree {Pixels per degree} (float): slope of equation relating pixel to angle
    
    qzero_pixel {Q-zero pixel} (float): pixel value for Q=0
    
    instr_resolution {Resolution} (float): steps in angle smaller than this will be ignored/combined
    
    ax_name {Axis name} (str): name of the axis containing pixel data 
    
    **Returns**

    output (ospec2d) : data with pixel axis converted to angle

    2016-04-01 Brian Maranville
    """
   
    if 'pixels_per_degree' in params: pixels_per_degree = params['pixels_per_degree']
    if 'qzero_pixel' in params: qzero_pixel = params['qzero_pixel']
    #kw = locals().keys()
    #print kw, params
    #for name in kw:
    #    if name in params:
    #        exec "print '%s', %s, params['%s']" % (name, name,name) in locals()
    #        exec ("%s = params['%s']" % (name, name)) in locals()
    #        exec "print %s" % (name,) in locals()
    
    pixels_per_degree = float(pixels_per_degree) # coerce, in case it was an integer
    qzero_pixel = float(qzero_pixel) 
    instr_resolution = float(instr_resolution)
    
    print pixels_per_degree, qzero_pixel
    
    new_info = data.infoCopy()
    det_angle = new_info[-1].get('det_angle', None)
    det_angle = array(det_angle)
    # det_angle should be a vector of the same length as the other axis (usually theta)
    # or else just a float, in which case the detector is not moving!
    ndim = len(new_info) - 2 # last two entries in info are for metadata
    pixel_axis = next((i for i in xrange(len(new_info)-2) if new_info[i]['name'] == ax_name), None)
    if pixel_axis is None:
        raise ValueError("error: no %s axis in this dataset" % (ax_name,))
        
    if hasattr(det_angle, 'max'):
        det_angle_max = det_angle.max()
        det_angle_min = det_angle.min()
    else: # we have a number
        det_angle_max = det_angle_min = det_angle
        
    if ((det_angle_max - det_angle_min) < instr_resolution) or ndim == 1 or ax_name != 'xpixel':
        #then the detector is fixed and we just change the values in 'xpixel' axis vector to twotheta
        # or the axis to be converted is y, which doesn't move in angle.
        print "doing the simple switch of axis values..."
        
        #data_slices = [slice(None, None, 1), slice(None, None, 1)]
        #data_slices[pixel_axis] = slice(None, None, -1)
        
        if ax_name == 'xpixel':
            twotheta_motor = det_angle_min
            new_info[pixel_axis]['name'] = 'twotheta'
        else:
            twotheta_motor = 0.0 # we don't have a y-motor!
            new_info[pixel_axis]['name'] = 'twotheta_y'
            
        pixels = new_info[pixel_axis]['values']
        twoth = (pixels - qzero_pixel) / pixels_per_degree + twotheta_motor
        #new_info[pixel_axis]['values'] = twoth[::-1] # reverse: twotheta increases as pixels decrease
        new_info[pixel_axis]['values'] = twoth
        new_info[pixel_axis]['units'] = 'degrees'
        #new_array = (data.view(ndarray).copy())[data_slices]
        new_array = (data.view(ndarray).copy())
        new_data = MetaArray(new_array, info=new_info)
    
    else:
        # the detector is moving - have to rebin the dataset to contain all values of twoth
        # this is silly but have to set other axis!
        other_axis = (1 if pixel_axis == 0 else 0)
        #other_vector = new_info[other_axis]['values']
        #other_spacing = other_vector[1] - other_vector[0]
        pixels = new_info[pixel_axis]['values']
        twoth = (pixels - qzero_pixel) / pixels_per_degree
        #twoth = twoth[::-1] # reverse
        twoth_min = det_angle_min + twoth.min()
        twoth_max = det_angle_max + twoth.max()
        twoth_max_edge = twoth_max + 1.0 / pixels_per_degree
        dpp = 1.0 / pixels_per_degree
        #output_twoth_bin_edges = arange(twoth_max + dpp, twoth_min - dpp, -dpp)
        output_twoth_bin_edges = arange(twoth_min - dpp, twoth_max + dpp, dpp)
        output_twoth = output_twoth_bin_edges[:-1]
        #other_bin_edges = linspace(other_vector[0], other_vector[-1] + other_spacing, len(other_vector) + 1)
        new_info[pixel_axis]['name'] = 'twotheta' # getting rid of pixel units: substitute twoth
        new_info[pixel_axis]['values'] = output_twoth
        new_info[pixel_axis]['units'] = 'degrees'
        output_shape = [0,0,0]
        output_shape[pixel_axis] = len(output_twoth)
        output_shape[other_axis] = data.shape[other_axis] # len(other_vector)
        output_shape[2] = data.shape[2] # number of columns is unchanged!
        new_data = MetaArray(tuple(output_shape), info=new_info) # create the output data object!
        
        tth_min = twoth.min()
        tth_max = twoth.max()
        data_in = data.view(ndarray).copy()
        for i, da in enumerate(det_angle):
            twoth_min = da + tth_min
            twoth_max = da + tth_max
            input_twoth_bin_edges = empty(len(pixels) + 1)
            input_twoth_bin_edges[-1] = twoth_max + 1.0 / pixels_per_degree
            input_twoth_bin_edges[:-1] = twoth + da         
            #data_cols = ['counts', 'pixels', 'monitor', 'count_time']
            cols = new_info[-2]['cols']
            
            for col in range(len(cols)):
                input_slice = [slice(None, None), slice(None, None), col]
                #input_slice[pixel_axis] = slice(i, i+1)
                input_slice[other_axis] = i
                array_to_rebin = data_in[input_slice]
                new_array = reb.rebin(input_twoth_bin_edges, array_to_rebin, output_twoth_bin_edges)
                new_data.view(ndarray)[input_slice] = new_array
            
    return new_data
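In the fixed-detector branch above, the conversion is purely a relabeling of the pixel axis: twotheta = (pixel - qzero_pixel) / pixels_per_degree + detector_angle; only when the detector angle varies does the data itself get rebinned. A minimal sketch of the fixed-detector case (the detector size and angle below are illustrative):

import numpy as np

def pixel_to_twotheta(pixels, det_angle, pixels_per_degree=50.0, qzero_pixel=149.0):
    """Linear pixel -> twotheta conversion, as in the fixed-detector branch above."""
    return (np.asarray(pixels, dtype=float) - qzero_pixel) / pixels_per_degree + det_angle

pixels = np.arange(608)                              # hypothetical 608-pixel detector axis
twotheta = pixel_to_twotheta(pixels, det_angle=1.5)
# twotheta[149] == 1.5 (the Q=0 pixel sits at the detector angle);
# twotheta runs from -1.48 to 10.66 degrees with these illustrative values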
Example 5
def LoadMAGIKPSD(filename, path="", friendly_name="", collapse_y=True, auto_PolState=False, PolState='', flip=True, transpose=True, **kw):
    """ 
    Loads a data file into a MetaArray (or a list of MetaArrays, one per
    detector frame) and returns it.  Handles both 2D data and 3D stacks
    of detector frames.
    
    Need to rebin and regrid if the detector is moving...
    """
    lookup = {"DOWN_DOWN":"_down_down", "UP_DOWN":"_up_down", "DOWN_UP":"_down_up", "UP_UP":"_up_up", "entry": ""}
    if '.nxz' in filename:
        file_obj = hzf.File(filename)
    else:
        # nexus
        file_obj = h5py.File(os.path.join(path, filename))
    
    #if not (len(file_obj.detector.counts.shape) == 2):
        # not a 2D object!
    #    return
    for entryname, entry in file_obj.items():
        active_slice = slice(None, DETECTOR_ACTIVE[0], DETECTOR_ACTIVE[1])
        counts_value = entry['DAS_logs']['areaDetector']['counts'].value[:, 1:DETECTOR_ACTIVE[0]+1, :DETECTOR_ACTIVE[1]]
        dims = counts_value.shape
        print dims
        ndims = len(dims)
        if auto_PolState:
            PolState = lookup.get(entryname, "")
        # force PolState to a regularized version:
        if not PolState in lookup.values():
            PolState = ''
        #datalen = file_obj.detector.counts.shape[0]
        if ndims == 2:
            if DEBUG: print "2d"
            ypixels = dims[0]
            xpixels = dims[1]
        elif ndims >= 3:
            if DEBUG: print "3d"
            frames = dims[0]
            xpixels = dims[1]
            ypixels = dims[2]
        
        creation_story = "LoadMAGIKPSD('{fn}', path='{p}', auto_PolState={aPS}, PolState='{PS}')".format(fn=filename, p=path, aPS=auto_PolState, PS=PolState)

        # doesn't really matter; changing so that each keyword (whether it took the default value
        # provided or not) will be defined
        #    if not PolState == '':
        #        creation_story += ", PolState='{0}'".format(PolState)
        # creation_story += ")" 
    
    
        if ndims == 2: # one of the dimensions has been collapsed.
            info = []     
            info.append({"name": "xpixel", "units": "pixels", "values": arange(xpixels) }) # reverse order
            info.append({"name": "theta", "units": "degrees", "values": entry['DAS_logs']['sampleAngle']['softPosition'].value })
            info.extend([
                    {"name": "Measurements", "cols": [
                            {"name": "counts"},
                            {"name": "pixels"},
                            {"name": "monitor"},
                            {"name": "count_time"}]},
                    {"PolState": PolState, "filename": filename, "start_datetime": dateutil.parser.parse(file_obj.attrs.get('file_time')), "friendly_name": friendly_name,
                     "CreationStory":creation_story, "path":path, "det_angle":entry['DAS_logs']['detectorAngle']['softPosition'].value}]
                )
            data_array = zeros((xpixels, ypixels, 4))
            mon =  entry['DAS_logs']['counter']['liveMonitor'].value
            count_time = entry['DAS_logs']['counter']['liveTime'].value
            if ndims == 2:
                mon.shape = (1,) + mon.shape # broadcast the monitor over the other dimension
                count_time.shape = (1,) + count_time.shape
            counts = counts_value
            if transpose == True: counts = counts.swapaxes(0,1)
            if flip == True: counts = flipud(counts)
            data_array[..., 0] = counts
            #data_array[..., 0] = file_obj.detector.counts
            data_array[..., 1] = 1
            data_array[..., 2] = mon
            data_array[..., 3] = count_time
            # data_array[:,:,4]... I wish!!!  Have to do by hand.
            data = MetaArray(data_array, dtype='float', info=info)
            data.friendly_name = friendly_name # goes away on dumps/loads... just for initial object.
        
        elif ndims == 3: # then it's an unsummed collection of detector shots.  Should be one sample and detector angle per frame
            if collapse_y == True:
                info = []     
                info.append({"name": "xpixel", "units": "pixels", "values": arange(xpixels) }) # reverse order
                info.append({"name": "theta", "units": "degrees", "values": entry['DAS_logs']['sampleAngle']['softPosition'].value })
                info.extend([
                        {"name": "Measurements", "cols": [
                                {"name": "counts"},
                                {"name": "pixels"},
                                {"name": "monitor"},
                                {"name": "count_time"}]},
                        {"PolState": PolState, "filename": filename, "start_datetime": dateutil.parser.parse(file_obj.attrs.get('file_time')), "friendly_name": friendly_name,
                         "CreationStory":creation_story, "path":path, "det_angle":entry['DAS_logs']['detectorAngle']['softPosition'].value}]
                    )
                data_array = zeros((xpixels, frames, 4))
                mon =  entry['DAS_logs']['counter']['liveMonitor'].value
                count_time = entry['DAS_logs']['counter']['liveTime'].value
                if ndims == 3:
                    mon.shape = (1,) + mon.shape # broadcast the monitor over the other dimension
                    count_time.shape = (1,) + count_time.shape
                counts = numpy.sum(counts_value, axis=2)
                if transpose == True: counts = counts.swapaxes(0,1)
                if flip == True: counts = flipud(counts)
                data_array[..., 0] = counts
                #data_array[..., 0] = file_obj.detector.counts
                data_array[..., 1] = 1
                data_array[..., 2] = mon
                data_array[..., 3] = count_time
                # data_array[:,:,4]... I wish!!!  Have to do by hand.
                data = MetaArray(data_array, dtype='float', info=info)
                data.friendly_name = friendly_name # goes away on dumps/loads... just for initial object.
            else: # make separate frames           
                infos = []
                data = []
                samp_angle =  entry['DAS_logs']['sampleAngle']['softPosition'].value
                if samp_angle.shape[0] == 1:
                    samp_angle = numpy.ones((frames,)) * samp_angle
                det_angle = entry['DAS_logs']['detectorAngle']['softPosition'].value
                if det_angle.shape[0] == 1:
                    det_angle = numpy.ones((frames,)) * det_angle
                for i in range(frames):
                    info = []
                    info.append({"name": "xpixel", "units": "pixels", "values": range(xpixels) })
                    info.append({"name": "ypixel", "units": "pixels", "values": range(ypixels) })
                    info.extend([
                        {"name": "Measurements", "cols": [
                                {"name": "counts"},
                                {"name": "pixels"},
                                {"name": "monitor"},
                                {"name": "count_time"}]},
                        {"PolState": PolState, "filename": filename, "start_datetime": entry['start_time'].value, "friendly_name": friendly_name,
                         "CreationStory":creation_story, "path":path, "samp_angle": samp_angle[i], "det_angle": det_angle[i]}]
                    )
                    data_array = zeros((xpixels, ypixels, 4))
                    mon =  entry['DAS_logs']['counter']['liveMonitor'].value[i]
                    count_time = entry['DAS_logs']['counter']['liveTime'].value[i]
                    counts = counts_value[i]
                    if flip == True: counts = flipud(counts) 
                    data_array[..., 0] = counts
                    data_array[..., 1] = 1
                    data_array[..., 2] = mon
                    data_array[..., 3] = count_time
                    # data_array[:,:,4]... I wish!!!  Have to do by hand.
                    subdata = MetaArray(data_array, dtype='float', info=info)
                    subdata.friendly_name = friendly_name + ("_%d" % i) # goes away on dumps/loads... just for initial object.
                    data.append(subdata)
    return data 
Example 6
def LoadICPData(filename, path="", friendly_name="", auto_PolState=False, PolState='', flip=True, transpose=True, **kw):
    """ 
    Loads a data file into a MetaArray (or a list of MetaArrays, one per
    detector frame) and returns it.  Handles both 2D data and 3D stacks
    of detector frames.
    
    Need to rebin and regrid if the detector is moving...
    """
    lookup = {"a":"_down_down", "b":"_up_down", "c":"_down_up", "d":"_up_up", "g": ""}
    file_obj = load(os.path.join(path, filename), format='NCNR NG-1')
    dims = file_obj.detector.counts.shape
    ndims = len(dims)
    #if not (len(file_obj.detector.counts.shape) == 2):
        # not a 2D object!
    #    return
    if auto_PolState:
        key = friendly_name[-2:-1] # na1, ca1 etc. are --, nc1, cc1 are -+...
        PolState = lookup.get(key, "")
    # force PolState to a regularized version:
    if not PolState in lookup.values():
        PolState = ''
    #datalen = file_obj.detector.counts.shape[0]
    if ndims == 2:
        if DEBUG: print "2d"
        ypixels = file_obj.detector.counts.shape[0]
        xpixels = file_obj.detector.counts.shape[1]
    elif ndims >= 3:
        if DEBUG: print "3d"
        frames = file_obj.detector.counts.shape[0]
        ypixels = file_obj.detector.counts.shape[1]
        xpixels = file_obj.detector.counts.shape[2]
        
    creation_story = "LoadICPData('{fn}', path='{p}', auto_PolState={aPS}, PolState='{PS}')".format(fn=filename, p=path, aPS=auto_PolState, PS=PolState)

    # doesn't really matter; changing so that each keyword (whether it took the default value
    # provided or not) will be defined
    #    if not PolState == '':
    #        creation_story += ", PolState='{0}'".format(PolState)
    # creation_story += ")" 
    
    
    if ndims == 2: # one of the dimensions has been collapsed.
        info = []     
        info.append({"name": "xpixel", "units": "pixels", "values": arange(xpixels) }) # reverse order
        info.append({"name": "theta", "units": "degrees", "values": file_obj.sample.angle_x })
        info.extend([
                {"name": "Measurements", "cols": [
                        {"name": "counts"},
                        {"name": "pixels"},
                        {"name": "monitor"},
                        {"name": "count_time"}]},
                {"PolState": PolState, "filename": filename, "start_datetime": file_obj.date, "friendly_name": friendly_name,
                 "CreationStory":creation_story, "path":path, "det_angle":file_obj.detector.angle_x}]
            )
        data_array = zeros((xpixels, ypixels, 4))
        mon = file_obj.monitor.counts
        count_time = file_obj.monitor.count_time
        if ndims == 2:
            mon.shape = (1,) + mon.shape # broadcast the monitor over the other dimension
            count_time.shape = (1,) + count_time.shape
        counts = file_obj.detector.counts
        if transpose == True: counts = counts.swapaxes(0,1)
        if flip == True: counts = flipud(counts)
        data_array[..., 0] = counts
        #data_array[..., 0] = file_obj.detector.counts
        data_array[..., 1] = 1
        data_array[..., 2] = mon
        data_array[..., 3] = count_time
        # data_array[:,:,4]... I wish!!!  Have to do by hand.
        data = MetaArray(data_array, dtype='float', info=info)
        data.friendly_name = friendly_name # goes away on dumps/loads... just for initial object.
        
    elif ndims == 3: # then it's an unsummed collection of detector shots.  Should be one sample and detector angle per frame
        infos = []
        data = []
        for i in range(frames):
            samp_angle = file_obj.sample.angle_x[i]
            det_angle = file_obj.detector.angle_x[i]
            info = []
            info.append({"name": "xpixel", "units": "pixels", "values": range(xpixels) })
            info.append({"name": "ypixel", "units": "pixels", "values": range(ypixels) })
            info.extend([
                {"name": "Measurements", "cols": [
                        {"name": "counts"},
                        {"name": "pixels"},
                        {"name": "monitor"},
                        {"name": "count_time"}]},
                {"PolState": PolState, "filename": filename, "start_datetime": file_obj.date, "friendly_name": friendly_name,
                 "CreationStory":creation_story, "path":path, "samp_angle": samp_angle, "det_angle": det_angle}]
            )
            data_array = zeros((xpixels, ypixels, 4))
            mon = file_obj.monitor.counts[i]
            count_time = file_obj.monitor.count_time[i]
            counts = file_obj.detector.counts[i]
            if flip == True: counts = flipud(counts) 
            data_array[..., 0] = counts
            data_array[..., 1] = 1
            data_array[..., 2] = mon
            data_array[..., 3] = count_time
            # data_array[:,:,4]... I wish!!!  Have to do by hand.
            subdata = MetaArray(data_array, dtype='float', info=info)
            subdata.friendly_name = friendly_name + ("_%d" % i) # goes away on dumps/loads... just for initial object.
            data.append(subdata)
    return data                   
Example 7
 def apply(self, data, theta=None, qxmin=None, qxmax=None, qxbins=None, qzmin=None, qzmax=None, qzbins=None):
     info = [{"name": "qx", "units": "inv. Angstroms", "values": linspace(qxmin, qxmax, qxbins) },
             {"name": "qz", "units": "inv. Angstroms", "values": linspace(qzmin, qzmax, qzbins) },]
     old_info = data.infoCopy()
     info.append(old_info[2]) # column information!
     info.append(old_info[3]) # creation story!
     output_grid = MetaArray(zeros((qxbins, qzbins, data.shape[-1])), info=info)
     
     
     #if output_grid == None:
     #    output_grid = EmptyQxQzGrid(*self.default_qxqz_gridvals)
     #else:
     #    output_grid = deepcopy(output_grid)
         
     if theta == "" or theta is None:
         if 'state' in data._info[-1]:
             theta = float(data._info[-1]['state']['A[1]'])
             print 'theta:', theta
         else:
             print "can't run without theta!"
             return
    
     wl_array = data.axisValues('wavelength').copy()
     wl_array.shape = wl_array.shape + (1,)
     twotheta_array = data.axisValues('twotheta').copy()
     twotheta_array.shape = (1,) + twotheta_array.shape
     qxOut, qzOut = self.getQxQz(theta, twotheta_array, wl_array)
     
     # getting values from output grid:
     outgrid_info = output_grid.infoCopy()
     numcols = len(outgrid_info[2]['cols'])
     qx_array = output_grid.axisValues('qx')
     dqx = qx_array[1] - qx_array[0]
     qz_array = output_grid.axisValues('qz')
     dqz = qz_array[1] - qz_array[0]
     framed_array = zeros((qz_array.shape[0]+2, qx_array.shape[0]+2, numcols))
     target_qx = ((qxOut - qx_array[0])/dqx + 1).astype(int)
     #return target_qx, qxOut
     target_qz = ((qzOut - qz_array[0])/dqz + 1).astype(int)
     target_mask = (target_qx >= 0) * (target_qx < qx_array.shape[0])
     target_mask *= (target_qz >= 0) * (target_qz < qz_array.shape[0])
     target_qx_list = target_qx[target_mask]
     target_qz_list = target_qz[target_mask]
     #target_qx = target_qx.clip(0, qx_array.shape[0]+1)
     #target_qz = target_qz.clip(0, qz_array.shape[0]+1)
     
     for i, col in enumerate(outgrid_info[2]['cols']):
         values_to_bin = data[:,:,col['name']][target_mask]
         outshape = (output_grid.shape[0], output_grid.shape[1])
         hist2d, xedges, yedges = histogram2d(target_qx_list,target_qz_list, bins = (outshape[0],outshape[1]), range=((0,outshape[0]),(0,outshape[1])), weights=values_to_bin)
         output_grid[:,:,col['name']] += hist2d
         #framed_array[target_qz_list, target_qx_list, i] = data[:,:,col['name']][target_mask]
         
     #trimmed_array = framed_array[1:-1, 1:-1]
     #output_grid[:,:] = trimmed_array
     
     creation_story = data._info[-1]['CreationStory']
     new_creation_story = creation_story + ".filter('{0}', {1})".format(self.__class__.__name__, output_grid._info[-1]['CreationStory'])
     #print new_creation_story
     output_grid._info[-1] = data._info[-1].copy()
     output_grid._info[-1]['CreationStory'] = new_creation_story
     return output_grid