Example #1
 def extract_ft_force(self, window=None):
     #print('ft force reporting')
     if not window:
         xftf(self.larch,
              group=self.larch,
              _larch=self._larch,
              kmin=self.kmin_ft,
              kmax=self.kmax_ft)
     else:
         window_type = window['window_type']
         tapering = window['tapering']
         r_weight = window['r_weight']
         print('setting window')
         xftf(self.larch,
              group=self.larch,
              _larch=self._larch,
              kmin=self.kmin_ft,
              kmax=self.kmax_ft,
              window=window_type,
              dk=tapering,
              rweight=r_weight)
     self.r = self.larch.r
     self.chir = self.larch.chir
     self.chir_mag = self.larch.chir_mag
     self.chir_im = self.larch.chir_im
     self.chir_re = self.larch.chir_re
     #self.chir_pha = self.larch.chir_pha
     self.kwin = self.larch.kwin
Example #2
def calcFT(k, chi, kw, k_min, k_max, wind, dk_wind):
    ft = larch_builtins._group(mylarch)
    xftf(k,
         chi,
         group=ft,
         kweight=kw,
         kmin=k_min,
         kmax=k_max,
         window=wind,
         dk=dk_wind,
         _larch=mylarch)
    return ft.r, ft.chir, ft.chir_mag, ft.chir_im
Example #3
def calc_FT(k, chi, kmin_, kmax_, kweight_, FTwindow, delta_k):
    ft = larch_builtins._group(mylarch)
    xftf(k,
         chi,
         group=ft,
         kweight=kweight_,
         kmin=kmin_,
         kmax=kmax_,
         window=FTwindow,
         dk=delta_k,
         _larch=mylarch)
    return ft.r, ft.chir_mag, ft.chir_im
Example #4
 def FT_F(self,
          kmin=0,
          kmax=None,
          kweight=0,
          dk=1,
          dk2=None,
          with_phase=False,
          window='kaiser',
          rmax_out=10,
          nfft=2048,
          kstep=0.05,
          **kws):
     """    forward XAFS Fourier transform, from chi(k) to chi(R), using
     common XAFS conventions from larch.
     
     Parameters:
     -----------
       rmax_out: highest R for output data (10 Ang)
       kweight:  exponent for weighting spectra by k**kweight
       kmin:     starting k for FT Window
       kmax:     ending k for FT Window
       dk:       tapering parameter for FT Window
       dk2:      second tapering parameter for FT Window
       window:   name of window type
       nfft:     value to use for N_fft (2048).
       kstep:    value to use for delta_k (0.05 Ang^-1).
       with_phase: output the phase as well as magnitude, real, imag  [False]
     
     Returns:
     ---------
       None   -- outputs are written to supplied group.
     
     Notes:
     -------
     Arrays written to output group:
         kwin               window function Omega(k) (length of input chi(k)).
         r                  uniform array of R, out to rmax_out.
         chir               complex array of chi(R).
         chir_mag           magnitude of chi(R).
         chir_re            real part of chi(R).
         chir_im            imaginary part of chi(R).
         chir_pha           phase of chi(R) if with_phase=True
                             (a noticeable performance hit)
     
     Supports First Argument Group convention 
     (with group member names 'k' and 'chi')"""
     args = dict(locals())  #; args.update(kws)
     del args["self"], args["kws"]
     if not (kmax): args["kmax"] = self.k[-1]
     xafs.xftf(self, _larch=self._larch, **args)
     return
Example #5
    def extract_ft(self):
        if self.verbose:
            print('ft reporting')
        print(self.kmin_ft)
        xftf(self.larch, group=self.larch,  _larch=self._larch, kmin=self.kmin_ft, kmax=self.kmax)

        self.r = self.larch.r
        self.chir = self.larch.chir
        self.chir_mag = self.larch.chir_mag
        self.chir_im = self.larch.chir_im
        self.chir_re = self.larch.chir_re
        #self.chir_pha = self.larch.chir_pha
        self.kmax_ft = self.kmax
        self.kwin = self.larch.kwin
Example #6
def run_autobk(Energy, Ut, E0, Rbkg, Kweight, k_min, k_max,
               fit_s, fit_e, nor_aE0_s, nor_aE0_e, pre_type):
    exafs = larch_builtins._group(mylarch)
    ft = larch_builtins._group(mylarch)
    pre_edge_kws = {'pre1': fit_s - E0, 'pre2': fit_e - E0,
                    'norm1': nor_aE0_s - E0, 'norm2': nor_aE0_e - E0}
    if pre_type == 2:
        pre_edge_kws['nvict'] = 4
    autobk(Energy, mu=Ut, e0=E0, rbkg=Rbkg, kmin=k_min, kmax=k_max,
           kweight=Kweight, pre_edge_kws=pre_edge_kws, group=exafs,
           _larch=mylarch)
    # print(larch_builtins._groupitems(exafs, mylarch))
    xftf(exafs.k, exafs.chi, group=ft, kweight=3, kmin=3.0, kmax=12.0,
         _larch=mylarch)
    # k values at the ends of the normalization range (not used below)
    k_l = math.sqrt(0.2626 * (nor_aE0_s - E0))
    k_h = math.sqrt(0.2626 * (nor_aE0_e - E0))
    # mask selecting the [k_min, k_max] range of exafs.k (computed but not returned)
    mask = np.concatenate([np.zeros(len(exafs.k[0:find_near(exafs.k, k_min)])),
                           np.ones(len(exafs.k[find_near(exafs.k, k_min):find_near(exafs.k, k_max) + 1])),
                           np.zeros(len(exafs.k[find_near(exafs.k, k_max) + 1:]))])
    return exafs.bkg, exafs.pre_edge, exafs.post_edge, exafs.chi, exafs.k, ft.r, ft.chir_mag, ft.chir_im
Example #7
    def paths_optimizations(self, number=0.01, verbose=False):
        r"""
        Paths optimizations using simpsons area calculation.
        The calculation are used to perform

        Inputs:
            number (float): cut off percentage for paths_optimizations, the vals
                is typically set at 1%
        """
        total = 0
        total_area = 0
        contrib = []
        contrib_area = []
        for i in range(len(self.paths)):
            self.best.k = self.ind_export_paths[2 * i, :]
            self.best.chi = self.ind_export_paths[2 * i + 1, :]

            xftf(self.best.k,
                 self.best.chi,
                 kmin=self.params['Kmin'],
                 kmax=self.params['Kmax'],
                 dk=4,
                 window='hanning',
                 kweight=self.params['kweight'],
                 group=self.best,
                 _larch=self.mylarch)

            total += np.linalg.norm(self.best.chir_mag)
            contrib.append(np.linalg.norm(self.best.chir_mag))
            contrib_area.append(simps(self.best.chir_mag, self.best.r))
            total_area += simps(self.best.chir_mag, self.best.r)
        contrib_p = [i / total for i in contrib]
        contrib_ap = [i / total_area for i in contrib_area]
        if verbose:
            print(
                "Paths, Contrib Percentage (2-Norm), Contrib Percentage (Area)"
            )
            for i in range(len(self.paths)):
                print(i + 1, contrib_p[i].round(3), contrib_ap[i].round(3))
        #print(total)
        new_path = (np.argwhere(np.array(contrib_ap) >= number)).flatten() + 1
        print("New Paths")
        print(new_path)
        plt.bar(self.paths, height=contrib_ap)
        plt.xticks(self.paths)
Example #8
 def FT_F(self, kmin=0, kmax=None, kweight=0, dk=1, dk2=None, 
          with_phase=False, window='kaiser', rmax_out=10, nfft=2048, 
          kstep=0.05, **kws ):
     """    forward XAFS Fourier transform, from chi(k) to chi(R), using
     common XAFS conventions from larch.
     
     Parameters:
     -----------
       rmax_out: highest R for output data (10 Ang)
       kweight:  exponent for weighting spectra by k**kweight
       kmin:     starting k for FT Window
       kmax:     ending k for FT Window
       dk:       tapering parameter for FT Window
       dk2:      second tapering parameter for FT Window
       window:   name of window type
       nfft:     value to use for N_fft (2048).
       kstep:    value to use for delta_k (0.05 Ang^-1).
       with_phase: output the phase as well as magnitude, real, imag  [False]
     
     Returns:
     ---------
       None   -- outputs are written to supplied group.
     
     Notes:
     -------
     Arrays written to output group:
         kwin               window function Omega(k) (length of input chi(k)).
         r                  uniform array of R, out to rmax_out.
         chir               complex array of chi(R).
         chir_mag           magnitude of chi(R).
         chir_re            real part of chi(R).
         chir_im            imaginary part of chi(R).
         chir_pha           phase of chi(R) if with_phase=True
                             (a noticeable performance hit)
     
     Supports First Argument Group convention 
     (with group member names 'k' and 'chi')"""
     args = dict(locals())  #; args.update(kws)
     del args["self"], args["kws"]
     if not kmax: args["kmax"] = self.k[-1]
     xafs.xftf(self, _larch=self._larch, **args)
     return
Example #9
def read_athena(filename, match=None, do_preedge=True, do_bkg=True, do_fft=True, use_hashkey=False, _larch=None):
    """read athena project file
    returns a Group of Groups, one for each Athena Group in the project file

    Arguments:
        filename (string): name of Athena Project file
        match (string): pattern to use to limit imported groups (see Note 1)
        do_preedge (bool): whether to do pre-edge subtraction [True]
        do_bkg (bool): whether to do XAFS background subtraction [True]
        do_fft (bool): whether to do XAFS Fast Fourier transform [True]
        use_hashkey (bool): whether to use Athena's hash key as the
                       group name instead of the Athena label [False]

    Returns:
        group of groups each named according the label used by Athena.

    Notes:
        1. To limit the imported groups, use the pattern in `match`,
           using '*' to match 'all', '?' to match any single character,
           or [sequence] to match any of a sequence of letters.  The match
           will always be insensitive to case.
        2. do_preedge, do_bkg, and do_fft will attempt to reproduce the
           pre-edge, background subtraction, and FFT from Athena by using
           the parameters saved in the project file.
        3. use_hashkey=True will name groups from the internal 5 character
           string used by Athena, instead of the group label.

    Example:
        1. read in all groups from a project file:
           cr_data = read_athena('My Cr Project.prj')

        2. read in only the "merged" data from a Project, and don't do FFT:
           zn_data = read_athena('Zn on Stuff.prj', match='*merge*', do_fft=False)

    """

    from larch_plugins.xafs import pre_edge, autobk, xftf

    if not os.path.exists(filename):
        raise IOError("%s '%s': cannot find file" % (ERR_MSG, filename))

    try:
        fh = GzipFile(filename)
        lines = [bytes2str(t) for t in fh.readlines()]
        fh.close()
    except:
        raise ValueError("%s '%s': invalid gzip file" % (ERR_MSG, filename))

    athenagroups = []
    dat = {"name": ""}
    Athena_version = None
    vline = lines.pop(0)
    if "Athena project file -- Demeter version" not in vline:
        raise ValueError("%s '%s': invalid Athena File" % (ERR_MSG, filename))

    major, minor, fix = "0", "0", "0"
    try:
        vs = vline.split("Athena project file -- Demeter version")[1]
        major, minor, fix = vs.split(".")
    except:
        raise ValueError("%s '%s': cannot read version" % (ERR_MSG, filename))
    if int(minor) < 9 or int(fix[:2]) < 21:
        raise ValueError("%s '%s': file is too old to read" % (ERR_MSG, filename))

    for t in lines:
        if t.startswith("#") or len(t) < 2:
            continue
        key = t.split(" ")[0].strip()
        key = key.replace("$", "").replace("@", "")
        if key == "old_group":
            dat["name"] = perl2json(t)
        elif key == "[record]":
            athenagroups.append(dat)
            dat = {"name": ""}
        elif key == "args":
            dat["args"] = perl2json(t)
        elif key in ("x", "y", "i0"):
            dat[key] = np.array([float(x) for x in perl2json(t)])

    if match is not None:
        match = match.lower()

    out = Group()
    out.__doc__ = """XAFS Data from Athena Project File %s""" % (filename)
    for dat in athenagroups:
        label = dat["name"]
        this = Group(
            athena_id=label, energy=dat["x"], mu=dat["y"], bkg_params=Group(), fft_params=Group(), athena_params=Group()
        )
        if "i0" in dat:
            this.i0 = dat["i0"]
        if "args" in dat:
            for i in range(len(dat["args"]) // 2):
                key = dat["args"][2 * i]
                val = dat["args"][2 * i + 1]
                if key.startswith("bkg_"):
                    setattr(this.bkg_params, key[4:], val)
                elif key.startswith("fft_"):
                    setattr(this.fft_params, key[4:], val)
                elif key == "label":
                    this.label = val
                    if not use_hashkey:
                        label = this.label
                else:
                    setattr(this.athena_params, key, val)
        this.__doc__ = """Athena Group Name %s (key='%s')""" % (label, dat["name"])
        olabel = fix_varname(label)
        if match is not None:
            if not fnmatch(olabel.lower(), match):
                continue

        if do_preedge or do_bkg:
            pars = this.bkg_params
            pre_edge(
                this,
                _larch=_larch,
                e0=float(pars.e0),
                pre1=float(pars.pre1),
                pre2=float(pars.pre2),
                norm1=float(pars.nor1),
                norm2=float(pars.nor2),
                nnorm=float(pars.nnorm) - 1,
                make_flat=bool(pars.flatten),
            )

            if do_bkg and hasattr(pars, "rbkg"):
                autobk(
                    this,
                    _larch=_larch,
                    e0=float(pars.e0),
                    rbkg=float(pars.rbkg),
                    kmin=float(pars.spl1),
                    kmax=float(pars.spl2),
                    kweight=float(pars.kw),
                    dk=float(pars.dk),
                    clamp_lo=float(pars.clamp1),
                    clamp_hi=float(pars.clamp2),
                )

        if do_fft:
            pars = this.fft_params
            kweight = 2
            if hasattr(pars, "kw"):
                kweight = float(pars.kw)
            xftf(
                this,
                _larch=_larch,
                kmin=float(pars.kmin),
                kmax=float(pars.kmax),
                kweight=kweight,
                window=pars.kwindow,
                dk=float(pars.dk),
            )

        setattr(out, olabel, this)
    return out
Example #10
def larch_init(CSV_sub, params):
    r"""
    Larch initialization for data analysis
    Inputs:
        CSV_sub (str): file location of the data files (CSV/XMU)
        params (dict): dictionary containing all parameters
    """
    global intervalK
    global best
    global KMIN
    global KMAX
    global KWEIGHT
    global g
    #
    Kmin = params['Kmin']
    Kmax = params['Kmax']
    deltak = params['deltak']

    BIG = int(Kmax / deltak)
    SMALL = int(Kmin / deltak)
    MID = int(BIG - SMALL + 1)
    RBKG = params['rbkg']
    KWEIGHT = params['kweight']

    KMIN = Kmin
    KMAX = Kmax

    BKGKW = params['bkgkw']  # cu = 1 hfal2 = 2.0
    BKGKMAX = params['bkgkmax']  # cu = 25, hfal2 = 15
    #     print(base)
    CSV_PATH = os.path.join(base, CSV_sub)
    g = read_ascii(CSV_PATH)
    best = read_ascii(CSV_PATH)
    sumgroup = read_ascii(CSV_PATH)

    # back ground subtraction using autobk
    # data kweight
    try:
        g.chi
    except AttributeError:
        autobk(g, rbkg=RBKG, kweight=BKGKW, kmax=BKGKMAX, _larch=mylarch)
        autobk(best, rbkg=RBKG, _larch=mylarch)
        autobk(sumgroup, rbkg=RBKG, _larch=mylarch)

    intervalK = (np.linspace(SMALL, BIG, MID)).tolist()
    # chang
    xftf(g.k,
         g.chi,
         kmin=KMIN,
         kmax=KMAX,
         dk=4,
         window='hanning',
         kweight=KWEIGHT,
         group=g,
         _larch=mylarch)
    xftf(best.k,
         best.chi,
         kmin=KMIN,
         kmax=KMAX,
         dk=4,
         window='hanning',
         kweight=KWEIGHT,
         group=best,
         _larch=mylarch)
    xftf(sumgroup.k,
         sumgroup.chi,
         kmin=KMIN,
         kmax=KMAX,
         dk=4,
         window='hanning',
         kweight=KWEIGHT,
         group=sumgroup,
         _larch=mylarch)
    # chang end

    exp = g.chi
    # params = {}
    # params['Kmin'] = Kmin
    # params['Kmax'] = Kmax
    params['SMALL'] = SMALL
    params['BIG'] = BIG
    params['intervalK'] = intervalK
    return exp, g, params, mylarch
Example #11
def fitness(exp, arr, paths, params, return_r=True):

    base = Path(os.getcwd()).parent.parent

    front = os.path.join(base, params['front'])
    end = '.dat'

    loss = 0
    yTotal = [0] * (401)
    offset = 2
    global best

    Kmin = params['Kmin']
    Kmax = params['Kmax']
    SMALL = params['SMALL']
    BIG = params['BIG']
    Kweight = params['kweight']
    arr_r = []
    # print(paths)
    array_str = "---------------------\n"

    for i in range(len(paths)):
        filename = front + str(paths[i]).zfill(4) + end
        # print(filename)
        path = feffdat.feffpath(filename,
                                s02=str(arr[i, 0]),
                                e0=str(arr[i, 1]),
                                sigma2=str(arr[i, 2]),
                                deltar=str(arr[i, 3]),
                                _larch=mylarch)
        #print(arr[i-1])
        #print(filename)
        feffdat.path2chi(path, _larch=mylarch)
        # print("Path", paths[i], path.s02, path.e0, path.sigma2, path.reff+arr[i,3])
        array_str += "Path " + str(paths[i]) + " " + str(path.s02) + " " + str(
            path.e0) + " " + str(np.round(float(path.sigma2), 4)) + " " + str(
                np.round(path.reff + arr[i, 3], 3)) + "\n"

        temp = [
            float(path.s02),
            float(path.e0),
            float(path.sigma2),
            float(path.reff + arr[i, 3]),
            float(path.degen),
            float(path.nleg), (path.geom)
        ]
        arr_r.append(temp)

        y = path.chi

        for k in intervalK:
            yTotal[int(k)] += y[int(k)]
    best.chi = yTotal
    best.k = path.k
    xftf(best.k,
         best.chi,
         kmin=Kmin,
         kmax=Kmax,
         dk=4,
         window='hanning',
         kweight=Kweight,
         group=best,
         _larch=mylarch)

    # offset=-3
    #     plt.plot(path.k[SMALL:BIG], yTotal[SMALL:BIG]*path.k[SMALL:BIG]**2+offset,label="Total")
    #     plt.legend(loc='upper left')
    for j in intervalK:
        #loss = loss + (yTotal[int(j)]*g.k[int(j)]**2 - exp[int(j)]*g.k[int(j)]**2)**2
        loss = loss + (yTotal[int(j)] * g.k[int(j)]**2 -
                       exp[int(j)] * g.k[int(j)]**2)**2

    if return_r:
        return path, yTotal, best, loss, arr_r, array_str
    else:
        return path, yTotal, best, loss
Example #12
def fitness_individal(exp, arr, paths, params, plot=False, export=False):
    r"""
    Fittness for individual score
    Inputs:
        exp (larch_object): expereince data for larch object
        arr (array): array for best fit
        path (list): path list
        params (dicts): dictionary for params calculations
        plot (bool): plot for individual paths
        export (bool): return array for each paths
    Outputs:

    """
    global intervalK
    global best
    loss = 0
    yTotal = [0] * (401)
    offset = 5
    # print(params)
    base = Path(os.getcwd()).parent.parent
    front = os.path.join(base, params['front'])
    end = '.dat'
    Kmax = params['Kmax']
    SMALL = params['SMALL']
    BIG = params['BIG']
    export_paths = np.zeros((2 * len(paths), 401))
    for i in range(len(paths)):
        filename = front + str(paths[i]).zfill(4) + end
        path = feffdat.feffpath(filename,
                                s02=str(arr[i, 0]),
                                e0=str(arr[i, 1]),
                                sigma2=str(arr[i, 2]),
                                deltar=str(arr[i, 3]),
                                _larch=mylarch)

        feffdat.path2chi(path, _larch=mylarch)
        if plot:
            plt.figure()
            plt.plot(path.k,
                     path.chi * path.k**2.0 + offset * (i + 1),
                     label='Path' + str(i))
            plt.xlabel("k$^{2}$(Å$^{-1}$)")
            plt.ylabel("k$^{2}$\chi(k)(Å$^{-2}$)$")
            plt.ylim(-5, len(paths) * offset + offset)
            plt.title('Paths: ' + str(i))
            # plt.title(str(i+1))
            plt.xlim(0, Kmax + 1)

            # print(len(paths))
            # plt.figure()
            # plt.plot(path.k, path.chi*path.k**2.0 + offset*(i+1),label='Path'+str(i))
            # plt.xlabel("k$^{2}$(Å$^{-1}$)")
            # plt.ylabel("k$^{2}$\chi(k)(Å$^{-2}$)$")
            # # plt.ylim(-10,len(paths)*offset+offset)
            # plt.xlim(0,Kmax+1)
            # plt.show()

        if export:
            export_paths[2 * i, :] = path.k
            export_paths[2 * i + 1, :] = (path.chi * path.k**2.0)

        y = path.chi

        for k in intervalK:
            yTotal[int(k)] += y[int(k)]
    best.chi = yTotal
    best.k = path.k
    xftf(best.k,
         best.chi,
         kmin=KMIN,
         kmax=KMAX,
         dk=4,
         window='hanning',
         kweight=KWEIGHT,
         group=best)

    offset = 0
    if plot:
        plt.plot(path.k[SMALL:BIG],
                 yTotal[SMALL:BIG] * path.k[SMALL:BIG]**2 + offset,
                 'b',
                 label="Genetic Algorithm")
    # plt.legend('loc = best')
    # if export:
    # export_paths = export.
    for j in intervalK:
        #loss = loss + (yTotal[int(j)]*g.k[int(j)]**2 - exp[int(j)]*g.k[int(j)]**2)**2
        loss = loss + (yTotal[int(j)] * g.k[int(j)]**2 -
                       exp[int(j)] * g.k[int(j)]**2)**2

    #print(loss)
    return path, yTotal, best, loss, export_paths
Example #13
def read_athena(filename,
                match=None,
                do_preedge=True,
                do_bkg=True,
                do_fft=True,
                use_hashkey=False,
                _larch=None):
    """read athena project file
    returns a Group of Groups, one for each Athena Group in the project file

    Arguments:
        filename (string): name of Athena Project file
        match (string): pattern to use to limit imported groups (see Note 1)
        do_preedge (bool): whether to do pre-edge subtraction [True]
        do_bkg (bool): whether to do XAFS background subtraction [True]
        do_fft (bool): whether to do XAFS Fast Fourier transform [True]
        use_hashkey (bool): whether to use Athena's hash key as the
                       group name instead of the Athena label [False]

    Returns:
        group of groups each named according the label used by Athena.

    Notes:
        1. To limit the imported groups, use the pattern in `match`,
           using '*' to match 'all', '?' to match any single character,
           or [sequence] to match any of a sequence of letters.  The match
           will always be insensitive to case.
        2. do_preedge, do_bkg, and do_fft will attempt to reproduce the
           pre-edge, background subtraction, and FFT from Athena by using
           the parameters saved in the project file.
        3. use_hashkey=True will name groups from the internal 5 character
           string used by Athena, instead of the group label.

    Example:
        1. read in all groups from a project file:
           cr_data = read_athena('My Cr Project.prj')

        2. read in only the "merged" data from a Project, and don't do FFT:
           zn_data = read_athena('Zn on Stuff.prj', match='*merge*', do_fft=False)

    """

    from larch_plugins.xafs import pre_edge, autobk, xftf
    if not os.path.exists(filename):
        raise IOError("%s '%s': cannot find file" % (ERR_MSG, filename))

    try:
        fh = GzipFile(filename)
        lines = [bytes2str(t) for t in fh.readlines()]
        fh.close()
    except:
        raise ValueError("%s '%s': invalid gzip file" % (ERR_MSG, filename))

    athenagroups = []
    dat = {'name': ''}
    Athena_version = None
    vline = lines.pop(0)
    if "Athena project file -- Demeter version" not in vline:
        raise ValueError("%s '%s': invalid Athena File" % (ERR_MSG, filename))

    major, minor, fix = '0', '0', '0'
    try:
        vs = vline.split("Athena project file -- Demeter version")[1]
        major, minor, fix = vs.split('.')
    except:
        raise ValueError("%s '%s': cannot read version" % (ERR_MSG, filename))
    if int(minor) < 9 or int(fix[:2]) < 21:
        raise ValueError("%s '%s': file is too old to read" %
                         (ERR_MSG, filename))

    for t in lines:
        if t.startswith('#') or len(t) < 2 or 'undef' in t:
            continue
        key = t.split(' ')[0].strip()
        key = key.replace('$', '').replace('@', '')
        if key == 'old_group':
            dat['name'] = perl2json(t)
        elif key == '[record]':
            athenagroups.append(dat)
            dat = {'name': ''}
        elif key == 'args':
            dat['args'] = perl2json(t)
        elif key in ('x', 'y', 'i0', 'signal'):
            dat[key] = np.array([float(x) for x in perl2json(t)])

    if match is not None:
        match = match.lower()

    out = Group()
    out.__doc__ = """XAFS Data from Athena Project File %s""" % (filename)
    for dat in athenagroups:
        label = dat.get('name', 'unknown')
        this = Group(athena_id=label,
                     energy=dat['x'],
                     mu=dat['y'],
                     bkg_params=Group(),
                     fft_params=Group(),
                     athena_params=Group())
        if 'i0' in dat:
            this.i0 = dat['i0']
        if 'args' in dat:
            for i in range(len(dat['args']) // 2):
                key = dat['args'][2 * i]
                val = dat['args'][2 * i + 1]
                if key.startswith('bkg_'):
                    setattr(this.bkg_params, key[4:], val)
                elif key.startswith('fft_'):
                    setattr(this.fft_params, key[4:], val)
                elif key == 'label':
                    this.label = val
                    if not use_hashkey:
                        label = this.label
                else:
                    setattr(this.athena_params, key, val)
        this.__doc__ = """Athena Group Name %s (key='%s')""" % (label,
                                                                dat['name'])
        olabel = fix_varname(label)
        if match is not None:
            if not fnmatch(olabel.lower(), match):
                continue
        if do_preedge or do_bkg:
            pars = this.bkg_params
            pre_edge(this,
                     _larch=_larch,
                     e0=float(pars.e0),
                     pre1=float(pars.pre1),
                     pre2=float(pars.pre2),
                     norm1=float(pars.nor1),
                     norm2=float(pars.nor2),
                     nnorm=float(pars.nnorm) - 1,
                     make_flat=bool(pars.flatten))

            if do_bkg and hasattr(pars, 'rbkg'):
                autobk(this,
                       _larch=_larch,
                       e0=float(pars.e0),
                       rbkg=float(pars.rbkg),
                       kmin=float(pars.spl1),
                       kmax=float(pars.spl2),
                       kweight=float(pars.kw),
                       dk=float(pars.dk),
                       clamp_lo=float(pars.clamp1),
                       clamp_hi=float(pars.clamp2))

        if do_fft:
            pars = this.fft_params
            kweight = 2
            if hasattr(pars, 'kw'):
                kweight = float(pars.kw)
            xftf(this,
                 _larch=_larch,
                 kmin=float(pars.kmin),
                 kmax=float(pars.kmax),
                 kweight=kweight,
                 window=pars.kwindow,
                 dk=float(pars.dk))

        setattr(out, olabel, this)
    return out
Example #14
  10067.28  120488.7  89110.0998902  0.30168329
  10073.72  118833.7  88265.1000656  0.29738025
  10080.18  118434.7  88372.1004302  0.29280544
  10086.66  117995.7  88449.0998063  0.28822094
  10093.17  118435.7  89180.0997098  0.28371228
  10099.69  117303.7  88720.0998253  0.27927983
  10106.22  117929.7  89581.1003571  0.27494432
  10112.78  116857.7  89144.1003332  0.27070279
  10119.36  115791.7  88718.1000129  0.26632896
  10125.96  111467.7  85797.099695  0.26174966
  10132.57  110079.7  85128.099834  0.25704747
  10139.21  104190.7  80953.0999403  0.2523529
  10145.86  93726.7  73074.0996945  0.24890911'''

raw_data_lines = raw_data.split('\n')

raw_data_table = []
for line in raw_data_lines:
    raw_data_table.append([float(x) for x in line.strip().split()])

table = np.array(raw_data_table)

group.energy = table[:, 0]
group.mu = table[:, 3]

e0 = find_e0(group, _larch=mylarch)
pre_edge(group, _larch=mylarch)
autobk(group, _larch=mylarch)
xftf(group, _larch=mylarch)
xftr(group, _larch=mylarch)
Example #15
# using "expected names" for XAFS data
rawdata = np.loadtxt('../xafsdata/fe2o3_rt1.xmu')

xafsdat.energy = rawdata[:, 0]
xafsdat.mu     = rawdata[:, 1]
xafsdat.i0     = rawdata[:, 1]

# run autobk on the xafsdat Group, including a larch Interpreter....
# note that this expects 'energy' and 'mu' to be in xafsdat, and will
# write data for 'k', 'chi', 'kwin', 'e0', ... into xafsdat
autobk(xafsdat, rbkg=1.0, kweight=2, _larch=my_larch)

# Fourier transform to R space, again passing in a Group (here,
# 'k' and 'chi' are expected, and writing out 'r', 'chir_mag',
# and so on)
xftf(xafsdat, kmin=2, kmax=15, dk=3, kweight=2, _larch=my_larch)

#
# plot grid of results:
# mu + bkg
pylab.subplot(2, 2, 1)
pylab.plot(xafsdat.energy, xafsdat.bkg, 'r--')
pylab.plot(xafsdat.energy, xafsdat.mu)
pylab.xlabel('Energy (eV)')

# normalized XANES
# find array bounds for normalized mu(E) for [e0 - 25: e0 + 75]
j0 = np.abs(xafsdat.energy-(xafsdat.e0 - 25.0)).argmin()
j1 = np.abs(xafsdat.energy-(xafsdat.e0 + 75.0)).argmin()

pylab.subplot(2, 2, 2)
Example #16
def estimate_noise(k,
                   chi=None,
                   group=None,
                   rmin=15.0,
                   rmax=30.0,
                   kweight=1,
                   kmin=0,
                   kmax=20,
                   dk=4,
                   dk2=None,
                   kstep=0.05,
                   kwindow='kaiser',
                   nfft=2048,
                   _larch=None,
                   **kws):
    """
    estimate noise levels in EXAFS spectrum and estimate highest k
    where data is above the noise level
    Parameters:
    -----------
      k:        1-d array of photo-electron wavenumber in Ang^-1 (or group)
      chi:      1-d array of chi
      group:    output Group  [see Note below]
      rmin:     minimum R value for high-R region of chi(R)
      rmax:     maximum R value for high-R region of chi(R)
      kweight:  exponent for weighting spectra by k**kweight [1]
      kmin:     starting k for FT Window [0]
      kmax:     ending k for FT Window  [20]
      dk:       tapering parameter for FT Window [4]
      dk2:      second tapering parameter for FT Window [None]
      kstep:    value to use for delta_k ( Ang^-1) [0.05]
      kwindow:  name of window type ['kaiser']
      nfft:     value to use for N_fft [2048].

    Returns:
    ---------
      None   -- outputs are written to supplied group.  Values (scalars) written
      to output group:
        epsilon_k     estimated noise in chi(k)
        epsilon_r     estimated noise in chi(R)
        kmax_suggest  highest estimated k value where |chi(k)| > epsilon_k

    Notes:
    -------

     1. This method uses the high-R portion of chi(R) as a measure of the noise
        level in the chi(R) data and uses Parseval's theorem to convert this noise
        level to that in chi(k).  This method implicitly assumes that there is no
        signal in the high-R portion of the spectrum, and that the noise in the
        spectrum s "white" (independent of R) .  Each of these assumptions can be
        questioned.
     2. The estimate for 'kmax_suggest' has a tendency to be fair but pessimistic
        in how far out the chi(k) data goes before being dominated by noise.
     3. Follows the 'First Argument Group' convention, so that you can either
        specify all of (an array for 'k', an array for 'chi', an optional output Group)
        OR pass a group with 'k' and 'chi' as the first argument
    """
    k, chi, group = parse_group_args(k,
                                     members=('k', 'chi'),
                                     defaults=(chi, ),
                                     group=group,
                                     fcn_name='estimate_noise')

    # save _sys.xafsGroup -- we want to NOT write to it here!
    savgroup = set_xafsGroup(None, _larch=_larch)
    tmpgroup = Group()
    rmax_out = min(10 * pi, rmax + 2)

    xftf(k,
         chi,
         kmin=kmin,
         kmax=kmax,
         rmax_out=rmax_out,
         kweight=kweight,
         dk=dk,
         dk2=dk2,
         kwindow=kwindow,
         nfft=nfft,
         kstep=kstep,
         group=tmpgroup,
         _larch=_larch)

    chir = tmpgroup.chir
    rstep = tmpgroup.r[1] - tmpgroup.r[0]

    irmin = int(0.01 + rmin / rstep)
    irmax = min(nfft / 2, int(1.01 + rmax / rstep))
    highr = realimag(chir[irmin:irmax])

    # get average of window function value, scale eps_r scale by this
    # this is imperfect, but improves the result.
    kwin_ave = tmpgroup.kwin.sum() * kstep / (kmax - kmin)
    eps_r = sqrt((highr * highr).sum() / len(highr)) / kwin_ave

    # use Parseval's theorem to convert epsilon_r to epsilon_k,
    # compensating for kweight
    w = 2 * kweight + 1
    scale = sqrt((2 * pi * w) / (kstep * (kmax**w - kmin**w)))
    eps_k = scale * eps_r

    # do reverse FT to get chiq array
    xftr(tmpgroup.r,
         tmpgroup.chir,
         group=tmpgroup,
         rmin=0.5,
         rmax=9.5,
         dr=1.0,
         window='parzen',
         nfft=nfft,
         kstep=kstep,
         _larch=_larch)

    # sets kmax_suggest to the largest k value for which
    # | chi(q) / k**kweight| > epsilon_k
    iq0 = index_of(tmpgroup.q, (kmax + kmin) / 2.0)
    tst = tmpgroup.chiq_mag[iq0:] / (tmpgroup.q[iq0:])**kweight
    kmax_suggest = tmpgroup.q[iq0 + where(tst < eps_k)[0][0]]

    # restore original _sys.xafsGroup, set output variables
    _larch.symtable._sys.xafsGroup = savgroup
    group = set_xafsGroup(group, _larch=_larch)
    group.epsilon_k = eps_k
    group.epsilon_r = eps_r
    group.kmax_suggest = kmax_suggest
Example #17
        #   save diagrams
        #   save all as athena project
        groups = []
        for time_stmp, data_ln in zip(group_vals['time'], group_vals['data']):
            new_group = larch.Group()
            new_group.mu = data_ln
            new_group.energy = group_vals['energy']
            # run autobk on the new Group, including a larch Interpreter....
            # note that this expects 'energy' and 'mu' to be in the group, and will
            # write data for 'k', 'chi', 'kwin', 'e0', ... into the group
            autobk(new_group, rbkg=1.0, kweight=2, _larch=my_larch)

            # Fourier transform to R space, again passing in a Group (here,
            # 'k' and 'chi' are expected, and writing out 'r', 'chir_mag',
            # and so on)
            xftf(new_group, kmin=2, kmax=15, dk=3, kweight=2, _larch=my_larch)

            new_group.label = f"Reading at {time_stmp}"

            # add group to list
            groups.append(new_group)

            # plot and save each file in group
            basic_plot(new_group, save_dir)

            # save energy v normalised mu
            save_e_nmu(new_group, save_dir)
        # merge groups
        merged_group = merge_groups(groups)
        merged_group.label = "merged"
        autobk(merged_group, rbkg=1.0, kweight=2, _larch=my_larch)
Example #18
    def read(self,
             filename=None,
             match=None,
             do_preedge=True,
             do_bkg=True,
             do_fft=True,
             use_hashkey=False):
        """
        read Athena project to group of groups, one for each Athena dataset
        in the project file.  This supports both gzipped and unzipped files
        and old-style perl-like project files and new-style JSON project files

        Arguments:
            filename (string): name of Athena Project file
            match (string): pattern to use to limit imported groups (see Note 1)
            do_preedge (bool): whether to do pre-edge subtraction [True]
            do_bkg (bool): whether to do XAFS background subtraction [True]
            do_fft (bool): whether to do XAFS Fast Fourier transform [True]
            use_hashkey (bool): whether to use Athena's hash key as the
                           group name instead of the Athena label [False]
        Returns:
            None, fills in attributes `header`, `journal`, `filename`, `groups`

        Notes:
            1. To limit the imported groups, use the pattern in `match`,
               using '*' to match 'all', '?' to match any single character,
               or [sequence] to match any of a sequence of letters.  The match
               will always be insensitive to case.
            2. do_preedge, do_bkg, and do_fft will attempt to reproduce the
               pre-edge, background subtraction, and FFT from Athena by using
               the parameters saved in the project file.
            3. use_hashkey=True will name groups from the internal 5 character
               string used by Athena, instead of the group label.

        Example:
            1. read in all groups from a project file:
               cr_data = read_athena('My Cr Project.prj')

            2. read in only the "merged" data from a Project, and don't do FFT:
               zn_data = read_athena('Zn on Stuff.prj', match='*merge*', do_fft=False)
        """
        if filename is not None:
            self.filename = filename
        if not os.path.exists(self.filename):
            raise IOError("%s '%s': cannot find file" %
                          (ERR_MSG, self.filename))

        from larch_plugins.xafs import pre_edge, autobk, xftf

        if not os.path.exists(filename):
            raise IOError("file '%s' not found" % filename)

        text = _read_raw_athena(filename)
        # failed to read:
        if text is None:
            raise OSError(errval)
        if not _test_athena_text(text):
            raise ValueError("%s '%s': invalid Athena File" %
                             (ERR_MSG, filename))

        # decode JSON or Perl format
        data = None
        try:
            data = parse_jsonathena(text, self.filename)
        except ValueError:
            #  try as perl format
            # print("Not json-athena ", sys.exc_info())
            try:
                data = parse_perlathena(text, self.filename)
            except:
                # print("Not perl-athena ", sys.exc_info())
                pass

        if data is None:
            raise ValueError("cannot read file '%s' as Athena Project File" %
                             (self.filename))

        self.header = data.header
        self.journal = data.journal
        self.group_names = data.group_names

        for gname in data.group_names:
            oname = gname
            if match is not None:
                if not fnmatch(gname.lower(), match):
                    continue
            this = getattr(data, gname)
            if use_hashkey:
                oname = this.athena_id
            if (do_preedge or do_bkg) and (self._larch is not None):
                pars = this.bkg_params
                pre_edge(this,
                         e0=float(pars.e0),
                         pre1=float(pars.pre1),
                         pre2=float(pars.pre2),
                         norm1=float(pars.nor1),
                         norm2=float(pars.nor2),
                         nnorm=float(pars.nnorm),
                         make_flat=bool(pars.flatten),
                         _larch=self._larch)
                if do_bkg and hasattr(pars, 'rbkg'):
                    autobk(this,
                           _larch=self._larch,
                           e0=float(pars.e0),
                           rbkg=float(pars.rbkg),
                           kmin=float(pars.spl1),
                           kmax=float(pars.spl2),
                           kweight=float(pars.kw),
                           dk=float(pars.dk),
                           clamp_lo=float(pars.clamp1),
                           clamp_hi=float(pars.clamp2))
                    if do_fft:
                        pars = this.fft_params
                        kweight = 2
                        if hasattr(pars, 'kw'):
                            kweight = float(pars.kw)
                        xftf(this,
                             _larch=self._larch,
                             kmin=float(pars.kmin),
                             kmax=float(pars.kmax),
                             kweight=kweight,
                             window=pars.kwindow,
                             dk=float(pars.dk))
            self.groups[oname] = this
Example #19
def calc_FT(k, chi, kmin_, kmax_, kweight_):
    ft = larch_builtins._group(mylarch)
    xftf(k, chi, group=ft, kweight=kweight_, kmin=kmin_, kmax=kmax_, _larch=mylarch)
    return ft.r, ft.chir_mag, ft.chir_im
Example #20
def calc_exafs_SplineSmoothing(energy,ut_, E0, fit_s,fit_e,nor_aE0_s,nor_aE0_e,pre_type,degree,kweight,sf):
    delta_ut = []
    ut_wo_bk = np.array([])
    pre_edge = np.array([])
    post_edge = np.array([])
    i = 1
    while i+1 < len(ut_):
        delta_ut.append(((ut_[i+1]-ut_[i])/(energy[i+1]-energy[i])+(ut_[i]-ut_[i-1])/(energy[i]-energy[i-1]))/2)
        i += 1
    delta_ut.append(0.0)
    delta_ut.insert(0,0.0)
    #find nearest point
    startpoint = find_near(energy,fit_s)
    endpoint = find_near(energy,fit_e)
    print (startpoint)
    #print energy[startpoint:endpoint]
    if pre_type == 1:
        fit_r = np.polyfit(energy[startpoint:endpoint],ut_[startpoint:endpoint],1)
        print (fit_r)
        pre_edge = fit_r[0]*energy + fit_r[1]
        ut_wo_bk = ut_ - pre_edge
    elif pre_type == 2:
        fit_lin = np.polyfit(energy[startpoint:endpoint],ut_[startpoint:endpoint],1)
        def fit_f(x,C,D,Y):
            return Y + C/x**3 - D/x**4
        E_s_and_e = [energy[startpoint],energy[endpoint]]
        ut_s_and_e = [ut_[startpoint],ut_[endpoint]]
        X = np.vstack([E_s_and_e ,np.ones(len(E_s_and_e))]).T
        DAT = [energy[startpoint]**4*(ut_[startpoint]-fit_lin[1]),energy[endpoint]**4*(ut_[endpoint]-fit_lin[1])]
        c, d = linalg.lstsq(X,DAT)[0]
        opt, pconv = optim.curve_fit(fit_f,energy[startpoint:endpoint],ut_[startpoint:endpoint],p0=[c,d,fit_lin[1]])
        pre_edge = fit_f(energy,opt[0],opt[1],opt[2])
        ut_wo_bk = ut_ - pre_edge
    elif pre_type == 0:
        pre_edge = np.average(ut_[find_near(energy,fit_s):find_near(energy,fit_e)])
        ut_wo_bk = ut_ - pre_edge
    boundary = [find_near(energy,nor_aE0_s),find_near(energy,nor_aE0_e)]
    norm = np.average(ut_wo_bk[find_near(energy,E0+30.0):find_near(energy,E0+80.0)])
    k = np.array([])
    for e in energy:
        if E0 > e:
            k = np.append(k,(-1.0)*math.sqrt(0.2626*abs(e-E0)))
        else:
            k = np.append(k,math.sqrt(0.2626*abs(e-E0)))
    num = find_near(energy,E0)
    if k[num] < 0:
        num += 1
    array_weight = None
    if kweight != 0:
        array_weight = k[num:boundary[1]+1]**kweight
    #knots = []
    #if num_of_knots == 0:
    #    pass
    #else:
    #    j = 1
    #    delta_k = k[-1]/(num_of_knots+1)
    #    print delta_k
    #    while j < num_of_knots+1:
    #        knots.append(k[find_near(k,delta_k*j)])
    #        j += 1
    #knots = [k[find_near(k,2.0)],k[find_near(k,4.0)],k[find_near(k,6)],k[find_near(k,8.0)],k[find_near(k,10.0)],k[find_near(k,12.0)]]
    #knots_e = np.array(knots)**2/0.2626 + E0
    #print knots
    spline = interp.UnivariateSpline(k[num:boundary[1]+1],ut_wo_bk[num:boundary[1]+1]/norm,w = k[num:boundary[1]+1]**kweight, k=degree)
    spline_e = interp.UnivariateSpline(energy[num:boundary[1]+1],ut_wo_bk[num:boundary[1]+1]/norm, w = k[num:boundary[1]+1]**kweight, k=degree)
    spline.set_smoothing_factor(sf)
    post_edge = np.append(ut_wo_bk[:boundary[0]]/norm, spline(k[boundary[0]:boundary[1]+1]))
    post_edge = np.append(post_edge, ut_wo_bk[boundary[1]+1:]/norm)
    chi = ut_wo_bk/norm - post_edge
    bkg = pre_edge + post_edge*norm
    k_interp = np.array([])
    k_interp = np.append(k_interp,0.0)
    while k_interp[-1]-0.05 < k[-1]:
        k_interp = np.append(k_interp,k_interp[-1]+0.05)
    print (len(k_interp))
    chi_interp = np.interp(k_interp,k[num:],chi[num:])
    ft = larch_builtins._group(mylarch)
    #tft = larch_builtins._group(mylarch)
    #chi_q = larch_builtins._group(mylarch)
    xftf(k_interp,chi_interp,group=ft,kweight=3,kmin=3.0,kmax=12.0,_larch=mylarch)
    post_edge_ = np.append(ut_wo_bk[0:num]/norm,spline(k[num:]))
    return bkg, pre_edge, post_edge_*norm+pre_edge, chi_interp, k_interp, ft.r, ft.chir_mag, ft.chir_im, spline.get_knots()
Example #21
def estimate_noise(k, chi=None, group=None, rmin=15.0, rmax=30.0,
                   kweight=1, kmin=0, kmax=20, dk=4, dk2=None, kstep=0.05,
                   kwindow='kaiser', nfft=2048, _larch=None, **kws):
    """
    estimate noise levels in EXAFS spectrum and estimate highest k
    where data is above the noise level
    Parameters:
    -----------
      k:        1-d array of photo-electron wavenumber in Ang^-1 (or group)
      chi:      1-d array of chi
      group:    output Group  [see Note below]
      rmin:     minimum R value for high-R region of chi(R)
      rmax:     maximum R value for high-R region of chi(R)
      kweight:  exponent for weighting spectra by k**kweight [1]
      kmin:     starting k for FT Window [0]
      kmax:     ending k for FT Window  [20]
      dk:       tapering parameter for FT Window [4]
      dk2:      second tapering parameter for FT Window [None]
      kstep:    value to use for delta_k ( Ang^-1) [0.05]
      kwindow:  name of window type ['kaiser']
      nfft:     value to use for N_fft [2048].

    Returns:
    ---------
      None   -- outputs are written to supplied group.  Values (scalars) written
      to output group:
        epsilon_k     estimated noise in chi(k)
        epsilon_r     estimated noise in chi(R)
        kmax_suggest  highest estimated k value where |chi(k)| > epsilon_k

    Notes:
    -------

     1. This method uses the high-R portion of chi(R) as a measure of the noise
        level in the chi(R) data and uses Parseval's theorem to convert this noise
        level to that in chi(k).  This method implicitly assumes that there is no
        signal in the high-R portion of the spectrum, and that the noise in the
        spectrum s "white" (independent of R) .  Each of these assumptions can be
        questioned.
     2. The estimate for 'kmax_suggest' has a tendency to be fair but pessimistic
        in how far out the chi(k) data goes before being dominated by noise.
     3. Follows the 'First Argument Group' convention, so that you can either
        specify all of (an array for 'k', an array for 'chi', an optional output Group)
        OR pass a group with 'k' and 'chi' as the first argument
    """
    k, chi, group = parse_group_args(k, members=('k', 'chi'),
                                     defaults=(chi,), group=group,
                                     fcn_name='estimate_noise')



    # save _sys.xafsGroup -- we want to NOT write to it here!
    savgroup = set_xafsGroup(None, _larch=_larch)
    tmpgroup = Group()
    rmax_out = min(10*pi, rmax+2)

    xftf(k, chi, kmin=kmin, kmax=kmax, rmax_out=rmax_out,
         kweight=kweight, dk=dk, dk2=dk2, kwindow=kwindow,
         nfft=nfft, kstep=kstep, group=tmpgroup, _larch=_larch)

    chir  = tmpgroup.chir
    rstep = tmpgroup.r[1] - tmpgroup.r[0]

    irmin = int(0.01 + rmin/rstep)
    irmax = min(nfft/2,  int(1.01 + rmax/rstep))
    highr = realimag(chir[irmin:irmax])

    # get average of window function value, scale eps_r scale by this
    # this is imperfect, but improves the result.
    kwin_ave = tmpgroup.kwin.sum()*kstep/(kmax-kmin)
    eps_r = sqrt((highr*highr).sum() / len(highr)) / kwin_ave

    # use Parseval's theorem to convert epsilon_r to epsilon_k,
    # compensating for kweight
    w = 2 * kweight + 1
    scale = sqrt((2*pi*w)/(kstep*(kmax**w - kmin**w)))
    eps_k = scale*eps_r

    # do reverse FT to get chiq array
    xftr(tmpgroup.r, tmpgroup.chir, group=tmpgroup, rmin=0.5, rmax=9.5,
         dr=1.0, window='parzen', nfft=nfft, kstep=kstep, _larch=_larch)

    # sets kmax_suggest to the largest k value for which
    # | chi(q) / k**kweight| > epsilon_k
    iq0 = index_of(tmpgroup.q, (kmax+kmin)/2.0)
    tst = tmpgroup.chiq_mag[iq0:] / ( tmpgroup.q[iq0:])**kweight
    kmax_suggest = tmpgroup.q[iq0 + where(tst < eps_k)[0][0]]

    # restore original _sys.xafsGroup, set output variables
    _larch.symtable._sys.xafsGroup = savgroup
    group = set_xafsGroup(group, _larch=_larch)
    group.epsilon_k = eps_k
    group.epsilon_r = eps_r
    group.kmax_suggest = kmax_suggest
Example #22
    def read(self, filename=None, match=None, do_preedge=True, do_bkg=True,
             do_fft=True, use_hashkey=False):
        """
        read Athena project to group of groups, one for each Athena dataset
        in the project file.  This supports both gzipped and unzipped files
        and old-style perl-like project files and new-style JSON project files

        Arguments:
            filename (string): name of Athena Project file
            match (string): pattern to use to limit imported groups (see Note 1)
            do_preedge (bool): whether to do pre-edge subtraction [True]
            do_bkg (bool): whether to do XAFS background subtraction [True]
            do_fft (bool): whether to do XAFS Fast Fourier transform [True]
            use_hashkey (bool): whether to use Athena's hash key as the
                           group name instead of the Athena label [False]
        Returns:
            None, fills in attributes `header`, `journal`, `filename`, `groups`

        Notes:
            1. To limit the imported groups, use the pattern in `match`,
               using '*' to match 'all', '?' to match any single character,
               or [sequence] to match any of a sequence of letters.  The match
               will always be insensitive to case.
            2. do_preedge, do_bkg, and do_fft will attempt to reproduce the
               pre-edge, background subtraction, and FFT from Athena by using
               the parameters saved in the project file.
            3. use_hashkey=True will name groups from the internal 5 character
               string used by Athena, instead of the group label.

        Example:
            1. read in all groups from a project file:
               cr_data = read_athena('My Cr Project.prj')

            2. read in only the "merged" data from a Project, and don't do FFT:
               zn_data = read_athena('Zn on Stuff.prj', match='*merge*', do_fft=False)
        """
        if filename is not None:
            self.filename = filename
        if not os.path.exists(self.filename):
            raise IOError("%s '%s': cannot find file" % (ERR_MSG, self.filename))

        from larch_plugins.xafs import pre_edge, autobk, xftf


        if not os.path.exists(filename):
            raise IOError("file '%s' not found" % filename)

        text = _read_raw_athena(filename)
        # failed to read:
        if text is None:
            raise OSError(errval)
        if not _test_athena_text(text):
            raise ValueError("%s '%s': invalid Athena File" % (ERR_MSG, filename))

        # decode JSON or Perl format
        data = None
        try:
            data = parse_jsonathena(text, self.filename)
        except ValueError:
            #  try as perl format
            # print("Not json-athena ", sys.exc_info())
            try:
                data = parse_perlathena(text, self.filename)
            except:
                # print("Not perl-athena ", sys.exc_info())
                pass

        if data is None:
            raise ValueError("cannot read file '%s' as Athena Project File" % (self.filename))


        self.header = data.header
        self.journal = data.journal
        self.group_names = data.group_names

        for gname in data.group_names:
            oname = gname
            if match is not None:
                if not fnmatch(gname.lower(), match):
                    continue
            this = getattr(data, gname)
            if use_hashkey:
                oname = this.athena_id
            if (do_preedge or do_bkg) and (self._larch is not None):
                pars = this.bkg_params
                pre_edge(this,  e0=float(pars.e0),
                         pre1=float(pars.pre1), pre2=float(pars.pre2),
                         norm1=float(pars.nor1), norm2=float(pars.nor2),
                         nnorm=float(pars.nnorm),
                         make_flat=bool(pars.flatten), _larch=self._larch)
                if do_bkg and hasattr(pars, 'rbkg'):
                    autobk(this, _larch=self._larch, e0=float(pars.e0),
                           rbkg=float(pars.rbkg), kmin=float(pars.spl1),
                           kmax=float(pars.spl2), kweight=float(pars.kw),
                           dk=float(pars.dk), clamp_lo=float(pars.clamp1),
                           clamp_hi=float(pars.clamp2))
                    if do_fft:
                        pars = this.fft_params
                        kweight=2
                        if hasattr(pars, 'kw'):
                            kweight = float(pars.kw)
                        xftf(this, _larch=self._larch, kmin=float(pars.kmin),
                             kmax=float(pars.kmax), kweight=kweight,
                             window=pars.kwindow, dk=float(pars.dk))
            self.groups[oname] = this