def init_larch(self):
    """Create the shared Larch interpreter on first call, register the wx
    app and this frame in the interpreter's symbol table, and mark the
    status bar as ready.

    Fix: removed the unused ``t0 = time.time()`` assignment — leftover
    timing instrumentation whose value was never read.
    """
    if self.larch is None:
        self.larch = Interpreter()
    # expose the wx application and parent frame to larch plugins
    self.larch.symtable.set_symbol('_sys.wx.wxapp', wx.GetApp())
    self.larch.symtable.set_symbol('_sys.wx.parent', self)
    self.SetStatusText('ready')
def init_larch(self):
    """Lazily build the Larch interpreter and register wx hooks.

    The wx-app and parent-frame symbols are only written when absent, so
    repeated calls never clobber values set by an earlier initializer.
    """
    if self.larch is None:
        self.larch = Interpreter()
    table = self.larch.symtable
    if not table.has_symbol('_sys.wx.wxapp'):
        table.set_symbol('_sys.wx.wxapp', wx.GetApp())
    if not table.has_symbol('_sys.wx.parent'):
        table.set_symbol('_sys.wx.parent', self)
def init_larch(self):
    """Create a fresh Larch interpreter, register wx hooks, create the
    current-scan group inside the interpreter, and reset the UI title.

    Fix: removed the unused ``t0 = time.time()`` assignment — leftover
    timing instrumentation whose value was never read.
    """
    # NOTE(review): 'inputhook' is never referenced below — presumably the
    # import itself installs a wx input hook; confirm before removing.
    from larch.wxlib import inputhook
    self.larch = Interpreter()
    self.larch.symtable.set_symbol('_sys.wx.wxapp', wx.GetApp())
    self.larch.symtable.set_symbol('_sys.wx.parent', self)
    # create the scan group inside the interpreter's namespace
    self.larch('%s = group(filename="%s")' % (SCANGROUP, CURSCAN))
    self.SetStatusText('ready')
    self.datagroups = self.larch.symtable
    self.title.SetLabel('')
def __init__(self):
    """Test harness: a Larch interpreter with plotting disabled and
    stdout redirected via set_stdout()."""
    self._larch = Interpreter()
    self.input = self._larch.input
    self.symtable = self._larch.symtable
    self.symtable.set_symbol('testdir', os.getcwd())
    self.symtable.set_symbol('_plotter.no_plotting', True)
    self.symtable.set_symbol('_plotter.get_display', nullfunction)
    self.set_stdout()
def __init__(self, folder=None, _larch=None, **kws):
    """Wrap a compiled FEFFPATH structure as a Larch Group.

    Parameters
    ----------
    folder : str, optional
        Currently unused here; kept for interface compatibility.
    _larch : Interpreter, optional
        Existing Larch interpreter to reuse.

    Fix: the ``_larch`` argument was accepted but silently ignored and a
    new Interpreter was always created; it is now honored when supplied,
    matching the convention used by the other wrappers in this codebase.
    """
    kwargs = dict(name='FeffPath wrapper')
    kwargs.update(kws)
    Group.__init__(self, **kwargs)
    self._larch = _larch if _larch is not None else Interpreter()
    self.wrapper = feffpathwrapper.FEFFPATH()
    feffpathwrapper.create_path(self.wrapper)
    self.wrapper.phpad = ''
    # per-atom bookkeeping filled in as the path is populated
    self.ipot = []
    self.rat = []
    self.geom = []
def __init__(self):
    """Test harness: interpreter with an InputText front end, every
    plotter entry point stubbed out, and writer/stdout sent to a file."""
    self._larch = Interpreter()
    self.input = InputText(prompt='test>', _larch=self._larch)
    self.symtable = self._larch.symtable
    # stub out all plotting symbols so tests never open a display
    for plotfunc in ('newplot', 'plot', 'oplot', 'imshow',
                     'plot_text', 'plot_arrow', 'xrfplot'):
        self.symtable.set_symbol('_plotter.%s' % plotfunc, nullfunction)
    self._larch.writer = sys.stdout = open('_stdout_', 'w')
def __init__(self, wxparent=None, _larch=None):
    """Controller state for the fitting GUI: holds the interpreter,
    the ordered map of loaded file groups, and fit bookkeeping."""
    self.wxparent = wxparent
    # reuse a caller-supplied interpreter, otherwise build a fresh one
    self.larch = Interpreter() if _larch is None else _larch
    self.filelist = None
    self.file_groups = OrderedDict()
    self.fit_opts = {}
    self.group = None
    self.groupname = None
    self.report_frame = None
    self.symtable = self.larch.symtable
def __init__(self):
    """Test harness: interpreter with every plotter entry point stubbed
    out and stdout redirected via set_stdout()."""
    self._larch = Interpreter()
    self.input = self._larch.input
    self.symtable = self._larch.symtable
    self.symtable.set_symbol('testdir', os.getcwd())
    # stub out all plotting symbols so tests never open a display
    for plotfunc in ('newplot', 'plot', 'oplot', 'imshow',
                     'plot_text', 'plot_arrow', 'xrfplot'):
        self.symtable.set_symbol('_plotter.%s' % plotfunc, nullfunction)
    self.set_stdout()
def init_larch(self):
    """Lazily build the Larch interpreter, register wx hooks (without
    clobbering existing symbols), and try to set the frame icon.

    Fix: the bare ``except:`` around SetIcon would also swallow
    ``KeyboardInterrupt``/``SystemExit``; narrowed to ``except Exception``.
    """
    if self.larch is None:
        self.larch = Interpreter()
    symtab = self.larch.symtable
    if not symtab.has_symbol('_sys.wx.wxapp'):
        symtab.set_symbol('_sys.wx.wxapp', wx.GetApp())
    if not symtab.has_symbol('_sys.wx.parent'):
        symtab.set_symbol('_sys.wx.parent', self)
    fico = os.path.join(site_config.larchdir, 'icons', ICON_FILE)
    try:
        self.SetIcon(wx.Icon(fico, wx.BITMAP_TYPE_ICO))
    except Exception:
        # icon is purely cosmetic; a missing/bad icon file must not
        # prevent startup
        pass
def setUp(self):
    """Per-test fixture: fresh interpreter with ``x = 1`` predefined and
    interpreter output captured to a temporary file.

    Fix: removed a duplicated ``self.outfile = '_stdout_'`` assignment.
    """
    self.interp = Interpreter()
    self.symtable = self.interp.symtable
    self.interp.eval('x = 1')
    self.outfile = '_stdout_'
    self.interp.writer = NamedTemporaryFile('w', delete=False,
                                            prefix='larchtest')
    if not HAS_NUMPY:
        # without numpy, make 'arange' resolve to the builtin range
        self.interp("arange = range")
def __init__(self, energy=None, mu=None, z=None, edge='K',
             mback_kws=None, _larch=None, **kws):
    """diffKK group: stores the input spectrum and parameters for a
    differential Kramers-Kronig transform.

    Parameters
    ----------
    energy, mu : arrays, optional
        Energy grid and absorption data.
    z : int, optional
        Atomic number of the absorber.
    edge : str
        Absorption edge label (default 'K').
    mback_kws : dict, optional
        Extra keyword arguments forwarded to mback.
    _larch : Interpreter, optional
        Existing Larch interpreter to reuse.

    Fix: ``_larch == None`` replaced with the idiomatic identity test
    ``_larch is None`` (None is a singleton; PEP 8).
    """
    kwargs = dict(name='diffKK')
    kwargs.update(kws)
    Group.__init__(self, **kwargs)
    self.energy = energy
    self.mu = mu
    self.z = z
    self.edge = edge
    self.mback_kws = mback_kws
    self._larch = Interpreter() if _larch is None else _larch
def __init__(self, wxparent=None, _larch=None):
    """Controller state for the processing GUI: interpreter, loaded file
    groups, processing/fit options, and wx hooks in the symbol table."""
    self.wxparent = wxparent
    # reuse a caller-supplied interpreter, otherwise build a fresh one
    self.larch = Interpreter() if _larch is None else _larch
    self.filelist = None
    self.file_groups = {}
    self.proc_opts = {}
    self.fit_opts = {}
    self.group = None
    self.groupname = None
    self.report_frame = None
    self.symtable = self.larch.symtable
    # expose the wx application and parent window to larch plugins
    for sym, val in (('_sys.wx.wxapp', wx.GetApp()),
                     ('_sys.wx.parent', self)):
        self.symtable.set_symbol(sym, val)
def initialize_group(self): """ initalize larch group """ # Create 3 empty group self.mylarch = Interpreter() self.g = read_ascii(self.data_path) self.best = read_ascii(self.data_path) self.sumgroup = read_ascii(self.data_path) # Check if try: self.g.chi except AttributeError: autobk(self.g, rbkg=rbkg, kweight=bkgkw, kmax=bkgkmax, _larch=self.mylarch) autobk(self.best, rbkg=rbkg, _larch=self.mylarch) autobk(self.sumgroup, rbkg=rbkg, _larch=self.mylarch)
def __init__(self, host='localhost', port=4966, logRequests=False,
             allow_none=True, keepalive_time=3 * 24 * 3600):
    """XML-RPC server wrapping a Larch interpreter.

    Builds the interpreter (with this object as its writer so output is
    buffered in ``out_buffer``), records client bookkeeping in
    ``_sys.client``, starts the XML-RPC service, and prepares an
    activity-watchdog thread and SIGINT handler.

    Parameters
    ----------
    host, port : server bind address (default localhost:4966)
    logRequests, allow_none : forwarded to SimpleXMLRPCServer
    keepalive_time : seconds of client inactivity tolerated (default 3 days)
    """
    self.out_buffer = []
    # interpreter writes through this object (it acts as a file-like writer)
    self.larch = Interpreter(writer=self)
    self.larch.input.prompt = ''
    self.larch.input.prompt2 = ''
    self.larch.run_init_scripts()
    # create bookkeeping groups inside the interpreter's namespace
    self.larch('_sys.client = group(keepalive_time=%f)' % keepalive_time)
    self.larch('_sys.wx = group(wxapp=None)')
    _sys = self.larch.symtable._sys
    _sys.color_exceptions = False
    # NOTE(review): 'time()' called as a function — presumably
    # 'from time import time' at module level; confirm.
    _sys.client.last_event = int(time())
    _sys.client.pid_server = int(os.getpid())
    _sys.client.app = 'unknown'
    _sys.client.pid = 0
    _sys.client.user = '******'
    _sys.client.machine = 'unknown'
    self.client = self.larch.symtable._sys.client
    # start the XML-RPC machinery and register the public API
    SimpleXMLRPCServer.__init__(self, (host, port),
                                logRequests=logRequests,
                                allow_none=allow_none)
    self.register_introspection_functions()
    self.register_function(self.larch_exec, 'larch')
    for method in ('ls', 'chdir', 'cd', 'cwd', 'shutdown',
                   'set_keepalive_time', 'set_client_info',
                   'get_client_info', 'get_data', 'get_rawdata',
                   'get_messages', 'len_messages'):
        self.register_function(getattr(self, method), method)
    # sys.stdout = self
    self.finished = False
    signal.signal(signal.SIGINT, self.signal_handler)
    # watchdog thread; presumably started by the caller or serve loop —
    # it is only created (not started) here
    self.activity_thread = Thread(target=self.check_activity)
def __init__(self, folder=None, _larch=None, **kws):
    """Set up a Feff85exafs unit test rooted at *folder*.

    Locates the test folder (trying ``tests/<folder>`` as a fallback),
    records paths for the test run and Feff log, and initializes
    comparison tolerances and flags.

    Fixes:
    - ``print colored(...)`` (Python-2-only statement syntax) replaced
      with the parenthesized call form, valid under both Python 2 and 3.
    - the ``_larch`` argument was accepted but ignored; it is now reused
      when supplied, consistent with the other Group wrappers here.
    """
    kwargs = dict(name='Feff85exafs unit test: %s' % folder)
    kwargs.update(kws)
    Group.__init__(self, **kwargs)
    self._larch = _larch if _larch is not None else Interpreter()
    self.doplot = True
    self.doscf = False     # True = use self-consistency
    self.verbose = True    # True = print Feff's screen messages and other screen messages
    self.feffran = False   # True = Feff calculation has been run
    self.count = 0
    self.feffcount = 0
    self.datacount = 0
    self.failed = list()
    if folder[-1] == '/':
        folder = folder[:-1]  # strip trailing /
    self.folder = folder
    if not isdir(folder):
        folder = join('tests', folder)
    if not isdir(folder):
        print(colored(folder + " isn't one of the available tests",
                      'magenta', attrs=['bold']))
        return None
    self.path = realpath(folder)
    self.testrun = realpath(join(self.path, 'testrun'))
    self.fefflog = realpath(join(self.path, 'testrun', 'f85e.log'))
    self.__testpaths()
    self.repotop = getcwd()
    if not self.repotop.endswith('feff85exafs'):
        self.repotop = realpath(join('..'))
    # the f85e shell script emulates the behavior of the monolithic Feff application
    self.eps5 = 0.00001
    self.eps4 = 0.0001
    self.eps3 = 0.001
    self.epsilon = self.eps4
    self.epsfit = self.eps3
    self.firstshell = False
    self.fittest = None
    self.wrapper_available = wrapper_available
    if wrapper_available:
        self.sp = scatpath()
def __init__(self, feffinp=None, verbose=True, repo=None, _larch=None, **kws):
    """Feff runner group: holds the feff.inp location and run options.

    Parameters
    ----------
    feffinp : str, optional
        Path to the feff.inp file to run.
    verbose : bool
        Echo Feff's screen output (default True).
    repo : str, optional
        Repository location used to resolve executables.
    _larch : Interpreter, optional
        Existing Larch interpreter to reuse.

    Fix: ``_larch == None`` replaced with the idiomatic identity test
    ``_larch is None`` (None is a singleton; PEP 8).
    """
    kwargs = dict(name='Feff runner')
    kwargs.update(kws)
    Group.__init__(self, **kwargs)
    self._larch = Interpreter() if _larch is None else _larch
    self.feffinp = feffinp
    self.verbose = verbose
    self.mpse = False
    self.repo = repo
    self.resolved = None
    self.threshold = []
    self.chargetransfer = []
def __init__(self, wxparent=None, writer=None, _larch=None, prompt=None,
             historyfile=None, output=None, input=None):
    """wx shell wrapper around a Larch interpreter.

    Creates (or reuses) the interpreter, loads the wx plugin, registers
    wx hooks in the symbol table, configures the output text style, and
    starts a periodic flush timer on *wxparent*.
    """
    self._larch = _larch
    self.textstyle = None
    if _larch is None:
        # this object acts as the interpreter's writer (file-like)
        self._larch = Interpreter(historyfile=historyfile, writer=self)
        self._larch.run_init_scripts()
    self.symtable = self._larch.symtable
    self.prompt = prompt
    self.input = input
    self.output = output
    self.set_textstyle(mode='text')
    self._larch("_sys.display.colors['text2'] = {'color': 'blue'}",
                add_history=False)
    self._larch.add_plugin('wx', wxparent=wxparent)
    self.symtable.set_symbol('_builtin.force_wxupdate', False)
    self.symtable.set_symbol('_sys.wx.force_wxupdate', False)
    self.symtable.set_symbol('_sys.wx.wxapp', output)
    self.symtable.set_symbol('_sys.wx.parent', wx.GetApp().GetTopWindow())
    if self.output is not None:
        # inherit background/font from the output widget's default style
        style = self.output.GetDefaultStyle()
        bgcol = style.GetBackgroundColour()
        sfont = style.GetFont()
        self.textstyle = wx.TextAttr('black', bgcol, sfont)
    self.SetPrompt(True)
    # flush buffered interpreter output to the widget every 500 ms
    self.flush_timer = wx.Timer(wxparent)
    self.needs_flush = True
    wxparent.Bind(wx.EVT_TIMER, self.onFlushTimer, self.flush_timer)
    self.flush_timer.Start(500)
def __init__(self, _larch=None, **kws):
    """Feff test-framework group: known materials, test kinds, and
    reporting/compatibility flags.

    Fix: the ``_larch`` argument was accepted but silently ignored and a
    new Interpreter was always created; it is now honored when supplied.
    """
    kwargs = dict(name='Feff test framework')
    kwargs.update(kws)
    Group.__init__(self, **kwargs)
    self._larch = _larch if _larch is not None else Interpreter()
    self.materials = ("Copper", "NiO", "FeS2", "UO2", "BaZrO3",
                      "bromoadamantane", "uranyl")
    self.tests = ('scf', 'iorder', 'mpse')
    self.__material = None
    self.__test = None
    self.testmodule = None
    self.json = None
    self.mustache = None
    self.dryrun = False
    self.dopathfinder = False
    # tabulate styles: 'plain', 'simple', 'grid', 'fancy_grid', 'pipe',
    # 'orgtbl', 'rst', 'mediawiki', 'html', 'latex', 'latex_booktabs'
    self.tableformat = 'pipe'
    ## some things to make the cohabitation with f85ut happy
    self.doplot = False
    self.verbose = False
    self.firstshell = False
    self.folder = None
    self.path = None
class XASDataSet:
    """Container for one XAS data set, thinly wrapping Larch's XAFS
    processing (pre_edge, autobk, xftf) on an internal larch group.

    A single plugin-free Interpreter is shared by all instances.
    """
    # NOTE(review): mutable class attribute — instances that never pass
    # ``md`` share this one dict; confirm that is intended.
    _md = {}
    _filename = ''
    _larch = Interpreter(with_plugins=False)

    def __init__(self, name=None, md=None, energy=None, mu=None,
                 filename=None, datatype=None, *args, **kwargs):
        """Build the data set; if both mu and energy are given, run the
        full normalize/derivative/chi-extraction pipeline immediately."""
        self.larch = xafsgroup()
        if md is not None:
            self._md = md
            if 'e0' in md:
                self.larch.e0 = int(md['e0'])
            elif 'edge' in md:
                # pull e0 out of a string like "K (7112)"
                edge = md['edge']
                self.larch.e0 = int(edge[edge.find('(') + 1:edge.find(')')])
        if mu is not None:
            self.larch.mu = np.array(mu)
        if energy is not None:
            self.larch.energy = np.array(energy)
        if filename is not None:
            self._filename = filename
        if name is not None:
            self.name = name
        if datatype is not None:
            self.datatype = datatype
        if mu is not None and energy is not None:
            self.clamp_hi = 0
            self.clamp_lo = 0
            self.normalize()
            self.deriv()
            self.extract_chi()
            self.kmin_ft = 3
            self.kmax_ft = self.kmax

    def update_larch(self):
        """Push the current mu/energy back onto the larch group."""
        if self.mu is not None:
            self.larch.mu = np.array(self.mu)
        if self.energy is not None:
            self.larch.energy = np.array(self.energy)

    def deriv(self):
        """Compute d(mu)/d(energy) on midpoints of the energy grid."""
        mu_deriv = np.diff(np.transpose(self.mu.values)) / np.diff(self.energy)
        self.mu_deriv = mu_deriv[0]
        self.energy_deriv = (self.energy[1:] + self.energy[:-1]) / 2

    def flatten(self):
        """Flatten the normalized spectrum above e0 by removing the
        post-edge/pre-edge difference line (step function at e0)."""
        step_index = int(np.argwhere(self.energy > self.e0)[0])
        zeros = np.zeros(step_index)
        ones = np.ones(self.energy.shape[0] - step_index)
        step = np.concatenate((zeros, ones), axis=0)
        diffline = (self.post_edge - self.pre_edge) / self.edge_step
        self.flat = self.norm + step * (1 - diffline)

    def normalize(self):
        """Run Larch pre_edge with defaults and cache its results."""
        pre_edge(self.larch, group=self.larch, _larch=self._larch)
        self.energy = self.larch.energy
        self.mu = self.larch.mu
        self.norm = self.larch.norm
        self.new_ds = False
        self.pre1 = self.larch.pre_edge_details.pre1
        self.pre2 = self.larch.pre_edge_details.pre2
        self.norm1 = self.larch.pre_edge_details.norm1
        self.norm2 = self.larch.pre_edge_details.norm2
        self.e0 = self.larch.e0
        self.pre_edge = self.larch.pre_edge
        self.post_edge = self.larch.post_edge
        self.edge_step = self.larch.edge_step
        self.flatten()

    def normalize_force(self):
        """Re-run pre_edge forcing the previously cached parameters."""
        pre_edge(self.larch, group=self.larch, _larch=self._larch,
                 e0=self.e0, pre1=self.pre1, pre2=self.pre2,
                 norm1=self.norm1, norm2=self.norm2)
        self.norm = self.larch.norm
        self.e0 = self.larch.e0
        self.pre_edge = self.larch.pre_edge
        self.post_edge = self.larch.post_edge
        self.edge_step = self.larch.edge_step
        self.flatten()

    def extract_chi(self):
        """Run autobk with defaults and cache chi/bkg and the k range."""
        #print('chi reporting')
        autobk(self.larch, group=self.larch, _larch=self._larch)
        self.chi = self.larch.chi
        self.bkg = self.larch.bkg
        self.kmin = self.larch.autobk_details.kmin
        self.kmax = self.larch.autobk_details.kmax
        self.nclamp = 2
        self.rbkg = 1
        #self.kmin_ft = self.kmin

    def extract_chi_force(self):
        """Re-run autobk forcing the cached e0/kmin/kmax and clamps."""
        #print('chi force reporting')
        # autobk(self.larch, group=self.larch, _larch=self._larch,
        #        e0=self.e0, kmin=self.kmin, kmax=self.kmax)
        autobk(self.larch, group=self.larch, _larch=self._larch,
               e0=self.e0, kmin=self.kmin, kmax=self.kmax,
               nclamp=2, clamp_hi=10)
        self.k = self.larch.k
        self.chi = self.larch.chi
        self.bkg = self.larch.bkg

    def extract_ft(self):
        """Forward Fourier transform of chi(k) over [kmin_ft, kmax]."""
        #print('ft reporting')
        print(self.kmin_ft)
        xftf(self.larch, group=self.larch, _larch=self._larch,
             kmin=self.kmin_ft, kmax=self.kmax)
        self.r = self.larch.r
        self.chir = self.larch.chir
        self.chir_mag = self.larch.chir_mag
        # NOTE(review): im and re look swapped relative to the larch
        # attribute names here and in extract_ft_force — confirm whether
        # this is deliberate.
        self.chir_im = self.larch.chir_re
        self.chir_re = self.larch.chir_im
        #self.chir_pha = self.larch.chir_pha
        self.kmax_ft = self.kmax
        self.kwin = self.larch.kwin

    def extract_ft_force(self, window={}):
        """Re-run xftf over [kmin_ft, kmax_ft], optionally with explicit
        window parameters (keys: window_type, tapering, r_weight).

        NOTE(review): mutable default argument; harmless here since
        ``window`` is only read, never mutated.
        """
        #print('ft force reporting')
        if not window:
            xftf(self.larch, group=self.larch, _larch=self._larch,
                 kmin=self.kmin_ft, kmax=self.kmax_ft)
        else:
            window_type = window['window_type']
            tapering = window['tapering']
            r_weight = window['r_weight']
            print('setting window')
            xftf(self.larch, group=self.larch, _larch=self._larch,
                 kmin=self.kmin_ft, kmax=self.kmax_ft,
                 window=window_type, dk=tapering, rweight=r_weight)
        self.r = self.larch.r
        self.chir = self.larch.chir
        self.chir_mag = self.larch.chir_mag
        # NOTE(review): same apparent im/re swap as in extract_ft.
        self.chir_im = self.larch.chir_re
        self.chir_re = self.larch.chir_im
        #self.chir_pha = self.larch.chir_phas
        self.kwin = self.larch.kwin

    @property
    def md(self):
        """Metadata dict; setting it re-derives e0 from 'e0' or 'edge'."""
        return self._md

    @md.setter
    def md(self, md):
        self._md = md
        if 'e0' in md:
            self.larch.e0 = int(md['e0'])
            pass
        elif 'edge' in md:
            edge = md['edge']
            self.larch.e0 = int(edge[edge.find('(') + 1:edge.find(')')])

    @property
    def mu(self):
        """Absorption data as a single-column 'mu' DataFrame."""
        return self._mu

    @mu.setter
    def mu(self, mu):
        # accept either a pandas object (has .values) or a plain array
        if hasattr(mu, 'values'):
            values = mu.values
        else:
            values = mu
        self._mu = pd.DataFrame(values, columns=['mu'])
        self.larch.mu = self._mu

    @property
    def filename(self):
        """Source file name for this data set."""
        return self._filename

    @filename.setter
    def filename(self, filename):
        self._filename = filename
#!/usr/bin/env python ## Autobk (XAFS background subtraction) in pure Python, ## using Python code from Lxsarch. from larch import Interpreter from larch_plugins.xafs import pre_edge, autobk from larch_plugins.io import read_ascii # create plain interpreter, don't load all the plugins _larch = Interpreter(with_plugins=False) fname = '../xafsdata/cu_rt01.xmu' cu = read_ascii(fname, labels='energy mu i0', _larch=_larch) print( 'Read ASCII File:', cu) print( dir(cu)) pre_edge(cu, _larch=_larch) print( 'After pre-edge:') print( dir(cu)) autobk(cu, rbkg=1.0, kweight=1, _larch=_larch) print( 'After autobk:') print( dir(cu))
def init_larch(self):
    """Construct the Larch interpreter on first use; no-op afterwards."""
    if self.larch is not None:
        return
    self.larch = Interpreter()
import time import matplotlib as mpl #mpl.use('Agg') import matplotlib.pyplot as plt import sys import csv from pathlib import Path global mylarch global base # global front # global end # global intervalK # global Kmin mylarch = Interpreter() base = Path(os.getcwd()).parent.parent # front = os.path.join(base,"path_files/TcCl6/feff") # end = '.dat' # Larch has two types of files, from Larch which is the chik, and the experimential files def larch_init(CSV_sub, params): r""" Larch initialization for data analysis Inputs: CSV_sub (str): files location of the data files (CSV/XMU) params (dics): dicts contain all parameters """ global intervalK
if overwrite is False: _logger.info(f"overwrite is {overwrite} -> nothing to do!") return else: _fileExists = False if overwrite and _fileExists: os.remove(fileout) h5out = h5py.File(fileout, mode="a", track_order=True) create_ds_args = {"track_order": True} dicttoh5(adict, h5out, create_dataset_args=create_ds_args) h5out.close() _logger.info(f"Athena project converted to {fileout}") if __name__ == "__main__": # some tests while devel _curdir = os.path.dirname(os.path.realpath(__file__)) _exdir = os.path.join(os.path.dirname(os.path.dirname(_curdir)), "examples", "pca") fnroot = "cyanobacteria" atpfile = os.path.join(_exdir, f"{fnroot}.prj") if 0: from larch import Interpreter aprj = AthenaProject(_larch=Interpreter()) aprj.read(atpfile, do_bkg=False) # there is currently a bug in do_bkg! adict = aprj.as_dict() if 0: athena_to_hdf5(atpfile, fileout=f"{fnroot}.h5", overwrite=True) pass
def setUp(self):
    """Per-test fixture: fresh interpreter with stdout captured."""
    session = Interpreter()
    self.session = session
    self.symtable = session.symtable
    self.set_stdout()
# Demo: symbol-table change callbacks in a Larch interpreter.
from larch import Interpreter

linp = Interpreter()

def onVarChange(group=None, symbolname=None, value=None, **kws):
    """Callback invoked whenever a watched symbol is assigned."""
    print('var changed ', group, symbolname, value, kws)

linp('x = 100.0')
# watch 'x': each assignment below should trigger onVarChange
linp.symtable.add_callback('x', onVarChange)
linp.symtable.set_symbol('x', 30)
linp.symtable.set_symbol('x', 'a string')
linp('x = arange(7)')
Interpreter) from larch.xafs import (find_e0, pre_edge, autobk, xftf, xftr) from larch.io import create_athena import larch.utils.show as lus import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec from BMM.functions import etok, ktoe from BMM import user_ns as user_ns_module user_ns = vars(user_ns_module) from BMM.user_ns.bmm import BMMuser LARCH = Interpreter() class Pandrosus(): '''A thin wrapper around basic XAS data processing for individual data sets as implemented in Larch. The plotting capabilities of this class are very similar to the orange plot buttons in Athena. Attributes ---------- uid : str Databroker unique ID of the data set, used to fetch mu(E) from the database name : str Human-readable name of the data set
def deglitch(energy, mu, group, e_window='xas', sg_window_length=9,
             sg_polyorder=3, alpha=.025, max_glitches='Default',
             max_glitch_length=4, plot_res=False):
    """Routine to deglitch a XAS spectrum.

    This function deglitches points in XAS data through two-step fitting
    with Savitzky-Golay filter and outlier identification with
    generalized extreme student deviate test.

    This code requires the data group to have at least an energy and
    normalized absorption channel.

    Parameters
    ----------
    energy : array
        Array of the energies of the XAS scan
    mu : array
        Array of the absorption coefficient data
    group : Larch Group
        Larch Group to be modified by deglitching procedure
    e_window : {'xas', 'xanes', 'exafs', (float, float)}
        'xas' scans the full spectrum.
        'xanes' looks from the beginning up to the edge + 150eV.
        'exafs' looks at the edge + 150eV to the end.
        (float, float) provides start and end energies in eV for analysis
    sg_window_length : odd int, default: 9
        Window length to build Savitzky-Golay filter from normalized data
    sg_polyorder : int, default: 3
        Polynomial order to build Savitzky-Golay filter from normalized data
    alpha : float, default: .025
        Alpha value for generalized ESD test for outliers.
    max_glitches : int, default: len(data)//10
        Maximum number of outliers to remove.
    plot_res : bool, default: False
        Command to plot the final normalized residuals and a histogram
        of their distribution.

    Returns
    -------
    None
    """
    import numpy as np
    from scipy.interpolate import interp1d
    from scipy.signal import savgol_filter
    from larch_plugins.utils import group2dict
    from larch_plugins.xafs import find_e0
    from larch import Interpreter
    from copy import deepcopy

    session = Interpreter(with_plugins=False)
    # computing the energy window to perform the deglitch:
    e_val = 150  # energy limit to separate xanes from exafs [eV]
    e_windows = ['xas', 'xanes', 'exafs']
    if e_window in e_windows:
        if e_window == 'xas':
            e_window = [energy[0], energy[-1]]
        else:
            if 'e0' not in dir(group):
                e0 = find_e0(energy, mu=mu, group=group, _larch=session)
            else:
                e0 = getattr(group, 'e0')
            if e_window == 'xanes':
                e_window = [energy[0], e0+e_val]
            else:
                e_window = [e0+e_val, energy[-1]]
    index = np.where((energy >= e_window[0]) & (energy <= e_window[1]))
    index = index[0]

    # creating copies of original data
    mu_copy = np.copy(mu)    # interpolated values for posterior analysis will be inserted in this
    ener = np.copy(energy)   # copy of energy to create interp1d function without the potential glitches

    # not limited to start:end to ensure data at edges gets best possible fit
    sg_init = savgol_filter(mu, sg_window_length, sg_polyorder)
    # computing the difference between normalized spectrum and the savitsky-golay filter
    res1 = mu - sg_init
    # NOTE(review): roll_med/genesd/find_critval are presumably siblings
    # in this module — confirm.
    roll_mad1 = roll_med(abs(res1),
                         window=2*(sg_window_length+(max_glitch_length-1))+1,
                         edgemethod='calc')
    res_norm = res1 / roll_mad1
    # If the max is not set to an int, the max will be set to the default
    # of the length of the analyzed data//10
    if type(max_glitches) != int:
        max_glitches = len(res1)//10
    # finds outliers in residuals between data and Savitzky-Golay filter
    out1 = genesd(res_norm[index], max_glitches, alpha)
    if index[0] != 0:  # compensates for nonzero starting index
        out1 = out1 + index[0]
    if len(out1) == 0:
        # deglitching ends here if no outliers are found in this first
        # round of analysis
        return
    e2 = np.delete(ener, out1)      # removes points that are poorly fitted by the S-G filter
    n2 = np.delete(mu_copy, out1)
    f = interp1d(e2, n2, kind='cubic')
    interp_pts = f(energy[out1])    # interpolates for normalized mu at the removed energies
    for i, point in enumerate(out1):
        mu_copy[point] = interp_pts[i]  # inserts interpolated points into normalized data
    # fits the normalized absorption with the interpolated points
    sg_final = savgol_filter(mu_copy, sg_window_length, sg_polyorder)
    res2 = mu - sg_final
    roll_mad2 = roll_med(abs(res2), window=(2*max_glitch_length)+1,
                         edgemethod='calc')
    res_norm2 = res2 / roll_mad2
    if plot_res:
        import matplotlib.pyplot as plt
        fig, axes = plt.subplots(ncols=2, figsize=(8, 2.5),
                                 gridspec_kw={'width_ratios': [2, 1]})
        # plotting the normalized residuals on a point-index basis
        axes[0].plot(res_norm, color='tab:orange')
        axes[0].set_ylabel('Residuals (μ(E))')
        axes[0].set_xlabel('Point Index')
        critval = find_critval(res_norm2, alpha)
        # plots histogram for normalized residuals; will not plot large
        # outliers, since the limits are set at the initial critical
        # values for the genesd
        axes[1].hist(res_norm, bins=len(ener)//20,
                     range=(-1*critval, critval), color='tab:orange')
        axes[1].set_ylabel('Number of Points')
        axes[1].set_xlabel('Norm. Resid. Value')
        plt.show()
    # by normalizing the standard deviation to the same window as our S-G
    # calculation, we can tackle the full spectrum, accounting for the
    # noise we expect in the data; as a bonus, with the S-G filter, we
    # ideally have a near-normal distribution of residuals (which makes
    # the generalized ESD a robust method for finding the outliers)
    glitches_init = genesd(res_norm2[index], max_glitches, alpha)
    if index[0] != 0:
        glitches_init = glitches_init + index[0]
    # keep only second-round outliers close to a first-round outlier
    glitches = np.array([])
    for glitch in glitches_init:
        if True in np.where(abs(glitch-out1) < (sg_window_length//2)+1,
                            True, False):
            glitches = np.append(glitches, glitch)
    glitches[::-1].sort()
    glitches = glitches.astype(int)
    data_filt = deepcopy(group)        # non-destructive copy for comparison
    group_dict = group2dict(data_filt)  # transfers data copy to a dictionary (easier to work with)
    if len(glitches) == 0:
        glitches = None
    else:
        glitch_dict = {energy[glitch]: {} for glitch in glitches}
        for number in glitches:
            # everything that is of the same length as the energy array
            # will have the indices corresponding to glitches removed
            targetLength = len(energy)
            for key in dir(group):
                if type(getattr(group, key)) == np.ndarray or type(getattr(group, key)) == list:
                    if len(getattr(group, key)) == targetLength and key != 'energy':
                        # deletes the energy last
                        glitch_dict[getattr(group, 'energy')[number]].update({key: group_dict[key][number]})
                        # replaces the array with one that removes glitch
                        # points; numpy arrays require extra steps to
                        # delete an element (which is why this takes this
                        # structure)
                        group_dict[key] = np.delete(group_dict[key], number)
            # removed indices is reversed to avoid changing the length
            # ahead of the removal of points
            group_dict['energy'] = np.delete(group_dict['energy'], number)
            glitch_dict[energy[number]].update(
                {'params': {'e_window': e_window,
                            'sg_window_length': sg_window_length,
                            'sg_polyorder': sg_polyorder,
                            'alpha': alpha,
                            'max_glitches': max_glitches,
                            'max_glitch_length': max_glitch_length}})
    if glitches is not None:
        # record what was removed (and with which parameters) on the group
        if hasattr(group, 'glitches'):
            group_dict['glitches'].update(glitch_dict)
        else:
            setattr(group, 'glitches', glitch_dict)
    # write the filtered arrays back onto the original group
    dataKeys = list(group_dict.keys())
    for item in dataKeys:
        setattr(group, item, group_dict[item])
    return