Example #1
0
 def test_each(self):
     """Exercise the DataFolder.each proxy.

     Covers direct __call__ with a callable, proxied method calls
     (del_column), attribute access across the folder, the @ composition
     operator and dir() on the proxy.
     """
     os.chdir(self.datadir)
     fldr6 = SF.DataFolder(".", pattern="QD*.dat", pruned=True)
     fldr4 = SF.DataFolder(self.datadir, pattern="QD-SQUID-VSM.dat")
     fldr5 = fldr4.clone
     shaper = lambda f: f.shape
     fldr6.sort()
     res = fldr6.each(shaper)
     self.assertEqual(res, [(6049, 88), (3026, 41), (1410, 57), (412, 72)],
                      "__call__ on each failed.")
     # del_column is proxied through .each to every file in the folder,
     # so every shape loses one column.
     fldr6.each.del_column(0)
     res = fldr6.each(shaper)
     self.assertEqual(res, [(6049, 87), (3026, 40), (1410, 56), (412, 71)],
                      "Proxy method call via each failed")
     paths = [
         'QD-MH.dat', 'QD-PPMS.dat', 'QD-PPMS2.dat', 'QD-SQUID-VSM.dat'
     ]
     filenames = [
         path.relpath(x, start=fldr6.directory)
         for x in fldr6.each.filename.tolist()
     ]
     self.assertEqual(filenames, paths,
                      "Reading attributes from each failed.")
     # NOTE(review): eval presumably keeps the module importable where the
     # @ operator syntax is unavailable - confirm against supported Pythons.
     eval(
         '(hysteresis_correct@fldr4)(setas="3.xy",saturated_fraction=0.25)')
     self.assertTrue(
         "Hc" in fldr4[0],
         "Matrix multiplication of callable by DataFolder failed test.")
     fldr5.each(hysteresis_correct, setas="3.xy", saturated_fraction=0.25)
     self.assertTrue(
         "Hc" in fldr5[0],
         "Call on DataFolder.each() failed to apply function to folder")
     meths = [x for x in dir(fldr6.each) if not x.startswith("_")]
     self.assertEqual(len(meths), 128,
                      "Dir of folders.each failed ({}).".format(len(meths)))
Example #2
0
 def test_Properties(self):
     """Check DataFolder depth/group/debug/objects/type properties.

     Walks through mindepth, lsgrp, debug propagation, trunkdepth/depth,
     loaded/not_empty/is_empty, the objects mapping type and setting
     ``type`` from an instance.
     """
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     self.assertEqual(fldr.mindepth,0,"Minimum depth of flat group not equal to zero.")
     fldr/="Loaded as"
     grps=list(fldr.lsgrp)
     # One group fewer when hyperspy isn't available to load its format.
     skip=0 if hyperspy_ok else 1
     self.assertEqual(len(grps),26-skip,"Length of lsgrp not as expected: {} not {}".format(len(grps),26-skip))
     fldr.debug=True
     self.fldr=fldr
     self.assertTrue(fldr["XRDFile"][0].debug,"Setting debug on folder failed!")
     fldr.debug=False
     fldr["QDFile"].group("Byapp")
     self.assertEqual(fldr.trunkdepth,1,"Trunkdepth failed")
     self.assertEqual(fldr.mindepth,1,"mindepth attribute of folder failed.")
     self.assertEqual(fldr.depth,2,"depth attribute failed.")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     fldr+=Data()
     skip=1 if hyperspy_ok else 2
     self.assertEqual(len(list(fldr.loaded)),1,"loaded attribute failed {}".format(len(list(fldr.loaded))))
     self.assertEqual(len(list(fldr.not_empty)),len(fldr)-skip,"not_empty attribute failed.")
     fldr-="Untitled"
     self.assertFalse(fldr.is_empty,"fldr.is_empty failed")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     objects=copy(fldr.objects)
     # Assigning a plain dict should be coerced back to a regexpDict.
     fldr.objects=dict(objects)
     self.assertTrue(isinstance(fldr.objects,regexpDict),"Folder objects not reset to regexp dictionary")
     fldr.objects=objects
     self.assertTrue(isinstance(fldr.objects,regexpDict),"Setting Folder objects mangled type")
     fldr.type=Data()
     self.assertTrue(issubclass(fldr.type,Data),"Setting type by instance of class failed")
Example #3
0
 def test_discard_earlier(self):
     """discard_earlier in the constructor should match keep_latest() after the fact."""
     fldr2=SF.DataFolder(path.join(pth,"tests/Stoner/folder_data"),pattern="*.dat",discard_earlier=True)
     fldr3=SF.DataFolder(path.join(pth,"tests/Stoner/folder_data"),pattern="*.dat")
     self.assertEqual(len(fldr2),1,"Folder created with discard_earlier has wrong length ({})".format(len(fldr2)))
     self.assertEqual(len(fldr3),5,"Folder created without discard_earlier has wrong length ({})".format(len(fldr3)))
     fldr3.keep_latest()
     self.assertEqual(list(fldr2.ls),list(fldr3.ls),"Folder.keep_latest didn't do the same as discard_earlier in constructor.")
Example #4
0
 def test_grouping(self):
     """Build a synthetic phase/amplitude/frequency sweep and test tree ops.

     Exercises unflatten, flatten/unflatten round trip, select + prune,
     multilevel indexing, division/subtraction group arithmetic, gather,
     slice_metadata, extract and PlotFolder plotting.
     """
     fldr4=SF.DataFolder()
     x=np.linspace(-np.pi,np.pi,181)
     for phase in np.linspace(0,1.0,5):
         for amplitude in np.linspace(1,2,6):
             for frequency in np.linspace(1,2,5):
                 y=amplitude*np.sin(frequency*x+phase*np.pi)
                 d=Data(x,y,setas="xy",column_headers=["X","Y"])
                 d["frequency"]=frequency
                 d["amplitude"]=amplitude
                 d["phase"]=phase
                 d["params"]=[phase,frequency,amplitude]
                 # filename encodes the parameters so unflatten() can build a tree
                 d.filename="test/{amplitude}/{phase}/{frequency}.dat".format(**d)
                 fldr4+=d
     fldr4.unflatten()
     self.assertEqual(fldr4.mindepth,3,"Unflattened DataFolder had wrong mindepth.")
     self.assertEqual(fldr4.shape, (~~fldr4).shape,"DataFolder changed shape on flatten/unflatten")
     fldr5=fldr4.select(amplitude=1.4,recurse=True)
     fldr5.prune()
     pruned=(0,
             {'test': (0,
                {'1.4': (0,
                  {'0.0': (5, {}),
                   '0.25': (5, {}),
                   '0.5': (5, {}),
                   '0.75': (5, {}),
                   '1.0': (5, {})})})})
     selected=(0,
             {'test': (0,
                {'1.4': (0,
                  {'0.25': (1, {}), '0.5': (1, {}), '0.75': (1, {}), '1.0': (1, {})})})})
     self.assertEqual(fldr5.shape,pruned,"Folder pruning gave an unexpected shape.")
     self.assertEqual(fldr5[("test","1.4","0.5",0,"phase")],0.5,"Multilevel indexing of tree failed.")
     shape=(~(~fldr4).select(amplitude=1.4).select(frequency=1).select(phase__gt=0.2)).shape
     self.fldr4=fldr4
     self.assertEqual(shape, selected,"Multi selects and inverts failed.")
     g=(~fldr4)/10
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (15, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (15, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Divide by int failed.")
     g["Group 6"]-=5
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (15, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (14, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Sub by int failed.")
     remove=g["Group 3"][4]
     g["Group 3"]-=remove
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (14, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (14, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Sub by object failed.")
     d=fldr4["test",1.0,1.0].gather(0,1)
     self.assertEqual(d.shape,(181,6),"Gather seems to have failed.")
     self.assertTrue(np.all(fldr4["test",1.0,1.0].slice_metadata("phase")==
                            np.ones(5)),"Slice metadata failure.")
     d=(~fldr4).extract("phase","frequency","amplitude","params")
     self.assertEqual(d.shape,(150,6),"Extract failed to produce data of correct shape.")
     self.assertEqual(d.column_headers,['phase', 'frequency', 'amplitude', 'params', 'params', 'params'],"Extract failed to get correct column headers.")
     p=fldr4["test",1.0,1.0]
     p=SF.PlotFolder(p)
     p.plot()
     self.assertEqual(len(plt.get_fignums()),1,"Failed to generate a single plot for PlotFolder.")
     plt.close("all")
Example #5
0
 def test_Base_Operators(self):
     """Operators (+, +=, -=, /=, del) on a baseFolder wrapping sample data.

     Also checks that subtracting invalid operands raises the documented
     exception types, using assertRaises rather than try/except/else.
     """
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     for d in fldr:
         _=d["Loaded as"]
     fldr=baseFolder(fldr)
     fl=len(fldr)
     d=Data(np.ones((100,5)))
     fldr+=d
     self.assertEqual(fl+1,len(fldr),"Failed += operator on DataFolder")
     fldr2=fldr+fldr
     self.assertEqual((fl+1)*2,len(fldr2),"Failed + operator with DataFolder on DataFolder")
     fldr-="Untitled"
     self.assertEqual(len(fldr),fl,"Failed to remove Untitled-0 from DataFolder by name.")
     fldr-="New-XRay-Data.dql"
     self.assertEqual(fl-1,len(fldr),"Failed to remove New Xray data by name.")
     del fldr["1449 37.0 kx.emd"]
     fldr/="Loaded as"
     self.assertEqual(len(fldr["QDFile"]),4,"Failed to group folder by Loaded As metadata with /= operator.")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     for d in fldr:
         _=d["Loaded as"]
     fldr=baseFolder(fldr)
     fldr2=SF.DataFolder(path.join(self.datadir,"NLIV"),pattern="*.txt")
     fldr2.group(lambda x:"zero" if x["iterator"]%2==0 else "one")
     fldr3=fldr+fldr2
     self.assertEqual(fldr3.shape,(47, {'one': (9, {}), 'zero': (7, {})}),"Adding two DataFolders with groups failed")
     fldr4=fldr3-fldr2
     fldr4.prune()
     self.assertEqual(fldr4.shape,fldr.shape,"Failed to subtract one DataFolder from another :{}".format(fldr4.shape))
     del fldr2["one"]
     self.assertEqual(fldr2.shape,(0, {'zero': (7, {})}),"Delitem with group failed")
     fldr2.key=path.basename(fldr2.key)
     self.assertEqual(repr(fldr2),"DataFolder(NLIV) with pattern ('*.txt',) has 0 files and 1 groups\n\tDataFolder(zero) with pattern ['*.txt'] has 7 files and 0 groups","Representation methods failed")
     self.fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     names=list(self.fldr.ls)[::2]
     self.fldr-=names
     self.assertEqual(len(self.fldr),23,"Failed to delete from a sequence")
     # assertRaises is the idiomatic form of try/except/else+assertTrue(False)
     with self.assertRaises(TypeError,msg="Failed to throw a TypeError when subtracting a float"):
         self.fldr-0.34
     with self.assertRaises(RuntimeError,msg="Failed to throw a RuntimeError when subtracting a non-member"):
         self.fldr-Data()
     with self.assertRaises(RuntimeError,msg="Failed to throw a RuntimeError when subtracting a non-member"):
         self.fldr-"Wiggle"
Example #6
0
 def test_Properties(self):
     """Check depth, loaded and not_empty properties on grouped sample data."""
     folder = SF.DataFolder(self.datadir, debug=False, recursive=False)
     folder /= "Loaded as"
     folder["QDFile"].group("Byapp")
     # After grouping twice the tree is one level deep at minimum, two at most.
     self.assertEqual(folder.mindepth, 1,
                      "mindepth attribute of folder failed.")
     self.assertEqual(folder.depth, 2, "depth attribute failed.")
     # A fresh folder plus one in-memory Data object: exactly one is "loaded".
     folder = SF.DataFolder(self.datadir, debug=False, recursive=False)
     folder += Data()
     self.assertEqual(
         len(list(folder.loaded)), 1,
         "loaded attribute failed {}".format(len(list(folder.loaded))))
     self.assertEqual(len(list(folder.not_empty)),
                      len(folder) - 1, "not_empty attribute failed.")
     folder -= "Untitled"
Example #7
0
    def test_loader_opts(self):
        """Regex filename patterns, read_means, unload and concatenate.

        NOTE(review): the expected span, shapes and counts are tied to the
        sample files under ``<datadir>/NLIV`` - confirm there if they change.
        """
        # "field" is captured from each filename by the named group in the
        # pattern and stored in that file's metadata.
        self.fldr7 = SF.DataFolder(
            path.join(self.datadir, "NLIV"),
            pattern=re.compile(r".*at (?P<field>[0-9\-\.]*)\.txt"),
            read_means=True)
        x = self.fldr7.metadata.slice(["field", "Voltage", "Current"],
                                      output="Data")
        self.assertEqual(
            x.span("field"), (-0.05, 0.04),
            "Extract from name pattern and slice into metadata failed.")
        self.assertTrue(
            all(x // "Current" < 0) and all(x // "Current" > -1E-20),
            "Extract means failed.")
        self.assertEqual(list(self.fldr7.not_loaded), [],
                         "Not loaded attribute failed.")
        # unload(0) drops one file's data; unload() with no argument drops all.
        self.fldr7.unload(0)
        self.assertEqual(len(list(self.fldr7.not_loaded)), 1,
                         "Unload by index failed.")
        self.fldr7.unload()
        self.assertEqual(len(list(self.fldr7.not_loaded)), len(self.fldr7),
                         "Unload all failed.")

        def add_col(d):
            # Tag every file with a constant column so concatenated rows stay traceable.
            d.add_column(np.ones(len(d)) * d["field"], header="field")

        self.fldr7.each(add_col)
        self.fldr7.concatenate()
        self.assertEqual(self.fldr7[0].shape, (909, 4), "Concatenate failed.")
Example #8
0
 def test_clone(self):
     """Cloning a DataFolder keeps its pattern, user attributes and groups."""
     self.fldr = SF.DataFolder(self.datadir, pattern='*.txt')
     self.fldr.abc = 123  # attach a user attribute that must survive cloning
     clone = self.fldr.__clone__()
     self.assertTrue(clone.pattern == ['*.txt'], 'pattern didnt copy over')
     self.assertTrue(hasattr(clone, "abc") and clone.abc == 123, 'user attribute didnt copy over')
     self.assertTrue(isinstance(clone['recursivefoldertest'], SF.DataFolder), 'groups didnt copy over')
 def test_methods(self):
     """slice_metadata on a sorted folder returns the loader names in order."""
     expected = np.array([
         'MDAASCIIFile', 'BNLFile', 'DataFile', 'DataFile', 'DataFile',
         'MokeFile', 'EasyPlotFile', 'DataFile', 'DataFile', 'DataFile'
     ])
     self.fldr = SF.DataFolder(self.datadir, pattern='*.txt').sort()
     actual = self.fldr.slice_metadata("Loaded as")
     self.assertTrue(np.all(actual == expected),
                     "Slicing metadata failed to work.")
Example #10
0
 def test_methods(self):
     """slice_metadata returns loader names matching the sorted sample data."""
     expected = np.array(
         ['DataFile', 'MDAASCIIFile', 'BNLFile', 'DataFile', 'DataFile',
          'DataFile', 'DataFile', 'MokeFile', 'EasyPlotFile', 'DataFile',
          'DataFile', 'DataFile'],
         dtype='<U12')
     self.fldr = SF.DataFolder(self.datadir, pattern='*.txt').sort()
     actual = self.fldr.slice_metadata("Loaded as")
     # Compare lengths first so a changed sample-data set gives a clear message.
     self.assertEqual(len(expected), len(actual),
                      "Test slice not equal length - sample-data changed? {}".format(actual))
     self.assertTrue(np.all(actual == expected), "Slicing metadata failed to work.")
Example #11
0
 def test_saving(self):
     """Round-trip: build a synthetic folder tree, save it, reload and compare shapes."""
     folder = SF.DataFolder()
     xdata = np.linspace(-np.pi, np.pi, 181)
     for phase in np.linspace(0, 1.0, 5):
         for amplitude in np.linspace(1, 2, 6):
             for frequency in np.linspace(1, 2, 5):
                 ydata = amplitude * np.sin(frequency * xdata + phase * np.pi)
                 record = Data(xdata, ydata, setas="xy", column_headers=["X", "Y"])
                 record["frequency"] = frequency
                 record["amplitude"] = amplitude
                 record["phase"] = phase
                 record["params"] = [phase, frequency, amplitude]
                 # filename encodes the sweep parameters so unflatten() builds a tree
                 record.filename = "test/{amplitude}/{phase}/{frequency}.dat".format(**record)
                 folder += record
     folder.unflatten()
     target = tempfile.mkdtemp()
     folder.save(target)
     reloaded = SF.DataFolder(target)
     self.assertEqual(folder.shape, reloaded.shape, "Saved DataFolder and loaded DataFolder have different shapes")
Example #12
0
 def test_methods(self):
     """Slicing, insert and reversed iteration on a non-recursive folder."""
     expected = np.array(
         ['DataFile', 'MDAASCIIFile', 'BNLFile', 'DataFile', 'DataFile',
          'DataFile', 'DataFile', 'MokeFile', 'EasyPlotFile', 'DataFile',
          'DataFile', 'DataFile'],
         dtype='<U12')
     folder = SF.DataFolder(self.datadir, pattern='*.txt', recursive=False).sort()
     self.fldr = folder
     actual = folder.slice_metadata("Loaded as")
     self.assertEqual(len(expected), len(actual),
                      "Test slice not equal length - sample-data changed? {}".format(actual))
     self.assertTrue(np.all(actual == expected), "Slicing metadata failed to work.")
     # Inserting an unsaved Data() should land at the requested position as "Untitled".
     folder.insert(5, Data())
     self.assertEqual(list(folder.ls)[5], "Untitled", "Insert failed")
     self.fldr = folder
     _ = folder[-1]
     self.assertEqual(list(reversed(folder))[0].filename, folder[-1].filename)
Example #13
0
 def test_metadata(self):
     """Common-metadata proxy: repr, keys, all_keys and slice output types."""
     os.chdir(self.datadir)
     fldr6=SF.DataFolder(".",pattern="QD*.dat",pruned=True)
     self.assertEqual(repr(fldr6.metadata),"The DataFolder . has 9 common keys of metadata in 4 Data objects",
                      "Representation method of metadata wrong.")
     self.assertEqual(len(fldr6.metadata),9,"Length of common metadata not right.")
     self.assertEqual(list(fldr6.metadata.keys()),['Byapp',
                                                    'Datatype,Comment',
                                                    'Datatype,Time',
                                                    'Fileopentime',
                                                    'Loaded as',
                                                    'Loaded from',
                                                    'Startupaxis-X',
                                                    'Startupaxis-Y1',
                                                    'Stoner.class'],"metadata.keys() not right.")
     self.assertEqual(len(list(fldr6.metadata.all_keys())),49,"metadata.all_keys() the wrong length.")
     self.assertTrue(isinstance(fldr6.metadata.slice("Loaded from")[0],dict),"metadata.slice not returning a dictionary.")
     self.assertTrue(isinstance(fldr6.metadata.slice("Loaded from",values_only=True),list),"metadata.slice not returning a list with values_only=True.")
     self.assertTrue(isinstance(fldr6.metadata.slice("Loaded from",output="Data"),Data),"metadata.slice not returning Data with output='Data'.")
Example #14
0
 def test_Folders(self):
     """Basic DataFolder construction plus index/count/slice behaviour."""
     self.fldr = SF.DataFolder(self.datadir, debug=False, recursive=False)
     fldr = self.fldr
     fl = len(fldr)
     datfiles = fnmatch.filter(os.listdir(self.datadir), "*.dat")
     length = len([
         i for i in os.listdir(self.datadir)
         if path.isfile(os.path.join(self.datadir, i))
     ]) - 1  # don't count the TDMS index file
     self.assertEqual(length, fl,
                      "Failed to initialise DataFolder from sample data")
     # index() and count() both accept a bare filename string...
     self.assertEqual(fldr.index(path.basename(fldr[-1].filename)), fl - 1,
                      "Failed to index back on filename")
     self.assertEqual(fldr.count(path.basename(fldr[-1].filename)), 1,
                      "Failed to count filename with string")
     # ...and count() also accepts a glob pattern.
     self.assertEqual(fldr.count("*.dat"), len(datfiles),
                      "Count with a glob pattern failed")
     self.assertEqual(
         len(fldr[::2]), ceil(len(fldr) / 2.0),
         "Failed to get the correct number of elements in a folder slice")
Example #15
0
 def test_Operators(self):
     """+=, +, -= (by name) and /= (group-by-metadata) on a DataFolder."""
     fldr = SF.DataFolder(self.datadir, debug=False, recursive=False)
     fl = len(fldr)
     d = Data(np.ones((100, 5)))
     fldr += d
     self.assertEqual(fl + 1, len(fldr), "Failed += operator on DataFolder")
     fldr2 = fldr + fldr
     self.assertEqual((fl + 1) * 2, len(fldr2),
                      "Failed + operator with DataFolder on DataFolder")
     fldr -= "Untitled"
     self.assertEqual(
         len(fldr), fl,
         "Failed to remove Untitled-0 from DataFolder by name.")
     fldr -= "New-XRay-Data.dql"
     self.assertEqual(fl - 1, len(fldr),
                      "Failed to remove New Xray data by name.")
     # Adding the name back re-loads the file from disk.
     fldr += "New-XRay-Data.dql"
     self.assertEqual(len(fldr), fl,
                      "Failed += operator with string on DataFolder")
     fldr /= "Loaded as"
     self.assertEqual(
         len(fldr["QDFile"]), 4,
         "Failed to group folder by Loaded As metadata with /= operator.")
 def test_metadata(self):
     """metadata proxy: repr, keys, slice output formats and key indexing.

     Note: ``np.str`` was deprecated in NumPy 1.20 and removed in 1.24;
     the builtin ``str`` is the documented replacement and is used below.
     """
     os.chdir(self.datadir)
     fldr6 = SF.DataFolder(".", pattern="QD*.dat", pruned=True)
     fldr6.sort()
     self.assertEqual(
         repr(fldr6.metadata),
         "The DataFolder . has 9 common keys of metadata in 4 Data objects",
         "Representation method of metadata wrong.")
     self.assertEqual(len(fldr6.metadata), 9,
                      "Length of common metadata not right.")
     self.assertEqual(list(fldr6.metadata.keys()), [
         'Byapp', 'Datatype,Comment', 'Datatype,Time', 'Fileopentime',
         'Loaded as', 'Loaded from', 'Startupaxis-X', 'Startupaxis-Y1',
         'Stoner.class'
     ], "metadata.keys() not right.")
     self.assertEqual(len(list(fldr6.metadata.all_keys())), 49,
                      "metadata.all_keys() the wrong length.")
     self.assertTrue(
         isinstance(fldr6.metadata.slice("Loaded from")[0], dict),
         "metadata.slice not returning a dictionary.")
     self.assertTrue(
         isinstance(fldr6.metadata.slice("Loaded from", values_only=True),
                    list),
         "metadata.slice not returning a list with values_only=True.")
     self.assertTrue(
         isinstance(fldr6.metadata.slice("Loaded from", output="Data"),
                    Data),
         "metadata.slice not returning Data with output='Data'.")
     for fmt, typ in zip(
         ["dict", "list", "array", "data", "frame", "smart"],
         [(list, dict, int), (list, tuple, int),
          (np.ndarray, np.ndarray, np.int64), (Data, np.ndarray, np.int64),
          (DataFrame, Series, np.int64), (list, dict, int)]):
         ret = fldr6.metadata.slice("Datatype,Comment",
                                    "Datatype,Time",
                                    output=fmt)
         for ix, t in enumerate(typ):
             self.assertTrue(
                 isinstance(ret, t),
                 "Return from slice metadata for output={} and dimension {} had type {} and not {}"
                 .format(fmt, ix, type(ret), t))
             # Drill one level into the container for the next dimension.
             try:
                 ret = ret[0]
             except (KeyError):
                 ret = ret[list(ret.keys())[0]]
             except (IndexError, TypeError):
                 pass
     for k, typ in zip(
         ['Info.Sample_Holder', ('Info.Sample_Holder', "Datatype,Comment")],
         [(np.ndarray, np.ndarray, str), (np.ndarray, np.ndarray)]):
         ret = fldr6.metadata[k]
         for ix, t in enumerate(typ):
             self.assertTrue(
                 isinstance(ret, t),
                 "Indexing metadata for key={} and dimension {} had type {} and not {}"
                 .format(k, ix, type(ret), t))
             try:
                 ret = ret[0]
             except (KeyError):
                 ret = ret[list(ret.keys())[0]]
             except AttributeError:
                 ret = ret.data[0]
             except (IndexError, TypeError):
                 pass
     del fldr6.metadata["Datatype,Comment"]
     try:
         ret = fldr6.metadata["Datatype,Comment"]
     except KeyError:
         pass
     else:
         self.assertTrue(False,
                         "Failed to delete from metadata : {}".format(ret))
Example #17
0
 def setUp(self):
     """Create the shared DataFolder fixture over the sample-data directory."""
     self.fldr = SF.DataFolder(self.datadir, debug=False)
# The start and end runs for this batch
startrun = 49173
endrun = 52000
# Which column are we analysing ?
signal = "fluo"
# A filename pattern that will grab the run number from the filename
pattern = re.compile("i10-(?P<run>\d*)\.dat")
# The Data spool directory
directory = "C:\Data\data"
# Set the limits used on the normalisation
rfit = (660, 670)
lfit = (615, 630)

# Read the directory of data files and sort by run number
fldr = SF.DataFolder(directory, pattern=pattern, read_means=True)
fldr.sort("run")
# Remove files outside of the run number range
fldr.filterout(lambda f: f["run"] > endrun or f["run"] < startrun)
# group the files by position, temperatures and polarisations
fldr.group([position, temp, helicity])
# Normalise the files grouped by helicity, temperature and position and produce one averaged file
# for each position, temperature and helicity
fldr.walk_groups(
    norm_group,
    group=True,
    replace_terminal=True,
    walker_args={
        "signal": signal,
        "lfit": lfit,
        "rfit": rfit
 def setUp(self):
     """Build the folder fixture from the sample-data directory."""
     datadir = self.datadir
     self.fldr = SF.DataFolder(datadir)
Example #20
0
 def test_attr_access(self):
     """Constructing a PlotFolder with pattern and setas keyword arguments."""
     nliv_dir = path.join(self.datadir, "NLIV")
     self.fldr = SF.PlotFolder(nliv_dir, pattern="*.txt", setas="yx")