def test_metadata_save(self):
     local = path.dirname(__file__)
     t = np.arange(12).reshape(3,4) #set up a test data file with mixed metadata
     t = Data(t)
     t.column_headers = ["1","2","3","4"]
     metitems = [True,1,0.2,{"a":1, "b":"abc"},(1,2),np.arange(3),[1,2,3], "abc", #all types accepted
                 r"\\abc\cde", 1e-20, #extra tests
                 [1,(1,2),"abc"], #list with different types
                 [[[1]]], #nested list
                 None, #None value
                 ]
     metnames = ["t"+str(i) for i in range(len(metitems))]
     for k,v in zip(metnames,metitems):
         t[k] = v
     t.save(path.join(local, "mixedmetatest.dat"))
     tl = Data(path.join(local, "mixedmetatest.txt")) #will change extension to txt if not txt or tdi, is this what we want?
     t2 = self.d4.clone  #check that python tdi save is the same as labview tdi save
     t2.save(path.join(local, "mixedmetatest2.txt"))
     t2l = Data(path.join(local, "mixedmetatest2.txt"))
     for orig, load in [(t,tl), (t2, t2l)]:
         for k in ['Loaded as', 'TDI Format']:
             orig[k]=load[k]
         self.assertTrue(np.allclose(orig.data, load.data))
         self.assertTrue(orig.column_headers==load.column_headers)
         self.res=load.metadata^orig.metadata
         self.assertTrue(load.metadata==orig.metadata,"Metadata not the same on round tripping to disc")
     os.remove(path.join(local, "mixedmetatest.txt")) #clear up
     os.remove(path.join(local, "mixedmetatest2.txt"))
 def setUp(self):
     self.d = Data(path.join(path.dirname(__file__), "CoreTest.dat"),
                   setas="xy")
     self.d2 = Data(
         path.join(__home__, "..", "sample-data", "TDI_Format_RT.txt"))
     self.d3 = Data(
         path.join(__home__, "..", "sample-data", "New-XRay-Data.dql"))
 def test_extra_plots(self):
     x = np.random.uniform(-np.pi, np.pi, size=5001)
     y = np.random.uniform(-np.pi, np.pi, size=5001)
     z = (np.cos(4 * np.sqrt(x**2 + y**2)) *
          np.exp(-np.sqrt(x**2 + y**2) / 3.0))**2
     self.d2 = Data(x, y, z, column_headers=["X", "Y", "Z"], setas="xyz")
     self.d2.contour_xyz(projection="2d")  #
     self.assertEqual(len(plt.get_fignums()), 1,
                      "Setting Data.fig by integer failed.")
     plt.close("all")
     X, Y, Z = self.d2.griddata(xlim=(-np.pi, np.pi), ylim=(-np.pi, np.pi))
     plt.imshow(Z)
     self.assertEqual(len(plt.get_fignums()), 1,
                      "griddata/imshow failed to produce a single figure.")
     plt.imshow(Z)
     plt.close("all")
     x, y = np.meshgrid(np.linspace(-np.pi, np.pi, 10),
                        np.linspace(-np.pi, np.pi, 10))
     z = np.zeros_like(x)
     w = np.cos(np.sqrt(x**2 + y**2))
     q = np.arctan2(x, y)
     u = np.abs(w) * np.cos(q)
     v = np.abs(w) * np.sin(q)
     self.d3 = Data(x.ravel(),
                    y.ravel(),
                    z.ravel(),
                    u.ravel(),
                    v.ravel(),
                    w.ravel(),
                    setas="xyzuvw")
     self.d3.plot()
     self.assertEqual(len(plt.get_fignums()), 1,
                      "plot() of xyzuvw data failed to produce a single figure.")
     plt.close("all")
 def setUp(self):
     self.d1 = Data(path.join(self.datadir, "OVF1.ovf"))
     self.d2 = Data(path.join(self.datadir, "TDI_Format_RT.txt"))
     self.d3 = Data(path.join(self.datadir, "New-XRay-Data.dql"))
     self.d4 = Data(np.column_stack([np.ones(100),
                                     np.ones(100) * 2]),
                    setas="xy")
 def test_Properties(self):
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     self.assertEqual(fldr.mindepth,0,"Minimum depth of flat group not equal to zero.")
     fldr/="Loaded as"
     grps=list(fldr.lsgrp)
     skip=0 if hyperspy_ok else 1
     self.assertEqual(len(grps),26-skip,"Length of lsgrp not as expected: {} not {}".format(len(grps),26-skip))
     fldr.debug=True
     self.fldr=fldr
     self.assertTrue(fldr["XRDFile"][0].debug,"Setting debug on folder failed!")
     fldr.debug=False
     fldr["QDFile"].group("Byapp")
     self.assertEqual(fldr.trunkdepth,1,"Trunkdepth failed")
     self.assertEqual(fldr.mindepth,1,"mindepth attribute of folder failed.")
     self.assertEqual(fldr.depth,2,"depth attribute failed.")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     fldr+=Data()
     skip=1 if hyperspy_ok else 2
     self.assertEqual(len(list(fldr.loaded)),1,"loaded attribute failed {}".format(len(list(fldr.loaded))))
     self.assertEqual(len(list(fldr.not_empty)),len(fldr)-skip,"not_empty attribute failed.")
     fldr-="Untitled"
     self.assertFalse(fldr.is_empty,"fldr.is_empty failed")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     objects=copy(fldr.objects)
     fldr.objects=dict(objects)
     self.assertTrue(isinstance(fldr.objects,regexpDict),"Folder objects not reset to regexp dictionary")
     fldr.objects=objects
     self.assertTrue(isinstance(fldr.objects,regexpDict),"Setting Folder objects mangled type")
     fldr.type=Data()
     self.assertTrue(issubclass(fldr.type,Data),"Setting type by instance of class failed")
    def test_scale(self):
        x=np.linspace(-5,5,101)
        y=np.sin(x)
        orig=Data(x+np.random.normal(size=101,scale=0.025),y+np.random.normal(size=101,scale=0.01))
        orig.setas="xy"

        XTests=[[(0,0,0.5),(0,2,-0.1)],
                 [(0,0,0.5)],
                 [(0,2,-0.2)]]
        YTests=[[(1,1,0.5),(1,2,-0.1)],
                 [(1,1,0.5)],
                 [(1,2,-0.2)]]
        for xmode,xdata,xtests in zip(["linear","scale","offset"],[x*2+0.2,x*2,x+0.2],XTests):
            for ymode,ydata,ytests in zip(["linear","scale","offset"],[y*2+0.2,y*2,y+0.2],YTests):
                to_scale=Data(xdata+np.random.normal(size=101,scale=0.025),ydata+np.random.normal(size=101,scale=0.01))
                to_scale.setas="xy"
                to_scale.scale(orig,xmode=xmode,ymode=ymode)
                transform=to_scale["Transform"]
                t_err=to_scale["Transform Err"]
                for i,j,v in xtests+ytests:
                    self.assertLessEqual(np.abs(transform[i,j]-v),5*t_err[i,j],"Failed to get correct transform factor for {}:{} ({} vs {})".format(xmode,ymode,transform[i,j],v))

        to_scale=Data(x*5+0.1+np.random.normal(size=101,scale=0.025),y*0.5+0.1+0.5*x+np.random.normal(size=101,scale=0.01))
        to_scale.setas="xy"
        to_scale.scale(orig,xmode="affine")
        a_transform=np.array([[0.2,0.,-0.02],[-0.2, 2.,-0.17]])
        t_delta=np.abs(to_scale["Transform"]-a_transform)
        t_in_range=t_delta<to_scale["Transform Err"]*5
        self.assertTrue(np.all(t_in_range),"Failed to produce correct affine scaling {} vs {}".format(to_scale["Transform"],a_transform))
 def test_clip(self):
     x=np.linspace(0,np.pi*10,1001)
     y=np.sin(x)
     z=np.cos(x)
     d=Data(x,y,z,setas="xyz")
     d.clip((-0.1,0.2),"Column 2")
     self.assertTrue((d.z.min()>=-0.1) and (d.z.max()<=0.2),"Clip with a column specified failed.")
     d=Data(x,y,z,setas="xyz")
     d.clip((-0.5,0.7))
     self.assertTrue((d.y.min()>=-0.5) and (d.y.max()<=0.7),"Clip with no column specified failed.")
 def test_Base_Operators(self):
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     for d in fldr:
         _=d["Loaded as"]
     fldr=baseFolder(fldr)
     fl=len(fldr)
     d=Data(np.ones((100,5)))
     fldr+=d
     self.assertEqual(fl+1,len(fldr),"Failed += operator on DataFolder")
     fldr2=fldr+fldr
     self.assertEqual((fl+1)*2,len(fldr2),"Failed + operator with DataFolder on DataFolder")
     fldr-="Untitled"
     self.assertEqual(len(fldr),fl,"Failed to remove Untitled-0 from DataFolder by name.")
     fldr-="New-XRay-Data.dql"
     self.assertEqual(fl-1,len(fldr),"Failed to remove New XRay data by name.")
     del fldr["1449 37.0 kx.emd"]
     fldr/="Loaded as"
     self.assertEqual(len(fldr["QDFile"]),4,"Failoed to group folder by Loaded As metadata with /= opeator.")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     for d in fldr:
         _=d["Loaded as"]
     fldr=baseFolder(fldr)
     fldr2=SF.DataFolder(path.join(self.datadir,"NLIV"),pattern="*.txt")
     fldr2.group(lambda x:"zero" if x["iterator"]%2==0 else "one")
     fldr3=fldr+fldr2
     self.assertEqual(fldr3.shape,(47, {'one': (9, {}), 'zero': (7, {})}),"Adding two DataFolders with groups failed")
     fldr4=fldr3-fldr2
     fldr4.prune()
     self.assertEqual(fldr4.shape,fldr.shape,"Failed to subtract one DataFolder from another :{}".format(fldr4.shape))
     del fldr2["one"]
     self.assertEqual(fldr2.shape,(0, {'zero': (7, {})}),"Delitem with group failed")
     fldr2.key=path.basename(fldr2.key)
     self.assertEqual(repr(fldr2),"DataFolder(NLIV) with pattern ('*.txt',) has 0 files and 1 groups\n\tDataFolder(zero) with pattern ['*.txt'] has 7 files and 0 groups","Representation methods failed")
     self.fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     names=list(self.fldr.ls)[::2]
     self.fldr-=names
     self.assertEqual(len(self.fldr),23,"Failed to delete from a sequence")
     try:
         self.fldr-0.34
     except TypeError:
         pass
     else:
         self.assertTrue(False,"Failed to throw a TypeError when subtracting a float")
     try:
         self.fldr-Data()
     except RuntimeError:
         pass
     else:
         self.assertTrue(False,"Failed to throw a RuntimeError when subtracting a non-member")
     try:
         self.fldr-"Wiggle"
     except RuntimeError:
         pass
     else:
         self.assertTrue(False,"Failed to throw a RuntimeError when subtracting a non-member")
 def test_metadata_save(self):
     local = path.dirname(__file__)
     t = np.arange(12).reshape(
         3, 4)  #set up a test data file with mixed metadata
     t = Data(t)
     t.column_headers = ["1", "2", "3", "4"]
     metitems = [
         True,
         1,
         0.2,
         {
             "a": 1,
             "b": "abc"
         },
         (1, 2),
         np.arange(3),
         [1, 2, 3],
         "abc",  #all types accepted
         r"\\abc\cde",
         1e-20,  #extra tests
         [1, (1, 2), "abc"],  #list with different types
         [[[1]]]  #nested list
     ]
     metnames = ["t" + str(i) for i in range(len(metitems))]
     for k, v in zip(metnames, metitems):
         t[k] = v
     t.save(path.join(local, "mixedmetatest.dat"))
     tl = Data(
         path.join(local, "mixedmetatest.txt")
     )  #will change extension to txt if not txt or tdi, is this what we want?
     t2 = self.d4.clone  #check that python tdi save is the same as labview tdi save
     t2.save(path.join(local, "mixedmetatest2.txt"))
     t2l = Data(path.join(local, "mixedmetatest2.txt"))
     for orig, load in [(t, tl), (t2, t2l)]:
         self.assertTrue(np.allclose(orig.data, load.data))
         self.assertTrue(orig.column_headers == load.column_headers)
         self.assertTrue(
             all([i in load.metadata.keys() for i in orig.metadata.keys()]))
         for k in orig.metadata.keys():
             if isinstance(orig[k], np.ndarray):
                 self.assertTrue(np.allclose(load[k], orig[k]))
             elif isinstance(orig[k], float) and np.isnan(orig[k]):
                 self.assertTrue(np.isnan(load[k]))
             else:
                 self.assertTrue(
                     load[k] == orig[k],
                     "Not equal for metadata {}: {} vs {}".format(k, load[k], orig[k]))
     os.remove(path.join(local, "mixedmetatest.txt"))  #clear up
     os.remove(path.join(local, "mixedmetatest2.txt"))
 def test_constructor(self):
     """Constructor Tests"""
     d = Data()
     self.assertTrue(d.shape == (1, 0), "Bare constructor failed")
     d = Data(self.d)
     self.assertTrue(np.all(d.data == self.d.data),
                     "Constructor from DataFile failed")
     d = Data([np.ones(100), np.zeros(100)])
     self.assertTrue(d.shape == (100, 2),
                     "Constructor from iterable list of nd array failed")
     d = Data([np.ones(100), np.zeros(100)], ["X", "Y"])
     self.assertTrue(
         d.column_headers == ["X", "Y"],
         "Failed to set column headers in constructor: {}".format(
             d.column_headers))
 def test_len(self):
     # Check that length of the column is the same as length of the data
     self.assertEqual(len(Data()),0,"Empty DataFile not length zero")
     self.assertEqual(len(self.d.column(0)),len(self.d),"Column 0 length not equal to DataFile length")
     self.assertEqual(len(self.d),self.d.data.shape[0],"DataFile length not equal to data.shape[0]")
     # Check that self.column_headers returns the right length
     self.assertEqual(len(self.d.column_headers),self.d.data.shape[1],"Length of column_headers not equal to data.shape[1]")
 def test_indexing(self):
     #Check all the indexing possibilities
     data=np.array(self.d.data)
     colname=self.d.column_headers[0]
     self.assertTrue(all(self.d.column(colname)==self.d[:,0]),"Failed direct indexing versus column method")
     self.assertTrue(all(self.d[:,0]==data[:,0]),"Failed direct indexing versus direct array index")
     self.assertTrue(all(self.d[:,[0,1]]==data),"Failed direct list indexing")
     self.assertTrue(all(self.d[::2,:]==data[::2]),"Failed slice indexing rows")
     self.assertTrue(all(self.d[colname]==data[:,0]),"Failed direct indexing by column name")
     self.assertTrue(all(self.d[:,colname]==data[:,0]),"Failed fallback indexing by column name")
     self.assertEqual(self.d[25,1],645.0,"Failed direct single cell index")
     self.assertEqual(self.d[25,"Y-Data"],645.0,"Failoed single cell index direct")
     self.assertEqual(self.d["Y-Data",25],645.0,"Failoed single cell fallback index order")
     self.d["X-Dat"]=[11,12,13,14,15]
     self.assertEqual(self.d["X-Dat",2],13,"Failed indexing of metadata lists with tuple")
     self.assertEqual(self.d["X-Dat"][2],13,"Failed indexing of metadata lists with double indices")
     d=Data(np.ones((10,10)))
     d[0,0]=5 #Index by tuple into data
     d["Column_1",0]=6 # Index by column name, row into data
     d[0,"Column_2"]=7 #Index by row, column name into data
     d["Column_3"]=[1,2,3,4] # Create a metadata
     d["Column_3",2]=2 # Index existing metadata via tuple
     d.metadata[0,5]=10
     d[0,5]=12 # Even if tuple, index metadata if already existing.
     self.assertTrue(np.all(d[0]==np.array([5,6,7,1,1,1,1,1,1,1])),"setitem on Data to index into Data.data failed.\n{}".format(d[0]))
     self.assertEqual(d.metadata["Column_3"],[1,2,2,4],"Tuple indexing into metadata Failed.")
     self.assertEqual(d.metadata[0,5],12,"Indexing of pre-existing metadata keys rather than Data.data failed.")
     self.assertEqual(d.metadata[1],[1, 2, 2, 4],"Indexing metadata by integer failed.")
    def hysteresis(self, mask=None):
        """Make a hysteresis loop of the average intensity in the given images

        Keyword Argument:
            mask(ndarray or list):
                boolean array of same size as an image or imarray or list of
                masks for each image. If True then don't include that area in
                the intensity averaging.

        Returns
        -------
        hyst(Data):
            'Field', 'Intensity', 2 column array
        """
        hyst = np.column_stack((self.fields, np.zeros(len(self))))
        for i in range(len(self)):
            im = self[i]
            if isinstance(mask, np.ndarray) and len(mask.shape) == 2:
                hyst[i, 1] = np.average(im[np.invert(mask.astype(bool))])
            elif isinstance(mask, np.ndarray) and len(mask.shape) == 3:
                hyst[i,
                     1] = np.average(im[np.invert(mask[i, :, :].astype(bool))])
            elif isinstance(mask, (tuple, list)):
                hyst[i, 1] = np.average(im[np.invert(mask[i])])
            else:
                hyst[i, 1] = np.average(im)
        d = Data(hyst, setas="xy")
        d.column_headers = ["Field", "Intensity"]
        return d
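# Minimal self-contained sketch of the 2D-mask branch above (illustrative,
# not from the original source - plain numpy stands in for the image stack):
import numpy as np

images = np.random.rand(5, 8, 8)        # five fake 8x8 images
mask = np.zeros((8, 8), dtype=bool)     # True = exclude from the average
mask[:4, :] = True                      # mask off the top half of each image
intensities = [np.average(im[~mask]) for im in images]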
 def test_threshold(self):
     #set up some zigzag data
     #minima at 0, 100, 200, 300; maxima at 50, 150, 250, 350; zero crossings in between
     ar = np.zeros((400, 2))
     ar[:, 0] = np.arange(0, len(ar))
     for i in range(4):
         ar[i * 100:i * 100 + 50, 1] = np.linspace(-1, 1, 50)
     for i in range(4):
         ar[i * 100 + 50:i * 100 + 100, 1] = np.linspace(1, -1, 50)
     d = Data(ar, setas='xy')
     self.assertTrue(
         len(d.threshold(0, rising=True, falling=False,
                         all_vals=True)) == 4)
     self.assertTrue(
         len(d.threshold(0, rising=False, falling=True,
                         all_vals=True)) == 4)
     self.assertTrue(
         len(d.threshold(0,
                         interpolate=False,
                         rising=False,
                         falling=True,
                         all_vals=True)) == 4)
     self.assertTrue(d.threshold(0, all_vals=True)[1] == 124.5)
 def test_functions(self):
     #Test section:
     self.s1 = self.d1.section(z=(12, 13))
     self.assertTrue(142.710 < self.d2.mean("Temp") < 142.711,
                     "Failed on the mean test.")
     self.assertTrue(
         round(self.d2.span("Temp")[0], 1) == 4.3
         and round(self.d2.span("Temp")[1], 1) == 291.6,
         "Span test failed.")
     f = self.d2.split(lambda r: r["Temp"] < 150)
     self.assertTrue(len(f[0]) == 838, "Split failed to work.")
     self.assertEqual(
         len(
             self.d3.threshold(2000,
                               rising=True,
                               falling=True,
                               all_vals=True)), 5, "Threshold failure.")
     self.d4.add(0, 1, "Add")
     self.d4.subtract(1, 0, header="Subtract")
     self.d4.multiply(0, 1, header="Multiply")
     self.d4.divide(0, 1, header="Divide")
     self.d4.diffsum(0, 1, header="Diffsum")
     self.assertTrue(
         np.all(self.d4[0] == np.array([-0.5, -1, -3, 3, -1, 2])),
         "Test column ops failed.")
     d = Data(np.zeros((100, 1)))
     d.add(0, 1.0)
     self.assertEqual(np.sum(d[:, 0]), 100., "Add with a float failed.")
     d.add(0, np.ones(100))
     self.assertEqual(np.sum(d[:, 0]), 200., "Add with an array failed.")
def norm_group(pos, _, **kargs):
    """Takes the drain current for each file in group and builds an analysis file and works out the mean drain"""
    if "signal" in kargs:
        signal = kargs["signal"]
    else:
        signal = "fluo"
    lfit = kargs["lfit"]
    rfit = kargs["rfit"]

    posfile = Data()
    posfile.metadata = pos[0].metadata
    posfile = posfile & pos[0].column(0)
    posfile.column_headers = ["Energy"]
    for f in pos:
        print(str(f["run"]) + str(f.find_col(signal)))
        posfile = posfile & f.column(signal)
    posfile.add_column(lambda r: np.mean(r[1:]), "mean drain")
    ec = posfile.find_col("Energy")
    md = posfile.find_col("mean drain")
    linearfit = np.poly1d(
        posfile.polyfit(ec, md, 1, lambda x, y: lfit[0] <= x <= lfit[1]))
    posfile.add_column(lambda r: r[md] - linearfit(r[ec]), "minus linear")
    highend = posfile.mean("minus", lambda r: rfit[0] <= r[ec] <= rfit[1])
    ml = posfile.find_col("minus linear")
    posfile.add_column(lambda r: r[ml] / highend, "normalised")
    if "group_key" in kargs:
        posfile[kargs["group_key"]] = pos.key
    return posfile
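# Self-contained sketch of the normalisation idea in norm_group (an assumed
# reading of the intent: remove a linear pre-edge background, then scale so a
# post-edge window averages to one; the windows and edge here are synthetic):
import numpy as np

energy = np.linspace(700.0, 740.0, 201)
drain = 0.01 * energy + (energy > 720.0) * 1.5   # fake edge on a sloping background
pre = (energy >= 700.0) & (energy <= 715.0)      # lfit-style window
post = (energy >= 725.0) & (energy <= 740.0)     # rfit-style window
background = np.poly1d(np.polyfit(energy[pre], drain[pre], 1))
minus_linear = drain - background(energy)
normalised = minus_linear / minus_linear[post].mean()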
 def test_Operators(self):
     self.setUp()
     fldr = self.fldr
     fl = len(fldr)
     d = Data(np.ones((100, 5)))
     fldr += d
     self.assertEqual(fl + 1, len(fldr), "Failed += operator on DataFolder")
     fldr2 = fldr + fldr
     self.assertEqual((fl + 1) * 2, len(fldr2),
                      "Failed + operator with DataFolder on DataFolder")
     fldr -= "Untitled"
     self.assertEqual(
         len(fldr), fl,
         "Failed to remove Untitled-0 from DataFolder by name.")
     fldr -= "New-XRay-Data.dql"
     self.assertEqual(fl - 1, len(fldr),
                      "Failed to remove NEw Xray data by name.")
     fldr += "New-XRay-Data.dql"
     self.assertEqual(len(fldr), fl,
                      "Failed += operator with string on DataFolder")
     fldr /= "Loaded as"
     self.assertEqual(
         len(fldr["QDFile"]), 4,
         "Failed to group folder by Loaded as metadata with /= operator.")
     fldr.flatten()
    def Fit(self):
        """Run the fitting code."""
        self.Discard().Normalise().offset_correct()
        chi2 = self.p0.shape[0] > 1

        method = getattr(self, self.method)

        if not chi2:  # Single fit mode, consider whether to plot and save etc
            fit = method(
                self.model,
                p0=self.p0,
                result=True,
                header="Fit",
                output="report",
            )

            if self.show_plot:
                self.plot_results()
            if self.save_fit:
                self.save(False)
            if self.report:
                print(fit.fit_report())
            return fit
        d = Data(self)
        fit = d.lmfit(
            self.model, p0=self.p0, result=True, header="Fit", output="data"
        )

        if self.show_plot:
            fit.plot(multiple="panels", capsize=3)
            fit.yscale = "log"  # Adjust y scale for chi^2
            fit.tight_layout()
        if self.save_fit:
            fit.filename = None
            fit.save(False)
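# Hypothetical usage sketch - the host class is not shown here, so the
# attribute names below are assumptions inferred from the method body:
#
#     engine.model = some_lmfit_model
#     engine.p0 = np.atleast_2d(initial_guess)
#     engine.method = "lmfit"
#     engine.show_plot, engine.save_fit, engine.report = True, False, True
#     fit = engine.Fit()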
def import_moke(filename, roi=[-np.inf, np.inf], normalise=True):
    """
    Returns moke data from a file
    ----------------------------------------------------
    Params:
    - filename of data file (str)
    - region of interest - lower and upper bound of roi (2-long array)
    - normalise = True/False

    Returns:
    - x and y data for MOKE in ROI
    ----------------------------------------------------
    """
    lower = roi[0]
    upper = roi[1]
    d = Data(filename)
    d.setas(x='Field(T)', y='MOKE Signal')  # Set axes

    if normalise:
        d.y = normalise_moke(d.y)

    d.del_rows('Field(T)', (lower, upper),
               invert=True)  # Delete rows except for ROI

    return d.x, d.y
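# Usage sketch (the file name and ROI here are hypothetical):
#
#     x, y = import_moke("moke_loop.txt", roi=[-0.05, 0.05], normalise=True)
#     plt.plot(x, y)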
 def test_sg_filter(self):
     x=np.linspace(0,10*np.pi,1001)
     y=np.sin(x)+np.random.normal(size=1001,scale=0.05)
     d=Data(x,y,column_headers=["Time","Signal"],setas="xy")
     d.SG_Filter(order=1,result=True)
     d.setas="x.y"
     d.y=d.y-np.cos(x)
     self.assertAlmostEqual(d.y[5:-5].mean(), 0,places=2,msg="Failed to differentiate correctly")
    def test_csvfile(self):

        self.csv = Data(path.join(self.datadir, "working", "CSVFile_test.dat"),
                        filetype="JustNumbers",
                        column_headers=["Q", "I", "dI"],
                        setas="xye")
        self.assertEqual(self.csv.shape, (167, 3),
                         "Failed to load CSVFile from text")
def hist(im, *args, **kargs):
    """Pass through to :py:func:`matplotlib.pyplot.hist` function."""
    counts, edges = np.histogram(im.ravel(), *args, **kargs)
    centres = (edges[1:] + edges[:-1]) / 2
    new = Data(np.column_stack((centres, counts)))
    new.column_headers = ["Intensity", "Frequency"]
    new.setas = "xy"
    return new
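# Usage sketch for hist() above (a plain numpy array stands in for the image,
# since np.histogram only needs something ravel()-able; plotting assumes
# Stoner's Data.plot mixin):
import numpy as np

im = np.random.normal(loc=0.5, scale=0.1, size=(64, 64))
h = hist(im, bins=32)   # Data with Intensity/Frequency columns, setas "xy"
h.plot()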
 def test_grouping(self):
     fldr4=SF.DataFolder()
     x=np.linspace(-np.pi,np.pi,181)
     for phase in np.linspace(0,1.0,5):
         for amplitude in np.linspace(1,2,6):
             for frequency in np.linspace(1,2,5):
                 y=amplitude*np.sin(frequency*x+phase*np.pi)
                 d=Data(x,y,setas="xy",column_headers=["X","Y"])
                 d["frequency"]=frequency
                 d["amplitude"]=amplitude
                 d["phase"]=phase
                 d["params"]=[phase,frequency,amplitude]
                 d.filename="test/{amplitude}/{phase}/{frequency}.dat".format(**d)
                 fldr4+=d
     fldr4.unflatten()
     self.assertEqual(fldr4.mindepth,3,"Unflattened DataFolder had wrong mindepth.")
     self.assertEqual(fldr4.shape, (~~fldr4).shape,"DataFolder changed shape on flatten/unflatten")
     fldr5=fldr4.select(amplitude=1.4,recurse=True)
     fldr5.prune()
     pruned=(0,
             {'test': (0,
                {'1.4': (0,
                  {'0.0': (5, {}),
                   '0.25': (5, {}),
                   '0.5': (5, {}),
                   '0.75': (5, {}),
                   '1.0': (5, {})})})})
     selected=(0,
             {'test': (0,
                {'1.4': (0,
                  {'0.25': (1, {}), '0.5': (1, {}), '0.75': (1, {}), '1.0': (1, {})})})})
     self.assertEqual(fldr5.shape,pruned,"Folder pruning gave an unexpected shape.")
     self.assertEqual(fldr5[("test","1.4","0.5",0,"phase")],0.5,"Multilevel indexing of tree failed.")
     shape=(~(~fldr4).select(amplitude=1.4).select(frequency=1).select(phase__gt=0.2)).shape
     self.fldr4=fldr4
     self.assertEqual(shape, selected,"Multi selects and inverts failed.")
     g=(~fldr4)/10
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (15, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (15, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Divide by int failed.")
     g["Group 6"]-=5
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (15, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (14, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Sub by int failed.")
     remove=g["Group 3"][4]
     g["Group 3"]-=remove
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (14, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (14, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Sub by object failed.")
     d=fldr4["test",1.0,1.0].gather(0,1)
     self.assertEqual(d.shape,(181,6),"Gather seems to have failed.")
     self.assertTrue(np.all(fldr4["test",1.0,1.0].slice_metadata("phase")==
                            np.ones(5)),"Slice metadata failure.")
     d=(~fldr4).extract("phase","frequency","amplitude","params")
     self.assertEqual(d.shape,(150,6),"Extract failed to produce data of correct shape.")
     self.assertEqual(d.column_headers,['phase', 'frequency', 'amplitude', 'params', 'params', 'params'],"Extract failed to get correct column headers.")
     p=fldr4["test",1.0,1.0]
     p=SF.PlotFolder(p)
     p.plot()
     self.assertEqual(len(plt.get_fignums()),1,"Failed to generate a single plot for PlotFolder.")
     plt.close("all")
 def test_apply(self):
     self.app=Data(np.zeros((100,1)),setas="y")
     self.app.apply(lambda r:r.i[0],header="Counter")
     def calc(r,omega=1.0,k=1.0):
         return np.sin(r.y*omega)
     self.app.apply(calc,replace=False,header="Sin",_extra={"omega":0.1},k=1.0)
     self.app.apply(lambda r:r.__class__([r[1],r[0]]),replace=True,header=["Index","Sin"])
     self.app.setas="xy"
     self.assertAlmostEqual(self.app.integrate(output="result"),18.87616564214,msg="Integrate after applies failed.")
    def setUp(self):

        x_data = np.linspace(-10, 10, 101)
        y_data = 0.01 * x_data**2 + 0.3 * x_data - 2

        y_data *= np.random.normal(size=101, loc=1.0, scale=0.01)
        x_data += np.random.normal(size=101, scale=0.02)

        self.data = Data(x_data, y_data, column_headers=["X", "Y"])
        self.data.setas = "xy"
 def setUp(self):
     """Create a test data set."""
     x = np.linspace(1, 10, 10)
     y = 2 * x - 3
     dy = np.abs(y / 100)
     z = x + 4
     dz = np.abs(z / 100)
     self.data = Data(
         np.column_stack((x, y, dy, z, dz)),
         column_headers=["Tine", "Signal 1", "d1", "Signal 2", "d2"])
def profile_line(img, src=None, dst=None, linewidth=1, order=1, mode="constant", cval=0.0, constrain=True, **kargs):
    """Wrapper for sckit-image method of the same name to get a line_profile.

    Parameters:
        img(ImageArray): Image data to take line section of
        src, dst (2-tuple of int or float): start and end of line profile. If the co-ordinates
            are given as integers then they are assumed to be pixel co-ordinates; floats are
            assumed to be real-space co-ordinates using the embedded metadata.
        linewidth (int): the width of the profile to be taken.
        order (int 1-3): Order of interpolation used to find image data when not aligned to a point.
        mode (str): How to handle data outside of the image.
        cval (float): The constant value to assume for data outside of the image if mode is "constant".
        constrain (bool): Ensure the src and dst are within the image (default True).

    Returns:
        A :py:class:`Stoner.Data` object containing the line profile data and the metadata from the image.
    """
    scale = img.get("MicronsPerPixel", 1.0)
    r, c = img.shape
    if src is None and dst is None:
        if "x" in kargs:
            src = (kargs["x"], 0)
            dst = (kargs["x"], r)
        if "y" in kargs:
            src = (0, kargs["y"])
            dst = (c, kargs["y"])
    if isinstance(src, float):
        src = (src, src)
    if isinstance(dst, float):
        dst = (dst, dst)
    dst = _scale(dst, scale)
    src = _scale(src, scale)
    if not istuple(src, int, int):
        raise ValueError("src co-ordinates are not a 2-tuple of ints.")
    if not istuple(dst, int, int):
        raise ValueError("dst co-ordinates are not a 2-tuple of ints.")

    if constrain:
        fix = lambda x, mx: int(round(sorted([0, x, mx])[1]))
        r, c = img.shape
        src = list(src)
        src = (fix(src[0], r), fix(src[1], c))
        dst = (fix(dst[0], r), fix(dst[1], c))

    result = measure.profile_line(img, src, dst, linewidth, order, mode, cval)
    points = measure.profile._line_profile_coordinates(src, dst, linewidth)[:, :, 0]
    ret = Data()
    ret.data = points.T
    ret.setas = "xy"
    ret &= np.sqrt(ret.x ** 2 + ret.y ** 2) * scale
    ret &= result
    ret.column_headers = ["X", "Y", "Distance", "Intensity"]
    ret.setas = "..xy"
    ret.metadata = img.metadata.copy()
    return ret
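# Usage sketch (illustrative pixel co-ordinates; `img` would be an
# ImageArray carrying optional "MicronsPerPixel" metadata):
#
#     section = profile_line(img, src=(0, 0), dst=(100, 100), linewidth=3)
#     section.plot()    # Distance vs Intensity, per the setas above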
    def test_deletions(self):
        ch=["{}-Data".format(chr(x)) for x in range(65,91)]
        data=np.zeros((100,26))
        metadata=OrderedDict([("Key 1",True),("Key 2",12),("Key 3","Hellow world")])
        self.dd=Data(metadata)
        self.dd.data=data
        self.dd.column_headers=ch
        self.dd.setas="3.x3.y3.z"
        self.repr_string="""===========================  ========  =======  ========  =======  ========  ========
TDI Format 1.5                 D-Data   ....      H-Data   ....      Y-Data    Z-Data
index                           3 (x)              7 (y)                 24        25
===========================  ========  =======  ========  =======  ========  ========
Key 1{Boolean}= True                0                  0                  0         0
Key 2{I32}= 12                      0  ...             0  ...             0         0
Key 3{String}= Hellow world         0  ...             0  ...             0         0
Stoner.class{String}= Data          0  ...             0  ...             0         0
...                                 0  ...             0  ...             0         0"""
        self.assertEqual("\n".join(repr(self.dd).split("\n")[:9]),self.repr_string,"Representation with interesting columns failed.")
        del self.dd["Key 1"]
        self.assertEqual(len(self.dd.metadata),3,"Deletion of metadata failed.")
        del self.dd[20:30]
        self.assertEqual(self.dd.shape,(90,26),"Deleting rows directly failed.")
        self.dd.del_column("Q")
        self.assertEqual(self.dd.shape,(90,25),"Deleting a column by name failed.")
        self.dd%=3
        self.assertEqual(self.dd.shape,(90,24),"Deleting a column with %= failed.")
        self.dd.setas="x..y..z"
        self.dd.del_column(self.dd.setas.not_set)
        self.assertEqual(self.dd.shape,(90,3),"Deleting unset columns failed.")
        self.dd.mask[50,1]=True
        self.dd.del_column()
        self.assertEqual(self.dd.shape,(90,2),"Deletion of masked rows failed.")
        self.d5=Data(np.ones((10,10)))
        self.d5.column_headers=["A"]*5+["B"]*5
        self.d5.del_column("A",duplicates=True)
        self.assertEqual(self.d5.shape,(10,6),"Failed to delete columns with duplicates True and col specified.")
        self.d5=Data(np.ones((10,10)))
        self.d5.column_headers=list("ABCDEFGHIJ")
        self.d5.setas="..x..y"
        self.d5.del_column(True)
        self.assertEqual(self.d5.column_headers,["C","F"],"Failed to delete columns with col=True")
    def LoadData(self, data_item_number, filename):
        """LoadData(self, data_item_number, filename) --> none

        Loads the data from filename into the data_item_number.
        """
        try:
            datafile = Data(str(filename),
                            debug=True)  # does all the hard work here
        except Exception as e:
            ShowWarningDialog(
                self.parent,
                "Could not load the file: " + filename +
                " \nPlease check the format.\n\n Stoner.Data" +
                " gave the following error:\n" + str(e),
            )
        else:
            # For the freak case of only one data point
            try:
                if datafile.setas.cols["axes"] == 0:
                    self.x_col = datafile.find_col(self.x_col)
                    self.y_col = datafile.find_col(self.y_col)
                    self.e_col = datafile.find_col(self.e_col)
                    datafile.setas(x=self.x_col, y=self.y_col, e=self.e_col)
                else:
                    self.x_col = datafile.setas.cols["xcol"]
                    self.y_col = datafile.setas.cols["ycol"][0]
                    if len(datafile.setas.cols["yerr"]) > 0:
                        self.e_col = datafile.setas.cols["yerr"][0]
                    else:
                        datafile.add_column(np.ones(len(datafile)))
                        datafile.setas[-1] = "e"
            except Exception as e:
                ShowWarningDialog(
                    self.parent,
                    "The data file does not contain " +
                    "all the columns specified in the options\n" + str(e),
                )
                # Okay now we have showed a dialog lets bail out ...
                return
            # The data is set by the default Template.__init__ function, neat huh?
            # Now the loaded data goes into *_raw so that it is not
            # changed by the transforms
            datafile.y = np.where(datafile.y == 0.0, 1e-8, datafile.y)
            self.data[data_item_number].x_raw = datafile.x
            self.data[data_item_number].y_raw = datafile.y
            self.data[data_item_number].error_raw = datafile.e
            # Run the commands on the data - this also sets the x, y, error members
            # of that data item.
            self.data[data_item_number].run_command()

            # Send an update that new data has been loaded
            self.SendUpdateDataEvent()
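# Hypothetical usage sketch - in GenX this method is invoked by the
# data-loader framework rather than called directly; the column attributes
# below are assumptions:
#
#     loader.x_col, loader.y_col, loader.e_col = 0, 1, 2
#     loader.LoadData(0, "reflectivity_scan.dat")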
 def test_Properties(self):
     self.setUp()
     fldr=self.fldr
     fldr/="Loaded as"
     fldr["QDFile"].group("Byapp")
     self.assertEqual(fldr.mindepth,1,"mindepth attribute of folder failed.")
     self.assertEqual(fldr.depth,2,"depth attribute failed.")
     self.setUp()
     fldr=self.fldr
     fldr+=Data()
     self.assertEqual(len(list(fldr.loaded)),1,"loaded attribute failed {}".format(len(list(fldr.loaded))))
     self.assertEqual(len(list(fldr.not_empty)),len(fldr)-1,"not_empty attribute failed.")
     fldr-="Untitled"