Example #1
    def Fit(self):
        """Run the fitting code."""
        self.Discard().Normalise().offset_correct()
        chi2= self.p0.shape[0]>1

        method=getattr(self,self.method)

        if not chi2: # Single fit mode, consider whether to plot and save etc
            fit=method(self.model,p0=self.p0,result=True,header="Fit",output="report")

            if self.show_plot:
                self.plot_results()
            if self.save_fit:
                self.save(False)
            if self.report:
                print(fit.fit_report())
            return fit
        else: #chi^2 mapping mode
            d=Data(self)
            fit = d.lmfit(self.model, p0=self.p0, result=True, header="Fit", output="data")

            if self.show_plot:
                fit.plot(multiple="panels",capsize=3)
                fit.yscale = "log"  # Adjust y scale for chi^2
                fit.tight_layout()
            if self.save_fit:
                fit.filename=None
                fit.save(False)
Example #2
    def hysteresis(self, mask=None):
        """Make a hysteresis loop of the average intensity in the given images

        Keyword Argument:
            mask(ndarray or list):
                boolean array of same size as an image or imarray or list of
                masks for each image. If True then don't include that area in
                the intensity averaging.

        Returns
        -------
        hyst(Data):
            'Field', 'Intensity', 2 column array
        """
        hyst = np.column_stack((self.fields, np.zeros(len(self))))
        for i in range(len(self)):
            im = self[i]
            if isinstance(mask, np.ndarray) and len(mask.shape) == 2:
                hyst[i, 1] = np.average(im[np.invert(mask.astype(bool))])
            elif isinstance(mask, np.ndarray) and len(mask.shape) == 3:
                hyst[i, 1] = np.average(im[np.invert(mask[i, :, :].astype(bool))])
            elif isinstance(mask, (tuple, list)):
                hyst[i, 1] = np.average(im[np.invert(mask[i])])
            else:
                hyst[i, 1] = np.average(im)
        d = Data(hyst, setas="xy")
        d.column_headers = ["Field", "Intensity"]
        return d
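A minimal usage sketch for the method above, assuming `stack` is an instance of the image-stack class this method is defined on, holding 256x256 images and a matching `fields` attribute; the variable names and sizes are illustrative assumptions.

import numpy as np

# Build a mask that ignores the top band of every image (assumed 256x256).
mask = np.zeros((256, 256), dtype=bool)
mask[:32, :] = True

loop = stack.hysteresis(mask=mask)  # Data with 'Field' and 'Intensity' columns
loop.plot()                         # setas is already "xy", so this plots the loop
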
 def test_indexing(self):
     #Check all the indexing possibilities
     data=np.array(self.d.data)
     colname=self.d.column_headers[0]
     self.assertTrue(all(self.d.column(colname)==self.d[:,0]),"Failed direct indexing versus column method")
      self.assertTrue(all(self.d[:,0]==data[:,0]),"Failed direct indexing versus direct array index")
     self.assertTrue(all(self.d[:,[0,1]]==data),"Failed direct list indexing")
     self.assertTrue(all(self.d[::2,:]==data[::2]),"Failed slice indexing rows")
     self.assertTrue(all(self.d[colname]==data[:,0]),"Failed direct indexing by column name")
     self.assertTrue(all(self.d[:,colname]==data[:,0]),"Failed fallback indexing by column name")
     self.assertEqual(self.d[25,1],645.0,"Failed direct single cell index")
     self.assertEqual(self.d[25,"Y-Data"],645.0,"Failoed single cell index direct")
     self.assertEqual(self.d["Y-Data",25],645.0,"Failoed single cell fallback index order")
     self.d["X-Dat"]=[11,12,13,14,15]
     self.assertEqual(self.d["X-Dat",2],13,"Failed indexing of metadata lists with tuple")
     self.assertEqual(self.d["X-Dat"][2],13,"Failed indexing of metadata lists with double indices")
     d=Data(np.ones((10,10)))
     d[0,0]=5 #Index by tuple into data
     d["Column_1",0]=6 # Index by column name, row into data
     d[0,"Column_2"]=7 #Index by row, column name into data
     d["Column_3"]=[1,2,3,4] # Create a metadata
     d["Column_3",2]=2 # Index existing metadata via tuple
     d.metadata[0,5]=10
     d[0,5]=12 # Even if tuple, index metadata if already existing.
     self.assertTrue(np.all(d[0]==np.array([5,6,7,1,1,1,1,1,1,1])),"setitem on Data to index into Data.data failed.\n{}".format(d[0]))
     self.assertEqual(d.metadata["Column_3"],[1,2,2,4],"Tuple indexing into metadata Failed.")
     self.assertEqual(d.metadata[0,5],12,"Indexing of pre-existing metadta keys rather than Data./data failed.")
     self.assertEqual(d.metadata[1],[1, 2, 2, 4],"Indexing metadata by integer failed.")
 def setUp(self):
     self.d = Data(path.join(path.dirname(__file__), "CoreTest.dat"),
                   setas="xy")
     self.d2 = Data(
         path.join(__home__, "..", "sample-data", "TDI_Format_RT.txt"))
     self.d3 = Data(
         path.join(__home__, "..", "sample-data", "New-XRay-Data.dql"))
Example #5
    def Fit(self):
        """Run the fitting code."""
        self.Discard().Normalise().offset_correct()
        chi2 = self.p0.shape[0] > 1

        method = getattr(self, self.method)

        if not chi2:  # Single fit mode, consider whether to plot and save etc
            fit = method(
                self.model,
                p0=self.p0,
                result=True,
                header="Fit",
                output="report",
            )

            if self.show_plot:
                self.plot_results()
            if self.save_fit:
                self.save(False)
            if self.report:
                print(fit.fit_report())
            return fit
        d = Data(self)
        fit = d.lmfit(
            self.model, p0=self.p0, result=True, header="Fit", output="data"
        )

        if self.show_plot:
            fit.plot(multiple="panels", capsize=3)
            fit.yscale = "log"  # Adjust y scale for chi^2
            fit.tight_layout()
        if self.save_fit:
            fit.filename = None
            fit.save(False)
 def test_extra_plots(self):
     x = np.random.uniform(-np.pi, np.pi, size=5001)
     y = np.random.uniform(-np.pi, np.pi, size=5001)
     z = (np.cos(4 * np.sqrt(x**2 + y**2)) *
          np.exp(-np.sqrt(x**2 + y**2) / 3.0))**2
     self.d2 = Data(x, y, z, column_headers=["X", "Y", "Z"], setas="xyz")
     self.d2.contour_xyz(projection="2d")  #
      self.assertEqual(len(plt.get_fignums()), 1,
                       "contour_xyz failed to produce a single figure.")
     plt.close("all")
     X, Y, Z = self.d2.griddata(xlim=(-np.pi, np.pi), ylim=(-np.pi, np.pi))
     plt.imshow(Z)
      self.assertEqual(len(plt.get_fignums()), 1,
                       "imshow of gridded data failed to produce a single figure.")
     plt.imshow(Z)
     plt.close("all")
     x, y = np.meshgrid(np.linspace(-np.pi, np.pi, 10),
                        np.linspace(-np.pi, np.pi, 10))
     z = np.zeros_like(x)
     w = np.cos(np.sqrt(x**2 + y**2))
     q = np.arctan2(x, y)
     u = np.abs(w) * np.cos(q)
     v = np.abs(w) * np.sin(q)
     self.d3 = Data(x.ravel(),
                    y.ravel(),
                    z.ravel(),
                    u.ravel(),
                    v.ravel(),
                    w.ravel(),
                    setas="xyzuvw")
     self.d3.plot()
      self.assertEqual(len(plt.get_fignums()), 1,
                       "Vector field plot failed to produce a single figure.")
     plt.close("all")
Example #7
 def test_Properties(self):
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
      self.assertEqual(fldr.mindepth,0,"Minimum depth of flat group not equal to zero.")
     fldr/="Loaded as"
     grps=list(fldr.lsgrp)
     skip=0 if hyperspy_ok else 1
      self.assertEqual(len(grps),26-skip,"Length of lsgrp not as expected: {} not {}".format(len(grps),26-skip))
     fldr.debug=True
     self.fldr=fldr
     self.assertTrue(fldr["XRDFile"][0].debug,"Setting debug on folder failed!")
     fldr.debug=False
     fldr["QDFile"].group("Byapp")
     self.assertEqual(fldr.trunkdepth,1,"Trunkdepth failed")
     self.assertEqual(fldr.mindepth,1,"mindepth attribute of folder failed.")
     self.assertEqual(fldr.depth,2,"depth attribute failed.")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     fldr+=Data()
     skip=1 if hyperspy_ok else 2
     self.assertEqual(len(list(fldr.loaded)),1,"loaded attribute failed {}".format(len(list(fldr.loaded))))
     self.assertEqual(len(list(fldr.not_empty)),len(fldr)-skip,"not_empty attribute failed.")
     fldr-="Untitled"
     self.assertFalse(fldr.is_empty,"fldr.is_empty failed")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     objects=copy(fldr.objects)
     fldr.objects=dict(objects)
     self.assertTrue(isinstance(fldr.objects,regexpDict),"Folder objects not reset to regexp dictionary")
     fldr.objects=objects
     self.assertTrue(isinstance(fldr.objects,regexpDict),"Setting Folder objects mangled type")
     fldr.type=Data()
      self.assertTrue(issubclass(fldr.type,Data),"Setting type by instance of class failed")
Example #8
def hist(im, *args, **kargs):
    """Pass through to :py:func:`matplotlib.pyplot.hist` function."""
    counts, edges = np.histogram(im.ravel(), *args, **kargs)
    centres = (edges[1:] + edges[:-1]) / 2
    new = Data(np.column_stack((centres, counts)))
    new.column_headers = ["Intensity", "Frequency"]
    new.setas = "xy"
    return new
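A short usage sketch for the `hist` helper above, with a random numpy array standing in for the image; the array contents and bin count are illustrative assumptions.

import numpy as np

rng = np.random.default_rng(0)
im = rng.normal(loc=0.5, scale=0.1, size=(128, 128))  # stand-in greyscale image

h = hist(im, bins=64)    # extra positional/keyword args pass through to np.histogram
print(h.column_headers)  # ['Intensity', 'Frequency']
h.plot()                 # setas is already "xy"
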
Example #9
 def test_sg_filter(self):
     x=np.linspace(0,10*np.pi,1001)
     y=np.sin(x)+np.random.normal(size=1001,scale=0.05)
     d=Data(x,y,column_headers=["Time","Signal"],setas="xy")
     d.SG_Filter(order=1,result=True)
     d.setas="x.y"
     d.y=d.y-np.cos(x)
     self.assertAlmostEqual(d.y[5:-5].mean(), 0,places=2,msg="Failed to differentiate correctly")
Example #11
 def test_grouping(self):
     fldr4=SF.DataFolder()
     x=np.linspace(-np.pi,np.pi,181)
     for phase in np.linspace(0,1.0,5):
         for amplitude in np.linspace(1,2,6):
             for frequency in np.linspace(1,2,5):
                 y=amplitude*np.sin(frequency*x+phase*np.pi)
                 d=Data(x,y,setas="xy",column_headers=["X","Y"])
                 d["frequency"]=frequency
                 d["amplitude"]=amplitude
                 d["phase"]=phase
                 d["params"]=[phase,frequency,amplitude]
                 d.filename="test/{amplitude}/{phase}/{frequency}.dat".format(**d)
                 fldr4+=d
     fldr4.unflatten()
     self.assertEqual(fldr4.mindepth,3,"Unflattened DataFolder had wrong mindepth.")
      self.assertEqual(fldr4.shape, (~~fldr4).shape,"DataFolder changed shape on flatten/unflatten")
     fldr5=fldr4.select(amplitude=1.4,recurse=True)
     fldr5.prune()
     pruned=(0,
             {'test': (0,
                {'1.4': (0,
                  {'0.0': (5, {}),
                   '0.25': (5, {}),
                   '0.5': (5, {}),
                   '0.75': (5, {}),
                   '1.0': (5, {})})})})
     selected=(0,
             {'test': (0,
                {'1.4': (0,
                  {'0.25': (1, {}), '0.5': (1, {}), '0.75': (1, {}), '1.0': (1, {})})})})
      self.assertEqual(fldr5.shape,pruned,"Folder pruning gave an unexpected shape.")
     self.assertEqual(fldr5[("test","1.4","0.5",0,"phase")],0.5,"Multilevel indexing of tree failed.")
     shape=(~(~fldr4).select(amplitude=1.4).select(frequency=1).select(phase__gt=0.2)).shape
     self.fldr4=fldr4
     self.assertEqual(shape, selected,"Multi selects and inverts failed.")
     g=(~fldr4)/10
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (15, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (15, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Dive by int failed.")
     g["Group 6"]-=5
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (15, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (14, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Sub by int failed.")
     remove=g["Group 3"][4]
     g["Group 3"]-=remove
     self.assertEqual(g.shape,(0,{'Group 0': (15, {}),'Group 1': (15, {}),'Group 2': (15, {}),'Group 3': (14, {}),'Group 4': (15, {}),
                                  'Group 5': (15, {}),'Group 6': (14, {}),'Group 7': (15, {}),'Group 8': (15, {}),'Group 9': (15, {})}),"Sub by object failed.")
     d=fldr4["test",1.0,1.0].gather(0,1)
      self.assertEqual(d.shape,(181,6),"Gather seems to have failed.")
     self.assertTrue(np.all(fldr4["test",1.0,1.0].slice_metadata("phase")==
                            np.ones(5)),"Slice metadata failure.")
     d=(~fldr4).extract("phase","frequency","amplitude","params")
     self.assertEqual(d.shape,(150,6),"Extract failed to produce data of correct shape.")
      self.assertEqual(d.column_headers,['phase', 'frequency', 'amplitude', 'params', 'params', 'params'],"Extract failed to get correct column headers.")
     p=fldr4["test",1.0,1.0]
     p=SF.PlotFolder(p)
     p.plot()
     self.assertEqual(len(plt.get_fignums()),1,"Failed to generate a single plot for PlotFolder.")
     plt.close("all")
Example #13
 def test_apply(self):
     self.app=Data(np.zeros((100,1)),setas="y")
     self.app.apply(lambda r:r.i[0],header="Counter")
     def calc(r,omega=1.0,k=1.0):
         return np.sin(r.y*omega)
     self.app.apply(calc,replace=False,header="Sin",_extra={"omega":0.1},k=1.0)
     self.app.apply(lambda r:r.__class__([r[1],r[0]]),replace=True,header=["Index","Sin"])
     self.app.setas="xy"
      self.assertAlmostEqual(self.app.integrate(output="result"),18.87616564214,msg="Integrate after applies failed.")
Example #14
 def setUp(self):
     """Create a test data set."""
     x = np.linspace(1, 10, 10)
     y = 2 * x - 3
     dy = np.abs(y / 100)
     z = x + 4
     dz = np.abs(z / 100)
     self.data = Data(
         np.column_stack((x, y, dy, z, dz)),
         column_headers=["Tine", "Signal 1", "d1", "Signal 2", "d2"])
Example #15
    def setUp(self):

        x_data = np.linspace(-10, 10, 101)
        y_data = 0.01 * x_data**2 + 0.3 * x_data - 2

        y_data *= np.random.normal(size=101, loc=1.0, scale=0.01)
        x_data += np.random.normal(size=101, scale=0.02)

        self.data = Data(x_data, y_data, column_headers=["X", "Y"])
        self.data.setas = "xy"
Example #16
 def test_Base_Operators(self):
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     for d in fldr:
         _=d["Loaded as"]
     fldr=baseFolder(fldr)
     fl=len(fldr)
     d=Data(np.ones((100,5)))
     fldr+=d
     self.assertEqual(fl+1,len(fldr),"Failed += operator on DataFolder")
     fldr2=fldr+fldr
     self.assertEqual((fl+1)*2,len(fldr2),"Failed + operator with DataFolder on DataFolder")
     fldr-="Untitled"
     self.assertEqual(len(fldr),fl,"Failed to remove Untitled-0 from DataFolder by name.")
     fldr-="New-XRay-Data.dql"
      self.assertEqual(fl-1,len(fldr),"Failed to remove New XRay data by name.")
     del fldr["1449 37.0 kx.emd"]
     fldr/="Loaded as"
     self.assertEqual(len(fldr["QDFile"]),4,"Failoed to group folder by Loaded As metadata with /= opeator.")
     fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     for d in fldr:
         _=d["Loaded as"]
     fldr=baseFolder(fldr)
     fldr2=SF.DataFolder(path.join(self.datadir,"NLIV"),pattern="*.txt")
     fldr2.group(lambda x:"zero" if x["iterator"]%2==0 else "one")
     fldr3=fldr+fldr2
     self.assertEqual(fldr3.shape,(47, {'one': (9, {}), 'zero': (7, {})}),"Adding two DataFolders with groups failed")
     fldr4=fldr3-fldr2
     fldr4.prune()
     self.assertEqual(fldr4.shape,fldr.shape,"Failed to subtract one DataFolder from another :{}".format(fldr4.shape))
     del fldr2["one"]
     self.assertEqual(fldr2.shape,(0, {'zero': (7, {})}),"Delitem with group failed")
     fldr2.key=path.basename(fldr2.key)
     self.assertEqual(repr(fldr2),"DataFolder(NLIV) with pattern ('*.txt',) has 0 files and 1 groups\n\tDataFolder(zero) with pattern ['*.txt'] has 7 files and 0 groups","Representation methods failed")
     self.fldr=SF.DataFolder(self.datadir,debug=False,recursive=False)
     names=list(self.fldr.ls)[::2]
     self.fldr-=names
     self.assertEqual(len(self.fldr),23,"Failed to delete from a sequence")
     try:
         self.fldr-0.34
     except TypeError:
         pass
     else:
         self.assertTrue(False,"Failed to throw a TypeError when subtracting a float")
     try:
         self.fldr-Data()
     except RuntimeError:
         pass
     else:
         self.assertTrue(False,"Failed to throw a RuntimeError when subtracting a non-member")
     try:
         self.fldr-"Wiggle"
     except RuntimeError:
         pass
     else:
         self.assertTrue(False,"Failed to throw a RuntimeError when subtracting a non-member")
Example #17
class AnalysisMixins_test(unittest.TestCase):
    """Path to sample Data File"""
    datadir = path.join(pth, "sample-data")

    def setUp(self):

        x_data = np.linspace(-10, 10, 101)
        y_data = 0.01 * x_data**2 + 0.3 * x_data - 2

        y_data *= np.random.normal(size=101, loc=1.0, scale=0.01)
        x_data += np.random.normal(size=101, scale=0.02)

        self.data = Data(x_data, y_data, column_headers=["X", "Y"])
        self.data.setas = "xy"

    def test_curve_fit(self):
        for output, fmt in zip(["fit", "row", "full", "dict", "data"],
                               [tuple, np.ndarray, tuple, dict, Data]):
            res = self.data.curve_fit(fit, p0=[0.02, 0.2, 2], output=output)
            self.assertTrue(
                isinstance(res, fmt),
                "Failed to get expected output from curve_fit for {} (got {})".
                format(output, type(res)))

    def test_lmfit(self):
        for output, fmt in zip(["fit", "row", "full", "dict", "data"],
                               [tuple, np.ndarray, tuple, dict, Data]):
            res = self.data.lmfit(fit, p0=[0.02, 0.2, 2], output=output)
            self.assertTrue(
                isinstance(res, fmt),
                "Failed to get expected output from lmfit for {} (got {})".
                format(output, type(res)))

    def test_odr(self):
        for output, fmt in zip(["fit", "row", "full", "dict", "data"],
                               [tuple, np.ndarray, tuple, dict, Data]):
            res = self.data.odr(fit, p0=[0.02, 0.2, 2], output=output)
            self.assertTrue(
                isinstance(res, fmt),
                "Failed to get expected output from idr for {} (got {})".
                format(output, type(res)))

    def test_differential_evolution(self):
        for output, fmt in zip(["fit", "row", "full", "dict", "data"],
                               [tuple, np.ndarray, tuple, dict, Data]):
            res = self.data.differential_evolution(fit,
                                                   p0=[0.02, 0.2, 2],
                                                   output=output)
            self.assertTrue(
                isinstance(res, fmt),
                "Failed to get expected output from differential_evolution for {} (got {})"
                .format(output, type(res)))
Example #18
    def LoadData(self, data_item_number, filename):
        """LoadData(self, data_item_number, filename) --> none

        Loads the data from filename into the data_item_number.
        """
        try:
            datafile = Data(str(filename),
                            debug=True)  # does all the hard work here
        except Exception as e:
            ShowWarningDialog(
                self.parent,
                "Could not load the file: " + filename +
                " \nPlease check the format.\n\n Stoner.Data" +
                " gave the following error:\n" + str(e),
            )
        else:
            # For the freak case of only one data point
            try:
                if datafile.setas.cols["axes"] == 0:
                    self.x_col = datafile.find_col(self.x_col)
                    self.y_col = datafile.find_col(self.y_col)
                    self.e_col = datafile.find_col(self.e_col)
                    datafile.setas(x=self.x_col, y=self.y_col, e=self.e_col)
                else:
                    self.x_col = datafile.setas.cols["xcol"]
                    self.y_col = datafile.setas.cols["ycol"][0]
                    if len(datafile.setas.cols["yerr"]) > 0:
                        self.e_col = datafile.setas.cols["yerr"][0]
                    else:
                        datafile.add_column(np.ones(len(datafile)))
                        datafile.setas[-1] = "e"
            except Exception as e:
                ShowWarningDialog(
                    self.parent,
                    "The data file does not contain" +
                    "all the columns specified in the opions\n" + e.message,
                )
                # Okay now we have showed a dialog lets bail out ...
                return
            # The data is set by the default Template.__init__ function, neat huh?
            # Now the loaded data goes into *_raw so that they are not
            # changed by the transforms
            datafile.y = np.where(datafile.y == 0.0, 1e-8, datafile.y)
            self.data[data_item_number].x_raw = datafile.x
            self.data[data_item_number].y_raw = datafile.y
            self.data[data_item_number].error_raw = datafile.e
            # Run the commands on the data - this also sets the x,y, error members
            # of that data item.
            self.data[data_item_number].run_command()

            # Send an update that new data has been loaded
            self.SendUpdateDataEvent()
 def setUp(self):
     self.d1 = Data(path.join(self.datadir, "OVF1.ovf"))
     self.d2 = Data(path.join(self.datadir, "TDI_Format_RT.txt"))
     self.d3 = Data(path.join(self.datadir, "New-XRay-Data.dql"))
     self.d4 = Data(np.column_stack([np.ones(100),
                                     np.ones(100) * 2]),
                    setas="xy")
 def test_constructor(self):
     """Constructor Tests"""
     d = Data()
     self.assertTrue(d.shape == (1, 0), "Bare constructor failed")
     d = Data(self.d)
     self.assertTrue(np.all(d.data == self.d.data),
                     "Constructor from DataFile failed")
     d = Data([np.ones(100), np.zeros(100)])
     self.assertTrue(d.shape == (100, 2),
                     "Constructor from iterable list of nd array failed")
     d = Data([np.ones(100), np.zeros(100)], ["X", "Y"])
     self.assertTrue(
         d.column_headers == ["X", "Y"],
         "Failed to set column headers in constructor: {}".format(
             d.column_headers))
 def test_len(self):
     # Check that length of the column is the same as length of the data
     self.assertEqual(len(Data()),0,"Empty DataFile not length zero")
     self.assertEqual(len(self.d.column(0)),len(self.d),"Column 0 length not equal to DataFile length")
     self.assertEqual(len(self.d),self.d.data.shape[0],"DataFile length not equal to data.shape[0]")
     # Check that self.column_headers returns the right length
     self.assertEqual(len(self.d.column_headers),self.d.data.shape[1],"Length of column_headers not equal to data.shape[1]")
 def test_Operators(self):
     self.setUp()
     fldr = self.fldr
     fl = len(fldr)
     d = Data(np.ones((100, 5)))
     fldr += d
     self.assertEqual(fl + 1, len(fldr), "Failed += operator on DataFolder")
     fldr2 = fldr + fldr
     self.assertEqual((fl + 1) * 2, len(fldr2),
                      "Failed + operator with DataFolder on DataFolder")
     fldr -= "Untitled"
     self.assertEqual(
         len(fldr), fl,
         "Failed to remove Untitled-0 from DataFolder by name.")
     fldr -= "New-XRay-Data.dql"
     self.assertEqual(fl - 1, len(fldr),
                      "Failed to remove NEw Xray data by name.")
     fldr += "New-XRay-Data.dql"
     self.assertEqual(len(fldr), fl,
                      "Failed += oeprator with string on DataFolder")
     fldr /= "Loaded as"
     self.assertEqual(
         len(fldr["QDFile"]), 4,
         "Failoed to group folder by Loaded As metadata with /= opeator.")
     fldr.flatten()
    def test_csvfile(self):

        self.csv = Data(path.join(self.datadir, "working", "CSVFile_test.dat"),
                        filetype="JustNumbers",
                        column_headers=["Q", "I", "dI"],
                        setas="xye")
        self.assertEqual(self.csv.shape, (167, 3),
                         "Failed to load CSVFile from text")
Example #24
 def test_clip(self):
     x=np.linspace(0,np.pi*10,1001)
     y=np.sin(x)
     z=np.cos(x)
     d=Data(x,y,z,setas="xyz")
     d.clip((-0.1,0.2),"Column 2")
     self.assertTrue((d.z.min()>=-0.1) and (d.z.max()<=0.2),"Clip with a column specified failed.")
     d=Data(x,y,z,setas="xyz")
     d.clip((-0.5,0.7))
     self.assertTrue((d.y.min()>=-0.5) and (d.y.max()<=0.7),"Clip with no column specified failed.")
Example #25
def import_moke(filename, roi=[-np.inf, np.inf], normalise=True):
    """
    Returns moke data from a file
    ----------------------------------------------------
    Params:
    - filename of data file (str)
    - region of interest - lower and upper bound of roi (2-long array)
    - normalise = True/False

    Returns:
    - x and y data for MOKE in ROI
    ----------------------------------------------------
    """
    lower = roi[0]
    upper = roi[1]
    d = Data(filename)
    d.setas(x='Field(T)', y='MOKE Signal')  # Set axes

    if normalise:
        d.y = normalise_moke(d.y)

    d.del_rows('Field(T)', (lower, upper),
               invert=True)  # Delete rows except for ROI

    return d.x, d.y
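A hedged usage sketch for `import_moke` above; the file name is a placeholder and the file is assumed to carry the 'Field(T)' and 'MOKE Signal' columns that the function expects. `normalise=False` is used so the sketch does not depend on the separate `normalise_moke` helper.

from Stoner import Data

# "moke_loop.dat" is an assumed, illustrative file name.
x, y = import_moke("moke_loop.dat", roi=[-0.05, 0.05], normalise=False)

loop = Data(x, y, setas="xy", column_headers=["Field (T)", "MOKE signal"])
loop.plot()
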
 def test_metadata_save(self):
     local = path.dirname(__file__)
     t = np.arange(12).reshape(3,4) #set up a test data file with mixed metadata
     t = Data(t)
     t.column_headers = ["1","2","3","4"]
     metitems = [True,1,0.2,{"a":1, "b":"abc"},(1,2),np.arange(3),[1,2,3], "abc", #all types accepted
                 r"\\abc\cde", 1e-20, #extra tests
                 [1,(1,2),"abc"], #list with different types
                 [[[1]]], #nested list
                 None, #None value
                 ]
     metnames = ["t"+str(i) for i in range(len(metitems))]
     for k,v in zip(metnames,metitems):
         t[k] = v
     t.save(path.join(local, "mixedmetatest.dat"))
     tl = Data(path.join(local, "mixedmetatest.txt")) #will change extension to txt if not txt or tdi, is this what we want?
     t2 = self.d4.clone  #check that python tdi save is the same as labview tdi save
     t2.save(path.join(local, "mixedmetatest2.txt"))
     t2l = Data(path.join(local, "mixedmetatest2.txt"))
     for orig, load in [(t,tl), (t2, t2l)]:
         for k in ['Loaded as', 'TDI Format']:
             orig[k]=load[k]
         self.assertTrue(np.allclose(orig.data, load.data))
         self.assertTrue(orig.column_headers==load.column_headers)
         self.res=load.metadata^orig.metadata
         self.assertTrue(load.metadata==orig.metadata,"Metadata not the same on round tripping to disc")
     os.remove(path.join(local, "mixedmetatest.txt")) #clear up
     os.remove(path.join(local, "mixedmetatest2.txt"))
 def test_apply(self):
     self.app=Data(np.zeros((100,1)),setas="y")
     self.app.apply(lambda r:r.i[0],header="Counter")
     def calc(r,omega=1.0,k=1.0):
         return np.sin(r.y*omega)
     self.app.apply(calc,replace=False,header="Sin",_extra={"omega":0.1},k=1.0)
     self.app.apply(lambda r:r.__class__([r[1],r[0]]),replace=True,header=["Index","Sin"])
     self.app.setas="xy"
      self.assertAlmostEqual(self.app.integrate(),-64.1722191259037,msg="Integrate after applies failed.")
 def test_functions(self):
     #Test section:
     self.s1=self.d1.section(z=(12,13))
     self.assertTrue(142.710<self.d2.mean("Temp")<142.711,"Failed on the mean test.")
     self.assertTrue(round(self.d2.span("Temp")[0],1)==4.3 and round(self.d2.span("Temp")[1],1)==291.6,"Span test failed.")
     f=self.d2.split(lambda r:r["Temp"]<150)
     self.assertTrue(len(f[0])==838,"Split failed to work.")
     self.assertEqual(len(self.d3.threshold(2000,rising=True,falling=True,all_vals=True)),5,"Threshold failure.")
     self.d4.add(0,1,"Add")
     self.d4.subtract(1,0,header="Subtract")
     self.d4.multiply(0,1,header="Multiply")
     self.d4.divide(0,1,header="Divide")
     self.d4.diffsum(0,1,header="Diffsum")
     self.assertTrue(np.all(self.d4[0]==np.array([-0.5,-1,-3,3,-1,2])),"Test column ops failed.")
     d=Data(np.zeros((100,1)))
     d.add(0,1.0)
      self.assertEqual(np.sum(d[:,0]),100.,"Add with a float didn't work")
     d.add(0,np.ones(100))
     self.assertEqual(np.sum(d[:,0]),200.,"Add with an array failed.")
Example #29
 def test_saving(self):
     fldr4=SF.DataFolder()
     x=np.linspace(-np.pi,np.pi,181)
     for phase in np.linspace(0,1.0,5):
         for amplitude in np.linspace(1,2,6):
             for frequency in np.linspace(1,2,5):
                 y=amplitude*np.sin(frequency*x+phase*np.pi)
                 d=Data(x,y,setas="xy",column_headers=["X","Y"])
                 d["frequency"]=frequency
                 d["amplitude"]=amplitude
                 d["phase"]=phase
                 d["params"]=[phase,frequency,amplitude]
                 d.filename="test/{amplitude}/{phase}/{frequency}.dat".format(**d)
                 fldr4+=d
     fldr4.unflatten()
     newdir=tempfile.mkdtemp()
     fldr4.save(newdir)
     fldr5=SF.DataFolder(newdir)
     self.assertEqual(fldr4.shape,fldr5.shape,"Saved DataFolder and loaded DataFolder have different shapes")
def collate(grp, trail, **kargs):
    """Gather all the data up again."""
    grp.sort()
    final = Data()
    final.add_column(grp[0].column("Energy"), "Energy")
    for g in grp:
        final.add_column(g.column("Asym"), g.title)
    if "group_key" in kargs:
        final[kargs["group_key"]] = grp.key
    final["path"] = trail
    if "save" in kargs and kargs["save"]:
        final.save(kargs["filename"])
    return final
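`collate` has the signature of a DataFolder group walker (group, breadcrumb trail, keyword options), so it would typically be handed to a grouped DataFolder's walk routine; the direct call below is a minimal sketch that assumes `fldr` is a Stoner DataFolder whose members each hold 'Energy' and 'Asym' columns and that has already been grouped by a "temperature" key (all names are assumptions).

# Sketch only - `fldr` is assumed to exist and to be grouped already.
grp = fldr.groups["300K"]
summary = collate(grp, "300K", group_key="temperature", save=False)
print(summary.column_headers)  # 'Energy' plus one column per member scan
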
Example #32
    def test_deletions(self):
        ch = ["{}-Data".format(chr(x)) for x in range(65, 91)]
        data = np.zeros((100, 26))
        metadata = OrderedDict([("Key 1", True), ("Key 2", 12),
                                ("Key 3", "Hellow world")])
        self.dd = Data(metadata)
        self.dd.data = data
        self.dd.column_headers = ch
        self.dd.setas = "3.x3.y3.z"
        self.repr_string = """===========================  ========  =======  ========  =======  ========  ========
TDI Format 1.5                 D-Data   ....      H-Data   ....      Y-Data    Z-Data
index                           3 (x)              7 (y)                 24        25
===========================  ========  =======  ========  =======  ========  ========
Key 1{Boolean}= True                0                  0                  0         0
Key 2{I32}= 12                      0  ...             0  ...             0         0
Key 3{String}= Hellow world         0  ...             0  ...             0         0
Stoner.class{String}= Data          0  ...             0  ...             0         0
...                                 0  ...             0  ...             0         0"""
        self.assertEqual("\n".join(repr(self.dd).split("\n")[:9]),
                         self.repr_string,
                         "Representation with interesting columns failed.")
        del self.dd["Key 1"]
        self.assertEqual(len(self.dd.metadata), 3,
                         "Deletion of metadata failed.")
        del self.dd[20:30]
        self.assertEqual(self.dd.shape, (90, 26),
                         "Deleting rows directly failed.")
        self.dd.del_column("Q")
        self.assertEqual(self.dd.shape, (90, 25),
                         "Deleting a column by name failed.")
        self.dd %= 3
        self.assertEqual(self.dd.shape, (90, 24),
                         "Deleting a column with %= failed.")
        self.dd.setas = "x..y..z"
        self.dd.del_column(self.dd.setas.not_set)
        self.assertEqual(self.dd.shape, (90, 3),
                         "Deleting unset columns failed.")
        self.dd.mask[50, 1] = True
        self.dd.del_column()
        self.assertEqual(self.dd.shape, (90, 2),
                         "Deletion of masked rows failed.")
 def test_set_no_figs(self):
     self.assertTrue(Options.no_figs,
                     "Default setting for no_figs option is incorrect.")
     Options.no_figs = True
     e = self.d.clone
     ret = e.plot()
     self.assertTrue(
         ret is None,
         "Output of Data.plot() was not None when no_figs is True  and showfig is not set({})"
         .format(type(ret)))
     Options.no_figs = False
     e.showfig = False
     ret = e.plot()
     self.assertTrue(
         isinstance(ret, Data),
         "Return value of Data.plot() was not self when Data.showfig=False ({})"
         .format(type(ret)))
     e.showfig = True
     ret = e.plot()
     self.assertTrue(
         isinstance(ret, Figure),
         "Return value of Data.plot() was not Figure when Data.showfig=False({})"
         .format(type(ret)))
     e.showfig = None
     ret = e.plot()
     self.assertTrue(
         ret is None,
         "Output of Data.plot() was not None when Data.showfig is None ({})"
         .format(type(ret)))
     Options.no_figs = True
     self.assertTrue(Options.no_figs, "set_option no_figs failed.")
     self.d = Data(
         path.join(__home__, "..", "sample-data", "New-XRay-Data.dql"))
     self.d.showfig = False
     ret = self.d.plot()
     self.assertTrue(
         ret is None,
         "Output of Data.plot() was not None when no_figs is True ({})".
         format(type(ret)))
     Options.no_figs = True
     plt.close("all")
Example #34
def profile_line(img, src=None, dst=None, linewidth=1, order=1, mode="constant", cval=0.0, constrain=True, **kargs):
    """Wrapper for sckit-image method of the same name to get a line_profile.

    Parameters:
        img(ImageArray): Image data to take line section of
        src, dst (2-tuple of int or float): start and end of line profile. If the co-ordinates
            are given as integers then they are assumed to be pixel co-ordinates; floats are
            assumed to be real-space co-ordinates using the embedded metadata.
        linewidth (int): the width of the profile to be taken.
        order (int 1-3): Order of interpolation used to find image data when not aligned to a point
        mode (str): How to handle data outside of the image.
        cval (float): The constant value to assume for data outside of the image if mode is "constant"
        constrain (bool): Ensure the src and dst are within the image (default True).

    Returns:
        A :py:class:`Stoner.Data` object containing the line profile data and the metadata from the image.
    """
    scale = img.get("MicronsPerPixel", 1.0)
    r, c = img.shape
    if src is None and dst is None:
        if "x" in kargs:
            src = (kargs["x"], 0)
            dst = (kargs["x"], r)
        if "y" in kargs:
            src = (0, kargs["y"])
            dst = (c, kargs["y"])
    if isinstance(src, float):
        src = (src, src)
    if isinstance(dst, float):
        dst = (dst, dst)
    dst = _scale(dst, scale)
    src = _scale(src, scale)
    if not istuple(src, int, int):
        raise ValueError("src co-ordinates are not a 2-tuple of ints.")
    if not istuple(dst, int, int):
        raise ValueError("dst co-ordinates are not a 2-tuple of ints.")

    if constrain:
        fix = lambda x, mx: int(round(sorted([0, x, mx])[1]))
        r, c = img.shape
        src = list(src)
        src = (fix(src[0], r), fix(src[1], c))
        dst = (fix(dst[0], r), fix(dst[1], c))

    result = measure.profile_line(img, src, dst, linewidth, order, mode, cval)
    points = measure.profile._line_profile_coordinates(src, dst, linewidth)[:, :, 0]
    ret = Data()
    ret.data = points.T
    ret.setas = "xy"
    ret &= np.sqrt(ret.x ** 2 + ret.y ** 2) * scale
    ret &= result
    ret.column_headers = ["X", "Y", "Distance", "Intensity"]
    ret.setas = "..xy"
    ret.metadata = img.metadata.copy()
    return ret
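A hedged usage sketch for `profile_line` above, assuming `im` is a Stoner ImageArray loaded elsewhere (it supplies the `.get()` metadata lookup and `.metadata` attribute the function relies on); the coordinates are illustrative.

# Sketch only - `im` is assumed to be an ImageArray with pixel data.
profile = profile_line(im, src=(10, 10), dst=(10, 200), linewidth=3)
print(profile.column_headers)  # ['X', 'Y', 'Distance', 'Intensity']
profile.plot()                 # setas is "..xy": Intensity vs Distance
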
 def test_threshold(self):
     #set up some zigzag data
     #mins at 0,100,200,300,400, max at 50, 150, 250, 350 and zeros in between
     ar = np.zeros((400, 2))
     ar[:, 0] = np.arange(0, len(ar))
     for i in range(4):
         ar[i * 100:i * 100 + 50, 1] = np.linspace(-1, 1, 50)
     for i in range(4):
         ar[i * 100 + 50:i * 100 + 100, 1] = np.linspace(1, -1, 50)
     d = Data(ar, setas='xy')
      self.assertTrue(
          len(d.threshold(0, rising=True, falling=False, all_vals=True)) == 4)
      self.assertTrue(
          len(d.threshold(0, rising=False, falling=True, all_vals=True)) == 4)
      self.assertTrue(
          len(d.threshold(0,
                          interpolate=False,
                          rising=False,
                          falling=True,
                          all_vals=True)) == 4)
     self.assertTrue(d.threshold(0, all_vals=True)[1] == 124.5)
 def test_functions(self):
     #Test section:
     self.s1 = self.d1.section(z=(12, 13))
     self.assertTrue(142.710 < self.d2.mean("Temp") < 142.711,
                     "Failed on the mean test.")
     self.assertTrue(
         round(self.d2.span("Temp")[0], 1) == 4.3
         and round(self.d2.span("Temp")[1], 1) == 291.6,
         "Span test failed.")
     f = self.d2.split(lambda r: r["Temp"] < 150)
     self.assertTrue(len(f[0]) == 838, "Split failed to work.")
     self.assertEqual(
         len(
             self.d3.threshold(2000,
                               rising=True,
                               falling=True,
                               all_vals=True)), 5, "Threshold failure.")
     self.d4.add(0, 1, "Add")
     self.d4.subtract(1, 0, header="Subtract")
     self.d4.multiply(0, 1, header="Multiply")
     self.d4.divide(0, 1, header="Divide")
     self.d4.diffsum(0, 1, header="Diffsum")
     self.assertTrue(
         np.all(self.d4[0] == np.array([-0.5, -1, -3, 3, -1, 2])),
         "Test column ops failed.")
     d = Data(np.zeros((100, 1)))
     d.add(0, 1.0)
      self.assertEqual(np.sum(d[:, 0]), 100., "Add with a float didn't work")
     d.add(0, np.ones(100))
     self.assertEqual(np.sum(d[:, 0]), 200., "Add with an array failed.")
Example #37
    def LoadData(self, data_item_number, filename):
        """LoadData(self, data_item_number, filename) --> none

        Loads the data from filename into the data_item_number.
        """
        try:
            datafile=Data(str(filename),debug=True) # does all the hard work here
        except Exception as e:
            ShowWarningDialog(self.parent, 'Could not load the file: ' +\
                    filename + ' \nPlease check the format.\n\n Stoner.Data'\
                    + ' gave the following error:\n'  +  str(e))
        else:
            # For the freak case of only one data point
            try:
                if datafile.setas.cols["axes"]==0:
                    self.x_col=datafile.find_col(self.x_col)
                    self.y_col=datafile.find_col(self.y_col)
                    self.e_col=datafile.find_col(self.e_col)
                    datafile.setas(x=self.x_col,y=self.y_col,e=self.e_col)
                else:
                    self.x_col=datafile.setas.cols["xcol"]
                    self.y_col=datafile.setas.cols["ycol"][0]
                    if len(datafile.setas.cols["yerr"])>0:
                        self.e_col=datafile.setas.cols["yerr"][0]
                    else:
                        datafile.add_column(np.ones(len(datafile)))
                        datafile.setas[-1]="e"
            except Exception as e:
                ShowWarningDialog(self.parent, 'The data file does not contain '\
                        + 'all the columns specified in the options\n' + str(e))
                # Okay now we have showed a dialog lets bail out ...
                return
            # The data is set by the default Template.__init__ function, neat huh?
            # Now the loaded data goes into *_raw so that they are not
            # changed by the transforms
            datafile.y=np.where(datafile.y==0.0,1E-8,datafile.y)
            self.data[data_item_number].x_raw = datafile.x
            self.data[data_item_number].y_raw =  datafile.y
            self.data[data_item_number].error_raw =  datafile.e
            # Run the commands on the data - this also sets the x,y, error members
            # of that data item.
            self.data[data_item_number].run_command()

            # Send an update that new data has been loaded
            self.SendUpdateDataEvent()
def collate(grp,trail,**kargs):
    """Gather all the data up again."""
    grp.sort()
    final=Data()
    final.add_column(grp[0].column('Energy'),'Energy')
    for g in grp:
        final.add_column(g.column('Asym'),g.title)
    if "group_key" in kargs:
        final[kargs["group_key"]]=grp.key
    final["path"]=trail
    if "save" in kargs and kargs["save"]:
        final.save(kargs["filename"])
    return final
Example #40
class Plottest(unittest.TestCase):

    """Path to sample Data File"""
    datadir=path.join(pth,"sample-data")

    def setUp(self):
        self.d=Data(path.join(__home__,"..","sample-data","New-XRay-Data.dql"))

    def test_set_no_figs(self):
        self.assertTrue(Options.no_figs,"Default setting for no_figs option is incorrect.")
        Options.no_figs=True
        e=self.d.clone
        ret=e.plot()
        self.assertTrue(ret is None,"Output of Data.plot() was not None when no_figs is True  and showfig is not set({})".format(type(ret)))
        Options.no_figs=False
        e.showfig=False
        ret=e.plot()
        self.assertTrue(isinstance(ret,Data),"Return value of Data.plot() was not self when Data.showfig=False ({})".format(type(ret)))
        e.showfig=True
        ret=e.plot()
        self.assertTrue(isinstance(ret,Figure),"Return value of Data.plot() was not a Figure when Data.showfig=True ({})".format(type(ret)))
        e.showfig=None
        ret=e.plot()
        self.assertTrue(ret is None,"Output of Data.plot() was not None when Data.showfig is None ({})".format(type(ret)))
        Options.no_figs=True
        self.assertTrue(Options.no_figs,"set_option no_figs failed.")
        self.d=Data(path.join(__home__,"..","sample-data","New-XRay-Data.dql"))
        self.d.showfig=False
        ret=self.d.plot()
        self.assertTrue(ret is None,"Output of Data.plot() was not None when no_figs is True ({})".format(type(ret)))
        Options.no_figs=True
        plt.close("all")

    def test_template_settings(self):
        template=DefaultPlotStyle(font__weight="bold")
        self.assertEqual(template["font.weight"],"bold","Setting ytemplate parameter in init failed.")
        template(font__weight="normal")
        self.assertEqual(template["font.weight"],"normal","Setting ytemplate parameter in call failed.")
        template["font.weight"]="bold"
        self.assertEqual(template["font.weight"],"bold","Setting ytemplate parameter in setitem failed.")
        del template["font.weight"]
        self.assertEqual(template["font.weight"],"normal","Resettting template parameter failed.")
        keys=sorted([x for x in template])
        self.assertEqual(sorted(template.keys()),keys,"template.keys() and template.iter() disagree.")
        attrs=[x for x in dir(template) if template._allowed_attr(x)]
        length=len(dict(plt.rcParams))+len(attrs)
        self.assertEqual(len(template),length,"templa length wrong.")
 def test_threshold(self):
     #set up some zigzag data
     #mins at 0,100,200,300,400, max at 50, 150, 250, 350 and zeros in between
     ar = np.zeros((400,2))
     ar[:,0]=np.arange(0,len(ar))
     for i in range(4):
         ar[i*100:i*100+50,1] = np.linspace(-1,1,50)
     for i in range(4):
         ar[i*100+50:i*100+100,1] = np.linspace(1,-1,50)
     d = Data(ar, setas='xy')
      self.assertTrue(len(d.threshold(0,rising=True,falling=False,all_vals=True))==4)
      self.assertTrue(len(d.threshold(0,rising=False,falling=True,all_vals=True))==4)
      self.assertTrue(len(d.threshold(0,interpolate=False,rising=False,falling=True,all_vals=True))==4)
     self.assertTrue(d.threshold(0,all_vals=True)[1]==124.5)
     self.thresh=d
     self.assertTrue(np.sum(d.threshold([0.0,0.5,1.0])-np.array([[24.5,36.74999999, 49.]]))<1E-6,"Multiple threshold failed.")
     self.assertAlmostEqual(d.threshold(0,interpolate=False,all_vals=True)[1],124.5,6,"Threshold without interpolation failed.")
     result=d.threshold(0,interpolate=False,all_vals=True,xcol=False)
     self.assertTrue(np.allclose(result,np.array([[ 24.5,   0. ],[124.5,   0. ],[224.5,   0. ],[324.5,   0. ]])),
                     "Failed threshold with False scol - result was {}".format(result))
Example #42
 def test_set_no_figs(self):
     self.assertTrue(Options.no_figs,"Default setting for no_figs option is incorrect.")
     Options.no_figs=True
     e=self.d.clone
     ret=e.plot()
     self.assertTrue(ret is None,"Output of Data.plot() was not None when no_figs is True  and showfig is not set({})".format(type(ret)))
     Options.no_figs=False
     e.showfig=False
     ret=e.plot()
     self.assertTrue(isinstance(ret,Data),"Return value of Data.plot() was not self when Data.showfig=False ({})".format(type(ret)))
     e.showfig=True
     ret=e.plot()
      self.assertTrue(isinstance(ret,Figure),"Return value of Data.plot() was not a Figure when Data.showfig=True ({})".format(type(ret)))
     e.showfig=None
     ret=e.plot()
     self.assertTrue(ret is None,"Output of Data.plot() was not None when Data.showfig is None ({})".format(type(ret)))
     Options.no_figs=True
     self.assertTrue(Options.no_figs,"set_option no_figs failed.")
     self.d=Data(path.join(__home__,"..","sample-data","New-XRay-Data.dql"))
     self.d.showfig=False
     ret=self.d.plot()
     self.assertTrue(ret is None,"Output of Data.plot() was not None when no_figs is True ({})".format(type(ret)))
     Options.no_figs=True
     plt.close("all")
Example #43
"""Example of Quadratic Fit."""
from Stoner import Data
import Stoner.Fit as SF
from numpy import linspace
from numpy.random import normal
import matplotlib.pyplot as plt

# Make some data
x = linspace(-10, 10, 101)
y = SF.quadratic(x + normal(size=len(x), scale=0.1), 4, -2, 11) * normal(
    size=len(x), scale=0.05, loc=1.0
)
s = y * 0.05
d = Data(x, y, s, setas="xye", column_headers=["X", "Y"])
d.plot(fmt="r.")

d.polyfit(result=True, header="Polyfit")
d.setas = "x..y"
d.plot(fmt="m-", label="Polyfit")
d.text(
    -9,
    450,
    "Polynominal co-efficients\n{}".format(d["2nd-order polyfit coefficients"]),
    fontdict={"size": "x-small", "color": "magenta"},
)

d.setas = "xy"
d.curve_fit(SF.quadratic, result=True, header="Curve-fit")
d.setas = "x...y"
d.plot(fmt="b-", label="curve-fit")
d.annotate_fit(
Example #44
"""Create a 2D vector field plot."""
from Stoner import Data, __home__
from os import path

d = Data(path.join(__home__, "..", "sample-data", "OVF1.ovf"))
e = d.select(Z__between=(10, 11)).select(X__between=(10, 18)).select(Y__between=(5, 13))
e.figure(figsize=(8, 4))

# 2D vectors on a 2D Field
e.setas = "xy.uv."
e.subplot(121)
e.plot()
e.title = "3D Vector, 2D Field"

# 3D Vector on a 2D Field
e.subplot(122)
e.setas = "xy.uvw"
e.plot()
e.title = "3D Vector, 3D Field"

e.tight_layout()
Example #45
"""Example plot using experimental GBStyle"""
from Stoner import Data, __home__
from Stoner.plot.formats import GBPlotStyle
import os.path as path

filename = path.realpath(path.join(__home__, "..", "doc", "samples", "sample.txt"))
d = Data(filename, setas="xy", template=GBPlotStyle)
d.y = d.y - (max(d.y) / 2)
d.plot()
"""Simple use of lmfit to fit data."""
from Stoner import Data
from numpy import linspace, exp, random

# Make some data
x = linspace(0, 10.0, 101)
y = 2 + 4 * exp(-x / 1.7) + random.normal(scale=0.2, size=101)

d = Data(x, y, column_headers=["Time", "Signal"], setas="xy")

d.plot(fmt="ro")  # plot our data

func = lambda x, A, B, C: A + B * exp(-x / C)


# Do the fitting and plot the result
fit = d.differential_evolution(
    func, result=True, header="Fit", A=1, B=1, C=1, prefix="Model", residuals=True
)

# Reset labels
d.labels = []

# Make nice two panel plot layout
ax = d.subplot2grid((3, 1), (2, 0))
d.setas = "x..y"
d.plot(fmt="g+")
d.title = ""

ax = d.subplot2grid((3, 1), (0, 0), rowspan=2)
d.setas = "xyy"
"""Example of nDimArrhenius Fit."""
from Stoner import Data
import Stoner.Fit as SF
from numpy import linspace
from numpy.random import normal

# Make some data
T = linspace(50, 500, 101)
R = SF.nDimArrhenius(T + normal(size=len(T), scale=5.0, loc=1.0), 1e6, 0.5, 2)
d = Data(T, R, setas="xy", column_headers=["T", "Rate"])

# Curve_fit on its own
d.curve_fit(SF.nDimArrhenius, p0=[1e6, 0.5, 2], result=True, header="curve_fit")
d.setas = "xyy"
d.plot(fmt=["r.", "b-"])
d.annotate_fit(SF.nDimArrhenius, x=0.25, y=0.3)

# lmfit using lmfit guesses
fit = SF.NDimArrhenius()
p0 = fit.guess(R, x=T)
d.lmfit(fit, p0=p0, result=True, header="lmfit")
d.setas = "x..y"
d.plot(fmt="g-")
d.annotate_fit(SF.NDimArrhenius, x=0.25, y=0.05, prefix="NDimArrhenius")

d.title = "n-D Arrhenius Test Fit"
d.ylabel = "Rate"
d.xlabel = "Temperature (K)"
"""Smoothing Data methods example."""
from Stoner import Data
import matplotlib.pyplot as plt

fig = plt.figure(figsize=(9, 6))

d = Data("Noisy_Data.txt", setas="xy")

d.fig = fig
d.plot(color="grey")
# Filter with a Savitzky-Golay filter, linear over 11 points
d.SG_Filter(result=True, points=11, header="S-G Filtered")
d.setas = "x.y"
d.plot(lw=2, label="SG Filter")
d.setas = "xy"
# Filter with cubic splines
d.spline(replace=2, order=3, smoothing=4, header="Spline")
d.setas = "x.y"
d.plot(lw=2, label="Spline")
d.setas = "xy"
# Rebin data
d.smooth("hamming", size=0.2, result=True, replace=False, header="Smoothed")
d.setas = "x...y"
d.plot(lw=2, label="Smoooth", color="green")
d.setas = "xy"
# Re-bin the data
d2 = d.bin(bins=100, mode="lin")
d2.fig = d.fig
d2.plot(lw=2, label="Re-binned", color="blue")
d2.xlim(3.5, 6.5)
d2.ylim(-0.2, 0.4)
Beispiel #49
0
"""Example of nDimArrhenius Fit."""
from Stoner import Data
import Stoner.Fit as SF
from numpy import linspace, ones_like
from numpy.random import normal

# Make some data
V = linspace(-10, 10, 1000)
I = SF.bdr(V, 2.5, 3.2, 0.3, 15.0, 1.0) + normal(size=len(V), scale=1.0e-3)
dI = ones_like(V) * 1.0e-3

# Curve fit
d = Data(V, I, dI, setas="xye", column_headers=["Bias", "Current", "Noise"])

d.curve_fit(SF.bdr, p0=[2.5, 3.2, 0.3, 15.0, 1.0], result=True, header="curve_fit")
d.setas = "xyey"
d.plot(fmt=["r.", "b-"])
d.annotate_fit(
    SF.bdr, x=0.6, y=0.05, prefix="bdr", fontdict={"size": "x-small", "color": "blue"}
)

# lmfit
d.setas = "xy"
fit = SF.BDR(missing="drop")
p0 = fit.guess(I, x=V)
for p, v, mi, mx in zip(
    ["A", "phi", "dphi", "d", "mass"],
    [2.500, 3.2, 0.3, 15.0, 1.0],
    [0.100, 1.0, 0.05, 5.0, 0.5],
    [10, 10.0, 2.0, 30.0, 5.0],
):
    p0[p].set(value=v, min=mi, max=mx)  # seed each parameter's value and bounds
Beispiel #50
0
"""
from Stoner import Data
from Stoner.Fit import Linear
from Stoner.Util import format_error
from lmfit.models import ExponentialModel
import numpy as np
import matplotlib.pyplot as pyplot
from copy import copy

filename=False
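# Note: a filename of False should make Stoner raise a file-selection dialog rather than load a fixed file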
sensitivity=50

critical_edge=0.8
fringe_offset=1

d=Data(filename,setas="xy") #Load the low angle scan

# Now get the section of the data file that has the peak positions.
# This is where the hard work is really done:
# We differentiate the data using a Savitzky-Golay filter fitting quartics over a short window.
# This has proved most successful for me when looking at some MdV data.
# We then threshold for zero crossings of the derivative
# and check the second derivative to decide whether the peak is significant. This is the significance parameter
# and its value is largely empirical.
# Finally we interpolate back onto the complete data set so that we get the angle as well as the counts
# (a standalone sketch of this peak-finding recipe follows after this example).
d.lmfit(ExponentialModel,result=True,replace=False,header="Envelope")
d.subtract("Counts","Envelope",replace=False,header="peaks")
d.setas="xy"
sys.exit()
t=Data(d.interpolate(d.peaks(significance=sensitivity,width=8,poly=4)))
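The recipe described in the comments above can be sketched in plain numpy/scipy, independent of Data.peaks(). This is only an illustrative stand-alone version: the helper find_peaks_sg, its default window and the curvature-based significance test are choices made for this sketch, not the internals of the Stoner implementation.

import numpy as np
from scipy.signal import savgol_filter


def find_peaks_sg(x, y, width=9, poly=4, significance=5.0):
    """Locate maxima of y from a Savitzky-Golay derivative plus a curvature test."""
    dy = savgol_filter(y, window_length=width, polyorder=poly, deriv=1)
    d2y = savgol_filter(y, window_length=width, polyorder=poly, deriv=2)
    # Downward zero crossings of the first derivative are candidate maxima
    candidates = np.where((dy[:-1] > 0) & (dy[1:] <= 0))[0]
    # Keep candidates whose curvature is large compared with the typical curvature
    strong = candidates[np.abs(d2y[candidates]) > significance * np.abs(d2y).mean()]
    # Interpolate between the bracketing samples to refine the peak position in x
    frac = dy[strong] / (dy[strong] - dy[strong + 1])
    return x[strong] + frac * (x[strong + 1] - x[strong])


xx = np.linspace(0.0, 10.0, 501)
yy = np.exp(-((xx - 3) ** 2) / 0.05) + 0.5 * np.exp(-((xx - 7) ** 2) / 0.05)
print(find_peaks_sg(xx, yy))  # approximately [3.0, 7.0]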
class Analysis_test(unittest.TestCase):

    """Tests of the analysis methods of the Data class."""

    datadir=path.join(pth,"sample-data") #Path to sample Data File

    def setUp(self):
        self.d1=Data(path.join(self.datadir,"OVF1.ovf"))
        self.d2=Data(path.join(self.datadir,"TDI_Format_RT.txt"))
        self.d3=Data(path.join(self.datadir,"New-XRay-Data.dql"))
        self.d4=Data(np.column_stack([np.ones(100),np.ones(100)*2]),setas="xy")

    def test_functions(self):
        #Test section:
        self.s1=self.d1.section(z=(12,13))
        self.assertTrue(142.710<self.d2.mean("Temp")<142.711,"Failed on the mean test.")
        self.assertTrue(round(self.d2.span("Temp")[0],1)==4.3 and round(self.d2.span("Temp")[1],1)==291.6,"Span test failed.")
        f=self.d2.split(lambda r:r["Temp"]<150)
        self.assertTrue(len(f[0])==838,"Split failed to work.")
        self.assertEqual(len(self.d3.threshold(2000,rising=True,falling=True,all_vals=True)),5,"Threshold failure.")
        self.d4.add(0,1,"Add")
        self.d4.subtract(1,0,header="Subtract")
        self.d4.multiply(0,1,header="Multiply")
        self.d4.divide(0,1,header="Divide")
        self.d4.diffsum(0,1,header="Diffsum")
        self.assertTrue(np.all(self.d4[0]==np.array([-0.5,-1,-3,3,-1,2])),"Test column ops failed.")
        d=Data(np.zeros((100,1)))
        d.add(0,1.0)
        self.assertEqual(np.sum(d[:,0]),100.,"Add with a float didn't work.")
        d.add(0,np.ones(100))
        self.assertEqual(np.sum(d[:,0]),200.,"Add with an array failed.")

    def test_peaks(self):
        d=self.d3.clone
        d.peaks(width=8,poly=4,significance=100,modify=True)
        self.assertEqual(len(d),11,"Failed on peaks test.")

    def test_threshold(self):
        #set up some zigzag data
        #mins at 0,100,200,300,400, max at 50, 150, 250, 350 and zeros in between
        ar = np.zeros((400,2))
        ar[:,0]=np.arange(0,len(ar))
        for i in range(4):
            ar[i*100:i*100+50,1] = np.linspace(-1,1,50)
        for i in range(4):
            ar[i*100+50:i*100+100,1] = np.linspace(1,-1,50)
        d = Data(ar, setas='xy')
        self.assertTrue(len(d.threshold(0,rising=True,falling=False,all_vals=True))==4)
        self.assertTrue(len(d.threshold(0,rising=False,falling=True,all_vals=True))==4)
        self.assertTrue(len(d.threshold(0,interpolate=False,rising=False,falling=True,all_vals=True))==4)
        self.assertTrue(d.threshold(0,all_vals=True)[1]==124.5)
        self.thresh=d
        self.assertTrue(np.sum(d.threshold([0.0,0.5,1.0])-np.array([[24.5,36.74999999, 49.]]))<1E-6,"Multiple threshold failed.")
        self.assertAlmostEqual(d.threshold(0,interpolate=False,all_vals=True)[1],124.5,6,"Threshold without interpolation failed.")
        result=d.threshold(0,interpolate=False,all_vals=True,xcol=False)
        self.assertTrue(np.allclose(result,np.array([[ 24.5,   0. ],[124.5,   0. ],[224.5,   0. ],[324.5,   0. ]])),
                        "Failed threshold with False scol - result was {}".format(result))

    def test_apply(self):
        self.app=Data(np.zeros((100,1)),setas="y")
        self.app.apply(lambda r:r.i[0],header="Counter")
        def calc(r,omega=1.0,k=1.0):
            return np.sin(r.y*omega)
        self.app.apply(calc,replace=False,header="Sin",_extra={"omega":0.1},k=1.0)
        self.app.apply(lambda r:r.__class__([r[1],r[0]]),replace=True,header=["Index","Sin"])
        self.app.setas="xy"
        self.assertAlmostEqual(self.app.integrate(),-64.1722191259037,msg="Integrate after apply calls failed.")
"""Example of using lmfit to do a bounded fit."""
from Stoner import Data
from Stoner.Fit import StretchedExp

# Load the data
d = Data("lmfit_data.txt", setas="xy")

# Do the fit
d.lmfit(StretchedExp, result=True, header="Fit", prefix="")
# plot
d.setas = "xyy"

d.plot(fmt=["+", "-"])
# Make a pretty label using the annotate_fit text
text = r"$y=A e^{-\left(\frac{x}{x_0}\right)^\beta}$" + "\n"
text += d.annotate_fit(StretchedExp, text_only=True, prefix="")
d.text(6, 4e4, text)
Beispiel #54
0
"""Example plot using Joe Batley's plot style."""
from Stoner import Data, __home__
from Stoner.plot.formats import JTBPlotStyle
import os.path as path

filename = path.realpath(path.join(__home__, "..", "doc", "samples", "sample.txt"))
d = Data(filename, setas="xy", template=JTBPlotStyle)
d.plot()
Beispiel #55
0
"""Re-binning data example."""
from Stoner import Data
from Stoner.plot.utils import errorfill

d = Data("Noisy_Data.txt", setas="xy")

d.template.fig_height = 6
d.template.fig_width = 8
d.figure(figsize=(8, 6))
d.subplot(411)

e = d.bin(bins=0.05, mode="lin")
f = d.bin(bins=0.25, mode="lin")
g = d.bin(bins=0.05, mode="log")
h = d.bin(bins=50, mode="log")
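# bins as a float sets the bin width and as an int the number of bins, while mode
# selects linear or logarithmic spacing (compare the labels used below)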

for i, (binned, label) in enumerate(
    zip([e, f, g, h], ["0.05 Linear", "0.25 Linear", "0.05 Log", "50 log"])
):
    binned.subplot(411 + i)
    d.plot()
    binned.fig = d.fig
    binned.plot(plotter=errorfill, label=label)

    d.xlim = (1, 6)
    d.ylim(-0.1, 0.4)
    d.title = "Bin demo" if i == 0 else ""
d.tight_layout()
"""Example using select method to pick out data."""
from Stoner import Data

d = Data("sample.txt", setas="xy")
d.plot(fmt="b-")
d.select(Temp__gt=75).select(Res__between=(5.3, 6.3)).plot(fmt="ro", label="portion 1")
d.select(Temp__lt=30).plot(fmt="g<", label="portion 2")
Beispiel #57
0
"""Add an inset to a plot."""
from Stoner import Data

p = Data("sample.txt", setas="xy")
p.plot()
p.inset(loc=1, width="50%", height="50%")
p.setas = "x.y"
p.plot()
p.title = ""  # Turn off the inset title
"""Double y axis plot."""
from Stoner import Data

p = Data("sample.txt", setas="xyy")
# Quick plot
p.plot(multiple="y2")
Beispiel #59
0
"""Example of nDimArrhenius Fit."""
from Stoner import Data
import Stoner.Fit as SF
from numpy import linspace
from numpy.random import normal

# Make some data
T = linspace(200, 350, 101)
R = SF.modArrhenius(T, 1e6, 0.5, 1.5) * normal(scale=0.00005, loc=1.0, size=len(T))
d = Data(T, R, setas="xy", column_headers=["T", "Rate"])

# Curve_fit on its own
d.curve_fit(SF.modArrhenius, p0=[1e6, 0.5, 1.5], result=True, header="curve_fit")
d.setas = "xyy"
d.plot(fmt=["r.", "b-"])
d.annotate_fit(SF.modArrhenius, x=0.2, y=0.5)

# lmfit using lmfit guesses
fit = SF.ModArrhenius()
p0 = [1e6, 0.5, 1.5]
d.lmfit(fit, p0=p0, result=True, header="lmfit")
d.setas = "x..y"
d.plot()
d.annotate_fit(SF.ModArrhenius, x=0.2, y=0.25, prefix="ModArrhenius")

d.title = "Modified Arrhenius Test Fit"
d.ylabel = "Rate"
d.xlabel = "Temperature (K)"
from Stoner import Data
from numpy import linspace, meshgrid, column_stack
from numpy.random import normal
import matplotlib.cm as cmap


def plane(coord, a, b, c):
    """Function to define a plane"""
    return c - (coord[0] * a + coord[1] * b)


coeffs = [1, -0.5, -1]
col = linspace(-10, 10, 6)
X, Y = meshgrid(col, col)
Z = plane((X, Y), *coeffs) + normal(size=X.shape, scale=7.0)
d = Data(
    column_stack((X.ravel(), Y.ravel(), Z.ravel())),
    filename="Fitting a Plane",
    setas="xyz",
)

d.column_headers = ["X", "Y", "Z"]
d.figure(projection="3d")
d.plot_xyz(plotter="scatter")

popt, pcov = d.curve_fit(plane, [0, 1], 2, result=True)
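# In the curve_fit call above, [0, 1] selects the two coordinate columns passed to
# plane() as coord and 2 is the column holding the Z values being fitted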
d.setas = "xy.z"

d.plot_xyz(linewidth=0, cmap=cmap.jet)

txt = "$z=c-ax+by$\n"
txt += "\n".join([d.format("plane:{}".format(k), latex=True) for k in ["a", "b", "c"]])