Example #1
    def response_data(self, n_freqs=5000):
        """
        Calculate the frequency response of the defined filter functions and return them as a 1D DataSet,
        containing the frequency axis and a DataArray of complex filter amplitudes for each frequency component.
        The DataArrays are written in filter order and labeled `filter response omegaN` for each component N.

        :param n_freqs: Number of frequency steps to calculate for.
        :type n_freqs: int

        :return: The DataSet containing the frequency responses.
        :rtype: :class:`~snomtools.data.datasets.DataSet`
        """
        responses = []
        frequencies = None
        for b in self.butters:
            freqs, response = b.response(n_freqs)
            if frequencies is None:
                frequencies = freqs
            else:
                assert np.allclose(
                    freqs,
                    frequencies), "Butters giving inconsistent frequencies."
            responses.append(response)
        das = [
            ds.DataArray(responses[i],
                         label="filter response omega{0}".format(i))
            for i in range(len(self.butters))
        ]
        data = ds.DataSet("Frequency Filter Response Functions", das,
                          [ds.Axis(frequencies, label='frequency')])
        return data
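
A minimal usage sketch; the wrapping filter class (here called `FrequencyFilter`) and its constructor arguments are assumptions for illustration, not part of the example:

# Hypothetical usage; `FrequencyFilter` is an assumed class owning the `butters` list.
filt = FrequencyFilter(indata, components=(0, 1))
response_set = filt.response_data(n_freqs=10000)
freq_axis = response_set.get_axis(0)      # The shared frequency axis.
omega0 = response_set.get_datafield(0)    # Labeled "filter response omega0".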
Example #2
def hist_asc(source, T_start=None, T_bin=1, tif_probe=None):
    """
	Reads a DLD energy channel histogram, saved in a file with the extension ".hist.asc".

	:param str source: The path of the source file.

	:param int T_start: The start channel of the chosen time binning. By default, the first channel containing counts
		is taken.

	:param int T_bin: The bin width of the chosen time binning.

	:param str tif_probe: A tif that was saved at the same time (or with the same settings) as the histogram to read,
		typically when executing "save all" in Terra. This deactivates *T_start* and *T_bin* and reads the binning from
		the tags in the tiff file instead.

	:return: The imported data.
	:rtype: snomtools.data.datasets.DataSet
	"""
    filepath = os.path.abspath(source)
    filebase = os.path.basename(filepath)

    if tif_probe is not None:
        # Read tif probe file:
        infile = tiff.tifffile.TiffFile(tif_probe)

        # Read time binning metadata from tags:
        roi_and_bin_id = "41010"  # as defined by Christian Schneider #define TIFFTAG_ROI_AND_BIN 41010
        tag = tiff.search_tag(infile, roi_and_bin_id)
        # roi_and_bin_list = tag.value
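        # Entries 2, 5 and 8 of the tag value hold the time start, stop (St, unused below) and binning: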
        T_start, St, T_bin = int(tag.value[2]), int(tag.value[5]), int(
            tag.value[8])
        infile.close()

    # Read the "HistoXplusY" column from the .asc file to an array:
    count_data = numpy.loadtxt(filepath, dtype=int, skiprows=1, usecols=2)
    # Trim the trailing zeroes:
    count_data = numpy.trim_zeros(count_data, 'b')

    # If no start channel is given, guess it by taking the first non-zero entry, taking the binning into account.
    if tif_probe is None and T_start is None:
        start_index = numpy.nonzero(count_data)[0][0]
        T_start = start_index * T_bin

    # Trim the leading zeroes:
    count_data = numpy.trim_zeros(count_data)

    # Initialize Channel axis and Count DataArray
    taxis = ds.Axis([T_start + i * T_bin for i in range(count_data.shape[0])],
                    label='channel',
                    plotlabel='Time Channel')
    dataarray = ds.DataArray(count_data,
                             unit='count',
                             label='counts',
                             plotlabel='Counts')

    # Return DataSet:
    return ds.DataSet(label=filebase, datafields=[dataarray], axes=[taxis])
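
A short usage sketch of `hist_asc` (the file names are hypothetical placeholders):

# Read with an explicit time binning:
histdata = hist_asc("run01.hist.asc", T_start=1000, T_bin=4)

# Or read the binning from a tif probe saved with the same settings in Terra:
histdata = hist_asc("run01.hist.asc", tif_probe="run01.tif")
print(histdata.get_axis(0).get_data()[:5])  # The first time channels.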
Example #3
def read_jpeg(filepath):
    """
	Reads a generic jpeg file, interpreting the two image dimensions as x and y.
	Only greyscale data is read; if a color (RGB or RGBA) image is given, it is converted to greyscale.

	:param filepath: String: The (absolute or relative) path of the input file.

	:return: The dataset instance generated from the image file.
	"""
    # Translate input path to absolute path:
    filepath = os.path.abspath(filepath)
    filebase = os.path.basename(filepath)

    # Read jpeg file to numpy array. Axes will be (y, x):
    indata = imageio.imread(filepath, as_gray=True)

    # Initialize data for dataset:
    dataarray = ds.DataArray(indata,
                             unit='dimensionless',
                             label='brightness',
                             plotlabel='Brightness')

    # Careful about orientation! This is like a matrix:
    # rows go first and are numbered in vertical direction -> Y
    # columns go last and are numbered in horizontal direction -> X
    yaxis = ds.Axis(np.arange(0, indata.shape[0]),
                    unit='pixel',
                    label='y',
                    plotlabel='y')
    xaxis = ds.Axis(np.arange(0, indata.shape[1]),
                    unit='pixel',
                    label='x',
                    plotlabel='x')

    # Return dataset:
    return ds.DataSet(label=filebase,
                      datafields=[dataarray],
                      axes=[yaxis, xaxis])
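
A short usage sketch of `read_jpeg` (the file name is a hypothetical placeholder):

image_data = read_jpeg("snapshot.jpeg")
print(image_data.shape)                   # (rows, columns), i.e. (y, x).
brightness = image_data.get_datafield(0)  # The greyscale brightness DataArray.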
Example #4
	def bin_axis(self):
		"""
		Builds the new axes list, in which each binned axis gets new ticks, calculated via np.mean over each bin.

		:return: The list of new axes.
		"""
		newaxis = self.data.axes

		for ax in range(len(self.binAxisID)):
			oldaxis = self.data.get_axis(self.binAxisID[ax])
			n_bins = oldaxis.shape[0] // self.binFactor[ax]
			ticks = np.zeros(n_bins)
			newSubAxis = ds.Axis(data=ticks, unit=oldaxis.get_unit(),
								 label=oldaxis.get_label() + ' binned x' + str(self.binFactor[ax]),
								 plotlabel=oldaxis.get_plotlabel())
			# Each new tick is the mean over the binFactor old ticks it replaces:
			for i in range(n_bins):
				newSubAxis[i] = np.mean(oldaxis.get_data()[self.binFactor[ax] * i:self.binFactor[ax] * (i + 1)])
			newaxis[self.binAxisID[ax]] = newSubAxis
		return newaxis
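
The per-bin tick averaging can also be done vectorized; a plain NumPy sketch of the same operation, independent of the snomtools classes:

import numpy as np

ticks = np.arange(50)   # Old axis ticks.
factor = 8              # Binning factor.
n_bins = ticks.shape[0] // factor
# Drop the incomplete trailing bin, then average each group of `factor` ticks:
binned_ticks = ticks[:n_bins * factor].reshape(n_bins, factor).mean(axis=1)
print(binned_ticks)     # [ 3.5 11.5 19.5 27.5 35.5 43.5]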
Example #5
    def transformed_axis(self, unit=None, label=None, plotlabel=None):
        """
        Calculate the transformed axis, e.g. the frequency axis from the time axis.
        To calculate the frequency values, an FFT of a 1-dimensional probe slice is performed
        and the frequency ticks are calculated from the result.

        :param unit: Convert the calculated axis to this unit, deviating from `self.axis_freq_unit`.
        :type unit: None or str

        :param label: A label for the built axis. If none is given, the prefix `FFT-inverse_`
            is put before the label of the original axis.
        :type label: None or str

        :param plotlabel: An (optional) plotlabel for the generated axis.
        :type plotlabel: None or str

        :return: The frequency axis resulting from the FFT.
        :rtype: :class:`~snomtools.data.datasets.Axis`
        """
        # Calculate the frequency ticks:
        probe_slice = tuple([
            0 if i != self.axis_to_transform_id else np.s_[:]
            for i in range(self.indata.dimensions)
        ])
        ax_size = fftpack.fftshift(
            fftpack.fft(
                self.indata.get_datafield(0).data[probe_slice].magnitude)).size
        fticks = fftpack.fftshift(
            fftpack.fftfreq(ax_size, self.sampling_delta.magnitude))

        # Build the Axis object with the correct unit and return it:
        if label is None:
            label = "FFT-inverse_" + self.axis_to_transform.get_label()
        ax = ds.Axis(fticks,
                     1 / self.axis_to_transform.units,
                     label=label,
                     plotlabel=plotlabel)
        if unit is not None:
            ax.set_unit(unit)
        else:
            ax.set_unit(self.axis_freq_unit)
        return ax
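
The core of the tick calculation is `scipy.fftpack.fftfreq`, shifted to a monotonic axis with `fftshift`; a standalone sketch:

from scipy import fftpack

n_samples = 8
delta_t = 0.5  # Sampling interval, e.g. in seconds.
fticks = fftpack.fftshift(fftpack.fftfreq(n_samples, delta_t))
print(fticks)  # [-1.   -0.75 -0.5  -0.25  0.    0.25  0.5   0.75]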
Example #6
def timelog_folder(folderpath,
                   timeunit='s',
                   timeunitlabel=None,
                   timeformat=None,
                   prefix="",
                   postfix="",
                   h5target=True):
    """
	# TODO: UPDATE THIS FROM GENERIC COPIED DOCSTRING!
	:param folderpath: The (relative or absolute) path of the folders containing the powerlaw measurement series.

	:return: The dataset containing the images stacked along a time axis.
	"""
    if timeunitlabel is None:
        timeunitlabel = timeunit

    # Translate input path to absolute path:
    folderpath = os.path.abspath(folderpath)

    # Inspect the given folder for the image files:
    timefiles = {}
    for filename in filter(is_jpeg, os.listdir(folderpath)):
        # Strip extension, prefix, postfix. Slicing is used instead of str.lstrip/rstrip,
        # which would strip sets of characters rather than substrings:
        timestring = os.path.splitext(filename)[0]
        if prefix and timestring.startswith(prefix):
            timestring = timestring[len(prefix):]
        if postfix and timestring.endswith(postfix):
            timestring = timestring[:-len(postfix)]

        if timeformat:  # If format is given, parse accordingly:
            timestring = timestring.strip()
            imgtime = datetime.datetime.strptime(timestring, timeformat)
        else:  # Else try to parse as best as guessable:
            imgtime = dparser.parse(filename, fuzzy=True)
        timefiles[imgtime] = filename

    # Build time axis:
    axlist = []
    starttime = min(timefiles.keys())
    for imgtime in sorted(timefiles.keys()):
        axlist.append((imgtime - starttime).total_seconds())
    times = u.to_ureg(axlist, 'second').to(timeunit)
    pl = 'Time / ' + timeunitlabel  # Plot label for time axis.
    timeaxis = ds.Axis(times, label='time', plotlabel=pl)

    # ----------------------Create dataset------------------------
    # Test data size:
    sample_data = read_jpeg(
        os.path.join(folderpath, timefiles[list(timefiles.keys())[0]]))
    axlist = [timeaxis] + sample_data.axes
    newshape = timeaxis.shape + sample_data.shape
    # Build the data-structure that the loaded data gets filled into
    if h5target:
        chunks = True
        compression = 'gzip'
        compression_opts = 4

        # Probe HDF5 initialization to optimize buffer size:
        if chunks is True:  # Default is auto chunk alignment, so we need to probe.
            chunk_size = probe_chunksize(shape=newshape,
                                         compression=compression,
                                         compression_opts=compression_opts)
        else:
            chunk_size = chunks
        use_cache_size = buffer_needed(newshape, (0, ),
                                       chunk_size,
                                       dtype=np.uint8)

        # Initialize full DataSet with zeroes:
        dataspace = ds.Data_Handler_H5(
            unit=sample_data.get_datafield(0).get_unit(),
            shape=newshape,
            chunks=chunks,
            compression=compression,
            compression_opts=compression_opts,
            chunk_cache_mem_size=use_cache_size,
            dtype=np.uint8)
        dataarray = ds.DataArray(
            dataspace,
            label=sample_data.get_datafield(0).get_label(),
            plotlabel=sample_data.get_datafield(0).get_plotlabel(),
            h5target=dataspace.h5target,
            chunks=chunks,
            compression=compression,
            compression_opts=compression_opts,
            chunk_cache_mem_size=use_cache_size)
        dataset = ds.DataSet("Powerlaw " + folderpath, [dataarray],
                             axlist,
                             h5target=h5target,
                             chunk_cache_mem_size=use_cache_size)
    else:
        # In-memory data processing without h5 files.
        dataspace = u.to_ureg(np.zeros(newshape, dtype=np.uint8),
                              sample_data.datafields[0].get_unit())
        dataarray = ds.DataArray(
            dataspace,
            label=sample_data.get_datafield(0).get_label(),
            plotlabel=sample_data.get_datafield(0).get_plotlabel(),
            h5target=None)
        dataset = ds.DataSet("Time Log " + folderpath, [dataarray],
                             axlist,
                             h5target=h5target)
    dataarray = dataset.get_datafield(0)

    # ----------------------Fill dataset------------------------
    # Fill in data from imported tiffs:
    slicebase = tuple([np.s_[:] for j in range(len(sample_data.shape))])

    if verbose:
        import time
        print("Reading Time Series Folder of shape: ", dataset.shape)
        if h5target:
            print("... generating chunks of shape: ",
                  dataset.get_datafield(0).data.ds_data.chunks)
            print("... using cache size {0:d} MB".format(use_cache_size //
                                                         1024**2))
        else:
            print("... in memory")
        start_time = time.time()
    for i, imgtime in enumerate(sorted(timefiles.keys())):
        islice = (i, ) + slicebase
        # Import jpeg:
        idata = read_jpeg(os.path.join(folderpath, timefiles[imgtime]))

        # Check data consistency:
        assert idata.shape == sample_data.shape, "Trying to combine scan data with different shape."
        for ax1, ax2 in zip(idata.axes, sample_data.axes):
            assert ax1.units == ax2.units, "Trying to combine scan data with different axis dimensionality."
        assert idata.get_datafield(0).units == sample_data.get_datafield(0).units, \
         "Trying to combine scan data with different data dimensionality."

        # Write data:
        dataarray[islice] = idata.get_datafield(0).data
        if verbose:
            tpf = ((time.time() - start_time) / float(i + 1))
            etr = tpf * (dataset.shape[0] - (i + 1))
            print(
                "image {0:d} / {1:d}, Time/File {2:.2f}s ETR: {3:.1f}s".format(
                    i + 1, dataset.shape[0], tpf, etr))

    return dataset
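
A usage sketch of `timelog_folder`; the folder and the file naming scheme (e.g. "img_20171207_135502.jpeg") are hypothetical:

logdata = timelog_folder("measurement_run",
                         timeunit='min',
                         timeformat="%Y%m%d_%H%M%S",
                         prefix="img_",
                         h5target="timelog.hdf5")
logdata.saveh5()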
Example #7
			newda = self.bin_data(h5target=None)

		newds = ds.DataSet(self.data.label + " binned", (newda,), newaxis,
						   self.data.plotconf, h5target=h5target)
		return newds


if __name__ == '__main__':  # Just for testing:
	print("Testing...")
	test_fakedata = True  # Create and test on a fake dataset that's easier to overview:
	if test_fakedata:
		print("Building fake data...")
		fakearray = np.stack([np.arange(50) for i in range(25)] + [np.arange(50) + 100 for i in range(25)])
		fakedata = ds.DataArray(fakearray, h5target=True, chunks=(5, 5))
		fakeds = ds.DataSet("test", [ds.DataArray(fakedata)],
							[ds.Axis(np.arange(50), label="y"), ds.Axis(np.arange(50), label="x")],
							h5target=True)
		fakeds.saveh5("binning_testdata.hdf5")
		print("Test binning on fake data...")
		b = Binning(fakeds, binAxisID=('y', 'x'), binFactor=(2, 8))
		binnedds = b.bin(h5target="binning_outdata.hdf5")
		binnedds.saveh5()

	test_realdata = False  # Testing real data from NFC Session on Ben's PC:
	if test_realdata:
		path = 'E:\\NFC15\\20171207 ZnO+aSiH\\01 DLD PSI -3 to 150 fs step size 400as\\Maximamap\\Driftcorrected\\summed_runs'
		data_dir = path + '\\projected.hdf5'
		# data_dir = path + '\\summed_data.hdf5'
		h5target = path + '\\binned_data.hdf5'
		data = ds.DataSet.from_h5file(data_dir, h5target=h5target)
Example #8
                guesslist.append(u.to_ureg(guesselement, guessunit).magnitude)
            guess = tuple(guesslist)
        return curve_fit(fermi_edge, energies.magnitude, intensities.magnitude,
                         guess)


if __name__ == "__main__":
    # Generate some test data:
    E_f, d_E, c, d = 30, 1, 100, 1
    f = FermiEdge.from_coeffs((E_f, d_E, c, d))
    energies = u.to_ureg(np.linspace(25, 35, 1000), 'eV')
    intensities = u.to_ureg(
        f.fermi_edge(energies).magnitude + np.random.randn(1000) * 5, 'count')
    testdata = ds.DataSet("testdata",
                          (ds.DataArray(intensities, label="counts"), ),
                          (ds.Axis(energies, label="E"), ))
    testroi = ds.ROI(testdata, {'E': [u.to_ureg(29.8, 'eV'), None]})

    # Test the single modules:
    guess = FermiEdge.guess_parameters(energies, intensities)
    result = FermiEdge.fit_fermi_edge(energies, intensities, guess)
    print("result: {0}".format(result[0]))
    f = FermiEdge.from_xy(energies, intensities, guess)

    # Test the full thing:
    f = FermiEdge(testroi)
    print("result: {0}".format([f.E_f, f.dE, f.c, f.d]))

    from matplotlib import pyplot as plt

    plt.plot(energies, intensities)
    plt.plot(energies, f.fermi_edge(energies))  # Overlay the fitted edge.
    plt.show()
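
For reference, the model behind these fits with parameters (E_f, d_E, c, d) is presumably a Fermi-Dirac step on a constant background; a minimal standalone sketch of such a function (an assumption for illustration, not the library's exact definition):

import numpy as np

def fermi_edge_model(E, E_f, d_E, c, d):
    # Fermi-Dirac distribution of width d_E centered at the Fermi energy E_f,
    # scaled by an amplitude c, on a constant background d.
    return c / (np.exp((E - E_f) / d_E) + 1) + d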