def load_data(self, files, retry_open=None, separate_analysis=False):
    """Load AFM data"""
    # Expand any directories in `files` into concrete data files.
    data_files = []
    for entry in files:
        data_files.extend(afmformats.find_data(entry))

    if not data_files:
        # Nothing found: tell the user and optionally offer a retry.
        ret = QtWidgets.QMessageBox.warning(
            self,
            "No AFM data found!",
            "No AFM data files could be found in the location specified.",
            QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Retry)
        if retry_open is not None and ret == QtWidgets.QMessageBox.Retry:
            retry_open()
        return

    # Make sure there are no duplicate files (#12)
    data_files = sorted(set(data_files))
    # Either one analysis subwindow per file, or a single combined one.
    if separate_analysis:
        usable = [[single] for single in data_files]
    else:
        usable = [data_files]
    for flist in usable:
        aclass = registry.fd.UiForceDistance
        self.add_subwindow(aclass, flist)
def load_data(path, callback=None, meta_override=None):
    """Load data and return list of :class:`afmformats.AFMForceDistance`

    This is essentially a wrapper around
    :func:`afmformats.formats.find_data` and
    :func:`afmformats.formats.load_data` that returns
    force-distance datasets.

    Parameters
    ----------
    path: str or pathlib.Path or list of str or list of pathlib.Path
        path to data files or directory containing data files;
        if directories are given, they are searched recursively
    callback: callable
        function for progress tracking; must accept a float in
        [0, 1] as an argument.
    meta_override: dict
        if specified, contains key-value pairs of metadata that
        are used when loading the files
        (see :data:`afmformats.meta.META_FIELDS`)
    """
    paths = afmformats.find_data(path, modality=DEFAULT_MODALITY)
    data = []
    for idx, single in enumerate(paths):
        # Per-file progress wrapper: maps the sub-progress of file
        # `idx` onto the global [0, 1] range. The index is bound as a
        # default argument so each wrapper keeps its own offset.
        def per_file_callback(frac, _base=idx):
            return callback((_base + frac) / len(paths)) if callback else None

        data += afmformats.load_data(
            single,
            callback=per_file_callback,
            meta_override=meta_override,
            **get_load_data_modality_kwargs())
    return data
def get_data_paths_enum(path, skip_errors=False):
    """Return a list with paths and their internal enumeration

    Parameters
    ----------
    path: str or pathlib.Path or list of str or list of pathlib.Path
        path to data files or directory containing data files;
        if directories are given, they are searched recursively
    skip_errors: bool
        skip paths that raise errors

    Returns
    -------
    path_enum: list of lists
        each entry in the list is a list of [pathlib.Path, int],
        enumerating all curves in each file
    """
    paths = afmformats.find_data(path, modality=DEFAULT_MODALITY)
    enumpaths = []
    for pp in paths:
        try:
            data = load_data(pp)
        # FIX: was `except BaseException`, which (with skip_errors=True)
        # also silently swallowed KeyboardInterrupt and SystemExit.
        # `Exception` still covers all loading/parsing errors.
        except Exception:
            if skip_errors:
                continue
            raise
        for dd in data:
            # dd.enum is the curve index within the file
            enumpaths.append([pp, dd.enum])
    return enumpaths
def get_data_paths(path):
    """Return list of data paths with force-distance data

    DEPRECATED
    """
    # FIX: stacklevel=2 attributes the DeprecationWarning to the
    # caller's line instead of this wrapper, so users can find and
    # update the deprecated call site.
    warnings.warn(
        "`get_data_paths` is deprecated! Please use "
        + "afmformats.find_data(path, modality='force-distance') "
        + "instead!",
        DeprecationWarning,
        stacklevel=2)
    return afmformats.find_data(path, modality=DEFAULT_MODALITY)
def fit_perform(path, path_results, profile_path=PROFILE_PATH):
    """Fit all force-distance curves under `path` and export results.

    Writes two files into `path_results`:
    - "statistics.tsv": one row per curve with path, enum, fitted E,
      and a rounded rating
    - "plots.tif": an ImageJ-compatible multi-page TIFF with one plot
      per curve

    NOTE(review): assumes `Profile`, `IndentationGroup`, `fit_data`,
    `plot_data`, `fspath`, `tifffile`, and `mpimg` are provided by the
    enclosing module — verify imports when moving this function.
    """
    path_results = pathlib.Path(path_results)
    ptsv = path_results / "statistics.tsv"
    ptif = path_results / "plots.tif"
    # exported data columns: (column name, extractor applied to each
    # fitted indentation object)
    pf = Profile(path=profile_path, create=False)
    dlist = [
        ["path", lambda x: x.path],
        ["enum", lambda x: x.enum],
        # fitted Young's modulus from the fit result
        ["E", lambda x: x.fit_properties["params_fitted"]["E"].value],
        [
            "rating",
            # quality rating from the profile's training set/regressor,
            # rounded to one decimal place
            lambda x: round(x.rate_quality(
                training_set=pf["rating training set"],
                regressor=pf["rating regressor"]), ndigits=1)
        ],
    ]
    ddict = dict(dlist)
    header = "\t".join([dd[0] for dd in dlist])
    # write the TSV header first (file is re-opened in append mode below)
    with ptsv.open(mode="w") as ts:
        ts.write(header + "\n")
    # get all files in path
    datapaths = afmformats.find_data(path, modality="force-distance")
    with tifffile.TiffWriter(fspath(ptif), imagej=True) as tf, \
            ptsv.open(mode="a") as ts:
        for pp in datapaths:
            print("Processing: {}".format(pp))
            grp = IndentationGroup(pp)
            for idnt in grp:
                # fit the model to this curve (mutates `idnt` in place —
                # the extractors below read the fit results from it)
                fit_data(idnt, profile_path=profile_path)
                # save statistics
                stats = [str(dd[1](idnt)) for dd in dlist]
                ts.write("\t".join(stats) + "\n")
                # save plot: render to an in-memory PNG buffer, decode it
                # to an array, and append it as a TIFF page
                imio = io.BytesIO()
                rating_text = "Rating parameters:\n" \
                    + "regressor: {}\n".format(pf["rating regressor"]) \
                    + "training set: {}\n".format(pf["rating training set"]) \
                    + "rating: {:.1f}\n".format(ddict["rating"](idnt))
                plot_data(idnt, add_text=rating_text, path=imio)
                imio.seek(0)
                # mpimg returns floats in [0, 1]; convert to 8-bit
                imdat = (mpimg.imread(imio) * 255).astype("uint8")
                # contiguous=True stacks pages into one ImageJ hyperstack
                tf.save(imdat, contiguous=True)
import pathlib

import pytest

import afmformats
import afmformats.errors


data_path = pathlib.Path(__file__).parent / "data"


# pass all available files
@pytest.mark.parametrize("path", afmformats.find_data(data_path))
def test_load_all_with_callback(path):
    """Make sure that the callback function is properly implemented"""
    progress = []

    try:
        # record every progress value reported during loading
        afmformats.load_data(path=path, callback=progress.append)
    except afmformats.errors.MissingMetaDataError:
        # some formats need metadata supplied explicitly; retry with it
        afmformats.load_data(path=path,
                             callback=progress.append,
                             meta_override={
                                 "spring constant": 20,
                                 "sensitivity": .01e-6
                             })
    # progress tracking must end at exactly 100 %
    assert progress[-1] == 1