Example #1
def doFolder_dataRed(azavStorage,
                     funcForAveraging=np.nanmean,
                     outStorageFile='auto',
                     reference='min',
                     chi2_0_max='auto',
                     saveTxt=True,
                     first=None,
                     last=None,
                     idx=None,
                     split_angle=False):
    """ azavStorage if a DataStorage instance or the filename to read 
  """

    if isinstance(azavStorage, DataStorage):
        azav = azavStorage
        folder = azavStorage.folder
    elif os.path.isfile(azavStorage):
        folder = os.path.dirname(azavStorage)
        azav = DataStorage(azavStorage)
    else:
        # assume is just a folder name
        folder = azavStorage
        azavStorage = folder + "/pyfai_1d" + default_extension
        azav = DataStorage(azavStorage)

    if split_angle:
        angles = np.unique(azav.log.angle)
        diffs = []
        for angle in angles:
            idx = azav.log.angle == angle
            diffs.append(
                doFolder_dataRed(azav,
                                 funcForAveraging=funcForAveraging,
                                 outStorageFile=None,
                                 reference=reference,
                                 chi2_0_max=chi2_0_max,
                                 saveTxt=False,
                                 idx=idx,
                                 split_angle=False))
        ret = DataStorage(angles=angles, diffs=diffs)
        if outStorageFile == 'auto':
            if not os.path.isdir(folder): folder = "./"
            outStorageFile = folder + "/diffs" + default_extension
        if outStorageFile is not None:
            ret.save(outStorageFile)
        return ret

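    # work on a copy so the idx/first/last selection below does not modify the caller's object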
    azav = copy.deepcopy(azav)

    if (last is not None or first is not None) and idx is None:
        idx = slice(first, last)

    if idx is not None:
        azav.log.delay = azav.log.delay[idx]
        azav.data_norm = azav.data_norm[idx]
        azav.err_norm = azav.err_norm[idx]

    # laser-off images are saved with delay -10s; when using the automatic
    # "min", exclude them so an off image is not picked as reference
    # (use reference=-10 if that is what you want)
    if reference == "min":
        reference = azav.log.delay[azav.log.delay != -10].min()

    # calculate differences
    tr = dataReduction.calcTimeResolvedSignal(
        azav.log.delay,
        azav.data_norm,
        err=azav.err_norm,
        q=azav.q,
        reference=reference,
        funcForAveraging=funcForAveraging,
        chi2_0_max=chi2_0_max)

    tr.folder = folder
    tr.twotheta_rad = azav.twotheta_rad
    tr.twotheta_deg = azav.twotheta_deg
    tr.info = azav.pyfai_info

    if outStorageFile == 'auto':
        if not os.path.isdir(folder): folder = "./"
        outStorageFile = folder + "/diffs" + default_extension
    tr.filename = outStorageFile

    # save txt and npz file
    if saveTxt: dataReduction.saveTxt(folder, tr, info=azav.pyfai_info)

    if outStorageFile is not None:
        tr.save(outStorageFile)

    return tr
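
A minimal usage sketch for the example above ("run0001" is a hypothetical folder name; when given a plain folder the function looks for the "pyfai_1d" storage file inside it, as in the else branch above):

import numpy as np

# compute time-resolved differences from a previously saved azimuthal average;
# use a median instead of the default np.nanmean and skip the txt output
diffs = doFolder_dataRed("run0001",
                         funcForAveraging=np.nanmedian,
                         reference="min",
                         saveTxt=False)

# with split_angle=True the curves are grouped by azav.log.angle and the returned
# DataStorage has an 'angles' array and one entry in 'diffs' per angle
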
Example #2
def doFolder(folder="./",
             files='*.edf*',
             nQ=1500,
             force=False,
             mask=None,
             dark=10,
             qlims=None,
             monitor='auto',
             save_pyfai=False,
             saveChi=True,
             poni='pyfai.poni',
             storageFile='auto',
             save=True,
             logDict=None,
             dezinger=None,
             skip_first=0,
             last=None,
             azimuth_range=None):
    """ calculate 1D curves from files in folder

      Parameters
      ----------
      folder : str
          folder to work on
      files : str
          regular expression to look for ccd images (use edf* to include
          gzipped files)
      nQ : int
          number of Q-points (equispaced)
      monitor : array or (qmin,qmax) or None
          normalization array, or a (qmin,qmax) pair to normalize on that q-range
      force : True|False
          if True, redo from beginning even if previous data are found
          if False, do only new files
      mask : can be a list of [filenames|array of booleans|mask string]
          pixels that are True are disregarded
      saveChi : True|False
          if False, chi files (text based for each image) are not saved
      dezinger : None or 0<float<100
          use pyfai function 'separate' to remove zingers. The value is the
          percentile used to find the liquid baseline; 50 (i.e. the median value)
          is a good approximation. Dezinger takes ~200ms per 4M pixel image.
          Needs good center and mask
      logDict : None or dictionary(-like)
          each key is a field; if given, it has to have a 'file' key
      poni : information necessary to build an AzimuthalIntegrator:
          → an AzimuthalIntegrator instance
          → a filename that will be looked for in
               1 'folder' first
               2 in ../folder
               3 in ../../folder
               ....
               n-1 in pwd
               n   in homefolder
          → a dictionary (used to bootstrap an AzimuthalIntegrator via
              AzimuthalIntegrator(**poni))
      save_pyfai : True|False
          if True, it stores all pyfai's internal arrays (~110 MB)
      skip_first : int
          skip the first images (the first one is sometimes not ideal)
      last : int
          skip every image after 'last'
 """

    frame = inspect.currentframe()
    args = inspect.getargvalues(frame)
    files_reg = files
    # store arguments for saving ..
    args = dict([(arg, args.locals[arg]) for arg in args.args])

    folder = folder.replace("//", "/").rstrip("/")

    # can't store arbitrary objects
    if isinstance(args['poni'], pyFAI.azimuthalIntegrator.AzimuthalIntegrator):
        args['poni'] = ai_as_dict(args['poni'])

    if storageFile == 'auto':
        fname = "pyfai_1d" + g_default_extension
        if not os.path.isdir(folder):
            # do not override folder, it might be useful
            storageFile = os.path.join(".", fname)
        else:
            storageFile = os.path.join(folder, fname)

    if os.path.isfile(storageFile) and not force:
        saved = DataStorage(storageFile)
        log.info("Found %d images in storage file" % saved.data.shape[0])
        ai = getAI(poni, folder)
        # consistency check (saved images done with same parameters ?)
        if ai is not None:
            # pyFAI objects cannot be compared directly (only via their string
            # representation) because some fields are None before the first image
            keys_to_compare = "nQ mask dark dezinger skip_first last"
            keys_to_compare = keys_to_compare.split()
            # recursively transform in plain dict and limit comparison to given keys
            saved_args = DataStorage(saved.args).toDict()
            now_args = DataStorage(args).toDict()
            saved_args = dict([(k, saved_args[k]) for k in keys_to_compare])
            now_args = dict([(k, now_args[k]) for k in keys_to_compare])

            if (not compare_pyfai(saved.pyfai, ai)) or \
                    np.any(saved.mask != interpretMasks(mask, saved.mask.shape)) or \
                    not utils.is_same(saved_args, now_args):
                log.warn(
                    "Found inconsistency between curves already saved and new ones"
                )
                log.warn("Redoing saved ones with new parameters")
                if (saved.pyfai_info != ai_as_str(ai)):
                    log.warn("pyfai parameters changed from:\n%s" %
                             saved.pyfai_info + "\nto:\n%s" % ai_as_str(ai))
                if np.any(
                        saved.mask != interpretMasks(mask, saved.mask.shape)):
                    log.warn("Mask changed from:\n%s" % saved.mask +
                             "\nto:\n%s" %
                             interpretMasks(mask, saved.mask.shape))
                if not utils.is_same(saved_args, now_args):
                    for k in set(now_args.keys()) - set(['mask']):
                        if not utils.is_same(saved_args[k], now_args[k]):
                            if isinstance(saved_args[k], dict):
                                for kk in saved_args[k].keys():
                                    if not utils.is_same(
                                            saved_args[k][kk],
                                            now_args[k][kk]):
                                        log.warn(
                                            "Parameter %s.%s IS DIFFERENT: %s vs %s"
                                            % (k, kk, saved_args[k][kk],
                                               now_args[k][kk]))
                            else:
                                log_str = " %s to %s" % (saved_args[k],
                                                         now_args[k])
                                if len(log_str) > 20:
                                    log_str = ":\n%s\nto:\n%s" % (
                                        saved_args[k], now_args[k])
                                log.warn("Parameter '%s' changed from" % k +
                                         log_str)
                args['force'] = True
                saved = doFolder(**args)
    else:
        saved = None

    files = utils.getFiles(folder, files)
    if logDict is not None:
        files = [f for f in files if utils.getBasename(f) in logDict['file']]

        # sometimes one deletes images but not the corresponding lines in logfiles...
        if len(files) < len(logDict['file']):
            basenames = np.asarray([utils.getBasename(file) for file in files])
            idx_to_keep = np.asarray([f in basenames for f in logDict['file']])
            for key in logDict.keys():
                logDict[key] = logDict[key][idx_to_keep]
            log.warn(
                "More files in log than actual images, truncating log info")

    files = files[skip_first:last]

    if saved is not None:
        files = [
            f for f in files if utils.getBasename(f) not in saved["files"]
        ]
    log.info("Will do azimuthal integration for %d files" % (len(files)))

    files = np.asarray(files)
    basenames = np.asarray([utils.getBasename(file) for file in files])

    if len(files) > 0:
        # which poni file to use:
        ai = getAI(poni, folder)
        _msg = "could not interpret poni info or find poni file"
        if ai is None: raise ValueError(_msg)

        shape = read(files[0]).shape
        mask = interpretMasks(mask, shape)

        data = np.empty((len(files), nQ))
        err = np.empty((len(files), nQ))
        pbar = utils.progressBar(len(files))
        for ifname, fname in enumerate(files):
            img = read(fname)
            q, i, e = do1d(ai,
                           img,
                           mask=mask,
                           npt_radial=nQ,
                           dark=dark,
                           dezinger=dezinger,
                           azimuth_range=azimuth_range)
            data[ifname] = i
            err[ifname] = e
            if saveChi:
                chi_fname = utils.removeExt(fname) + ".chi"
                utils.saveTxt(chi_fname,
                              q,
                              np.vstack((i, e)),
                              info=ai_as_str(ai),
                              overwrite=True)
            pbar.update(ifname + 1)
        pbar.finish()
        if saved is not None:
            files = np.concatenate((saved.orig.files, basenames))
            data = np.concatenate((saved.orig.data, data))
            err = np.concatenate((saved.orig.err, err))
        else:
            files = basenames
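        # ai.wavelength is in metres (pyFAI convention); multiply by 1e10 to pass Å to qToTwoTheta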
        twotheta_rad = utils.qToTwoTheta(q, wavelength=ai.wavelength * 1e10)
        twotheta_deg = utils.qToTwoTheta(q,
                                         wavelength=ai.wavelength * 1e10,
                                         asDeg=True)
        orig = dict(data=data.copy(),
                    err=err.copy(),
                    q=q.copy(),
                    twotheta_deg=twotheta_deg,
                    twotheta_rad=twotheta_rad,
                    files=files)
        ret = dict(folder=folder,
                   files=files,
                   orig=orig,
                   pyfai=ai_as_dict(ai),
                   pyfai_info=ai_as_str(ai),
                   mask=mask,
                   args=args)
        if not save_pyfai:
            ret['pyfai']['chia'] = None
            ret['pyfai']['dssa'] = None
            ret['pyfai']['qa'] = None
            ret['pyfai']['ttha'] = None

        ret = DataStorage(ret)

    else:
        ret = saved

    if ret is None: return None

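    # optionally restrict the q-range; idx marks the columns to keep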
    if qlims is not None:
        idx = (ret.orig.q >= qlims[0]) & (ret.orig.q <= qlims[1])
    else:
        idx = np.ones_like(ret.orig.q, dtype=bool)

    ret.orig.twotheta_deg = utils.qToTwoTheta(ret.orig.q,
                                              wavelength=ai.wavelength * 1e10,
                                              asDeg=True)
    ret.orig.twotheta_rad = utils.qToTwoTheta(ret.orig.q,
                                              wavelength=ai.wavelength * 1e10)

    ret.data = ret.orig.data[:, idx]
    ret.err = ret.orig.err[:, idx]
    ret.q = ret.orig.q[idx]

    ret.twotheta_rad = ret.orig.twotheta_rad[idx]
    ret.twotheta_deg = ret.orig.twotheta_deg[idx]

    if isinstance(monitor, str):
        if monitor == 'auto':
            monitor = ret.data.mean(1)
        else:
            raise ValueError(
                "'monitor' must be ndarray, 2-element tuple/list, 'auto' or None.")
    elif isinstance(monitor, (tuple, list)):
        if len(monitor) == 2:
            idx_norm = (ret.q >= monitor[0]) & (ret.q <= monitor[1])
            monitor = ret.data[:, idx_norm].mean(1)
        else:
            raise ValueError(
                "'monitor' must be ndarray, 2-element tuple/list, 'auto' or None.")
    elif not isinstance(monitor, np.ndarray) and monitor is not None:
        raise ValueError(
            "'monitor' must be ndarray, 2-element tuple/list, 'auto' or None.")

    if monitor is not None:
        ret["data_norm"] = ret.data / monitor[:, np.newaxis]
        ret["err_norm"] = ret.err / monitor[:, np.newaxis]
        ret["monitor"] = monitor[:, np.newaxis]
    else:
        ret["data_norm"] = None
        ret["err_norm"] = None
        ret["monitor"] = None

    # add info from logDict if provided
    if logDict is not None: ret['log'] = logDict
    # sometimes saving is not necessary (e.g. if one has to do it after subtracting the background)
    if storageFile is not None and save: ret.save(storageFile)

    return ret
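
A minimal usage sketch for the example above ("run0001" and the numeric values are hypothetical, chosen for illustration only):

# integrate every *.edf* image found in "run0001" using the pyfai.poni file
# located by the search order described in the docstring; keep only q inside
# qlims and normalise each curve on its mean over the monitor q-range
data = doFolder("run0001",
                files="*.edf*",
                nQ=1000,
                poni="pyfai.poni",
                qlims=(0.5, 4),
                monitor=(1.5, 3.5),
                saveChi=False)

# data.q, data.data, data.err hold the cropped curves; data.data_norm and
# data.err_norm are the same curves divided by the per-image monitor value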