# Example 1
def _merge_two_curves(curve1: Curve, curve2: Curve, qmin, qmax, qsep, use_additive_constant=False):
    """Merge two scattering curves

    :param curve1: the first curve (longer distance)
    :type curve1: sastool.classes.curve.GeneralCurve
    :param curve2: the second curve (shorter distance)
    :type curve2: sastool.classes.curve.GeneralCurve
    :param qmin: lower bound of the interval for determining the scaling factor
    :type qmin: float
    :param qmax: upper bound of the interval for determining the scaling factor
    :type qmax: float
    :param qsep: separating (tailoring) point for the merge
    :type qsep: float
    :param use_additive_constant: if an additive background term is fitted
        together with the scaling factor (otherwise it is pinned to zero)
    :type use_additive_constant: bool
    :return: merged_curve, factor, background, stat
    :rtype: tuple of a sastool.classes2.curve.Curve and a float
    """
    curve1 = curve1.sanitize()
    curve2 = curve2.sanitize()
    # Resample within [qmin, qmax]: the curve with more points in the overlap
    # is interpolated onto the q-grid of the sparser one.
    trimmed1 = curve1.trim(qmin, qmax)
    trimmed2 = curve2.trim(qmin, qmax)
    if len(trimmed1) > len(trimmed2):
        curve2_interp = trimmed2
        curve1_interp = curve1.interpolate(trimmed2.q)
    else:
        curve1_interp = trimmed1
        curve2_interp = curve2.interpolate(trimmed1.q)
    # FixedParameter keeps the background frozen at zero in the ODR fit
    # unless the caller asked for a free additive constant.
    bg_init = 0 if use_additive_constant else FixedParameter(0)
    factor, bg, stat = nonlinear_odr(
        curve2_interp.Intensity, curve1_interp.Intensity,
        curve2_interp.Error, curve1_interp.Error,
        lambda x, factor, bg: x * factor + bg, [1.0, bg_init])
    return Curve.merge(curve1 - bg, curve2 * factor, qsep), factor, bg, stat
# Example 2
def assess_gc_fit(reffile=None, gcname='Glassy_Carbon'):
    """Plot measured glassy-carbon radial averages against the reference curve.

    :param reffile: path of the reference data file. If None, it is resolved
        as ``config/GC_data_nm.dat`` relative to the first loader's subpath.
    :param gcname: sample title identifying glassy-carbon exposures among the
        processed headers
    :raises ValueError: if no processed exposure with the given title could
        be loaded
    """
    ip = get_ipython()
    if reffile is None:
        reffile = ip.user_ns['_loaders'][0].get_subpath('config/GC_data_nm.dat')
    refcurve = Curve.new_from_file(reffile)
    f = plt.figure()
    f.add_subplot(1, 1, 1)
    rads = {}
    for fsn in sorted([h.fsn for h in ip.user_ns['_headers']['processed'] if h.title == gcname]):
        try:
            ex = load_exposure(fsn, raw=False, processed=True)
        except Exception:
            # Best-effort collection: skip exposures that cannot be loaded.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            continue
        rads[ex.header.fsn] = ex.radial_average(refcurve.q)
        del ex
    if not rads:
        # Without this guard, max()/min() below would raise a cryptic
        # "max() arg is an empty sequence" ValueError.
        raise ValueError('No processed exposures found with title %s' % gcname)
    # Common q-range covered by every measured curve.
    qmin = max([r.sanitize().q.min() for r in rads.values()])
    qmax = min([r.sanitize().q.max() for r in rads.values()])
    refcurve.trim(qmin, qmax).loglog('o', mfc='none', ms=10)
    for r in sorted(rads):
        rads[r].loglog('.', label='#{:d}'.format(r))
    plt.axis('tight')
    plt.legend(loc='best', numpoints=1)
    plt.xlabel('q (nm$^{-1}$)')
    # Raw string: '\S' / '\O' are invalid escape sequences (SyntaxWarning on
    # modern Python); the runtime value is unchanged.
    plt.ylabel(r'$d\Sigma/d\Omega$ (cm$^{-1}$ sr$^{-1}$)')
    plt.grid(True, which='both')
    plt.draw()
# Example 3
def summarize(reintegrate=True, dist_tolerance=3, qranges=None,
              samples=None, raw=False, late_radavg=True, graph_ncols=3,
              std_multiplier=3, graph_extension='png',
              graph_dpi=80, correlmatrix_colormap='coolwarm',
              image_colormap='viridis', correlmatrix_logarithmic=True, cormaptest=True):
    """Summarize scattering patterns and curves for all samples defined 
    by the global `allsamplenames`.
    
    Inputs:
        reintegrate (bool, default=True): if the curves are to be obained
            by reintegrating the patterns. Otherwise 1D curves are loaded.
        dist_tolerance (float, default=3): sample-to-detector distances
            nearer than this are considered the same
        qranges (dict): a dictionary mapping approximate sample-to-detector
            distances (within dist_tolerance) to one-dimensional np.ndarrays
            of the desired q-range of the reintegration.
        samples (list or None): the names of the samples to summarize. If
            None, all samples defined by ``allsamplenames`` are used.
        raw (bool, default=False): if raw images are to be treated instead
            the evaluated ones (default).
        late_radavg (bool, default=True): if the scattering curves are to
            be calculated from the summarized scattering pattern. If False,
            scattering curves are calculated from each pattern and will be
            averaged.
        graph_ncols: the number of columns in graphs (2D patterns, 
            correlation matrices)
        std_multiplier: if the absolute value of the relative discrepancy 
            is larger than this limit, the exposure is deemed an outlier.
        graph_extension: the extension of the produced hardcopy files.
        graph_dpi: resolution of the graphs
        correlmatrix_colormap: name of the colormap to be used for the
            correlation matrices (resolved by matplotlib.cm.get_cmap())
        image_colormap: name of the colormap to be used for the scattering
            patterns (resolved by matplotlib.cm.get_cmap())
        correlmatrix_logarithmic: if the correlation matrix has to be
            calculated from the logarithm of the intensity.
        cormaptest (bool, default=True): if the CorMap test is to be run
            during the stability assessment.
    """
    if qranges is None:
        qranges = {}
    ip = get_ipython()
    data2d = {}
    data1d = {}
    headers_tosave = {}
    rowavg = {}
    if raw:
        writemarkdown('# Summarizing RAW images.')
        headers = ip.user_ns['_headers']['raw']
        rawpart = '_raw'  # this will be added in the filenames saved
    else:
        writemarkdown('# Summarizing CORRECTED images.')
        headers = ip.user_ns['_headers']['processed']
        rawpart = ''  # nothing will be added in the filenames saved

    if samples is None:
        samples = sorted(ip.user_ns['allsamplenames'])
    for samplename in samples:
        writemarkdown('## ' + samplename)
        headers_sample = [h for h in headers if h.title == samplename]
        data2d[samplename] = {}
        rowavg[samplename] = {}
        data1d[samplename] = {}
        headers_tosave[samplename] = {}
        dists = get_different_distances([h for h in headers if h.title == samplename], dist_tolerance)
        if not dists:
            writemarkdown('No measurements from sample, skipping.')
            continue
        fig_2d = plt.figure()
        fig_curves = plt.figure()
        fig_correlmatrices = plt.figure()
        distaxes = {}
        correlmatrixaxes = {}
        ncols = min(len(dists), graph_ncols)
        nrows = int(np.ceil(len(dists) / ncols))
        onedimaxes = fig_curves.add_axes((0.1, 0.3, 0.8, 0.5))
        onedimstdaxes = fig_curves.add_axes((0.1, 0.1, 0.8, 0.2))
        for distidx, dist in enumerate(dists):
            writemarkdown("### Distance " + str(dist) + " mm")
            headers_narrowed = [h for h in headers_sample if abs(float(h.distance) - dist) < dist_tolerance]
            distaxes[dist] = fig_2d.add_subplot(
                nrows, ncols, distidx + 1)
            correlmatrixaxes[dist] = fig_correlmatrices.add_subplot(
                nrows, ncols, distidx + 1)
            # determine the q-range to be used from the qranges argument.
            try:
                distkey_min = min([np.abs(k - dist)
                                   for k in qranges if np.abs(k - dist) < dist_tolerance])
            except ValueError:
                # no matching key in qranges dict
                qrange = None  # request auto-determination of q-range
            else:
                distkey = [
                    k for k in qranges if np.abs(k - dist) == distkey_min][0]
                qrange = qranges[distkey]

            (data1d[samplename][dist], data2d[samplename][dist], headers_tosave[samplename][dist]) = \
                _collect_data_for_summarization(headers_narrowed, raw, reintegrate, qrange)

            badfsns, badfsns_datcmp, tab, rowavg[samplename][dist] = _stabilityassessment(
                headers_tosave[samplename][dist],
                data1d[samplename][dist], dist,
                fig_correlmatrices,
                correlmatrixaxes[dist], std_multiplier, correlmatrix_colormap,
                os.path.join(ip.user_ns['saveto_dir'], 'correlmatrix_%s_%s' % (
                    samplename,
                    ('%.2f' % dist).replace('.', '_')) +
                             rawpart + '.npz'),
                logarithmic_correlmatrix=correlmatrix_logarithmic,
                cormaptest=cormaptest)

            # BUGFIX: these two initializations are independent; the original
            # `elif` left 'badfsns_datcmp' uninitialized whenever 'badfsns'
            # was also missing, causing a KeyError two lines below on the
            # first run in a fresh namespace.
            if 'badfsns' not in ip.user_ns:
                ip.user_ns['badfsns'] = {}
            if 'badfsns_datcmp' not in ip.user_ns:
                ip.user_ns['badfsns_datcmp'] = {}
            ip.user_ns['badfsns'] = set(ip.user_ns['badfsns']).union(badfsns)
            ip.user_ns['badfsns_datcmp'] = set(ip.user_ns['badfsns_datcmp']).union(badfsns_datcmp)
            display(tab)

            # Plot the image
            try:
                data2d[samplename][dist].imshow(axes=distaxes[dist], show_crosshair=False,
                                                norm=matplotlib.colors.LogNorm(),
                                                cmap=matplotlib.cm.get_cmap(image_colormap))
            except ValueError:
                print('Error plotting 2D image for sample %s, distance %.2f' % (samplename, dist))
            distaxes[dist].set_xlabel('q (' + qunit() + ')')
            distaxes[dist].set_ylabel('q (' + qunit() + ')')
            distaxes[dist].set_title(
                '%.2f mm (%d curve%s)' % (dist, len(headers_tosave[samplename][dist]),
                                          ['', 's'][len(headers_tosave[samplename][dist]) > 1]))

            # Plot the curves
            Istd = np.stack([c.Intensity for c in data1d[samplename][dist]], axis=1)
            for c, h in zip(data1d[samplename][dist], headers_tosave[samplename][dist]):
                # colour-code outliers: magenta = flagged by datcmp,
                # red = flagged by the stability assessment, green = OK
                color = 'green'
                if h.fsn in badfsns_datcmp:
                    color = 'magenta'
                if h.fsn in badfsns:
                    color = 'red'
                c.loglog(axes=onedimaxes, color=color)
            if Istd.shape[1] > 1:
                onedimstdaxes.loglog(data1d[samplename][dist][0].q, Istd.std(axis=1) / Istd.mean(axis=1) * 100, 'b-')
            if not late_radavg:
                data1d[samplename][dist] = Curve.average(
                    *data1d[samplename][dist])
            else:
                data1d[samplename][dist] = (
                    data2d[samplename][dist].radial_average(
                        qrange,
                        errorpropagation=3,
                        abscissa_errorpropagation=3, raw_result=False))
            data1d[samplename][dist].loglog(
                label='Average', lw=2, color='k', axes=onedimaxes)

            ##Saving image, headers, mask and curve
            # data2d[samplename][dist].write(
            #    os.path.join(ip.user_ns['saveto_dir'],
            #                 samplename + '_'+(
            #                     '%.2f' % dist).replace('.', '_') +
            #                 rawpart + '.npz'), plugin='CREDO Reduced')
            # data2d[samplename][dist].header.write(
            #    os.path.join(ip.user_ns['saveto_dir'],
            ###                 samplename + '_'+(
            #                     '%.2f' % dist).replace('.', '_') +
            #                 rawpart +'.log'), plugin='CREDO Reduced')
            # data2d[samplename][dist].mask.write_to_mat(
            #    os.path.join(ip.user_ns['saveto_dir'],
            #                 data2d[samplename][dist].mask.maskid+'.mat'))
            data1d[samplename][dist].save(os.path.join(ip.user_ns['saveto_dir'],
                                                       samplename + '_' + ('%.2f' % dist).replace('.',
                                                                                                  '_') + rawpart + '.txt'))

            # Report on qrange and flux
            q_ = data1d[samplename][dist].q
            qmin = q_[q_ > 0].min()
            writemarkdown('#### Q-range & flux')
            writemarkdown(
                '- $q_{min}$: ' + print_abscissavalue(qmin, headers_tosave[samplename][dist][0].wavelength, dist))
            writemarkdown('- $q_{max}$: ' + print_abscissavalue(data1d[samplename][dist].q.max(),
                                                                headers_tosave[samplename][dist][0].wavelength, dist))
            writemarkdown('- Number of $q$ points: ' + str(len(data1d[samplename][dist])))
            meastime = sum([h.exposuretime for h in headers_tosave[samplename][dist]])
            writemarkdown("- from %d exposures, total exposure time %.0f sec <=> %.2f hr" % (
                len(headers_tosave[samplename][dist]),
                meastime, meastime / 3600.))
            try:
                flux = [h.flux for h in headers_tosave[samplename][dist]]
                flux = ErrorValue(np.mean(flux), np.std(flux))
                writemarkdown("- beam flux (photon/sec): %s" % flux)
            except KeyError:
                writemarkdown("- *No information on beam flux: dealing with raw data.*")
        onedimaxes.set_xlabel('')
        onedimaxes.set_ylabel('$d\\Sigma/d\\Omega$ (cm$^{-1}$ sr$^{-1}$)')
        # plt.legend(loc='best')
        onedimaxes.grid(True, which='both')
        onedimaxes.axis('tight')
        onedimaxes.set_title(samplename)
        onedimstdaxes.set_xlabel('q (' + qunit() + ')')
        onedimstdaxes.set_ylabel('Rel.std.dev. of intensity (%)')
        onedimstdaxes.grid(True, which='both')
        onedimstdaxes.set_xlim(*onedimaxes.get_xlim())
        onedimstdaxes.set_xscale(onedimaxes.get_xscale())
        putlogo(fig_curves)
        putlogo(fig_2d)
        fig_2d.tight_layout()
        fig_correlmatrices.suptitle(samplename)
        fig_correlmatrices.tight_layout()
        fig_2d.savefig(
            os.path.join(ip.user_ns['auximages_dir'],
                         'averaging2D_' +
                         samplename + rawpart + '.' + graph_extension),
            dpi=graph_dpi)
        fig_curves.savefig(
            os.path.join(ip.user_ns['auximages_dir'],
                         'averaging1D_' +
                         samplename + rawpart + '.' + graph_extension),
            dpi=graph_dpi)
        putlogo(fig_correlmatrices)
        fig_correlmatrices.savefig(
            os.path.join(ip.user_ns['auximages_dir'],
                         'correlation_' +
                         samplename + rawpart + '.' + graph_extension),
            dpi=graph_dpi)
        writemarkdown("### Collected images from all distances")
        plt.show()
    writemarkdown("Updated badfsns list:")
    writemarkdown('[' + ', '.join(str(f) for f in ip.user_ns['badfsns']) + ']')
    writemarkdown("Updated badfsns list using datcmp:")
    writemarkdown('[' + ', '.join(str(f) for f in ip.user_ns['badfsns_datcmp']) + ']')
    ip.user_ns['_data1d'] = data1d
    ip.user_ns['_data2d'] = data2d
    ip.user_ns['_headers_sample'] = headers_tosave
    ip.user_ns['_rowavg'] = rowavg