Example #1
                        names=["q", "SA", "sigSA"],
                        delim_whitespace=True,
                        skiprows=1)

### correct the average-off protein-only curve for packing effects
spf_corrected_int = avg_off.SA / spf.SA
spf_corrected_error = spf_corrected_int * np.sqrt(
    (avg_off.sigSA / avg_off.SA)**2 +
    (spf.sigSA / spf.SA)**2)  ### https://terpconnect.umd.edu/~toh/models/ErrorPropagation.pdf
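### the np.empty_like arrays below are placeholders for Trace fields this
### script never reads; only the sigSA and SA slots are used downstream
### (inferred from usage; the Trace signature is not shown in this snippet)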
avg_off_spf_corrected = Trace(avg_off.q, np.empty_like(avg_off.q),
                              np.empty_like(avg_off.q), spf_corrected_error,
                              spf_corrected_int, np.empty_like(avg_off.q))
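
### the error term above follows the standard quotient rule from the linked
### reference: for f = a/b with independent uncertainties,
### sig_f = |f| * sqrt((sig_a/a)**2 + (sig_b/b)**2).
### minimal standalone sketch (hypothetical helper; numpy assumed imported
### as np, as elsewhere in this script):
def quotient_error(a, sig_a, b, sig_b):
    """Propagated uncertainty of f = a / b with independent errors."""
    f = a / b
    # np.abs guards negative ratios; the snippet above omits it, which only
    # matters if a ratio goes negative
    return np.abs(f) * np.sqrt((sig_a / a)**2 + (sig_b / b)**2)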

### scale back up to proper intensities
avg_off_spf_corrected.scale(avg_off,
                            qmin=0.03,
                            qmax=0.1,
                            approach="projection")
avg_off_spf_corrected_scaled = Trace(avg_off.q, np.empty_like(avg_off.q),
                                     np.empty_like(avg_off.q),
                                     avg_off_spf_corrected.scaled_sigSA,
                                     avg_off_spf_corrected.scaled_SA,
                                     np.empty_like(avg_off.q))
avg_off_spf_corrected_scaled.write_dat(
    avg_off_protein_only_file.replace('.dat', 'spf-corrected.dat'))
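
### `approach="projection"` is not defined in this snippet; a common reading
### is a least-squares projection onto the reference over the q-window, with
### scale factor s = sum(ref * I) / sum(I * I). sketch under that assumption
### (the real Trace.scale implementation may differ):
def projection_scale(q, intensity, ref_intensity, qmin=0.03, qmax=0.1):
    """Scale factor s minimizing sum((ref - s * I)**2) on qmin <= q <= qmax."""
    mask = (q >= qmin) & (q <= qmax)
    return np.dot(ref_intensity[mask], intensity[mask]) / np.dot(
        intensity[mask], intensity[mask])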
# plt.plot(avg_off.q,avg_off.SA, label='avg_off')
# plt.plot(avg_off_spf_corrected.q,avg_off_spf_corrected.SA, label='avg_off_spf_corr')
# plt.plot(avg_off_spf_corrected_scaled.q,avg_off_spf_corrected_scaled.SA, label='avg_off_spf_corr_scaled')

### add corrected protein-only signal to each TR difference signal
for file in tr_diff_files:
    name = file.name
Example #2
def all_vectors(parent,
                samp,
                reps,
                on_off_map,
                option=None,
                multitemp=None,
                iterations=None,
                temp=None):
    """
    Collect all curves within a directory, selecting only on-off pairs
    and sorting them into the order of time-resolved data collection.
    This is useful for SVD analysis.
    
    Parameters:
    parent (pathlib object): the directory where files are stored
    samp (str): the name of the sample, as recorded in the filename
    reps (list of strings): values denoting repetition number
    on_off_map (dictionary): maps each "on" label to its paired "off" label
    option (T/F): filename idiosyncrasy, "on" label included in filename for on-off pairs (unused in this snippet)
    multitemp (T/F): data-storage idiosyncrasy, multiple temperatures collected in one run
    iterations (list of strings): values denoting collection iteration (used when multitemp is set)
    temp (str): collection temperature, as recorded in the filename
    
    Returns:
    all_vectors (list of np.array): off and on curves, trimmed to reference.q > 0.03
    all_labels (list of strings): time-delay labels matching all_vectors
    tr_vectors_labels (list of tuples): (on-off difference curve as np.array, "on" label)

    Relies on module-level `reference`, `QMIN`, and `QMAX` for scaling.
    """
    all_vectors = []
    all_labels = []
    tr_vectors_labels = []

    if multitemp:  # multiple temperatures collected in one run
        for iteration in iterations:

            for n in reps:
                # for off in on_off_map.values():
                for on, off in on_off_map.items():
                    on_string = ("{0}/{1}_{2}_{3}_{4}_{5}.tpkl".format(
                        parent, samp, iteration, temp, n, on))
                    off_string = ("{0}/{1}_{2}_{3}_{4}_{5}.tpkl".format(
                        parent, samp, iteration, temp, n, off))
                    try:
                        on_data = parse.parse(on_string)
                        on_data = Trace(on_data.q, np.empty_like(on_data.q),
                                        np.empty_like(on_data.q),
                                        on_data.sigSA, on_data.SA, on_data.Nj)
                        on_data.scale(reference, qmin=QMIN, qmax=QMAX)

                        off_data = parse.parse(off_string)
                        off_data = Trace(off_data.q, np.empty_like(off_data.q),
                                         np.empty_like(off_data.q),
                                         off_data.sigSA, off_data.SA,
                                         off_data.Nj)
                        off_data.scale(reference, qmin=QMIN, qmax=QMAX)
                        # off_scaled = Trace(off_data.q, np.empty_like(off_data.q), np.empty_like(off_data.q), off_data.scaled_sigSA, off_data.scaled_SA, off_data.Nj)
                        # off_vectors.append(off_scaled)
                        if on_data and off_data:
                            all_vectors.append(
                                off_data.SA[reference.q > 0.03])
                            all_labels.append(off)
                            all_vectors.append(
                                on_data.SA[reference.q > 0.03])
                            all_labels.append(on)
                            # sub_scaled = subtract_scaled_traces(on_data,off_data)
                            sub_scaled = on_data.subtract(off_data,
                                                          scaled=True)
                            tr_vectors_labels.append(
                                (sub_scaled.SA[reference.q > 0.03], on))

                    except Exception:
                        print(off_string + "\tfailed")

    else:

        for n in reps:
            # for off in on_off_map.values():
            for on, off in on_off_map.items():
                on_string = ("{0}/{1}_{2}_{3}.tpkl".format(
                    parent, samp, n, on))
                off_string = ("{0}/{1}_{2}_{3}.tpkl".format(
                    parent, samp, n, off))
                try:
                    on_data = parse.parse(on_string)
                    on_data = Trace(on_data.q, np.empty_like(on_data.q),
                                    np.empty_like(on_data.q), on_data.sigSA,
                                    on_data.SA, on_data.Nj)
                    on_data.scale(reference, qmin=QMIN, qmax=QMAX)

                    off_data = parse.parse(off_string)
                    off_data = Trace(off_data.q, np.empty_like(off_data.q),
                                     np.empty_like(off_data.q), off_data.sigSA,
                                     off_data.SA, off_data.Nj)
                    off_data.scale(reference, qmin=QMIN, qmax=QMAX)
                    # off_scaled = Trace(off_data.q, np.empty_like(off_data.q), np.empty_like(off_data.q), off_data.scaled_sigSA, off_data.scaled_SA, off_data.Nj)
                    # off_vectors.append(off_scaled)
                    if on_data and off_data:
                        all_vectors.append(off_data.SA[reference.q > 0.03])
                        all_labels.append(off)
                        all_vectors.append(on_data.SA[reference.q > 0.03])
                        all_labels.append(on)
                        # sub_scaled = subtract_scaled_traces(on_data,off_data)
                        sub_scaled = on_data.subtract(off_data,
                                                      scaled=True)
                        tr_vectors_labels.append(
                            (sub_scaled.SA[reference.q > 0.03], on))

                except Exception:
                    print(off_string + "\tfailed")

    return all_vectors, all_labels, tr_vectors_labels
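
### hypothetical usage, per the docstring's note that the output is meant
### for SVD; `parent`, `samp`, `reps`, and `on_off_map` come from the
### surrounding script and are not defined in this snippet
# vectors, labels, tr_pairs = all_vectors(parent, samp, reps, on_off_map)
# matrix = np.column_stack(vectors)           # one curve per column
# U, S, Vt = np.linalg.svd(matrix, full_matrices=False)
# print("leading singular values:", S[:5])    # weight of each basis curve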
Example #3
### as in Example #1, the snippet opens mid-call; `spf` is inferred from its
### use below, and `spf_file` stands in for the elided source path
spf = pd.read_csv(spf_file,
                  names=["q", "SA", "sigSA"],  ### column names assumed, matching Example #1
                  delim_whitespace=True,
                  skiprows=1)

for file in static_files:
    name = file.name
    orig = parse(name)
    ### correct the curve for packing effects
    spf_corrected_int = orig.SA / spf.SA
    spf_corrected_error = spf_corrected_int * np.sqrt(
        (orig.sigSA / orig.SA)**2 +
        (spf.sigSA / spf.SA)**2)  ### https://terpconnect.umd.edu/~toh/models/ErrorPropagation.pdf
    static_spf_corrected = Trace(orig.q, np.empty_like(orig.q),
                                 np.empty_like(orig.q), spf_corrected_error,
                                 spf_corrected_int, np.empty_like(orig.q))
    ### scale back up to proper intensities
    static_spf_corrected.scale(orig,
                               qmin=0.03,
                               qmax=0.1,
                               approach="projection")
    static_spf_corrected_scaled = Trace(orig.q, np.empty_like(orig.q),
                                        np.empty_like(orig.q),
                                        static_spf_corrected.scaled_sigSA,
                                        static_spf_corrected.scaled_SA,
                                        np.empty_like(orig.q))
    static_spf_corrected_scaled.write_dat(
        name.replace('.dat', 'spf-corrected.dat'))

# plt.xscale('log')
# plt.legend()
# plt.show()
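
### practical caveat for the packing-factor division above: zero bins in
### spf.SA produce infinities and runtime warnings. a hedged guard (sketch,
### not part of the original script):
def safe_ratio(numerator, denominator, fill=np.nan):
    """Elementwise numerator / denominator; zero-denominator bins become `fill`."""
    num = np.asarray(numerator, dtype=float)
    den = np.asarray(denominator, dtype=float)
    out = np.full_like(num, fill)
    np.divide(num, den, out=out, where=den != 0)
    return out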