Example #1
def median(data, interval, var=None):
    # filter the data with a median filter, per (interval) in (var)
    # if var is None, interval is the number of data points
    if var is None:
        return datasets.downsample(data, interval, averaging=np.nanmedian, error_est=None)
    else:
        filtered_lists = {}
        for key in data:
            filtered_lists[key] = []
        sorted_data = data.sort(var)
        i1 = 0
        while i1 < sorted_data.length:
            i0 = i1
            v0 = sorted_data[var][i0]
            # advance i1 to the first point whose (var) value lies outside the window
            while i1 < sorted_data.length and sorted_data[var][i1] - v0 < interval:
                i1 += 1
            for key in sorted_data:
                med = np.nanmedian(sorted_data[key][i0:i1])
                filtered_lists[key].append(med)
Example #2
def median(data, interval, var=None):
    # filter the data with a median filter, per (interval) in (var)
    # if var is None, interval is the number of data points
    if var is None:
        return datasets.downsample(data,
                                   interval,
                                   averaging=np.nanmedian,
                                   error_est=None)
    else:
        filtered_lists = {}
        for key in data:
            filtered_lists[key] = []
        sorted_data = data.sort(var)
        i1 = 0
        while i1 < sorted_data.length:
            i0 = i1
            v0 = sorted_data[var][i0]
            # advance i1 to the first point whose (var) value lies outside the window
            while i1 < sorted_data.length and sorted_data[var][i1] - v0 < interval:
                i1 += 1
            for key in sorted_data:
                med = np.nanmedian(sorted_data[key][i0:i1])
                filtered_lists[key].append(med)
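
Both versions above depend on the project's own datasets module. As a rough, standalone sketch of the same windowed-median idea on a plain dict of NumPy arrays, the following may help; the names here (windowed_median, the sample data) are invented for illustration and are not part of the library's API.

import numpy as np

def windowed_median(data, var, interval):
    # data: dict of equal-length 1-D arrays; var: key to window over;
    # interval: window width in the units of data[var].
    order = np.argsort(data[var])
    srt = {key: np.asarray(col)[order] for key, col in data.items()}
    out = {key: [] for key in srt}
    n = len(srt[var])
    i1 = 0
    while i1 < n:
        i0 = i1
        v0 = srt[var][i0]
        while i1 < n and srt[var][i1] - v0 < interval:
            i1 += 1
        for key in srt:
            out[key].append(np.nanmedian(srt[key][i0:i1]))
    return {key: np.asarray(vals) for key, vals in out.items()}

# Example: reduce a noisy trace to one median point per 0.5-wide window in "t"
t = np.linspace(0.0, 10.0, 200)
raw = {"t": t, "x": np.sin(t) + 0.1 * np.random.randn(t.size)}
filtered = windowed_median(raw, "t", 0.5)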
Example #3
                          column_mapping={
                              0: "T",
                              1: "X",
                              2: "Y"
                          },
                          has_header=False)

    # Rotate
    d["X2"] = -d["X"]
    d["Y2"] = -d["Y"]

    # The averaging method takes into account correlation within 10 × the time constant
    averager, err_est = signal.decorrelate_neighbour_averager(
        30, np.nanmean, estimate_error=True)

    d = datasets.downsample(d, 240, method=averager, error_est=err_est)

    fig, ax1 = plt.subplots()
    ax1.set_xlabel("$T$ / K")
    plot1 = ax1.errorbar(d["T"],
                         d["X2"] / 1.e-6,
                         2.0 * d.errors["X2"] / 1.e-6,
                         fmt='go',
                         label="Re($V$)")
    ax1.set_ylabel(r'Re($V$) $\mu$V', color='g')

    ax2 = ax1.twinx()
    plot2 = ax2.errorbar(d["T"],
                         d["Y2"] / 1.e-6,
                         2.0 * d.errors["Y2"] / 1.e-6,
                         fmt='bx',
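
The decorrelate_neighbour_averager call above belongs to the project's signal module; judging by the comment, it builds an averaging function whose error estimate accounts for neighbouring readings being correlated over a known number of samples. A standalone sketch of that statistical idea, with invented names and not the library's actual API, is to scale the naive standard error by the effective number of independent points:

import numpy as np

def decorrelated_mean(values, corr_len):
    # Mean plus an error bar that treats only ~N/corr_len of the points as
    # statistically independent (neighbours within corr_len samples are correlated).
    values = np.asarray(values, dtype=float)
    n_valid = np.count_nonzero(~np.isnan(values))
    n_eff = max(n_valid / corr_len, 1.0)
    return np.nanmean(values), np.nanstd(values) / np.sqrt(n_eff)

mean, err = decorrelated_mean(np.random.randn(240), corr_len=30)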
Example #4
def mean_filter(data):
    data = datasets.downsample(data, 9, method=np.nanmean)
    return data
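
datasets.downsample itself is part of the project's library; judging from these examples it reduces a dataset by applying the given reducer (here np.nanmean) to consecutive blocks of the stated size. A minimal standalone sketch of that block-wise reduction on a plain array, assuming that interpretation and using invented names:

import numpy as np

def block_reduce(values, size, method=np.nanmean):
    # Collapse consecutive blocks of `size` samples into one value each;
    # any trailing partial block is dropped.
    values = np.asarray(values, dtype=float)
    n_blocks = len(values) // size
    return method(values[:n_blocks * size].reshape(n_blocks, size), axis=1)

smoothed = block_reduce(np.random.randn(90), 9)   # 90 points -> 10 block means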
Example #5
def plot(fn, co, cos, ls, mk, label, subs):
    sub_a, sub_b, sub_c, sub_d = subs
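    # Note: ch, zero_H, H2, dH1, H_unit, mr_unit, lw, lw2 and mew2 are module-level
    # constants defined elsewhere in the original script (Example #6 shows ch and zero_H)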

    data = ppmsana.import_dc(fn)[ch]

    data = datasets.downsample(data,
                               3,
                               np.nanmedian,
                               error_est=errors.combined)
    corrected = tdrift.linear(data, "R", data["time"][0], data["R"][0],
                              data["time"][-1], data["R"][-1])

    down, up = transport.split_MR_down_up(corrected)

    upsym = transport.symmetrize_MR(up, down)
    downsym = transport.symmetrize_MR(down, up)

    d = datasets.merge(upsym, downsym)

    d = d.sort("H")
    d0 = d.mask(np.abs(d["H"]) < zero_H)
    n = d0["R"].shape[0]
    #print(n)
    R0 = np.mean(d0["R"])
    err2 = errors.combined(d0["R"], d0.errors["R"])

    d["mr"] = d["R"] / R0 - 1.0
    d.errors["mr"] = (d.errors["R"] + err2) / R0
    d1 = datasets.consolidate(d,
                              "H",
                              dH1,
                              np.nanmean,
                              error_est=errors.combined)

    sub_a.plot(d1["H"] / H_unit,
               d1["mr"] / mr_unit,
               marker=None,
               color=co,
               linestyle=ls,
               lw=lw,
               label=label)
    sub_c.plot(d1["H"] / H_unit * cos,
               d1["mr"] / mr_unit,
               marker=None,
               color=co,
               linestyle=ls,
               lw=lw,
               label=label)

    d = d1.mask(np.abs(d1["H"]) < H2)
    sub_b.errorbar(d["H"] / H_unit,
                   d["mr"] / mr_unit,
                   yerr=d.errors["mr"] / mr_unit,
                   marker=mk,
                   markerfacecolor="none",
                   color=co,
                   linestyle="none",
                   lw=lw2,
                   mew=mew2,
                   label=label)

    d = d1.mask(np.abs(d1["H"] * cos) < H2)
    sub_d.errorbar(d["H"] / H_unit * cos,
                   d["mr"] / mr_unit,
                   yerr=d.errors["mr"] / mr_unit,
                   marker=mk,
                   markerfacecolor="none",
                   color=co,
                   linestyle="none",
                   lw=lw2,
                   mew=mew2,
                   label=label)
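
The magnetoresistance normalisation used above is MR(H) = R(H)/R(0) - 1, with R(0) estimated as the mean resistance inside the ±zero_H window around zero field. A short standalone NumPy illustration of just that step, where the arrays are synthetic stand-ins for d["H"] and d["R"]:

import numpy as np

H = np.linspace(-1.0e4, 1.0e4, 401)      # field / Oe (synthetic)
R = 100.0 + 0.5 * (H / 1.0e4) ** 2       # resistance / ohm (synthetic)

zero_H = 200.0                           # zero-field window, as in the examples
R0 = np.mean(R[np.abs(H) < zero_H])      # R(0): mean resistance near zero field
mr = R / R0 - 1.0                        # MR(H) = R(H)/R(0) - 1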
Example #6
ch = 1

zero_H = 200.0

if __name__ == '__main__':
    
    plt.xlabel("$H$ / Oe")
    plt.ylabel(r"$\Delta R(H) / R(0)$")
    
    
    # 0d
    fn = r"20130516_13_RvH_4K_0D_Ch1.temp_Ch2.11P.EuS11.Al2O3.xx_Ch3.11P.EuS11.Al2O3.xy.dat"

    data = ppmsana.import_dc(fn)[ch]
    
    data = datasets.downsample(data, 3, np.nanmedian, error_est=errors.combined)
    corrected = tdrift.linear(data, "R", data["time"][0], data["R"][0], data["time"][-1], data["R"][-1])
    
    
    down, up = transport.split_MR_down_up(corrected)
  
    upsym = transport.symmetrize_MR(up, down)
    downsym = transport.symmetrize_MR(down, up)
    
    d = datasets.merge(upsym, downsym)
    
    d = d.sort("H")
    d0 = d.mask(np.abs(d["H"]) < zero_H)
    n = d0["R"].shape[0]
    #print(n)
    R0 = np.mean(d0["R"])