Example #1
import multiprocessing

import numpy as np
from scipy.stats import chi


def parallel_proc(data):
    # data = np.abs(data[abs(data - np.mean(data)) < 4 * np.std(data)])
    print('{} began work'.format(multiprocessing.current_process().name))
    data = np.abs(data)
    p = chi.fit(data)
    print('{} finished'.format(multiprocessing.current_process().name))
    return p
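
The snippet above only defines the worker: it expects a 1-D array, rectifies it and fits a chi distribution. A minimal driver is sketched below, assuming parallel_proc is defined at module level as shown; the synthetic signal, the split into 8 chunks and the pool size of 4 are illustrative choices, not part of the original.

import multiprocessing

import numpy as np

if __name__ == '__main__':
    # Synthetic 1-D signal split into equal chunks; each chunk is fitted
    # in its own worker process by parallel_proc defined above.
    signal = np.random.normal(size=80000)
    chunks = np.array_split(signal, 8)

    with multiprocessing.Pool(processes=4) as pool:
        fits = pool.map(parallel_proc, chunks)

    # Each element of fits is the (df, loc, scale) tuple returned by chi.fit.
    for params in fits:
        print(params)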
Example #2
import os.path as op

import numpy as np
import scipy.ndimage as nd
from scipy.stats import chi


def art_qi2(img, airmask, ncoils=12, erodemask=True):
    """
    Calculates **qi2**, the distance between the distribution
    of noise voxel (non-artifact background voxels) intensities, and a
    centered Chi distribution.

    :param numpy.ndarray img: input data
    :param numpy.ndarray airmask: input air mask without artifacts

    """
    from matplotlib import rc
    import seaborn as sn
    import matplotlib.pyplot as plt
    rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
    # rc('text', usetex=True)

    if erodemask:
        struc = nd.generate_binary_structure(3, 2)
        # Perform an opening operation on the background data.
        airmask = nd.binary_erosion(airmask, structure=struc).astype(np.uint8)

    # Artifact-free air region
    data = img[airmask > 0]
    data = data[data < np.percentile(data, 99.5)]
    maxvalue = int(data.max())
    nbins = maxvalue if maxvalue < 100 else 100

    # Estimate data pdf
    hist, bin_edges = np.histogram(data, density=True, bins=nbins)
    # Midpoints of consecutive bin edges (i:i+2 spans each pair of edges)
    bin_centers = [np.mean(bin_edges[i:i+2]) for i in range(len(bin_edges)-1)]
    max_pos = np.argmax(hist)

    # Fit central chi distribution
    param = chi.fit(data, 2*ncoils, loc=bin_centers[max_pos])
    pdf_fitted = chi.pdf(bin_centers, *param[:-2], loc=param[-2], scale=param[-1])

    # Write out figure of the fitting
    out_file = op.abspath('background_fit.png')
    fig = plt.figure()
    ax1 = fig.add_subplot(111)
    sn.distplot(data, bins=nbins, norm_hist=True, kde=False, ax=ax1)
    #_, bins, _ = ax1.hist(data, nbins, normed=True, color='gray', linewidth=0)
    ax1.plot(bin_centers, pdf_fitted, 'k--', linewidth=1.2)
    fig.suptitle('Noise distribution on the air mask, and fitted chi distribution')
    ax1.set_xlabel('Intensity')
    ax1.set_ylabel('Frequency')
    fig.savefig(out_file, format='png', dpi=300)
    plt.close()

    # Find t2 (intensity at half width, right side)
    ihw = 0.5 * hist[max_pos]
    t2idx = 0
    for i in range(max_pos + 1, len(bin_centers)):
        if hist[i] < ihw:
            t2idx = i
            break

    # Compute goodness-of-fit (gof)
    return (float(np.abs(hist[t2idx:] - pdf_fitted[t2idx:]).sum() /
                  len(pdf_fitted[t2idx:])), out_file)
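
This version both computes the goodness-of-fit and writes a diagnostic figure. A hedged way to exercise it end-to-end is to feed it synthetic data: a 3-D volume filled with chi-distributed noise and a mask marking part of it as artifact-free air. The volume shape, noise parameters and mask geometry below are illustrative assumptions, and the plotting inside art_qi2 relies on an older seaborn in which distplot is still available.

import numpy as np
from scipy.stats import chi

rng = np.random.default_rng(0)

# Synthetic volume: chi-distributed "scanner noise" everywhere, with
# degrees of freedom matching the 2 * ncoils used in the call below.
img = chi.rvs(24, scale=5.0, size=(64, 64, 32), random_state=rng)

# Mark two slabs at the edges of the volume as artifact-free air.
airmask = np.zeros(img.shape, dtype=np.uint8)
airmask[:8, :, :] = 1
airmask[-8:, :, :] = 1

gof, fig_path = art_qi2(img, airmask, ncoils=12)
print(gof, fig_path)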
Example #3
import numpy as np
from scipy.stats import chi


def art_qi2(img, airmask, artmask, ncoils=1):
    """
    Calculates **qi2**, the distance between the distribution
    of noise voxel (non-artifact background voxels) intensities, and a
    centered Chi distribution.

    :param numpy.ndarray img: input data
    :param numpy.ndarray airmask: input air mask without artifacts
    :param numpy.ndarray artmask: input artifacts mask

    """

    # Artifact-free air region
    data = img[airmask > 0]
    # Estimate data pdf
    hist, bin_edges = np.histogram(data, density=True, bins=128)
    bin_centers = [
        np.mean(bin_edges[i:i + 2]) for i in range(len(bin_edges) - 1)
    ]
    max_pos = np.argmax(hist)

    # Fit central chi distribution
    param = chi.fit(data, 2 * ncoils, loc=bin_centers[max_pos])
    pdf_fitted = chi.pdf(bin_centers,
                         *param[:-2],
                         loc=param[-2],
                         scale=param[-1])

    # Find t2 (intensity at half width, right side)
    ihw = 0.5 * hist[max_pos]
    t2idx = 0
    for i in range(max_pos + 1, len(bin_centers)):
        if hist[i] < ihw:
            t2idx = i
            break

    # Compute goodness-of-fit (gof), normalised by the size of the air mask,
    # and add art_qi1, which is defined elsewhere in the same module.
    gof = np.abs(hist[t2idx:] - pdf_fitted[t2idx:]).sum() / airmask.sum()
    return float(art_qi1(airmask, artmask) + gof)
Example #4
import json
import os.path as op
import sys

import numpy as np
import scipy.ndimage as nd
from scipy.stats import chi

PY3 = sys.version_info[0] >= 3  # compatibility flag, provided elsewhere in the original module


def art_qi2(img,
            airmask,
            ncoils=12,
            erodemask=True,
            out_file='qi2_fitting.txt',
            min_voxels=1e3):
    r"""
    Calculates :math:`\text{QI}_2`, based on the goodness-of-fit of a centered
    :math:`\chi^2` distribution onto the intensity distribution of
    non-artifactual background (within the "hat" mask):


    .. math ::

        \chi^2_n = \frac{2}{(\sigma \sqrt{2})^{2n} \, (n - 1)!}x^{2n - 1}\, e^{-\frac{x}{2}}

    where :math:`n` is the number of coil elements.

    :param numpy.ndarray img: input data
    :param numpy.ndarray airmask: input air mask without artifacts

    """
    out_file = op.abspath(out_file)
    open(out_file, 'a').close()

    if erodemask:
        struc = nd.generate_binary_structure(3, 2)
        # Perform an opening operation on the background data.
        airmask = nd.binary_erosion(airmask, structure=struc).astype(np.uint8)

    # Artifact-free air region
    data = img[airmask > 0]

    # Background can only be fit if we have a min number of voxels
    if len(data[data > 0]) < min_voxels:
        return 0.0, out_file

    # Estimate data pdf
    dmax = np.percentile(data[data > 0], 99.9)
    hist, bin_edges = np.histogram(data[data > 0],
                                   density=True,
                                   range=(0.0, dmax),
                                   bins='doane')
    bin_centers = [
        float(np.mean(bin_edges[i:i + 2])) for i in range(len(bin_edges) - 1)
    ]
    max_pos = np.argmax(hist)
    json_out = {'x': bin_centers, 'y': [float(v) for v in hist]}

    # Fit central chi distribution
    param = chi.fit(data[data > 0], 2 * ncoils, loc=bin_centers[max_pos])
    pdf_fitted = chi.pdf(bin_centers,
                         *param[:-2],
                         loc=param[-2],
                         scale=param[-1])
    json_out['y_hat'] = [float(v) for v in pdf_fitted]

    # Find t2 (intensity at half width, right side)
    ihw = 0.5 * hist[max_pos]
    t2idx = 0
    for i in range(max_pos + 1, len(bin_centers)):
        if hist[i] < ihw:
            t2idx = i
            break

    json_out['x_cutoff'] = float(bin_centers[t2idx])

    # Compute goodness-of-fit (gof)
    gof = float(
        np.abs(hist[t2idx:] - pdf_fitted[t2idx:]).sum() /
        len(pdf_fitted[t2idx:]))

    # Clip values for sanity
    gof = 1.0 if gof > 1.0 else gof
    gof = 0.0 if gof < 0.0 else gof
    json_out['gof'] = gof

    with open(out_file, 'w' if PY3 else 'wb') as ofd:
        json.dump(json_out, ofd)

    return gof, out_file
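
The output file written by this version is plain JSON: 'x' and 'y' hold the empirical histogram, 'y_hat' the fitted chi pdf, 'x_cutoff' the right-side half-width intensity and 'gof' the final score. A minimal sketch of reading that file back and plotting it, assuming matplotlib is available and the default file name was kept:

import json

import matplotlib.pyplot as plt

with open('qi2_fitting.txt') as fp:
    fit = json.load(fp)

fig, ax = plt.subplots()
ax.bar(fit['x'], fit['y'], width=fit['x'][1] - fit['x'][0], alpha=0.5,
       label='background histogram')
ax.plot(fit['x'], fit['y_hat'], 'k--', label='fitted chi pdf')
ax.axvline(fit['x_cutoff'], color='r', label='half-width cutoff')
ax.set_xlabel('Intensity')
ax.set_ylabel('Density')
ax.set_title('QI2 goodness-of-fit = {:.4f}'.format(fit['gof']))
ax.legend()
plt.show()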
Example #5
import json
import os

import matplotlib.pyplot as plt
import numpy as np
from scipy.stats import chi, norm

data_path = r'C:\Users\win10\Desktop\Projects\CYB\Experiment_Balint\CYB005\Data'
n_channels = 8
X = np.empty((n_channels, 0))
for i, file in enumerate(
        sorted([f for f in os.listdir(data_path) if f.endswith('.json')])):
    with open(data_path + '\\' + file) as json_file:
        dict_data = json.load(json_file)
        X = np.concatenate((X, dict_data["EMG"]), axis=1)
    if i >= 9:
        break

print("Loaded")
data = X[0, :]
data = (data - np.mean(data)) / np.std(data)
data = np.abs(data[abs(data - np.mean(data)) < 4 * np.std(data)])

params = chi.fit(data)

# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]

# Calculate fitted PDF and error with fit in distribution
transf = chi.cdf(data, loc=loc, scale=scale, *arg)
transf = norm.ppf(transf)
plt.figure()
plt.hist(transf, bins=50)

plt.show()
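
The last two steps are a probability integral transform: chi.cdf maps the samples to roughly uniform values under the fitted model, and norm.ppf turns those into values that should look standard normal if the chi fit is good. Rather than only inspecting the histogram, the fit can be quantified with a Kolmogorov-Smirnov test; the block below is a hedged add-on to the script above.

import numpy as np
from scipy.stats import kstest

# Drop any +/- inf produced when chi.cdf saturates at exactly 0 or 1.
finite = transf[np.isfinite(transf)]

# A small KS statistic (large p-value) suggests the fitted chi distribution
# describes the standardized, rectified EMG amplitudes reasonably well.
stat, pvalue = kstest(finite, 'norm')
print('KS statistic: {:.4f}, p-value: {:.4g}'.format(stat, pvalue))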
Example #6
import json
import os.path as op
import sys

import numpy as np
import scipy.ndimage as nd
from scipy.stats import chi

PY3 = sys.version_info[0] >= 3  # compatibility flag, provided elsewhere in the original module


def art_qi2(img, airmask, ncoils=12, erodemask=True, out_file='qi2_fitting.txt'):
    """
    Calculates **qi2**, the distance between the distribution
    of noise voxel (non-artifact background voxels) intensities, and a
    centered Chi distribution.

    :param numpy.ndarray img: input data
    :param numpy.ndarray airmask: input air mask without artifacts

    """
    out_file = op.abspath(out_file)
    open(out_file, 'a').close()

    if erodemask:
        struc = nd.generate_binary_structure(3, 2)
        # Perform an opening operation on the background data.
        airmask = nd.binary_erosion(airmask, structure=struc).astype(np.uint8)

    # Artifact-free air region
    data = img[airmask > 0]

    if np.all(data <= 0):
        return 0.0, out_file

    # Compute an upper bound threshold
    thresh = np.percentile(data[data > 0], 99.5)

    # If thresh is too low, for some reason there is no noise
    # in the background image (image was preprocessed, etc)
    if thresh < 1.0:
        return 0.0, out_file

    # Threshold image
    data = data[data < thresh]

    maxvalue = int(data.max())
    nbins = maxvalue if maxvalue < 100 else 100

    # Estimate data pdf
    hist, bin_edges = np.histogram(data, density=True, bins=nbins)
    bin_centers = [float(np.mean(bin_edges[i:i+2])) for i in range(len(bin_edges)-1)]
    max_pos = np.argmax(hist)
    json_out = {
        'x': bin_centers,
        'y': [float(v) for v in hist]
    }

    # Fit central chi distribution
    param = chi.fit(data, 2*ncoils, loc=bin_centers[max_pos])
    pdf_fitted = chi.pdf(bin_centers, *param[:-2], loc=param[-2], scale=param[-1])
    json_out['y_hat'] = [float(v) for v in pdf_fitted]

    # Find t2 (intensity at half width, right side)
    ihw = 0.5 * hist[max_pos]
    t2idx = 0
    for i in range(max_pos + 1, len(bin_centers)):
        if hist[i] < ihw:
            t2idx = i
            break

    json_out['x_cutoff'] = float(bin_centers[t2idx])

    # Compute goodness-of-fit (gof)
    gof = float(np.abs(hist[t2idx:] - pdf_fitted[t2idx:]).sum() / len(pdf_fitted[t2idx:]))

    # Clip values for sanity
    gof = 1.0 if gof > 1.0 else gof
    gof = 0.0 if gof < 0.0 else gof
    json_out['gof'] = gof

    with open(out_file, 'w' if PY3 else 'wb') as ofd:
        json.dump(json_out, ofd)

    return gof, out_file
Example #7
import json
import os.path as op
import sys

import numpy as np
import scipy.ndimage as nd
from scipy.stats import chi

PY3 = sys.version_info[0] >= 3  # compatibility flag, provided elsewhere in the original module


def art_qi2(img, airmask, ncoils=12, erodemask=True,
            out_file='qi2_fitting.txt', min_voxels=1e3):
    r"""
    Calculates :math:`\text{QI}_2`, based on the goodness-of-fit of a centered
    :math:`\chi^2` distribution onto the intensity distribution of
    non-artifactual background (within the "hat" mask):


    .. math ::

        \chi^2_n = \frac{2}{(\sigma \sqrt{2})^{2n} \, (n - 1)!}x^{2n - 1}\, e^{-\frac{x}{2}}

    where :math:`n` is the number of coil elements.

    :param numpy.ndarray img: input data
    :param numpy.ndarray airmask: input air mask without artifacts

    """
    out_file = op.abspath(out_file)
    open(out_file, 'a').close()

    if erodemask:
        struc = nd.generate_binary_structure(3, 2)
        # Perform an opening operation on the background data.
        airmask = nd.binary_erosion(airmask, structure=struc).astype(np.uint8)

    # Artifact-free air region
    data = img[airmask > 0]

    # Background can only be fit if we have a min number of voxels
    if len(data[data > 0]) < min_voxels:
        return 0.0, out_file

    # Estimate data pdf
    dmax = np.percentile(data[data > 0], 99.9)
    hist, bin_edges = np.histogram(data[data > 0], density=True,
                                   range=(0.0, dmax), bins='doane')
    bin_centers = [float(np.mean(bin_edges[i:i+2])) for i in range(len(bin_edges)-1)]
    max_pos = np.argmax(hist)
    json_out = {
        'x': bin_centers,
        'y': [float(v) for v in hist]
    }

    # Fit central chi distribution
    param = chi.fit(data[data > 0], 2*ncoils, loc=bin_centers[max_pos])
    pdf_fitted = chi.pdf(bin_centers, *param[:-2], loc=param[-2], scale=param[-1])
    json_out['y_hat'] = [float(v) for v in pdf_fitted]

    # Find t2 (intensity at half width, right side)
    ihw = 0.5 * hist[max_pos]
    t2idx = 0
    for i in range(max_pos + 1, len(bin_centers)):
        if hist[i] < ihw:
            t2idx = i
            break

    json_out['x_cutoff'] = float(bin_centers[t2idx])

    # Compute goodness-of-fit (gof)
    gof = float(np.abs(hist[t2idx:] - pdf_fitted[t2idx:]).sum() / len(pdf_fitted[t2idx:]))

    # Clip values for sanity
    gof = 1.0 if gof > 1.0 else gof
    gof = 0.0 if gof < 0.0 else gof
    json_out['gof'] = gof

    with open(out_file, 'w' if PY3 else 'wb') as ofd:
        json.dump(json_out, ofd)

    return gof, out_file
Example #8

from statistics import pstdev

import matplotlib.pyplot as plt
import numpy as np
from scipy.stats import chi

# losses_history is assumed to be a 1-D sequence of per-iteration losses.
fig, ax = plt.subplots()
ax.plot(np.arange(len(losses_history)), losses_history)
avg = np.mean(losses_history)
plt.text(0.95,
         0.95,
         'Mean Losses = ' + str(avg),
         horizontalalignment='right',
         verticalalignment='top',
         transform=ax.transAxes)
std = pstdev(losses_history)
plt.text(0.95,
         0.85,
         'Stdev Losses = ' + str(std),
         horizontalalignment='right',
         verticalalignment='top',
         transform=ax.transAxes)
chi_r = chi.fit(losses_history)
avg_r = chi.mean(chi_r[0], loc=chi_r[1], scale=chi_r[2])
plt.text(0.95,
         0.75,
         'Overall Mean Losses (Chi fit) = ' + str(avg_r),
         horizontalalignment='right',
         verticalalignment='top',
         transform=ax.transAxes)
std_r = chi.std(chi_r[0], loc=chi_r[1], scale=chi_r[2])
plt.text(0.95,
         0.65,
         'Overall STDEV Losses (Chi fit) = ' + str(std_r),
         horizontalalignment='right',
         verticalalignment='top',
         transform=ax.transAxes)
int_r = chi.interval(0.95, chi_r[0], loc=chi_r[1], scale=chi_r[2])