def findPeakPyVersion(dataSample):
    # dataSample is a 1 x N row vector; collect the values of its local maxima
    peakPoints = []
    for i in narange(1., dataSample.shape[1] - 1):
        if ((dataSample[0, int(i - 1)] < dataSample[0, int(i)])
                and (dataSample[0, int(i)] > dataSample[0, int(i + 1)])):
            peakPoints.append(dataSample[0, int(i)])
    return np.array([peakPoints])
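
# Usage sketch (not part of the original example); assumes numpy is available
# and that `np`/`narange` used inside findPeakPyVersion resolve at module level.
import numpy as np
from numpy import arange as narange

_sample = np.array([[0., 1., 0., 2., 1., 3., 0.]])
print(findPeakPyVersion(_sample))  # expected: [[1. 2. 3.]]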
def _convert_to_onehot(labels):
    # use the original numpy functions
    from numpy import zeros as nzeros
    from numpy import arange as narange
    # to one-hot
    new_labels = nzeros((labels.size, labels.max() + 1))
    new_labels[narange(labels.size), labels] = 1.
    return new_labels
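
# Usage sketch (not part of the original example): one-hot encode a small
# integer label vector (assumes numpy is installed).
import numpy as np

_labels = np.array([0, 2, 1, 2])
print(_convert_to_onehot(_labels))
# expected:
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]
#  [0. 0. 1.]]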
def calculateTripleFrequency(remainder, strobe):
    a = np.array([1., 2., 3.])
    remainder1 = remainder.copy()
    remainder2 = remainder1.copy() - 0.6
    strobeLen = strobe.shape[1]
    aLen = a.shape[0]
    fre_1 = nzeros([strobeLen, aLen], dtype=float)
    for i in narange(1., (strobeLen) + 1):
        for j in narange(1., (aLen) + 1):
            # strobe is assumed to be a 1 x N row vector, hence strobe[0, i-1]
            fre_1[int(i) - 1, int(j) - 1] = ndot(strobe[0, int(i) - 1],
                                                 a[int(j) - 1])

    t = np.shape(fre_1)  # np.shape returns a plain tuple, so index it as t[0], t[1]
    iter = 1.
    remainder2Len = remainder2.shape[1]
    freq_plus = nzeros([t[0] * t[1], remainder2Len])
    freq_minus = nzeros([t[0] * t[1], remainder2Len])
    freq_plus_shift_negative1 = nzeros([t[0] * t[1], remainder2Len])
    freq_plus_shift_negative2 = nzeros([t[0] * t[1], remainder2Len])

    for i in narange(1., (t[0]) + 1):
        for j in narange(1., (t[1]) + 1):
            fre_11 = fre_1[int(i) - 1, int(j) - 1].copy()
            # remainder2[i-1, :] is a 1-D row, so index it with a single subscript
            remainder_1 = remainder2[int(i) - 1, :].copy()
            for k in narange(1., remainder2Len + 1):
                r_k = remainder_1[int(k) - 1]
                if r_k > 0.:
                    freq_plus[int(iter) - 1, int(k) - 1] = fre_11 + r_k
                    freq_minus[int(iter) - 1, int(k) - 1] = fre_11 - r_k
                    freq_plus_shift_negative1[int(iter) - 1,
                                              int(k) - 1] = fre_11 + 30. - r_k
                    freq_plus_shift_negative2[int(iter) - 1,
                                              int(k) - 1] = fre_11 - 30. - r_k

            iter = iter + 1.

    est_fre = np.vstack((freq_plus, freq_minus, freq_plus_shift_negative1,
                         freq_plus_shift_negative2))
    frequency1 = nround(est_fre)
    frequency2 = nfloor(est_fre)
    return [frequency1, frequency2]
def checkValidity_MultiFrquency(X1, Y1, f1, NFFT1):
    d1_list = []
    d2_list = []
    g2_list = []

    half = int(NFFT1 / 2. + 1.)
    # np.sort has no 'descend' option: sort ascending along the row, then reverse
    peak1 = np.sort(findPeakPyVersion(2. * X1[:, 0:half]))[:, ::-1]
    peak1Len = peak1.shape[1]
    chk_val1 = nzeros([peak1Len, 1])
    index1 = nzeros([peak1Len, 1])
    for i in narange(1., (peak1Len + 1)):
        chk_val1[int(i) - 1, :] = peak1[0, int(i) - 1] / nmean(
            peak1[0, int(i):peak1Len])
        if chk_val1[int(i) - 1, :] > 2.:
            # index of the matching bin in the first half of the spectrum
            index1[int(i) - 1, :] = np.nonzero(
                X1[0, 0:half] == peak1[0, int(i) - 1] / 2.)[0][0]
            d1_list.append(f1[int(index1[int(i) - 1, 0])])

    mean1 = nmean(peak1[0, 0:peak1Len])
    k = 1.
    peak2 = np.sort(findPeakPyVersion(2. * Y1[:, 0:half]))[:, ::-1]
    peak2Len = peak2.shape[1]
    chk_val = nzeros([peak2Len, 1])
    index2 = nzeros([peak2Len, 1])
    for i in narange(1., peak2Len):
        chk_val[int(i) - 1, :] = peak2[0, int(i) - 1] / nmean(
            peak2[0, int(i):peak2Len])
        if chk_val[int(i) - 1, :] > 3.:
            g2_list.append(chk_val[int(i) - 1, :])
            index2[int(i) - 1, :] = np.nonzero(
                Y1[0, 0:half] == peak2[0, int(i) - 1] / 2.)[0][0]
            d2_list.append(f1[int(index2[int(i) - 1, 0])])

    d2 = np.array([d2_list])
    d1 = np.array([d1_list])
    g2 = np.array([g2_list])

    g1 = nmean(g2)

    return [g1, d1, d2]
def velocityPendCenter(pend_centers):
    time = 1. / 30.
    [r, c] = np.shape(pend_centers)
    VelocityMarker = nzeros([r, c], dtype=float)
    for i in narange(2., (r) + 1):
        VelocityMarker[int(i) - 1, int((c - 1.)) - 1] = (
            pend_centers[int(i) - 1, int((c - 1.)) - 1] - pend_centers[int(
                (i - 1.)) - 1, int((c - 1.)) - 1]) / time
        VelocityMarker[int(i) - 1, int(c) -
                       1] = (pend_centers[int(i) - 1, int(c) - 1] -
                             pend_centers[int(
                                 (i - 1.)) - 1, int(c) - 1]) / time

    return VelocityMarker
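
# Usage sketch (not part of the original example): velocities of the last two
# columns of an r x c array of pendulum centers sampled at 30 frames/second
# (assumes `np`, `nzeros` and `narange` are the numpy aliases used above).
import numpy as np
from numpy import zeros as nzeros
from numpy import arange as narange

_centers = np.array([[0., 0.], [1., 2.], [3., 5.]])
print(velocityPendCenter(_centers))  # rows 2..r hold finite differences / (1/30)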
def FFT_MultiFrequency_update(s1, s2):
    Fs = np.array([[31.]])  # Sampling frequency
    T = 1. / Fs  # Sample time
    L = 512.  # Length of signal
    t = ndot(narange(0., L), T[0, 0])

    NFFT1 = float(pow(2, pyNextPow2(L)))  # Next power of 2 from length of y
    s1[0, :] = s1[0, :] - nmean(s1[0, :])
    s2[0, :] = s2[0, :] - nmean(s2[0, :])
    X1 = nabs(np.fft.fft(s1, int(NFFT1)) / L)
    Y1 = nabs(np.fft.fft(s2, int(NFFT1)) / L)
    f1 = Fs[0, 0] / 2. * np.linspace(0., 1., int(NFFT1 / 2. + 1.))

    [g1, d1, d2] = checkValidity_MultiFrquency(X1, Y1, f1, NFFT1)
    return [d1, X1, Y1, f1, NFFT1, d2, g1]
def mfreq_simulate2(frequency):
    nfreq = 4.
    nsampl = 33.
    sampling_option = 0.
    cam_fps = 15.
    prime1 = np.array([
        61., 67., 71., 73., 79., 83., 89., 97., 101., 103., 107., 109., 113.,
        127., 131., 137., 139., 149., 151., 157., 163., 167., 173., 179., 181.,
        191., 193., 197., 199., 211., 223., 227., 229.
    ])
    sFile = open('strobe_file.txt', 'w+')
    freq = np.random.rand(1, int(nfreq))
    frequencies = np.array([70., 100., 170., 230.])
    if sampling_option == 1.:
        sampling = nzeros([int(nsampl)])
        for i in narange(1., (nsampl) + 1):
            stri = "give " + str(int(i)) + "th frequency of strobe"
            print(stri)
            sampling[int(i) - 1] = float(input())

    else:
        sampling = prime1.copy()

    print(sampling)
    remainder = nzeros([int(nfreq), int(nsampl)])
    remainder2 = remainder.copy()
    for j in narange(1., (nsampl) + 1):
        for i in narange(1., (nfreq) + 1):
            if nmod(
                    nfloor((np.minimum(
                        nmod(frequencies[int(i) - 1], sampling[int(j) - 1]),
                        (sampling[int(j) - 1] - nmod(frequencies[int(i) - 1],
                                                     sampling[int(j) - 1]))) /
                            cam_fps)), 2.) == 0.:
                remainder[int(i) - 1, int(j) - 1] = nmod(
                    np.minimum(
                        nmod(frequencies[int(i) - 1], sampling[int(j) - 1]),
                        (sampling[int(j) - 1] - np.mod(frequencies[int(i) - 1],
                                                       sampling[int(j) - 1]))),
                    cam_fps)
            else:
                remainder[int(i) - 1, int(j) - 1] = 15. - nmod(
                    np.minimum(
                        nmod(frequencies[int(i) - 1], sampling[int(j) - 1]),
                        (sampling[int(j) - 1] - np.mod(frequencies[int(i) - 1],
                                                       sampling[int(j) - 1]))),
                    cam_fps)

        remainder2[:, int(j) - 1] = np.sort(remainder[:, int(j) - 1])

    sFile.write("%f\n" % nfreq)
    sFile.write("%f\n" % nsampl)

    for j in narange(1., (nsampl) + 1):
        sFile.write("%f " % sampling[int(j) - 1])

    sFile.write("\n")
    for i in narange(1., (nfreq) + 1):
        for j in np.arange(1., (nsampl) + 1):
            sFile.write('%8.2f ' % remainder2[int(i) - 1, int(j) - 1])
        sFile.write('\n')

    sFile.close()
    return [frequencies, sampling]
def do_plot_dendrogram(data,
                       nclass=None,
                       datalinkg=None,
                       indnames=None,
                       method='ward',
                       metric='euclidean',
                       truncate_mode=None,
                       title="dendrogram",
                       titlefnsize=14,
                       ytitle=0.98,
                       xlabel=None,
                       xlabelpad=10,
                       xlabelrotation=0,
                       ylabel=None,
                       ylabelpad=10,
                       ylabelrotation=90,
                       labelfnsize=10,
                       labelrotation=0,
                       labelsize=10,
                       labelha='center',
                       labelva='top',
                       dendro_linewidth=2,
                       tickpad=2,
                       axeshiftfactor=150,
                       figsize=(14, 6),
                       wspace=0.0,
                       hspace=0.2,
                       top=0.92,
                       bottom=0.12,
                       left=0.05,
                       right=0.99):
    """
    plot SOM dendrogram
    """

    if datalinkg is None:
        # Performs hierarchical/agglomerative clustering on the condensed distance matrix data
        datalinkg = linkage(data, method=method, metric=metric)
    #
    Ncell = data.shape[0]
    minref = np.min(data)
    maxref = np.max(data)
    #
    fig = plt.figure(figsize=figsize, facecolor='w')
    fignum = fig.number  # current figure number
    plt.subplots_adjust(wspace=wspace,
                        hspace=hspace,
                        top=top,
                        bottom=bottom,
                        left=left,
                        right=right)
    #
    if nclass is None:
        # dendrogram without setting color_threshold (keep the default)
        R_ = dendrogram(datalinkg,
                        p=Ncell,
                        truncate_mode=truncate_mode,
                        orientation='top',
                        leaf_font_size=6,
                        labels=indnames,
                        leaf_rotation=labelrotation)
    else:
        # compute the cut height from the requested number of classes (clusters)
        max_d = np.sum(datalinkg[[-nclass + 1, -nclass], 2]) / 2
        color_threshold = max_d

        with plt.rc_context({'lines.linewidth': dendro_linewidth
                             }):  # Temporarily override the default line width
            R_ = dendrogram(datalinkg,
                            p=Ncell,
                            truncate_mode=truncate_mode,
                            color_threshold=color_threshold,
                            orientation='top',
                            leaf_font_size=6,
                            labels=indnames,
                            leaf_rotation=labelrotation)

        plt.axhline(y=max_d, c='k')

    plt.tick_params(axis='x', reset=True)
    plt.tick_params(
        axis='x',
        which='major',
        direction='inout',
        length=7,
        width=dendro_linewidth,
        pad=tickpad,
        top=False,
        bottom=True,  # rotation_mode='anchor',
        labelrotation=labelrotation,
        labelsize=labelsize)

    if indnames is None:
        L_ = np.arange(Ncell)
    else:
        L_ = np.array(indnames)
    plt.xticks((np.arange(Ncell) * 10) + 5,
               L_[R_['leaves']],
               horizontalalignment=labelha,
               verticalalignment=labelva)
    #
    plt.grid(axis='y')
    if xlabel is not None:
        plt.xlabel(xlabel,
                   labelpad=xlabelpad,
                   rotation=xlabelrotation,
                   fontsize=labelfnsize)
    if ylabel is not None:
        plt.ylabel(ylabel,
                   labelpad=ylabelpad,
                   rotation=ylabelrotation,
                   fontsize=labelfnsize)
    if axeshiftfactor is not None:
        lax = plt.axis()
        daxy = (lax[3] - lax[2]) / axeshiftfactor
        plt.axis([lax[0], lax[1], lax[2] - daxy, lax[3]])
    plt.title(title, fontsize=titlefnsize, y=ytitle)

    return R_
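
# Usage sketch (not part of the original example): do_plot_dendrogram assumes
# numpy as np, matplotlib.pyplot as plt, and scipy's linkage/dendrogram are
# importable at module level, e.g.:
import numpy as np
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import linkage, dendrogram

_data = np.random.rand(20, 5)  # 20 observations, 5 features
_R = do_plot_dendrogram(_data, nclass=3, title="example dendrogram")
plt.show()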