Example #1
def main():
    n_samples = 4096
    beta = [0, 1, 2]
    p = [0.52, 0.62, 0.72]
    slope = [-1.66, -0.45, -0.75]
    # The exercise asks for 3.85, but 4 is pure chaos and the figure comes out perfect
    rho = 3.85
    a0 = 0.001
    # Exercise 1.1
    S1 = powernoise(beta[0], n_samples)
    S2 = powernoise(beta[1], n_samples)
    S3 = powernoise(beta[2], n_samples)
    # Exercise 1.2
    S4 = logistic(rho, a0, n_samples)
    # Exercise 1.3
    S5 = pre.standardize(S1 + S4)
    S6 = pre.standardize(S2 + S4)
    S7 = pre.standardize(S3 + S4)
    # Exercise 1.4
    S8 = pmodel(noValues=n_samples, p=p[0], slope=slope[0])
    S9 = pmodel(noValues=n_samples, p=p[1], slope=slope[1])
    S10 = pmodel(noValues=n_samples, p=p[2], slope=slope[2])

    plt.figure(1)
    plt.plot(S4[0:n_samples - 1], S4[1:n_samples], 'b*')
    plt.xlabel('A[n]')
    plt.ylabel('A[n+1]')
    plt.title('Logistic Map')
    plt.grid(True)
    plt.show()
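The `logistic` generator used throughout these examples is not shown on the page. A minimal sketch, assuming it iterates the standard logistic map a[n+1] = rho * a[n] * (1 - a[n]) from the seed a0 (the plot of A[n] vs A[n+1] above suggests exactly this recurrence):

import numpy as np

def logistic(rho, a0, n_samples):
    # Iterate the logistic map a[n+1] = rho * a[n] * (1 - a[n]), starting from a0
    a = np.empty(n_samples)
    a[0] = a0
    for i in range(1, n_samples):
        a[i] = rho * a[i - 1] * (1 - a[i - 1])
    return a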
Example #2
def main():
    n_samples = 4096
    beta = [0, 1, 2]
    p = [0.52, 0.62, 0.72]
    slope = [-1.66, -0.45, -0.75]
    rho = 4
    a0 = 0.001
    S1 = powernoise(beta[0], n_samples)
    S2 = powernoise(beta[1], n_samples)
    S3 = powernoise(beta[2], n_samples)
    S4 = logistic(rho, a0, n_samples)
    S5 = pre.standardize(S1 + S4)
    S6 = pre.standardize(S2 + S4)
    S7 = pre.standardize(S3 + S4)
    S8 = pmodel(noValues=n_samples, p=p[0], slope=slope[0])
    S9 = pmodel(noValues=n_samples, p=p[1], slope=slope[1])
    S10 = pmodel(noValues=n_samples, p=p[2], slope=slope[2])

    plt.figure(1)
    plt.plot(S4[0:n_samples-1], S4[1:n_samples], 'b*')
    plt.xlabel('A[n]')
    plt.ylabel('A[n+1]')
    plt.title('Logistic Map')
    plt.grid(True)
    plt.show()
Example #3
def compute_physics(tx, index_A, index_B, index_C, mean=[], std=[]):
    """add the features that have physics means,
    using the particle mass (index_A = 0) as the weight,
    and then standardize the features and return mean and std
    """
    tx_new = tx[:, index_A] * tx[:, index_B] / tx[:, index_C]
    return standardize(tx_new, mean, std)
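The `standardize` helper called by these compute_* functions is not included in the snippets. A minimal sketch, assuming the convention implied by the docstring: estimate the mean/std when none are supplied, otherwise reuse the given values, and return the scaled data together with both statistics (the exact return order is an assumption):

import numpy as np

def standardize(x, mean=[], std=[]):
    # Assumed behaviour: compute the statistics from x when none are passed,
    # otherwise reuse the supplied ones (e.g. those fitted on the training set)
    if np.size(mean) == 0:
        mean = np.mean(x, axis=0)
    if np.size(std) == 0:
        std = np.std(x, axis=0)
    return (x - mean) / std, mean, std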
Example #4
def main():
    n_samples = 1024
    # Red noise
    S3 = powernoise(2, n_samples)

    # Chaos, used to generate signal S7
    rho = 3.85
    a0 = 0.001
    S4 = logistic(rho, a0, n_samples)

    # Sum the signals and normalize so that <A> = 0 and std = 1
    S7 = pre.standardize(S3 + S4)

    # Signal generated by the p-model
    S8 = pmodel(noValues=n_samples, p=0.52, slope=-1.66)

    data = S8

    z = np.linspace(0, 1024, 1024)

    data_norm = waipy.normalize(data)
    result = waipy.cwt(data_norm,
                       1,
                       1,
                       0.125,
                       2,
                       4 / 0.125,
                       0.72,
                       6,
                       mother='h',
                       name='S8')
    waipy.wavelet_plot('S8', z, data_norm, 0.03125, result)
Example #5
def main():
    n_samples = 1024
    # Red noise
    S3 = powernoise(2, n_samples)

    # Chaos, used to generate signal S7
    rho = 3.85
    a0 = 0.001
    S4 = logistic(rho, a0, n_samples)

    # Sum the signals and normalize so that <A> = 0 and std = 1
    S7 = pre.standardize(S3 + S4)

    # Signal generated by the p-model
    S8 = pmodel(noValues=n_samples, p=0.52, slope=-1.66)

    # Compute and plot the SOC statistics for each signal
    gammaS3, P_gammaS3, niS3 = SOC(S3)
    plt.plot(np.log10(P_gammaS3), np.log10(niS3))
    plt.show()

    gammaS7, P_gammaS7, niS7 = SOC(S7)
    plt.plot(np.log10(P_gammaS7), np.log10(niS7))
    plt.show()

    gammaS8, P_gammaS8, niS8 = SOC(S8)
    plt.plot(np.log10(P_gammaS8), np.log10(niS8))
    plt.show()
Example #6
def main():
    # White noise (beta = 0), standardized to zero mean and unit variance
    S1 = powernoise(0, 2048)
    S1 = pre.standardize(S1)

    # Fit each candidate distribution to S1; the last entry (st.norm) is the
    # fit compared against the reference normal PDF in the plot below
    fitted_pdf = np.zeros((9, 2048))
    distribution = [
        st.uniform, st.norm, st.beta, st.laplace, st.gamma, st.expon, st.chi2,
        st.cauchy, st.norm
    ]
    for n in range(0, 9, 1):
        fitted_pdf[n, :] = fit_distribution(S1, distribution[n])

    x = np.linspace(-5, 5, 2048)
    normal_pdf = st.norm.pdf(x)

    plt.plot(x,
             fitted_pdf[8, :],
             "red",
             label="Fitted normal dist",
             linestyle="--",
             linewidth=2)
    plt.plot(x,
             normal_pdf,
             "blue",
             label="Normal dist",
             linestyle=":",
             linewidth=2)
    plt.hist(S1, density=1, color="cyan", label="Data", alpha=.5)
    plt.title("Normal distribution fitting")
    plt.legend()
    plt.show()
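`fit_distribution` is not part of the snippet. A minimal sketch, assuming it fits each scipy.stats distribution to the data by maximum likelihood and returns the fitted PDF evaluated on the same 2048-point grid the plot uses:

import numpy as np

def fit_distribution(data, dist, n_points=2048):
    # Fit the distribution parameters to the data (maximum likelihood)
    params = dist.fit(data)
    # Evaluate the fitted PDF on the grid used for plotting (-5 to 5)
    x = np.linspace(-5, 5, n_points)
    return dist.pdf(x, *params)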
Example #7
def compute_theta(tx, index_theta, mean=[], std=[]):
    """compute the cosine value of the angle-like features,
    and then standardize the features and return mean and std
    """
    tx_new = np.cos(tx[:, index_theta])
    return standardize(tx_new, mean, std)
Example #8
def compute_log(tx, index_log, mean=[], std=[]):
    """compute the log value of the given features,
    and then standardize the features and return mean and std
    """
    tx_new = np.log10(3 + abs(tx[:, index_log]))
    return standardize(tx_new, mean, std)
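A possible usage pattern for these compute_* helpers, assuming the standardize convention sketched after Example #3 (scaled data returned together with the mean/std it used). The arrays and index choices below are made up for illustration: the statistics are fitted on the training rows and reused on the test rows.

import numpy as np

# Illustrative data only; real feature matrices come from the dataset loader
train_tx = np.array([[1.0, -2.0, 3.0], [4.0, 5.0, -6.0], [7.0, 8.0, 9.0]])
test_tx = np.array([[10.0, -11.0, 12.0]])

# Fit the scaling on the training features, then reuse mean/std on the test set
train_log, mu, sigma = compute_log(train_tx, index_log=[0, 2])
test_log, _, _ = compute_log(test_tx, index_log=[0, 2], mean=mu, std=sigma)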
Example #9
def main():
    """Funcao com o codigo principal do programa."""

    print("\nData Analysis for 3DBMO simulations...\n")

    # Disable NumPy error messages (warnings)
    old_settings = np.seterr(divide='ignore', invalid='ignore', over='ignore')

    # Load the data file
    nomeArquivo = 'surftemp504.txt'
    data = np.genfromtxt(nomeArquivo, dtype='float32', filling_values=0)

    # Number of samples in the signal
    n_samples = 1024

    # Red noise
    S3 = powernoise(2, n_samples)

    # Chaos, used to generate signal S7
    rho = 3.85
    a0 = 0.001
    S4 = logistic(rho, a0, n_samples)

    # Sum the signals and normalize so that <A> = 0 and std = 1
    S7 = pre.standardize(S3 + S4)

    # Signal generated by the p-model
    S8 = pmodel(noValues=n_samples, p=0.52, slope=-1.66)

    # data = S8

    # Show the first N values of the file
    N = 10
    print("Original time series data (%d points): \n" % (len(data)))
    print("First %d points: %s\n" % (N, data[0:10]))
    print()

    #-----------------------------------------------------------------
    # General plotting parameters
    #-----------------------------------------------------------------

    # Set up the subplots
    fig = plt.figure()
    fig.subplots_adjust(hspace=.3, wspace=.2)

    # Font sizes
    tamanhoFonteEixoX = 16
    tamanhoFonteEixoY = 16
    tamanhoFonteTitulo = 16
    tamanhoFontePrincipal = 25

    # Main title
    tituloPrincipal = '3DBMO Time Series Analysis'

    #-----------------------------------------------------------------
    # Plot of the original series
    #-----------------------------------------------------------------

    # Plotting colors
    corSerieOriginal = 'r'

    # Axis labels for the original series
    textoEixoX = 'Tempo'
    textoEixoY = 'Amplitude'
    textoTituloOriginal = 'Original Time Series Data'

    print("1. Plotting time series data...")

    # Plot the data series
    #O = fig.add_subplot(1, 3, 1)
    O = fig.add_subplot(2, 1, 1)
    O.plot(data, '-', color=corSerieOriginal)
    O.set_title(textoTituloOriginal, fontsize=tamanhoFonteTitulo)
    O.set_xlabel(textoEixoX, fontsize=tamanhoFonteEixoX)
    O.set_ylabel(textoEixoY, fontsize=tamanhoFonteEixoY)
    O.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
    O.grid()

    #-----------------------------------------------------------------
    # Compute and plot the PSD
    #-----------------------------------------------------------------

    # Compute the PSD
    freqs, power, xdata, ydata, amp, index, powerlaw, INICIO, FIM = psd(data)

    # The beta value corresponds to the fitted index
    b = index

    # Plotting colors
    corPSD1 = 'k'
    corPSD2 = 'navy'

    # Axis labels for the PSD
    textoPSDX = 'Frequencia (Hz)'
    textoPSDY = 'Potencia'
    textoTituloPSD = r'Power Spectrum Density $\beta$ = '

    print("2. Plotting Power Spectrum Density...")

    # Plot the PSD
    PSD = fig.add_subplot(2, 2, 3)
    PSD.plot(freqs, power, '-', color=corPSD1, alpha=0.7)
    PSD.plot(xdata, ydata, color=corPSD2, alpha=0.8)
    PSD.axvline(freqs[INICIO], color=corPSD2, linestyle='--')
    PSD.axvline(freqs[FIM], color=corPSD2, linestyle='--')
    PSD.plot(xdata,
             powerlaw(xdata, amp, index),
             'r-',
             linewidth=1.5,
             label='$%.4f$' % (b))
    PSD.set_xlabel(textoPSDX, fontsize=tamanhoFonteEixoX)
    PSD.set_ylabel(textoPSDY, fontsize=tamanhoFonteEixoY)
    PSD.set_title(textoTituloPSD + '%.4f' % (b),
                  loc='center',
                  fontsize=tamanhoFonteTitulo)
    PSD.set_yscale('log')
    PSD.set_xscale('log')
    PSD.grid()

    #-----------------------------------------------------------------
    # Compute and plot the DFA
    #-----------------------------------------------------------------

    # Compute the 1D DFA
    alfa, vetoutput, x, y, reta, erro = dfa1d(data, 1)

    # Axis labels and title for the DFA plot (needed by both branches below)
    textoDFAX = '$log_{10}$ (s)'
    textoDFAY = '$log_{10}$ F(s)'
    textoTituloDFA = r'Detrended Fluctuation Analysis $\alpha$ = '

    # Check whether the DFA returned a valid value;
    # if so, plot it
    if not math.isnan(alfa):

        # Plotting color
        corDFA = 'darkmagenta'

        print("3. Plotting Detrended Fluctuation Analysis...")

        # Plot the DFA
        DFA = fig.add_subplot(2, 2, 4)
        DFA.plot(x,
                 y,
                 's',
                 color=corDFA,
                 markersize=4,
                 markeredgecolor='r',
                 markerfacecolor='None',
                 alpha=0.8)
        DFA.plot(x, reta, '-', color=corDFA, linewidth=1.5)
        DFA.set_title(textoTituloDFA + '%.4f' % (alfa),
                      loc='center',
                      fontsize=tamanhoFonteTitulo)
        DFA.set_xlabel(textoDFAX, fontsize=tamanhoFonteEixoX)
        DFA.set_ylabel(textoDFAY, fontsize=tamanhoFonteEixoY)
        DFA.grid()

    else:
        DFA = fig.add_subplot(2, 2, 4)
        DFA.set_title(textoTituloDFA + 'N.A.',
                      loc='center',
                      fontsize=tamanhoFonteTitulo)
        DFA.grid()

    #-----------------------------------------------------------------
    # Exibe e salva a figura
    #-----------------------------------------------------------------
    plt.suptitle(tituloPrincipal, fontsize=tamanhoFontePrincipal)
    nomeImagem = '3DBMO_PSD_DFA_2.png'
    fig.set_size_inches(15, 9)
    plt.savefig(nomeImagem, dpi=300, bbox_inches='tight', pad_inches=0.1)
    plt.show()
Example #10
from pmodel import pmodel
import preprocess as pre
from powernoise import powernoise
import numpy as np

n_samples = 1024
# Red noise
S3 = powernoise(2, n_samples)

# Chaos, used to generate signal S7
rho = 3.85
a0 = 0.001
S4 = logistic(rho, a0, n_samples)

# Sum the signals and normalize so that <A> = 0 and std = 1
S7 = pre.standardize(S3 + S4)

# Signal generated by the p-model
S8 = pmodel(noValues=n_samples, p=0.52, slope=-1.66)

data = S8

n_samples = 1024

np.savetxt('S8.txt', data)
# Commented-out exploratory code (Hurst exponent / generalized DFA exponents)
'''
ans = hurst(S3, skip_agg=True)
print(ans)

qorders = list(range(0, 50))
generalized_hurst_exponents = basic_dfa(S3, Q=qorders, skip_agg=True)
'''
Example #11
print('Logistic Regression with Standardized Features:')

# print 'parsing...'

# parse train and test text files
train_x = get_features('spam_train.txt')
train_y = get_classification('spam_train.txt')

test_x = get_features('spam_test.txt')
test_y = get_classification('spam_test.txt')

# print 'standardizing features...'

# standardize features
train_x = standardize(train_x)
test_x = standardize(test_x)

# add 1 y-intercept column
train_x = add_ones(train_x)
test_x = add_ones(test_x)

# print 'calculating weights...'

# find W for logistic regression with gradient descent
w = logistic_regression(train_x, train_y)

# print 'predicting...'

# make predictions
train_predictions = predict_y(train_x, w)
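`add_ones` (the "add 1 y-intercept column" step) is not shown on the page; a minimal sketch of what it presumably does:

import numpy as np

def add_ones(x):
    # Prepend a column of ones so the first weight learned by
    # logistic_regression acts as the intercept (bias) term
    return np.hstack([np.ones((x.shape[0], 1)), x])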