Example #1
def solver_function_v2(datasets):
    # Least-squares line fit of the 'lai' series against one source series.
    # Relies on module-level names (numpy, log, source, x_months, ds_var,
    # timestamps, calc_rmse, predictive_models) defined earlier in the module.
    x1 = datasets[source]
    x = numpy.array(x1[x_months:])
    x_all = numpy.array(x1)

    y1 = datasets['lai']
    y = numpy.array(y1[x_months:])
    # y_all = numpy.array(y1)

    # We can rewrite the line equation as y = Ap,
    # where A = [[x 1]] and p = [[m], [c]].
    # Now use lstsq to solve for p:
    A = numpy.vstack([x, numpy.ones(len(x))]).T  # [[x 1]]
    lin_answer = numpy.linalg.lstsq(A, y, rcond=None)
    m, c = lin_answer[0]
    log.info(f'm: {m}, c: {c}')

    y_pred = c + m * x_all

    calc_rmse(y, y_pred[x_months:])

    datasets[f'pred_{ds_var}'] = y_pred
    predictive_models.plot(timestamps, datasets)


if __name__ == '__main__':
    # load hdf5 measurement data.
    timestamps, datasets = load_data()
    # calculate_moving_mean()
    # solver_function(datasets)
    solver_function_v2(datasets)
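`calc_rmse` is defined elsewhere in the module; a minimal sketch of such a helper, assuming it simply reports the root-mean-square error between the observed and fitted values, could look like this:

import numpy

def calc_rmse(y_true, y_pred):
    # Root-mean-square error between observed and predicted values.
    diff = numpy.asarray(y_true) - numpy.asarray(y_pred)
    return float(numpy.sqrt(numpy.mean(diff ** 2)))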
Example #2
    'FCN', 'FCN_bilstm', 'FCN_bilstm_separated', 'bilstm_FCN', 'resnet',
    'resnet_bilstm', 'resnet_bilstm_separated', 'bilstm_resnet', 'cnn',
    'cnn_lstm', 'cnn_bilstm', 'cnn_lstm_separated', 'cnn_lstm_separated2',
    'multi_cnn', 'multi_cnn_lstm', 'multi_cnn_bilstm', 'multi_cnn_bilstm2',
    'lstm_cnn'
]
"""
Load data
"""
# Hyperparameters
batch_size = 64  #min(X_train.shape[0]/10, 16)
epochs = 1500
dimensions = 3  # set to 3 or 4 depending on the neural network to apply:
# 3 if the first layer is Conv1D, 4 if it is Conv2D
for j in range(len(datasets)):
    X, y = load_datasets.load_data(direc, datasets[j], dimensions)
    print(X.shape)
    print(y.shape)

    X_train, X_test, y_train, y_test = model_selection.train_test_split(
        X, y, test_size=0.3, random_state=42, stratify=y)
    i = 1
    red = 'bilstm_resnet'  # architecture to train ('red' is Spanish for 'network')

    standardize = True
    rescale = False
    normalize = False
    preprocessing = False
    # Data preprocessing
    if standardize:
        X_train1, X_test1 = load_datasets.standardize_data(X_train, X_test)
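`load_datasets.standardize_data` is not shown here; a common implementation z-scores both splits using the mean and standard deviation of the training split only (a sketch under that assumption, not the module's actual code):

import numpy

def standardize_data(X_train, X_test):
    # Z-score both splits with statistics computed from the training data only,
    # so no information from the test set leaks into preprocessing.
    mean = X_train.mean(axis=0)
    std = X_train.std(axis=0)
    std[std == 0] = 1.0  # guard against division by zero for constant features
    return (X_train - mean) / std, (X_test - mean) / std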
Example #3
def main():
    # load hdf5 measurement data.
    timestamps, datasets = load_data()
    calculate_corr(datasets)
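`calculate_corr` is defined elsewhere; assuming it compares each measurement series against the 'lai' series (as in the least-squares example above), a minimal sketch could be:

import numpy

def calculate_corr(datasets):
    # Pearson correlation of every series against the 'lai' reference series.
    # Assumes all series have the same length; the 'lai' key is an assumption
    # carried over from the earlier example.
    lai = numpy.asarray(datasets['lai'])
    for name, series in datasets.items():
        if name == 'lai':
            continue
        r = numpy.corrcoef(numpy.asarray(series), lai)[0, 1]
        print(f'corr({name}, lai) = {r:.3f}')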
Example #4
def main():
    timestamps, datasets = load_data(conf['groupname'])
    # calculate_moving_mean()
    make_prediction(datasets)
    plot(timestamps, datasets)
Example #5
from convolutional_network import CNN
from load_datasets import load_data
import sys

custom_datasets = load_data('custom')
canfar_datasets = load_data('canfar-100')
batch_size = 4
print('Initializing the CNN')
convnet = CNN(datasets=canfar_datasets, batch_size=batch_size)
epochs = 10
print(f'Training the CNN for {epochs} epochs')
convnet.train(epochs)
test_set_x, test_set_y = canfar_datasets[2]
test_score = convnet.test(test_set_x, test_set_y, 500)
classify_result = convnet.classify(test_set_x, batch_size)
classify_result = classify_result.reshape(10000,)  # reshape returns a new array; rebind the result
print('Test Score Result:')
print(test_score)