Example #1
from keras.models import Sequential
from keras.layers import Dense
from sklearn.preprocessing import MinMaxScaler


# build and compile the regression network (the opening lines of this example
# are truncated in the source; the function name and signature below are assumed)
def build_model(inputnum, nodes):
    model = Sequential()
    model.add(
        Dense(units=nodes,
              kernel_initializer='uniform',
              activation='relu',
              input_dim=inputnum))
    model.add(
        Dense(units=nodes, kernel_initializer='uniform', activation='relu'))
    model.add(Dense(units=1, kernel_initializer='uniform'))

    # compile the model
    model.compile(loss='mean_squared_error', optimizer='adam', metrics=['mse'])
    return model


# load data files (load_Training_data / load_test_data are project helpers
# defined elsewhere, and `file` is assumed to be set earlier in the script)

df_train = load_Training_data(file)
df_test = load_test_data(file)

# filter dataframe by ID
if unit != 0:
    df_train = df_train[df_train['UNIT_ID'] == unit]
    df_test = df_test[df_test['UNIT_ID'] == unit]
    print('result filtered for ID', unit)

# create the training and testing sets from the dataframes
training_set = df_train.iloc[:, 2:].values
test_set = df_test.iloc[:, 2:].values

# scaling
scaler = MinMaxScaler((-1, 1))
training_scaled = scaler.fit_transform(training_set)
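
# --- Illustrative continuation, not part of the original example ---
# Assumes the last scaled column is the RUL target and the remaining columns
# are features; build_model is the name assumed above and the hyperparameters
# (nodes, epochs, batch_size) are arbitrary.
X_train, y_train = training_scaled[:, :-1], training_scaled[:, -1]
regressor = build_model(inputnum=X_train.shape[1], nodes=64)
regressor.fit(X_train, y_train, epochs=50, batch_size=32, verbose=0)
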
Example #2
import math

from sklearn.preprocessing import MinMaxScaler


# asymmetric RUL scoring: sum an exponential penalty over every test point
# (the opening lines of this example are truncated in the source; the function
# name is assumed here and `result` is initialised to zero so the loop can
# accumulate into it)
def scoring(pred, test):
    result = 0
    for i in range(len(test)):
        d = pred[i] - test[i]
        score = 0
        if d < 0:
            score = (math.exp(-(d / 10))) - 1
            result = result + score

        elif d > 0:
            score = (math.exp(d / 13)) - 1
            result = result + score
    return result
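
# Quick sanity check of the asymmetric penalty above (values chosen arbitrarily,
# not from the original listing): an under-estimate of 10 cycles contributes
# exp(10/10) - 1 ~= 1.72, while an over-estimate of 10 cycles contributes
# exp(10/13) - 1 ~= 1.16.
print(scoring([120.0, 90.0], [130.0, 80.0]))  # ~= 1.72 + 1.16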


# Importing the training dataframe (load_Training_data is a project helper
# defined elsewhere; `file` is assumed to be set earlier in the script)

dataset_train = load_Training_data(file)

# filter dataframe by ID
if unit != 0:
    dataset_train = dataset_train[dataset_train['UNIT_ID'] == unit]

# construct the dataset as numpy array

training_set = dataset_train.iloc[:, 2:].values

# Feature Scaling

sc = MinMaxScaler(feature_range=(-1, 1))
training_set_scaled = sc.fit_transform(training_set[:, 0:])

# restore original values of RUL
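# A possible reading of this truncated step, not from the original listing:
# the unscaled RUL values, assumed to sit in the last column, can be recovered
# from the fitted scaler (equivalently, taken from training_set directly).
rul_original = sc.inverse_transform(training_set_scaled)[:, -1]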