#### Training the first neural network — Category 1 [LOS <= 7].
# Chronological 80/20 split of the feature matrix; targets come from the
# first column of the reshaped target vector.
split = int(0.8 * len(final_array))
train_data = final_array[0:split, 0:len(final_array[0])]
train_target = main_target.reshape(-1, 1)[0:split, 0]
test_data = final_array[split:len(final_array), 0:len(final_array[0])]
test_target = main_target.reshape(-1, 1)[split:len(final_array), 0]

# Regression MLP: input-width ReLU layer -> dropout -> 8-unit ReLU layer
# -> single linear output, trained with MSE/Adam.
model = Sequential()
model.add(
    keras.layers.core.Dense(len(train_data[0]),
                            input_dim=len(train_data[0]),
                            kernel_initializer='uniform',  # Keras-2 name for init=
                            activation='relu',
                            use_bias=True))                # Keras-2 name for bias=
# BUG FIX: the original instantiated keras.layers.core.Dropout(0.1) after
# compile() without adding it to the model, so it had no effect at all.
# Add it to the stack where it was evidently intended to go.
model.add(keras.layers.core.Dropout(0.1))
model.add(
    keras.layers.core.Dense(8,
                            kernel_initializer='uniform',
                            activation='relu',
                            use_bias=True))
model.add(keras.layers.core.Dense(1, kernel_initializer='uniform', use_bias=True))
model.compile(loss='mean_squared_error', optimizer='adam')
# epochs= replaces the removed Keras-1 nb_epoch= keyword (rest of the file
# already uses Keras-2 style).
model.fit(train_data, train_target, epochs=150, batch_size=10)
model.evaluate(train_data, train_target, batch_size=10)

# Training the 2nd neural network — Category II [LOS > 7].
#array_2 = scipy.delete(array_2,0,1);
train_data_2 = array_2[0:int(0.9 * len(array_2)), 0:len(array_2[0])]
}, ignore_index=True) ################## #DNN import tensorflow as tf from keras.models import Sequential import pandas as pd from keras.layers import Dense from keras.models import Sequential from keras.layers import Dense model = Sequential() #Swish model.add(Dense(8, activation='swish', input_shape=(8, ))) model.add(Dense(8, activation='swish')) model.add(Dense(8, activation='swish')) model.add(Dense(1, activation='sigmoid')) model.compile(loss='binary_crossentropy', optimizer='sgd', metrics=['accuracy']) model.fit(X_train, y_train, epochs=5, batch_size=1, verbose=1) y_pred = model.predict_classes(X_test) lrcm = confusion_matrix(y_test, y_pred) AlSumm = AlSumm.append(
# Drop every row that BOTH IsolationForest (y_pred_iso) and LOF
# (y_pred_local) flagged as an outlier (-1). A vectorized boolean mask
# replaces the original O(n^2) backwards np.delete() loop; the surviving
# rows and their order are identical.
# NOTE(review): assumes y_pred_iso / y_pred_local have one entry per row
# of x_clean, as the original index-based loop already required.
outlier_mask = (np.asarray(y_pred_iso) == -1) & (np.asarray(y_pred_local) == -1)
x_clean = x_clean[~outlier_mask]
y_clean = y_clean[~outlier_mask]

############## CV for parameter tuning
# x_ktrain, x_ktest, y_ktrain, y_ktest = train_test_split(x_clean, y_clean, test_size=0.4, random_state=0)
# y_ktrain = keras.utils.to_categorical(y_ktrain, 3)

############# Model construction: one 200-unit ReLU hidden layer with
# 30% dropout, softmax over 3 classes, Adadelta + categorical cross-entropy.
# Pass the name to the constructor — Model.name is a read-only property in
# newer Keras, so the original `model.name = 'model'` assignment breaks there.
model = Sequential(name='model')
model.add(
    Dense(200,
          activation='relu',
          kernel_initializer='random_uniform',
          input_shape=(x_clean.shape[1], )))
model.add(Dropout(0.3))
model.add(Dense(3, activation='softmax'))
optim = keras.optimizers.Adadelta()
model.compile(optimizer=optim,
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# model.fit(x_ktrain, y_ktrain, batch_size=100, epochs=100, verbose=1)
# y_kpred = np.argmax(model.predict(x_ktest), axis=1)
# Report the nb_features most important features found by fsel, collect
# their column names, then train and save a small binary-classification MLP.
features = []
print('%i features identified as important:' % nb_features)
# Indices of the nb_features largest importances, descending. Computed once
# and reused below — the original re-ran the identical argsort expression
# for the second loop.
indices = np.argsort(fsel.feature_importances_)[::-1][:nb_features]
for rank, idx in enumerate(indices):
    print("%d. feature %s (%f)" % (rank + 1,
                                   data.columns[2 + idx],
                                   fsel.feature_importances_[idx]))

# XXX : take care of the feature order — names are collected in ascending
# column order (sorted indices), NOT in importance order.
for idx in sorted(indices):
    features.append(data.columns[2 + idx])

# Deep learning:
# create model
# NOTE(review): input_dim is hard-coded to 54 — presumably X has 54 columns;
# confirm it matches X.shape[1] (and its relation to nb_features).
model = Sequential()
model.add(Dense(12, input_dim=54, activation='relu'))
model.add(Dense(256, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
# Compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# Fit the model
model.fit(X, y, epochs=10, batch_size=10)
# Evaluate the model on the training data (no held-out set here).
scores = model.evaluate(X, y)
print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1] * 100))
# Save model (hard-coded path preserved from the original).
model.save('C:/Users/Rahul/Desktop/antivirus_demo-master/deep_calssifier/deep_classifier.h5')