def test_mutate_architecture():
    """Smoke-test mutate_architecture: build a LeNet-style model, wrap its
    config in an individual dict, mutate it in place, and print the config
    before and after so the change can be eyeballed."""
    net = Sequential([
        Conv2D(filters=6,
               kernel_size=(3, 3),
               activation='relu',
               input_shape=(224, 224, 1)),  # input shape 1 or 3?
        AveragePooling2D(),
        Conv2D(filters=16, kernel_size=(3, 3), activation='relu'),
        AveragePooling2D(),
        Flatten(),
        # TODO change Units because we only have 2 classes to predict
        Dense(units=60, activation='relu'),
        Dense(units=42, activation='relu'),
        Dense(units=1, activation='sigmoid'),
    ])
    individual = {'architecture': net.get_config()}
    print(individual['architecture'])
    mutate_architecture(individual)
    print(individual['architecture'])
def model_remove_dropout(model_train: keras.Sequential) -> keras.Sequential:
    """
    Function to remove dropout layer prior to saving Sequential model
    """
    # Map each layer's generated name to its class name via the model config.
    name_to_class = {
        entry['config']['name']: entry['class_name']
        for entry in model_train.get_config()['layers']
    }

    # Nothing to strip: hand back the original model untouched.
    if "Dropout" not in name_to_class.values():
        return model_train

    # Rebuild a fresh Sequential from every non-Dropout layer, in order
    # (dict preserves insertion order, so layer order is kept).
    stripped = keras.Sequential()
    for layer_name, layer_class in name_to_class.items():
        if layer_class != "Dropout":
            stripped.add(model_train.get_layer(name=layer_name))
    return stripped
def trainningNetwork(bagOfWords, y):
    """Train and evaluate an MLP binary classifier with 10-fold cross-validation.

    Parameters
    ----------
    bagOfWords : np.ndarray
        Feature matrix (samples x vocabulary size).
    y : np.ndarray
        Binary labels aligned with the rows of bagOfWords.

    Returns
    -------
    tuple
        (model, Errores, Precision, Recall, F1score, elapsed_seconds).
        NOTE: `model` is the network trained on the LAST fold only; the
        arrays hold per-fold classification error / precision / recall / F1.
    """
    tiempo_i = time.time()
    Errores = np.ones(10)
    Precision = np.zeros(10)
    Recall = np.zeros(10)
    F1score = np.zeros(10)

    kf = KFold(n_splits=10, shuffle=True)
    # enumerate replaces the hand-maintained `j` counter of the original.
    for j, (train_index, test_index) in enumerate(kf.split(bagOfWords)):
        X_train, X_test = bagOfWords[train_index], bagOfWords[test_index]
        y_train, y_test = y[train_index], y[test_index]

        # MLP with a sigmoid output for binary classification.
        model = Sequential()
        model.add(
            Dense(units=15, activation='relu', input_dim=bagOfWords.shape[1]))
        model.add(Dropout(0.25))
        model.add(Dense(units=50, activation='relu'))
        model.add(Dense(units=20, activation='relu'))
        model.add(Dense(units=50, activation='relu'))
        model.add(Dense(units=40, activation='relu'))
        model.add(Dense(units=40, activation='relu'))
        model.add(Dense(units=20, activation='relu'))
        model.add(Dense(1))
        model.add(Activation('sigmoid'))
        # (Removed dead calls: model.get_config() / model.get_weights()
        # whose return values were discarded.)

        model.compile(loss='binary_crossentropy',
                      optimizer='adam',
                      metrics=['accuracy'])
        model.fit(X_train, y_train, epochs=30)

        # Threshold sigmoid outputs at 0.5 — vectorized replacement for the
        # original per-sample loop (which also zipped y_test without using it).
        ypred = model.predict(X_test)
        y_pred = (np.asarray(ypred).ravel() > 0.5).astype(int)

        Errores[j] = classification_error(y_pred, y_test)
        precision, recall, f1score = error_measures(y_pred, y_test)
        Precision[j] = precision
        Recall[j] = recall
        F1score[j] = f1score

    return model, Errores, Precision, Recall, F1score, time.time() - tiempo_i
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Flatten,Dense,Dropout

# Model definition: a simple MLP for 28x28 inputs (presumably MNIST-style
# data — confirm against the training script), with two tanh hidden layers,
# dropout regularization, and a 10-way softmax output.
model = Sequential([
    Flatten(input_shape=(28, 28)),
    Dense(units=200,activation='tanh'),
    Dropout(0.4),
    Dense(units=100,activation='tanh'),
    Dropout(0.4),
    Dense(units=10,activation='softmax')
])

# Save the model architecture as a Python dict (weights are NOT included).
config = model.get_config()
print(config)

# In[2]:

# Save the model architecture as a JSON string (weights are NOT included).
json_config = model.to_json()
print(json_config)

# In[3]:

import json
def build_fn(model_struct):
    """Build a Keras Sequential model and return its config dict.

    Parameters:
        model_struct: one of 'LeNet', 'AlexNet' or 'Random'. Any other value
            prints a warning and implicitly returns None.

    Returns:
        The result of Sequential.get_config() for the built model.
    """
    if model_struct == 'LeNet':
        # Classic LeNet-style stack: two conv + average-pool pairs, then a
        # small dense head ending in a single sigmoid unit.
        model = Sequential([
            Conv2D(filters=6,
                   kernel_size=(3, 3),
                   activation='relu',
                   input_shape=(224, 224, 1)),  # input shape 1 or 3?
            AveragePooling2D(),
            Conv2D(filters=16, kernel_size=(3, 3), activation='relu'),
            AveragePooling2D(),
            Flatten(),
            Dense(
                units=60, activation='relu'
            ),  # TODO change Units because we only have 2 classes to predict
            Dense(
                units=42, activation='relu'
            ),  # TODO change Units because we only have 2 classes to predict
            Dense(
                units=1, activation='sigmoid'
            )  # TODO change Units because we only have 2 classes to predict
        ])
        return model.get_config()
    elif model_struct == 'AlexNet':
        # AlexNet-style stack: five conv layers with max pooling, then three
        # large dense layers with dropout and a single sigmoid output.
        model = Sequential([
            Conv2D(filters=96,
                   input_shape=(224, 224, 1),
                   kernel_size=(11, 11),
                   activation='relu',
                   strides=(4, 4),
                   padding='valid'),  # Should input shape be 1 or 3?
            MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'),
            Conv2D(filters=256,
                   kernel_size=(11, 11),
                   activation='relu',
                   strides=(1, 1),
                   padding='valid'),
            MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'),
            Conv2D(filters=384,
                   kernel_size=(3, 3),
                   activation='relu',
                   strides=(1, 1),
                   padding='valid'),
            Conv2D(filters=384,
                   kernel_size=(3, 3),
                   activation='relu',
                   strides=(1, 1),
                   padding='valid'),
            Conv2D(filters=256,
                   kernel_size=(3, 3),
                   activation='relu',
                   strides=(1, 1),
                   padding='valid'),
            MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'),
            Flatten(),
            Dense(4096, input_shape=(224 * 224, ), activation='relu'),
            Dropout(0.4),
            Dense(
                4096, activation='relu'
            ),  # TODO change Units because we only have 2 classes to predict
            Dropout(0.4),
            Dense(
                1000, activation='relu'
            ),  # TODO change Units because we only have 2 classes to predict
            Dropout(0.4),
            Dense(
                1, activation='sigmoid'
            )  # TODO change Units because we only have 2 classes to predict
        ])
        return model.get_config()
    elif model_struct == 'Random':
        # Grow a random architecture layer by layer: conv/pool layers until a
        # Flatten is inserted, then dropout/dense layers until a random break.
        model = Sequential()
        model.add(random_conv(224, True))
        while True:
            rand_val = random.random()
            try:
                if not any(isinstance(x, Flatten) for x in
                           model.layers):  # if there's not a Flatten layer yet
                    # Smallest spatial dimension of the last layer's output.
                    input_size = min(model.layers[-1].output_shape[1],
                                     model.layers[-1].output_shape[2])
                    # Force a Flatten once the feature map is too small to
                    # convolve/pool further or the conv stack is deep enough.
                    if input_size < 2 or len(model.layers) > 6:
                        model.add(Flatten())
                        continue
                    if rand_val < 0.3:
                        model.add(random_conv(input_size, False))
                    elif rand_val < 0.6:
                        model.add(random_pool(input_size, 'max'))
                    elif rand_val < 0.9:
                        model.add(random_pool(input_size, 'average'))
                    else:
                        model.add(Flatten())
                else:  # if there is a Flatten layer already
                    if rand_val < 0.35:
                        model.add(random_dropout())
                    elif rand_val < 0.7:
                        model.add(random_dense(False))
                    else:
                        break
            except tensorflow.errors.ResourceExhaustedError:
                # NOTE(review): reconstructed nesting assumes this handler
                # only logs and the while-loop retries — confirm against the
                # original indentation.
                print("Initial Architecture went OOM, retrying...")
        # Final dense layer (flag True presumably marks the output layer —
        # confirm against random_dense's definition).
        model.add(random_dense(True))
        return model.get_config()
    # Fallback: unrecognized architecture name; returns None implicitly.
    print("model was not LeNet, AlexNet, or Random")