"""Train a dense classification head (DL#1) on pre-extracted image vectors.

Loads flattened image feature vectors and integer labels from text files,
one-hot encodes the 5 class labels, splits into train/test, and defines a
small Keras MLP head: 25088-dim input -> Dense(256, ReLU) + BatchNorm +
Dropout -> Dense(5, softmax).
"""
import os

import numpy as np
# NOTE(review): these imports were missing from the original snippet but the
# names are used below; paths follow the Keras 1.x API this file targets.
from keras.layers import BatchNormalization, Dense, Dropout, Input
from keras.utils import np_utils

import fttlutils

DATA_DIR = "../data/files"
MODEL_DIR = os.path.join(DATA_DIR, "models")
NUM_EPOCHS = 50   # training hyperparameters; presumably consumed by a
BATCH_SIZE = 32   # model.fit(...) call beyond this snippet -- TODO confirm

# data
X = np.loadtxt(os.path.join(DATA_DIR, "images-X.txt"), delimiter=",")
# FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24; the builtin
# int is the drop-in replacement (same behavior: platform default integer).
y = np.loadtxt(os.path.join(DATA_DIR, "images-y.txt"), delimiter=",",
               dtype=int)
# One-hot encode the integer labels into a (N, 5) indicator matrix.
# NOTE(review): nb_classes= is the Keras 1.x keyword; Keras 2 renamed it
# to num_classes -- change if/when the project upgrades.
Y = np_utils.to_categorical(y, nb_classes=5)
np.random.seed(42)
Xtrain, Xtest, Ytrain, Ytest = fttlutils.train_test_split(
    X, Y, test_size=0.3, random_state=42)
print(Xtrain.shape, Xtest.shape, Ytrain.shape, Ytest.shape)

# model
# input: (None, 25088)
imgvecs = Input(shape=(Xtrain.shape[1],), dtype="float32")
# hidden layer: (None, 256)
# NOTE(review): init= is the Keras 1.x keyword (Keras 2: kernel_initializer=).
fc1 = Dense(256, activation="relu", init="he_uniform", name="dl1fc1")(imgvecs)
fc1 = BatchNormalization()(fc1)
fc1 = Dropout(0.5)(fc1)
# output layer: (None, 5)
predictions = Dense(5, activation="softmax", name="dl1preds")(fc1)
"""Logistic-regression baseline on pre-extracted image vectors.

Loads the same feature/label text files as the DL scripts, fits a
scikit-learn LogisticRegression on a 70/30 split, reports metrics via
fttlutils.print_stats, and pickles the fitted model to MODEL_DIR.
"""
import os
import pickle  # FIX: used below for model persistence but was never imported

import numpy as np
# FIX: LogisticRegression was used but never imported in the original snippet.
from sklearn.linear_model import LogisticRegression

import fttlutils

##################### main ######################

DATA_DIR = "../data/files"
MODEL_DIR = os.path.join(DATA_DIR, "models")

# data
X = np.loadtxt(os.path.join(DATA_DIR, "images-X.txt"), delimiter=",")
# FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24; the builtin
# int is the drop-in replacement (same behavior: platform default integer).
y = np.loadtxt(os.path.join(DATA_DIR, "images-y.txt"), delimiter=",",
               dtype=int)
Xtrain, Xtest, ytrain, ytest = fttlutils.train_test_split(
    X, y, test_size=0.3, random_state=42)
print(Xtrain.shape, Xtest.shape, ytrain.shape, ytest.shape)

# model
clf = LogisticRegression()
clf.fit(Xtrain, ytrain)
ytest_ = clf.predict(Xtest)
fttlutils.print_stats(ytest, ytest_, "LR Model")

# persist the fitted classifier for later comparison against the DL models
with open(os.path.join(MODEL_DIR, "lr-model.pkl"), "wb") as fmodel:
    pickle.dump(clf, fmodel)