Example #1
def findingModelWithAutokeras(DataX, LabelsY, testData, testLabels,
                              categorizeLabels, lr):
    import autokeras as ak
    import numpy as np
    if categorizeLabels:
        LabelsY = labelsToClassification(LabelsY)
        testLabels = labelsToClassification(testLabels)

    DataX = np.swapaxes(DataX, 0, 1)
    DataX = np.swapaxes(DataX, 1, 4)
    DataX = np.squeeze(DataX)
    testData = np.swapaxes(testData, 0, 1)
    testData = np.swapaxes(testData, 1, 4)
    testData = np.squeeze(testData)

    print(np.shape(DataX))
    #print(DataX[0][0])

    model = ak.ImageClassifier(verbose=True, augment=True)
    model.fit(DataX, LabelsY, time_limit=10 * 60 * 60)  # search budget in seconds
    model.final_fit(DataX, LabelsY, testData, testLabels, retrain=True)
    #x_train = np.random.rand(100, 30, 30, 1)
    #x_val  = np.random.rand(70, 30, 30, 1)
    #y_train = np.ceil(np.random.rand(100))
    #y_val = np.ceil(np.random.rand(70))
    #clf = ak.ImageClassifier(verbose=True)
    #clf.fit(x_train, y_train)
    pass
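The function above depends on a labelsToClassification helper that is not shown. A minimal sketch of what such a helper might do, assuming the labels are integer class ids (the helper name comes from the example; its body is an assumption):

import numpy as np
from tensorflow.keras.utils import to_categorical

def labelsToClassification(labels):
    # One-hot encode an array of integer class ids (assumed implementation).
    return to_categorical(np.asarray(labels))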
Example #2
def main():

    # Set up training times
    TRAINING_TIMES = [
        60 * 60 * 1,    # 1 hour
        60 * 60 * 2,	# 2 hours
        60 * 60 * 3,	# 3 hours
        60 * 60 * 4,	# 4 hours
        60 * 60 * 5,	# 5 hours
    ]
    
    # Import data
    BASE_DATA_DIR = get_data_base_path()
    DATASET_PATH = os.path.join(BASE_DATA_DIR, 'STL_10', 'data_STL_10.h5')

    # Read in data
    h5file = h5py.File(DATASET_PATH, 'r')
    X = np.array(h5file['X'])
    y = np.array(h5file['y'])
    y = y.reshape((len(y), 1))

    # Shuffle data
    X, y = shuffleData(X, y)

    # Split data
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.3)
    del X, y

    # Normalize
    X_train = X_train.astype("float") / 255.0
    X_test = X_test.astype("float") / 255.0  

    # Loop over the number of seconds to allow the current Auto-Keras 
    # model to train for.
    for seconds in TRAINING_TIMES:
        # Initialize model
        model = ak.ImageClassifier(verbose=True)

        # Fit to training data, first time
        model.fit(X_train, y_train, time_limit=seconds)

        # Fit again
        model.final_fit(X_train, y_train, X_test, y_test, retrain=True)

        # Evaluate
        score = model.evaluate(X_test, y_test)
        predictions = model.predict(X_test)
        report = classification_report(y_test, predictions)

        # Write to disk
        filename = '{0}.txt'.format(seconds)
        FULL_FILE_PATH = os.path.join(TXT_OUTPUT_PATH, filename)
        with open(FULL_FILE_PATH, 'a') as fw:
            fw.write(report)
            fw.write('\nScore: {0}'.format(score))

        # Export Model
        model_name = 'autokeras_model_{0}.h5'.format(seconds)
        FULL_MODEL_PATH = os.path.join(MODEL_OUTPUT_PATH, model_name)
        model.export_autokeras_model(FULL_MODEL_PATH)
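Example #2 also calls a shuffleData helper that is not defined in the snippet. A minimal sketch of such a helper, assuming it shuffles X and y with one shared permutation:

import numpy as np

def shuffleData(X, y):
    # Shuffle features and labels together so rows stay aligned (assumed implementation).
    idx = np.random.permutation(len(X))
    return X[idx], y[idx]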
Example #3
def train(width):
    digits = ["01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12", "13"]
    features_list = []
    features_label = []
    # load labeled training / test data
    # loop over the 13 directories where each directory stores the images of a digit
    for digit in digits:
        label = digit
        training_directory = f'./training_set/{width}x{width}/{label}/'
        for filename in os.listdir(training_directory):
            if filename.endswith('.png') :
                training_digit_image = skimage.io.imread(training_directory + filename)
                # training_digit = color.rgb2gray(training_digit
        
                features_list.append(training_digit_image / 255)
                features_label.append(label)

    # store features array into a numpy array
    x_train  = np.array(features_list)
    y_train = to_categorical(np.array(features_label))

    # Initialize the image classifier.
    clf = ak.ImageClassifier(max_trials=10) # It tries 10 different models.
    # Feed the image classifier with training data.
    clf.fit(x_train, y_train)
Example #4
    def image_classification(self,
                             num_classes: int = None,
                             multi_label: bool = False,
                             **kwargs) -> ak.ImageClassifier:
        """Image Classification.

        Args:
            num_classes (int, optional): Number of classes. Defaults to None.
            multi_label (bool, optional): The target is multi-labeled. Defaults to False.

        Returns:
            ak.ImageClassifier: AutoKERAS image classification class.
        """
        return ak.ImageClassifier(
            num_classes=num_classes,
            multi_label=multi_label,
            loss=self.loss,
            metrics=self.metrics,
            project_name=self.project_name,
            max_trials=self.max_trials,
            directory=self.directory,
            objective=self.objective,
            tuner=self.tuner,
            overwrite=self.overwrite,
            seed=self.seed,
            max_model_size=self.max_model_size,
            **kwargs,
        )
Example #5
def mnist_example():
    # Loads dataset.
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    x_train = x_train.reshape(x_train.shape + (1, ))
    x_test = x_test.reshape(x_test.shape + (1, ))

    #--------------------
    clf = ak.ImageClassifier(verbose=True, augment=False)

    print('Fitting...')
    start_time = time.time()
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)  # time_limit in secs.
    print('\tElapsed time = {}'.format(time.time() - start_time))

    print('Final Fitting...')
    start_time = time.time()
    clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
    print('\tElapsed time = {}'.format(time.time() - start_time))

    print('Evaluating...')
    start_time = time.time()
    accuracy = clf.evaluate(x_test, y_test)
    print('\tElapsed time = {}'.format(time.time() - start_time))

    print('Accuracy =', accuracy * 100)

    print('Predicting...')
    start_time = time.time()
    predictions = clf.predict(x_test)
    print('\tElapsed time = {}'.format(time.time() - start_time))

    print('Predictions =', predictions)
Example #6
def train_model():
    train_data = np.load('bottleneck_features_train.npy')
    nsamples, nx, ny, nz = train_data.shape
    train_data = train_data.reshape(
        (nsamples, nx * ny * nz)
    )  # scikit-learn expects 2d num arrays for the training dataset for a fit function
    train_labels = np.array([0] * (nb_train_samples // 2) + [1] *
                            (nb_train_samples // 2))
    clf = ak.ImageClassifier()
    clf.fit(train_data, train_labels)
    validation_data = np.load('bottleneck_features_validation.npy')
    nsamples, nx, ny, nz = validation_data.shape
    validation_data = validation_data.reshape((nsamples, nx * ny * nz))
    validation_labels = np.array([0] * (nb_validation_samples // 2) + [1] *
                                 (nb_validation_samples // 2))
    clf.final_fit(train_data,
                  train_labels,
                  validation_data,
                  validation_labels,
                  retrain=True)

    y = clf.evaluate(validation_data, validation_labels)

    print("auto Transfer Learning accuracy: %f" % y)
    clf.load_searcher().load_best_model().produce_keras_model().save(
        'transfer_model.h5')
Example #7
def main():

    labels = 'beaver,dolphin,otter,seal,whale,aquarium fish,flatfish,ray,shark,trout,orchids,poppies,roses,sunflowers,tulips,bottles,bowls,cans,cups,plates,apples,mushrooms,oranges,pears,sweet peppers,clock,computer keyboard,lamp,telephone,television,bed,chair,couch,table,wardrobe,bee,beetle,butterfly,caterpillar,cockroach,bear,leopard,lion,tiger,wolf,bridge,castle,house,road,skyscraper,cloud,forest,mountain,plain,sea,camel,cattle,chimpanzee,elephant,kangaroo,fox,porcupine,possum,raccoon,skunk,crab,lobster,snail,spider,worm,baby,boy,girl,man,woman,crocodile,dinosaur,lizard,snake,turtle,hamster,mouse,rabbit,shrew,squirrel,maple,oak,palm,pine,willow,bicycle,bus,motorcycle,pickup truck,train,lawn-mower,rocket,streetcar,tank,tractor'
    labelNames = labels.split(',')
    output_path = "report_file"
    
    training_times = [
        # 60 * 20
        60 * 60 * 16    # 16 hours
    ]
    
    print('**************data loading **************')
    ((train_x,train_y),(test_x,test_y)) = cifar100.load_data()
    train_x = train_x.astype("float")  / 255.0
    test_x = test_x.astype("float")  / 255.0
    #labelNames = ["airplane", "automobile", "bird", "cat", "deer","dog", "frog", "horse", "ship", "truck"]
    for searching_time in training_times:
        print('*********************** This search will cost {} seconds. ***********************'.format(searching_time))
        
        model = ak.ImageClassifier(path="report_file/{}/".format(searching_time), verbose=True)

        model.fit(train_x, train_y, time_limit=searching_time)
        model.final_fit(train_x, train_y, test_x, test_y, retrain=True)
        score = model.evaluate(test_x, test_y)
        predictions = model.predict(test_x)
        report = classification_report(test_y, predictions, target_names=labelNames)
        save = os.path.join(output_path, "{}_seconds_search.txt".format(searching_time))
        f = open(save, "w")
        f.write(report)
        f.write("\nscore:{}".format(score))
        f.close()
        
        model.export_autokeras_model("report_file/{}/{}_model.h5".format(searching_time,searching_time))
        load_model("report_file/{}/{}_model.h5".format(searching_time,searching_time))
        plot_model(model, to_file="report_file/{}/{}_model.png".format(searching_time,searching_time))
Example #8
def trainModel(dataSet = None, pathLoadDataSet = None, pathSaveModel = None, sizeTestModel = 10):
    saveModel = False
    if dataSet is None:
        saveModel = True

    x_train, x_test, y_train, y_test = dadosTrainTest(dataSet, pathLoadDataSet)

    clf = ak.ImageClassifier(overwrite=True, multi_label=True, max_trials=sizeTestModel)
    clf.fit(x_train, y_train, batch_size=8, validation_split=0.10, epochs=25)

    # Export as a Keras Model.
    model = clf.export_model()

    if dataSet is None:
        if pathSaveModel is None:
            pathSaveModel = 'models'

        output = Dir.create(pathSaveModel)
        now = datetime.now()
        nameModel = output + "/model-ass-" + now.strftime("%Y%m%d%H%M%S%f")

        try:
            model.save(nameModel, save_format="tf")
        except Exception:
            model.save(nameModel + ".h5")

        with open(nameModel + "-labels.csv", 'w') as outfile:
            writer = csv.writer(outfile)
            label = np.unique(y_train).tolist()
            writer.writerows(map(lambda x: [x], label))

    return model
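A model exported with clf.export_model() and saved as above can later be reloaded as a plain Keras model. A minimal sketch, assuming the saved path produced by trainModel() and using ak.CUSTOM_OBJECTS so the AutoKeras preprocessing layers deserialize correctly (the path below is a placeholder, not a real file):

import autokeras as ak
from tensorflow.keras.models import load_model

# The path is illustrative; substitute the nameModel value written by trainModel().
reloaded = load_model("models/model-ass-20200101000000000000",
                      custom_objects=ak.CUSTOM_OBJECTS)
reloaded.summary()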
Example #9
def make_automl_model(args):
    return ak.ImageClassifier(
        num_classes=args.MODEL.NUM_CLASSES,
        loss='sparse_categorical_crossentropy',
        metrics = ['accuracy'],
        directory=args.OUTPUT_DIR,
        max_trials=args.MODEL.AUTOML_TRIALS,
        objective="val_loss")
Example #10
def test_image_classifier(tmp_dir):
    train_x = common.generate_data(num_instances=100, shape=(32, 32, 3))
    train_y = common.generate_one_hot_labels(num_instances=100, num_classes=10)
    clf = ak.ImageClassifier(directory=tmp_dir, max_trials=2, seed=common.SEED)
    clf.fit(train_x, train_y, epochs=1, validation_split=0.2)
    keras_model = clf.export_model()
    assert clf.predict(train_x).shape == (len(train_x), 10)
    assert isinstance(keras_model, tf.keras.Model)
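The common.generate_data and common.generate_one_hot_labels helpers appear to come from the AutoKeras test utilities; a rough sketch of what they likely produce, assuming random images and one-hot label matrices (the bodies below are assumptions, not the library's code):

import numpy as np

def generate_data(num_instances, shape):
    # Random float images in [0, 1) with the requested per-instance shape.
    return np.random.rand(num_instances, *shape).astype(np.float32)

def generate_one_hot_labels(num_instances, num_classes):
    # Random integer classes turned into one-hot rows.
    classes = np.random.randint(num_classes, size=num_instances)
    return np.eye(num_classes, dtype=np.float32)[classes]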
Example #11
def test_img_clf_fit_call_auto_model_fit(fit, tmp_path):
    auto_model = ak.ImageClassifier(directory=tmp_path, seed=utils.SEED)

    auto_model.fit(x=utils.generate_data(num_instances=100, shape=(32, 32, 3)),
                   y=utils.generate_one_hot_labels(num_instances=100,
                                                   num_classes=10))

    assert fit.is_called
Example #12
def build_model_3view_autokeras(X_train, X_test, y_train, y_test, input_shape):
    import autokeras as ak
    clf = ak.ImageClassifier(verbose=True)
    clf.fit(X_train, y_train)
    clf.final_fit(X_train, y_train, X_test, y_test, retrain=True)
    results = clf.evaluate(X_test, y_test)
    #results = clf.predict(X_test)
    print(results)
Example #13
def ImageClassification():
    filename = "mnist.h5"
    X_train, Y_train = load_data(filename, "train")
    X_test, Y_test = load_data(filename, "test")
    X_valid, Y_valid = load_data(filename, "valid")

    model = ak.ImageClassifier(max_trials=3, overwrite=False)
    model.fit(X_train, Y_train, epochs=1, batch_size=64, validation_data=(X_valid, Y_valid))
    loss = model.evaluate(X_test, Y_test)
    print("Loss : {loss}")
Example #14
def test_img_clf_init_hp2_equals_hp_of_a_model(clear_session, tmp_path):
    clf = ak.ImageClassifier(directory=tmp_path)
    clf.inputs[0].shape = (32, 32, 3)
    clf.outputs[0].in_blocks[0].output_shape = (10, )
    init_hp = task_specific.IMAGE_CLASSIFIER[2]
    hp = kerastuner.HyperParameters()
    hp.values = copy.copy(init_hp)

    clf.tuner.hypermodel.build(hp)

    assert set(init_hp.keys()) == set(hp._hps.keys())
Example #15
def train():
    time_limit = [60 * 60 * 4]
    model = ak.ImageClassifier(verbose=True)
    train_x, train_y, test_x, test_y = gen_train_and_test_data()

    train_x, test_x = reshape_as_image((train_x, test_x))

    model.fit(train_x, train_y, time_limit=time_limit[0])
    model.final_fit(train_x, train_y, test_x, test_y, retrain=True)

    score = model.evaluate(test_x, test_y)
    predictions = model.predict(test_x)
Example #16
def train_model():

    clf = ak.ImageClassifier(verbose=True, augment=False)
    train_data, train_labels = load_image_dataset(csv_file_path=train_data_dir+"/label.csv",
                                      images_path=train_data_dir)
    validation_data, validation_labels = load_image_dataset(csv_file_path=validation_data_dir+"/label.csv",
                                      images_path=validation_data_dir)
    clf.fit(train_data, train_labels)
    clf.final_fit(train_data, train_labels, validation_data, validation_labels, retrain=True)
    y = clf.evaluate(validation_data, validation_labels)
    print("auto CNN classifier accuracy: %f" % y)
    clf.load_searcher().load_best_model().produce_keras_model().save('shallowCNN_model.h5')
Example #17
def main():
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    clf = ak.ImageClassifier(max_trials=10,
                             directory='tmp_dir',
                             overwrite=True)

    start_time = timeit.default_timer()
    clf.fit(x_train, y_train)
    stop_time = timeit.default_timer()

    accuracy = clf.evaluate(x_test, y_test)[1]
    print('Accuracy: {accuracy}%'.format(accuracy=round(accuracy * 100, 2)))
    print('Total time: {time} seconds.'.format(
        time=round(stop_time - start_time, 2)))
Example #18
def test_automl():
    """
    Reference
    ----------
        https://www.pyimagesearch.com/2019/01/07/auto-keras-and-automl-a-getting-started-guide/
        https://www.simonwenkel.com/2018/09/02/autokeras-cifar10_100.html
    """
    (X_train, y_train), (X_test, y_test) = cifar10.load_data()
    X_train, X_test = X_train / 255.0, X_test / 255.0

    clf = ak.ImageClassifier(verbose=True)
    clf.fit(X_train, y_train, time_limit=10 * 60 * 60)
    clf.final_fit(X_train, y_train, X_test, y_test, retrain=True)
    print(clf.evaluate(X_test, y_test))
Example #19
def build_autokeras_model(num_classes, train_generator, validation_generator):
    import autokeras as ak
    model = ak.ImageClassifier(verbose=True)

    trainX, trainY = train_generator.next()
    # Assumption: use one batch from the validation generator as the held-out set.
    testX, testY = validation_generator.next()
    trainX = [x for x in trainX]
    trainY = [x for x in trainY]
    # Need to inverse the Y labels back to string before training
    model.fit(trainX, trainY, time_limit=10)
    model.final_fit(trainX, trainY, testX, testY, retrain=True)

    # evaluate the Auto-Keras model
    score = model.evaluate(testX, testY)
    predictions = model.predict(testX)
Example #20
def test_image_classifier(tmp_path):
    train_x = utils.generate_data(num_instances=320, shape=(32, 32))
    train_y = utils.generate_one_hot_labels(num_instances=320, num_classes=10)
    clf = ak.ImageClassifier(
        directory=tmp_path,
        max_trials=2,
        seed=utils.SEED,
        distribution_strategy=tf.distribute.MirroredStrategy(),
    )
    clf.fit(train_x, train_y, epochs=1, validation_split=0.2)
    keras_model = clf.export_model()
    clf.evaluate(train_x, train_y)
    assert clf.predict(train_x).shape == (len(train_x), 10)
    assert isinstance(keras_model, tf.keras.Model)
Example #21
def main():
    # initialize the output directory
    OUTPUT_PATH = "output"

    # initialize the list of training times that we'll allow
    # Auto-Keras to train for
    TRAINING_TIMES = [
        60 * 60,  # 1 hour
        60 * 60 * 2,  # 2 hours
        60 * 60 * 4,  # 4 hours
        60 * 60 * 8,  # 8 hours
        60 * 60 * 12,  # 12 hours
        60 * 60 * 24,  # 24 hours
    ]

    # load the training and testing data, then scale it into the
    # range [0, 1]
    print("[INFO] loading CIFAR-10 data...")
    ((trainX, trainY), (testX, testY)) = cifar10.load_data()
    trainX = trainX.astype("float") / 255.0
    testX = testX.astype("float") / 255.0

    # initialize the label names for the CIFAR-10 dataset
    labelNames = [
        "airplane", "automobile", "bird", "cat", "deer", "dog", "frog",
        "horse", "ship", "truck"
    ]

    # loop over the number of seconds to allow the current Auto-Keras
    # model to train for
    for seconds in TRAINING_TIMES:
        # train our Auto-Keras model
        print("[INFO] training model for {} seconds max...".format(seconds))
        model = ak.ImageClassifier(verbose=True)
        model.fit(trainX, trainY, time_limit=seconds)
        model.final_fit(trainX, trainY, testX, testY, retrain=True)

        # evaluate the Auto-Keras model
        score = model.evaluate(testX, testY)
        predictions = model.predict(testX)
        report = classification_report(testY,
                                       predictions,
                                       target_names=labelNames)

        # write the report to disk
        p = os.path.sep.join([OUTPUT_PATH, "{}.txt".format(seconds)])
        f = open(p, "w")
        f.write(report)
        f.write("\nscore: {}".format(score))
        f.close()
Example #22
    def generate_model(self,
                       x_train,
                       y_train,
                       x_validate,
                       y_validate,
                       x_test,
                       y_test,
                       class_weights=None):
        """Use AutoKeras to run a Neural Architecture Search"""

        model = ak.ImageClassifier(
            # Select a random seed value
            seed=29,
            # Decide how many models to try out
            max_trials=15,
            # Set our auto_model directory for storing candidate models
            directory=self.auto_model_dir)
        model.fit(
            x_train,
            y_train,
            # Weight classes to compensate for class imbalance
            class_weight=class_weights,
            # Pass in our validation data
            validation_data=(x_validate, y_validate),
            # Set our callbacks
            callbacks=[self.tensorboard_callback],
            # Set our batch size per epoch
            batch_size=self.batch_size)

        # The best model found during the Neural Architecture Search
        model_name = "autokeras_model_" + self.time
        model_path = os.path.join(self.project_dir, "models", model_name)
        os.makedirs(os.path.dirname(model_path), exist_ok=True)
        model = model.export_model()
        model.save(model_path + ".h5")
        tflite_model = lite.TFLiteConverter.from_keras_model(model).convert()
        open(model_path + ".tflite", "wb").write(tflite_model)

        # Evaluate the model on the test data using `evaluate`
        print('\n# Evaluate on test data')
        results = model.evaluate(x_test, y_test)
        print('test loss, test acc:', results)

        # Generate some predictions
        print('\n# Generate predictions')
        print("INFO: Predictions is {}".format(model.predict(x_test)))
Example #23
 def build_model(self) -> ak.AutoModel:
     model = None
     if self.data_type == 'image':
         if self.task_type == 'regression':
             model = ak.ImageRegressor()
         elif self.task_type == 'classification':
             model = ak.ImageClassifier()
     elif self.data_type == 'text':
         if self.task_type == 'regression':
             model = ak.TextRegressor()
         elif self.task_type == 'classification':
             model = ak.TextClassifier()
     elif self.data_type == 'csv':
         if self.task_type == 'regression':
             model = ak.StructuredDataRegressor()
         elif self.task_type == 'classification':
             model = ak.StructuredDataClassifier()
     return model
Example #24
def build_model_basic_autokeras(X_train,
                                X_test,
                                y_train,
                                y_test,
                                view_key=None):
    view_field = {"cronal": (46, 53), "sagittal": (46, 63), "axial": (63, 53)}
    input_shape = (63, 53, 1)
    if view_key:
        input_shape = (view_field[view_key][0], view_field[view_key][1], 1)

    import autokeras as ak
    clf = ak.ImageClassifier(verbose=True)
    clf.fit(X_train, y_train)
    clf.final_fit(X_train, y_train, X_test, y_test, retrain=True)
    results = clf.evaluate(X_test, y_test)
    #results = clf.predict(X_test)
    print(results)
Example #25
    def __init__(self,
                 problem_description,
                 backend,
                 max_trials=10000,
                 directory='.',
                 epochs=1,
                 batch_size=32,
                 validation_split=0.2):
        super(AutoKerasSearch, self).__init__(problem_description,
                                              backend,
                                              ranking_function=None)

        self.clf = ak.ImageClassifier(max_trials=max_trials,
                                      seed=self.random_seed,
                                      directory=directory)
        self.tuner = self.clf.tuner
        self.epochs = epochs
        self.batch_size = batch_size
        self.validation_split = validation_split
Example #26
def main():
    # initialize the output directory
    outputPath = "output"

    # initialize the list of training times
    # that we allow autokeras to train
    trainingTimes = [
        60 * 60,     # 1 hour
        60 * 60 * 2  # 2 hours
    ] 

    # load the training and testing data
    print("Loading CIFAR-10 dataset...")
    ((trainX, trainY), (testX, testY)) = cifar10.load_data()
    
    # scale the data into the range [0, 1]
    trainX = trainX.astype("float") / 255.0
    testX = testX.astype("float") / 255.0

    # initialize the label names for the CIFAR-10 dataset
    labels = ["airplane", "automobile", "bird", "cat", "deer",
		"dog", "frog", "horse", "ship", "truck"]

    # loop over training times list
    for trainingTime in trainingTimes:
        print("Training model for {} seconds...".format(trainingTime))
        model = ak.ImageClassifier(verbose=True)
        model.fit(trainX, trainY, time_limit=trainingTime)
        model.final_fit(trainX, trainY, testX, testY, retrain=True)

        # evaluate the model
        score = model.evaluate(testX, testY)
        preds = model.predict(testX)
        report = classification_report(testY, preds, target_names=labels)

        # save the report to disk
        p = os.path.sep.join([outputPath, "{}.txt".format(trainingTime)])
        f = open(p, 'w')
        f.write(report)
        f.write("\nscore: {}".format(score))
        f.close()
Example #27
def main():
    output_path = './output'
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    def hours_to_seconds(hours):
        return hours * 60 * 60

    training_times = map(hours_to_seconds, [1, 2, 4, 8, 12, 24])

    if 'time' in arguments:
        training_times = [hours_to_seconds(arguments['time'])]

    print('[INFO] Loading CIFAR-10 dataset.')
    ((X_train, y_train), (X_test, y_test)) = cifar10.load_data()

    # Now, we need to normalize the data
    X_train = X_train.astype('float') / 255.0
    X_test = X_test.astype('float') / 255.0

    for seconds in training_times:
        print(f'[INFO] Training model for at most {seconds} seconds.')

        classifier = autokeras.ImageClassifier(verbose=True)

        # Trains and tries to find the best architecture.
        classifier.fit(X_train, y_train, time_limit=seconds)

        # Trains the best found architecture.
        classifier.final_fit(X_train, y_train, X_test, y_test, retrain=True)

        print('[INFO] Evaluating model.')
        score = classifier.evaluate(X_test, y_test)
        predictions = classifier.predict(X_test)
        report = classification_report(y_test, predictions, target_names=CIFAR_10_LABELS)

        print('[INFO] Saving report to disk.')
        path = os.path.sep.join([output_path, f'{seconds}.txt'])
        with open(path, 'w') as f:
            f.write(report)
            f.write(f'\nScore: {score}')
Example #28
def main():
    ak.constant.Constant.MAX_BATCH_SIZE = 16
    ak.constant.Constant.MAX_LAYERS = 5
    ((trainX, trainY), (testX, testY)) = keras.datasets.cifar10.load_data()
    trainX = trainX.astype("float") / 255.0
    testX = testX.astype("float") / 255.0

    labels = [
        "airplane", "automobile", "bird", "cat", "deer", "dog", "frog",
        "horse", "ship", "truck"
    ]
    seconds = 3600

    model = ak.ImageClassifier(verbose=True)
    model.fit(trainX, trainY, time_limit=seconds)
    model.final_fit(trainX, trainY, testX, testY, retrain=True)

    #evaluate the model
    score = model.evaluate(testX, testY)
    predictions = model.predict(testX)
    report = classification_report(testY, predictions, target_names=labels)

    print(report)
Example #29
def search_autokeras(args):
    """
    Just train.
    :param args: Just args.
    :return:
    """
    pool = Pool(cpu_count() - 2)
    if exist_pkl(args.base) and not args.force:
        print_("use the existing pkl file")
        train_collection, test_collection = get_pkl(args.base)
    else:
        print_("prepare data and dump into pkl file")
        collection = pre_prepare(cvt_abs_path(args.base), data_prepare,
                                 file_filter)
        train_collection, test_collection = split_train_test_set(collection)

    train_collection = pool.map(multi_prepare_record, train_collection)
    test_collection = pool.map(multi_prepare_record, test_collection)

    train_batch = list(zip(pool.map(generate_x_y_, train_collection)))
    test_batch = list(zip(pool.map(generate_x_y_, test_collection)))
    print('train size:', len(train_batch))
    print('test size:', len(test_batch))

    x_train = np.concatenate([e[0][0] for e in train_batch])
    y_train = np.concatenate([e[0][1] for e in train_batch])

    x_test = np.concatenate([e[0][0] for e in test_batch])
    y_test = np.concatenate([e[0][1] for e in test_batch])

    clf = ak.ImageClassifier(max_trials=10)
    clf.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=100)
    model = clf.export_model()
    target_folder = '/'.join([args.base, 'stock'])
    make_dirs(target_folder)
    target_path = '/'.join([target_folder, 'best.h5'])
    model.save(target_path)
Example #30
    def __init__(self,
                 model_pars=None,
                 data_pars=None,
                 compute_pars=None,
                 out_pars=None):
        ### Model Structure        ################################

        if model_pars is None:
            self.model = None
            return

        # Initialize the text classifier.
        # It tries n different models.
        if model_pars["model_name"] == "text":
            # Initialize the TextClassifier
            self.model = ak.TextClassifier(max_trials=model_pars['max_trials'])
        elif model_pars["model_name"] == "vision":
            # Initialize the ImageClassifier.
            self.model = ak.ImageClassifier(
                max_trials=model_pars['max_trials'])
        elif model_pars["model_name"] == "tabular_classifier":
            # Initialize the classifier.
            self.model = ak.StructuredDataClassifier(
                max_trials=model_pars['max_trials'])