# CIFAR-10 experiment: 12-hour architecture search, then final_fit and evaluate.
import os

import GPUtil
import numpy as np
from keras.datasets import cifar10

from autokeras.classifier import ImageClassifier


def select_gpu():
    try:
        # Get the first available GPU
        DEVICE_ID_LIST = GPUtil.getFirstAvailable()
        DEVICE_ID = DEVICE_ID_LIST[0]  # grab first element from list

        # Set CUDA_VISIBLE_DEVICES to mask out all other GPUs than the first available device id
        os.environ["CUDA_VISIBLE_DEVICES"] = str(DEVICE_ID)
    except FileNotFoundError:
        print("GPU not found")


if __name__ == '__main__':
    select_gpu()
    # constant.LIMIT_MEMORY = True
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    clf = ImageClassifier(path='/home/haifeng/cifar10', verbose=True)
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)
    clf.final_fit(x_train, y_train, x_test, y_test)
    y = clf.evaluate(x_test, y_test)
    print(y)

    # Optional 10-fold cross-validation over the full dataset
    # scores = clf.cross_validate(X, Y, 10)
    # print(scores)
    # print(np.mean(scores))
    # print(np.std(scores))
# Sweep the kernel_lambda hyper-parameter of the Bayesian searcher on CIFAR-10 (0.01, 0.1, 1, 10).
import os

import GPUtil
import numpy as np
from keras.datasets import cifar10

from autokeras.classifier import ImageClassifier


def select_gpu():
    try:
        # Get the first available GPU and mask out all others
        DEVICE_ID = GPUtil.getFirstAvailable()[0]
        os.environ["CUDA_VISIBLE_DEVICES"] = str(DEVICE_ID)
    except FileNotFoundError:
        print("GPU not found")


if __name__ == '__main__':
    select_gpu()
    # constant.LIMIT_MEMORY = True
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    kernel_lambda = 0.01
    for i in range(4):
        clf = ImageClassifier(searcher_type='bayesian',
                              path='/home/haifeng/lambda',
                              verbose=True,
                              searcher_args={'trainer_args': {'max_iter_num': 10},
                                             'default_model_len': 10,
                                             'kernel_lambda': kernel_lambda})
        clf.fit(x_train, y_train, time_limit=3 * 60 * 60)
        # clf.final_fit(x_train, y_train, x_test, y_test)
        y = clf.evaluate(x_test, y_test)
        print(kernel_lambda, y)
        kernel_lambda *= 10

    # scores = clf.cross_validate(X, Y, 10)
    # print(scores)
    # print(np.mean(scores))
    # print(np.std(scores))
# CIFAR-10 experiment with the Bayesian searcher and max_iter_num set to 0.
import os

import GPUtil
import numpy as np
from keras.datasets import cifar10

from autokeras.classifier import ImageClassifier


def select_gpu():
    try:
        # Get the first available GPU and mask out all others
        DEVICE_ID = GPUtil.getFirstAvailable()[0]
        os.environ["CUDA_VISIBLE_DEVICES"] = str(DEVICE_ID)
    except FileNotFoundError:
        print("GPU not found")


if __name__ == '__main__':
    select_gpu()
    # constant.LIMIT_MEMORY = True
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    trainer_args = {'max_iter_num': 0}
    clf = ImageClassifier(searcher_type='bayesian',
                          path='/Users/haifeng/cifar102',
                          verbose=True,
                          searcher_args={'trainer_args': trainer_args})
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)
    # clf.final_fit(x_train, y_train)
    y = clf.evaluate(x_test, y_test)
    print(y)

    # scores = clf.cross_validate(X, Y, 10)
    # print(scores)
    # print(np.mean(scores))
    # print(np.std(scores))
from keras.datasets import mnist

from autokeras.classifier import ImageClassifier
from autokeras.constant import Constant

if __name__ == '__main__':
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    x_train = x_train.reshape(x_train.shape + (1,))
    x_test = x_test.reshape(x_test.shape + (1,))

    Constant.SEARCH_MAX_ITER = 0
    clf = ImageClassifier(verbose=True)
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)
    clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
    y = clf.evaluate(x_test, y_test)
    print(y)
# Sweep the beta hyper-parameter of the Bayesian searcher on CIFAR-10 (0.01, 0.1, 1, 10).
import os

import GPUtil
import numpy as np
from keras.datasets import cifar10

from autokeras.classifier import ImageClassifier


def select_gpu():
    try:
        # Get the first available GPU and mask out all others
        DEVICE_ID = GPUtil.getFirstAvailable()[0]
        os.environ["CUDA_VISIBLE_DEVICES"] = str(DEVICE_ID)
    except FileNotFoundError:
        print("GPU not found")


if __name__ == '__main__':
    select_gpu()
    # constant.LIMIT_MEMORY = True
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    beta = 0.01
    for i in range(4):
        clf = ImageClassifier(searcher_type='bayesian',
                              path='/Users/haifeng/beta',
                              verbose=True,
                              searcher_args={'trainer_args': {'max_iter_num': 10},
                                             'default_model_len': 10,
                                             'beta': beta})
        clf.fit(x_train, y_train, time_limit=30)
        # clf.final_fit(x_train, y_train, x_test, y_test)
        y = clf.evaluate(x_test, y_test)
        print(beta, y)
        beta *= 10

    # scores = clf.cross_validate(X, Y, 10)
    # print(scores)
    # print(np.mean(scores))
    # print(np.std(scores))
# MNIST experiment with the random searcher, pinned to GPU 0, followed by 10-fold cross-validation.
import os

import GPUtil
import numpy as np
from keras.datasets import mnist

from autokeras import constant
from autokeras.classifier import ImageClassifier


def select_gpu():
    try:
        # Get the first available GPU and mask out all others
        DEVICE_ID = GPUtil.getFirstAvailable()[0]
        os.environ["CUDA_VISIBLE_DEVICES"] = str(DEVICE_ID)
    except FileNotFoundError:
        print("GPU not found")


if __name__ == '__main__':
    # select_gpu()
    os.environ["CUDA_VISIBLE_DEVICES"] = str(0)
    constant.LIMIT_MEMORY = True
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    x_train = x_train.reshape(x_train.shape + (1,))
    x_test = x_test.reshape(x_test.shape + (1,))
    print(x_test.shape)
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    clf = ImageClassifier(searcher_type='random', path='/tmp/mnist_random/', verbose=False)
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)
    # clf.final_fit(x_train, y_train)
    y = clf.evaluate(x_test, y_test)
    print(y)

    # 10-fold cross-validation over the full dataset
    scores = clf.cross_validate(X, Y, 10)
    print(scores)
    print(np.mean(scores))
    print(np.std(scores))
import sys

import numpy as np
from keras.datasets import mnist

from autokeras import constant
from autokeras.classifier import ImageClassifier

if __name__ == '__main__':
    constant.MAX_MODEL_NUM = 1
    constant.MAX_ITER_NUM = 1
    constant.EPOCHS_EACH = 1
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    # The searcher type and working directory are taken from the command line.
    clf = ImageClassifier(searcher_type=sys.argv[1], path=sys.argv[2], verbose=False)
    clf.fit(x_train, y_train)
    y = clf.evaluate(x_test, y_test)

    # 2-fold cross-validation over the full dataset
    scores = clf.cross_validate(X, Y, 2)
    print(np.mean(scores))
    print(np.std(scores))
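# Example invocation of the benchmark script above. The file name benchmark.py
# and the second path are illustrative, not part of the original; the first
# positional argument selects the searcher type and the second the working
# directory, matching sys.argv[1] and sys.argv[2]:
#
#     python benchmark.py random /tmp/mnist_random
#     python benchmark.py bayesian /tmp/mnist_bayesian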
# MNIST experiment with the Bayesian searcher and custom trainer arguments.
import os

import GPUtil
import numpy as np
from keras.datasets import mnist
from torch.optim import Adam  # assumed source of the Adam optimizer passed in trainer_args

from autokeras.classifier import ImageClassifier


def select_gpu():
    try:
        # Get the first available GPU and mask out all others
        DEVICE_ID = GPUtil.getFirstAvailable()[0]
        os.environ["CUDA_VISIBLE_DEVICES"] = str(DEVICE_ID)
    except FileNotFoundError:
        print("GPU not found")


if __name__ == '__main__':
    select_gpu()
    # os.environ["CUDA_VISIBLE_DEVICES"] = str(3)
    # constant.LIMIT_MEMORY = True
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    x_train = x_train.reshape(x_train.shape + (1,))
    x_test = x_test.reshape(x_test.shape + (1,))
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    trainer_args = {'max_iter_num': 12,
                    'batch_size': 128,
                    'optimizer': Adam,
                    'augment': False}
    clf = ImageClassifier(searcher_type='bayesian',
                          path='/home/haifeng/mnist',
                          verbose=True,
                          trainer_args=trainer_args)
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)
    clf.final_fit(x_train, y_train, x_test, y_test, trainer_args, retrain=True)
    y = clf.evaluate(x_test, y_test)
    print(y)

    # model = clf.load_searcher().load_best_model()
    clf.verbose = True
    # scores = clf.cross_validate(X, Y, 10)
    # print(scores)
    # print(np.mean(scores))
    # print(np.std(scores))
# Fashion-MNIST experiment with the Bayesian searcher.
import os

import GPUtil
import numpy as np
from keras.datasets import fashion_mnist

from autokeras.classifier import ImageClassifier


def select_gpu():
    try:
        # Get the first available GPU and mask out all others
        DEVICE_ID = GPUtil.getFirstAvailable()[0]
        os.environ["CUDA_VISIBLE_DEVICES"] = str(DEVICE_ID)
    except FileNotFoundError:
        print("GPU not found")


if __name__ == '__main__':
    select_gpu()
    # os.environ["CUDA_VISIBLE_DEVICES"] = str(3)
    # constant.LIMIT_MEMORY = True
    (x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
    x_train = x_train.reshape(x_train.shape + (1,))
    x_test = x_test.reshape(x_test.shape + (1,))
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    clf = ImageClassifier(searcher_type='bayesian',
                          path='/home/haifeng/fashion',
                          verbose=True,
                          searcher_args={'trainer_args': {'max_iter_num': 10, 'augment': False},
                                         'default_model_len': 3})
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)
    clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
    y = clf.evaluate(x_test, y_test)
    print(y)

    # scores = clf.cross_validate(X, Y, 10)
    # print(scores)
    # print(np.mean(scores))
    # print(np.std(scores))
# Load the MNIST data and the Auto-Keras image classifier
from keras.datasets import mnist

from autokeras.classifier import ImageClassifier

# Set up the training and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape + (1,))
x_test = x_test.reshape(x_test.shape + (1,))

clf = ImageClassifier(verbose=True, searcher_args={'trainer_args': {'max_iter_num': 5}})
clf.fit(x_train, y_train, time_limit=5 * 60 * 60)
clf.final_fit(x_train, y_train, x_test, y_test, retrain=False, trainer_args={'max_iter_num': 10})
y = clf.evaluate(x_test, y_test)
print(y * 100)

# Inspect the best architecture found by the searcher
best_model = clf.load_searcher().load_best_model()
print(best_model.n_layers)
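# A minimal inference sketch, assuming clf from above is still in scope:
# predict(), which the CSV-loading example in this repository also uses,
# returns class labels for raw image arrays; comparing them with y_test
# gives a quick sanity check.
predictions = clf.predict(x_test[:10])
print(predictions)
print(y_test[:10])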
from autokeras.classifier import load_image_dataset
from autokeras.classifier import ImageClassifier

if __name__ == '__main__':
    x_train, y_train = load_image_dataset(csv_file_path="train2/label.csv",
                                          images_path="train2")
    print(x_train.shape)
    print(y_train.shape)

    x_test, y_test = load_image_dataset(csv_file_path="test2/label.csv",
                                        images_path="test2")
    print(x_test.shape)
    print(y_test.shape)

    clf = ImageClassifier()
    clf.fit(x_train, y_train)
    results = clf.predict(x_test)
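    # A minimal follow-up sketch, assuming the labelled test data loaded above:
    # print the raw predictions and score the classifier with the same
    # evaluate() call used by the other examples in this repository.
    print(results)
    print(clf.evaluate(x_test, y_test))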