def get_data():
    """Fetch the MNIST gzipped pickle and return the train/valid splits.

    Returns a map yielding tensor(x_train), tensor(y_train),
    tensor(x_valid), tensor(y_valid); the test split is discarded.
    """
    archive = datasets.download_data(MNIST_URL, ext='.gz')
    with gzip.open(archive, 'rb') as stream:
        train_pair, valid_pair, _ = pickle.load(stream, encoding='latin-1')
    x_train, y_train = train_pair
    x_valid, y_valid = valid_pair
    return map(tensor, (x_train, y_train, x_valid, y_valid))
def get_data(url='http://deeplearning.net/data/mnist/mnist.pkl'):
    """Download an MNIST-format gzipped pickle and return it as tensors.

    Parameters
    ----------
    url : str, optional
        Location of the gzipped pickle.  Defaults to the URL the function
        previously hard-coded, so existing zero-argument callers behave
        identically.

    Returns
    -------
    A map yielding tensor(x_train), tensor(y_train), tensor(x_valid),
    tensor(y_valid); the pickle's third (test) split is discarded.
    """
    path = datasets.download_data(url, ext='.gz')
    with gzip.open(path, 'rb') as f:
        # latin-1 is required to unpickle the Python-2-era MNIST archive.
        ((x_train, y_train), (x_valid, y_valid), _) = pickle.load(f, encoding='latin-1')
    return map(tensor, (x_train, y_train, x_valid, y_valid))
def load_data() -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
    """Fetch MNIST, one-hot-encode the labels, and row-normalize the images.

    Returns
    -------
    (x_train, y_train, x_valid, y_valid) as numpy arrays; the pickle's
    third (test) split is discarded.
    """
    archive = datasets.download_data(MNIST_URL, ext=".gz")
    with gzip.open(archive, "rb") as fh:
        train_split, valid_split, _ = pickle.load(fh, encoding="latin-1")
    x_train, y_train = train_split
    x_valid, y_valid = valid_split
    # Labels become one-hot matrices; images are normalized per row.
    y_train = index_to_one_hot(y_train)
    y_valid = index_to_one_hot(y_valid)
    x_train = normalize_row_wise(x_train)
    x_valid = normalize_row_wise(x_valid)
    return x_train, y_train, x_valid, y_valid
def loadMNIST():
    """Download the MNIST pickle and return (x_train, y_train, x_valid, y_valid) as tensors."""
    MNIST_URL = 'http://deeplearning.net/data/mnist/mnist.pkl'
    archive = datasets.download_data(MNIST_URL, ext='.gz')
    with gzip.open(archive, 'rb') as fh:
        train_pair, valid_pair, _ = pickle.load(fh, encoding='latin-1')
    # Flatten the two (x, y) pairs into the conventional four-array order.
    return map(tensor, (*train_pair, *valid_pair))
def get_mnist_data():
    """Download the MNIST gzipped pickle and return train/valid tensors.

    Returns a map yielding tensor(x_train), tensor(y_train),
    tensor(x_valid), tensor(y_valid); the test split is discarded.
    """
    # Fetch (or reuse) the archive, then unpickle its three splits.
    archive = datasets.download_data(MNIST_URL, ext='.gz')
    with gzip.open(archive, 'rb') as fh:
        train_pair, valid_pair, _ = pickle.load(fh, encoding='latin-1')
    x_train, y_train = train_pair
    x_valid, y_valid = valid_pair
    # Numpy arrays -> torch tensors, lazily via map.
    return map(tensor, (x_train, y_train, x_valid, y_valid))
def get_data(name):
    """Download a registered dataset by name and post-process it.

    Parameters
    ----------
    name : str
        Key into the module-level ``name2url`` (download URL) and
        ``name2fn`` (post-processing callable) registries.

    Returns
    -------
    Whatever ``name2fn[name]`` returns for the four unpickled arrays
    (x_train, y_train, x_valid, y_valid).

    Raises
    ------
    ValueError
        If ``name`` is missing from either registry.  ValueError is a
        subclass of Exception, so callers catching the old generic
        Exception still work.
    """
    if name not in name2url or name not in name2fn:
        # Include the offending name so the failure is diagnosable.
        raise ValueError(f'Unrecognized dataset: {name!r}')
    path = datasets.download_data(name2url[name], ext='.gz')
    with gzip.open(path, 'rb') as f:
        # latin-1 is required to unpickle the Python-2-era archives.
        ((xt, yt), (xv, yv), _) = pickle.load(f, encoding='latin-1')
    return name2fn[name](xt, yt, xv, yv)
def load_data():
    """Download MNIST and return (x_train, y_train, x_valid, y_valid) as tensors.

    The pickle also contains a test split, which this loader discards.
    """
    MNIST_URL = "http://deeplearning.net/data/mnist/mnist.pkl"
    path = datasets.download_data(MNIST_URL, ext=".gz")
    with gzip.open(path, "rb") as f:
        # latin-1 is required to unpickle the Python-2-era MNIST archive.
        # The third (test) split is unused, hence `_`.
        ((x_train, y_train), (x_valid, y_valid), _) = pickle.load(f, encoding="latin-1")
    # Numpy arrays -> torch tensors.  The original body also computed
    # `n, c = x_train.shape` and a no-op `img.view(28, 28).type()` plus
    # commented-out plotting; all dead code, removed.
    (x_train, y_train, x_valid, y_valid) = map(tensor, (x_train, y_train, x_valid, y_valid))
    return (x_train, y_train, x_valid, y_valid)
# To run tests in console: # ! python run_notebook.py 01_matmul.ipynb # ## Get data #export from pathlib import Path from IPython.core.debugger import set_trace from fastai import datasets import pickle, gzip, math, torch, matplotlib as mpl import matplotlib.pyplot as plt from torch import tensor MNIST_URL = 'http://deeplearning.net/data/mnist/mnist.pkl' path = datasets.download_data(MNIST_URL, ext='.gz') path with gzip.open(path, 'rb') as f: ((x_train, y_train), (x_valid, y_valid), _) = pickle.load(f, encoding='latin-1') x_train, y_train, x_valid, y_valid = map(tensor, (x_train, y_train, x_valid, y_valid)) n, c = x_train.shape x_train, x_train.shape, y_train, y_train.shape, y_train.min(), y_train.max() assert n == y_train.shape[0] == 50000 test_eq(c, 28 * 28) test_eq(y_train.min(), 0) test_eq(y_train.max(), 9)
def get_mnist(url=MNIST_URL):
    """Download an MNIST-format gzipped pickle from *url*.

    Returns a map yielding tensor(x_train), tensor(y_train),
    tensor(x_valid), tensor(y_valid); the test split is discarded.
    """
    local_path = datasets.download_data(url, ext=".gz")
    with gzip.open(local_path, "rb") as fh:
        train_pair, valid_pair, _ = pickle.load(fh, encoding="latin-1")
    # Flatten the (x, y) pairs into the conventional four-array order.
    return map(tensor, (*train_pair, *valid_pair))
################################################# ### THIS FILE WAS AUTOGENERATED! DO NOT EDIT! ### ################################################# # file to edit: dev_nb/Lesson1_matmul.ipynb from pathlib import Path from fastai import datasets as FA_datasets import pickle, gzip, math, torch, matplotlib as mpl import matplotlib.pyplot as plt MNIST_URL = 'http://deeplearning.net/data/mnist/mnist.pkl' # Path.home() # real home dir of the computer HOME_DIR = Path('.').resolve() DATA_DIR = HOME_DIR / "data" HOME_DIR, DATA_DIR fname = DATA_DIR / 'mnist.pkl.gz' fpath = FA_datasets.download_data(MNIST_URL, fname, ext='.gz') fpath
def get_data(path_in, encoding_in='latin-1'):
    """Download the gzipped pickle at *path_in* and return train/valid tensors.

    Parameters
    ----------
    path_in : str
        URL of the gzipped pickle to download.
    encoding_in : str, optional
        Encoding passed to ``pickle.load`` (default 'latin-1').

    Returns a map yielding tensor(x_train), tensor(y_train),
    tensor(x_valid), tensor(y_valid); the third split is discarded.
    """
    cached = datasets.download_data(path_in, ext='.gz')
    with gzip.open(cached, 'rb') as fh:
        train_pair, valid_pair, _ = pickle.load(fh, encoding=encoding_in)
    x_train, y_train = train_pair
    x_valid, y_valid = valid_pair
    return map(tensor, (x_train, y_train, x_valid, y_valid))