def get_fashion_mnist_dataset(
    resize=None,
    root="./data-test/fashion-mnist",
    download=True,
    source_url=None,
):
    """Download the Fashion-MNIST dataset and return the train/test datasets."""
    root = os.path.expanduser(root)
    trans = []
    if resize:
        trans.append(transforms.Resize(resize))
    trans.append(transforms.ToTensor())
    transform = transforms.Compose(trans)
    mnist_train = vision.datasets.FashionMNIST(
        root=root,
        train=True,
        transform=transform,
        download=download,
        source_url=source_url,
    )
    mnist_test = vision.datasets.FashionMNIST(
        root=root,
        train=False,
        transform=transform,
        download=download,
        source_url=source_url,
    )
    return mnist_train, mnist_test
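
# Illustrative usage sketch (an addition, not part of the original utilities):
# fetch the raw Fashion-MNIST datasets and inspect one sample. Indexing the
# dataset is assumed to return an (image, label) pair, as in torchvision;
# with only ToTensor() applied each image is a 1x28x28 float tensor.
def _demo_inspect_fashion_mnist():
    train_set, test_set = get_fashion_mnist_dataset()
    image, label = train_set[0]
    print("train samples:", len(train_set), "test samples:", len(test_set))
    print("sample shape:", tuple(image.shape), "label id:", label)
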
def load_data_mnist(
    batch_size, resize=None, root="./data/mnist", download=True, source_url=None
):
    """Download the MNIST dataset and then load into memory."""
    root = os.path.expanduser(root)
    transformer = []
    if resize:
        transformer += [transforms.Resize(resize)]
    transformer += [transforms.ToTensor()]
    transformer = transforms.Compose(transformer)
    mnist_train = vision.datasets.MNIST(
        root=root,
        train=True,
        transform=transformer,
        download=download,
        source_url=source_url,
    )
    mnist_test = vision.datasets.MNIST(
        root=root,
        train=False,
        transform=transformer,
        download=download,
        source_url=source_url,
    )
    train_iter = flow.utils.data.DataLoader(
        mnist_train, batch_size, shuffle=True, num_workers=2
    )
    test_iter = flow.utils.data.DataLoader(
        mnist_test, batch_size, shuffle=False, num_workers=2
    )
    return train_iter, test_iter
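
# Illustrative sketch (not in the original file): the resize argument controls
# the spatial size of the batches produced by the loaders. With resize=64 the
# 28x28 digits are resized to 64x64 before being converted to tensors, so a
# batch of 32 images is expected to have shape (32, 1, 64, 64).
def _demo_mnist_resize(batch_size=32):
    train_iter, _ = load_data_mnist(batch_size, resize=64)
    images, labels = next(iter(train_iter))
    print("images:", tuple(images.shape), "labels:", tuple(labels.shape))
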
def load_data_fashion_mnist(
    batch_size,
    resize=None,
    root="./data-test/fashion-mnist",
    download=True,
    source_url=None,
    num_workers=0,
):
    """Download the Fashion-MNIST dataset and then load into memory."""
    root = os.path.expanduser(root)
    trans = []
    if resize:
        trans.append(transforms.Resize(resize))
    trans.append(transforms.ToTensor())
    transform = transforms.Compose(trans)
    mnist_train = vision.datasets.FashionMNIST(
        root=root,
        train=True,
        transform=transform,
        download=download,
        source_url=source_url,
    )
    mnist_test = vision.datasets.FashionMNIST(
        root=root,
        train=False,
        transform=transform,
        download=download,
        source_url=source_url,
    )
    train_iter = flow.utils.data.DataLoader(
        mnist_train, batch_size, shuffle=True, num_workers=num_workers
    )
    test_iter = flow.utils.data.DataLoader(
        mnist_test, batch_size, shuffle=False, num_workers=num_workers
    )
    return train_iter, test_iter
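
# Hedged sketch (helper name and defaults are illustrative, not part of the
# original utilities): a quick way to compare data-loading throughput for
# different num_workers settings. Timings are indicative only and depend on
# disk and CPU.
import time


def _demo_loader_throughput(batch_size=256, num_workers=0, max_batches=50):
    train_iter, _ = load_data_fashion_mnist(batch_size, num_workers=num_workers)
    start = time.time()
    for i, _batch in enumerate(train_iter):
        if i + 1 >= max_batches:
            break
    elapsed = time.time() - start
    print(
        "num_workers=%d: %.2f s for %d batches" % (num_workers, elapsed, max_batches)
    )
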
def _test(test_case):
    if os.getenv("ONEFLOW_TEST_CPU_ONLY"):
        device = flow.device("cpu")
    else:
        device = flow.device("cuda")
    net = Net()
    net.to(device)

    optimizer = optim.SGD(net.parameters(), lr=0.002, momentum=0.9)
    criterion = nn.CrossEntropyLoss()
    criterion.to(device)

    transform = transforms.Compose(
        [
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ]
    )

    train_epoch = 1
    batch_size = 4
    num_workers = 0
    data_dir = os.path.join(
        os.getenv("ONEFLOW_TEST_CACHE_DIR", "./data-test"), "cifar10"
    )

    train_iter, test_iter = load_data_cifar10(
        batch_size=batch_size,
        data_dir=data_dir,
        download=True,
        transform=transform,
        source_url="https://oneflow-public.oss-cn-beijing.aliyuncs.com/datasets/cifar/cifar-10-python.tar.gz",
        num_workers=num_workers,
    )

    final_loss = 0
    for epoch in range(1, train_epoch + 1):  # loop over the dataset multiple times
        running_loss = 0.0
        for i, data in enumerate(train_iter, 1):
            # get the inputs; data is a list of [inputs, labels]
            inputs, labels = data
            inputs = inputs.to(dtype=flow.float32, device=device)
            labels = labels.to(dtype=flow.int64, device=device)

            # zero the parameter gradients
            optimizer.zero_grad()

            # forward + backward + optimize
            outputs = net(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()

            # print statistics
            running_loss += loss.item()
            if i % 200 == 0:  # print every 200 mini-batches
                final_loss = running_loss / 200
                print("epoch: %d step: %5d loss: %.3f " % (epoch, i, final_loss))
                running_loss = 0.0
                break

    print("final loss : ", final_loss)
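
# Hedged follow-up sketch (not part of the original test): evaluate the
# partially trained net on the CIFAR-10 test split. It assumes the OneFlow
# tensor API mirrors PyTorch for argmax / eq / sum / item, which holds for
# these calls.
def _evaluate(net, test_iter, device):
    correct, total = 0, 0
    net.eval()
    with flow.no_grad():
        for inputs, labels in test_iter:
            inputs = inputs.to(dtype=flow.float32, device=device)
            labels = labels.to(dtype=flow.int64, device=device)
            preds = net(inputs).argmax(dim=1)
            correct += preds.eq(labels).sum().item()
            total += labels.size(0)
    print("test accuracy: %.3f" % (correct / total))
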
# Load a test image
# -----------------
# Classic cat example!
from PIL import Image

img_url = "https://github.com/dmlc/mxnet.js/blob/main/data/cat.png?raw=true"
img_path = download_testdata(img_url, "cat.png", module="data")
img = Image.open(img_path).resize((224, 224))

# Preprocess the image and convert to tensor
from flowvision import transforms

my_preprocess = transforms.Compose(
    [
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ]
)
img = my_preprocess(img)
img = np.expand_dims(img.numpy(), 0)

######################################################################
# Import the graph to Relay
# -------------------------
# Convert OneFlow graph to Relay graph. The input name can be arbitrary.


class Graph(flow.nn.Graph):
    def __init__(self, module):
        super().__init__()
        self.m = module
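
    # Hedged continuation sketch: nn.Graph subclasses define a build() method
    # that describes the forward computation; the one-liner below is an
    # assumption about how this class is completed, not a verbatim copy of the
    # original tutorial.
    def build(self, x):
        return self.m(x)


# Example usage (commented out because "model", the eager OneFlow module
# being wrapped, is defined elsewhere and not part of this excerpt):
# graph = Graph(model)
# out = graph(flow.tensor(img, dtype=flow.float32))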