Esempio n. 1
0
def build_dataloader():
    """Build a DataLoader over the MNIST training split.

    Returns a loader that yields shuffled batches of 64 samples, each
    normalized on the 0-255 pixel scale, padded by 2 pixels per side,
    and converted to CHW layout.
    """
    dataset = MNIST(root=gettempdir(), train=True, download=True)
    # mean/std are given on the 0-255 scale to match the raw pixel values.
    preprocessing = Compose(
        [
            Normalize(mean=0.1307 * 255, std=0.3081 * 255),
            Pad(2),
            ToMode("CHW"),
        ]
    )
    batch_sampler = RandomSampler(dataset=dataset, batch_size=64)
    return DataLoader(dataset, transform=preprocessing, sampler=batch_sampler)
Esempio n. 2
0
def test_stream_dataloader(batch, num_workers):
    """Run a streaming dataset through DataLoader and check the first
    10 batches: each must have the expected shapes, and no label value
    may appear twice across the sampled batches.
    """
    stream = MyStream(100, batch=batch)
    loader = DataLoader(
        stream,
        StreamSampler(batch_size=4),
        Compose([Normalize(mean=(103, 116, 123), std=(57, 57, 58)), ToMode("CHW")]),
        num_workers=num_workers,
        preload=True,
    )

    seen = set()
    for idx, item in enumerate(loader):
        # Only inspect the first 10 batches from the stream.
        if idx >= 10:
            break
        images, labels = item[0], item[1]
        assert images._tuple_shape == (4, 3, 2, 2)
        assert labels._tuple_shape == (4,)
        for label in labels:
            # Each label must be new — the stream should not repeat items.
            assert label not in seen
            seen.add(label)
Esempio n. 3
0
# NOTE(review): "star" looks like a typo for "start" (records the script's
# start timestamp) — left unchanged because code past this chunk may
# reference the name as-is.
star = time.time()
# File the model is presumably serialized to later — verify against the
# save call further down the script.
path = "unet_j.mge"


@trace
def train_func(data, label, net=None, optimizer=None):
    """One traced training step.

    Runs the forward pass, computes softmax cross-entropy against
    *label*, and backpropagates through *optimizer*.

    Returns:
        (pred, loss): the network output and the scalar loss.
    """
    # Put the network into training mode before the forward pass.
    net.train()
    logits = net(data)
    step_loss = F.cross_entropy_with_softmax(logits, label)
    optimizer.backward(step_loss)
    return logits, step_loss


# Dataset yielding (image, mask) pairs from the local training folder.
train_dataset = u_data("./data/train", order=["image", "mask"])
dataloader = DataLoader(train_dataset,
                        transform=Compose([ToMode('CHW')]),  # HWC -> CHW only
                        sampler=RandomSampler(dataset=train_dataset,
                                              batch_size=4,
                                              drop_last=True))

# Presumably 1 input channel, 4 output classes — TODO confirm against the
# Unet constructor signature.
unet = Unet(1, 4)
optimizer = optim.SGD(unet.parameters(), lr=0.05)

trace.enabled = True

total_epochs = 50
# Large sentinel; presumably tracks the best (lowest) loss so far — verify
# against the comparison inside the (truncated) training loop.
loss_src = 100000000
for epoch in range(total_epochs):
    # NOTE(review): the loop body continues past this chunk boundary.
    total_loss = 0
    correct = 0
    total = 0
Esempio n. 4
0
# Local MNIST cache; download=False assumes the files are already on disk.
root_dir = '/data/.cache/dataset/MNIST'
mnist_train_dataset = MNIST(root=root_dir, train=True, download=False)

mnist_test_dataset = MNIST(root=root_dir, train=False, download=False)

# Shuffled batches for training, deterministic order for evaluation.
random_sampler = RandomSampler(dataset=mnist_train_dataset, batch_size=256)
sequential_sampler = SequentialSampler(dataset=mnist_test_dataset,
                                       batch_size=256)

mnist_train_dataloader = DataLoader(
    dataset=mnist_train_dataset,
    sampler=random_sampler,
    transform=Compose([
        RandomResizedCrop(output_size=28),
        # mean and std below are the MNIST dataset statistics; the pixel
        # values are on the 0-255 scale.
        #Normalize(mean=0.1307*255, std=0.3081*255),
        #Pad(2),
        # 'CHW' means converting the image from (height, width, channel)
        # layout to (channel, height, width) layout.
        #ToMode('CHW'),
    ]))
# The test loader applies no transform — samples come through unmodified.
mnist_test_dataloader = DataLoader(
    dataset=mnist_test_dataset,
    sampler=sequential_sampler,
)

# model
from model import get_net

net = get_net()

# NOTE(review): this call is truncated at the chunk boundary.
optimizer = optim.Adam(
    net.parameters(),
Esempio n. 5
0
from megengine.data import DataLoader
from megengine.data.transform import ToMode, Pad, Normalize, Compose
from megengine.data.sampler import RandomSampler, SequentialSampler

# If running in the MegStudio environment, set MNIST_DATA_PATH to
# /home/megstudio/dataset/MNIST/ instead.
MNIST_DATA_PATH = "./datasets/MNIST/"

# Load the train/test splits; if the dataset is not present locally, set
# the download argument to True.
train_dataset = MNIST(root=MNIST_DATA_PATH, train=True, download=False)
test_dataset = MNIST(root=MNIST_DATA_PATH, train=False, download=False)

batch_size = 64
# Create samplers: shuffled batches for training, fixed order for test.
train_sampler = RandomSampler(train_dataset, batch_size=batch_size)
test_sampler = SequentialSampler(test_dataset, batch_size=batch_size)

# Preprocessing pipeline: normalize on the 0-255 pixel scale (MNIST mean
# and std), pad 2 pixels per side, then convert HWC -> CHW layout.
transform = Compose([
    Normalize(mean=0.1307 * 255, std=0.3081 * 255),
    Pad(2),
    ToMode('CHW'),
])

# Create the DataLoaders (positional args: dataset, sampler, transform).
train_dataloader = DataLoader(train_dataset, train_sampler, transform)
test_dataloader = DataLoader(test_dataset, test_sampler, transform)

# Peek at a single batch to confirm the tensor shapes.
for X, y in train_dataloader:
    print("Shape of X: ", X.shape)  # [N, C, H, W]
    print("Shape of y: ", y.shape, y.dtype)
    break