Example 1
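# Trains an OpenPose-style pose-estimation network: a VGG backbone feeding
# multi-stage PoseCNN heads sized from the dataset's skeleton and parts.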
import torch
from model import blocks
from model import networks
from data_loader.unified_dataloader import UnifiedKeypointDataloader
from model_fitting.train import fit
import os

th_count = 24

dataloader = UnifiedKeypointDataloader(batch_size=6, th_count=th_count)
backbone = networks.VGGNetBackbone(inplanes=64, block_counts=[2, 2, 4, 2])
net = networks.OpenPoseNet([backbone], 4, 1, blocks.PoseCNNStage, 10,
                           len(dataloader.trainloader.skeleton) * 2,
                           len(dataloader.trainloader.parts) + 1,
                           dataloader.trainloader.skeleton,
                           dataloader.trainloader.parts)
# net = networks.CocoPoseNet()

fit(net,
    dataloader.trainloader,
    dataloader.validationloader,
    postprocessing=dataloader.postprocessing,
    epochs=1000,
    lower_learning_period=3)
Example 2
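# Grid search over LSTM width (16, 32, ..., 512 hidden units) and depth (1-5
# layers); each configuration is checkpointed under its own width_depth prefix.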
import torch
# fit is assumed to be model_fitting.train.fit, as in the other examples here
from model_fitting.train import fit

th_count = 24  # dataloader worker count; assumed value, not shown in this snippet

# DatasetCreator and LSTMNet are assumed to be provided by the surrounding project
dataset_creator = DatasetCreator(root_dir='./dataset',
                                 names_file='data_loader/universalnames.json')

for i in range(6):
    for j in range(5):
        width = 16 * 2**i
        depth = 1 + j
        net = LSTMNet(len(dataset_creator.corpus), width, depth)
        net.cuda()

        trainset = dataset_creator.get_train_iterator()
        trainloader = torch.utils.data.DataLoader(trainset,
                                                  batch_size=256,
                                                  shuffle=True,
                                                  num_workers=th_count,
                                                  pin_memory=True)

        validationset = dataset_creator.get_validation_iterator()
        validationloader = torch.utils.data.DataLoader(validationset,
                                                       batch_size=1,
                                                       shuffle=False,
                                                       num_workers=th_count,
                                                       pin_memory=False)

        fit(net,
            trainloader,
            validationloader,
            chp_prefix="{}_{}".format(width, depth),
            epochs=100,
            lower_learning_period=10)
Example 3
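# Trains a 22-class ResNet classifier and logs loss and accuracy curves to
# TensorBoard under a timestamped log directory.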
import os
import torch
from datetime import datetime
from torch.utils.tensorboard import SummaryWriter  # assumed; the project may use tensorboardX instead
# ResNet, dataset_creator, display_iterator_sample and fit are assumed to be
# imported from the surrounding project (fit as in the other examples here)

trainset = dataset_creator.get_train_iterator()
display_iterator_sample(trainset)

trainloader = torch.utils.data.DataLoader(trainset,
                                          batch_size=16,
                                          shuffle=True,
                                          num_workers=0)

net = ResNet(22)
net.cuda()

log_datatime = str(datetime.now().time())
loss_writer = SummaryWriter(os.path.join('logs', log_datatime, 'loss'))
accuracy_writer = SummaryWriter(os.path.join('logs', log_datatime, 'accuracy'))

validationset = dataset_creator.get_validation_iterator()
validationloader = torch.utils.data.DataLoader(validationset,
                                               batch_size=32,
                                               shuffle=False,
                                               num_workers=0)

fit(net,
    trainloader,
    validationloader,
    loss_writer,
    accuracy_writer,
    trainset.labels,
    epochs=100)

loss_writer.close()
accuracy_writer.close()
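# The logged curves can be inspected afterwards with: tensorboard --logdir logs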
Example 4
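# Trains an AoANet model built on the unified dataloader's vectorizer, with the
# backbone frozen via grad_backbone(False).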
import torch
from model import blocks
from model import networks
from data_loader.unified_dataloader import UnifiedDataloader
from model_fitting.train import fit
import os

th_count = 24

dataloader = UnifiedDataloader(batch_size=32, th_count=th_count)

net = networks.AoANet(512, dataloader.vectorizer)
net.grad_backbone(False)

fit(net,
    dataloader.trainloader,
    dataloader.validationloader,
    epochs=1000,
    lower_learning_period=2)
Example 5
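# Trains a DeepLabV3+ segmentation network (2 classes) on a ResNet backbone for
# the 'custom_car' dataset, then runs test() on the held-out test loader.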
import torch
from model import blocks
from model import networks
from data_loader.dataset_provider import SegmentationDatasetProvider
from model_fitting.train import fit, test
import os

th_count = 12
dataset_name = 'custom_car'

net_backbone = networks.ResNetBackbone(block=blocks.BasicBlock,
                                       block_counts=[3, 4, 6],
                                       inplanes=64)
net = networks.DeepLabV3Plus(net_backbone, 2)
data_provider = SegmentationDatasetProvider(net,
                                            batch_size=4,
                                            th_count=th_count)

fit(net,
    data_provider.trainloader,
    data_provider.validationloader,
    dataset_name=dataset_name,
    epochs=1000,
    lower_learning_period=5)

test(net, data_provider.testloader, dataset_name=dataset_name)
Example 6
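# Trains a RetinaNet detector with a ResNet feature-pyramid backbone on
# COCO-format annotations; the snippet begins partway through the class list.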
           (51, 'bowl'), (52, 'banana'), (53, 'apple'), (54, 'sandwich'),
           (55, 'orange'), (56, 'broccoli'), (57, 'carrot'), (58, 'hot dog'),
           (59, 'pizza'), (60, 'donut'), (61, 'cake'), (62, 'chair'),
           (63, 'couch'), (64, 'potted plant'), (65, 'bed'),
           (67, 'dining table'), (70, 'toilet'), (72, 'tv'), (73, 'laptop'),
           (74, 'mouse'), (75, 'remote'), (76, 'keyboard'), (77, 'cell phone'),
           (78, 'microwave'), (79, 'oven'), (80, 'toaster'), (81, 'sink'),
           (82, 'refrigerator'), (84, 'book'), (85, 'clock'), (86, 'vase'),
           (87, 'scissors'), (88, 'teddy bear'), (89, 'hair drier'),
           (90, 'toothbrush')]

backbone = networks.ResNetBackbone(inplanes=64,
                                   block=blocks.BasicBlock,
                                   block_counts=[3, 4, 6, 3])
net = networks.RetinaNet(backbone=[networks.FeaturePyramidBackbone, backbone],
                         classes=classes,
                         ratios=ratios)

coco_provider = CocoDetectionDatasetProvider(net,
                                             annDir=os.path.join(
                                                 '/Data', dataset_name),
                                             batch_size=8,
                                             th_count=th_count)

fit(net,
    coco_provider.trainloader,
    coco_provider.validationloader,
    dataset_name=dataset_name,
    box_transform=coco_provider.target_to_box_transform,
    epochs=1000,
    lower_learning_period=3)