Example #1
import progressbar
import torch
from torch import nn
from torch.optim import Adam
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

from model.densenet import DenseNet
from generator.imagenet import Imagenet
from utils.metrics import classification_accuracy

writer = SummaryWriter()
#net = testDense().cuda()

net = DenseNet(in_features=3, k=32, layers=[6, 12, 24, 16],
               num_classes=1000).cuda()
print "net done"
#DATASET
dataset = Imagenet("/home/lapis-ml/Desktop/imagenet/train_224/")
loader = DataLoader(dataset, batch_size=64, shuffle=True)
#OPTIM-LOSS
optimizer = Adam(params=net.parameters(), lr=0.01, weight_decay=10e-4)  # note: 10e-4 is 1e-3; the DenseNet paper uses 1e-4
#optimizer = SGD(params=net.parameters(), lr=0.1, momentum=0.9, weight_decay=10e-4, nesterov=True)
loss = nn.NLLLoss()  # NLLLoss expects log-probabilities, so the network must end with LogSoftmax
# Could not get the graph export to work:
#writer.add_graph(net, net(Variable(torch.rand(1, 3, 32, 32), requires_grad=True).cuda()))

batch_number = len(loader)
num_epochs = 300
logging_step = 100
#logging_image_step = 100
step = 0
widgets = [
    'Batch: ',
    progressbar.Counter(), '/',
    progressbar.FormatCustomText('%(total)s', {"total": batch_number}), ' ',
    progressbar.Bar(marker="-", left='[', right=']'), ' ',
    progressbar.ETA(),  # assumed tail of the widget list; the original snippet is truncated here
]
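The example stops before the training loop itself. A minimal sketch of how these pieces would typically be wired together follows; the loop body, the log_softmax call, and the "train/loss" logging tag are assumptions, not code from the original.

# Assumed training-loop sketch (not part of the original example).
for epoch in range(num_epochs):
    bar = progressbar.ProgressBar(widgets=widgets, max_value=batch_number)
    for images, labels in bar(loader):
        images, labels = images.cuda(), labels.cuda()
        optimizer.zero_grad()
        # NLLLoss needs log-probabilities, hence the explicit log_softmax.
        log_probs = torch.log_softmax(net(images), dim=1)
        batch_loss = loss(log_probs, labels)
        batch_loss.backward()
        optimizer.step()
        if step % logging_step == 0:
            writer.add_scalar("train/loss", batch_loss.item(), step)
        step += 1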
Example #2
import torch
import torch.nn as nn

# get_dataloader, DenseNet, args, and device are assumed to be defined earlier in the script.
trainLoader, testLoader = get_dataloader(
    batch_size=args.train_batch_size, data_dir=args.data_dir
)

print("Batch Size : ", args.train_batch_size)
print("Test Batch Size : ", args.test_batch_size)
print("Number of batches in training set : ", trainLoader.__len__())
print("Number of batches in testing set : ", testLoader.__len__())

#  -----------------------------------------------------------------------
# Setup Model, Loss function & Optimizer
#  -----------------------------------------------------------------------
model = DenseNet(depth=100, growthRate=12, dropRate=0.25).to(device)
# model = BaseNet().to(device)
print(
    "\tTotal params: %.2fM" % (sum(p.numel() for p in model.parameters()) / 1000000.0)
)
print("Device : ", device)
if "cuda" in str(device):
    model = torch.nn.DataParallel(model, args.gpu_ids)
optimizer = torch.optim.SGD(
    model.parameters(),
    lr=args.lr,
    momentum=args.momentum,
    weight_decay=args.weight_decay,
)
criterion = nn.CrossEntropyLoss()


def adjust_learning_rate(optimizer, epoch):
    global state
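    # Assumed completion (not in the original snippet): a common pattern for this
    # helper is step decay over a fixed epoch schedule. The state["lr"] key and the
    # args.schedule / args.gamma fields below are hypothetical names for code not shown.
    if epoch in args.schedule:
        state["lr"] *= args.gamma
        for param_group in optimizer.param_groups:
            param_group["lr"] = state["lr"]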