Example #1
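All of the examples below share the same setup; the imports they rely on are not shown in the source, so a probable set is sketched here. The PyTorch and torchvision lines are standard, while the framework's own module paths are unknown and left as placeholder comments.

    # Probable common imports for the examples below (a sketch, not taken from
    # the source). The framework is used under the aliases `sf`, `dc`, and
    # `experiments`; its module paths are not shown, so they stay as comments.
    import torch
    import torch.nn as nn
    import torch.optim as optim
    from torchvision import models
    # import <framework core> as sf
    # import <framework default classes> as dc
    # import <framework experiment helpers> as experiments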
    def test_experiment_weightedMean_MNIST_predefModel_alexnet(self):
        with sf.test_mode():
            modelName = "alexnet"

            metadata = sf.Metadata(testFlag=True,
                                   trainFlag=True,
                                   debugInfo=True)
            dataMetadata = dc.DefaultData_Metadata(
                pin_memoryTest=True,
                pin_memoryTrain=True,
                epoch=1,
                test_howOftenPrintTrain=2,
                howOftenPrintTrain=3,
                resizeTo=Test_RunExperiment.MNIST_RESIZE)
            optimizerDataDict = {"learning_rate": 1e-3, "momentum": 0.9}

            obj = models.alexnet()  # torchvision AlexNet as the predefined model
            smoothingMetadata = dc.Test_DefaultSmoothingOscilationWeightedMean_Metadata(
                test_weightIter=dc.DefaultWeightDecay(1.05),
                test_device='cpu',
                test_epsilon=1e-5,
                test_hardEpsilon=1e-7,
                test_weightsEpsilon=1e-6,
                test_weightSumContainerSize=3,
                test_weightSumContainerSizeStartAt=1,
                test_lossContainer=20,
                test_lossContainerDelayedStartAt=10)
            modelMetadata = dc.DefaultModel_Metadata(
                lossFuncDataDict={},
                optimizerDataDict=optimizerDataDict,
                device='cuda:0')

            data = dc.DefaultDataMNIST(dataMetadata)
            smoothing = dc.DefaultSmoothingOscilationWeightedMean(
                smoothingMetadata)
            model = dc.DefaultModelPredef(obj=obj,
                                          modelMetadata=modelMetadata,
                                          name=modelName)

            optimizer = optim.SGD(model.getNNModelModule().parameters(),
                                  lr=optimizerDataDict['learning_rate'],
                                  momentum=optimizerDataDict['momentum'])
            loss_fn = nn.CrossEntropyLoss()

            # run the full train/test experiment with the configured smoothing
            stat = dc.run(metadataObj=metadata,
                          data=data,
                          model=model,
                          smoothing=smoothing,
                          optimizer=optimizer,
                          lossFunc=loss_fn,
                          modelMetadata=modelMetadata,
                          dataMetadata=dataMetadata,
                          smoothingMetadata=smoothingMetadata)
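DefaultSmoothingOscilationWeightedMean evidently averages recent weight snapshots with a decayed weighting (test_weightIter=dc.DefaultWeightDecay(1.05)). A minimal sketch of that idea, assuming each step back in history is down-weighted by a constant factor (the framework's exact weighting is not shown in the source):

    # Sketch only: decayed weighted mean over parameter snapshots.
    # `snapshots` is assumed to be a list of model state_dicts, newest first.
    def decayed_weighted_mean(snapshots, decay=1.05):
        weights = [decay ** -i for i in range(len(snapshots))]  # newest gets weight 1
        total = sum(weights)
        return {
            name: sum(w * s[name] for w, s in zip(weights, snapshots)) / total
            for name in snapshots[0]
        }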
Example #2
    def test_experiment_borderline_MNIST_predefModel_wide_resnet(self):
        with sf.test_mode():
            modelName = "wide_resnet"

            metadata = sf.Metadata(testFlag=True,
                                   trainFlag=True,
                                   debugInfo=True)
            dataMetadata = dc.DefaultData_Metadata(
                pin_memoryTest=True,
                pin_memoryTrain=True,
                epoch=1,
                test_howOftenPrintTrain=2,
                howOftenPrintTrain=3,
                resizeTo=Test_RunExperiment.MNIST_RESIZE)
            optimizerDataDict = {"learning_rate": 1e-3, "momentum": 0.9}

            obj = models.wide_resnet50_2()
            smoothingMetadata = dc.Test_DefaultSmoothingBorderline_Metadata(
                test_numbOfBatchAfterSwitchOn=5, test_device='cuda:0')
            modelMetadata = dc.DefaultModel_Metadata(
                lossFuncDataDict={},
                optimizerDataDict=optimizerDataDict,
                device='cuda:0')

            data = dc.DefaultDataMNIST(dataMetadata)
            smoothing = dc.DefaultSmoothingBorderline(smoothingMetadata)
            model = dc.DefaultModelPredef(obj=obj,
                                          modelMetadata=modelMetadata,
                                          name=modelName)

            optimizer = optim.SGD(model.getNNModelModule().parameters(),
                                  lr=optimizerDataDict['learning_rate'],
                                  momentum=optimizerDataDict['momentum'])
            loss_fn = nn.CrossEntropyLoss()

            stat = dc.run(metadataObj=metadata,
                          data=data,
                          model=model,
                          smoothing=smoothing,
                          optimizer=optimizer,
                          lossFunc=loss_fn,
                          modelMetadata=modelMetadata,
                          dataMetadata=dataMetadata,
                          smoothingMetadata=smoothingMetadata)
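The only borderline-specific knob here is test_numbOfBatchAfterSwitchOn, which suggests the smoothing stays inactive for the first N batches and only then starts averaging. That reading is an assumption from the parameter name alone; a minimal gate of that shape:

    # Assumption-based sketch: ignore the first N batches, then allow smoothing.
    class BorderlineGate:
        def __init__(self, numb_of_batch_after_switch_on):
            self.switch_on = numb_of_batch_after_switch_on
            self.seen = 0

        def batch_done(self):
            self.seen += 1
            return self.seen > self.switch_on  # True once smoothing should run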
Example #3
    def test_experiment_pytorchSWA_MNIST_simpleConv(self):
        with sf.test_mode():
            modelName = "simpleConv"

            metadata = sf.Metadata(testFlag=True,
                                   trainFlag=True,
                                   debugInfo=True)
            dataMetadata = dc.DefaultData_Metadata(pin_memoryTest=True,
                                                   pin_memoryTrain=True,
                                                   epoch=1,
                                                   test_howOftenPrintTrain=2,
                                                   howOftenPrintTrain=3)
            optimizerDataDict = {"learning_rate": 1e-3, "momentum": 0.9}

            smoothingMetadata = dc.Test_DefaultPytorchAveragedSmoothing_Metadata(
                test_device='cuda:0')
            modelMetadata = dc.DefaultModel_Metadata(
                lossFuncDataDict={},
                optimizerDataDict=optimizerDataDict,
                device='cuda:0')

            data = dc.DefaultDataMNIST(dataMetadata)
            model = dc.DefaultModelSimpleConv(modelMetadata=modelMetadata)
            smoothing = dc.DefaultPytorchAveragedSmoothing(smoothingMetadata,
                                                           model=model)

            optimizer = optim.SGD(model.getNNModelModule().parameters(),
                                  lr=optimizerDataDict['learning_rate'],
                                  momentum=optimizerDataDict['momentum'])
            loss_fn = nn.CrossEntropyLoss()

            stat = dc.run(metadataObj=metadata,
                          data=data,
                          model=model,
                          smoothing=smoothing,
                          optimizer=optimizer,
                          lossFunc=loss_fn,
                          modelMetadata=modelMetadata,
                          dataMetadata=dataMetadata,
                          smoothingMetadata=smoothingMetadata)
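DefaultPytorchAveragedSmoothing presumably wraps PyTorch's stochastic weight averaging utilities (the test name says "pytorchSWA"). For reference, the plain-PyTorch equivalent looks roughly like this; `model`, `optimizer`, `loss_fn`, `train_loader`, and `num_epochs` are assumed to be defined:

    from torch.optim.swa_utils import AveragedModel, update_bn

    swa_model = AveragedModel(model)        # keeps a running average of the weights
    for epoch in range(num_epochs):
        for x, y in train_loader:
            optimizer.zero_grad()
            loss = loss_fn(model(x), y)
            loss.backward()
            optimizer.step()
        swa_model.update_parameters(model)  # fold current weights into the average
    update_bn(train_loader, swa_model)      # recompute BatchNorm running statistics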
    loop = 5

    #####################
    types = ('predefModel', 'CIFAR10', 'borderline')
    try:
        stats = []
        rootFolder = sf.Output.getTimeStr() + ''.join(x + "_" for x in types) + "set"
        for r in range(loop):
            obj = models.alexnet()
            metadata.resetOutput()
            
            smoothingMetadata = dc.DefaultSmoothingBorderline_Metadata(numbOfBatchAfterSwitchOn=2000)
            modelMetadata = dc.DefaultModel_Metadata()

            stat = dc.run(numbOfRepetition=1, modelType=types[0], dataType=types[1],
                          smoothingType=types[2], metadataObj=metadata,
                          modelMetadata=modelMetadata, dataMetadata=dataMetadata,
                          smoothingMetadata=smoothingMetadata, modelPredefObj=obj,
                          rootFolder=rootFolder)
            stats.append(stat)
        experiments.printStats(stats, metadata)
    except Exception as ex:
        experiments.printException(ex, types)


Example #5
            # NOTE: the opening of this snippet is missing from the source; the
            # keyword arguments below belong to a call whose start was cut off,
            # and `self` refers to the enclosing class in the original file.
            #                           modelMetadata=modelMetadata,
            #                           name=modelPredefObjName,
            #                           optimizer=optimizer,
            #                           lossFunc=lossFunc)
            optimizer = optim.SGD(self.getNNModelModule().parameters(),
                                  lr=1e-3,
                                  momentum=0.9)
            loss_fn = nn.CrossEntropyLoss()

            stat = dc.run(modelType=types[0],
                          dataType=types[1],
                          smoothingType=types[2],
                          metadataObj=metadata,
                          modelMetadata=modelMetadata,
                          dataMetadata=dataMetadata,
                          smoothingMetadata=smoothingMetadata,
                          modelPredefObj=obj,
                          modelPredefObjName=modelName,
                          rootFolder=rootFolder,
                          runningAvgSize=runningAvgSize,
                          optimizer=optimizer,
                          lossFunc=loss_fn)
            for idx, s in enumerate(stat):
                s.saveSelf(name="stat" + str(idx))
            stats.append(stat.pop())  # keep only the last result (the best-trained one)
        experiments.printAvgStats(stats,
                                  metadata,
                                  runningAvgSize=runningAvgSize)
    except Exception as ex:
        experiments.printException(ex, types)

    #####################
    types = ('predefModel', 'CIFAR10', 'disabled')
    try:
        stats = []
        rootFolder = prefix + sf.Output.getTimeStr() + ''.join(x + "_" for x in types)  # `prefix` is defined earlier in the original script
        smoothingMetadata = dc.DisabledSmoothing_Metadata()

        for r in range(loop):

            # `block`, `layers`, and `num_classes` are defined earlier in the original script
            obj = models.ResNet(block, layers, num_classes=num_classes)

            data = dc.DefaultDataCIFAR10(dataMetadata)
            model = dc.DefaultModelPredef(obj=obj, modelMetadata=modelMetadata, name=modelName)
            smoothing = dc.DisabledSmoothing(smoothingMetadata)

            optimizer = optim.SGD(model.getNNModelModule().parameters(),
                                  lr=optimizerDataDict['learning_rate'],
                                  weight_decay=optimizerDataDict['weight_decay'],
                                  momentum=optimizerDataDict['momentum'])
            scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1)
            loss_fn = nn.CrossEntropyLoss()

            stat = dc.run(metadataObj=metadata, data=data, model=model, smoothing=smoothing,
                          optimizer=optimizer, lossFunc=loss_fn, modelMetadata=modelMetadata,
                          dataMetadata=dataMetadata, smoothingMetadata=smoothingMetadata,
                          rootFolder=rootFolder,
                          schedulers=[([30, 60, 90, 120, 150, 180], scheduler)])

            stat.saveSelf(name="stat")

            stats.append(stat)
        experiments.printAvgStats(stats, metadata, runningAvgSize=runningAvgSize)
    except Exception as ex:
        experiments.printException(ex, types)
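The schedulers argument above pairs a list of epoch milestones with a scheduler. How the framework consumes it is not shown; a plausible reading (an assumption, not the documented API) is that each scheduler is stepped only at its milestones:

    # Hypothetical sketch of how `schedulers` might be consumed once per epoch.
    def step_schedulers(schedulers, epoch):
        for milestones, sched in schedulers:
            if epoch in milestones:
                sched.step()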