    ## Step sizes: 1/(2*lNab) for the SGD updates; 1/(8*lNab*delta^2) for the FW (SFW) updates on the two delta-scaled blocks
    stepSGD = 1.0/(2*lNab)
    nodesFirst = 120
    nodesSecond = 84
    stepFW1 = 1.0/(8*lNab*delta1*delta1)
    stepFW2 = 1.0/(8*lNab*delta2*delta2)

    ## For reproducibility
    torch.manual_seed(564736827)

    weightsGraph = wConvSimGraphD['Ins'+str(ins)]
    weightsPyt = wConvSimPyt['Ins'+str(ins)]

    if methodToUse == 'SGD':
        name = 'Ins'+str(ins)+'_SGD'
        net = netToUse().to(device)
        FixLayerData(net, weightsPyt)
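        ## All 10 parameter groups use methods[0] (the SGD update in this branch) with the same step size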
        optimPerLayer = [methods[0]]*10
        partParams = [[stepSGD]]*10
        start = time.time()
        torchOptim = MixOptimizer(net.parameters(), optimPerLayer, partParams, device = device)
        RunExperiment(trainVal_loader, test_loader, epochs, torchOptim, lossFn, net, results_directory, name, device = device, saveAccToo = True)
        print('Running SGD took '+str(time.time() - start)+' seconds')
        print()
    elif methodToUse == 'SFW':
        name = 'Ins'+str(ins)+'_SFW'
        net = netToUse().to(device)
        FixLayerData(net, weightsGraph)
        ScaleParameters(net, indFWs, [delta1, delta2])
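        ## Per-layer optimizer mix: the parameter groups at indices 4 and 6 use methods[1] (presumably the SFW update on the rescaled layers); the rest use methods[0]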
        optimPerLayer = [methods[0], methods[0], methods[0], methods[0], methods[1], methods[0], \
            methods[1], methods[0], methods[0], methods[0]]
        partParams = [[stepSGD]]*10
    print('Relu Model Structure')
    PrintModelStructure(netRELU)

    print('Sigmoid Model Structure')
    PrintModelStructure(netSigmoid)

    ## Let's now create the labels under different SNR levels.
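    ## For each network instance and nonzero pattern, fix the layer weights and generate labels at every SNR level, for both the ReLU and the sigmoid network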
    mapLabelsRELU = {}
    mapLabelsSigmoid = {}

    for i in range(instancesOfNetwork):
        for j, nameNZ in enumerate(namesNZ):
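            ## Layer-data keys have the form '<NZ name>_Ins<instance index>'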
            nameLayer = nameNZ+'_Ins'+str(i)
            ## For Relu
            FixLayerData(netRELU, mapLayerData[nameLayer])
            labelsRELU = CreateRealLabelsAndSNR(X_mat, netRELU, SNRLevels)
            for z, snrName in enumerate(namesSNR):
                mapLabelsRELU[nameNZ+'_Ins'+str(i)+'_'+snrName] = labelsRELU[z]
            ## For Sigmoid
            FixLayerData(netSigmoid, mapLayerData[nameLayer])
            labelsSigmoid = CreateRealLabelsAndSNR(X_mat, netSigmoid, SNRLevels)
            for z, snrName in enumerate(namesSNR):
                mapLabelsSigmoid[nameNZ+'_Ins'+str(i)+'_'+snrName] = labelsSigmoid[z]
    
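    ## Containers for the initial weights under each initialization variant (Py, Graph, GraphD), for both the ReLU and sigmoid networks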
    mapInitWsPyRELU = {}
    mapInitWsGraphRELU = {}
    mapInitWsGraphDRELU = {}
    mapInitWsPySigmoid = {}
    mapInitWsGraphSigmoid = {}
    mapInitWsGraphDSigmoid = {}
Example #3
    deltaVecToUse = [delta_vector[0], delta_vector[1]]

    for i in range(instances):
        keyForLabels = 'NZ'+nzAsStr+'_Ins'+str(i)+'_SNR'+snr
        keyForWeights = 'NZ'+nzAsStr+'_Ins'+str(i)
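        ## Split the 220k labels: first 100k for training, next 20k for validation, last 100k for testing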
        labels_train = mapLabelsSigmoid[keyForLabels][0:100000]
        labels_val = mapLabelsSigmoid[keyForLabels][100000:120000]
        labels_test = mapLabelsSigmoid[keyForLabels][120000:220000]
        train_loader = CreateDataLoader(X_train, labels_train, batch_size, shuffle, use_cuda)
        test_loader = CreateDataLoader(X_test, labels_test, batch_sizeVal, shuffle, use_cuda)
        if methodToRun == 'SGD':
            weightsToUse = mapInitWsPySigmoid[keyForWeights]
            name = keyForLabels +'_lNabla'+str(lNab)+'_SGD'
            partParams = [[stepSGD], [stepSGD], [stepSGD]]
            net = netToUse(input_size, first_layer, second_layer, third_layer)
            FixLayerData(net, weightsToUse)
            optimPerLayer = [methods[0], methods[0], methods[0]]
            torchOptim = MixOptimizer(net.parameters(), optimPerLayer, partParams)
            RunExpSavNetAndUsingTest(train_loader, test_loader, epochs, torchOptim, lossFn, net, results_directory, name, device = device)
        elif methodToRun == 'SGDT':
            weightsToUse = mapInitWsPySigmoid[keyForWeights]
            name = keyForLabels +'_lNabla'+str(lNab)+'_SGDT'
            partParams = [[stepSGD], [stepSGD], [stepSGD]]
            net = netToUse(input_size, first_layer, second_layer, third_layer)
            FixLayerData(net, weightsToUse)
            optimPerLayer = [methods[0], methods[0], methods[0]]
            torchOptim = MixOptimizer(net.parameters(), optimPerLayer, partParams)
            RunExpSGDTSavNetAndUsingTest(train_loader, test_loader, epochs, torchOptim, lossFn, net, results_directory, name, paramIndToThres, nzVector, device = device)
        elif methodToRun == 'SFW':
            weightsToUse = mapInitWsGraphDSigmoid[keyForWeights]
            name = keyForLabels +'_lNabla'+str(lNab)+'_SFW'
Example #4
    ## Fix seed for reproducibility
    torch.manual_seed(7654367)

    ## Run SGD and SGDT
    ## Parameters for SGDT
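    ## SGDT thresholds the parameter groups listed in paramIndToThres (here the first two layers),
    ## presumably keeping the number of nonzeros given in nzVector (taken from the 'NZ...' key prefix)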
    nzInKey = auxForKey[0][2:]
    paramIndToThres = [0, 1]
    nzVector = [int(nzInKey), int(nzInKey)]
    for lNab in lNabla:
        name = keyOnLabel + '_lNabla' + str(lNab) + '_SGD'
        stepSGD = 1.0 / (2 * lNab)
        partParams = [[stepSGD], [stepSGD], [stepSGD]]
        ## Run SGD
        net = netToUse(input_size, first_layer, second_layer, third_layer)
        FixLayerData(net, weightsPytorch)
        optimPerLayer = [methods[0], methods[0], methods[0]]
        torchOptim = MixOptimizer(net.parameters(), optimPerLayer, partParams)
        RunExperiment(train_loader,
                      val_loader,
                      epochs,
                      torchOptim,
                      lossFn,
                      net,
                      results_directory,
                      name,
                      device=device)
        ## Run SGDT
        name = keyOnLabel + '_lNabla' + str(lNab) + '_SGDT'
        net = netToUse(input_size, first_layer, second_layer, third_layer)
        FixLayerData(net, weightsPytorch)