Example #1
batchNorm = [1, 0, 0, 1]
batchSize = 16000
hiddenLayer = [7951, 9927, 7256, 2594]

runLogsPath = "/xdisk/rlysecky/manojgopale/extra/keyPrediction_chip/scr/moreDataTrials/scr/allRuns.csv"
with open(runLogsPath, 'a') as f:
    ## modelName must be unique like run_<someNum>
    f.write("\n%s, %s, %s, %s, %s, %s, %s\n" %
            (modelName, numHiddenLayers, hiddenLayer, actList, dropList,
             batchNorm, batchSize))
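
One caveat with this log format: hiddenLayer, actList, dropList, and batchNorm are Python lists, so their own commas collide with the comma-separated row. A minimal alternative sketch (an assumption, not part of the original project) using Python's csv module, which quotes such fields:

import csv

with open(runLogsPath, 'a', newline='') as f:
    writer = csv.writer(f)
    ## csv.writer str()-converts each field and quotes any that contain commas
    writer.writerow([modelName, numHiddenLayers, hiddenLayer, actList,
                     dropList, batchNorm, batchSize])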

t0_time = time.time()
#classifier = classify_general.Classifier(resultDir, modelName, x_train, y_train_oh, x_dev, y_dev_oh, x_test, y_test_oh, hiddenLayer, actList, dropList, batchNorm)
## Taking 1361 power traces only
classifier = classify_general.Classifier(resultDir, modelName, x_train,
                                         y_train_oh, x_dev, y_dev_oh, x_test,
                                         y_test_oh, hiddenLayer, actList,
                                         dropList, batchNorm, numPowerTraces)
t1_time = time.time()
print("\nTime to load the dataset in python for training is %s seconds\n" %
      (t1_time - t0_time))

## Train the model
startTime = time.time()
classifier.train(batchSize)
endTime = time.time()
trainTime = endTime - startTime
print("\nTime to train with batchSize= %s is %s seconds\n" %
      (batchSize, trainTime))
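
A side note on the timing: time.time() follows the wall clock and can jump if the clock is adjusted mid-run, while time.perf_counter() is monotonic and intended for measuring elapsed intervals. A minimal sketch of the same measurement with it:

startTime = time.perf_counter()
classifier.train(batchSize)
trainTime = time.perf_counter() - startTime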

## Evaluate
classifier.evaluate()
Example #2

optStr = "Adam"  ## Hardcoding Adam for the initial runs
learningRate = np.float_power(10, -3)  ## Hardcoding lr and epsilon to their defaults for the initial runs
epsilonValue = np.float_power(10, -7)
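
For context, these are exactly the Keras Adam defaults (learning_rate=1e-3, epsilon=1e-7). How the Classifier consumes optStr, learningRate, and epsilonValue is not shown here; a plausible sketch, assuming a Keras backend:

from tensorflow.keras.optimizers import Adam

## Assumed internals of classify_general.Classifier: optStr selects the
## optimizer class, learningRate and epsilonValue parameterize it.
optimizer = Adam(learning_rate=learningRate, epsilon=epsilonValue)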

#hiddenLayer = np.array([hiddenLayerDict["num"][i] for i in np.random.random_integers(0, len(hiddenLayerDict["num"])-1, numHiddenLayers).tolist()]) * np.array([hiddenLayerDict["factor"][i] for i in np.random.random_integers(0, len(hiddenLayerDict["factor"])-1, numHiddenLayers).tolist()])
## Get random integers between 100 and 1500; those seem to give better results
#hiddenLayer = np.random.randint(100, 10000, numHiddenLayers)
## Runs 30 onwards use powers of 2; random_integers was removed from NumPy, so use randint (upper bound exclusive)
hiddenLayer = [np.power(2, i) for i in np.random.randint(5, 10, size=numHiddenLayers)]
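
For newer NumPy (>= 1.17), the Generator API is the recommended way to draw these exponents; a sketch with the same 2**5 to 2**9 range:

rng = np.random.default_rng()
## integers() also excludes the upper bound, so 5..9 -> layer widths 32..512
hiddenLayer = [int(2 ** i) for i in rng.integers(5, 10, size=numHiddenLayers)]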

runLogsPath = "/xdisk/rlysecky/manojgopale/extra/gem5KeyPrediction/log/dataEnsemble/allRuns.csv"
with open(runLogsPath, 'a') as f:
    ## modelName must be unique like run_<someNum>
    f.write("\n%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s\n" %
            (modelName, numHiddenLayers, hiddenLayer, actList, dropList,
             batchNorm, batchSize, trainSize, learningRate, epsilonValue,
             optStr, typeOfStd))

t0_time = time.time()
## Note: the dev split is passed as both the dev and test arguments here
classifier = classify_general.Classifier(
    resultDir, modelName, x_train, y_train_oh, x_dev, y_dev_oh, x_dev,
    y_dev_oh, hiddenLayer, actList, dropList, batchNorm, numPowerTraces,
    "dataEnsemble", learningRate, epsilonValue, optStr)
t1_time = time.time()
print("\nTime to load the dataset in python for training is %s seconds\n" %
      (t1_time - t0_time))

## Train the model
startTime = time.time()
classifier.train(batchSize)
endTime = time.time()
trainTime = endTime - startTime
print("\nTime to train with batchSize= %s is %s seconds\n" %(batchSize, trainTime))

## Evaluate
classifier.evaluate()

## Save the model
classifier.saveModel()
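
If saveModel writes the underlying Keras model to resultDir (an assumption; the snippet does not show classify_general's save format, and the path below is hypothetical), reloading it for inference would look like:

from tensorflow.keras.models import load_model

## Hypothetical filename; the actual naming scheme used by saveModel is not shown
model = load_model(resultDir + "/" + modelName + ".h5")
y_pred = model.predict(x_dev)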
Example #3

epsilonValue = np.float_power(10, -7)

## when dataEnsemble is used
#runLogsPath = "/xdisk/rlysecky/manojgopale/extra/gem5KeyPrediction/log/dataEnsemble/allRuns.csv"
runLogsPath = "/xdisk/rlysecky/manojgopale/extra/gem5KeyPrediction/log/" + configName + "/allRuns.csv"
with open(runLogsPath, 'a') as f:
    ## modelName must be unique like run_<someNum>
    f.write("\n%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s\n" %
            (modelName, numHiddenLayers, hiddenLayer, actList, dropList,
             batchNorm, batchSize, trainSize, learningRate, epsilonValue,
             optStr, typeOfStd, configNames))
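
The per-config path above is assembled by string concatenation; an equivalent sketch with os.path.join, which avoids doubled or missing separators:

import os

runLogsPath = os.path.join("/xdisk/rlysecky/manojgopale/extra/gem5KeyPrediction/log",
                           configName, "allRuns.csv")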

t0_time = time.time()
classifier = classify_general.Classifier(
    resultDir, modelName, x_train[:, :numPowerTraces], y_train_oh,
    x_dev[:, :numPowerTraces], y_dev_oh, x_dev[:, :numPowerTraces], y_dev_oh,
    hiddenLayer, actList, dropList, batchNorm, numPowerTraces, configName,
    learningRate, epsilonValue, optStr)
t1_time = time.time()
print("\nTime to load the dataset in python for training is %s seconds\n" %
      (t1_time - t0_time))
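
Unlike the earlier runs, this one trims the feature matrices to the first numPowerTraces columns before passing them in. A tiny illustration of that slice (illustrative shapes only):

x = np.arange(12).reshape(3, 4)  ## 3 samples, 4 power-trace features
x[:, :2]                         ## all rows, first 2 columns -> shape (3, 2)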

## Train the model
startTime = time.time()
classifier.train(batchSize)
endTime = time.time()
trainTime = endTime - startTime
print("\nTime to train with batchSize= %s is %s seconds\n" %
      (batchSize, trainTime))

## Evaluate