# Imports required by the functions below. The project-specific import paths
# for DNN, checkNcreateFolder and initialize are assumptions and may differ in
# the actual package layout.
import logging
import shutil

import numpy as np
import pytest
import keras

# from <package>.DNN import DNN                                # assumed project import
# from <package>.utils import checkNcreateFolder, initialize   # assumed project import


def test_dnn_trainModel_exceptions():
    dnn = DNN("Name", 10, [40])
    testDataArray = np.ndarray(shape=(20, 10))
    testDataArrayLabels = np.ndarray(shape=(20, 1))
    testDataArrayWeight = np.ndarray(shape=(20, 1))
    # Training before the model is compiled must raise
    with pytest.raises(RuntimeError):
        dnn.trainModel(testDataArray, testDataArrayLabels, testDataArrayWeight, "someFolder")
    dnn.modelCompiled = True
    # Data and weights must be numpy arrays
    with pytest.raises(TypeError):
        dnn.trainModel("NotAnArray", testDataArrayLabels, testDataArrayWeight, "someFolder")
    with pytest.raises(TypeError):
        dnn.trainModel(testDataArray, testDataArrayLabels, "NotAnArray", "someFolder")
def setupDNN(modelDefinitions):
    thisDNN = DNN(
        identifier=modelDefinitions.config.net.name,
        inputDim=modelDefinitions.config.net.inputDimention,
        layerDims=modelDefinitions.config.net.layerDimentions,
        weightDecay=modelDefinitions.config.net.useWeightDecay,
        activation=modelDefinitions.config.net.activation,
        outputActivation=modelDefinitions.config.net.outputActivation,
        loss=modelDefinitions.config.net.loss,
        metric=["acc"],
        batchSize=modelDefinitions.config.net.batchSize
    )
    thisDNN.loadModel(modelDefinitions.trainingOutput)
    return thisDNN
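# Hypothetical usage sketch (not part of the original code): how setupDNN might
# be called to reload a trained network for evaluation. It only assumes that
# modelDefinitions exposes the config.net.* and trainingOutput attributes used
# above; the loadDefinitions helper name is an assumption for illustration.
#
# modelDefinitions = loadDefinitions("trainedModel/usedConfig.cfg")
# dnn = setupDNN(modelDefinitions)
# predictions = dnn.network.predict(someInputMatrix)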
def test_dnn_trainModel(mocker):
    testDataArray = np.ndarray(shape=(20, 10))
    testDataArrayLabels = np.ndarray(shape=(20, 1))
    testDataArrayWeight = np.ndarray(shape=(20, 1))
    dnn = DNN("Name", 10, [40])
    dnn.optimizer = "adam"
    dnn.buildModel()
    dnn.compileModel()
    dnn.isBinary = True
    # Patch keras fit() to return a fake History-like object
    m = mocker.MagicMock()
    m.epoch = 100 * [0]
    m.history = {"loss": 99 * [0] + [1]}
    mocker.patch.object(keras.models.Model, "fit", return_value=m)
    dnn.trainModel(testDataArray, testDataArrayLabels, testDataArrayWeight, "someFolder")
    assert dnn.StopValues == (100, {"loss": 1})
def test_dnn_compileModel_exceptions():
    dnn = DNN("Name", 10, [40])
    with pytest.raises(RuntimeError):
        dnn.compileModel()
    dnn.optimizer = "adagrad"
    with pytest.raises(RuntimeError):
        dnn.compileModel()
def test_dnn_init():
    dnn = DNN("Name", 10, [40])
    assert dnn.name == "Name"
    assert dnn.inputDimention == 10
    assert dnn.nLayer == 1
    assert isinstance(dnn.layerDimention, list)
    assert dnn.layerDimention[0] == 40
    assert dnn.activation == "relu"
    assert dnn.outputActivation == "softmax"
def trainDNN(config, batch=False, addMetrics=["MEM"]):
    logging.debug("Output folder")
    checkNcreateFolder(config.output, onlyFolder=True)
    logging.debug("Copying used config to output folder")
    shutil.copy2(config.path, config.output + "/usedConfig.cfg")
    logging.info("Initializing samples and data")
    allSample, data = initialize(config, incGenWeights=config.includeGenWeight)
    logging.info("Initializing DNN")
    thisDNN = DNN(identifier=config.net.name,
                  inputDim=config.net.inputDimention,
                  layerDims=config.net.layerDimentions,
                  weightDecay=config.net.useWeightDecay,
                  weightDecayLambda=config.net.weightDecayLambda,
                  activation=config.net.activation,
                  outputActivation=config.net.outputActivation,
                  loss=config.net.loss,
                  metric=["acc"],
                  batchSize=config.net.batchSize)
    logging.info("Setting optimizer")
    logging.debug("In config: %s", config.net.optimizer)
    thisDNN.optimizer = config.net.optimizer
    logging.info("Building model")
    if config.net.loss == "binary_crossentropy":
        thisDNN.buildModel(nClasses=1,
                           dropoutAll=config.net.dropoutAll,
                           dropoutOutput=config.net.dropoutOutput,
                           dropoutPercent=config.net.dropoutPercent)
    else:
        thisDNN.buildModel(nClasses=len(data.outputClasses),
                           dropoutAll=config.net.dropoutAll,
                           dropoutOutput=config.net.dropoutOutput,
                           dropoutPercent=config.net.dropoutPercent)
    logging.info("Compiling model")
    thisDNN.compileModel()
    thisDNN.network.summary(print_fn=logging.warning)
    if not batch:
        input("Press ret")
    trainData = data.getTrainData()
    trainLabels = data.trainLabels
    trainWeights = data.trainTrainingWeights
    logging.info("Training DNN")
    thisDNN.trainModel(trainData, trainLabels, trainWeights, config.output,
                       epochs=config.net.trainEpochs,
                       valSplit=config.net.validationSplit,
                       earlyStopping=config.net.doEarlyStopping,
                       patience=config.net.StoppingPatience)
    testData = data.getTestData()
    testLabels = data.testLabels
    testWeights = data.testTrainingWeights
    # TODO: Make this configurable
    ROCMetrics = []
    if len(addMetrics) == 1 and addMetrics[0] == "":
        addMetrics = []
    for metric in addMetrics:
        ROCMetrics.append((metric, data.getTestData(asMatrix=False)[metric].values))
    logging.info("Model evaluation")
    thisDNN.evalModel(testData, testWeights, testLabels,
                      trainData, trainWeights, trainLabels,
                      config.trainingVariables, config.output, data.outputClasses,
                      plotMetics=True, saveData=True,
                      addROCMetrics=ROCMetrics,
                      forceColors=config.forceColors)
    logging.info("Saving model")
    thisDNN.saveModel(config.output, data.transformations)
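# Hypothetical usage sketch (not part of the original code): how trainDNN might
# be driven from a command line. The loadConfig helper and the --config/--batch
# flags are assumptions for illustration; the actual entry point may differ.
#
# if __name__ == "__main__":
#     import argparse
#     parser = argparse.ArgumentParser()
#     parser.add_argument("--config", required=True, help="Path to training config")
#     parser.add_argument("--batch", action="store_true", help="Run non-interactively")
#     args = parser.parse_args()
#     logging.basicConfig(level=logging.INFO)
#     config = loadConfig(args.config)   # assumed config loader
#     trainDNN(config, batch=args.batch)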
def test_dnn_compileModel():
    dnn = DNN("Name", 10, [40])
    dnn.optimizer = "adagrad"
    dnn.buildModel()
    assert dnn.compileModel(writeyml=True)
def test_dnn_buildModel():
    dnn = DNN("Name", 10, [40])
    assert dnn.buildModel(plot=True)
def test_dnn_init_exceptions():
    # Passing something other than a list as the layer dimensions must raise
    with pytest.raises(TypeError):
        DNN("Name", 10, "Blubb")
    # An empty list of layer dimensions must raise
    with pytest.raises(RuntimeError):
        DNN("Name", 10, [])