def runDE(baselineMin, baselineMax, modelObj, deRunCountParam, runCountParam, constraintFileNameParam): #import IO_Utility #print "### Executing D.E. " out_normscore, out_decVec = de(runCountParam, constraintFileNameParam, modelObj,baselineMin,baselineMax,deRunCountParam) tempObj = modelObj(constraintFileNameParam, runCountParam) objByDecVec = tempObj.getobjfromdecision(out_decVec) print "Normalized Score ...", out_normscore print "The objective scores after all minimization ....UnAc={}, UnPass={}".format(objByDecVec[0], objByDecVec[1])
def runDE(baselineMin, baselineMax, modelObj, deRunCountParam, runCountParam, constraintFileNameParam): #import IO_Utility #print "### Executing D.E. " out_normscore, out_decVec = de(runCountParam, constraintFileNameParam, modelObj, baselineMin, baselineMax, deRunCountParam) tempObj = modelObj(constraintFileNameParam, runCountParam) objByDecVec = tempObj.getobjfromdecision(out_decVec) print "Normalized Score ...", out_normscore print "The objective scores after all minimization ....UnAc={}, UnPass={}".format( objByDecVec[0], objByDecVec[1])
def __init__(self, num):
    """Initialize the instance with the identifier *num*.

    Sets up default state and delegates name generation to init_nom().
    """
    self.id = num  # caller-supplied identifier
    self.score = 0  # accumulated score, starts at zero
    self.de = de()  # presumably a dice/DE helper instance — TODO confirm what de() provides
    self.nom = ""  # name placeholder, filled in by init_nom() below
    self.init_nom()  # NOTE(review): must run last — appears to populate self.nom
def tunetest(predict):
    """Tune via DE, then run the prediction test with tuning disabled.

    The order matters: de() runs while The.option.tuning is still in its
    prior state; the flag is then cleared before predicttest executes.
    """
    de()  # perform the tuning pass
    The.option.tuning = False  # disable tuning for the evaluation that follows
    predicttest(predict,"Tuned_WHERE")
def tunetest(predict):
    """Run the DE tuning step, then evaluate *predict* untuned.

    de() executes first with tuning in its prior state; the global flag is
    cleared afterwards so predicttest runs with tuning off.
    """
    de()  # tuning pass
    The.option.tuning = False  # turn tuning off before evaluation
    predicttest(predict, "Tuned_WHERE")
def test_res(self):
    """Every roll from de().lancer() must land in the inclusive range 1..6."""
    for _ in range(100):
        roll = de().lancer()
        # Both bounds checked separately so a failure pinpoints which one broke.
        self.assertTrue(roll >= 1)
        self.assertTrue(roll <= 6)
def test_de(self):
    """A freshly constructed de() must expose a model of exactly 6 entries."""
    model_size = len(de().model)
    self.assertTrue(model_size == 6)