def getAdjustedExponential(params):
    """Build an exponential selector whose per-dimension decay rates are
    divided by params['adjustmentFactor'].

    params keys used: 'dimensions' (iterable of dimension dicts fed to
    extractDimension), 'adjustmentFactor' (numeric divisor), and
    'numSelectedPoints' (passed through to the selector).
    """
    # Materialize the mapping: in Python 3, map() returns a one-shot
    # iterator, so the for-loop below would exhaust it and e.Parameters()
    # would then receive an empty iterable. list() is correct on both
    # Python 2 and 3.
    dimPars = list(map(extractDimension, params['dimensions']))
    factor = params['adjustmentFactor']
    for d in dimPars:
        # `* 1. /` forces float division (Python 2 compatible).
        d.decayRate = d.decayRate * 1. / factor
    parameters = e.Parameters(dimPars)
    return ae.getSelector(parameters, params['numSelectedPoints'])
def test_ExponentialFileInput(self):
    """Load exponential test data from file, check the initial cost, then
    verify the Levenberg-Marquardt fit converges to the known parameters."""
    cost_fn = Exponential.ExponentialCostFunction()
    cost_fn.FileInit('Values/TestExponential.txt')
    guess = libUnfitPython.std_vector_double()
    guess[:] = [0, 0]
    # Sum of squared residuals at the initial guess.
    cost = sum(r * r for r in cost_fn(guess))
    self.assertAlmostEqual(cost, 1.58190784, cost_tol)
    self.lm.FindMin(cost_fn, guess)
    self.assertAlmostEqual(guess[0], 0.99999682538, guess_tol)
    self.assertAlmostEqual(guess[1], 0.500017605642, guess_tol)
def test_ExponentialFileInput(self):
    """Load exponential test data from file, check the initial cost, then
    verify the genetic-algorithm fit lands near the expected parameters.

    NOTE(review): a method with this exact name also appears elsewhere in
    this file (LM variant); if both live in the same class, one silently
    shadows the other — confirm they belong to different test classes.
    """
    cost_fn = Exponential.ExponentialCostFunction()
    cost_fn.FileInit('Values/TestExponential.txt')
    guess = libUnfitPython.std_vector_double()
    guess[:] = [0, 0]
    # Sum of squared residuals at the initial guess.
    cost = sum(r * r for r in cost_fn(guess))
    self.assertAlmostEqual(cost, 1.58190784, cost_tol)
    self.ga.FindMin(cost_fn, guess)
    self.assertAlmostEqual(guess[0], 1.02684400144, guess_tol)
    self.assertAlmostEqual(guess[1], 0.517748487134, guess_tol)
def test_ExponentialDirectInput(self):
    """Build the exponential cost function from in-line data, check the
    initial cost, then verify the genetic-algorithm fit."""
    x_data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    y_data = [1.0, 0.6065, 0.3679, 0.2231, 0.1353, 0.0821,
              0.0498, 0.0302, 0.0183, 0.0111, 0.0067]
    cost_fn = Exponential.ExponentialCostFunction(x_data, y_data)
    guess = libUnfitPython.std_vector_double()
    guess[:] = [0, 0]
    # Sum of squared residuals at the initial guess.
    cost = sum(r * r for r in cost_fn(guess))
    self.assertAlmostEqual(cost, 1.58190784, cost_tol)
    self.ga.FindMin(cost_fn, guess)
    self.assertAlmostEqual(guess[0], 1.02684400144, guess_tol)
    self.assertAlmostEqual(guess[1], 0.517748487134, guess_tol)
def extractDimension(dimPars):
    """Convert a dimension-parameter dict (keys: 'sweepWidth', 'decayRate',
    'exponent') into an e.DimensionParameters instance."""
    return e.DimensionParameters(
        dimPars['sweepWidth'],
        dimPars['decayRate'],
        dimPars['exponent'],
    )
def getExponential(params):
    """Build an exponential point selector from a parameter dict.

    params keys used: 'dimensions' (iterable of dimension dicts fed to
    extractDimension) and 'numSelectedPoints'.
    """
    dims = [extractDimension(d) for d in params['dimensions']]
    return e.getSelector(e.Parameters(dims), params['numSelectedPoints'])
# NOTE(review): this is a raw IPython session-history dump (the "8/28:",
# "9/1:" prefixes are session/line numbers), not importable code. Issues
# visible but left untouched: "funciton" typo in the title string,
# `@deterministic(plot=false)` uses lowercase `false` (NameError; the 9/8
# retry corrects it to `plot=False`), and the final `rate` definition (9/8)
# is truncated mid-body at this chunk boundary.
8/28: plt.xlabel("$k$") 8/29: plt.title("Probability mass funciton of a Poisson random \$\lambda$ values") 8/30: plt.show() 9/1: from pymc import DiscreteUniform, Exponential, deterministic, Poisson, Uniform 9/2: import numpy as np 9/3: disasters_array = \ np.array([ 4, 5, 4, 0, 1, 4, 3, 4, 0, 6, 3, 3, 4, 0, 2, 6, 3, 3, 5, 4, 5, 3, 1, 4, 4, 1, 5, 5, 3, 4, 2, 5, 2, 2, 3, 4, 2, 1, 3, 2, 2, 1, 1, 1, 1, 3, 0, 0, 1, 0, 1, 1, 0, 0, 3, 1, 0, 3, 2, 2, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 2, 1, 0, 0, 0, 1, 1, 0, 2, 3, 3, 1, 1, 2, 1, 1, 1, 1, 2, 4, 2, 0, 0, 1, 4, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1]) 9/4: switchPoint = DiscreteUniform('switchpoint', lower=0, upper=110, doc='Switchpoint[year]') 9/5: early_mean = Exponential('early_mean', beta=1.) 9/6: late_mean = Exponential('late_mean', beta=1.) 9/7: @deterministic(plot=false) def rate(s=switchPoint, e=early_mean, l=late_mean): ''' Concatyente Poisson means ''' out = np.empty(len(disasters_array)) out[:s] = e out[s:] = l return out 9/8: @deterministic(plot=False) def rate(s=switchPoint, e=early_mean, l=late_mean): ''' Concatyente Poisson means ''' out = np.empty(len(disasters_array)) out[:s] = e
def getSelector(params, numPointsToSelect):
    """Thin pass-through to e.getSelector with the same arguments."""
    selector = e.getSelector(params, numPointsToSelect)
    return selector
# DISTRIBUCION EXPONENCIAL elif optionSelected == 3: os.system("cls") nombreDistribucion = "Distribución Exponencial" print("\t::", nombreDistribucion, "::") # Parametros necesarios para la generació1n media = float(input("->Ingrese la media :")) numeroDatos = int(input("->Ingrese el número de variables aleatorias a generar :")) #nivelDeSignificacia = float(input("->Ingrese el nivel de significancia para la prueba chi cuadrado :")) nivelDeSignificacia = 0.05 tipoDistribucion = st.expon(1, media) objExpon = exponDistribution.Exponential(numeroDatos, tipoDistribucion, nivelDeSignificacia, nombreDistribucion, media) legendHistogram = r'$\mu$' + "="+ str("{0:.2f}".format(objExpon.meanGenerated)) legendDensity = r'$\mu$' + "=" + str(objExpon.mean) axisX = np.linspace(1, objExpon.mean + 6 * objExpon.des, 100) objExpon.chiSquareTest() objExpon.graph(legendHistogram, legendDensity, axisX) # DISTRIBUCION ERLANG elif optionSelected == 4: os.system("cls") nombreDistribucion = "Distribución Erlang"
################################## ### Fibonacci Search start = time.time() # # Algorithem starts here x = Fibonacci.Search(dic , search) print("Fibonacci Search Result: " + str(x)) # # Algoritem stops here end = time.time() print("Fibonacci Search Time: " + str(end-start) ) print() ################################## ### Exponential Search start = time.time() # # Algorithem starts here x = Exponential.Search(dic , search) print("Exponential Search Result: " + str(x)) # # Algoritem stops here end = time.time() print("Exponential Search Time: " + str(end-start)) print() ################################## # NOTE: Only works with int, and not sting. # ### Interpolation Search # start = time.time() # # # Algorithem starts here # x = Interpolation.Search(dic , search) # print("Interpolation Search Result: " + str(x)) # # # Algoritem stops here
# Ensemble forecast script: combines an exponential-smoothing prediction
# (ex.expo) with an ANN-based prediction built from its MSE history.
import Exponential as ex
import ANNTrain as ann
import pandas as pd
import matplotlib.pyplot as plt
data = pd.read_csv('eggs_new.csv')
usage = data['CPU usage [%]']
# exPred: per-step predictions; exError: per-step errors — TODO confirm
# exact semantics against Exponential.expo.
exPred, exError = ex.expo(usage)
mse_history = ann.ret_mse()
# ANN prediction reconstructed as actual usage plus the recorded MSE.
annPred = []
for i in range(len(mse_history)):
    annPred.append(usage[i] + mse_history[i])
# Accumulators for the ensemble weighting and plotting below.
exContri = []
annContri = []
finPred = []
enError = []
enAcc = []
axis = []
actual = []
# Inverse-error contribution weights.
# NOTE(review): raises ZeroDivisionError if any error value is exactly 0.
for i in exError:
    exContri.append(1/i)
# Truncated at the chunk boundary — this loop's body continues past the
# visible source.
for i in mse_history:
def setUp(self):
    """Create an exponential selector over one default dimension, then
    delegate to the Test3D base-class setup."""
    self.numToSelect = 840
    default_dim = exp.DimensionParameters()
    selector_params = exp.Parameters([default_dim])
    self.selector = exp.getSelector(selector_params, self.numToSelect)
    Test3D.setUp(self)