Example #1
def interpret(parameters: np.ndarray, classes: np.ndarray, interval: float):
    n_samples, n_components, n_classes = classes.shape
    assert parameters.ndim == 3
    assert parameters.shape == (n_samples, Weibull.N_PARAMETERS + 1, n_components)
    shapes = np.expand_dims(relu(parameters[:, 0, :]), 2).repeat(n_classes, 2)
    scales = np.expand_dims(relu(parameters[:, 1, :]), 2).repeat(n_classes, 2)
    proportions = np.expand_dims(softmax(parameters[:, 2, :], axis=1), 1)
    components = weibull_min.pdf(classes, shapes, scale=scales) * interval
    m, v, s, k = weibull_min.stats(shapes[:, :, 0], scale=scales[:, :, 0], moments="mvsk")
    return proportions, components, (m, np.sqrt(v), s, k)
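Example #1 relies on names defined elsewhere in its module (`relu`, `softmax`, `Weibull.N_PARAMETERS`). A self-contained sketch of how the function might be exercised, with stand-ins for those names (the stand-ins are assumptions, not the original project's code):

import numpy as np
from scipy.special import softmax       # stand-in: softmax over the component axis
from scipy.stats import weibull_min

def relu(x):
    # stand-in for the module's relu helper
    return np.maximum(x, 0.0)

class Weibull:
    N_PARAMETERS = 2                    # assumed: shape and scale

# dummy batch: 4 samples, 3 mixture components, 10 class bins
n_samples, n_components, n_classes = 4, 3, 10
parameters = np.random.rand(n_samples, Weibull.N_PARAMETERS + 1, n_components) + 0.5
classes = np.tile(np.linspace(0.1, 5.0, n_classes), (n_samples, n_components, 1))

proportions, components, (mean, std, skew, kurt) = interpret(parameters, classes, interval=0.5)
print(proportions.shape, components.shape, mean.shape)  # (4, 1, 3) (4, 3, 10) (4, 3)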
Example #2
def sim_weibull_min():
    # Monte Carlo check of the renewal theorem: with weibull_min(c) inter-arrival
    # times, the long-run arrival rate is 1 / E[X].
    c = 1.79
    mean, var, skew, kurt = weibull_min.stats(c, moments='mvsk')
    print(1 / mean)  # theoretical rate
    catches = 0
    for _ in range(10000):
        j = np.random.uniform() * 1000
        t_i = 0
        while t_i < j + 500:
            t_i += weibull_min.rvs(c)
            if j < t_i and t_i < j + 1:
                catches += 1
    print(catches / 10000)  # empirical mean number of arrivals in a unit-length window
Example #3
def sim_weibull_min_v2():
    # Variant of the simulation above: the observation window has length 30 and
    # the arrival rate is estimated in two different ways.
    c = 1.79
    mean, var, skew, kurt = weibull_min.stats(c, moments='mvsk')
    catches = 0
    catches2 = 0
    total_t = 0
    for _ in range(20000):
        j = np.random.uniform() * 50
        t_i = 0
        tt = 0
        catches1 = -1
        while t_i < j + 100:
            t_i += weibull_min.rvs(c)
            if j < t_i and t_i < j + 30:
                tt = t_i
                catches += 1
                catches1 += 1
            total_t += max((tt - j), 0)
            catches2 += max(0, catches1)
    print(catches / 20000 / 30)
    print(catches2 / total_t)
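Both simulations estimate the long-run arrival rate of a renewal process whose inter-arrival times follow weibull_min(c); by the elementary renewal theorem this rate is 1 / E[X]. A quick sketch of the value the simulated estimates should converge to (not part of the original examples):

from scipy.stats import weibull_min

c = 1.79
print("expected rate:", 1 / weibull_min.mean(c))  # target for catches / 10000 above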
Example #4
import numpy as np
from scipy.stats import weibull_min
import matplotlib.pyplot as plt
fig, ax = plt.subplots(1, 1)

# Calculate a few first moments:

c = 1.79
mean, var, skew, kurt = weibull_min.stats(c, moments='mvsk')

# Display the probability density function (``pdf``):

x = np.linspace(weibull_min.ppf(0.01, c), weibull_min.ppf(0.99, c), 100)
ax.plot(x,
        weibull_min.pdf(x, c),
        'r-',
        lw=5,
        alpha=0.6,
        label='weibull_min pdf')

# Alternatively, the distribution object can be called (as a function)
# to fix the shape, location and scale parameters. This returns a "frozen"
# RV object holding the given parameters fixed.

# Freeze the distribution and display the frozen ``pdf``:

rv = weibull_min(c)
ax.plot(x, rv.pdf(x), 'k-', lw=2, label='frozen pdf')

# Check accuracy of ``cdf`` and ``ppf``:

vals = weibull_min.ppf([0.001, 0.5, 0.999], c)
np.allclose([0.001, 0.5, 0.999], weibull_min.cdf(vals, c))
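Since c, mean, and var are already in scope here, the mvsk moments can also be cross-checked against sample moments of random draws; a brief sketch, not part of the original SciPy docs snippet:

r = weibull_min.rvs(c, size=100000, random_state=0)
print(mean, r.mean())  # theoretical vs. sample mean
print(var, r.var())    # theoretical vs. sample variance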
Example #5
def optimize1Model(modele, datedebut):
    """
        
        Modelise la courbe google trends par une fonction mathematique en optimisant ses parametres. Parmi les trois distributions, on retient la meilleure
        
        modele: str, le modele a optimiser
        
        datedebut: dict, dictionnaire associant a chaque modele sa date de lancement
        
        return: une liste avec l'ecart cummule entre la modelisation et la courbe brute pour les trois lois, la meilleure distribution et l'estimation de l'obsolescence associee.
        
        """
    modele = formatNameModel(modele)
    print('optimising model: ', modele)
    donnees_brutes = recoverRawTrends(modele)

    ## retrieve or compute the launch date
    launchdate = datedebut[modele]
    if not launchdate:
        launchdate = calculatelaunchdate(donnees_brutes["Mois"],
                                         donnees_brutes[modele])

    ## objective functions for the chosen distribution
    def objW(X):
        return objectif('w', X, launchdate.toordinal(), donnees_brutes[modele],
                        donnees_brutes["Mois"])

    def objA(X):
        return objectif('a', X, launchdate.toordinal(), donnees_brutes[modele],
                        donnees_brutes["Mois"])

    def objT(X):
        return objectif('t', X, launchdate.toordinal(), donnees_brutes[modele],
                        donnees_brutes["Mois"])

    ## minimizer
    x0W = [183, 25000, 1.15]  # initial guess for the Weibull fit
    bndsW = ((150, 1500), (24000, 100000), (1.05, 1.9))  # bounds on the Weibull parameters
    x0A = [250, 50, 0]  # initial guess for the alpha and triangular fits
    bndsA = ((100, 5000), (1, 100), (-400, 50))  # bounds for the alpha and triangular laws
    # (100-5000 days, 1-100 y-axis units, and -400 to 50 for the offset)
    resultW = minimize(objW,
                       x0W,
                       method='Powell',
                       bounds=bndsW,
                       options={
                           'maxiter': 100000,
                           'maxfev': 1000,
                           'return_all': True
                       })  # run the optimiser on our model fit
    ecartW = objectif('w', resultW.x, launchdate.toordinal(),
                      donnees_brutes[modele], donnees_brutes["Mois"])
    min = 'w'
    resultA = minimize(objA,
                       x0A,
                       method='Powell',
                       bounds=bndsA,
                       options={
                           'maxiter': 100000,
                           'maxfev': 1000,
                           'return_all': True
                       })
    ecartA = objectif('a', resultA.x, launchdate.toordinal(),
                      donnees_brutes[modele], donnees_brutes["Mois"])
    if ecartA < ecartW:
        min = 'a'
    resultT = minimize(objT,
                       x0A,
                       method='Powell',
                       bounds=bndsA,
                       options={
                           'maxiter': 100000,
                           'maxfev': 1000,
                           'return_all': True
                       })
    ecartT = objectif('t', resultT.x, launchdate.toordinal(),
                      donnees_brutes[modele], donnees_brutes["Mois"])
    if (ecartT < ecartA and min == 'a') or (ecartT < ecartW and min == 'w'):
        min = 't'
    print('best distribution: ' + min)
    print('optimised parameters: ', end='')
    if min == 'a':
        print(resultA.x)
        best = resultA
        minscore = round(
            alpha.ppf(0.8,
                      0.8,
                      loc=launchdate.toordinal() - resultA.x[0] / 10 +
                      resultA.x[2],
                      scale=resultA.x[0] / 1.7) - launchdate.toordinal())
    elif min == 't':
        print(resultT.x)
        best = resultT
        minscore = round(
            triang.stats(0.1,
                         launchdate.toordinal() +
                         resultT.x[2], resultT.x[0], 'm') +
            2.5 * math.sqrt(
                triang.stats(0.1,
                             launchdate.toordinal() +
                             resultT.x[2], resultT.x[0], 'v')) -
            launchdate.toordinal())
    else:
        print(resultW.x)
        best = resultW
        minscore = round(
            weibull_min.stats(resultW.x[2], launchdate.toordinal(),
                              resultW.x[0], 'm') +
            2.5 * math.sqrt(
                weibull_min.stats(resultW.x[2], launchdate.toordinal(),
                                  resultW.x[0], 'v')) - launchdate.toordinal())
    print()
    # montre_solution(min, best.x, launchdate.toordinal(), donnees_brutes[modele], donnees_brutes["Mois"], modele)  # display the solution

    return ([modele, resultA.x, resultT.x,
             resultW.x], [modele, ecartT, ecartA, ecartW, min, minscore])
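formatNameModel, recoverRawTrends, calculatelaunchdate, and objectif are defined elsewhere in the project. For orientation only, a hypothetical sketch of what an objectif-style cost for the Weibull case could look like, given that resultW.x is later read as (scale, amplitude, shape); the real implementation may differ:

import numpy as np
from scipy.stats import weibull_min

def objectif_weibull_sketch(X, launch_ordinal, raw_values, months_ordinal):
    # Hypothetical cost: cumulative absolute gap between a scaled Weibull pdf
    # and the raw Google Trends curve. X = (scale, amplitude, shape).
    scale, amplitude, shape = X
    t = np.asarray(months_ordinal) - launch_ordinal        # days since launch
    model = amplitude * weibull_min.pdf(t, shape, scale=scale)
    return np.sum(np.abs(model - np.asarray(raw_values)))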
Example #6
def plot_distributions(data, label, output_dir):

    d = []
    for i in data:
        #if int(i) != 0: # and int(i) < 280000:
        d.append(i)

    x = np.linspace(min(data), max(data), len(data))  #np.sort(data)
    mu = np.mean(x)
    sigma = np.std(x)
    n_bins = 30

    fig, ax = plt.subplots(figsize=(4, 2.5))  #(4, 3))

    data = data1 = np.sort(d)
    mu = np.mean(data, keepdims=True)
    sigma = np.std(data)

    plt.xscale('log')
    ################################################
    #Data plot
    y = np.arange(1, len(data) + 1) / len(data)
    plt.plot(data, y, 'r-', linewidth=0.9, label="Data")

    ################################################
    #Exponential plot
    loc, scale = ss.expon.fit(data, floc=0)

    y = ss.expon.cdf(data, loc, scale)

    # data is already sorted, so returning the precomputed CDF values y works as
    # the callable that kstest evaluates on the sorted sample
    D, P = ss.kstest(data, lambda x: y)

    plt.plot(data,
             y,
             'm-.',
             linewidth=0.6,
             label="Exponential - KS D=" + str(round(D, 3)))
    print("Exponential KS D Value: " + str(D) + " - P value: " + str(P))

    ################################################

    #lognormal plot
    logdata = np.log(data)
    #estimated_mu, estimated_sigma, scale = ss.norm.fit(logdata)
    shape, loc, scale = ss.lognorm.fit(data, floc=0)

    #scale = estimated_mu
    #s = estimated_sigma
    #y = (1+scipy.special.erf((np.log(data)-scale)/(np.sqrt(2)*s)))/2 #ss.lognorm.cdf(data, s, scale)
    y = ss.lognorm.cdf(data, shape, loc, scale)

    D, P = ss.kstest(data, lambda x: y)

    plt.plot(data,
             y,
             'c:',
             linewidth=0.6,
             label="Lognormal - KS D=" + str(round(D, 3)))
    print("Lognormal KS D Value: " + str(D) + " - P value: " + str(P))
    #################################################
    #Weibull

    shape, loc, scale = ss.weibull_min.fit(data, floc=0)

    print("shape")
    print(shape)

    wei = ss.weibull_min(shape, loc, scale)  # frozen Weibull object (shape, loc, scale)

    meanw, var = weibull_min.stats(shape, loc, scale, moments='mv')

    D, P = ss.kstest(data, lambda x: wei.cdf(data))

    plt.plot(data,
             wei.cdf(data),
             'b-',
             linewidth=0.6,
             label="Weibull - KS D=" + str(round(D, 3)))
    print("Weibull KS D Value: " + str(D) + " - P value: " + str(P))

    #################################################
    #Gamma
    shape1, loc1, scale1 = gamma.fit(data, floc=0)

    y = gamma.cdf(x=data, a=shape1, loc=loc1, scale=scale1)

    D, P = ss.kstest(data, lambda x: y)

    plt.plot(data,
             y,
             'g--',
             linewidth=0.6,
             label="Gamma - KS D=" + str(round(D, 3)))

    plt.legend(edgecolor="black", prop={'size': 7})

    print("Weibull KS D Value: " + str(D) + " - P value: " + str(P))
    print(
        "---------------------------------------------------------------------------------------"
    )

    print("Weibull Mean: " + str(meanw / 60 / 60))
    print("Weibull Var: " + str(var / 60 / 60))
    # print(data)
    # print(wei.cdf(data))
    #################################################
    plt.xlabel('TBF (seconds)')
    plt.ylabel('Cumulative Probability')
    #plt.legend(framealpha=1,shadow=True, borderpad = 1, fancybox=True)
    plt.tight_layout()
    plt.savefig(output_dir + "plot_cdf_" + label + ".pdf")
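The lambda x: y pattern above works only because data is pre-sorted and y was computed on it in the same order. An alternative sketch (not the original code) passes the fitted parameters to ss.kstest directly, which removes that coupling:

import numpy as np
import scipy.stats as ss

data = np.sort(ss.weibull_min.rvs(1.2, scale=3600, size=500, random_state=0))  # dummy TBF sample
shape, loc, scale = ss.weibull_min.fit(data, floc=0)
D, P = ss.kstest(data, 'weibull_min', args=(shape, loc, scale))
print("Weibull KS D Value:", D, "- P value:", P)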
Example #7
def hesap(ax, bx):
    # Monte Carlo comparison of six confidence intervals for the mean of a
    # Weibull(ax / bx) sample (Student-t, AADM, MAAD, MADM, Johnson, Chen).
    # For each sample size n = 5..9 the coverage probability (CP) and average
    # width (AW) over 2500 replications are written to an Excel sheet.
    # "Alt"/"Ust" are the Turkish names for the lower/upper bounds.
    z, k, m, n, p, t = 0, 0, 0, 0, 0, 0
    studentGuvenAlt, aadmGuvenAlt, maadGuvenAlt, madmGuvenAlt, johnsonGuvenAlt, chenGuvenAlt = list(
    ), list(), list(), list(), list(), list()
    studentGuvenUst, aadmGuvenUst, maadGuvenUst, madmGuvenUst, johnsonGuvenUst, chenGuvenUst = list(
    ), list(), list(), list(), list(), list()

    workbook = xlwt.Workbook()
    sayfa = workbook.add_sheet("Sayfa1")
    sayfa.write(0, 1, "Student-t")
    sayfa.write(0, 3, "AADM-t")
    sayfa.write(0, 5, "MAAD-t")
    sayfa.write(0, 7, "MADM-t")
    sayfa.write(0, 9, "Johnson-t")
    sayfa.write(0, 11, "Chen-t")

    for item in range(0, 13):
        if item == 0:
            sayfa.write(1, 0, "n")
        elif item % 2 == 0:
            sayfa.write(1, item, "AW")

        else:
            sayfa.write(1, item, "CP")

    for i in range(5, 10):
        for j in range(1, 2501):
            x = weibull_min.rvs(ax / bx, size=i)
            mean, var, skew, kurt = weibull_min.stats(ax / bx, moments='mvsk')
            meanx = round(statistics.mean(x), 4)
            medianx = round(statistics.median(x), 4)
            stdevx = round(statistics.stdev(x), 4)
            aadmx = round((math.sqrt(math.pi / 2) / i) * sum(abs(x - medianx)),
                          4)
            maadx = round(statistics.median(abs(x - meanx)), 4)
            madmx = round(statistics.median(abs(x - medianx)), 4)

            toplam = 0
            for idx in range(0, i):  # use a separate index so the AADM counter k is not clobbered
                toplam = toplam + ((x[idx] - meanx)**3)

            m3 = (i / ((i - 1) * (i - 2))) * toplam

            studentalt = round(meanx - cell[i - 5] * stdevx / math.sqrt(i), 4)
            studentust = round(meanx + cell[i - 5] * stdevx / math.sqrt(i), 4)
            aadmalt = round(meanx - cell[i - 5] * aadmx / math.sqrt(i), 4)
            aadmust = round(meanx + cell[i - 5] * aadmx / math.sqrt(i), 4)
            maadalt = round(meanx - cell[i - 5] * maadx / math.sqrt(i), 4)
            maadust = round(meanx + cell[i - 5] * maadx / math.sqrt(i), 4)
            madmalt = round(meanx - cell[i - 5] * madmx / math.sqrt(i), 4)
            madmust = round(meanx + cell[i - 5] * madmx / math.sqrt(i), 4)
            johnsonalt = round((meanx + (m3 / (6 * i * (stdevx**2)))) -
                               cell[i - 5] * math.sqrt(i) * stdevx, 4)
            johnsonust = round((meanx + (m3 / (6 * i * (stdevx**2)))) +
                               cell[i - 5] * math.sqrt(i) * stdevx, 4)
            chenalt = round(meanx - (cell[i - 5] + (
                ((m3 / (stdevx**3)) *
                 (1 + 2 * (cell[i - 5]**2))) / (6 * math.sqrt(i))) + (
                     (((m3 / (stdevx**3))**2) *
                      (cell[i - 5] + 2 *
                       (cell[i - 5])**2) / 9 * i)) + math.sqrt(i) * stdevx))
            chenust = round(meanx + (cell[i - 5] + (
                ((m3 / (stdevx**3)) *
                 (1 + 2 * (cell[i - 5]**2))) / (6 * math.sqrt(i))) + (
                     (((m3 / (stdevx**3))**2) *
                      (cell[i - 5] + 2 *
                       (cell[i - 5])**2) / 9 * i)) + math.sqrt(i) * stdevx))

            studentGuvenAlt.append(studentalt)
            studentGuvenUst.append(studentust)
            aadmGuvenAlt.append(aadmalt)
            aadmGuvenUst.append(aadmust)
            maadGuvenAlt.append(maadalt)
            maadGuvenUst.append(maadust)
            madmGuvenAlt.append(madmalt)
            madmGuvenUst.append(madmust)
            johnsonGuvenAlt.append(johnsonalt)
            johnsonGuvenUst.append(johnsonust)
            chenGuvenAlt.append(chenalt)
            chenGuvenUst.append(chenust)

            if studentalt <= mean <= studentust:
                z = z + 1

            if aadmalt <= mean <= aadmust:
                k = k + 1

            if madmalt <= mean <= madmust:
                m = m + 1

            if maadalt <= mean <= maadust:
                n = n + 1

            if johnsonalt <= mean <= johnsonust:
                p = p + 1

            if chenalt <= mean <= chenust:
                t = t + 1

        sayfa.write(i - 3, 0, f"{i}")
        sayfa.write(i - 3, 1, f"{round(z / 2500, 4)}")
        sayfa.write(
            i - 3, 2,
            f"{round(statistics.mean(studentGuvenUst) - statistics.mean(studentGuvenAlt), 4)}"
        )
        sayfa.write(i - 3, 3, f"{round(k / 2500, 4)}")
        sayfa.write(
            i - 3, 4,
            f"{round(statistics.mean(aadmGuvenUst) - statistics.mean(aadmGuvenAlt), 4)}"
        )
        sayfa.write(i - 3, 5, f"{round(n / 2500, 4)}")
        sayfa.write(
            i - 3, 6,
            f"{round(statistics.mean(maadGuvenUst) - statistics.mean(maadGuvenAlt), 4)}"
        )
        sayfa.write(i - 3, 7, f"{round(m / 2500, 4)}")
        sayfa.write(
            i - 3, 8,
            f"{round(statistics.mean(madmGuvenUst) - statistics.mean(madmGuvenAlt), 4)}"
        )
        sayfa.write(i - 3, 9, f"{round(p / 2500, 4)}")
        sayfa.write(
            i - 3, 10,
            f"{round(statistics.mean(johnsonGuvenUst) - statistics.mean(johnsonGuvenAlt), 4)}"
        )
        sayfa.write(i - 3, 11, f"{round(t / 2500, 4)}")
        sayfa.write(
            i - 3, 12,
            f"{round(statistics.mean(chenGuvenUst) - statistics.mean(chenGuvenAlt), 4)}"
        )

        workbook.save(f'W({ax} {bx}).xls')  # save the Excel workbook

        z, k, m, n, p, t = 0, 0, 0, 0, 0, 0
        studentGuvenAlt, aadmGuvenAlt, maadGuvenAlt, madmGuvenAlt, johnsonGuvenAlt, chenGuvenAlt = list(
        ), list(), list(), list(), list(), list()
        studentGuvenUst, aadmGuvenUst, maadGuvenUst, madmGuvenUst, johnsonGuvenUst, chenGuvenUst = list(
        ), list(), list(), list(), list(), list()
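cell is not defined in this excerpt; judging from the indexing cell[i - 5] for sample sizes n = 5..9, it presumably holds the critical values used for the intervals. A hypothetical way to build two-sided 95% Student-t critical values with SciPy (an assumption, the original may use other values):

from scipy.stats import t

# two-sided 95% t critical values for sample sizes n = 5..9 (df = n - 1)
cell = [t.ppf(0.975, n - 1) for n in range(5, 10)]
print(cell)  # roughly [2.776, 2.571, 2.447, 2.365, 2.306]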
Example #8
def generate_data(output_dir, CPU_factor, GPU_factor, MEM_factor):

	'''
	# generated with a Weibull mixture in R
	alpha_mem = 1208
	beta_mem  = 0.871

	alpha_gpu = 59666
	beta_gpu  = 0.934

	alpha_cpu = 289484
	beta_cpu  = 2.778
	'''

	##alpha and beta of real data
	#'''
	alpha_mem = 295647
	beta_mem = 0.918
	
	alpha_gpu = 44721
	beta_gpu = 0.551

	alpha_cpu = 1595690
	beta_cpu = 0.758
	

	#'''
	####################################################################
	#extract real mtbf data
	x_cpu = []
	x_gpu = []
	x_mem = []
	file_name = "../Data/Titan_Data/GPU_mtbf_epoch1.txt"
	with open(file_name) as log:
		for line in log:
				x_gpu.append(int(line))
	
	file_name = "../Data/Titan_Data/CPU_mtbf_epoch1.txt"
	with open(file_name) as log:
		for line in log:
				x_cpu.append(int(line))

	file_name = "../Data/Titan_Data/MEM_mtbf_epoch1.txt"
	with open(file_name) as log:
		for line in log:
			x_mem.append(int(line))

	n_samples_gpu = int(int(len(x_gpu)) * float(GPU_factor))
	n_samples_cpu = int(int(len(x_cpu)) * float(CPU_factor))
	n_samples_mem = int(int(len(x_mem)) * float(MEM_factor))
	
	

	gpu = Weibull_Distribution(alpha=alpha_gpu,beta=beta_gpu).random_samples(n_samples_gpu)
	cpu = Weibull_Distribution(alpha=alpha_cpu,beta=beta_cpu).random_samples(n_samples_cpu)
	mem = Weibull_Distribution(alpha=alpha_mem,beta=beta_mem).random_samples(n_samples_mem)

	shape, loc, scale = ss.weibull_min.fit(gpu, floc=0)
	mean_gpu, var = weibull_min.stats(shape,loc,scale, moments='mv')
	print("gpu total failures = "+ str(n_samples_gpu))
	print("gpu shape = " + str(shape))
	print("gpu scale = " + str(scale))
	print("gpu Mean: "+str(mean_gpu))
	print("-----------------")
	shape, loc, scale = ss.weibull_min.fit(cpu, floc=0)
	mean_cpu, var = weibull_min.stats(shape,loc,scale, moments='mv')
	print("cpu total failures = "+ str(n_samples_cpu))
	print("cpu shape = " + str(shape))
	print("cpu scale = " + str(scale))
	print("cpu Mean: "+str(mean_cpu))
	print("-----------------")
	shape, loc, scale = ss.weibull_min.fit(mem, floc=0)
	mean_mem, var = weibull_min.stats(shape,loc,scale, moments='mv')
	print("mem total failures = "+ str(n_samples_mem))
	print("mem shape = " + str(shape))
	print("mem scale = " + str(scale))
	print("mem Mean: "+str(mean_mem))

	#new proportions
	t = 0
	t = n_samples_mem+n_samples_cpu+n_samples_gpu
	
	proportion_cpu = n_samples_cpu / t
	proportion_gpu = n_samples_gpu / t
	proportion_mem = n_samples_mem / t

	mixture_mean = mean_gpu * proportion_gpu + mean_mem * proportion_mem + mean_cpu * proportion_cpu
	print("-----------------")
	print("Total failures = "+ str(n_samples_mem+n_samples_cpu+n_samples_gpu))
	print("mixture mean without mixture= " + str(mixture_mean/60/60))

	print("-----------------")
	xvals = np.linspace(0,50,t)
	wei_cpu = Weibull_Distribution(alpha=alpha_cpu,beta=beta_cpu).CDF(xvals=xvals,show_plot=False)
	wei_gpu = Weibull_Distribution(alpha=alpha_gpu,beta=beta_gpu).CDF(xvals=xvals,show_plot=False)
	wei_mem = Weibull_Distribution(alpha=alpha_mem,beta=beta_mem).CDF(xvals=xvals,show_plot=False)
	Mixture_CDF = wei_gpu * proportion_gpu + wei_cpu * proportion_cpu + wei_mem * proportion_mem
	shape, loc, scale = ss.weibull_min.fit(Mixture_CDF, floc=0)
	meanw, var = weibull_min.stats(shape,loc,scale, moments='mv')
	print("Mix Mean: "+str(meanw/60/60))


	#save all data (cpu + gpu + mem)
	all_data = np.hstack([gpu,cpu,mem])
	file = open(output_dir + "TOTAL_mtbf_epoch1_exascale.txt", 'w+')
	for i in all_data:
		if int(i) != 0:
			file.write(str(int(i))+"\n")

	file.close()
	sys.exit()
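Example #8 concatenates independent samples of the three components. Another way to draw from the three-part mixture is to sample each observation's component label from the proportions and then draw from that component; a brief sketch with SciPy (hypothetical, not the code used above, with placeholder weights):

import numpy as np
from scipy.stats import weibull_min

rng = np.random.default_rng(0)
components = [(0.918, 295647.0), (0.551, 44721.0), (0.758, 1595690.0)]  # (shape, scale) per part
weights = [0.2, 0.5, 0.3]                                               # placeholder proportions

labels = rng.choice(len(components), size=10000, p=weights)
counts = np.bincount(labels, minlength=len(components))
samples = np.concatenate([
    weibull_min.rvs(c, scale=s, size=n, random_state=rng)
    for (c, s), n in zip(components, counts)
])
print(samples.mean())  # close to the weight-averaged component means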
Example #9
def get_moments(*args) -> dict:
    assert len(args) == len(WeibullDistribution.get_parameter_names())
    m, v, s, k = weibull_min.stats(*args, moments="mvsk")
    std = np.sqrt(v)
    moments = dict(mean=m, std=std, skewness=s, kurtosis=k)
    return moments
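WeibullDistribution.get_parameter_names() is defined elsewhere; assuming it returns only the shape parameter, a call might look like this (a sketch under that assumption):

moments = get_moments(1.79)  # assumes get_parameter_names() == ("c",)
print(moments["mean"], moments["std"], moments["skewness"], moments["kurtosis"])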