def success(listOfObjects):
    # Parse the stringified list coming in from the request, e.g. "['a', 'b']",
    # into a plain Python list of strings.
    res = listOfObjects.strip('][').split(', ')
    res = [re.sub(r"'", '', line) for line in res]
    # print(url_for('static', filename='css/style.css'))

    # Example feature vector; scale it with the fitted scaler and compute
    # state similarities against the reference DataFrame.
    x_cal0 = np.array([79975.5, 0.5064, 146.785, 5.53])
    x_cal = scaler.transform(x_cal0.reshape(1, -1))[0]
    df_state2 = state_simil(df_state, x_cal)
    df_state2['p_def'] = np.random.rand(df_state2.shape[0]) * 0.1 + 0.3

    # Build both figure divs and write them into the page template.
    fig_div = create_plot(df_state2)
    p = 0.3238543
    fig_div2 = plot2(p)

    with open("templates/plot1.html", 'w') as f:
        f.write(html_string.format(fig_div, fig_div2))

    return render_template('plot1.html')
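This view relies on objects created elsewhere in the application: a fitted scaler, the reference DataFrame df_state, the helpers state_simil, create_plot and plot2, and the html_string page template. The sketch below shows one plausible wiring that would let the view run as written; the route, the data path, the scaler fitting and the simplified template are illustrative assumptions, and the project-specific helpers are not reconstructed here. In a real module these imports and globals would normally precede the view.

import re

import numpy as np
import pandas as pd
from flask import Flask, render_template
from sklearn.preprocessing import StandardScaler

app = Flask(__name__)

# Assumed globals used inside success(); in the real app these are built at
# startup from the project's own data and model (names/paths are hypothetical).
df_state = pd.read_csv('data/states.csv')
scaler = StandardScaler().fit(df_state.iloc[:, :4])

# Simplified stand-in for the real page template with two figure placeholders.
html_string = """<html><body>{}{}</body></html>"""

# Expose the view defined above at an assumed URL; Flask passes the last path
# segment in as the listOfObjects string.
app.add_url_rule('/success/<listOfObjects>', view_func=success)

if __name__ == '__main__':
    app.run(debug=True)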
Example #2
X = np.linspace(0, Xmax, nX)  # discretisation of (dimensionless) distance
eps = (E - E0) * n * FRT  # dimensionless potential waveform
delta = np.sqrt(D * t[-1])  # cm, diffusion layer thickness
K0 = ks * delta / D  # normalised standard rate constant

#%% Simulation
for k in range(1, nT):
    # Boundary condition, Butler-Volmer:
    C[k, 0] = ((C[k - 1, 1] + dX * K0 * np.exp(-alpha * eps[k])) /
               (1 + dX * K0 * (np.exp((1 - alpha) * eps[k]) +
                               np.exp(-alpha * eps[k]))))

    # Solving finite differences:
    for j in range(1, nX - 1):
        C[k, j] = C[k - 1, j] + lamb * (C[k - 1, j + 1] - 2 * C[k - 1, j] +
                                        C[k - 1, j - 1])

# Denormalising: current from a three-point forward difference of the
# concentration gradient at the electrode surface.
i = (n * F * Ageo * D * cB *
     (-C[:, 2] + 4 * C[:, 1] - 3 * C[:, 0]) / (2 * dX * delta))
cR = C * cB
cO = cB - cR
x = X * delta
end = time.time()
print(end - start)

#%% Plot
p.plot(E, i, "$E$ / V", "$i$ / A")
p.plot2(x, cR[-1, :] * 1e6, x, cO[-1, :] * 1e6, "[R]", "[O]", "x / cm",
        "c($t_{end}$, $x$) / mM")
Example #3
            # Within-cluster sum of squared errors for the current cluster:
            cluster_mean = np.mean(cluster_data, axis=0)
            sse = np.sum(np.square(cluster_data - cluster_mean))

            t_sse.append(sse)

        # Total SSE (inertia) over all clusters:
        return np.sum(t_sse)


if __name__ == '__main__':
    # Generate Data
    X, y = make_blobs(n_samples=300, centers=3, n_features=2,
                      random_state=0, cluster_std=0.6)
    # Plot Data
    #plot1(X)

    # K-Means Clustering
    kmeans = KMeans(k=3, max_iters=100)

    labels = kmeans.fit_predict(X)
    print(kmeans.inertia_)
    #plot2(X, labels)

    kmeans_sk = KMeansSK(n_clusters=3, max_iter=100, init='random')
    labels = kmeans_sk.fit_predict(X)

    plot2(X, labels)
    print(kmeans_sk.inertia_)


# K-means objective: minimise sum_k sum_{x_i in C_k} ||x_i - mu_k||^2 over assignments
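The inertia printed above is exactly the quantity in the closing comment: the total within-cluster sum of squared distances. A small sketch, assuming only NumPy and scikit-learn, recomputes it from the fitted labels and centres and checks it against KMeans.inertia_; the helper name compute_inertia is illustrative.

import numpy as np
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs


def compute_inertia(X, labels, centers):
    # Total within-cluster SSE: sum_k sum_{x_i in C_k} ||x_i - mu_k||^2
    return sum(np.sum(np.square(X[labels == k] - centers[k]))
               for k in range(len(centers)))


X, _ = make_blobs(n_samples=300, centers=3, n_features=2,
                  random_state=0, cluster_std=0.6)

km = KMeans(n_clusters=3, n_init=10, random_state=0).fit(X)
print(km.inertia_)                                            # scikit-learn's value
print(compute_inertia(X, km.labels_, km.cluster_centers_))    # should agree closely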