import numpy as np
import matplotlib.pyplot as plt


def exampleB2_5(G=40, T=1.174, gamma=42.58, alpha=np.arange(180, 59, -1)):
    """Effect of crushers around a refocusing pulse.

    G     : crusher gradient amplitude (mT/m)
    T     : crusher duration (ms)
    gamma : gyromagnetic ratio (kHz/mT)
    alpha : refocusing flip angles (deg), 180 down to 60 inclusive
            (matching the MATLAB range 180:-1:60)
    """
    x = np.arange(0, 1, 0.01) / 1000        # Position (m); ~2 cycles/mm.
    Grot = 360 * x * G * T * gamma          # Rotation (deg) due to gradient at each voxel.
    Ms = np.array([[1.0], [0.0], [0.0]])    # Starting magnetization.
    Mse = np.zeros(len(alpha))              # Spin-echo signal vs flip angle.

    for n, alpha_n in enumerate(alpha):
        Mend = np.zeros((3, len(x)))
        for k, Grot_k in enumerate(Grot):
            M = zrot(Grot_k) @ Ms           # Crusher 1.
            M = xrot(alpha_n) @ M           # Refocusing pulse.
            M = zrot(Grot_k) @ M            # Crusher 2.
            Mend[:, k] = M.ravel()

        Mxy = Mend[0, :] + 1j * Mend[1, :]
        Mse[n] = np.abs(np.mean(Mxy))       # Net signal across the voxel.

        plt.figure(1)
        plt.clf()
        plt.plot(x * 1000, np.real(Mxy), 'k--')
        plt.plot([np.min(x) * 1000, np.max(x) * 1000], [Mse[n], Mse[n]], 'b-')
        plt.axis([np.min(x) * 1000, np.max(x) * 1000, -1.2, 1.2])
        plt.xlabel('Position (mm)')
        plt.ylabel('Signal')
        plt.legend(['$M_{xy}$', 'Avg $M_{xy}$'])
        plt.title('%d Degree Refocusing Angle' % alpha_n)
        plt.pause(0.001)                    # drawnow equivalent.

    plt.figure(2)
    plt.plot(alpha, Mse)
    plt.xlabel('Refocusing Angle (deg)')
    plt.ylabel('Spin Echo Signal')
    plt.title('Spin Echo vs Refoc. Angle')
    plt.ylim(0, 1)
    plt.show()
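# exampleB2_5() calls xrot() and zrot(), which are not defined in this
# listing. Below is a minimal sketch of the usual rotation-matrix helpers,
# assuming angles in degrees (consistent with how Grot and alpha are
# computed above). They only need to exist before the function is called.
import numpy as np


def xrot(phi):
    """Rotation about the x-axis by phi degrees (3x3 matrix)."""
    phi = np.deg2rad(phi)
    c, s = np.cos(phi), np.sin(phi)
    return np.array([[1, 0, 0],
                     [0, c, -s],
                     [0, s, c]])


def zrot(phi):
    """Rotation about the z-axis by phi degrees (3x3 matrix)."""
    phi = np.deg2rad(phi)
    c, s = np.cos(phi), np.sin(phi)
    return np.array([[c, -s, 0],
                     [s, c, 0],
                     [0, 0, 1]])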
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans

# Load the dataset and view the top five records.
dataset = pd.read_csv('Mall_Customer.csv')
x = dataset.iloc[:, [3, 4]].values
print(dataset.head())

# Use the elbow method to find the optimal number of clusters.
wcss = []
for i in range(1, 11):
    kmeans = KMeans(n_clusters=i, init='k-means++', random_state=42)
    kmeans.fit(x)
    wcss.append(kmeans.inertia_)
plt.plot(range(1, 11), wcss)
plt.title('The Elbow Method')
plt.xlabel('Number of clusters')
plt.ylabel('WCSS')
plt.show()

# Train the K-Means model on the dataset.
kmeans = KMeans(n_clusters=5, init='k-means++', random_state=42)
y_kmeans = kmeans.fit_predict(x)

# Visualise the clusters (labels come from y_kmeans here, not y_hc).
plt.scatter(x[y_kmeans == 0, 0], x[y_kmeans == 0, 1], s=100, c='red', label='Cluster 1')
plt.scatter(x[y_kmeans == 1, 0], x[y_kmeans == 1, 1], s=100, c='blue', label='Cluster 2')
plt.scatter(x[y_kmeans == 2, 0], x[y_kmeans == 2, 1], s=100, c='green', label='Cluster 3')
plt.scatter(x[y_kmeans == 3, 0], x[y_kmeans == 3, 1], s=100, c='cyan', label='Cluster 4')
plt.scatter(x[y_kmeans == 4, 0], x[y_kmeans == 4, 1], s=100, c='magenta', label='Cluster 5')
plt.title('Clusters of customers')
plt.xlabel('Annual Income (k$)')
plt.ylabel('Spending Score (1-100)')
plt.legend()
plt.show()
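# Optional: overlay the fitted centroids on the cluster plot; place these
# calls before plt.legend() above. kmeans.cluster_centers_ is the standard
# scikit-learn attribute; the styling is an illustrative choice, not from
# the original listing.
plt.scatter(kmeans.cluster_centers_[:, 0], kmeans.cluster_centers_[:, 1],
            s=300, c='yellow', edgecolors='black', label='Centroids')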
import matplotlib.pyplot as plt
from tensorflow.keras import layers, models, optimizers

model1 = models.Sequential()
model1.add(layers.Dense(512, activation="relu",
                        input_shape=(None, data.shape[-1])))
model1.add(layers.Dropout(0.5))
model1.add(layers.LSTM(32, dropout=0.5, return_sequences=True))
model1.add(layers.LSTM(64, dropout=0.5))
model1.add(layers.Dense(512, activation="relu"))
model1.add(layers.Dropout(0.5))
model1.add(layers.Dense(8, activation="softmax"))  # was "model1add(...)": missing dot.
model1.summary()

model1.compile(optimizer=optimizers.RMSprop(learning_rate=0.0001),  # "lr" is deprecated.
               loss="categorical_crossentropy", metrics=["acc"])
history1 = model1.fit(train_gen,  # fit_generator is deprecated; fit() accepts generators.
                      steps_per_epoch=train_steps, epochs=200,
                      validation_data=val_gen, validation_steps=val_steps)

loss = history1.history["loss"]
val_loss = history1.history["val_loss"]
acc = history1.history["acc"]
val_acc = history1.history["val_acc"]
epochs = range(1, len(loss) + 1)

plt.figure(figsize=(10, 5))
plt.plot(epochs, acc, c="black", label="acc")  # "b" format string dropped; c= overrode it anyway.
plt.plot(epochs, val_acc, c="green", label="val_acc")
plt.title("Training and validation accuracy")
plt.legend()

model1.save("T1001.h5")
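# The model above expects train_gen/val_gen to yield batches of shape
# (batch, timesteps, data.shape[-1]) with one-hot targets for 8 classes.
# None of that pipeline appears in this listing; the generator below is a
# minimal sketch under those assumptions, not the original data code.
import numpy as np


def window_generator(features, labels, lookback=64, batch_size=32, n_classes=8):
    """Yield (batch, lookback, n_features) windows with one-hot targets."""
    while True:
        idx = np.random.randint(lookback, len(features), size=batch_size)
        batch_x = np.stack([features[i - lookback:i] for i in idx])
        batch_y = np.eye(n_classes)[labels[idx]]  # Integer labels -> one-hot.
        yield batch_x, batch_y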
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import AgglomerativeClustering

# Load the dataset and view the top five records.
dataset = pd.read_csv('Mall_Customer.csv')
x = dataset.iloc[:, [3, 4]].values

# Train the Hierarchical Clustering model on the dataset.
# Ward linkage uses Euclidean distance by default; the misspelled
# affinity='euclidian' argument is dropped (affinity is deprecated
# in recent scikit-learn).
hc = AgglomerativeClustering(n_clusters=5, linkage='ward')
y_hc = hc.fit_predict(x)

# Visualise the clusters.
plt.scatter(x[y_hc == 0, 0], x[y_hc == 0, 1], s=100, c='red', label='Cluster 1')
plt.scatter(x[y_hc == 1, 0], x[y_hc == 1, 1], s=100, c='blue', label='Cluster 2')
plt.scatter(x[y_hc == 2, 0], x[y_hc == 2, 1], s=100, c='green', label='Cluster 3')
plt.scatter(x[y_hc == 3, 0], x[y_hc == 3, 1], s=100, c='cyan', label='Cluster 4')
plt.scatter(x[y_hc == 4, 0], x[y_hc == 4, 1], s=100, c='magenta', label='Cluster 5')
plt.title('Clusters of customers')
plt.xlabel('Annual Income (k$)')
plt.ylabel('Spending Score (1-100)')
plt.legend()
plt.show()

# Dataset: https://www.kaggle.com/shwetabh123/mall-customers
# 10 Models for Clustering:
# K-Means
# Affinity Propagation
# BIRCH
# DBSCAN
# Mini Batch K-Means
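# A dendrogram is the usual way to choose n_clusters for hierarchical
# clustering. Below is a minimal sketch using scipy (not part of the
# original listing): the tallest vertical span that no horizontal merge
# line crosses suggests the cluster count (5 for this dataset).
import scipy.cluster.hierarchy as sch

sch.dendrogram(sch.linkage(x, method='ward'))
plt.title('Dendrogram')
plt.xlabel('Customers')
plt.ylabel('Euclidean distance')
plt.show()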