def means(X):
    """Return the per-column mean of a 2-D sequence of numbers.

    Args:
        X: sequence of equal-length rows (list of lists of numbers).

    Returns:
        list[float]: one mean per column; ``[]`` when X is empty
        (the original raised IndexError on empty input).
    """
    if not X:
        return []
    n = len(X)
    # zip(*X) transposes rows into columns, replacing the manual
    # double index loop of the original.
    return [sum(column) / n for column in zip(*X)]
def result(datapath, algorithm):
    """Extract per-task summary features from a subject/task directory tree,
    write them to ``results.csv``, then dispatch to a classifier.

    Expected layout: ``datapath/<subject>/<task>/<signal files>``.
    Each row written is: subject id, binary task label ('1' if the task
    folder is 'T8', else '0'), then the per-file means, variances,
    minima, maxima, and "entropy" values concatenated.

    Args:
        datapath: root directory of the dataset.
        algorithm: 'RF' runs randomForest(); anything else runs svm().
    """
    result_value = []
    for subjects in os.listdir(datapath):
        subject_path = os.path.join(datapath, subjects)
        for tasks in os.listdir(subject_path):
            # Per-task feature accumulators, one entry per data file.
            mean = []
            maxi = []
            mini = []
            var = []
            ent = []
            subject_list = []
            task_list = []
            subject_list.append(subjects)
            task_path = os.path.join(subject_path, tasks)
            # Binarize the task label: 'T8' is the positive class.
            if tasks == 'T8':
                tasks = '1'
            else:
                tasks = '0'
            task_list.append(tasks)
            for files in os.listdir(task_path):
                # Skip macOS metadata and the ECG channel file.
                if files != '.DS_Store' and files != 'ECG_mV.txt':
                    file_path = os.path.join(task_path, files)
                    data = np.loadtxt(file_path, dtype='float')
                    Mean = np.mean(data)
                    mean.append(Mean)
                    Variance = np.var(data)
                    var.append(Variance)
                    Minimum = min(data)
                    mini.append(Minimum)
                    Maximum = max(data)
                    maxi.append(Maximum)
                    # NOTE(review): this duplicates the maximum — it looks
                    # like a copy-paste bug and was probably meant to be an
                    # actual entropy computation. Confirm intended formula.
                    Entropy = max(data)
                    ent.append(Entropy)
            # Feature row: [subject, label, means..., vars..., mins...,
            # maxes..., entropies...].
            output = mean + var + mini + maxi + ent
            result_name = subject_list + task_list + output
            result_value.append(result_name)
    # One CSV row per (subject, task) pair.
    with open('results.csv', 'w') as myfile:
        wr = csv.writer(myfile, lineterminator='\n')
        wr.writerows(result_value)
    # Classifier dispatch; both are defined elsewhere in the project.
    if algorithm == 'RF':
        randomForest()
    else:
        svm()
def normalize_data(data):
    """Row-wise z-score normalization of a 2-D numpy array.

    Each row is shifted by its own mean and scaled by its own sample
    standard deviation (ddof=1). Rows with zero variance produce NaNs
    (0/0), which are zeroed out before returning.

    Fixes vs. the original:
      * ``data.copy`` was missing call parentheses, so ``dataOut`` was the
        bound method, not an array — the first ``.shape`` access crashed.
      * ``itertools.chains`` was a typo (AttributeError).
      * Statistics are now computed per row, which is what the
        ``mean[i]`` / ``std[i]`` indexing of the original intended; the
        old comprehensions recomputed whole-array stats N times each.

    Args:
        data: 2-D numpy array; assumed float dtype so the in-place row
            assignment does not truncate — TODO confirm callers pass floats.

    Returns:
        A new normalized array; the input is not modified.
    """
    out = data.copy()
    for row in range(out.shape[0]):
        row_mean = np.mean(out[row, :])
        row_std = np.std(out[row, :], ddof=1)
        out[row, :] = (out[row, :] - row_mean) / row_std
    # Constant rows yield 0/0 = NaN; replace with 0 as the original did.
    out[np.isnan(out)] = 0
    return out
def word_averaging(wv, words):
    """Average the vectors of *words* into a single unit-normalized vector.

    Args:
        wv: gensim word-vector model. NOTE(review): uses the pre-4.0
            gensim API (``wv.vocab``, ``wv.syn0norm``) — will break on
            gensim >= 4.0; confirm the pinned version.
        words: iterable of tokens; entries may also be raw np.ndarray
            vectors, which are averaged in directly.

    Returns:
        np.ndarray of shape (wv.vector_size,): the unit-normalized mean
        vector, or a zero vector when no word was resolvable.
    """
    all_words, mean = set(), []
    for word in words:
        if isinstance(word, np.ndarray):
            # Pre-computed vector supplied directly; use it as-is.
            mean.append(word)
        elif (word == 'NoneType'):
            # NOTE(review): comparing against the literal string 'NoneType'
            # looks like it was meant to guard against None tokens — confirm.
            print(word)
        elif word in wv.vocab:
            # In-vocabulary token: take its L2-normalized embedding.
            mean.append(wv.syn0norm[wv.vocab[word].index])
            all_words.add(wv.vocab[word].index)
    if not mean:
        # Nothing resolved: fall back to the zero vector.
        return np.zeros(wv.vector_size, )
    # Unit-normalize the element-wise mean of the collected vectors.
    mean = gensim.matutils.unitvec(np.array(mean).mean(axis=0)).astype(
        np.float32)
    return mean
def box_graph(means, times, data):
    """Render one box plot per layout condition and save each as a PNG.

    Args:
        means: indexable of at least 4 rows, each with at least 4 entries
            (accuracy samples per layout) — TODO confirm exact shape.
        times: same structure as *means* but completion times; collected
            here but only used by the commented-out time-plot variant.
        data: unused in this function.

    Side effects: writes ``../src/trajectory/mean<i>.png`` for i in 1..4.
    """
    mean = []
    time = []
    # Repack the first 4 columns of the first 4 rows as tuples for boxplot.
    for i in range(4):
        mean.append((means[i][0], means[i][1], means[i][2], means[i][3]))
        time.append((times[i][0], times[i][1], times[i][2], times[i][3]))
    for i in range(4):
        # Global seaborn styling; the later sns.set(...) call overrides
        # most of the earlier ones.
        sns.set()
        sns.set_style("whitegrid", {'grid.linestyle': '--'})
        sns.set_context("paper", 1.5, {"lines.linewidth": 4})
        sns.set_palette("winter_r", 8)
        sns.set('talk', 'whitegrid', 'dark', rc={
            "lines.linewidth": 2, 'grid.linestyle': '--'
        })
        fig, ax = plt.subplots()
        bp = ax.boxplot(mean[i], vert=True, patch_artist=True)
        # bp = ax.boxplot(time[i], vert=True, patch_artist=True)
        # Uniform black styling for all box-plot artists.
        for box in bp['boxes']:
            box.set(color="black", linewidth=1.5)
        for box in bp['medians']:
            plt.setp(box, color="black", linewidth=1.5)
        for box in bp['caps']:
            plt.setp(box, color="black", linewidth=1.5)
        for box in bp['whiskers']:
            plt.setp(box, ls="solid", color="black", linewidth=1.5)
        # Distinct fill color per box.
        for box, color in zip(bp["boxes"], sns.color_palette("Set3", 6)):
            box.set_facecolor(color)
        ax.set_xticklabels(['ST-GIB', 'CD-GIB', 'FD-GIB', 'TR-GIB'])
        plt.xlabel('layout')
        plt.ylabel('accuracy [%]')
        # plt.ylabel('completion time [ms]')
        # plt.ylim(1000, 10000)
        plt.ylim(0, 110)
        # ax.legend(bp["boxes"], ['ST-GIB', 'CD-GIB', 'FD-GIB', 'TR-GIB'], loc='upper right')
        plt.legend()
        plt.grid()
        plt.savefig('../src/trajectory/mean' + str(i + 1) + '.png')
        # plt.savefig('../src/trajectory/time' + str(i+1) + '.png')
        plt.close()
def evolve(self, generations=550):
    """Run the genetic algorithm for the given number of generations.

    Each generation sorts the population, records the best and mean
    fitness, announces the first zero-fitness solution found, and then
    breeds the next generation via mateGrid(). Finishes by printing the
    final population and mean fitness.
    """
    generation_ids = []
    best_history = []
    mean_history = []
    solution_reported = False
    latest_best, latest_mean = 0, 0
    for gen_index in range(generations):
        self.sortIndivids()
        latest_best, latest_mean = self.getBestAndMean()
        # Track the fitness trajectory per generation.
        generation_ids.append(gen_index)
        best_history.append(latest_best)
        mean_history.append(latest_mean)
        # Announce the first perfect individual exactly once.
        if latest_best == 0 and not solution_reported:
            print('found solution in generation {}!\n'.format(gen_index))
            self.sorted_population[0][0].printState()
            solution_reported = True
        self.mateGrid()
    timestamp = datetime.now().strftime("%H-%M-%S")
    print('\n\nending pop:\n')
    for entry in self.sorted_population:
        print(entry[1], entry[0].state)
    print('\nending mean:', latest_mean)