def main():
    fileName = 'vectors.json'
    dim = 2
    numberOfClasses = 2
    drawPlot = True
    kmeans(fileName, dim, numberOfClasses, drawPlot)
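The kmeans driver invoked by main() is defined elsewhere in the project. Below is a minimal sketch of what such an entry point could look like, assuming vectors.json holds a flat list of dim-dimensional points and that a plain Lloyd's-algorithm loop is acceptable; both the JSON layout and the implementation are assumptions, not the original code.

# Hypothetical sketch of kmeans(fileName, dim, numberOfClasses, drawPlot).
# The JSON layout and the Lloyd's-algorithm loop are assumptions.
import json
import numpy as np
import matplotlib.pyplot as plt

def kmeans(fileName, dim, numberOfClasses, drawPlot, numIter=100):
    with open(fileName) as f:
        X = np.array(json.load(f), dtype=float).reshape(-1, dim)
    # Pick k distinct random points as the initial centroids.
    rng = np.random.default_rng(0)
    centroids = X[rng.choice(len(X), numberOfClasses, replace=False)]
    for _ in range(numIter):
        # Assignment step: label each point with its nearest centroid.
        dists = np.linalg.norm(X[:, None, :] - centroids[None, :, :], axis=2)
        labels = dists.argmin(axis=1)
        # Update step: move each centroid to the mean of its assigned points.
        new_centroids = np.array([X[labels == c].mean(axis=0) if np.any(labels == c)
                                  else centroids[c] for c in range(numberOfClasses)])
        if np.allclose(new_centroids, centroids):
            break
        centroids = new_centroids
    if drawPlot and dim == 2:
        plt.scatter(X[:, 0], X[:, 1], c=labels)
        plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', c='k')
        plt.show()
    return labels, centroids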
robust = False
if len(sys.argv) >= 5:
    # argv values arrive as strings, so map the flag explicitly to a bool
    robust = sys.argv[4].lower() in ('true', '1')

### Centroids initialization
centroids = functions.init_centroids(X_data, k)

#%%
t_ini = time.time()

### Execute kmeans algorithm
etiquetas, centroids = functions.kmeans(X_data, numiter, centroids,
                                        p_dista=p_dista, indivs=this_indi,
                                        prev_i=[], robust=robust)

### Initialize previous centroids variable to be used in second phase of the
### technique
centroids_p = centroids.copy()

### Append labels
etiquetas_glo.append(etiquetas.copy())

### Append centroids
centroids_ite.append(centroids.copy())
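functions.init_centroids is not shown in this snippet. A minimal sketch of a compatible initializer is given below, assuming it simply samples k distinct rows of the data matrix; the actual project helper may use a different strategy (for example k-means++), so this is only an assumed stand-in.

# Hypothetical stand-in for functions.init_centroids(data, k): sample k distinct
# rows of the data matrix as starting centroids.  The real helper may differ.
import numpy as np

def init_centroids(data, k, seed=None):
    data = np.asarray(data, dtype=float)
    rng = np.random.default_rng(seed)
    idx = rng.choice(len(data), size=k, replace=False)  # k distinct row indices
    return data[idx].copy()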
UBIT = 'pkubal'
import numpy as np
import cv2

np.random.seed(sum([ord(c) for c in UBIT]))
import matplotlib.pyplot as plt
from functions import (kmeans, updateCentroids, eucl_distance3d, kmeans3d,
                       updateCentroids3d, quantaRaster)

X = [[5.9, 3.2], [4.6, 2.9], [6.2, 2.8], [4.7, 3.2], [5.5, 4.2],
     [5, 3], [4.9, 3.1], [6.7, 3.1], [5.1, 3.8], [6, 3]]
centroids = [[6.2, 3.2], [6.6, 3.7], [6.5, 3]]
colmap = {0: 'r', 1: 'g', 2: 'b'}

# Computing kmeans
point_centroid_dict = {}
point_centroid_dict = kmeans(X, centroids, point_centroid_dict)

graphX = []
graphY = []
for i, point in enumerate(X):
    graphX.append(np.array(point).flatten()[0])
    graphY.append(np.array(point).flatten()[1])
    color = colmap.get(point_centroid_dict.get(i))
    plt.scatter(graphX[i], graphY[i], marker='^', facecolors=color, edgecolors=color)
    plt.annotate(point, (graphX[i], graphY[i]))
plt.savefig("3/task3_iter1_a.jpg")
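The kmeans and updateCentroids helpers imported from functions above are not shown. A minimal sketch of a compatible pair is given below: one assignment step and one centroid-update step of Lloyd's algorithm. The return types (a {point_index: centroid_index} dict and a list of centroids) are assumptions inferred from how the results are used in the plotting loop, not the project's actual implementation.

# Hypothetical stand-ins for functions.kmeans / functions.updateCentroids.
import numpy as np

def kmeans(X, centroids, point_centroid_dict):
    for i, point in enumerate(X):
        # Assign each point the index of its closest centroid (Euclidean distance).
        dists = [np.linalg.norm(np.array(point) - np.array(c)) for c in centroids]
        point_centroid_dict[i] = int(np.argmin(dists))
    return point_centroid_dict

def updateCentroids(X, centroids, point_centroid_dict):
    new_centroids = []
    for c in range(len(centroids)):
        members = [X[i] for i, lab in point_centroid_dict.items() if lab == c]
        # Keep the old centroid if no point was assigned to this cluster.
        new_centroids.append(np.mean(members, axis=0).tolist() if members else centroids[c])
    return new_centroids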
def image():
    #print(request.get_json())
    filename = request.get_json()['filename']    # original file name
    data = request.get_json()['image']           # arrives as a string
    data = data.encode('utf-8')                  # convert to bytes using utf-8
    algorithm = request.get_json()['algorithm']  # algorithm to run

    print("Received: ")
    print(" - File: ", filename)
    print(" - Algorithm: ", algorithm)

    with open(filename, "wb") as fh:
        fh.write(base64.decodebytes(data))  # decode the base64 payload to disk

    image = cv2.imread(filename)
    #image = cv2.imread('a.jpg')

    ############################################################################
    if algorithm == 'histogram':
        fc.viewHistograms(image)
        with open('histogram.png', 'rb') as binary_file:
            hist_img_binary = binary_file.read()
            hist_base64Image = base64.b64encode(hist_img_binary)
        with open('red.png', 'rb') as binary_file2:
            red_img_binary = binary_file2.read()
            red_base64Image = base64.b64encode(red_img_binary)
        with open('green.png', 'rb') as binary_file3:
            gre_img_binary = binary_file3.read()
            gre_base64Image = base64.b64encode(gre_img_binary)
        with open('blue.png', 'rb') as binary_file4:
            blu_img_binary = binary_file4.read()
            blu_base64Image = base64.b64encode(blu_img_binary)
        with open('gray.png', 'rb') as binary_file5:
            gra_img_binary = binary_file5.read()
            gra_base64Image = base64.b64encode(gra_img_binary)

        # remove the temporary images
        try:
            os.remove(filename)
            os.remove('histogram.png')
            os.remove('red.png')
            os.remove('green.png')
            os.remove('blue.png')
            os.remove('gray.png')
        except:
            print('fail remove')
        finally:
            # decode from binary back to utf-8 (binary -> string)
            return jsonify({
                'histogram': hist_base64Image.decode('utf-8'),
                'red': red_base64Image.decode('utf-8'),
                'green': gre_base64Image.decode('utf-8'),
                'blue': blu_base64Image.decode('utf-8'),
                'gray': gra_base64Image.decode('utf-8')
            })

    ############################################################################
    if algorithm == 'bin':
        image64 = fc.binarizar(image, 64)
        image128 = fc.binarizar(image, 128)
        image200 = fc.binarizar(image, 200)
        cv2.imwrite('temp_img1.jpg', image64)
        cv2.imwrite('temp_img2.jpg', image128)
        cv2.imwrite('temp_img3.jpg', image200)
        with open('temp_img1.jpg', 'rb') as binary_file:
            bin64_binary = binary_file.read()
            bin64_base64Image = base64.b64encode(bin64_binary)
        with open('temp_img2.jpg', 'rb') as binary_file2:
            bin128_binary = binary_file2.read()
            bin128_base64Image = base64.b64encode(bin128_binary)
        with open('temp_img3.jpg', 'rb') as binary_file3:
            bin200_binary = binary_file3.read()
            bin200_base64Image = base64.b64encode(bin200_binary)
        try:
            os.remove(filename)
        except:
            print('fail remove2')
        return jsonify({
            'bin64': bin64_base64Image.decode('utf-8'),
            'bin128': bin128_base64Image.decode('utf-8'),
            'bin200': bin200_base64Image.decode('utf-8')
        })

    ############################################################################
    if algorithm == 'negative':
        imageOut = fc.negative(image)
        try:
            os.remove(filename)
        except:
            print('fail remove2')
        return jsonify(fc.preparaJson(imageOut))

    ############################################################################
    if algorithm == 'subsampling':
        fc.subamostragem(image)
        with open('sub4.png', 'rb') as binary_file:
            sub4_binary = binary_file.read()
            sub4_base64Image = base64.b64encode(sub4_binary)
        with open('sub8.png', 'rb') as binary_file2:
            sub8_binary = binary_file2.read()
            sub8_base64Image = base64.b64encode(sub8_binary)
        with open('sub16.png', 'rb') as binary_file3:
            sub16_binary = binary_file3.read()
            sub16_base64Image = base64.b64encode(sub16_binary)
        with open('sub32.png', 'rb') as binary_file4:
            sub32_binary = binary_file4.read()
            sub32_base64Image = base64.b64encode(sub32_binary)
        with open('sub64.png', 'rb') as binary_file5:
            sub64_binary = binary_file5.read()
            sub64_base64Image = base64.b64encode(sub64_binary)

        # remove the temporary images
        try:
            os.remove(filename)
            os.remove('sub4.png')
            os.remove('sub8.png')
            os.remove('sub16.png')
            os.remove('sub32.png')
            os.remove('sub64.png')
        except:
            print('fail remove')
        finally:
            # decode from binary back to utf-8 (binary -> string)
            return jsonify({
                'sub4': sub4_base64Image.decode('utf-8'),
                'sub8': sub8_base64Image.decode('utf-8'),
                'sub16': sub16_base64Image.decode('utf-8'),
                'sub32': sub32_base64Image.decode('utf-8'),
                'sub64': sub64_base64Image.decode('utf-8')
            })

    ############################################################################
    if algorithm == 'hough':
        imageCircles1 = fc.houghCirculos(image, 1, 50)
        imageCircles2 = fc.houghCirculos(image, 1, 150)
        imageCircles3 = fc.houghCirculos(image, 1, 250)
        imageLines1 = fc.houghLinhas(image, 80)
        imageLines2 = fc.houghLinhas(image, 115)
        imageLines3 = fc.houghLinhas(image, 150)

        # write temporary images with the results (nparray -> jpg)
        cv2.imwrite('temp_img1.jpg', imageCircles1)
        cv2.imwrite('temp_img2.jpg', imageCircles2)
        cv2.imwrite('temp_img3.jpg', imageCircles3)
        cv2.imwrite('temp_img4.jpg', imageLines1)
        cv2.imwrite('temp_img5.jpg', imageLines2)
        cv2.imwrite('temp_img6.jpg', imageLines3)

        # read each image and encode it (jpg -> binary -> base64)
        with open('temp_img1.jpg', 'rb') as binary_file:
            img_binary1 = binary_file.read()
            circles1 = base64.b64encode(img_binary1)
        with open('temp_img2.jpg', 'rb') as binary_file:
            img_binary2 = binary_file.read()
            circles2 = base64.b64encode(img_binary2)
        with open('temp_img3.jpg', 'rb') as binary_file:
            img_binary3 = binary_file.read()
            circles3 = base64.b64encode(img_binary3)
        with open('temp_img4.jpg', 'rb') as binary_file:
            img_binary4 = binary_file.read()
            lines1 = base64.b64encode(img_binary4)
        with open('temp_img5.jpg', 'rb') as binary_file:
            img_binary5 = binary_file.read()
            lines2 = base64.b64encode(img_binary5)
        with open('temp_img6.jpg', 'rb') as binary_file:
            img_binary6 = binary_file.read()
            lines3 = base64.b64encode(img_binary6)

        # remove the temporary images
        try:
            os.remove(filename)
            os.remove('temp_img1.jpg')
            os.remove('temp_img2.jpg')
            os.remove('temp_img3.jpg')
            os.remove('temp_img4.jpg')
            os.remove('temp_img5.jpg')
            os.remove('temp_img6.jpg')
        except:
            print('fail remove')
        finally:
            # decode from binary back to utf-8 (binary -> string)
            return jsonify({
                'lines1': lines1.decode('utf-8'),
                'lines2': lines2.decode('utf-8'),
                'lines3': lines3.decode('utf-8'),
                'circles1': circles1.decode('utf-8'),
                'circles2': circles2.decode('utf-8'),
                'circles3': circles3.decode('utf-8'),
            })

    ############################################################################
    if algorithm == 'sobel':
        fc.sobel(image)

        # read each image and encode it (jpg -> binary -> base64)
        with open('absolut_3.jpg', 'rb') as binary_file:
            img_binary1 = binary_file.read()
            absolut_3 = base64.b64encode(img_binary1)
        with open('shift_3.jpg', 'rb') as binary_file:
            img_binary2 = binary_file.read()
            shift_3 = base64.b64encode(img_binary2)
        with open('absolut_5.jpg', 'rb') as binary_file:
            img_binary3 = binary_file.read()
            absolut_5 = base64.b64encode(img_binary3)
        with open('shift_5.jpg', 'rb') as binary_file:
            img_binary4 = binary_file.read()
            shift_5 = base64.b64encode(img_binary4)
        with open('absolut_7.jpg', 'rb') as binary_file:
            img_binary5 = binary_file.read()
            absolut_7 = base64.b64encode(img_binary5)
        with open('shift_7.jpg', 'rb') as binary_file:
            img_binary6 = binary_file.read()
            shift_7 = base64.b64encode(img_binary6)

        # remove the temporary images
        try:
            os.remove(filename)
            os.remove('absolut_3.jpg')
            os.remove('absolut_5.jpg')
            os.remove('absolut_7.jpg')
            os.remove('shift_3.jpg')
            os.remove('shift_5.jpg')
            os.remove('shift_7.jpg')
        except:
            print('fail remove')
        finally:
            # decode from binary back to utf-8 (binary -> string)
            return jsonify({
                'absolut3': absolut_3.decode('utf-8'),
                'shift3': shift_3.decode('utf-8'),
                'absolut5': absolut_5.decode('utf-8'),
                'shift5': shift_5.decode('utf-8'),
                'absolut7': absolut_7.decode('utf-8'),
                'shift7': shift_7.decode('utf-8'),
            })

    ############################################################################
    if algorithm == 'laplace':
        cv2.imwrite('mask3.jpg', fc.laplaciano(image, 3))
        cv2.imwrite('mask5.jpg', fc.laplaciano(image, 5))
        cv2.imwrite('mask7.jpg', fc.laplaciano(image, 7))

        # read each image and encode it (jpg -> binary -> base64)
        with open('mask3.jpg', 'rb') as binary_file:
            img_binary1 = binary_file.read()
            mask3Image = base64.b64encode(img_binary1)
        with open('mask5.jpg', 'rb') as binary_file:
            img_binary2 = binary_file.read()
            mask5Image = base64.b64encode(img_binary2)
        with open('mask7.jpg', 'rb') as binary_file:
            img_binary3 = binary_file.read()
            mask7Image = base64.b64encode(img_binary3)

        # remove the temporary images
        try:
            os.remove(filename)
            os.remove('mask3.jpg')
            os.remove('mask5.jpg')
            os.remove('mask7.jpg')
        except:
            print('fail remove')
        finally:
            # decode from binary back to utf-8 (binary -> string)
            return jsonify({
                'mask3': mask3Image.decode('utf-8'),
                'mask5': mask5Image.decode('utf-8'),
                'mask7': mask7Image.decode('utf-8')
            })

    ############################################################################
    if algorithm == 'kmeans':
        kmeans1 = fc.kmeans(image, False, 3)
        kmeans2 = fc.kmeans(image, False, 5)
        kmeans3 = fc.kmeans(image, False, 7)
        kmeans4 = fc.kmeans(image, False, 10)
        cv2.imwrite('temp_img1.jpg', kmeans1)
        cv2.imwrite('temp_img2.jpg', kmeans2)
        cv2.imwrite('temp_img3.jpg', kmeans3)
        cv2.imwrite('temp_img4.jpg', kmeans4)
        with open('temp_img1.jpg', 'rb') as binary_file:
            img_binary1 = binary_file.read()
            imageKmeans1 = base64.b64encode(img_binary1)
        with open('temp_img2.jpg', 'rb') as binary_file:
            img_binary2 = binary_file.read()
            imageKmeans2 = base64.b64encode(img_binary2)
        with open('temp_img3.jpg', 'rb') as binary_file:
            img_binary3 = binary_file.read()
            imageKmeans3 = base64.b64encode(img_binary3)
        with open('temp_img4.jpg', 'rb') as binary_file:
            img_binary4 = binary_file.read()
            imageKmeans4 = base64.b64encode(img_binary4)
        try:
            os.remove(filename)
            os.remove('temp_img1.jpg')
            os.remove('temp_img2.jpg')
            os.remove('temp_img3.jpg')
            os.remove('temp_img4.jpg')
        except:
            print('fail remove')
        finally:
            # decode from binary back to utf-8 (binary -> string)
            return jsonify({
                'kmeans1': imageKmeans1.decode('utf-8'),
                'kmeans2': imageKmeans2.decode('utf-8'),
                'kmeans3': imageKmeans3.decode('utf-8'),
                'kmeans4': imageKmeans4.decode('utf-8'),
            })
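The fc.kmeans helper called in the 'kmeans' branch is defined elsewhere in the project. A minimal sketch of a compatible colour-quantisation routine built on cv2.kmeans is shown below; interpreting the second argument as a "convert to grayscale first" flag, and the OpenCV-based implementation itself, are assumptions rather than the project's actual code.

# Hypothetical stand-in for fc.kmeans(image, gray, k): quantise the image to k
# colour clusters with cv2.kmeans.  The meaning of the second argument is assumed.
import cv2
import numpy as np

def kmeans(image, gray, k):
    img = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) if gray else image
    samples = np.float32(img.reshape(-1, img.shape[-1] if img.ndim == 3 else 1))
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
    _, labels, centers = cv2.kmeans(samples, k, None, criteria, 10,
                                    cv2.KMEANS_RANDOM_CENTERS)
    # Replace every pixel by its cluster centre and restore the original shape.
    quantised = np.uint8(centers)[labels.flatten()]
    return quantised.reshape(img.shape)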
datos_df_per1 = data_e[data_e['Date'] == year_i]
datos_per1 = np.array(datos_df_per1[datos_df_per1.columns[3:]])
numdata = len(datos_per1)

### FKMP1 is only included in gapminder experiments
if de_gapminder:
    t_ini = time.time()
    centroids = functions.init_centroids(datos_per1, k)
    centroids_p = centroids.copy()
    this_indi = pd.unique(datos_df_per1.country)

    ### kmeans on period 1
    etiquetas, centroids = functions.kmeans(datos_per1, numiter, centroids,
                                            p_dista=p_dista, indivs=this_indi,
                                            prev_i=[])

    ### Labels are the same in all the periods
    FKMP1 = etiquetas_glo_old.copy()
    for perio in range(len(FKMP1)):
        FKMP1.loc[perio] = np.append(perio, etiquetas.copy())

    t_fin = (time.time() - t_ini) / 60

    ### Save the labels to csv and append the execution time (minutes) to the log
    df5 = pd.DataFrame(FKMP1)
    df5.to_csv('../data/outputs/FKMP1.csv', header=True, index=False)
    with open('../data/outputs/execution_time.txt', 'a+') as f:
        f.write(f'\n FKMP1: {t_fin}')