def print_hist():
    """Open a Tk window showing the word histogram for the selected file.

    If no file has been chosen yet (``file_path == ""``), shows a prompt and
    an Exit button instead. Otherwise calls ``hist(ans, file_path)`` — which
    fills ``ans`` and, presumably, writes ``./histogram.png`` (TODO confirm) —
    then displays that image plus summary stats. Blocks in ``mainloop()``
    until the window is closed.
    """
    global file_path
    hist_box = Tk()
    hist_box.title("Histogram")
    if file_path == "":
        Label(hist_box, text="Please select file first!").pack()
        # command=hist_box.destroy: no lambda needed; also avoid shadowing
        # the builtin `exit` (the old `exit = ...pack()` bound None anyway).
        Button(hist_box, text="Exit", command=hist_box.destroy).pack()
    else:
        ans = []
        hist(ans, file_path)  # populates ans[0] = (most, least, line_count) — see usage below
        canvas = Canvas(hist_box, width=1000, height=600)
        canvas.pack()
        img = ImageTk.PhotoImage(Image.open("./histogram.png"), master=canvas)
        # Keep a reference on the widget: Tk only holds a weak link to the
        # image, so a purely local reference can be garbage-collected.
        canvas.image = img
        canvas.create_image(0, 0, anchor=NW, image=img)
        text = (
            "Most frequent used word is : " + str(ans[0][0]) + "\n"
            + "Least frequent used word is : " + str(ans[0][1]) + "\n"
            + "Number of lines used in file: " + str(ans[0][2])
        )
        Label(hist_box, text=text).pack()
        Button(hist_box, text="Exit", command=hist_box.destroy).pack()
    mainloop()
def reporter(image_file=IMAGE_FILE, neighborhood=NEIGHBORHOOD, scale=SCALE, dpi=DPI, channel=CHANNEL, plane=PLANE, scope=SCOPE):
    """Run the full analysis pipeline on *image_file*.

    Dispatches, in order: bp (channel/plane report), hist (scaled histogram),
    atc, ngbd (neighborhood analysis), and entropy. All output is produced by
    those helpers as side effects; nothing is returned.

    NOTE(review): the ``scope`` parameter is accepted but never used in this
    body — confirm whether a helper is meant to receive it.

    Test usage:
    reporter('image.tiff')
    reporter('image.tiff', 8, 5, 1000)
    reporter('image.tiff', 8, 5, 500, 'R', 0, 5)
    """
    bp(image_file, dpi, channel, plane)
    hist(image_file, scale, dpi)
    atc(image_file, dpi)
    ngbd(image_file, neighborhood, dpi)
    entropy(image_file)
def generate_sentence(num_words=10):
    """Generate a random sentence of *num_words* words drawn from book.txt.

    Builds a word histogram from the file and samples one word per slot.
    *num_words* defaults to 10 (the previously hard-coded count).

    Returns the words joined by single spaces. (Fixes the stray leading
    space the old ``sentence += " " + word`` loop produced, and avoids
    quadratic string concatenation via ``str.join``.)
    """
    my_file = get_all_words("book.txt")
    my_histogram = hist(my_file)
    return " ".join(sample(my_histogram) for _ in range(num_words))
async def turbid(turbidModel: TurbidModel):
    """Compute water turbidity from sky / water / grey-card photos.

    Decodes the three base64 images, saves a histogram PNG for each,
    crops them, and takes the mean of the red channel as the digital
    number (DN) unless an explicit DN override is supplied on the model.
    DNs are converted to radiance (with optional alpha/S calibration),
    then to remote-sensing reflectance, then to turbidity.

    Returns a dict with the turbidity value and the three histogram
    images re-encoded as base64 strings.
    """
    img_s = stringToRGB(turbidModel.skyImage)
    hist.hist(img_s, "sky.png")
    img_s = crop_img(img_s)
    b_s, g_s, r_s = cv2.split(img_s)

    img_w = stringToRGB(turbidModel.waterImage)
    hist.hist(img_w, "water.png")
    img_w = crop_img(img_w)
    b_w, g_w, r_w = cv2.split(img_w)

    img_c = stringToRGB(turbidModel.greyImage)
    hist.hist(img_c, "grey.png")
    img_c = crop_img(img_c)
    b_c, g_c, r_c = cv2.split(img_c)

    # Red-channel mean is the DN unless the caller supplied one explicitly.
    Rs = mean(r_s) if turbidModel.DN_s is None else turbidModel.DN_s
    Rw = mean(r_w) if turbidModel.DN_w is None else turbidModel.DN_w
    Rc = mean(r_c) if turbidModel.DN_c is None else turbidModel.DN_c

    # BUG FIX: the original tested `(alpha, S) is (None, None)` — identity on
    # a freshly built tuple, which is effectively always False — so radiance()
    # was always called with alpha/S even when both were None. Test each
    # field with `is None` so the no-calibration defaults are actually used.
    if turbidModel.alpha is None and turbidModel.S is None:
        Ls = radiance(Rs)
        Lw = radiance(Rw)
        Lc = radiance(Rc)
    else:
        Ls = radiance(Rs, turbidModel.alpha, turbidModel.S)
        Lw = radiance(Rw, turbidModel.alpha, turbidModel.S)
        Lc = radiance(Rc, turbidModel.alpha, turbidModel.S)

    Rrs = reflectance(Ls, Lw, Lc)
    turbid = turbidity(Rrs)
    return {
        "turbidity": turbid,
        "waterHist": RGBTostring("water.png"),
        "skyHist": RGBTostring("sky.png"),
        "greyHist": RGBTostring("grey.png"),
    }
# Split the two-eye image into left/right halves and extract per-eye
# features (histogram extremes, scaled mean and std) for detection.
alto, ancho = double_e.shape[:2]  # alto = height, ancho = width
mid = round(ancho / 2)
# Sections as [x, y, width, height].
sec_R = [0, 0, mid, alto]
sec_L = [mid + 1, 0, ancho, alto]
# Split off the right and left eye.
# NOTE(review): sec_R covers columns 0..mid-1 and sec_L starts at mid+1, so
# column `mid` belongs to neither half — possible off-by-one, TODO confirm.
# (sec_L's width of `ancho` overshoots; NumPy clamps the slice to the edge.)
Reye = double_e[sec_R[1]:sec_R[1] + sec_R[3], sec_R[0]:sec_R[0] + sec_R[2]]
Leye = double_e[sec_L[1]:sec_L[1] + sec_L[3], sec_L[0]:sec_L[0] + sec_L[2]]
######################## PROPERTY EXTRACTION FOR DETECTION ##################
# HISTOGRAM: first and last bins (presumably white/black counts — verify
# against h.hist's bin ordering).
hR = h.hist(Reye)
hL = h.hist(Leye)
hR_blanco = hR[0]   # "blanco" = white bin
hR_negro = hR[-1]   # "negro" = black bin
hL_blanco = hL[0]
hL_negro = hL[-1]
# MEAN: mean * pixel count, i.e. the pixel sum of each eye region.
mean_R = Reye.mean() * (Reye.shape[0] * Reye.shape[1])
mean_L = Leye.mean() * (Leye.shape[0] * Leye.shape[1])
# STANDARD DEVIATION, scaled by pixel count the same way.
std_R = Reye.std() * (Reye.shape[0] * Reye.shape[1])
std_L = Leye.std() * (Leye.shape[0] * Leye.shape[1])
#!/usr/bin/env python
"""Monte Carlo exercises: exponential sampling + 3D moment integrals."""
import numpy as np
import histogram
import integrator as intr
# BUG FIX: the alias `plt` conventionally names matplotlib.pyplot; binding it
# to the bare `matplotlib` package would break any later `plt.plot(...)` call.
import matplotlib.pyplot as plt

# Problem 1: draw N exponential(b) samples and histogram them into n bins.
N = 100000
A = np.zeros(N)
b = 5
n = 1000
for i in range(N):
    A[i] = intr.exponential(b)
histogram.hist(A, n)

# Problem 2: Monte Carlo mass and first moments of a density over a sphere,
# then report the center of mass (x, y, z) = moments / total mass.
[w, Sw] = intr.monte_carlo_3d(intr.density, [-.5, -.5, -1], [1, 1, 1], intr.sphere, 100000)
[x, Sw] = intr.monte_carlo_3d(intr.xmoment, [-.5, -.5, -1], [1, 1, 1], intr.sphere, 100000)
[y, Sw] = intr.monte_carlo_3d(intr.ymoment, [-.5, -.5, -1], [1, 1, 1], intr.sphere, 100000)
[z, Sw] = intr.monte_carlo_3d(intr.zmoment, [-.5, -.5, -1], [1, 1, 1], intr.sphere, 100000)
print(x/w, y/w, z/w)
def build_dictogram(self, word_list):
    """Return a histogram built from *word_list* via the module's hist()."""
    histogram = hist(word_list)
    return histogram