# Assumed imports for this snippet; get_timeVfreq is a project helper that
# returns (time_buckets, frequencies) for the given date operator and field.
from matplotlib import cm, colors, pyplot
from numpy import ndarray


def plot_timeVfreq(date_operator, collection):
    if collection == "drop":
        field = "pickup_time"
    elif collection == "pickup":
        field = "drop_time"
    else:
        raise Exception(
            "Error: the collection arguments to timeVfreq are 'drop', or 'pickup'. Not",
            collection)

    graph_size = {"hour": (0, 23), "dayOfYear": (1, 366), "dayOfWeek": (1, 8)}
    if date_operator not in graph_size.keys():
        raise Exception(
            "Error: the date_operator argument to timeVfreq must be 'hour', "
            "'dayOfWeek', or 'dayOfYear'. Not", date_operator)

    tvf = get_timeVfreq(date_operator, collection, field)
    min_freq = ndarray.min(tvf[1])
    max_freq = ndarray.max(tvf[1])

    fig = pyplot.figure(1, figsize=(7, 6))
    myplot = fig.add_subplot(111)
    myplot.set_xlim(graph_size[date_operator])

    # Color each bar by its frequency, mapped through the 'cool' colormap.
    norm = colors.Normalize(min_freq, max_freq)
    for i in range(len(tvf[0])):
        color = colors.rgb2hex(cm.cool(norm(tvf[1][i])))
        pyplot.bar(tvf[0][i], tvf[1][i], color=color, ec=color, align='edge')

    pyplot.xlabel(date_operator)
    pyplot.ylabel("# of rides")
    pyplot.savefig(date_operator + "_" + collection, dpi=180)
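
# Hypothetical usage sketch (not part of the original module): a single driver
# call, assuming get_timeVfreq and the ride data it queries are available in
# the running environment.  Writes "hour_pickup.png" next to the script.
if __name__ == '__main__':
    plot_timeVfreq("hour", "pickup")
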
def max(self, *args, **kwargs):
    """
    Returns the maximum Date.

    For a description of the input parameters, please refer to numpy.max.
    """
    obj = ndarray.max(self, *args, **kwargs)
    if not obj.shape:
        return Date(self.freq, obj)
    return obj
# Assumed imports for this snippet; User and ThreeDimensional are expected to
# come from this Django project's models.
import json

import nibabel
from django.http import HttpResponse
from numpy import ndarray


def BackgroundImage(request):
    chad = User.objects.get(id=1)
    image = ThreeDimensional.objects.filter(user=chad)[0]
    image_handle = nibabel.load(image.brain_image.file.name)
    image_data = image_handle.get_data()
    image_list_data = image_data.tolist()

    # For some reason ndarray.max returns an ndarray with 1 member, which is a
    # numpy.float32.  This converts all that to a regular python float.
    max = ndarray.max(image_data).tolist()
    min = ndarray.min(image_data).tolist()

    json_object = {
        'data': image_list_data,
        'max': max,
        'min': min,
    }
    json_data = json.dumps(json_object)
    # Note: `mimetype` is the pre-Django-1.7 keyword; newer Django versions
    # use content_type instead.
    return HttpResponse(json_data, mimetype='application/json')
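
# Quick illustration (plain NumPy, no Django or nibabel needed) of the scalar
# conversion used in the view above: ndarray.max returns a NumPy scalar, and
# .tolist() turns it into a JSON-serializable Python float.
import numpy as np

arr = np.array([[0.2, 1.7], [3.4, 0.9]], dtype=np.float32)
peak = np.ndarray.max(arr)
print(type(peak))           # <class 'numpy.float32'>
print(type(peak.tolist()))  # <class 'float'> -- float(peak) or peak.item() work too
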
# Assumed module-level imports for this snippet (not shown here): os, pickle,
# numpy as np, matplotlib.pyplot as plt, Ska.Numpy, plot_cxctime from
# Ska.Matplotlib, DateTime from Chandra.Time, ndarray from numpy, plus the
# module's logger and its get_states and calc_model helpers.
def make_validation_plots(opt, tlm, db):
    """
    Make validation output plots.

    :param opt: options (including output directory and model spec)
    :param tlm: telemetry
    :param db: database handle
    :returns: list of plot info including plot file names
    """
    outdir = opt.outdir
    start = tlm['date'][0]
    stop = tlm['date'][-1]
    states = get_states(start, stop, db)

    # Create array of times at which to calculate PSMC temperatures, then do it
    logger.info('Calculating PSMC thermal model for validation')
    model = calc_model(opt.model_spec, states, start, stop)

    # Interpolate states onto the tlm.date grid
    # state_vals = cmd_states.interpolate_states(states, model.times)
    pred = {'1pdeaat': model.comp['1pdeaat'].mvals,
            'pitch': model.comp['pitch'].mvals,
            'tscpos': model.comp['sim_z'].mvals}

    idxs = Ska.Numpy.interpolate(np.arange(len(tlm)), tlm['date'], model.times,
                                 method='nearest')
    tlm = tlm[idxs]

    labels = {'1pdeaat': 'Degrees (C)',
              'pitch': 'Pitch (degrees)',
              'tscpos': 'SIM-Z (steps/1000)'}
    scales = {'tscpos': 1000.}
    fmts = {'1pdeaat': '%.2f', 'pitch': '%.3f', 'tscpos': '%d'}

    good_mask = np.ones(len(tlm), dtype='bool')
    for interval in model.bad_times:
        bad = ((tlm['date'] >= DateTime(interval[0]).secs)
               & (tlm['date'] < DateTime(interval[1]).secs))
        good_mask[bad] = False

    plots = []
    logger.info('Making PSMC model validation plots and quantile table')
    quantiles = (1, 5, 16, 50, 84, 95, 99)
    # store lines of quantile table in a string and write out later
    quant_table = ''
    quant_head = ",".join(['MSID'] + ["quant%d" % x for x in quantiles])
    quant_table += quant_head + "\n"

    for fig_id, msid in enumerate(sorted(pred)):
        plot = dict(msid=msid.upper())
        fig = plt.figure(10 + fig_id, figsize=(7, 3.5))
        fig.clf()
        scale = scales.get(msid, 1.0)
        ticklocs, fig, ax = plot_cxctime(model.times, tlm[msid] / scale,
                                         fig=fig, fmt='-r')
        ticklocs, fig, ax = plot_cxctime(model.times, pred[msid] / scale,
                                         fig=fig, fmt='-b')
        if np.any(~good_mask):
            ticklocs, fig, ax = plot_cxctime(model.times[~good_mask],
                                             tlm[msid][~good_mask] / scale,
                                             fig=fig, fmt='.c')
        ax.set_title(msid.upper() + ' validation')
        ax.set_ylabel(labels[msid])
        ax.grid()
        filename = msid + '_valid.png'
        outfile = os.path.join(outdir, filename)
        logger.info('Writing plot file %s' % outfile)
        fig.savefig(outfile)
        plot['lines'] = filename

        # Make quantiles
        if msid == '1pdeaat':
            ok = (tlm[msid] > 30.0) & good_mask
            ok2 = (tlm[msid] > 40.0) & good_mask
        else:
            ok = np.ones(len(tlm[msid]), dtype=bool)
        diff = np.sort(tlm[msid][ok] - pred[msid][ok])
        quant_line = "%s" % msid
        for quant in quantiles:
            quant_val = diff[(len(diff) * quant) // 100]
            plot['quant%02d' % quant] = fmts[msid] % quant_val
            quant_line += (',' + fmts[msid] % quant_val)
        quant_table += quant_line + "\n"

        for histscale in ('log', 'lin'):
            fig = plt.figure(20 + fig_id, figsize=(4, 3))
            fig.clf()
            ax = fig.gca()
            ax.hist(diff / scale, bins=50, log=(histscale == 'log'))
            if msid == '1pdeaat':
                diff2 = np.sort(tlm[msid][ok2] - pred[msid][ok2])
                ax.hist(diff2 / scale, bins=50, log=(histscale == 'log'),
                        color='red')
            ax.set_title(msid.upper() + ' residuals: data - model')
            ax.set_xlabel(labels[msid])
            fig.subplots_adjust(bottom=0.18)
            filename = '%s_valid_hist_%s.png' % (msid, histscale)
            outfile = os.path.join(outdir, filename)
            logger.info('Writing plot file %s' % outfile)
            fig.savefig(outfile)
            plot['hist' + histscale] = filename

        plots.append(plot)

    filename = os.path.join(outdir, 'validation_quant.csv')
    logger.info('Writing quantile table %s' % filename)
    f = open(filename, 'w')
    f.write(quant_table)
    f.close()

    # If run_start is specified this is likely for regression testing or other
    # debugging.  In this case write out the full predicted and telemetered
    # dataset as a pickle.
    if opt.run_start:
        filename = os.path.join(outdir, 'validation_data.pkl')
        logger.info('Writing validation data %s' % filename)
        f = open(filename, 'wb')
        pickle.dump({'pred': pred, 'tlm': tlm}, f, protocol=-1)
        f.close()

    # adding stuff for resid plots--rje 6/24/14
    fig = plt.figure(36)
    fig.clf()

    # this is the python equivalent of the IDL where() function
    # note parens are required for the & cases.
    msid = '1pdeaat'
    hot_hrcs = ((tlm['tscpos'] < -85000.0) & (pred[msid] > 40.0) & good_mask)
    hot_hrci = ((-85000.0 < tlm['tscpos']) & (tlm['tscpos'] < 0.0)
                & (pred[msid] > 40.0) & good_mask)
    hot_aciss = ((0.0 < tlm['tscpos']) & (tlm['tscpos'] < 80000.0)
                 & (pred[msid] > 40.0) & good_mask)
    hot_acisi = ((tlm['tscpos'] > 80000.0) & (pred[msid] > 40.0) & good_mask)

    warm_hrcs = ((tlm['tscpos'] < -85000.0) & (pred[msid] > 30.0)
                 & (pred[msid] < 40.0) & good_mask)
    warm_hrci = ((-85000.0 < tlm['tscpos']) & (tlm['tscpos'] < 0.0)
                 & (pred[msid] > 30.0) & (pred[msid] < 40.0) & good_mask)
    warm_aciss = ((0.0 < tlm['tscpos']) & (tlm['tscpos'] < 80000.0)
                  & (pred[msid] > 30.0) & (pred[msid] < 40.0) & good_mask)
    warm_acisi = ((tlm['tscpos'] > 80000.0) & (pred[msid] > 30.0)
                  & (pred[msid] < 40.0) & good_mask)

    cold_hrcs = ((tlm['tscpos'] < -85000.0) & (pred[msid] < 30.0) & good_mask)
    cold_hrci = ((-85000.0 < tlm['tscpos']) & (tlm['tscpos'] < 0.0)
                 & (pred[msid] < 30.0) & good_mask)
    cold_aciss = ((0.0 < tlm['tscpos']) & (tlm['tscpos'] < 80000.0)
                  & (pred[msid] < 30.0) & good_mask)
    cold_acisi = ((tlm['tscpos'] > 80000.0) & (pred[msid] < 30.0) & good_mask)

    plt.plot(tlm['pitch'][hot_hrci], tlm[msid][hot_hrci] - pred[msid][hot_hrci], "ob", markersize=5)
    plt.plot(tlm['pitch'][hot_hrcs], tlm[msid][hot_hrcs] - pred[msid][hot_hrcs], "ok", markersize=5)
    plt.plot(tlm['pitch'][hot_aciss], tlm[msid][hot_aciss] - pred[msid][hot_aciss], "or", markersize=5)
    plt.plot(tlm['pitch'][hot_acisi], tlm[msid][hot_acisi] - pred[msid][hot_acisi], "og", markersize=5)
    plt.plot(tlm['pitch'][warm_hrci], tlm[msid][warm_hrci] - pred[msid][warm_hrci], "sb", markersize=3)
    plt.plot(tlm['pitch'][warm_hrcs], tlm[msid][warm_hrcs] - pred[msid][warm_hrcs], "sk", markersize=3)
    plt.plot(tlm['pitch'][warm_aciss], tlm[msid][warm_aciss] - pred[msid][warm_aciss], "sr", markersize=3)
    plt.plot(tlm['pitch'][warm_acisi], tlm[msid][warm_acisi] - pred[msid][warm_acisi], "sg", markersize=3)
    plt.plot(tlm['pitch'][cold_hrci], tlm[msid][cold_hrci] - pred[msid][cold_hrci], ".b", markersize=2)
    plt.plot(tlm['pitch'][cold_hrcs], tlm[msid][cold_hrcs] - pred[msid][cold_hrcs], ".k", markersize=2)
    plt.plot(tlm['pitch'][cold_aciss], tlm[msid][cold_aciss] - pred[msid][cold_aciss], ".r", markersize=2)
    plt.plot(tlm['pitch'][cold_acisi], tlm[msid][cold_acisi] - pred[msid][cold_acisi], ".g", markersize=2)
    # plt.plot(tlm['pitch'][htr_on], tlm[msid][htr_on] - pred[msid][htr_on], "*m", markersize=10)
    plt.ylabel('1PDEAAT Data - Model')
    plt.xlabel('pitch angle')
    plt.title('b,k,r,g=hrci,hrcs,aciss,acisi, mod.temp: 0<.<30<s<40<o')
    plt.grid()
    outfile = os.path.join(outdir, '1pdeaat_resid_pitch.png')
    fig.savefig(outfile)

    # adding stuff for resid plots--rje 6/24/14
    fig = plt.figure(35)
    fig.clf()

    # this is the python equivalent of the IDL where() function
    # note parens are required for the & cases.
    fwd_hrcs = ((tlm['tscpos'] < -85000.0) & (tlm['pitch'] < 65.0) & good_mask)
    fwd_hrci = ((-85000.0 < tlm['tscpos']) & (tlm['tscpos'] < 0.0)
                & (tlm['pitch'] < 65.0) & good_mask)
    fwd_aciss = ((0.0 < tlm['tscpos']) & (tlm['tscpos'] < 80000.0)
                 & (tlm['pitch'] < 65.0) & good_mask)
    fwd_acisi = ((tlm['tscpos'] > 80000.0) & (tlm['pitch'] < 65.0) & good_mask)

    m80_hrcs = ((tlm['tscpos'] < -85000.0) & (tlm['pitch'] > 65.0)
                & (tlm['pitch'] < 80.0) & good_mask)
    m80_hrci = ((-85000.0 < tlm['tscpos']) & (tlm['tscpos'] < 0.0)
                & (tlm['pitch'] > 65.0) & (tlm['pitch'] < 80.0) & good_mask)
    m80_aciss = ((0.0 < tlm['tscpos']) & (tlm['tscpos'] < 80000.0)
                 & (tlm['pitch'] > 65.0) & (tlm['pitch'] < 80.0) & good_mask)
    m80_acisi = ((tlm['tscpos'] > 80000.0) & (tlm['pitch'] > 65.0)
                 & (tlm['pitch'] < 80.0) & good_mask)

    mid_hrcs = ((tlm['tscpos'] < -85000.0) & (tlm['pitch'] > 80.0)
                & (tlm['pitch'] < 90.0) & good_mask)
    mid_hrci = ((-85000.0 < tlm['tscpos']) & (tlm['tscpos'] < 0.0)
                & (tlm['pitch'] > 80.0) & (tlm['pitch'] < 90.0) & good_mask)
    mid_aciss = ((0.0 < tlm['tscpos']) & (tlm['tscpos'] < 80000.0)
                 & (tlm['pitch'] > 80.0) & (tlm['pitch'] < 90.0) & good_mask)
    mid_acisi = ((tlm['tscpos'] > 80000.0) & (tlm['pitch'] > 80.0)
                 & (tlm['pitch'] < 90.0) & good_mask)

    aft_hrcs = ((tlm['tscpos'] < -85000.0) & (tlm['pitch'] > 90.0) & good_mask)
    aft_hrci = ((-85000.0 < tlm['tscpos']) & (tlm['tscpos'] < 0.0)
                & (tlm['pitch'] > 90.0) & good_mask)
    aft_aciss = ((0.0 < tlm['tscpos']) & (tlm['tscpos'] < 80000.0)
                 & (tlm['pitch'] > 90.0) & good_mask)
    aft_acisi = ((tlm['tscpos'] > 80000.0) & (tlm['pitch'] > 90.0) & good_mask)

    msid = '1pdeaat'
    plt.plot(pred[msid][fwd_hrci], tlm[msid][fwd_hrci] - pred[msid][fwd_hrci], "ob", markersize=5)
    plt.plot(pred[msid][fwd_hrcs], tlm[msid][fwd_hrcs] - pred[msid][fwd_hrcs], "ok", markersize=5)
    plt.plot(pred[msid][fwd_aciss], tlm[msid][fwd_aciss] - pred[msid][fwd_aciss], "or", markersize=5)
    plt.plot(pred[msid][fwd_acisi], tlm[msid][fwd_acisi] - pred[msid][fwd_acisi], "og", markersize=5)
    plt.plot(pred[msid][m80_hrci], tlm[msid][m80_hrci] - pred[msid][m80_hrci], "vb", markersize=5)
    plt.plot(pred[msid][m80_hrcs], tlm[msid][m80_hrcs] - pred[msid][m80_hrcs], "vk", markersize=5)
    plt.plot(pred[msid][m80_aciss], tlm[msid][m80_aciss] - pred[msid][m80_aciss], "vr", markersize=5)
    plt.plot(pred[msid][m80_acisi], tlm[msid][m80_acisi] - pred[msid][m80_acisi], "vg", markersize=5)
    plt.plot(pred[msid][mid_hrci], tlm[msid][mid_hrci] - pred[msid][mid_hrci], "^b", markersize=5)
    plt.plot(pred[msid][mid_hrcs], tlm[msid][mid_hrcs] - pred[msid][mid_hrcs], "^k", markersize=5)
    plt.plot(pred[msid][mid_aciss], tlm[msid][mid_aciss] - pred[msid][mid_aciss], "^r", markersize=5)
    plt.plot(pred[msid][mid_acisi], tlm[msid][mid_acisi] - pred[msid][mid_acisi], "^g", markersize=5)
    plt.plot(pred[msid][aft_hrci], tlm[msid][aft_hrci] - pred[msid][aft_hrci], ".b", markersize=2)
    plt.plot(pred[msid][aft_hrcs], tlm[msid][aft_hrcs] - pred[msid][aft_hrcs], ".k", markersize=2)
    plt.plot(pred[msid][aft_aciss], tlm[msid][aft_aciss] - pred[msid][aft_aciss], ".r", markersize=2)
    plt.plot(pred[msid][aft_acisi], tlm[msid][aft_acisi] - pred[msid][aft_acisi], ".g", markersize=2)

    # Reference line y = 52.5 - x: residuals above this line correspond to
    # actual temperatures above 52.5 C.
    maxmodeltemp = ndarray.max(pred[msid][good_mask])
    maxresid = ndarray.max(tlm[msid][good_mask] - pred[msid][good_mask])
    x = np.array(np.linspace(52.5 - maxresid, maxmodeltemp, num=5))
    my_y = 52.5 - x
    plt.plot(x, my_y)

    plt.ylabel('Data - Model')
    plt.xlabel('1pdeaat Model')
    plt.title('blue,black,red,green=hrci,hrcs,aciss,acisi, 45<o<65<v<80<^<90<.')
    plt.grid()
    # raise ValueError
    outfile = os.path.join(outdir, '1pdeaat_resid.png')
    fig.savefig(outfile)

    return plots
# Assumed imports for this snippet; nhat_tri_facet, u_for_F_v1, u_for_stay_v1,
# and fg_for_domain are defined elsewhere in domain_functions.py.
import matplotlib.pyplot as plt
import numpy as np
from numpy import ndarray
from scipy import spatial


def main_test():
    # As a main function, this function tests other functions written in
    # domain_functions.py:
    #   - compute triangulation of domain
    #   -
    nodes = np.array([[0., 0.], [0., 10.], [15., 10.], [15., 0.], [6., 10.],
                      [9., 10.], [11., 0.], [2., 8.], [8., 7.], [11., 2.],
                      [5., 5.]])
    tri = spatial.Delaunay(nodes)

    print('')
    print('Entered points, triangles by vertex, and neighbors:')
    print(tri.points)     # Numbering of the points is in order of input
    print('')
    print(tri.simplices)  # From point numbers, give the triangles, starting with 0
    print('')
    print(tri.neighbors)  # Give the triangle number of neighbors, and -1 for boundary

    # Test which triangle:
    # print(tri.find_simplex(np.array([2., 2.])))

    # Choose a simplex and facet control idx:
    num = 0
    facet_id = 0

    # Extract points:
    points = tri.points
    vertices = tri.simplices
    vertices_here = vertices[num, :]
    print('')
    print('vertices_here:')
    print(vertices_here)

    # Extract normals:
    nhat_array = nhat_tri_facet(tri, num)
    print('')
    print('Local normal vectors:')
    print(nhat_array)

    u_optimal, fg_array = u_for_F_v1(tri, num, 0.01, np.eye(2), facet_id)
    u_stay, fg_array_stay = u_for_stay_v1(tri, num, 0.01, np.eye(2))
    print('')
    print('Optimal u at each vertex:')
    print(u_optimal)
    print('')
    print('fg_array for select triangle and facet:')
    print(fg_array)
    print('')
    print('Optimal u to stay:')
    print(u_stay)

    fg_master_array = fg_for_domain(tri, 0.01, np.eye(2), 2)
    print('')
    print('fg_master_array:')
    print(fg_master_array)

    x_array = points[vertices_here, 0]
    y_array = points[vertices_here, 1]
    x_lim = np.array([ndarray.min(x_array), ndarray.max(x_array)])
    y_lim = np.array([ndarray.min(y_array), ndarray.max(y_array)])

    domain_plot = 1
    control_plot = 1

    # Plot partitioning of domain:
    if domain_plot == 1:
        plt.rc('text', usetex=True)
        plt.rc('font', family='serif')
        plt.triplot(nodes[:, 0], nodes[:, 1], tri.simplices.copy())
        plt.plot(nodes[:, 0], nodes[:, 1], 'o')
        plt.title('Partitioned Domain')
        plt.grid(linestyle="--", linewidth=0.1, color='.25', zorder=-10)

    # Quiver velocity field to leave selected triangle:
    if control_plot == 1:
        X, Y = np.meshgrid(np.arange(x_lim[0] - 0.5, x_lim[1] + 0.5, 0.5),
                           np.arange(y_lim[0] - 0.5, y_lim[1] + 0.5, 0.5))
        U = fg_array[0, 0] * X + fg_array[0, 1] * Y + fg_array[0, 2]
        V = fg_array[1, 0] * X + fg_array[1, 1] * Y + fg_array[1, 2]
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1)
        plt.rc('text', usetex=True)
        plt.rc('font', family='serif')
        plt.title('Control Vector Field for Chosen Simplex - Exit Chosen Facet')
        ax.triplot(nodes[:, 0], nodes[:, 1], tri.simplices.copy())
        ax.plot(nodes[:, 0], nodes[:, 1], 'o')
        ax.quiver(X, Y, U, V)
        ax.grid(linestyle="--", linewidth=0.1, color='.25', zorder=-10)
        # ax.set_xlim(x_lim)
        # ax.set_ylim(y_lim)

        X, Y = np.meshgrid(np.arange(x_lim[0] - 0.5, x_lim[1] + 0.5, 0.5),
                           np.arange(y_lim[0] - 0.5, y_lim[1] + 0.5, 0.5))
        U = fg_array_stay[0, 0] * X + fg_array_stay[0, 1] * Y + fg_array_stay[0, 2]
        V = fg_array_stay[1, 0] * X + fg_array_stay[1, 1] * Y + fg_array_stay[1, 2]
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1)
        plt.rc('text', usetex=True)
        plt.rc('font', family='serif')
        plt.title('Control Vector Field for Chosen Simplex - Remain')
        ax.triplot(nodes[:, 0], nodes[:, 1], tri.simplices.copy())
        ax.plot(nodes[:, 0], nodes[:, 1], 'o')
        ax.quiver(X, Y, U, V)
        ax.grid(linestyle="--", linewidth=0.1, color='.25', zorder=-10)
        # ax.set_xlim(x_lim)
        # ax.set_ylim(y_lim)

    if (domain_plot == 1) or (control_plot == 1):
        plt.show()

    return


# Execute test:
# main_test()
# Assumed imports for this snippet; feed_preprocessing and write_letter are
# project helpers defined elsewhere (frame preprocessing for the network and
# mapping a class index to a letter).  load_model is assumed to come from
# Keras; model.predict_classes requires the older Keras Sequential API.
import functools
import time

import cv2
from keras.models import load_model
from numpy import ndarray


def main_func():
    image = cv2.imread('asl_alphabet.jpg')
    resized_image = cv2.resize(image, (570, 720))

    # loading the pre-trained model
    model = load_model('SeqModel.h5')

    camera = cv2.VideoCapture(0)  # 0 -> index of camera
    camera.set(3, 1080)
    camera.set(4, 720)

    predict_letter_list = []
    word_str = ''
    word_list = []

    while True:
        check, frame1 = camera.read()
        cv2.rectangle(frame1, (20, 70), (275, 325), (0, 255, 0), thickness=1)

        gray_frame = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)
        # blurring (median) the gray frame
        median_blurred = cv2.medianBlur(gray_frame, 3)
        # blurring (gaussian) the already blurred gray frame
        gaus_blurred = cv2.GaussianBlur(median_blurred, (3, 3), 0)
        canny_frame = cv2.Canny(gaus_blurred, 30, 30)
        cropped_frame = canny_frame[71:325, 21:275]
        cropFrame_array = feed_preprocessing(cropped_frame)

        # predict
        predictions_prob = model.predict(cropFrame_array)
        # print(predictions_prob)

        # take the value with the highest probability
        most_prob = ndarray.max(predictions_prob)
        # print(most_prob)

        # [0] -> A , [1] -> B , [2] -> C
        # print(type(predictions_label)) --> numpy.ndarray
        predictions_label = model.predict_classes(cropFrame_array)

        # convert [0] to 0 for future operations
        string_label = functools.reduce(lambda x, y: x + str(y), predictions_label, '')
        integer_label = int(string_label)  # 0 -> A, 1 -> B, 2 -> C
        # print(integer_label)

        # matching the labels to the actual values
        # most_prob > number (where number determined based on measures) to avoid random results
        if most_prob > 0.65:
            # print(f'accuracy: {most_prob}')
            # print(f'letter {write_letter(integer_label)}')
            letter = write_letter(integer_label)
            # append the predicted letter into the list
            predict_letter_list.append(letter)
            print(predict_letter_list)

            # every x letters
            if len(predict_letter_list) % 15 == 0:
                # check that ALL (x) the letters in the list are the same
                if all(x == predict_letter_list[0] for x in predict_letter_list):
                    if integer_label == 20:
                        word_list += ' '
                    elif integer_label == 4:
                        # try/except guards against deleting a letter from an empty word
                        try:
                            word_list.pop()
                        except:
                            pass
                    else:
                        # add to the word that will be printed one of the identical items of the list
                        word_list.append(predict_letter_list[0])
                    time.sleep(.5)
                    print(word_list)
                # every x letters clear the list so the process repeats again from the start
                predict_letter_list.clear()

        # convert list into string for putText function
        word_str = "".join(word_list)
        frame1 = cv2.putText(frame1, 'Predicted Word: ' + word_str, (20, 60),
                             cv2.FONT_HERSHEY_SIMPLEX, 2, (128, 0, 0), 3)
        # frame1 = cv2.putText(frame1, 'Predicted Letter: ' + write_letter(integer_label), (20, 500),
        #                      cv2.FONT_HERSHEY_SIMPLEX, 2, (128, 0, 0), 3)

        if not check:
            break
        cv2.imshow('WINDOW', frame1)
        cv2.imshow('CROPPED', cropped_frame)
        cv2.imshow('ALPHABET', resized_image)
        if frame1 is None:
            break

        keyboard = cv2.waitKey(1) & 0xff
        if keyboard == 27:  # esc to terminate
            break

    camera.release()
    cv2.destroyAllWindows()
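
# Hypothetical entry point for the capture loop above; assumes the model file
# 'SeqModel.h5', the reference image 'asl_alphabet.jpg', and a webcam at
# index 0 are all available.
if __name__ == '__main__':
    main_func()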