def get_pipeline(image):
    """Run a per-step processing pipeline on *image* and return the result as JSON.

    :param image: image filename, resolved against INPUT_FOLDER.
    :return: JSON response with the original image and one entry per pipeline
             step; steps that produced no output image are represented by a
             fail.gif placeholder entry.
    """
    filepath = utils.get_filepath(app.config['INPUT_FOLDER'], image)
    if filepath is None:
        return not_found_error()
    # Clear results from any previous run before generating new ones.
    utils.delete_images(app.config['OUTPUT_FOLDER_PIPELINES'])
    # silent=True + `or {}` guards against a missing/invalid JSON body, which
    # would otherwise make .get() blow up on None.
    steps = (request.get_json(silent=True) or {}).get('steps')
    if steps is None:
        response = jsonify({
            'success': False,
            'message': 'Field "steps" is required'
        })
        # A missing request field is a client error: 400 (Bad Request), not 404.
        response.status_code = 400
        return response
    processing_lib.pipeline_individual(filepath, steps)
    original = ['/' + filepath, image]
    pipelines = utils.get_images(app.config['OUTPUT_FOLDER_PIPELINES'])
    steps_count = utils.count_folders(app.config['OUTPUT_FOLDER_STEPS'])
    # Insert a placeholder for every step index with no output image so the
    # client always receives one entry per step, keyed by its index prefix.
    for index in range(1, steps_count + 1):
        if next((x for x in pipelines if int(x[1].split('-')[0]) == index), None) is None:
            pipelines.append(('/static/img/fail.gif',
                              '{}-{}'.format(index, str(uuid.uuid4()).split('-')[0])))
    pipelines.sort(key=lambda x: int(x[1].split('-')[0]))
    response = jsonify({
        'success': True,
        'original': original,
        'pipeline': pipelines
    })
    response.status_code = 200
    return response
def processing(image):
    """Apply every individual transformation to *image* and render the results page."""
    filepath = utils.get_filepath(app.config['INPUT_FOLDER'], image)
    if filepath is None:
        return redirect(url_for('index'))
    # Start from a clean output folder so only this run's images get listed.
    utils.delete_images(app.config['OUTPUT_FOLDER'])
    processing_lib.individual(filepath)
    original = ['/' + filepath, image]
    transformations = sorted(utils.get_images(app.config['OUTPUT_FOLDER']),
                             key=lambda entry: entry[1])
    return render_template('processing.html',
                           original=original,
                           transformations=transformations)
def run_app():
    """Handle the Run button: validate the dropdown choice, then build the document.

    If no date has been selected, a warning label is shown and the window stays
    open; otherwise the barcodes are generated, the Word document is written and
    opened, the temporary images are deleted, and the window is destroyed.
    """
    warn_label = Label(frame, text='Select a date from the list', bg='#81D4FA')
    selection = clicked.get()  # read the dropdown once instead of three times
    if selection == 'Select date':
        # Nothing chosen yet: show the hint and keep the window open.
        warn_label.place(relx=0.5, rely=0.2, relwidth=1, relheight=0.3, anchor='n')
    else:
        generate_barcodes(get_eans(selection))
        word_document(selection)
        os.startfile(documents_path)  # open the generated document (Windows-only API)
        delete_images()  # remove the temporary barcode images
        root.destroy()
def processing(image):
    """Apply each individual transformation to *image* and return the results as JSON."""
    filepath = utils.get_filepath(app.config['INPUT_FOLDER'], image)
    if filepath is None:
        return not_found_error()
    # Wipe the previous run's output before generating fresh images.
    utils.delete_images(app.config['OUTPUT_FOLDER'])
    processing_lib.individual(filepath)
    results = sorted(utils.get_images(app.config['OUTPUT_FOLDER']),
                     key=lambda entry: entry[1])
    payload = {
        'success': True,
        'original': ['/' + filepath, image],
        'transformations': results
    }
    response = jsonify(payload)
    response.status_code = 200
    return response
def pipeline(image):
    """Run a user-defined transformation pipeline on *image* and render the results.

    On POST, the comma-separated 'list_transformations' form field is applied to
    the image. The rendered page shows one entry per pipeline step, with a
    fail.gif placeholder for any step that produced no output image.
    """
    filepath = utils.get_filepath(app.config['INPUT_FOLDER'], image)
    if filepath is None:
        return redirect(url_for('index'))
    if request.method == 'POST':
        utils.delete_images(app.config['OUTPUT_FOLDER_PIPELINES'])
        # Default to '' so a missing form field yields an empty transformation
        # list instead of crashing with AttributeError on None.split(',').
        list_transformations = request.form.get('list_transformations', '').split(',')
        processing_lib.pipeline(filepath, list_transformations)
    original = ['/' + filepath, image]
    pipelines = utils.get_images(app.config['OUTPUT_FOLDER_PIPELINES'])
    steps_count = utils.count_folders(app.config['OUTPUT_FOLDER_STEPS'])
    # Insert a placeholder for every step index with no output image so the
    # template always receives one entry per step, keyed by its index prefix.
    for index in range(1, steps_count + 1):
        if next((x for x in pipelines if int(x[1].split('-')[0]) == index), None) is None:
            pipelines.append(('/static/img/fail.gif',
                              '{}-{}'.format(index, str(uuid.uuid4()).split('-')[0])))
    pipelines.sort(key=lambda x: int(x[1].split('-')[0]))
    return render_template('pipeline.html', original=original, pipelines=pipelines)
def delete_image(image_id):
    """Delete a single image belonging to the calling app and report success as JSON.

    :param image_id: identifier of the image to delete.
    :return: JSON success payload (the delete count from the data layer is not
             exposed; the response reports success unconditionally, as before).
    """
    appid = request.headers.get('app_id')
    # Return value (delete count) was never used, so don't bind it.
    df.delete_images(image_id, appid)
    out = {
        'status': 'success',
        'message': 'image {} has been deleted.'.format(image_id)
    }
    return jsonify(out)
def delete_images():
    """Bulk-delete the comma-separated image ids posted by the calling app.

    Reads the 'images' field from the request payload; if the payload is absent
    or malformed, an empty id list is used (best-effort, never raises).
    """
    appid = request.headers.get('app_id')
    try:
        image_ids = df.get_request_data(request).get('images', '').split(',')
        image_ids = [i.strip() for i in image_ids]
    except AttributeError:
        # get_request_data returned None (no/unparsable payload): nothing to delete.
        image_ids = []
    # Return value (delete count) was never used, so don't bind it.
    df.delete_images(image_ids, appid)
    out = {'status': 'success', 'message': 'images are deleted.'}
    return jsonify(out)
def main(image=0):
    """Run the OCR benchmark over every input image, writing one result row per
    pipeline variant.

    :param image: index of the first image to process; 0 means a fresh run and
                  re-initialises the results file.
    """
    if image == 0:
        init__file_results()
    images = utils.get_images(config['INPUT_FOLDER'])
    # Entries are (path, name) pairs; names are numeric, so sort numerically.
    images.sort(key=lambda x: int(x[1]))
    # Skip images already processed in a previous (resumed) run.
    images = images[image:]
    for image in images:  # NOTE(review): loop variable shadows the parameter
        print('Image {}'.format(image[1]))
        filepath = image[0][1:]  # strip the leading '/' to get a relative path
        text = ORIGINAL_TEXTS[image[1]]  # ground-truth text for this image
        utils.delete_images(config['OUTPUT_FOLDER_PIPELINES'])
        # Baseline: OCR the untransformed image and time it.
        time = default_timer()  # NOTE(review): shadows any module-level `time`
        result_text, percentage = ocr.compare(text, filepath)
        time_end = default_timer() - time
        write_file_result(
            [image[1], 'original', percentage, text, result_text, time_end])
        # Generate every configured transformation pipeline for this image;
        # `steps`/`times` are indexed by the step number encoded in the
        # output file name prefix ("<step>-...").
        steps, times = processing_lib.pipeline(filepath, config['TRANSFORMATIONS'])
        pipelines = utils.get_images(config['OUTPUT_FOLDER_PIPELINES'])
        for pipeline in pipelines:
            time = default_timer()
            result_text, percentage = ocr.compare(
                text,
                utils.get_filepath(config['OUTPUT_FOLDER_PIPELINES'], pipeline[1]))
            time_end = default_timer() - time
            # Row: image id, step list, OCR accuracy, expected text, OCR text,
            # and total time (pipeline processing time + OCR time).
            write_file_result([
                image[1], '\r'.join(steps[int(pipeline[1].split('-')[0])]),
                percentage, text, result_text,
                times[int(pipeline[1].split('-')[0])] + time_end
            ])
def run(conf):
    '''
    Visualises clusters of a given run.

    Renders source-data and cluster-tracking images with VisIt for every
    timestep/file of the run described by *conf*, then assembles them into
    movies. Supports resuming (skipping already-rendered images).

    :param conf: run configuration dictionary (key paths are read via
                 utils.getValueForKeyPath).
    :return: 0 when there is nothing to do, otherwise None.
    '''
    #pp = pprint.PrettyPrinter()
    #pp.pprint(conf)
    # Make sure the global configuration is in place
    utils.run_global_visit_configuration(conf)
    #pdb.set_trace()
    clusterConf = utils.getValueForKeyPath(conf, 'postprocessing.clusters')
    if not clusterConf:
        print "No configuration for cluster postprocessing. Nothing to do."
        return 0
    visitConf = utils.getValueForKeyPath(clusterConf, 'visit')
    if not visitConf:
        print "No configuration for visuals. Nothing to do."
        return 0
    views = utils.getValueForKeyPath(visitConf, 'views')
    # Set up background gradient, axis labels etc.
    utils.setAnnotations(visitConf, 'annotationAttributes')
    # Set view and annotation attributes
    utils.setAnnotations(conf, 'postprocessing.clusters.visit.annotations')
    if not utils.getValueForKeyPath(conf, 'resume'):
        # Fresh run: discard all previously rendered output.
        print "Removing results from previous runs"
        subprocess.call(
            "rm -rf images movies *.vtk *.vtr *tracking*.png *source*.png",
            shell=True)
    else:
        # Resumed run: keep images/movies, drop only intermediate VTK files.
        print "Removing intermediary files from previous runs"
        subprocess.call("rm -f *.vtk *.vtr *.vtu", shell=True)
    # Figure out the ending for the cluster vtk files
    conversion_config = utils.getValueForKeyPath(
        conf, 'postprocessing.clusters.meanie3D-cfm2vtk')
    if "--write-as-xml" in conversion_config:
        cluster_vtk_extension = ".vtu"
    else:
        cluster_vtk_extension = ".vtk"
    # Glob the netcdf directory or find the single file
    uses_time = utils.getValueForKeyPath(conf, 'uses_time')
    print "Current work directory: " + os.path.abspath(os.getcwd())
    if uses_time:
        # Single file containing all timesteps.
        print "Processing file " + conf['source_directory']
        netcdf_file = conf['source_directory']
    else:
        # One NetCDF file per timestep.
        print "Processing files in directory " + conf['source_directory']
        netcdf_files = sorted(glob.glob(conf['source_directory'] + "/*.nc"))
    # Keep track of number of images to allow
    # forced re-set in time to circumvent the
    # Visit memory leak
    image_count = 0
    index_range = []
    if uses_time:
        t1 = int(utils.getValueForKeyPath(conf, 'start_time_index'))
        t2 = int(utils.getValueForKeyPath(conf, 'end_time_index'))
        index_range = range(t1, t2 + 1)
    else:
        index_range = range(len(netcdf_files))
    time_index = -1
    for index in index_range:
        # construct the cluster filename and find it
        # in the cluster directory
        if not uses_time:
            netcdf_file = netcdf_files[index]
        else:
            time_index = index
        netcdf_path, filename = os.path.split(netcdf_file)
        basename = os.path.splitext(filename)[0]
        if uses_time:
            # Disambiguate per-timestep outputs of the single source file.
            basename = basename + "-" + str(time_index)
        cluster_file = conf[
            'cluster_directory'] + os.path.sep + basename + "-clusters.nc"
        # cluster file gets it's basename from the input file rather than the cluster file
        cluster_vtk_file = os.path.splitext(
            filename)[0] + "-clusters" + cluster_vtk_extension
        # label and displacement files are based on the cluster file name
        label_vtk_file = basename + "-clusters-centers.vtk"
        displacement_vtk_file = basename + "-clusters-displacements.vtk"
        print "netcdf_file = " + netcdf_file
        print "cluster_file = " + cluster_file
        # check if the files both exist
        if not os.path.exists(cluster_file):
            print "Cluster file does not exist. Skipping."
            continue
        # predict the filenames for checking on resume
        number_postfix = str(image_count).rjust(4, '0')
        source_open = False
        skip_source = False
        if conf['resume'] == True:
            # images_exist returns "all"/"partial"/other; only complete sets
            # may be skipped, partial sets are deleted and re-rendered.
            exists = utils.images_exist(visitConf['views'], "source", image_count)
            if exists == "all":
                print "Source visualization " + number_postfix + " exists. Skipping."
                skip_source = True
            elif exists == "partial":
                print "Deleting partial visualization " + number_postfix
                utils.delete_images(conf, "source", image_count)
        if skip_source == False:
            if utils.getValueForKeyPath(clusterConf, 'createSourceMovie'):
                # Add ancillary background data
                utils.plotMapdata(visitConf, 'map')
                # Add timestamp
                if utils.getValueForKeyPath(clusterConf, 'showDateTime'):
                    utils.add_datetime(clusterConf, netcdf_file, time_index)
                # Add source data and threshold it
                print "Plotting source data ..."
                start_time = time.time()
                utils.addPseudocolorPlots(netcdf_file, visitConf, 'source.plots',
                                          time_index)
                source_open = True
                visit.DrawPlots()
                utils.saveImagesForViews(views, "source")
                visit.DeleteAllPlots()
                visit.ClearWindow()
                print " done. (%.2f seconds)" % (time.time() - start_time)
        if utils.getValueForKeyPath(clusterConf, 'visualiseClusters'):
            skip = False
            if conf['resume'] == True:
                # Same resume logic as for the source images, but for the
                # "tracking" (cluster) renderings.
                exists = utils.images_exist(visitConf['views'], "tracking",
                                            image_count)
                if exists == "all":
                    print "Cluster visualization " + number_postfix + " exists. Skipping."
                    skip = True
                elif exists == "partial":
                    print "Deleting partial cluster visualization " + number_postfix
                    utils.delete_images(conf, "tracking", image_count)
            if skip == False:
                # Run the conversion
                print "-- Converting clusters to .vtr --"
                start_time = time.time()
                params = "-f %s %s" \
                    % (cluster_file, utils.getValueForKeyPath(conf, 'postprocessing.clusters.meanie3D-cfm2vtk'))
                if utils.getValueForKeyPath(
                        conf, 'postprocessing.clusters.showDisplacementVectors'):
                    params += " --write-displacement-vectors"
                if utils.getValueForKeyPath(conf, 'data.vtkDimensions'):
                    vtkDimString = ",".join(
                        utils.getValueForKeyPath(conf, 'data.vtkDimensions'))
                    params += " --vtk-dimensions=%s" % vtkDimString
                # pdb.set_trace();
                print "meanie3D-cfm2vtk %s" % params
                meanie3D.app.external.execute_command('meanie3D-cfm2vtk', params)
                print " done. (%.2f seconds)" % (time.time() - start_time)
                # Move cluster output file to individual file
                if uses_time:
                    cluster_vtk_file_dst = basename + "-clusters.vtk"
                    os.rename(cluster_vtk_file, cluster_vtk_file_dst)
                    cluster_vtk_file = cluster_vtk_file_dst
                print "-- Rendering cluster scene --"
                start_time = time.time()
                # Add ancillary background data
                utils.plotMapdata(visitConf, 'map')
                # Add timestamp
                if utils.getValueForKeyPath(clusterConf, 'showDateTime'):
                    utils.add_datetime(clusterConf, netcdf_file, time_index)
                # Add background source data
                if utils.getValueForKeyPath(clusterConf, 'showSourceBackground'):
                    utils.addPseudocolorPlots(netcdf_file, visitConf,
                                              'sourceBackground.plots', time_index)
                    source_open = True
                # Add the clusters
                add_clusters(cluster_vtk_file, conf)
                # Add modes as labels
                labelConf = utils.getValueForKeyPath(visitConf, 'label')
                if labelConf:
                    utils.addLabelPlot(label_vtk_file, labelConf)
                # Add displacement vectors
                if utils.getValueForKeyPath(clusterConf, 'showDisplacementVectors'):
                    vectorConf = utils.getValueForKeyPath(
                        visitConf, 'displacementVectors')
                    if vectorConf:
                        utils.addVectorPlot(displacement_vtk_file, vectorConf)
                visit.DrawPlots()
                utils.saveImagesForViews(views, "tracking")
                print " done. (%.2f seconds)" % (time.time() - start_time)
        # clean up
        visit.DeleteAllPlots()
        visit.ClearWindow()
        if source_open:
            visit.CloseDatabase(netcdf_file)
            visit.CloseDatabase(label_vtk_file)
        utils.close_pattern(basename + "*.vtr")
        utils.close_pattern(basename + "*.vtk")
        utils.close_pattern(basename + "*.vtu")
        if utils.getValueForKeyPath(conf, 'cleanup_vtk'):
            subprocess.call("rm -f *.vt*", shell=True)
        # periodically kill computing engine to
        # work around the memory leak fix
        image_count = image_count + 1
        # TODO: check if this is still necessary and re-implement if it is.
        #if image_count % 100 == 0:
        #    visit.CloseComputeEngine()
    # close mapstuff
    # TODO: might need to keep track of open database files and
    # close them.
    # Use imagemagick to use image sequences to make movies
    movieFormats = utils.getValueForKeyPath(clusterConf, 'movieFormats')
    if utils.getValueForKeyPath(clusterConf, 'createSourceMovie'):
        utils.createMoviesForViews(views, "source", movieFormats)
    if utils.getValueForKeyPath(clusterConf, 'createClusterMovie'):
        utils.createMoviesForViews(views, "tracking", movieFormats)
    # clean up
    print "Cleaning up ..."
    subprocess.call("mkdir images", shell=True)
    subprocess.call("mv *tracking_*.png images", shell=True)
    subprocess.call("mv *source_*.png images", shell=True)
    subprocess.call("mkdir movies", shell=True)
    subprocess.call("mv *source*.gif *tracking*.gif *.m4v movies", shell=True)
    if utils.getValueForKeyPath(conf, 'cleanup_vtk'):
        subprocess.call("rm -f *.vt* visitlog.py", shell=True)
    return
def run(conf):
    '''
    Visualises clusters of a given run.

    NOTE(review): this appears to be a near-duplicate of another run()
    definition in this source — consider consolidating.

    For each timestep/file of the run described by *conf*, renders source and
    cluster-tracking images with VisIt, converts cluster files to VTK via
    meanie3D-cfm2vtk, and finally builds movies from the image sequences.
    Supports resuming by skipping images that already exist.

    :param conf: run configuration dictionary (key paths are read via
                 utils.getValueForKeyPath).
    :return: 0 when there is nothing to do, otherwise None.
    '''
    #pp = pprint.PrettyPrinter()
    #pp.pprint(conf)
    # Make sure the global configuration is in place
    utils.run_global_visit_configuration(conf)
    #pdb.set_trace()
    clusterConf = utils.getValueForKeyPath(conf, 'postprocessing.clusters')
    if not clusterConf:
        print "No configuration for cluster postprocessing. Nothing to do."
        return 0
    visitConf = utils.getValueForKeyPath(clusterConf, 'visit')
    if not visitConf:
        print "No configuration for visuals. Nothing to do."
        return 0
    views = utils.getValueForKeyPath(visitConf, 'views')
    # Set up background gradient, axis labels etc.
    utils.setAnnotations(visitConf, 'annotationAttributes')
    # Set view and annotation attributes
    utils.setAnnotations(conf, 'postprocessing.clusters.visit.annotations')
    if not utils.getValueForKeyPath(conf, 'resume'):
        # Fresh run: remove all rendered output from earlier runs.
        print "Removing results from previous runs"
        subprocess.call(
            "rm -rf images movies *.vtk *.vtr *tracking*.png *source*.png",
            shell=True)
    else:
        # Resuming: only intermediate VTK artifacts are discarded.
        print "Removing intermediary files from previous runs"
        subprocess.call("rm -f *.vtk *.vtr *.vtu", shell=True)
    # Figure out the ending for the cluster vtk files
    conversion_config = utils.getValueForKeyPath(
        conf, 'postprocessing.clusters.meanie3D-cfm2vtk')
    if "--write-as-xml" in conversion_config:
        cluster_vtk_extension = ".vtu"
    else:
        cluster_vtk_extension = ".vtk"
    # Glob the netcdf directory or find the single file
    uses_time = utils.getValueForKeyPath(conf, 'uses_time')
    print "Current work directory: " + os.path.abspath(os.getcwd())
    if uses_time:
        # One file holding every timestep.
        print "Processing file " + conf['source_directory']
        netcdf_file = conf['source_directory']
    else:
        # One file per timestep, processed in sorted order.
        print "Processing files in directory " + conf['source_directory']
        netcdf_files = sorted(glob.glob(conf['source_directory'] + "/*.nc"))
    # Keep track of number of images to allow
    # forced re-set in time to circumvent the
    # Visit memory leak
    image_count = 0
    index_range = []
    if uses_time:
        t1 = int(utils.getValueForKeyPath(conf, 'start_time_index'))
        t2 = int(utils.getValueForKeyPath(conf, 'end_time_index'))
        index_range = range(t1, t2 + 1)
    else:
        index_range = range(len(netcdf_files))
    time_index = -1
    for index in index_range:
        # construct the cluster filename and find it
        # in the cluster directory
        if not uses_time:
            netcdf_file = netcdf_files[index]
        else:
            time_index = index
        netcdf_path, filename = os.path.split(netcdf_file)
        basename = os.path.splitext(filename)[0]
        if uses_time:
            # Make per-timestep outputs of the single file distinguishable.
            basename = basename + "-" + str(time_index)
        cluster_file = conf['cluster_directory'] + os.path.sep + basename + "-clusters.nc"
        # cluster file gets it's basename from the input file rather than the cluster file
        cluster_vtk_file = os.path.splitext(filename)[0] + "-clusters" + cluster_vtk_extension
        # label and displacement files are based on the cluster file name
        label_vtk_file = basename + "-clusters-centers.vtk"
        displacement_vtk_file = basename + "-clusters-displacements.vtk"
        print "netcdf_file = " + netcdf_file
        print "cluster_file = " + cluster_file
        # check if the files both exist
        if not os.path.exists(cluster_file):
            print "Cluster file does not exist. Skipping."
            continue
        # predict the filenames for checking on resume
        number_postfix = str(image_count).rjust(4, '0')
        source_open = False
        skip_source = False
        if conf['resume'] == True:
            # "all" -> skip re-rendering; "partial" -> delete and redo.
            exists = utils.images_exist(visitConf['views'], "source", image_count)
            if exists == "all":
                print "Source visualization " + number_postfix + " exists. Skipping."
                skip_source = True
            elif exists == "partial":
                print "Deleting partial visualization " + number_postfix
                utils.delete_images(conf, "source", image_count)
        if skip_source == False:
            if utils.getValueForKeyPath(clusterConf, 'createSourceMovie'):
                # Add ancillary background data
                utils.plotMapdata(visitConf, 'map')
                # Add timestamp
                if utils.getValueForKeyPath(clusterConf, 'showDateTime'):
                    utils.add_datetime(clusterConf, netcdf_file, time_index)
                # Add source data and threshold it
                print "Plotting source data ..."
                start_time = time.time()
                utils.addPseudocolorPlots(netcdf_file, visitConf, 'source.plots',
                                          time_index)
                source_open = True
                visit.DrawPlots()
                utils.saveImagesForViews(views, "source")
                visit.DeleteAllPlots()
                visit.ClearWindow()
                print " done. (%.2f seconds)" % (time.time() - start_time)
        if utils.getValueForKeyPath(clusterConf, 'visualiseClusters'):
            skip = False
            if conf['resume'] == True:
                # Resume handling for the cluster ("tracking") renderings.
                exists = utils.images_exist(visitConf['views'], "tracking",
                                            image_count)
                if exists == "all":
                    print "Cluster visualization " + number_postfix + " exists. Skipping."
                    skip = True
                elif exists == "partial":
                    print "Deleting partial cluster visualization " + number_postfix
                    utils.delete_images(conf, "tracking", image_count)
            if skip == False:
                # Run the conversion
                print "-- Converting clusters to .vtr --"
                start_time = time.time()
                params = "-f %s %s" \
                    % (cluster_file, utils.getValueForKeyPath(conf, 'postprocessing.clusters.meanie3D-cfm2vtk'))
                if utils.getValueForKeyPath(conf, 'postprocessing.clusters.showDisplacementVectors'):
                    params += " --write-displacement-vectors"
                if utils.getValueForKeyPath(conf, 'data.vtkDimensions'):
                    vtkDimString = ",".join(
                        utils.getValueForKeyPath(conf, 'data.vtkDimensions'))
                    params += " --vtk-dimensions=%s" % vtkDimString
                # pdb.set_trace();
                print "meanie3D-cfm2vtk %s" % params
                meanie3D.app.external.execute_command('meanie3D-cfm2vtk', params)
                print " done. (%.2f seconds)" % (time.time() - start_time)
                # Move cluster output file to individual file
                if uses_time:
                    cluster_vtk_file_dst = basename + "-clusters.vtk"
                    os.rename(cluster_vtk_file, cluster_vtk_file_dst)
                    cluster_vtk_file = cluster_vtk_file_dst
                print "-- Rendering cluster scene --"
                start_time = time.time()
                # Add ancillary background data
                utils.plotMapdata(visitConf, 'map')
                # Add timestamp
                if utils.getValueForKeyPath(clusterConf, 'showDateTime'):
                    utils.add_datetime(clusterConf, netcdf_file, time_index)
                # Add background source data
                if utils.getValueForKeyPath(clusterConf, 'showSourceBackground'):
                    utils.addPseudocolorPlots(netcdf_file, visitConf,
                                              'sourceBackground.plots', time_index)
                    source_open = True
                # Add the clusters
                add_clusters(cluster_vtk_file, conf)
                # Add modes as labels
                labelConf = utils.getValueForKeyPath(visitConf, 'label')
                if labelConf:
                    utils.addLabelPlot(label_vtk_file, labelConf)
                # Add displacement vectors
                if utils.getValueForKeyPath(clusterConf, 'showDisplacementVectors'):
                    vectorConf = utils.getValueForKeyPath(visitConf,
                                                          'displacementVectors')
                    if vectorConf:
                        utils.addVectorPlot(displacement_vtk_file, vectorConf)
                visit.DrawPlots()
                utils.saveImagesForViews(views, "tracking")
                print " done. (%.2f seconds)" % (time.time() - start_time)
        # clean up
        visit.DeleteAllPlots();
        visit.ClearWindow()
        if source_open:
            visit.CloseDatabase(netcdf_file)
            visit.CloseDatabase(label_vtk_file)
        utils.close_pattern(basename + "*.vtr")
        utils.close_pattern(basename + "*.vtk")
        utils.close_pattern(basename + "*.vtu")
        if utils.getValueForKeyPath(conf, 'cleanup_vtk'):
            subprocess.call("rm -f *.vt*", shell=True)
        # periodically kill computing engine to
        # work around the memory leak fix
        image_count = image_count + 1;
        # TODO: check if this is still necessary and re-implement if it is.
        #if image_count % 100 == 0:
        #    visit.CloseComputeEngine()
    # close mapstuff
    # TODO: might need to keep track of open database files and
    # close them.
    # Use imagemagick to use image sequences to make movies
    movieFormats = utils.getValueForKeyPath(clusterConf, 'movieFormats')
    if utils.getValueForKeyPath(clusterConf, 'createSourceMovie'):
        utils.createMoviesForViews(views, "source", movieFormats)
    if utils.getValueForKeyPath(clusterConf, 'createClusterMovie'):
        utils.createMoviesForViews(views, "tracking", movieFormats)
    # clean up
    print "Cleaning up ..."
    subprocess.call("mkdir images", shell=True)
    subprocess.call("mv *tracking_*.png images", shell=True)
    subprocess.call("mv *source_*.png images", shell=True)
    subprocess.call("mkdir movies", shell=True)
    subprocess.call("mv *source*.gif *tracking*.gif *.m4v movies", shell=True)
    if utils.getValueForKeyPath(conf, 'cleanup_vtk'):
        subprocess.call("rm -f *.vt* visitlog.py", shell=True)
    return
def api_root():
    """Render the landing page after clearing any previously uploaded image sets."""
    msg = 'Image-Classification-Toolkit permite realizar una clasificación dual a partir de dos conjuntos de imágenes (positivas y negativas) con los algoritmos SVM-KNN-BPNN-CNN y Transfer Learning .'
    # Every visit starts from empty positive/negative training folders.
    for folder_key in ('FOLDER_POSITIVE', 'FOLDER_NEGATIVE'):
        utils.delete_images(app.config[folder_key])
    return render_template('index.html', msg=msg)