def start(self):
    """Kick off processing if the application is currently stopped.

    Ensures the classification/results folder tree exists, logs what was
    found/created to the text widget, then enters the update loop.
    """
    if self.running_state != 'stopped':
        # Already running (or mid-transition): nothing to do.
        return

    def report(message):
        # Append a date+time-stamped line to the log widget and keep it scrolled.
        self.text.insert(
            tk.END,
            self.date_entry.get() + ' ' + time.strftime("%I:%M:%S") + ' : ' + message)
        self.text.see(tk.END)

    self.change_state('running')
    report('start processing\n')
    # Make sure the parent classification folder exists before the results folder.
    check_folder(
        folder=os.path.join(self.params.dirs['root'],
                            self.params.dirs['classification']),
        create=True)
    results_dir = os.path.join(self.params.dirs['root'],
                               self.params.dirs['classification'],
                               self.params.dirs['results'])
    # check_folder(create=True) reports whether the folder already existed
    # and creates it when missing — presumably; confirm against its definition.
    if not check_folder(folder=results_dir, create=True):
        report('results folder is created\n')
        print('result folders are checked and created')
    else:
        report('results folder exists\n')
    self.onUpdate()
def listen_for_result(self):
    """Poll the worker-thread queue for a finished classification result.

    When a result DataFrame is available it is written to disk (the target
    directory depends on ``self.proc_mode``), the progress UI is updated,
    and processing either stops ('test'/'evaluate' modes) or schedules the
    next cycle. When the queue is still empty, this method re-arms itself
    to poll again after one second.
    """
    try:
        # Only the queue read belongs in the try: a non-blocking get raises
        # queue.Empty when the worker has not finished yet. Keeping the rest
        # of the handling in the else-branch avoids silently swallowing an
        # Empty raised anywhere downstream. get_nowait() is the explicit
        # spelling of the original get(0).
        df_images_processed = self.thread_queue.get_nowait()
    except queue.Empty:
        # No result yet: reflect worker progress and poll again in 1 s.
        self.change_state('running')
        self.progress['value'] = self.process.cur_progress
        self.after(1000, self.listen_for_result)
    else:
        if self.proc_mode == 'test':
            df_images_processed.to_csv(os.path.join(
                self.test_dir, 'classification_result.csv'), index=False)
        elif self.proc_mode == 'evaluate':
            df_images_processed.to_csv(os.path.join(
                self.eval_dir, 'classification_result.csv'), index=False)
            cont_table = self.create_contingency(df_images_processed)
            self.text.insert(
                tk.END,
                self.date_entry.get() + ' ' + time.strftime("%I:%M:%S") +
                ' : ' + 'creating contingency table..\n')
            self.text.see(tk.END)
            cont_table.to_csv(os.path.join(self.eval_dir,
                                           'contingency_table.csv'),
                              index=True)
        else:
            # Normal processing: persist results under the results tree.
            res_folder = os.path.join(self.params.dirs['root'],
                                      self.params.dirs['classification'],
                                      self.params.dirs['results'])
            check_folder(folder=res_folder, create=True)
            df_images_processed.to_csv(os.path.join(
                res_folder, 'classification_result.csv'), index=False)
            self.create_output(df_images_processed)
        self.thread_queue.task_done()
        self.change_state('checking')
        self.progress['value'] = 1
        self.text.insert(
            tk.END,
            str(df_images_processed.shape[0]) + ' images were processed in ' +
            str("{0:.2f}".format(self.process.elapsed)) + ' secs\n')
        self.text.see(tk.END)
        if self.proc_mode in ('test', 'evaluate'):
            # One-shot modes end after a single batch.
            self.stop()
        else:
            self.onUpdate()
def create_output(self, df_images_processed):
    """Write per-measurement output for a batch of classified images.

    For every (dir1, dir2) folder pair found in *df_images_processed* this:
    copies each image into a per-predicted-type subfolder of the results
    tree, reads the measurement control log, and writes an XML summary of
    per-taxon counts (raw and, when a positive measured volume is known,
    scaled per unit volume).

    Assumes *df_images_processed* has columns 'dir1', 'dir2', 'root',
    'image_file', 'predicted_type' — inferred from the accesses below;
    confirm against the producer of this DataFrame.
    """
    measure_dir = os.path.join(self.params.dirs['root'],
                               self.params.dirs['measurement'])
    date_folders = df_images_processed['dir1'].unique()
    for dfo in date_folders:
        df_date = df_images_processed[df_images_processed['dir1'] == dfo]
        measure_folders = df_date['dir2'].unique()
        for mfo in measure_folders:
            # select items
            df_measure = df_date[df_date['dir2'] == mfo]
            # Check and create folders
            cur_dir = os.path.join(measure_dir, dfo, mfo)
            self.text.insert(tk.END, 'visiting : ' + cur_dir + '\n')
            res_folder1 = os.path.join(self.params.dirs['root'],
                                       self.params.dirs['classification'],
                                       self.params.dirs['results'], dfo)
            check_folder(folder=res_folder1, create=True)
            res_folder = os.path.join(res_folder1, mfo)
            check_folder(folder=res_folder, create=True)
            # Write to folders: one subfolder per predicted type.
            for index, df_image in df_measure.iterrows():
                class_folder = os.path.join(res_folder,
                                            df_image['predicted_type'])
                check_folder(folder=class_folder, create=True)
                shutil.copy(
                    os.path.join(df_image['root'], df_image['image_file']),
                    os.path.join(class_folder, df_image['image_file']))
            # Read log: control file presumably has 'Measured Volume' as
            # "<number> <unit>" and 'Sample DateTime' as "<date>\t<time>" —
            # TODO confirm against read_log() and the instrument's log format.
            log_file = os.path.join(measure_dir, dfo, mfo,
                                    self.params.files['control'])
            log_dict = read_log(log_file)
            measured_volume = float(
                log_dict['Measured Volume'].split(' ')[0])
            measure_datetime = log_dict['Sample DateTime'].split('\t')
            measure_date = measure_datetime[0].replace('.', '')
            measure_time = measure_datetime[1].replace(':', '')
            # NOTE(review): after str.replace() these are always str, so the
            # two fallbacks below look unreachable — confirm whether read_log
            # can ever return non-string values here.
            if not (type(measure_time) == str):
                measure_time = time.strftime("%H%M")
            if not (type(measure_date) == str):
                measure_date = time.strftime("%Y%m%d_%H%M")
            # Only scale counts by volume when a positive volume was measured.
            scaled = True
            if not (measured_volume > 0):
                scaled = False
            # Create XML summary: total counts, then per-taxon counts.
            cur_result = XMLWriter()
            cur_result.addAllCount(df_measure.shape[0], False)
            if scaled:
                cur_result.addAllCount(
                    int(float(df_measure.shape[0]) / measured_volume), True)
            cur_result.addMeasuredVolume(measured_volume)
            taxons = df_measure.predicted_type.unique()
            for taxon in taxons:
                count = df_measure.predicted_type.value_counts()[taxon]
                cur_result.addTaxonStat(taxon, int(count), False)
                if scaled:
                    cur_result.addTaxonStat(
                        taxon, int(float(count) / measured_volume), True)
            xml_file = os.path.join(
                measure_dir, dfo, mfo,
                'MeasureSum_' + measure_date + '_' + measure_time + '.xml')
            cur_result.save(targetFile=xml_file)
            # Create an empty details file as a placeholder alongside the
            # summary — no content is written here.
            xml_file_details = os.path.join(
                measure_dir, dfo, mfo,
                'MeasureDetails_' + measure_date + '_' + measure_time + '.xml')
            file = open(xml_file_details, 'w')
            file.close()
def process_oneimage(self, image_file, man_type):
    """Classify one image and validate the prediction against taxon limits.

    Returns the tuple ``(predicted_type, final_type, predicted_strength,
    char_sizes)``. ``final_type`` starts equal to ``predicted_type`` and may
    be downgraded to an 'Others.Others.*' category by the strength, size and
    shape checks (later checks override earlier ones). When image creation
    fails, the defaults (None / 'Others.Others.Unsure') are returned.
    """
    predicted_label = None
    predicted_type = 'Others.Others.Unsure'
    final_type = None
    predicted_strength = None
    char_sizes = None

    # Optionally keep the cropped intermediate image in a temp folder.
    if self.save_cropped:
        tmp_folder = os.path.join(os.path.curdir, 'tmp_crop')
        check_folder(tmp_folder, create=True)
        save_file = os.path.join(tmp_folder, os.path.basename(image_file))
        category = 'cropped'
    else:
        save_file, category = '', ''

    img, char_sizes = classifications.create_image(
        image_file,
        cropped=True,
        correct_RGBShift=self.correct_RGBShift,
        save_file=save_file,
        category=category)

    if img is not None and char_sizes is not None:
        if self.params.processing['use_neural'] == 'True':
            predicted_label, predicted_strength = self.cnn_taxon.classify(
                img, char_sizes=char_sizes)
            predicted_type = self.params.type_dict_taxon[str(predicted_label)]
        else:
            # Neural net disabled: take the manual label at full strength.
            predicted_strength = 65535
            predicted_type = man_type
        final_type = predicted_type

        # Look up per-taxon limits; skip validation when none are defined.
        limits = self.params.threshold_df_taxon.loc[
            self.params.threshold_df_taxon['Taxon'] == predicted_type]
        if not limits.empty:
            min_l = limits['min_l'].values[0]
            max_l = limits['max_l'].values[0]
            min_rate = limits['min_rate'].values[0]
            max_rate = limits['max_rate'].values[0]
            strength_tsh = limits['strength_tsh'].values[0]
            # char_sizes[0] is the larger axis length; guard against /0.
            rate = char_sizes[1] / char_sizes[0] if char_sizes[0] > 0 else -1
            if predicted_strength < strength_tsh:
                final_type = 'Others.Others.Unsure'
            if char_sizes[0] < min_l or char_sizes[0] > max_l:
                final_type = 'Others.Others.InappropriateSize'
            if rate < min_rate or rate > max_rate:
                final_type = 'Others.Others.InappropriateShape'
    return predicted_type, final_type, predicted_strength, char_sizes
predicted_label, predicted_prob = cnn_class.classify(img, char_sizes=(maxl, minl)) contingency_table[label, predicted_label] += 1 # rows are actual labels, cols are predictions, df_images_processed['predicted_type'][index] = type_dict[str( predicted_label)] df_images_processed['prob_taxon'][index] = predicted_prob if write_folders: if image_file != image_file.replace( '__0', ''): # save only first from the rotated images cur_dir = os.path.join(save_dir, type_dict[str(predicted_label)]) check_folder(folder=cur_dir, create=True) shutil.copy( image_file.replace('__0', '').replace(curdb_dir, base_db), os.path.join(cur_dir, os.path.basename(image_file.replace('__0', '')))) if predicted_label != label: mis_item = [ image_file, type_dict[str(predicted_label)], type_dict[str(label)] ] misclassified.append(mis_item) # cont_table = pd.DataFrame( data=contingency_table, # values