def __init__(self, master=None):
    """Build the main application frame, load the CNN models and
    optionally start processing.

    Args:
        master: parent Tk widget (None lets Tk pick the default root).
    """
    self.params = params()
    if not self.params.dirs:
        # Without a loaded config there is nothing to build on; abort early.
        # NOTE(review): the Frame itself is left uninitialized in this case.
        print('config file - failed to load')
        return
    tk.Frame.__init__(self, master, background="yellow")
    self.pack(fill="both", expand=True)
    self.createWidgets()
    # 'None' (the literal string) is the config sentinel for
    # "no trash model configured".
    if self.params.files['model_trash'] != 'None':
        self.cnn_trash = classifications.cnn_classification(
            self.params.files['model_trash'],
            im_height=self.params.neural['im_height'],
            im_width=self.params.neural['im_width'])
        print('load model ' + self.params.files['model_trash'])
        # NOTE(review): the taxon model is only loaded when a trash model
        # is configured as well - confirm this coupling is intended.
        self.cnn_taxon = classifications.cnn_classification(
            self.params.files['model_taxon'],
            im_height=self.params.neural['im_height'],
            im_width=self.params.neural['im_width'])
        print('load model ' + self.params.files['model_taxon'])
    # Config values are strings, so compare against the literal 'True'.
    if self.params.processing['auto_start'] == 'True':
        self.start()
    else:
        self.running_state = 'stopped'
def __init__(self, params):
    """Store the processing configuration and load the configured CNN models.

    Args:
        params: configuration object exposing ``processing``, ``files`` and
            ``neural`` dictionaries (values are strings read from the
            config file).
    """
    self.params = params
    self.cur_progress = 0  # progress counter for the current run
    self.elapsed = 0       # accumulated processing time
    # Config values are strings, so compare against the literal 'True'.
    self.correct_RGBShift = self.params.processing['ch_shift'] == 'True'
    # 'None' (the literal string) is the config sentinel for
    # "no trash model configured".
    if self.params.files['model_trash'] != 'None':
        self.cnn_trash = classifications.cnn_classification(
            self.params.files['model_trash'],
            im_height=self.params.neural['im_height'],
            im_width=self.params.neural['im_width'])
        print('load model ' + self.params.files['model_trash'])
        # NOTE(review): the taxon model is only loaded when a trash model
        # is configured as well - confirm this coupling is intended.
        self.cnn_taxon = classifications.cnn_classification(
            self.params.files['model_taxon'],
            im_height=self.params.neural['im_height'],
            im_width=self.params.neural['im_width'])
        print('load model ' + self.params.files['model_taxon'])
def __init__(self, params):
    """Keep the configuration and load the taxon classification model.

    Args:
        params: configuration object exposing ``processing``, ``files`` and
            ``neural`` dictionaries (values are strings read from the
            config file).
    """
    self.params = params
    self.cur_progress = 0  # progress counter for the current run
    self.elapsed = 0       # accumulated processing time
    # Config values are strings, so the flags compare against 'True'.
    proc_cfg = params.processing
    self.correct_RGBShift = proc_cfg['ch_shift'] == 'True'
    self.save_cropped = proc_cfg['save_cropped'] == 'True'
    model_path = params.files['model_taxon']
    self.cnn_taxon = classifications.cnn_classification(
        model_path,
        model_output_layer=params.neural['model_output_layer'])
    self.classes = []
    print('load model ' + model_path)
def __init__(self, master=None):
    """Build the classification frame, load the CNN model and make sure
    the result folder hierarchy exists.

    Args:
        master: parent Tk widget (None lets Tk pick the default root).
    """
    self.params = params()
    tk.Frame.__init__(self, master, background="green")
    self.pack(fill="both", expand=True)
    self.createWidgets()
    self.cnn = classifications.cnn_classification(
        self.params.files['model'])
    print('load model ' + self.params.files['model'])
    # Ensure the classification output folders exist; compute the common
    # base path once instead of re-joining it for every check.
    classification_dir = os.path.join(self.params.dirs['root'],
                                      self.params.dirs['classification'])
    check_folder(folder=classification_dir, create=True)
    check_folder(folder=os.path.join(classification_dir,
                                     self.params.dirs['results']),
                 create=True)
    print('result folders are checked and created')
default=None) parser.add_argument('-s', action='store', dest='s', type=bool, required=False, default=False) parser.add_argument('-o', action='store', dest='o', type=str, required=False, default='') # create classfier cnn = classifications.cnn_classification() # Parse the arguments inargs = parser.parse_args() path_str = os.path.abspath(inargs.i) if inargs.b is None: logging.info('Single image process') cell_classifier(path_str, cnn=cnn, save_diag=inargs.s, out_dir=inargs.o) else: logging.info('Batch execution') batch_cell_classifier(path_str, cnn=cnn,
cfg = train_params(data_dir, training_id=training_id) typedict_file = os.path.join(cfg.train_dir, 'type_dict.csv') model_file = os.path.join(cfg.train_dir, 'cnn_model.dnn') type_dict = {} reader = csv.DictReader(open(typedict_file, 'rt'), delimiter=':') for row in reader: type_dict[row['label']] = row['type'] sorted_classes = OrderedDict(sorted(type_dict.items(), key=lambda x: x[1])).values() # LOAD MODEL cnn_class = classifications.cnn_classification(model_file) num_classes = cnn_class.pred.output.shape[0] df_test_image = pd.read_csv(cfg.test_image_list_file, delimiter=';') df_test_text = pd.read_csv(cfg.test_text_list_file, delimiter=';') samples = {} contingency_table = np.zeros((num_classes, num_classes)) misclassified = [] df_images_processed = df_test_image.reset_index(drop=True).copy() df_images_processed['predicted_type'] = None df_images_processed['prob_taxon'] = None pd.options.mode.chained_assignment = None # default='warn'