def predict_image(im_path):
    plankton = Segmentation(im_path, target_shape=(75, 75, 3))
    plankton.segment()
    padded = plankton.get_padded()

    feat = np.array(plankton.get_features())
    padded = preprocess_input(np.array(padded, dtype=np.float32))

    x_img = padded.reshape(1, padded.shape[0], padded.shape[1], padded.shape[2])
    x_feat = feat.reshape(1, feat.shape[0])
    x_feat = mms.transform(x_feat)

    y_hat = model.predict([x_img, x_feat])

    valid = False
    labels = {}
    results = []
    for i, y in enumerate(y_hat.flatten()):
        if y > 0.5:
            valid = True
            labels[i] = y
    if valid:
        # Sort the kept labels by probability, ascending (all entries are
        # already above the 0.5 threshold).
        labels = sorted(labels.items(), key=lambda x: x[1], reverse=False)
        save_image(im_path, labels)
        label_hat = class_map[labels[0][0]]
        y_hat = [str(x) for x in y_hat.flatten()]
        results.append(im_path)
        results.append(label_hat)
        results.extend(y_hat)
        return results
    return None
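# A hypothetical driver for the thresholded variant above: run predict_image
# over a folder of test images and collect the non-empty rows into a CSV.
# 'test_images/' and 'predictions.csv' are illustrative names, not from the
# original pipeline.
rows = []
for im_name in os.listdir('test_images'):
    row = predict_image(os.path.join('test_images', im_name))
    if row is not None:  # None means no class cleared the 0.5 threshold
        rows.append(row)
pd.DataFrame(rows).to_csv('predictions.csv', index=False, header=False)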
def predict_image(im_path):
    plankton = Segmentation(im_path, target_shape=(75, 75, 3))
    plankton.segment()
    padded = plankton.get_padded()

    feat = np.array(plankton.get_features())
    padded = preprocess_input(np.array(padded, dtype=np.float32))

    x_img = padded.reshape(1, padded.shape[0], padded.shape[1], padded.shape[2])
    x_feat = feat.reshape(1, feat.shape[0])
    x_feat = mms.transform(x_feat)

    y_hat = model.predict([x_img, x_feat])
    # Single-label variant: take the most probable class instead of
    # thresholding each output.
    label_hat = class_map[np.argmax(y_hat)]
    save_image(im_path, y_hat)

    y_hat = [str(x) for x in y_hat.flatten()]
    results = [im_path, label_hat]
    results.extend(y_hat)
    return results
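# Both variants above assume a class_map from integer label to class name.
# A minimal sketch of how it could be built from the data/ directory layout
# used below; the real mapping may come from the training DataFrame instead.
classes = sorted(os.listdir(os.path.join(os.getcwd(), 'data')))
class_map = {i: c for i, c in enumerate(classes)}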
mms = pickle.load(handle)

# Pre-allocate the cached arrays. X_img must be float, not uint8:
# InceptionV3's preprocess_input rescales pixels to [-1, 1], which a
# uint8 buffer would silently truncate.
X_img = np.empty((df.shape[0], input_shape[0], input_shape[1], input_shape[2]),
                 dtype=np.float32)
X_feat = np.empty((df.shape[0], feat_shape[0]))
y = np.empty((df.shape[0], n_classes), dtype=int)

data_path = os.path.join(os.getcwd(), 'data')
for i, (im_name, label) in tqdm(enumerate(zip(df.im_name, df.label))):
    im_dir = os.path.join(data_path, class_map[label])
    im_path = os.path.join(im_dir, im_name)
    plankton = Segmentation(im_path, target_shape=input_shape)
    plankton.segment()
    padded = plankton.get_padded()
    aug = seq.augment_image(padded)  # augmented copy; unused in this cached pass
    X_img[i,] = preprocess_input(padded)
    X_feat[i,] = plankton.get_features()
    y[i,] = to_categorical(label, num_classes=n_classes)

X_feat = mms.transform(X_feat)
X_img, X_feat, y = shuffle(X_img, X_feat, y, random_state=0)
print(X_img.shape)
print(X_feat.shape)
print(y.shape)

X = [X_img, X_feat]

checkpoint = ModelCheckpoint('./models/inception_v3_3k_cached.model',
                             monitor='val_acc', verbose=1, mode='max',
                             save_best_only=True, save_weights_only=False,
                             period=1)
reduceLROnPlato = ReduceLROnPlateau(monitor='val_loss', factor=0.5,
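# The ReduceLROnPlateau call above is truncated in the source; whatever its
# remaining arguments were, both callbacks would then be passed to fit().
# A minimal training sketch: batch size, epochs, and validation split here
# are illustrative assumptions, not the original settings.
model.fit(X, y,
          batch_size=32,          # assumption
          epochs=50,              # assumption
          validation_split=0.2,   # assumption
          callbacks=[checkpoint, reduceLROnPlato])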
for c in classes:
    im_dir = os.path.join(data_path, c)
    pad_dir = os.path.join(pad_path, c)
    if not os.path.isdir(pad_dir):
        os.mkdir(pad_dir)
    for im_name in os.listdir(im_dir):
        im_path = os.path.join(im_dir, im_name)
        im_pad_path = os.path.join(pad_dir, im_name)
        plankton = Segmentation(im_path, target_shape=(75, 75, 3))
        plankton.segment()
        padded = plankton.get_padded()
        # OpenCV expects BGR channel order when writing to disk.
        padded = cv2.cvtColor(padded, cv2.COLOR_RGB2BGR)
        cv2.imwrite(im_pad_path, padded)
        feats[im_name] = plankton.get_features()
        feat_list.append(plankton.get_features())

with open('features.p', 'wb') as handle:
    pickle.dump(feats, handle, protocol=pickle.HIGHEST_PROTOCOL)

df_feats = pd.DataFrame(data=feat_list, columns=plankton.get_columns())
df_feats.to_csv('extracted_features.csv', index=False)

# Fit a MinMaxScaler on the extracted features and persist it, so training
# and inference can reuse the exact same normalization.
mms = MinMaxScaler()
mms.fit(df_feats.values)
with open('normalizer.p', 'wb') as handle:
    pickle.dump(mms, handle, protocol=pickle.HIGHEST_PROTOCOL)
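# Later stages reload these cached artifacts (see the bare
# `mms = pickle.load(handle)` in the training snippet). A minimal sketch,
# using the file names written above:
with open('normalizer.p', 'rb') as handle:
    mms = pickle.load(handle)      # the fitted MinMaxScaler
with open('features.p', 'rb') as handle:
    feats = pickle.load(handle)    # {im_name: feature vector}
df_feats = pd.read_csv('extracted_features.csv')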