def __save_model(self, output_files):
    if self.save_format == 'pickle':
        if len(output_files) != 1:
            raise RuntimeError('Invalid output file(s)')
        save_file = output_files[0]['file_path']
        makedir_ifnot(os.path.dirname(save_file))
        with open(save_file, 'wb') as f:
            pickle.dump(self.model, f, protocol=2)
        if self.verbose >= 1:
            print('Saved %s' % save_file)
    elif self.save_format == 'bdmodel':
        if not self.model.__class__.__name__ == 'FastL2LiR':
            raise NotImplementedError(
                'BD model currently supports only FastL2LiR models.')
        # Save each model attribute ('src') as an array under the key 'dst'
        for s in output_files:
            makedir_ifnot(os.path.dirname(s['file_path']))
            save_array(s['file_path'], getattr(self.model, s['src']),
                       key=s['dst'], dtype=self.dtype, sparse=s['sparse'])
            if self.verbose >= 1:
                print('Saved %s' % s['file_path'])
    else:
        raise ValueError('Unsupported output format: %s' % self.save_format)

    return None
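For reference, a minimal sketch of what the `output_files` argument could look like for each format. This is illustrative only: the file paths are placeholders, and the attribute names (`W`, `b`) assume a FastL2LiR model with those attributes, which is not confirmed by the snippet above.

# Hypothetical output_files specs for __save_model (illustrative only).

# 'pickle' format: exactly one entry; the whole model is pickled to file_path.
output_files_pickle = [
    {'file_path': './results/model/model.pkl'},
]

# 'bdmodel' format: one entry per model attribute. Each attribute named by
# 'src' is saved with save_array under the key 'dst'. The attribute names
# 'W' and 'b' are assumptions about FastL2LiR, not taken from the source.
output_files_bdmodel = [
    {'file_path': './results/model/W.mat', 'src': 'W', 'dst': 'W', 'sparse': True},
    {'file_path': './results/model/b.mat', 'src': 'b', 'dst': 'b', 'sparse': False},
]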
def test_load_save_dense_array(self):
    # ndim = 1
    data = np.random.rand(10)
    save_array('./tmp/test_array_dense_ndim1.mat', data, key='testdata')
    testdata = load_array('./tmp/test_array_dense_ndim1.mat', key='testdata')
    np.testing.assert_array_equal(data, testdata)

    # ndim = 2
    data = np.random.rand(3, 2)
    save_array('./tmp/test_array_dense_ndim2.mat', data, key='testdata')
    testdata = load_array('./tmp/test_array_dense_ndim2.mat', key='testdata')
    np.testing.assert_array_equal(data, testdata)

    # ndim = 3
    data = np.random.rand(4, 3, 2)
    save_array('./tmp/test_array_dense_ndim3.mat', data, key='testdata')
    testdata = load_array('./tmp/test_array_dense_ndim3.mat', key='testdata')
    np.testing.assert_array_equal(data, testdata)
def test_load_save_sparse_array(self):
    # ndim = 1
    data = np.random.rand(10)
    data[data < 0.8] = 0  # Zero out most entries to make the array sparse
    save_array('./tmp/test_array_sparse_ndim1.mat', data, key='testdata', sparse=True)
    testdata = load_array('./tmp/test_array_sparse_ndim1.mat', key='testdata')
    np.testing.assert_array_equal(data, testdata)

    # ndim = 2
    data = np.random.rand(3, 2)
    data[data < 0.8] = 0
    save_array('./tmp/test_array_sparse_ndim2.mat', data, key='testdata', sparse=True)
    testdata = load_array('./tmp/test_array_sparse_ndim2.mat', key='testdata')
    np.testing.assert_array_equal(data, testdata)

    # ndim = 3
    data = np.random.rand(4, 3, 2)
    data[data < 0.8] = 0
    save_array('./tmp/test_array_sparse_ndim3.mat', data, key='testdata', sparse=True)
    testdata = load_array('./tmp/test_array_sparse_ndim3.mat', key='testdata')
    np.testing.assert_array_equal(data, testdata)
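The two tests above exercise the dense and sparse round-trips through save_array/load_array. A minimal standalone sketch of the same pattern (the file path and key are arbitrary; the import path assumes bdpy's dataform module):

import numpy as np
from bdpy.dataform import load_array, save_array  # assumed import path

# Round-trip a sparse 2-D array through a .mat file.
a = np.random.rand(5, 4)
a[a < 0.8] = 0  # zero out most entries so sparse storage pays off
save_array('./tmp/example.mat', a, key='a', sparse=True)
b = load_array('./tmp/example.mat', key='a')
assert np.array_equal(a, b)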
makedir_ifnot(results_dir_prediction)
makedir_ifnot(results_dir_accuracy)

start_time = time()

# Predicted features
for i, _ in enumerate(x_labels_unique):
    # Predicted features
    feat = np.array([y_pred[i, ]])  # To make feat shape 1 x M x N x ...

    # Save file name
    save_file = os.path.join(results_dir_prediction, '%s.mat' % label_names[i])

    # Save
    save_array(save_file, feat, key='feat', dtype=np.float32, sparse=False)

print('Saved %s' % results_dir_prediction)

# Prediction accuracy
save_file = os.path.join(results_dir_accuracy, 'accuracy.mat')
save_array(save_file, accuracy, key='accuracy', dtype=np.float32, sparse=False)
print('Saved %s' % save_file)

print('Elapsed time (saving results): %f' % (time() - start_time))

distcomp.unlock(analysis_id)
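A hedged sketch of reading one of the saved prediction files back; the label name used here is a placeholder, and the import path assumes bdpy's dataform module:

from bdpy.dataform import load_array  # assumed import path

# Load the predicted features for one label; shape is 1 x M x N x ...
feat = load_array(os.path.join(results_dir_prediction, 'label_0001.mat'),
                  key='feat')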
# Save normalization parameters
# -----------------------------
print('Saving normalization parameters.')
norm_param = {'x_mean': x_mean, 'y_mean': y_mean,
              'x_norm': x_norm, 'y_norm': y_norm}
save_targets = [u'x_mean', u'y_mean', u'x_norm', u'y_norm']
for sv in save_targets:
    save_file = os.path.join(results_dir, sv + '.mat')
    if not os.path.exists(save_file):
        try:
            save_array(save_file, norm_param[sv], key=sv,
                       dtype=np.float32, sparse=False)
            print('Saved %s' % save_file)
        except Exception:
            warnings.warn('Failed to save %s. Possibly double running.'
                          % save_file)

# Preparing learning
# ------------------
model = FastL2LiR()
model_param = {'alpha': alpha, 'n_feat': num_voxel[roi]}

# Distributed computation setup
# -----------------------------
makedir_ifnot('./tmp')
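For context, a sketch of how mean/norm parameters like these are typically applied before model fitting. The variable names follow the snippet above, but the exact normalization used elsewhere in this pipeline is an assumption:

# Standardize data with the saved parameters (illustrative; the actual
# pipeline may handle zero norms or broadcasting differently).
x_normalized = (x - x_mean) / x_norm
y_normalized = (y - y_mean) / y_norm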
def extract_image_features(image_file, net, layers=[], crop_center=False,
                           image_preproc=[], save_dir=None, verbose=False,
                           progbar=False, return_features=True):
    '''
    Extract DNN features of a given image.

    Parameters
    ----------
    image_file : str or list
        (List of) path to the input image file(s).
    net : Caffe network instance
    layers : list
        List of DNN layers of which features are returned.
    crop_center : bool (default: False)
        Crop the center of an image or not.
    image_preproc : list (default: [])
        List of additional preprocessing functions. The function input/output
        should be a PIL.Image instance. The preprocessing functions are
        applied after RGB conversion, center-cropping, and resizing of the
        input image.
    save_dir : None or str (default: None)
        Save the features in the specified directory if not None.
    verbose : bool (default: False)
        Output verbose messages or not.
    progbar : bool (default: False)
        Show a progress bar or not.
    return_features : bool (default: True)
        Return the extracted features or not.

    Returns
    -------
    dict
        Dictionary in which keys are DNN layers and values are features.
    '''

    if isinstance(image_file, str):
        image_file = [image_file]

    features_dict = {}

    if progbar:
        image_file = tqdm(image_file)

    for imgf in image_file:
        if verbose:
            print('Image: %s' % imgf)

        image_size = net.blobs['data'].data.shape[-2:]
        mean_img = net.transformer.mean['data']

        # Open the image
        img = PIL.Image.open(imgf)

        # Convert non-RGB to RGB
        if img.mode == 'CMYK':
            img = img.convert('RGB')

        if img.mode == 'RGBA':
            bg = PIL.Image.new('RGB', img.size, (255, 255, 255))
            bg.paste(img, mask=img.split()[3])
            img = bg

        # Convert monochrome to RGB
        if img.mode == 'L':
            img = img.convert('RGB')

        # Center cropping
        if crop_center:
            w, h = img.size
            img = img.crop(((w - min(img.size)) // 2,
                            (h - min(img.size)) // 2,
                            (w + min(img.size)) // 2,
                            (h + min(img.size)) // 2))

        # Resize
        img = img.resize(image_size, PIL.Image.BICUBIC)

        for p in image_preproc:
            img = p(img)

        # Convert to array (HWC -> CHW, RGB -> BGR) and subtract the mean image
        img_array = np.array(img)
        img_array = np.float32(np.transpose(img_array, (2, 0, 1))[::-1]) \
            - np.reshape(mean_img, (3, 1, 1))

        # Forwarding
        net.blobs['data'].reshape(1, 3, img_array.shape[1], img_array.shape[2])
        net.blobs['data'].data[0] = img_array
        net.forward()

        # Get features
        for lay in layers:
            feat = net.blobs[lay].data.copy()

            if return_features:
                if lay in features_dict:
                    features_dict.update(
                        {lay: np.vstack([features_dict[lay], feat])})
                else:
                    features_dict.update({lay: feat})

            if save_dir is not None:
                # Save the features
                save_dir_lay = os.path.join(save_dir, lay.replace('/', ':'))
                save_file = os.path.join(
                    save_dir_lay,
                    os.path.splitext(os.path.basename(imgf))[0] + '.mat')
                if not os.path.exists(save_dir_lay):
                    os.makedirs(save_dir_lay)
                if os.path.exists(save_file):
                    if verbose:
                        print('%s already exists. Skipped.' % save_file)
                    continue
                save_array(save_file, feat, key='feat',
                           dtype=np.float32, sparse=False)
                if verbose:
                    print('%s saved.' % save_file)

    if return_features:
        return features_dict
    else:
        return None
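A minimal usage sketch. The prototxt/caffemodel paths, image files, and layer names are placeholders, and the function requires a net whose `transformer.mean['data']` is already configured (as done elsewhere in the codebase); the setup shown here is an assumption, not the repo's actual loading code:

# Hypothetical setup: all paths and layer names are placeholders.
import caffe

net = caffe.Classifier('deploy.prototxt', 'model.caffemodel')
# ... transformer/mean setup omitted; net.transformer.mean['data'] must exist ...

features = extract_image_features(
    ['img001.jpg', 'img002.jpg'],
    net,
    layers=['conv1', 'fc8'],
    crop_center=True,
    save_dir='./features',
    verbose=True)

# Features of each layer are stacked across images along the first axis.
print(features['fc8'].shape)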