def test_save_load():
    """Round-trip a large (2, INT_OVERFLOW) int8 tensor through npx.save/npx.load.

    Verifies that both the shape and a sentinel element survive serialization.
    """
    tensor = np.ones((2, INT_OVERFLOW), dtype='int8')
    # Plant a sentinel value so we can detect data corruption after reload.
    tensor[0][100] = 100
    npx.save('my_tensor', tensor)
    loaded = np.array(npx.load('my_tensor'))
    # npx.load returns a list; element 0 is the saved tensor.
    assert loaded[0].shape == (2, INT_OVERFLOW)
    assert loaded[0][0][100] == 100
def average(param_paths: Iterable[str]) -> Dict[str, torch.Tensor]:
    """
    Averages parameters from a list of .params file paths.

    Each file is first read with ``torch.load``; if that fails the file is
    assumed to be in MXNet format and is converted via ``mxnet.npx.load``.
    All files must contain the same set of parameter names.

    :param param_paths: List of paths to parameter files.
    :return: Averaged parameter dictionary.
    """
    all_params = []  # type: List[Dict[str, torch.Tensor]]
    for path in param_paths:
        logger.info("Loading parameters from '%s'", path)
        try:
            params = torch.load(path, map_location=torch.device('cpu'))
        except Exception:
            # Bare `except:` would also swallow KeyboardInterrupt/SystemExit;
            # catch Exception so only ordinary load failures trigger the
            # MXNet fallback path.
            logger.info('Converting from MXNet')
            from mxnet import npx
            params = npx.load(path)
            # MXNet arrays -> CPU torch tensors via an intermediate numpy copy.
            params = {k: torch.from_numpy(v.asnumpy()) for k, v in params.items()}
        all_params.append(params)
    logger.info("%d models loaded", len(all_params))
    utils.check_condition(
        all(all_params[0].keys() == p.keys() for p in all_params),
        "param names do not match across models")
    avg_params = {}
    # average arg_params
    for k in all_params[0]:
        tensors = [p[k] for p in all_params]
        avg_params[k] = utils.average_tensors(tensors)
    return avg_params
def test_np_save_load_ndarrays():
    """Round-trip single ndarrays, lists of ndarrays, and str->ndarray dicts
    through npx.save/npx.load, including zero-size and scalar shapes."""
    shapes = [(2, 0, 1), (0, ), (), (), (0, 4), (), (3, 0, 0, 0), (2, 1),
              (0, 5, 0), (4, 5, 6), (0, 0, 0)]
    array_list = [_np.random.randint(0, 10, size=shape) for shape in shapes]
    array_list = [np.array(arr, dtype=arr.dtype) for arr in array_list]
    # test save/load single ndarray
    for i, arr in enumerate(array_list):
        with TemporaryDirectory() as work_dir:
            fname = os.path.join(work_dir, 'dataset.npy')
            npx.save(fname, arr)
            arr_loaded = npx.load(fname)
            assert isinstance(arr_loaded, list)
            assert len(arr_loaded) == 1
            assert _np.array_equal(arr_loaded[0].asnumpy(), array_list[i].asnumpy())

    # test save/load a list of ndarrays
    with TemporaryDirectory() as work_dir:
        fname = os.path.join(work_dir, 'dataset.npy')
        npx.save(fname, array_list)
        # NOTE(review): loading via the legacy mx.nd.load here (vs. npx.load
        # elsewhere) — presumably a deliberate cross-loader check; confirm.
        array_list_loaded = mx.nd.load(fname)
        # Fix: the original asserted on the stale `arr_loaded` variable left
        # over from the single-ndarray loop above, so the freshly loaded list
        # was never actually checked.
        assert isinstance(array_list_loaded, list)
        assert len(array_list) == len(array_list_loaded)
        assert all(isinstance(arr, np.ndarray) for arr in array_list_loaded)
        for a1, a2 in zip(array_list, array_list_loaded):
            assert _np.array_equal(a1.asnumpy(), a2.asnumpy())

    # test save/load a dict of str->ndarray
    arr_dict = {}
    keys = [str(i) for i in range(len(array_list))]
    for k, v in zip(keys, array_list):
        arr_dict[k] = v
    with TemporaryDirectory() as work_dir:
        fname = os.path.join(work_dir, 'dataset.npy')
        npx.save(fname, arr_dict)
        arr_dict_loaded = npx.load(fname)
        assert isinstance(arr_dict_loaded, dict)
        assert len(arr_dict_loaded) == len(arr_dict)
        for k, v in arr_dict_loaded.items():
            assert k in arr_dict
            assert _np.array_equal(v.asnumpy(), arr_dict[k].asnumpy())
# Demonstrates saving and loading ndarrays with MXNet's npx.save / npx.load:
# a single array, a list of arrays, and a dict of str -> array.
from mxnet import np, npx
from mxnet.gluon import nn

# Switch MXNet into NumPy-compatible mode.
npx.set_np()

# Save and reload a single ndarray.
x = np.arange(4)
npx.save('x-file', x)
x2 = npx.load('x-file')

# Save and reload a list of ndarrays; load unpacks back into two arrays.
y = np.zeros(4)
npx.save('x-files', [x, y])
x2, y2 = npx.load('x-files')
# Bare expression: REPL/notebook-style display, no effect when run as a script.
(x2, y2)

# Save and reload a dict mapping names to ndarrays.
mydict = {'x': x, 'y': y}
npx.save('mydict', mydict)
mydict2 = npx.load('mydict')
# Bare expression: REPL/notebook-style display, no effect when run as a script.
mydict2