def from_tar(f): """ Create a `Parameters` object from the given file. And the `Parameters` only contains the parameters in this file. It is adapted the parameters are same in the defined network and the given file. For example, it can be used in the inference. :param f: the initialized model file. :type f: tar file :return: A Parameters object. :rtype: Parameters. """ params = Parameters() tar = tarfile.TarFile(fileobj=f, mode='r') for finfo in tar: assert isinstance(finfo, tarfile.TarInfo) if finfo.name.endswith('.protobuf'): f = tar.extractfile(finfo) conf = ParameterConfig() conf.ParseFromString(f.read()) params.__append_config__(conf) for param_name in params.names(): f = tar.extractfile(param_name) params.deserialize(param_name, f) return params
def __rand_param_config__(name):
    conf = ParameterConfig()
    conf.name = name
    size = 1
    for i in xrange(2):
        dim = random.randint(1, 1000)
        conf.dims.append(dim)
        size *= dim
    conf.size = size
    assert conf.IsInitialized()
    return conf
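# A sketch of how the helper above could be used in a unit test to populate a
# Parameters object with randomly shaped configs; the parameter names below
# are made up for illustration.
def _example_random_parameters():
    params = Parameters()
    for param_name in ['img_conv.w0', 'fc.w0', 'fc.wbias']:
        params.__append_config__(__rand_param_config__(param_name))
    return params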
def from_tar(f):
    params = Parameters()
    tar = tarfile.TarFile(fileobj=f, mode='r')
    for finfo in tar:
        assert isinstance(finfo, tarfile.TarInfo)
        if finfo.name.endswith('.protobuf'):
            f = tar.extractfile(finfo)
            conf = ParameterConfig()
            conf.ParseFromString(f.read())
            params.__append_config__(conf)

    for param_name in params.names():
        f = tar.extractfile(param_name)
        params.deserialize(param_name, f)
    return params
def convert_BatchNorm_layer(self, params, name=None):
    # Caffe's BatchNorm layer stores the running mean, the running variance
    # and a scale factor as its last blob; rescale the mean and variance by
    # 1 / scale_factor, guarding against a zero scale factor.
    scale = 1 / np.array(params[-1].data)[0] if np.array(
        params[-1].data)[0] != 0 else 0
    for i in range(2):
        data = np.array(params[i].data) * scale
        file_name = "_%s.w%s" % (name, str(i + 1))
        param_conf = ParameterConfig()
        param_conf.name = file_name
        dims = list(data.shape)
        assert len(dims) == 1
        dims.insert(0, 1)
        param_conf.size = reduce(lambda a, b: a * b, dims)
        param_conf.dims.extend(dims)
        self.params[file_name] = (param_conf, data.flatten())
    return name
def convert_Scale_layer(self, params, name=None):
    # The Scale layer supplies the scale/shift for the preceding BatchNorm
    # layer, so its parameters are stored under that layer's name.
    assert self.pre_layer_type == "BatchNorm"
    name = self.pre_layer_name
    for i in range(len(params)):
        data = np.array(params[i].data)
        suffix = "0" if i == 0 else "bias"
        file_name = "_%s.w%s" % (name, suffix)
        param_conf = ParameterConfig()
        param_conf.name = file_name
        dims = list(data.shape)
        assert len(dims) == 1
        dims.insert(0, 1)
        param_conf.size = reduce(lambda a, b: a * b, dims)
        if i == 1:
            param_conf.dims.extend(dims)
        self.params[file_name] = (param_conf, data.flatten())
    return name
def convert_InnerProduct_layer(self, params, name=None):
    for i in range(len(params)):
        data = np.array(params[i].data)
        if len(params) == 2:
            suffix = "0" if i == 0 else "bias"
            file_name = "_%s.w%s" % (name, suffix)
        else:
            file_name = "_%s.w%s" % (name, str(i))
        # Transpose the weight matrix to match PaddlePaddle's layout
        # (transposing a 1-D bias is a no-op).
        data = np.transpose(data)
        param_conf = ParameterConfig()
        param_conf.name = file_name
        dims = list(data.shape)
        if len(dims) < 2:
            dims.insert(0, 1)
        param_conf.size = reduce(lambda a, b: a * b, dims)
        param_conf.dims.extend(dims)
        self.params[file_name] = (param_conf, data.flatten())
    return name
def convert_fc_layer(self, params, params_names=None, name=None):
    for i in range(len(params)):
        data = params[i]
        if len(params) == 2:
            suffix = "0" if i == 0 else "bias"
            file_name = "_%s.w%s" % (name, suffix) if not (
                params_names and params_names[i]) else params_names[i]
        else:
            file_name = "_%s.w%s" % (name, str(i)) if not (
                params_names and params_names[i]) else params_names[i]
        param_conf = ParameterConfig()
        param_conf.name = file_name
        dims = list(data.shape)
        if len(dims) < 2:
            dims.insert(0, 1)
        param_conf.size = reduce(lambda a, b: a * b, dims)
        param_conf.dims.extend(dims)
        self.params[file_name] = (param_conf, data.flatten())
    return name
def convert_bn_layer(self, params, params_names=None, name=None):
    # Reorder the parameters so that the entry written as ".wbias" comes
    # last; the first three are written as ".w0", ".w1" and ".w2".
    params = [params[i] for i in (0, 2, 3, 1)]
    params_names = [params_names[i]
                    for i in (0, 2, 3, 1)] if params_names else params_names
    for i in range(len(params)):
        data = params[i]
        file_name = "_%s.w%s" % (name, str(i)) if i < 3 else "_%s.w%s" % (
            name, "bias")
        file_name = file_name if not (
            params_names and params_names[i]) else params_names[i]
        param_conf = ParameterConfig()
        param_conf.name = file_name
        dims = list(data.shape)
        assert len(dims) == 1
        dims.insert(0, 1)
        param_conf.size = reduce(lambda a, b: a * b, dims)
        param_conf.dims.extend(dims)
        self.params[file_name] = (param_conf, data.flatten())
    return name
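# The convert_* methods above all share the same bookkeeping: pad the shape
# to the required rank with a leading 1, fill in a ParameterConfig, and store
# the flattened values under the generated file name. Below is a sketch of
# that common step as a standalone helper; the helper is not part of the
# converters and is only illustrative.
def _register_param(store, file_name, data, min_rank=2):
    param_conf = ParameterConfig()
    param_conf.name = file_name
    dims = list(data.shape)
    while len(dims) < min_rank:
        dims.insert(0, 1)
    param_conf.size = reduce(lambda a, b: a * b, dims)
    param_conf.dims.extend(dims)
    # `store` plays the role of self.params in the converter classes above.
    store[file_name] = (param_conf, data.flatten())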