def num_inputs_outputs_from_config(cls, config):
  """
  :type config: Config.Config
  :returns (num_inputs, num_outputs),
     where num_inputs is like num_outputs["data"][0],
     and num_outputs is a dict of data_key -> (dim, ndim),
       where data_key is e.g. "classes" or "data",
       dim is the feature dimension or the number of classes,
       and ndim is the ndim counted without batch-dim,
       i.e. ndim=1 means usually sparse data and ndim=2 means dense data.
  :rtype: (int,dict[str,(int,int)])
  """
  num_inputs = config.int('num_inputs', 0)
  target = config.value('target', 'classes')
  # Explicit specification in the config takes precedence.
  if config.is_typed('num_outputs'):
    num_outputs = config.typed_value('num_outputs')
    if not isinstance(num_outputs, dict):
      num_outputs = {target: num_outputs}
    num_outputs = num_outputs.copy()
    from Dataset import convert_data_dims
    from Util import BackendEngine
    num_outputs = convert_data_dims(num_outputs, leave_dict_as_is=BackendEngine.is_tensorflow_selected())
    if "data" in num_outputs:
      num_inputs = num_outputs["data"][0]
  elif config.has('num_outputs'):
    num_outputs = {target: [config.int('num_outputs', 0), 1]}
  else:
    num_outputs = None
  dataset = None
  if config.list('train') and ":" not in config.value('train', ''):
    dataset = config.list('train')[0]
  # Otherwise derive the dims from the train HDF file, if given as a plain filename.
  if not config.is_typed('num_outputs') and dataset:
    try:
      _num_inputs = hdf5_dimension(dataset, 'inputCodeSize') * config.int('window', 1)
    except Exception:  # fall back to the alternative attribute name
      _num_inputs = hdf5_dimension(dataset, 'inputPattSize') * config.int('window', 1)
    try:
      _num_outputs = {target: [hdf5_dimension(dataset, 'numLabels'), 1]}
    except Exception:  # no global numLabels: read per-target sizes instead
      _num_outputs = hdf5_group(dataset, 'targets/size')
      for k in _num_outputs:
        _num_outputs[k] = [_num_outputs[k], len(hdf5_shape(dataset, 'targets/data/' + k))]
    if num_inputs:
      assert num_inputs == _num_inputs
    if num_outputs:
      assert num_outputs == _num_outputs
    num_inputs = _num_inputs
    num_outputs = _num_outputs
  # As a last resort, read the dims from an existing model file.
  if not num_inputs and not num_outputs and config.has("load"):
    from Network import LayerNetwork
    import h5py
    model = h5py.File(config.value("load", ""), "r")
    num_inputs, num_outputs = LayerNetwork._n_in_out_from_hdf_model(model)
  assert num_inputs and num_outputs, "provide num_inputs/num_outputs directly or via train"
  return num_inputs, num_outputs
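

# A minimal sketch (not RETURNN's convert_data_dims itself, which handles more
# cases) of how the typed `num_outputs` shorthands above get normalized to
# data_key -> [dim, ndim]; the helper name is hypothetical.
def _normalize_num_outputs(num_outputs, target="classes"):
  """Map shorthand `num_outputs` forms to {data_key: [dim, ndim]}."""
  if not isinstance(num_outputs, dict):
    num_outputs = {target: num_outputs}  # bare value counts for the target key
  out = {}
  for key, dims in num_outputs.items():
    if isinstance(dims, int):
      dims = [dims, 1]  # a bare int means sparse data: ndim=1 (without batch-dim)
    out[key] = list(dims)
  return out


assert _normalize_num_outputs(5000) == {"classes": [5000, 1]}
assert _normalize_num_outputs({"data": [40, 2], "classes": 5000}) == \
  {"data": [40, 2], "classes": [5000, 1]}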


def num_inputs_outputs_from_config(cls, config):
  """
  :type config: Config.Config
  :rtype: (int,dict[str,(int,int)])
  """
  num_inputs = config.int('num_inputs', 0)
  target = config.value('target', 'classes')
  # Explicit specification in the config takes precedence.
  if config.is_typed('num_outputs'):
    num_outputs = config.typed_value('num_outputs')
    if not isinstance(num_outputs, dict):
      num_outputs = {target: num_outputs}
    num_outputs = num_outputs.copy()
    from Dataset import convert_data_dims
    num_outputs = convert_data_dims(num_outputs)
    if "data" in num_outputs:
      num_inputs = num_outputs["data"][0]
  elif config.has('num_outputs'):
    num_outputs = {target: [config.int('num_outputs', 0), 1]}
  else:
    num_outputs = None
  # Otherwise derive the dims from the train HDF file, if given as a plain filename.
  if not config.is_typed('num_outputs') and config.list('train') and ":" not in config.value('train', ''):
    try:
      _num_inputs = hdf5_dimension(config.list('train')[0], 'inputCodeSize') * config.int('window', 1)
    except Exception:  # fall back to the alternative attribute name
      _num_inputs = hdf5_dimension(config.list('train')[0], 'inputPattSize') * config.int('window', 1)
    try:
      _num_outputs = {target: [hdf5_dimension(config.list('train')[0], 'numLabels'), 1]}
    except Exception:  # no global numLabels: read per-target sizes instead
      _num_outputs = hdf5_group(config.list('train')[0], 'targets/size')
      for k in _num_outputs:
        _num_outputs[k] = [_num_outputs[k], len(hdf5_shape(config.list('train')[0], 'targets/data/' + k))]
    if num_inputs:
      assert num_inputs == _num_inputs
    if num_outputs:
      assert num_outputs == _num_outputs
    num_inputs = _num_inputs
    num_outputs = _num_outputs
  assert num_inputs and num_outputs, "provide num_inputs/num_outputs directly or via train"
  loss = cls.loss_from_config(config)
  #if loss in ('ctc', 'ce_ctc') or config.bool('add_blank', False):
  #  for k in num_outputs:
  #    num_outputs[k][0] += 1  # add blank
  return num_inputs, num_outputs
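

# How the HDF fallback above gets its numbers, sketched directly with h5py.
# Assumptions (hedged): hdf5_dimension reads a root attribute, hdf5_group reads
# a group's attributes as a dict, and hdf5_shape reads a dataset's shape; only
# the 'inputPattSize' fallback path is shown. The helper name is hypothetical.
import h5py


def _inspect_hdf(filename, window=1):
  """Guess (num_inputs, num_outputs) from a train HDF file."""
  with h5py.File(filename, "r") as f:
    num_inputs = int(f.attrs["inputPattSize"]) * window
    if "targets" in f:
      # one [dim, ndim] entry per target key, ndim taken from the stored data shape
      num_outputs = {
        k: [int(v), len(f["targets/data/" + k].shape)]
        for k, v in f["targets/size"].attrs.items()}
    else:
      num_outputs = {"classes": [int(f.attrs["numLabels"]), 1]}
  return num_inputs, num_outputs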


def num_inputs_outputs_from_config(cls, config):
  """
  :type config: Config.Config
  :returns (num_inputs, num_outputs),
     where num_inputs is like num_outputs["data"][0],
     and num_outputs is a dict of data_key -> (dim, ndim),
       where data_key is e.g. "classes" or "data",
       dim is the feature dimension or the number of classes,
       and ndim is the ndim counted without batch-dim,
       i.e. ndim=1 means usually sparse data and ndim=2 means dense data.
  :rtype: (int,dict[str,(int,int)])
  """
  from Util import BackendEngine
  num_inputs = config.int('num_inputs', 0)
  target = config.value('target', 'classes')
  # Explicit specification in the config takes precedence.
  if config.is_typed('num_outputs'):
    num_outputs = config.typed_value('num_outputs')
    if not isinstance(num_outputs, dict):
      num_outputs = {target: num_outputs}
    num_outputs = num_outputs.copy()
    from Dataset import convert_data_dims
    num_outputs = convert_data_dims(num_outputs, leave_dict_as_is=BackendEngine.is_tensorflow_selected())
    if "data" in num_outputs:
      # "data" may be given as [dim, ndim] or as a dict with "dim" or "shape".
      num_inputs = num_outputs["data"]
      if isinstance(num_inputs, (list, tuple)):
        num_inputs = num_inputs[0]
      elif isinstance(num_inputs, dict):
        if "dim" in num_inputs:
          num_inputs = num_inputs["dim"]
        else:
          num_inputs = num_inputs["shape"][-1]
      else:
        raise TypeError("data key %r" % num_inputs)
  elif config.has('num_outputs'):
    num_outputs = {target: [config.int('num_outputs', 0), 1]}
  else:
    num_outputs = None
  dataset = None
  if config.list('train') and ":" not in config.value('train', ''):
    dataset = config.list('train')[0]
  # Otherwise derive the dims from the train HDF file, if given as a plain filename.
  if not config.is_typed('num_outputs') and dataset:
    # noinspection PyBroadException
    try:
      _num_inputs = hdf5_dimension(dataset, 'inputCodeSize') * config.int('window', 1)
    except Exception:  # fall back to the alternative attribute name
      _num_inputs = hdf5_dimension(dataset, 'inputPattSize') * config.int('window', 1)
    # noinspection PyBroadException
    try:
      _num_outputs = {target: [hdf5_dimension(dataset, 'numLabels'), 1]}
    except Exception:  # no global numLabels: read per-target sizes instead
      _num_outputs = hdf5_group(dataset, 'targets/size')
      for k in _num_outputs:
        _num_outputs[k] = [_num_outputs[k], len(hdf5_shape(dataset, 'targets/data/' + k))]
    if num_inputs:
      assert num_inputs == _num_inputs
    if num_outputs:
      assert num_outputs == _num_outputs
    num_inputs = _num_inputs
    num_outputs = _num_outputs
  # As a last resort (Theano backend only), read the dims from an existing model file.
  if not num_inputs and not num_outputs and config.has("load") and BackendEngine.is_theano_selected():
    from Network import LayerNetwork
    import h5py
    model = h5py.File(config.value("load", ""), "r")
    # noinspection PyProtectedMember
    num_inputs, num_outputs = LayerNetwork._n_in_out_from_hdf_model(model)
  assert num_inputs and num_outputs, "provide num_inputs/num_outputs directly or via train"
  return num_inputs, num_outputs
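

# Hypothetical end-to-end call, assuming an (older, flat-module) RETURNN
# checkout on sys.path so the Config/Dataset/Util imports above resolve, and
# that this classmethod lives on Engine as in that codebase.
from Config import Config
from Engine import Engine

config = Config()
config.update({
  "num_inputs": 40,                       # dense input features
  "num_outputs": {"classes": [5000, 1]},  # sparse targets: 5000 classes, ndim=1
  "target": "classes",
})
num_inputs, num_outputs = Engine.num_inputs_outputs_from_config(config)
# Expected, if Config.update stores these as typed values:
# num_inputs == 40, num_outputs == {"classes": [5000, 1]}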