def init_network_from_config(self, config):
  self.pretrain = pretrainFromConfig(config)
  self.max_seqs = config.int('max_seqs', -1)
  self.compression = config.bool('compression', False)
  epoch, model_epoch_filename = self.get_epoch_model(config)
  assert model_epoch_filename or self.start_epoch
  if model_epoch_filename:
    print("loading weights from", model_epoch_filename, file=log.v2)
    last_model_hdf = h5py.File(model_epoch_filename, "r")
  else:
    last_model_hdf = None
  if config.bool('initialize_from_model', False):
    # That's only about the topology, not the params.
    print("initializing network topology from model", file=log.v5)
    assert last_model_hdf, "last model not specified. use 'load' in config. or don't use 'initialize_from_model'"
    network = LayerNetwork.from_hdf_model_topology(last_model_hdf)
  else:
    if self.pretrain:
      # This would be obsolete if we don't want to load an existing model.
      # In self.init_train_epoch(), we initialize a new model.
      network = self.pretrain.get_network_for_epoch(epoch or self.start_epoch)
    else:
      network = LayerNetwork.from_config_topology(config)
  # We have the parameters randomly initialized at this point.
  # In training, as an initialization, we can copy over the params of an imported model,
  # where our topology might slightly differ from the imported model.
  if config.value('import_model_train_epoch1', '') and self.start_epoch == 1:
    assert last_model_hdf
    old_network = LayerNetwork.from_hdf_model_topology(last_model_hdf)
    old_network.load_hdf(last_model_hdf)
    last_model_hdf.close()
    # Copy params to new network.
    from NetworkCopyUtils import intelli_copy_layer
    # network.hidden are the input + all hidden layers.
    for layer_name, layer in sorted(old_network.hidden.items()):
      print("Copy hidden layer %s" % layer_name, file=log.v3)
      intelli_copy_layer(layer, network.hidden[layer_name])
    for layer_name, layer in sorted(old_network.output.items()):
      print("Copy output layer %s" % layer_name, file=log.v3)
      intelli_copy_layer(layer, network.output[layer_name])
    print("Not copied hidden: %s" % sorted(set(network.hidden.keys()).difference(old_network.hidden.keys())), file=log.v3)
    print("Not copied output: %s" % sorted(set(network.output.keys()).difference(old_network.output.keys())), file=log.v3)
  # Maybe load existing model parameters.
  elif last_model_hdf:
    network.load_hdf(last_model_hdf)
    last_model_hdf.close()
  EngineUtil.maybe_subtract_priors(network, self.train_data, config)
  self.network = network
  if config.has('dump_json'):
    self.network_dump_json(config.value('dump_json', ''))
  self.print_network_info()
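# A minimal, hypothetical config fragment naming the keys that
# init_network_from_config() above consults. The key names are taken directly
# from the config reads in that function; the values and the model path are
# made up for illustration.
load = "net-model/network.042"     # existing model; opened as last_model_hdf
initialize_from_model = False      # if True, take the topology from that model, not the params
# import_model_train_epoch1 = "net-model/import.010"  # if non-empty and start_epoch == 1,
#                                                     # params are copied layer by layer
max_seqs = -1
compression = False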
def saveCrnnNetwork(epoch, layers):
  """
  :type epoch: int
  :type layers: list[(numpy.ndarray, numpy.ndarray)]
  """
  print("Loading Crnn")
  from Network import LayerNetwork
  from NetworkHiddenLayer import ForwardLayer
  from NetworkOutputLayer import OutputLayer
  from Pretrain import pretrain_from_config
  from EngineBase import EngineBase
  pretrain = pretrain_from_config(config)
  is_pretrain_epoch = pretrain and epoch <= pretrain.get_train_num_epochs()
  modelFilename = config.value("model", None)
  assert modelFilename, "need 'model' in config"
  filename = EngineBase.epoch_model_filename(modelFilename, epoch, is_pretrain_epoch)
  assert not os.path.exists(filename), "already exists"
  if is_pretrain_epoch:
    network = pretrain.get_network_for_epoch(epoch)
  else:
    network = LayerNetwork.from_config_topology(config)
  nHiddenLayers = len(network.hidden)
  # Print the network topology.
  print("Crnn Network layer topology:")
  print("input dim:", network.n_in)
  print("hidden layer count:", nHiddenLayers)
  print("output dim:", network.n_out["classes"])
  print("net weights #:", network.num_params())
  print("net params:", network.train_params_vars)
  print("net output:", network.output["output"])
  assert network.n_in == inputDim
  #assert network.n_out == outputDim
  assert nHiddenLayers + 1 == layerCount  # hidden + output layer
  assert len(layers) == layerCount
  for i, (layerName, hidden) in enumerate(sorted(network.hidden.items())):
    # Some checks whether this is a forward-layer.
    assert isinstance(hidden, ForwardLayer)
    saveCrnnLayer(hidden, *layers[i])
  assert isinstance(network.output["output"], OutputLayer)
  saveCrnnLayer(network.output["output"], *layers[-1])
  import h5py
  print("Save Crnn model under %s" % filename)
  model = h5py.File(filename, "w")
  network.save_hdf(model, epoch)
  model.close()
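# A hypothetical call of saveCrnnNetwork(), as a sketch only: the module-level
# globals it relies on (config, inputDim, layerCount, saveCrnnLayer) are assumed
# to be set up by the enclosing script, and the array shapes here are made up.
import numpy
layers = [(numpy.zeros((512, 512), dtype="float32"),   # weights for one layer
           numpy.zeros((512,), dtype="float32"))       # bias for that layer
          for _ in range(layerCount)]                  # one pair per layer, incl. output
saveCrnnNetwork(epoch=1, layers=layers)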
def test_config1_to_json_network_copy():
  config = Config()
  config.update(config1_dict)
  orig_network = LayerNetwork.from_config_topology(config)
  orig_json_content = orig_network.to_json_content()
  pprint(orig_json_content)
  new_network = LayerNetwork.from_json(orig_json_content, orig_network.n_in, orig_network.n_out)
  assert_equal(orig_network.n_in, new_network.n_in)
  assert_equal(orig_network.n_out, new_network.n_out)
  new_json_content = new_network.to_json_content()
  if orig_json_content != new_json_content:
    print(dict_diff_str(orig_json_content, new_json_content))
  assert_equal(orig_json_content, new_json_content)
def test_network_config1_init():
  config = Config()
  config.update(config1_dict)
  network = LayerNetwork.from_config_topology(config)
  assert_in("hidden_0", network.hidden)
  assert_in("hidden_1", network.hidden)
  assert_equal(len(network.hidden), 2)
  assert_is_instance(network.hidden["hidden_0"], ForwardLayer)
  assert_equal(network.hidden["hidden_0"].layer_class, "hidden")
  assert_false(network.recurrent)
  json_content = network.to_json_content()
  pprint(json_content)
  assert_in("hidden_0", json_content)
  assert_equal(json_content["hidden_0"]["class"], "hidden")
  assert_in("hidden_1", json_content)
  assert_in("output", json_content)
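# The assertions above pin down part of the JSON shape that to_json_content()
# produces for config1: keys "hidden_0", "hidden_1", "output", with
# "hidden_0" of class "hidden". A hypothetical minimal instance of that shape
# (the "from" lists and the output class are assumptions, not asserted by the
# test; layer params omitted) might look like this:
json_content_sketch = {
  "hidden_0": {"class": "hidden"},
  "hidden_1": {"class": "hidden", "from": ["hidden_0"]},
  "output": {"class": "softmax", "from": ["hidden_1"]},
}
# Per test_config1_to_json_network_copy() above, such content can be fed back
# via LayerNetwork.from_json(json_content_sketch, n_in, n_out).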
def test_config2_bidirect_lstm():
  config = Config()
  config.update(config2_dict)
  desc = LayerNetworkDescription.from_config(config)
  assert_true(desc.bidirectional)
  network = LayerNetwork.from_config_topology(config)
  net_json = network.to_json_content()
  pprint(net_json)
  assert_in("output", net_json)
  assert_in("hidden_0_fw", net_json)
  assert_in("hidden_0_bw", net_json)
  assert_in("hidden_1_fw", net_json)
  assert_in("hidden_1_bw", net_json)
  assert_in("hidden_2_fw", net_json)
  assert_in("hidden_2_bw", net_json)
  assert_equal(net_json["output"]["from"], ["hidden_2_fw", "hidden_2_bw"])
  assert_equal(len(net_json), 7)
def main(argv):
  argparser = argparse.ArgumentParser(description='Dump network as JSON.')
  argparser.add_argument('crnn_config_file')
  argparser.add_argument('--epoch', default=1, type=int)
  argparser.add_argument('--out', default="/dev/stdout")
  args = argparser.parse_args(argv[1:])
  init(configFilename=args.crnn_config_file, commandLineOptions=[])
  pretrain = pretrainFromConfig(config)
  if pretrain:
    network = pretrain.get_network_for_epoch(args.epoch)
  else:
    network = LayerNetwork.from_config_topology(config)
  json_data = network.to_json_content()
  with open(args.out, 'w') as f:
    print(json.dumps(json_data, indent=2, sort_keys=True), file=f)
  rnn.finalize()
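# main() takes the full argv and skips argv[0], so it can be driven directly
# from the command line. A hypothetical invocation (script and config file
# names made up):
#   python dump-network-json.py my-setup.config --epoch 5 --out network.json
if __name__ == "__main__":
  import sys
  main(sys.argv)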