def init(config_filename, cmd_line_opts, dataset_config_str):
    """
    Initialize RETURNN and resolve the source dataset to dump.

    :param str config_filename: global config for CRNN
    :param list[str] cmd_line_opts: options for initConfig method
    :param str dataset_config_str: dataset via init_dataset_via_str()
    :return: the dataset to dump — train data from the config, or one built from the string
    :rtype: Dataset
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    if not config_filename:
        # No config file: just set up basic logging.
        log.initialize(verbosity=[5])
    else:
        rnn.initConfig(config_filename, cmd_line_opts)
        rnn.initLog()
    print("Returnn hdf_dump starting up.", file=log.v3)
    rnn.initFaulthandler()
    if not config_filename:
        assert dataset_config_str
        src_dataset = init_dataset_via_str(dataset_config_str)
        print("Source dataset:", src_dataset.len_info(), file=log.v3)
        return src_dataset
    rnn.initData()
    rnn.printTaskProperties()
    assert isinstance(rnn.train_data, Dataset)
    return rnn.train_data
def init(config_str, verbosity):
    """
    Initialize RETURNN from a config file, an inline dataset dict, or an HDF file path.

    :param str config_str: either filename to config-file, or dict for dataset
    :param int verbosity: log verbosity level stored into the config
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    dataset_dict = None
    config_file = None
    stripped = config_str.strip()
    if stripped.startswith("{"):
        print("Using dataset %s." % config_str)
        # NOTE(review): eval of a user-provided string; acceptable for a CLI tool,
        # but never feed untrusted input through here.
        dataset_dict = eval(stripped)
    elif config_str.endswith(".hdf"):
        # Bare HDF file path: wrap it in an HDFDataset spec.
        dataset_dict = {"class": "HDFDataset", "files": [config_str]}
        print("Using dataset %r." % dataset_dict)
        assert os.path.exists(config_str)
    else:
        config_file = config_str
        print("Using config file %r." % config_file)
        assert os.path.exists(config_file)
    rnn.initConfig(configFilename=config_file, default_config={"cache_size": "0"})
    global config
    config = rnn.config
    config.set("log", None)
    config.set("log_verbosity", verbosity)
    if dataset_dict:
        config.set("train", dataset_dict)
    rnn.initLog()
    print("Returnn dump-dataset starting up.", file=log.v2)
    rnn.returnnGreeting()
    rnn.initFaulthandler()
    rnn.initConfigJsonNetwork()
    rnn.initData()
    rnn.printTaskProperties()
def init(config_filename, cmd_line_opts, dataset_config_str):
    """
    Initialize RETURNN and return the source dataset.

    :param str config_filename: global config for CRNN
    :param list[str] cmd_line_opts: options for initConfig method
    :param str dataset_config_str: dataset via init_dataset_via_str()
    :return: train data from the config, or the dataset built from the string spec
    :rtype: Dataset
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    if config_filename:
        rnn.initConfig(config_filename, cmd_line_opts)
        rnn.initLog()
    else:
        # No config: only basic logging is available.
        log.initialize(verbosity=[5])
    # Fixed: Python 2 "print >> f, ..." statement (a syntax error under Python 3)
    # replaced by the print function, consistent with the sibling init variants.
    print("CRNN dump-dataset starting up.", file=log.v3)
    rnn.initFaulthandler()
    rnn.initConfigJsonNetwork()
    if config_filename:
        rnn.initData()
        rnn.printTaskProperties()
        assert isinstance(rnn.train_data, Dataset)
        return rnn.train_data
    else:
        assert dataset_config_str
        dataset = init_dataset_via_str(dataset_config_str)
        print("Source dataset:", dataset.len_info(), file=log.v3)
        return dataset
def init(config_str):
    """
    Initialize RETURNN from either a config file or an inline dataset dict.

    :param str config_str: either filename to config-file, or dict for dataset
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    dataset_dict = None
    config_file = None
    if config_str.startswith("{"):
        print("Using dataset %s." % config_str)
        # NOTE(review): eval of a CLI-provided string; do not feed untrusted input.
        dataset_dict = eval(config_str)
    else:
        config_file = config_str
        print("Using config file %r." % config_file)
        assert os.path.exists(config_file)
    rnn.initConfig(configFilename=config_file, commandLineOptions=[])
    global config
    config = rnn.config
    config.set("log", None)
    if dataset_dict:
        config.set("train", dataset_dict)
    rnn.initLog()
    print("CRNN dump-dataset starting up.", file=log.v1)
    rnn.initFaulthandler()
    rnn.initConfigJsonNetwork()
    rnn.initData()
    rnn.printTaskProperties()
def init(configFilename, commandLineOptions):
    """
    Initialize RETURNN from the given config file.

    :param str configFilename: path to the CRNN config file
    :param list[str] commandLineOptions: options forwarded to rnn.initConfig
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    rnn.initConfig(configFilename, commandLineOptions)
    global config
    config = rnn.config
    rnn.initLog()
    # Fixed: Python 2 "print >> f, ..." statement (a syntax error under Python 3)
    # replaced by the print function, consistent with the sibling init variants.
    print("CRNN demo-dataset starting up", file=log.v3)
    rnn.initFaulthandler()
    rnn.initConfigJsonNetwork()
    rnn.initData()
    rnn.printTaskProperties()
def test_rnn_initData():
    """rnn.initData with cache_size "0" must yield HDF train/dev datasets with caching disabled."""
    hdf_fn = generate_hdf_from_dummy()
    from Config import Config
    import rnn
    rnn.config = Config({"cache_size": "0", "train": hdf_fn, "dev": hdf_fn})
    rnn.initData()
    for ds in (rnn.train_data, rnn.dev_data):
        assert ds
        assert isinstance(ds, HDFDataset)
        # cache_size "0" must propagate into both cache limits.
        assert ds.cache_byte_size_total_limit == 0
        assert ds.cache_byte_size_limit_at_start == 0
def init(configFilename, commandLineOptions):
    """
    Initialize RETURNN from the given config file.

    :param str configFilename: path to the CRNN config file
    :param list[str] commandLineOptions: options forwarded to rnn.initConfig
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    rnn.initConfig(configFilename, commandLineOptions)
    global config
    config = rnn.config
    # NOTE(review): clearing "log" before initLog presumably redirects logging
    # away from any configured log file — confirm against rnn.initLog.
    config.set("log", None)
    rnn.initLog()
    print("CRNN dump-dataset starting up.", file=log.v1)
    rnn.initFaulthandler()
    rnn.initConfigJsonNetwork()
    rnn.initData()
    rnn.printTaskProperties()
def init(configFilename=None):
    """
    Initialize RETURNN, with or without a config file.

    :param str|None configFilename: optional CRNN config; without it only basic logging is set up
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    if not configFilename:
        log.initialize()
    else:
        rnn.initConfig(configFilename, commandLineOptions=[])
        rnn.initLog()
    print("CRNN collect-orth-symbols starting up.", file=log.v3)
    rnn.initFaulthandler()
    if configFilename:
        # Full data setup only makes sense with a config.
        rnn.initConfigJsonNetwork()
        rnn.initData()
        rnn.printTaskProperties()
def init(configFilename=None):
    """
    Initialize RETURNN, optionally from a config file.

    :param str|None configFilename: optional CRNN config; without it only basic logging is set up
    """
    rnn.initBetterExchook()
    rnn.initThreadJoinHack()
    if configFilename:
        rnn.initConfig(configFilename, commandLineOptions=[])
        rnn.initLog()
    else:
        log.initialize()
    # Fixed: Python 2 "print >> f, ..." statement (a syntax error under Python 3)
    # replaced by the print function, matching the otherwise-identical sibling variant.
    print("CRNN collect-orth-symbols starting up.", file=log.v3)
    rnn.initFaulthandler()
    if configFilename:
        rnn.initConfigJsonNetwork()
        rnn.initData()
        rnn.printTaskProperties()