def test_demo_returnn_as_framework(self):
  print("Prepare.")
  import subprocess
  import shutil
  from glob import glob
  from Util import get_login_username
  # echo via subprocess, because this stdout as well as the other will always be visible.
  subprocess.check_call(["echo", "travis_fold:start:test_demo_returnn_as_framework"])
  assert os.path.exists("setup.py")
  if glob("dist/*.tar.gz"):
    # we want it unique below
    for fn in glob("dist/*.tar.gz"):
      os.remove(fn)
  if os.path.exists("MANIFEST"):
    os.remove("MANIFEST")  # auto-generated. will be recreated
  if os.path.exists("docs/crnn"):
    os.remove("docs/crnn")  # this is auto-generated, and confuses setup.py sdist
  tmp_model_dir = "/tmp/%s/returnn-demo-as-framework" % get_login_username()
  if os.path.exists(tmp_model_dir):
    shutil.rmtree(tmp_model_dir, ignore_errors=True)
  print("setup.py sdist, to create package.")
  subprocess.check_call([py, "setup.py", "sdist"])
  dist_fns = glob("dist/*.tar.gz")
  assert len(dist_fns) == 1
  dist_fn = os.path.abspath(dist_fns[0])
  pip_path = which_pip()
  print("Pip install Returnn.")
  in_virtual_env = hasattr(sys, 'real_prefix')  # https://stackoverflow.com/questions/1871549/
  cmd = [py, pip_path, "install"]
  if not in_virtual_env:
    cmd += ["--user"]
  cmd += ["-v", dist_fn]
  print("$ %s" % " ".join(cmd))
  subprocess.check_call(cmd, cwd="/")
  print("Running demo now.")
  subprocess.check_call([py, "demo-returnn-as-framework.py"], cwd="demos")
  print("Success.")
  subprocess.check_call(["echo", "travis_fold:end:test_demo_returnn_as_framework"])
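# The test above assumes a few module-level helpers that are outside this
# excerpt (`py` and `which_pip`). A minimal sketch of what they could look
# like, as an assumption -- hypothetical definitions, not copied from the
# actual test module:
import sys
import shutil

py = sys.executable  # interpreter used for all spawned subprocesses

def which_pip():
  """Find a pip executable, preferring pip3; raise if none is on PATH."""
  for name in ("pip3", "pip"):
    path = shutil.which(name)
    if path:
      return path
  raise RuntimeError("no pip executable found")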
  batch_size=5000,
  max_seqs=10,
  chunking="0",
  network={
    "fw0": {"class": "rec", "unit": "NativeLstm2", "dropout": 0.1, "n_out": 10},
    "output": {"class": "softmax", "loss": "ce", "from": ["fw0"]}
  },
  # training
  nadam=True,
  learning_rate=0.01,
  num_epochs=100,
  debug_add_check_numerics_ops=True,
  model="/tmp/%s/returnn-demo-as-framework/model" % get_login_username(),
  cleanup_old_models=True,
  learning_rate_control="newbob_multi_epoch",
  learning_rate_control_relative_error_relative_lr=True,
  newbob_multi_num_epochs=3,
  newbob_multi_update_interval=1,
  newbob_learning_rate_decay=0.9,
  learning_rate_file="/tmp/%s/returnn-demo-as-framework/newbob.data" % get_login_username(),
  # log
  log_verbosity=3
))

engine = Engine(config)

train_data = init_dataset({"class": "Task12AXDataset", "num_seqs": 1000, "name": "train"})
dev_data = init_dataset({"class": "Task12AXDataset", "num_seqs": 100, "name": "dev", "fixed_random_seed": 1})
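# Hedged continuation (an assumption, not part of the excerpt above): with the
# Engine and datasets set up, training is typically started via the RETURNN
# Engine entry points init_train_from_config() and train(); the exact keyword
# arguments here are a sketch, not copied from the demo script.
engine.init_train_from_config(config=config, train_data=train_data, dev_data=dev_data)
engine.train()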
network = TransformerNetwork(model_size=128, normalized_loss=True).build()
search_output_layer = "decision"

optimize_move_layers_out = True
debug_print_layer_output_template = True

# trainer
batching = "random"
batch_size = 1000
max_seqs = 5  # TODO: 100
chunking = "0"
truncation = -1
#gradient_clip = 10
#gradient_nan_inf_filter = True
adam = True
gradient_noise = 0.3
learning_rate = 0.0005
learning_rate_control = "newbob"
learning_rate_control_relative_error_relative_lr = True
#model = "./test/transformer-hmm"
model = "/tmp/%s/crnn/%s/model" % (get_login_username(), demo_name)  # https://github.com/tensorflow/tensorflow/issues/6537
num_epochs = 100
save_interval = 1

debug_mode = True
debug_add_check_numerics_ops = True

# log
log_verbosity = 5
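# The config above references get_login_username and demo_name without defining
# them; the full config file would set them up earlier. A hedged sketch of such
# a preamble (the value of demo_name is illustrative, not copied from the
# actual config):
from Util import get_login_username  # same RETURNN utility the test above uses

demo_name = "demo-transformer"  # hypothetical run name; keys the /tmp/<user>/crnn/<name> model path
# A RETURNN config is executed (exec'd) by the rnn.py entry point rather than
# imported, so the plain module-level assignments above become config entries:
#   python rnn.py <this-config-file>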