def init(config_filename, log_verbosity, remaining_args=()):
    """
    Initialize RETURNN for a TensorFlow "search" task from the given config file.

    Performs the standard RETURNN startup sequence: debugging hooks, config
    loading (with search-specific overrides), logging, greeting, and backend
    selection. Must be called before any other RETURNN functionality is used.
    Sets the module-level ``config`` global as a side effect.

    :param str config_filename: filename to config-file; must exist on disk
    :param int log_verbosity: verbosity level passed to RETURNN logging
    :param list[str] remaining_args: extra command-line options forwarded to
        the RETURNN config parser
    """
    # Install debugging helpers as early as possible so later init errors
    # already get readable tracebacks.
    rnn.init_better_exchook()
    rnn.init_thread_join_hack()
    print("Using config file %r." % config_filename)
    assert os.path.exists(config_filename)
    # extra_updates override whatever the config file says: force the TF
    # backend and the "search" task; "log": None keeps logging on stdout.
    rnn.init_config(
        config_filename=config_filename, command_line_options=remaining_args,
        extra_updates={
            "use_tensorflow": True,
            "log": None,
            "log_verbosity": log_verbosity,
            "task": "search",
        },
        default_config={
            # Only a default, i.e. the config file may still disable it.
            "debug_print_layer_output_template": True,
        })
    # Expose the parsed config at module level for the rest of this script.
    global config
    config = rnn.config
    rnn.init_log()
    print("Returnn %s starting up." % os.path.basename(__file__), file=log.v1)
    rnn.returnn_greeting()
    rnn.init_backend_engine()
    # This script only supports the TF backend (enforced above via extra_updates).
    assert Util.BackendEngine.is_tensorflow_selected(), "this is only for TensorFlow"
    rnn.init_faulthandler()
    better_exchook.replace_traceback_format_tb()  # makes some debugging easier
    rnn.init_config_json_network()
def setup():
    """
    Perform the one-time setup this script needs before doing anything else.

    Installs better_exchook as the global exception hook, patches the
    traceback formatting, and triggers the import-time side effects of
    ``_setup_sleepy_env``.
    """
    # Readable tracebacks for uncaught exceptions, and nicer formatting
    # for explicitly formatted ones.
    better_exchook.install()
    better_exchook.replace_traceback_format_tb()
    # Imported purely for its side effects; the module name itself is unused.
    __import__("_setup_sleepy_env")
from __future__ import print_function import sys sys.path += ["."] # Python 3 hack from subprocess import Popen, PIPE, STDOUT, CalledProcessError import re import os import sys from glob import glob from nose.tools import assert_less, assert_in import better_exchook better_exchook.replace_traceback_format_tb() py = sys.executable print("Python:", py) def which_pip(): from Util import which # Before we look anywhere in PATH, check if there is some pip alongside to the Python executable. # This might be more reliable. dir_name, basename = py.rsplit("/", 1) if basename.startswith("python"): postfix = basename[len("python"):] pip_path = "%s/pip%s" % (dir_name, postfix) if os.path.exists(pip_path): return pip_path # Generic fallback. pip_path = which("pip") return pip_path
def main():
    """
    Parses command line arguments and executes commands.

    Entry point of the sisyphus CLI: registers the recipe importer, builds the
    argument parser with its subcommands (manager/console/worker), configures
    logging and global settings, then dispatches to the selected subcommand.
    """
    # Setup recipe importer; RecipeFinder hooks recipe imports into Python's
    # import machinery.
    sys.meta_path.append(RecipeFinder)

    # Setup argument parser
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    parser.add_argument('--log_level', dest='log_level', metavar='LOG_LEVEL', type=int,
                        default=20,
                        help='log level, 10 for debug messages, 50 for only critical,'
                             ' default: 20, ')
    parser.add_argument('--config', dest='config_files', action='append',
                        default=[], help='config file for setup, defining which jobs to run')

    # Not fully supported at the moment
    # parser.add_argument('--settings', dest='settings_file',
    #                     default=gs.GLOBAL_SETTINGS_FILE_DEFAULT,
    #                     help='settings file, aka how to run the jobs')
    # parser.add_argument('-s', '--setting', dest='commandline_settings', action='append',
    #                     default=[], help='overwrite global settings directly via commandline')

    # "manager" subcommand: loads config files and runs the manager loop.
    parser_manager = subparsers.add_parser('manager', aliases=['m'],
                                           conflict_handler='resolve',
                                           help="Load config files and start manager loop")
    parser_manager.set_defaults(func=manager)
    parser_manager.add_argument("-r", dest="run", default=False, action='store_true',
                                help="Run the given task")
    parser_manager.add_argument("-co", dest="clear_once", action="store_true",
                                default=False,
                                help="Move jobs aside that are in an error "
                                     "state when the manager runs the first time")
    parser_manager.add_argument("--http", dest="http_port", default=None, type=int,
                                help="Enables http server, takes "
                                     "port as argument")
    parser_manager.add_argument("--fs", "--filesystem", dest="filesystem", default=None,
                                help="Start filesystem in given directory")
    parser_manager.add_argument('argv', metavar='ARGV', type=str, nargs='*',
                                help='an additional way do '
                                     'define config files')

    # "console" subcommand: interactive work on the sis graph.
    parser_console = subparsers.add_parser(
        'console', aliases=['c'],
        usage='sis console [-h] [--load LOAD_SIS_GRAPH] [ARGV [ARGV ...]]\n\n'
              'Open console to debug sisyphus graph or job',
        help="Start console to interactively work on sis graph. Things like: "
             "Rerunning tasks, cleaning up the work directory, and debugging "
             "is best done here")
    parser_console.add_argument("--load", dest="load", default=[], action='append',
                                help="load graph and start console")
    parser_console.add_argument("--skip_config", dest="not_load_config", default=False,
                                action='store_true',
                                help="do not load config files before starting the console")
    parser_console.add_argument("-c", dest="commands", default=[], action='append',
                                help="Run commands after loading console")
    parser_console.add_argument('argv', metavar='ARGV', type=str, nargs='*',
                                help='an additional way do define config files')
    parser_console.set_defaults(func=console)

    # worker subcommand: runs a single task of a job (used internally by the
    # manager/engine, not typically invoked by users directly).
    parser_worker = subparsers.add_parser(gs.CMD_WORKER,
                                          help='Start worker to compute job (for internally usage)')
    parser_worker.set_defaults(func=worker)
    parser_worker.add_argument('jobdir', metavar='JOBDIR', type=str,
                               help='Job directory of the executed function')
    parser_worker.add_argument('task_name', metavar='TASK_NAME', type=str,
                               help='Task name')
    parser_worker.add_argument('task_id', metavar='TASK_ID', type=int, nargs='?',
                               default=None,
                               help='Task id, if not set trying to '
                                    'read it from environment variables')
    parser_worker.add_argument('--force_resume', "--fr", default=False, action="store_true",
                               help='force resume of non resumable tasks, good for debugging')
    parser_worker.add_argument('--engine', default='short',
                               help='The engine running the Job')

    # Currently disabled parser, maybe used again in the future
    #
    # Disabled, since it's not used for now
    # parser_unittest = subparsers.add_parser('unittest', help='Run unittest tasks')
    # parser_unittest.set_defaults(func=unittest)
    # parser_unittest.add_argument("--do_not_run", dest="run", default=True, action='store_false',
    #                              help="Do not run the given task")
    # parser_unittest.add_argument('argv', metavar='ARGV', type=str, nargs='*',
    #                              help='an additional way do define config files')

    # Disabled since it's currently not working
    # parser_notebook = subparsers.add_parser('notebook',
    #                                         usage='sis notebook [-h]\n\n'
    #                                               'Open notebook session',
    #                                         help="Start notebook to interactively work on sis graph")
    # parser_notebook.add_argument("--load", dest="load", default=[], action='append',
    #                              help="load graph and start console")
    # parser_notebook.add_argument("--file", "-f", dest="filename", default='default',
    #                              help="load (and create if needed) this notebook file")
    # parser_notebook.set_defaults(func=helper.notebook)

    # Disabled since it's currently not working
    # parser_connect = subparsers.add_parser('connect', usage='sis connect [-h] [connect_file]\n\n'
    #                                        'Opens a console connected to given kernel',
    #                                        help='Opens a console connected with given kernel')
    # parser_connect.add_argument('argv', metavar='ARGV', type=str, nargs='?',
    #                             default=[], help='connection_file')
    # parser_connect.set_defaults(func=helper.connect)

    args = parser.parse_args()
    # No subcommand chosen: args has no "func" default, so show usage and exit.
    if not hasattr(args, 'func'):
        parser.print_help()
        return

    # add argv to config_files if manager or console is called
    if args.func in [manager, console]:
        args.config_files += args.argv

    # Setup logging
    logging.basicConfig(format='[%(asctime)s] %(levelname)s: %(message)s',
                        level=args.log_level)

    # Changing settings via commandline is currently not supported
    # Needs to ensure all parameters are passed correctly to worker, ignored since nobody requested it so far
    # update_global_settings_from_file(args.settings_file)
    # update_global_settings_from_list(args.commandline_settings)
    update_global_settings_from_file(gs.GLOBAL_SETTINGS_FILE_DEFAULT)

    # Optionally replace sys.excepthook with a more verbose traceback printer.
    if gs.USE_VERBOSE_TRACEBACK:
        if gs.VERBOSE_TRACEBACK_TYPE == "ipython":
            from IPython.core import ultratb
            sys.excepthook = ultratb.VerboseTB()
        elif gs.VERBOSE_TRACEBACK_TYPE == "better_exchook":
            # noinspection PyPackageRequirements
            import better_exchook
            better_exchook.install()
            better_exchook.replace_traceback_format_tb()
        else:
            raise Exception("invalid VERBOSE_TRACEBACK_TYPE %r" % gs.VERBOSE_TRACEBACK_TYPE)

    if gs.USE_SIGNAL_HANDLERS:
        from sisyphus.tools import maybe_install_signal_handers
        maybe_install_signal_handers()

    # Automatic job cleanup only makes sense in the manager loop.
    if args.func != manager:
        gs.JOB_AUTO_CLEANUP = False

    try:
        args.func(args)
    except BaseException as exc:
        # SystemExit is a normal way to leave; everything else is logged and
        # handed to the (possibly replaced) excepthook.
        if not isinstance(exc, SystemExit):
            logging.error("Main thread unhandled exception:")
            sys.excepthook(*sys.exc_info())

    # Warn about non-daemon threads that would keep the process alive after
    # the main thread returns.
    import threading
    non_daemon_threads = {
        thread for thread in threading.enumerate()
        if not thread.daemon and thread is not threading.main_thread()}
    if non_daemon_threads:
        logging.warning("Main thread exit. Still running non-daemon threads: %r" % non_daemon_threads)
import sys
sys.path += ["."]  # Python 3 hack
import numpy
import theano
import theano.scan_module.scan_op
from nose.tools import assert_equal, assert_is, assert_is_instance
import MultiBatchBeam
from MultiBatchBeam import *
import TheanoUtil
import theano.printing
from pprint import pprint
import better_exchook

better_exchook.replace_traceback_format_tb()
TheanoUtil.monkey_patches()

# Pure-Python reference implementation, used to cross-check the op below.
naive_multi_batch_beam = MultiBatchBeam._naive_multi_batch_beam


def numpy_multi_batch_beam(array, start_idxs, batch_lens, beam_width, wrap_mode,
                           pad_left=0, pad_right=0, idx_dim=0, batch_dim=1):
    """
    Run MultiBatchBeamOp on concrete (numpy-compatible) inputs and return the
    evaluated beam as a numpy array.

    Wraps the inputs as Theano tensors, applies the op, and immediately
    evaluates the symbolic result, so callers can treat this like a plain
    numpy function.

    NOTE(review): ``T`` and ``MultiBatchBeamOp`` come from the star import of
    MultiBatchBeam above (``T`` presumably being ``theano.tensor``) — confirm.

    :param array: input data, indexed by idx_dim and batch_dim
    :param start_idxs: per-batch beam start indices
    :param batch_lens: per-batch sequence lengths
    :param beam_width: width of the extracted beam
    :param wrap_mode: how out-of-range indices are handled (op-specific)
    :param pad_left: padding before the sequence start
    :param pad_right: padding after the sequence end
    :param idx_dim: axis of ``array`` holding the time/index dimension
    :param batch_dim: axis of ``array`` holding the batch dimension
    :return: evaluated beam as a numpy array
    """
    array = T.as_tensor(array)
    start_idxs = T.as_tensor(start_idxs)
    batch_lens = T.as_tensor(batch_lens)
    beam_width = T.as_tensor(beam_width)
    op = MultiBatchBeamOp(wrap_mode, idx_dim, batch_dim)
    beam = op(array, start_idxs, batch_lens, beam_width, pad_left, pad_right)
    # eval() compiles and runs the tiny graph right away; fine for tests.
    return beam.eval()