def test_Updater_add_check_numerics_ops():
  """
  Checks that setting ``debug_add_check_numerics_ops`` in the config makes the
  optimizer op raise :class:`tf.errors.InvalidArgumentError` as soon as the
  loss becomes non-finite (here: ``log(0)`` after one gradient step).
  """
  class _Layer(DummyLayer):
    def _get_loss_value(self):
      return tf_compat.v1.log(self.x)

  from returnn.tf.network import TFNetwork, ExternData
  from returnn.config import Config

  with make_scope() as session:
    config = Config()
    config.set("debug_add_check_numerics_ops", True)
    network = TFNetwork(extern_data=ExternData(), train_flag=True)
    network.add_layer(name="output", layer_class=_Layer, initial_value=1.0)
    network.initialize_params(session=session)
    updater = Updater(config=config, network=network)
    updater.set_learning_rate(1.0, session=session)
    updater.set_trainable_vars(network.get_trainable_params())
    updater.init_optimizer_vars(session=session)
    # Should succeed.
    session.run(updater.get_optim_op())
    # One gradient descent step from ln(x), x = 1.0: gradient is 1.0 / x, thus x - 1.0 = 0.0.
    assert_almost_equal(
      session.run(network.get_default_output_layer().output.placeholder), 0.0)

    try:
      # Now, should fail.
      session.run(updater.get_optim_op())
    except tf.errors.InvalidArgumentError as exc:
      print("Expected exception: %r" % exc)
    else:
      assert False, "should have raised an exception"
def init_config(config_filename=None, command_line_options=(), default_config=None, extra_updates=None):
  """
  Initializes the global config.

  :param str|None config_filename:
  :param list[str]|tuple[str] command_line_options: e.g. ``sys.argv[1:]``
  :param dict[str]|None default_config:
  :param dict[str]|None extra_updates:

  There are multiple sources which are used to init the config:

  * ``configFilename``, and maybe first item of ``commandLineOptions`` interpret as config filename
  * other options via ``commandLineOptions``
  * ``extra_updates``

  Note about the order/priority of these:

  * ``extra_updates``
  * options from ``commandLineOptions``
  * ``configFilename``
  * config filename from ``commandLineOptions[0]``
  * ``extra_updates``
  * options from ``commandLineOptions``

  ``extra_updates`` and ``commandLineOptions`` are used twice so that they are available
  when the config is loaded, which thus has access to them, and can e.g. use them via Python code.
  However, the purpose is that they overwrite any option from the config;
  that is why we apply them again in the end.
  ``commandLineOptions`` is applied after ``extra_updates``
  so that the user has still the possibility to overwrite anything set by ``extra_updates``.
  """
  global config
  config = Config()

  config_filenames_by_cmd_line = []
  if command_line_options:
    # Assume that the first argument prefixed with "+" or "-" and all following is not a config file.
    # Collect the leading non-option args as config filenames, keep the rest as options.
    num_leading_files = 0
    for arg in command_line_options:
      if arg[:1] in "-+":  # note: also matches "" (empty string is in any string)
        break
      config_filenames_by_cmd_line.append(arg)
      num_leading_files += 1
    command_line_options = command_line_options[num_leading_files:]

  # First pass: make defaults, extra updates and options visible while config files load.
  if default_config:
    config.update(default_config)
  if extra_updates:
    config.update(extra_updates)
  if command_line_options:
    config.parse_cmd_args(command_line_options)
  if config_filename:
    config.load_file(config_filename)
  for cmd_line_config_fn in config_filenames_by_cmd_line:
    config.load_file(cmd_line_config_fn)
  # Second pass: re-apply so they overwrite anything set inside the config files.
  if extra_updates:
    config.update(extra_updates)
  if command_line_options:
    config.parse_cmd_args(command_line_options)

  # I really don't know where to put this otherwise:
  if config.bool("EnableAutoNumpySharedMemPickling", False):
    import returnn.util.task_system
    returnn.util.task_system.SharedMemNumpyConfig["enabled"] = True
  # Server default options
  if config.value('task', 'train') == 'server':
    config.set('num_inputs', 2)
    config.set('num_outputs', 1)