def parse_args(self, api_args, remove_opts=None):
    """Parse options and arguments, overrides OptionParser.parse_args.

    Args:
        api_args (list): Command line options if passed via Python as
            opposed to sys.argv
        remove_opts (list): List of standard options to remove before
            parsing.
    """
    if self.auto_add:
        # Common options are appended after any command-specific ones.
        self.add_std_options()

    for opt in (remove_opts or []):
        try:
            self.remove_option(opt)
        except ValueError:
            # Option was not registered on this parser; nothing to do.
            pass

    options, args = OptionParser.parse_args(self, api_args)

    # Enforce the declared argument count contract.
    n_min = self.n_compulsory_args
    n_max = n_min + self.n_optional_args
    if len(args) < n_min:
        self.error("Wrong number of arguments (too few)")
    elif not self.unlimited_args and len(args) > n_max:
        self.error("Wrong number of arguments (too many)")

    if self.jset and options.templatevars_file:
        # Normalise the template variables file to an absolute path.
        options.templatevars_file = os.path.abspath(
            os.path.expanduser(options.templatevars_file))

    cylc.flow.flags.verbose = options.verbose
    cylc.flow.flags.debug = options.debug

    # Set up stream logging for CLI. Note:
    # 1. On choosing STDERR: Log messages are diagnostics, so STDERR is
    #    the better choice for the logging stream. This allows us to use
    #    STDOUT for verbosity agnostic outputs.
    # 2. Suite server programs will remove this handler when they become
    #    a daemon.
    if options.debug or options.verbose:
        LOG.setLevel(logging.DEBUG)
    else:
        LOG.setLevel(logging.INFO)
    RSYNC_LOG.setLevel(logging.INFO)
    # Remove the NullHandler before adding the StreamHandler.
    while LOG.handlers:
        LOG.handlers[0].close()
        LOG.removeHandler(LOG.handlers[0])
    errhandler = logging.StreamHandler(sys.stderr)
    errhandler.setFormatter(
        CylcLogFormatter(timestamp=options.log_timestamp))
    LOG.addHandler(errhandler)

    return (options, args)
def _open_logs(id_, no_detach):
    """Open Cylc log handlers for a flow run."""
    if not no_detach:
        # Detaching: close and discard any handlers inherited from the CLI.
        while LOG.handlers:
            LOG.handlers[0].close()
            LOG.removeHandler(LOG.handlers[0])
    LOG.addHandler(
        TimestampRotatingFileHandler(
            get_workflow_run_log_name(id_), no_detach))
    # Route file installation (rsync) messages to their own log file.
    RSYNC_LOG.addHandler(
        TimestampRotatingFileHandler(
            get_workflow_file_install_log_name(id_), no_detach))
def test_value_error_raises_system_exit(
        self,
        mocked_glbl_cfg,
        mocked_get_suite_run_log_name,
):
    """Test that a ValueError when writing to a log stream won't result
    in multiple exceptions (which could lead to an infinite loop in some
    occasions). Instead, it **must** raise a SystemExit."""
    with tempfile.NamedTemporaryFile() as tf:
        # mock objects used when creating the file handler
        mocked = mock.MagicMock()
        mocked_glbl_cfg.return_value = mocked
        mocked.get.return_value = 100
        mocked_get_suite_run_log_name.return_value = tf.name
        file_handler = TimestampRotatingFileHandler("suiteA", False)
        # next line is important as pytest can have a "Bad file descriptor"
        # due to a FileHandler with default "a" (pytest tries to r/w).
        file_handler.mode = "a+"

        # enable the logger
        LOG.setLevel(logging.INFO)
        LOG.addHandler(file_handler)

        # Disable raising uncaught exceptions in logging, due to file
        # handler using stdin.fileno. See the following links for more.
        # https://github.com/pytest-dev/pytest/issues/2276 &
        # https://github.com/pytest-dev/pytest/issues/1585
        logging.raiseExceptions = False

        # first message will initialize the stream and the handler
        LOG.info("What could go")

        # here we change the stream of the handler so that the next
        # emit() hits a ValueError when seeking
        old_stream = file_handler.stream
        file_handler.stream = mock.MagicMock()
        file_handler.stream.seek = mock.MagicMock()
        file_handler.stream.seek.side_effect = ValueError

        try:
            # next call will call the emit method and use the mocked stream
            LOG.info("wrong?!")
            # BUG FIX: the failure message previously said "SystemError"
            # but the exception this test expects is SystemExit.
            self.fail("Exception SystemExit was not raised")
        except SystemExit:
            pass
        finally:
            # clean up: restore the real stream before closing so the
            # handler can release its file resources
            file_handler.stream = old_stream
            file_handler.close()
            LOG.removeHandler(file_handler)
            logging.raiseExceptions = True
def _open_logs(reg, no_detach):
    """Open Cylc log handlers for a flow run."""
    if not no_detach:
        # Detaching: close and discard any handlers inherited from the CLI.
        while LOG.handlers:
            LOG.handlers[0].close()
            LOG.removeHandler(LOG.handlers[0])
    LOG.addHandler(
        TimestampRotatingFileHandler(
            get_suite_run_log_name(reg), no_detach))
    # Route file installation (rsync) messages to their own log file.
    RSYNC_LOG.addHandler(
        TimestampRotatingFileHandler(
            get_suite_file_install_log_name(reg), no_detach))
def test_value_error_raises_system_exit(self, mocked_glbl_cfg):
    """Test that a ValueError when writing to a log stream won't result
    in multiple exceptions (which could lead to an infinite loop in some
    occasions). Instead, it **must** raise a SystemExit."""
    with tempfile.NamedTemporaryFile() as tf:
        # mock objects used when creating the file handler
        mocked = mock.MagicMock()
        mocked_glbl_cfg.return_value = mocked
        mocked.get_derived_host_item.return_value = tf.name
        mocked.get.return_value = 100
        file_handler = TimestampRotatingFileHandler("suiteA", False)
        # next line is important as pytest can have a "Bad file descriptor"
        # due to a FileHandler with default "a" (pytest tries to r/w).
        file_handler.mode = "a+"

        # enable the logger
        LOG.setLevel(logging.INFO)
        LOG.addHandler(file_handler)

        # Disable raising uncaught exceptions in logging, due to file
        # handler using stdin.fileno. See the following links for more.
        # https://github.com/pytest-dev/pytest/issues/2276 &
        # https://github.com/pytest-dev/pytest/issues/1585
        logging.raiseExceptions = False

        # first message will initialize the stream and the handler
        LOG.info("What could go")

        # here we change the stream of the handler so that the next
        # emit() hits a ValueError when seeking
        old_stream = file_handler.stream
        file_handler.stream = mock.MagicMock()
        file_handler.stream.seek = mock.MagicMock()
        file_handler.stream.seek.side_effect = ValueError

        try:
            # next call will call the emit method and use the mocked stream
            LOG.info("wrong?!")
            # BUG FIX: the failure message previously said "SystemError"
            # but the exception this test expects is SystemExit.
            self.fail("Exception SystemExit was not raised")
        except SystemExit:
            pass
        finally:
            # clean up: restore the real stream before closing so the
            # handler can release its file resources
            file_handler.stream = old_stream
            file_handler.close()
            LOG.removeHandler(file_handler)
            logging.raiseExceptions = True
def test_ioerror_is_ignored(self, mocked_get_suite_source_dir):
    """Test that IOError's are ignored when closing Scheduler logs.

    When a disk errors occurs, the scheduler.close_logs method may
    result in an IOError. This, combined with other variables, may cause
    an infinite loop. So it is better that it is ignored."""
    mocked_get_suite_source_dir.return_value = '.'
    scheduler = Scheduler(
        is_restart=False, options=Options(), args=["suiteA"])

    # A handler whose close() always fails with IOError.
    failing_handler = mock.MagicMock()
    failing_handler.close.side_effect = IOError
    failing_handler.level = logging.INFO
    LOG.addHandler(failing_handler)

    # Must not propagate the IOError from the handler.
    scheduler.close_logs()
    self.assertEqual(1, failing_handler.close.call_count)

    LOG.removeHandler(failing_handler)
def test_ioerror_is_ignored(self, mocked_suite_srv_files_mgr,
                            mocked_suite_db_mgr, mocked_broadcast_mgr):
    """Test that IOError's are ignored when closing Scheduler logs.

    When a disk errors occurs, the scheduler.close_logs method may
    result in an IOError. This, combined with other variables, may cause
    an infinite loop. So it is better that it is ignored."""
    mocked_suite_srv_files_mgr.return_value\
        .get_suite_source_dir.return_value = "."
    scheduler = Scheduler(
        is_restart=False, options=Options(), args=["suiteA"])

    # A handler whose close() always fails with IOError.
    failing_handler = mock.MagicMock()
    failing_handler.close.side_effect = IOError
    failing_handler.level = logging.INFO
    LOG.addHandler(failing_handler)

    # Must not propagate the IOError from the handler.
    scheduler.close_logs()
    self.assertEqual(1, failing_handler.close.call_count)

    LOG.removeHandler(failing_handler)
def parse_args(self, api_args, remove_opts=None):
    """Parse options and arguments, overrides OptionParser.parse_args.

    Args:
        api_args (list): Command line options if passed via Python as
            opposed to sys.argv
        remove_opts (list): List of standard options to remove before
            parsing.
    """
    if self.auto_add:
        # Common options are appended after any command-specific ones.
        self.add_std_options()

    for opt in (remove_opts or []):
        # Ignore options not registered on this parser.
        with suppress(ValueError):
            self.remove_option(opt)

    options, args = OptionParser.parse_args(self, api_args)

    # Enforce the declared argument count contract.
    n_min = self.n_compulsory_args
    n_max = n_min + self.n_optional_args
    if len(args) < n_min:
        self.error("Wrong number of arguments (too few)")
    elif not self.unlimited_args and len(args) > n_max:
        self.error("Wrong number of arguments (too many)")

    if self.jset and options.templatevars_file:
        # Normalise the template variables file to an absolute path.
        options.templatevars_file = os.path.abspath(
            os.path.expanduser(options.templatevars_file))

    cylc.flow.flags.verbosity = options.verbosity

    # Set up stream logging for CLI. Note:
    # 1. On choosing STDERR: Log messages are diagnostics, so STDERR is
    #    the better choice for the logging stream. This allows us to use
    #    STDOUT for verbosity agnostic outputs.
    # 2. Scheduler will remove this handler when it becomes a daemon.
    if options.verbosity < 0:
        LOG.setLevel(logging.WARNING)
    elif options.verbosity > 0:
        LOG.setLevel(logging.DEBUG)
    else:
        LOG.setLevel(logging.INFO)
    RSYNC_LOG.setLevel(logging.INFO)
    # Remove the NullHandlers before adding the StreamHandler.
    for logger in (LOG, RSYNC_LOG):
        while logger.handlers:
            logger.handlers[0].close()
            logger.removeHandler(logger.handlers[0])
    log_handler = logging.StreamHandler(sys.stderr)
    log_handler.setFormatter(CylcLogFormatter(
        timestamp=options.log_timestamp,
        dev_info=(options.verbosity > 2),
    ))
    LOG.addHandler(log_handler)

    if self.segregated_log:
        setup_segregated_log_streams(LOG, log_handler)

    return (options, args)