def do_some_logs_multiprocessing(again):
    """Log in a loop from a forked child until *again* becomes falsy.

    *again* is a multiprocessing shared Value acting as a run flag.
    """
    # fork() invalidates the parent's logging state: reset and reconfigure.
    bxilog.cleanup()
    bxilog.basicConfig(filename=FILENAME, filemode='a', level=bxilog.LOWEST)
    while True:
        if not again.value:
            break
        bxilog.out("Doing a simple log: %s", again)
        time.sleep(0.1)
    bxilog.out("Termination requested. Exiting.")
def my_function(logfilename):
    """Child-process body exercising the bxilog API after a fork().

    Logs at several levels, handles one expected exception, then raises an
    uncaught one that the logging system is expected to report (presumably
    via the bxilog.multiprocessing_target decorator at the call site —
    TODO confirm, the decorator is not visible here).
    """
    # Since a fork() has been made, the logging module must be re-initialized.
    bxilog.basicConfig(filename=logfilename, filemode='w')
    bxilog.out("In subprocess")
    bxilog.flush()

    # Test 1: simple trace message
    bxilog.trace("A simple trace message")

    # Test 2: simple error message
    bxilog.error("A simple error message")

    # Test 3: catch an expected exception and log it at WARNING level.
    try:
        raise ValueError("An expected exception in first subprocess")
    except ValueError:  # was a bare 'except:'; narrowed to the exception raised
        bxilog.exception("Handling an exception in subprocess",
                         level=bxilog.WARNING)

    # Test 4: this uncaught exception should be reported by the logging
    # system instead of appearing on standard error as usual.
    raise ValueError("An unexpected exception in second subprocess")
def test_uncaught(self):
    """Unit test for an uncaught exception in a helper script.

    Runs uncaught.py, expects it to exit with return code 1, and checks
    that the "Uncaught Exception" marker was written to its log file.
    """
    exe = os.path.join(os.path.dirname(__file__), "uncaught.py")
    # The helper derives its log file name from its own name: uncaught.bxilog
    filename = os.path.splitext(os.path.basename(exe))[0] + '.bxilog'
    try:
        bxilog.out("Invoking %s. It must create file: %s", exe, filename)
        subprocess.check_call([exe])
    except subprocess.CalledProcessError as cpe:
        self.assertEqual(cpe.returncode, 1)
    # NOTE(review): if the script exits 0 no assertion on the return code is
    # made (original behavior preserved); only the log content is checked.
    with open(filename) as logfile:
        pattern = '.*Uncaught Exception - exiting thread.*'
        regexp = re.compile(pattern)
        found = False
        for line in logfile:
            if regexp.match(line):
                found = True
                break  # one match is enough
        self.assertTrue(found,
                        "Pattern %s not found in %s" % (pattern, filename))
    os.unlink(filename)
def test_threading(self):
    """Spawn several logging threads, then request and verify termination."""
    global __LOOP_AGAIN__
    workers = []
    for _ in xrange(multiprocessing.cpu_count() * 2):
        worker = threading.Thread(target=do_some_logs_threading)
        bxilog.out("Starting new thread")
        worker.start()
        workers.append(worker)
    bxilog.out("Sleeping")
    time.sleep(0.5)
    bxilog.out("Requesting termination of %s threads", len(workers))
    __LOOP_AGAIN__ = False
    for worker in workers:
        try:
            worker.join(5)
        except Error as e:  # NOTE(review): 'Error' not defined in this view — confirm import
            bxilog.out("Exception: %s", e)
        self.assertFalse(worker.is_alive())
def test_threads_and_forks(self):
    """Fork child processes that each run logging threads, then stop them."""
    again = multiprocessing.Value(ctypes.c_bool, True, lock=False)
    children = []
    for _ in xrange(multiprocessing.cpu_count()):
        child = multiprocessing.Process(target=threads_in_process,
                                        args=(again, ))
        bxilog.out("Starting new process")
        child.start()
        children.append(child)
    bxilog.out("Sleeping")
    time.sleep(0.5)
    bxilog.out("Requesting termination of %s processes", len(children))
    again.value = False
    for child in children:
        try:
            child.join(5)
        except Error as e:  # NOTE(review): 'Error' not defined in this view — confirm import
            bxilog.out("Exception: %s", e)
        self.assertFalse(child.is_alive())
def threads_in_process(again):
    """Child-process body: spawn logging threads and run until *again* clears."""
    global __LOOP_AGAIN__
    __LOOP_AGAIN__ = True
    # Reset logging after fork() before any thread starts logging.
    bxilog.cleanup()
    bxilog.basicConfig(filename=FILENAME, filemode='a', level=bxilog.LOWEST)
    workers = []
    for _ in xrange(multiprocessing.cpu_count()):
        worker = threading.Thread(target=do_some_logs_threading)
        bxilog.out("Starting new thread")
        worker.start()
        workers.append(worker)
    while again.value:
        time.sleep(0.05)
    bxilog.out("Requesting termination of %s threads", len(workers))
    __LOOP_AGAIN__ = False
    for worker in workers:
        try:
            worker.join(5)
        except Error as e:  # NOTE(review): 'Error' not defined in this view — confirm import
            bxilog.out("Exception: %s", e)
def _do_log(start, end):
    """Emit one log message per index in [start, end); return how many were sent."""
    sent = 0
    for idx in xrange(start, end):
        bxilog.out("Message #%d sent from the child", idx)
        sent += 1
    return sent
def test_remote_logging_bind_simple(self):
    """Parent process receives logs from a child process.

    Configures two file handlers in the parent — one catching everything,
    one catching only the child command's logger — launches the child,
    receives its logs over the 'ipc://' URL, and checks the dedicated file
    holds exactly the number of messages the child produced.
    """
    # Configure the log in the parent so that all logs received from the
    # child go to a dedicated file from which we can count the number of
    # messages produced by the child.
    tmpdir = tempfile.mkdtemp(suffix="tmp", prefix=BXIRemoteLoggerTest.__name__)
    # Renamed from 'all' to avoid shadowing the builtin.
    all_log = os.path.join(tmpdir, 'all.bxilog')
    child = os.path.join(tmpdir, 'child.bxilog')
    parent_config = {
        'handlers': ['all', 'child'],
        'all': {
            'module': 'bxi.base.log.file_handler',
            'filters': ':lowest',
            'path': all_log,
            'append': True,
        },
        'child': {
            'module': 'bxi.base.log.file_handler',
            'filters': ':off,%s:lowest' % LOGGER_CMD,
            'path': child,
            'append': True,
        }
    }
    bxilog.set_config(configobj.ConfigObj(parent_config))
    print("Logging output: all: %s, child: %s" % (all_log, child))
    url = 'ipc://%s/rh-cfg.zock' % tmpdir
    logs_nb = 25
    full_cmd_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                 LOGGER_CMD)
    logger_output_file = os.path.join(tmpdir,
                                      os.path.splitext(LOGGER_CMD)[0] + '.bxilog')
    args = [sys.executable, full_cmd_path, logger_output_file, url,
            'False', '1', str(logs_nb)]
    bxilog.out("Executing '%s': it must produce %d logs",
               ' '.join(args), logs_nb)
    popen = subprocess.Popen(args)
    bxilog.out("Starting logs reception thread on %s", url)
    receiver = remote_receiver.RemoteReceiver([url], bind=True)
    receiver.start()
    bxilog.out("Waiting for the child termination")
    popen.wait()
    rc = popen.returncode
    bxilog.out("Child exited with return code %s", rc)
    # The child exits with the number of logs it produced.
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(rc, logs_nb)
    # Wait a bit for the in-flight logs to be processed by the receiver.
    time.sleep(1)
    bxilog.out("Stopping the receiver")
    receiver.stop(True)
    bxilog.out("Flushing bxilog")
    bxilog.flush()
    with open(child) as file_:
        lines = file_.readlines()
        self.assertEqual(len(lines), logs_nb)
def do_some_logs_threading():
    """Log periodically until the module-level __LOOP_AGAIN__ flag is cleared."""
    global __LOOP_AGAIN__
    while True:
        if not __LOOP_AGAIN__:
            break
        bxilog.out("Doing a simple log: %s", __LOOP_AGAIN__)
        time.sleep(0.1)
    bxilog.out("Termination requested. Exiting.")