Example no. 1
0
 def test_run(self):
     """Messages queued before start() are emitted as INFO records."""
     pending = queue.Queue()
     self.uut = LogPrinterThread(pending)
     pending.put(item='Sample message 1')
     pending.put(item='Sample message 2')
     pending.put(item='Sample message 3')
     self.assertEqual(self.uut.message_queue.qsize(), 3)
     with LogCapture() as capture:
         self.uut.start()
         # Busy-wait until the worker thread has drained the queue.
         while not self.uut.message_queue.empty():
             pass
         self.uut.running = False
         self.uut.join()
     capture.check(('root', 'INFO', 'Sample message 1'),
                   ('root', 'INFO', 'Sample message 2'),
                   ('root', 'INFO', 'Sample message 3'))
Example no. 2
0
class LogPrinterThreadTest(unittest.TestCase):

    def test_run(self):
        """Messages queued before start() are all written to stdout."""
        printer = TestPrinter()
        pending = queue.Queue()
        self.uut = LogPrinterThread(pending, printer)
        pending.put(item="Sample message 1")
        pending.put(item="Sample message 2")
        pending.put(item="Sample message 3")
        self.assertEqual(self.uut.message_queue.qsize(), 3)
        expected = "Sample message 1\nSample message 2\nSample message 3\n"
        with retrieve_stdout() as stdout:
            self.uut.start()
            # Busy-wait until the worker thread has drained the queue.
            while not self.uut.message_queue.empty():
                pass
            self.uut.running = False
            self.uut.join()
            self.assertEqual(stdout.getvalue(), expected)
Example no. 3
0
 def test_run(self):
     """All queued messages must appear on stdout once the thread runs."""
     printer = TestPrinter()
     messages = queue.Queue()
     self.uut = LogPrinterThread(messages, printer)
     messages.put(item="Sample message 1")
     messages.put(item="Sample message 2")
     messages.put(item="Sample message 3")
     self.assertEqual(self.uut.message_queue.qsize(), 3)
     with retrieve_stdout() as stdout:
         self.uut.start()
         # Spin until the worker has printed everything in the queue.
         while not self.uut.message_queue.empty():
             pass
         self.uut.running = False
         self.uut.join()
         self.assertEqual(stdout.getvalue(),
                          "Sample message 1\n"
                          "Sample message 2\n"
                          "Sample message 3\n")
Example no. 4
0
class LogPrinterThreadTest(unittest.TestCase):

    def test_run(self):
        """The thread prints queued messages to stdout in FIFO order."""
        printer = TestPrinter()
        pending = queue.Queue()
        self.uut = LogPrinterThread(pending, printer)
        for text in ('Sample message 1',
                     'Sample message 2',
                     'Sample message 3'):
            pending.put(item=text)
        self.assertEqual(self.uut.message_queue.qsize(), 3)
        with retrieve_stdout() as stdout:
            self.uut.start()
            # Spin until the queue has been fully consumed.
            while not self.uut.message_queue.empty():
                pass
            self.uut.running = False
            self.uut.join()
            self.assertEqual(
                stdout.getvalue(),
                'Sample message 1\n'
                'Sample message 2\n'
                'Sample message 3\n')
Example no. 5
0
def execute_section(section,
                    global_bear_list,
                    local_bear_list,
                    print_results,
                    log_printer):
    """
    Run all bears of a section and report their results.

    The work happens in four phases:

    1. Resolve bear dependencies and create the worker processes
       (this loads the files and sets up the queues).
    2. Start those processes plus a thread that prints queued log messages.
    3. Drain the control queue and print every result that arrives.
    4. Stop the logger thread and join all processes.

    :param section:          The section to execute.
    :param global_bear_list: List of global bears belonging to the section.
    :param local_bear_list:  List of local bears belonging to the section.
    :param print_results:    Prints all given results appropriate to the
                             output medium.
    :param log_printer:      The log_printer to warn to.
    :return:                 Tuple containing a bool (True if results were
                             yielded, False otherwise), a Manager.dict
                             containing all local results(filenames are key)
                             and a Manager.dict containing all global bear
                             results (bear names are key) as well as the
                             file dictionary.
    """
    local_bear_list = Dependencies.resolve(local_bear_list)
    global_bear_list = Dependencies.resolve(global_bear_list)

    # int(section['jobs']) can raise ValueError (non-numeric value) or
    # IndexError (setting presumably absent); use the CPU count then.
    try:
        job_count = int(section['jobs'])
    except ValueError:
        log_printer.warn("Unable to convert setting 'jobs' into a number. "
                         "Falling back to CPU count.")
        job_count = get_cpu_count()
    except IndexError:
        job_count = get_cpu_count()

    processes, arg_dict = instantiate_processes(section,
                                                local_bear_list,
                                                global_bear_list,
                                                job_count,
                                                log_printer)

    # The logger thread is started (and later joined) together with the
    # bear-running processes so queued log messages reach the user.
    logger_thread = LogPrinterThread(arg_dict["message_queue"], log_printer)
    processes.append(logger_thread)

    for process in processes:
        process.start()

    try:
        yielded = process_queues(processes,
                                 arg_dict["control_queue"],
                                 arg_dict["local_result_dict"],
                                 arg_dict["global_result_dict"],
                                 arg_dict["file_dict"],
                                 print_results,
                                 section,
                                 log_printer)
        return (yielded,
                arg_dict["local_result_dict"],
                arg_dict["global_result_dict"],
                arg_dict["file_dict"])
    finally:
        # Always stop the logger and join everything, even on error.
        logger_thread.running = False

        for process in processes:
            process.join()
Example no. 6
0
def execute_section(section,
                    global_bear_list,
                    local_bear_list,
                    print_results,
                    cache,
                    log_printer,
                    console_printer,
                    debug=False):
    """
    Executes the section with the given bears.

    The execute_section method does the following things:

    1. Prepare a Process
       -  Load files
       -  Create queues
    2. Spawn up one or more Processes
    3. Output results from the Processes
    4. Join all processes

    :param section:          The section to execute.
    :param global_bear_list: List of global bears belonging to the section.
                             Dependencies are already resolved.
    :param local_bear_list:  List of local bears belonging to the section.
                             Dependencies are already resolved.
    :param print_results:    Prints all given results appropriate to the
                             output medium.
    :param cache:            An instance of ``misc.Caching.FileCache`` to use as
                             a file cache buffer.
    :param log_printer:      The log_printer to warn to.
    :param console_printer:  Object to print messages on the console.
    :param debug:            Bypass multiprocessing and run bears in debug mode,
                             not catching any exceptions.
    :return:                 Tuple containing a bool (True if results were
                             yielded, False otherwise), a Manager.dict
                             containing all local results(filenames are key)
                             and a Manager.dict containing all global bear
                             results (bear names are key) as well as the
                             file dictionary.
    """
    # Debug mode forces sequential execution: a single process,
    # regardless of the 'jobs' setting.
    if debug:
        running_processes = 1
    else:
        # int(section['jobs']) can raise ValueError (non-numeric value) or
        # IndexError (setting presumably absent); use the CPU count then.
        try:
            running_processes = int(section['jobs'])
        except ValueError:
            log_printer.warn("Unable to convert setting 'jobs' into a number. "
                             'Falling back to CPU count.')
            running_processes = get_cpu_count()
        except IndexError:
            running_processes = get_cpu_count()

    # A section may contain either raw-file bears or text-file bears,
    # never both; collapsing the per-bear flags into a set checks that.
    bears = global_bear_list + local_bear_list
    use_raw_files = set(bear.USE_RAW_FILES for bear in bears)

    if len(use_raw_files) > 1:
        log_printer.err("Bears that uses raw files can't be mixed with "
                        'Bears that uses text files. Please move the following '
                        'bears to their own section: ' +
                        ', '.join(bear.name for bear in bears
                                  if not bear.USE_RAW_FILES))
        # Same tuple shape as the success path, but empty.
        return ((), {}, {}, {})

    # use_raw_files is expected to be only one object.
    # The if statement is to ensure this doesn't fail when
    # it's running on an empty run
    use_raw_files = use_raw_files.pop() if len(use_raw_files) > 0 else False

    processes, arg_dict = instantiate_processes(section,
                                                local_bear_list,
                                                global_bear_list,
                                                running_processes,
                                                cache,
                                                log_printer,
                                                console_printer=console_printer,
                                                debug=debug,
                                                use_raw_files=use_raw_files)

    logger_thread = LogPrinterThread(arg_dict['message_queue'],
                                     log_printer)
    # Start and join the logger thread along with the processes to run bears
    if not debug:
        # in debug mode the logging messages are directly processed by the
        # message_queue
        processes.append(logger_thread)

    for runner in processes:
        runner.start()

    try:
        return (process_queues(processes,
                               arg_dict['control_queue'],
                               arg_dict['local_result_dict'],
                               arg_dict['global_result_dict'],
                               arg_dict['file_dict'],
                               print_results,
                               section,
                               cache,
                               log_printer,
                               console_printer=console_printer,
                               debug=debug),
                arg_dict['local_result_dict'],
                arg_dict['global_result_dict'],
                arg_dict['file_dict'])
    finally:
        # Runs even when process_queues raises: the logger thread is
        # always stopped and every spawned process is joined.
        if not debug:
            # in debug mode multiprocessing and logger_thread are disabled
            # ==> no need for following actions
            logger_thread.running = False

            for runner in processes:
                runner.join()
Example no. 7
0
def execute_section(section, global_bear_list, local_bear_list, print_results,
                    cache, log_printer):
    """
    Run every bear of a section and collect the results.

    Four phases:

    1. Resolve bear dependencies and create the worker processes
       (this loads the files and sets up the queues).
    2. Start those processes plus a thread that prints queued log messages.
    3. Drain the control queue and print every result that arrives.
    4. Stop the logger thread and join all processes.

    :param section:          The section to execute.
    :param global_bear_list: List of global bears belonging to the section.
    :param local_bear_list:  List of local bears belonging to the section.
    :param print_results:    Prints all given results appropriate to the
                             output medium.
    :param cache:            An instance of ``misc.Caching.FileCache`` to use as
                             a file cache buffer.
    :param log_printer:      The log_printer to warn to.
    :return:                 Tuple containing a bool (True if results were
                             yielded, False otherwise), a Manager.dict
                             containing all local results(filenames are key)
                             and a Manager.dict containing all global bear
                             results (bear names are key) as well as the
                             file dictionary.
    """
    local_bear_list = Dependencies.resolve(local_bear_list)
    global_bear_list = Dependencies.resolve(global_bear_list)

    # int(section['jobs']) can raise ValueError (non-numeric value) or
    # IndexError (setting presumably absent); use the CPU count then.
    try:
        job_count = int(section['jobs'])
    except ValueError:
        log_printer.warn("Unable to convert setting 'jobs' into a number. "
                         "Falling back to CPU count.")
        job_count = get_cpu_count()
    except IndexError:
        job_count = get_cpu_count()

    processes, arg_dict = instantiate_processes(section,
                                                local_bear_list,
                                                global_bear_list,
                                                job_count,
                                                cache,
                                                log_printer)

    # The logger thread runs (and is joined) together with the bear
    # processes so that queued log messages reach the user.
    logger_thread = LogPrinterThread(arg_dict["message_queue"], log_printer)
    processes.append(logger_thread)

    for process in processes:
        process.start()

    try:
        yielded = process_queues(processes,
                                 arg_dict["control_queue"],
                                 arg_dict["local_result_dict"],
                                 arg_dict["global_result_dict"],
                                 arg_dict["file_dict"],
                                 print_results,
                                 section,
                                 cache,
                                 log_printer)
        return (yielded,
                arg_dict["local_result_dict"],
                arg_dict["global_result_dict"],
                arg_dict["file_dict"])
    finally:
        # Always stop the logger and join everything, even on error.
        logger_thread.running = False

        for process in processes:
            process.join()
Example no. 8
0
def execute_section(section,
                    global_bear_list,
                    local_bear_list,
                    print_results,
                    cache,
                    log_printer,
                    console_printer,
                    debug=False,
                    apply_single=False):
    """
    Executes the section with the given bears.

    The execute_section method does the following things:

    1. Prepare a Process
       -  Load files
       -  Create queues
    2. Spawn up one or more Processes
    3. Output results from the Processes
    4. Join all processes

    :param section:          The section to execute.
    :param global_bear_list: List of global bears belonging to the section.
                             Dependencies are already resolved.
    :param local_bear_list:  List of local bears belonging to the section.
                             Dependencies are already resolved.
    :param print_results:    Prints all given results appropriate to the
                             output medium.
    :param cache:            An instance of ``misc.Caching.FileCache`` to use as
                             a file cache buffer.
    :param log_printer:      The log_printer to warn to. Unused here:
                             ``None`` is forwarded instead and messages go
                             through the ``logging`` module.
    :param console_printer:  Object to print messages on the console.
    :param debug:            Bypass multiprocessing and run bears in debug mode,
                             not catching any exceptions.
    :param apply_single:     The action that should be applied for all results.
                             If it's not selected, has a value of False.
    :return:                 Tuple containing a bool (True if results were
                             yielded, False otherwise), a Manager.dict
                             containing all local results(filenames are key)
                             and a Manager.dict containing all global bear
                             results (bear names are key) as well as the
                             file dictionary.
    """
    # 'debug_bears' is disabled when the setting is absent or the literal
    # string 'False'; otherwise it is parsed as a list of strings
    # (presumably bear names — matches the debug_bears kwargs below).
    debug_bears = (False
                   if 'debug_bears' not in section or (
                       section['debug_bears'].value == 'False') else
                   typed_list(str)(section['debug_bears']))

    # Any kind of debugging forces sequential execution in one process.
    if debug or debug_bears:
        running_processes = 1
    else:
        # int(section['jobs']) can raise ValueError (non-numeric value) or
        # IndexError (setting presumably absent); use the CPU count then.
        try:
            running_processes = int(section['jobs'])
        except ValueError:
            logging.warning("Unable to convert setting 'jobs' into a number. "
                            'Falling back to CPU count.')
            running_processes = get_cpu_count()
        except IndexError:
            running_processes = get_cpu_count()

    # A section may contain either raw-file bears or text-file bears,
    # never both; collapsing the per-bear flags into a set checks that.
    bears = global_bear_list + local_bear_list
    use_raw_files = set(bear.USE_RAW_FILES for bear in bears)

    if len(use_raw_files) > 1:
        logging.error("Bears that uses raw files can't be mixed with Bears "
                      'that uses text files. Please move the following bears '
                      'to their own section: ' +
                      ', '.join(bear.name for bear in bears
                                if not bear.USE_RAW_FILES))
        # Same tuple shape as the success path, but empty.
        return ((), {}, {}, {})

    # use_raw_files is expected to be only one object.
    # The if statement is to ensure this doesn't fail when
    # it's running on an empty run
    use_raw_files = use_raw_files.pop() if len(use_raw_files) > 0 else False

    # NOTE: None is passed instead of log_printer — logging happens via
    # the message_queue / logging module in this variant.
    processes, arg_dict = instantiate_processes(section,
                                                local_bear_list,
                                                global_bear_list,
                                                running_processes,
                                                cache,
                                                None,
                                                console_printer=console_printer,
                                                debug=debug,
                                                use_raw_files=use_raw_files,
                                                debug_bears=debug_bears)

    logger_thread = LogPrinterThread(arg_dict['message_queue'])
    # Start and join the logger thread along with the processes to run bears
    if not (debug or debug_bears):
        # in debug mode the logging messages are directly processed by the
        # message_queue
        processes.append(logger_thread)

    for runner in processes:
        runner.start()

    try:
        return (process_queues(processes,
                               arg_dict['control_queue'],
                               arg_dict['local_result_dict'],
                               arg_dict['global_result_dict'],
                               arg_dict['file_dict'],
                               print_results,
                               section,
                               cache,
                               None,
                               console_printer=console_printer,
                               debug=debug,
                               apply_single=apply_single,
                               debug_bears=debug_bears),
                arg_dict['local_result_dict'],
                arg_dict['global_result_dict'],
                arg_dict['file_dict'])
    finally:
        # Runs even when process_queues raises: the logger thread is
        # always stopped and every spawned process is joined.
        if not (debug or debug_bears):
            # in debug mode multiprocessing and logger_thread are disabled
            # ==> no need for following actions
            logger_thread.running = False

            for runner in processes:
                runner.join()
Example no. 9
0
def execute_section(section,
                    global_bear_list,
                    local_bear_list,
                    print_results,
                    cache,
                    log_printer,
                    console_printer,
                    debug=False):
    """
    Executes the section with the given bears.

    The execute_section method does the following things:

    1. Prepare a Process
       -  Load files
       -  Create queues
    2. Spawn up one or more Processes
    3. Output results from the Processes
    4. Join all processes

    :param section:          The section to execute.
    :param global_bear_list: List of global bears belonging to the section.
                             Dependencies are already resolved.
    :param local_bear_list:  List of local bears belonging to the section.
                             Dependencies are already resolved.
    :param print_results:    Prints all given results appropriate to the
                             output medium.
    :param cache:            An instance of ``misc.Caching.FileCache`` to use as
                             a file cache buffer.
    :param log_printer:      The log_printer to warn to.
    :param console_printer:  Object to print messages on the console.
    :param debug:            Bypass multiprocessing and run bears in debug mode,
                             not catching any exceptions.
    :return:                 Tuple containing a bool (True if results were
                             yielded, False otherwise), a Manager.dict
                             containing all local results(filenames are key)
                             and a Manager.dict containing all global bear
                             results (bear names are key) as well as the
                             file dictionary.
    """
    # Debug mode forces sequential execution: a single process,
    # regardless of the 'jobs' setting.
    if debug:
        running_processes = 1
    else:
        # int(section['jobs']) can raise ValueError (non-numeric value) or
        # IndexError (setting presumably absent); use the CPU count then.
        try:
            running_processes = int(section['jobs'])
        except ValueError:
            log_printer.warn("Unable to convert setting 'jobs' into a number. "
                             'Falling back to CPU count.')
            running_processes = get_cpu_count()
        except IndexError:
            running_processes = get_cpu_count()

    processes, arg_dict = instantiate_processes(section,
                                                local_bear_list,
                                                global_bear_list,
                                                running_processes,
                                                cache,
                                                log_printer,
                                                console_printer=console_printer,
                                                debug=debug)

    logger_thread = LogPrinterThread(arg_dict['message_queue'],
                                     log_printer)
    # Start and join the logger thread along with the processes to run bears
    if not debug:
        # in debug mode the logging messages are directly processed by the
        # message_queue
        processes.append(logger_thread)

    for runner in processes:
        runner.start()

    try:
        return (process_queues(processes,
                               arg_dict['control_queue'],
                               arg_dict['local_result_dict'],
                               arg_dict['global_result_dict'],
                               arg_dict['file_dict'],
                               print_results,
                               section,
                               cache,
                               log_printer,
                               console_printer=console_printer,
                               debug=debug),
                arg_dict['local_result_dict'],
                arg_dict['global_result_dict'],
                arg_dict['file_dict'])
    finally:
        # Runs even when process_queues raises: the logger thread is
        # always stopped and every spawned process is joined.
        if not debug:
            # in debug mode multiprocessing and logger_thread are disabled
            # ==> no need for following actions
            logger_thread.running = False

            for runner in processes:
                runner.join()