Example #1
    def test_members_empty(self):
        uut = FileCollector(log_printer=self.lp)
        uut._unfold_params()
        self.assertEqual(uut.log_printer, self.lp)
        self.assertEqual(uut._flat_dirs, [])
        self.assertEqual(uut._rec_dirs, [])
        self.assertEqual(uut._allowed_types, None)
        self.assertEqual(uut._ignored_dirs, [])
        self.assertEqual(uut._ignored_files, [])
Example #2
    def test_from_section(self):
        self.assertRaises(TypeError, FileCollector.from_section, 5)

        test_section = Section("test")
        test_section.append(Setting("files", "test value"))
        test_section.append(Setting("files_regex", "test value"))
        test_section.append(Setting("flat_dirs", "test value"))
        test_section.append(Setting("rec_dirs", "test value"))
        test_section.append(Setting("ignored_dirs", "test value"))

        FileCollector.from_section(test_section)
Example #3
    def __init__(self,
                 bear_kinds,
                 flat_bear_dirs=[],
                 rec_bear_dirs=[StringConstants.coalib_bears_root],
                 bear_names=None,
                 ignored_bears=None,
                 ignored_bear_dirs=None,
                 regex="",
                 log_printer=ConsolePrinter()):
        """
        The BearCollector searches the filesystem for python files containing Bears. It extracts the Bear classes (not
        instances) out of this files so the caller can instantiate them as he likes.

        :param bear_kinds: the KINDs of bears to be collected
        :param flat_bear_dirs: list of strings: directories from which bears should be collected (flat)
        :param rec_bear_dirs: list of strings: directories from which bears should be collected (recursive)
        :param bear_names: list of strings: names of bears that should be collected.
        :param ignored_bears: list of strings: names of bears that should not be collected, even if they match a regex.
        Default is none.
        :param ignored_bear_dirs: list of strings: directories from which bears should not be collected. Overrides
        anything else.
        :param regex: regex that match bears to be collected.
        :param log_printer: LogPrinter to handle logging of debug, warning and error messages
        """
        if bear_names is None:
            bear_names = []
        if ignored_bears is None:
            ignored_bears = []
        if ignored_bear_dirs is None:
            ignored_bear_dirs = []

        if not isinstance(bear_kinds, list):
            raise TypeError("bear_kinds should be of type list")
        if not isinstance(bear_names, list):
            raise TypeError("bear_names should be of type list")
        if not isinstance(ignored_bears, list):
            raise TypeError("ignored_bears should be of type list")
        if not isinstance(regex, str):
            raise TypeError("regex should be of type string")

        FileCollector.__init__(self,
                               flat_dirs=flat_bear_dirs,
                               rec_dirs=rec_bear_dirs,
                               allowed_types=["py"],
                               ignored_dirs=ignored_bear_dirs,
                               log_printer=log_printer)

        self._bear_kinds = bear_kinds
        self._bear_names = bear_names
        self._ignored_bears = ignored_bears
        self._regex = self.prepare_regex(regex)
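The docstring above describes the intended call pattern. A minimal usage sketch follows; the import paths, the BEAR_KIND constant and the directory names are assumptions for illustration and may not match the actual project layout:

# Illustrative sketch only; the imports and the kind constant are assumptions,
# not taken from the example above.
from coalib.bears.BEAR_KIND import BEAR_KIND
from coalib.collecting.BearCollector import BearCollector

collector = BearCollector(
    bear_kinds=[BEAR_KIND.LOCAL],      # collect only bears of this kind
    flat_bear_dirs=["./my_bears"],     # searched non-recursively
    rec_bear_dirs=["./more_bears"],    # searched recursively
    ignored_bears=["UnwantedBear"],    # never collected, even if matched
    regex=".*Bear")                    # bear names have to match this regex
bear_classes = collector.collect()     # returns classes, not instances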
Example #4
    def test_members_full(self):
        uut = FileCollector([], "",
                            [os.getcwd()], ["abc", "xyz"], [".PY", "c"],
                            [], [],
                            log_printer=self.lp)
        uut._unfold_params()
        self.assertEqual(uut.log_printer, self.lp)
        self.assertEqual(uut._flat_dirs, [os.getcwd()])
        self.assertEqual(uut._rec_dirs,
                         [os.path.abspath("abc"), os.path.abspath("xyz")])
        self.assertEqual(uut._allowed_types, ["py", "c"])
        self.assertEqual(uut._ignored_files, [])
        self.assertEqual(uut._ignored_dirs, [])
Example #5
    def test_nonexistent_directory(self):
        self.assertEqual(
            FileCollector(log_printer=QuietPrinter(),
                          flat_dirs=["bullshit"]).collect(), [])
        self.assertEqual(
            FileCollector(log_printer=QuietPrinter(),
                          rec_dirs=["bullshit"]).collect(), [])
        self.assertRaises(
            ZeroDivisionError,
            FileCollector(log_printer=LoudPrinter(),
                          flat_dirs=["bullshit"]).collect)
        self.assertRaises(
            ZeroDivisionError,
            FileCollector(log_printer=LoudPrinter(),
                          rec_dirs=["bullshit"]).collect)
Example #6
    def collect(self):
        """
        :return: list of classes (not instances) of all collected bears
        """

        # Needs to be called upfront since it calls _unfold_params()
        files = FileCollector.collect(self)
        bears = []

        for file in files:
            module_name = os.path.splitext(os.path.basename(file))[0]
            module_dir = os.path.dirname(file)
            if module_dir not in sys.path:
                sys.path.insert(0, module_dir)

            module = importlib.import_module(module_name)
            for name, p_object in inspect.getmembers(module):
                if name == "__additional_bears__" and isinstance(
                        p_object, list):
                    bears.extend(self._valid_bears_from_list(p_object))

                elif hasattr(p_object, "kind"):
                    if inspect.getfile(p_object) == file:
                        bear_kind = None
                        try:
                            bear_kind = p_object.kind()
                        except:
                            # Objects whose kind() raises are simply skipped
                            pass
                        if bear_kind in self._bear_kinds:
                            bears.append(p_object)

        bears = list(set(bears))
        self._items = bears
        return bears
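Since collect() returns bear classes rather than instances, the caller is expected to instantiate them itself. A rough sketch, assuming local bears whose constructors take the arguments used in Example #14 below; the collector, section and message_queue names are placeholders:

# Sketch under the assumption that the constructor signature matches Example #14.
bear_classes = collector.collect()
bears = [bear_class(section, message_queue, TIMEOUT=0.1)
         for bear_class in bear_classes]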
Example #7
    def test_regex(self):
        self.assertEqual(
            FileCollector(log_printer=QuietPrinter(),
                          flat_dirs=[self.tmp_dir],
                          regex=r"testfile.*\.c",
                          allowed_types=[]).collect(), [self.testfile2_path])
        self.assertEqual(
            FileCollector(log_printer=QuietPrinter(),
                          flat_dirs=[self.tmp_dir],
                          regex=r"tfile.*\.c",
                          allowed_types=[]).collect(), [])
        # Bad regex
        self.assertEqual(
            FileCollector(log_printer=QuietPrinter(),
                          flat_dirs=[self.tmp_dir],
                          regex=r"*testfile.*\.c",
                          allowed_types=[]).collect(), [])
Example #8
    def test_unreadable_directory(self):
        if sys.version_info < (3, 4):
            import imp as importlib
        else:
            import importlib
        from unittest.mock import MagicMock

        os.listdir = MagicMock(side_effect=OSError)
        self.assertEqual(
            FileCollector(log_printer=QuietPrinter(),
                          flat_dirs=[os.getcwd()]).collect(), [])
        self.assertEqual(
            FileCollector(log_printer=QuietPrinter(),
                          rec_dirs=[os.getcwd()]).collect(), [])
        self.assertRaises(
            ZeroDivisionError,
            FileCollector(log_printer=LoudPrinter(),
                          flat_dirs=["bullshit"]).collect)
        self.assertRaises(
            ZeroDivisionError,
            FileCollector(log_printer=LoudPrinter(),
                          rec_dirs=["bullshit"]).collect)
        importlib.reload(os)
Example #9
    def test_ignored_members(self):
        uut = FileCollector([], "", ["flat"], ["rec"], [], [],
                            ["flat", "rec"])
        uut._unfold_params()
        self.assertEqual(uut._flat_dirs, [])
        self.assertEqual(uut._rec_dirs, [])
Example #10
    def test_ignored_files(self):
        uut = FileCollector(rec_dirs=[self.tmp_dir],
                            ignored_files=[self.testfile2_path],
                            log_printer=self.lp)
        self.assertEqual(set(uut.collect()),
                         {self.testfile1_path, self.testfile3_path})
Example #11
    def test_allowed(self):
        uut = FileCollector(rec_dirs=[self.tmp_dir],
                            allowed_types=[".py"],
                            log_printer=self.lp)
        self.assertEqual(set(uut.collect()),
                         {self.testfile1_path, self.testfile3_path})
Example #12
    def test_flat(self):
        uut = FileCollector(flat_dirs=[self.tmp_dir], log_printer=self.lp)
        self.assertEqual(set(uut.collect()),
                         {self.testfile1_path, self.testfile2_path})
Example #13
    def test_files(self):
        uut = FileCollector(files=["not_a_file", self.testfile1_path])
        self.assertEqual(set(uut.collect()), {self.testfile1_path})
        # Consecutive invocations shall be idempotent
        self.assertEqual(set(uut.collect()), {self.testfile1_path})
Example #14
    def run(self):
        filename_list = FileCollector.from_section(self.section).collect()
        file_dict = self._get_file_dict(filename_list)

        manager = multiprocessing.Manager()
        global_bear_queue = multiprocessing.Queue()
        filename_queue = multiprocessing.Queue()
        local_result_dict = manager.dict()
        global_result_dict = manager.dict()
        message_queue = multiprocessing.Queue()
        control_queue = multiprocessing.Queue()

        for i in range(len(self.local_bear_list)):
            self.local_bear_list[i] = self.local_bear_list[i](self.section,
                                                              message_queue,
                                                              TIMEOUT=0.1)
        for i in range(len(self.global_bear_list)):
            self.global_bear_list[i] = self.global_bear_list[i](file_dict,
                                                                self.section,
                                                                message_queue,
                                                                TIMEOUT=0.1)

        running_processes = get_cpu_count()
        barrier = Barrier(parties=running_processes)

        bear_runner_args = {
            "file_name_queue": filename_queue,
            "local_bear_list": self.local_bear_list,
            "global_bear_list": self.global_bear_list,
            "global_bear_queue": global_bear_queue,
            "file_dict": file_dict,
            "local_result_dict": local_result_dict,
            "global_result_dict": global_result_dict,
            "message_queue": message_queue,
            "control_queue": control_queue,
            "barrier": barrier,
            "TIMEOUT": 0.1
        }
        processes = [BearRunner(**bear_runner_args)
                     for i in range(running_processes)]
        logger_thread = self.LogPrinterThread(message_queue,
                                              self.section.log_printer)
        # Start and join the logger thread along with the BearRunners
        processes.append(logger_thread)

        self._fill_queue(filename_queue, filename_list)
        self._fill_queue(global_bear_queue, range(len(self.global_bear_list)))

        for runner in processes:
            runner.start()

        # One process is the logger thread
        while running_processes > 1:
            try:
                control_elem, index = control_queue.get(timeout=0.1)
                if control_elem == CONTROL_ELEMENT.LOCAL:
                    self.section.interactor.print_results(
                        local_result_dict[index], file_dict)
                elif control_elem == CONTROL_ELEMENT.GLOBAL:
                    self.section.interactor.print_results(
                        global_result_dict[index], file_dict)
                elif control_elem == CONTROL_ELEMENT.FINISHED:
                    running_processes = sum((1 if process.is_alive() else 0)
                                            for process in processes)
            except queue.Empty:
                running_processes = sum(
                    (1 if process.is_alive() else 0) for process in processes)

        logger_thread.running = False
        self.section.interactor.finalize(file_dict)

        for runner in processes:
            runner.join()
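The LogPrinterThread used above is not shown in these examples. Purely as a hedged illustration of the mechanism run() relies on (a thread that drains message_queue and forwards each message to the section's log printer until running is set to False), a minimal sketch could look like the following; the log_message method is an assumption about the LogPrinter interface:

# Hypothetical sketch of a logger thread; the real LogPrinterThread lives elsewhere in the project.
import queue
import threading


class LogPrinterThread(threading.Thread):
    def __init__(self, message_queue, log_printer):
        threading.Thread.__init__(self)
        self.message_queue = message_queue
        self.log_printer = log_printer
        self.running = True  # run() in the example above flips this to False to stop the thread

    def run(self):
        while self.running:
            try:
                elem = self.message_queue.get(timeout=0.1)
                self.log_printer.log_message(elem)  # assumed LogPrinter entry point
            except queue.Empty:
                continue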