Example #1
    @classmethod
    def setUpClass(cls):
        cls.utils = TestUtilities(cls, None)
        cls.utils.setUpClass()
        cls.utils.pull_to_trunk()
        cls.repo = cls.utils.repo
        cls.repo_wrapper = cls.utils.repo_wrapper
        cls.saved_patches_dir = cls.utils.saved_patches_dir
        cls.base_branch = TRUNK

        # Invoke this to setup main output directory and avoid test failures while initing config
        ProjectUtils.get_output_child_dir(JIRA_UMBRELLA_DATA)
Example #2
    def init_logger(execution_mode: ExecutionMode,
                    console_debug=False,
                    postfix: str = None,
                    repos=None,
                    verbose=False):
        # get root logger
        logger = logging.getLogger()
        level = logging.DEBUG
        logger.setLevel(level)

        if execution_mode == ExecutionMode.PRODUCTION:
            log_file = ProjectUtils.get_default_log_file(PROJECT_NAME,
                                                         postfix=postfix)
        elif execution_mode == ExecutionMode.TEST:
            log_file = ProjectUtils.get_default_test_log_file(PROJECT_NAME,
                                                              postfix=postfix)
        else:
            raise ValueError(f"Unknown execution mode: {execution_mode}")

        # create file handler which logs even debug messages
        fh = TimedRotatingFileHandler(log_file, when="midnight")
        fh.suffix = "%Y_%m_%d.log"
        fh.setLevel(level)

        # create console handler with a higher log level
        ch = logging.StreamHandler(stream=sys.stdout)
        ch.setLevel(logging.INFO)
        if console_debug:
            ch.setLevel(level)

        formatter = logging.Formatter(
            "%(asctime)s - %(levelname)s - %(name)s - %(message)s")
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        # add the handlers to the logger
        logger.addHandler(fh)
        logger.addHandler(ch)

        # https://gitpython.readthedocs.io/en/stable/tutorial.html#git-command-debugging-and-customization
        # THIS WON'T WORK BECAUSE GITPYTHON MODULE IS LOADED BEFORE THIS CALL
        # os.environ["GIT_PYTHON_TRACE"] = "1"
        # https://github.com/gitpython-developers/GitPython/issues/222#issuecomment-68597780
        if repos:
            for repo in repos:
                val = "full" if verbose else "1"
                type(repo.git).GIT_PYTHON_TRACE = val
        else:
            LOG.warning(
                "Cannot enable GIT_PYTHON_TRACE because repos list is empty!")
        return log_file
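
Note: the snippet above enables GitPython command tracing by setting the class attribute on the Git command object, because the GIT_PYTHON_TRACE environment variable is only read when the git module is imported (see the comments in the code). A minimal standalone sketch of that trick (illustrative only, not part of this project's code):

    import logging
    from git import Repo  # GitPython

    logging.basicConfig(level=logging.DEBUG)
    repo = Repo(".")  # any existing git repository
    type(repo.git).GIT_PYTHON_TRACE = "full"  # trace every git command GitPython runs
    repo.git.status()  # the executed command is now logged via the "git.cmd" logger
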
Example #3
    def init_logger(log_dir, console_debug=False):
        # get root logger
        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)

        # create file handler which logs even debug messages
        log_file = ProjectUtils.get_default_log_file(PROJECT_NAME)
        fh = TimedRotatingFileHandler(os.path.join(log_dir, log_file),
                                      when='midnight')
        fh.suffix = '%Y_%m_%d.log'
        fh.setLevel(logging.DEBUG)

        # create console handler with a higher log level
        ch = logging.StreamHandler(stream=sys.stdout)
        ch.setLevel(logging.INFO)
        if console_debug:
            ch.setLevel(logging.DEBUG)

        formatter = logging.Formatter(
            '%(asctime)s - %(levelname)s - %(name)s - %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        # add the handlers to the logger
        logger.addHandler(fh)
        logger.addHandler(ch)
Example #4
    def test_YARN_10496(self):
        project_out_root = ProjectUtils.get_test_output_basedir(PROJECT_NAME, allow_python_commons_as_project=True)
        result_basedir = FileUtils.join_path(project_out_root, "jira-data")
        FileUtils.ensure_dir_created(result_basedir)
        jira_id = "YARN-10496"
        jira_html_file = FileUtils.join_path(result_basedir, "jira.html")
        jira_list_file = FileUtils.join_path(result_basedir, "jira-list.txt")

        jira_html = JiraUtils.download_jira_html(
            "https://issues.apache.org/jira/browse/", jira_id, jira_html_file
        )
        jira_ids_and_titles = JiraUtils.parse_subjiras_and_jira_titles_from_umbrella_html(
            jira_html, jira_list_file, filter_ids=[jira_id]
        )

        expected_jira_ids = ['YARN-10169', 'YARN-10504', 'YARN-10505', 'YARN-10506', 'YARN-10512', 'YARN-10513',
                             'YARN-10521', 'YARN-10522', 'YARN-10524', 'YARN-10525', 'YARN-10531', 'YARN-10532',
                             'YARN-10535', 'YARN-10564', 'YARN-10565', 'YARN-10571', 'YARN-10573', 'YARN-10574',
                             'YARN-10576', 'YARN-10577', 'YARN-10578', 'YARN-10579', 'YARN-10581', 'YARN-10582',
                             'YARN-10583', 'YARN-10584', 'YARN-10587', 'YARN-10590', 'YARN-10592', 'YARN-10596',
                             'YARN-10598', 'YARN-10599', 'YARN-10600', 'YARN-10604', 'YARN-10605', 'YARN-10609',
                             'YARN-10614', 'YARN-10615', 'YARN-10620', 'YARN-10622', 'YARN-10624']
        all_list_items_found = all(id1 in jira_ids_and_titles.keys() for id1 in expected_jira_ids)
        self.assertTrue(all_list_items_found)

        expected_mappings = {'YARN-10624': 'Support max queues limit configuration in new auto created queue, consistent with old auto created.'}
        self.assertEqual(expected_mappings['YARN-10624'], jira_ids_and_titles['YARN-10624'])
        self.assertTrue(isinstance(jira_ids_and_titles['YARN-10624'], str))
Example #5
 def setUpClass(self, repo_postfix=None, init_logging=True):
     if repo_postfix:
         self.repo_postfix = repo_postfix
     ProjectUtils.get_test_output_basedir(PROJECT_NAME)
     try:
         self.setup_repo()
         if init_logging:
             Setup.init_logger(execution_mode=ExecutionMode.TEST,
                               console_debug=False,
                               repos=[self.repo])
         self.reset_and_checkout_trunk()
     except InvalidGitRepositoryError:
         LOG.info(
             f"Cloning repo '{HADOOP_REPO_APACHE}' for the first time...")
         Repo.clone_from(HADOOP_REPO_APACHE,
                         self.sandbox_repo_path,
                         progress=ProgressPrinter("clone"))
         self.setup_repo(log=False)
         self.reset_and_checkout_trunk()
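
The setUpClass above opens the sandbox repo and falls back to cloning it when GitPython raises InvalidGitRepositoryError. A rough standalone sketch of that open-or-clone pattern (placeholder path/URL, not this project's helpers):

    from git import Repo
    from git.exc import InvalidGitRepositoryError, NoSuchPathError

    def open_or_clone(path: str, url: str) -> Repo:
        try:
            return Repo(path)  # fails if path is missing or not a git repo
        except (InvalidGitRepositoryError, NoSuchPathError):
            return Repo.clone_from(url, path)
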
Example #6
    def __init__(self, output_dir: str, args):
        self.request_limit = args.req_limit if hasattr(args, "req_limit") and args.req_limit else 1
        self.full_email_conf: FullEmailConfig = FullEmailConfig(args)
        self.jenkins_url = args.jenkins_url
        self.job_name = args.job_name
        self.num_prev_days = args.num_prev_days
        tc_filters_raw = args.tc_filters if hasattr(args, "tc_filters") and args.tc_filters else []
        self.tc_filters: List[TestcaseFilter] = [TestcaseFilter(*tcf.split(":")) for tcf in tc_filters_raw]
        if not self.tc_filters:
            LOG.warning("TESTCASE FILTER IS NOT SET!")

        self.send_mail: bool = not args.skip_mail
        self.enable_file_cache: bool = not args.disable_file_cache
        self.output_dir = ProjectUtils.get_session_dir_under_child_dir(FileUtils.basename(output_dir))
        self.full_cmd: str = OsUtils.determine_full_command_filtered(filter_password=True)
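
Several of these __init__ methods read optional argparse attributes with a hasattr()-plus-truthiness check, as req_limit is handled above. A small standalone sketch of the same fallback pattern (hypothetical argument name, not from this project):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--req-limit", dest="req_limit", type=int)
    args = parser.parse_args([])

    # Fall back to 1 when the argument is absent or falsy:
    request_limit = args.req_limit if hasattr(args, "req_limit") and args.req_limit else 1
    assert request_limit == 1
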
Example #7
 def __init__(self, output_dir: str, args, branch_names: Dict[BranchType,
                                                              str]):
     self.output_dir = ProjectUtils.get_session_dir_under_child_dir(
         FileUtils.basename(output_dir))
     self.commit_author_exceptions = args.commit_author_exceptions
     self.console_mode = True if "console_mode" in args and args.console_mode else False
     self.save_to_file = not self.console_mode
     self.fail_on_missing_jira_id = False
     self.run_legacy_script = args.run_legacy_script
     self.legacy_compare_script_path = BranchComparatorConfig.find_git_compare_script()
     self.matching_algorithm: CommitMatchingAlgorithm = args.algorithm
     self.branch_names = branch_names
     self.repo_type: RepoType = (RepoType[args.repo_type.upper()]
                                 if hasattr(args, "repo_type") else
                                 RepoType.DOWNSTREAM)
     self.full_cmd: Optional[str] = None
Example #8
    def __init__(self, args):
        output_basedir = ProjectUtils.get_output_basedir(PROJECT_NAME)
        self.operation_mode = args.operation_mode
        self.validate_operation_mode()

        if self.operation_mode == OperationMode.GSHEET:
            self.gsheet_wrapper_normal = GSheetWrapper(args.gsheet_options)
            gsheet_options = copy.copy(args.gsheet_options)
            gsheet_options.worksheet = gsheet_options.worksheet + "_aggregated"
            self.gsheet_wrapper_aggregated = GSheetWrapper(gsheet_options)

        # Reuse UNIT_TEST_RESULT_AGGREGATOR's project name as we need the same credentials
        self.authorizer = GoogleApiAuthorizer(
            ServiceType.GMAIL,
            project_name=f"{UNIT_TEST_RESULT_AGGREGATOR}",
            secret_basedir=SECRET_PROJECTS_DIR,
            account_email="*****@*****.**")
        self.gmail_wrapper = GmailWrapper(self.authorizer,
                                          output_basedir=output_basedir)
Example #9
    def __init__(self, parser, args, output_dir: str):
        self._validate_args(parser, args)
        self.console_mode = getattr(args, "console_mode", False)
        self.gmail_query = args.gmail_query
        self.smart_subject_query = args.smart_subject_query
        self.request_limit = getattr(args, "request_limit", 1000000)
        self.account_email: str = args.account_email
        self.testcase_filters = TestCaseFilters(
            TestCaseFilters.convert_raw_match_expressions_to_objs(
                getattr(args, "match_expression", None)),
            getattr(args, "aggregate_filters", []),
        )
        self.skip_lines_starting_with: List[str] = getattr(
            args, "skip_lines_starting_with", [])
        self.email_content_line_sep = getattr(args,
                                              "email_content_line_separator",
                                              DEFAULT_LINE_SEP)
        self.truncate_subject_with: str = getattr(args, "truncate_subject",
                                                  None)
        self.abbrev_tc_package: str = getattr(args, "abbrev_testcase_package",
                                              None)
        self.summary_mode = args.summary_mode
        self.output_dir = output_dir
        self.email_cache_dir = FileUtils.join_path(output_dir, "email_cache")
        self.session_dir = ProjectUtils.get_session_dir_under_child_dir(
            FileUtils.basename(output_dir))
        self.full_cmd: str = OsUtils.determine_full_command_filtered(
            filter_password=True)

        if self.operation_mode == OperationMode.GSHEET:
            worksheet_names: List[str] = [
                self.get_worksheet_name(tcf)
                for tcf in self.testcase_filters.ALL_VALID_FILTERS
            ]
            LOG.info(
                f"Adding worksheets to {self.gsheet_options.__class__.__name__}. "
                f"Generated worksheet names: {worksheet_names}")
            for worksheet_name in worksheet_names:
                self.gsheet_options.add_worksheet(worksheet_name)
Example #10
 def __init__(self, output_dir: str, args, upstream_base_branch: str,
              upstream_repo: GitWrapper, downstream_repo: GitWrapper):
     self.output_dir = ProjectUtils.get_session_dir_under_child_dir(
         FileUtils.basename(output_dir))
     self.execution_mode = (ExecutionMode.MANUAL_BRANCH_MODE
                            if hasattr(args, "branches") and args.branches
                            else ExecutionMode.AUTO_BRANCH_MODE)
     self.downstream_branches = args.branches if hasattr(
         args, "branches") else []
     self.upstream_repo_path = upstream_repo.repo_path
     self.downstream_repo_path = downstream_repo.repo_path
     self.jira_id = args.jira_id
     self.output_dir = output_dir
     self.upstream_base_branch = upstream_base_branch
     self.force_mode = args.force_mode if hasattr(args,
                                                  "force_mode") else False
     self.ignore_changes = args.ignore_changes if hasattr(
         args, "ignore_changes") else False
     self.full_cmd: Optional[str] = None
     self._validate(downstream_repo)
     self.umbrella_result_basedir = FileUtils.join_path(
         self.output_dir, self.jira_id)
Example #11
 def output_dir(self):
     return ProjectUtils.get_test_output_child_dir(JENKINS_TEST_REPORTER)
Example #12
 def setup_dirs(self):
     self.project_out_root = ProjectUtils.get_output_basedir(PROJECT_NAME)
     self.yarn_patch_dir = ProjectUtils.get_output_child_dir(YARN_TASKS)
Example #13
 def setup_dirs():
     ProjectUtils.get_output_basedir(PROJECT_NAME)
     ProjectUtils.get_logs_dir()
Example #14
 def jira_patch_differ_dir(self):
     return ProjectUtils.get_output_child_dir(JIRA_PATCH_DIFFER)
Example #15
 def jira_umbrella_data_dir(self):
     return ProjectUtils.get_test_output_child_dir(JIRA_UMBRELLA_DATA)
Example #16
 def dummy_patches_dir(self):
     return ProjectUtils.get_test_output_child_dir(DUMMY_PATCHES)
Example #17
 def jenkins_test_reporter_output_dir(self):
     return ProjectUtils.get_output_child_dir(JENKINS_TEST_REPORTER)
Example #18
 def sandbox_repo_path(self):
     return ProjectUtils.get_test_output_child_dir(SANDBOX_REPO +
                                                   self.repo_postfix)
Example #19
 def branch_comparator_output_dir(self):
     return ProjectUtils.get_output_child_dir(BRANCH_COMPARATOR)
Example #20
 def db_copies_dir(self):
     return ProjectUtils.get_output_child_dir('db_copies')
Example #21
 def log_dir(self):
     return ProjectUtils.get_output_child_dir('logs')
Example #22
 def setup_dirs(self):
     self.project_out_root = ProjectUtils.get_output_basedir(PROJECT_NAME)
     self.search_basedir = self.options.search_basedir
     FileUtils.ensure_dir_created(self.search_basedir)
Example #23
 @classmethod
 def setUpClass(cls):
     # Invoke this to setup main output directory and avoid test failures while initing config
     cls.project_out_root = ProjectUtils.get_test_output_basedir(PROJECT_NAME)
     ProjectUtils.get_test_output_child_dir(JENKINS_TEST_REPORTER)
Example #24
 def saved_patches_dir(self):
     return ProjectUtils.get_test_output_child_dir(SAVED_PATCHES)
Example #25
 def exports_dir(self):
     return ProjectUtils.get_output_child_dir('exports')
Example #26
 def unit_test_result_aggregator_output_dir(self):
     return ProjectUtils.get_output_child_dir(UNIT_TEST_RESULT_AGGREGATOR)
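
Most of the shorter examples above simply delegate to ProjectUtils to create and return per-component output directories. A rough, purely illustrative sketch of what a get_output_child_dir-style helper could do (an assumption for illustration, not the actual python-commons implementation):

    import os

    PROJECT_BASEDIR = os.path.expanduser("~/example-project-output")  # placeholder base dir

    def get_output_child_dir(name: str) -> str:
        # Create the child directory under the project base dir if needed and return its path
        child_dir = os.path.join(PROJECT_BASEDIR, name)
        os.makedirs(child_dir, exist_ok=True)
        return child_dir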