Esempio n. 1
0
    def test_old_intercept_build(self):
        """
        Test log file parsing escape behaviour with clang-5.0 intercept-build.
        """
        logfile = os.path.join(self.__test_files, "intercept-old.json")

        # The scan-build-py shipped with clang-5.0 produces a log that
        # contains -DVARIABLE=\"some value\" together with
        # --target=x86_64-linux-gnu; the define must reach the analyzer.
        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
        self.assertTrue(len(build_action.target) > 0)
        self.assertEqual(build_action.analyzer_options[0], '-DVARIABLE="some')

        # Repeat the check with a source path that contains a space.
        logfile = os.path.join(self.__test_files, "intercept-old-space.json")

        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a b.cpp')
        self.assertEqual(build_action.lang, 'c++')
Esempio n. 2
0
    def test_new_ldlogger(self):
        """
        Test log file parsing escape behaviour with after-#631 LD-LOGGER.
        """
        logfile = os.path.join(self.__test_files, "ldlogger-new.json")

        # Since http://github.com/Ericsson/codechecker/pull/631 the logger
        # records multiword arguments correctly, so the parsed define's
        # value is handed over to the analyzer. The log file contains
        # -DVARIABLE="some value" and --target=x86_64-linux-gnu.
        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
        self.assertTrue(len(build_action.target) > 0)
        self.assertEqual(build_action.analyzer_options[0],
                         '-DVARIABLE=some value')

        # Repeat the check with a source path that contains a space.
        logfile = os.path.join(self.__test_files, "ldlogger-new-space.json")

        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a b.cpp')
        self.assertEqual(build_action.lang, 'c++')
Esempio n. 3
0
    def test_new_intercept_build(self):
        """
        Test log file parsing escapes with upstream (GitHub) intercept-build.
        """
        logfile = os.path.join(self.__test_files, "intercept-new.json")

        # Upstream scan-build-py logs an argument vector rather than a
        # single command string. That vector holds the define as one
        # element, -DVARIABLE=\"some value\", plus the target triplet
        # --target=x86_64-linux-gnu; the define must reach the analyzer.
        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
        self.assertTrue(len(build_action.target) > 0)
        self.assertEqual(build_action.analyzer_options[0],
                         '-DVARIABLE="some value"')

        # Repeat the check with a source path that contains a space.
        logfile = os.path.join(self.__test_files, "intercept-new-space.json")

        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a b.cpp')
        self.assertEqual(build_action.lang, 'c++')
Esempio n. 4
0
    def test_old_intercept_build(self):
        """
        Test log file parsing escape behaviour with clang-5.0 intercept-build.
        """
        # FIXME: Yes, the json is actually bad! The space should have been
        #        escaped by intercept-build along with the backslash.
        # Scan-build-py shipping with clang-5.0 makes a logfile that contains:
        # -DVARIABLE=\"some value\" and --target=x86_64-linux-gnu
        logfile = os.path.join(self.__test_files, "intercept-old.json")

        build_actions, _ = log_parser.\
            parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, r'/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
        # The --target triplet logged above is expected to be parsed out of
        # the analyzer options into build_action.target.
        self.assertTrue(len(build_action.target) > 0)
        # FIXME: We should expect r'-DVARIABLE="some value"' with a fixed
        #        intercept-build.
        self.assertEqual(build_action.analyzer_options[0], r'-DVARIABLE="some')

        # Test source file with spaces.
        logfile = os.path.join(self.__test_files, "intercept-old-space.json")

        build_actions, _ = log_parser.\
            parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a b.cpp')
        self.assertEqual(build_action.lang, 'c++')
Esempio n. 5
0
def __get_run_name(input_list):
    """Create a runname for the stored analysis from the input list."""

    # Try to create a name from the metada JSON(s).
    names = set()
    for input_path in input_list:
        metafile = os.path.join(input_path, "metadata.json")
        if os.path.isdir(input_path) and os.path.exists(metafile):
            metajson = load_json_or_empty(metafile)

            if 'version' in metajson and metajson['version'] >= 2:
                for tool in metajson.get('tools', {}):
                    name = tool.get('run_name')
            else:
                name = metajson.get('name')

            if not name:
                name = "unnamed result folder"

            names.add(name)

    if len(names) == 1:
        name = names.pop()
        if name != "unnamed result folder":
            return name
    elif len(names) > 1:
        return "multiple projects: " + ', '.join(names)
    else:
        return False
Esempio n. 6
0
def process_config_file(args, subcommand_name):
    """ Handler to get config file options.

    Returns the options belonging to the given subcommand from the
    config file (JSON, or YAML when the file name ends in .yaml/.yml),
    or an empty result when no config file was given or it is missing.
    """
    if 'config_file' not in args:
        return {}

    config_file = args.config_file
    if config_file and os.path.exists(config_file):
        if config_file.endswith(('.yaml', '.yml')):
            with open(config_file, encoding='utf-8', errors='ignore') as f:
                # BaseLoader keeps every scalar as a plain string.
                cfg = yaml.load(f, Loader=yaml.BaseLoader)
        else:
            cfg = load_json_or_empty(config_file, default={})

        # The subcommand name is analyze but the
        # configuration section name is analyzer.
        if subcommand_name == 'analyze':
            options = get_analyze_options(cfg)
        elif subcommand_name == 'check':
            options = [*get_analyze_options(cfg), *cfg.get("parse", [])]
        else:
            options = cfg.get(subcommand_name, [])

        if options:
            LOG.info(
                "Extending command line options with %s options from "
                "'%s' file: %s", subcommand_name, args.config_file,
                ' '.join(options))

        return options

    # Fix: previously the function fell through and returned None when
    # the config file was unset or missing, while the 'config_file not
    # in args' path returned {}. Return an empty result consistently.
    return {}
Esempio n. 7
0
def get_metadata(dir_path: str) -> Optional[Dict]:
    """ Get metadata from the given dir path or None if not exists. """
    metadata_file = os.path.join(dir_path, "metadata.json")

    # Guard clause: nothing to load without the file.
    if not os.path.exists(metadata_file):
        return None

    return load_json_or_empty(metadata_file)
Esempio n. 8
0
    def __set_version(self):
        """
        Get the package version from the version config file.

        Reads self.version_file (JSON) and populates the private
        version, build date and git metadata fields. Exits the process
        when the file cannot be loaded or is empty.
        """
        vfile_data = load_json_or_empty(self.version_file)

        if not vfile_data:
            # The package is unusable without version data; bail out.
            sys.exit(1)

        package_version = vfile_data['version']
        package_build_date = vfile_data['package_build_date']
        # Git metadata is optional in the version file.
        package_git_hash = vfile_data.get('git_hash')
        package_git_tag = vfile_data.get('git_describe', {}).get('tag')
        package_git_dirtytag = vfile_data.get('git_describe', {}).get('dirty')

        # "major.minor.revision" -- string concatenation implies the
        # components are stored as strings in the version file.
        self.__package_version = package_version['major'] + '.' + \
            package_version['minor'] + '.' + \
            package_version['revision']

        self.__package_build_date = package_build_date
        self.__package_git_hash = package_git_hash

        self.__package_git_tag = package_git_tag
        # In debug mode expose the "dirty" describe tag instead of the
        # plain one.
        if (LOG.getEffectiveLevel() == logger.DEBUG
                or LOG.getEffectiveLevel() == logger.DEBUG_ANALYZER):
            self.__package_git_tag = package_git_dirtytag
Esempio n. 9
0
    def test_source_file_contains_at_sign(self):
        """
        Test source file which path contains '@' sign in path.

        Source file path can contain '@' sign which doesn't mean it is a
        response file.
        """
        with tempfile.TemporaryDirectory(suffix='@') as tmp_dir:
            src_file_path = shutil.copy(self.src_file_path, tmp_dir)

            with open(self.compile_command_file_path,
                      "w",
                      encoding="utf-8",
                      errors="ignore") as f:
                f.write(
                    json.dumps([
                        dict(directory=tmp_dir,
                             command=f"g++ {src_file_path}",
                             file=src_file_path)
                    ]))

            # Fix: parse while the temporary directory still exists.
            # Previously this ran after the 'with' block, i.e. after
            # cleanup, so the logged source file had already been
            # deleted when the log was parsed.
            build_actions, _ = log_parser.parse_unique_log(
                load_json_or_empty(self.compile_command_file_path),
                self.__this_dir)

            self.assertEqual(len(build_actions), 1)

            build_action = build_actions[0]
            self.assertEqual(build_action.source, src_file_path)
Esempio n. 10
0
    def __init__(self):
        """ Initialize web context. """
        # Install paths injected through the environment by the launcher.
        self._lib_dir_path = os.environ.get('CC_LIB_DIR', '')
        self._data_files_dir_path = os.environ.get('CC_DATA_FILES_DIR', '')

        lcfg_dict = self.__get_package_layout()
        self.pckg_layout = lcfg_dict['runtime']

        # Use this environment variable for testing purposes only. This
        # variable helps to configure which labels to use in this context.
        labels_dir = os.path.join(self._data_files_dir_path, 'config',
                                  'labels')
        if 'CC_TEST_LABELS_DIR' in os.environ:
            labels_dir = os.environ['CC_TEST_LABELS_DIR']

        self._checker_labels = CheckerLabels(labels_dir)
        self.__system_comment_map = \
            load_json_or_empty(self.system_comment_map_file, {})
        self.__git_commit_urls = self.__get_git_commit_urls()
        # Version fields are populated by __set_version() below.
        self.__package_version = None
        self.__package_build_date = None
        self.__package_git_hash = None

        # This should be initialized in command line scripts based on the
        # given CLI options.
        self.codechecker_workspace = None

        self.__set_version()
Esempio n. 11
0
    def test_response_file_contains_source_file(self):
        """
        Test response file where the source file comes from the response file.
        """
        # The compilation database entry references the response file both
        # in the command and in the "file" field.
        compile_cmd = dict(directory=self.tmp_dir,
                           command="g++ @{0}".format(self.rsp_file_path),
                           file="@{0}".format(self.rsp_file_path))
        with open(self.compile_command_file_path, "w", encoding="utf-8",
                  errors="ignore") as build_json:
            build_json.write(json.dumps([compile_cmd]))

        # The response file provides both the define and the source path.
        with open(self.rsp_file_path, "w", encoding="utf-8",
                  errors="ignore") as rsp_file:
            rsp_file.write("""-DVARIABLE="some value" {0}""".format(
                self.src_file_path))

        logfile = os.path.join(self.compile_command_file_path)

        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(len(build_action.analyzer_options), 1)
        self.assertEqual(build_action.source, self.src_file_path)
        self.assertEqual(build_action.analyzer_options[0], '-DVARIABLE=some')
Esempio n. 12
0
    def test_old_ldlogger(self):
        """
        Test log file parsing escape behaviour with pre-2017 Q2 LD-LOGGER.
        """
        logfile = os.path.join(self.__test_files, "ldlogger-old.json")

        # Loggers predating http://github.com/Ericsson/codechecker/pull/631
        # escaped multiword arguments so that shlex-based parsing made
        # CodeChecker classify them as a "file" instead of a compile
        # option; such arguments were then dropped by the command builder,
        # losing defines and hurting analysis accuracy.
        #
        # The log contains "-DVARIABLE="some value"". There is no good way
        # to undo that escaping in log_parser or option_parser, so the
        # accepted outcome for now is a non-failing stalemate: the define
        # is treated as a file and ignored.
        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        build_action = build_actions[0]

        self.assertEqual(build_action.source, '/tmp/a.cpp')
        self.assertEqual(len(build_action.analyzer_options), 1)
Esempio n. 13
0
def process_config_file(args, subcommand_name):
    """ Handler to get config file options.

    Returns the options belonging to the given subcommand from the JSON
    config file, or an empty result when no config file was given or it
    does not exist.
    """
    if 'config_file' not in args:
        return {}

    if args.config_file and os.path.exists(args.config_file):
        cfg = load_json_or_empty(args.config_file, default={})

        # The subcommand name is analyze but the
        # configuration section name is analyzer.
        if subcommand_name == 'analyze':
            options = get_analyze_options(cfg)
        elif subcommand_name == 'check':
            options = [*get_analyze_options(cfg), *cfg.get("parse", [])]
        else:
            options = cfg.get(subcommand_name, [])

        if options:
            LOG.info(
                "Extending command line options with %s options from "
                "'%s' file: %s", subcommand_name, args.config_file,
                ' '.join(options))

        return options

    # Fix: previously the function fell through and returned None when
    # the config file was unset or missing, while the 'config_file not
    # in args' path returned {}. Return an empty result consistently.
    return {}
Esempio n. 14
0
    def __union_label_files(
            self, label_files: Iterable[str]
    ) -> Dict[str, DefaultDict[str, List[str]]]:
        """
        Build the union of the given label files: a mapping from each
        analyzer name to the collection of its checkers and their labels.

        {
            "analyzer1": {
                "checker1": [ ... ]
                "checker2": [ ... ]
            },
            "analyzer2": {
                ...
            }
        }
        """
        all_labels = {}

        for label_file in label_files:
            data = load_json_or_empty(label_file)

            checker_labels = defaultdict(list)
            for checker, labels in data['labels'].items():
                checker_labels[checker].extend(labels)

            all_labels[data['analyzer']] = checker_labels

        return all_labels
Esempio n. 15
0
    def __get_git_commit_urls(self):
        """ Read commit URL patterns from the configuration file and
        pre-compile their regexes in place. """
        commit_urls = load_json_or_empty(self.git_commit_urls_file, [])

        for entry in commit_urls:
            entry["regex"] = re.compile(entry["regex"])

        return commit_urls
Esempio n. 16
0
def get_instances(folder=None):
    """Returns the list of running servers for the current user."""

    # Purely a read operation: the descriptor file is never written here.
    descriptor = __get_instance_descriptor_path(folder)
    instances = load_json_or_empty(descriptor, {}, lock=True)

    # Keep only the instances whose process is actually alive.
    running = []
    for instance in instances:
        if __check_instance(instance['hostname'], instance['pid']):
            running.append(instance)
    return running
Esempio n. 17
0
    def load_compiler_info(file_path: str):
        """Load compiler information from a file."""
        ICI = ImplicitCompilerInfo
        ICI.compiler_info = {}

        # Each key is itself a JSON-encoded triple; decode it and rebuild
        # the specifier key object.
        for raw_key, info in load_json_or_empty(file_path, {}).items():
            parts = json.loads(raw_key)
            key = ICI.ImplicitInfoSpecifierKey(parts[0], parts[1],
                                               tuple(parts[2]))
            ICI.compiler_info[key] = info
Esempio n. 18
0
    def __get_package_layout(self):
        """ Get package layout configuration. """
        layout_cfg_file = os.path.join(
            self._data_files_dir_path, "config", "package_layout.json")

        LOG.debug('Reading config: %s', layout_cfg_file)

        lcfg_dict = load_json_or_empty(layout_cfg_file)
        if lcfg_dict:
            return lcfg_dict

        raise ValueError(f"No configuration file '{layout_cfg_file}' can "
                         f"be found or it is empty!")
Esempio n. 19
0
    def __get_package_config(self):
        """ Get package configuration. """
        pckg_config_file = os.path.join(
            self._data_files_dir_path, "config", "config.json")

        LOG.debug('Reading config: %s', pckg_config_file)

        cfg_dict = load_json_or_empty(pckg_config_file)
        if not cfg_dict:
            raise ValueError(f"No configuration file '{pckg_config_file}' can "
                             f"be found or it is empty!")

        LOG.debug(cfg_dict)
        return cfg_dict
Esempio n. 20
0
    def test_response_file_contains_multiple_source_files(self):
        """
        Test response file where multiple source files come from the response
        file.
        """
        # The compilation database entry references the response file both
        # in the command and in the "file" field.
        with open(self.compile_command_file_path,
                  "w",
                  encoding="utf-8",
                  errors="ignore") as build_json:
            build_json.write(
                json.dumps([
                    dict(directory=self.tmp_dir,
                         command="g++ @{0}".format(self.rsp_file_path),
                         file="@{0}".format(self.rsp_file_path))
                ]))

        # Two real source files that the response file will reference.
        a_file_path = os.path.join(self.tmp_dir, "a.cpp")
        with open(a_file_path, "w", encoding="utf-8",
                  errors="ignore") as src_file:
            src_file.write("int main() { return 0; }")

        b_file_path = os.path.join(self.tmp_dir, "b.cpp")
        with open(b_file_path, "w", encoding="utf-8",
                  errors="ignore") as src_file:
            src_file.write("void foo() {}")

        # The response file supplies the define plus both source paths.
        with open(self.rsp_file_path, "w", encoding="utf-8",
                  errors="ignore") as rsp_file:
            rsp_file.write("""-DVARIABLE="some value" {0} {1}""".format(
                a_file_path, b_file_path))

        logfile = os.path.join(self.compile_command_file_path)

        build_actions, _ = log_parser. \
            parse_unique_log(load_json_or_empty(logfile), self.__this_dir)

        # One build action per source file found in the response file.
        self.assertEqual(len(build_actions), 2)

        a_build_action = [b for b in build_actions
                          if b.source == a_file_path][0]
        self.assertEqual(len(a_build_action.analyzer_options), 1)
        self.assertEqual(a_build_action.analyzer_options[0],
                         '-DVARIABLE=some value')

        b_build_action = [b for b in build_actions
                          if b.source == b_file_path][0]
        self.assertEqual(len(b_build_action.analyzer_options), 1)
        self.assertEqual(b_build_action.analyzer_options[0],
                         '-DVARIABLE=some value')
Esempio n. 21
0
    def test_include_rel_to_abs(self):
        """
        Test working directory prepending to relative include paths.
        """
        logfile = os.path.join(self.__test_files, "include.json")

        build_actions, _ = log_parser.parse_unique_log(
            load_json_or_empty(logfile), self.__this_dir)
        options = build_actions[0].analyzer_options

        # Relative '-I' paths must have been made absolute.
        self.assertEqual(len(options), 4)
        self.assertEqual(options[0], '-I')
        self.assertEqual(options[1], '/include')
        self.assertEqual(options[2], '-I/include')
        self.assertEqual(options[3], '-I/tmp')
Esempio n. 22
0
 def __get_config_dict(self):
     """
     Get server config information from the configuration file. Raise
     ValueError if the configuration file is invalid.
     """
     LOG.debug(self.__configuration_file)
     cfg_dict = load_json_or_empty(self.__configuration_file, {},
                                   'server configuration')
     if cfg_dict != {}:
         # The server config may hold secrets; ensure it is only
         # readable/writable by its owner.
         check_file_owner_rw(self.__configuration_file)
     else:
         # If the configuration dict is empty, it means a JSON couldn't
         # have been parsed from it.
         raise ValueError("Server configuration file was invalid, or "
                          "empty.")
     return cfg_dict
Esempio n. 23
0
    def __get_tool_info(self) -> Tuple[str, str]:
        """ Get tool info.

        If this was called through CodeChecker, this function will return
        CodeChecker information, otherwise this tool (report-converter)
        information.
        """
        # A set CC_DATA_FILES_DIR indicates a CodeChecker environment.
        data_files_dir_path = os.environ.get('CC_DATA_FILES_DIR')
        if data_files_dir_path:
            version_file = os.path.join(
                data_files_dir_path, 'config', 'analyzer_version.json')
            if os.path.exists(version_file):
                version = load_json_or_empty(version_file, {}).get('version')
                if version:
                    version_str = f"{version['major']}." \
                        f"{version['minor']}.{version['revision']}"
                    return 'CodeChecker', version_str

        return __title__, __version__
Esempio n. 24
0
    def __init__(self, metadata_file_path):
        """ Initialize a metadata info parser.

        Loads the given metadata file (if it exists) and dispatches to
        the v1 or v2 processor based on its 'version' field.
        """
        self.cc_version: CodeCheckerVersion = None
        self.check_commands: CheckCommands = []
        self.check_durations: CheckDurations = []
        self.analyzer_statistics: AnalyzerStatistics = {}

        self.checkers: MetadataCheckers = {}
        self.enabled_checkers: EnabledCheckers = set()
        self.disabled_checkers: DisabledCheckers = set()
        self.checker_to_analyzer: CheckerToAnalyzer = dict()

        # Raw parsed content of the metadata file ({} when missing).
        self.__metadata_dict = {}
        if os.path.isfile(metadata_file_path):
            self.__metadata_dict = load_json_or_empty(metadata_file_path, {})

            # v2 metadata carries an explicit 'version' key; v1 does not.
            if 'version' in self.__metadata_dict:
                self.__process_metadata_info_v2()
            else:
                self.__process_metadata_info_v1()
Esempio n. 25
0
    def __init__(self, checker_labels_dir: str):
        """ Load checker labels from the given directory.

        Expects label JSON files under <checker_labels_dir>/analyzers/
        and an optional <checker_labels_dir>/descriptions.json.
        """
        if not os.path.isdir(checker_labels_dir):
            raise NotADirectoryError(
                f'{checker_labels_dir} is not a directory.')

        label_json_files: Iterable[str] = os.listdir(
            os.path.join(checker_labels_dir, 'analyzers'))

        self.__descriptions = {}

        # The descriptions file is optional.
        if 'descriptions.json' in os.listdir(checker_labels_dir):
            self.__descriptions = load_json_or_empty(
                os.path.join(checker_labels_dir, 'descriptions.json'))

        # Turn the bare file names into full paths.
        label_json_files = map(
            lambda f: os.path.join(checker_labels_dir, 'analyzers', f),
            label_json_files)

        self.__data = self.__union_label_files(label_json_files)
        self.__check_json_format(self.__data)
Esempio n. 26
0
    def load_compiler_info(filename, compiler):
        """Load compiler information from a file.

        Reads the include paths, language standard and target for both
        the C and C++ languages of the given compiler from the info file
        and caches them in ImplicitCompilerInfo.compiler_info.
        """
        contents = load_json_or_empty(filename, {})
        compiler_info = contents.get(compiler)
        if compiler_info is None:
            LOG.error("Could not find compiler %s in file %s", compiler,
                      filename)
            return

        ICI = ImplicitCompilerInfo

        if not ICI.compiler_info.get(compiler):
            ICI.compiler_info[compiler] = defaultdict(dict)

        def load_for_language(language):
            """Copy one language's info into the cache for 'compiler'.

            Previously this logic was duplicated verbatim for C and C++.
            """
            cached = ICI.compiler_info[compiler][language]
            cached['compiler_includes'] = []

            lang_data = compiler_info.get(language)
            if not lang_data:
                return

            for element in map(shlex.split,
                               lang_data.get('compiler_includes')):
                # Drop the '-isystem' flags; only the paths are cached.
                cached['compiler_includes'].extend(
                    x for x in element if x != '-isystem')

            cached['compiler_standard'] = lang_data.get('compiler_standard')
            cached['target'] = lang_data.get('target')

        load_for_language(ICI.c())
        load_for_language(ICI.cpp())
Esempio n. 27
0
def enable_storage_of_analysis_statistics(workspace):
    """
    Enables storing analysis statistics information for the server.
    """
    server_config_filename = "server_config.json"

    # Start from the packaged default server configuration.
    packaged_cfg = os.path.join(codechecker_package(), 'config',
                                server_config_filename)
    shutil.copy(packaged_cfg, workspace)

    server_cfg_file = os.path.join(workspace, server_config_filename)

    # Point the statistics directory into the workspace.
    scfg_dict = load_json_or_empty(server_cfg_file, {})
    scfg_dict["store"]["analysis_statistics_dir"] = \
        os.path.join(workspace, 'analysis_statistics')

    with open(server_cfg_file, 'w', encoding="utf-8", errors="ignore") as scfg:
        json.dump(scfg_dict, scfg, indent=2, sort_keys=True)
Esempio n. 28
0
def merge_metadata_json(metadata_files, num_of_report_dir=1):
    """ Merge content of multiple metadata files and return it as json. """
    if not metadata_files:
        return {}

    merged = {
        'version': 2,
        'num_of_report_dir': num_of_report_dir,
        'tools': []}

    for metadata_file in metadata_files:
        try:
            # Normalize each file to v2 and collect its tool entries.
            metadata = metadata_v1_to_v2(
                load_json_or_empty(metadata_file, {}))
            merged['tools'].extend(metadata['tools'])
        except Exception as ex:
            LOG.warning('Failed to parse %s file with the following error: %s',
                        metadata_file, str(ex))

    return merged
Esempio n. 29
0
def enable_auth(workspace):
    """
    Create a dummy authentication-enabled configuration and
    an auth-enabled server.

    Running the tests only work if the initial value (in package
    server_config.json) is FALSE for authentication.enabled.
    """

    server_config_filename = "server_config.json"

    # Start from the packaged default server configuration.
    cc_package = codechecker_package()
    original_auth_cfg = os.path.join(cc_package, 'config',
                                     server_config_filename)

    shutil.copy(original_auth_cfg, workspace)

    server_cfg_file = os.path.join(workspace, server_config_filename)

    # Turn on dictionary-based authentication with fixed test users.
    scfg_dict = load_json_or_empty(server_cfg_file, {})
    scfg_dict["authentication"]["enabled"] = True
    scfg_dict["authentication"]["method_dictionary"]["enabled"] = True
    scfg_dict["authentication"]["method_dictionary"]["auths"] = \
        ["cc:test", "john:doe", "admin:admin123", "colon:my:password",
         "admin_group_user:admin123", "regex_admin:blah",
         "permission_view_user:pvu"]
    scfg_dict["authentication"]["method_dictionary"]["groups"] = \
        {"admin_group_user": ["admin_GROUP"]}
    scfg_dict["authentication"]["regex_groups"]["enabled"] = True

    with open(server_cfg_file, 'w', encoding="utf-8", errors="ignore") as scfg:
        json.dump(scfg_dict, scfg, indent=2, sort_keys=True)

    # Create a root user.
    root_file = os.path.join(workspace, 'root.user')
    with open(root_file, 'w', encoding='utf-8', errors='ignore') as rootf:
        rootf.write(f"root:{sha256(b'root:root').hexdigest()}")
    # Restrict the credentials file to the owner only.
    os.chmod(root_file, stat.S_IRUSR | stat.S_IWUSR)
Esempio n. 30
0
    def __init__(self):
        """ Load the client-side session configuration.

        Reads the user's saved credentials (if any) and the local token
        storage file, creating the latter when it is missing or invalid.
        """
        LOG.debug("Loading clientside session config.")

        # Check whether user's configuration exists.
        session_cfg_file = get_password_file()
        LOG.info("Checking local passwords or tokens in %s", session_cfg_file)

        scfg_dict = {}

        # A commonly mistyped variant of the config file name; detected
        # below only to warn the user.
        user_home = os.path.expanduser("~")
        mistyped_cfg_file = os.path.join(user_home,
                                         ".codechecker.password.json")

        if os.path.exists(session_cfg_file):
            # Credentials are secrets: the file must be owner-only.
            check_file_owner_rw(session_cfg_file)
            scfg_dict = load_json_or_empty(session_cfg_file, {},
                                           "user authentication")
            scfg_dict['credentials'] = \
                simplify_credentials(scfg_dict['credentials'])
            if not scfg_dict['credentials']:
                LOG.info("No saved tokens.")
            else:
                LOG.debug("Tokens or passwords were found for these hosts:")
                for k, v in scfg_dict['credentials'].items():
                    # Only the user name is logged, never the secret part.
                    user, _ = v.split(":")
                    LOG.debug("  user '%s' host '%s'", user, k)
        elif os.path.exists(mistyped_cfg_file):
            LOG.warning("Typo in file name! Rename '%s' to '%s'.",
                        mistyped_cfg_file, session_cfg_file)
        else:
            LOG.info("Password file not found.")

        if not scfg_dict.get('credentials'):
            scfg_dict['credentials'] = {}

        self.__save = scfg_dict
        self.__autologin = scfg_dict.get('client_autologin', True)
        # Check and load token storage for user.
        self.token_file = get_session_file()
        LOG.info("Checking for local valid sessions.")

        # 'a+' creates the file when missing; seek(0) rewinds so the
        # whole existing content can be read back.
        with open(self.token_file, 'a+',
                  encoding="utf-8", errors="ignore") as f:
            f.seek(0)

            try:
                # Exclusive lock: other CodeChecker processes may touch
                # the token file concurrently.
                portalocker.lock(f, portalocker.LOCK_EX)

                token_dict = json.loads(f.read())

                check_file_owner_rw(self.token_file)

                self.__tokens = token_dict.get('tokens', {})
                LOG.debug("Found session information for these hosts:")
                for k, _ in self.__tokens.items():
                    LOG.debug("  %s", k)
            except json.JSONDecodeError:
                # Empty or corrupt token file: reinitialize it with an
                # empty token map and owner-only permissions.
                json.dump({'tokens': {}}, f)
                os.chmod(self.token_file, stat.S_IRUSR | stat.S_IWUSR)

                self.__tokens = {}
            finally:
                portalocker.unlock(f)