Example #1
0
def parse(f, context, metadata_dict, suppress_handler, skip_handler, steps,
          processed_path_hashes):
    """
    Prints the results in the given file to the standard output in a human-
    readable format.

    :param f: Path of the input file; only '.plist' files are processed.
    :param context: Analysis context providing the severity map.
    :param metadata_dict: Metadata of the analysis run; may map plist files
                          to the source file that was analyzed
                          ('result_source_files').
    :param suppress_handler: Handler deciding which reports are suppressed.
    :param skip_handler: Handler deciding which files are skipped.
    :param steps: Whether the bug path steps should be printed.
    :param processed_path_hashes: Set of already seen report path hashes
                                  used for de-duplication.

    Returns a (report statistics, changed files) pair. If any referenced
    source file changed since the analysis, no statistics are produced and
    the set of changed files is returned instead.
    """

    if not f.endswith(".plist"):
        # Use lazy %-style arguments so the message is only formatted
        # when debug logging is actually enabled.
        LOG.debug("Skipping input file '%s' as it is not a plist.", f)
        return {}, set()

    LOG.debug("Parsing input file '%s'", f)

    rh = plist_parser.PlistToPlaintextFormatter(suppress_handler,
                                                skip_handler,
                                                context.severity_map,
                                                processed_path_hashes)

    rh.print_steps = steps

    # Set some variables of the result handler to use the saved file.

    analyzed_source_file = "UNKNOWN"
    if 'result_source_files' in metadata_dict and \
            f in metadata_dict['result_source_files']:
        analyzed_source_file = \
            metadata_dict['result_source_files'][f]

    files, reports = rh.parse(f)

    plist_mtime = util.get_last_mod_time(f)

    changed_files = set()
    for source_file in files:
        if plist_mtime is None:
            # Failed to get the modification time for
            # a file mark it as changed.
            changed_files.add(source_file)
            LOG.warning('%s is missing since the last analysis.', source_file)
            continue

        file_mtime = util.get_last_mod_time(source_file)
        if file_mtime > plist_mtime:
            changed_files.add(source_file)
            LOG.warning('%s did change since the last analysis.', source_file)

    if changed_files:
        # Reports would be stale; let the caller know which files changed.
        return {}, changed_files
    else:
        report_stats = rh.write(files, reports, analyzed_source_file)
        return report_stats, set()
Example #2
0
def parse(plist_file, metadata_dict, rh, file_report_map):
    """
    Prints the results in the given file to the standard output in a human-
    readable format.

    Returns the report statistics collected by the result handler.
    """

    if not plist_file.endswith(".plist"):
        LOG.debug("Skipping input file '%s' as it is not a plist.", plist_file)
        return set()

    LOG.debug("Parsing input file '%s'", plist_file)

    # Register the analyzed source file in the report map even when the
    # plist produces no reports for it.
    source_file_map = metadata_dict.get('result_source_files', {})
    if plist_file in source_file_map:
        file_report_map.setdefault(source_file_map[plist_file], [])

    files, reports = rh.parse(plist_file)
    plist_mtime = util.get_last_mod_time(plist_file)

    changed_files = set()
    for src_file in files:
        if plist_mtime is None:
            # Failed to get the modification time for
            # a file mark it as changed.
            changed_files.add(src_file)
            LOG.warning('%s is missing since the last analysis.', src_file)
        elif util.get_last_mod_time(src_file) > plist_mtime:
            changed_files.add(src_file)
            LOG.warning('%s did change since the last analysis.', src_file)

    if not changed_files:
        # Group the collected reports by the file they belong to.
        for report in reports:
            file_report_map.setdefault(report.file_path, []).append(report)

    return changed_files
Example #3
0
    def collect_file_hashes_from_plist(plist_file):
        """
        Collects file content hashes and last modification times of files which
        can be found in the given plist file. Results are accumulated into the
        enclosing scope's hash_to_file, file_to_hash, file_to_mtime and
        missing_source_files containers.

        :returns List of file paths which are in the processed plist file but
        missing from the user's disk.

        NOTE(review): if parsing raises, the error is logged and the function
        implicitly returns None (falsy), which callers cannot distinguish
        from "no missing files" — confirm this is intended.
        """
        missing_files = []
        try:
            files, _ = plist_parser.parse_plist(plist_file)

            for f in files:
                if not os.path.isfile(f):
                    missing_files.append(f)
                    missing_source_files.add(f)
                    continue

                content_hash = util.get_file_content_hash(f)
                hash_to_file[content_hash] = f
                file_to_hash[f] = content_hash
                file_to_mtime[f] = util.get_last_mod_time(f)

            return missing_files
        except Exception as ex:
            # Lazy %-style args: only formatted when the record is emitted.
            LOG.error('Parsing the plist failed: %s', ex)
Example #4
0
    def collect_file_hashes_from_plist(plist_file):
        """
        Collects file content hashes and last modification times of the files
        referenced by the given plist file into the enclosing scope's
        hash_to_file, file_to_hash and file_to_mtime containers.

        :returns True if every referenced file exists on disk, False if any
        file is missing or the plist could not be parsed.
        """
        try:
            files, _ = plist_parser.parse_plist(plist_file)

            for f in files:
                if not os.path.isfile(f):
                    return False

                content_hash = util.get_file_content_hash(f)
                hash_to_file[content_hash] = f
                file_to_hash[f] = content_hash
                file_to_mtime[f] = util.get_last_mod_time(f)

            return True
        except Exception as ex:
            # Lazy %-style args: only formatted when the record is emitted.
            LOG.error('Parsing the plist failed: %s', ex)
            # Make the failure value explicit: the original fell through and
            # returned None. False is equally falsy, so callers testing
            # truthiness behave the same, but the contract is now clear.
            return False
Example #5
0
def assemble_zip(inputs, zip_file, client):
    """
    Collect the analysis artifacts (plist report files, metadata.json,
    skip_file) and the source files they reference from the given input
    paths into 'zip_file', then compress the zip with zlib. Only source
    file contents the server reports as missing (via
    client.getMissingContentHashes) are added under 'root/'.

    :param inputs: Iterable of report files or report directories.
    :param zip_file: Path of the zip file to assemble.
    :param client: Server client used to query missing content hashes.
    :raises OSError: If an input path does not exist.

    Exits the process (sys.exit(1)) if any source file changed since the
    latest analysis, because the stored reports would be stale.
    """
    hash_to_file = {}
    # There can be files with same hash,
    # but different path.
    file_to_hash = {}
    file_to_mtime = {}
    missing_source_files = set()

    def collect_file_hashes_from_plist(plist_file):
        """
        Collects file content hashes and last modification times of files which
        can be found in the given plist file.

        :returns List of file paths which are in the processed plist file but
        missing from the user's disk, or None if the plist could not be
        parsed.
        """
        missing_files = []
        try:
            files, _ = plist_parser.parse_plist(plist_file)

            for f in files:
                if not os.path.isfile(f):
                    missing_files.append(f)
                    missing_source_files.add(f)
                    continue

                content_hash = util.get_file_content_hash(f)
                hash_to_file[content_hash] = f
                file_to_hash[f] = content_hash
                file_to_mtime[f] = util.get_last_mod_time(f)

            return missing_files
        except Exception as ex:
            LOG.error('Parsing the plist failed: %s', ex)
            # Return None so the caller can tell a parse failure apart
            # from "no missing files" and skip the broken plist.
            return None

    plist_report_files = []

    changed_files = set()
    for input_path in inputs:
        input_path = os.path.abspath(input_path)

        if not os.path.exists(input_path):
            raise OSError(errno.ENOENT, "Input path does not exist",
                          input_path)

        if os.path.isfile(input_path):
            files = [input_path]
        else:
            # Only the top level of a report directory is scanned.
            _, _, files = next(os.walk(input_path), ([], [], []))

        for f in files:
            plist_file = os.path.join(input_path, f)
            if f.endswith(".plist"):
                missing_files = collect_file_hashes_from_plist(plist_file)
                if missing_files is None:
                    # Parse failure: do not upload a plist whose referenced
                    # files could not be hashed.
                    continue

                if missing_files:
                    LOG.warning(
                        "Skipping '%s' because it refers "
                        "the following missing source files: %s", plist_file,
                        missing_files)
                    continue

                LOG.debug("Copying file '%s' to ZIP assembly dir...",
                          plist_file)
                plist_report_files.append(plist_file)

                # Compare source modification times against this plist.
                # Doing it only for plists (and only when the mtime is
                # known) avoids false positives from comparing against
                # metadata.json/skip_file and a TypeError on a None mtime.
                plist_mtime = util.get_last_mod_time(plist_file)
                if plist_mtime is not None:
                    for path, mtime in file_to_mtime.items():
                        if mtime > plist_mtime:
                            changed_files.add(path)
            elif f in ('metadata.json', 'skip_file'):
                plist_report_files.append(plist_file)

    if changed_files:
        changed_file_list = '\n'.join(' - ' + f for f in changed_files)
        LOG.warning("The following source file contents changed since the "
                    "latest analysis:\n%s\nPlease analyze your project "
                    "again to update the reports!", changed_file_list)
        sys.exit(1)

    with zipfile.ZipFile(zip_file, 'a', allowZip64=True) as zipf:
        for pl in plist_report_files:
            _, plist_filename = os.path.split(pl)
            zip_target = os.path.join('reports', plist_filename)
            zipf.write(pl, zip_target)

        if not hash_to_file:
            LOG.warning("There is no report to store. After uploading these "
                        "results the previous reports become resolved.")

        # Ask the server which file contents it does not have yet and ship
        # only those under 'root/' (paths made archive-relative).
        necessary_hashes = client.getMissingContentHashes(hash_to_file.keys())
        for f, h in file_to_hash.items():
            if h in necessary_hashes:
                LOG.debug("File contents for '%s' needed by the server", f)

                zipf.write(f, os.path.join('root', f.lstrip('/')))

        zipf.writestr('content_hashes.json', json.dumps(file_to_hash))

    # Compressing .zip file
    with open(zip_file, 'rb') as source:
        compressed = zlib.compress(source.read(), zlib.Z_BEST_COMPRESSION)

    with open(zip_file, 'wb') as target:
        target.write(compressed)

    LOG.debug("[ZIP] Mass store zip written at '%s'", zip_file)

    if missing_source_files:
        LOG.warning(
            "Missing source files: \n%s",
            '\n'.join(" - " + f_ for f_ in missing_source_files))