Example #1
def run_anchore_analyzers(staging_dirs, imageDigest, imageId, localconfig):
    outputdir = staging_dirs["outputdir"]
    unpackdir = staging_dirs["unpackdir"]
    copydir = staging_dirs["copydir"]
    configdir = localconfig["service_dir"]

    myconfig = localconfig.get("services", {}).get("analyzer", {})
    if not myconfig.get("enable_hints", False):
        # install an empty hints file to ensure that any discovered hint overrides are ignored during analysis
        with open(os.path.join(unpackdir, "anchore_hints.json"), "w") as OFH:
            OFH.write(json.dumps({}))

    # run analyzers
    anchore_module_root = resource_filename("anchore_engine", "analyzers")
    analyzer_root = os.path.join(anchore_module_root, "modules")
    for f in list_analyzers():
        cmdstr = " ".join(
            [f, configdir, imageId, unpackdir, outputdir, unpackdir])
        logger.info("Executing analyzer %s", f)
        timer = time.time()
        try:
            rc, sout, serr = utils.run_command(cmdstr)
            sout = utils.ensure_str(sout)
            serr = utils.ensure_str(serr)
            if rc != 0:
                raise Exception("command failed: cmd=" + str(cmdstr) +
                                " exitcode=" + str(rc) + " stdout=" +
                                str(sout).strip() + " stderr=" +
                                str(serr).strip())
            else:
                logger.debug("command succeeded: cmd=" + str(cmdstr) +
                             " stdout=" + str(sout).strip() + " stderr=" +
                             str(serr).strip())
        except Exception as err:
            logger.error("command failed with exception - " + str(err))
        logger.debug("timing: specific analyzer time: {} - {}".format(
            f, time.time() - timer))

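    # collect each analyzer module's key/value output files into a nested report dict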
    analyzer_report = collections.defaultdict(dict)
    for analyzer_output in os.listdir(
            os.path.join(outputdir, "analyzer_output")):
        for analyzer_output_el in os.listdir(
                os.path.join(outputdir, "analyzer_output", analyzer_output)):
            data = anchore_engine.analyzers.utils.read_kvfile_todict(
                os.path.join(outputdir, "analyzer_output", analyzer_output,
                             analyzer_output_el))
            if data:
                analyzer_report[analyzer_output][analyzer_output_el] = {
                    "base": data
                }

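    # catalog the unpacked image contents with syft and merge the results into the analyzer report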
    syft_results = anchore_engine.analyzers.syft.catalog_image(
        image=copydir, unpackdir=unpackdir)

    anchore_engine.analyzers.utils.merge_nested_dict(analyzer_report,
                                                     syft_results)

    return dict(analyzer_report)
Example #2
def get_config():
    ret = {}
    logger.debug("fetching local anchore configuration")
    # ask the locally installed anchore CLI to report its configuration as JSON
    cmd = ["anchore", "--json", "system", "status", "--conf"]
    try:
        rc, sout, serr = anchore_engine.utils.run_command_list(cmd)
        sout = utils.ensure_str(sout)
        serr = utils.ensure_str(serr)
        ret = json.loads(sout)
    except Exception as err:
        logger.error(str(err))

    return ret
Example #3
def get_tar_filenames(layertar):
    ret = []
    layertarfile = None
    try:
        logger.debug(
            "using tarfile library to get file names from tarfile={}".format(
                layertar))
        layertarfile = tarfile.open(layertar,
                                    mode="r",
                                    format=tarfile.PAX_FORMAT)
        ret = layertarfile.getnames()
    except Exception:
        # python tarfile fails to unpack some docker image layers due to a PAX header issue, try another method
        logger.debug(
            "using tar command to get file names from tarfile={}".format(
                layertar))
        tarcmd = "tar tf {}".format(layertar)
        try:
            ret = []
            rc, sout, serr = utils.run_command(tarcmd)
            sout = utils.ensure_str(sout)
            serr = utils.ensure_str(serr)
            if rc == 0 and sout:
                for line in sout.splitlines():
                    # strip any trailing slash (tar lists directories with one) so names match tarfile.getnames()
                    ret.append(re.sub("/+$", "", line))
            else:
                raise Exception("rc={} sout={} serr={}".format(rc, sout, serr))
        except Exception as err:
            logger.error("command failed with exception - " + str(err))
            raise err

    finally:
        if layertarfile:
            layertarfile.close()

    return ret
Example #4
def handle_tar_error(
    tarcmd,
    rc,
    sout,
    serr,
    unpackdir=None,
    rootfsdir=None,
    cachedir=None,
    layer=None,
    layertar=None,
    layers=None,
):
    # avoid a mutable default argument; treat a missing layer list as empty
    if layers is None:
        layers = []

    handled = False

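    # record any files/directories temporarily pulled in from lower layers while recovering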
    handled_post_metadata = {
        "temporary_file_adds": [],
        "temporary_dir_adds": [],
    }

    slinkre = "tar: (.*): Cannot open: File exists"
    hlinkre = "tar: (.*): Cannot hard link to .(.*).: No such file or directory"
    missingfiles = []
    missingdirs = []
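    # scan tar's stderr for two known failure patterns: "file exists" collisions and missing hard link targets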
    for errline in serr.splitlines():
        patt = re.match(slinkre, errline)
        patt1 = re.match(hlinkre, errline)
        if patt:
            matchfile = patt.group(1)
            logger.debug("found 'file exists' error on name: " +
                         str(matchfile))
            if matchfile:
                badfile = os.path.join(rootfsdir, patt.group(1))
                if os.path.exists(badfile):
                    logger.debug("removing hierarchy: " + str(badfile))
                    shutil.rmtree(badfile)
                    handled = True
        elif patt1:
            missingfile = patt1.group(2)
            basedir = os.path.dirname(missingfile)
            logger.debug(
                "found 'hard link' error on name: {}".format(missingfile))
            if not os.path.exists(os.path.join(rootfsdir, missingfile)):
                missingfiles.append(missingfile)

            missingdir = None
            if not os.path.exists(os.path.join(rootfsdir, basedir)):
                missingdir = basedir
                missingdirs.append(missingdir)

    # only move on to further processing if the error is still not handled
    if not handled:
        if missingfiles:
            logger.info(
                "found {} missing hardlink destination files to extract from lower layers"
                .format(len(missingfiles)))

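            # walk backwards from the failing layer toward the base image,
            # re-extracting the missing hard link targets from each lower layer tarball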
            for l in layers[layers.index("sha256:" + layer)::-1]:
                dighash, lname = l.split(":")
                ltar = get_layertarfile(unpackdir, cachedir, lname)

                tarcmd = "tar -C {} -x -f {}".format(rootfsdir, ltar)
                tarcmd_list = tarcmd.split() + missingfiles
                logger.debug(
                    "attempting to run command to extract missing hardlink targets from layer {}: {}....."
                    .format(l, tarcmd_list[:16]))

                rc, sout, serr = utils.run_command_list(tarcmd_list)
                sout = utils.ensure_str(sout)
                serr = utils.ensure_str(serr)
                # logger.debug("RESULT attempting to run command to extract missing hardlink target: {} : rc={} : serr={} : sout={}".format(tarcmd_list[:16], rc, serr, sout))

                newmissingfiles = []
                logger.debug(
                    "missing file count before extraction at layer {}: {}".
                    format(l, len(missingfiles)))
                for missingfile in missingfiles:
                    tmpmissingfile = os.path.join(rootfsdir, missingfile)
                    if os.path.exists(tmpmissingfile):
                        if (missingfile not in
                                handled_post_metadata["temporary_file_adds"]):
                            handled_post_metadata[
                                "temporary_file_adds"].append(missingfile)
                    else:
                        if missingfile not in newmissingfiles:
                            newmissingfiles.append(missingfile)
                missingfiles = newmissingfiles
                logger.debug(
                    "missing file count after extraction at layer {}: {}".
                    format(l, len(missingfiles)))

                newmissingdirs = []
                for missingdir in missingdirs:
                    tmpmissingdir = os.path.join(rootfsdir, missingdir)
                    if os.path.exists(tmpmissingdir):
                        if (missingdir not in
                                handled_post_metadata["temporary_dir_adds"]):
                            handled_post_metadata["temporary_dir_adds"].append(
                                missingdir)
                    else:
                        if missingdir not in newmissingdirs:
                            newmissingdirs.append(missingdir)
                missingdirs = newmissingdirs

                if not missingfiles:
                    logger.info(
                        "extraction of all missing files complete at layer {}".
                        format(l))
                    handled = True
                    break
                else:
                    logger.info(
                        "extraction of all missing files not complete at layer {}, moving on to next layer"
                        .format(l))

    logger.debug("tar error handled: {}".format(handled))
    return handled, handled_post_metadata