Example #1
def get_tar_filenames(layertar):
    ret = []
    layertarfile = None
    try:
        logger.debug("using tarfile library to get file names")
        layertarfile = tarfile.open(layertar,
                                    mode='r',
                                    format=tarfile.PAX_FORMAT)
        ret = layertarfile.getnames()
    except Exception:
        # the python tarfile module fails to read some docker image layers due to a PAX header issue, so fall back to the tar command
        logger.debug("using tar command to get file names")
        tarcmd = "tar tf {}".format(layertar)
        try:
            ret = []
            rc, sout, serr = utils.run_command(tarcmd)
            if rc == 0 and sout:
                for line in sout.splitlines():
                    line = re.sub("/+$", "", line)
                    ret.append(line)
            else:
                raise Exception("rc={} sout={} serr={}".format(rc, sout, serr))
        except Exception as err:
            logger.error("command failed with exception - " + str(err))
            raise err

    finally:
        if layertarfile:
            layertarfile.close()

    return ret
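A minimal, self-contained sketch (not from the original source) of the tarfile-based listing that get_tar_filenames() tries first; the in-memory archive and member name below are illustrative only.

import io
import tarfile

# build a tiny in-memory tar archive so getnames() has something to return
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w", format=tarfile.PAX_FORMAT) as tf:
    payload = b"hello"
    info = tarfile.TarInfo(name="etc/example.conf")
    info.size = len(payload)
    tf.addfile(info, io.BytesIO(payload))

# list member names the same way the try-branch above does
buf.seek(0)
with tarfile.open(fileobj=buf, mode="r", format=tarfile.PAX_FORMAT) as tf:
    print(tf.getnames())  # ['etc/example.conf']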
Example #2
def manifest_to_digest_shellout(rawmanifest):
    ret = None
    tmpmanifest = None
    try:
        fd, tmpmanifest = tempfile.mkstemp()
        # rawmanifest may arrive as str; os.write() requires bytes
        os.write(fd, rawmanifest if isinstance(rawmanifest, bytes) else rawmanifest.encode("utf-8"))
        os.close(fd)

        cmd = "skopeo manifest-digest {}".format(tmpmanifest)
        rc, sout, serr = run_command(cmd)
        if rc == 0 and re.match("^sha256:.*", sout):
            ret = sout.strip()
        else:
            logger.warn(
                "failed to calculate digest from schema v1 manifest: cmd={} rc={} sout={} serr={}"
                .format(cmd, rc, sout, serr))
            raise Exception(
                "failed to calculate digest from schema v1 manifest")
    except Exception as err:
        raise err
    finally:
        if tmpmanifest:
            os.remove(tmpmanifest)

    return ret
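A self-contained sketch (not from the original source) of the temp-file-plus-shellout pattern used above: write the raw bytes to a file created with tempfile.mkstemp(), run an external command on it, and always remove the file in the finally block. The sha256sum command is an illustrative stand-in that assumes a typical Linux host; it is not claimed to reproduce skopeo's manifest-digest behavior.

import os
import subprocess
import tempfile

def digest_via_shellout(raw: bytes) -> str:
    tmp_path = None
    try:
        fd, tmp_path = tempfile.mkstemp()
        os.write(fd, raw)
        os.close(fd)
        # sha256sum prints "<hexdigest>  <filename>"
        proc = subprocess.run(["sha256sum", tmp_path], capture_output=True, text=True, check=True)
        return "sha256:" + proc.stdout.split()[0]
    finally:
        if tmp_path:
            os.remove(tmp_path)

# digest_via_shellout(b'{"schemaVersion": 1}') returns a string of the form "sha256:<hexdigest>"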
Example #3
def run_anchore_analyzers(staging_dirs, imageDigest, imageId):
    outputdir = staging_dirs['outputdir']
    unpackdir = staging_dirs['unpackdir']
    copydir = staging_dirs['copydir']

    # run analyzers
    anchore_module_root = resource_filename("anchore", "anchore-modules")
    analyzer_root = os.path.join(anchore_module_root, "analyzers")
    for f in os.listdir(analyzer_root):
        thecmd = os.path.join(analyzer_root, f)
        if re.match(r".*\.py$", thecmd):
            cmdstr = " ".join(
                [thecmd, imageId, unpackdir, outputdir, unpackdir])
            if True:
                try:
                    rc, sout, serr = utils.run_command(cmdstr)
                    if rc != 0:
                        raise Exception("command failed: cmd=" + str(cmdstr) +
                                        " exitcode=" + str(rc) + " stdout=" +
                                        str(sout).strip() + " stderr=" +
                                        str(serr).strip())
                    else:
                        logger.debug("command succeeded: cmd=" + str(cmdstr) +
                                     " stdout=" + str(sout).strip() +
                                     " stderr=" + str(serr).strip())
                except Exception as err:
                    logger.error("command failed with exception - " + str(err))
                    #raise err

    analyzer_manifest = {}
    #TODO populate analyzer_manifest?
    analyzer_report = {}
    for analyzer_output in os.listdir(
            os.path.join(outputdir, "analyzer_output")):
        if analyzer_output not in analyzer_report:
            analyzer_report[analyzer_output] = {}

        for analyzer_output_el in os.listdir(
                os.path.join(outputdir, "analyzer_output", analyzer_output)):
            if analyzer_output_el not in analyzer_report[analyzer_output]:
                analyzer_report[analyzer_output][analyzer_output_el] = {
                    'base': {}
                }

            data = read_kvfile_todict(
                os.path.join(outputdir, "analyzer_output", analyzer_output,
                             analyzer_output_el))
            if data:
                analyzer_report[analyzer_output][analyzer_output_el][
                    'base'] = data
            else:
                analyzer_report[analyzer_output].pop(analyzer_output_el, None)

        if not analyzer_report[analyzer_output]:
            analyzer_report.pop(analyzer_output, None)

    return analyzer_report
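A condensed sketch (not from the original source) of the report-building loop above: scan analyzer_output/<analyzer>/<file>, parse each file once, and drop empty entries. parse_kvfile is a hypothetical stand-in for read_kvfile_todict(), which is not shown here, and assumes a simple "key value" line format.

import os

def parse_kvfile(path):
    # assumed format: one "key value" pair per line (stand-in for read_kvfile_todict)
    out = {}
    with open(path) as fh:
        for line in fh:
            line = line.strip()
            if line:
                key, _, value = line.partition(" ")
                out[key] = value
    return out

def build_analyzer_report(outputdir):
    report = {}
    base = os.path.join(outputdir, "analyzer_output")
    for analyzer in os.listdir(base):
        per_analyzer = {}
        for el in os.listdir(os.path.join(base, analyzer)):
            data = parse_kvfile(os.path.join(base, analyzer, el))
            if data:
                per_analyzer[el] = {'base': data}
        if per_analyzer:
            report[analyzer] = per_analyzer
    return report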
Example #4
def handle_tar_error(tarcmd, rc, sout, serr, unpackdir=None, rootfsdir=None, layer=None, layertar=None, layers=[]):
    handled = False
    handled_post_metadata = {}

    try:
        slinkre = "tar: (.*): Cannot open: File exists"
        hlinkre = "tar: (.*): Cannot hard link to `(.*)': No such file or directory"

        for errline in serr.splitlines():
            patt = re.match(slinkre, errline)
            patt1 = re.match(hlinkre, errline)
            if patt:
                matchfile = patt.group(1)
                logger.debug("found 'file exists' error on name: " + str(matchfile))
                if matchfile:
                    badfile = os.path.join(rootfsdir, patt.group(1))
                    if os.path.exists(badfile):
                        logger.debug("removing hierarchy: " + str(badfile))
                        shutil.rmtree(badfile)
                        handled = True
            elif patt1:
                missingfile = patt1.group(2)
                basedir = os.path.dirname(missingfile)
                logger.debug("found 'hard link' error on name: {}".format(missingfile))
                if not os.path.exists(os.path.join(rootfsdir, missingfile)):
                    for l in layers[layers.index("sha256:"+layer)::-1]:

                        missingdir = None
                        if not os.path.exists(os.path.join(rootfsdir, basedir)):
                            missingdir = basedir

                        tarcmd = "tar -C {} -x -f {} {}".format(rootfsdir, layertar, missingfile)
                        rc, sout, serr = utils.run_command(tarcmd)
                        sout = utils.ensure_str(sout)
                        serr = utils.ensure_str(serr)
                        if rc == 0:
                            if not handled_post_metadata.get('temporary_file_adds', False):
                                handled_post_metadata['temporary_file_adds'] = []
                            handled_post_metadata['temporary_file_adds'].append(missingfile)

                            if missingdir:
                                if not handled_post_metadata.get('temporary_dir_adds', False):
                                    handled_post_metadata['temporary_dir_adds'] = []
                                handled_post_metadata['temporary_dir_adds'].append(missingdir)

                            handled = True
                            break

    except Exception as err:
        raise err

    logger.debug("tar error handled: {}".format(handled))
    return handled, handled_post_metadata
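A small, self-contained sketch (not from the original source) of the stderr pattern matching above; the sample error lines are fabricated to mimic the GNU tar message formats that the two regexes target.

import re

slinkre = r"tar: (.*): Cannot open: File exists"
hlinkre = r"tar: (.*): Cannot hard link to `(.*)': No such file or directory"

sample_stderr = "\n".join([
    "tar: usr/bin/python: Cannot open: File exists",
    "tar: usr/bin/pip: Cannot hard link to `usr/bin/pip3': No such file or directory",
])

for errline in sample_stderr.splitlines():
    file_exists = re.match(slinkre, errline)
    hard_link = re.match(hlinkre, errline)
    if file_exists:
        print("file-exists error on:", file_exists.group(1))
    elif hard_link:
        print("missing hard-link target:", hard_link.group(2))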
Example #5
def manifest_to_digest_shellout(rawmanifest):
    ret = None
    tmpmanifest = None
    try:
        fd, tmpmanifest = tempfile.mkstemp()
        os.write(fd, rawmanifest.encode("utf-8"))
        os.close(fd)

        localconfig = anchore_engine.configuration.localconfig.get_config()
        global_timeout = localconfig.get("skopeo_global_timeout", 0)
        try:
            global_timeout = int(global_timeout)
            if global_timeout < 0:
                global_timeout = 0
        except (TypeError, ValueError):
            global_timeout = 0

        if global_timeout:
            global_timeout_str = "--command-timeout {}s".format(global_timeout)
        else:
            global_timeout_str = ""

        cmd = "skopeo {} manifest-digest {}".format(global_timeout_str, tmpmanifest)
        rc, sout, serr = run_command(cmd)
        sout = str(sout, "utf-8")
        if rc == 0 and re.match("^sha256:.*", sout):
            ret = sout.strip()
        else:
            logger.warn(
                "failed to calculate digest from schema v1 manifest: cmd={} rc={} sout={} serr={}".format(
                    cmd, rc, sout, serr
                )
            )
            raise SkopeoError(
                cmd=cmd,
                rc=rc,
                err=serr,
                out=sout,
                msg="Failed to calculate digest from schema v1 manifest",
            )
    except Exception as err:
        raise err
    finally:
        if tmpmanifest:
            os.remove(tmpmanifest)

    return ret
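A minimal sketch (not from the original source) of the defensive timeout handling above: coerce the configured value to a non-negative integer and only emit skopeo's --command-timeout flag when it is positive. The config key mirrors the function above; everything else is illustrative.

def build_timeout_flag(localconfig):
    raw = localconfig.get("skopeo_global_timeout", 0)
    try:
        timeout = int(raw)
        if timeout < 0:
            timeout = 0
    except (TypeError, ValueError):
        timeout = 0
    return "--command-timeout {}s".format(timeout) if timeout else ""

# build_timeout_flag({"skopeo_global_timeout": "30"})    -> "--command-timeout 30s"
# build_timeout_flag({"skopeo_global_timeout": "bogus"}) -> ""
# build_timeout_flag({})                                 -> ""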
Example #6
def run_anchore_analyzers(staging_dirs, imageDigest, imageId, localconfig):
    outputdir = staging_dirs['outputdir']
    unpackdir = staging_dirs['unpackdir']
    copydir = staging_dirs['copydir']
    configdir = localconfig['service_dir']

    # run analyzers
    anchore_module_root = resource_filename("anchore_engine", "analyzers")
    analyzer_root = os.path.join(anchore_module_root, "modules")
    for f in list_analyzers():
        cmdstr = " ".join([f, configdir, imageId, unpackdir, outputdir, unpackdir])
        if True:
            timer = time.time()
            try:
                rc, sout, serr = utils.run_command(cmdstr)
                sout = utils.ensure_str(sout)
                serr = utils.ensure_str(serr)
                if rc != 0:
                    raise Exception("command failed: cmd="+str(cmdstr)+" exitcode="+str(rc)+" stdout="+str(sout).strip()+" stderr="+str(serr).strip())
                else:
                    logger.debug("command succeeded: cmd="+str(cmdstr)+" stdout="+str(sout).strip()+" stderr="+str(serr).strip())
            except Exception as err:
                logger.error("command failed with exception - " + str(err))
            logger.debug("timing: specific analyzer time: {} - {}".format(f, time.time() - timer))

    analyzer_report = {}
    for analyzer_output in os.listdir(os.path.join(outputdir, "analyzer_output")):
        if analyzer_output not in analyzer_report:
            analyzer_report[analyzer_output] = {}

        for analyzer_output_el in os.listdir(os.path.join(outputdir, "analyzer_output", analyzer_output)):
            if analyzer_output_el not in analyzer_report[analyzer_output]:
                analyzer_report[analyzer_output][analyzer_output_el] = {'base': {}}

            data = read_kvfile_todict(os.path.join(outputdir, "analyzer_output", analyzer_output, analyzer_output_el))
            if data:
                analyzer_report[analyzer_output][analyzer_output_el]['base'] = data
            else:
                analyzer_report[analyzer_output].pop(analyzer_output_el, None)

        if not analyzer_report[analyzer_output]:
            analyzer_report.pop(analyzer_output, None)

    return analyzer_report
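A small sketch (not from the original source) of the per-analyzer timing pattern above: record a start time, run the command, and report the elapsed seconds whether or not the command succeeds. subprocess.run and print stand in for utils.run_command() and logger here.

import subprocess
import time

def timed_run(cmd_argv):
    start = time.time()
    try:
        proc = subprocess.run(cmd_argv, capture_output=True, text=True)
        return proc.returncode, proc.stdout, proc.stderr
    finally:
        print("timing: {} took {:.3f}s".format(cmd_argv[0], time.time() - start))

# timed_run(["echo", "hello"]) -> (0, "hello\n", "")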
Example #7
def squash_orig(unpackdir, cachedir, layers):
    rootfsdir = unpackdir + "/rootfs"

    if os.path.exists(unpackdir + "/squashed.tar"):
        return True

    if not os.path.exists(rootfsdir):
        os.makedirs(rootfsdir)

    revlayer = list(layers)
    revlayer.reverse()

    l_excludes = {}
    # stores the set of special files to exclude only for the next layer (.wh..wh..opq handling)
    l_opqexcludes = {}

    last_opqexcludes = {}  # opq excludes for the last layer

    for l in revlayer:
        htype, layer = l.split(":", 1)

        layertar = get_layertarfile(unpackdir, cachedir, layer)

        count = 0

        logger.debug("\tPass 1: " + str(layertar))
        layertarfile = tarfile.open(layertar,
                                    mode='r',
                                    format=tarfile.PAX_FORMAT)

        whpatt = re.compile(r".*/\.wh\..*")
        whopqpatt = re.compile(r".*/\.wh\.\.wh\.\.opq")

        l_opqexcludes[layer] = {}

        myexcludes = {}
        opqexcludes = {}

        for member in layertarfile.getmembers():
            # checks for whiteout conditions
            if whopqpatt.match(member.name):
                # found an opq entry, which means that the files in the next layer down (only) should not be included

                fsub = re.sub(r"\.wh\.\.wh\.\.opq", "", member.name, 1)

                # never include the whiteout file itself
                myexcludes[member.name] = True
                opqexcludes[fsub] = True

            elif whpatt.match(member.name):
                # found a normal whiteout, which means that this file in any lower layer should be excluded
                fsub = re.sub(r"\.wh\.", "", member.name, 1)

                # never include a whiteout file
                myexcludes[member.name] = True

                myexcludes[fsub] = True

            else:
                # if the last processed layer had an opq whiteout, check file to see if it lives in the opq directory
                if last_opqexcludes:
                    dtoks = member.name.split("/")
                    for i in range(0, len(dtoks)):
                        dtok = '/'.join(dtoks[0:i])
                        dtokwtrail = '/'.join(dtoks[0:i]) + "/"
                        if dtok in last_opqexcludes or dtokwtrail in last_opqexcludes:
                            l_opqexcludes[layer][member.name] = True
                            break

        # build up the list of excludes as we move down the layers
        for prev_layer in l_excludes.keys():
            myexcludes.update(l_excludes[prev_layer])

        l_excludes[layer] = myexcludes

        #last_opqexcludes = opqexcludes
        last_opqexcludes.update(opqexcludes)
        layertarfile.close()

    logger.debug("Pass 3: untarring layers with exclusions")

    imageSize = 0
    for l in layers:
        htype, layer = l.split(":", 1)

        layertar = get_layertarfile(unpackdir, cachedir, layer)

        imageSize = imageSize + os.path.getsize(layertar)

        # write out the excluded files, adding the per-layer excludes if present
        with open(unpackdir + "/efile", 'w') as OFH:
            for efile in l_excludes[layer]:
                OFH.write("%s\n" % efile)
            if layer in l_opqexcludes and l_opqexcludes[layer]:
                for efile in l_opqexcludes[layer]:
                    logger.debug("adding special for layer exclude: " +
                                 str(efile))
                    OFH.write("%s\n" % efile)

        retry = True
        success = False
        last_err = None
        max_retries = 10
        retries = 0
        while (not success) and (retry):
            tarcmd = "tar -C " + rootfsdir + " -x -X " + unpackdir + "/efile -f " + layertar
            logger.debug("untarring squashed tarball: " + str(tarcmd))
            try:
                rc, sout, serr = utils.run_command(tarcmd)
                if rc != 0:
                    logger.debug("tar error encountered, attempting to handle")
                    # handle_tar_error() returns (handled, handled_post_metadata); also pass the
                    # layer list so the hard-link recovery path can walk the lower layers
                    handled, _ = handle_tar_error(
                        tarcmd,
                        rc,
                        sout,
                        serr,
                        unpackdir=unpackdir,
                        rootfsdir=rootfsdir,
                        layer=layer,
                        layertar=layertar,
                        layers=layers)
                    if not handled:
                        raise Exception("command failed: cmd=" + str(tarcmd) +
                                        " exitcode=" + str(rc) + " stdout=" +
                                        str(sout).strip() + " stderr=" +
                                        str(serr).strip())
                    else:
                        logger.debug(
                            "tar error successfully handled, retrying")
                else:
                    logger.debug("command succeeded: stdout=" +
                                 str(sout).strip() + " stderr=" +
                                 str(serr).strip())
                    success = True
            except Exception as err:
                logger.error("command failed with exception - " + str(err))
                last_err = err
                success = False
                retry = False

            # safety net
            if retries > max_retries:
                retry = False
            retries = retries + 1

        if not success:
            if last_err:
                raise last_err
            else:
                raise Exception("unknown exception in untar")

    return ("done", imageSize)