Example #1
def process_analyzer_job(system_user_auth, qobj, layer_cache_enable):
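    """Process a dequeued analyzer job: verify the image still exists in the
    catalog, mark it as analyzing, run perform_analyze, archive the analysis
    and extracted content data, load the result into the policy engine
    (forcing a fresh CVE scan), mark the analysis complete, and queue
    'analysis_update' notifications. On failure the image is moved to a
    fault state."""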
    global current_avg, current_avg_count

    timer = int(time.time())
    try:
        record = qobj['data']
        userId = record['userId']
        image_record = record['image_record']
        manifest = record['manifest']

        imageDigest = image_record['imageDigest']
        user_record = catalog.get_user(system_user_auth, userId)
        user_auth = (user_record['userId'], user_record['password'])

        # check to make sure image is still in DB
        try:
            image_records = catalog.get_image(user_auth,
                                              imageDigest=imageDigest)
            if image_records:
                image_record = image_records[0]
            else:
                raise Exception("empty image record from catalog")
        except Exception as err:
            logger.warn(
                "dequeued image cannot be fetched from catalog - skipping analysis ("
                + str(imageDigest) + ") - exception: " + str(err))
            return (True)

        logger.info("image dequeued for analysis: " + str(userId) + " : " +
                    str(imageDigest))

        try:
            logger.spew("TIMING MARK0: " + str(int(time.time()) - timer))

            last_analysis_status = image_record['analysis_status']
            image_record[
                'analysis_status'] = anchore_engine.subsys.taskstate.working_state(
                    'analyze')
            rc = catalog.update_image(user_auth, imageDigest, image_record)

            # disable the webhook call for image state transition to 'analyzing'
            #try:
            #    for image_detail in image_record['image_detail']:
            #        fulltag = image_detail['registry'] + "/" + image_detail['repo'] + ":" + image_detail['tag']
            #        npayload = {
            #            'last_eval': {'imageDigest': imageDigest, 'analysis_status': last_analysis_status},
            #            'curr_eval': {'imageDigest': imageDigest, 'analysis_status': image_record['analysis_status']},
            #        }
            #        rc = anchore_engine.subsys.notifications.queue_notification(userId, fulltag, 'analysis_update', npayload)
            #except Exception as err:
            #    logger.warn("failed to enqueue notification on image analysis state update - exception: " + str(err))

            # actually do analysis
            registry_creds = catalog.get_registry(user_auth)
            image_data = perform_analyze(userId,
                                         manifest,
                                         image_record,
                                         registry_creds,
                                         layer_cache_enable=layer_cache_enable)

            imageId = None
            try:
                imageId = image_data[0]['image']['imageId']
            except Exception as err:
                logger.warn(
                    "could not get imageId after analysis or from image record - exception: "
                    + str(err))

            logger.debug("archiving analysis data")
            rc = catalog.put_document(user_auth, 'analysis_data', imageDigest,
                                      image_data)

            if rc:
                try:
                    logger.debug("extracting image content data")
                    image_content_data = {}
                    for content_type in anchore_engine.services.common.image_content_types:
                        try:
                            image_content_data[
                                content_type] = anchore_engine.services.common.extract_analyzer_content(
                                    image_data, content_type)
                        except:
                            image_content_data[content_type] = {}

                    if image_content_data:
                        logger.debug("adding image content data to archive")
                        rc = catalog.put_document(user_auth,
                                                  'image_content_data',
                                                  imageDigest,
                                                  image_content_data)

                    try:
                        logger.debug(
                            "adding image analysis data to image_record")
                        anchore_engine.services.common.update_image_record_with_analysis_data(
                            image_record, image_data)

                    except Exception as err:
                        raise err

                except Exception as err:
                    logger.warn(
                        "could not store image content metadata to archive - exception: "
                        + str(err))

                logger.debug("adding image record to policy-engine service (" +
                             str(userId) + " : " + str(imageId) + ")")
                try:
                    if not imageId:
                        raise Exception(
                            "cannot add image to policy engine without an imageId"
                        )

                    localconfig = anchore_engine.configuration.localconfig.get_config()
                    verify = localconfig['internal_ssl_verify']

                    client = anchore_engine.clients.policy_engine.get_client(
                        user=system_user_auth[0],
                        password=system_user_auth[1],
                        verify_ssl=verify)

                    try:
                        logger.debug(
                            "clearing any existing record in policy engine for image: "
                            + str(imageId))
                        rc = client.delete_image(user_id=userId,
                                                 image_id=imageId)
                    except Exception as err:
                        logger.warn("exception on pre-delete - exception: " +
                                    str(err))

                    request = ImageIngressRequest()
                    request.user_id = userId
                    request.image_id = imageId
                    request.fetch_url = 'catalog://' + str(
                        userId) + '/analysis_data/' + str(imageDigest)
                    logger.debug("policy engine request: " + str(request))
                    resp = client.ingress_image(request)
                    logger.debug("policy engine image add response: " +
                                 str(resp))

                    try:
                        # force a fresh CVE scan
                        resp = client.get_image_vulnerabilities(
                            user_id=userId,
                            image_id=imageId,
                            force_refresh=True)
                    except Exception as err:
                        logger.warn(
                            "post analysis CVE scan failed for image: " +
                            str(imageId))

                except Exception as err:
                    raise Exception(
                        "adding image to policy-engine failed - exception: " +
                        str(err))

                logger.debug("updating image catalog record analysis_status")

                last_analysis_status = image_record['analysis_status']
                image_record[
                    'analysis_status'] = anchore_engine.subsys.taskstate.complete_state(
                        'analyze')
                rc = catalog.update_image(user_auth, imageDigest, image_record)

                try:
                    annotations = {}
                    try:
                        annotations = json.loads(
                            image_record.get('annotations', '{}'))
                    except Exception as err:
                        logger.warn(
                            "could not marshal annotations from json - exception: "
                            + str(err))

                    for image_detail in image_record['image_detail']:
                        fulltag = image_detail['registry'] + "/" + image_detail[
                            'repo'] + ":" + image_detail['tag']
                        last_payload = {
                            'imageDigest': imageDigest,
                            'analysis_status': last_analysis_status,
                            'annotations': annotations
                        }
                        curr_payload = {
                            'imageDigest': imageDigest,
                            'analysis_status': image_record['analysis_status'],
                            'annotations': annotations
                        }
                        npayload = {
                            'last_eval': last_payload,
                            'curr_eval': curr_payload,
                        }
                        if annotations:
                            npayload['annotations'] = annotations

                        rc = anchore_engine.subsys.notifications.queue_notification(
                            userId, fulltag, 'analysis_update', npayload)
                except Exception as err:
                    logger.warn(
                        "failed to enqueue notification on image analysis state update - exception: "
                        + str(err))

            else:
                raise Exception("analysis archive failed to store")

            logger.info("analysis complete: " + str(userId) + " : " +
                        str(imageDigest))

            logger.spew("TIMING MARK1: " + str(int(time.time()) - timer))

            try:
                run_time = float(time.time() - timer)
                current_avg_count = current_avg_count + 1.0
                new_avg = current_avg + (
                    (run_time - current_avg) / current_avg_count)
                current_avg = new_avg
            except:
                pass

        except Exception as err:
            logger.exception("problem analyzing image - exception: " +
                             str(err))
            image_record[
                'analysis_status'] = anchore_engine.subsys.taskstate.fault_state(
                    'analyze')
            image_record[
                'image_status'] = anchore_engine.subsys.taskstate.fault_state(
                    'image_status')
            rc = catalog.update_image(user_auth, imageDigest, image_record)

    except Exception as err:
        logger.warn("job processing bailed - exception: " + str(err))
        raise err

    return (True)
Example #2
def process_analyzer_job(system_user_auth, qobj):
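    """Older variant of the analyzer job handler: iterates over each tag in
    image_record['image_detail'], runs perform_analyze per pullstring, archives
    the query, analysis, content and summary data, loads the image into the
    policy engine with a forced CVE refresh, and updates the catalog
    analysis_status. On failure the image is moved to a fault state."""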
    global current_avg, current_avg_count

    timer = int(time.time())
    try:
        record = qobj['data']
        userId = record['userId']
        image_record = record['image_record']
        imageDigest = image_record['imageDigest']
        user_record = catalog.get_user(system_user_auth, userId)
        user_auth = (user_record['userId'], user_record['password'])

        # check to make sure image is still in DB
        try:
            image_records = catalog.get_image(user_auth, imageDigest=imageDigest)
            if image_records:
                image_record = image_records[0]
            else:
                raise Exception("empty image record from catalog")
        except Exception as err:
            logger.warn("dequeued image cannot be fetched from catalog - skipping analysis (" + str(
                imageDigest) + ") - exception: " + str(err))
            return (True)

        logger.info("image dequeued for analysis: " + str(userId) + " : " + str(imageDigest))

        try:
            logger.spew("TIMING MARK0: " + str(int(time.time()) - timer))
            image_record['analysis_status'] = anchore_engine.subsys.taskstate.working_state('analyze')
            rc = catalog.update_image(user_auth, imageDigest, image_record)

            # actually do analysis

            # for pullstring in pullstrings.keys():
            for image_detail in image_record['image_detail']:
                pullstring = image_detail['registry'] + "/" + image_detail['repo'] + "@" + image_detail['digest']
                fulltag = image_detail['registry'] + "/" + image_detail['repo'] + ":" + image_detail['tag']

                imageId = None
                if 'imageId' in image_detail and image_detail['imageId']:
                    imageId = image_detail['imageId']

                logger.info("analysis starting: " + str(userId) + " : " + str(imageDigest) + " : " + str(fulltag) + " : " + str(imageId))

                logger.spew("TIMING MARKX: " + str(int(time.time()) - timer))

                registry_creds = catalog.get_registry(user_auth)

                image_data, query_data = perform_analyze(userId, pullstring, fulltag, image_detail, registry_creds)
                logger.spew("TIMING MARKY: " + str(int(time.time()) - timer))

                logger.debug("archiving query data")
                rc = catalog.put_document(user_auth, 'query_data', imageDigest, query_data)
                if rc:
                    logger.debug("storing image query data to catalog")
                else:
                    raise Exception("query archive failed to store")

                if not imageId:
                    try:
                        imageId = image_data[0]['image']['imageId']
                    except Exception as err:
                        logger.warn("could not get imageId after analysis or from image record - exception: " + str(err))

                logger.debug("archiving analysis data")
                rc = catalog.put_document(user_auth, 'analysis_data', imageDigest, image_data)
                if rc:
                    try:
                        logger.debug("extracting image content data")
                        image_content_data = {}
                        for content_type in anchore_engine.services.common.image_content_types:
                            try:
                                image_content_data[content_type] = anchore_engine.services.common.extract_analyzer_content(image_data, content_type)
                            except:
                                image_content_data[content_type] = {}

                        if image_content_data:
                            logger.debug("adding image content data to archive")
                            rc = catalog.put_document(user_auth, 'image_content_data', imageDigest, image_content_data)

                        image_summary_data = {}
                        try:
                            image_summary_data = anchore_engine.services.common.extract_analyzer_content(image_data, 'metadata')
                        except:
                            image_summary_data = {}
                        if image_summary_data:
                            logger.debug("adding image summary data to archive")
                            rc = catalog.put_document(user_auth, 'image_summary_data', imageDigest, image_summary_data)

                    except Exception as err:
                        logger.warn("could not store image content metadata to archive - exception: " + str(err))

                    logger.debug("adding image record to policy-engine service (" + str(userId) + " : " + str(imageId) + ")")
                    try:
                        if not imageId:
                            raise Exception("cannot add image to policy engine without an imageId")

                        localconfig = anchore_engine.configuration.localconfig.get_config()
                        verify = localconfig['internal_ssl_verify']

                        client = anchore_engine.clients.policy_engine.get_client(user=system_user_auth[0], password=system_user_auth[1], verify_ssl=verify)

                        try:
                            logger.debug("clearing any existing record in policy engine for image: " + str(imageId))
                            rc = client.delete_image(user_id=userId, image_id=imageId)
                        except Exception as err:
                            logger.warn("exception on pre-delete - exception: " + str(err))

                        request = ImageIngressRequest()
                        request.user_id = userId
                        request.image_id = imageId
                        request.fetch_url='catalog://'+str(userId)+'/analysis_data/'+str(imageDigest)
                        logger.debug("policy engine request: " + str(request))
                        resp = client.ingress_image(request)
                        logger.debug("policy engine image add response: " + str(resp))
                        try:
                            # force a fresh CVE scan
                            resp = client.get_image_vulnerabilities(user_id=userId, image_id=imageId, force_refresh=True)
                        except Exception as err:
                            logger.warn("post analysis CVE scan failed for image: " + str(imageId))

                    except Exception as err:
                        raise Exception("adding image to policy-engine failed - exception: " + str(err))

                    logger.debug("updating image catalog record analysis_status")
                    image_record['analysis_status'] = anchore_engine.subsys.taskstate.complete_state('analyze')
                    rc = catalog.update_image(user_auth, imageDigest, image_record)
                else:
                    raise Exception("analysis archive failed to store")

                logger.info("analysis complete: " + str(userId) + " : " + str(imageDigest) + " : " + str(fulltag))
            logger.spew("TIMING MARK1: " + str(int(time.time()) - timer))

            try:
                run_time = float(time.time() - timer)
                current_avg_count = current_avg_count + 1.0
                new_avg = current_avg + ((run_time - current_avg) / current_avg_count)
                current_avg = new_avg
            except:
                pass

        except Exception as err:
            logger.exception("problem analyzing image - exception: " + str(err))
            image_record['analysis_status'] = anchore_engine.subsys.taskstate.fault_state('analyze')
            image_record['image_status'] = anchore_engine.subsys.taskstate.fault_state('image_status')
            rc = catalog.update_image(user_auth, imageDigest, image_record)

    except Exception as err:
        logger.warn("job processing bailed - exception: " + str(err))
        raise err

    return (True)
Example #3
def process_analyzer_job(system_user_auth, qobj, layer_cache_enable):
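    """Analyzer job handler with event and metrics support: skips images that
    are not in the base 'analyze' state, emits AnalyzeImageFail /
    ArchiveAnalysisFail / LoadAnalysisFail events on errors (recorded in the
    catalog via the finally block), observes the run time in the
    anchore_analysis_time_seconds histogram, and otherwise follows the same
    archive / policy-engine-load / notification flow as the other variants."""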
    global servicename  #current_avg, current_avg_count

    timer = int(time.time())
    event = None
    try:
        logger.debug('dequeued object: {}'.format(qobj))

        record = qobj['data']
        userId = record['userId']
        imageDigest = record['imageDigest']
        manifest = record['manifest']

        user_record = catalog.get_user(system_user_auth, userId)
        user_auth = (user_record['userId'], user_record['password'])

        # check to make sure image is still in DB
        try:
            image_records = catalog.get_image(user_auth,
                                              imageDigest=imageDigest)
            if image_records:
                image_record = image_records[0]
            else:
                raise Exception("empty image record from catalog")
        except Exception as err:
            logger.warn(
                "dequeued image cannot be fetched from catalog - skipping analysis ("
                + str(imageDigest) + ") - exception: " + str(err))
            return (True)

        logger.info("image dequeued for analysis: " + str(userId) + " : " +
                    str(imageDigest))
        if image_record[
                'analysis_status'] != anchore_engine.subsys.taskstate.base_state(
                    'analyze'):
            logger.debug(
                "dequeued image is not in base state - skipping analysis")
            return (True)

        try:
            logger.spew("TIMING MARK0: " + str(int(time.time()) - timer))

            last_analysis_status = image_record['analysis_status']
            image_record[
                'analysis_status'] = anchore_engine.subsys.taskstate.working_state(
                    'analyze')
            rc = catalog.update_image(user_auth, imageDigest, image_record)

            # disable the webhook call for image state transition to 'analyzing'
            #try:
            #    for image_detail in image_record['image_detail']:
            #        fulltag = image_detail['registry'] + "/" + image_detail['repo'] + ":" + image_detail['tag']
            #        npayload = {
            #            'last_eval': {'imageDigest': imageDigest, 'analysis_status': last_analysis_status},
            #            'curr_eval': {'imageDigest': imageDigest, 'analysis_status': image_record['analysis_status']},
            #        }
            #        rc = anchore_engine.subsys.notifications.queue_notification(userId, fulltag, 'analysis_update', npayload)
            #except Exception as err:
            #    logger.warn("failed to enqueue notification on image analysis state update - exception: " + str(err))

            # actually do analysis
            registry_creds = catalog.get_registry(user_auth)
            try:
                image_data = perform_analyze(
                    userId,
                    manifest,
                    image_record,
                    registry_creds,
                    layer_cache_enable=layer_cache_enable)
            except AnchoreException as e:
                event = events.AnalyzeImageFail(user_id=userId,
                                                image_digest=imageDigest,
                                                error=e.to_dict())
                raise

            imageId = None
            try:
                imageId = image_data[0]['image']['imageId']
            except Exception as err:
                logger.warn(
                    "could not get imageId after analysis or from image record - exception: "
                    + str(err))

            try:
                logger.debug("archiving analysis data")
                rc = catalog.put_document(user_auth, 'analysis_data',
                                          imageDigest, image_data)
            except Exception as e:
                err = CatalogClientError(
                    msg='Failed to upload analysis data to catalog', cause=e)
                event = events.ArchiveAnalysisFail(user_id=userId,
                                                   image_digest=imageDigest,
                                                   error=err.to_dict())
                raise err

            if rc:
                try:
                    logger.debug("extracting image content data")
                    image_content_data = {}
                    for content_type in anchore_engine.services.common.image_content_types + anchore_engine.services.common.image_metadata_types:
                        try:
                            image_content_data[
                                content_type] = anchore_engine.services.common.extract_analyzer_content(
                                    image_data,
                                    content_type,
                                    manifest=manifest)
                        except:
                            image_content_data[content_type] = {}

                    if image_content_data:
                        logger.debug("adding image content data to archive")
                        rc = catalog.put_document(user_auth,
                                                  'image_content_data',
                                                  imageDigest,
                                                  image_content_data)

                    try:
                        logger.debug(
                            "adding image analysis data to image_record")
                        anchore_engine.services.common.update_image_record_with_analysis_data(
                            image_record, image_data)

                    except Exception as err:
                        raise err

                except Exception as err:
                    logger.warn(
                        "could not store image content metadata to archive - exception: "
                        + str(err))

                logger.debug("adding image record to policy-engine service (" +
                             str(userId) + " : " + str(imageId) + ")")
                try:
                    if not imageId:
                        raise Exception(
                            "cannot add image to policy engine without an imageId"
                        )

                    localconfig = anchore_engine.configuration.localconfig.get_config()
                    verify = localconfig['internal_ssl_verify']

                    client = anchore_engine.clients.policy_engine.get_client(
                        user=system_user_auth[0],
                        password=system_user_auth[1],
                        verify_ssl=verify)

                    try:
                        logger.debug(
                            "clearing any existing record in policy engine for image: "
                            + str(imageId))
                        rc = client.delete_image(user_id=userId,
                                                 image_id=imageId)
                    except Exception as err:
                        logger.warn("exception on pre-delete - exception: " +
                                    str(err))

                    logger.info('Loading image: {} {}'.format(userId, imageId))
                    request = ImageIngressRequest(
                        user_id=userId,
                        image_id=imageId,
                        fetch_url='catalog://' + str(userId) +
                        '/analysis_data/' + str(imageDigest))
                    logger.debug("policy engine request: " + str(request))
                    resp = client.ingress_image(request)
                    logger.debug("policy engine image add response: " +
                                 str(resp))

                except Exception as err:
                    import traceback
                    traceback.print_exc()
                    newerr = PolicyEngineClientError(
                        msg='Adding image to policy-engine failed',
                        cause=str(err))
                    event = events.LoadAnalysisFail(user_id=userId,
                                                    image_digest=imageDigest,
                                                    error=newerr.to_dict())
                    raise newerr

                logger.debug("updating image catalog record analysis_status")

                last_analysis_status = image_record['analysis_status']
                image_record[
                    'analysis_status'] = anchore_engine.subsys.taskstate.complete_state(
                        'analyze')
                image_record['analyzed_at'] = int(time.time())
                rc = catalog.update_image(user_auth, imageDigest, image_record)

                try:
                    annotations = {}
                    try:
                        if image_record.get('annotations', '{}'):
                            annotations = json.loads(
                                image_record.get('annotations', '{}'))
                    except Exception as err:
                        logger.warn(
                            "could not marshal annotations from json - exception: "
                            + str(err))

                    for image_detail in image_record['image_detail']:
                        fulltag = image_detail['registry'] + "/" + image_detail[
                            'repo'] + ":" + image_detail['tag']
                        last_payload = {
                            'imageDigest': imageDigest,
                            'analysis_status': last_analysis_status,
                            'annotations': annotations
                        }
                        curr_payload = {
                            'imageDigest': imageDigest,
                            'analysis_status': image_record['analysis_status'],
                            'annotations': annotations
                        }
                        npayload = {
                            'last_eval': last_payload,
                            'curr_eval': curr_payload,
                        }
                        if annotations:
                            npayload['annotations'] = annotations

                        rc = anchore_engine.subsys.notifications.queue_notification(
                            userId, fulltag, 'analysis_update', npayload)
                except Exception as err:
                    logger.warn(
                        "failed to enqueue notification on image analysis state update - exception: "
                        + str(err))

            else:
                err = CatalogClientError(
                    msg='Failed to upload analysis data to catalog',
                    cause='Invalid response from catalog API - {}'.format(
                        str(rc)))
                event = events.ArchiveAnalysisFail(user_id=userId,
                                                   image_digest=imageDigest,
                                                   error=err.to_dict())
                raise err

            logger.info("analysis complete: " + str(userId) + " : " +
                        str(imageDigest))

            logger.spew("TIMING MARK1: " + str(int(time.time()) - timer))

            try:
                run_time = float(time.time() - timer)
                #current_avg_count = current_avg_count + 1.0
                #new_avg = current_avg + ((run_time - current_avg) / current_avg_count)
                #current_avg = new_avg

                anchore_engine.subsys.metrics.histogram_observe(
                    'anchore_analysis_time_seconds',
                    run_time,
                    buckets=[
                        1.0, 5.0, 10.0, 30.0, 60.0, 120.0, 300.0, 600.0,
                        1800.0, 3600.0
                    ],
                    status="success")
                #anchore_engine.subsys.metrics.counter_inc('anchore_images_analyzed_total')

                #localconfig = anchore_engine.configuration.localconfig.get_config()
                #service_record = {'hostid': localconfig['host_id'], 'servicename': servicename}
                #anchore_engine.subsys.servicestatus.set_status(service_record, up=True, available=True, detail={'avg_analysis_time_sec': current_avg, 'total_analysis_count': current_avg_count}, update_db=True)

            except Exception as err:
                logger.warn(str(err))
                pass

        except Exception as err:
            run_time = float(time.time() - timer)
            logger.exception("problem analyzing image - exception: " +
                             str(err))
            anchore_engine.subsys.metrics.histogram_observe(
                'anchore_analysis_time_seconds',
                run_time,
                buckets=[
                    1.0, 5.0, 10.0, 30.0, 60.0, 120.0, 300.0, 600.0, 1800.0,
                    3600.0
                ],
                status="fail")
            image_record[
                'analysis_status'] = anchore_engine.subsys.taskstate.fault_state(
                    'analyze')
            image_record[
                'image_status'] = anchore_engine.subsys.taskstate.fault_state(
                    'image_status')
            rc = catalog.update_image(user_auth, imageDigest, image_record)
        finally:
            if event:
                try:
                    catalog.add_event(user_auth, event)
                except:
                    logger.error(
                        'Ignoring error creating analysis failure event')

    except Exception as err:
        logger.warn("job processing bailed - exception: " + str(err))
        raise err

    return (True)
Example #4
def get_image_summary(user_auth, image_record):
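    """Return summary metadata (distro, distro_version, layer_count,
    dockerfile_mode, arch, image_size) for an analyzed image, regenerating
    and re-archiving the image_content_data and image_summary_data documents
    from the stored analysis_data if the summary is not already present in
    the archive. Returns an empty dict if the image is not yet analyzed."""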
    ret = {}
    if image_record['analysis_status'] != taskstate.complete_state('analyze'):
        return (ret)

    # augment with image summary data, if available
    try:
        try:
            image_summary_data = catalog.get_document(
                user_auth, 'image_summary_data', image_record['imageDigest'])
        except:
            image_summary_data = {}

        if not image_summary_data:
            # (re)generate image_content_data document
            logger.debug("generating image summary data from analysis data")
            image_data = catalog.get_document(user_auth, 'analysis_data',
                                              image_record['imageDigest'])

            image_content_data = {}
            for content_type in anchore_engine.services.common.image_content_types:
                try:
                    image_content_data[
                        content_type] = anchore_engine.services.common.extract_analyzer_content(
                            image_data, content_type)
                except:
                    image_content_data[content_type] = {}
            if image_content_data:
                logger.debug("adding image content data to archive")
                rc = catalog.put_document(user_auth, 'image_content_data',
                                          image_record['imageDigest'],
                                          image_content_data)

            image_summary_data = {}
            try:
                image_summary_data = anchore_engine.services.common.extract_analyzer_content(
                    image_data, 'metadata')
            except:
                image_summary_data = {}
            if image_summary_data:
                logger.debug("adding image summary data to archive")
                rc = catalog.put_document(user_auth, 'image_summary_data',
                                          image_record['imageDigest'],
                                          image_summary_data)

        image_summary_metadata = copy.deepcopy(image_summary_data)
        if image_summary_metadata:
            logger.debug("getting image summary data")

            summary_record = {}

            adm = image_summary_metadata['anchore_distro_meta']

            summary_record['distro'] = adm.pop('DISTRO', 'N/A')
            summary_record['distro_version'] = adm.pop('DISTROVERS', 'N/A')

            air = image_summary_metadata['anchore_image_report']
            airm = air.pop('meta', {})
            al = air.pop('layers', [])
            ddata = air.pop('docker_data', {})

            summary_record['layer_count'] = str(len(al))
            summary_record['dockerfile_mode'] = air.pop(
                'dockerfile_mode', 'N/A')
            summary_record['arch'] = ddata.pop('Architecture', 'N/A')
            summary_record['image_size'] = str(int(airm.pop('sizebytes', 0)))

            ret = summary_record

    except Exception as err:
        logger.warn("cannot get image summary data for image: " +
                    str(image_record['imageDigest']) + " : " + str(err))

    return (ret)
Example #5
def get_content(request_inputs, content_type, doformat=False):
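    """API handler that returns stored content of the requested type for each
    matching analyzed image: the manifest is read from the manifest_data
    document, all other types from image_content_data, with a fallback that
    migrates legacy base64-encoded dockerfile contents out of image_detail.
    Returns a (return_object, httpcode) tuple."""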
    user_auth = request_inputs['auth']
    method = request_inputs['method']
    bodycontent = request_inputs['bodycontent']
    params = request_inputs['params']

    return_object = {}
    httpcode = 500
    userId, pw = user_auth
    try:
        if content_type not in anchore_engine.services.common.image_content_types + anchore_engine.services.common.image_metadata_types:
            httpcode = 404
            raise Exception("content type (" + str(content_type) +
                            ") not available")

        tag = params.pop('tag', None)
        imageDigest = params.pop('imageDigest', None)
        digest = params.pop('digest', None)

        image_reports = catalog.get_image(user_auth,
                                          tag=tag,
                                          digest=digest,
                                          imageDigest=imageDigest)
        for image_report in image_reports:
            if image_report['analysis_status'] != taskstate.complete_state(
                    'analyze'):
                httpcode = 404
                raise Exception("image is not analyzed - analysis_status: " +
                                image_report['analysis_status'])

            imageDigest = image_report['imageDigest']

            if content_type == 'manifest':
                try:
                    image_manifest_data = catalog.get_document(
                        user_auth, 'manifest_data', imageDigest)
                except Exception as err:
                    raise anchore_engine.services.common.make_anchore_exception(
                        err,
                        input_message="cannot fetch content data {} from archive"
                        .format(content_type),
                        input_httpcode=500)

                image_content_data = {'manifest': image_manifest_data}
            else:
                try:
                    image_content_data = catalog.get_document(
                        user_auth, 'image_content_data', imageDigest)
                except Exception as err:
                    raise anchore_engine.services.common.make_anchore_exception(
                        err,
                        input_message="cannot fetch content data from archive",
                        input_httpcode=500)

                # special handler for dockerfile contents from old method to new
                if content_type == 'dockerfile' and not image_content_data.get(
                        'dockerfile', None):
                    try:
                        if image_report.get('dockerfile_mode',
                                            None) == 'Actual':
                            for image_detail in image_report.get(
                                    'image_detail', []):
                                if image_detail.get('dockerfile', None):
                                    logger.debug(
                                        "migrating old dockerfile content form into new"
                                    )
                                    image_content_data[
                                        'dockerfile'] = image_detail.get(
                                            'dockerfile', "").decode('base64')
                                    catalog.put_document(
                                        user_auth, 'image_content_data',
                                        imageDigest, image_content_data)
                                    break
                    except Exception as err:
                        logger.warn(
                            "cannot fetch/decode dockerfile contents from image_detail - {}"
                            .format(err))

                if content_type not in image_content_data:
                    httpcode = 404
                    raise Exception(
                        "image content of type (" + str(content_type) +
                        ") was not an available type at analysis time for this image"
                    )

            return_object[imageDigest] = make_response_content(
                content_type, image_content_data[content_type])

        httpcode = 200
    except Exception as err:
        return_object = anchore_engine.services.common.make_response_error(
            err, in_httpcode=httpcode)
        httpcode = return_object['httpcode']

    return (return_object, httpcode)