Example #1
import logging

from flask import jsonify, request
from thoth.storages import AnalysisResultsStore

_LOGGER = logging.getLogger(__name__)


def post_analysis_result():
    adapter = AnalysisResultsStore()
    adapter.connect()
    document_id = adapter.store_document(request.json)
    _LOGGER.info("Analyzer result stored with document_id %r", document_id)
    return jsonify({'document_id': document_id}), 201, {
        'Content-Type': 'application/json'
    }
Example #2
def post_analysis_result():  # Ignore PyDocStyleBear
    adapter = AnalysisResultsStore()
    adapter.connect()
    document_id = adapter.store_document(request.json)
    _LOGGER.info("Analyzer result stored with document_id %r", document_id)
    return (
        jsonify({"document_id": document_id}),
        201,
        {
            "ContentType": "application/json"
        },
    )
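
Examples #1 and #2 are the same Flask endpoint formatted in two styles. As a hedged illustration of how a client could exercise it, assuming the handler is routed as a POST endpoint (the URL path and payload below are assumptions, not taken from the excerpts):

import requests

# Hypothetical endpoint path and payload; the actual route registration and
# document schema are not shown in the excerpts above.
response = requests.post(
    "http://localhost:8080/api/v1/analysis-result",
    json={"metadata": {"analyzer": "package-extract"}, "result": {}},
)
assert response.status_code == 201
document_id = response.json()["document_id"]
print("stored as", document_id)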
Example #3
import logging

from thoth.storages import AdvisersResultsStore
from thoth.storages import AnalysisResultsStore
from thoth.storages import InspectionResultsStore
from thoth.storages import PackageAnalysisResultsStore
from thoth.storages import ProvenanceResultsStore
from thoth.storages import SolverResultsStore
from thoth.storages import DependencyMonkeyReportsStore
from thoth.common import init_logging
from thoth.common import OpenShift
import thoth.metrics_exporter.metrics as metrics

init_logging()

_LOGGER = logging.getLogger(__name__)

_MONITORED_STORES = (
    AdvisersResultsStore(),
    AnalysisResultsStore(),
    InspectionResultsStore(),
    ProvenanceResultsStore(),
    PackageAnalysisResultsStore(),
    SolverResultsStore(),
    DependencyMonkeyReportsStore(),
)

_NAMESPACES_VARIABLES = [
    "THOTH_FRONTEND_NAMESPACE",
    "THOTH_MIDDLETIER_NAMESPACE",
    "THOTH_BACKEND_NAMESPACE",
    "THOTH_AMUN_NAMESPACE",
    "THOTH_AMUN_INSPECTION_NAMESPACE",
]
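
Example #3 only assembles the list of result stores the metrics exporter watches. A minimal sketch of how such a list could feed Prometheus metrics, assuming each adapter exposes a get_document_count() method (that method name is an assumption for illustration, not a documented thoth-storages API):

from prometheus_client import Gauge

# Hypothetical gauge; the real metric definitions live in
# thoth.metrics_exporter.metrics.
_DOCUMENT_COUNT = Gauge(
    "thoth_result_documents_total",
    "Number of documents stored per result store.",
    ["store"],
)


def collect_store_metrics() -> None:
    """Connect to each monitored store and export its document count."""
    for store in _MONITORED_STORES:
        store.connect()
        # get_document_count() is assumed here for illustration.
        _DOCUMENT_COUNT.labels(store=store.__class__.__name__).set(
            store.get_document_count()
        )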
Example #4
def _fixture_adapter():
    """Retrieve an adapter to analysis results."""
    return AnalysisResultsStore(
        deployment_name=_DEPLOYMENT_NAME,
        prefix=_BUCKET_PREFIX,
        **CEPH_INIT_KWARGS,
    )
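
A hedged sketch of how this helper might be wired up from pytest; the fixture registration and the retrieve_document() call are illustrative assumptions paired with the store_document() usage shown in the other examples:

import pytest


@pytest.fixture(name="adapter")
def adapter_fixture():
    """Provide a connected AnalysisResultsStore for tests."""
    adapter = _fixture_adapter()
    adapter.connect()
    return adapter


def test_store_document_round_trip(adapter):
    # Store a minimal document and read it back; retrieve_document() is
    # assumed here as the counterpart of store_document().
    document_id = adapter.store_document({"result": {}})
    assert adapter.retrieve_document(document_id)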
Example #5
def post_analyze(
    image: str,
    debug: bool = False,
    registry_user: typing.Optional[str] = None,
    registry_password: typing.Optional[str] = None,
    environment_type: typing.Optional[str] = None,
    origin: typing.Optional[str] = None,
    verify_tls: bool = True,
    force: bool = False,
):
    """Run an analyzer in a restricted namespace."""
    parameters = locals()
    force = parameters.pop("force", None)
    # Set default environment type if none provided. As we are serving users'
    # requests, we always analyze external container images.
    parameters["environment_type"] = parameters.get("environment_type") or "runtime"
    parameters["is_external"] = True

    # Always extract metadata to check for authentication issues and such.
    metadata_req = _do_get_image_metadata(image,
                                          registry_user=registry_user,
                                          registry_password=registry_password,
                                          verify_tls=verify_tls)

    if metadata_req[1] != 200:
        # There was an error extracting metadata; the tuple holds a dictionary
        # with the error report and the HTTP status code.
        return metadata_req

    metadata = metadata_req[0]
    # We compute a digest of the parameters so we do not reveal any
    # authentication-specific info.
    parameters_digest = _compute_digest_params(parameters)
    cache = AnalysesCacheStore()
    cache.connect()
    cached_document_id = metadata["digest"] + "+" + parameters_digest

    if not force:
        try:
            return (
                {
                    "analysis_id": cache.retrieve_document_record(
                        cached_document_id
                    ).pop("analysis_id"),
                    "cached": True,
                    "parameters": parameters,
                },
                202,
            )
        except CacheMiss:
            pass

    parameters["job_id"] = _OPENSHIFT.generate_id("package-extract")
    response, status_code = _send_schedule_message(
        parameters, package_extract_trigger_message,
        PackageExtractTriggerContent)
    analysis_by_digest_store = AnalysisByDigest()
    analysis_by_digest_store.connect()
    analysis_by_digest_store.store_document(metadata["digest"], response)

    if status_code == 202:
        cache.store_document_record(cached_document_id,
                                    {"analysis_id": response["analysis_id"]})

        # Store the request for traceability.
        store = AnalysisResultsStore()
        store.connect()
        store.store_request(parameters["job_id"], parameters)

    return response, status_code
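
The handler leans on a _compute_digest_params() helper that is not shown in the excerpt. A minimal sketch of what it could look like, assuming it hashes a JSON-serializable parameter mapping (the hashing scheme is an assumption, not the project's actual implementation):

import hashlib
import json
import typing


def _compute_digest_params(parameters: typing.Any) -> str:
    """Compute a stable digest of parameters without exposing their values.

    Serializing with sorted keys keeps the digest independent of dict
    ordering; only the hex digest is ever stored, so credentials passed in
    the parameters are not revealed.
    """
    serialized = json.dumps(parameters, sort_keys=True, default=str)
    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()

This matches how the handler uses the digest: it is concatenated with the image digest to form a cache key, so equal parameters always map to the same cached analysis.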
Example #6
def post_build(
    build_detail: typing.Dict[str, typing.Any],
    *,
    base_registry_password: typing.Optional[str] = None,
    base_registry_user: typing.Optional[str] = None,
    base_registry_verify_tls: bool = True,
    output_registry_password: typing.Optional[str] = None,
    output_registry_user: typing.Optional[str] = None,
    output_registry_verify_tls: bool = True,
    debug: bool = False,
    environment_type: typing.Optional[str] = None,
    force: bool = False,
    origin: typing.Optional[str] = None,
) -> typing.Tuple[typing.Dict[str, typing.Any], int]:
    """Run analysis on a build."""
    output_image = build_detail.get("output_image")
    base_image = build_detail.get("base_image")
    build_log = build_detail.get("build_log")

    if not output_image and not base_image and not build_log:
        return {"error": "No base image, output image, or build log provided"}, 400

    buildlog_analysis_id = None
    buildlog_document_id = None
    if build_log:
        buildlog_document_id, buildlog_analysis_id = _store_build_log(
            build_log, force=force)

    message_parameters = {
        "base_image_analysis_id": None,  # Assigned below.
        "base_image": base_image,
        "base_registry_password": base_registry_password,
        "base_registry_user": base_registry_user,
        "base_registry_verify_tls": base_registry_verify_tls,
        "output_image_analysis_id": None,  # Assigned below.
        "output_image": output_image,
        "output_registry_password": output_registry_password,
        "output_registry_user": output_registry_user,
        "output_registry_verify_tls": output_registry_verify_tls,
        "environment_type": environment_type,
        "buildlog_document_id": buildlog_document_id,
        "buildlog_parser_id": None if buildlog_analysis_id else OpenShift.generate_id("buildlog-parser"),
        "origin": origin,
        "debug": debug,
        "job_id": OpenShift.generate_id("build-analysis"),
    }

    cache = AnalysesCacheStore()
    cache.connect()

    # Handle the base container image used during the build process.
    base_image_analysis = None
    base_image_analysis_id = None
    base_cached_document_id = None
    base_image_analysis_cached = False
    if base_image:
        base_image_info = {
            "image": base_image,
            "registry_user": base_registry_user,
            "registry_password": base_registry_password,
            "verify_tls": base_registry_verify_tls,
        }
        metadata_req = _do_get_image_metadata(**base_image_info)

        if metadata_req[1] != 200:
            # There was an error extracting metadata; the tuple holds a dictionary
            # with the error report and the HTTP status code.
            return metadata_req

        base_image_metadata = metadata_req[0]
        # We compute a digest of the parameters so we do not reveal any
        # authentication-specific info.
        parameters_digest = _compute_digest_params(base_image)
        base_cached_document_id = base_image_metadata["digest"] + "+" + parameters_digest

        base_image_analysis_id = OpenShift.generate_id("package-extract")
        if not force:
            try:
                base_image_analysis_id = cache.retrieve_document_record(
                    base_cached_document_id).pop("analysis_id")
                base_image_analysis_cached = True
            except CacheMiss:
                pass

        base_image_analysis = {
            "analysis_id": base_image_analysis_id,
            "cached": base_image_analysis_cached,
            "parameters": {
                "base_image": base_image,
                # "registry_password": base_registry_password,
                # "registry_user": base_registry_user,
                "registry_verify_tls": base_registry_verify_tls,
            },
        }

        analysis_by_digest_store = AnalysisByDigest()
        analysis_by_digest_store.connect()
        analysis_by_digest_store.store_document(base_image_metadata["digest"],
                                                base_image_analysis)

    # Handle output ("resulting") container image used during the build process.
    output_image_analysis = None
    output_image_analysis_id = None
    output_cached_document_id = None
    output_image_analysis_cached = False
    if output_image:
        output_image_info = {
            "image": output_image,
            "registry_user": output_registry_user,
            "registry_password": output_registry_password,
            "verify_tls": output_registry_verify_tls,
        }
        metadata_req = _do_get_image_metadata(**output_image_info)

        if metadata_req[1] != 200:
            # There was an error extracting metadata; the tuple holds a dictionary
            # with the error report and the HTTP status code.
            return metadata_req

        output_image_metadata = metadata_req[0]
        # We compute a digest of the parameters so we do not reveal any
        # authentication-specific info.
        parameters_digest = _compute_digest_params(output_image)
        output_cached_document_id = output_image_metadata["digest"] + "+" + parameters_digest

        output_image_analysis_id = OpenShift.generate_id("package-extract")
        if not force:
            try:
                output_image_analysis_id = cache.retrieve_document_record(
                    output_cached_document_id).pop("analysis_id")
                output_image_analysis_cached = True
            except CacheMiss:
                pass

        output_image_analysis = {
            "analysis_id": output_image_analysis_id,
            "cached": output_image_analysis_cached,
            "parameters": {
                "output_image": output_image,
                # "registry_password": output_registry_password,
                # "registry_user": output_registry_user,
                "registry_verify_tls": output_registry_verify_tls,
            },
        }

        analysis_by_digest_store = AnalysisByDigest()
        analysis_by_digest_store.connect()
        analysis_by_digest_store.store_document(
            output_image_metadata["digest"], output_image_analysis)

    message_parameters["base_image_analysis_id"] = (
        base_image_analysis_id if not base_image_analysis_cached else None
    )
    message_parameters["output_image_analysis_id"] = (
        output_image_analysis_id if not output_image_analysis_cached else None
    )

    response, status = _send_schedule_message(message_parameters,
                                              build_analysis_trigger_message,
                                              BuildAnalysisTriggerContent)
    if status != 202:
        # We do not return the response directly as it holds flattened data and
        # we need to make sure secrets are not propagated back.
        return response, status

    # Store all the ids to caches once the message is sent so subsequent calls work as expected.

    if base_cached_document_id:
        cache.store_document_record(base_cached_document_id,
                                    {"analysis_id": base_image_analysis_id})

    if output_cached_document_id:
        cache.store_document_record(output_cached_document_id,
                                    {"analysis_id": output_image_analysis_id})

    if build_log and not buildlog_analysis_id:
        buildlogs_cache = BuildLogsAnalysesCacheStore()
        buildlogs_cache.connect()
        cached_document_id = _compute_digest_params(build_log)
        buildlogs_cache.store_document_record(
            cached_document_id,
            {"analysis_id": message_parameters["buildlog_parser_id"]})

    if base_image_analysis or output_image_analysis:
        store = AnalysisResultsStore()
        store.connect()
        if base_image_analysis_id:
            store.store_request(base_image_analysis_id, base_image_analysis)
        if output_image_analysis_id:
            store.store_request(output_image_analysis_id, output_image_analysis)

    return {
        "base_image_analysis": base_image_analysis,
        "output_image_analysis": output_image_analysis,
        "buildlog_analysis": {
            "analysis_id": buildlog_analysis_id or message_parameters["buildlog_parser_id"],
            "cached": buildlog_analysis_id is not None,
        },
        "buildlog_document_id": buildlog_document_id,
    }, 202
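
The base-image and output-image branches repeat the same try/except CacheMiss lookup. A hedged sketch of how that pattern could be factored out (the helper name is an assumption; it reuses the CacheMiss exception and cache adapter already present in the module):

import typing


def _cached_analysis_id(cache, cached_document_id: str) -> typing.Optional[str]:
    """Return the previously scheduled analysis id, or None on a cache miss."""
    try:
        return cache.retrieve_document_record(cached_document_id).pop("analysis_id")
    except CacheMiss:
        return None

With such a helper, each branch reduces to checking force and falling back to OpenShift.generate_id("package-extract") when the lookup returns None.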