Example 1
# Imports as in the thoth-station user-api module (import paths assumed from
# thoth-common and thoth-storages); _do_schedule, _OPENSHIFT and Configuration
# are defined elsewhere in that module.
from typing import Any, Dict, Optional

from thoth.common import OpenShift
from thoth.storages import DependencyMonkeyRequestsStore


def post_dependency_monkey_python(
    input: Dict[str, Any],
    seed: Optional[int] = None,
    dry_run: bool = False,
    decision: Optional[str] = None,
    debug: bool = False,
    count: Optional[int] = None,
    limit_latest_versions: Optional[int] = None,
):
    """Run dependency monkey on the given application stack to produce all the possible software stacks."""
    parameters = {
        "requirements": input.pop("requirements"),
        "context": input.pop("context"),
        "pipeline": input.pop("pipeline", None),
        "predictor": input.pop("predictor", None),
        "predictor_config": input.pop("predictor_config", None),
        "runtime_environment": input.pop("runtime_environment", None),
        "job_id": OpenShift.generate_id("dependency-monkey"),
        "seed": seed,
        "dry_run": dry_run,
        "decision": decision,
        "debug": debug,
        "count": count,
        "limit_latest_versions": limit_latest_versions,
    }

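    # Persist the full request (including requirements and context) so the scheduled
    # job can read them back from the store; only the remaining fields are sent on.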
    store = DependencyMonkeyRequestsStore()
    store.connect()
    store.store_request(parameters["job_id"], parameters)

    # These parts are reused from the stored request and are not sent via messages.
    parameters.pop("requirements")
    parameters.pop("context")
    parameters.pop("pipeline")
    parameters.pop("runtime_environment")

    return _do_schedule(
        parameters,
        _OPENSHIFT.schedule_dependency_monkey,
        stack_output=Configuration.THOTH_DEPENDENCY_MONKEY_STACK_OUTPUT,
    )
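
A minimal usage sketch, not part of the source: it assumes a Connexion-style handler where the request body arrives as the input dictionary; all payload values below are illustrative placeholders.

# Hypothetical payload; the keys mirror the input.pop(...) calls above.
payload = {
    "requirements": {"packages": {"flask": "*"}},  # application stack to resolve
    "context": {},  # resolver context
    "pipeline": None,
    "predictor": None,
    "predictor_config": None,
    "runtime_environment": {"operating_system": {"name": "rhel", "version": "8"}},
}

result = post_dependency_monkey_python(payload, seed=42, count=10, dry_run=True)
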
Example 2
# Imports as in the thoth-station user-api module (import paths assumed from
# thoth-common and thoth-storages); module-level helpers such as _store_build_log,
# _do_get_image_metadata, _compute_digest_params and _send_schedule_message, and the
# build_analysis_trigger_message / BuildAnalysisTriggerContent message objects, are
# defined elsewhere in that module.
import typing

from thoth.common import OpenShift
from thoth.storages import AnalysesCacheStore
from thoth.storages import AnalysisByDigest
from thoth.storages import AnalysisResultsStore
from thoth.storages import BuildLogsAnalysesCacheStore
from thoth.storages.exceptions import CacheMiss


def post_build(
    build_detail: typing.Dict[str, typing.Any],
    *,
    base_registry_password: typing.Optional[str] = None,
    base_registry_user: typing.Optional[str] = None,
    base_registry_verify_tls: bool = True,
    output_registry_password: typing.Optional[str] = None,
    output_registry_user: typing.Optional[str] = None,
    output_registry_verify_tls: bool = True,
    debug: bool = False,
    environment_type: typing.Optional[str] = None,
    force: bool = False,
    origin: typing.Optional[str] = None,
) -> typing.Tuple[typing.Dict[str, typing.Any], int]:
    """Run analysis on a build."""
    output_image = build_detail.get("output_image")
    base_image = build_detail.get("base_image")
    build_log = build_detail.get("build_log")

    if not output_image and not base_image and not build_log:
        return {"error": "No base image, output image, or build log provided"}, 400

    buildlog_analysis_id = None
    buildlog_document_id = None
    if build_log:
        buildlog_document_id, buildlog_analysis_id = _store_build_log(build_log, force=force)

    message_parameters = {
        "base_image_analysis_id": None,  # Assigned below.
        "base_image": base_image,
        "base_registry_password": base_registry_password,
        "base_registry_user": base_registry_user,
        "base_registry_verify_tls": base_registry_verify_tls,
        "output_image_analysis_id": None,  # Assigned below.
        "output_image": output_image,
        "output_registry_password": output_registry_password,
        "output_registry_user": output_registry_user,
        "output_registry_verify_tls": output_registry_verify_tls,
        "environment_type": environment_type,
        "buildlog_document_id": buildlog_document_id,
        "buildlog_parser_id": (
            None if buildlog_analysis_id else OpenShift.generate_id("buildlog-parser")
        ),
        "origin": origin,
        "debug": debug,
        "job_id": OpenShift.generate_id("build-analysis"),
    }

    cache = AnalysesCacheStore()
    cache.connect()

    # Handle the base container image used during the build process.
    base_image_analysis = None
    base_image_analysis_id = None
    base_cached_document_id = None
    base_image_analysis_cached = False
    if base_image:
        base_image_info = {
            "image": base_image,
            "registry_user": base_registry_user,
            "registry_password": base_registry_password,
            "verify_tls": base_registry_verify_tls,
        }
        metadata_req = _do_get_image_metadata(**base_image_info)

        if metadata_req[1] != 200:
            # Metadata extraction failed; the tuple holds an error report dictionary and an HTTP status code.
            return metadata_req

        base_image_metadata = metadata_req[0]
        # We compute a digest of the parameters so we do not reveal any authentication-specific info.
        parameters_digest = _compute_digest_params(base_image)
        base_cached_document_id = base_image_metadata["digest"] + "+" + parameters_digest

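        # Generate a fresh analysis id, then prefer a previously cached id for this
        # image digest unless a new analysis is forced.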
        base_image_analysis_id = OpenShift.generate_id("package-extract")
        if not force:
            try:
                base_image_analysis_id = cache.retrieve_document_record(
                    base_cached_document_id
                ).pop("analysis_id")
                base_image_analysis_cached = True
            except CacheMiss:
                pass

        base_image_analysis = {
            "analysis_id": base_image_analysis_id,
            "cached": base_image_analysis_cached,
            "parameters": {
                "base_image": base_image,
                # "registry_password": base_registry_password,
                # "registry_user": base_registry_user,
                "registry_verify_tls": base_registry_verify_tls,
            },
        }

        analysis_by_digest_store = AnalysisByDigest()
        analysis_by_digest_store.connect()
        analysis_by_digest_store.store_document(base_image_metadata["digest"], base_image_analysis)

    # Handle output ("resulting") container image used during the build process.
    output_image_analysis = None
    output_image_analysis_id = None
    output_cached_document_id = None
    output_image_analysis_cached = False
    if output_image:
        output_image_info = {
            "image": output_image,
            "registry_user": output_registry_user,
            "registry_password": output_registry_password,
            "verify_tls": output_registry_verify_tls,
        }
        metadata_req = _do_get_image_metadata(**output_image_info)

        if metadata_req[1] != 200:
            # Metadata extraction failed; the tuple holds an error report dictionary and an HTTP status code.
            return metadata_req

        output_image_metadata = metadata_req[0]
        # We compute a digest of the parameters so we do not reveal any authentication-specific info.
        parameters_digest = _compute_digest_params(output_image)
        output_cached_document_id = output_image_metadata["digest"] + "+" + parameters_digest

        output_image_analysis_id = OpenShift.generate_id("package-extract")
        if not force:
            try:
                output_image_analysis_id = cache.retrieve_document_record(
                    output_cached_document_id
                ).pop("analysis_id")
                output_image_analysis_cached = True
            except CacheMiss:
                pass

        output_image_analysis = {
            "analysis_id": output_image_analysis_id,
            "cached": output_image_analysis_cached,
            "parameters": {
                "output_image": output_image,
                # "registry_password": output_registry_password,
                # "registry_user": output_registry_user,
                "registry_verify_tls": output_registry_verify_tls,
            },
        }

        analysis_by_digest_store = AnalysisByDigest()
        analysis_by_digest_store.connect()
        analysis_by_digest_store.store_document(output_image_metadata["digest"], output_image_analysis)

    message_parameters["base_image_analysis_id"] = (
        base_image_analysis_id if not base_image_analysis_cached else None
    )
    message_parameters["output_image_analysis_id"] = (
        output_image_analysis_id if not output_image_analysis_cached else None
    )

    response, status = _send_schedule_message(
        message_parameters, build_analysis_trigger_message, BuildAnalysisTriggerContent
    )
    if status != 202:
        # Scheduling failed; propagate the error response and status code back to the caller.
        return response, status

    # Store all the ids to caches once the message is sent so subsequent calls work as expected.

    if base_cached_document_id:
        cache.store_document_record(
            base_cached_document_id, {"analysis_id": base_image_analysis_id}
        )

    if output_cached_document_id:
        cache.store_document_record(
            output_cached_document_id, {"analysis_id": output_image_analysis_id}
        )

    if build_log and not buildlog_analysis_id:
        buildlogs_cache = BuildLogsAnalysesCacheStore()
        buildlogs_cache.connect()
        cached_document_id = _compute_digest_params(build_log)
        buildlogs_cache.store_document_record(
            cached_document_id, {"analysis_id": message_parameters["buildlog_parser_id"]}
        )

    if base_image_analysis or output_image_analysis:
        store = AnalysisResultsStore()
        store.connect()
        if base_image_analysis_id:
            store.store_request(base_image_analysis_id, base_image_analysis)
        if output_image_analysis_id:
            store.store_request(output_image_analysis_id, output_image_analysis)

    return {
        "base_image_analysis": base_image_analysis,
        "output_image_analysis": output_image_analysis,
        "buildlog_analysis": {
            "analysis_id": buildlog_analysis_id or message_parameters["buildlog_parser_id"],
            "cached": buildlog_analysis_id is not None,
        },
        "buildlog_document_id": buildlog_document_id,
    }, 202
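
A hedged usage sketch, not part of the source: the build_detail keys follow the build_detail.get(...) lookups above; all image references and URLs are placeholders, and registry credentials are omitted (they default to None).

build_detail = {
    "base_image": "registry.example.com/ubi8/python-38:latest",  # placeholder
    "output_image": "registry.example.com/myapp/app:1.0.0",  # placeholder
    "build_log": None,  # optionally the raw build log text
}

response, status_code = post_build(
    build_detail,
    environment_type="runtime",
    origin="https://github.com/example/app",  # placeholder origin URL
)
# Returns 202 on successful scheduling, 400 when no image or build log is given.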