Example #1
0
def _instantiate_project(
    requirements: str,
    requirements_locked: Optional[str] = None,
    *,
    runtime_environment: Optional[RuntimeEnvironment] = None,
    constraints: Optional[str] = None,
):
    """Create Project instance based on arguments passed to CLI.

    Each of ``requirements``, ``requirements_locked`` and ``constraints`` may
    be either a path to a file or the raw content itself (as gathered from an
    environment variable, with newlines escaped as ``\\n``).
    """
    try:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()
    except OSError:  # FileNotFoundError is a subclass of OSError.
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

    if requirements_locked:
        try:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)
    pipfile_lock = None
    # The literal string "null" marks an explicitly absent lock file.
    if requirements_locked and requirements_locked != "null":
        pipfile_lock = PipfileLock.from_string(requirements_locked, pipfile)

    constraints_instance = None
    if constraints:
        try:
            with open(constraints, "r") as constraints_file:
                constraints_content = constraints_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            constraints_content = constraints.replace("\\n", "\n")

        # Constraints can be supplied either as a JSON document or as a
        # requirements.txt-style listing; try JSON first.
        try:
            constraints_instance = Constraints.from_dict(
                json.loads(constraints_content))
        except json.decoder.JSONDecodeError:
            constraints_instance = Constraints.from_string(constraints_content)

    runtime_environment = runtime_environment or RuntimeEnvironment.from_dict(
        {})
    if not runtime_environment.platform:
        runtime_environment.platform = _DEFAULT_PLATFORM

    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment,
        constraints=constraints_instance or Constraints(),
    )

    return project
 def test_verify_multiple_should_include(self, builder_context: PipelineBuilderContext) -> None:
     """Check that repeated should_include invocations terminate."""
     builder_context.project.runtime_environment.python_version = "3.9"
     constraints_path = self.data_dir / "constraints" / "constraints_0.txt"
     builder_context.project.constraints = Constraints.from_file(str(constraints_path))
     self.verify_multiple_should_include(builder_context)
 def test_not_include(self, builder_context: PipelineBuilderContext) -> None:
     """Verify the pipeline unit is not registered for an unsupported Python."""
     constraints_path = self.data_dir / "constraints" / "constraints_0.txt"
     builder_context.project.constraints = Constraints.from_file(str(constraints_path))
     builder_context.project.runtime_environment.python_version = "1.9"
     assert list(self.UNIT_TESTED.should_include(builder_context)) == []
 def test_include(self, builder_context: PipelineBuilderContext) -> None:
     """Verify the pipeline unit is registered with the expected configuration."""
     constraints_path = self.data_dir / "constraints" / "constraints_0.txt"
     builder_context.project.constraints = Constraints.from_file(str(constraints_path))
     builder_context.project.runtime_environment.python_version = "3.9"
     expected_config = [{"package_name": "numpy", "specifier": "~=2.0"}]
     assert list(self.UNIT_TESTED.should_include(builder_context)) == expected_config
    def test_include_version(self, builder_context: PipelineBuilderContext, context: "Context") -> None:
        """Verify inclusion and run behaviour for a constraint without a version specifier."""
        builder_context.project.runtime_environment.python_version = "3.9"
        constraints_path = self.data_dir / "constraints" / "constraints_1.txt"
        builder_context.project.constraints = Constraints.from_file(str(constraints_path))

        configurations = list(self.UNIT_TESTED.should_include(builder_context))

        # A version-less constraint is reported with the wildcard specifier.
        assert configurations == [{"package_name": "numpy", "specifier": "*"}]
        assert len(configurations) == 1

        unit = self.UNIT_TESTED()
        unit.update_configuration(configurations[0])

        package_version = PackageVersion(
            name="numpy", version="==0.0.1", index=Source("https://pypi.org/simple"), develop=True
        )

        with unit.assigned_context(context):
            unit.pre_run()
            assert list(unit.run(iter((package_version,)))) == [package_version]
Example #6
0
def post_advise_python(
    input: dict,
    recommendation_type: typing.Optional[str] = None,
    count: typing.Optional[int] = None,
    limit: typing.Optional[int] = None,
    source_type: typing.Optional[str] = None,
    debug: bool = False,
    force: bool = False,
    dev: bool = False,
    origin: typing.Optional[str] = None,
    token: typing.Optional[str] = None,
    github_event_type: typing.Optional[str] = None,
    github_check_run_id: typing.Optional[int] = None,
    github_installation_id: typing.Optional[int] = None,
    github_base_repo_url: typing.Optional[str] = None,
):
    """Compute results for the given package or package stack using adviser.

    Validates the request, consults the adviser cache, and schedules an
    adviser run if no fresh cached result exists.  Returns a
    ``(payload, status)`` tuple for the web framework.
    """
    parameters = locals()
    # Translate request body parameters into top-level parameters.
    for key in ("application_stack", "justification", "stack_info",
                "kebechet_metadata", "labels"):
        parameters[key] = parameters["input"].pop(key, None)

    token = parameters.pop("token", None)

    authenticated = False
    if token is not None:
        if Configuration.API_TOKEN != token:
            return {"error": "Bad token supplied"}, 401

        authenticated = True
    else:
        # Unauthenticated requests must not supply any protected field.
        for k in _ADVISE_PROTECTED_FIELDS:
            if parameters[k] is not None:
                return {
                    "error":
                    f"Parameter {k!r} requires token to be set to perform authenticated request"
                }, 401

    # Always try to parse runtime environment so that we have it available in JSON reports in a unified form.
    try:
        parameters["runtime_environment"] = RuntimeEnvironment.from_dict(
            parameters["input"].pop("runtime_environment", {})).to_dict()
    except Exception as exc:
        # Explicit 400: without a status code the framework would serve this
        # error payload as 200 OK, unlike the other validation failures below.
        return {
            "parameters": parameters,
            "error": f"Failed to parse runtime environment: {str(exc)}"
        }, 400

    try:
        constraints = Constraints.from_string(
            parameters["input"].pop("constraints", None) or "")
    except Exception as exc:
        return {
            "parameters": parameters,
            "error": f"Invalid constraints supplied: {str(exc)}"
        }, 400

    parameters["library_usage"] = parameters["input"].pop(
        "library_usage", None)
    parameters.pop("input")
    force = parameters.pop("force", False)

    if parameters[
            "library_usage"]:  # Sort library usage to hit cache properly.
        for key, value in (parameters["library_usage"].get("report")
                           or {}).items():
            parameters["library_usage"]["report"][key] = sorted(value)

    try:
        project = Project.from_strings(
            parameters["application_stack"]["requirements"],
            parameters["application_stack"].get("requirements_lock"),
            runtime_environment=RuntimeEnvironment.from_dict(
                parameters["runtime_environment"]),
            constraints=constraints,
        )
    except ThothPythonException as exc:
        return {
            "parameters": parameters,
            "error": f"Invalid application stack supplied: {str(exc)}"
        }, 400
    except Exception:
        # Deliberately opaque: do not leak parsing internals for unexpected
        # failures on user-supplied input.
        return {
            "parameters": parameters,
            "error": "Invalid application stack supplied"
        }, 400

    # We could rewrite this to a decorator and make it shared with provenance
    # checks etc, but there are small glitches why the solution would not be
    # generic enough to be used for all POST endpoints.
    adviser_cache = AdvisersCacheStore()
    adviser_cache.connect()

    timestamp_now = int(time.mktime(datetime.datetime.utcnow().timetuple()))
    # Authenticated requests include protected fields in the cache key so
    # they never collide with anonymous requests.  The two dict literals are
    # kept separate on purpose: their insertion order may feed the digest.
    if authenticated:
        cached_document_id = _compute_digest_params(
            dict(
                **project.to_dict(),
                count=parameters["count"],
                limit=parameters["limit"],
                library_usage=parameters["library_usage"],
                recommendation_type=recommendation_type,
                origin=origin,
                source_type=source_type.upper() if source_type else None,
                dev=dev,
                debug=parameters["debug"],
                github_event_type=parameters["github_event_type"],
                github_check_run_id=parameters["github_check_run_id"],
                github_installation_id=parameters["github_installation_id"],
                github_base_repo_url=parameters["github_base_repo_url"],
                kebechet_metadata=parameters["kebechet_metadata"],
                labels=parameters["labels"],
            ))
    else:
        cached_document_id = _compute_digest_params(
            dict(
                **project.to_dict(),
                count=parameters["count"],
                limit=parameters["limit"],
                library_usage=parameters["library_usage"],
                recommendation_type=recommendation_type,
                dev=dev,
                debug=parameters["debug"],
                labels=parameters["labels"],
            ))

    if not force:
        try:
            cache_record = adviser_cache.retrieve_document_record(
                cached_document_id)
            if cache_record[
                    "timestamp"] + Configuration.THOTH_CACHE_EXPIRATION > timestamp_now:
                return {
                    "analysis_id": cache_record.pop("analysis_id"),
                    "cached": True,
                    "authenticated": authenticated,
                    "parameters": parameters,
                }, 202
        except CacheMiss:
            pass

    # Enum type is checked on thoth-common side to avoid serialization issue in user-api side when providing response
    parameters["source_type"] = source_type.upper() if source_type else None
    parameters["constraints"] = constraints.to_dict()
    parameters["job_id"] = _OPENSHIFT.generate_id("adviser")
    # Remove data passed via Ceph.
    message = dict(**parameters, authenticated=authenticated)
    message.pop("application_stack")
    message.pop("runtime_environment")
    message.pop("library_usage")
    message.pop("labels")
    message.pop("constraints")
    response, status = _send_schedule_message(message,
                                              adviser_trigger_message,
                                              AdviserTriggerContent,
                                              with_authentication=True,
                                              authenticated=authenticated)

    if status == 202:
        adviser_cache.store_document_record(
            cached_document_id, {
                "analysis_id": response["analysis_id"],
                "timestamp": timestamp_now
            })

        # Store the request for traceability.
        store = AdvisersResultsStore()
        store.connect()
        store.store_request(parameters["job_id"], parameters)

    return response, status