Example No. 1
def run_kebechet_administrator():
    """Run Kebechet Administrator to determine the repositories on which Kebechet will be triggered internally."""
    # We check if all the necessary env variables have been set correctly.
    _input_validation()

    _LOGGER.info(f"Kebechet administrator triggered by: {Configuration.MESSAGE_TYPE}")
    # If input validation passes, we call the specific handler to generate the messages for the producer.
    _message_handler[Configuration.MESSAGE_TYPE](Configuration)

    _LOGGER.info(f"Number of messages to be sent: {len(output_messages)}")
    _LOGGER.debug(f"Messages to be sent: {output_messages}")
    # Store the messages that need to be sent to a file.
    store_messages(output_messages)

    set_messages_metrics(
        metric_messages_sent=metric_messages_sent,
        message_type=kebechet_run_url_trigger_message.base_name,
        service_version=__service_version__,
        number_messages_sent=len(output_messages),
        trigger_message=Configuration.MESSAGE_TYPE,
    )

    set_schema_metrics()

    send_metrics()
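
The call `_message_handler[Configuration.MESSAGE_TYPE](Configuration)` above dispatches to a per-message-type handler through a plain dictionary. A minimal, self-contained sketch of that dispatch pattern follows; the message types and handler names are illustrative, not the project's actual ones.

def _handle_package_release(config) -> None:
    """Hypothetical handler: build output messages for a package-release trigger."""
    print(f"package-release handler called with {config!r}")


def _handle_build_analysis(config) -> None:
    """Hypothetical handler: build output messages for a build-analysis trigger."""
    print(f"build-analysis handler called with {config!r}")


# Each supported trigger message type maps to exactly one handler.
_example_message_handler = {
    "package-release": _handle_package_release,
    "build-analysis": _handle_build_analysis,
}

_example_message_handler["package-release"]("fake-configuration")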
Example No. 2
def main():
    """Create issues warning users of data purge and how to continue injesting knowledge from Thoth."""
    # Avoid accidentally passing empty-string filters to the database, which
    # would usually result in no entries being found.
    os_name = os.getenv("PURGE_OS_NAME") or None
    os_version = os.getenv("PURGE_OS_VERSION") or None
    python_version = os.getenv("PURGE_PYTHON_VERSION") or None

    all_installations = GRAPH.get_kebechet_github_installation_info_with_software_environment_all(
        os_name=os_name,
        os_version=os_version,
        python_version=python_version,
    )
    available_software_runtimes = GRAPH.get_solved_python_package_versions_software_environment_all()
    gh = GithubService(
        token=os.getenv("GITHUB_KEBECHET_TOKEN"),
        github_app_id=os.getenv("GITHUB_APP_ID"),
        github_private_key_path=os.getenv("GITHUB_PRIVATE_KEY_PATH"),
    )

    number_issues_total = len(all_installations)
    number_issues_created = 0

    for i in all_installations:
        try:
            p = gh.get_project(namespace=i["slug"].split("/")[0], repo=i["repo_name"])
            # We shouldn't have to check if the issue exists because the purge job is run for each env only once
            p.create_issue(
                title=f"{os_name}:{os_version}py{python_version} being purged from Thoth DB",
                body=_ISSUE_BODY.format(
                    os_name=os_name,
                    os_version=os_version,
                    python_version=python_version,
                    available_runtimes=json.dumps(available_software_runtimes, indent=4),
                ),
                private=i["private"],
                labels=["bot"],
            )

            number_issues_created += 1

        except Exception as e:
            _LOGGER.error(
                f"Could not create issue for {i['slug']} because: {e!r}")

    set_schema_metrics()
    number_purge_issues_created.labels(
        component="workflow-helpers", env=Configuration.THOTH_DEPLOYMENT_NAME
    ).set(number_issues_created)
    number_purge_issues_total.labels(
        component="workflow-helpers", env=Configuration.THOTH_DEPLOYMENT_NAME
    ).set(number_issues_total)
    send_metrics()
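
The number_purge_issues_created and number_purge_issues_total calls above follow the standard prometheus_client Gauge-with-labels pattern used by batch jobs. A minimal sketch of that pattern, assuming prometheus_client is available; the metric name, label values, and Pushgateway address are illustrative, not the project's configuration.

from prometheus_client import CollectorRegistry, Gauge, push_to_gateway

registry = CollectorRegistry()

# A gauge with two label dimensions, mirroring the component/env labels above.
issues_created = Gauge(
    "example_purge_issues_created",
    "Number of purge-warning issues created.",
    ["component", "env"],
    registry=registry,
)

issues_created.labels(component="workflow-helpers", env="example-deployment").set(3)

# Batch jobs push their metrics to a Pushgateway instead of being scraped.
push_to_gateway("pushgateway.example.com:9091", job="purge-issues", registry=registry)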
Example No. 3
def parse_provenance_checker_output() -> None:
    """Investigate on unresolved packages in provenance-checker output."""
    provenance_checker_run_path = Path(os.environ["FILE_PATH"])

    file_found = True

    unresolved_packages = []

    if not provenance_checker_run_path.exists():
        _LOGGER.warning(f"Cannot find the file on this path: {provenance_checker_run_path}")
        file_found = False

    if file_found:

        with open(provenance_checker_run_path, "r") as f:
            content = json.load(f)

        report = content["result"]["report"]

        if report:
            unresolved_packages = _parse_provenance_check_report(report=report)
        else:
            _LOGGER.warning("Report in the document is empty.")

        if not unresolved_packages:
            _LOGGER.warning("No packages to be solved with priority identified.")

        else:
            _LOGGER.info(f"Identified the following unresolved packages: {unresolved_packages}")

    solver = None  # No solver: all available solvers will be scheduled for the unresolved packages
    output_messages = []

    for package in unresolved_packages:

        message_input = UnresolvedPackageContents(
            component_name=__COMPONENT_NAME__,
            service_version=__service_version__,
            package_name=package["package_name"],
            package_version=package["package_version"],
            index_url=package["index_url"],
            solver=solver,
        ).dict()

        # We store the message to put in the output file here.
        output_messages.append({"topic_name": unresolved_package_message.base_name, "message_contents": message_input})

    # Store the messages that need to be sent to a file.
    store_messages(output_messages)

    set_messages_metrics(
        metric_messages_sent=metric_messages_sent,
        message_type=unresolved_package_message.base_name,
        service_version=__service_version__,
        number_messages_sent=len(output_messages),
    )
    set_schema_metrics()

    send_metrics()
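
Each entry appended to output_messages above is an envelope of the form {"topic_name": ..., "message_contents": ...} that store_messages (from thoth.workflow_helpers.common) later persists for the message producer. Purely to illustrate that envelope shape, a hypothetical stand-in writer might dump the list to a JSON file; this is not the actual store_messages implementation, and the topic and field values are made up.

import json
from typing import Any, Dict, List


def _example_store_messages(messages: List[Dict[str, Any]], path: str = "messages_to_be_sent.json") -> None:
    """Hypothetical stand-in for store_messages: persist message envelopes as JSON."""
    with open(path, "w") as f:
        json.dump(messages, f, indent=2)


_example_store_messages(
    [
        {
            "topic_name": "example-unresolved-package",
            "message_contents": {"package_name": "example-package", "package_version": "1.0.0"},
        }
    ]
)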
Example No. 4
"""This task is used to update the Graph schema for Thoth project."""

import logging

from thoth.workflow_helpers import __service_version__
from thoth.storages import GraphDatabase
from thoth.workflow_helpers.common import send_metrics

_LOGGER = logging.getLogger("thoth.graph_schema_update")
_LOGGER.info("Thoth workflow-helpers task: graph_schema_update v%s",
             __service_version__)


def update_schema() -> None:
    """Perform schema update for the graph database."""
    graph = GraphDatabase()
    graph.connect()

    graph.initialize_schema()


if __name__ == "__main__":
    send_metrics()
    update_schema()
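
update_schema simply connects the thoth-storages GraphDatabase adapter and calls initialize_schema. If the task needed to be more defensive, one hedged variant (illustrative only, not part of the original task) could log failures before re-raising, so the workflow step is clearly marked as failed.

def update_schema_verbose() -> None:
    """Illustrative variant of update_schema that logs failures before re-raising."""
    graph = GraphDatabase()
    graph.connect()
    try:
        graph.initialize_schema()
    except Exception:
        _LOGGER.exception("Graph schema update failed")
        raise
    _LOGGER.info("Graph schema update finished successfully")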
Example No. 5
def parse_adviser_output() -> None:
    """Investigate on unresolved packages in adviser output."""
    adviser_run_path = Path(os.environ["FILE_PATH"])

    file_found = True
    unresolved_found = True

    unresolved_packages = []
    packages_to_solve = {}

    if not adviser_run_path.exists():
        _LOGGER.warning(
            f"Cannot find the file on this path: {adviser_run_path}")
        file_found = False

    if file_found:

        with open(adviser_run_path, "r") as f:
            content = json.load(f)

        report = content["result"]["report"]

        if report:
            errors_details = report.get("_ERROR_DETAILS")
            if errors_details:
                unresolved_packages = errors_details["unresolved"]

        if not unresolved_packages:
            _LOGGER.warning(
                "No packages to be solved with priority identified.")
            unresolved_found = False

        if unresolved_found:
            _LOGGER.info(
                f"Identified the following unresolved packages: {unresolved_packages}"
            )

            parameters = content["result"]["parameters"]
            runtime_environment = parameters["project"].get(
                "runtime_environment")

            solver = OpenShift.obtain_solver_from_runtime_environment(
                runtime_environment=runtime_environment)

            requirements = parameters["project"].get("requirements")

            pipfile = Pipfile.from_dict(requirements)
            packages = pipfile.packages.packages
            dev_packages = pipfile.dev_packages.packages

            for package_name in unresolved_packages:

                if package_name in packages:
                    packages_to_solve[package_name] = packages[package_name]

                if package_name in dev_packages:
                    packages_to_solve[package_name] = dev_packages[package_name]

            _LOGGER.info(f"Unresolved packages identified: {packages_to_solve}")

    output_messages = []

    for package, package_info in packages_to_solve.items():

        message_input = UnresolvedPackageContents(
            component_name=__COMPONENT_NAME__,
            service_version=__service_version__,
            package_name=package_info.name,
            package_version=package_info.version,
            index_url=package_info.index,
            solver=solver,
        ).dict()

        # We store the message to put in the output file here.
        output_messages.append({
            "topic_name": unresolved_package_message.base_name,
            "message_contents": message_input
        })

    # Store the messages that need to be sent to a file.
    store_messages(output_messages)

    set_messages_metrics(
        metric_messages_sent=metric_messages_sent,
        message_type=unresolved_package_message.base_name,
        service_version=__service_version__,
        number_messages_sent=len(output_messages),
    )

    set_schema_metrics()

    send_metrics()
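
UnresolvedPackageContents comes from thoth-messaging, and its .dict() call suggests a pydantic-style model. As an assumption-labeled sketch of how such message contents could be modeled with pydantic v1, with the field names taken from the call above but the class definition itself hypothetical:

from typing import Optional

from pydantic import BaseModel


class ExampleUnresolvedPackageContents(BaseModel):
    """Hypothetical pydantic model mirroring the fields passed above."""

    component_name: str
    service_version: str
    package_name: str
    package_version: Optional[str] = None
    index_url: Optional[str] = None
    solver: Optional[str] = None


contents = ExampleUnresolvedPackageContents(
    component_name="workflow-helpers",
    service_version="0.1.0",
    package_name="example-package",
    package_version="1.0.0",
    index_url="https://pypi.org/simple",
    solver=None,
)
print(contents.dict())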