Example #1
    def new_version(self, base_version=None):
        """
        Create a new RepositoryVersion for this Repository.

        Creation of a RepositoryVersion should be done in an RQ Job.

        Args:
            base_version (pulpcore.app.models.RepositoryVersion): an optional repository version
                whose content will be used as the set of content for the new version

        Returns:
            pulpcore.app.models.RepositoryVersion: The created RepositoryVersion

        """
        with transaction.atomic():
            version = RepositoryVersion(
                repository=self,
                number=int(self.next_version),
                base_version=base_version)
            version.save()

            if base_version:
                # first remove the content that isn't in the base version
                version.remove_content(version.content.exclude(pk__in=base_version.content))
                # now add any content that's in the base_version but not in version
                version.add_content(base_version.content.exclude(pk__in=version.content))

            if Task.current() and not self.sub_repo:
                resource = CreatedResource(content_object=version)
                resource.save()
            return version
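
A minimal usage sketch, assuming this method runs inside a task and a `repository` with an existing version number 1 is at hand (the variable names are illustrative, not part of the API above):

# Hypothetical call site: build a new version on top of version 1.
base = repository.versions.get(number=1)             # assumes version 1 exists
version = repository.new_version(base_version=base)
# `version` now contains exactly the content set of `base`.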
Example #2
def migrate_from_pulp2(
    migration_plan_pk, validate=False, dry_run=False, skip_corrupted=False
):
    """
    Main task to migrate from Pulp 2 to Pulp 3.

    Schedule other tasks based on the specified Migration Plan.

    Args:
        migration_plan_pk (str): The migration plan PK.
        validate (bool): If True, don't migrate unless validation is successful.
        dry_run (bool): If True, nothing is migrated, only validation happens.
        skip_corrupted (bool): If True, corrupted content is skipped during migration
                               instead of failing the task.
    """

    # MongoDB connection initialization
    connection.initialize()

    plan = MigrationPlan.objects.get(pk=migration_plan_pk)
    missing_resources = plan.get_missing_resources()

    if (validate or dry_run) and missing_resources:
        raise PlanValidationError(
            "Validation failed: resources missing {}".format(missing_resources)
        )

    if dry_run:
        return

    task_group = TaskGroup(description="Migration Sub-tasks")
    task_group.save()
    GroupProgressReport(
        message="Repo version creation",
        code="create.repo_version",
        task_group=task_group,
    ).save()
    GroupProgressReport(
        message="Distribution creation",
        code="create.distribution",
        task_group=task_group,
    ).save()
    current_task = Task.current()
    current_task.task_group = task_group
    current_task.save()
    resource = CreatedResource(content_object=task_group)
    resource.save()

    # TODO: if plan is empty for a plugin, only migrate downloaded content

    pre_migrate_all_without_content(plan)
    pre_migrate_all_content(plan)
    handle_outdated_resources(plan)
    migrate_repositories(plan)
    migrate_importers(plan)
    migrate_content(plan, skip_corrupted=skip_corrupted)
    create_repoversions_publications_distributions(plan)

    task_group.finish()
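
The two GroupProgressReport rows created above act as shared counters for the scheduled sub-tasks. A sketch of how a sub-task might bump the "create.repo_version" counter, mirroring the F() update pattern used in Example #7 below (the helper name is illustrative):

from django.db.models import F

def record_repo_version_created(task_group):
    # Atomically increment the shared "done" counter of the group report.
    task_group.group_progress_reports.filter(
        code="create.repo_version"
    ).update(done=F("done") + 1)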
Example #3
def get_created_collection_versions():
    """Return the CollectionVersions recorded as created resources of the current task."""
    current_task = Task.current()
    created_resources = current_task.created_resources.filter(
        content_type_id=ContentType.objects.get_for_model(CollectionVersion))

    # TODO: replace with values_list
    created_collection_versions = []
    for created_resource in created_resources:
        collection_version = created_resource.content_object

        created_collection_versions.append(collection_version)

    return created_collection_versions
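
A sketch of the values_list rewrite the TODO asks for: since `content_object` is a generic foreign key, the object IDs can be collected first and the CollectionVersion rows fetched in one query (this assumes the usual Django generic-relation layout where `object_id` holds the target pk):

def get_created_collection_versions():
    created_pks = Task.current().created_resources.filter(
        content_type_id=ContentType.objects.get_for_model(CollectionVersion)
    ).values_list("object_id", flat=True)
    # One query for all versions instead of one query per created resource.
    return list(CollectionVersion.objects.filter(pk__in=created_pks))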
Example #4
    def emit(self, record):
        """
        Log `record` into the `CollectionImport.messages` field of the current task.

        Args:
            record (logging.LogRecord): The record to log.

        """
        # This import cannot occur at import time because Django attempts to instantiate it early
        # which causes an unavoidable circular import as long as this needs to import any model
        from .models import CollectionImport
        from pulpcore.plugin.models import Task

        collection_import = CollectionImport.objects.get(
            task=Task.current().pulp_id)
        collection_import.add_log_record(record)
        collection_import.save()
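
This emit() belongs on a logging.Handler subclass (the class statement is outside the excerpt). A sketch of how such a handler could be attached so that user-facing import messages land in CollectionImport.messages, assuming the class is called CollectionImportHandler (a hypothetical name):

import logging

logger = logging.getLogger("pulp_ansible.app.tasks.collection.import_collection")
logger.addHandler(CollectionImportHandler())  # hypothetical class wrapping emit() above
logger.info("Importing collection ...")       # stored via CollectionImport.add_log_record()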
Example #5
def migrate_from_pulp2(migration_plan_pk, validate=False, dry_run=False):
    """
    Main task to migrate from Pulp 2 to Pulp 3.

    Schedule other tasks based on the specified Migration Plan.

    Args:
        migration_plan_pk (str): The migration plan PK.
        validate (bool): If True, don't migrate unless validation is successful.
        dry_run (bool): If True, nothing is migrated, only validation happens.
    """
    def get_repo_types(plan):
        """
        Create mappings for Pulp 2 repository types.

        The type is identified by inspecting the content of a repo.
        One mapping is repo_id -> repo_type, the other is repo_type -> list of repo_ids.

        They are used later during pre-migration and to identify repos removed from Pulp 2.

        Args:
            plan (MigrationPlan): A Migration Plan

        Returns:
            repo_id_to_type (dict): mapping from a pulp 2 repo_id to a plugin/repo type
            type_to_repo_ids (dict): mapping from a plugin/repo type to the list of repo_ids

        """
        repo_id_to_type = {}
        type_to_repo_ids = defaultdict(set)

        # mapping content type -> plugin/repo type, e.g. 'docker_blob' -> 'docker'
        content_type_to_plugin = {}

        for plugin in plan.get_plugin_plans():
            for content_type in plugin.migrator.pulp2_content_models:
                content_type_to_plugin[content_type] = plugin.migrator.pulp2_plugin

            repos = set(plugin.get_repositories())
            repos |= set(plugin.get_importers_repos())
            repos |= set(plugin.get_distributors_repos())

            for repo in repos:
                repo_id_to_type[repo] = plugin.type
            type_to_repo_ids[plugin.type].update(repos)

        # TODO: optimizations.
        # It looks at each content at the moment. Potential optimizations:
        #  - This is a big query, paginate?
        #  - Filter by repos from the plan
        #  - Query any but one record for a repo
        for rec in RepositoryContentUnit.objects().only(
                'repo_id', 'unit_type_id').as_pymongo().no_cache():
            repo_id = rec['repo_id']
            unit_type_id = rec['unit_type_id']

            # a type for a repo is already known or this content/repo type is not supported
            if repo_id in repo_id_to_type or unit_type_id not in content_type_to_plugin:
                continue
            plugin_name = content_type_to_plugin[unit_type_id]
            repo_id_to_type[repo_id] = plugin_name
            type_to_repo_ids[plugin_name].add(repo_id)

        return repo_id_to_type, type_to_repo_ids

    # MongoDB connection initialization
    connection.initialize()

    plan = MigrationPlan.objects.get(pk=migration_plan_pk)
    missing_resources = plan.get_missing_resources()

    if (validate or dry_run) and missing_resources:
        raise PlanValidationError(
            "Validation failed: resources missing {}".format(
                missing_resources))

    if dry_run:
        return

    task_group = TaskGroup(description="Migration Sub-tasks")
    task_group.save()
    GroupProgressReport(message="Repo version creation",
                        code="create.repo_version",
                        task_group=task_group).save()
    GroupProgressReport(message="Distribution creation",
                        code="create.distribution",
                        task_group=task_group).save()
    current_task = Task.current()
    current_task.task_group = task_group
    current_task.save()
    resource = CreatedResource(content_object=task_group)
    resource.save()

    # call it here and not inside steps below to generate mapping only once
    repo_id_to_type, type_to_repo_ids = get_repo_types(plan)

    # TODO: if plan is empty for a plugin, only migrate downloaded content

    pre_migrate_all_without_content(plan, type_to_repo_ids, repo_id_to_type)
    pre_migrate_all_content(plan)
    mark_removed_resources(plan, type_to_repo_ids)
    delete_old_resources(plan)
    migrate_repositories(plan)
    migrate_importers(plan)
    migrate_content(plan)
    create_repoversions_publications_distributions(plan)

    task_group.finish()
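
To make the two mappings returned by get_repo_types() concrete, a hypothetical result for a deployment with two RPM repos and one Docker repo could look like this (illustrative data only):

repo_id_to_type = {
    "rpm-repo-1": "rpm",
    "rpm-repo-2": "rpm",
    "busybox": "docker",
}
type_to_repo_ids = {
    "rpm": {"rpm-repo-1", "rpm-repo-2"},
    "docker": {"busybox"},
}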
Example #6
def import_collection(
    temp_file_pk,
    repository_pk=None,
    expected_namespace=None,
    expected_name=None,
    expected_version=None,
):
    """
    Create a Collection from an uploaded artifact and optionally validate its expected metadata.

    This task provides optional validation of the `namespace`, `name`, and `version` metadata
    attributes. If the Artifact fails validation or parsing, the Artifact is deleted and the
    Collection is not created.

    This task performs a CollectionImport object get_or_create() to allow import messages to be
    logged.

    Args:
        temp_file_pk (str): The pk of the PulpTemporaryFile to create the Collection from.

    Keyword Args:
        repository_pk (str): Optional. If specified, a new RepositoryVersion will be created for the
            Repository and any new Collection content associated with it.
        expected_namespace (str): Optional. The namespace is validated against the namespace
            specified in the Collection's metadata. If it does not match, an ImporterError is
            raised.
        expected_name (str): Optional. The name is validated against the name specified in the
            Collection's metadata. If it does not match, an ImporterError is raised.
        expected_version (str): Optional. The version is validated against the version specified in
            the Collection's metadata. If it does not match, an ImporterError is raised.

    Raises:
        ImporterError: If the `expected_namespace`, `expected_name`, or `expected_version` do not
            match the metadata in the tarball.

    """
    CollectionImport.objects.get_or_create(task_id=Task.current().pulp_id)

    temp_file = PulpTemporaryFile.objects.get(pk=temp_file_pk)
    filename = CollectionFilename(expected_namespace, expected_name,
                                  expected_version)
    log.info(f"Processing collection from {temp_file.file.name}")
    user_facing_logger = logging.getLogger(
        "pulp_ansible.app.tasks.collection.import_collection")

    try:
        with temp_file.file.open() as artifact_file:
            with tarfile.open(fileobj=artifact_file, mode="r") as tar:
                manifest_data = json.load(
                    get_file_obj_from_tarball(tar, "MANIFEST.json",
                                              temp_file.file.name))
                files_data = json.load(
                    get_file_obj_from_tarball(tar, "FILES.json",
                                              temp_file.file.name))
            url = _get_backend_storage_url(artifact_file)
            artifact_file.seek(0)
            importer_result = process_collection(artifact_file,
                                                 filename=filename,
                                                 file_url=url,
                                                 logger=user_facing_logger)
            artifact = Artifact.from_pulp_temporary_file(temp_file)
            importer_result["artifact_url"] = reverse("artifacts-detail",
                                                      args=[artifact.pk])
            collection_version = create_collection_from_importer(
                importer_result)
            collection_version.manifest = manifest_data
            collection_version.files = files_data
            collection_version.save()

    except ImporterError as exc:
        log.info(f"Collection processing was not successful: {exc}")
        temp_file.delete()
        raise
    except Exception as exc:
        user_facing_logger.error(
            f"Collection processing was not successful: {exc}")
        temp_file.delete()
        raise

    ContentArtifact.objects.create(
        artifact=artifact,
        content=collection_version,
        relative_path=collection_version.relative_path,
    )
    CreatedResource.objects.create(content_object=collection_version)

    if repository_pk:
        repository = AnsibleRepository.objects.get(pk=repository_pk)
        content_q = CollectionVersion.objects.filter(pk=collection_version.pk)
        with repository.new_version() as new_version:
            new_version.add_content(content_q)
        CreatedResource.objects.create(content_object=repository)
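
A sketch of how a caller might queue this task, following the enqueue_with_reservation pattern from Example #7 (the dispatch API varies across pulpcore versions, so treat the exact call as an assumption):

enqueue_with_reservation(
    import_collection,
    [repository],                           # reserve the target repository
    kwargs={
        "temp_file_pk": str(temp_file.pk),
        "repository_pk": str(repository.pk),
        "expected_namespace": "community",  # illustrative metadata
        "expected_name": "mycollection",
        "expected_version": "1.0.0",
    },
)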
Example #7
def curate_all_synclist_repository(upstream_repository_name, **kwargs):
    """When upstream_repository has changed, update all synclists repos associated with it.

    The synclist repos will be updated to upstream_repository

    This will create a lot of curate_synclist_repository tasks.
    It will create a TaskGroup containing those tasks.

    If neccasary, it may create many TaskGroups.

    It may need to schedule a series of TaskGroups, potentially
    in order of priority.

    This task need to be cancelable."""

    upstream_repository = AnsibleRepository.objects.get(
        name=upstream_repository_name)
    synclist_qs = models.SyncList.objects.filter(
        upstream_repository=upstream_repository)

    task_group = TaskGroup.objects.create(
        description=f"Curating all synclist repos that curate from {upstream_repository_name}"
    )
    task_group.save()

    CreatedResource.objects.create(content_object=task_group)

    current_task = Task.current()
    current_task.task_group = task_group
    current_task.save()

    GroupProgressReport(
        message="Synclists curating upstream repo",
        code="synclist.curate",
        total=synclist_qs.count(),
        task_group=task_group,
    ).save()

    with ProgressReport(
            message="Synclists curating upstream repo task",
            code="synclist.curate.log",
            total=synclist_qs.count(),
    ) as task_progress_report:

        synclist_iter = synclist_qs.iterator()
        while True:
            batch = list(
                itertools.islice(synclist_iter, settings.SYNCLIST_BATCH_SIZE))
            if not batch:
                break

            # TODO: filter down to just synclists that have a synclist repo
            # locks need to be Model or str not int
            synclist_ids = [synclist.id for synclist in batch]
            locks = [synclist.repository for synclist in batch]

            enqueue_with_reservation(
                curate_synclist_repository_batch,
                locks,
                args=(synclist_ids, ),
                task_group=task_group,
            )
            task_progress_report.increment()

            progress_report = task_group.group_progress_reports.filter(
                code="synclist.curate")
            progress_report.update(done=F("done") + len(synclist_ids))

    log.info(
        "Finished curating %s synclist repos based on %s update",
        synclist_qs.count(),
        upstream_repository,
    )

    task_group.finish()
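
The batching loop above is a general pattern: itertools.islice repeatedly pulls fixed-size chunks from a single iterator without materializing the whole queryset. The same idea in isolation (illustrative sizes):

import itertools

items = iter(range(7))
while True:
    batch = list(itertools.islice(items, 3))
    if not batch:
        break
    print(batch)  # [0, 1, 2], then [3, 4, 5], then [6]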
Example #8
def write_solver_debug_data(solver, problems, mapping, full=False):
    """Dump the state of the solver including actions decided upon and problems encountered."""
    from pulpcore.plugin.models import Task
    from pathlib import Path

    debugdata_dir = Path("/var/tmp/pulp") / str(Task.current().pulp_id)
    debugdata_dir.mkdir(parents=True, exist_ok=True)
    logger.info("Writing solver debug data to {}".format(debugdata_dir))

    transaction = solver.transaction()
    summary_path = debugdata_dir / "depsolving_summary.txt"

    reason_desc_map = {
        solv.Solver.SOLVER_REASON_UNRELATED: (
            "SOLVER_REASON_UNRELATED",
            "The package status did not change as it was not related to any job.",
        ),
        solv.Solver.SOLVER_REASON_UNIT_RULE: (
            "SOLVER_REASON_UNIT_RULE",
            "The package was installed/erased/kept because of a unit rule, "
            "i.e. a rule where all literals but one were false.",
        ),
        solv.Solver.SOLVER_REASON_KEEP_INSTALLED: (
            "SOLVER_REASON_KEEP_INSTALLED",
            "The package was chosen when trying to keep as many packages installed as possible.",
        ),
        solv.Solver.SOLVER_REASON_RESOLVE_JOB: (
            "SOLVER_REASON_RESOLVE_JOB",
            "The decision happened to fulfill a job rule.",
        ),
        solv.Solver.SOLVER_REASON_UPDATE_INSTALLED: (
            "SOLVER_REASON_UPDATE_INSTALLED",
            "The decision happened to fulfill a package update request.",
        ),
        solv.Solver.SOLVER_REASON_RESOLVE: (
            "SOLVER_REASON_RESOLVE",
            "The package was installed to fulfill package dependencies.",
        ),
        solv.Solver.SOLVER_REASON_WEAKDEP: (
            "SOLVER_REASON_WEAKDEP",
            "The package was installed because of a weak dependency (Recommends or Supplements).",
        ),
        solv.Solver.SOLVER_REASON_RECOMMENDED: (
            "SOLVER_REASON_RECOMMENDED",
            "The package was installed because of a weak dependency (Recommends or Supplements).",
        ),
        solv.Solver.SOLVER_REASON_SUPPLEMENTED: (
            "SOLVER_REASON_SUPPLEMENTED",
            "The package was installed because of a weak dependency (Recommends or Supplements).",
        ),
    }

    rule_desc_map = {
        solv.Solver.SOLVER_RULE_UNKNOWN: (
            "SOLVER_RULE_UNKNOWN",
            "A rule of an unknown class. You should never encounter those.",
        ),
        solv.Solver.SOLVER_RULE_PKG: ("SOLVER_RULE_PKG", "A package dependency rule."),
        solv.Solver.SOLVER_RULE_UPDATE: (
            "SOLVER_RULE_UPDATE",
            "A rule to implement the update policy of installed packages. Every installed "
            "package has an update rule that consists of the packages that may replace the "
            "installed package.",
        ),
        solv.Solver.SOLVER_RULE_FEATURE: (
            "SOLVER_RULE_FEATURE",
            "Feature rules are fallback rules used when an update rule is disabled. They "
            "include all packages that may replace the installed package ignoring the update "
            "policy, i.e. they contain downgrades, arch changes and so on. Without them, the "
            "solver would simply erase installed packages if their update rule gets disabled.",
        ),
        solv.Solver.SOLVER_RULE_JOB: (
            "SOLVER_RULE_JOB",
            "Job rules implement the job given to the solver.",
        ),
        solv.Solver.SOLVER_RULE_DISTUPGRADE: (
            "SOLVER_RULE_DISTUPGRADE",
            "These are simple negative assertions that make sure that only packages are kept "
            "that are also available in one of the repositories.",
        ),
        solv.Solver.SOLVER_RULE_INFARCH: (
            "SOLVER_RULE_INFARCH",
            "Infarch rules are also negative assertions, they disallow the installation of "
            "packages when there are packages of the same name but with a better architecture.",
        ),
        solv.Solver.SOLVER_RULE_CHOICE: (
            "SOLVER_RULE_CHOICE",
            "Choice rules are used to make sure that the solver prefers updating to installing "
            "different packages when some dependency is provided by multiple packages with "
            "different names. The solver may always break choice rules, so you will not see them "
            "when a problem is found.",
        ),
        solv.Solver.SOLVER_RULE_LEARNT: (
            "SOLVER_RULE_LEARNT",
            "These rules are generated by the solver to keep it from running into the same "
            "problem multiple times when it has to backtrack. They are the main reason why "
            "a sat solver is faster than other dependency solver implementations.",
        ),
        # Special dependency rule types:
        solv.Solver.SOLVER_RULE_PKG_NOT_INSTALLABLE: (
            "SOLVER_RULE_PKG_NOT_INSTALLABLE",
            "This rule was added to prevent the installation of a package of an architecture "
            "that does not work on the system.",
        ),
        solv.Solver.SOLVER_RULE_PKG_NOTHING_PROVIDES_DEP: (
            "SOLVER_RULE_PKG_NOTHING_PROVIDES_DEP",
            "The package contains a required dependency which was not provided by any package.",
        ),
        solv.Solver.SOLVER_RULE_PKG_REQUIRES: (
            "SOLVER_RULE_PKG_REQUIRES",
            "Similar to SOLVER_RULE_PKG_NOTHING_PROVIDES_DEP, but in this case some packages "
            "provided the dependency but none of them could be installed due to other "
            "dependency issues.",
        ),
        solv.Solver.SOLVER_RULE_PKG_SELF_CONFLICT: (
            "SOLVER_RULE_PKG_SELF_CONFLICT",
            "The package conflicts with itself. This is not allowed by older rpm versions.",
        ),
        solv.Solver.SOLVER_RULE_PKG_CONFLICTS: (
            "SOLVER_RULE_PKG_CONFLICTS",
            "To fulfill the dependencies two packages need to be installed, but one of the "
            "packages contains a conflict with the other one.",
        ),
        solv.Solver.SOLVER_RULE_PKG_SAME_NAME: (
            "SOLVER_RULE_PKG_SAME_NAME",
            "The dependencies can only be fulfilled by multiple versions of a package, but "
            "installing multiple versions of the same package is not allowed.",
        ),
        solv.Solver.SOLVER_RULE_PKG_OBSOLETES: (
            "SOLVER_RULE_PKG_OBSOLETES",
            "To fulfill the dependencies two packages need to be installed, but one of the "
            "packages obsoletes the other one.",
        ),
        solv.Solver.SOLVER_RULE_PKG_IMPLICIT_OBSOLETES: (
            "SOLVER_RULE_PKG_IMPLICIT_OBSOLETES",
            "To fulfill the dependencies two packages need to be installed, but one of the "
            "packages has provides a dependency that is obsoleted by the other one. See the "
            "POOL_FLAG_IMPLICITOBSOLETEUSESPROVIDES flag.",
        ),
        solv.Solver.SOLVER_RULE_PKG_INSTALLED_OBSOLETES: (
            "SOLVER_RULE_PKG_INSTALLED_OBSOLETES",
            "To fulfill the dependencies a package needs to be installed that is obsoleted "
            "by an installed package. See the POOL_FLAG_NOINSTALLEDOBSOLETES flag.",
        ),
        solv.Solver.SOLVER_RULE_JOB_NOTHING_PROVIDES_DEP: (
            "SOLVER_RULE_JOB_NOTHING_PROVIDES_DEP",
            "The user asked for installation of a package providing a specific dependency, but "
            "no available package provides it.",
        ),
        solv.Solver.SOLVER_RULE_JOB_UNKNOWN_PACKAGE: (
            "SOLVER_RULE_JOB_UNKNOWN_PACKAGE",
            "The user asked for installation of a package with a specific name, but no available "
            "package has that name.",
        ),
        solv.Solver.SOLVER_RULE_JOB_PROVIDED_BY_SYSTEM: (
            "SOLVER_RULE_JOB_PROVIDED_BY_SYSTEM",
            "The user asked for the erasure of a dependency that is provided by the system "
            "(i.e. for special hardware or language dependencies), this cannot be done with "
            "a job.",
        ),
        solv.Solver.SOLVER_RULE_JOB_UNSUPPORTED: (
            "SOLVER_RULE_JOB_UNSUPPORTED",
            "The user asked for something that is not yet implemented, e.g. the installation "
            "of all packages at once.",
        ),
    }

    with summary_path.open("wt") as summary:

        print("Problems Encountered:", file=summary)
        print("=====================", file=summary)
        for problem in problems:
            print(str(problem), file=summary)
        print(file=summary)

        print("Packages transferred:", file=summary)
        print("=====================", file=summary)
        print(file=summary)

        for solvable in transaction.newsolvables():
            (reason, rule) = solver.describe_decision(solvable)

            print(
                "{name}-{evr}.{arch}".format(
                    name=solvable.name, evr=solvable.evr, arch=solvable.arch
                ),
                file=summary,
            )

            (reason_name, reason_description) = reason_desc_map[reason]
            (unit_id, from_repo) = mapping.get_unit_id(solvable)
            print(
                "    Pulp Content unit '{}' from repo '{}'".format(unit_id, from_repo), file=summary
            )
            print("    Reason: {} - {}".format(reason_name, reason_description), file=summary)
            print("    Rules:", file=summary)
            for info in rule.allinfos():
                (rule_name, rule_description) = rule_desc_map[info.type]
                print("        {} - {}".format(rule_name, rule_description), file=summary)
                if info.solvable:
                    pkg = str(info.solvable)
                    dep = str(info.dep)
                    print(
                        "            Because package '{}' requires '{}'".format(pkg, dep),
                        file=summary,
                    )

            print(file=summary)

    if full:
        solver.write_testcase(str(debugdata_dir))
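
A sketch of a call site, assuming a configured libsolv solver whose solve() has run and a mapping helper like the one referenced above (the surrounding names are assumptions, not a confirmed call site):

problems = solver.solve(jobs)  # libsolv: returns a list of Problem objects, empty on success
# Dump the decisions made and any problems hit; write a full testcase too.
write_solver_debug_data(solver, problems, mapping, full=True)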