Example 1
File: base.py Project: asmacdo/pulp
def general_update(instance_id, app_label, serializer_name, *args, **kwargs):
    """
    Update a model

    The model instance is identified using the app_label, id, and serializer name. The serializer is
    used to perform validation.

    Args:
        instance_id (str): the id of the model instance
        app_label (str): the Django app label of the plugin that provides the model
        serializer_name (str): name of the serializer class for the model
        data (dict): dictionary whose keys represent the fields of the model and their corresponding
            values.
        partial (bool): When true, only the fields specified in the data dictionary are updated.
            When false, any fields missing from the data dictionary are assumed to be None and
            their values are updated as such.

    Raises:
        :class:`rest_framework.exceptions.ValidationError`: When serializer instance can't be saved
            due to validation error. This theoretically should never occur since validation is
            performed before the task is dispatched.
    """
    data = kwargs.pop('data', None)
    partial = kwargs.pop('partial', False)
    serializer_class = get_plugin_config(app_label).named_serializers[serializer_name]
    instance = serializer_class.Meta.model.objects.get(pk=instance_id).cast()
    serializer = serializer_class(instance, data=data, partial=partial)
    serializer.is_valid(raise_exception=True)
    serializer.save()
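A usage sketch (the pk, app label, and serializer name below are placeholders, not taken from any particular plugin):

# Hypothetical call: rename one instance while leaving all other fields untouched.
general_update(
    "23a260b2-3bf1-4f44-9ba4-d30b4b0a8bd2",  # placeholder pk
    "file",                                  # placeholder plugin app label
    "FileRepositorySerializer",              # placeholder serializer name
    data={"name": "renamed-repo"},
    partial=True,
)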
Example 2
def export_content(export, repository_version):
    """
    Export db-content, and the db-content of the owning repositories

    Args:
        export (django.db.models.PulpExport): export instance that's doing the export
        repository_version (django.db.models.RepositoryVersion): RepositoryVersion being exported
    """
    dest_dir = "repository-{}_{}".format(
        str(repository_version.repository.pulp_id), repository_version.number
    )
    # export the resources pulpcore is responsible for
    resource = ContentResource(repository_version)
    _write_export(export.tarfile, resource, dest_dir)

    resource = ContentArtifactResource(repository_version)
    _write_export(export.tarfile, resource, dest_dir)

    # find and export any ModelResource found in pulp_<repo-type>.app.modelresource
    plugin_name = repository_version.repository.pulp_type.split(".")[0]
    cfg = get_plugin_config(plugin_name)
    if cfg.exportable_classes:
        for cls in cfg.exportable_classes:
            _write_export(export.tarfile, cls(repository_version), dest_dir)
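For context, a plugin-side resource picked up by the exportable_classes loop might look roughly like the sketch below; QueryModelResource and FileContent are assumptions about the plugin API and model, not taken from this example:

from pulpcore.plugin.importexport import QueryModelResource
from pulp_file.app.models import FileContent  # hypothetical plugin model


class FileContentResource(QueryModelResource):
    """Sketch: limit the export to content contained in the repo version at hand."""

    def set_up_queryset(self):
        return FileContent.objects.filter(pk__in=self.repo_version.content)

    class Meta:
        model = FileContent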
Example 3
def import_repository_version(destination_repo_pk, source_repo_pk, tar_path):
    """
    Import a repository version from a Pulp export.

    Args:
        destination_repo_pk (str): Primary key of Repository to import into.
        source_repo_pk (str): Primary key of the Repository in the export.
        tar_path (str): A path to export tar.
    """
    dest_repo = Repository.objects.get(pk=destination_repo_pk)

    with tempfile.TemporaryDirectory() as temp_dir:
        # Extract the repo file for the repo info
        with tarfile.open(tar_path, "r:gz") as tar:
            tar.extract(REPO_FILE, path=temp_dir)

        with open(os.path.join(temp_dir, REPO_FILE), "r") as repo_data_file:
            data = json.load(repo_data_file)

        src_repo = next(repo for repo in data if repo["pulp_id"] == source_repo_pk)
        rv_path = os.path.join(temp_dir, _repo_version_path(src_repo))

        if dest_repo.pulp_type != src_repo["pulp_type"]:
            raise ValidationError(
                _(
                    "Repository type mismatch: {src_repo} ({src_type}) vs {dest_repo} "
                    "({dest_type})."
                ).format(
                    src_repo=src_repo["name"],
                    src_type=src_repo["pulp_type"],
                    dest_repo=dest_repo.name,
                    dest_type=dest_repo.pulp_type,
                )
            )

        # Extract the repo version files
        with tarfile.open(tar_path, "r:gz") as tar:
            for mem in tar.getmembers():
                if re.match(fr"^{_repo_version_path(src_repo)}/.+", mem.name):
                    tar.extract(mem, path=temp_dir)

        # Untyped Content
        content_path = os.path.join(rv_path, CONTENT_FILE)
        c_result = _import_file(content_path, ContentResource)
        content = Content.objects.filter(pk__in=[r.object_id for r in c_result.rows])

        # Content Artifacts
        ca_path = os.path.join(rv_path, CA_FILE)
        _import_file(ca_path, ContentArtifactResource)

        # Content
        plugin_name = src_repo["pulp_type"].split(".")[0]
        cfg = get_plugin_config(plugin_name)
        for res_class in cfg.exportable_classes:
            filename = f"{res_class.__module__}.{res_class.__name__}.json"
            _import_file(os.path.join(rv_path, filename), res_class)

        # Create the repo version
        with dest_repo.new_version() as new_version:
            new_version.set_content(content)
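The _repo_version_path() helper used above is not part of this listing. A plausible sketch, inferred from the directory names that export_content builds and from the repo_version_path() helper in the final pulp_import example:

def _repo_version_path(src_repo):
    # Assumed layout: each repo version lives in the export under
    # "repository-<pulp_id>_<version-number>".
    src_repo_version = int(src_repo["next_version"]) - 1
    return f"repository-{src_repo['pulp_id']}_{src_repo_version}"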
Example 4
def export_content(export, repository_version):
    """
    Export db-content, and the db-content of the owning repositories

    Args:
        export (django.db.models.PulpExport): export instance that's doing the export
        repository_version (django.db.models.RepositoryVersion): RepositoryVersion being exported
    """
    def _combine_content_mappings(map1, map2):
        """Combine two content mapping dicts into one by combining ids for for each key."""
        result = {}
        for key in map1.keys() | map2.keys():
            result[key] = list(set(map1.get(key, []) + map2.get(key, [])))
        return result

    dest_dir = "repository-{}_{}".format(
        str(repository_version.repository.name), repository_version.number
    )

    # Export the connection between content and artifacts
    resource = ContentArtifactResource(repository_version)
    _write_export(export.tarfile, resource, dest_dir)

    # content mapping is used by repo versions with subrepos (eg distribution tree repos)
    content_mapping = {}

    # find and export any ModelResource found in pulp_<repo-type>.app.modelresource
    plugin_name = repository_version.repository.pulp_type.split(".")[0]
    cfg = get_plugin_config(plugin_name)
    if cfg.exportable_classes:
        for cls in cfg.exportable_classes:
            resource = cls(repository_version)
            _write_export(export.tarfile, resource, dest_dir)

            if hasattr(resource,
                       "content_mapping") and resource.content_mapping:
                content_mapping = _combine_content_mappings(
                    content_mapping, resource.content_mapping)

    msg = (
        f"Exporting content for {plugin_name} "
        f"repository-version {repository_version.repository.name}/{repository_version.number}"
    )
    content_count = repository_version.content.count()
    data = dict(
        message=msg,
        code="export.repo.version.content",
        total=content_count,
        done=content_count,
        state=TASK_STATES.COMPLETED,
    )
    pb = ProgressReport(**data)
    pb.save()

    if content_mapping:
        # write the content mapping to tarfile
        cm_json = json.dumps(content_mapping).encode("utf8")
        info = tarfile.TarInfo(name=f"{dest_dir}/content_mapping.json")
        info.size = len(cm_json)
        export.tarfile.addfile(info, io.BytesIO(cm_json))
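To make the merge semantics of _combine_content_mappings() concrete (keys and ids below are invented; the helper is nested inside export_content, so this illustrates its behavior rather than an external call):

m1 = {"subrepo-a": ["id-1", "id-2"]}
m2 = {"subrepo-a": ["id-2", "id-3"], "subrepo-b": ["id-4"]}
merged = _combine_content_mappings(m1, m2)
# merged == {"subrepo-a": ["id-1", "id-2", "id-3"], "subrepo-b": ["id-4"]}
# (id order within each list is unspecified, because the ids pass through a set)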
Example 5
def get_plugin_storage_path(plugin_app_label):
    """
    Returns the path to the plugin's storage

    An interface for finding the path to a plugin's persistent storage location. It is
    designed to be used by plugins that need to store more than just
    :class:`~pulpcore.plugin.models.Artifact` models.

    Args:
        plugin_app_label (str): Django app label of the pulp plugin

    Returns:
        String containing the absolute path to the plugin's storage on the filesystem.

    Raises:
        :class:`~pulpcore.exceptions.plugin.MissingPlugin`: When plugin with the requested app
            label is not installed.
    """
    get_plugin_config(plugin_app_label)
    return os.path.join("/var/lib/pulp/shared", plugin_app_label, "")
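A usage sketch ("file" is a placeholder app label):

path = get_plugin_storage_path("file")
# path == "/var/lib/pulp/shared/file/"
# The trailing "" argument to os.path.join is what produces the trailing slash;
# MissingPlugin is raised first if no plugin with that label is installed.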
Example 6
File: base.py Project: asmacdo/pulp
def general_delete(instance_id, app_label, serializer_name):
    """
    Delete a model

    The model instance is identified using the app_label, id, and serializer name.

    Args:
        instance_id (str): the id of the model instance
        app_label (str): the Django app label of the plugin that provides the model
        serializer_name (str): name of the serializer class for the model
    """
    serializer_class = get_plugin_config(app_label).named_serializers[serializer_name]
    instance = serializer_class.Meta.model.objects.get(pk=instance_id).cast()
    instance.delete()
Example 7
def general_create(app_label, serializer_name, *args, **kwargs):
    """
    Create a model instance.

    Raises:
        ValidationError: If the serializer is not valid

    """
    data = kwargs.pop('data', None)
    serializer_class = get_plugin_config(app_label).named_serializers[serializer_name]
    serializer = serializer_class(data=data)
    serializer.is_valid(raise_exception=True)
    serializer.save()
    resource = CreatedResource(content_object=serializer.instance)
    resource.save()
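A dispatch-side usage sketch (app label, serializer name, and data are placeholders):

general_create(
    "file",
    "FileRemoteSerializer",
    data={"name": "my-remote", "url": "https://example.com/pulp/"},
)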
Example 8
def general_multi_delete(instance_ids):
    """
    Delete a list of model instances in a transaction

    The model instances are identified using the id, app_label, and serializer_name.

    Args:
        instance_ids (list): List of tuples of (instance_id, app_label, serializer_name)
    """
    instances = []
    for instance_id, app_label, serializer_name in instance_ids:
        serializer_class = get_plugin_config(
            app_label).named_serializers[serializer_name]
        instances.append(
            serializer_class.Meta.model.objects.get(pk=instance_id).cast())
    with transaction.atomic():
        for instance in instances:
            instance.delete()
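A usage sketch (ids and serializer names are placeholders); all three deletes commit together or not at all, because they run inside transaction.atomic():

general_multi_delete([
    ("pk-1", "file", "FileRepositorySerializer"),
    ("pk-2", "file", "FileRemoteSerializer"),
    ("pk-3", "file", "FileDistributionSerializer"),
])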
Example 9
def general_create(app_label, serializer_name, *args, **kwargs):
    """
    Create a model instance.

    Raises:
        ValidationError: If the serializer is not valid

    """
    data = kwargs.pop("data", None)
    context = kwargs.pop("context", {})
    serializer_class = get_plugin_config(
        app_label).named_serializers[serializer_name]
    serializer = serializer_class(data=data, context=context)
    serializer.is_valid(raise_exception=True)
    serializer.save()
    instance = serializer_class.Meta.model.objects.get(
        pk=serializer.instance.pk).cast()
    resource = CreatedResource(content_object=instance)
    resource.save()
Example 10
def general_create_from_temp_file(app_label, serializer_name, *args, **kwargs):
    """
    Create a model instance from contents stored in a temporary Artifact.

    A caller should always pass the dictionary "data", as a keyword argument, containing the
    href to the temporary Artifact. Otherwise, the function does nothing.

    This function calls the function general_create() to create a model instance.
    Data passed to that function already contains a serialized artifact converted
    to PulpTemporaryUploadedFile that will be deleted afterwards.
    """
    data = kwargs.pop("data", None)
    if data and "artifact" in data:
        named_model_view_set = get_plugin_config(
            app_label).viewsets_module.NamedModelViewSet
        artifact = named_model_view_set.get_resource(data.pop("artifact"),
                                                     Artifact)

        data["file"] = PulpTemporaryUploadedFile.from_file(artifact.file)

        general_create(app_label, serializer_name, data=data, *args, **kwargs)
        artifact.delete()
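A usage sketch showing the expected shape of "data" (all names and the href are placeholders):

general_create_from_temp_file(
    "file",
    "FileContentSerializer",
    data={
        "artifact": "/pulp/api/v3/artifacts/<uuid>/",  # href of the temporary Artifact
        "relative_path": "a.txt",
    },
)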
Example 11
def import_repository_version(importer_pk, destination_repo_pk,
                              source_repo_name, tar_path):
    """
    Import a repository version from a Pulp export.

    Args:
        importer_pk (str): Importer we are working with
        destination_repo_pk (str): Primary key of Repository to import into.
        source_repo_name (str): Name of the Repository in the export.
        tar_path (str): A path to export tar.
    """
    dest_repo = Repository.objects.get(pk=destination_repo_pk)
    importer = PulpImporter.objects.get(pk=importer_pk)

    pb = ProgressReport(
        message=f"Importing content for {dest_repo.name}",
        code="import.repo.version.content",
        state=TASK_STATES.RUNNING,
    )
    pb.save()

    with tempfile.TemporaryDirectory() as temp_dir:
        # Extract the repo file for the repo info
        with tarfile.open(tar_path, "r:gz") as tar:
            tar.extract(REPO_FILE, path=temp_dir)

        with open(os.path.join(temp_dir, REPO_FILE), "r") as repo_data_file:
            data = json.load(repo_data_file)

        src_repo = next(repo for repo in data
                        if repo["name"] == source_repo_name)

        if dest_repo.pulp_type != src_repo["pulp_type"]:
            raise ValidationError(
                _("Repository type mismatch: {src_repo} ({src_type}) vs {dest_repo} "
                  "({dest_type}).").format(
                      src_repo=src_repo["name"],
                      src_type=src_repo["pulp_type"],
                      dest_repo=dest_repo.name,
                      dest_type=dest_repo.pulp_type,
                  ))

        rv_name = ""
        # Extract the repo version files
        with tarfile.open(tar_path, "r:gz") as tar:
            for mem in tar.getmembers():
                match = re.search(
                    fr"(^repository-{source_repo_name}_[0-9]+)/.+", mem.name)
                if match:
                    rv_name = match.group(1)
                    tar.extract(mem, path=temp_dir)

        if not rv_name:
            raise ValidationError(
                _("No RepositoryVersion found for {}").format(source_repo_name))

        rv_path = os.path.join(temp_dir, rv_name)
        # Content
        plugin_name = src_repo["pulp_type"].split(".")[0]
        cfg = get_plugin_config(plugin_name)

        resulting_content_ids = []
        for res_class in cfg.exportable_classes:
            filename = f"{res_class.__module__}.{res_class.__name__}.json"
            a_result = _import_file(os.path.join(rv_path, filename),
                                    res_class,
                                    do_raise=False)
            # django import-export can have a problem with concurrent-imports that are
            # importing the same 'thing' (e.g., a Package that exists in two different
            # repo-versions that are being imported at the same time). We will try an import
            # that will simply record errors as they happen (rather than failing with an exception)
            # first. If errors happen, we'll do one retry before we give up on this repo-version's
            # import.
            if a_result.has_errors():
                log.info(
                    _("...{} import-errors encountered importing {} from {}, retrying"
                      ).format(a_result.totals["error"], filename, rv_name))
                # Second attempt, we allow to raise an exception on any problem.
                # This will either succeed, or log a fatal error and fail.
                try:
                    a_result = _import_file(os.path.join(rv_path, filename),
                                            res_class)
                except Exception:  # log on ANY exception and then re-raise
                    log.error(
                        _("FATAL import-failure importing {} from {}").format(
                            filename, rv_name))
                    raise

            resulting_content_ids.extend(row.object_id for row in a_result.rows
                                         if row.import_type in ("new",
                                                                "update"))

        # Once all content exists, create the ContentArtifact links
        ca_path = os.path.join(rv_path, CA_FILE)
        _import_file(ca_path, ContentArtifactResource)

        # see if we have a content mapping
        mapping_path = f"{rv_name}/{CONTENT_MAPPING_FILE}"
        mapping = {}
        with tarfile.open(tar_path, "r:gz") as tar:
            if mapping_path in tar.getnames():
                tar.extract(mapping_path, path=temp_dir)
                with open(os.path.join(temp_dir, mapping_path),
                          "r") as mapping_file:
                    mapping = json.load(mapping_file)

        if mapping:
            # use the content mapping to map content to repos
            for repo_name, content_ids in mapping.items():
                repo = _destination_repo(importer, repo_name)
                content = Content.objects.filter(upstream_id__in=content_ids)
                with repo.new_version() as new_version:
                    new_version.set_content(content)
        else:
            # just map all the content to our destination repo
            content = Content.objects.filter(pk__in=resulting_content_ids)
            with dest_repo.new_version() as new_version:
                new_version.set_content(content)

        content_count = content.count()
        pb.total = content_count
        pb.done = content_count
        pb.state = TASK_STATES.COMPLETED
        pb.save()

    gpr = TaskGroup.current().group_progress_reports.filter(
        code="import.repo.versions")
    gpr.update(done=F("done") + 1)
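The _import_file() helper and its do_raise flag are not defined in this listing. A plausible sketch, modeled on the local import_file() in the pulp_import example at the end of this section:

from tablib import Dataset


def _import_file(fpath, resource_class, do_raise=True):
    # Load a django-import-export JSON dump and run it through the given
    # resource class. With do_raise=False, errors are collected on the returned
    # result (inspectable via has_errors()) instead of being raised, which
    # enables the retry logic above.
    with open(fpath, "r") as json_file:
        data = Dataset().load(json_file.read(), format="json")
        resource = resource_class()
        return resource.import_data(data, raise_errors=do_raise)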
Example 12
def import_repository_version(importer_pk, destination_repo_pk, source_repo_name, tar_path):
    """
    Import a repository version from a Pulp export.

    Args:
        importer_pk (str): Importer we are working with
        destination_repo_pk (str): Primary key of Repository to import into.
        source_repo_name (str): Name of the Repository in the export.
        tar_path (str): A path to export tar.
    """
    dest_repo = Repository.objects.get(pk=destination_repo_pk)
    importer = PulpImporter.objects.get(pk=importer_pk)

    pb = ProgressReport(
        message=f"Importing content for {dest_repo.name}",
        code="import.repo.version.content",
        state=TASK_STATES.RUNNING,
    )
    pb.save()

    with tempfile.TemporaryDirectory() as temp_dir:
        # Extract the repo file for the repo info
        with tarfile.open(tar_path, "r:gz") as tar:
            tar.extract(REPO_FILE, path=temp_dir)

        with open(os.path.join(temp_dir, REPO_FILE), "r") as repo_data_file:
            data = json.load(repo_data_file)

        src_repo = next(repo for repo in data if repo["name"] == source_repo_name)
        rv_path = os.path.join(temp_dir, _repo_version_path(src_repo))

        if dest_repo.pulp_type != src_repo["pulp_type"]:
            raise ValidationError(
                _(
                    "Repository type mismatch: {src_repo} ({src_type}) vs {dest_repo} "
                    "({dest_type})."
                ).format(
                    src_repo=src_repo["name"],
                    src_type=src_repo["pulp_type"],
                    dest_repo=dest_repo.name,
                    dest_type=dest_repo.pulp_type,
                )
            )

        # Extract the repo version files
        with tarfile.open(tar_path, "r:gz") as tar:
            for mem in tar.getmembers():
                if re.match(fr"^{_repo_version_path(src_repo)}/.+", mem.name):
                    tar.extract(mem, path=temp_dir)

        # Content
        plugin_name = src_repo["pulp_type"].split(".")[0]
        cfg = get_plugin_config(plugin_name)

        resulting_content_ids = []
        for res_class in cfg.exportable_classes:
            filename = f"{res_class.__module__}.{res_class.__name__}.json"
            a_result = _import_file(os.path.join(rv_path, filename), res_class)
            resulting_content_ids.extend(
                row.object_id for row in a_result.rows if row.import_type in ("new", "update")
            )

        # Once all content exists, create the ContentArtifact links
        ca_path = os.path.join(rv_path, CA_FILE)
        _import_file(ca_path, ContentArtifactResource)

        # see if we have a content mapping
        mapping_path = f"{_repo_version_path(src_repo)}/{CONTENT_MAPPING_FILE}"
        mapping = {}
        with tarfile.open(tar_path, "r:gz") as tar:
            if mapping_path in tar.getnames():
                tar.extract(mapping_path, path=temp_dir)
                with open(os.path.join(temp_dir, mapping_path), "r") as mapping_file:
                    mapping = json.load(mapping_file)

        if mapping:
            # use the content mapping to map content to repos
            for repo_name, content_ids in mapping.items():
                repo = _destination_repo(importer, repo_name)
                content = Content.objects.filter(upstream_id__in=content_ids)
                with repo.new_version() as new_version:
                    new_version.set_content(content)
        else:
            # just map all the content to our destination repo
            content = Content.objects.filter(pk__in=resulting_content_ids)
            with dest_repo.new_version() as new_version:
                new_version.set_content(content)

        content_count = content.count()
        pb.total = content_count
        pb.done = content_count
        pb.state = TASK_STATES.COMPLETED
        pb.save()

    gpr = TaskGroup.current().group_progress_reports.filter(code="import.repo.versions")
    gpr.update(done=F("done") + 1)
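The content_mapping.json consumed here is the file written by the export_content example above; once decoded it is a plain mapping of repo name to upstream content ids, e.g. (names and ids invented):

# Illustrative shape of the decoded content_mapping.json:
mapping = {
    "distribution-tree-subrepo": ["upstream-id-1", "upstream-id-2"],
    "another-subrepo": ["upstream-id-3"],
}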
Example 13
def pulp_import(importer_pk, path):
    """
    Import a Pulp export into Pulp.

    Args:
        importer_pk (str): Primary key of PulpImporter to do the import
        path (str): Path to the export to be imported
    """
    def import_file(fpath, resource_class):
        log.info(_("Importing file {}.").format(fpath))
        with open(fpath, "r") as json_file:
            data = Dataset().load(json_file.read(), format="json")
            resource = resource_class()
            return resource.import_data(data, raise_errors=True)

    def destination_repo(source_repo_name):
        """Find the destination repository based on source repo's name."""
        if importer.repo_mapping and importer.repo_mapping.get(
                source_repo_name):
            dest_repo_name = importer.repo_mapping[source_repo_name]
        else:
            dest_repo_name = source_repo_name
        return Repository.objects.get(name=dest_repo_name)

    def repo_version_path(temp_dir, src_repo):
        """Find the repo version path in the export based on src_repo json."""
        src_repo_version = int(src_repo["next_version"]) - 1
        return os.path.join(
            temp_dir, f"repository-{src_repo['pulp_id']}_{src_repo_version}")

    log.info(_("Importing {}.").format(path))
    importer = PulpImporter.objects.get(pk=importer_pk)
    pulp_import = PulpImport.objects.create(importer=importer,
                                            task=Task.current(),
                                            params={"path": path})
    CreatedResource.objects.create(content_object=pulp_import)

    with tempfile.TemporaryDirectory() as temp_dir:
        with tarfile.open(path, "r|gz") as tar:
            tar.extractall(path=temp_dir)

        # Artifacts
        ar_result = import_file(os.path.join(temp_dir, ARTIFACT_FILE),
                                ArtifactResource)
        for row in ar_result.rows:
            artifact = Artifact.objects.get(pk=row.object_id)
            base_path = os.path.join('artifact', artifact.sha256[0:2],
                                     artifact.sha256[2:])
            src = os.path.join(temp_dir, base_path)
            dest = os.path.join(settings.MEDIA_ROOT, base_path)

            if not default_storage.exists(dest):
                with open(src, 'rb') as f:
                    default_storage.save(dest, f)

        # Repo Versions
        with open(os.path.join(temp_dir, REPO_FILE), "r") as repo_data_file:
            data = json.load(repo_data_file)

            for src_repo in data:
                try:
                    dest_repo = destination_repo(src_repo["name"])
                except Repository.DoesNotExist:
                    log.warning(
                        _("Could not find destination repo for {}. "
                          "Skipping.").format(src_repo["name"]))
                    continue

                rv_path = repo_version_path(temp_dir, src_repo)

                # Untyped Content
                content_path = os.path.join(rv_path, CONTENT_FILE)
                c_result = import_file(content_path, ContentResource)
                content = Content.objects.filter(
                    pk__in=[r.object_id for r in c_result.rows])

                # Content Artifacts
                ca_path = os.path.join(rv_path, CA_FILE)
                import_file(ca_path, ContentArtifactResource)

                # Content
                plugin_name = src_repo["pulp_type"].split('.')[0]
                cfg = get_plugin_config(plugin_name)
                for res_class in cfg.exportable_classes:
                    filename = f"{res_class.__module__}.{res_class.__name__}.json"
                    import_file(os.path.join(rv_path, filename), res_class)

                # Create the repo version
                with dest_repo.new_version() as new_version:
                    new_version.set_content(content)

    return importer
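A usage sketch (the pk and archive path are placeholders). A repo_mapping on the importer, e.g. {"upstream-files": "local-files"}, redirects an exported repo name to a differently named destination repo:

pulp_import("6b2d0a2f-2b3c-4d5e-8f90-123456789abc", "/tmp/export.tar.gz")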