def test_delete_success(fast_poller, requests_mocker, client):
    """delete succeeds and returns spawned tasks"""

    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    requests_mocker.delete(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/",
        json={"spawned_tasks": [{
            "task_id": "task1"
        }, {
            "task_id": "task2"
        }]},
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        json=[
            {
                "task_id": "task1",
                "state": "finished"
            },
            {
                "task_id": "task2",
                "state": "skipped"
            },
        ],
    )

    # It should have succeeded, with the tasks as retrieved from Pulp
    assert sorted(repo.delete().result()) == [
        Task(id="task1", succeeded=True, completed=True),
        Task(id="task2", succeeded=True, completed=True),
    ]
Example #2
def test_remove_no_type_ids(fast_poller, requests_mocker, client):
    """Remove succeeds when given no type_ids."""

    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/unassociate/",
        [{"json": {"spawned_tasks": [{"task_id": "task1"}, {"task_id": "task2"}]}}],
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            {
                "json": [
                    {"task_id": "task1", "state": "finished"},
                    {"task_id": "task2", "state": "skipped"},
                ]
            }
        ],
    )

    assert repo.remove_content().result() == [
        Task(id="task1", completed=True, succeeded=True),
        Task(id="task2", completed=True, succeeded=True),
    ]
Example #3
    def copy_content(
        self, from_repository, to_repository, criteria=None, options=CopyOptions()
    ):
        self._ensure_alive()

        from_id = from_repository.id
        to_id = to_repository.id

        found = list(from_repository.search_content(criteria).result())

        # RPM signature filter: if signatures are required, unsigned RPMs are not
        # included in the copy.
        # Because we don't model this flag on distributor objects and because in
        # practice it's set to True, we default to True.
        if options.require_signed_rpms is not False:
            found = [u for u in found if not isinstance(u, RpmUnit) or u.signing_key]

        # Units are being copied to this repo, so that value obviously must appear
        # in repository_memberships from now on.
        found = [attr.evolve(unit, repository_memberships=[to_id]) for unit in found]

        with self._state.lock:
            # Now put the found units into the destination repo.
            # Any kind of merging or replacing of units is handled within this step.
            self._state.insert_repo_units(to_id, found)

            # Arbitrarily limit the number of units included per task. The point is
            # to enforce that the caller doesn't expect any specific number of tasks.
            tasks = []
            while found:
                next_batch = found[:5]
                found = found[5:]
                tasks.append(
                    Task(
                        id=self._state.next_task_id(),
                        repo_id=from_id,
                        completed=True,
                        succeeded=True,
                        units=units.with_key_only(next_batch),
                    )
                )

            if not tasks:
                # This indicates that nothing was found at all.
                # That's fine, just return a task with empty units.
                tasks.append(
                    Task(
                        id=self._state.next_task_id(),
                        repo_id=from_id,
                        completed=True,
                        succeeded=True,
                        units=[],
                    )
                )

        return f_proxy(f_return(tasks))
Example #4
def test_failed_task():
    """from_data sets attributes appropriately for a failed task"""
    task = Task.from_data({"task_id": "some-task", "state": "error"})
    assert task == Task(
        id="some-task",
        completed=True,
        succeeded=False,
        error_summary="Pulp task [some-task] failed: <unknown error>",
        error_details="Pulp task [some-task] failed: <unknown error>",
    )
Example #5
def test_canceled_task():
    """from_data sets attributes appropriately for a canceled task"""
    task = Task.from_data({"task_id": "some-task", "state": "canceled"})
    assert task == Task(
        id="some-task",
        completed=True,
        succeeded=False,
        error_summary="Pulp task [some-task] was canceled",
        error_details="Pulp task [some-task] was canceled",
    )
Example #6
def test_integer_result():
    """from_data tolerates integer values in 'result' field.

    This test exists because the 'result' field can contain different types
    (e.g. None, integer, dict) based on the type of task which was executed.
    In some cases, we want to parse the dict. The test protects against
    unconditionally assuming we always have a dict.
    """
    task = Task.from_data({"task_id": "some-task", "state": "finished", "result": 123})
    assert task == Task(id="some-task", completed=True, succeeded=True)
Example #7
def test_set_maintenance(client, requests_mocker):
    maintenance_report = {
        "last_updated": "2019-08-15T14:21:12Z",
        "last_updated_by": "pubtools.pulplib",
        "repos": {
            "repo1": {
                "message": "Maintenance Mode Enabled",
                "owner": "pubtools.pulplib",
                "started": "2019-08-15T14:21:12Z",
            }
        },
    }
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/search/",
        [{
            "json": [{
                "id": "redhat-maintenance",
                "notes": {
                    "_repo-type": "iso-repo"
                }
            }]
        }],
    )

    report = MaintenanceReport._from_data(maintenance_report)

    with patch("pubtools.pulplib.FileRepository.upload_file") as mocked_upload:
        with patch("pubtools.pulplib.Repository.publish") as mocked_publish:
            upload_task = Task(id="upload-task",
                               completed=True,
                               succeeded=True)
            publish_task = [
                Task(id="publish-task", completed=True, succeeded=True)
            ]

            mocked_upload.return_value = f_return(upload_task)
            mocked_publish.return_value = f_return(publish_task)

            # set_maintenance.result() should return whatever publish.result() returns
            assert client.set_maintenance(report).result() is publish_task

    # upload_file should be called with (file_obj, 'repos.json')
    args = mocked_upload.call_args
    report_file = args[0][0]
    report = MaintenanceReport()._from_data(json.loads(report_file.read()))

    assert len(report.entries) == 1
    assert report.entries[0].repo_id == "repo1"
    assert report.last_updated_by == "pubtools.pulplib"

    # search repo, upload and publish should be called once each
    assert requests_mocker.call_count == 1
    assert mocked_publish.call_count == 1
    assert mocked_upload.call_count == 1
Example #8
def test_copy_with_criteria(fast_poller, requests_mocker, client):
    """Copy with criteria succeeds, and serializes criteria correctly."""

    src = Repository(id="src-repo")
    dest = Repository(id="dest-repo")

    src.__dict__["_client"] = client
    dest.__dict__["_client"] = client

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/dest-repo/actions/associate/",
        [{"json": {"spawned_tasks": [{"task_id": "task1"}, {"task_id": "task2"}]}}],
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            {
                "json": [
                    {"task_id": "task1", "state": "finished"},
                    {"task_id": "task2", "state": "skipped"},
                ]
            }
        ],
    )

    crit = Criteria.and_(
        Criteria.with_unit_type(RpmUnit),
        Criteria.with_field("name", Matcher.in_(["bash", "glibc"])),
    )

    # Copy should succeed, and return the tasks (in this case with no matches)
    assert sorted(client.copy_content(src, dest, crit), key=lambda t: t.id) == [
        Task(id="task1", completed=True, succeeded=True),
        Task(id="task2", completed=True, succeeded=True),
    ]

    hist = requests_mocker.request_history

    # First request should have been the associate.
    assert (
        hist[0].url
        == "https://pulp.example.com/pulp/api/v2/repositories/dest-repo/actions/associate/"
    )

    # It should have encoded our criteria object as needed by the Pulp API.
    assert hist[0].json() == {
        "criteria": {
            "filters": {"unit": {"name": {"$in": ["bash", "glibc"]}}},
            "type_ids": ["rpm", "srpm"],
        },
        "source_repo_id": "src-repo",
    }
Example #9
def test_publish_with_options(requests_mocker, client):
    """publish passes expected config into distributors based on publish options"""
    repo = YumRepository(
        id="some-repo",
        distributors=(
            Distributor(id="yum_distributor", type_id="yum_distributor"),
            Distributor(id="cdn_distributor", type_id="rpm_rsync_distributor"),
        ),
    )
    repo.__dict__["_client"] = client

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/publish/",
        [
            {"json": {"spawned_tasks": [{"task_id": "task1"}]}},
            {"json": {"spawned_tasks": [{"task_id": "task2"}]}},
        ],
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            {"json": [{"task_id": "task1", "state": "finished"}]},
            {"json": [{"task_id": "task2", "state": "finished"}]},
        ],
    )

    options = PublishOptions(
        clean=True, force=True, origin_only=True, rsync_extra_args=["-a"]
    )

    # It should have succeeded, with the tasks as retrieved from Pulp
    assert sorted(repo.publish(options)) == [
        Task(id="task1", succeeded=True, completed=True),
        Task(id="task2", succeeded=True, completed=True),
    ]

    req = requests_mocker.request_history

    # The yum_distributor request should have set force_full, but not
    # delete since it's not recognized by that distributor
    assert req[0].json()["override_config"] == {"force_full": True}

    # The cdn_distributor request should have set force_full, delete,
    # content_units_only and rsync_extra_args
    assert req[2].json()["override_config"] == {
        "force_full": True,
        "delete": True,
        "content_units_only": True,
        "rsync_extra_args": ["-a"],
    }
Example #10
def test_publish_order(requests_mocker, client):
    """publish runs docker/rsync distributors in correct order"""
    repo = ContainerImageRepository(
        id="some-repo",
        distributors=(
            Distributor(
                id="docker_web_distributor_name_cli", type_id="docker_distributor_web"
            ),
            Distributor(id="cdn_distributor", type_id="docker_rsync_distributor"),
            Distributor(
                id="cdn_distributor_unprotected", type_id="docker_rsync_distributor"
            ),
        ),
    )
    repo.__dict__["_client"] = client

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/publish/",
        [
            {"json": {"spawned_tasks": [{"task_id": "task1"}]}},
            {"json": {"spawned_tasks": [{"task_id": "task2"}]}},
            {"json": {"spawned_tasks": [{"task_id": "task3"}]}},
        ],
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            {"json": [{"task_id": "task1", "state": "finished"}]},
            {"json": [{"task_id": "task2", "state": "finished"}]},
            {"json": [{"task_id": "task3", "state": "finished"}]},
        ],
    )

    # It should have succeeded, with the tasks as retrieved from Pulp
    assert sorted(repo.publish().result()) == [
        Task(id="task1", succeeded=True, completed=True),
        Task(id="task2", succeeded=True, completed=True),
        Task(id="task3", succeeded=True, completed=True),
    ]

    req = requests_mocker.request_history
    ids = [r.json()["id"] for r in req if r.url.endswith("/publish/")]

    # It should have triggered these distributors in this order
    assert ids == [
        "cdn_distributor",
        "cdn_distributor_unprotected",
        "docker_web_distributor_name_cli",
    ]
Example #11
def test_retries(requests_mocker):
    """Poller retries failing task searches to Pulp"""
    poller = TaskPoller(requests.Session(), "https://pulp.example.com/")

    desc = mock.Mock()
    desc.result = {"spawned_tasks": [{"task_id": "task1"}]}

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            # First response fails
            dict(status_code=400),
            # Second response fails again in an odd way (truncated JSON)
            dict(headers={"Content-Type": "application/json"}, text='["not valid!'),
            # Finally works at the third response
            dict(json=[{"task_id": "task1", "state": "finished"}]),
        ],
    )

    # First poll doesn't touch descriptor
    poller([desc])
    desc.yield_result.assert_not_called()
    desc.yield_exception.assert_not_called()

    # Second poll doesn't touch descriptor
    poller([desc])
    desc.yield_result.assert_not_called()
    desc.yield_exception.assert_not_called()

    # Third poll finally succeeds
    poller([desc])
    desc.yield_result.assert_called_once_with(
        [Task(id="task1", completed=True, succeeded=True)]
    )
    desc.yield_exception.assert_not_called()
Example #12
    def _do_unassociate(self, repo_id, type_ids):
        repo_f = self.get_repository(repo_id)
        if repo_f.exception():
            return repo_f

        current = self._repo_units.get(repo_id, set())
        removed = set()
        kept = set()

        for unit in current:
            if type_ids is None or unit.content_type_id in type_ids:
                removed.add(unit)
            else:
                kept.add(unit)

        self._repo_units[repo_id] = kept

        task = Task(
            id=self._next_task_id(),
            repo_id=repo_id,
            completed=True,
            succeeded=True,
            units=removed,
        )

        return f_return([task])
Example #13
def test_gc_error(mock_logger):
    """logs error when repo delete task returns an error response"""
    repo = {
        "id": "rhel-test-garbage-collect-7-days-old",
        "notes": {
            "pub_temp_repo": True,
            "created": _get_time_created(7)
        },
    }

    controller = _get_fake_controller(repo)
    gc = GarbageCollect()
    arg = ["", "--pulp-url", "http://some.url", "--verbose"]

    with patch("sys.argv", arg):
        with patch.object(controller.client,
                          "_delete_repository") as repo_delete:
            with patch("pubtools._pulp.task.PulpTask.pulp_client",
                       controller.client):
                repo_delete.return_value = f_return([
                    Task(
                        id="12334",
                        completed=True,
                        succeeded=False,
                        error_summary="Error occured",
                    )
                ])
                gc.main()

    mock_logger.error.assert_any_call("Error occured")
Example #14
def test_remove_with_type_ids(fast_poller, requests_mocker, client):
    """Remove succeeds when given specific type_ids."""

    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/unassociate/",
        [{"json": {"spawned_tasks": [{"task_id": "task1"}]}}],
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [{"json": [{"task_id": "task1", "state": "finished"}]}],
    )

    assert repo.remove_content(type_ids=["type1", "type2"]).result() == [
        Task(id="task1", completed=True, succeeded=True)
    ]

    # It should have passed those type_ids to Pulp
    req = requests_mocker.request_history
    assert (
        req[0].url
        == "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/unassociate/"
    )
    assert req[0].json() == {"criteria": {"type_ids": ["type1", "type2"]}}
Example #15
    def _do_import(
        self, repo_id, upload_id, unit_type_id, unit_key, unit_metadata=None
    ):
        repo_f = self.get_repository(repo_id)
        if repo_f.exception():
            # Repo can't be found, let that exception propagate
            return repo_f

        repo = repo_f.result()

        with self._state.lock:
            # Get the uploaded content we're about to import; though it's not
            # guaranteed to be present (e.g. erratum has no file).
            # If not present, we just use an empty BytesIO.
            upload_content = self._state.uploads_pending.pop(upload_id, six.BytesIO())
            upload_content.seek(0)

            new_units = self._state.unitmaker.make_units(
                unit_type_id, unit_key, unit_metadata, upload_content, repo_id
            )
            new_units = [
                attr.evolve(u, repository_memberships=[repo.id]) for u in new_units
            ]

            self._state.insert_repo_units(repo_id, new_units)

            task = Task(id=self._state.next_task_id(), completed=True, succeeded=True)

            # upload_history is a deprecated field; data is maintained for iso only.
            if unit_type_id == "iso":
                self._state.upload_history.append(
                    Upload(repo, [task], unit_key["name"], unit_key["checksum"])
                )

        return f_return([task])
Example #16
    def _delete_distributor(self, repo_id, distributor_id):
        with self._lock:
            repo_f = self.get_repository(repo_id)
            if repo_f.exception():
                # Repo can't be found, let that exception propagate
                return repo_f

            repo = repo_f.result()
            new_distributors = [
                dist for dist in repo.distributors if dist.id != distributor_id
            ]
            dist_found = new_distributors != repo.distributors

            if not dist_found:
                # Deleting something which already doesn't exist is fine
                return f_return([])

            idx = self._repositories.index(repo)
            self._repositories[idx] = attr.evolve(
                repo, distributors=new_distributors)

            return f_return([
                Task(
                    id=self._next_task_id(),
                    completed=True,
                    succeeded=True,
                    tags=[
                        "pulp:repository:%s" % repo_id,
                        "pulp:repository_distributor:%s" % distributor_id,
                        "pulp:action:remove_distributor",
                    ],
                )
            ])
Example #17
def test_gc_error(mock_logger):
    """logs error when repo delete task returns an error reponse"""
    repo = Repository(
        id="rhel-test-garbage-collect-7-days-old",
        created=_get_created(7),
        is_temporary=True,
    )
    controller = _get_fake_controller(repo)
    gc = GarbageCollect()
    arg = ["", "--pulp-url", "http://some.url"]

    with patch("sys.argv", arg):
        with patch.object(controller.client,
                          "_delete_repository") as repo_delete:
            with _patch_pulp_client(controller.client):
                repo_delete.return_value = f_return([
                    Task(
                        id="12334",
                        completed=True,
                        succeeded=False,
                        error_summary="Error occured",
                    )
                ])
                gc.main()

    mock_logger.error.assert_any_call("Error occured")
Example #18
    def copy_content(self, from_repository, to_repository, criteria=None):
        self._ensure_alive()

        from_id = from_repository.id
        to_id = to_repository.id

        found = list(from_repository.search_content(criteria).result())

        # Units are being copied to this repo, so that value obviously must appear
        # in repository_memberships from now on.
        found = [
            attr.evolve(unit, repository_memberships=[to_id]) for unit in found
        ]

        # Now put the found units into the destination repo.
        # Any kind of merging or replacing of units is handled within this step.
        self._insert_repo_units(to_id, found)

        # Arbitrarily limit the number of units included per task. The point is
        # to enforce that the caller doesn't expect any specific number of tasks.
        tasks = []
        while found:
            next_batch = found[:5]
            found = found[5:]
            tasks.append(
                Task(
                    id=self._next_task_id(),
                    repo_id=from_id,
                    completed=True,
                    succeeded=True,
                    units=units.with_key_only(next_batch),
                ))

        if not tasks:
            # This indicates that nothing was found at all.
            # That's fine, just return a task with empty units.
            tasks.append(
                Task(
                    id=self._next_task_id(),
                    repo_id=from_id,
                    completed=True,
                    succeeded=True,
                    units=[],
                ))

        return f_proxy(f_return(tasks))
Example #19
def test_delete_success(fast_poller, requests_mocker, client):
    """delete succeeds and returns spawned tasks"""

    repo = Repository(
        id="some-repo",
        distributors=[
            Distributor(id="dist1", type_id="type1", repo_id="some-repo"),
            Distributor(id="dist2", type_id="type2", repo_id="some-repo"),
        ],
    )
    repo._set_client(client)

    requests_mocker.delete(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/distributors/dist1/",
        json={"spawned_tasks": [{
            "task_id": "task1"
        }, {
            "task_id": "task2"
        }]},
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        json=[
            {
                "task_id": "task1",
                "state": "finished"
            },
            {
                "task_id": "task2",
                "state": "skipped"
            },
        ],
    )

    # It should succeed, with the tasks as retrieved from Pulp
    dist = repo.distributors[0]
    delete_dist = dist.delete()
    assert sorted(delete_dist) == [
        Task(id="task1", succeeded=True, completed=True),
        Task(id="task2", succeeded=True, completed=True),
    ]

    # And should now be detached
    with pytest.raises(DetachedException):
        dist.delete()
Example #20
def test_task_repo_id_from_tags():
    """repo_id is initialized from tags where possible"""
    task = Task.from_data(
        {
            "task_id": "some-task",
            "state": "finished",
            "tags": ["pulp:foo:bar", "pulp:repository:some-repo"],
        }
    )
    assert task.repo_id == "some-repo"
Example #21
    def _do_sync(self, repo_id,
                 sync_config):  # pylint:disable = unused-argument
        repo_f = self.get_repository(repo_id)
        if repo_f.exception():
            # Repo can't be found, let that exception propagate
            return repo_f

        task = Task(id=self._next_task_id(), completed=True, succeeded=True)

        self._sync_history.append(Sync(repo_f.result(), [task], sync_config))

        return f_return([task])
Example #22
def test_can_search_task(client, requests_mocker):
    """search_task issues tasks/search POST request as expected."""
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        json=[
            {
                "task_id": "task1",
                "state": "finished",
                "tags": ["pulp:repository:repo1", "pulp:action:publish"],
            },
            {
                "task_id": "task2",
                "state": "error",
                "tags": ["pulp:repository:repo1", "pulp:action:publish"],
            },
        ],
    )

    tasks_f = client.search_task()
    tasks = [task for task in tasks_f.result()]
    # task objects are returned
    assert sorted(tasks) == [
        Task(
            id="task1",
            completed=True,
            succeeded=True,
            tags=["pulp:repository:repo1", "pulp:action:publish"],
        ),
        Task(
            id="task2",
            completed=True,
            succeeded=False,
            tags=["pulp:repository:repo1", "pulp:action:publish"],
            error_summary="Pulp task [task2] failed: <unknown error>",
            error_details="Pulp task [task2] failed: <unknown error>",
        ),
    ]
    # api is called once
    assert requests_mocker.call_count == 1
Example #23
    def _publish_repository(self, repo, distributors_with_config):
        repo_f = self.get_repository(repo.id)
        if repo_f.exception():
            # Repo can't be found, let that exception propagate
            return repo_f

        tasks = []
        for _ in distributors_with_config:
            tasks.append(
                Task(id=self._next_task_id(), completed=True, succeeded=True))

        self._publish_history.append(Publish(repo, tasks))

        return f_return(tasks)
Example #24
def test_search_task():
    controller = FakeController()

    task1 = Task(
        id="task1",
        completed=True,
        succeeded=True,
        tags=[
            "pulp:repository:repo1",
            "pulp:action:publish",
        ],
    )
    task2 = Task(
        id="task2",
        completed=True,
        succeeded=True,
        tags=[
            "pulp:repository:repo1",
            "pulp:action:import_upload",
        ],
    )

    controller.insert_task(task1)
    controller.insert_task(task2)

    # The tasks I inserted should be present
    assert controller.tasks == [task1, task2]

    client = controller.client

    crit = Criteria.with_field("tags", "pulp:action:publish")
    resp = client.search_task(crit).result().data
    assert resp == [task1]

    crit2 = Criteria.with_field("id", "task2")
    resp2 = client.search_task(crit2).result().data
    assert resp2 == [task2]
Example #25
    def _do_import(self, repo_id, upload_id, unit_type_id, unit_key):
        # pylint: disable=unused-argument
        repo_f = self.get_repository(repo_id)
        if repo_f.exception():
            # Repo can't be found, let that exception propagate
            return repo_f

        repo = repo_f.result()

        task = Task(id=self._next_task_id(), completed=True, succeeded=True)

        self._upload_history.append(
            Upload(repo, [task], unit_key["name"], unit_key["checksum"]))

        return f_return([task])
Example #26
    def _delete_repository(self, repo_id):
        with self._lock:
            found = False
            for idx, repo in enumerate(self._repositories):
                if repo.id == repo_id:
                    found = True
                    break

            if not found:
                # Deleting something which already doesn't exist is fine
                return f_return([])

            self._repositories.pop(idx)  # pylint: disable=undefined-loop-variable
            return f_return([
                Task(id=self._next_task_id(), completed=True, succeeded=True)
            ])
Example #27
def test_task_error():
    """from_data sets error-related attributes appropriately"""
    data = {
        "task_id": "failed-task",
        "state": "error",
        "error": {
            "code": "ABC00123",
            "description": "Simulated error",
            "data": {
                "message": "message from data",
                "details": {"errors": ["another message", "and another"]},
            },
        },
        "traceback": textwrap.dedent(
            """
            Traceback (most recent call last):
                File "/usr/lib/python2.7/site-packages/celery/app/trace.py", line 367, in trace_task
                    R = retval = fun(*args, **kwargs)
                File "/home/vagrant/devel/pulp/server/pulp/server/db/querysets.py", line 119, in get_or_404
                    raise pulp_exceptions.MissingResource(**kwargs)
                MissingResource: Missing resource(s): repo_id=zoo, distributor_id=iso_distributor
            """
        ).strip(),
    }
    task = Task.from_data(data)
    assert (
        task.error_summary
        == "Pulp task [failed-task] failed: ABC00123: Simulated error"
    )
    assert (
        task.error_details
        == textwrap.dedent(
            """
            Pulp task [failed-task] failed: ABC00123: Simulated error:
              message from data
              another message
              and another
              Traceback (most recent call last):
                  File "/usr/lib/python2.7/site-packages/celery/app/trace.py", line 367, in trace_task
                      R = retval = fun(*args, **kwargs)
                  File "/home/vagrant/devel/pulp/server/pulp/server/db/querysets.py", line 119, in get_or_404
                      raise pulp_exceptions.MissingResource(**kwargs)
                  MissingResource: Missing resource(s): repo_id=zoo, distributor_id=iso_distributor
            """
        ).strip()
    )
Example #28
def test_delete_absent(fast_poller, requests_mocker, client):
    """delete of an object which already doesn't exist is successful"""

    # Two handles to same repo
    repo1 = Repository(id="some-repo")
    repo1.__dict__["_client"] = client

    repo2 = Repository(id="some-repo")
    repo2.__dict__["_client"] = client

    requests_mocker.delete(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/",
        [
            # first attempt to delete
            dict(json={"spawned_tasks": [{
                "task_id": "task1"
            }]}),
            # second attempt fails as 404 because it already was deleted
            dict(
                status_code=404,
                json={
                    "http_status": 404,
                    "http_request_method": "DELETE"
                },
            ),
        ],
    )

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        json=[{
            "task_id": "task1",
            "state": "finished"
        }],
    )

    # Delete via first handle succeeds with the spawned task
    assert sorted(repo1.delete().result()) == [
        Task(id="task1", succeeded=True, completed=True)
    ]

    # Delete via second handle also succeeds, but with no tasks
    assert sorted(repo2.delete().result()) == []
Example #29
    def _do_unassociate(self, repo_id, criteria=None):
        repo_f = self.get_repository(repo_id)
        if repo_f.exception():
            return repo_f

        with self._state.lock:
            current = self._state.repo_unit_keys.get(repo_id, set())
            units_with_key = [
                {"key": key, "unit": self._state.units_by_key[key]} for key in current
            ]
            removed_units = set()
            kept_keys = set()

            criteria = criteria or Criteria.true()
            # Validate the criteria here, as in the real scenario.
            pulp_search = search_for_criteria(
                criteria, type_hint=Unit, unit_type_accum=None
            )

            # raise an error if criteria with filters doesn't include type_ids
            if pulp_search.filters and not pulp_search.type_ids:
                raise ValueError(
                    "Criteria to remove_content must specify at least one unit type!"
                )

            for unit_with_key in units_with_key:
                unit = unit_with_key["unit"]
                if match_object(criteria, unit):
                    removed_units.add(unit)
                else:
                    kept_keys.add(unit_with_key["key"])

            self._state.repo_unit_keys[repo_id] = kept_keys

            task = Task(
                id=self._state.next_task_id(),
                repo_id=repo_id,
                completed=True,
                succeeded=True,
                units=units.with_key_only(removed_units),
            )

        return f_return([task])
Example #30
def test_search_null_and():
    """Search with an empty AND gives an error."""
    controller = FakeController()

    dist1 = Distributor(id="yum_distributor",
                        type_id="yum_distributor",
                        repo_id="repo1")
    repo1 = Repository(id="repo1", distributors=[dist1])

    controller.insert_repository(repo1)
    controller.insert_task(Task(id="abc123"))

    client = controller.client
    crit = Criteria.and_()
    assert "Invalid AND in search query" in str(
        client.search_repository(crit).exception())
    assert "Invalid AND in search query" in str(
        client.search_distributor(crit).exception())
    assert "Invalid AND in search query" in str(
        client.search_task(crit).exception())