Example #1
def test_can_update_repo():
    controller = FakeController()

    controller.insert_repository(
        FileRepository(id="repo", product_versions=["a", "b", "c"]))

    client = controller.client

    # Should be able to get the repo.
    repo = client.get_repository("repo").result()

    # Let's try putting it back. Note that we change both mutable
    # and immutable fields here.
    update_f = client.update_repository(
        attr.evolve(repo, eng_product_id=123, product_versions=["d", "b"]))

    # The update should succeed (and return None)
    assert update_f.result() is None

    # Try getting the same repo back.
    repo_updated = client.get_repository("repo").result()

    # It should be equal to this:
    assert repo_updated == FileRepository(
        id="repo",
        # product_versions is mutable, so it's what we asked for (with
        # values canonicalized by sorting)
        product_versions=["b", "d"],
        # eng_product_id is not mutable, so that update was ignored
        eng_product_id=None,
    )
Example #2
    def load_initial(self):
        """Initial load of data into the fake, in the case where no state
        has previously been persisted.

        This will populate a hardcoded handful of repos which are expected
        to always be present in a realistically configured rhsm-pulp server.
        """
        self.ctrl.insert_repository(FileRepository(id="redhat-maintenance"))
        self.ctrl.insert_repository(FileRepository(id="all-iso-content"))
        self.ctrl.insert_repository(YumRepository(id="all-rpm-content"))
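The PersistentFake class and its import path are not shown in these examples; below is only a minimal usage sketch of the method above. The import path is hypothetical, and the repo ID checked is one of the hardcoded repos inserted by load_initial().

from pubtools._pulp.services.fakepulp import PersistentFake  # hypothetical path

fake = PersistentFake(state_path="/tmp/fake-pulp-state")  # example path only
fake.load_initial()

# The hardcoded repos should now be visible through the fake controller's client.
client = fake.ctrl.client
assert client.get_repository("all-rpm-content").result().id == "all-rpm-content"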
Example #3
def get_task_instance(on, *repos):

    iso_distributor = Distributor(
        id="iso_distributor",
        type_id="iso_distributor",
        relative_url="root",
        repo_id="redhat-maintenance",
    )
    maint_repo = FileRepository(id="redhat-maintenance",
                                distributors=[iso_distributor])

    if on:
        task_instance = FakeSetMaintenanceOn()
        task_instance.pulp_client_controller.insert_repository(maint_repo)
    else:
        task_instance = FakeSetMaintenanceOff()
        task_instance.pulp_client_controller.insert_repository(maint_repo)
        # When unsetting maintenance mode, we need to pre-set maintenance first
        report = task_instance.pulp_client.get_maintenance_report().result()
        report = report.add([repo.id for repo in repos])
        task_instance.pulp_client.set_maintenance(report)

    for repo in list(repos):
        task_instance.pulp_client_controller.insert_repository(repo)

    return task_instance
Example #4
def test_clear_file_skip_publish(command_tester):
    """Clearing a repo with file content while skipping publish succeeds."""

    task_instance = FakeClearRepo()

    repo = FileRepository(
        id="some-filerepo",
        eng_product_id=123,
        relative_url="some/publish/url",
        mutable_urls=[],
    )

    files = [FileUnit(path="hello.txt", size=123, sha256sum="a" * 64)]

    task_instance.pulp_client_controller.insert_repository(repo)
    task_instance.pulp_client_controller.insert_units(repo, files)

    # It should run with expected output.
    command_tester.test(
        task_instance.main,
        [
            "test-clear-repo",
            "--pulp-url",
            "https://pulp.example.com/",
            "--skip",
            "foo,publish,bar",
            "some-filerepo",
        ],
    )

    # It should not have published Pulp repos
    assert task_instance.pulp_client_controller.publish_history == []
Example #5
def test_can_upload_history(tmpdir):
    """repo.upload_file() succeeds with fake client and populates upload_history.

    Note that upload_history is deprecated, but remains working for now.
    """
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1").result()

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    upload_f = repo1.upload_file(str(somefile))

    # The future should resolve successfully
    tasks = upload_f.result()

    # The task should be successful.
    assert tasks[0].succeeded

    # The change should be reflected in the controller's upload history
    history = controller.upload_history

    digest = "fad3fc1e6d583b2003ec0a5273702ed8fcc2504271c87c40d9176467ebe218cb"
    assert len(history) == 1
    assert history[0].repository == repo1
    assert history[0].tasks == tasks
    assert history[0].name == somefile.basename
    assert history[0].sha256 == digest
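As an aside on the digest checked above: it is simply the SHA-256 of the 29 bytes written to the file, which can be verified with the standard library. This is an illustration only, not part of the original test.

import hashlib

data = b"there is some binary data:\x00\x01\x02"
assert len(data) == 29
assert hashlib.sha256(data).hexdigest() == (
    "fad3fc1e6d583b2003ec0a5273702ed8fcc2504271c87c40d9176467ebe218cb")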
Example #6
def test_client_no_cache_errors(monkeypatch):
    """caching_pulp_client does not cache failed get_repository calls"""

    with TaskWithPulpClient() as task:
        monkeypatch.setattr(
            sys,
            "argv",
            [
                "",
                "--pulp-url",
                "http://some.url",
            ],
        )

        # Try getting a repo *before* it's added to the client.
        repo1 = task.caching_pulp_client.get_repository("test-repo")

        # Now add the repo and get it again.
        task.pulp_ctrl.insert_repository(FileRepository(id="test-repo"))

        repo2 = task.caching_pulp_client.get_repository("test-repo")
        repo3 = task.caching_pulp_client.get_repository("test-repo")

        # The first fetch should fail since the repo didn't exist yet.
        assert repo1.exception()

        # Since it failed, it should not have been cached and returned again.
        assert repo1 is not repo2

        # But caching worked as usual for the next two calls.
        assert repo2 is repo3

        # And those calls succeeded.
        assert repo2.result().id == "test-repo"
Example #7
def test_client_caches(monkeypatch):
    """caching_pulp_client caches the result of calls to get_repository"""

    with TaskWithPulpClient() as task:
        monkeypatch.setattr(
            sys,
            "argv",
            [
                "",
                "--pulp-url",
                "http://some.url",
            ],
        )

        # Add some repo
        task.pulp_ctrl.insert_repository(FileRepository(id="test-repo"))

        # Let's try getting it via the caching client.
        with task.caching_pulp_client as client:
            repo1 = task.caching_pulp_client.get_repository("test-repo")
            repo2 = task.caching_pulp_client.get_repository("test-repo")

        # Due to the caching, it should give me back *exactly* the same
        # object in both cases.
        assert repo1 is repo2

        # And it should fetch OK
        assert repo1.result().id == "test-repo"
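The caching client itself is not shown in these examples. The following is only a rough sketch of the behaviour exercised above (and by test_update_invalidates further below), not the library's implementation; it assumes a delegate client whose methods return concurrent.futures-style futures, and the class name is hypothetical.

class CachingClientSketch(object):  # hypothetical name
    def __init__(self, delegate):
        self._delegate = delegate
        self._repo_cache = {}

    def get_repository(self, repo_id):
        cached = self._repo_cache.get(repo_id)
        if cached is not None:
            return cached

        repo_f = self._delegate.get_repository(repo_id)
        self._repo_cache[repo_id] = repo_f

        # Evict failed lookups so the next call retries instead of
        # returning the cached failure.
        repo_f.add_done_callback(
            lambda f: self._repo_cache.pop(repo_id, None)
            if f.exception() else None)
        return repo_f

    def update_repository(self, repo):
        # An update invalidates any cached copy of that repository.
        self._repo_cache.pop(repo.id, None)
        return self._delegate.update_repository(repo)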
Example #8
def test_upload_checks_repos(tmpdir):
    """Upload fails if upload apparently succeeded in pulp client, yet the item
    still is missing from all Pulp repos."""

    testfile = tmpdir.join("myfile")
    testfile.write("hello")

    pulp_ctrl = FakeController()
    repo = FileRepository(id="some-repo")
    pulp_ctrl.insert_repository(repo)

    item = NeverInReposItem(pushsource_item=FilePushItem(
        name="test", src=str(testfile), dest=["some-repo"]))
    item = item.with_checksums()

    ctx = item.upload_context(pulp_ctrl.client)
    upload_f = item.ensure_uploaded(ctx)

    # The upload attempt should fail.
    exc = upload_f.exception()

    # It should tell us why & which item.
    assert (
        "item supposedly uploaded successfully, but remains missing from Pulp:"
        in str(exc))
    assert "FilePushItem(name='test'" in str(exc)
Example #9
def test_can_update_repo(requests_mocker, client):
    requests_mocker.put(
        "https://pulp.example.com/pulp/api/v2/repositories/my-repo/",
        text="null",
        headers={"Content-Type": "application/json"},
    )

    repo = FileRepository(
        id="my-repo", eng_product_id=123, product_versions=["1.0", "1.1"]
    )

    update_f = client.update_repository(repo)

    # It should succeed.
    update_f.result()

    # It should have done a single request.
    assert len(requests_mocker.request_history) == 1

    req = requests_mocker.request_history[0]

    # Should have been a PUT to the appropriate API.
    assert req.method == "PUT"
    assert req.url == "https://pulp.example.com/pulp/api/v2/repositories/my-repo/"

    # Should have requested exactly this update - only the mutable notes
    assert req.json() == {
        "delta": {
            "notes": {
                # Note the serialization into embedded JSON here.
                "product_versions": '["1.0","1.1"]'
            }
        }
    }
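The embedded-JSON value asserted above can be reproduced with the standard library; this is only an illustration of the note serialization, not the client's own code.

import json

assert json.dumps(["1.0", "1.1"], separators=(",", ":")) == '["1.0","1.1"]'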
Example #10
def test_can_reupload_file_meta(tmpdir):
    """Can overwrite by uploading same content twice with different metadata."""
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1").result()

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    time1 = datetime.datetime(2021, 12, 14, 14, 44, 0)
    time2 = datetime.datetime(2022, 1, 2, 3, 4, 5)

    upload_f = repo1.upload_file(
        str(somefile),
        description="My great file",
        cdn_path="/foo/bar.txt",
        cdn_published=time1,
    )

    # The future should resolve successfully
    tasks = upload_f.result()

    # The task should be successful.
    assert tasks[0].succeeded

    # Now upload again, but with some different values.
    upload_f = repo1.upload_file(str(somefile),
                                 description="My even better file",
                                 cdn_published=time2)

    # The future should resolve successfully
    tasks = upload_f.result()

    # The task should be successful.
    assert tasks[0].succeeded

    # File should now be in repo.
    units_in_repo = list(repo1.search_content())

    # Should result in just one unit, as the second upload effectively merges
    # with the existing unit.
    assert len(units_in_repo) == 1
    unit = units_in_repo[0]

    # Sanity check we got the right thing.
    assert unit.path == "some-file.txt"

    # Extra fields should be equal to the values passed in at the most
    # recent upload.
    assert unit.description == "My even better file"
    assert unit.cdn_published == time2

    # cdn_path was provided at the first upload and not the second.
    # In such cases it is expected that the field is wiped out, as you must
    # always provide *all* metadata at once.
    assert unit.cdn_path is None
Example #11
def fake_controller(fake_state_path):
    """Yields a pulplib FakeController which has been pre-populated with
    repos used by staged-mixed.
    """
    fake = PersistentFake(state_path=fake_state_path)
    fake.load_initial()
    controller = fake.ctrl

    # Add the repositories which are referenced from the staging area.
    controller.insert_repository(FileRepository(id="iso-dest1"))
    controller.insert_repository(
        FileRepository(id="iso-dest2", arch="x86_64", eng_product_id=123))
    controller.insert_repository(
        YumRepository(id="dest1", arch="x86_64", eng_product_id=123))
    controller.insert_repository(YumRepository(id="dest2"))

    yield controller
Example #12
def test_update_missing_repo():
    controller = FakeController()
    client = controller.client

    # Try to update a repo which is unknown to the client.
    update_f = client.update_repository(FileRepository(id="whatever"))

    # It should fail telling us the repo doesn't exist
    assert "repository not found: whatever" in str(update_f.exception())
Example #13
def test_upload_file_contains_unicode(client, requests_mocker):
    file_obj = StringIO("哈罗")
    upload_id = "cfb1fed0-752b-439e-aa68-fba68eababa3"

    requests_mocker.put(
        "https://pulp.example.com/pulp/api/v2/content/uploads/%s/0/" % upload_id,
        json=[],
    )

    repo_id = "repo1"
    repo = FileRepository(id=repo_id)
    repo.__dict__["_client"] = client

    upload_f = client._do_upload_file(upload_id, file_obj)

    assert upload_f.result() == (
        "478f4808df7898528c7f13dc840aa321c4109f5c9f33bad7afcffc0253d4ff8f",
        6,
    )
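A small aside on the expected size above: "哈罗" is two characters that encode to six bytes in UTF-8, which matches the size of 6 in the result (the checksum is presumably the SHA-256 of those same uploaded bytes). Illustration only:

assert len("哈罗".encode("utf-8")) == 6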
Example #14
def test_can_upload_units(tmpdir):
    """repo.upload_file() succeeds with fake client and populates units."""
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1").result()

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    otherfile = tmpdir.join("another.txt")
    otherfile.write("ahoy there")

    upload1_f = repo1.upload_file(str(somefile))
    upload2_f = repo1.upload_file(str(otherfile),
                                  relative_url="another/path.txt")

    for f in [upload1_f, upload2_f]:
        # The future should resolve successfully
        tasks = f.result()

        # The task should be successful.
        assert tasks[0].succeeded

    # If I now search for content in that repo, or content across all repos...
    units_in_repo = sorted(repo1.search_content().result(),
                           key=lambda u: u.sha256sum)
    units_all = sorted(client.search_content().result(),
                       key=lambda u: u.sha256sum)

    # They should be equal
    assert units_all == units_in_repo

    # And they should be this
    assert units_in_repo == [
        FileUnit(
            path="another/path.txt",
            size=10,
            sha256sum=
            "94c0c9d847ecaa45df01999676db772e5cb69cc54e1ff9db31d02385c56a86e1",
            repository_memberships=["repo1"],
            unit_id="d4713d60-c8a7-0639-eb11-67b367a9c378",
        ),
        FileUnit(
            path="some-file.txt",
            size=29,
            sha256sum=
            "fad3fc1e6d583b2003ec0a5273702ed8fcc2504271c87c40d9176467ebe218cb",
            repository_memberships=["repo1"],
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
        ),
    ]
Example #15
def test_update_checks_state():
    """Update fails if update apparently succeeded in pulp client, yet the item
    doesn't match the desired state."""

    pulp_unit = FileUnit(
        unit_id="some-file-unit",
        path="some/file.txt",
        size=5,
        sha256sum=
        "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063",
        description="a test file",
        repository_memberships=["some-repo"],
    )

    pulp_ctrl = FakeController()
    repo = FileRepository(id="some-repo")
    pulp_ctrl.insert_repository(repo)
    pulp_ctrl.insert_units(repo, [pulp_unit])

    item = NeverUpToDateItem(
        pushsource_item=FilePushItem(
            name="some/file.txt",
            sha256sum=
            "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063",
            dest=["some-repo"],
        ),
        pulp_unit=pulp_unit,
        pulp_state=State.NEEDS_UPDATE,
    )

    # Try updating it.
    update_f = item.ensure_uptodate(pulp_ctrl.client)

    # The update attempt should fail.
    exc = update_f.exception()

    # It should tell us why.
    assert (
        "item supposedly updated successfully, but actual and desired state still differ:"
        in str(exc))

    # It should tell us the item we failed to process.
    assert "item:         FilePushItem(name='some/file.txt'" in str(exc)

    # It should show the current and desired field values:

    # The 'current unit', i.e. the state after we updated, reversed the original
    # description.
    assert re.search(r"current unit: FileUnit.*elif tset a", str(exc))

    # The 'desired unit', i.e. the reason we still don't consider the unit up-to-date,
    # wants to reverse the description back again...
    assert re.search(r"desired unit: FileUnit.*a test file", str(exc))
Example #16
def test_get_relative_url_with_file_object(tmpdir):
    repo = FileRepository(id="some-repo")
    file_obj = StringIO()

    with pytest.raises(ValueError):
        repo._get_relative_url(file_obj, None)

    with pytest.raises(ValueError):
        repo._get_relative_url(file_obj, "some/path/")

    assert repo._get_relative_url(file_obj, "path/foo.txt") == "path/foo.txt"
Example #17
def test_update_repo_fails(requests_mocker, client):
    requests_mocker.put(
        "https://pulp.example.com/pulp/api/v2/repositories/my-repo/", status_code=400
    )

    repo = FileRepository(
        id="my-repo", eng_product_id=123, product_versions=["1.0", "1.1"]
    )

    update_f = client.update_repository(repo)

    # It should fail, since the HTTP request failed.
    assert "400 Client Error" in str(update_f.exception())
Example #18
def test_replace_file(tmpdir):
    """repo.upload_file() behaves as expected when replacing a file of the same name."""
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1").result()

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    otherfile = tmpdir.join("another.txt")
    otherfile.write("ahoy there")

    # Upload both files, using the same relative_url for each.
    repo1.upload_file(str(somefile), relative_url="darmok-jalad.txt").result()
    repo1.upload_file(str(otherfile), relative_url="darmok-jalad.txt").result()

    # If I now search for content in that repo, or content across all repos...
    units_in_repo = sorted(repo1.search_content().result(),
                           key=lambda u: u.sha256sum)
    units_all = sorted(client.search_content().result(),
                       key=lambda u: u.sha256sum)

    # I should find that only the second uploaded file is still present in the repo.
    assert units_in_repo == [
        FileUnit(
            path="darmok-jalad.txt",
            size=10,
            sha256sum=
            "94c0c9d847ecaa45df01999676db772e5cb69cc54e1ff9db31d02385c56a86e1",
            repository_memberships=["repo1"],
            unit_id="d4713d60-c8a7-0639-eb11-67b367a9c378",
        )
    ]

    # However, both units should still exist in the system; the first uploaded unit
    # has become an orphan.
    assert units_all == units_in_repo + [
        FileUnit(
            path="darmok-jalad.txt",
            size=29,
            sha256sum=
            "fad3fc1e6d583b2003ec0a5273702ed8fcc2504271c87c40d9176467ebe218cb",
            content_type_id="iso",
            repository_memberships=[],
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
        )
    ]
Example #19
def test_upload_file_verylarge(client, requests_mocker):
    """Client can upload a 2GB file successfully."""

    file_size = 2000000000
    file_obj = ZeroesIO(file_size)

    upload_id = "cfb1fed0-752b-439e-aa68-fba68eababa3"

    # We will be uploading many chunks and we need to mock the 'offset' for each one...
    # Note this must align with the client's internal CHUNK_SIZE.
    chunk_size = 1024 * 1024
    for i in range(0, 3000):
        requests_mocker.put(
            "https://pulp.example.com/pulp/api/v2/content/uploads/%s/%d/"
            % (upload_id, i * chunk_size),
            json=[],
        )

    repo_id = "repo1"
    repo = FileRepository(id=repo_id)
    repo.__dict__["_client"] = client

    # It should be able to upload successfully.
    upload_f = client._do_upload_file(upload_id, file_obj)

    assert upload_f.result() == (
        # We should get the right checksum and size back, which proves all
        # the data was read correctly.
        #
        # If you want to verify this checksum, try:
        #
        #   dd if=/dev/zero bs=1000000 count=2000 status=none | sha256sum
        #
        "2e0c654b6cba3a1e816726bae0eac481eb7fd0351633768c3c18392e0f02b619",
        file_size,
    )
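ZeroesIO is a test helper whose definition is not included in these examples. A minimal sketch of such a zero-filled, file-like object might look like this (hypothetical implementation; the real helper may differ):

class ZeroesIO(object):
    """A read-only, binary file-like object yielding `size` zero bytes."""

    def __init__(self, size):
        self._remaining = size

    def read(self, size=-1):
        # Return up to `size` bytes, or everything left if size is negative.
        if size is None or size < 0:
            size = self._remaining
        out = b"\x00" * min(size, self._remaining)
        self._remaining -= len(out)
        return out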
Example #20
def test_no_implemented(command_tester):
    task_instance = SetMaintenance()

    controller = FakeController()
    controller.insert_repository(FileRepository(id="redhat-maintenance"))
    client = controller.client

    arg = [
        "test-maintenance", "--pulp-url", "http://some.url", "--repo-ids",
        "repo1"
    ]

    with patch("pubtools._pulp.services.PulpClientService.pulp_client",
               client):
        with pytest.raises(NotImplementedError):
            with patch("sys.argv", arg):
                task_instance.main()
Example #21
def test_upload_nonexistent_file_raises():
    """repo.upload_file() with nonexistent file fails with fake client"""
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1")

    # If the file's not found, Python 2 raises IOError and Python 3 raises
    # FileNotFoundError. The latter is not defined in Python 2.
    if sys.version_info < (3, ):
        exception = IOError
    else:
        exception = FileNotFoundError
    with pytest.raises(exception):
        upload_f = repo1.upload_file("nonexistent_file").result()
Example #22
def test_clear_empty_repo(command_tester, fake_collector):
    """Clearing a repo which is already empty succeeds."""

    task_instance = FakeClearRepo()

    repo = FileRepository(id="some-filerepo")

    task_instance.pulp_client_controller.insert_repository(repo)

    command_tester.test(
        task_instance.main,
        [
            "test-clear-repo", "--pulp-url", "https://pulp.example.com/",
            "some-filerepo"
        ],
    )

    # No push items recorded
    assert not fake_collector.items
Example #23
def test_upload_file_meta_wrong_fields(tmpdir):
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1").result()

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    # This should immediately give an error
    with pytest.raises(ValueError) as excinfo:
        repo1.upload_file(str(somefile),
                          description="My great file",
                          size=48,
                          other="whatever")

    # It should give an indication of the problem
    assert "Not mutable FileUnit field(s): other, size" in str(excinfo.value)
Example #24
def test_upload_repo_absent_raises(tmpdir):
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1")

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    repo_copy1 = client.get_repository("repo1")
    repo_copy2 = client.get_repository("repo1")

    # Delete the repo via one handle; uploading via the other should then fail.
    assert repo_copy1.delete().result()

    exception = repo_copy2.upload_file(str(somefile)).exception()

    assert isinstance(exception, PulpException)
    assert "repo1 not found" in str(exception)
Example #25
def test_set_maintenance():

    controller = FakeController()

    maintain_repo = FileRepository(id="redhat-maintenance")

    controller.insert_repository(maintain_repo)
    # now the maintenance repo is empty

    client = controller.client

    report = client.get_maintenance_report().result()

    assert isinstance(report, MaintenanceReport)
    # return an empty report
    assert report.entries == []
    # there are no entries in the report

    report = report.add(repo_ids=["repo1", "repo2"])
    client.set_maintenance(report).result()
    # add entries to report and set maintenance

    # upload_file and publish should be called once each
    assert len(controller.upload_history) == 1
    assert len(controller.publish_history) == 1

    # get_maintenance_report should give a report object
    report = client.get_maintenance_report().result()
    assert report.last_updated_by == "ContentDelivery"
    assert len(report.entries) == 2

    report = report.remove(repo_ids=["repo1"], owner="jazhang@hostname")
    client.set_maintenance(report).result()

    # the report in the repo should have been updated
    report = client.get_maintenance_report().result()
    assert report.last_updated_by == "jazhang@hostname"
    assert len(report.entries) == 1
    assert report.entries[0].repo_id == "repo2"
Example #26
def test_update_invalidates(monkeypatch):
    """update_repository should invalidate the cache for that repository"""

    with TaskWithPulpClient() as task:
        monkeypatch.setattr(
            sys,
            "argv",
            [
                "",
                "--pulp-url",
                "http://some.url",
            ],
        )

        # Add some repo
        task.pulp_ctrl.insert_repository(
            FileRepository(id="test-repo", product_versions=["a", "b"])
        )

        # Let's try getting it via the caching client.
        repo1 = task.caching_pulp_client.get_repository("test-repo").result()
        repo2 = task.caching_pulp_client.get_repository("test-repo").result()

        # Initially consistent
        assert repo1.product_versions == ["a", "b"]
        assert repo2.product_versions == ["a", "b"]

        # Let's update the repo
        task.caching_pulp_client.update_repository(
            attr.evolve(repo1, product_versions=["new", "versions"])
        ).result()

        # Let's get the repo again...
        repo3 = task.caching_pulp_client.get_repository("test-repo")

        # The cache should have been smart enough to realize it can't
        # return the old cached value since the repo was updated.
        assert repo3.product_versions == ["new", "versions"]
Example #27
def test_upload_file_verylarge():
    """Fake client can upload a 2GB file successfully."""
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1").result()

    file_size = 2000000000
    file_obj = ZeroesIO(file_size)

    upload_f = repo1.upload_file(file_obj, relative_url="big-file")

    # The future should resolve successfully
    tasks = upload_f.result()

    # The task should be successful.
    assert tasks[0].succeeded

    # I should be able to find the corresponding unit.
    units_all = sorted(client.search_content().result(),
                       key=lambda u: u.sha256sum)

    assert units_all == [
        FileUnit(
            path="big-file",
            size=2000000000,
            # If you want to verify this checksum, try:
            #
            #   dd if=/dev/zero bs=1000000 count=2000 status=none | sha256sum
            #
            sha256sum=
            "2e0c654b6cba3a1e816726bae0eac481eb7fd0351633768c3c18392e0f02b619",
            repository_memberships=["repo1"],
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
        )
    ]
Example #28
def test_can_upload_file_meta(tmpdir):
    controller = FakeController()

    controller.insert_repository(FileRepository(id="repo1"))

    client = controller.client
    repo1 = client.get_repository("repo1").result()

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    upload_f = repo1.upload_file(
        str(somefile),
        description="My great file",
        cdn_path="/foo/bar.txt",
        version="2.0",
    )

    # The future should resolve successfully
    tasks = upload_f.result()

    # The task should be successful.
    assert tasks[0].succeeded

    # File should now be in repo.
    units_in_repo = list(repo1.search_content())
    assert len(units_in_repo) == 1
    unit = units_in_repo[0]

    # Sanity check we got the right thing.
    assert unit.path == "some-file.txt"

    # Extra fields we passed during upload should be present here.
    assert unit.description == "My great file"
    assert unit.cdn_path == "/foo/bar.txt"
    assert unit.version == "2.0"
Example #29
def test_upload_file(client, requests_mocker, tmpdir, caplog):
    """test upload a file to a repo in pulp"""

    logging.getLogger().setLevel(logging.INFO)
    caplog.set_level(logging.INFO)

    repo_id = "repo1"
    repo = FileRepository(id=repo_id)
    # Attach the client directly (bypassing the frozen attrs class) so that
    # repo.upload_file() below goes through the mocked client.
    repo.__dict__["_client"] = client

    client._CHUNK_SIZE = 20

    request_body = {
        "_href":
        "/pulp/api/v2/content/uploads/cfb1fed0-752b-439e-aa68-fba68eababa3/",
        "upload_id": "cfb1fed0-752b-439e-aa68-fba68eababa3",
    }
    upload_id = request_body["upload_id"]

    import_report = {
        "result": {},
        "error": {},
        "spawned_tasks": [{
            "_href": "/pulp/api/v2/tasks/task1/",
            "task_id": "task1"
        }],
    }

    tasks_report = [{"task_id": "task1", "state": "finished"}]

    somefile = tmpdir.join("some-file.txt")
    somefile.write(b"there is some binary data:\x00\x01\x02")

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/content/uploads/",
        json=request_body)
    requests_mocker.put(
        "https://pulp.example.com/pulp/api/v2/content/uploads/%s/0/" %
        upload_id,
        json=[],
    )
    requests_mocker.put(
        "https://pulp.example.com/pulp/api/v2/content/uploads/%s/20/" %
        upload_id,
        json=[],
    )
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/%s/actions/import_upload/"
        % repo_id,
        json=import_report,
    )
    requests_mocker.post("https://pulp.example.com/pulp/api/v2/tasks/search/",
                         json=tasks_report)
    requests_mocker.delete(
        "https://pulp.example.com/pulp/api/v2/content/uploads/%s/" % upload_id,
        json=[])

    assert repo.upload_file(str(somefile)).result() == [
        Task(id="task1", succeeded=True, completed=True)
    ]

    # The 6th request might not have been made yet; retry up to 1000 times
    # with a 0.01 sec sleep between attempts.
    for i in range(1000):
        time.sleep(0.01)
        try:
            assert requests_mocker.call_count == 6
        except AssertionError:
            if i != 999:
                continue
            else:
                raise
        else:
            break

    # The 4th call should be the import; check that the right unit_key was passed
    import_request = requests_mocker.request_history[3].json()
    import_unit_key = {
        u"name": somefile.basename,
        u"checksum":
        u"fad3fc1e6d583b2003ec0a5273702ed8fcc2504271c87c40d9176467ebe218cb",
        u"size": 29,
    }
    assert import_request["unit_key"] == import_unit_key

    messages = caplog.messages

    # It should tell us about the upload
    assert (
        "Uploading some-file.txt to repo1 [cfb1fed0-752b-439e-aa68-fba68eababa3]"
        in messages)

    # The task should have been spawned and completed
    assert "Created Pulp task: task1" in messages
    assert "Pulp task completed: task1" in messages
Example #30
def test_upload_detached():
    """upload_file raises if called on a detached repo"""
    with pytest.raises(DetachedException):
        FileRepository(id="some-repo").upload_file("some-file")