def test_staged_simple_files():
    """Loading a staged directory of simple files yields the expected push items."""
    staged_dir = os.path.join(DATADIR, "simple_files")
    source = Source.get("staged:" + staged_dir)

    files = list(source)

    files.sort(key=lambda item: item.src)

    # It should load all the expected files with fields filled in by metadata
    assert files == [
        FilePushItem(
            name="test.txt",
            state="PENDING",
            src=os.path.join(staged_dir, "dest1/ISOS/test.txt"),
            dest=["dest1"],
            md5sum=None,
            sha256sum=
            "d8301c5f72f16455dbc300f3d1bef8972424255caad103cc6c7ba7dc92d90ca8",
            origin=staged_dir,
            build=None,
            signing_key=None,
            description=None,
        ),
        FilePushItem(
            name="some-file.txt",
            state="PENDING",
            src=os.path.join(staged_dir, "dest2/FILES/some-file"),
            dest=["dest2"],
            md5sum=None,
            sha256sum=
            "315f5bdb76d078c43b8ac0064e4a0164612b1fce77c869345bfc94c75894edd3",
            origin=staged_dir,
            build=None,
            signing_key=None,
            description=None,
            version="1.2.3",
        ),
        FilePushItem(
            name="some-iso",
            state="PENDING",
            src=os.path.join(staged_dir, "dest2/ISOS/some-iso"),
            dest=["dest2"],
            md5sum=None,
            sha256sum=
            "db68c8a70f8383de71c107dca5fcfe53b1132186d1a6681d9ee3f4eea724fabb",
            origin=staged_dir,
            build=None,
            signing_key=None,
            description="My wonderful ISO",
        ),
    ]
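# For reference, the staged tree consumed above looks roughly like this
# (a sketch based only on the paths asserted in the test; the metadata file
# at the staging root, which supplies fields such as "version" and
# "description", is omitted):
#
#   simple_files/
#       dest1/ISOS/test.txt
#       dest2/FILES/some-file
#       dest2/ISOS/some-iso
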
def test_upload_checks_repos(tmpdir):
    """Upload fails if upload apparently succeeded in pulp client, yet the item
    still is missing from all Pulp repos."""

    testfile = tmpdir.join("myfile")
    testfile.write("hello")

    pulp_ctrl = FakeController()
    repo = FileRepository(id="some-repo")
    pulp_ctrl.insert_repository(repo)

    item = NeverInReposItem(pushsource_item=FilePushItem(
        name="test", src=str(testfile), dest=["some-repo"]))
    item = item.with_checksums()

    ctx = item.upload_context(pulp_ctrl.client)
    upload_f = item.ensure_uploaded(ctx)

    # The upload attempt should fail.
    exc = upload_f.exception()

    # It should tell us why & which item.
    assert (
        "item supposedly uploaded successfully, but remains missing from Pulp:"
        in str(exc))
    assert "FilePushItem(name='test'" in str(exc)
# Note: the pytest parametrization is not shown in this excerpt; the values
# below are representative out-of-range examples only.
@pytest.mark.parametrize("value", [-100000, 100000])
def test_display_order_invalid(value):
    """Verify that ValueError is raised when attempting to set display_order
    to an out-of-range value."""

    with pytest.raises(ValueError) as excinfo:
        FilePushItem(name="item", display_order=value)
    assert "display_order must be within range -99999 .. 99999" in str(
        excinfo.value)
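# For contrast, a minimal happy-path sketch (not part of the original suite),
# assuming FilePushItem is importable from the pushsource package and that
# in-range values are accepted as given:
from pushsource import FilePushItem

def test_display_order_valid():
    item = FilePushItem(name="item", display_order=5)
    assert item.display_order == 5
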
def test_load_filters():
    """Push items are filtered to supported Pulp destinations."""

    ctx = Context()
    phase = LoadPushItems(
        ctx,
        ["fake:"],
        allow_unsigned=True,
        pre_push=False,
    )

    # Set up these items to be generated by pushsource.
    # This simulates the ET case where some files are generated with
    # both Pulp repo IDs and FTP paths as destinations.
    fake_items = [
        FilePushItem(name="file1",
                     dest=["some-repo", "other-repo", "/some/path"]),
        FilePushItem(name="file2", dest=["/some/path", "/other/path"]),
        FilePushItem(name="file3", dest=["final-repo"]),
    ]
    Source.register_backend("fake", lambda: fake_items)

    # Let it run to completion...
    with phase:
        pass

    # It should have succeeded
    assert not ctx.has_error

    # Now let's get everything from the output queue.
    all_outputs = []
    while True:
        item = phase.out_queue.get()
        if item is Phase.FINISHED:
            break
        all_outputs.append(item.pushsource_item)

    # We should have got this:
    assert all_outputs == [
        # we get file1, but only repo IDs have been kept.
        FilePushItem(name="file1", dest=["some-repo", "other-repo"]),
        # we don't get file2 at all, since dest was filtered down to nothing.
        # we get file3 exactly as it was, since no changes were needed.
        FilePushItem(name="file3", dest=["final-repo"]),
    ]
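# The filtering exercised above boils down to dropping destinations which are
# filesystem paths rather than Pulp repo IDs; a standalone sketch of that rule
# (an assumption for illustration, not the phase's actual implementation):
def pulp_dests_only(dests):
    return [d for d in dests if not d.startswith("/")]

assert pulp_dests_only(["some-repo", "other-repo", "/some/path"]) == [
    "some-repo",
    "other-repo",
]
assert pulp_dests_only(["/some/path", "/other/path"]) == []
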
    def push_item_for_file(self, unit):
        """Build a FilePushItem representing a deleted Pulp file unit."""
        out = {}

        out["state"] = "DELETED"
        out["origin"] = "pulp"
        out["name"] = unit.path
        out["sha256sum"] = unit.sha256sum

        return FilePushItem(**out)
    def push_item_for_file(self, unit, dest, state):
        """Build a FilePushItem for a Pulp file unit, targeting the given
        destination repo with the given state."""
        out = {}

        out["state"] = state
        out["origin"] = "pulp"
        out["name"] = unit.path
        out["sha256sum"] = unit.sha256sum
        out["dest"] = [dest]

        return FilePushItem(**out)
def test_update_checks_state():
    """Update fails if update apparently succeeded in pulp client, yet the item
    doesn't match the desired state."""

    pulp_unit = FileUnit(
        unit_id="some-file-unit",
        path="some/file.txt",
        size=5,
        sha256sum=
        "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063",
        description="a test file",
        repository_memberships=["some-repo"],
    )

    pulp_ctrl = FakeController()
    repo = FileRepository(id="some-repo")
    pulp_ctrl.insert_repository(repo)
    pulp_ctrl.insert_units(repo, [pulp_unit])

    item = NeverUpToDateItem(
        pushsource_item=FilePushItem(
            name="some/file.txt",
            sha256sum=
            "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063",
            dest=["some-repo"],
        ),
        pulp_unit=pulp_unit,
        pulp_state=State.NEEDS_UPDATE,
    )

    # Try updating it.
    update_f = item.ensure_uptodate(pulp_ctrl.client)

    # The update attempt should fail.
    exc = update_f.exception()

    # It should tell us why.
    assert (
        "item supposedly updated successfully, but actual and desired state still differ:"
        in str(exc))

    # It should tell us the item we failed to process.
    assert "item:         FilePushItem(name='some/file.txt'" in str(exc)

    # It should show the current and desired field values:

    # The 'current unit', i.e. the state after our update, has the original
    # description reversed.
    assert re.search(r"current unit: FileUnit.*elif tset a", str(exc))

    # The 'desired unit', i.e. the reason we still don't consider the unit up-to-date,
    # wants to reverse the description back again...
    assert re.search(r"desired unit: FileUnit.*a test file", str(exc))
def test_keep_prepush_no_dest_items():
    """Push item filtering keeps items with no dest if pre-pushable."""

    ctx = Context()
    phase = LoadPushItems(
        ctx,
        ["fake:"],
        allow_unsigned=True,
        pre_push=True,
    )

    fake_items = [
        FilePushItem(name="file", dest=["some-repo"]),
        RpmPushItem(name="rpm", dest=[]),
    ]
    Source.register_backend("fake", lambda: fake_items)

    # Let it run to completion...
    with phase:
        pass

    # It should have succeeded
    assert not ctx.has_error

    # Now let's get everything from the output queue.
    all_outputs = []
    while True:
        item = phase.out_queue.get()
        if item is Phase.FINISHED:
            break
        all_outputs.append(item.pushsource_item)

    # We should have got this:
    assert all_outputs == [
        # get file as usual
        FilePushItem(name="file", dest=["some-repo"]),
        # even though this item has no destination, we still get it since rpms
        # support pre-push and pre_push was enabled.
        RpmPushItem(name="rpm", dest=[]),
    ]
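# Together with test_load_filters above, this implies a keep/drop rule roughly
# like the following; a sketch of that rule as an assumption, not the phase's
# actual code: items with no remaining destination survive only when pre-push
# is enabled and the item type supports it.
def should_keep(dests, supports_prepush, pre_push):
    return bool(dests) or (pre_push and supports_prepush)

assert should_keep(["some-repo"], supports_prepush=False, pre_push=False)
assert should_keep([], supports_prepush=True, pre_push=True)  # e.g. the rpm above
assert not should_keep([], supports_prepush=True, pre_push=False)
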
def test_collect_dupes():
    """Collect phase filters out duplicate items during iteration."""

    ctx = Context()
    phase = Collect(context=ctx, collector=None)

    # Set up some items to put onto the queue.
    files = [
        PulpFilePushItem(pushsource_item=FilePushItem(
            name="file%s" % i,
            dest=["some-repo"],
            src="/tmp/file%s" % i,
            state="PENDING",
        )) for i in range(0, 10)
    ]

    # Let's add some duplicates of what's already there, just with an
    # updated state.
    files.append(
        attr.evolve(
            files[0],
            pushsource_item=attr.evolve(files[0].pushsource_item,
                                        state="EXISTS"),
        ))
    files.append(
        attr.evolve(
            files[0],
            pushsource_item=attr.evolve(files[0].pushsource_item,
                                        state="PUSHED"),
        ))
    files.append(
        attr.evolve(
            files[4],
            pushsource_item=attr.evolve(files[4].pushsource_item,
                                        state="WHATEVER"),
        ))

    # Sanity check: now we have this many files
    assert len(files) == 13

    # Put everything on the queue...
    for item in files:
        phase.in_queue.put(item)

    # Put this so that iteration will end
    phase.in_queue.put(Phase.FINISHED)

    # Now let's see how iteration over it will work out
    got_items = []
    for batch in phase.iter_for_collect():
        got_items.extend([i.pushsource_item for i in batch])

    # We got this many items - 3 dupes filtered, so only 10
    assert len(got_items) == 10

    # And let's check exactly what we got:
    # - order should be the same as the input, but...
    # - items at index 0 and 4 use their last submitted STATE rather
    #   than the original
    assert got_items == [
        FilePushItem(
            name="file0",
            dest=["some-repo"],
            src="/tmp/file0",
            state="PUSHED",
        ),
        FilePushItem(
            name="file1",
            dest=["some-repo"],
            src="/tmp/file1",
            state="PENDING",
        ),
        FilePushItem(
            name="file2",
            dest=["some-repo"],
            src="/tmp/file2",
            state="PENDING",
        ),
        FilePushItem(
            name="file3",
            dest=["some-repo"],
            src="/tmp/file3",
            state="PENDING",
        ),
        FilePushItem(
            name="file4",
            dest=["some-repo"],
            src="/tmp/file4",
            state="WHATEVER",
        ),
        FilePushItem(
            name="file5",
            dest=["some-repo"],
            src="/tmp/file5",
            state="PENDING",
        ),
        FilePushItem(
            name="file6",
            dest=["some-repo"],
            src="/tmp/file6",
            state="PENDING",
        ),
        FilePushItem(
            name="file7",
            dest=["some-repo"],
            src="/tmp/file7",
            state="PENDING",
        ),
        FilePushItem(
            name="file8",
            dest=["some-repo"],
            src="/tmp/file8",
            state="PENDING",
        ),
        FilePushItem(
            name="file9",
            dest=["some-repo"],
            src="/tmp/file9",
            state="PENDING",
        ),
    ]
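# The de-duplication asserted above can be reproduced with a small standalone
# sketch (an assumption about the rule, not Collect's implementation): output
# order follows the first occurrence of each key, while later duplicates
# overwrite the earlier entry and hence its state.
def dedupe(items, key=lambda item: (item["name"], item["src"])):
    seen = {}
    for item in items:
        seen[key(item)] = item  # later duplicates win; insertion order is kept
    return list(seen.values())

sample = [
    {"name": "file0", "src": "/tmp/file0", "state": "PENDING"},
    {"name": "file1", "src": "/tmp/file1", "state": "PENDING"},
    {"name": "file0", "src": "/tmp/file0", "state": "PUSHED"},
]
assert [i["state"] for i in dedupe(sample)] == ["PUSHED", "PENDING"]
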
def test_load_blocking_vs_nonblocking(tmpdir):
    """Verify that the phase efficiently handles both items where with_checksums
    will block, and items where with_checksums will immediately return.
    """
    ctx = Context()
    in_queue = ctx.new_queue()

    # Add various push items onto the queue.
    all_filenames = []
    for i in range(0, 16):
        filename = "file%s" % i
        all_filenames.append(filename)
        filepath = tmpdir.join(filename)
        filepath.write(str(i))

        # Do a 50-50 mix between:
        # - even items: checksums are already known
        # - odd items: checksums are not known (and calculating them
        #   is not instantaneous)
        #
        if i % 2 == 0:
            item = FilePushItem(
                name=filename, src=str(filepath), md5sum=FAKE_MD5, sha256sum=FAKE_SHA256
            )
        else:
            item = SlowFilePushItem(name=filename, src=str(filepath))

        in_queue.put(PulpFilePushItem(pushsource_item=item))

    in_queue.put(Phase.FINISHED)

    # Prepare the phase for loading checksums.
    phase = LoadChecksums(
        context=ctx,
        in_queue=in_queue,
        # Don't care about update_push_items for this test
        update_push_items=lambda *_: (),
    )

    # Let it run...
    with phase:
        pass

    # Should not have been any errors
    assert not ctx.has_error

    # Now let's get everything from the output queue.
    all_outputs = []
    while True:
        item = phase.out_queue.get()
        if item is Phase.FINISHED:
            break
        all_outputs.append(item)

    # Check the order of the files we've got:
    names = [i.pushsource_item.name for i in all_outputs]

    # Naturally we should have got all the same names back as we put in
    assert sorted(names) == sorted(all_filenames)

    # However, all the *even* names should have come first - and in the same
    # order as the input queue - because those had checksums available and
    # so could be yielded immediately.
    assert names[0:8] == [
        "file0",
        "file2",
        "file4",
        "file6",
        "file8",
        "file10",
        "file12",
        "file14",
    ]
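# A standalone sketch of why the even-numbered items could be yielded right
# away (an assumption about the rule, not the phase's implementation):
# checksum calculation is skipped whenever the sums are already present.
import hashlib

def ensure_sha256(path, existing_sha256=None):
    if existing_sha256:
        return existing_sha256  # already known: no blocking file read needed
    with open(path, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest()

# FAKE_SHA256 is the placeholder checksum used by the test above; no file is
# read in this case.
assert ensure_sha256("not-read", FAKE_SHA256) == FAKE_SHA256
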
def test_push_copy_fails(fake_controller, fake_nocopy_push, fake_state_path,
                         command_tester, caplog):
    """Test that push detects and fails in the case where a Pulp content copy
    claims to succeed, but doesn't put expected content in the target repo.

    While not expected to happen under normal conditions, there have historically
    been a handful of Pulp bugs or operational issues which can trigger this.
    """
    client = fake_controller.client

    iso_dest1 = client.get_repository("iso-dest1").result()
    iso_dest2 = client.get_repository("iso-dest2").result()

    # Make this file exist but not in all the desired repos.
    existing_file = FileUnit(
        path="some-file",
        sha256sum=
        "db68c8a70f8383de71c107dca5fcfe53b1132186d1a6681d9ee3f4eea724fabb",
        size=46,
    )
    fake_controller.insert_units(iso_dest1, [existing_file])

    # Unit is now in iso-dest1.
    # Set up a pushsource backend which requests push of the same content
    # to both (iso-dest1, iso-dest2).
    Source.register_backend(
        "test",
        lambda: [
            FilePushItem(
                # Note: a real push item would have to have 'src' pointing at an
                # existing file here. It's OK to omit that if the checksum exactly
                # matches something already in Pulp.
                name="some-file",
                sha256sum=
                "db68c8a70f8383de71c107dca5fcfe53b1132186d1a6681d9ee3f4eea724fabb",
                dest=["iso-dest1", "iso-dest2"],
            )
        ],
    )

    args = [
        "",
        "--source",
        "test:",
        "--pulp-url",
        "https://pulp.example.com/",
    ]

    run = functools.partial(entry_point, cls=lambda: fake_nocopy_push)

    # Ask it to push.
    with pytest.raises(SystemExit) as excinfo:
        command_tester.test(
            run,
            args,
            # Can't guarantee a stable log order.
            compare_plaintext=False,
            compare_jsonl=False,
        )

    # It should have failed.
    assert excinfo.value.code == 59

    # It should tell us why it failed.
    msg = ("Fatal error: Pulp unit not present in repo(s) iso-dest2 "
           "after copy: FileUnit(path='some-file'")
    assert msg in caplog.text
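# The post-copy verification this test relies on amounts to a set difference
# between the requested destinations and the unit's repository memberships;
# a sketch of that check as an assumption, not the push code itself:
def missing_repos(requested_dests, repository_memberships):
    return sorted(set(requested_dests) - set(repository_memberships))

assert missing_repos(["iso-dest1", "iso-dest2"], ["iso-dest1"]) == ["iso-dest2"]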