def test__filter__matches__release(self) -> None:
    """A release pinned in a requirements file survives filtering; all others are dropped."""
    requirements_file = Path(self.tempdir.name) / "requirements.txt"
    with open(requirements_file, "w") as req_fh:
        req_fh.write(
            """\
# This is needed for workshop 1
#
foo==1.2.0
# via -r requirements.in
"""
        )
    mock_config(
        f"""\
[plugins]
enabled =
    project_requirements
    project_requirements_pinned

[allowlist]
requirements_path = {self.tempdir.name}
requirements =
    requirements.txt
"""
    )
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    package = Package("foo", 1)
    package._metadata = {
        "info": {"name": "foo"},
        "releases": {"1.2.0": {}, "1.2.1": {}},
    }
    package.filter_all_releases(bs_mirror.filters.filter_release_plugins())
    self.assertEqual({"1.2.0": {}}, package.releases)
def test__filter__matches__release(self) -> None:
    """allowlist_release keeps only the release pinned in the whitelist section."""
    mock_config(
        """\
[plugins]
enabled =
    allowlist_release

[whitelist]
packages =
    foo==1.2.0
"""
    )
    test_mirror = Mirror(Path("."), Master(url="https://foo.bar.com"))
    package = Package("foo", 1, test_mirror)
    package._metadata = {
        "info": {"name": "foo"},
        "releases": {"1.2.0": {}, "1.2.1": {}},
    }
    package._filter_all_releases(test_mirror.filters.filter_release_plugins())
    self.assertEqual(package.releases, {"1.2.0": {}})
def master(package_json):
    """
    Fixture: a bandersnatch ``Master`` whose aiohttp session is replaced by
    fakes so no network traffic happens.

    :param package_json: dict returned by the fake client's ``json()`` call
    :return: the mocked ``Master`` instance
    """
    from bandersnatch.master import Master

    class FakeReader:
        # Mimics aiohttp's StreamReader: one empty read ends the stream.
        async def read(self, *args):
            return b""

    class FakeAiohttpClient:
        # Serial header bandersnatch reads from every PyPI response.
        headers = {"X-PYPI-LAST-SERIAL": "1"}

        async def __aenter__(self):
            return self

        async def __aexit__(self, *args):
            pass

        @property
        def content(self):
            # NOTE: property getters never receive positional arguments, so
            # the previous ``*args`` parameter here was dead and is removed.
            return FakeReader()

        async def json(self, *args):
            return package_json

    master = Master("https://pypi.example.com")
    master.rpc = mock.Mock()
    master.session = asynctest.MagicMock()
    master.session.get = asynctest.MagicMock(return_value=FakeAiohttpClient())
    master.session.request = asynctest.MagicMock(
        return_value=FakeAiohttpClient())
    return master
def test_latest_releases_uninitialized(self):
    """An uninitialized latest_release filter must leave every release in place."""
    _mock_config(self.config_contents)
    bandersnatch.filter.filter_release_plugins()
    test_mirror = Mirror(".", Master(url="https://foo.bar.com"))
    package = Package("foo", 1, test_mirror)
    package.info = {"name": "foo", "version": "2.0.0"}
    all_releases = {
        "1.0.0": {},
        "1.1.0": {},
        "1.1.1": {},
        "1.1.2": {},
        "1.1.3": {},
        "2.0.0": {},
    }
    # Hand the filter its own copy so the post-filter comparison is meaningful.
    package.releases = dict(all_releases)
    package._filter_releases()
    assert package.releases == all_releases
async def get_package_from_pypi(package_name, plugin_path):
    """
    Download a package from PyPI by running a restricted bandersnatch sync.

    :param package_name: requirement string naming the package to fetch
        (e.g. ``"foo"`` or ``"foo==1.0"``)
    :param plugin_path: directory used as the bandersnatch mirror root;
        every downloaded file is moved into ``<plugin_path>/dist``
    :return: True if at least one file for the package was downloaded and moved
    """
    config = BandersnatchConfig().config
    # Point bandersnatch at real PyPI, single worker, writing under plugin_path.
    config["mirror"]["master"] = "https://pypi.org"
    config["mirror"]["workers"] = "1"
    config["mirror"]["directory"] = plugin_path
    if not config.has_section("plugins"):
        config.add_section("plugins")
    config["plugins"]["enabled"] = "blocklist_release\n"
    if not config.has_section("allowlist"):
        config.add_section("allowlist")
    # Allowlist plugins restrict the sync to exactly the requested package.
    config["plugins"]["enabled"] += "allowlist_release\nallowlist_project\n"
    config["allowlist"]["packages"] = "\n".join([package_name])
    os.makedirs(os.path.join(plugin_path, "dist"), exist_ok=True)
    async with Master("https://pypi.org/") as master:
        mirror = BandersnatchMirror(homedir=plugin_path, master=master)
        # Requirement parsing strips any version specifier from package_name.
        name = Requirement(package_name).name
        result = await mirror.synchronize([name])
    package_found = False
    # Move each synced file out of the mirror tree into the flat dist/ dir.
    for package in result[name]:
        current_path = os.path.join(plugin_path, package)
        destination_path = os.path.join(plugin_path, "dist",
                                        os.path.basename(package))
        shutil.move(current_path, destination_path)
        package_found = True
    return package_found
def test__casing__no__affect(self) -> None:
    """Allowlist matching is case-insensitive: 'Foo' in config matches project 'foo'."""
    mock_config(
        """\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_release

[allowlist]
packages =
    Foo<=1.2.0
"""
    )
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    package = Package("foo", 1)
    package._metadata = {
        "info": {"name": "foo"},
        "releases": {"1.2.0": {}, "1.2.1": {}},
    }
    package.filter_all_releases(bs_mirror.filters.filter_release_plugins())
    self.assertEqual(package.releases, {"1.2.0": {}})
def test__filter__find_files(self) -> None:
    """Only projects named in the requirements file remain in packages_to_sync."""
    requirements_file = Path(self.tempdir.name) / "requirements.txt"
    with open(requirements_file, "w") as req_fh:
        req_fh.write(
            """\
# This is needed for workshop 1
#
foo==1.2.0
# via -r requirements.in
"""
        )
    mock_config(
        f"""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    project_requirements

[allowlist]
requirements =
    {requirements_file}
"""
    )
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    bs_mirror.packages_to_sync = {"foo": "", "bar": "", "baz": ""}
    bs_mirror._filter_packages()
    self.assertEqual({"foo": ""}, bs_mirror.packages_to_sync)
def test__dont__filter__prereleases(self) -> None:
    """A '<=' allowlist specifier also keeps pre-releases that satisfy it."""
    mock_config(
        """\
[plugins]
enabled =
    allowlist_release

[allowlist]
packages =
    foo<=1.2.0
"""
    )
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    package = Package("foo", 1)
    package._metadata = {
        "info": {"name": "foo"},
        "releases": {
            "1.1.0a2": {},
            "1.1.1beta1": {},
            "1.2.0": {},
            "1.2.1": {},
            "1.2.2alpha3": {},
            "1.2.3rc1": {},
        },
    }
    package.filter_all_releases(bs_mirror.filters.filter_release_plugins())
    self.assertEqual(
        package.releases, {"1.1.0a2": {}, "1.1.1beta1": {}, "1.2.0": {}}
    )
async def test_get_latest_json(monkeypatch: MonkeyPatch) -> None:
    """get_latest_json completes cleanly when the URL fetch is stubbed out."""
    fake_config = FakeConfig()
    pool = ThreadPoolExecutor(max_workers=2)
    # Unique-per-process temp file so parallel test runs don't collide.
    target_json = Path(gettempdir()) / f"unittest_{os.getpid()}.json"
    test_master = Master("https://unittest.org")
    test_master.url_fetch = do_nothing  # type: ignore
    await get_latest_json(test_master, target_json, fake_config, pool)  # type: ignore
def test_latest_releases_keep_stable(self) -> None:
    """The stable release (info.version) is kept even when newer pre-releases exist."""
    mock_config(self.config_contents)
    test_mirror = Mirror(Path("."), Master(url="https://foo.bar.com"))
    package = Package("foo", 1, test_mirror)
    package._metadata = {
        "info": {"name": "foo", "version": "2.0.0"},  # stable version
        "releases": {
            "1.0.0": {},
            "1.1.0": {},
            "1.1.1": {},
            "1.1.2": {},
            "1.1.3": {},
            "2.0.0": {},  # <= stable version, keep it
            "2.0.1b1": {},
            "2.0.1b2": {},  # <= most recent, keep it
        },
    }
    package._filter_all_releases(test_mirror.filters.filter_release_plugins())
    assert package.releases == {"2.0.1b2": {}, "2.0.0": {}}
def test__filter__varying__specifiers(self) -> None:
    """Projects listed with any specifier style are kept; unlisted projects are removed."""
    mock_config(
        """\
[mirror]
storage-backend = filesystem

[plugins]
enabled =
    allowlist_project

[allowlist]
packages =
    foo==1.2.3
    bar~=3.0,<=1.5
"""
    )
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    bs_mirror.packages_to_sync = {"foo": "", "bar": "", "snu": ""}
    bs_mirror._filter_packages()
    self.assertEqual({"foo": "", "bar": ""}, bs_mirror.packages_to_sync)
def test_latest_releases_uninitialized(self) -> None:
    """An uninitialized latest_release filter must leave every release in place."""
    mock_config(self.config_contents)
    test_mirror = Mirror(Path("."), Master(url="https://foo.bar.com"))
    package = Package("foo", 1, test_mirror)
    all_releases = {
        "1.0.0": {},
        "1.1.0": {},
        "1.1.1": {},
        "1.1.2": {},
        "1.1.3": {},
        "2.0.0": {},
    }
    # Hand the filter its own copy so the post-filter comparison is meaningful.
    package._metadata = {
        "info": {"name": "foo", "version": "2.0.0"},
        "releases": dict(all_releases),
    }
    package._filter_all_releases(test_mirror.filters.filter_release_plugins())
    assert package.releases == all_releases
def test__filter__matches__package(self):
    """Only file entries whose python_version matches the configured value survive."""
    with open(TEST_CONF, "w") as testconfig_handle:
        testconfig_handle.write(
            """\
[plugins]
enabled =
    whitelist_release_pyversion

[whitelist]
python_versions =
    foo
"""
        )
    instance = BandersnatchConfig()
    instance.config_file = TEST_CONF
    instance.load_configuration()
    test_mirror = Mirror(".", Master(url="https://foo.bar.com"))
    package = Package("foo", 1, test_mirror)
    package.info = {"name": "foo"}
    package.releases = {
        "1.2.0": [{"python_version": "foo"}, {"python_version": "foo2"}],
        "1.2.1": [{"python_version": "foo2"}],
    }
    package._filter_releases()
    # 1.2.1 had no matching file at all, so the whole release is gone;
    # 1.2.0 keeps only its matching file entry.
    self.assertListEqual(list(package.releases.keys()), ["1.2.0"])
    self.assertEqual(len(package.releases["1.2.0"]), 1)
    self.assertEqual(package.releases["1.2.0"][0], {"python_version": "foo"})
async def run(self): """ If includes is specified, then only sync those,else try to sync all other packages """ # TODO Change Bandersnatch internal API to take proxy settings in from config parameters if self.remote.proxy_url: environ['http_proxy'] = self.remote.proxy_url # local & global timeouts defaults to 10secs and 5 hours async with Master(self.remote.url) as master: if self.remote.proxy_url: environ.pop('http_proxy') deferred_download = self.remote.policy != Remote.IMMEDIATE workers = self.remote.download_concurrency or self.remote.DEFAULT_DOWNLOAD_CONCURRENCY with ProgressReport( message="Fetching Project Metadata", code="sync.fetching.project" ) as p: pmirror = PulpMirror( serial=0, # Serial currently isn't supported by Pulp master=master, workers=workers, deferred_download=deferred_download, python_stage=self, progress_report=p, ) packages_to_sync = None if self.remote.includes: packages_to_sync = [ Requirement(pkg).name for pkg in self.remote.includes ] await pmirror.synchronize(packages_to_sync)
def master(package_json: Dict[str, Any]) -> "Master":
    """
    Fixture: a ``Master`` whose session raises for any URL under
    https://not-working.example.com and otherwise returns a fake client.
    """
    from bandersnatch.master import Master

    class FakeReader:
        async def read(self, *args: Any) -> bytes:
            return b""

    class FakeAiohttpClient:
        headers = {"X-PYPI-LAST-SERIAL": "1"}

        async def __aenter__(self) -> "FakeAiohttpClient":
            return self

        async def __aexit__(self, *args: Any) -> None:
            pass

        @property
        def content(self) -> "FakeReader":
            return FakeReader()

        async def json(self, *args: Any) -> Dict[str, Any]:
            return package_json

    def session_side_effect(*args: Any, **kwargs: Any) -> Any:
        # Simulate an unreachable host for this URL prefix.
        if args[0].startswith("https://not-working.example.com"):
            raise AssertionError("Requested for expected not-working URL")
        return FakeAiohttpClient()

    fake_master = Master("https://pypi.example.com")
    fake_master.rpc = mock.Mock()  # type: ignore
    fake_master.session = mock.MagicMock()
    fake_master.session.get.side_effect = session_side_effect
    fake_master.session.request.side_effect = session_side_effect
    return fake_master
def test__filter__commented__out(self) -> None:
    """Inline comments and fully commented-out allowlist entries are ignored."""
    mock_config(
        """\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_project

[allowlist]
packages =
    foo==1.2.3 # inline comment
    # bar
"""
    )
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    bs_mirror.packages_to_sync = {"foo": "", "bar": "", "snu": ""}
    bs_mirror._filter_packages()
    # 'bar' is commented out, so only 'foo' may survive.
    self.assertEqual({"foo": ""}, bs_mirror.packages_to_sync)
async def test_get_latest_json_404(tmp_path: Path) -> None:
    """A 404 during json_update makes verify delete the stale local JSON file."""

    class FakeArgs:
        delete = True
        dry_run = False
        json_update = True
        workers = 2

    fake_args = FakeArgs()
    fake_config = FakeConfig()
    test_master = Master(fake_config.get("mirror", "master"))
    url_fetch_404 = AsyncMock(
        side_effect=ClientResponseError(code=404, history=(), request_info=None)
    )
    test_master.url_fetch = url_fetch_404  # type: ignore
    json_dir = tmp_path / "web" / "json"
    json_dir.mkdir(parents=True)
    json_file = json_dir / "bandersnatch"
    json_file.touch()
    all_package_files: List[str] = []
    await verify(test_master, fake_config, "bandersnatch", tmp_path, all_package_files, fake_args)  # type: ignore # noqa: E501
    # Upstream 404 => local file removed and no package files collected.
    assert not json_file.exists()
    assert not all_package_files
async def test_verify_url_exception(tmp_path: Path) -> None:
    """With json_update off, a fetch error leaves the existing JSON file alone."""

    class FakeArgs:
        delete = True
        dry_run = False
        json_update = False
        workers = 2

    fake_args = FakeArgs()
    fake_config = FakeConfig()
    test_master = Master(fake_config.get("mirror", "master"))
    url_fetch_404 = AsyncMock(
        side_effect=ClientResponseError(code=404, history=(), request_info=None)
    )
    test_master.url_fetch = url_fetch_404  # type: ignore
    json_dir = tmp_path / "web" / "json"
    json_dir.mkdir(parents=True, exist_ok=True)
    json_file = json_dir / "bandersnatch"
    # NOTE: this JSON is deliberately malformed — presumably to exercise the
    # parse-error path inside verify; the literal must stay exactly as-is.
    with json_file.open("w") as f:
        f.write(
            '{"releases":{"1.0":["url":"https://unittests.org/packages/a0/a0/a0a0/package-1.0.0.exe"}]}}'  # noqa: E501
        )
    all_package_files: List[str] = []
    await verify(test_master, fake_config, "bandersnatch", tmp_path, all_package_files, fake_args)  # type: ignore # noqa: E501
    assert json_file.exists()
    assert not all_package_files
def master(package_json: Dict[str, Any]) -> "Master":
    """Fixture: a ``Master`` wired to fake aiohttp objects; no network traffic."""
    from bandersnatch.master import Master

    class FakeReader:
        async def read(self, *args: Any) -> bytes:
            return b""

    class FakeAiohttpClient:
        headers = {"X-PYPI-LAST-SERIAL": "1"}

        async def __aenter__(self) -> "FakeAiohttpClient":
            return self

        async def __aexit__(self, *args: Any) -> None:
            pass

        @property
        def content(self) -> "FakeReader":
            return FakeReader()

        async def json(self, *args: Any) -> Dict[str, Any]:
            return package_json

    fake_master = Master("https://pypi.example.com")
    fake_master.rpc = mock.Mock()  # type: ignore
    fake_master.session = mock.MagicMock()
    # Both GET and generic request hand back the same fake client.
    fake_master.session.get = mock.MagicMock(return_value=FakeAiohttpClient())
    fake_master.session.request = mock.MagicMock(return_value=FakeAiohttpClient())
    return fake_master
def test_plugin_check_match(self) -> None:
    """Only project names passing the configured filter remain after filtering."""
    mock_config(self.config_contents)
    test_mirror = Mirror(Path("."), Master(url="https://foo.bar.com"))
    test_mirror.packages_to_sync = {
        "foo-good": "",
        "foo-evil": "",
        "foo-neutral": "",
    }
    test_mirror._filter_packages()
    assert list(test_mirror.packages_to_sync.keys()) == ["foo-good"]
def master(requests):
    """Fixture: a ``Master`` with its RPC and HTTP GET replaced by test doubles."""
    from bandersnatch.master import Master

    fake_master = Master("https://pypi.example.com")
    fake_master.rpc = mock.Mock()
    fake_master.session = mock.Mock()
    # The caller-supplied `requests` double services every GET.
    fake_master.session.get = requests
    return fake_master
def test_plugin_check_match(self):
    """Only project names passing the configured filter remain after filtering."""
    _mock_config(self.config_contents)
    bandersnatch.filter.filter_release_plugins()
    test_mirror = Mirror(".", Master(url="https://foo.bar.com"))
    test_mirror.packages_to_sync = {
        "foo-good": {},
        "foo-evil": {},
        "foo-neutral": {},
    }
    test_mirror._filter_packages()
    assert list(test_mirror.packages_to_sync.keys()) == ["foo-good"]
async def test_verify_producer(monkeypatch: MonkeyPatch) -> None:
    """verify_producer completes when the per-file verify step is stubbed out."""
    fake_mirror = FakeMirror("test_async_verify")
    verify_config = configparser.ConfigParser()
    verify_config["mirror"] = {"verifiers": "2"}
    test_master = Master("https://unittest.org")
    json_files = ["web/json/bandersnatch", "web/json/black"]
    # Stub the actual verification so only the producer machinery runs.
    monkeypatch.setattr(bandersnatch.verify, "verify", do_nothing)
    await verify_producer(
        test_master,
        verify_config,
        [],
        fake_mirror.mirror_base,
        json_files,
        mock.Mock(),
        None,
    )
def test_plugin_check_match(self):
    """Pre-release versions are filtered out; the stable release remains."""
    _mock_config(self.config_contents)
    bandersnatch.filter.filter_release_plugins()
    test_mirror = Mirror(".", Master(url="https://foo.bar.com"))
    package = Package("foo", 1, test_mirror)
    package.releases = {
        "foo-1.2.0rc2": {},
        "foo-1.2.0": {},
        "foo-1.2.0alpha2": {},
    }
    package._filter_releases()
    assert package.releases == {"foo-1.2.0": {}}
def setUp_mirror(self) -> None:
    """Create a Mirror plus one fake 'foobar' package and register it in self.pkgs."""
    self.mirror = Mirror(self.mirror_path, Master(url="https://foo.bar.com"))
    package = Package("foobar", 1, self.mirror)
    package._metadata = {
        "info": {"name": "foobar", "version": "1.0"},
        # The release content is irrelevant to these tests; a Mock stands in.
        "releases": mock.Mock(),
    }
    self.pkgs.append(package)
async def test_delete_packages() -> None:
    """End-to-end check that delete_packages removes every on-disk artifact
    (simple index, JSON, legacy symlink, and release blobs) for each package."""
    args = _fake_args()
    config = _fake_config()
    master = Master("https://unittest.org")
    with TemporaryDirectory() as td:
        td_path = Path(td)
        config["mirror"]["directory"] = td
        web_path = td_path / "web"
        json_path = web_path / "json"
        json_path.mkdir(parents=True)
        pypi_path = web_path / "pypi"
        pypi_path.mkdir(parents=True)
        simple_path = web_path / "simple"
        # Setup web tree with some json, package index.html + fake blobs
        for package_name in args.pypi_packages:
            package_simple_path = simple_path / package_name
            package_simple_path.mkdir(parents=True)
            package_index_path = package_simple_path / "index.html"
            package_index_path.touch()
            package_json_str = MOCK_JSON_TEMPLATE.replace(
                "PKGNAME", package_name)
            package_json_path = json_path / package_name
            with package_json_path.open("w") as pjfp:
                pjfp.write(package_json_str)
            # Legacy /pypi/<pkg>/json location is a symlink to the canonical file.
            legacy_json_path = pypi_path / package_name / "json"
            legacy_json_path.parent.mkdir()
            legacy_json_path.symlink_to(package_json_path)
            package_json = loads(package_json_str)
            # Touch an empty blob at each release file's URL path under web/.
            for _version, blobs in package_json["releases"].items():
                for blob in blobs:
                    url_parts = urlparse(blob["url"])
                    # url path starts with "/"; strip it so it lands under web/.
                    blob_path = web_path / url_parts.path[1:]
                    blob_path.parent.mkdir(parents=True, exist_ok=True)
                    blob_path.touch()
        # See we have a correct mirror setup
        assert find(web_path) == EXPECTED_WEB_BEFORE_DELETION
        # A dry run must succeed without removing anything.
        args.dry_run = True
        assert await delete_packages(config, args, master) == 0
        args.dry_run = False
        with patch("bandersnatch.delete.logger.info") as mock_log:
            assert await delete_packages(config, args, master) == 0
            assert mock_log.call_count == 1
        # See we've deleted it all
        assert find(web_path) == EXPECTED_WEB_AFTER_DELETION
def setUp_mirror(self) -> None:
    """Create a BandersnatchMirror plus one fake 'foobar' package in self.pkgs."""
    self.master = Master(url="https://foo.bar.com")
    self.mirror = BandersnatchMirror(self.mirror_path, self.master, self.backend)
    package = Package("foobar", serial=1)
    package._metadata = {
        "info": {"name": "foobar", "version": "1.0"},
        # The release content is irrelevant to these tests; a Mock stands in.
        "releases": mock.Mock(),
    }
    self.pkgs.append(package)
def test_plugin_check_match(self) -> None:
    """Pre-release versions are filtered out; the stable release remains."""
    mock_config(self.config_contents)
    test_mirror = Mirror(Path("."), Master(url="https://foo.bar.com"))
    package = Package("foo", 1, test_mirror)
    package._metadata = {
        "info": {"name": "foo", "version": "foo-1.2.0"},
        "releases": {
            "foo-1.2.0rc2": {},
            "foo-1.2.0": {},
            "foo-1.2.0alpha2": {},
        },
    }
    package._filter_all_releases(test_mirror.filters.filter_release_plugins())
    assert package.releases == {"foo-1.2.0": {}}
def test__filter__nomatch_package(self) -> None:
    """A project not named in the blocklist must survive filtering."""
    mock_config(
        """\
[blocklist]
plugins =
    blocklist_project
packages =
    foo
"""
    )
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    bs_mirror.packages_to_sync = {"foo2": ""}
    bs_mirror._filter_packages()
    self.assertIn("foo2", bs_mirror.packages_to_sync.keys())
def test_latest_releases_ensure_reusable(self) -> None:
    """
    Tests the filter multiple times to ensure no state is preserved
    and thus is reusable between packages
    """
    mock_config(self.config_contents)
    bs_mirror = BandersnatchMirror(Path("."), Master(url="https://foo.bar.com"))
    pkg_foo = Package("foo", 1)
    pkg_foo._metadata = {
        "info": {"name": "foo", "version": "2.0.0"},
        "releases": {
            "0.1.1": {},
            "0.1.2": {},
            "0.1.3": {},
            "1.0.0": {},
            "1.1.0": {},
            "1.2.0": {},
            "2.0.0": {},
        },
    }
    pkg_bar = Package("bar", 1)
    pkg_bar._metadata = {
        "info": {"name": "bar", "version": "0.3.0"},
        "releases": {
            "0.1.0": {},
            "0.1.1": {},
            "0.1.2": {},
            "0.1.3": {},
            "0.1.4": {},
            "0.1.5": {},
            "0.2.0": {},
            "0.3.0": {},
        },
    }
    # Intentionally fetch the plugin list twice, exactly as a real sync would
    # per package, to prove no state leaks between uses.
    pkg_foo.filter_all_releases(bs_mirror.filters.filter_release_plugins())
    pkg_bar.filter_all_releases(bs_mirror.filters.filter_release_plugins())
    assert pkg_foo.releases == {"1.2.0": {}, "2.0.0": {}}
    assert pkg_bar.releases == {"0.2.0": {}, "0.3.0": {}}