def test_sha256(self):
    """SHA256 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'),
        (self.text, 'e3d4a1135181fe156d61455615bb6296198e8ca5b2f20ddeb85cb4cd27f62320'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'SHA256'), expected)
def test_sha3_512(self):
    """SHA3_512 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', 'a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26'),
        (self.text, '6634c004dc31822fa65c2f1e2e3bbf0cfa35085653cca1ca9ca42f8f3f13c908405e0b665918146181c9fc9a9d793fc05429d669c35a55517820dfaa071425ca'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'SHA3_512'), expected)
    except DigestException:
        self.skipTest('SHA3_512 implementation not available')
def test_streebog512(self):
    """STREEBOG512 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a'),
        (self.text, '330f5c26437f4e22c0163c72b12e93b8c27202f0750627355bdee43a0e0b253c90fbf0a27adbe5414019ff01ed84b7b240a1da1cbe10fae3adffc39c2d87a51f'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'STREEBOG512'), expected)
    except DigestException:
        self.skipTest('STREEBOG512 implementation not available')
def test_rmd160(self):
    """RMD160 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '9c1185a5c5e9fc54612808977ee8f548b2258d31'),
        (self.text, 'fc453174f63fc011d6f64abd2c45fb6a53c8239b'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'RMD160'), expected)
    except DigestException:
        self.skipTest('RMD160 implementation not available')
def test_streebog256(self):
    """STREEBOG256 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb'),
        (self.text, '4992f1239c46f15b89e7b83ded4d83fb5966da3692788a4a1a6d118f78c08444'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'STREEBOG256'), expected)
    except DigestException:
        self.skipTest('STREEBOG256 implementation not available')
def test_blake2s(self):
    """BLAKE2S digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9'),
        (self.text, '823ab2429f27690450efe888b0404d092fe2ee72a9bd63d5342c251b4dbb373d'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'BLAKE2S'), expected)
    except DigestException:
        self.skipTest('BLAKE2S implementation not available')
def test_rmd160(self):
    """RMD160 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '9c1185a5c5e9fc54612808977ee8f548b2258d31'),
        (self.text, 'fc453174f63fc011d6f64abd2c45fb6a53c8239b'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'RMD160'), expected)
    except DigestException:
        self.skipTest('RMD160 implementation not available')
def test_sha3_256(self):
    """SHA3_256 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', 'a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a'),
        (self.text, '932fc0498ebb865406f9b6606280939283aa8a148562e39fd095a5d22bdec5c6'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'SHA3_256'), expected)
    except DigestException:
        self.skipTest('SHA3_256 implementation not available')
def test_blake2b(self):
    """BLAKE2B digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce'),
        (self.text, '84cb3c88838c7147bc9797c6525f812adcdcb40137f9c075963e3a3ed1fe06aaeeb4d2bb5589bad286864dc1aa834cfc4d66b8d7e4d4a246d91d45ce3a6eee43'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'BLAKE2B'), expected)
    except DigestException:
        self.skipTest('BLAKE2B implementation not available')
def test_whirlpool(self):
    """WHIRLPOOL digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3'),
        (self.text, '8f556a079b87057f19e0880eed6d833e40c916f4b133196f6842281a2517873074d399832470c11ee251696b4844a10197714a069ba3e3415c8a4eced8f91b48'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'WHIRLPOOL'), expected)
    except DigestException:
        self.skipTest('WHIRLPOOL implementation not available')
def test_sha512(self):
    """SHA512 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),
        (self.text, 'c8eaa902d48a2c82c2185a92f1c8bab8115c63c8d7a9966a8e8e81b07abcb9762f4707a6b27075e9d720277ba9fec072a59840d6355dd2ee64681d8f39a50856'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'SHA512'), expected)
def test_rmd160(self):
    """RMD160 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b"", "9c1185a5c5e9fc54612808977ee8f548b2258d31"),
        (self.text, "fc453174f63fc011d6f64abd2c45fb6a53c8239b"),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, "RMD160"), expected)
    except DigestException:
        self.skipTest("RMD160 implementation not available")
def test_sha3_512(self):
    """SHA3_512 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', 'a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26'),
        (self.text, '6634c004dc31822fa65c2f1e2e3bbf0cfa35085653cca1ca9ca42f8f3f13c908405e0b665918146181c9fc9a9d793fc05429d669c35a55517820dfaa071425ca'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'SHA3_512'), expected)
    except DigestException:
        self.skipTest('SHA3_512 implementation not available')
def test_blake2s(self):
    """BLAKE2S digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9'),
        (self.text, '823ab2429f27690450efe888b0404d092fe2ee72a9bd63d5342c251b4dbb373d'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'BLAKE2S'), expected)
    except DigestException:
        self.skipTest('BLAKE2S implementation not available')
def test_blake2b(self):
    """BLAKE2B digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce'),
        (self.text, '84cb3c88838c7147bc9797c6525f812adcdcb40137f9c075963e3a3ed1fe06aaeeb4d2bb5589bad286864dc1aa834cfc4d66b8d7e4d4a246d91d45ce3a6eee43'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'BLAKE2B'), expected)
    except DigestException:
        self.skipTest('BLAKE2B implementation not available')
def test_streebog512(self):
    """STREEBOG512 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a'),
        (self.text, '330f5c26437f4e22c0163c72b12e93b8c27202f0750627355bdee43a0e0b253c90fbf0a27adbe5414019ff01ed84b7b240a1da1cbe10fae3adffc39c2d87a51f'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'STREEBOG512'), expected)
    except DigestException:
        self.skipTest('STREEBOG512 implementation not available')
def test_streebog256(self):
    """STREEBOG256 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb'),
        (self.text, '4992f1239c46f15b89e7b83ded4d83fb5966da3692788a4a1a6d118f78c08444'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'STREEBOG256'), expected)
    except DigestException:
        self.skipTest('STREEBOG256 implementation not available')
def test_whirlpool(self):
    """WHIRLPOOL digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', '19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3'),
        (self.text, '8f556a079b87057f19e0880eed6d833e40c916f4b133196f6842281a2517873074d399832470c11ee251696b4844a10197714a069ba3e3415c8a4eced8f91b48'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'WHIRLPOOL'), expected)
    except DigestException:
        self.skipTest('WHIRLPOOL implementation not available')
def test_sha3_256(self):
    """SHA3_256 digests must match known vectors; skip if unimplemented."""
    vectors = (
        (b'', 'a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a'),
        (self.text, '932fc0498ebb865406f9b6606280939283aa8a148562e39fd095a5d22bdec5c6'),
    )
    try:
        for data, expected in vectors:
            self.assertEqual(checksum_str(data, 'SHA3_256'), expected)
    except DigestException:
        self.skipTest('SHA3_256 implementation not available')
def get_path(self, filename):
    """Return the layout path for *filename*: one directory component per
    cutoff (each a prefix slice of the filename's hash), then the filename.

    Each cutoff is a bit count; dividing by 4 converts it to a number of
    hex digits consumed from the front of the remaining hash.
    """
    remaining = checksum_str(filename.encode('utf8'), self.algo)
    components = []
    for cutoff in self.cutoffs:
        assert cutoff % 4 == 0
        digits = cutoff // 4
        components.append(remaining[:digits])
        remaining = remaining[digits:]
    components.append(filename)
    return '/'.join(components)
def test_content_hash_layout(self):
    """Validate ContentHashLayout argument checking and path generation."""
    # verify_args: cutoffs must be ":"-separated multiples of 4 and the
    # algorithm must be recognized; anything else is rejected.
    self.assertFalse(ContentHashLayout.verify_args(("content-hash", )))
    self.assertTrue(
        ContentHashLayout.verify_args(("content-hash", "SHA1", "8")))
    self.assertFalse(
        ContentHashLayout.verify_args(
            ("content-hash", "INVALID-HASH", "8")))
    self.assertTrue(
        ContentHashLayout.verify_args(("content-hash", "SHA1", "4:8:12")))
    self.assertFalse(
        ContentHashLayout.verify_args(("content-hash", "SHA1", "3")))
    self.assertFalse(
        ContentHashLayout.verify_args(("content-hash", "SHA1", "junk")))
    self.assertFalse(
        ContentHashLayout.verify_args(
            ("content-hash", "SHA1", "4:8:junk")))
    # DistfileName carrying the digests of empty content for every
    # default manifest hash.
    filename = DistfileName(
        "foo-1.tar.gz",
        digests=dict((algo, checksum_str(b"", hashname=algo))
                     for algo in MANIFEST2_HASH_DEFAULTS),
    )
    # Raise KeyError for a hash algorithm SHA1 which is not in MANIFEST2_HASH_DEFAULTS.
    self.assertRaises(KeyError,
                      ContentHashLayout("SHA1", "4").get_path, filename)
    # Raise AttributeError for a plain string argument.
    self.assertRaises(AttributeError,
                      ContentHashLayout("SHA512", "4").get_path,
                      str(filename))
    # Path is built from prefix slices of the SHA512 digest (one
    # directory level per cutoff) followed by the full digest.
    self.assertEqual(
        ContentHashLayout("SHA512", "4").get_path(filename),
        "c/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
    )
    self.assertEqual(
        ContentHashLayout("SHA512", "8").get_path(filename),
        "cf/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
    )
    self.assertEqual(
        ContentHashLayout("SHA512", "8:16").get_path(filename),
        "cf/83e1/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
    )
    self.assertEqual(
        ContentHashLayout("SHA512", "8:16:24").get_path(filename),
        "cf/83e1/357eef/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
    )
def test_content_hash_layout(self):
    """Validate ContentHashLayout argument checking and path generation."""
    # verify_args: cutoffs must be ":"-separated multiples of 4 and the
    # algorithm must be recognized; anything else is rejected.
    self.assertFalse(ContentHashLayout.verify_args(('content-hash', )))
    self.assertTrue(
        ContentHashLayout.verify_args(('content-hash', 'SHA1', '8')))
    self.assertFalse(
        ContentHashLayout.verify_args(
            ('content-hash', 'INVALID-HASH', '8')))
    self.assertTrue(
        ContentHashLayout.verify_args(('content-hash', 'SHA1', '4:8:12')))
    self.assertFalse(
        ContentHashLayout.verify_args(('content-hash', 'SHA1', '3')))
    self.assertFalse(
        ContentHashLayout.verify_args(('content-hash', 'SHA1', 'junk')))
    self.assertFalse(
        ContentHashLayout.verify_args(
            ('content-hash', 'SHA1', '4:8:junk')))
    # DistfileName carrying the digests of empty content for every
    # default manifest hash.
    filename = DistfileName(
        'foo-1.tar.gz',
        digests=dict((algo, checksum_str(b'', hashname=algo))
                     for algo in MANIFEST2_HASH_DEFAULTS),
    )
    # Raise KeyError for a hash algorithm SHA1 which is not in MANIFEST2_HASH_DEFAULTS.
    self.assertRaises(KeyError,
                      ContentHashLayout('SHA1', '4').get_path, filename)
    # Raise AttributeError for a plain string argument.
    self.assertRaises(AttributeError,
                      ContentHashLayout('SHA512', '4').get_path,
                      str(filename))
    # Path is built from prefix slices of the SHA512 digest (one
    # directory level per cutoff) followed by the full digest.
    self.assertEqual(
        ContentHashLayout('SHA512', '4').get_path(filename),
        'c/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'
    )
    self.assertEqual(
        ContentHashLayout('SHA512', '8').get_path(filename),
        'cf/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'
    )
    self.assertEqual(
        ContentHashLayout('SHA512', '8:16').get_path(filename),
        'cf/83e1/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'
    )
    self.assertEqual(
        ContentHashLayout('SHA512', '8:16:24').get_path(filename),
        'cf/83e1/357eef/cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'
    )
def test_filename_hash_layout_get_filenames(self):
    """get_filenames must recover the distfile name under every layout."""
    filename = DistfileName(
        "foo-1.tar.gz",
        digests={
            algo: checksum_str(b"", hashname=algo)
            for algo in MANIFEST2_HASH_DEFAULTS
        },
    )
    layouts = (
        FlatLayout(),
        FilenameHashLayout("SHA1", "4"),
        FilenameHashLayout("SHA1", "8"),
        FilenameHashLayout("SHA1", "8:16"),
        FilenameHashLayout("SHA1", "8:16:24"),
        ContentHashLayout("SHA512", "8:8:8"),
    )
    for layout in layouts:
        distdir = tempfile.mkdtemp()
        try:
            # Create an empty distfile at the path this layout assigns.
            path = os.path.join(distdir, layout.get_path(filename))
            try:
                os.makedirs(os.path.dirname(path))
            except OSError:
                # Directory may already exist (e.g. flat layout).
                pass
            with open(path, "wb"):
                pass
            file_list = list(layout.get_filenames(distdir))
            self.assertTrue(len(file_list) > 0)
            for filename_result in file_list:
                # Content-hash layouts yield DistfileName instances with
                # digests; filename layouts yield plain strings.
                if isinstance(filename_result, DistfileName):
                    self.assertTrue(
                        filename_result.digests_equal(filename))
                else:
                    self.assertEqual(filename_result, str(filename))
        finally:
            shutil.rmtree(distdir)
def test_sha1(self):
    """SHA1 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b"", "da39a3ee5e6b4b0d3255bfef95601890afd80709"),
        (self.text, "5c572017d4e4d49e4aa03a2eda12dbb54a1e2e4f"),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, "SHA1"), expected)
def test_md5(self):
    """MD5 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'd41d8cd98f00b204e9800998ecf8427e'),
        (self.text, '094c3bf4732f59b39d577e9726f1e934'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'MD5'), expected)
def test_sha1(self):
    """SHA1 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'da39a3ee5e6b4b0d3255bfef95601890afd80709'),
        (self.text, '5c572017d4e4d49e4aa03a2eda12dbb54a1e2e4f'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'SHA1'), expected)
def test_md5(self):
    """MD5 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'd41d8cd98f00b204e9800998ecf8427e'),
        (self.text, '094c3bf4732f59b39d577e9726f1e934'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'MD5'), expected)
def test_sha1(self):
    """SHA1 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'da39a3ee5e6b4b0d3255bfef95601890afd80709'),
        (self.text, '5c572017d4e4d49e4aa03a2eda12dbb54a1e2e4f'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'SHA1'), expected)
def test_sha512(self):
    """SHA512 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),
        (self.text, 'c8eaa902d48a2c82c2185a92f1c8bab8115c63c8d7a9966a8e8e81b07abcb9762f4707a6b27075e9d720277ba9fec072a59840d6355dd2ee64681d8f39a50856'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'SHA512'), expected)
def test_sha256(self):
    """SHA256 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b'', 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'),
        (self.text, 'e3d4a1135181fe156d61455615bb6296198e8ca5b2f20ddeb85cb4cd27f62320'),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, 'SHA256'), expected)
def _testEbuildFetch(
    self,
    loop,
    scheme,
    host,
    orig_distfiles,
    ebuilds,
    content,
    server,
    playground,
    ro_distdir,
):
    """Exercise fetch(), digestgen(), emirrordist and EbuildFetcher against
    every supported mirror layout, using the given HTTP test *server*.

    Side effects are confined to the *playground* prefix and *ro_distdir*;
    FETCHCOMMAND/RESUMECOMMAND, DISTDIR permissions and FEATURES are
    toggled and restored via try/finally throughout.
    """
    # Each entry is the layout.conf content for one pass of the test loop.
    mirror_layouts = (
        (
            "[structure]",
            "0=filename-hash BLAKE2B 8",
            "1=flat",
        ),
        (
            "[structure]",
            "1=filename-hash BLAKE2B 8",
            "0=flat",
        ),
        (
            "[structure]",
            "0=content-hash SHA512 8:8:8",
            "1=flat",
        ),
    )
    # Make the configured fetch/resume binaries reachable from the
    # playground's usr/bin via symlinks.
    fetchcommand = portage.util.shlex_split(
        playground.settings["FETCHCOMMAND"])
    fetch_bin = portage.process.find_binary(fetchcommand[0])
    if fetch_bin is None:
        self.skipTest("FETCHCOMMAND not found: {}".format(
            playground.settings["FETCHCOMMAND"]))
    eubin = os.path.join(playground.eprefix, "usr", "bin")
    os.symlink(fetch_bin, os.path.join(eubin, os.path.basename(fetch_bin)))
    resumecommand = portage.util.shlex_split(
        playground.settings["RESUMECOMMAND"])
    resume_bin = portage.process.find_binary(resumecommand[0])
    if resume_bin is None:
        self.skipTest("RESUMECOMMAND not found: {}".format(
            playground.settings["RESUMECOMMAND"]))
    if resume_bin != fetch_bin:
        os.symlink(resume_bin,
                   os.path.join(eubin, os.path.basename(resume_bin)))
    root_config = playground.trees[playground.eroot]["root_config"]
    portdb = root_config.trees["porttree"].dbapi

    def run_async(func, *args, **kwargs):
        # Run a blocking callable in a forked child, driven by the loop.
        with ForkExecutor(loop=loop) as executor:
            return loop.run_until_complete(
                loop.run_in_executor(
                    executor, functools.partial(func, *args, **kwargs)))

    for layout_lines in mirror_layouts:
        settings = config(clone=playground.settings)
        layout_data = "".join("{}\n".format(line) for line in layout_lines)
        mirror_conf = MirrorLayoutConfig()
        mirror_conf.read_from_file(io.StringIO(layout_data))
        layouts = mirror_conf.get_all_layouts()
        # Serve layout.conf and each distfile (at every layout's mirror
        # path plus an upstream-style path) from the test HTTP server.
        content["/distfiles/layout.conf"] = layout_data.encode("utf8")
        distfiles = {}
        for k, v in orig_distfiles.items():
            filename = DistfileName(
                k,
                digests=dict((algo, checksum_str(v, hashname=algo))
                             for algo in MANIFEST2_HASH_DEFAULTS),
            )
            distfiles[filename] = v

            # mirror path
            for layout in layouts:
                content["/distfiles/" + layout.get_path(filename)] = v
            # upstream path
            content["/distfiles/{}.txt".format(k)] = v

        # Start each pass with a fresh DISTDIR containing only layout.conf.
        shutil.rmtree(settings["DISTDIR"])
        os.makedirs(settings["DISTDIR"])
        with open(os.path.join(settings["DISTDIR"], "layout.conf"),
                  "wt") as f:
            f.write(layout_data)

        # content-hash layouts require an emirrordist content database.
        if any(
                isinstance(layout, ContentHashLayout)
                for layout in layouts):
            content_db = os.path.join(playground.eprefix,
                                      "var/db/emirrordist/content.db")
            os.makedirs(os.path.dirname(content_db), exist_ok=True)
            try:
                os.unlink(content_db)
            except OSError:
                pass
        else:
            content_db = None

        # Demonstrate that fetch preserves a stale file in DISTDIR when no digests are given.
        foo_uri = {
            "foo": ("{scheme}://{host}:{port}/distfiles/foo".format(
                scheme=scheme, host=host, port=server.server_port), )
        }
        foo_path = os.path.join(settings["DISTDIR"], "foo")
        foo_stale_content = b"stale content\n"
        with open(foo_path, "wb") as f:
            f.write(b"stale content\n")

        self.assertTrue(
            bool(run_async(fetch, foo_uri, settings, try_mirrors=False)))
        with open(foo_path, "rb") as f:
            self.assertEqual(f.read(), foo_stale_content)
        with open(foo_path, "rb") as f:
            self.assertNotEqual(f.read(), distfiles["foo"])

        # Use force=True to update the stale file.
        self.assertTrue(
            bool(
                run_async(fetch,
                          foo_uri,
                          settings,
                          try_mirrors=False,
                          force=True)))
        with open(foo_path, "rb") as f:
            self.assertEqual(f.read(), distfiles["foo"])

        # Test force=True with FEATURES=skiprocheck, using read-only DISTDIR.
        # FETCHCOMMAND is set to temporarily chmod +w DISTDIR. Note that
        # FETCHCOMMAND must perform atomic rename itself due to read-only
        # DISTDIR.
        with open(foo_path, "wb") as f:
            f.write(b"stale content\n")
        orig_fetchcommand = settings["FETCHCOMMAND"]
        orig_distdir_mode = os.stat(settings["DISTDIR"]).st_mode
        temp_fetchcommand = os.path.join(eubin, "fetchcommand")
        with open(temp_fetchcommand, "w") as f:
            # Wrapper script: make DISTDIR writable for the duration of
            # the real fetch command, then atomically rename the download.
            f.write("""
set -e
URI=$1
DISTDIR=$2
FILE=$3
trap 'chmod a-w "${DISTDIR}"' EXIT
chmod ug+w "${DISTDIR}"
%s
mv -f "${DISTDIR}/${FILE}.__download__" "${DISTDIR}/${FILE}"
""" % orig_fetchcommand.replace("${FILE}", "${FILE}.__download__"))
        settings[
            "FETCHCOMMAND"] = '"%s" "%s" "${URI}" "${DISTDIR}" "${FILE}"' % (
                BASH_BINARY,
                temp_fetchcommand,
            )
        settings.features.add("skiprocheck")
        settings.features.remove("distlocks")
        os.chmod(settings["DISTDIR"], 0o555)
        try:
            self.assertTrue(
                bool(
                    run_async(fetch,
                              foo_uri,
                              settings,
                              try_mirrors=False,
                              force=True)))
        finally:
            settings["FETCHCOMMAND"] = orig_fetchcommand
            os.chmod(settings["DISTDIR"], orig_distdir_mode)
            settings.features.remove("skiprocheck")
            settings.features.add("distlocks")
            os.unlink(temp_fetchcommand)

        with open(foo_path, "rb") as f:
            self.assertEqual(f.read(), distfiles["foo"])

        # Test emirrordist invocation.
        emirrordist_cmd = (
            portage._python_interpreter,
            "-b",
            "-Wd",
            os.path.join(self.bindir, "emirrordist"),
            "--distfiles",
            settings["DISTDIR"],
            "--config-root",
            settings["EPREFIX"],
            "--delete",
            "--repositories-configuration",
            settings.repositories.config_string(),
            "--repo",
            "test_repo",
            "--mirror",
        )

        if content_db is not None:
            emirrordist_cmd = emirrordist_cmd + (
                "--content-db",
                content_db,
            )

        env = settings.environ()
        env["PYTHONPATH"] = ":".join(
            filter(
                None,
                [PORTAGE_PYM_PATH] +
                os.environ.get("PYTHONPATH", "").split(":"),
            ))

        # Remove the distfiles so emirrordist has to fetch them.
        for k in distfiles:
            try:
                os.unlink(os.path.join(settings["DISTDIR"], k))
            except OSError:
                pass

        proc = loop.run_until_complete(
            asyncio.create_subprocess_exec(*emirrordist_cmd, env=env))
        self.assertEqual(loop.run_until_complete(proc.wait()), 0)

        # emirrordist must have stored every distfile at the primary
        # layout's path with the expected content.
        for k in distfiles:
            with open(
                    os.path.join(settings["DISTDIR"],
                                 layouts[0].get_path(k)), "rb") as f:
                self.assertEqual(f.read(), distfiles[k])

        if content_db is not None:
            loop.run_until_complete(
                self._test_content_db(
                    emirrordist_cmd,
                    env,
                    layouts,
                    content_db,
                    distfiles,
                    settings,
                    portdb,
                ))

        # Tests only work with one ebuild at a time, so the config
        # pool only needs a single config instance.
        class config_pool:
            @staticmethod
            def allocate():
                return settings

            @staticmethod
            def deallocate(settings):
                pass

        def async_fetch(pkg, ebuild_path):
            # Start an EbuildFetcher on the loop and return its wait future.
            fetcher = EbuildFetcher(
                config_pool=config_pool,
                ebuild_path=ebuild_path,
                fetchonly=False,
                fetchall=True,
                pkg=pkg,
                scheduler=loop,
            )
            fetcher.start()
            return fetcher.async_wait()

        for cpv in ebuilds:
            metadata = dict(
                zip(
                    Package.metadata_keys,
                    portdb.aux_get(cpv, Package.metadata_keys),
                ))
            pkg = Package(
                built=False,
                cpv=cpv,
                installed=False,
                metadata=metadata,
                root_config=root_config,
                type_name="ebuild",
            )
            settings.setcpv(pkg)
            ebuild_path = portdb.findname(pkg.cpv)
            portage.doebuild_environment(ebuild_path,
                                         "fetch",
                                         settings=settings,
                                         db=portdb)

            # Test good files in DISTDIR
            for k in settings["AA"].split():
                os.stat(os.path.join(settings["DISTDIR"], k))
            self.assertEqual(
                loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
            for k in settings["AA"].split():
                with open(os.path.join(settings["DISTDIR"], k),
                          "rb") as f:
                    self.assertEqual(f.read(), distfiles[k])

            # Test digestgen with fetch
            os.unlink(
                os.path.join(os.path.dirname(ebuild_path), "Manifest"))
            for k in settings["AA"].split():
                os.unlink(os.path.join(settings["DISTDIR"], k))
            with ForkExecutor(loop=loop) as executor:
                self.assertTrue(
                    bool(
                        loop.run_until_complete(
                            loop.run_in_executor(
                                executor,
                                functools.partial(digestgen,
                                                  mysettings=settings,
                                                  myportdb=portdb),
                            ))))
            for k in settings["AA"].split():
                with open(os.path.join(settings["DISTDIR"], k),
                          "rb") as f:
                    self.assertEqual(f.read(), distfiles[k])

            # Test missing files in DISTDIR
            for k in settings["AA"].split():
                os.unlink(os.path.join(settings["DISTDIR"], k))
            self.assertEqual(
                loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
            for k in settings["AA"].split():
                with open(os.path.join(settings["DISTDIR"], k),
                          "rb") as f:
                    self.assertEqual(f.read(), distfiles[k])

            # Test empty files in DISTDIR
            for k in settings["AA"].split():
                file_path = os.path.join(settings["DISTDIR"], k)
                with open(file_path, "wb") as f:
                    pass
                self.assertEqual(os.stat(file_path).st_size, 0)
            self.assertEqual(
                loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
            for k in settings["AA"].split():
                with open(os.path.join(settings["DISTDIR"], k),
                          "rb") as f:
                    self.assertEqual(f.read(), distfiles[k])

            # Test non-empty files containing null bytes in DISTDIR
            for k in settings["AA"].split():
                file_path = os.path.join(settings["DISTDIR"], k)
                with open(file_path, "wb") as f:
                    f.write(len(distfiles[k]) * b"\0")
                self.assertEqual(
                    os.stat(file_path).st_size, len(distfiles[k]))
            self.assertEqual(
                loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
            for k in settings["AA"].split():
                with open(os.path.join(settings["DISTDIR"], k),
                          "rb") as f:
                    self.assertEqual(f.read(), distfiles[k])

            # Test PORTAGE_RO_DISTDIRS
            settings["PORTAGE_RO_DISTDIRS"] = '"{}"'.format(ro_distdir)
            orig_fetchcommand = settings["FETCHCOMMAND"]
            orig_resumecommand = settings["RESUMECOMMAND"]
            try:
                settings["FETCHCOMMAND"] = settings["RESUMECOMMAND"] = ""
                for k in settings["AA"].split():
                    file_path = os.path.join(settings["DISTDIR"], k)
                    os.rename(file_path, os.path.join(ro_distdir, k))
                self.assertEqual(
                    loop.run_until_complete(async_fetch(pkg, ebuild_path)),
                    0)
                for k in settings["AA"].split():
                    file_path = os.path.join(settings["DISTDIR"], k)
                    # Files from a read-only distdir are symlinked in.
                    self.assertTrue(os.path.islink(file_path))
                    with open(file_path, "rb") as f:
                        self.assertEqual(f.read(), distfiles[k])
                    os.unlink(file_path)
            finally:
                settings.pop("PORTAGE_RO_DISTDIRS")
                settings["FETCHCOMMAND"] = orig_fetchcommand
                settings["RESUMECOMMAND"] = orig_resumecommand

            # Test local filesystem in GENTOO_MIRRORS
            orig_mirrors = settings["GENTOO_MIRRORS"]
            orig_fetchcommand = settings["FETCHCOMMAND"]
            try:
                settings["GENTOO_MIRRORS"] = ro_distdir
                settings["FETCHCOMMAND"] = settings["RESUMECOMMAND"] = ""
                self.assertEqual(
                    loop.run_until_complete(async_fetch(pkg, ebuild_path)),
                    0)
                for k in settings["AA"].split():
                    with open(os.path.join(settings["DISTDIR"], k),
                              "rb") as f:
                        self.assertEqual(f.read(), distfiles[k])
            finally:
                settings["GENTOO_MIRRORS"] = orig_mirrors
                settings["FETCHCOMMAND"] = orig_fetchcommand
                settings["RESUMECOMMAND"] = orig_resumecommand

            # Test readonly DISTDIR
            orig_distdir_mode = os.stat(settings["DISTDIR"]).st_mode
            try:
                os.chmod(settings["DISTDIR"], 0o555)
                self.assertEqual(
                    loop.run_until_complete(async_fetch(pkg, ebuild_path)),
                    0)
                for k in settings["AA"].split():
                    with open(os.path.join(settings["DISTDIR"], k),
                              "rb") as f:
                        self.assertEqual(f.read(), distfiles[k])
            finally:
                os.chmod(settings["DISTDIR"], orig_distdir_mode)

            # Test parallel-fetch mode
            settings["PORTAGE_PARALLEL_FETCHONLY"] = "1"
            try:
                self.assertEqual(
                    loop.run_until_complete(async_fetch(pkg, ebuild_path)),
                    0)
                for k in settings["AA"].split():
                    with open(os.path.join(settings["DISTDIR"], k),
                              "rb") as f:
                        self.assertEqual(f.read(), distfiles[k])
                for k in settings["AA"].split():
                    os.unlink(os.path.join(settings["DISTDIR"], k))
                self.assertEqual(
                    loop.run_until_complete(async_fetch(pkg, ebuild_path)),
                    0)
                for k in settings["AA"].split():
                    with open(os.path.join(settings["DISTDIR"], k),
                              "rb") as f:
                        self.assertEqual(f.read(), distfiles[k])
            finally:
                settings.pop("PORTAGE_PARALLEL_FETCHONLY")

            # Test RESUMECOMMAND
            orig_resume_min_size = settings[
                "PORTAGE_FETCH_RESUME_MIN_SIZE"]
            try:
                settings["PORTAGE_FETCH_RESUME_MIN_SIZE"] = "2"
                for k in settings["AA"].split():
                    file_path = os.path.join(settings["DISTDIR"], k)
                    os.unlink(file_path)
                    # Leave a 2-byte partial download to be resumed.
                    with open(file_path + _download_suffix, "wb") as f:
                        f.write(distfiles[k][:2])
                self.assertEqual(
                    loop.run_until_complete(async_fetch(pkg, ebuild_path)),
                    0)
                for k in settings["AA"].split():
                    with open(os.path.join(settings["DISTDIR"], k),
                              "rb") as f:
                        self.assertEqual(f.read(), distfiles[k])
            finally:
                settings[
                    "PORTAGE_FETCH_RESUME_MIN_SIZE"] = orig_resume_min_size

            # Test readonly DISTDIR + skiprocheck, with FETCHCOMMAND set to temporarily chmod DISTDIR
            orig_fetchcommand = settings["FETCHCOMMAND"]
            orig_distdir_mode = os.stat(settings["DISTDIR"]).st_mode
            for k in settings["AA"].split():
                os.unlink(os.path.join(settings["DISTDIR"], k))
            try:
                os.chmod(settings["DISTDIR"], 0o555)
                settings["FETCHCOMMAND"] = (
                    '"%s" -c "chmod ug+w \\"${DISTDIR}\\"; %s; status=\\$?; chmod a-w \\"${DISTDIR}\\"; exit \\$status"'
                    % (BASH_BINARY, orig_fetchcommand.replace('"', '\\"')))
                settings.features.add("skiprocheck")
                settings.features.remove("distlocks")
                self.assertEqual(
                    loop.run_until_complete(async_fetch(pkg, ebuild_path)),
                    0)
            finally:
                settings["FETCHCOMMAND"] = orig_fetchcommand
                os.chmod(settings["DISTDIR"], orig_distdir_mode)
                settings.features.remove("skiprocheck")
                settings.features.add("distlocks")
def test_md5(self):
    """MD5 digests must match the known vectors for b'' and self.text."""
    vectors = (
        (b"", "d41d8cd98f00b204e9800998ecf8427e"),
        (self.text, "094c3bf4732f59b39d577e9726f1e934"),
    )
    for data, expected in vectors:
        self.assertEqual(checksum_str(data, "MD5"), expected)