def cache(self, *, filename: str, algorithm: str, hash: str) -> Optional[str]:
    """Cache a file revision with hash in XDG cache, unless it already exists.

    :param str filename: path to the file to cache.
    :param str algorithm: algorithm used to calculate the hash as
                          understood by hashlib.
    :param str hash: hash for filename calculated with algorithm.
    :returns: path to cached file, or None if verification or copying fails.
    """
    # First we verify the provided hash matches the file's actual digest.
    calculated_hash = calculate_hash(filename, algorithm=algorithm)
    if calculated_hash != hash:
        logger.warning("Skipping caching of {!r} as the expected "
                       "hash does not match the one "
                       "provided".format(filename))
        return None
    cached_file_path = os.path.join(self.file_cache, algorithm, hash)
    os.makedirs(os.path.dirname(cached_file_path), exist_ok=True)
    # Copy to a temporary name and atomically rename into place: an
    # interrupted copy straight to cached_file_path would leave a truncated
    # file that a later os.path.isfile() check would treat as a valid entry.
    in_progress_path = cached_file_path + ".part"
    try:
        if not os.path.isfile(cached_file_path):
            # this must not be hard-linked, as rebuilding a snap
            # with changes should invalidate the cache, hence avoids
            # using fileutils.link_or_copy.
            shutil.copyfile(filename, in_progress_path)
            os.replace(in_progress_path, cached_file_path)
    except OSError:
        logger.warning("Unable to cache file {}.".format(cached_file_path))
        # Best effort: do not leave a partial file behind in the cache dir.
        if os.path.isfile(in_progress_path):
            try:
                os.remove(in_progress_path)
            except OSError:
                pass
        return None
    return cached_file_path
def cache(self, *, filename, algorithm, hash):
    """Cache a file revision with hash in XDG cache, unless it already exists.

    :param str filename: path to the file to cache.
    :param str algorithm: algorithm used to calculate the hash as
                          understood by hashlib.
    :param str hash: hash for filename calculated with algorithm.
    :returns: path to cached file, or None if verification or copying fails.
    """
    # First we verify the provided hash matches the file's actual digest.
    calculated_hash = calculate_hash(filename, algorithm=algorithm)
    if calculated_hash != hash:
        logger.warning(
            "Skipping caching of {!r} as the expected "
            "hash does not match the one "
            "provided".format(filename)
        )
        return None
    cached_file_path = os.path.join(self.file_cache, algorithm, hash)
    os.makedirs(os.path.dirname(cached_file_path), exist_ok=True)
    # Copy to a temporary name and atomically rename into place: an
    # interrupted copy straight to cached_file_path would leave a truncated
    # file that a later os.path.isfile() check would treat as a valid entry.
    in_progress_path = cached_file_path + ".part"
    try:
        if not os.path.isfile(cached_file_path):
            # this must not be hard-linked, as rebuilding a snap
            # with changes should invalidate the cache, hence avoids
            # using fileutils.link_or_copy.
            shutil.copyfile(filename, in_progress_path)
            os.replace(in_progress_path, cached_file_path)
    except OSError:
        logger.warning("Unable to cache file {}.".format(cached_file_path))
        # Best effort: do not leave a partial file behind in the cache dir.
        if os.path.isfile(in_progress_path):
            try:
                os.remove(in_progress_path)
            except OSError:
                pass
        return None
    return cached_file_path
def verify(self, path: str) -> None:
    """Check that the file at path exists and matches the expected sha3-384 digest.

    :param str path: path to the downloaded file to verify.
    :raises errors.DownloadNotFoundError: if path does not exist.
    :raises errors.SHAMismatchError: if the calculated digest differs from
        self.sha3_384.
    """
    if not os.path.exists(path):
        raise errors.DownloadNotFoundError(path=path)

    digest = calculate_hash(path, algorithm="sha3_384")
    if digest != self.sha3_384:
        raise errors.SHAMismatchError(
            path=path, expected=self.sha3_384, calculated=digest)
def test_cache_and_retrieve(self, random_data_file, file_cache, algo):
    """A cached file lands under <algo>/<hash> and get() locates it again."""
    expected_digest = calculate_hash(random_data_file, algorithm=algo)
    expected_suffix = os.path.join(algo, expected_digest)

    cached = file_cache.cache(
        filename=random_data_file, algorithm=algo, hash=expected_digest)
    assert cached.endswith(expected_suffix)

    fetched = file_cache.get(algorithm=algo, hash=expected_digest)
    assert fetched.endswith(expected_suffix)
def test_cache_file_copy_error(self):
    """cache() returns None when the underlying file copy raises OSError."""
    with open('hash_file', 'w') as stub:
        stub.write('random stub data')
    digest = calculate_hash('hash_file', algorithm=self.algo)

    with patch('shutil.copyfile') as mock_copyfile:
        mock_copyfile.side_effect = OSError()
        result = self.file_cache.cache(
            filename='hash_file', algorithm=self.algo, hash=digest)
        self.assertThat(result, Is(None))
def test_cache_file_copy_error(self):
    """A failing shutil.copyfile must make cache() return None."""
    stub_path = 'hash_file'
    with open(stub_path, 'w') as stub_file:
        stub_file.write('random stub data')
    expected_digest = calculate_hash(stub_path, algorithm=self.algo)

    with patch('shutil.copyfile', side_effect=OSError()):
        cached = self.file_cache.cache(
            filename=stub_path, algorithm=self.algo, hash=expected_digest)
        self.assertThat(cached, Is(None))
def test_cache_and_retrieve(self):
    """A cached file lands under <algo>/<hash> and get() locates it again."""
    with open("hash_file", "w") as stub:
        stub.write("random stub data")
    digest = calculate_hash("hash_file", algorithm=self.algo)
    expected_suffix = os.path.join(self.algo, digest)

    cached = self.file_cache.cache(
        filename="hash_file", algorithm=self.algo, hash=digest)
    self.assertThat(cached, EndsWith(expected_suffix))

    fetched = self.file_cache.get(algorithm=self.algo, hash=digest)
    self.assertThat(fetched, EndsWith(expected_suffix))
def test_cache_file_copy_error(self, monkeypatch, random_data_file, file_cache, algo):
    """cache() returns None when copying into the cache raises OSError."""
    digest = calculate_hash(random_data_file, algorithm=algo)

    def raise_oserror(*args, **kwargs):
        raise OSError()

    monkeypatch.setattr(shutil, "copyfile", raise_oserror)

    assert file_cache.cache(
        filename=random_data_file, algorithm=algo, hash=digest) is None
def verify_checksum(source_checksum: str, checkfile: str) -> Tuple:
    """Verifies that checkfile corresponds to the given source_checksum.

    :param str source_checksum: algorithm/hash expected for checkfile.
    :param str checkfile: the file to calculate the sum for with the
                          algorithm defined in source_checksum.
    :raises ValueError: if source_checksum is not of the form algorithm/hash.
    :raises DigestDoesNotMatchError: if checkfile does not match the expected
                                     hash calculated with the algorithm defined
                                     in source_checksum.
    :returns: a tuple consisting of the algorithm and the hash.
    """
    algorithm, expected_digest = split_checksum(source_checksum)

    actual_digest = calculate_hash(checkfile, algorithm=algorithm)
    if expected_digest != actual_digest:
        raise errors.DigestDoesNotMatchError(expected_digest, actual_digest)

    return (algorithm, expected_digest)
def verify_checksum(source_checksum, checkfile):
    """Verifies that checkfile corresponds to the given source_checksum.

    :param str source_checksum: algorithm/hash expected for checkfile.
    :param str checkfile: the file to calculate the sum for with the
                          algorithm defined in source_checksum.
    :raises ValueError: if source_checksum is not of the form algorithm/hash.
    :raises DigestDoesNotMatchError: if checkfile does not match the expected
                                     hash calculated with the algorithm
                                     defined in source_checksum.
    :returns: a tuple consisting of the algorithm and the hash.
    """
    # split_checksum raises ValueError for malformed "algorithm/hash" input.
    algorithm, digest = split_checksum(source_checksum)
    calculated_digest = calculate_hash(checkfile, algorithm=algorithm)
    if digest != calculated_digest:
        raise errors.DigestDoesNotMatchError(digest, calculated_digest)
    return (algorithm, digest)
def test_pull_sdk(self):
    """pull() unpacks the downloaded SDK tarball into the part's sdk dir."""
    # Build a minimal SDK tarball containing a single empty file.
    with tarfile.open('test-sdk.tar', 'w') as test_sdk_tar:
        open('test-sdk', 'w').close()
        test_sdk_tar.add('test-sdk')

    # Point the 2.0.0 SDK entry at our tarball's real sha256 checksum.
    sdk_checksum = 'sha256/{}'.format(
        file_utils.calculate_hash('test-sdk.tar', algorithm='sha256'))
    with mock.patch.dict(
            dotnet._SDKS_AMD64['2.0.0'], {'checksum': sdk_checksum}):
        plugin = dotnet.DotNetPlugin(
            'test-part', self.options, self.project)
        with mock.patch.object(
                sources.Tar, 'download', return_value='test-sdk.tar'):
            plugin.pull()

    self.assertThat(
        os.path.join('parts', 'test-part', 'dotnet', 'sdk', 'test-sdk'),
        FileExists())
def _download_and_cache(self) -> str:
    """Download the build image for self.base, verify its digest, and cache it.

    :raises errors.BuildImageRequestError: if the HTTP request fails.
    :raises errors.BuildImageChecksumError: if the downloaded image does not
        match the expected checksum.
    :returns: path to the cached image file.
    """
    response = requests.get(self.url, stream=True, allow_redirects=True)
    if not response.ok:
        raise errors.BuildImageRequestError(
            base=self.base, status_code=response.status_code
        )

    # tempfile.TemporaryDirectory does not create the prefix directory for
    # us, so make sure it exists first.
    os.makedirs(self._image_cache.file_cache, exist_ok=True)
    with tempfile.TemporaryDirectory(
        prefix=self._image_cache.file_cache
    ) as tmp_dir:
        image_path = os.path.join(tmp_dir, "{}-vm".format(self.base))
        download_requests_stream(response, image_path)

        digest = calculate_hash(image_path, algorithm=self.algorithm)
        if self.checksum != digest:
            raise errors.BuildImageChecksumError(
                expected=self.checksum,
                calculated=digest,
                algorithm=self.algorithm,
            )

        # Cache while the temporary download directory still exists.
        return self._image_cache.cache(
            filename=image_path, algorithm=self.algorithm, hash=self.checksum
        )
def setUp(self):
    """Stub out arch detection, network access, and the dotnet binary."""
    super().setUp()

    # Minimal stand-in for the plugin's parsed options.
    class Options:
        build_attributes = []
        dotnet_runtime_version = dotnet._RUNTIME_DEFAULT
        dotnet_version = dotnet._VERSION_DEFAULT

    self.options = Options()

    # Only amd64 is supported for now.
    patcher = mock.patch(
        "snapcraft.ProjectOptions.deb_arch",
        new_callable=mock.PropertyMock,
        return_value="amd64",
    )
    patcher.start()
    self.addCleanup(patcher.stop)

    # NOTE: closes over `checksum` and `FakeResponse`, which are only bound
    # later in this method; that is safe because urlopen is not invoked
    # until after setUp has finished (late binding).
    def fake_urlopen(request):
        return FakeResponse(request.full_url, checksum)

    # Fake urllib response serving a canned dotnet release manifest.
    class FakeResponse:
        def __init__(self, url: str, checksum: str) -> None:
            self._url = url
            self._checksum = checksum

        def read(self):
            # Canned releases.json payload advertising a single 2.0.9 SDK.
            return json.dumps({
                "releases": [{
                    "release-version": "2.0.9",
                    "sdk": {
                        "runtime-version": "2.0.9",
                        "files": [{
                            "name": "dotnet-sdk-linux-x64.tar.gz",
                            "rid": "linux-x64",
                            "url": "https://download.microsoft.com/download/f/c/1/fc16c864-b374-4668-83a2-f9f880928b2d/dotnet-sdk-2.1.202-linux-x64.tar.gz",
                            "hash": "c1b07ce8849619ca505aafd2983bcdd7141536ccae243d4249b0c9665daf107e03a696ad5f1d95560142cd841a0888bbf5f1a8ff77d3bdc3696b5873481f0998",
                        }],
                    },
                }]
            }).encode("utf-8")

    # Build a minimal SDK tarball for the plugin to "download" and compute
    # the checksum the fake manifest machinery hands back.
    with tarfile.open("test-sdk.tar", "w") as test_sdk_tar:
        open("test-sdk", "w").close()
        test_sdk_tar.add("test-sdk")
    checksum = file_utils.calculate_hash("test-sdk.tar", algorithm="sha512")

    # Route all urllib.request.urlopen calls through the fake above.
    patcher = mock.patch("urllib.request.urlopen")
    urlopen_mock = patcher.start()
    urlopen_mock.side_effect = fake_urlopen
    self.addCleanup(patcher.stop)

    # Swallow invocations of the dotnet binary but run everything else.
    original_check_call = snapcraft.internal.common.run
    patcher = mock.patch("snapcraft.internal.common.run")
    self.mock_check_call = patcher.start()
    self.addCleanup(patcher.stop)

    def side_effect(cmd, *args, **kwargs):
        if cmd[0].endswith("dotnet"):
            pass
        else:
            original_check_call(cmd, *args, **kwargs)

    self.mock_check_call.side_effect = side_effect
def setUp(self):
    """Stub out arch detection, network access, and the dotnet binary."""
    super().setUp()

    # Minimal stand-in for the plugin's parsed options.
    class Options:
        build_attributes = []
        dotnet_runtime_version = dotnet._RUNTIME_DEFAULT

    self.options = Options()
    self.project = snapcraft.ProjectOptions()

    # Only amd64 is supported for now.
    patcher = mock.patch(
        "snapcraft.ProjectOptions.deb_arch",
        new_callable=mock.PropertyMock,
        return_value="amd64",
    )
    patcher.start()
    self.addCleanup(patcher.stop)

    # NOTE: closes over `checksum` and `FakeResponse`, which are only bound
    # later in this method; that is safe because urlopen is not invoked
    # until after setUp has finished (late binding).
    def fake_urlopen(request):
        return FakeResponse(request.full_url, checksum)

    # Fake urllib response serving either the release manifest or the
    # checksum file, depending on the requested URL.
    class FakeResponse:
        def __init__(self, url: str, checksum: str) -> None:
            self._url = url
            self._checksum = checksum

        def read(self):
            if self._url.endswith("releases.json"):
                data = json.dumps(
                    [
                        {
                            "version-runtime": dotnet._RUNTIME_DEFAULT,
                            "blob-sdk": "https://dotnetcli.blob.core.windows.net/dotnet/"
                            "Sdk/2.1.4/",
                            "sdk-linux-x64": "dotnet-sdk-2.1.4-linux-x64.tar.gz",
                            "checksums-sdk": "https://dotnetcli.blob.core.windows.net/dotnet/"
                            "checksums/2.1.4-sdk-sha.txt",
                        },
                        {"version-sdk": "2.1.104"},
                    ]
                ).encode("utf-8")
            else:
                # A checksum file with a list of checksums and archives.
                # We fill in the computed checksum used in the pull test.
                data = bytes(
                    dedent(
                        """\
                        Hash: SHA512

                        05fe90457a8b77ad5a5eb2f22348f53e962012a
                        {} dotnet-sdk-2.1.4-linux-x64.tar.gz
                        """
                    ).format(self._checksum),
                    "utf-8",
                )
            return data

    # Build a minimal SDK tarball for the plugin to "download" and compute
    # the checksum the fake checksum file hands back.
    with tarfile.open("test-sdk.tar", "w") as test_sdk_tar:
        open("test-sdk", "w").close()
        test_sdk_tar.add("test-sdk")
    checksum = file_utils.calculate_hash("test-sdk.tar", algorithm="sha512")

    # Route all urllib.request.urlopen calls through the fake above.
    patcher = mock.patch("urllib.request.urlopen")
    urlopen_mock = patcher.start()
    urlopen_mock.side_effect = fake_urlopen
    self.addCleanup(patcher.stop)

    # Swallow invocations of the dotnet binary but run everything else.
    original_check_call = snapcraft.internal.common.run
    patcher = mock.patch("snapcraft.internal.common.run")
    self.mock_check_call = patcher.start()
    self.addCleanup(patcher.stop)

    def side_effect(cmd, *args, **kwargs):
        if cmd[0].endswith("dotnet"):
            pass
        else:
            original_check_call(cmd, *args, **kwargs)

    self.mock_check_call.side_effect = side_effect
def setUp(self):
    """Stub out arch detection, network access, and the dotnet binary."""
    super().setUp()

    # Minimal stand-in for the plugin's parsed options.
    class Options:
        build_attributes = []
        dotnet_runtime_version = dotnet._RUNTIME_DEFAULT

    self.options = Options()
    self.project = snapcraft.ProjectOptions()

    # Only amd64 is supported for now.
    patcher = mock.patch('snapcraft.ProjectOptions.deb_arch',
                         new_callable=mock.PropertyMock,
                         return_value='amd64')
    patcher.start()
    self.addCleanup(patcher.stop)

    # NOTE: closes over `checksum` and `FakeResponse`, which are only bound
    # later in this method; that is safe because urlopen is not invoked
    # until after setUp has finished (late binding).
    def fake_urlopen(request):
        return FakeResponse(request.full_url, checksum)

    # Fake urllib response serving either the release manifest or the
    # checksum file, depending on the requested URL.
    class FakeResponse:
        def __init__(self, url: str, checksum: str) -> None:
            self._url = url
            self._checksum = checksum

        def read(self):
            if self._url.endswith('releases.json'):
                data = json.dumps([
                    {
                        'version-runtime': dotnet._RUNTIME_DEFAULT,
                        'blob-sdk':
                            'https://dotnetcli.blob.core.windows.net/dotnet/'
                            'Sdk/2.1.4/',
                        'sdk-linux-x64': 'dotnet-sdk-2.1.4-linux-x64.tar.gz',
                        'checksums-sdk':
                            'https://dotnetcli.blob.core.windows.net/dotnet/'
                            'checksums/2.1.4-sdk-sha.txt'
                    },
                    {
                        'version-sdk': '2.1.104'
                    },
                ]).encode('utf-8')
            else:
                # A checksum file with a list of checksums and archives.
                # We fill in the computed checksum used in the pull test.
                data = bytes(
                    dedent("""\
                        Hash: SHA512

                        05fe90457a8b77ad5a5eb2f22348f53e962012a
                        {} dotnet-sdk-2.1.4-linux-x64.tar.gz
                        """).format(self._checksum), 'utf-8')
            return data

    # Build a minimal SDK tarball for the plugin to "download" and compute
    # the checksum the fake checksum file hands back.
    with tarfile.open('test-sdk.tar', 'w') as test_sdk_tar:
        open('test-sdk', 'w').close()
        test_sdk_tar.add('test-sdk')
    checksum = file_utils.calculate_hash('test-sdk.tar', algorithm='sha512')

    # Route all urllib.request.urlopen calls through the fake above.
    patcher = mock.patch('urllib.request.urlopen')
    urlopen_mock = patcher.start()
    urlopen_mock.side_effect = fake_urlopen
    self.addCleanup(patcher.stop)

    # Swallow invocations of the dotnet binary but run everything else.
    original_check_call = snapcraft.internal.common.run
    patcher = mock.patch('snapcraft.internal.common.run')
    self.mock_check_call = patcher.start()
    self.addCleanup(patcher.stop)

    def side_effect(cmd, *args, **kwargs):
        if cmd[0].endswith('dotnet'):
            pass
        else:
            original_check_call(cmd, *args, **kwargs)

    self.mock_check_call.side_effect = side_effect