Example #1
def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir):
    src_dir = tmpdir / 'src'
    dst_dir = tmpdir / 'dst'
    src_included_file = src_dir.joinpath('file.txt')
    src_excluded_dir = src_dir.joinpath(exclude_dir)
    src_excluded_file = src_dir.joinpath(exclude_dir, 'file.txt')
    src_included_dir = src_dir.joinpath('subdir', exclude_dir)

    # set up source directory
    src_excluded_dir.mkdir(parents=True)
    src_included_dir.mkdir(parents=True)
    src_included_file.touch()
    src_excluded_file.touch()

    dst_included_file = dst_dir.joinpath('file.txt')
    dst_excluded_dir = dst_dir.joinpath(exclude_dir)
    dst_excluded_file = dst_dir.joinpath(exclude_dir, 'file.txt')
    dst_included_dir = dst_dir.joinpath('subdir', exclude_dir)

    src_link = Link(path_to_url(src_dir))
    unpack_file_url(src_link, dst_dir, download_dir=None)
    assert not os.path.isdir(dst_excluded_dir)
    assert not os.path.isfile(dst_excluded_file)
    assert os.path.isfile(dst_included_file)
    assert os.path.isdir(dst_included_dir)
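
A note on Example #1: it is a module-level test, so exclude_dir arrives as a test parameter, and the helpers it uses (Link, path_to_url, unpack_file_url) are imported elsewhere in the test module. Below is a minimal sketch of that surrounding boilerplate, assuming the module layout of older pip releases and using placeholder parametrize values; both the import paths and the directory names are assumptions, not taken from the original test.

import os

import pytest

# Assumed import locations; these helpers have moved between pip releases.
from pip._internal.download import path_to_url, unpack_file_url
from pip._internal.models.link import Link


# Placeholder directory names; the real test supplies its own list of
# directories that should be excluded when copying a local source tree.
@pytest.mark.parametrize("exclude_dir", [".tox", ".git"])
def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir):
    ...  # body as shown in Example #1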
Example #2
    def test_unpack_file_url_download_bad_hash(self, tmpdir, data,
                                               monkeypatch):
        """
        Test when existing download has different hash from the file url
        fragment
        """
        self.prep(tmpdir, data)

        # simulate a stale previous download: copy simple-2.0 into place as
        # simple-1.0, so its hash is wrong
        dest_file = os.path.join(self.download_dir, self.dist_file)
        copy(self.dist_path2, dest_file)

        with open(self.dist_path, 'rb') as f:
            dist_path_md5 = hashlib.md5(f.read()).hexdigest()
        with open(dest_file, 'rb') as f:
            dist_path2_md5 = hashlib.md5(f.read()).hexdigest()

        assert dist_path_md5 != dist_path2_md5

        url = '{}#md5={}'.format(self.dist_url.url, dist_path_md5)
        dist_url = Link(url)
        unpack_file_url(dist_url,
                        self.build_dir,
                        download_dir=self.download_dir,
                        hashes=Hashes({'md5': [dist_path_md5]}))

        # confirm the file now hashes as simple-1.0, i.e. the previous bad
        # download has been replaced with a fresh copy
        with open(dest_file, 'rb') as f:
            assert hashlib.md5(f.read()).hexdigest() == dist_path_md5
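
Examples #2 through #6 are methods of a test class and call self.prep(tmpdir, data) to build shared state: a build dir, a download dir, two fixture archives (simple-1.0 and simple-2.0), and Link objects pointing at them. The prep helper itself is not shown on this page; the following is a hypothetical reconstruction inferred only from the attributes the examples use. The archive file names and the pip-internal import paths are assumptions.

import hashlib
from shutil import copy

import pytest

# Assumed import locations for the pip era these snippets come from.
from pip._internal.download import path_to_url, unpack_file_url
from pip._internal.exceptions import HashMismatch
from pip._internal.models.link import Link
from pip._internal.utils.hashes import Hashes


class TestUnpackFileUrl(object):

    def prep(self, tmpdir, data):
        # build and download targets live under the per-test tmpdir
        self.build_dir = tmpdir.joinpath('build')
        self.download_dir = tmpdir.joinpath('download')
        self.build_dir.mkdir()
        self.download_dir.mkdir()

        # data.packages is a fixture directory of test archives; the exact
        # file names here are assumptions based on how they are used above
        self.dist_file = 'simple-1.0.tar.gz'
        self.dist_file2 = 'simple-2.0.tar.gz'
        self.dist_path = data.packages.joinpath(self.dist_file)
        self.dist_path2 = data.packages.joinpath(self.dist_file2)
        self.dist_url = Link(path_to_url(self.dist_path))
        self.dist_url2 = Link(path_to_url(self.dist_path2))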
Example #3
    def test_unpack_file_url_and_download(self, tmpdir, data):
        self.prep(tmpdir, data)
        unpack_file_url(self.dist_url,
                        self.build_dir,
                        download_dir=self.download_dir)
        # the archive is unpacked into the build dir and a copy of the
        # original file is saved into the download dir
        assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
        assert os.path.isfile(os.path.join(self.download_dir, self.dist_file))
Example #4
    def test_unpack_file_url_thats_a_dir(self, tmpdir, data):
        self.prep(tmpdir, data)
        dist_path = data.packages.joinpath("FSPkg")
        dist_url = Link(path_to_url(dist_path))
        # unpacking a file:// URL that points at a directory copies the
        # directory tree into the build dir
        unpack_file_url(dist_url, self.build_dir,
                        download_dir=self.download_dir)
        assert os.path.isdir(os.path.join(self.build_dir, 'fspkg'))
Example #5
    def test_unpack_file_url_bad_hash(self, tmpdir, data, monkeypatch):
        """
        Test when the file URL hash fragment is wrong.
        """
        self.prep(tmpdir, data)
        url = '{}#md5=bogus'.format(self.dist_url.url)
        dist_url = Link(url)
        with pytest.raises(HashMismatch):
            unpack_file_url(dist_url,
                            self.build_dir,
                            hashes=Hashes({'md5': ['bogus']}))
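
In Example #5, the Hashes helper maps a hash name to a list of acceptable hex digests, and unpack_file_url raises HashMismatch when the file matches none of them. Below is a small standalone sketch of building a Hashes object that would accept a given file; the import path is assumed to match the pip era of these snippets, and expected_hashes_for is a hypothetical helper, not part of pip.

import hashlib

from pip._internal.utils.hashes import Hashes  # assumed location


def expected_hashes_for(path):
    # hypothetical helper: accept exactly the given file's MD5 digest
    with open(path, 'rb') as f:
        digest = hashlib.md5(f.read()).hexdigest()
    return Hashes({'md5': [digest]})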
Example #6
    def test_unpack_file_url_download_already_exists(self, tmpdir,
                                                     data, monkeypatch):
        self.prep(tmpdir, data)
        # add a previous download (copy simple-2.0 into place as simple-1.0)
        # so we can tell whether it gets overwritten
        dest_file = os.path.join(self.download_dir, self.dist_file)
        copy(self.dist_path2, dest_file)
        with open(self.dist_path2, 'rb') as f:
            dist_path2_md5 = hashlib.md5(f.read()).hexdigest()

        unpack_file_url(self.dist_url, self.build_dir,
                        download_dir=self.download_dir)
        # the hash should be unchanged, i.e. the existing file was not
        # overwritten with simple-1.0
        with open(dest_file, 'rb') as f:
            assert dist_path2_md5 == hashlib.md5(f.read()).hexdigest()