def test_merge_bundle_dirs(mock_merge_files, tree_1, tree_2, result_tree, merge_file_executions):
    """
    Verify that _merge_bundle_dirs merges the first directory into the second.

    After merging, the target directory must match the expected result tree,
    and the file-merge helper must have been invoked the expected number of times.
    """
    with tempDir() as source_dir, tempDir() as target_dir, tempDir() as expected_dir:
        # Materialize each fixture tree in its own temporary directory.
        for tree, destination in (
            (tree_1, source_dir),
            (tree_2, target_dir),
            (result_tree, expected_dir),
        ):
            write_file_tree(tree, destination)
        _merge_bundle_dirs(source_dir, target_dir)
        assert_directories_equal(target_dir, expected_dir)
    assert mock_merge_files.call_count == merge_file_executions
Example #2
0
def assert_expected_files(source_path, expected_files, tmpdir):
    """
    Check that the source path includes expected files in directories.

    Stages for not empty directory checks:
    1. If we check `deps` directory, extract package in `deps`
    2. Download and extract expected package from expected_files
    3. Compare files recursively with expected ones
    4. Delete downloaded and extracted temporary data

    :param str source_path: local path for checking
    :param dict expected_files: Dict with expected file data:
        {<directory_name>: <archive_URL>}
    :param tmpdir: Testing function for providing temporary directory
    """
    for dir_to_check, archive_url in expected_files.items():
        # A directory path to check
        test_path = os.path.join(source_path, dir_to_check)
        # If there is no link to expected archive, the directory should be empty
        if not archive_url:
            assert (
                len(os.listdir(test_path)) == 0
            ), f"Directory: {test_path} not found or not empty as expected."
            continue
        dir_identifier = dir_to_check.replace("/", "_")
        # A directory path with extracted deps
        deps_data_path = os.path.join(tmpdir, f"test_source_{dir_identifier}")
        # An archive path with expected data
        expected_archive = os.path.join(tmpdir, f"archive_{dir_identifier}.tar.gz")
        # A directory path with extracted expected data
        expected_data_path = os.path.join(tmpdir, f"expected_data_{dir_identifier}")
        # Dependencies instead of source code are saved as archives.
        # Therefore we should extract it firstly
        if os.path.isdir(test_path):
            package_root_dir = test_path
        else:
            shutil.unpack_archive(test_path, deps_data_path)
            # deps_data_path is unique and contains only one expected package
            package_root_dir = os.path.join(deps_data_path, os.listdir(deps_data_path)[0])
        download_archive(archive_url, expected_archive)
        shutil.unpack_archive(expected_archive, expected_data_path)
        # Root directory for expected data of package or dependency
        expected_package_root_dir = os.path.join(
            expected_data_path, os.listdir(expected_data_path)[0]
        )
        assert os.path.isdir(
            expected_package_root_dir
        ), f"Wrong directory path {expected_package_root_dir}."
        # Compare and assert files in directory with expected data
        assert_directories_equal(package_root_dir, expected_package_root_dir)
        # Delete temporary data.
        # Bug fixes vs. the original: the existence check now tests temp_data
        # (the loop variable) rather than always deps_data_path, and the
        # downloaded archive — a regular file — is removed with os.remove,
        # since shutil.rmtree raises NotADirectoryError on files.
        for temp_data in (deps_data_path, expected_data_path, expected_archive):
            if os.path.isdir(temp_data):
                shutil.rmtree(temp_data)
            elif os.path.exists(temp_data):
                os.remove(temp_data)
Example #3
0
def test_merge_files(file_1_content, file_2_content, result_file_content):
    """
    Verify that _merge_files merges the first "list" file into the second.

    The merged file in the second directory must match the expected result
    file; both files are printed for easier debugging on failure.
    """
    with tempDir() as src_dir, tempDir() as dest_dir, tempDir() as expected_dir:
        # Write each fixture content as a "list" file in its own directory.
        for content, destination in (
            (file_1_content, src_dir),
            (file_2_content, dest_dir),
            (result_file_content, expected_dir),
        ):
            write_file_tree({"list": content}, destination)
        _merge_files(f"{src_dir}/list", f"{dest_dir}/list")
        # Dump merged and expected contents to aid failure diagnosis.
        for directory in (dest_dir, expected_dir):
            with open(f"{directory}/list", "r") as handle:
                print(handle.read())
        assert_directories_equal(dest_dir, expected_dir)