def test_http_get_all(test_env):
    """
    Check the API endpoint to get all requests.

    Process:
    * Pre-populate data in Cachito by submitting basic requests and waiting for
      them to complete.
    * Verify the submitted requests are not incorrectly marked as "in_progress".
    * Verify the submitted requests can be found by using the state=complete filter.

    Checks:
    * Check that response code is 200
    * Check that filtering parameters are working properly (page, per_page, and state)
    """
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    requests_amount = test_env["http_get_all"]["requests_amount"]
    gomod_cfg = test_env["packages"]["gomod"]
    payload = {
        "repo": gomod_cfg["repo"],
        "ref": gomod_cfg["ref"],
        "pkg_managers": gomod_cfg["pkg_managers"],
    }
    # Submit everything up front, then wait for each request and record its id.
    initial_responses = [client.create_new_request(payload) for _ in range(requests_amount)]
    submitted_requests = [
        client.wait_for_complete_request(resp).data["id"] for resp in initial_responses
    ]
    assert_no_requests_in_progress_state(client, requests_amount, submitted_requests)
    assert_completed_requests(client, submitted_requests)
def test_git_dir_not_included_by_default(test_env, default_requests, tmpdir):
    """
    Check that the bundle does not include the .git file objects by default.

    Process:
    * Send new request to Cachito API
    * Send request to download appropriate bundle from Cachito

    Checks:
    * Check that response code is 200
    * Check that state is "complete"
    * Check the downloaded data are in gzip format and valid
    * Check that downloaded data does not contain any .git files
    """
    response = default_requests["gomod"].complete_response
    utils.assert_properly_completed_response(response)
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    client.download_and_extract_archive(response.id, tmpdir)
    file_name_tar = tmpdir.join(f"download_{str(response.id)}.tar.gz")
    # Scan every archive member; any entry whose basename is ".git" is a failure.
    git_files = set()
    with tarfile.open(file_name_tar, mode="r:gz") as tar:
        for member in tar.getmembers():
            if path.basename(member.name) == ".git":
                git_files.add(member.name)
    assert not git_files, (
        f"#{response.id}: There are unexpected .git files in archive {file_name_tar}: "
        f"{git_files}"
    )
def test_gomod_vendor_without_flag(test_env):
    """
    Validate failing of gomod vendor request without flag.

    Checks:
    * The request failed with expected error message
    """
    env_data = utils.load_test_data("gomod_packages.yaml")["vendored_without_flag"]
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    initial_response = client.create_new_request(
        payload={
            "repo": env_data["repo"],
            "ref": env_data["ref"],
            "pkg_managers": env_data["pkg_managers"],
        },
    )
    completed_response = client.wait_for_complete_request(initial_response)
    # Without strict mode the server accepts vendored deps, so the request just completes.
    if not test_env.get("strict_mode_enabled"):
        utils.assert_properly_completed_response(completed_response)
        return
    assert completed_response.status == 200
    assert completed_response.data["state"] == "failed"
    error_msg = (
        'The "gomod-vendor" flag must be set when your repository has vendored dependencies'
    )
    assert error_msg in completed_response.data["state_reason"], (
        f"#{completed_response.id}: Request failed correctly, but with unexpected message: "
        f"{completed_response.data['state_reason']}. Expected message was: {error_msg}"
    )
def test_creating_new_request(test_env, default_requests):
    """
    Send a new request to the Cachito API.

    Checks:
    * Check that response code is 201
    * Check that response contains id number, same ref and repo as in request,
      state_reason is: The request was initiated
    """
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    gomod_cfg = test_env["packages"]["gomod"]
    created = default_requests["gomod"].initial_response
    assert created.status == 201
    assert "id" in created.data
    assert created.id > 0
    # Fetch the same request back by id and make sure it matches what was created.
    fetched = client.fetch_request(created.id)
    assert created.id == fetched.id
    assert set(gomod_cfg["pkg_managers"]) == set(created.data["pkg_managers"])
    for resp in (created, fetched):
        assert gomod_cfg["ref"] == resp.data["ref"]
        assert gomod_cfg["repo"] == resp.data["repo"]
    assert created.data["state_reason"] == "The request was initiated"
def test_packages(env_package, env_name, test_env, tmpdir):
    """
    Validate data in the package request according to pytest env_name and env_package parameter.

    Process:
    Send new request to the Cachito API
    Send request to check status of existing request

    Checks:
    * Check that the request completes successfully
    * Check that expected packages are identified in response
    * Check that expected dependencies are identified in response
    * Check response parameters of the package
    * Check that the source tarball includes the application source code
    * Check that the source tarball includes expected deps directory
    * Check: The content manifest is successfully generated and contains correct content
    """
    env_data = utils.load_test_data(f"{env_package}.yaml")[env_name]
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    payload = {
        "repo": env_data["repo"],
        "ref": env_data["ref"],
        "pkg_managers": env_data.get("pkg_managers", []),
        "flags": env_data.get("flags", []),
    }
    # The implicit-gomod scenario relies on the server's package-manager detection.
    if env_name == "implicit_gomod":
        del payload["pkg_managers"]
    initial_response = client.create_new_request(payload=payload)
    completed_response = client.wait_for_complete_request(initial_response)
    utils.assert_elements_from_response(
        completed_response.data, env_data["response_expectations"]
    )
    client.download_and_extract_archive(completed_response.id, tmpdir)
    source_path = tmpdir.join(f"download_{str(completed_response.id)}")
    utils.assert_expected_files(source_path, env_data["expected_files"], tmpdir)
    purl = env_data.get("purl", "")
    if purl:
        # Assemble the expected content manifest from the purl fields in test data.
        image_contents = [
            {
                "dependencies": [{"purl": p} for p in env_data.get("dep_purls", [])],
                "purl": purl,
                "sources": [{"purl": p} for p in env_data.get("source_purls", [])],
            }
        ]
    else:
        image_contents = env_data["image_contents"]
    utils.assert_content_manifest(client, completed_response.id, image_contents)
def test_invalid_content_manifest_request(test_env):
    """
    Send an invalid content-manifest request to the Cachito API.

    Checks:
    * Check that the response code is 404
    """
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    # Request id 0 never exists, so the API must answer with a 404 error body.
    with pytest.raises(requests.HTTPError) as exc_info:
        client.fetch_content_manifest(request_id=0)
    error_response = exc_info.value.response
    assert error_response.status_code == 404
    assert error_response.json() == {"error": "The requested resource was not found"}
def test_run_app_from_bundle(test_env, default_requests, tmpdir):
    """
    Check that downloaded bundle could be used to run the application.

    Process:
    * Send new request to Cachito API
    * Download a bundle from the request
    * Run go build
    * Run the application

    Checks:
    * Check that the state of request is complete
    * Check that the bundle is properly downloaded
    * Check that the application runs successfully
    """
    response = default_requests["gomod"].complete_response
    utils.assert_properly_completed_response(response)
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    client.download_and_extract_archive(response.id, tmpdir)
    bundle_dir = tmpdir.join(f"download_{str(response.id)}")
    app_name = test_env["run_app"]["app_name"]
    app_binary_file = str(tmpdir.join(app_name))
    # Build against the cached module downloads only — all Go caches point into the bundle.
    gomod_deps = bundle_dir.join("deps", "gomod")
    build_env = {
        "GOPATH": str(gomod_deps),
        "GOCACHE": str(gomod_deps),
        "GOMODCACHE": "{}/pkg/mod".format(str(gomod_deps)),
    }
    subprocess.run(
        ["go", "build", "-o", app_binary_file, str(bundle_dir.join("app", "main.go"))],
        env=build_env,
        cwd=str(bundle_dir.join("app")),
        check=True,
    )
    assert path.exists(
        app_binary_file
    ), f"#{response.id}: Path for application binary file {app_binary_file} does not exist"
    # Finally, the built binary must actually run.
    sp = subprocess.run([app_binary_file, "--help"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    assert sp.returncode == 0
def test_various_packages(test_env):
    """
    Submit one request per configured package manager and validate dependency counts.

    Checks:
    * Check that each request completes successfully
    * Check that the number of reported dependencies matches the test data
    """
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    for pkg_manager, package in test_env["various_packages"].items():
        payload = {
            "repo": package["repo"],
            "ref": package["ref"],
            "pkg_managers": [pkg_manager],
        }
        initial_response = client.create_new_request(payload=payload)
        completed_response = client.wait_for_complete_request(initial_response)
        utils.assert_properly_completed_response(completed_response)
        dependencies = completed_response.data["dependencies"]
        assert len(dependencies) == package["dependencies_count"]
def test_valid_content_manifest_request(test_env, default_requests):
    """
    Send a valid content-manifest request to the Cachito API.

    Checks:
    * Check that the response code is 200
    * Check validation of the response data with content manifest JSON schema
    """
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    for pkg_manager in test_env["content_manifest"]["pkg_managers"]:
        request_id = default_requests[pkg_manager].initial_response.id
        manifest_response = client.fetch_content_manifest(request_id)
        assert manifest_response.status == 200
        utils.assert_content_manifest_schema(manifest_response.data)
def test_git_dir_included_by_flag(test_env, tmpdir):
    """
    Check that the bundle includes the .git file objects when include-git-dir flag is used.

    Process:
    * Send new request to Cachito API
    * Send request to download appropriate bundle from Cachito

    Checks:
    * Check that response code is 200
    * Check that state is "complete"
    * Check the downloaded data are in gzip format and valid
    * Check that downloaded data contains app/.git file object, directory
    """
    package_info = test_env["packages"]["gomod"]
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    payload = {
        "repo": package_info["repo"],
        "ref": package_info["ref"],
        "pkg_managers": package_info["pkg_managers"],
        "flags": ["include-git-dir"],
    }
    initial_response = client.create_new_request(payload=payload)
    response = client.wait_for_complete_request(initial_response)
    utils.assert_properly_completed_response(response)
    client.download_and_extract_archive(response.id, tmpdir)
    file_name_tar = tmpdir.join(f"download_{str(response.id)}.tar.gz")
    # Collect every archive member named ".git"; exactly app/.git must be present.
    git_files = set()
    with tarfile.open(file_name_tar, mode="r:gz") as tar:
        for member in tar.getmembers():
            if path.basename(member.name) == ".git":
                git_files.add(member.name)
    assert git_files == {"app/.git"}, (
        f"#{response.id}: There are unexpected, or missing, .git files in archive {file_name_tar}: "
        f"{git_files}"
    )
def test_get_all_verbose(test_env):
    """
    Check the API endpoint to get all requests with the verbose flag enabled.

    Process:
    * Create a request and wait for it to complete
    * Send request get_all to the Cachito API with verbose

    Checks:
    * Check that response code is 200
    * Check that verbose is working properly
    """
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    initial_response = client.create_new_request(
        payload={
            "repo": test_env["packages"]["gomod"]["repo"],
            "ref": test_env["packages"]["gomod"]["ref"],
            "pkg_managers": test_env["packages"]["gomod"]["pkg_managers"],
        },
    )
    client.wait_for_complete_request(initial_response)
    query_params = {"per_page": 1, "page": 1, "state": "complete", "verbose": True}
    request_id = initial_response.id
    # Page through the listing (one item per page) until our request shows up.
    # Fix: guard against an empty page — previously this indexed items[0]
    # unconditionally, which raised IndexError (or paged forever) if the request
    # was never returned; it also kept incrementing the page after a match.
    found_request = False
    while not found_request:
        response = client.fetch_all_requests(query_params, all_pages=False)
        assert response.status == 200
        items = response.data["items"]
        assert items, (
            f"Request {request_id} was not found in any page of the request listing"
        )
        if items[0]["id"] == request_id:
            found_request = True
        else:
            query_params["page"] += 1
    # With verbose=True the listing entry must equal the full single-request payload.
    expected_request_data = client.fetch_request(request_id).data
    assert response.data["items"][0] == expected_request_data
def default_requests(test_env):
    """
    Create a new request for every package manager in Cachito.

    :param test_env: Test environment configuration
    :return: a dict of packages with initial and completed responses from the Cachito API
    :rtype: dict
    """
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    result_requests = {}
    for package_name, package_info in test_env["packages"].items():
        payload = {
            "repo": package_info["repo"],
            "ref": package_info["ref"],
            "pkg_managers": package_info["pkg_managers"],
        }
        initial_response = client.create_new_request(payload=payload)
        completed_response = client.wait_for_complete_request(initial_response)
        result_requests[package_name] = DefaultRequest(initial_response, completed_response)
    return result_requests
    def test_using_cached_packages(self, tmpdir, test_env):
        """
        Check that the cached packages are used instead of downloading them from repo again.

        Preconditions:
        * On git instance prepare an empty repository

        Process:
        * Clone the package from the upstream repository
        * Create empty commit on new test branch and push it to the prepared repository
        * Send new request to Cachito API which would fetch data from the prepared repository
        * Delete branch with the corresponding commit
        * Send the same request to Cachito API

        Checks:
        * Check that the state of the first request is complete
        * Check that the commit is not available in the repository after the branch is deleted
        * Check that the state of the second request is complete
        """
        # Random branch name so repeated/concurrent runs do not collide on the remote.
        generated_suffix = "".join(
            random.choice(string.ascii_letters + string.digits) for x in range(10)
        )
        branch_name = f"test-{generated_suffix}"
        # Clone the read-only seed repo and attach the writable test repo as a second remote.
        repo = git.repo.Repo.clone_from(self.env_data["seed_repo"]["https_url"], tmpdir)
        remote = repo.create_remote("test", url=self.env_data["test_repo"]["ssh_url"])
        assert remote.exists(), f"Remote {remote.name} does not exist"
        # set user configuration, if available
        if self.git_user:
            repo.config_writer().set_value("user", "name", self.git_user).release()
        if self.git_email:
            repo.config_writer().set_value("user", "email", self.git_email).release()
        try:
            # Push an empty commit on the throwaway branch; this is the ref Cachito fetches.
            repo.create_head(branch_name).checkout()
            repo.git.commit("--allow-empty", m="Commit created in integration test for Cachito")
            repo.git.push("-u", remote.name, branch_name)
            commit = repo.head.commit.hexsha
            client = utils.Client(
                test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"),
            )
            response = client.create_new_request(
                payload={
                    "repo": self.env_data["test_repo"]["https_url"],
                    "ref": commit,
                    "pkg_managers": self.env_data["test_repo"]["pkg_managers"],
                },
            )
            first_response = client.wait_for_complete_request(response)
            utils.assert_properly_completed_response(first_response)
            # Sanity check: the commit is still reachable before we delete the branch.
            assert repo.git.branch(
                "-a", "--contains", commit
            ), f"Commit {commit} is not in branches (it should be there)."
        finally:
            # Always remove the branch (and verify the commit disappeared), even on failure.
            delete_branch_and_check(branch_name, repo, remote, [commit])
        # Identical second request: the commit no longer exists upstream, so Cachito
        # can only satisfy this from its cache.
        response = client.create_new_request(
            payload={
                "repo": self.env_data["test_repo"]["https_url"],
                "ref": commit,
                "pkg_managers": self.env_data["test_repo"]["pkg_managers"],
            },
        )
        second_response = client.wait_for_complete_request(response)
        utils.assert_properly_completed_response(second_response)
        # Both responses must describe the same content.
        assert first_response.data["ref"] == second_response.data["ref"]
        assert first_response.data["repo"] == second_response.data["repo"]
        assert set(first_response.data["pkg_managers"]) == set(second_response.data["pkg_managers"])
        first_pkgs = utils.make_list_of_packages_hashable(first_response.data["packages"])
        second_pkgs = utils.make_list_of_packages_hashable(second_response.data["packages"])
        assert first_pkgs == second_pkgs
        first_deps = utils.make_list_of_packages_hashable(first_response.data["dependencies"])
        second_deps = utils.make_list_of_packages_hashable(second_response.data["dependencies"])
        assert first_deps == second_deps
    def test_pip_with_cached_deps(self, test_env, tmpdir):
        """
        Test pip package with cached dependency.

        The test verifies that even after deleting dependency Cachito will provide
        cached version. The test supports only remote repos. Local version will be skipped.

        Stages:
        1. Make changes in dependency repository:
           * create new branch
           * push 2 new commits
        2. Make changes in requirements.txt in original repository:
           * add VCS and remote source archive dependencies based on commits from 1.
           * push changes with new commit
        3. Create Cachito request and verify it [1]
        4. Delete branch in dependency repository
        5. Create Cachito request and verify it [1]

        [1] Verifications:
        * The request completes successfully.
        * A single pip package is identified. Dependencies are correctly listed under
          “.dependencies” and under “.packages | select(.type == “pip”) | .dependencies”.
        * The source tarball includes the application source code under the app directory.
        * The source tarball includes the dependencies and dev dependencies source code
          under deps/pip directory.
        * The content manifest is successfully generated and contains correct content.
        """
        env_data = utils.load_test_data("cached_dependencies.yaml")["cached_deps"]
        self.use_local = env_data["use_local"]
        if self.use_local:
            pytest.skip("The local repos are not supported for the test")
        self.git_user = env_data.get("git_user")
        self.git_email = env_data.get("git_email")
        # Download dependency repo into a new directory
        dep_repo_dir = os.path.join(tmpdir, "dep")
        # Random branch name so repeated runs do not collide on the remote.
        generated_suffix = "".join(
            random.choice(string.ascii_letters + string.digits) for _ in range(10)
        )
        self.branch = f"test-{generated_suffix}"
        self.cloned_dep_repo = clone_repo_in_new_dir(
            env_data["ssh_dep_repo"], self.branch, dep_repo_dir
        )
        # set user configuration, if available
        if self.git_user:
            self.cloned_dep_repo.config_writer().set_value("user", "name", self.git_user).release()
        if self.git_email:
            self.cloned_dep_repo.config_writer().set_value(
                "user", "email", self.git_email
            ).release()
        # Make changes in dependency repo
        # We need 2 commits:
        # 1st for remote source archive dependency
        # 2nd for VCS dependency
        new_dep_commits = []
        for _ in range(2):
            self.cloned_dep_repo.git.commit(
                "--allow-empty", m="Commit created in integration test for Cachito"
            )
            new_dep_commits.append(self.cloned_dep_repo.head.object.hexsha)
        # Push changes
        self.dep_repo_origin = self.cloned_dep_repo.remote(name="origin")
        self.dep_repo_origin.push(self.branch)
        # Download the archive with first commit changes
        archive_name = os.path.join(tmpdir, f"{new_dep_commits[0]}.zip")
        utils.download_archive(
            f"{env_data['dep_archive_baseurl']}{new_dep_commits[0]}.zip", archive_name
        )
        # Get the archive hash
        dep_hash = utils.get_sha256_hash_from_file(archive_name)
        # Download the main repo into a new dir
        main_repo_dir = os.path.join(tmpdir, "main")
        self.cloned_main_repo = clone_repo_in_new_dir(
            env_data["ssh_main_repo"], self.branch, main_repo_dir
        )
        if self.git_user:
            self.cloned_main_repo.config_writer().set_value("user", "name", self.git_user).release()
        if self.git_email:
            self.cloned_main_repo.config_writer().set_value(
                "user", "email", self.git_email
            ).release()
        # Add new dependencies into the main repo
        # (an archive dep pinned by cachito_hash, plus a VCS dep pinned to the 2nd commit)
        with open(os.path.join(main_repo_dir, "requirements.txt"), "a") as f:
            f.write(
                f"{env_data['dep_archive_baseurl']}{new_dep_commits[0]}"
                f".zip#egg=appr&cachito_hash=sha256:{dep_hash}\n"
            )
            f.write(f"git+{env_data['https_dep_repo']}@{new_dep_commits[1]}#egg=appr\n")
        diff_files = self.cloned_main_repo.git.diff(None, name_only=True)
        self.cloned_main_repo.git.add(diff_files)
        self.cloned_main_repo.git.commit("-m", "test commit")
        self.main_repo_commit = self.cloned_main_repo.head.object.hexsha
        self.main_repo_origin = self.cloned_main_repo.remote(name="origin")
        self.main_repo_origin.push(self.branch)
        # Create new Cachito request
        client = utils.Client(
            test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout")
        )
        payload = {
            "repo": env_data["https_main_repo"],
            "ref": self.main_repo_commit,
            "pkg_managers": env_data["pkg_managers"],
        }
        try:
            initial_response = client.create_new_request(payload=payload)
            completed_response = client.wait_for_complete_request(initial_response)
        finally:
            # Delete the dependency branch
            # (always, so the second request below cannot fetch it upstream)
            delete_branch_and_check(
                self.branch, self.cloned_dep_repo, self.dep_repo_origin, new_dep_commits
            )
        # Substitute the generated commit hashes into the expected test data.
        replace_rules = {
            "FIRST_DEP_COMMIT": new_dep_commits[0],
            "SECOND_DEP_COMMIT": new_dep_commits[1],
            "FIRST_DEP_HASH": dep_hash,
            "MAIN_REPO_COMMIT": self.main_repo_commit,
        }
        update_expected_data(env_data, replace_rules)
        assert_successful_cached_request(completed_response, env_data, tmpdir, client)
        # Create new Cachito request to test cached deps
        initial_response = client.create_new_request(payload=payload)
        completed_response = client.wait_for_complete_request(initial_response)
        assert_successful_cached_request(completed_response, env_data, tmpdir, client)
def test_check_downloaded_output(test_env, default_requests, tmpdir):
    """
    Check that the bundle has all the necessities.

    Process:
    * Send new request to Cachito API
    * Send request to download appropriate bundle from Cachito

    Checks:
    * Check that response code is 200
    * Check that state is "complete"
    * Check the downloaded data are in gzip format and valid
    * Check that dir deps/gomod/… contains cached dependencies
    * Check that dir app/ contains application source code
    * Check that the same full path filename is not duplicated
    """
    response = default_requests["gomod"].complete_response
    utils.assert_properly_completed_response(response)
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    bundle_path = tmpdir.join(f"download_{str(response.id)}")
    client.download_and_extract_archive(response.id, tmpdir)
    pkg_managers = test_env["downloaded_output"]["pkg_managers"]
    # Every reported dependency must be present in the Go module download cache.
    deps_cache_dir = path.join("deps", "gomod", "pkg", "mod", "cache", "download")
    dependency_names = [
        dep["name"] for dep in response.data["dependencies"] if dep["type"] in pkg_managers
    ]
    for dep_name in dependency_names:
        escaped = utils.escape_path_go(dep_name)
        dep_path = path.join(bundle_path, deps_cache_dir, escaped)
        assert path.exists(
            dep_path
        ), f"#{response.id}: Dependency path does not exist: {dep_path}"
    go_mod_path = path.join(bundle_path, "app", "go.mod")
    assert path.exists(
        go_mod_path
    ), f"#{response.id}: File go.mod does not exist in location: {go_mod_path}"
    # The module declared in go.mod must match the packages reported in the response.
    module_names = []
    with open(go_mod_path, "r") as go_mod_file:
        for line in go_mod_file:
            if line.startswith("module "):
                module_names.append(line.split()[-1])
                break
    expected_packages = [
        pkg["name"] for pkg in response.data["packages"] if pkg["type"] in pkg_managers
    ]
    assert set(module_names) == set(expected_packages)
    # The app directory must actually contain Go sources.
    go_sources = list(Path(path.join(bundle_path, "app")).rglob("*.go"))
    assert len(go_sources) > 0
    # No full path may appear twice in the archive.
    file_name_tar = tmpdir.join(f"download_{str(response.id)}.tar.gz")
    with tarfile.open(file_name_tar, mode="r:gz") as tar:
        seen_names = set()
        for member in tar.getmembers():
            assert member.name not in seen_names, (
                f"#{response.id}: There is an unexpected duplicate {member.name} "
                f"in archive {file_name_tar}"
            )
            seen_names.add(member.name)
def test_dependency_replacement(test_env, tmpdir):
    """
    Check that proper versions of dependencies were used.

    Process:
    * Send new request to Cachito API to fetch retrodep with another version of dependency package
    * Download a bundle archive

    Checks:
    * Check that the state of request is complete
    * Check that in the response there is a key "replaces" with dict values which was replaced
    * Check that dir deps/gomod/pkg/mod/cache/download/github.com/pkg/errors/@v/…
      contains only the required version
    * Check that app/go.mod file has replace directive for the specified package
    """
    dependency_replacements = test_env["dep_replacement"]["dependency_replacements"]
    client = utils.Client(test_env["api_url"], test_env["api_auth_type"], test_env.get("timeout"))
    gomod_cfg = test_env["packages"]["gomod"]
    response_created_req = client.create_new_request(
        payload={
            "repo": gomod_cfg["repo"],
            "ref": gomod_cfg["ref"],
            "pkg_managers": gomod_cfg["pkg_managers"],
            "dependency_replacements": dependency_replacements,
        },
    )
    response = client.wait_for_complete_request(response_created_req)
    utils.assert_properly_completed_response(response)
    # Every requested replacement must show up as a "replaces" entry in the response.
    names_replaced_dependencies = {
        dep["replaces"]["name"]
        for dep in response.data["dependencies"]
        if dep["replaces"] is not None
    }
    supposed_replaced_dependencies = {item["name"] for item in dependency_replacements}
    assert names_replaced_dependencies == supposed_replaced_dependencies
    bundle_dir_name = tmpdir.join(f"download_{str(response.id)}")
    client.download_and_extract_archive(response.id, tmpdir)
    # The module cache must list the replacement version for each dependency.
    for dependency in dependency_replacements:
        dep_name = utils.escape_path_go(dependency["name"])
        dependency_version_file = path.join(
            bundle_dir_name,
            "deps",
            "gomod",
            "pkg",
            "mod",
            "cache",
            "download",
            dep_name,
            "@v",
            "list",
        )
        assert path.exists(dependency_version_file), (
            f"#{response.id}: Path for version of dependency "
            f"{dep_name} does not exist: {dependency_version_file}")
        with open(dependency_version_file, "r") as version_file:
            listed_versions = {entry.rstrip() for entry in version_file}
        assert dependency["version"] in listed_versions, (
            f"#{response.id}: File {dependency_version_file} does not contain"
            f" version {dependency['version']} that should have replaced the original one."
        )
    go_mod_path = path.join(bundle_dir_name, "app", "go.mod")
    assert path.exists(
        go_mod_path
    ), f"#{response.id}: File go.mod does not exist in location: {go_mod_path}"
    # go.mod must carry a replace directive for every requested replacement.
    go_mod_replace = []
    with open(go_mod_path, "r") as go_mod_file:
        for line in go_mod_file:
            if line.startswith("replace "):
                tokens = line.split()
                go_mod_replace.append(
                    {"name": tokens[-2], "type": "gomod", "version": tokens[-1]}
                )
    sorted_dep_replacements = utils.make_list_of_packages_hashable(dependency_replacements)
    sorted_go_mod_replace = utils.make_list_of_packages_hashable(go_mod_replace)
    assert sorted_go_mod_replace == sorted_dep_replacements
import os
import utils
import threading
import rich.console
from prompt_toolkit import prompt
from prompt_toolkit.document import Document
from prompt_toolkit.history import FileHistory
from prompt_toolkit.validation import Validator
from prompt_toolkit.completion import WordCompleter

# Commands accepted by the interactive shell-style prompt.
commands = ['ls', 'cd', 'mkdir', 'download', 'upload', 'rm', 'quit']
# Current working directory within the remote storage (empty string = root).
currentDir = ''
console = rich.console.Console()
# Client connected to the QQ enterprise mail IMAP server — presumably utils.Client
# wraps an IMAP mailbox as storage; verify against utils.
client = utils.Client('imap.exmail.qq.com')


class myThread(threading.Thread):
    # Worker thread that logs its own start and exit; run() currently performs
    # no work between the two prints.
    def __init__(self, threadID, name, counter):
        threading.Thread.__init__(self)
        self.threadID = threadID
        self.name = name
        self.counter = counter
        # NOTE(review): 'recv' is not defined in this scope (it is not a parameter),
        # so instantiating myThread raises NameError here — confirm where 'recv'
        # was meant to come from (likely a missing constructor argument).
        self.recv = recv

    def run(self):
        # Messages are "start thread:" / "exit thread:" followed by the thread name.
        print("开始线程:" + self.name)
        print("退出线程:" + self.name)


# NOTE(review): class definition continues beyond this chunk; body not visible here.
class UpdateWord(Validator):
    def __init__(self, completer: WordCompleter):