def create_bundle_archive(request_id, flags):
    """
    Create the bundle archive to be downloaded by the user.

    :param int request_id: the request the bundle is for
    :param list flags: the flags set on the request (e.g. "include-git-dir")
    """
    set_request_state(request_id, "in_progress", "Assembling the bundle archive")

    bundle_dir = RequestBundleDir(request_id)
    log.debug("Using %s for creating the bundle for request %d", bundle_dir, request_id)
    log.info("Creating %s", bundle_dir.bundle_archive_file)

    def filter_git_dir(tar_info):
        return tar_info if os.path.basename(tar_info.name) != ".git" else None

    tar_filter = filter_git_dir
    if "include-git-dir" in flags:
        tar_filter = None

    with tarfile.open(bundle_dir.bundle_archive_file, mode="w:gz") as bundle_archive:
        # Add the source to the bundle. This is done one file/directory at a time in the parent
        # directory in order to exclude the app/.git folder.
        for item in bundle_dir.source_dir.iterdir():
            arc_name = os.path.join("app", item.name)
            bundle_archive.add(str(item), arc_name, filter=tar_filter)
        # Add the dependencies to the bundle
        bundle_archive.add(str(bundle_dir.deps_dir), "deps")

    set_request_state(request_id, "complete", "Completed successfully")
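# Illustration (not part of the task above): the ".git" exclusion relies on tarfile's
# `filter` callback returning None for entries that should be skipped; when the filter
# rejects a directory, tarfile also skips everything under it. A minimal, self-contained
# sketch of that technique, using only the standard library and a throwaway temp directory:
import os
import tarfile
import tempfile
from pathlib import Path


def _skip_git_dir(tar_info: tarfile.TarInfo):
    """Return None for any entry named '.git' so tarfile omits it and its contents."""
    return None if os.path.basename(tar_info.name) == ".git" else tar_info


def _demo_git_dir_exclusion():
    with tempfile.TemporaryDirectory() as tmp:
        src = Path(tmp, "source")
        (src / ".git").mkdir(parents=True)
        (src / "main.py").write_text("print('hello')\n")

        archive = Path(tmp, "bundle.tar.gz")
        with tarfile.open(archive, mode="w:gz") as tar:
            # Mirror the per-item add used above so the top-level .git entry hits the filter
            for item in src.iterdir():
                tar.add(str(item), os.path.join("app", item.name), filter=_skip_git_dir)

        with tarfile.open(archive) as tar:
            return tar.getnames()  # ['app/main.py'] -- no 'app/.git'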
def test_get_request(mock_get_request_or_fail):
    mock_get_request_or_fail.return_value = {"id": 42, "state": "complete"}

    assert utils.get_request(42) == {"id": 42, "state": "complete"}

    mock_get_request_or_fail.assert_called_once_with(
        42,
        connect_error_msg="The connection failed while getting request 42: {exc}",
        status_error_msg="Failed to get request 42: {exc}",
    )
def process_fetched_sources(request_id):
    """Generate files for the request and update it with the package and dependency counts."""
    request = get_request(request_id)
    create_bundle_archive(request_id, request.get("flags", []))
    save_bundle_archive_checksum(request_id)
    data = aggregate_packages_data(request_id, request["pkg_managers"])
    packages_count = len(data.packages)
    dependencies_count = len(data.all_dependencies)
    set_packages_and_deps_counts(request_id, packages_count, dependencies_count)
    return packages_count, dependencies_count
def fetch_pip_source(request_id, package_configs=None):
    """
    Resolve and fetch pip dependencies for a given request.

    :param int request_id: the Cachito request ID this is for
    :param list package_configs: the list of optional package configurations submitted by the user
    """
    version_output = run_cmd(["pip", "--version"], {})
    log.info(f"pip version: {version_output.strip()}")

    validate_pip_config()

    bundle_dir: RequestBundleDir = RequestBundleDir(request_id)

    log.info("Configuring Nexus for pip for the request %d", request_id)
    set_request_state(request_id, "in_progress", "Configuring Nexus for pip")
    pip_repo_name = get_pypi_hosted_repo_name(request_id)
    raw_repo_name = get_raw_hosted_repo_name(request_id)
    prepare_nexus_for_pip_request(pip_repo_name, raw_repo_name)

    log.info("Fetching dependencies for request %d", request_id)
    package_configs = package_configs or [{}]
    packages_data = []
    requirement_file_paths = []
    for pkg_cfg in package_configs:
        pkg_path = pkg_cfg.get("path", ".")
        source_dir = bundle_dir.app_subpath(pkg_path).source_dir
        set_request_state(
            request_id,
            "in_progress",
            f"Fetching dependencies at the {pkg_path!r} directory",
        )
        request = get_request(request_id)
        pkg_and_deps_info = resolve_pip(
            source_dir,
            request,
            requirement_files=pkg_cfg.get("requirements_files"),
            build_requirement_files=pkg_cfg.get("requirements_build_files"),
        )

        # defer custom requirement files creation to use the Nexus password in the URLs
        for requirement_file_path in pkg_and_deps_info.pop("requirements"):
            requirement_file_paths.append(requirement_file_path)

        # defer DB operations to use the Nexus password in the env vars
        packages_data.append(pkg_and_deps_info)

    log.info("Finalizing the Nexus configuration for pip for the request %d", request_id)
    set_request_state(request_id, "in_progress", "Finalizing the Nexus configuration for pip")
    username = get_hosted_repositories_username(request_id)
    password = finalize_nexus_for_pip_request(pip_repo_name, raw_repo_name, username)

    # Set environment variables and config files
    pip_config_files = []
    for requirement_file_path in requirement_file_paths:
        custom_requirement_file = _get_custom_requirement_config_file(
            requirement_file_path, bundle_dir.source_root_dir, raw_repo_name, username, password
        )
        if custom_requirement_file:
            pip_config_files.append(custom_requirement_file)

    raw_url = get_pypi_hosted_repo_url(request_id)
    pip_index_url = get_index_url(raw_url, username, password)
    env_vars = {"PIP_INDEX_URL": {"value": pip_index_url, "kind": "literal"}}
    ca_cert = nexus.get_ca_cert()
    if ca_cert:
        ca_cert_path = os.path.join("app", "package-index-ca.pem")
        env_vars["PIP_CERT"] = {"value": ca_cert_path, "kind": "path"}
        pip_config_files.append(make_base64_config_file(ca_cert, ca_cert_path))

    worker_config = get_worker_config()
    env_vars.update(worker_config.cachito_default_environment_variables.get("pip", {}))

    update_request_env_vars(request_id, env_vars)

    packages_json_data = PackagesData()
    for pkg_cfg, pkg_data in zip(package_configs, packages_data):
        pkg_subpath = os.path.normpath(pkg_cfg.get("path", "."))
        pkg_info = pkg_data["package"]
        pkg_deps = pkg_data["dependencies"]
        packages_json_data.add_package(pkg_info, pkg_subpath, pkg_deps)
    packages_json_data.write_to_file(bundle_dir.pip_packages_data)

    if pip_config_files:
        update_request_with_config_files(request_id, pip_config_files)
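# Illustration (hypothetical): get_index_url is not shown here; it presumably embeds the
# Nexus credentials into the hosted repository URL so pip can authenticate through
# PIP_INDEX_URL. A standalone sketch of that idea -- the real helper may differ in the
# exact path it appends and in how it escapes the credentials:
from urllib.parse import quote, urlsplit, urlunsplit


def build_index_url(repo_url: str, username: str, password: str) -> str:
    """Embed credentials into a PyPI-style index URL (illustrative only)."""
    scheme, netloc, path, query, fragment = urlsplit(repo_url)
    netloc = f"{quote(username, safe='')}:{quote(password, safe='')}@{netloc}"
    path = path.rstrip("/") + "/simple"  # pip expects the "simple" API endpoint
    return urlunsplit((scheme, netloc, path, query, fragment))


# build_index_url("https://nexus.example.local/repository/cachito-pip-hosted-1", "user", "pass")
# -> "https://user:pass@nexus.example.local/repository/cachito-pip-hosted-1/simple"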
def fetch_yarn_source(request_id: int, package_configs: List[dict] = None):
    """
    Resolve and fetch yarn dependencies for a given request.

    This function uses the Python ``os.path`` library to manipulate paths, so the path to the
    configuration files may differ in format based on the system the Cachito worker is deployed on
    (i.e. Linux vs Windows).

    :param int request_id: the Cachito request ID this is for
    :param list package_configs: the list of optional package configurations submitted by the user
    :raise CachitoError: if the task fails
    """
    if package_configs is None:
        package_configs = []

    validate_yarn_config()

    bundle_dir = RequestBundleDir(request_id)
    subpaths = [os.path.normpath(c["path"]) for c in package_configs if c.get("path")]

    if not subpaths:
        # Default to the root of the application source
        subpaths = [os.curdir]

    _verify_yarn_files(bundle_dir, subpaths)

    log.info("Configuring Nexus for yarn for the request %d", request_id)
    set_request_state(request_id, "in_progress", "Configuring Nexus for yarn")
    repo_name = get_yarn_proxy_repo_name(request_id)
    prepare_nexus_for_js_request(repo_name)

    yarn_config_files = []
    downloaded_deps = set()
    for i, subpath in enumerate(subpaths):
        log.info(
            "Fetching the yarn dependencies for request %d in subpath %s", request_id, subpath
        )
        set_request_state(
            request_id,
            "in_progress",
            f'Fetching the yarn dependencies at the "{subpath}" directory',
        )
        request = get_request(request_id)
        package_source_path = str(bundle_dir.app_subpath(subpath).source_dir)
        try:
            package_and_deps_info = resolve_yarn(
                package_source_path, request, skip_deps=downloaded_deps
            )
        except CachitoError:
            log.exception("Failed to fetch yarn dependencies for request %d", request_id)
            raise

        downloaded_deps = downloaded_deps | package_and_deps_info["downloaded_deps"]

        log.info(
            "Generating the yarn configuration files for request %d in subpath %s",
            request_id,
            subpath,
        )
        remote_package_source_path = os.path.normpath(os.path.join("app", subpath))
        if package_and_deps_info["package.json"]:
            package_json_str = json.dumps(package_and_deps_info["package.json"], indent=2)
            package_json_path = os.path.join(remote_package_source_path, "package.json")
            yarn_config_files.append(make_base64_config_file(package_json_str, package_json_path))

        if package_and_deps_info["lock_file"]:
            yarn_lock_str = _yarn_lock_to_str(package_and_deps_info["lock_file"])
            yarn_lock_path = os.path.join(remote_package_source_path, "yarn.lock")
            yarn_config_files.append(make_base64_config_file(yarn_lock_str, yarn_lock_path))

        if i == 0:
            default_env = get_worker_config().cachito_default_environment_variables
            env_vars = {**default_env.get("npm", {}), **default_env.get("yarn", {})}
        else:
            env_vars = None

        package = package_and_deps_info["package"]
        update_request_with_package(request_id, package, env_vars, package_subpath=subpath)
        update_request_with_deps(request_id, package, package_and_deps_info["deps"])

    log.info("Finalizing the Nexus configuration for yarn for the request %d", request_id)
    set_request_state(request_id, "in_progress", "Finalizing the Nexus configuration for yarn")
    username = get_yarn_proxy_repo_username(request_id)
    password = finalize_nexus_for_js_request(username, repo_name)

    log.info("Generating the .npmrc file(s)")
    proxy_repo_url = get_yarn_proxy_repo_url(request_id)
    yarn_config_files.extend(
        generate_npmrc_config_files(proxy_repo_url, username, password, subpaths)
    )

    log.info("Adding empty .yarnrc file(s)")
    for subpath in subpaths:
        yarnrc_path = os.path.normpath(os.path.join("app", subpath, ".yarnrc"))
        yarn_config_files.append(make_base64_config_file("", yarnrc_path))

    update_request_with_config_files(request_id, yarn_config_files)
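# Illustration (hypothetical): make_base64_config_file is defined elsewhere; judging by how
# the generated package.json, yarn.lock, and empty .yarnrc payloads are used, it pairs a
# destination path with base64-encoded file content. A stand-in sketch of that shape -- the
# real helper's return structure may differ:
import base64


def make_b64_config_file(content: str, dest_relpath: str) -> dict:
    """Pair a destination path with base64-encoded content (illustrative stand-in)."""
    return {
        "content": base64.b64encode(content.encode("utf-8")).decode("ascii"),
        "path": dest_relpath,
        "type": "base64",
    }


# make_b64_config_file("", "app/.yarnrc")
# -> {'content': '', 'path': 'app/.yarnrc', 'type': 'base64'}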
def fetch_gomod_source(request_id, dep_replacements=None, package_configs=None):
    """
    Resolve and fetch gomod dependencies for a given request.

    :param int request_id: the Cachito request ID this is for
    :param list dep_replacements: dependency replacements with the keys "name" and "version";
        only supported with a single path
    :param list package_configs: the list of optional package configurations submitted by the user
    :raises CachitoError: if the dependencies could not be retrieved
    """
    version_output = run_cmd(["go", "version"], {})
    log.info(f"Go version: {version_output.strip()}")

    config = get_worker_config()
    if package_configs is None:
        package_configs = []

    bundle_dir: RequestBundleDir = RequestBundleDir(request_id)
    subpaths = [os.path.normpath(c["path"]) for c in package_configs if c.get("path")]

    if not subpaths:
        # Default to the root of the application source
        subpaths = [os.curdir]

    invalid_gomod_files = _find_missing_gomod_files(bundle_dir, subpaths)

    if invalid_gomod_files:
        invalid_files_print = "; ".join(invalid_gomod_files)
        file_suffix = "s" if len(invalid_gomod_files) > 1 else ""

        # A missing go.mod file is only tolerated when a single path is referenced
        if config.cachito_gomod_ignore_missing_gomod_file and len(subpaths) == 1:
            log.warning("go.mod file missing for request at %s", invalid_files_print)
            return

        raise CachitoError(
            "The {} file{} must be present for the gomod package manager".format(
                invalid_files_print, file_suffix
            )
        )

    if len(subpaths) > 1 and dep_replacements:
        raise CachitoError(
            "Dependency replacements are only supported for a single go module path."
        )

    env_vars = {
        "GOCACHE": {"value": "deps/gomod", "kind": "path"},
        "GOPATH": {"value": "deps/gomod", "kind": "path"},
        "GOMODCACHE": {"value": "deps/gomod/pkg/mod", "kind": "path"},
    }
    env_vars.update(config.cachito_default_environment_variables.get("gomod", {}))
    update_request_env_vars(request_id, env_vars)

    packages_json_data = PackagesData()

    for i, subpath in enumerate(subpaths):
        log.info(
            "Fetching the gomod dependencies for request %d in subpath %s", request_id, subpath
        )
        set_request_state(
            request_id,
            "in_progress",
            f'Fetching the gomod dependencies at the "{subpath}" directory',
        )
        request = get_request(request_id)
        gomod_source_path = str(bundle_dir.app_subpath(subpath).source_dir)
        try:
            gomod = resolve_gomod(
                gomod_source_path, request, dep_replacements, bundle_dir.source_dir
            )
        except CachitoError:
            log.exception("Failed to fetch gomod dependencies for request %d", request_id)
            raise

        module_info = gomod["module"]

        packages_json_data.add_package(module_info, subpath, gomod["module_deps"])

        # add package deps
        for package in gomod["packages"]:
            pkg_info = package["pkg"]
            package_subpath = _package_subpath(module_info["name"], pkg_info["name"], subpath)
            packages_json_data.add_package(pkg_info, package_subpath, package.get("pkg_deps", []))

    packages_json_data.write_to_file(bundle_dir.gomod_packages_data)
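# Illustration (hypothetical): _package_subpath is not shown here; it apparently maps a Go
# package back to its location in the repository by taking the part of its import path that
# is relative to the module and joining it onto the module's subpath. A sketch of that idea
# -- the real helper may handle edge cases differently:
import os


def package_subpath(module_name: str, package_name: str, module_subpath: str) -> str:
    """Derive a package's repo-relative path from its import path (illustrative only)."""
    if package_name == module_name:
        relative = "."
    else:
        relative = os.path.relpath(package_name, module_name)
    return os.path.normpath(os.path.join(module_subpath, relative))


# package_subpath("example.com/org/mod", "example.com/org/mod/internal/api", ".")
# -> "internal/api"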
def fetch_npm_source(request_id, package_configs=None):
    """
    Resolve and fetch npm dependencies for a given request.

    This function uses the Python ``os.path`` library to manipulate paths, so the path to the
    configuration files may differ in format based on the system the Cachito worker is deployed on
    (i.e. Linux vs Windows).

    :param int request_id: the Cachito request ID this is for
    :param list package_configs: the list of optional package configurations submitted by the user
    :raise CachitoError: if the task fails
    """
    version_output = run_cmd(["npm", "--version"], {})
    log.info(f"npm version: {version_output.strip()}")
    version_output = run_cmd(["node", "--version"], {})
    log.info(f"Node.js version: {version_output.strip()}")

    if package_configs is None:
        package_configs = []

    validate_npm_config()

    bundle_dir: RequestBundleDir = RequestBundleDir(request_id)
    log.debug("Checking if the application source uses npm")
    subpaths = [os.path.normpath(c["path"]) for c in package_configs if c.get("path")]

    if not subpaths:
        # Default to the root of the application source
        subpaths = [os.curdir]

    _verify_npm_files(bundle_dir, subpaths)

    log.info("Configuring Nexus for npm for the request %d", request_id)
    set_request_state(request_id, "in_progress", "Configuring Nexus for npm")
    repo_name = get_npm_proxy_repo_name(request_id)
    prepare_nexus_for_js_request(repo_name)

    npm_config_files = []
    downloaded_deps = set()
    packages_json_data = PackagesData()
    for i, subpath in enumerate(subpaths):
        log.info(
            "Fetching the npm dependencies for request %d in subpath %s", request_id, subpath
        )
        set_request_state(
            request_id,
            "in_progress",
            f'Fetching the npm dependencies at the "{subpath}" directory',
        )
        request = get_request(request_id)
        package_source_path = str(bundle_dir.app_subpath(subpath).source_dir)
        try:
            package_and_deps_info = resolve_npm(
                package_source_path, request, skip_deps=downloaded_deps
            )
        except CachitoError:
            log.exception("Failed to fetch npm dependencies for request %d", request_id)
            raise

        downloaded_deps = downloaded_deps | package_and_deps_info["downloaded_deps"]

        log.info(
            "Generating the npm configuration files for request %d in subpath %s",
            request_id,
            subpath,
        )
        remote_package_source_path = os.path.normpath(os.path.join("app", subpath))
        if package_and_deps_info["package.json"]:
            package_json_str = json.dumps(package_and_deps_info["package.json"], indent=2)
            package_json_path = os.path.join(remote_package_source_path, "package.json")
            npm_config_files.append(make_base64_config_file(package_json_str, package_json_path))

        if package_and_deps_info["lock_file"]:
            package_lock_str = json.dumps(package_and_deps_info["lock_file"], indent=2)
            lock_file_name = package_and_deps_info["lock_file_name"]
            lock_file_path = os.path.join(remote_package_source_path, lock_file_name)
            npm_config_files.append(make_base64_config_file(package_lock_str, lock_file_path))

        if i == 0:
            env_vars = get_worker_config().cachito_default_environment_variables.get("npm", {})
            update_request_env_vars(request_id, env_vars)

        pkg_info = package_and_deps_info["package"]
        pkg_deps = package_and_deps_info["deps"]
        packages_json_data.add_package(pkg_info, subpath, pkg_deps)

    packages_json_data.write_to_file(bundle_dir.npm_packages_data)

    log.info("Finalizing the Nexus configuration for npm for the request %d", request_id)
    set_request_state(request_id, "in_progress", "Finalizing the Nexus configuration for npm")
    username = get_npm_proxy_username(request_id)
    password = finalize_nexus_for_js_request(username, repo_name)

    log.info("Generating the .npmrc file(s)")
    proxy_repo_url = get_npm_proxy_repo_url(request_id)
    npm_config_files.extend(
        generate_npmrc_config_files(proxy_repo_url, username, password, subpaths)
    )

    update_request_with_config_files(request_id, npm_config_files)
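# Illustration (hypothetical): generate_npmrc_config_files is defined elsewhere; an .npmrc
# that points npm at an authenticated Nexus proxy typically carries at least a registry URL
# plus auth settings. A purely illustrative rendering of one such file -- the real helper
# may emit additional settings (e.g. email, retry tuning) and use a different auth scheme:
import base64


def render_npmrc(proxy_repo_url: str, username: str, password: str) -> str:
    """Render a minimal .npmrc for an authenticated registry (illustrative only)."""
    auth = base64.b64encode(f"{username}:{password}".encode("utf-8")).decode("ascii")
    return f"registry={proxy_repo_url}\n_auth={auth}\nalways-auth=true\n"


# render_npmrc("https://nexus.example.local/repository/cachito-npm-1/", "user", "pass")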