def test_post_retries_and_raises_error(self, requests_mock, client):
    """A POST that keeps returning 500 is retried `tries` times, then raises SnykError."""
    requests_mock.post("https://snyk.io/api/v1/sample", status_code=500, json={})
    retrying_client = SnykClient("token", tries=4, delay=0, backoff=2)
    with pytest.raises(SnykError):
        retrying_client.post("sample", {})
    # One call per attempt: the initial try plus three retries.
    assert requests_mock.call_count == 4
def run_assesment(self):
    """Test this package@version against the Snyk API and record the issues.

    Appends flattened issue dicts to ``self.vulnerabilities`` and
    ``self.license_issues``, then sorts each list most-severe first.

    NOTE(review): method name is misspelled ("assessment") but is kept
    unchanged for compatibility with existing callers.
    """
    token_path = get_default_token_path()
    token = get_token(token_path)
    org_id = get_org_id(token_path)
    org = SnykClient(token).organizations.get(org_id)

    print("Testing package %s@%s" % (self.name, self.version))
    print("=====================")
    result = org.test_python(self.name, self.version)

    # Sort key: position in the severity ranking (critical first).
    severity_order = ['critical', 'high', 'medium', 'low']
    by_severity = lambda issue: severity_order.index(issue['severity'])

    vulns = result.issues.vulnerabilities
    if vulns is not None:
        for vuln in vulns:
            self.vulnerabilities.append({
                'id': vuln.id,
                'title': vuln.title,
                'url': vuln.url,
                'package': vuln.package,
                'version': vuln.version,
                'identifiers': vuln.identifiers['CVE'],
                'severity': vuln.severity,
                'language': vuln.language,
                'package_manager': vuln.packageManager,
                'is_upgradable': vuln.isUpgradable,
                'is_patchable': vuln.isPatchable,
            })
        self.vulnerabilities = sorted(self.vulnerabilities, key=by_severity)

    license_issues = result.issues.licenses
    if license_issues is not None:
        for issue in license_issues:
            self.license_issues.append({
                'id': issue.id,
                'title': issue.title,
                'url': issue.url,
                'package': issue.package,
                'version': issue.version,
                'severity': issue.severity,
                'is_ignored': issue.isIgnored,
                'is_patched': issue.isPatched,
                'language': issue.language,
                'priority_score': issue.priorityScore,
                'package_manager': issue.packageManager,
            })
        self.license_issues = sorted(self.license_issues, key=by_severity)
help="GitHub integration ID - get this from Settings->Integrations", required=True, ) parser.add_argument( "--manifestFiles", nargs="*", help= 'Leave this empty to import all or make a list of paths/to/build/files (ex "build.gradle" or "someModule/pom.xml")', required=False, ) return parser.parse_args() snyk_token = get_token("snyk-api-token") args = parse_command_line_args() org_id = args.orgId github_org = args.githubOrg repo_name = args.repoName github_integration_id = args.githubIntegrationId manifest_files = args.manifestFiles client = SnykClient(snyk_token) org = client.organizations.get(org_id) integration = org.integrations.get(github_integration_id) if manifest_files: job = integration.import_git(github_org, repo_name, files=manifest_files) else: job = integration.import_git(github_org, repo_name) print(job)
import argparse
from snyk import SnykClient
from utils import get_default_token_path, get_token


def parse_command_line_args():
    """Parse the command-line arguments for this example script."""
    parser = argparse.ArgumentParser(description="Snyk API Examples")
    parser.add_argument(
        "--orgId", type=str, help="The Snyk Organisation ID", required=True
    )
    return parser.parse_args()


snyk_token = get_token(get_default_token_path())
args = parse_command_line_args()

# List every project in the organisation with its issue counts by severity.
client = SnykClient(token=snyk_token)
for proj in client.organizations.get(args.orgId).projects.all():
    print("\nProject name: %s" % proj.name)
    print(" Issues Found:")
    print(" High : %s" % proj.issueCountsBySeverity.high)
    print(" Medium: %s" % proj.issueCountsBySeverity.medium)
    print(" Low : %s" % proj.issueCountsBySeverity.low)
def test_overriding_user_agent(self):
    """A custom user_agent passed to the client ends up in the request headers."""
    custom_agent = "test"
    headers = SnykClient("token", user_agent=custom_agent).api_headers
    assert headers["User-Agent"] == custom_agent
def test_overriding_api_url(self):
    """A custom API base URL passed positionally replaces the default."""
    custom_url = "https://notsnyk.io/api/v1"
    overridden = SnykClient("token", custom_url)
    assert overridden.api_url == custom_url
def client(self):
    """Fixture: a SnykClient built with a dummy token."""
    dummy_token = "token"
    return SnykClient(dummy_token)
def _get_org(api_token: str, org_id: str) -> OrganizationManager:
    """Return the organization handle for *org_id* using the given API token."""
    snyk = SnykClient(api_token)
    orgs = snyk.organizations
    return orgs.get(org_id)
COMPLETED_PROJECT_IMPORTS_FILE.write("org,project,success\n") REPOS_SKIPPED_ON_ERROR_FILE = open( "%s_repos-skipped-on-error.csv" % LOG_PREFIX, "w") REPOS_SKIPPED_ON_ERROR_FILE.write("org,repo,status\n") UPDATED_PROJECT_BRANCHES_FILE = open( "%s_updated-project-branches.csv" % LOG_PREFIX, "w") UPDATED_PROJECT_BRANCHES_FILE.write("org,project_name,project_id,new_branch\n") UPDATE_PROJECT_BRANCHES_ERRORS_FILE = open( "%s_update-project-branches-errors.csv" % LOG_PREFIX, "w") UPDATE_PROJECT_BRANCHES_ERRORS_FILE.write( "org,project_name,project_id,new_branch\n") PENDING_REMOVAL_MAX_CHECKS = 45 PENDING_REMOVAL_CHECK_INTERVAL = 20 snyk_client = SnykClient(SNYK_TOKEN) if (GITHUB_TOKEN): GITHUB_ENABLED = True gh_client = create_github_client(GITHUB_TOKEN) if (GITHUB_ENTERPRISE_HOST): GITHUB_ENTERPRISE_ENABLED = True gh_enterprise_client = create_github_enterprise_client( GITHUB_ENTERPRISE_TOKEN, GITHUB_ENTERPRISE_HOST) def parse_command_line_args(): """Parse command-line arguments""" parser = argparse.ArgumentParser()
def get_project_tree(snyk_token, org_id, project_id):
    """Build a nested dependency tree, with license info, for a Snyk project.

    Fetches the project's dependency graph, flat dependency list, and license
    issues from the Snyk API, cross-references them, and returns one deeply
    nested structure rooted at the project node. Also prints verbose progress
    output along the way (this function is debug/exploration heavy).

    :param snyk_token: Snyk API token used to construct the client.
    :param org_id: Snyk organization ID that owns the project.
    :param project_id: ID of the project whose tree is built.
    :return: ``{"project": <root node>}`` where each node is a dict with
        pkgId, package_name, package_version, path, licenses, dependencies.
    """
    client = SnykClient(snyk_token)
    # Dependency graph: flat package list (pkgs) plus a node graph (graph).
    res_dep_graph = client.organizations.get(org_id).projects.get(project_id).dependency_graph
    print(res_dep_graph)
    print("\nPackages (Flat List):")
    for pkg in res_dep_graph.pkgs:
        print("%s | %s" % (pkg.id, pkg.info))
    all_packages = res_dep_graph.pkgs
    print("\nGraph data:")
    graph = res_dep_graph.graph
    root_node_id = graph.rootNodeId
    nodes = graph.nodes
    # Dump each graph node and its direct children (debug output only).
    for node in nodes:
        print("%s | %s" % (node.nodeId, node.pkgId))
        child_nodes = node.deps
        if len(child_nodes) > 0:
            for child_node in child_nodes:
                print(" - Child: %s" % child_node)
    print()
    # Convert the all_packages to a lookup map by package ID
    packages_lookup_map = {}
    for pkg in all_packages:
        print(pkg)
        package_id = pkg.id
        packages_lookup_map[package_id] = {
            "package_name": pkg.info.name,
            "package_version": pkg.info.version,
        }
    # Get licenses for all dependencies in the project
    lst_res_license = client.organizations.get(org_id).projects.get(project_id).dependencies.all()
    # make into a lookup table by package_id
    package_id_to_license_info_map = {}  # package_id -> { license info }
    for r in lst_res_license:
        package_id = r.id
        licenses = r.licenses
        package_id_to_license_info_map[package_id] = licenses
    print("\n\npackage_id_to_license_info_map:")
    print(package_id_to_license_info_map)
    # Get the license issues and then enhance package_id_to_license_info_map with the license classification or none
    issues = client.organizations.get(org_id).projects.get(project_id).issueset.all().issues
    license_issues_list = issues.licenses
    # map to lookup table
    license_issues_lookup_map = {license_issue.id: license_issue.severity for license_issue in license_issues_list}
    # Annotate each license object in place with a severity: the issue's
    # severity when the license ID matches a reported license issue,
    # otherwise the literal string "none".
    for pkgId, licensesList in package_id_to_license_info_map.items():
        for l in licensesList:
            license_id = l.id
            print(license_id)
            if license_id in license_issues_lookup_map:
                print("append additional info")
                severity = license_issues_lookup_map[license_id]
                l.severity = severity
            else:
                l.severity = "none"
    # lookup the license id in license_issues_lookup_map and see if there's an issue
    # add a 'classification' to the licenseInfo
    # Convert nodes to a dictionary by nodeId
    node_lookup_map = {}
    for node in nodes:
        node_id = node.nodeId
        package_id = node.pkgId
        node_lookup_map[node_id] = {
            "pkgId": node.pkgId,
            # TODO: Pull in the packages_name and package_version from packages_lookup_map
            "package_name": packages_lookup_map[package_id]["package_name"],
            "package_version": packages_lookup_map[package_id]["package_version"],
            "deps": node.deps,
        }
    print(node_lookup_map)
    root_node_package_id = node_lookup_map[root_node_id]["pkgId"]
    # Enhance node_lookup_map with license data from package_id_to_license_info_map
    for node_id in node_lookup_map.keys():
        if node_id == root_node_id:
            continue  # TODO: figure out how to get the project licenses
        print(node_id)
        # because if there's more than one node with the same package@version, it uses package@version|i to delinate them
        node_id_package_id = node_id.split("|")[0]
        licenses_info = package_id_to_license_info_map[node_id_package_id]
        node_lookup_map[node_id]["licenses"] = licenses_info
    # Now create a new structure based on node_lookup_map which is a deeply nested structure of the same data
    project_structured_tree = {}

    def get_node_to_append(node_id, base_path):
        # might make sense to rename get_dependencies
        # Recursively build the nested node dict for node_id; base_path is the
        # " > "-joined chain of pkgIds from the root down to the parent.
        # NOTE(review): assumes the dependency graph is acyclic — a cycle
        # would recurse forever; verify against the Snyk API guarantees.
        obj = node_lookup_map[node_id]
        pkgId = obj["pkgId"]
        print("node_id: %s" % pkgId)
        path = ""
        if not base_path:
            path = pkgId
        else:
            path = "%s > %s" % (base_path, pkgId)
        child_nodes = []
        for d in obj["deps"]:
            child_node_id = d["nodeId"]
            child_node = get_node_to_append(child_node_id, path)
            child_nodes.append(child_node)
        node_to_append = {
            "pkgId": pkgId,
            "package_name": obj["package_name"],
            "package_version": obj["package_version"],
            "path": path,
            # .get: the root node never had a "licenses" key added above.
            "licenses": obj.get("licenses"),
            "dependencies": child_nodes,
        }
        return node_to_append

    # print(root_node_package_id)
    project_dependencies_structure = get_node_to_append(root_node_id, "")
    project_structured_tree = {"project": project_dependencies_structure}
    return project_structured_tree