def test_golang_utils_with_invalid_pkg():
    """Test golang functions with an invalid pkg."""
    go_obj = GolangUtils("some_junk_name")
    assert go_obj.mode == "Not Found"
    assert not go_obj.get_all_versions()
    assert not go_obj.get_latest_version()
    assert not go_obj.get_gh_link()
    assert not go_obj.get_license()

    go_obj = GolangUtils("blitiri.com.ar/go/chasquid/internal/smtp")
    assert go_obj.mode == "Not Found"
    assert not go_obj.get_all_versions()
def get_versions_for_golang_package(package_name, latest=False, dual_values=False):
    """Get all versions for the given golang package.

    :param package_name: str, package name
    :param latest: bool, return only the latest version
    :param dual_values: bool, return both the version list and the latest version
    :return: list of versions by default; the latest version if `latest` is set;
        a dict with both if `dual_values` is set
    """
    go_util = GolangUtils(package_name)
    latest_ver = go_util.get_latest_version()
    all_ver = go_util.get_all_versions()
    if latest:
        return latest_ver
    if dual_values:
        return {'versions': all_ver, 'latest_version': latest_ver}
    return all_ver
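# A minimal usage sketch for the helper above. The package name and the
# commented return shapes are illustrative; actual values depend on what
# GolangUtils resolves at call time. Note that `latest` takes precedence
# over `dual_values` because it is checked first.
all_versions = get_versions_for_golang_package("github.com/gorilla/mux")
latest_only = get_versions_for_golang_package("github.com/gorilla/mux", latest=True)
both = get_versions_for_golang_package("github.com/gorilla/mux", dual_values=True)
# both -> {'versions': ['1.7.0', ...], 'latest_version': '1.8.0'}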
def execute(self, arguments):
    """Task code.

    :param arguments: dictionary with task arguments
    :return: dict, the task arguments (passed through)
    """
    result_data = {'status': 'success', 'details': []}
    metadata_dict = {
        'description': '',
        'name': arguments.get('name'),
        'version': arguments.get('version'),
        'ecosystem': arguments.get('ecosystem')
    }
    result_data['details'].append(metadata_dict)

    # Store the base file required by the data importer.
    store_data_to_s3(arguments,
                     StoragePool.get_connected_storage('S3InItData'),
                     result_data)

    # Get the license for the package.
    golang_util = GolangUtils(arguments.get('name'))
    declared_licenses = golang_util.get_license()
    metadata_dict['declared_licenses'] = declared_licenses if declared_licenses is not None else []

    # Store the metadata file consumed by the data importer.
    store_data_to_s3(arguments,
                     StoragePool.get_connected_storage('S3MetaData'),
                     result_data)
    return arguments
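# For reference, the document stored via S3MetaData above has this shape
# (values illustrative):
# {
#     'status': 'success',
#     'details': [{
#         'description': '',
#         'name': 'github.com/gorilla/mux',
#         'version': 'v1.8.0',
#         'ecosystem': 'golang',
#         'declared_licenses': ['BSD-3-Clause']
#     }]
# }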
def execute(self, arguments):
    """Task code.

    :param arguments: dictionary with task arguments
    :return: {}, results
    """
    result_data = {'status': 'unknown', 'summary': [], 'details': {}}

    if arguments['ecosystem'] == 'golang':
        go_obj = GolangUtils(arguments.get('name'))
        url = go_obj.get_gh_link()
        if url:
            arguments['url'] = url
        else:
            return result_data

    # For testing purposes, a repo may be specified at task creation time.
    if self._repo_name is None:
        # Otherwise, get the repo name from the URL.
        self._repo_name = self._get_repo_name(arguments['url'])
        if self._repo_name is None:
            # Not a GitHub-hosted project.
            return result_data

    repo_url = urljoin(self.configuration.GITHUB_API + "repos/", self._repo_name)
    repo = {}
    try:
        repo = get_response(repo_url)
        if not repo:
            raise NotABugFatalTaskError('Page not found on {}'.format(repo_url))
    except NotABugTaskError as e:
        logger.error(e)

    result_data['status'] = 'success'

    issues = {}
    # Get repository statistics.
    notoriety = self._get_repo_stats(repo)
    if notoriety:
        issues.update(notoriety)
    issues['topics'] = repo.get('topics', [])
    issues['license'] = repo.get('license') or {}

    # Get commit statistics.
    last_year_commits = self._get_last_years_commits(repo.get('url', ''))
    commits = {
        'last_year_commits': {
            'sum': sum(last_year_commits),
            'weekly': last_year_commits
        }
    }
    t_stamp = datetime.datetime.utcnow()
    refreshed_on = {'updated_on': t_stamp.strftime("%Y-%m-%d %H:%M:%S")}
    issues.update(refreshed_on)
    issues.update(commits)

    # Get PR/issue details for the previous month and year.
    gh_pr_issue_details = get_gh_pr_issue_counts(self._repo_name)
    issues.update(gh_pr_issue_details)
    result_data['details'] = issues

    # Store GitHub details consumed by the data importer.
    store_data_to_s3(arguments,
                     StoragePool.get_connected_storage('S3GitHub'),
                     result_data)
    return result_data
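# Hedged sketch, not the project's actual helper: _get_last_years_commits
# plausibly wraps GitHub's commit-activity statistics endpoint
# (GET /repos/{owner}/{repo}/stats/commit_activity), which returns up to 52
# weekly buckets, each with a 'total' count. The task above only needs the
# list of weekly totals, which it then sums.
def _get_last_years_commits_sketch(repo_api_url):
    activity = get_response(repo_api_url + "/stats/commit_activity")
    return [week.get('total', 0) for week in activity or []]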
def test_golang_utils_with_valid_pkg():
    """Test golang functions with a valid pkg."""
    go_obj = GolangUtils("github.com/grafana/grafana")
    assert go_obj.mode == "mod"
    assert "6.1.4" in go_obj.get_all_versions()
    assert "v6.1.4" not in go_obj.get_all_versions()
    assert go_obj.get_latest_version() is not None
    assert go_obj.get_gh_link() == "https://github.com/grafana/grafana"
    assert go_obj.get_license()[0] == "Apache-2.0"
    assert go_obj.get_module()[0] == "github.com/grafana/grafana"

    go_obj = GolangUtils("k8s.io/kubelet")
    assert go_obj.mode == "mod"
    # Clear the stored license so get_license() has to fetch it again.
    go_obj.license = None
    assert go_obj.get_license()[0] == "Apache-2.0"
    assert go_obj.get_gh_link() == "https://github.com/kubernetes/kubelet"
    assert go_obj.get_module()[0] == "k8s.io/kubelet"
    assert go_obj.get_module()[1] == "github.com/kubernetes/kubelet"
def test_golang_utils_with_valid_pkg2():
    """Test golang functions with a valid pkg."""
    go_obj = GolangUtils("github.com/containous/traefik/api")
    assert go_obj.mode == "pkg"
    assert "1.7.26" in go_obj.get_all_versions()
    assert go_obj.get_latest_version() is not None
    assert go_obj.get_license()[0] == "MIT"
    assert go_obj.get_gh_link() == "https://github.com/containous/traefik"

    go_obj = GolangUtils("github.com/ryanuber/columnize")
    assert go_obj.mode == "mod"
    assert go_obj.get_gh_link() == "https://github.com/ryanuber/columnize"
    assert go_obj.get_license()[0] == "MIT"

    go_obj = GolangUtils("github.com/qor/admin")
    assert go_obj.mode == "mod"
    assert go_obj.get_gh_link() == "https://github.com/qor/admin"
    assert str(go_obj.get_license()[0]) == "not legal advice"

    go_obj = GolangUtils("code.cloudfoundry.org/gorouter/proxy/handler")
    assert len(go_obj.get_license()) == 4
    assert go_obj.get_module()[0] == "code.cloudfoundry.org/gorouter"
    assert go_obj.get_module()[1] == "github.com/cloudfoundry/gorouter"
def _parse_golang_data(self, vuln_data, eco):
    """Parse data for the golang ecosystem."""
    total_vuln = 0
    delta_mode = self.helper.is_delta_mode_on()
    if len(vuln_data) != 0:
        for data in vuln_data:
            # If delta mode is on and the modificationTime doesn't fall in
            # the range, ignore the record.
            if delta_mode and not self._is_date_in_range(data['modificationTime']):
                logger.debug("No new updates for {}".format(data['id']))
                continue

            pkg = data['package']
            logger.debug("Fetching details for package: {}".format(pkg))
            try:
                if len(data['vulnerableVersions']) == 0:
                    # In this case, we use the data to remove the vuln from the graph.
                    logger.info("False positive found. {i}".format(i=data['id']))
                    self._add_data_for_false_positives(eco, data, pkg)
                    continue

                # Avoid fetching the same pkg data again and again when more
                # than one vuln exists for the same pkg.
                if pkg not in self.CVE_DATA[eco]:
                    go_utils = GolangUtils(pkg)
                    versions = go_utils.get_all_versions()
                    # Of the scraping options available, we can fetch details
                    # and create nodes only when versions are found; otherwise
                    # the record is ignored for the time being.
                    if versions:
                        # As we rely on web scraping, some calls may return None.
                        latest_version = go_utils.get_latest_version() or ""
                        gh_link = go_utils.get_gh_link() or ""
                        lic = go_utils.get_license() or []
                        mod = go_utils.get_module() or []
                        self.CVE_DATA[eco][pkg] = self._generate_default_cve_obj(
                            eco, pkg, versions, latest_version, gh_link, lic, mod)
                    else:
                        # TODO: decide what to do when no data is found.
                        logger.info("No details about the pkg {} found.".format(pkg))
                        self.SNYK_REPORT['details'][eco]['pvt_pkgs'][data['id']] = {
                            'name': pkg
                        }
                        continue

                logger.info("Processing {}".format(data['id']))
                versions = self.CVE_DATA[eco][pkg]['all_ver']
                data['ecosystem'] = eco
                data['moduleName'] = self.CVE_DATA[eco][pkg]['moduleName']
                data['affected'] = []
                vuln_versions = data['vulnerableVersions']
                if versions:
                    data['rules'] = self._get_version_rules(vuln_versions)
                    data['affected'] = self._get_affected_versions(data['rules'], versions)
                    # Create edges for the vuln only when affected versions are found.
                    if len(data['affected']) != 0:
                        self.CVE_DATA[eco][pkg]['affected'].extend(data['affected'])
                        self.CVE_DATA[eco][pkg]['affected'] = list(
                            set(self.CVE_DATA[eco][pkg]['affected']))
                    else:
                        # Make sure the vuln node still gets created so it can
                        # serve the commit-hash use case, even when no affected
                        # versions were found.
                        logger.info("No affected versions for {}".format(data['id']))

                total_vuln += 1
                data = self._set_additional_fields(data)
                data = self._set_commit_hash_rules(data, self.CVE_DATA[eco][pkg]['gh_link'])
                # In the Snyk feed, some golang vulns don't have this field.
                if 'vulnerableHashes' in data:
                    del data['vulnerableHashes']

                self.SNYK_REPORT['details'][eco]['ingest'][data['id']] = {
                    'name': pkg,
                    'premium': data['pvtVuln'],
                    'affected_version_count': len(data['affected']),
                    'status': "skipped"
                }
                self.DELTA_FEED[eco].append(data)
                self.CVE_DATA[eco][pkg]['vulnerabilities'].append(data)
            except ValueError:
                logger.error(
                    "Encountered a Value Error while trying to fetch versions for "
                    "{e} -> {p}".format(e=eco, p=pkg))
            except AttributeError:
                logger.error(
                    "Encountered an Attribute Error while trying to fetch details for "
                    "{e} -> {p}".format(e=eco, p=pkg))

    logger.info("{} Data".format(eco).center(50, '-'))
    logger.info("Total affected packages: {}".format(len(self.CVE_DATA[eco])))
    logger.info("Total vulnerabilities: {}".format(total_vuln))
    logger.debug(self.CVE_DATA[eco])
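# Hedged illustration, not the project's actual helpers: assuming Snyk
# expresses vulnerable ranges as semver constraint strings (e.g.
# ">=1.7.0 <1.7.26"), _get_version_rules would parse them into comparable
# bounds and _get_affected_versions would keep only the known versions that
# match some rule. A toy equivalent using the `semantic_version` package:
import semantic_version

def affected_versions_sketch(vuln_ranges, known_versions):
    # Space-separated constraints become comma-separated SimpleSpec clauses.
    specs = [semantic_version.SimpleSpec(r.replace(" ", ",")) for r in vuln_ranges]
    return [v for v in known_versions
            if any(semantic_version.Version(v) in s for s in specs)]

# e.g. affected_versions_sketch([">=1.7.0 <1.7.26"], ["1.6.0", "1.7.5", "1.7.26"])
# -> ["1.7.5"]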