def test_merge_dicts(self):
    """merge_dicts combines dicts left to right (later values win) and
    silently skips arguments that are not dicts (None, ints, ...)."""
    first = {'a': '1', 'b': '2'}
    second = {'a': '4', 'c': '3'}
    third = {'x': 'x', 'y': 'y', 'a': 'q'}

    # Two dicts: overlapping key 'a' takes the second dict's value.
    merged = merge_dicts(first, second)
    self.assertEqual(len(merged), 3)
    for key, expected in (('a', '4'), ('b', '2'), ('c', '3')):
        self.assertEqual(merged[key], expected)

    # Three dicts: the last dict's 'a' wins.
    merged = merge_dicts(first, second, third)
    self.assertEqual(len(merged), 5)
    for key, expected in (('a', 'q'), ('b', '2'), ('c', '3'), ('x', 'x'), ('y', 'y')):
        self.assertEqual(merged[key], expected)

    # A None argument is ignored.
    merged = merge_dicts(first, None)
    self.assertEqual(len(merged), 2)
    for key, expected in (('a', '1'), ('b', '2')):
        self.assertEqual(merged[key], expected)

    # A non-dict argument is ignored as well.
    merged = merge_dicts(first, 7)
    self.assertEqual(len(merged), 2)
    for key, expected in (('a', '1'), ('b', '2')):
        self.assertEqual(merged[key], expected)
def download_twistcli(self, cli_file_name):
    """Download the twistcli binary for the current OS and mark it executable.

    Args:
        cli_file_name: Filesystem path to write the downloaded binary to.

    Raises:
        requests.HTTPError: If the download request returns an error status.
    """
    os_type = platform.system().lower()
    headers = merge_dicts(
        get_default_get_headers(bc_integration.bc_source, bc_integration.bc_source_version),
        get_auth_header(bc_integration.bc_api_key))
    response = requests.request(
        'GET',
        f"{self.docker_image_scanning_base_url}/twistcli/download?os={os_type}",
        headers=headers)
    # Fail fast on an HTTP error; otherwise an error page would be written
    # to disk and chmodded executable as if it were the binary.
    response.raise_for_status()
    # Context manager closes the file handle deterministically
    # (the original open(...).write(...) leaked it until GC).
    with open(cli_file_name, 'wb') as cli_file:
        cli_file.write(response.content)
    # Add the owner execute bit on top of the file's current mode.
    st = os.stat(cli_file_name)
    os.chmod(cli_file_name, st.st_mode | stat.S_IEXEC)
    logging.debug('TwistCLI downloaded and has execute permission')
def report_results(self, docker_image_name, dockerfile_path, dockerfile_content, twistcli_scan_result):
    """Send the vulnerabilities from a twistcli image scan to the platform.

    Args:
        docker_image_name: Name of the scanned Docker image.
        dockerfile_path: Path of the Dockerfile that produced the image.
        dockerfile_content: Full text of that Dockerfile.
        twistcli_scan_result: Parsed twistcli JSON output; the first entry of
            its 'results' list is read for vulnerabilities.

    Raises:
        requests.HTTPError: If the report request returns an error status.
    """
    headers = merge_dicts(
        get_default_post_headers(bc_integration.bc_source, bc_integration.bc_source_version),
        get_auth_header(bc_integration.bc_api_key))
    # Normalize each raw twistcli finding into the report API's schema.
    # (Comprehension replaces the original list(map(lambda ...)) for readability.)
    vulnerabilities = [
        {
            'cveId': vuln['id'],
            'status': vuln.get('status', 'open'),
            'severity': vuln['severity'],
            'packageName': vuln['packageName'],
            'packageVersion': vuln['packageVersion'],
            'link': vuln['link'],
            'cvss': vuln.get('cvss'),
            'vector': vuln.get('vector'),
            'description': vuln.get('description'),
            'riskFactors': vuln.get('riskFactors'),
            # Prefer an explicit publish date; otherwise derive one from the
            # "published N days ago" counter relative to now.
            'publishedDate': vuln.get('publishedDate') or (datetime.now() - timedelta(days=vuln.get('publishedDays', 0))).isoformat()
        }
        for vuln in twistcli_scan_result['results'][0].get('vulnerabilities', [])
    ]
    payload = {
        'sourceId': bc_integration.repo_id,
        'branch': bc_integration.repo_branch,
        'dockerImageName': docker_image_name,
        'dockerFilePath': dockerfile_path,
        'dockerFileContent': dockerfile_content,
        'sourceType': bc_integration.bc_source,
        'vulnerabilities': vulnerabilities
    }
    response = requests.request(
        'POST',
        f"{self.docker_image_scanning_base_url}/report",
        headers=headers,
        json=payload)
    response.raise_for_status()
def _get_fixes_for_file(self, check_type, filename, file_contents, failed_checks):
    """Request suggested fixes for the failed checks of a single file.

    Args:
        check_type: Framework name sent as the 'framework' field.
        filename: Path of the scanned file.
        file_contents: Full text of the scanned file.
        failed_checks: Iterable of failed check records (each has .resource,
            .check_id, .file_line_range).

    Returns:
        The first entry of the fixes API response, or None when the response
        is empty or not a list.

    Raises:
        Exception: If the fixes request returns a non-200 status.
    """
    # Map each failed check onto the fixes API's error schema.
    errors = [
        {
            'resourceId': check.resource,
            'policyId': self.bc_integration.ckv_to_bc_id_mapping[check.check_id],
            'startLine': check.file_line_range[0],
            'endLine': check.file_line_range[1]
        }
        for check in failed_checks
    ]
    payload = {
        'filePath': filename,
        'fileContent': file_contents,
        'framework': check_type,
        'errors': errors
    }
    headers = merge_dicts(
        get_default_post_headers(self.bc_integration.bc_source, self.bc_integration.bc_source_version),
        get_auth_header(self.bc_integration.bc_api_key))
    response = requests.request('POST', self.fixes_url, headers=headers, json=payload)
    if response.status_code != 200:
        error_message = extract_error_message(response)
        raise Exception(
            f'Get fixes request failed with response code {response.status_code}: {error_message}'
        )
    logging.debug(f'Response from fixes API: {response.content}')
    fixes = json.loads(response.content) if response.content else None
    # isinstance is the idiomatic type check (the original used
    # `type(fixes) != list`, flagged by PEP 8 / E721).
    if not fixes or not isinstance(fixes, list):
        logging.warning(
            'Unexpected fixes API response for file (unknown); skipping fixes for this file'
        )
        return None
    return fixes[0]
def _get_suppressions_from_platform(self):
    """Fetch suppressions from the platform, keeping only the ones that
    apply to this run, and annotate each with its checkov policy id.

    Returns:
        List of suppression dicts, each with a 'checkovPolicyId' key added.

    Raises:
        Exception: If the suppressions request returns a non-200 status.
    """
    request_headers = merge_dicts(
        get_default_get_headers(self.bc_integration.bc_source, self.bc_integration.bc_source_version),
        get_auth_header(self.bc_integration.bc_api_key))
    response = requests.request('GET', self.suppressions_url, headers=request_headers)
    if response.status_code != 200:
        error_message = extract_error_message(response)
        raise Exception(f'Get suppressions request failed with response code {response.status_code}: {error_message}')
    # Single pass: drop custom / non-checkov policies and translate the
    # platform policy id to the checkov id in the same loop.
    valid_suppressions = []
    for suppression in json.loads(response.content):
        if not self._suppression_valid_for_run(suppression):
            continue
        suppression['checkovPolicyId'] = self.bc_integration.bc_id_mapping[suppression['policyId']]
        valid_suppressions.append(suppression)
    return valid_suppressions
def _get_fixes_for_file(self, filename, file_contents, failed_checks):
    """Request suggested fixes for the failed checks of a single file.

    Args:
        filename: Path of the scanned file.
        file_contents: Full text of the scanned file.
        failed_checks: Iterable of failed check records (each has .resource,
            .check_id, .file_line_range).

    Returns:
        The first entry of the fixes API response, or None when the response
        is empty or not a list.

    Raises:
        Exception: If the fixes request returns a non-200 status.
    """
    # Map each failed check onto the fixes API's error schema.
    errors = [
        {
            'resourceId': check.resource,
            'policyId': self.bc_integration.ckv_to_bc_id_mapping[check.check_id],
            'startLine': check.file_line_range[0],
            'endLine': check.file_line_range[1]
        }
        for check in failed_checks
    ]
    payload = {
        'filePath': filename,
        'fileContent': file_contents,
        'errors': errors
    }
    headers = merge_dicts(
        get_default_post_headers(self.bc_integration.bc_source, self.bc_integration.bc_source_version),
        get_auth_header(self.bc_integration.bc_api_key))
    response = requests.request('POST', self.fixes_url, headers=headers, json=payload)
    if response.status_code != 200:
        error_message = extract_error_message(response)
        raise Exception(
            f'Get fixes request failed with response code {response.status_code}: {error_message}'
        )
    fixes = json.loads(response.content) if response.content else None
    # Guard against an empty or non-list response: the original `fixes[0]`
    # raised IndexError/TypeError here. Returning None matches the other
    # _get_fixes_for_file implementation in this codebase.
    if not fixes or not isinstance(fixes, list):
        return None
    return fixes[0]
def get_default_post_headers(client, client_version):
    """Build the default POST headers, combining the static API POST headers
    with the per-client version headers.

    Args:
        client: Client (source) identifier.
        client_version: Version string of that client.

    Returns:
        Merged header dict.
    """
    version_headers = get_version_headers(client, client_version)
    return merge_dicts(DEV_API_POST_HEADERS, version_headers)