def log_scan_summary_info(scan_summaries):
    scan_status_view = [{
        'status': ss['status'],
        'createdAt': ss['createdAt'],
        'statusMessage': ss.get('statusMessage'),
        'scanId': object_id(ss),
    } for ss in scan_summaries]
    logging.debug("Scan statuses: {}".format(scan_status_view))
Example #2
    def _get_scans(self, code_location_obj):
        # TODO: Scans are returned in reverse chronological order, but should we be safe and sort here?
        scan_summaries = self.hub.get_codelocation_scan_summaries(code_location_obj=code_location_obj).get("items", [])
        for scan_summary in scan_summaries:
            scan_id = object_id(scan_summary)
            url = self.hub.get_apibase() + "/v1/scans/{}".format(scan_id)
            response = self.hub.execute_get(url)
            scan_details = response.json() if response.status_code == 200 else None
            scan_summary['scan_details'] = scan_details

        # Check that they all share the same code (scan) location name
        names = set([s['scan_details']['name'] for s in scan_summaries])
        assert len(names) == 1, "Uh oh, all the scans for a given code (scan) location should have the same name"

        return scan_summaries
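
Regarding the TODO at the top of _get_scans about relying on the server's ordering, a defensive sort is a one-liner; this is only a sketch and assumes each summary carries a createdAt timestamp, as the other examples here show:

# Hedged sketch: sort oldest-to-newest by createdAt rather than trusting server order.
scan_summaries = sorted(scan_summaries, key=lambda ss: ss['createdAt'])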
Example #3
if args.name:
    parameters = {'q': 'name:{}'.format(args.name)}
else:
    parameters = {}

if args.unmapped:
    code_locations = hub.get_codelocations(limit=10000,
                                           unmapped=True,
                                           parameters=parameters)
else:
    code_locations = hub.get_codelocations(limit=10000, parameters=parameters)

code_locations = code_locations.get('items', [])

if args.scan_summaries:
    for code_location in code_locations:
        scan_summaries = hub.get_codelocation_scan_summaries(
            code_location_obj=code_location).get('items', [])
        code_location['scan_summaries'] = scan_summaries
        if args.scan_details:
            for scan in scan_summaries:
                scan_id = object_id(scan)
                # This uses a private API endpoint that can, and probably will, break in the future
                # HUB-15330 is the (internal) JIRA ticket # asking that the information in this endpoint
                # be made part of the public API
                url = hub.get_apibase() + "/v1/scans/{}".format(scan_id)
                scan_details = hub.execute_get(url).json()
                scan['scan_details'] = scan_details

print(json.dumps(code_locations))
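
Because the comment above flags /v1/scans as a private endpoint that may break, a defensive variant of the detail fetch can check the status code before parsing the body, mirroring the guard used in the _get_scans example; this is a sketch, not part of the original script:

# Hedged sketch: only parse the response when the private endpoint answered 200.
response = hub.execute_get(url)
scan['scan_details'] = response.json() if response.status_code == 200 else None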
Example #4
if args.save_dt:
    with open(".last_run", "w") as f:
        f.write(datetime.now().isoformat())

logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s',
                    stream=sys.stderr,
                    level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)

hub = HubInstance()

project = hub.get_project_by_name(args.project_name)

version = hub.get_version_by_name(project, args.version)
version_id = object_id(version)

vulnerable_components_url = hub.get_link(
    version, "vulnerable-components") + "?limit=9999"
custom_headers = {
    'Accept': 'application/vnd.blackducksoftware.bill-of-materials-6+json'
}
response = hub.execute_get(vulnerable_components_url,
                           custom_headers=custom_headers)
vulnerable_bom_components = response.json().get('items', [])

for i, vuln in enumerate(vulnerable_bom_components):
    source = vuln['vulnerabilityWithRemediation']['source']
    vuln_name = vuln['vulnerabilityWithRemediation']['vulnerabilityName']

    # Retrieve additional details about the vulnerability
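    # Hedged sketch only: the excerpt stops here, and the detail endpoint below is an
    # assumption (vulnerability details are typically served from /api/vulnerabilities/<name>),
    # not something this script confirms.
    vuln_url = hub.get_apibase() + "/vulnerabilities/{}".format(vuln_name)
    vuln_response = hub.execute_get(vuln_url)
    vuln_details = vuln_response.json() if vuln_response.status_code == 200 else None
    logging.debug("Vulnerability {} ({}): details {}".format(
        vuln_name, source, "retrieved" if vuln_details else "unavailable"))
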
def get_scan_summaries(scan_location, snippet_scan=False):
    '''Find and return scan summary information and project-version information for the given scan location (name)
    '''
    scan_locations = hub.get_codelocations(
        parameters={'q': 'name:{}'.format(scan_location)})
    all_scan_summaries = []
    most_recent_scan_summaries = []
    all_project_version_ids = set()
    all_project_version_urls = set()
    # Iterate over each matching code (scan) location; the loop variable is named
    # code_location so it does not shadow the scan_location name parameter.
    for code_location in scan_locations.get('items', []):
        mapped_project_version = code_location.get('mappedProjectVersion')

        if mapped_project_version:
            mapped_project_version_id = mapped_project_version.split('/')[-1]
            all_project_version_ids.add(mapped_project_version_id)
            all_project_version_urls.add(mapped_project_version)

        scan_location_id = object_id(code_location)

        scan_summaries = hub.get_codelocation_scan_summaries(scan_location_id)
        scan_summaries = scan_summaries.get('items', [])
        scan_summaries = sorted(scan_summaries, key=lambda ss: ss['updatedAt'])

        all_scan_summaries.extend(scan_summaries)
        if snippet_scan:
            # When using a snippet scan we need to look at the two most recent
            most_recent = scan_summaries[-2:]
        else:
            # Otherwise, we can look at the single most recent
            most_recent = scan_summaries[-1:]
        most_recent_scan_summaries.extend(most_recent)

    all_scan_summary_ids = list(
        set([object_id(ss) for ss in all_scan_summaries]))
    most_recent_scan_summary_ids = list(
        set([object_id(ss) for ss in most_recent_scan_summaries]))

    if all_project_version_ids:
        assert len(
            all_project_version_ids
        ) == 1, "There must be one, and only one, project-version that this scan location is mapped to"

        project_version_id = list(all_project_version_ids)[0]
    else:
        project_version_id = None

    if all_project_version_urls:
        assert len(
            all_project_version_urls
        ) == 1, "There must be one, and only one, project-version that this scan location is mapped to"
        project_version_url = list(all_project_version_urls)[0]
    else:
        project_version_url = None

    # To find the right jobs we use the "oldest" createdAt dt from the
    # pertinent scan summaries
    later_than = min([ss['createdAt'] for ss in most_recent_scan_summaries])

    return {
        'all_scan_summaries': all_scan_summaries,
        'all_scan_summary_ids': all_scan_summary_ids,
        'most_recent_scan_summaries': most_recent_scan_summaries,
        'most_recent_scan_summary_ids': most_recent_scan_summary_ids,
        'project_version_id': project_version_id,
        'project_version_url': project_version_url,
        'later_than': later_than
    }
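
A short usage sketch for get_scan_summaries; the scan location name used here is only a placeholder:

# Hedged sketch: report what the helper found for a hypothetical scan location name.
info = get_scan_summaries("my-scan-location", snippet_scan=False)
logging.debug("Mapped project-version URL: {}".format(info['project_version_url']))
logging.debug("Most recent scan summary ids: {}".format(info['most_recent_scan_summary_ids']))
logging.debug("Jobs of interest were created at or after {}".format(info['later_than']))
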
def main():
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)

    target_project = hub.get_project_by_name(args.project_name)
    if not target_project:
        logging.error("Project {} not found.".format(args.project_name))
        sys.exit(1)

    logging.debug("Found target project {}".format(args.project_name))

    target_version = hub.get_version_by_name(target_project, args.version_name)
    protex_import_version = hub.get_version_by_name(target_project,
                                                    args.protex_import_version)

    if not target_version:
        logging.debug("Version {} not found for project {}".format(
            args.version_name, args.project_name))
        sys.exit(1)

    logging.debug("Found target version {} in project {}".format(
        args.version_name, args.project_name))

    if not protex_import_version:
        logging.debug(
            "Protex import version {} not found for project {}".format(
                args.protex_import_version, args.project_name))
        sys.exit(1)

    logging.debug("Found protex import version {} in project {}".format(
        args.protex_import_version, args.project_name))

    target_project_id = object_id(target_project)
    target_version_id = object_id(target_version)
    protex_import_version_id = object_id(protex_import_version)

    ########
    #
    # Get Snippets from the target version and map them by their source path
    #
    ########
    snippet_data = hub.get_snippet_bom_entries(target_project_id,
                                               target_version_id)
    # TODO: len(snippet_data['items']) showing 43 snippet matches when in the GUI it shows 44, hmmm...
    snippet_path_map = get_snippet_path_map(snippet_data)

    logging.debug("***********Project Snippets ***************************")
    logging.debug("# Snippet Files: {}".format(snippet_data['totalCount']))
    logging.debug("Snippet file list:")
    for snippet_name_and_file_path in get_snippet_names_and_file_paths(
            snippet_data):
        logging.debug(snippet_name_and_file_path)

    #######
    #
    # Confirm the snippets if the files they go with match files in the Protex BOM components (i.e. the source
    # files within the Protex BOM)
    #
    #######
    protex_components = hub.get_version_components(protex_import_version)
    total_snippets_confirmed = 0

    for protex_bom_component in protex_components['items']:
        total_snippets_confirmed = total_snippets_confirmed + process_bom_component(
            target_project_id, target_version_id, protex_import_version_id, protex_bom_component, snippet_path_map,
            override_snippet_component=args.override_snippet_component, use_best_match=args.use_best_match)

    logging.debug(
        "Confirmed: {} snippets for project {}, version {}, using Protex BOM import {}"
        .format(total_snippets_confirmed, args.project_name, args.version_name,
                args.protex_import_version))