def process(output, dependencies):
    """Return the correct value for cpu_socket_count output.

    Prefers the dmidecode populated-socket count, falls back to the
    /proc/cpuinfo socket count, and finally to the raw cpu_count.

    :param output: the fact's own output (unused; the count is derived
        entirely from dependent facts)
    :param dependencies: dict of previously processed dependent facts
    :returns: the socket count as an int, or None if undeterminable
    """
    # process the internal dmi cpu socket count result
    dmi_cpu_socket_count = \
        dependencies.get('internal_cpu_socket_count_dmi')
    if dmi_cpu_socket_count and dmi_cpu_socket_count.get('rc') == 0:
        # stdout_lines may be absent/None even when rc == 0; guard so
        # iteration does not raise TypeError
        dmi_status = dmi_cpu_socket_count.get('stdout_lines') or []
        dmi_count = 0
        for status in dmi_status:
            # dmidecode marks an occupied socket as "Status: Populated"
            if 'status: populated' in status.lower():
                dmi_count += 1
        if dmi_count > 0:
            return dmi_count

    # process the cpuinfo socket count as a fallback
    cpuinfo_cpu_socket_count = \
        dependencies.get('internal_cpu_socket_count_cpuinfo')
    if cpuinfo_cpu_socket_count and \
            cpuinfo_cpu_socket_count.get('rc') == 0 and \
            cpuinfo_cpu_socket_count.get('stdout_lines'):
        cpuinfo_count = cpuinfo_cpu_socket_count.get('stdout_lines', [0])[0]
        if is_int(cpuinfo_count):
            # counts of 0 or above 8 are treated as implausible and skipped
            if convert_to_int(cpuinfo_count) != 0 and \
                    convert_to_int(cpuinfo_count) <= 8:
                return convert_to_int(cpuinfo_count)

    # assign the socket_count to the cpu_count as a last resort
    cpu_count = dependencies.get('cpu_count')
    if is_int(cpu_count):
        return convert_to_int(cpu_count)
    return None
def deployments(request, pk=None):
    """Lookup and return a deployment system report."""
    if not is_int(pk):
        error = {
            'report_id': [_(messages.COMMON_ID_INV)]
        }
        raise ValidationError(error)
    validate_filters(request.query_params)
    filters = filter_keys(request.query_params)
    report = get_object_or_404(DeploymentsReport.objects.all(), report_id=pk)
    if report.status != DeploymentsReport.STATUS_COMPLETE:
        detail = ('Deployment report %s could not be created.'
                  ' See server logs.' % report.details_report.id)
        return Response({'detail': detail},
                        status=status.HTTP_424_FAILED_DEPENDENCY)
    # grouped report takes precedence, then filtered, then full cached
    group_count = request.query_params.get('group_count', None)
    if group_count:
        return Response(build_grouped_report(report, group_count))
    if filters:
        return Response(build_filtered_report(report, filters))
    return Response(build_cached_json_report(report))
def reports(request, pk=None):
    """Lookup and return reports."""
    if pk is not None and not is_int(pk):
        raise ValidationError({'report_id': [_(messages.COMMON_ID_INV)]})
    reports_dict = {'report_id': pk}

    # details
    details_data = get_object_or_404(DetailsReport.objects.all(),
                                     report_id=pk)
    json_details = DetailsReportSerializer(details_data).data
    # never expose the cached CSV blob in the combined report
    json_details.pop('cached_csv', None)
    reports_dict['details_json'] = json_details

    # deployments
    deployments_data = get_object_or_404(DeploymentsReport.objects.all(),
                                         report_id=pk)
    if deployments_data.status != DeploymentsReport.STATUS_COMPLETE:
        deployments_id = deployments_data.details_report.id
        detail = ('Deployment report %s could not be created.'
                  ' See server logs.' % deployments_id)
        return Response({'detail': detail},
                        status=status.HTTP_424_FAILED_DEPENDENCY)
    reports_dict['deployments_json'] = \
        build_cached_json_report(deployments_data)
    return Response(reports_dict)
def deployments(request, pk=None):
    """Lookup and return a deployment system report."""
    if not is_int(pk):
        raise ValidationError({'report_id': [_(messages.COMMON_ID_INV)]})
    mask_report = request.query_params.get('mask', False)
    report = get_object_or_404(DeploymentsReport.objects.all(), report_id=pk)
    if report.status != DeploymentsReport.STATUS_COMPLETE:
        detail = ('Deployment report %s could not be created.'
                  ' See server logs.' % report.details_report.id)
        return Response({'detail': detail},
                        status=status.HTTP_424_FAILED_DEPENDENCY)
    deployments_report = build_cached_json_report(report, mask_report)
    if deployments_report:
        return Response(deployments_report)
    # masking only works on reports new enough to carry raw facts
    error = {'detail':
             'Deployments report %s could not be masked. '
             'Report version %s. '
             'Rerun the scan to generate a masked deployments report.'
             % (report.id, report.report_version)}
    return Response(error, status=status.HTTP_428_PRECONDITION_REQUIRED)
def insights(request, pk=None):
    """Lookup and return a insights system report.

    :param request: the incoming request
    :param pk: the report id to look up
    :returns: a Response with the sliced insights report, a 424 when the
        deployments report is incomplete, or a 404 when no insights
        report was cached
    """
    if not is_int(pk):
        error = {'report_id': [_(messages.COMMON_ID_INV)]}
        raise ValidationError(error)
    report = get_object_or_404(DeploymentsReport.objects.all(), report_id=pk)
    if report.status != DeploymentsReport.STATUS_COMPLETE:
        return Response(
            {
                'detail': 'Insights report %s could not be created. '
                          'See server logs.' % report.details_report.id
            },
            status=status.HTTP_424_FAILED_DEPENDENCY)
    if report.cached_insights:
        # cached_insights is stored as a JSON string; decode before slicing
        return _create_report_slices(report,
                                     json.loads(report.cached_insights))
    # fix: original message concatenated '...%s.' + 'See server logs.'
    # without a separating space
    error = {
        'detail': 'Insights report %s was not generated. Report version %s. '
                  'See server logs.' % (report.id, report.report_version)
    }
    return Response(error, status=status.HTTP_404_NOT_FOUND)
def to_internal_value(self, data):
    """Create internal value."""
    if not is_int(data):
        raise ValidationError(_(messages.SJ_SCAN_IDS_INV))
    scan_id = convert_to_int(data)
    # resolve the id to a real Scan; None means it does not exist
    scan = Scan.objects.filter(id=scan_id).first()
    if scan is None:
        raise ValidationError(_(messages.SJ_SCAN_DO_NOT_EXIST % scan_id))
    return scan
def process(output, dependencies):
    """Process internal_xen_guest output."""
    lines = output.get('stdout_lines')
    if not isinstance(lines, list):
        return False
    # drop blank lines; only a positive integer first value means xen guest
    non_empty = [entry for entry in lines if entry]
    if not non_empty:
        return False
    first = non_empty[0]
    return is_int(first) and convert_to_int(first) > 0
def retrieve(self, request, pk=None):  # pylint: disable=unused-argument
    """Get a host credential."""
    # reject missing or non-integer ids before hitting the database
    if not pk or not is_int(pk):
        raise ValidationError({'id': [_(messages.COMMON_ID_INV)]})
    host_cred = get_object_or_404(self.queryset, pk=pk)
    cred = format_credential(CredentialSerializer(host_cred).data)
    return Response(cred)
def retrieve(self, request, pk=None):
    """Get a scan job."""
    # reject missing or non-integer ids before hitting the database
    if not pk or not is_int(pk):
        raise ValidationError({'id': [_(messages.COMMON_ID_INV)]})
    scan = get_object_or_404(self.queryset, pk=pk)
    json_scan = expand_scanjob(ScanJobSerializer(scan).data)
    return Response(json_scan)
def details(request, pk=None):
    """Lookup and return a details system report."""
    if pk is not None and not is_int(pk):
        error = {'report_id': [_(messages.COMMON_ID_INV)]}
        raise ValidationError(error)
    detail_data = get_object_or_404(DetailsReport.objects.all(),
                                    report_id=pk)
    json_details = DetailsReportSerializer(detail_data).data
    # keep cached_csv only when the client explicitly accepts text/csv
    http_accept = request.META.get('HTTP_ACCEPT')
    if http_accept and 'text/csv' not in http_accept:
        json_details.pop('cached_csv', None)
    return Response(json_details)
def deployments(request, pk=None):
    """Lookup and return a deployment system report."""
    validate_filters(request.query_params)
    if pk is not None and not is_int(pk):
        error = {'report_id': [_(messages.COMMON_ID_INV)]}
        raise ValidationError(error)
    report = build_report(pk, request.query_params)
    if report is None:
        return Response(status=status.HTTP_404_NOT_FOUND)
    return Response(report)
def process(output, dependencies):
    """Return the correct value for cpu core count output.

    Resolution order: vmware's cpu_count, then the product of
    cores-per-socket and socket count, then cpu_count (halved when
    hyperthreading is on).

    :param output: the fact's own output (unused; the count is derived
        entirely from dependent facts)
    :param dependencies: dict of previously processed dependent facts
    :returns: the core count as an int, or None if undeterminable
    """
    cpu_socket_count = dependencies.get('cpu_socket_count')
    cpu_core_per_socket = dependencies.get('cpu_core_per_socket')
    cpu_count = dependencies.get('cpu_count')
    cpu_hyperthreading = dependencies.get('cpu_hyperthreading')
    virt_type = dependencies.get('virt_type')

    # if the virt_type is vmware and cpu_count exists
    # then return cpu_count
    if virt_type == 'vmware' and is_int(cpu_count):
        return convert_to_int(cpu_count)

    # if the cpu_core_per_socket & the cpu_socket_count are present
    # return the product of the two
    if is_int(cpu_core_per_socket) and is_int(cpu_socket_count):
        return convert_to_int(cpu_core_per_socket) * \
            convert_to_int(cpu_socket_count)

    if is_int(cpu_count):
        if cpu_hyperthreading:
            # hyperthreading doubles the logical count; floor-divide so
            # the core count stays an int (true division yielded a float)
            return convert_to_int(cpu_count) // 2
        # if there is no threading, return the cpu count
        return convert_to_int(cpu_count)
    return None
def retrieve(self, request, pk=None):  # pylint: disable=unused-argument
    """Get a source."""
    # reject missing or non-integer ids before hitting the database
    if not pk or not is_int(pk):
        raise ValidationError({'id': [_(messages.COMMON_ID_INV)]})
    source = get_object_or_404(self.queryset, pk=pk)
    json_source = SourceSerializer(source).data
    # Create expanded host cred JSON
    format_source(json_source)
    return Response(json_source)
def validate_merge_jobs(self, data): """Validate merge jobs.""" # pylint: disable=no-self-use error = { 'jobs': [] } if not isinstance(data, dict) or \ data.get('jobs') is None: error.get('jobs').append(_(messages.SJ_MERGE_JOB_REQUIRED)) raise ValidationError(error) job_ids = data.get('jobs') if not isinstance(job_ids, list): error.get('jobs').append(_(messages.SJ_MERGE_JOB_NOT_LIST)) raise ValidationError(error) job_id_count = len(job_ids) if job_id_count < 2: error.get('jobs').append(_(messages.SJ_MERGE_JOB_TOO_SHORT)) raise ValidationError(error) non_integer_values = [ job_id for job_id in job_ids if not is_int(job_id)] if bool(non_integer_values): error.get('jobs').append(_(messages.SJ_MERGE_JOB_NOT_INT)) raise ValidationError(error) job_ids = [int(job_id) for job_id in job_ids] unique_id_count = len(set(job_ids)) if unique_id_count != job_id_count: error.get('jobs').append(_(messages.SJ_MERGE_JOB_NOT_UNIQUE)) raise ValidationError(error) jobs = ScanJob.objects.filter(pk__in=job_ids).order_by('-end_time') actual_job_ids = [job.id for job in jobs] missing_jobs = set(job_ids) - set(actual_job_ids) if bool(missing_jobs): message = _(messages.SJ_MERGE_JOB_NOT_FOUND) % ( ', '.join([str(i) for i in missing_jobs])) error.get('jobs').append(message) raise ValidationError(error) incomplete_jobs = [job.id for job in jobs if job.status not in [ ScanTask.FAILED, ScanTask.COMPLETED]] if bool(incomplete_jobs): jobs_str = ( ', '.join([str(i) for i in incomplete_jobs])) message = _(messages.SJ_MERGE_JOB_NOT_COMPLETE % jobs_str) error.get('jobs').append(message) raise ValidationError(error) return jobs.filter(scan_type=ScanTask.SCAN_TYPE_INSPECT)
def deployments(request, pk=None):
    """Lookup and return a deployment system report."""
    if not is_int(pk):
        raise ValidationError({'report_id': [_(messages.COMMON_ID_INV)]})
    report = get_object_or_404(DeploymentsReport.objects.all(), report_id=pk)
    if report.status == DeploymentsReport.STATUS_COMPLETE:
        return Response(build_cached_json_report(report))
    # report exists but fingerprinting has not finished (or failed)
    detail = ('Deployment report %s could not be created.'
              ' See server logs.' % report.details_report.id)
    return Response({'detail': detail},
                    status=status.HTTP_424_FAILED_DEPENDENCY)
def cancel(self, request, pk=None):
    """Cancel the running scan."""
    if not pk or not is_int(pk):
        raise ValidationError({'id': [_(messages.COMMON_ID_INV)]})
    scan = get_object_or_404(self.queryset, pk=pk)
    # a scan in any terminal state cannot be cancelled
    terminal_states = (ScanTask.COMPLETED,
                       ScanTask.FAILED,
                       ScanTask.CANCELED)
    if scan.status in terminal_states:
        err_msg = _(messages.NO_CANCEL)
        return JsonResponse({'non_field_errors': [err_msg]}, status=400)
    scan.cancel()
    cancel_scan.send(sender=self.__class__, instance=scan)
    json_scan = ScanJobSerializer(scan).data
    expand_scanjob(json_scan)
    return Response(json_scan, status=200)
def _validate_merge_report(data): """Validate merge reports. :param data: dict with list of report ids :returns QuerySet DetailsReport """ # pylint: disable=no-self-use error = { 'reports': [] } if not isinstance(data, dict) or \ data.get('reports') is None: error.get('reports').append(_(messages.REPORT_MERGE_REQUIRED)) raise ValidationError(error) report_ids = data.get('reports') if not isinstance(report_ids, list): error.get('reports').append(_(messages.REPORT_MERGE_NOT_LIST)) raise ValidationError(error) report_id_count = len(report_ids) if report_id_count < 2: error.get('reports').append(_(messages.REPORT_MERGE_TOO_SHORT)) raise ValidationError(error) non_integer_values = [ report_id for report_id in report_ids if not is_int(report_id)] if bool(non_integer_values): error.get('reports').append(_(messages.REPORT_MERGE_NOT_INT)) raise ValidationError(error) report_ids = [int(report_id) for report_id in report_ids] unique_id_count = len(set(report_ids)) if unique_id_count != report_id_count: error.get('reports').append(_(messages.REPORT_MERGE_NOT_UNIQUE)) raise ValidationError(error) reports = DetailsReport.objects.filter(pk__in=report_ids).order_by('-id') actual_report_ids = [report.id for report in reports] missing_reports = set(report_ids) - set(actual_report_ids) if bool(missing_reports): message = _(messages.REPORT_MERGE_NOT_FOUND) % ( ', '.join([str(i) for i in missing_reports])) error.get('reports').append(message) raise ValidationError(error) return reports
def handle_result_filters(request):
    """Get the associated filter parameters or return validation errors.

    @param request: The incoming request
    @returns: A tuple of ordering filter, status filter,
    and source_id filter
    """
    valid_ordering_filters = ['name', 'status', '-name', '-status']
    valid_status_filters = ['success', 'failed', 'unreachable']
    ordering_param = 'ordering'
    status_param = 'status'
    source_id_param = 'source_id'

    ordering_filter = request.query_params.get(ordering_param, 'status')
    status_filter = request.query_params.get(status_param, None)
    source_id_filter = request.query_params.get(source_id_param, None)

    # validate query params
    if ordering_filter and ordering_filter not in valid_ordering_filters:
        valid_list = ', '.join(valid_ordering_filters)
        message = _(messages.QUERY_PARAM_INVALID %
                    (ordering_param, valid_list))
        raise ValidationError({'detail': [message]})
    if status_filter and status_filter not in valid_status_filters:
        valid_list = ', '.join(valid_status_filters)
        message = _(messages.QUERY_PARAM_INVALID %
                    (status_param, valid_list))
        raise ValidationError({'detail': [message]})
    if source_id_filter and not is_int(source_id_filter):
        message = _(messages.QUERY_PARAM_INVALID %
                    (source_id_param, 'source identifiers'))
        raise ValidationError({'detail': [message]})
    return (ordering_filter, status_filter, source_id_filter)
def restart(self, request, pk=None):
    """Restart a paused scan."""
    if not pk or not is_int(pk):
        raise ValidationError({'id': [_(messages.COMMON_ID_INV)]})
    scan = get_object_or_404(self.queryset, pk=pk)
    # guard clauses: only a paused scan may be restarted
    if scan.status == ScanTask.RUNNING:
        err_msg = _(messages.ALREADY_RUNNING)
        return JsonResponse({'non_field_errors': [err_msg]}, status=400)
    if scan.status != ScanTask.PAUSED:
        err_msg = _(messages.NO_RESTART)
        return JsonResponse({'non_field_errors': [err_msg]}, status=400)
    scan.restart()
    restart_scan.send(sender=self.__class__, instance=scan)
    json_scan = ScanJobSerializer(scan).data
    expand_scanjob(json_scan)
    return Response(json_scan, status=200)
def pause(self, request, pk=None):
    """Pause the running scan."""
    if not pk or not is_int(pk):
        raise ValidationError({'id': [_(messages.COMMON_ID_INV)]})
    scan = get_object_or_404(self.queryset, pk=pk)
    # guard clauses: only a running scan may be paused
    if scan.status == ScanTask.PAUSED:
        err_msg = _(messages.ALREADY_PAUSED)
        return JsonResponse({'non_field_errors': [err_msg]}, status=400)
    if scan.status != ScanTask.RUNNING:
        err_msg = _(messages.NO_PAUSE)
        return JsonResponse({'non_field_errors': [err_msg]}, status=400)
    # Kill job before changing job state
    pause_scan.send(sender=self.__class__, instance=scan)
    scan.pause()
    json_scan = expand_scanjob(ScanJobSerializer(scan).data)
    return Response(json_scan, status=200)
def deployments(request, pk=None):
    """Lookup and return a deployment system report."""
    validate_filters(request.query_params)
    if pk is not None and not is_int(pk):
        error = {
            'report_id': [_(messages.COMMON_ID_INV)]
        }
        raise ValidationError(error)
    report = build_report(pk, request.query_params)
    if report is not None:
        return Response(report)
    # no report yet: 404 if the fact collection is unknown, 424 otherwise
    report_fact_collection = get_object_or_404(
        FactCollection.objects.all(), pk=pk)
    detail = ('Deployment report %s could not be created.'
              ' See server logs.' % report_fact_collection.id)
    return Response({'detail': detail},
                    status=status.HTTP_424_FAILED_DEPENDENCY)
def reports(request, pk=None):
    """Lookup and return reports."""
    mask_report = request.query_params.get('mask', False)
    if pk is not None and not is_int(pk):
        raise ValidationError({'report_id': [_(messages.COMMON_ID_INV)]})
    reports_dict = {'report_id': pk}

    # details
    details_data = get_object_or_404(DetailsReport.objects.all(),
                                     report_id=pk)
    json_details = DetailsReportSerializer(details_data).data
    if validate_query_param_bool(mask_report):
        json_details = mask_details_facts(json_details)
    # never expose the cached CSV blob in the combined report
    json_details.pop('cached_csv', None)
    reports_dict['details_json'] = json_details

    # deployments
    deployments_data = get_object_or_404(DeploymentsReport.objects.all(),
                                         report_id=pk)
    if deployments_data.status != DeploymentsReport.STATUS_COMPLETE:
        deployments_id = deployments_data.details_report.id
        detail = ('Deployment report %s could not be created.'
                  ' See server logs.' % deployments_id)
        return Response({'detail': detail},
                        status=status.HTTP_424_FAILED_DEPENDENCY)
    deployments_report = build_cached_json_report(deployments_data,
                                                  mask_report)
    if not deployments_report:
        # masking only works on reports new enough to carry raw facts
        error = {
            'detail': 'Deployments report %s could not be masked. '
                      'Rerun the scan to generate a masked deployments '
                      'report.' % (pk)
        }
        return Response(error,
                        status=status.HTTP_428_PRECONDITION_REQUIRED)
    reports_dict['deployments_json'] = deployments_report
    return Response(reports_dict)
def jobs(request, pk=None): """Get the jobs of a scan.""" # pylint: disable=invalid-name if pk is not None: if not is_int(pk): return Response(status=status.HTTP_404_NOT_FOUND) result = [] scan = get_object_or_404(Scan.objects.all(), pk=pk) if request.method == 'GET': job_queryset = get_job_queryset_query_set(scan, request.query_params) paginator = StandardResultsSetPagination() page = paginator.paginate_queryset(job_queryset, request) if page is not None: serializer = ScanJobSerializer(page, many=True) for scan in serializer.data: json_scan = expand_scanjob(scan) result.append(json_scan) return paginator.get_paginated_response(serializer.data) for job in job_queryset: job_serializer = ScanJobSerializer(job) job_json = job_serializer.data job_json = expand_scanjob(job_serializer.data) result.append(job_json) return Response(result) else: job_data = {} job_data['scan'] = pk job_serializer = ScanJobSerializer(data=job_data) job_serializer.is_valid(raise_exception=True) job_serializer.save() scanjob_obj = ScanJob.objects.get(pk=job_serializer.data['id']) scanjob_obj.log_current_status() start_scan.send(sender=ScanViewSet.__class__, instance=scanjob_obj) return Response(job_serializer.data, status=status.HTTP_201_CREATED)
def get(self, request):
    """Lookup and return all system reports.

    With no fact_collection_id query param, builds a report for every
    distinct fact collection; with one, validates it and builds a
    single report.

    :param request: the incoming request
    :returns: a Response with the report list, a single report, or a
        404 when the requested single report cannot be built
    :raises ValidationError: if fact_collection_id is not an integer
    """
    self.validate_filters(request.query_params)
    fact_collection_id = request.query_params.get(
        'fact_collection_id', None)
    if fact_collection_id is None:
        collection_report_list = []
        # Find all distinct fact_collection_ids
        fact_collection_value_set = SystemFingerprint.objects.all().values(
            'fact_collection_id').distinct()
        # For each id, build a report and add to results array
        # (note: fact_collection_id is reassigned on each iteration)
        for fact_collection_value in fact_collection_value_set:
            fact_collection_id = fact_collection_value[
                'fact_collection_id']
            report = self.build_report(fact_collection_id,
                                       request.query_params)
            if report is not None:
                collection_report_list.append(report)
            else:
                # fingerprint rows can outlive their collection; log
                # and skip rather than fail the whole listing
                logger.error(
                    'System Fingerprint with fact_collection_id ' +
                    '%s no longer exists', fact_collection_id)
        return Response(collection_report_list)
    else:
        if not is_int(fact_collection_id):
            error = {
                'fact_collection_id': [_(messages.COMMON_ID_INV)]
            }
            raise ValidationError(error)
        report = self.build_report(fact_collection_id,
                                   request.query_params)
        if report is not None:
            return Response(report)
        return Response(status=status.HTTP_404_NOT_FOUND)