Example 1
    def render(self, reports_dict, media_type=None, renderer_context=None):
        """Render all reports as gzip.

        :param reports_dict: dict expected to carry report_id,
            details_json and deployments_json
        :param media_type: unused (renderer API)
        :param renderer_context: unused (renderer API)
        :returns: tar buffer holding the json and csv report files, or
            None when any required piece is missing or cannot be built
        """
        # pylint: disable=arguments-differ,unused-argument,too-many-locals
        # pylint: disable=too-many-branches,too-many-statements
        if not bool(reports_dict):
            return None

        files_data = dict()

        report_id = reports_dict.get('report_id')
        # Collect Json Data
        details_json = reports_dict.get('details_json')
        deployments_json = reports_dict.get('deployments_json')
        if any(value is None
               for value in [report_id, details_json, deployments_json]):
            return None
        details_name = create_filename('details', 'json', report_id)
        files_data[details_name] = details_json
        deployments_name = create_filename('deployments', 'json', report_id)
        files_data[deployments_name] = deployments_json
        # Collect CSV Data
        details_csv = create_details_csv(details_json)
        deployments_csv = create_deployments_csv(deployments_json)
        # BUG FIX: previously this re-checked deployments_json (already
        # validated above) instead of deployments_csv, so a failed CSV
        # conversion slipped a None payload into the tarball.
        if any(value is None for value in [details_csv, deployments_csv]):
            return None
        details_csv_name = create_filename('details', 'csv', report_id)
        files_data[details_csv_name] = details_csv
        deployments_csv_name = create_filename('deployments', 'csv', report_id)
        files_data[deployments_csv_name] = deployments_csv
        tar_buffer = create_tar_buffer(files_data)
        if tar_buffer is None:
            logger.error(messages.REPORTS_TAR_ERROR)
            return None
        return tar_buffer
    def render(self,
               data,
               accepted_media_type=None,
               renderer_context=None):
        """Render all reports as gzip.

        :param data: dict carrying report_id, details_json and
            deployments_json
        :param accepted_media_type: unused (renderer API)
        :param renderer_context: unused (renderer API)
        :returns: tar buffer with json, csv and SHA256SUM files, or
            None when required data is missing or packaging fails
        """
        # pylint: disable=too-many-locals
        reports_dict = data
        if not reports_dict:
            return None
        report_id = reports_dict.get('report_id')
        # Gather the JSON payloads; every piece is mandatory.
        details_json = reports_dict.get('details_json')
        deployments_json = reports_dict.get('deployments_json')
        if report_id is None or details_json is None \
                or deployments_json is None:
            return None

        # Derive the CSV payloads from the JSON data.
        details_csv = create_details_csv(details_json)
        deployments_csv = create_deployments_csv(deployments_json)
        if details_csv is None or deployments_csv is None:
            return None

        # Hash each payload so consumers can verify file integrity;
        # insertion order here fixes the line order in SHA256SUM.
        hashes = {
            'details.json': create_hash(details_json, 'json'),
            'deployments.json': create_hash(deployments_json, 'json'),
            'details.csv': create_hash(details_csv, 'csv'),
            'deployments.csv': create_hash(deployments_csv, 'csv'),
        }
        sha256sum_content = '\n'.join(
            '%s  %s' % (digest, short_name)
            for short_name, digest in hashes.items())

        # Map archive member names to their data.
        files_data = {
            create_filename('details', 'json', report_id): details_json,
            create_filename('deployments', 'json', report_id):
                deployments_json,
            create_filename('details', 'csv', report_id): details_csv,
            create_filename('deployments', 'csv', report_id):
                deployments_csv,
            create_filename('SHA256SUM', None, report_id): sha256sum_content,
        }

        tar_buffer = create_tar_buffer(files_data)
        if tar_buffer is None:
            logger.error(messages.REPORTS_TAR_ERROR)
            return None
        return tar_buffer
Example 3
def _create_report_slices(report, insights_hosts):
    """Split insights hosts into size-limited report slices.

    :param report: DeploymentReport used to create slices
    :param insights_hosts: list of insights host JSON objects
    :returns: Response mapping file names to report meta-data and
        report slice payloads
    """
    # pylint: disable=too-many-locals
    slice_size_limit = settings.QPC_INSIGHTS_REPORT_SLICE_SIZE
    number_hosts = len(insights_hosts)

    # Spread hosts as evenly as possible while keeping every slice at
    # or under the configured size limit.
    if number_hosts == 0:
        # Guard: no hosts means no slices. The old code divided by the
        # (zero) slice count and raised ZeroDivisionError here.
        hosts_per_slice = 1  # any positive step; the loop below won't run
    elif number_hosts % slice_size_limit:
        number_of_slices = number_hosts // slice_size_limit + 1
        hosts_per_slice = number_hosts // number_of_slices + 1
    else:
        number_of_slices = number_hosts // slice_size_limit
        hosts_per_slice = number_hosts // number_of_slices

    insights_report_pieces = {}
    metadata_report_slices = {}
    source_metadata = {
        'report_platform_id': str(report.report_platform_id),
        'report_type': 'insights',
        'report_version': report.report_version,
        'qpc_server_report_id': report.id,
        'qpc_server_version': server_version(),
        'qpc_server_id': ServerInformation.create_or_retreive_server_id()
    }
    metadata = {
        'report_id': str(report.report_platform_id),
        'host_inventory_api_version': '1.0',
        'source': 'qpc',
        'source_metadata': source_metadata,
        'report_slices': metadata_report_slices
    }
    insights_report_pieces[create_filename('metadata', 'json',
                                           report.id)] = metadata
    # BUG FIX: the old loop advanced list_slice_end with
    # hosts_per_slice * (i + 2), where i was the list OFFSET rather than
    # the slice index; with three or more slices this produced one
    # oversized slice followed by empty ones. Slice directly by offset.
    for slice_start in range(0, number_hosts, hosts_per_slice):
        hosts = insights_hosts[slice_start:slice_start + hosts_per_slice]

        report_slice_id = str(uuid.uuid4())
        report_slice_filename = create_filename(report_slice_id, 'json',
                                                report.id)
        metadata_report_slices[report_slice_id] = {'number_hosts': len(hosts)}
        report_slice = {'report_slice_id': report_slice_id, 'hosts': hosts}
        insights_report_pieces[report_slice_filename] = report_slice
    return Response(insights_report_pieces)
Example 4
    def test_get_insights_report_200_generate_exists(self):
        """Retrieve insights report."""
        url = '/api/v1/reports/1/insights/'
        # Seed the cached insights payload the view will serve.
        self.deployments_report.cached_insights = json.dumps(
            self.insights_hosts)
        self.deployments_report.save()
        patched_lookup = patch('api.insights_report.view.get_object_or_404',
                               return_value=self.deployments_report)
        with patched_lookup:
            response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        payload = response.json()

        metadata_name = create_filename('metadata', 'json', 1)
        self.assertIn(metadata_name, payload.keys())
        # Every archive member lives under the report's directory.
        for file_key in payload:
            self.assertIn('report_id_1/', file_key)
Example 5
    def test_get_insights_report_200_generate_exists(self):
        """Retrieve insights report."""
        deployment_report = DeploymentReportFactory(
            status=DeploymentsReport.STATUS_COMPLETE,
        )
        url = f"/api/v1/reports/{deployment_report.id}/insights/"
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        payload = response.json()

        metadata_name = create_filename(
            "metadata", "json", deployment_report.id)
        self.assertIn(metadata_name, payload.keys())
        # Every archive member lives under the report's directory.
        expected_prefix = f"report_id_{deployment_report.id}/"
        for file_key in payload:
            self.assertIn(expected_prefix, file_key)
    def render(self, data, accepted_media_type=None, renderer_context=None):
        """Render report as json gzip.

        :param data: report dict; must carry report_id to be rendered
        :param accepted_media_type: unused (renderer API)
        :param renderer_context: unused (renderer API)
        :returns: tar buffer, or None when data is empty or has no id
        """
        report_dict = data
        if not report_dict:
            return None

        report_id = report_dict.get('report_id')
        if report_id is None:
            return None
        report_type = report_dict.get('report_type')
        if report_type is not None:
            file_name = create_filename(report_type, 'json', report_id)
        else:
            # Fall back to a timestamped name when no type is given.
            file_name = '%s.json' % time.strftime('%Y%m%d%H%M%S')
        return create_tar_buffer({file_name: report_dict})
Example 7
    def test_get_insights_report_200_exists(self):
        """Retrieve insights report.

        Forces the 11-host report into two slices and verifies the slice
        metadata matches the returned slice contents and that no host is
        lost or duplicated across slices.
        """
        deployment_report = DeploymentReportFactory(
            number_of_fingerprints=11,
            status=DeploymentsReport.STATUS_COMPLETE,
        )
        url = f"/api/v1/reports/{deployment_report.id}/insights/"
        # mock slice size so we can expect 2 slices on this test
        with override_settings(QPC_INSIGHTS_REPORT_SLICE_SIZE=10):
            response = self.client.get(url)
        self.assertEqual(response.status_code, 200, response.json())
        response_json = response.json()

        self.assertIn(
            create_filename("metadata", "json", deployment_report.id),
            response_json.keys(),
        )
        # Collect every non-metadata file; each one is a report slice.
        report_slices = {}
        metadata_filename = f"report_id_{deployment_report.id}/metadata.json"
        for key in response_json:
            self.assertIn(f"report_id_{deployment_report.id}/", key)
            if key != metadata_filename:
                report_slices[key] = response_json[key]
        # metadata slice number_hosts matches the actual
        # number of hosts in a slice
        report_slices_in_metadata = response_json[metadata_filename]["report_slices"]
        self.assertEqual(len(report_slices_in_metadata), 2)
        total_returned_hosts_num = 0
        # NOTE(review): zip pairing assumes both dicts preserve the same
        # insertion (slice-creation) order — true on Python 3.7+ dicts.
        for key_1, key_2 in zip(report_slices_in_metadata, report_slices):
            self.assertEqual(report_slices_in_metadata[key_1]['number_hosts'],
                             len(report_slices[key_2]['hosts']))
            # used later to check for the total size
            total_returned_hosts_num += len(report_slices[key_2]['hosts'])
        # no hosts lost
        returned_host_names = {
            host["bios_uuid"]
            for slice_key in report_slices  # pylint: disable=consider-using-dict-items
            for host in report_slices[slice_key]["hosts"]
        }
        expected_host_names = {
            host.bios_uuid for host in deployment_report.system_fingerprints.all()
        }
        self.assertSetEqual(returned_host_names, expected_host_names)
        # sum of all hosts in a slice is equal to
        # the total number of host (before call)
        self.assertEqual(total_returned_hosts_num, len(expected_host_names))
    def render(self, report_dict, media_type=None, renderer_context=None):
        """Render report as json gzip.

        :param report_dict: report payload; must carry report_id
        :param media_type: unused (renderer API)
        :param renderer_context: unused (renderer API)
        :returns: tar buffer, or None for empty/id-less payloads
        """
        # pylint: disable=arguments-differ,unused-argument,too-many-locals
        # pylint: disable=too-many-branches,too-many-statements

        if not report_dict:
            return None

        report_id = report_dict.get('report_id')
        if report_id is None:
            return None
        report_type = report_dict.get('report_type')
        # Typed reports get a canonical name; untyped ones a timestamp.
        file_name = (create_filename(report_type, 'json', report_id)
                     if report_type is not None
                     else '%s.json' % time.strftime('%Y%m%d%H%M%S'))
        return create_tar_buffer({file_name: report_dict})
Example 9
    def test_get_insights_report_200_exists(self):
        """Retrieve insights report.

        Uses 10001 hosts so the view must split the report into multiple
        slices; checks slice metadata against slice contents and host
        totals.
        """
        url = '/api/v1/reports/1/insights/'
        expected_hosts = [{'unique_id': i} for i in range(10001)]
        self.deployments_report.cached_insights = json.dumps(expected_hosts)
        self.deployments_report.save()
        with patch('api.insights_report.view.get_object_or_404',
                   return_value=self.deployments_report):
            response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        response_json = response.json()

        self.assertIn(create_filename('metadata', 'json', 1),
                      response_json.keys())
        # Collect every non-metadata file; each one is a report slice.
        report_slices = {}
        for key in response_json:
            self.assertIn('report_id_1/', key)
            if key != 'report_id_1/metadata.json':
                report_slices[key] = response_json[key]
        # metadata slice number_hosts matches the actual
        # number of hosts in a slice
        report_slices_in_metadata = \
            response_json['report_id_1/metadata.json']['report_slices']
        total_returned_hosts_num = 0
        # NOTE(review): zip pairing assumes both dicts preserve the same
        # insertion (slice-creation) order — true on Python 3.7+ dicts.
        for key_1, key_2 in zip(report_slices_in_metadata, report_slices):
            self.assertEqual(report_slices_in_metadata[key_1]['number_hosts'],
                             len(report_slices[key_2]['hosts']))
            # used later to check for the total size
            total_returned_hosts_num += len(report_slices[key_2]['hosts'])
        # no hosts lost
        returned_host_ids = {
            host['unique_id']
            for slice_key in report_slices
            for host in report_slices[slice_key]['hosts']
        }
        expected_host_ids = {host['unique_id'] for host in expected_hosts}
        self.assertSetEqual(returned_host_ids, expected_host_ids)
        # sum of all hosts in a slice is equal to
        # the total number of host (before call)
        self.assertEqual(total_returned_hosts_num, len(expected_hosts))