def render(self, report_dict, media_type=None, renderer_context=None):
    """Render deployment report as CSV."""
    # pylint: disable=arguments-differ,unused-argument,too-many-locals
    if not bool(report_dict):
        return None

    csv_helper = CSVHelper()
    report_buffer = StringIO()
    csv_writer = csv.writer(report_buffer, delimiter=',')

    report_id = report_dict.get('report_id')
    systems_list = report_dict.get('report')

    csv_writer.writerow(['Report'])
    csv_writer.writerow([report_id])
    csv_writer.writerow([])
    csv_writer.writerow([])

    if not systems_list:
        return None

    csv_writer.writerow(['Report:'])
    headers = csv_helper.generate_headers(
        systems_list, exclude={'id', 'report_id', 'metadata'})
    if SOURCES_KEY in headers:
        headers += self.source_headers
        headers = sorted(list(set(headers)))

    # Add source headers
    csv_writer.writerow(headers)
    for system in systems_list:
        row = []
        system_sources = system.get(SOURCES_KEY)
        if system_sources is not None:
            sources_info = self._compute_source_info(system_sources)
        else:
            sources_info = None
        for header in headers:
            fact_value = None
            if header in self.source_headers:
                if sources_info is not None:
                    fact_value = sources_info.get(header)
            elif header == 'entitlements':
                fact_value = system.get(header)
                for entitlement in fact_value:
                    entitlement.pop('metadata')
            else:
                fact_value = system.get(header)
            row.append(csv_helper.serialize_value(header, fact_value))
        csv_writer.writerow(row)

    csv_writer.writerow([])

    csv_content = report_buffer.getvalue()
    return csv_content
def render(self, report_dict, media_type=None, renderer_context=None):
    """Render FactCollection as CSV."""
    # pylint: disable=arguments-differ,unused-argument,too-many-locals
    if not bool(report_dict):
        return None

    csv_helper = CSVHelper()
    report_buffer = StringIO()
    csv_writer = csv.writer(report_buffer, delimiter=',')

    fact_collection_id = report_dict.get('fact_collection_id')
    systems_list = report_dict.get('report')

    csv_writer.writerow(['Fact Collection'])
    csv_writer.writerow([fact_collection_id])
    csv_writer.writerow([])
    csv_writer.writerow([])

    if not systems_list:
        return None

    csv_writer.writerow(['Report:'])
    headers = csv_helper.generate_headers(
        systems_list, exclude=set(['id', 'fact_collection_id', 'metadata']))
    csv_writer.writerow(headers)
    for system in systems_list:
        row = []
        for header in headers:
            fact_value = system.get(header)
            row.append(csv_helper.serialize_value(header, fact_value))
        csv_writer.writerow(row)

    csv_writer.writerow([])

    csv_content = report_buffer.getvalue()
    return csv_content
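# Hypothetical usage sketch for the FactCollection renderer above. The class
# name FactCollectionCSVRenderer and the payload below are assumptions; the
# dict shape is only inferred from the keys render() reads
# ('fact_collection_id' and 'report').
renderer = FactCollectionCSVRenderer()
report_dict = {
    'fact_collection_id': 1,
    'report': [
        {'id': 1, 'fact_collection_id': 1, 'name': 'alpha', 'cpu_count': 2},
        {'id': 2, 'fact_collection_id': 1, 'name': 'beta', 'cpu_count': 4},
    ],
}
csv_text = renderer.render(report_dict)
# csv_text holds the 'Fact Collection' header rows followed by one CSV row
# per system, with 'id', 'fact_collection_id' and 'metadata' excluded.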
class CommonUtilTest(TestCase):
    """Tests common util functions."""

    # pylint: disable=no-self-use,too-many-arguments,invalid-name
    # pylint: disable=too-many-locals,too-many-branches

    def setUp(self):
        """Create test case setup."""
        self.csv_helper = CSVHelper()

    def test_csv_serialize_empty_values(self):
        """Test csv helper with empty values."""
        # Test Empty case
        value = self.csv_helper.serialize_value('header', {})
        self.assertEqual('', value)
        value = self.csv_helper.serialize_value('header', [])
        self.assertEqual('', value)

    def test_csv_serialize_dict_1_key(self):
        """Test csv helper with 1 key dict."""
        # Test flat 1 entry
        test_python = {'key': 'value'}
        value = self.csv_helper.serialize_value('header', test_python)
        self.assertEqual(value, '{key:value}')

    def test_csv_serialize_list_1_value(self):
        """Test csv helper with 1 item list."""
        test_python = ['value']
        value = self.csv_helper.serialize_value('header', test_python)
        self.assertEqual(value, '[value]')

    def test_csv_serialize_dict_2_keys(self):
        """Test csv helper with 2 key dict."""
        # Test flat with 2 entries
        test_python = OrderedDict()
        test_python['key1'] = 'value1'
        test_python['key2'] = 'value2'
        value = self.csv_helper.serialize_value('header', test_python)
        self.assertEqual(value, '{key1:value1;key2:value2}')

    def test_csv_serialize_list_2_values(self):
        """Test csv helper with 2 item list."""
        test_python = ['value1', 'value2']
        value = self.csv_helper.serialize_value('header', test_python)
        self.assertEqual(value, '[value1;value2]')

    def test_csv_serialize_dict_nested(self):
        """Test csv helper with dict containing nested list/dict."""
        # Test nested
        test_python = OrderedDict()
        test_python['key'] = 'value'
        test_python['dict'] = {'nkey': 'nvalue'}
        test_python['list'] = ['a']
        value = self.csv_helper.serialize_value('header', test_python)
        self.assertEqual(value, '{key:value;dict:{nkey:nvalue};list:[a]}')

    def test_csv_serialize_list_nested(self):
        """Test csv helper with list containing nested list/dict."""
        test_python = ['value', {'nkey': 'nvalue'}, ['a']]
        value = self.csv_helper.serialize_value('header', test_python)
        self.assertEqual(value, '[value;{nkey:nvalue};[a]]')

    def test_csv_serialize_ansible_value(self):
        """Test csv helper with ansible dict."""
        # Test ansible error
        test_python = {'rc': 0}
        value = self.csv_helper.serialize_value('header', test_python)
        self.assertEqual(value, CSVHelper.ANSIBLE_ERROR_MESSAGE)

    def test_csv_generate_headers(self):
        """Test csv_generate_headers method."""
        fact_list = [{'header1': 'value1'},
                     {'header2': 'value2'},
                     {'header1': 'value2', 'header3': 'value3'}]
        headers = CSVHelper.generate_headers(fact_list)
        self.assertEqual(3, len(headers))
        expected = set(['header1', 'header2', 'header3'])
        self.assertSetEqual(expected, set(headers))
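# A minimal CSVHelper sketch that is consistent with the tests above. It is
# an illustration only: the helper method names (serialize_dict,
# serialize_list) and the ANSIBLE_ERROR_MESSAGE text are assumptions, not the
# project's actual implementation.
class CSVHelper:
    """Serialize nested facts into the compact cell format the tests expect."""

    ANSIBLE_ERROR_MESSAGE = 'Error. See logs.'  # assumed message text

    def serialize_value(self, header, fact_value):
        """Flatten a fact value into a single CSV cell string."""
        if isinstance(fact_value, dict):
            return self.serialize_dict(header, fact_value)
        if isinstance(fact_value, list):
            return self.serialize_list(header, fact_value)
        return fact_value

    def serialize_dict(self, header, fact_dict):
        """Render a dict as {k1:v1;k2:v2}, recursing into nested values."""
        if not fact_dict:
            return ''
        if fact_dict.get('rc') is not None:
            # A raw ansible task result is not useful inside a CSV cell.
            return self.ANSIBLE_ERROR_MESSAGE
        entries = []
        for key, value in fact_dict.items():
            if isinstance(value, (dict, list)):
                value = self.serialize_value(header, value)
            entries.append('{}:{}'.format(key, value))
        return '{' + ';'.join(entries) + '}'

    def serialize_list(self, header, fact_list):
        """Render a list as [v1;v2], recursing into nested values."""
        if not fact_list:
            return ''
        entries = []
        for item in fact_list:
            if isinstance(item, (dict, list)):
                item = self.serialize_value(header, item)
            entries.append(str(item))
        return '[' + ';'.join(entries) + ']'

    @staticmethod
    def generate_headers(fact_list, exclude=None):
        """Return the sorted union of keys across all facts, minus exclusions."""
        headers = set()
        for fact in fact_list:
            headers.update(fact.keys())
        if exclude:
            headers -= set(exclude)
        return sorted(headers)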
def render(self,
           fact_collection_dict,
           media_type=None,
           renderer_context=None):
    """Render detailed report as CSV."""
    # pylint: disable=arguments-differ,unused-argument,too-many-locals
    report_id = fact_collection_dict.get('id')
    if report_id is None:
        return None

    fact_collection = FactCollection.objects.filter(
        id=report_id).first()
    if fact_collection is None:
        return None

    # Check for a cached copy of csv
    csv_content = fact_collection.csv_content
    if csv_content:
        logger.debug('Using cached csv results for fact collection %d',
                     report_id)
        return csv_content
    logger.debug('No cached csv results for fact collection %d',
                 report_id)

    csv_helper = CSVHelper()
    fact_collection_dict_buffer = StringIO()
    csv_writer = csv.writer(fact_collection_dict_buffer, delimiter=',')

    sources = fact_collection_dict.get('sources')

    csv_writer.writerow(['Report', 'Number Sources'])
    if sources is None:
        csv_writer.writerow([report_id, 0])
        return fact_collection_dict_buffer.getvalue()

    csv_writer.writerow([report_id, len(sources)])
    csv_writer.writerow([])
    csv_writer.writerow([])

    for source in sources:
        csv_writer.writerow(['Source'])
        csv_writer.writerow(
            ['Server Identifier', 'Source Name', 'Source Type'])
        csv_writer.writerow([
            source.get('server_id'),
            source.get('source_name'),
            source.get('source_type')])
        csv_writer.writerow(['Facts'])
        fact_list = source.get('facts')
        if not fact_list:
            # write a space line and move to next
            csv_writer.writerow([])
            continue
        headers = csv_helper.generate_headers(fact_list)
        csv_writer.writerow(headers)
        for fact in fact_list:
            row = []
            for header in headers:
                fact_value = fact.get(header)
                row.append(csv_helper.serialize_value(header, fact_value))
            csv_writer.writerow(row)
        csv_writer.writerow([])
        csv_writer.writerow([])

    logger.debug('Caching csv results for fact collection %d', report_id)
    csv_content = fact_collection_dict_buffer.getvalue()
    fact_collection.csv_content = csv_content
    fact_collection.save()

    return csv_content
def render(self, report_dict, media_type=None, renderer_context=None):
    """Render deployment report as CSV."""
    # pylint: disable=arguments-differ,unused-argument,too-many-locals
    # pylint: disable=too-many-branches
    if not bool(report_dict):
        return None

    report_id = report_dict.get('report_id')
    if report_id is None:
        return None

    deployment_report = DeploymentsReport.objects.filter(
        report_id=report_id).first()
    if deployment_report is None:
        return None

    # Check for a cached copy of csv
    cached_csv = deployment_report.cached_csv
    if cached_csv:
        logger.info('Using cached csv results for deployment report %d',
                    report_id)
        return cached_csv
    logger.info('No cached csv results for deployment report %d',
                report_id)

    csv_helper = CSVHelper()
    deployment_report_buffer = StringIO()
    csv_writer = csv.writer(deployment_report_buffer, delimiter=',')

    systems_list = report_dict.get('system_fingerprints')

    csv_writer.writerow(['Report'])
    csv_writer.writerow([report_id])
    csv_writer.writerow([])
    csv_writer.writerow([])

    if not systems_list:
        return None

    csv_writer.writerow(['Report:'])
    headers = csv_helper.generate_headers(
        systems_list, exclude={'id', 'report_id', 'metadata'})
    if SOURCES_KEY in headers:
        headers += self.source_headers
        headers = sorted(list(set(headers)))

    # Add source headers
    csv_writer.writerow(headers)
    for system in systems_list:
        row = []
        system_sources = system.get(SOURCES_KEY)
        if system_sources is not None:
            sources_info = self._compute_source_info(system_sources)
        else:
            sources_info = None
        for header in headers:
            fact_value = None
            if header in self.source_headers:
                if sources_info is not None:
                    fact_value = sources_info.get(header)
            elif header == 'entitlements':
                fact_value = system.get(header)
                for entitlement in fact_value:
                    entitlement.pop('metadata')
            else:
                fact_value = system.get(header)
            row.append(csv_helper.serialize_value(header, fact_value))
        csv_writer.writerow(sanitize_row(row))

    csv_writer.writerow([])

    logger.info('Caching csv results for deployment report %d', report_id)
    cached_csv = deployment_report_buffer.getvalue()
    deployment_report.cached_csv = cached_csv
    deployment_report.save()
    return cached_csv
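# The deployment renderer above calls sanitize_row() before writing each row.
# Its exact rules are not shown here, so the version below is an assumption:
# a sketch that flattens embedded line breaks in string cells so every
# fingerprint stays on a single physical CSV row.
def sanitize_row(row):
    """Replace embedded newlines in string cells with the ';' separator."""
    return [
        field.replace('\r\n', ';').replace('\n', ';')
        if isinstance(field, str) else field
        for field in row
    ]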