def test_update(self):
    """Test updating an existing row."""
    accessor = ReportStatsDBAccessor("myreport", self.manifest_id)
    query = accessor._get_db_obj_query()
    self.assertEqual(query.first().report_name, "myreport")

    accessor.update(
        cursor_position=33,
        last_completed_datetime=parser.parse("2011-1-1 11:11:11"),
        last_started_datetime=parser.parse("2022-2-2 22:22:22"),
        etag="myetag",
    )

    # Verify every component of the stored completion timestamp.
    completed = accessor.get_last_completed_datetime()
    for field, expected in (("year", 2011), ("month", 1), ("day", 1),
                            ("hour", 11), ("minute", 11), ("second", 11)):
        self.assertEqual(getattr(completed, field), expected)

    # Verify every component of the stored start timestamp.
    started = accessor.get_last_started_datetime()
    for field, expected in (("year", 2022), ("month", 2), ("day", 2),
                            ("hour", 22), ("minute", 22), ("second", 22)):
        self.assertEqual(getattr(started, field), expected)

    self.assertEqual(accessor.get_etag(), "myetag")

    # Deleting the row should leave nothing behind for this report.
    accessor.delete()
    query = accessor._get_db_obj_query()
    self.assertIsNone(query.first())
def test_initializer_preexisting_report(self):
    """Test getting a new accessor stats on a preexisting report.

    Persists stats through one accessor, then opens a second accessor on
    the same report name and asserts the second accessor reads back the
    stored values.
    """
    saver = ReportStatsDBAccessor('myreport')
    saver.update(cursor_position=33,
                 last_completed_datetime='1/1/2011 11:11:11',
                 last_started_datetime='2/2/22 22:22:22',
                 etag='myetag')
    saver.commit()
    self.assertIsNotNone(saver._session)

    # Get another accessor for the same report and verify we get back
    # the right information.
    saver2 = ReportStatsDBAccessor('myreport')
    last_completed = saver2.get_last_completed_datetime()
    self.assertEqual(last_completed.year, 2011)
    self.assertEqual(last_completed.month, 1)
    self.assertEqual(last_completed.day, 1)
    self.assertEqual(last_completed.hour, 11)
    self.assertEqual(last_completed.minute, 11)
    self.assertEqual(last_completed.second, 11)
    # Fix: assert against the SECOND accessor — checking `saver` only
    # re-reads the object that wrote the data and does not exercise the
    # preexisting-report lookup this test is about.
    self.assertEqual(saver2.get_etag(), 'myetag')

    # Clean up the row and release both sessions.
    saver.remove()
    saver.commit()
    saver.close_session()
    saver2.close_session()
def download_report(self, date_time):
    """
    Download CUR for a given date.

    Args:
        date_time (DateTime): The starting datetime object

    Returns:
        ([{}]) List of dictionaries containing file path and compression.

    """
    manifest = self._get_manifest(date_time)
    reports = manifest.get('reportKeys')
    cur_reports = []
    for report in reports:
        report_dictionary = {}
        local_s3_filename = utils.get_local_file_name(report)
        stats_recorder = ReportStatsDBAccessor(local_s3_filename)
        # Reuse the previously stored etag so unchanged files are not
        # re-downloaded.
        stored_etag = stats_recorder.get_etag()
        report_path = self.bucket_path + '/' + report
        LOG.info('Downloading %s with credential %s', report_path, self.credential)
        file_name, etag = self.download_file(report_path, stored_etag)
        stats_recorder.update(etag=etag)
        stats_recorder.commit()
        # Fix: release the DB session after each report, matching the
        # other downloaders' use of ReportStatsDBAccessor — the session
        # was previously left open.
        stats_recorder.close_session()
        report_dictionary['file'] = file_name
        report_dictionary['compression'] = 'GZIP'
        cur_reports.append(report_dictionary)
    return cur_reports
def test_remove_temp_cur_files(self):
    """Test to remove temporary cost usage files.

    Files whose names do not start with the manifest's assemblyId and
    that have a processed date are expected to be removed.
    """
    cur_dir = tempfile.mkdtemp()
    manifest_data = {"assemblyId": "6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5"}
    manifest = '{}/{}'.format(cur_dir, 'koku-Manifest.json')
    with open(manifest, 'w') as outfile:
        json.dump(manifest_data, outfile)
    file_list = [{'file': '6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5-koku-1.csv.gz',
                  'processed_date': datetime.datetime(year=2018, month=5, day=3)},
                 {'file': '6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5-koku-2.csv.gz',
                  'processed_date': datetime.datetime(year=2018, month=5, day=3)},
                 {'file': '2aeb9169-2526-441c-9eca-d7ed015d52bd-koku-1.csv.gz',
                  'processed_date': datetime.datetime(year=2018, month=5, day=2)},
                 {'file': '6c8487e8-c590-4e6a-b2c2-91a2375c0bad-koku-1.csv.gz',
                  'processed_date': datetime.datetime(year=2018, month=5, day=1)},
                 {'file': '6c8487e8-c590-4e6a-b2c2-91a2375d0bed-koku-1.csv.gz',
                  'processed_date': None}]
    expected_delete_list = []
    # Hoisted out of the loop: the manifest lookup is loop-invariant.
    obj = self.manifest_accessor.get_manifest(self.assembly_id, self.provider_id)
    for item in file_list:
        path = '{}/{}'.format(cur_dir, item['file'])
        # Fix: use a context manager instead of a bare open()/close()
        # pair so the handle is released even if the stats calls raise.
        with open(path, 'w'):
            pass
        stats = ReportStatsDBAccessor(item['file'], obj.id)
        stats.update(last_completed_datetime=item['processed_date'])
        stats.commit()
        stats.close_session()
        if not item['file'].startswith(manifest_data.get('assemblyId')) and item['processed_date']:
            expected_delete_list.append(path)
    removed_files = self.processor.remove_temp_cur_files(cur_dir)
    self.assertEqual(sorted(removed_files), sorted(expected_delete_list))
    shutil.rmtree(cur_dir)
def download_report(self, date_time):
    """
    Download CUR for a given date.

    Args:
        date_time (DateTime): The starting datetime object

    Returns:
        ([{}]) List of dictionaries containing file path and compression.

    """
    LOG.info('Current date is %s. Attempting to get manifest...', str(date_time))
    manifest = self._get_manifest(date_time)
    report_keys = manifest.get('reportKeys')

    downloaded_reports = []
    for key in report_keys:
        local_name = utils.get_local_file_name(key)
        recorder = ReportStatsDBAccessor(local_name)
        previous_etag = recorder.get_etag()

        # Download the report, passing the stored etag so an unchanged
        # file can be detected upstream.
        file_name, etag = self.download_file(key, previous_etag)

        recorder.update(etag=etag)
        recorder.commit()
        recorder.close_session()

        downloaded_reports.append({
            'file': file_name,
            'compression': self.report.get('Compression'),
        })
    return downloaded_reports
def test_remove_temp_cur_files(self):
    """Test to remove temporary usage report files.

    Files whose names do not start with the manifest's uuid and that
    have a processed date are expected to be removed.
    """
    insights_local_dir = tempfile.mkdtemp()
    manifest_data = {"uuid": "6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5"}
    manifest = '{}/{}'.format(insights_local_dir, 'manifest.json')
    with open(manifest, 'w') as outfile:
        json.dump(manifest_data, outfile)
    file_list = [
        {
            'file': '6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5-ocp-1.csv.gz',
            'processed_date': datetime.datetime(year=2018, month=5, day=3),
        },
        {
            'file': '6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5-ocp-2.csv.gz',
            'processed_date': datetime.datetime(year=2018, month=5, day=3),
        },
        {
            'file': '2aeb9169-2526-441c-9eca-d7ed015d52bd-ocp-1.csv.gz',
            'processed_date': datetime.datetime(year=2018, month=5, day=2),
        },
        {
            'file': '6c8487e8-c590-4e6a-b2c2-91a2375c0bad-ocp-1.csv.gz',
            'processed_date': datetime.datetime(year=2018, month=5, day=1),
        },
        {
            'file': '6c8487e8-c590-4e6a-b2c2-91a2375d0bed-ocp-1.csv.gz',
            'processed_date': None,
        },
    ]
    expected_delete_list = []
    for item in file_list:
        path = '{}/{}'.format(insights_local_dir, item['file'])
        # Fix: use a context manager instead of a bare open()/close()
        # pair so the handle is released even if the stats calls raise.
        with open(path, 'w'):
            pass
        stats = ReportStatsDBAccessor(item['file'], None)
        stats.update(last_completed_datetime=item['processed_date'])
        stats.commit()
        stats.close_session()
        if (not item['file'].startswith(manifest_data.get('uuid'))
                and item['processed_date']):
            expected_delete_list.append(path)
    removed_files = self.ocp_processor.remove_temp_cur_files(
        insights_local_dir, manifest_id=None)
    self.assertEqual(sorted(removed_files), sorted(expected_delete_list))
    shutil.rmtree(insights_local_dir)
def test_update(self):
    """Test updating an existing row."""
    accessor = ReportStatsDBAccessor('myreport', self.manifest_id)
    accessor.commit()
    self.assertEqual(accessor._get_db_obj_query().first().report_name, 'myreport')

    accessor.update(
        cursor_position=33,
        last_completed_datetime='1/1/2011 11:11:11',
        last_started_datetime='2/2/22 22:22:22',
        etag='myetag',
    )
    accessor.commit()
    self.assertEqual(accessor.get_cursor_position(), 33)

    # Verify every component of the stored completion timestamp.
    completed = accessor.get_last_completed_datetime()
    for field, expected in (('year', 2011), ('month', 1), ('day', 1),
                            ('hour', 11), ('minute', 11), ('second', 11)):
        self.assertEqual(getattr(completed, field), expected)

    # Verify every component of the stored start timestamp.
    started = accessor.get_last_started_datetime()
    for field, expected in (('year', 2022), ('month', 2), ('day', 2),
                            ('hour', 22), ('minute', 22), ('second', 22)):
        self.assertEqual(getattr(started, field), expected)

    # The cursor can be moved both via the setter and via update().
    accessor.set_cursor_position(42)
    accessor.commit()
    self.assertEqual(accessor.get_cursor_position(), 42)
    self.assertEqual(accessor.get_etag(), 'myetag')

    accessor.update(cursor_position=100)
    accessor.commit()
    self.assertEqual(accessor.get_cursor_position(), 100)

    # Deleting the row should leave nothing behind for this report.
    accessor.delete()
    accessor.commit()
    self.assertIsNone(accessor._get_db_obj_query().first())
    accessor.close_session()
def test_initializer_preexisting_report(self):
    """Test getting a new accessor stats on a preexisting report.

    Persists stats through one accessor, then opens a second accessor on
    the same report/manifest and asserts the second accessor reads back
    the stored values.
    """
    saver = ReportStatsDBAccessor("myreport", self.manifest_id)
    saver.update(
        cursor_position=33,
        last_completed_datetime="2011-1-1 11:11:11",
        last_started_datetime="2022-2-2 22:22:22",
        etag="myetag",
    )
    self.assertIsNotNone(saver._obj)

    # Get another accessor for the same report and verify we get back
    # the right information.
    saver2 = ReportStatsDBAccessor("myreport", self.manifest_id)
    last_completed = saver2.get_last_completed_datetime()
    self.assertEqual(last_completed.year, 2011)
    self.assertEqual(last_completed.month, 1)
    self.assertEqual(last_completed.day, 1)
    self.assertEqual(last_completed.hour, 11)
    self.assertEqual(last_completed.minute, 11)
    self.assertEqual(last_completed.second, 11)
    # Fix: assert against the SECOND accessor — checking `saver` only
    # re-reads the object that wrote the data and does not exercise the
    # preexisting-report lookup this test is about.
    self.assertEqual(saver2.get_etag(), "myetag")
def test_remove_temp_cur_files(self):
    """Test to remove temporary usage report files.

    Builds a local report directory plus a fake INSIGHTS_LOCAL_REPORT_DIR
    destination for the cluster/month, then verifies that files whose
    names do not start with the manifest's uuid and that have a processed
    date are removed.
    """
    insights_local_dir = tempfile.mkdtemp()
    cluster_id = 'my-ocp-cluster'
    manifest_date = "2018-05-01"
    manifest_data = {
        "uuid": "6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5",
        "cluster_id": cluster_id,
        "date": manifest_date
    }
    manifest = '{}/{}'.format(insights_local_dir, 'manifest.json')
    with open(manifest, 'w') as outfile:
        json.dump(manifest_data, outfile)
    file_list = [{
        'file': '6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5-ocp-1.csv.gz',
        'processed_date': datetime.datetime(year=2018, month=5, day=3)
    }, {
        'file': '6e019de5-a41d-4cdb-b9a0-99bfba9a9cb5-ocp-2.csv.gz',
        'processed_date': datetime.datetime(year=2018, month=5, day=3)
    }, {
        'file': '2aeb9169-2526-441c-9eca-d7ed015d52bd-ocp-1.csv.gz',
        'processed_date': datetime.datetime(year=2018, month=5, day=2)
    }, {
        'file': '6c8487e8-c590-4e6a-b2c2-91a2375c0bad-ocp-1.csv.gz',
        'processed_date': datetime.datetime(year=2018, month=5, day=1)
    }, {
        'file': '6c8487e8-c590-4e6a-b2c2-91a2375d0bed-ocp-1.csv.gz',
        'processed_date': None
    }]
    expected_delete_list = []
    # Hoisted out of the loop: the manifest lookup is loop-invariant.
    obj = self.manifest_accessor.get_manifest(self.assembly_id, self.provider_id)
    for item in file_list:
        path = '{}/{}'.format(insights_local_dir, item['file'])
        # Fix: use a context manager instead of a bare open()/close()
        # pair so the handle is released even if the stats calls raise.
        with open(path, 'w'):
            pass
        stats = ReportStatsDBAccessor(item['file'], obj.id)
        stats.update(last_completed_datetime=item['processed_date'])
        stats.commit()
        stats.close_session()
        if not item['file'].startswith(
                manifest_data.get('uuid')) and item['processed_date']:
            expected_delete_list.append(path)
    fake_dir = tempfile.mkdtemp()
    with patch.object(Config, 'INSIGHTS_LOCAL_REPORT_DIR', fake_dir):
        destination_dir = '{}/{}/{}'.format(
            fake_dir, cluster_id, month_date_range(parser.parse(manifest_date)))
        os.makedirs(destination_dir, exist_ok=True)
        removed_files = self.ocp_processor.remove_temp_cur_files(
            insights_local_dir)
        self.assertEqual(sorted(removed_files), sorted(expected_delete_list))
    shutil.rmtree(insights_local_dir)
    shutil.rmtree(fake_dir)