def test_problems_writing(self):
    """
    Exception is thrown when we cannot write to the directory
    """
    self.file_manager.write_data = Mock(side_effect=PublicReportIOError('Boom!'))
    wr = WriteReportTask('12345', date.today(), self.results, self.file_manager)
    wr.run()
def remove_recurrent_report(self, report_id):
    """
    Removes a series of recurrent, public reports from persistent storage

    Parameters:
        report_id : unique identifier for the report, a string
    """
    try:
        path = self.get_public_report_path(report_id, recurrent=True)
        if not os.path.isdir(path):
            msg = '"{0}" is not a scheduled report'.format(path)
            self.logger.exception(msg)
            raise PublicReportIOError(msg)
        shutil.rmtree(path)
    except IOError:
        msg = 'Could not remove recurrent public report {0} at "{1}"'.format(
            report_id, path)
        self.logger.exception(msg)
        raise PublicReportIOError(msg)
def remove_file(self, file_path):
    """
    Parameters
        file_path : The path to the file to be deleted

    Raises
        PublicReportIOError if an IOError was raised when deleting
        the public report
    """
    try:
        if os.path.isfile(file_path):
            os.remove(file_path)
        else:
            raise PublicReportIOError(
                'Could not remove public report at: '
                '{0} as it does not exist'.format(file_path))
    except IOError:
        msg = 'Could not remove public report at: {0}'.format(file_path)
        self.logger.exception(msg)
        raise PublicReportIOError(msg)
def test_public_report_state_change_error(self):
    """
    Persistent Report should propagate IO errors from PublicReportFileManager
    """
    self.app = Mock()
    self.logger = Mock(spec=RootLogger)
    file_manager = PublicReportFileManager(self.logger, '/some/fake/absolute/path')
    file_manager.write_data = Mock(
        side_effect=PublicReportIOError('Boom!'))
    # do not write anything to disk
    file_manager.get_public_report_path = Mock()
    ReportStore.make_report_public(self.reports[0].id,
                                   self.reports[0].user_id,
                                   file_manager,
                                   'testing data')
def coalesce_recurrent_reports(self, report_id):
    """
    Coalesces a series of recurrent, public reports into a single
    JSON object. The coalesced object is a single dictionary mapping
    report END DATES to individual report objects.

    Parameters
        report_id : unique identifier for the report, a string

    Returns
        A JSON object containing the coalesced reports
    """
    try:
        coalesced_reports = {}
        path = self.get_public_report_path(report_id, recurrent=True)
        if not os.path.isdir(path):
            msg = '"{0}" is not a scheduled report'.format(path)
            self.logger.exception(msg)
            raise PublicReportIOError(msg)

        # Get a list of filenames with COALESCED_REPORT_FILE at 1st position,
        # so that new individual reports override the current full report.
        filenames = os.listdir(path)
        if COALESCED_REPORT_FILE in filenames:
            filenames.remove(COALESCED_REPORT_FILE)
            filenames.insert(0, COALESCED_REPORT_FILE)

        for f in filenames:
            full_path = os.sep.join((path, f))
            if os.path.isfile(full_path):
                with open(full_path, 'r') as saved_report:
                    try:
                        data = json.load(saved_report)
                        _merge_run(coalesced_reports, data)
                    except KeyError as e:
                        msg = 'Key "{}" not in JSON file "{}"'.format(
                            e, full_path)
                        self.logger.exception(msg)
                    except ValueError:
                        msg = 'Error parsing JSON file "{}"'.format(
                            full_path)
                        self.logger.exception(msg)
def write_data(self, file_path, data):
    """
    Writes data to a given path

    Parameters
        file_path : The path to which we are writing the public report
        data : String content to write

    Raises
        PublicReportIOError if an IOError was raised when creating
        the public report
    """
    try:
        with open(file_path, 'w') as saved_report:
            saved_report.write(data)
    except IOError:
        msg = 'Could not create public report at: {0}'.format(file_path)
        self.logger.exception(msg)
        raise PublicReportIOError(msg)
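# Usage sketch (illustration only, not part of the module): exercising
# write_data and remove_file against a temporary path. The constructor
# arguments follow the test above (a logger and an absolute base path);
# the temp-file setup and file name below are assumptions for the example.
import logging
import os
import tempfile

manager = PublicReportFileManager(logging.getLogger(__name__), tempfile.gettempdir())
report_path = os.path.join(tempfile.gettempdir(), 'example-report.json')

manager.write_data(report_path, '{"result": {}}')  # raises PublicReportIOError on IOError
manager.remove_file(report_path)                   # raises if the file does not exist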
def remove_old_report_files(self, report_id, days_ago=10):
    """
    Removes the individual (single-day) report files from the report
    folder that are older than 'days_ago'. The full report file will
    be left intact.
    """
    limit_day = today() - timedelta(days=days_ago)
    path = self.get_public_report_path(report_id, recurrent=True)
    if not os.path.isdir(path):
        msg = '"{0}" is not a scheduled report'.format(path)
        self.logger.exception(msg)
        raise PublicReportIOError(msg)

    for filename in os.listdir(path):
        if filename != COALESCED_REPORT_FILE:
            file_date = parse_date_from_public_report_file(filename)
            if file_date <= limit_day:
                full_path = os.sep.join((path, filename))
                self.remove_file(full_path)
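# Illustration only: the cutoff above removes a single-day report file when its
# parsed date is on or before today minus days_ago. The dates below are made up
# purely to show the arithmetic.
from datetime import date, timedelta

assert date(2014, 6, 15) - timedelta(days=10) == date(2014, 6, 5)
# So with days_ago=10 and today() == 2014-06-15, a file dated 2014-06-05 or
# earlier is deleted, while 2014-06-06 and newer files are kept.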
        return coalesced_reports
    except IOError:
        msg = 'Could not concatenate public report {0}'.format(report_id)
        self.logger.exception(msg)
        raise PublicReportIOError(msg)


def _merge_run(coalesced, data):
    """
    Helper function, handles merging of new report results into an existing
    dictionary that represents the output of a recurrent report. Correctly
    merges both timeseries and non-timeseries results into a timeseries-like
    format.

    Parameters
        coalesced : the coalesced report to update, could be an empty dictionary
        data : the json result of the new report
    """
    coalesced.setdefault('parameters', data['parameters'])
    coalesced.setdefault('result', {})
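# Illustration only: the two setdefault calls above mean the first report merged
# in supplies 'parameters' (later reports cannot overwrite them) and 'result' is
# initialised as an empty dict for the rest of _merge_run to fill. The sample
# dictionaries below are made up for the example.
coalesced = {}
first = {'parameters': {'metric': 'edits'}, 'result': {}}
second = {'parameters': {'metric': 'pages'}, 'result': {}}

coalesced.setdefault('parameters', first['parameters'])
coalesced.setdefault('parameters', second['parameters'])
assert coalesced['parameters'] == {'metric': 'edits'}  # first value wins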