def load_export_inactive_items_to_csv(logger, config_settings):
    """
    Attempt to parse export_inactive_items from config settings.

    A boolean is expected: True means the CSV exporter will include
    inactive items, False means they are excluded. When row merging is
    enabled the setting is forced to True.

    :param logger: the logger
    :param config_settings: config settings loaded from config file
    :return: value of export_inactive_items_to_csv if valid, else
             DEFAULT_EXPORT_INACTIVE_ITEMS_TO_CSV
    """
    try:
        export_options = config_settings['export_options']
        # Merging rows requires every item, so inactive items are forced on.
        if export_options['merge_rows'] is True:
            logger.info(
                'Merge rows is enabled, turning on the export of inactive items.'
            )
            return True
        include_inactive = export_options['export_inactive_items']
        if not isinstance(include_inactive, bool):
            logger.info(
                'Invalid export_inactive_items value from configuration file, defaulting to true'
            )
            include_inactive = DEFAULT_EXPORT_INACTIVE_ITEMS_TO_CSV
        return include_inactive
    except Exception as ex:
        log_critical_error(
            logger, ex,
            'Exception parsing export_inactive_items from the configuration file, defaulting to {0}'
            .format(str(DEFAULT_EXPORT_INACTIVE_ITEMS_TO_CSV)))
        return DEFAULT_EXPORT_INACTIVE_ITEMS_TO_CSV
def save_exported_media_to_file(logger, export_dir, media_file, filename,
                                extension):
    """
    Write exported media item to disk at specified location with specified
    file name. Any existing file with the same name will be overwritten.

    :param logger: the logger
    :param export_dir: path to directory for exports
    :param media_file: media file to write to disc
    :param filename: filename to give exported image
    :param extension: extension to give exported image
    """
    if not os.path.exists(export_dir):
        logger.info(
            "Creating directory at {0} for media files.".format(export_dir))
        os.makedirs(export_dir)
    destination = os.path.join(export_dir, filename + '.' + extension)
    if os.path.isfile(destination):
        logger.info('Overwriting existing report at ' + destination)
    try:
        with open(destination, 'wb') as target:
            shutil.copyfileobj(media_file.raw, target)
        # Drop the reference to the (potentially large) media object
        # once its contents have been copied to disk.
        del media_file
    except Exception as ex:
        log_critical_error(logger, ex,
                           'Exception while writing' + destination + ' to file')
def load_setting_sync_delay(logger, config_settings):
    """
    Attempt to parse delay between sync loops from config settings

    :param logger: the logger
    :param config_settings: config settings loaded from config file
    :return: extracted sync delay if valid, else DEFAULT_SYNC_DELAY_IN_SECONDS
    """
    try:
        sync_delay = config_settings['export_options']['sync_delay_in_seconds']
        # Regex on the stringified value rejects floats, strings and
        # negative numbers in one pass; only digit-only values survive.
        sync_delay_is_valid = re.match('^[0-9]+$', str(sync_delay))
        if sync_delay_is_valid and sync_delay >= 0:
            if sync_delay < DEFAULT_SYNC_DELAY_IN_SECONDS:
                # BUG FIX: the original wrapped this logger.info(...) call in
                # a discarded '{0} seconds'.format(...) expression, formatting
                # the None returned by logger.info for no effect. Log directly.
                logger.info(
                    'Sync delay is less than the minimum recommended value of '
                    + str(DEFAULT_SYNC_DELAY_IN_SECONDS))
            return sync_delay
        else:
            logger.info(
                'Invalid sync_delay_in_seconds from the configuration file, defaulting to {0}'
                .format(str(DEFAULT_SYNC_DELAY_IN_SECONDS)))
            return DEFAULT_SYNC_DELAY_IN_SECONDS
    except Exception as ex:
        log_critical_error(
            logger, ex,
            'Exception parsing sync_delay from the configuration file, defaulting to {0}'
            .format(str(DEFAULT_SYNC_DELAY_IN_SECONDS)))
        return DEFAULT_SYNC_DELAY_IN_SECONDS
def get_filename_item_id(logger, config_settings):
    """
    Attempt to parse item_id for file naming from config settings

    :param logger: the logger
    :param config_settings: config settings loaded from config file
    :return: item_id extracted from config_settings if valid, else None
    """
    try:
        filename_item_id = config_settings['export_options']['filename']
        if filename_item_id is None:
            return None
        # A single item ID is at most 36 characters (UUID length); anything
        # longer means multiple IDs were supplied.
        if len(filename_item_id) > 36:
            logger.critical(
                'You can only specify one value for the filename. Please remove any additional item '
                'IDs and try again. For more complex title rules, consider setting the title rules '
                'within iAuditor. Defaulting to Audit ID.')
            # BUG FIX: the original fell through after this message and could
            # still return the over-long value; honour the logged fallback.
            return None
        if filename_item_id == 'f3245d42-ea77-11e1-aff1-0800200c9a66':
            logger.critical(
                'Date fields are not compatible with the title rule feature. Defaulting to Audit ID'
            )
            # Original implicitly returned None here by falling off the end.
            return None
        return filename_item_id
    except Exception as ex:
        log_critical_error(
            logger, ex,
            'Exception retrieving setting "filename" from the configuration file'
        )
        return None
def load_setting_preference_mapping(logger, config_settings):
    """
    Attempt to parse preference settings from config settings

    :param logger: the logger
    :param config_settings: config settings loaded from config file
    :return: export preference mapping if valid, else None
    """
    try:
        preference_settings = config_settings['export_options']['preferences']
        mapping = {}
        if preference_settings is not None:
            # Entries are space-separated 'template_id:preference_id' pairs;
            # the first entry for a template wins.
            for entry in preference_settings.split(' '):
                template_id = entry[:entry.index(':')]
                mapping.setdefault(template_id, entry)
        return mapping
    except KeyError:
        logger.debug('No preference key in the configuration file')
        return None
    except Exception as ex:
        log_critical_error(
            logger, ex,
            'Exception getting preferences from the configuration file')
        return None
def update_actions_sync_marker_file(logger, date_modified, config_name):
    """
    Replace the contents of the actions sync marker file with the
    date/time string provided.

    :param logger: the logger
    :param date_modified: ISO date/time string to persist
    :param config_name: name of the active configuration, used to derive
                        the marker file name
    """
    marker_path = set_last_successful_file_name(config_name, 'actions')
    try:
        with open(marker_path, 'w') as marker_file:
            marker_file.write(date_modified)
    except Exception as ex:
        log_critical_error(logger, ex,
                           'Unable to open ' + marker_path + ' for writing')
        # The sync marker is essential state; abort rather than continue
        # with a stale marker.
        exit()
def load_setting_media_sync_offset(logger, config_settings):
    """
    Attempt to parse the media sync offset from config settings.

    :param logger: the logger
    :param config_settings: config settings loaded from config file
    :return: media sync offset parsed from file, else default media sync
             offset defined as global constant
    """
    try:
        media_sync_offset = config_settings['export_options']['media_sync_offset_in_seconds']
        # BUG FIX: type-check BEFORE comparing. The original evaluated
        # `media_sync_offset < 0` first, so a non-numeric value (e.g. a
        # string) raised TypeError and was reported as a critical error
        # instead of quietly falling back to the default. None also fails
        # the isinstance check, so the original `is None` test is subsumed.
        if not isinstance(media_sync_offset, int) or media_sync_offset < 0:
            media_sync_offset = DEFAULT_MEDIA_SYNC_OFFSET_IN_SECONDS
        return media_sync_offset
    except Exception as ex:
        log_critical_error(logger, ex,
                           'Exception parsing media sync offset from config file')
        return DEFAULT_MEDIA_SYNC_OFFSET_IN_SECONDS
def load_setting_export_path(logger, config_settings):
    """
    Attempt to extract export path from config settings

    :param config_settings: config settings loaded from config file
    :param logger: the logger
    :return: export path, None if path is invalid or missing
    """
    try:
        # A configured path and an explicit None are both returned as-is
        # (the original if/else returned None for None anyway); only a
        # malformed config reaches the logged fallback below.
        return config_settings['export_options']['export_path']
    except Exception as ex:
        log_critical_error(logger, ex,
                           'Exception getting export path from the configuration file')
        return None
def save_web_report_link_to_file(logger, export_dir, web_report_data):
    """
    Write Web Report links to 'web-report-links.csv' on disk at specified
    location. Any existing file with the same name will be appended to.

    :param logger: the logger
    :param export_dir: path to directory for exports
    :param web_report_data: Data to write to CSV: Template ID, Template name,
                            Audit ID, Audit name, Web Report link
    """
    if not os.path.exists(export_dir):
        logger.info("Creating directory at {0} for Web Report links.".format(
            export_dir))
        os.makedirs(export_dir)
    file_path = os.path.join(export_dir, 'web-report-links.csv')
    file_exists = os.path.isfile(file_path)
    if file_exists:
        logger.info('Appending Web Report link to ' + file_path)
    else:
        logger.info('Creating ' + file_path)
        logger.info('Appending web report to ' + file_path)
    try:
        # BUG FIX: Python 3's csv module requires text mode with newline='';
        # the original opened the file in 'ab'/'wb' binary mode, which makes
        # csv.writer raise TypeError on every write. Also removed the
        # redundant .close() inside the with-block and added the missing
        # space in the error message.
        with open(file_path, 'a' if file_exists else 'w',
                  newline='') as web_report_link_csv:
            wr = csv.writer(web_report_link_csv, dialect='excel',
                            quoting=csv.QUOTE_ALL)
            if not file_exists:
                # New file gets a header row before the first data row.
                wr.writerow([
                    'Template ID', 'Template Name', 'Audit ID', 'Audit Name',
                    'Web Report Link'
                ])
            wr.writerow(web_report_data)
    except Exception as ex:
        log_critical_error(
            logger, ex, 'Exception while writing ' + file_path + ' to file')
def save_exported_document(logger, export_dir, export_doc, filename,
                           extension):
    """
    Write exported document to disk at specified location with specified
    file name. Any existing file with the same name will be overwritten.

    :param logger: the logger
    :param export_dir: path to directory for exports
    :param export_doc: export document to write
    :param filename: filename to give exported document
    :param extension: extension to give exported document
    """
    # Documents are grouped into a per-extension subdirectory.
    target_dir = os.path.join(export_dir, extension)
    target_path = os.path.join(target_dir, filename + '.' + extension)
    create_directory_if_not_exists(logger, target_dir)
    if os.path.isfile(target_path):
        logger.info('Overwriting existing report at ' + target_path)
    try:
        with open(target_path, 'wb') as export_file:
            export_file.write(export_doc)
    except Exception as ex:
        log_critical_error(logger, ex,
                           'Exception while writing' + target_path + ' to file')
def load_setting_api_access_token(logger, config_settings):
    """
    Attempt to parse API token from config settings

    :param logger: the logger
    :param config_settings: config settings loaded from config file
    :return: API token if valid, else None
    """
    try:
        api_token = config_settings['API']['token']
        # A valid token is exactly 64 lowercase hex characters.
        if re.match('^[a-f0-9]{64}$', api_token):
            logger.debug('API token matched expected pattern')
            return api_token
        logger.error('API token failed to match expected pattern')
        return None
    except Exception as ex:
        log_critical_error(logger, ex,
                           'Exception parsing API token from config.yaml')
        return None