def enable_file_logging(self, toolname, num_logs=50, log_dir=None):
    """Enable logging to files, finding a tmp location when none is given.

    Parameters:
    1. toolname (str): used to name the log folder ('<toolname>_logs') and
       the log files ('<toolname>_log.txt', '<toolname>_log_debug.txt').
    2. num_logs (int): backups are rotated via backup_file only when
       num_logs > 1.
    3. log_dir (str): directory for the logs; when None, a folder under
       the TEMP/TMP environment directory is used.

    Return:
    1. flids (list): values returned by add_file_logger for each file
       logger attached (presumably logger ids — confirm with
       add_file_logger); empty when logging could not be set up.
    """
    from c_misc import backup_file
    import c_path
    if log_dir is None:
        log_dir = ''
        # Probe the usual environment variables for a usable temp directory.
        for tmp_path in [os.getenv('TEMP'), os.getenv('TMP')]:
            if c_path.validate_dir(tmp_path):
                log_dir = c_path.join(tmp_path, '{0}_logs'.format(toolname))
                break
        else:
            # for/else: reached only when no candidate directory validated.
            self.warning(
                'CANNOT FIND A LOCAL TEMP DIRECTORY TO CREATE LOGS')
    # Set Log Folder
    flids = []
    if log_dir:
        log_dir = c_path.normalize(log_dir)
        log_file = c_path.join(log_dir, '{0}_log.txt'.format(toolname))
        log_file_debug = c_path.join(log_dir,
                                     '{0}_log_debug.txt'.format(toolname))
        # Create the logs directory
        retval, reterr = True, ''
        try:
            c_path.create_dir(log_dir)
        except Exception as e:
            retval, reterr = False, str(e)
        if not retval:
            self.warning(
                'CANNOT CREATE DIRECTORY FOR LOGGING FILE: {0}\nPath: {1}'.
                format(reterr, log_dir))
        # Rotate existing logs before reopening them in 'w' mode; a failed
        # rotation skips logger setup entirely (falls through with warning).
        elif num_logs > 1 and c_path.validate_file(
                log_file) and not backup_file(log_file, num_logs):
            self.warning(
                'FAILED TO BACKUP LOGGING FILE: \nPath: {0}'.format(
                    log_file))
        elif num_logs > 1 and c_path.validate_file(
                log_file_debug) and not backup_file(
                    log_file_debug, num_logs):
            self.warning(
                'FAILED TO BACKUP DEBUG LOGGING FILE: \nPath: {0}'.format(
                    log_file_debug))
        else:
            flids.append(self.add_file_logger(log_file, self.INFO, 'w'))
            self.info('Logging to {0}'.format(log_file))
            # Only create debug file if debug is enabled
            if self.verbosity < self.INFO:
                flids.append(
                    self.add_file_logger(log_file_debug, self.verbosity, 'w'))
                self.debug('Debug logging to {0}'.format(log_file_debug))
    return flids
def __init__(self, modulePath):
    """Load a python module from the given path and keep its handles.

    Parameters:
    1. modulePath (str): path to the module to be loaded.

    Attributes:
    1. path (str): path to the module loaded.
    2. name (str): name of the module loaded.
    3. fd (fd): File Descriptor of the module loaded
    4. handle (obj): Handle to the object loaded
    """
    if not c_path.validate_file(modulePath):
        raise AttributeError('Module does not exist')

    # The module name is the file's base name stripped of its extension.
    base_name = os.path.basename(modulePath)
    mod_name, _ext = os.path.splitext(base_name)

    fd = open(modulePath, 'r')
    try:
        handle = imp.load_source(mod_name, modulePath, fd)
    except Exception:
        # Log the traceback, release the descriptor, then let the
        # original exception propagate to the caller.
        logger.debug(traceback.format_exc())
        fd.close()
        raise

    # On success the descriptor is kept open and owned by this object.
    self.path = modulePath
    self.name = mod_name
    self.fd = fd
    self.handle = handle
def __init__(self, config_module, config_path):
    """Parse and validate an XML config file using generateDs bindings.

    Parameters:
    1. config_module: generateDs-generated module providing parsexml_
       and get_root_tag.
    2. config_path (str): path to the XML config file.

    Attributes:
    1. root: generateDs root object built from the parsed document.
    2. config_path (str): normalized path of the config file.

    Raises:
    RuntimeError: if the config file does not exist.
    The original parse exception type, re-raised with an augmented
    message and the original traceback (Python 2 three-arg raise).
    """
    assert isinstance(config_path, str)
    self.root = None
    self.config_path = c_path.normalize(config_path)
    logger.debug2('Checking config file existence at ' + self.config_path)
    if not c_path.validate_file(self.config_path):
        raise RuntimeError('Cannot read config file at ' + self.config_path)
    logger.debug2(
        'Using generateDs APIs to parse config and validate syntax')
    try:
        doc = config_module.parsexml_(self.config_path)
        rootNode = doc.getroot()
        rootClass = config_module.get_root_tag(rootNode)[1]
        self.root = rootClass.factory()
        self.root.build(rootNode)
    except Exception as e:
        # Re-raise the same exception type with extra context while
        # preserving the original traceback (Python 2 syntax).
        raise type(e), type(e)('Error parsing config file: ' + str(e) + '\n'
                               '    ' + 'config_path: ' + self.config_path), sys.exc_info()[2]
    # Drop the parsed DOM; only the built object tree is retained.
    doc = None
    logger.debug2('Parsed config using generateDs')
def backup_file(filePath, maxBackups=10):
    """
    Create backup for the file. File.txt -> File_1.txt

    Parameters:
    1. filePath: File to be backed up
    2. maxBackups: Maximum number of backups to create in the location

    Return:
    1. returnValue: True if file back up is successful
    """
    success = False
    filename, extention = os.path.splitext(filePath)

    # Only rotate and copy when the file exists and is writable.
    if c_path.validate_file(filePath) and c_path.validate_file_write(filePath):
        # Shift existing backups up one slot, highest index first, so
        # File_k.txt becomes File_{k+1}.txt and the oldest is dropped.
        for index in range(maxBackups - 1, -1, -1):
            target = filename + '_{0}'.format(index + 1) + extention
            source = filename + '_{0}'.format(index) + extention

            # Free the target slot before sliding the older backup in.
            if c_path.validate_file(target):
                try:
                    os.remove(target)
                except Exception:
                    logger.debug(traceback.format_exc())
                    raise CoreError(
                        CoreErrorCode.GENERIC_FAILURE,
                        'Removing file: {0}'.format(sys.exc_info()[1]))

            if c_path.validate_file(source):
                try:
                    os.rename(source, target)
                except Exception:
                    logger.debug(traceback.format_exc())
                    raise CoreError(
                        CoreErrorCode.GENERIC_FAILURE,
                        'Renaming file: {0}'.format(sys.exc_info()[1]))

        # Slot 1 is now free; copy the live file into it.
        first_backup = filename + '_{0}'.format(1) + extention
        copied, copy_err = c_path.copyFile(filePath, first_backup,
                                           force=True)
        if not copied:
            raise CoreError(CoreErrorCode.GENERIC_FAILURE,
                            'Backing up: {0}'.format(copy_err))
        else:
            success = True
    return success
def backup_file(filePath, maxBackups=10):
    """
    Create backup for the file. File.txt -> File_1.txt

    Parameters:
    1. filePath: File to be backed up
    2. maxBackups: Maximum number of backups to create in the location

    Return:
    1. returnValue: True if file back up is successful

    Raises:
    CoreError: if removing, renaming, or copying a file fails.
    """
    returnValue = False
    filename, extention = os.path.splitext(filePath)
    # Only rotate/copy when the file exists and is writable; otherwise
    # return False without raising.
    if c_path.validate_file(filePath) and c_path.validate_file_write(filePath):
        # Rotate existing backups upward, highest index first:
        # File_k.txt -> File_{k+1}.txt, dropping the oldest slot.
        for index in reversed(range(0, maxBackups)):
            backup_file_path = filename + '_{0}'.format(index + 1) + extention
            origFile = filename + '_{0}'.format(index) + extention
            # Free the destination slot before renaming into it.
            if c_path.validate_file(backup_file_path):
                try:
                    os.remove(backup_file_path)
                except Exception:
                    logger.debug(traceback.format_exc())
                    raise CoreError(CoreErrorCode.GENERIC_FAILURE,
                                    'Removing file: {0}'.format(sys.exc_info()[1]))
            if c_path.validate_file(origFile):
                try:
                    os.rename(origFile, backup_file_path)
                except Exception:
                    logger.debug(traceback.format_exc())
                    raise CoreError(CoreErrorCode.GENERIC_FAILURE,
                                    'Renaming file: {0}'.format(sys.exc_info()[1]))
        # Slot 1 is now free; copy the live file into it.
        backup_file_path = filename + '_{0}'.format(1) + extention
        f_retValue, f_retErr = c_path.copyFile(filePath, backup_file_path, force=True)
        if not f_retValue:
            raise CoreError(CoreErrorCode.GENERIC_FAILURE,
                            'Backing up: {0}'.format(f_retErr))
        else:
            returnValue = True
    return returnValue
def extractZip(directory, zipfilename):
    """Extract all files from a zip package into the given directory.

    Parameters:
    1. directory: destination directory; must be writable.
    2. zipfilename: path of the zip archive to extract.

    Raises:
    RuntimeError: if the directory is not writable or the zip file
        cannot be read.
    """
    if c_path.validate_dir_write(directory) and c_path.validate_file(zipfilename):
        # NOTE(security): ZipFile.extractall trusts member names; a crafted
        # archive containing '../' entries can write outside *directory*
        # ("zip slip"). Only use on trusted archives, or sanitize member
        # paths before extraction.
        # Context manager guarantees the archive is closed even when
        # extraction raises (replaces the manual try/finally/close).
        with zipfile.ZipFile(zipfilename, "r") as zip_handler:
            zip_handler.extractall(directory)
    else:
        raise RuntimeError('cannot access directory or zip file')
def enable_file_logging(self, toolname, num_logs=50, log_dir=None):
    """Enable logging to files, finding a tmp location when none is given.

    Parameters:
    1. toolname (str): used to name the log folder ('<toolname>_logs')
       and the log files.
    2. num_logs (int): backups are rotated via backup_file only when
       num_logs > 1.
    3. log_dir (str): directory for the logs; when None, a folder under
       the TEMP/TMP environment directory is used.

    Return:
    1. (log_file, log_file_debug) paths when a log directory was
       available, ('', '') otherwise.

    NOTE(review): the path tuple is returned even when directory
    creation or backup rotation failed (the warning branches), i.e. when
    no file logger was actually attached — confirm callers expect this.
    """
    from c_misc import backup_file
    import c_path
    if log_dir is None:
        log_dir = ''
        # Probe the usual environment variables for a usable temp directory.
        for tmp_path in [os.getenv('TEMP'), os.getenv('TMP')]:
            if c_path.validate_dir(tmp_path):
                log_dir = c_path.join(tmp_path, '{0}_logs'.format(toolname))
                break
        else:
            # for/else: reached only when no candidate directory validated.
            self.warning('CANNOT FIND A LOCAL TEMP DIRECTORY TO CREATE LOGS')
    # Set Log Folder
    if log_dir:
        log_file = c_path.join(log_dir, '{0}_log.txt'.format(toolname))
        log_file_debug = c_path.join(log_dir, '{0}_log_debug.txt'.format(toolname))
        # Create the logs directory
        retval, reterr = True, ''
        try:
            c_path.create_dir(log_dir);
        except Exception as e:
            retval, reterr = False, str(e)
        if not retval:
            self.warning('CANNOT CREATE DIRECTORY FOR LOGGING FILE: {0}\n Path: {1}'.format(reterr, log_dir))
        # Rotate existing logs before reopening them in 'w' mode.
        elif num_logs > 1 and c_path.validate_file(log_file) and not backup_file(log_file, num_logs):
            self.warning('FAILED TO BACKUP LOGGING FILE: \n Path: {0}'.format(log_file))
        elif num_logs > 1 and c_path.validate_file(log_file_debug) and not backup_file(log_file_debug, num_logs):
            self.warning('FAILED TO BACKUP DEBUG LOGGING FILE: \n Path: {0}'.format(log_file_debug))
        else:
            self.add_file_logger(log_file, self.INFO, 'w')
            self.info('Logging to {0}'.format(log_file))
            # Only create debug file if debug is enabled
            if self.verbosity < self.INFO:
                self.add_file_logger(log_file_debug, self.verbosity, 'w')
                self.debug('Debug logging to {0}'.format(log_file_debug))
        return log_file, log_file_debug
    return '', ''
def __init__(self, config_path):
    """Parse a JSON config file into a dynamically generated object tree.

    Parameters:
    1. config_path (str): path to the JSON config file.

    Attributes:
    1. root: SelfGeneratingClass instance built from the parsed JSON.
    2. config_path (str): normalized path of the config file.

    Raises:
    RuntimeError: if the config file does not exist.
    The original parse exception type, re-raised with an augmented
    message and the original traceback (Python 2 three-arg raise).
    """
    assert isinstance(config_path, str)
    self.root = None
    self.config_path = c_path.normalize(config_path)
    logger.debug2('Checking config file existence at ' + self.config_path)
    if not c_path.validate_file(self.config_path):
        raise RuntimeError('Cannot read config file at ' + self.config_path)
    logger.debug2('Using JSON module to parse the config file.')
    try:
        # NOTE(review): opens the raw config_path, not the normalized
        # self.config_path that was validated above — confirm intended.
        with open(config_path, 'r') as fp:
            self.root = self.SelfGeneratingClass('Root',
                                                 json.load(fp, object_hook=_json_str_hook))
    except Exception as e:
        # Re-raise the same exception type with extra context while
        # preserving the original traceback (Python 2 syntax).
        raise type(e), type(e)('Error parsing config file: ' + str(e) + '\n'
                               '    ' + 'config_path: ' + self.config_path), sys.exc_info()[2]