def ConfigureLogging(self):
    '''Set up logging.  This function only needs to be overridden
    if new behavior is needed.'''
    logger = logging.getLogger('')
    logger.setLevel(self.GetLoggingLevel("base"))

    # Set up section headers and the console handler at the configured "con" level.
    edk2_logging.setup_section_level()
    edk2_logging.setup_console_logging(self.GetLoggingLevel("con"))

    log_directory = os.path.join(self.GetWorkspaceRoot(),
                                 self.GetLoggingFolderRelativeToRoot())

    # The text log is only created when a "txt" logging level is configured.
    txtlogfile = self.GetLoggingLevel("txt")
    if txtlogfile is not None:
        logfile, filelogger = edk2_logging.setup_txt_logger(
            log_directory, self.GetLoggingFileName("txt"), txtlogfile)
        self.log_filename = logfile

    # Likewise, the markdown log is only created when an "md" level is configured.
    md_log_file = self.GetLoggingLevel("md")
    if md_log_file is not None:
        md_file, md_logger = edk2_logging.setup_markdown_logger(
            log_directory, self.GetLoggingFileName("md"), md_log_file)

    logging.info("Log Started: " + datetime.strftime(
        datetime.now(), "%A, %B %d, %Y %I:%M%p"))
    return
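# --- Illustrative sketch (not part of the original code) --------------------
# ConfigureLogging above composes the edk2_logging helpers around the levels
# returned by GetLoggingLevel.  The snippet below uses the same helpers
# standalone, with the same call shapes seen above.  The edk2toolext import
# path, the temporary directory, and the "DEMOLOG" name are assumptions for
# illustration only.
import logging
import tempfile

from edk2toolext import edk2_logging  # assumed import path

logger = logging.getLogger('')
logger.setLevel(logging.DEBUG)

log_dir = tempfile.mkdtemp()
# Same call shape as in ConfigureLogging: returns (log file path, file handler).
logfile, filelogger = edk2_logging.setup_txt_logger(
    log_dir, "DEMOLOG", logging.DEBUG)

logging.info("Demo message")
logging.info("Log written to %s", logfile)

edk2_logging.stop_logging(filelogger)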
def test_can_create_console_logger(self):
    console_logger = edk2_logging.setup_console_logging(False, False)
    self.assertIsNot(console_logger, None, "We created a console logger")
    edk2_logging.stop_logging(console_logger)
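# --- Illustrative sketch (not part of the original test suite) --------------
# A companion test for the text-file logger, in the same style as the console
# test above.  It assumes os, tempfile, and logging are imported at the top of
# the test module; the "test_txt" file name is a placeholder.
def test_can_create_txt_logger(self):
    tmp_dir = tempfile.mkdtemp()
    location, txt_logger = edk2_logging.setup_txt_logger(
        tmp_dir, "test_txt", logging.DEBUG)
    logging.info("testing")
    self.assertTrue(os.path.isfile(location), "We should have created a log file")
    self.assertIsNot(txt_logger, None, "We created a txt logger")
    edk2_logging.stop_logging(txt_logger)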
def main():
    # set up the main console logger
    logger = logging.getLogger('')
    logger.setLevel(logging.NOTSET)
    console = edk2_logging.setup_console_logging(False)
    logger.addHandler(console)

    ErrorCode = 0
    auto_fetch = False
    input_config_remotes = None

    # parse command line arguments
    args = get_cli_options()

    if args.debug:
        console.setLevel(logging.DEBUG)

    logging.info("Log Started: " + datetime.datetime.strftime(
        datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p"))

    args.cache_dir = CommonFilePathHandler(args.cache_dir)
    logging.debug("OMNICACHE dir: {0}".format(args.cache_dir))

    # input config file for adding new entries
    if args.input_config_file is not None:
        args.input_config_file = CommonFilePathHandler(args.input_config_file)
        if not os.path.isfile(args.input_config_file):
            logging.critical(
                "Invalid -c argument given. File ({0}) isn't valid".format(
                    args.input_config_file))
            return -4

    logging.debug("Args: " + str(args))

    omnicache_config = None  # config object
    omnicache_config_file = os.path.join(args.cache_dir, OMNICACHE_FILENAME)

    if args.new:
        if os.path.isdir(args.cache_dir):
            logging.critical(
                "--new argument given but OMNICACHE path already exists!")
            return -1
        InitOmnicache(args.cache_dir)
        auto_fetch = True

    if args.init:
        if os.path.isdir(args.cache_dir):
            if os.path.isfile(omnicache_config_file):
                logging.debug(
                    "OMNICACHE already exists. No need to initialize")
        else:
            InitOmnicache(args.cache_dir)
        auto_fetch = True

    # Check that the cache directory exists
    if not os.path.isdir(args.cache_dir):
        logging.critical("OMNICACHE path invalid.")
        return -2

    # load config
    omnicache_config = OmniCacheConfig(omnicache_config_file)

    os.chdir(args.cache_dir)

    if len(args.add) > 0:
        auto_fetch = True
        for inputdata in args.add:
            if len(inputdata) == 2:
                AddEntry(omnicache_config, inputdata[0], inputdata[1])
            elif len(inputdata) == 3:
                AddEntry(omnicache_config, inputdata[0], inputdata[1],
                         bool(inputdata[2]))
            else:
                logging.critical(
                    "Invalid Add Entry. Should be "
                    "<name> <url> <Sync Tags optional default=False>")
                return -3

    if args.input_config_file is not None:
        (count, input_config_remotes) = AddEntriesFromConfig(
            omnicache_config, args.input_config_file)
        if count > 0:
            auto_fetch = True

    if len(args.remove) > 0:
        for inputdata in args.remove:
            RemoveEntry(omnicache_config, inputdata)

    # if we need to scan a directory for repos
    if args.scan is not None:
        logging.critical("OMNICACHE is scanning the folder %s." % args.scan)
        if not os.path.isdir(args.scan):
            logging.error("Invalid scan directory")
            return -4
        reposFound = dict()
        # iterate through top level directories
        dirs = os.listdir(args.scan)
        while len(dirs) > 0:
            item = dirs.pop()
            itemDir = os.path.join(args.scan, item)
            if os.path.isfile(itemDir):
                continue
            logging.info("Scanning %s for a git repo" % item)
            gitDir = os.path.join(itemDir, ".git")
            # Check if it's a directory or a file
            # (submodules usually have a file instead of a folder)
            if os.path.isdir(gitDir) or os.path.isfile(gitDir):
                repo = Repo(itemDir)
                if repo.url:
                    if repo.url not in reposFound:
                        reposFound[repo.url] = item
                    else:
                        logging.warning(
                            "Skipping previously found repo at %s with url %s"
                            % (item, repo.url))
                else:  # if repo.url is none
                    logging.error("Url not found for git repo at: %s" % itemDir)
                # check for submodules
                if repo.submodules:
                    for submodule in repo.submodules:
                        dirs.append(os.path.join(item, submodule))
            else:
                logging.error("Git repo not found at %s" % itemDir)
        # go through all the URLs found and add them to the config
        for url in reposFound:
            omnicache_config.Add(reposFound[url], url)

    omnicache_config.Save()

    if args.fetch or (auto_fetch and not args.no_fetch):
        logging.critical("Updating OMNICACHE")
        # As an optimization, if an input config file was provided, only fetch
        # the remotes specified in it; otherwise fetch all remotes in the OmniCache.
        if input_config_remotes is not None:
            remotes = (x["name"] for x in input_config_remotes)
        else:
            remotes = omnicache_config.remotes.keys()
        for remote in remotes:
            ret = FetchEntry(omnicache_config.remotes[remote]["name"],
                             ("tag" in omnicache_config.remotes[remote]))
            if (ret != 0) and (ErrorCode == 0):
                ErrorCode = ret

    if args.list:
        ret = ConsistencyCheckCacheConfig(omnicache_config)
        if (ret != 0) and (ErrorCode == 0):
            ErrorCode = ret
        print("List OMNICACHE content\n")
        if len(omnicache_config.remotes) == 0:
            logging.warning("No Remotes to show")
        for remote in omnicache_config.remotes.values():
            rstring = "Name: {0}\n Url: {1}\n Sync Tags: {2}".format(
                remote["name"], remote["url"], ("tag" in remote))
            print(" " + rstring + "\n\n")

    print("To use your OMNICACHE set the env variable:")
    print("set OMNICACHE_PATH=" + args.cache_dir)

    return ErrorCode
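# --- Illustrative sketch (not part of the original code) --------------------
# main() drives OmniCacheConfig through the CLI, but the same object can be
# used directly with only the calls visible above: OmniCacheConfig(path),
# .Add(name, url), and .Save().  The cache path and URL below are placeholders,
# and this assumes the cache directory was already initialized (e.g. via --init).
import os

cache_dir = os.path.abspath("my_omnicache")  # placeholder path
config = OmniCacheConfig(os.path.join(cache_dir, OMNICACHE_FILENAME))
config.Add("example", "https://example.com/example.git")  # <name> <url>
config.Save()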