def ParseSymbolFile(symFile):
    """Parse a Breakpad symbol file into a SymbolInfo.

    Reads PUBLIC and FUNC lines from the open file object *symFile*,
    mapping each hex address to its symbol name.

    Returns a SymbolInfo on success, or None if parsing raises.
    """
    try:
        symbolMap = {}
        publicCount = 0
        funcCount = 0
        # Iterate the file object directly instead of readlines(): avoids
        # materializing the whole file in memory at once, and matches the
        # streaming style used by FetchSymbolsFromFile in this file.
        for lineNum, line in enumerate(symFile):
            if line[0:7] == "PUBLIC ":
                # PUBLIC <address> <stack param size> <name...>
                fields = line.rstrip().split(" ")
                if len(fields) < 4:
                    LogDebug("Line " + str(lineNum + 1) + " is messed")
                    continue
                address = int(fields[1], 16)
                symbolMap[address] = " ".join(fields[3:])
                publicCount += 1
            elif line[0:5] == "FUNC ":
                # FUNC <address> <size> <stack param size> <name...>
                fields = line.rstrip().split(" ")
                if len(fields) < 5:
                    LogDebug("Line " + str(lineNum + 1) + " is messed")
                    continue
                address = int(fields[1], 16)
                symbolMap[address] = " ".join(fields[4:])
                funcCount += 1
    except Exception as e:
        LogError("Error parsing SYM file {}: {}".format(symFile, e))
        return None

    logString = "Found " + str(len(symbolMap)) + " unique entries from "
    logString += str(publicCount) + " PUBLIC lines, " + str(
        funcCount) + " FUNC lines"
    LogDebug(logString)
    return SymbolInfo(symbolMap)
def Get(self, lib):
    """Return the unpickled symbol data for *lib* ((name, breakpadId)),
    or None if the cached pickle is missing or unreadable."""
    path = self.MakePath(lib[0], lib[1])
    result = None
    try:
        with bz2.BZ2File(path, 'rb') as pickled:
            result = pickle.load(pickled)
    except (IOError, EOFError, pickle.PickleError) as err:
        LogError("Could not load pickled lib {}: {}".format(path, err))
        # A corrupt cache entry is useless; drop it so a later fetch can
        # replace it with a good one.
        try:
            os.remove(path)
            LogMessage("Removed unreadable pickled lib {}".format(path))
        except Exception as err2:
            LogError("Could not remove unreadable pickled file {}: {}".format(path, err2))
    except Exception as err:
        LogError("Unexpected error loading pickled lib[{}]: {}".format(path, err))
    return result
def get_reflow_profiles(self):
    """Yield each reflow profile embedded in self.log, decoded and
    decompressed.  Entries that fail to decode are logged and skipped."""
    profilestrings = gReflowProfileStringRE.findall(self.log)
    for profilestring in profilestrings:
        try:
            base64compressed = self._get_concatenated_base64(profilestring[0])
            compressed = base64.b64decode(base64compressed)
            profile = zlib.decompress(compressed)
        except Exception:
            # BUG FIX: the old bare "except:" also wrapped the yield, so it
            # swallowed GeneratorExit (raised when the generator is closed)
            # and KeyboardInterrupt.  Catch only Exception, and yield
            # outside the try block.
            LogError("decoding or uncompressing failed")
            continue
        yield profile
def get_build_dir(self, platform):
    """Return the URL directory (including the trailing '/') holding the
    build log for *platform*, or "" if no job has a log URL."""
    for info in self._get_jobs_on_platform(platform):
        if "blob" not in info or "logurl" not in info["blob"]:
            continue
        build_log_url = info["blob"]["logurl"]
        # Keep everything up to and including the final '/'.
        return build_log_url[:build_log_url.rfind('/') + 1]
    LogError(
        "The try push with revision {rev} does not have a build for platform {platform}."
        .format(rev=self.rev, platform=platform))
    return ""
def get_system_lib_symbols(self):
    """Yield a ZipFile for each system-library symbol bundle embedded
    (base64) in self.log.  Bundles that fail to decode are logged and
    skipped."""
    symbolstrings = gSymbolStringRE.findall(self.log)
    for symbolstring in symbolstrings:
        try:
            base64d = self._get_concatenated_base64(symbolstring[0])
            compressed = base64.b64decode(base64d)
            # NOTE(review): tempfile.mktemp is race-prone (the name can be
            # claimed between creation and open); mkstemp would be safer.
            # Kept to preserve existing behavior.
            path = tempfile.mktemp(".zip")
            # BUG FIX: the payload is a zip archive, so it must be written
            # in binary mode ("wb", not "w") or newline translation will
            # corrupt it; "with" also guarantees the handle is closed even
            # if the write fails.
            with open(path, "wb") as f:
                f.write(compressed)
            archive = zipfile.ZipFile(path, "r")
        except Exception:
            # BUG FIX: the old bare "except:" also wrapped the yield, so it
            # swallowed GeneratorExit when the generator was closed.
            LogError("reading system library symbols failed")
            continue
        yield archive
def ReadConfigFile():
    """Read the optional config file named on the command line into gOptions.

    Usage: the script takes at most one argument, a config file path with
    'General', 'SymbolPaths' and 'SymbolURLs' sections.

    Returns True on success (or when no config file was given), False on
    any usage, open, or parse error.
    """
    if len(sys.argv) == 1:
        return True
    if len(sys.argv) > 2:
        LogError("Usage: symbolicationWebService.py [<config file>]")
        return False

    try:
        configParser = ConfigParser.ConfigParser()
        # Make parser case-sensitive
        configParser.optionxform = str
        # "with" guarantees the file is closed even if parsing raises
        # (the old code leaked the handle on a parse error).
        with open(sys.argv[1], "r") as configFile:
            configParser.readfp(configFile)
    except ConfigParser.Error as e:
        LogError("Unable to parse config file " + sys.argv[1] + ": " + str(e))
        # BUG FIX: this branch previously fell through and kept going with
        # a half-populated parser; a parse failure must abort.
        return False
    except Exception:
        LogError("Unable to open config file " + sys.argv[1])
        return False

    # Check for section names
    if set(configParser.sections()) != set(
            ["General", "SymbolPaths", "SymbolURLs"]):
        LogError(
            "Config file should be made up of three sections: 'General', 'SymbolPaths' and 'SymbolURLs'"
        )
        return False

    for (option, value) in configParser.items("General"):
        if option not in gOptions:
            LogError("Unknown config option '" + option +
                     "' in the 'General' section of config file")
            return False
        elif type(gOptions[option]) == int:
            try:
                value = int(value)
            except ValueError:
                LogError("Integer value expected for config option '" +
                         option + "'")
                return False
        gOptions[option] = value

    # Get the list of symbol paths from the config file
    configPaths = configParser.items("SymbolPaths")
    if configPaths:
        # Drop defaults if config file entries exist
        gOptions["symbolPaths"] = [path for name, path in configPaths]

    # Get the list of symbol URLs from the config file
    configURLs = configParser.items("SymbolURLs")
    if configURLs:
        gOptions["symbolURLs"] = [url for name, url in configURLs]

    return True
def FetchSymbolsFromFile(self, path):
    """Parse the Breakpad .sym file at *path* into a SymbolInfo.

    Returns a SymbolInfo mapping each hex address to its symbol name, or
    None if the file cannot be opened or parsed.
    """
    try:
        symFile = open(path, "r")
    except Exception as e:
        LogTrace("Error opening file " + path + ": " + str(e))
        return None

    LogMessage("Parsing SYM file at " + path)
    try:
        # BUG FIX: the old code never closed symFile (leaked on both the
        # success and the error path); "with" guarantees closure.
        with symFile:
            symbolMap = {}
            publicCount = 0
            funcCount = 0
            for lineNum, line in enumerate(symFile, start=1):
                if line[0:7] == "PUBLIC ":
                    # PUBLIC <address> <stack param size> <name...>
                    fields = line.rstrip().split(" ")
                    if len(fields) < 4:
                        LogTrace("Line " + str(lineNum) + " is messed")
                        continue
                    address = int(fields[1], 16)
                    symbolMap[address] = " ".join(fields[3:])
                    publicCount += 1
                elif line[0:5] == "FUNC ":
                    # FUNC <address> <size> <stack param size> <name...>
                    fields = line.rstrip().split(" ")
                    if len(fields) < 5:
                        LogTrace("Line " + str(lineNum) + " is messed")
                        continue
                    address = int(fields[1], 16)
                    symbolMap[address] = " ".join(fields[4:])
                    funcCount += 1
    except Exception:
        LogError("Error parsing SYM file " + path)
        return None

    logString = "Found " + str(len(symbolMap)) + " unique entries from "
    logString += str(publicCount) + " PUBLIC lines, " + str(
        funcCount) + " FUNC lines"
    LogTrace(logString)
    return SymbolInfo(symbolMap)
def PrefetchRecentSymbolFiles(self):
    """Warm the symbol cache with the most-recently-used symbol files
    recorded in the MRU state file.  MRU-file updates are suppressed while
    the prefetch runs and re-enabled afterwards."""
    try:
        mruSymbols = []
        with open(self.sOptions["mruSymbolStateFile"], "rb") as f:
            keep = self.sOptions["maxMRUSymbolsPersist"]
            mruSymbols = json.load(f)["symbols"][:keep]
        LogMessage("Going to prefetch %d recent symbol files" %
                   len(mruSymbols))
        self.sUpdateMRUFile = False
        for libName, breakpadId in mruSymbols:
            if self.GetLibSymbolMap(libName, breakpadId) is None:
                LogTrace("Failed to prefetch symbols for (%s,%s)" %
                         (libName, breakpadId))
        LogMessage("Finished prefetching recent symbol files")
    except IOError:
        LogError("Error reading MRU symbols state file")
    finally:
        # Always re-enable MRU persistence, even if the state file was
        # unreadable.
        self.sUpdateMRUFile = True
def _get_jobs_on_platform(self, platform): if not platform in self.treeherder_platformnames: LogError( "Unknown try platform {platform}.".format(platform=platform)) return job_property_index_id = self.treeherder_data[ "job_property_names"].index("id") for result in self.treeherder_data["results"]: for th_platform in result["platforms"]: if th_platform["name"] != self.treeherder_platformnames[ platform]: continue for group in th_platform["groups"]: for job in group["jobs"]: job_id = job[job_property_index_id] job_info = self._get_json( "https://treeherder.mozilla.org/api/project/try/artifact/?job_id=%d&name=Job+Info&type=json" % job_id) for info in job_info: yield info
def log_error(self, *args):
    """Log a printf-style error: args[0] is the format string, the
    remaining args are its substitution values."""
    fmt, values = args[0], tuple(args[1:])
    LogError(fmt % values)
def PrefetchRecentSymbolFiles(self):
    """Periodically prefetch recently-updated symbol files for the libraries
    in PREFETCHED_LIBS into the in-memory symbol cache.

    Re-arms its own timer, scans each library's symbol directory for
    entries modified within the prefetch threshold window, skips entries
    already cached, parses the rest from disk, and inserts them into
    self.sCache under the cache lock (evicting old entries to make room).
    """
    global PREFETCHED_LIBS

    LogMessage("Prefetching recent symbol files")
    # Schedule next timer callback
    interval = self.sOptions['prefetchInterval'] * 60 * 60
    self.sCallbackTimer = threading.Timer(interval,
                                          self.PrefetchRecentSymbolFiles)
    self.sCallbackTimer.start()

    # Only directories modified after this time are considered "recent".
    thresholdTime = time.time() - \
        self.sOptions['prefetchThreshold'] * 60 * 60
    symDirsToInspect = {}
    for pdbName in PREFETCHED_LIBS:
        symDirsToInspect[pdbName] = []
        topLibPath = self.sOptions['symbolPaths'][
            'FIREFOX'] + os.sep + pdbName
        try:
            symbolDirs = os.listdir(topLibPath)
            for symbolDir in symbolDirs:
                candidatePath = topLibPath + os.sep + symbolDir
                mtime = os.path.getmtime(candidatePath)
                if mtime > thresholdTime:
                    symDirsToInspect[pdbName].append(
                        (mtime, candidatePath))
        except Exception as e:
            LogError("Error while pre-fetching: " + str(e))
        LogMessage("Found " + str(len(symDirsToInspect[pdbName])) +
                   " new " + pdbName + " recent dirs")
        # Only prefetch the most recent N entries
        symDirsToInspect[pdbName].sort(reverse=True)
        symDirsToInspect[pdbName] = symDirsToInspect[
            pdbName][:self.sOptions['prefetchMaxSymbolsPerLib']]

    # Don't fetch symbols already in cache.
    # Ideally, mutex would be held from check to insert in self.sCache,
    # but we don't want to hold the lock during I/O. This won't cause
    # inconsistencies.
    self.sCacheLock.acquire()
    try:
        for pdbName in symDirsToInspect:
            # BUG FIX: the old code called .remove() on the list while
            # iterating over it, which silently skips the element that
            # follows each removal.  Build a filtered list instead.
            symDirsToInspect[pdbName] = [
                (mtime, symbolDirPath)
                for (mtime, symbolDirPath) in symDirsToInspect[pdbName]
                if not (pdbName in self.sCache and
                        os.path.basename(symbolDirPath) in
                        self.sCache[pdbName])
            ]
    finally:
        self.sCacheLock.release()

    # Read all new symbol files in at once
    fetchedSymbols = {}
    fetchedCount = 0
    for pdbName in symDirsToInspect:
        # The corresponding symbol file name ends with .sym
        symFileName = re.sub(r"\.[^\.]+$", ".sym", pdbName)
        for (mtime, symbolDirPath) in symDirsToInspect[pdbName]:
            pdbId = os.path.basename(symbolDirPath)
            symbolFilePath = symbolDirPath + os.sep + symFileName
            symbolInfo = self.FetchSymbolsFromFile(symbolFilePath)
            if symbolInfo:
                # Stop if the prefetched items are bigger than the cache
                if fetchedCount + symbolInfo.GetEntryCount() > \
                        self.sOptions["maxCacheEntries"]:
                    break
                fetchedSymbols[(pdbName, pdbId)] = symbolInfo
                fetchedCount += symbolInfo.GetEntryCount()
            else:
                LogError("Couldn't fetch .sym file symbols for " +
                         symbolFilePath)
                continue

    # Insert new symbols into global symbol cache
    self.sCacheLock.acquire()
    try:
        # Make room for the new symbols
        self.MaybeEvict(fetchedCount)
        for (pdbName, pdbId) in fetchedSymbols:
            if pdbName not in self.sCache:
                self.sCache[pdbName] = {}
            if pdbId in self.sCache[pdbName]:
                continue
            newSymbolFile = fetchedSymbols[(pdbName, pdbId)]
            self.sCache[pdbName][pdbId] = newSymbolFile
            self.sCacheCount += newSymbolFile.GetEntryCount()
            # Move new symbols to front of MRU list to give them a chance
            self.UpdateMruList(pdbName, pdbId)
    finally:
        self.sCacheLock.release()

    LogMessage("Finished prefetching recent symbol files")
def FetchSymbolsFromFile(self, path):
    """Parse the symbol file at *path* into a SymbolInfo.

    Supports two formats, selected by extension:
      - ".sym":   Breakpad text symbols (PUBLIC / FUNC lines)
      - ".nmsym": nm-style output ("address [letter] symbol" lines)

    Returns a SymbolInfo mapping addresses to symbol names, or None if
    the file cannot be opened or parsed.
    """
    try:
        symFile = open(path, "r")
    except Exception as e:
        LogTrace("Error opening file " + path + ": " + str(e))
        return None

    LogMessage("Parsing SYM file at " + path)
    try:
        # BUG FIX: the old code never closed symFile (leaked on both the
        # success and the error path); "with" guarantees closure.
        with symFile:
            symbolMap = {}
            lineNum = 0
            publicCount = 0
            funcCount = 0
            if path.endswith(".sym"):
                for line in symFile:
                    lineNum += 1
                    if line[0:7] == "PUBLIC ":
                        fields = line.rstrip().split(" ")
                        if len(fields) < 4:
                            LogTrace("Line " + str(lineNum) + " is messed")
                            continue
                        address = int(fields[1], 16)
                        symbolMap[address] = " ".join(fields[3:])
                        publicCount += 1
                    elif line[0:5] == "FUNC ":
                        fields = line.rstrip().split(" ")
                        if len(fields) < 5:
                            LogTrace("Line " + str(lineNum) + " is messed")
                            continue
                        address = int(fields[1], 16)
                        symbolMap[address] = " ".join(fields[4:])
                        funcCount += 1
            elif path.endswith(".nmsym"):
                # Address width is inferred from the first non-indented
                # line and assumed constant for the rest of the file.
                addressLength = 0
                for line in symFile:
                    lineNum += 1
                    if line.startswith(" "):
                        continue
                    if addressLength == 0:
                        addressLength = line.find(" ")
                    address = int(line[0:addressLength], 16)
                    # Some lines have the form
                    # "address space letter space symbol",
                    # some have the form "address space symbol".
                    # The letter has a meaning, but we ignore it.
                    if line[addressLength + 2] == " ":
                        symbol = line[addressLength + 3:].rstrip()
                    else:
                        symbol = line[addressLength + 1:].rstrip()
                    symbolMap[address] = symbol
                    publicCount += 1
    except Exception:
        LogError("Error parsing SYM file " + path)
        return None

    logString = "Found " + \
        str(len(symbolMap)) + " unique entries from "
    logString += str(publicCount) + " PUBLIC lines, " + \
        str(funcCount) + " FUNC lines"
    LogTrace(logString)
    return SymbolInfo(symbolMap)
def LogError(self, string):
    """Log *string* as an error, tagged with this request's remote IP.

    Delegates to the module-level LogError function (inside a method
    body the name resolves to the global, not this method).
    """
    remote = self.remoteIp
    LogError(string, remote)
def ForwardRequest(self, indexes, stack, modules, symbolicatedStack):
    """Forward unresolved PCs to the configured remote symbolication server.

    Builds a compact request (re-indexed module list plus [moduleIndex,
    offset] stack entries), POSTs it as JSON, first with protocol
    version 4 and — if that single request fails — once more with
    version 3.  Resolved symbols are written into *symbolicatedStack* at
    the positions given by *indexes*; for a v4 response, modules the
    server recognized are flagged in self.knownModules.  Logs and
    returns early on any error.

    indexes            -- maps response positions back to positions in
                          symbolicatedStack
    stack              -- [moduleIndex, offset] entries to symbolicate
    modules            -- iterable of (moduleIndex, module) pairs; each
                          module has libName and breakpadId attributes
    symbolicatedStack  -- output list, mutated in place
    """
    LogTrace("Forwarding " + str(len(stack)) + " PCs for symbolication")

    try:
        url = self.symFileManager.sOptions["remoteSymbolServer"]
        rawModules = []
        moduleToIndex = {}
        newIndexToOldIndex = {}
        # Re-index modules compactly for the outgoing request, remembering
        # how to map the server's indexes back to ours.
        for moduleIndex, m in modules:
            l = [m.libName, m.breakpadId]
            newModuleIndex = len(rawModules)
            rawModules.append(l)
            moduleToIndex[m] = newModuleIndex
            newIndexToOldIndex[newModuleIndex] = moduleIndex

        # Rewrite each stack entry in terms of the compact module indexes.
        rawStack = []
        for entry in stack:
            moduleIndex = entry[0]
            offset = entry[1]
            module = self.combinedMemoryMap[moduleIndex]
            newIndex = moduleToIndex[module]
            rawStack.append([newIndex, offset])

        # Try protocol version 4 first; on failure fall back to 3 once.
        requestVersion = 4
        while True:
            requestObj = {
                "symbolSources": self.symbolSources,
                "stacks": [rawStack],
                "memoryMap": rawModules,
                # Bump the forward count so loops between servers terminate.
                "forwarded": self.forwardCount + 1,
                "version": requestVersion
            }
            requestJson = json.dumps(requestObj)
            headers = {"Content-Type": "application/json"}
            requestHandle = urllib2.Request(url, requestJson, headers)
            try:
                response = urllib2.urlopen(requestHandle)
            except Exception as e:
                if requestVersion == 4:
                    # Try again with version 3
                    requestVersion = 3
                    continue
                raise e
            succeededVersion = requestVersion
            break
    except Exception as e:
        LogError("Exception while forwarding request: " + str(e))
        return

    try:
        responseJson = json.loads(response.read())
    except Exception as e:
        LogError(
            "Exception while reading server response to forwarded request: "
            + str(e))
        return

    try:
        # The response shape depends on which protocol version succeeded.
        if succeededVersion == 4:
            responseKnownModules = responseJson['knownModules']
            for newIndex, known in enumerate(responseKnownModules):
                if known and newIndex in newIndexToOldIndex:
                    self.knownModules[newIndexToOldIndex[newIndex]] = True
            responseSymbols = responseJson['symbolicatedStacks'][0]
        else:
            responseSymbols = responseJson[0]
        if len(responseSymbols) != len(stack):
            LogError(
                str(len(responseSymbols)) + " symbols in response, " +
                str(len(stack)) + " PCs in request!")
            return

        # Copy each resolved symbol to its original slot in the output.
        for index in range(0, len(stack)):
            symbol = responseSymbols[index]
            originalIndex = indexes[index]
            symbolicatedStack[originalIndex] = symbol
    except Exception as e:
        LogError(
            "Exception while parsing server response to forwarded request: "
            + str(e))
        return