def Main():
    """Entry point: load the config, warm the symbol cache, and serve HTTP
    requests until interrupted. Returns 0 on clean shutdown, 1 on bad config."""
    global gSymFileManager, gOptions

    if not ReadConfigFile():
        return 1

    SetTracingEnabled(gOptions["enableTracing"] > 0)

    # Singleton manager for the .SYM file cache; prefetch recent symbols
    # before accepting requests.
    gSymFileManager = SymFileManager(gOptions)
    gSymFileManager.PrefetchRecentSymbolFiles()

    LogMessage("Starting server with the following options:\n" + str(gOptions))

    # Start the Web service
    server = ThreadedHTTPServer(
        (gOptions['hostname'], gOptions['portNumber']), RequestHandler)
    LogMessage("Server started - " + gOptions['hostname'] + ":" +
               str(gOptions['portNumber']))

    try:
        server.serve_forever()
    except KeyboardInterrupt:
        LogMessage("Received SIGINT, stopping...")

    server.server_close()
    LogMessage("Server stopped - " + gOptions['hostname'] + ":" +
               str(gOptions['portNumber']))
    return 0
# --- Ejemplo n.º 2 ---
  def LoadCacheEntries(self, MRU, diskCache):
    """Populate the in-memory cache from the disk cache.

    Loads up to self.MAX_SIZE entries from the front of the MRU list.
    Entries missing from the disk cache are removed from MRU in place.
    """
    LogMessage("Initializing memory cache from disk cache")
    for lib in MRU[:self.MAX_SIZE]:
      LogDebug("Loading library " + str(lib))
      cachedLib = diskCache.Get(lib)
      if cachedLib:
        # Reuse the value already fetched instead of hitting the disk cache
        # a second time. (The original line was also tab-indented, which is
        # a TabError in Python 3.)
        self.sCache[lib] = cachedLib
      else:
        # this is safe, iterating over a "copy" of the MRU because of slice operator
        MRU.remove(lib)
    LogMessage("Finished initializing memory cache from disk cache")
# --- Ejemplo n.º 3 ---
 def integrate_symbol_zip_from_url(self, symbol_zip_url):
     """Download the symbol zip at *symbol_zip_url* (30s timeout) and
     integrate it, skipping URLs that were already integrated."""
     if self.have_integrated(symbol_zip_url):
         return
     LogMessage("Retrieving symbol zip from {symbol_zip_url}...".format(
         symbol_zip_url=symbol_zip_url))
     try:
         response = urllib2.urlopen(symbol_zip_url, None, 30)
         payload = cStringIO.StringIO(response.read())
         with zipfile.ZipFile(payload) as archive:
             self.integrate_symbol_zip(archive)
         self._create_file_if_not_exists(self._marker_file(symbol_zip_url))
     except IOError:
         LogMessage("Symbol zip request failed.")
# --- Ejemplo n.º 4 ---
def load_json_debug(s):
    """Parse JSON from *s*, logging a snippet of the input on failure.

    Re-raises the original exception unchanged.
    """
    try:
        return json.loads(s)
    except Exception:
        LogMessage("Error loading JSON from string %s...%s" %
                   (s[0:100], s[-100:]))
        # Bare raise keeps the original traceback; "raise e" would restart
        # it at this frame.
        raise
 def FetchSymbolsFromFile(self, path):
     """Parse the SYM file at *path*; return the parsed result, or None on
     any error (logged at debug level)."""
     try:
         with open(path, "r") as handle:
             LogMessage("Parsing SYM file at " + path)
             return ParseSymbolFile(handle)
     except Exception as err:
         LogDebug("Error opening file " + path + ": " + str(err))
         return None
    def __init__(self, options):
        """Set up the fetch pipeline and the two-level (memory/disk) cache.

        Trims the persisted MRU list to the disk cache capacity, evicting
        any overflow, then seeds the memory cache from disk.
        """
        self.sOptions = options

        self.fetchPipeline = (PathFetcher(options), URLFetcher(options))
        self.memoryCache = MemoryCache(options)
        self.diskCache = DiskCache(options)
        assert self.memoryCache.MAX_SIZE <= self.diskCache.MAX_SIZE

        self.MRU = self.diskCache.GetCacheEntries()

        # Anything beyond the disk cache capacity is evicted outright.
        limit = self.diskCache.MAX_SIZE
        if len(self.MRU) > limit:
            overflow = self.MRU[limit:]
            self.MRU = self.MRU[:limit]
            self.diskCache.Evict(overflow)

        self.memoryCache.LoadCacheEntries(self.MRU, self.diskCache)

        LogMessage("MRU loaded with {} entries".format(len(self.MRU)))
        LogMessage("Server is ready")
 def PrefetchRecentSymbolFiles(self):
     """Warm the cache with the most recently used symbol files.

     Reads (libName, breakpadId) pairs from the MRU state file and fetches
     each one. MRU-file updates are suppressed while prefetching and always
     re-enabled afterwards.
     """
     try:
         recent = []
         with open(self.sOptions["mruSymbolStateFile"], "rb") as stateFile:
             recent = json.load(
                 stateFile)["symbols"][:self.sOptions["maxMRUSymbolsPersist"]]
         LogMessage("Going to prefetch %d recent symbol files" %
                    len(recent))
         self.sUpdateMRUFile = False
         for libName, breakpadId in recent:
             if self.GetLibSymbolMap(libName, breakpadId) is None:
                 LogTrace("Failed to prefetch symbols for (%s,%s)" %
                          (libName, breakpadId))
         LogMessage("Finished prefetching recent symbol files")
     except IOError:
         LogError("Error reading MRU symbols state file")
     finally:
         self.sUpdateMRUFile = True
 def FetchSymbolsFromURL(self, url):
     """Fetch and parse a SYM file over HTTP; return None on any failure
     (non-200 status or any exception, logged at trace level)."""
     try:
         with contextlib.closing(urllib2.urlopen(url)) as response:
             if response.getcode() != 200:
                 return None
             LogMessage("Parsing SYM file at " + url)
             return self.FetchSymbolsFromFileObj(response)
     except Exception as err:
         LogTrace("Error opening URL " + url + ": " + str(err))
         return None
# --- Ejemplo n.º 9 ---
 def get_talos_profiles(self, zip_url):
     """Yield (subtest_name, FileInZip) pairs for each member of the profile
     zip at *zip_url* (30s download timeout)."""
     LogMessage(
         "Retrieving profile zip from {zip_url}...".format(zip_url=zip_url))
     # Renamed from "io" to avoid shadowing the stdlib module name.
     response = urllib2.urlopen(zip_url, None, 30)
     payload = cStringIO.StringIO(response.read())
     response.close()
     archive = zipfile.ZipFile(payload)
     for member in archive.namelist():
         # The member path's parent directory ends with the subtest name.
         subtest = os.path.basename(os.path.dirname(member))
         yield (subtest, FileInZip(archive, member))
 def integrate_symbol_zip_from_url(self, symbol_zip_url):
   """Download the symbol zip at *symbol_zip_url* (30s timeout), integrate
   it, and drop a marker file so it is not integrated again."""
   if self.have_integrated(symbol_zip_url):
     return
   LogMessage("Retrieving symbol zip from {symbol_zip_url}...".format(symbol_zip_url=symbol_zip_url))
   response = urllib2.urlopen(symbol_zip_url, None, 30)
   payload = cStringIO.StringIO(response.read())
   archive = zipfile.ZipFile(payload)
   response.close()
   self.integrate_symbol_zip(archive)
   archive.close()
   self._create_file_if_not_exists(self._marker_file(symbol_zip_url))
# --- Ejemplo n.º 11 ---
    def FetchSymbolsFromFile(self, path):
        """Parse the breakpad SYM file at *path* into an address->name map.

        Only PUBLIC and FUNC records are read. Returns a SymbolInfo on
        success, or None if the file cannot be opened or parsed.
        """
        try:
            symFile = open(path, "r")
        except Exception as e:
            LogTrace("Error opening file " + path + ": " + str(e))
            return None

        LogMessage("Parsing SYM file at " + path)

        try:
            # "with" guarantees the handle is closed on every path — the
            # original leaked it on both success and parse failure.
            with symFile:
                symbolMap = {}
                lineNum = 0
                publicCount = 0
                funcCount = 0
                for line in symFile:
                    lineNum += 1
                    if line[0:7] == "PUBLIC ":
                        # PUBLIC record: address is fields[1] (hex), the
                        # symbol name is everything from fields[3] on.
                        fields = line.rstrip().split(" ")
                        if len(fields) < 4:
                            LogTrace("Line " + str(lineNum) + " is messed")
                            continue
                        address = int(fields[1], 16)
                        symbolMap[address] = " ".join(fields[3:])
                        publicCount += 1
                    elif line[0:5] == "FUNC ":
                        # FUNC record: address is fields[1] (hex), the
                        # symbol name starts at fields[4].
                        fields = line.rstrip().split(" ")
                        if len(fields) < 5:
                            LogTrace("Line " + str(lineNum) + " is messed")
                            continue
                        address = int(fields[1], 16)
                        symbolMap[address] = " ".join(fields[4:])
                        funcCount += 1
        except Exception:
            LogError("Error parsing SYM file " + path)
            return None

        logString = "Found " + str(len(
            symbolMap.keys())) + " unique entries from "
        logString += str(publicCount) + " PUBLIC lines, " + str(
            funcCount) + " FUNC lines"
        LogTrace(logString)

        return SymbolInfo(symbolMap)
# --- Ejemplo n.º 12 ---
  def Get(self, lib):
    """Load the pickled cache entry for *lib* from disk.

    Unreadable pickle files are deleted so they can be re-fetched later.
    Returns the unpickled object, or None when it cannot be loaded.
    """
    path = self.MakePath(lib[0], lib[1])
    result = None

    try:
      with bz2.BZ2File(path, 'rb') as pickled:
        result = pickle.load(pickled)
    except (IOError, EOFError, pickle.PickleError) as ex:
      LogError("Could not load pickled lib {}: {}".format(path, ex))
      # Best-effort cleanup of the corrupt file; failure is only logged.
      try:
        os.remove(path)
        LogMessage("Removed unreadable pickled lib {}".format(path))
      except Exception as ex2:
        LogError("Could not remove unreadable pickled file {}: {}".format(path, ex2))
    except Exception as ex:
      LogError("Unexpected error loading pickled lib[{}]: {}".format(path, ex))

    return result
    def FetchSymbolsFromURL(self, url):
        """Fetch a SYM file over HTTP and parse it, transparently handling
        gzip/deflate content encoding.

        Returns the parsed symbol data, or None when the request fails or
        returns a non-200 status.
        """
        try:
            with contextlib.closing(urllib2.urlopen(url)) as request:
                if request.getcode() != 200:
                    return None
                headers = request.info()
                contentEncoding = headers.get("Content-Encoding", "").lower()
                if contentEncoding in ("gzip", "x-gzip", "deflate"):
                    data = request.read()
                    # We have to put it in a string IO because gzip looks for
                    # the "tell()" file object method
                    request = StringIO(data)
                    try:
                        with gzip.GzipFile(fileobj=request) as f:
                            request = StringIO(f.read())
                    except Exception:
                        # Not a gzip stream after all: fall back to raw
                        # deflate via the Python 2 'zlib' string codec.
                        request = StringIO(data.decode('zlib'))

                LogMessage("Parsing SYM file at " + url)
                return ParseSymbolFile(request)
        except Exception as e:
            LogDebug("Error opening URL " + url + ": " + str(e))
            return None
# --- Ejemplo n.º 14 ---
 def LogMessage(self, string):
     """Forward *string* to the module-level LogMessage, tagged with this
     handler's remote IP address."""
     LogMessage(string, self.remoteIp)
# --- Ejemplo n.º 15 ---
    def FetchSymbolsFromFile(self, path):
        """Parse a symbol file at *path* into an address->name map.

        Supports two formats chosen by extension: breakpad ".sym" files
        (PUBLIC/FUNC records) and "nm" output in ".nmsym" files. Returns a
        SymbolInfo on success, or None if the file cannot be opened or
        parsed.
        """
        try:
            symFile = open(path, "r")
        except Exception as e:
            LogTrace("Error opening file " + path + ": " + str(e))
            return None

        LogMessage("Parsing SYM file at " + path)

        try:
            # "with" guarantees the handle is closed on every path — the
            # original leaked it on both success and parse failure.
            with symFile:
                symbolMap = {}
                lineNum = 0
                publicCount = 0
                funcCount = 0
                if path.endswith(".sym"):
                    for line in symFile:
                        lineNum += 1
                        if line[0:7] == "PUBLIC ":
                            # PUBLIC record: hex address in fields[1], name
                            # from fields[3] on.
                            fields = line.rstrip().split(" ")
                            if len(fields) < 4:
                                LogTrace("Line " + str(lineNum) + " is messed")
                                continue
                            address = int(fields[1], 16)
                            symbolMap[address] = " ".join(fields[3:])
                            publicCount += 1
                        elif line[0:5] == "FUNC ":
                            # FUNC record: hex address in fields[1], name
                            # from fields[4] on.
                            fields = line.rstrip().split(" ")
                            if len(fields) < 5:
                                LogTrace("Line " + str(lineNum) + " is messed")
                                continue
                            address = int(fields[1], 16)
                            symbolMap[address] = " ".join(fields[4:])
                            funcCount += 1
                elif path.endswith(".nmsym"):
                    # Address width is inferred from the first non-indented
                    # line and assumed constant for the rest of the file.
                    addressLength = 0
                    for line in symFile:
                        lineNum += 1
                        if line.startswith(" "):
                            continue
                        if addressLength == 0:
                            addressLength = line.find(" ")
                        address = int(line[0:addressLength], 16)
                        # Some lines have the form
                        # "address space letter space symbol",
                        # some have the form "address space symbol".
                        # The letter has a meaning, but we ignore it.
                        if line[addressLength + 2] == " ":
                            symbol = line[addressLength + 3:].rstrip()
                        else:
                            symbol = line[addressLength + 1:].rstrip()
                        symbolMap[address] = symbol
                        publicCount += 1
        except Exception:
            LogError("Error parsing SYM file " + path)
            return None

        logString = "Found " + \
            str(len(symbolMap.keys())) + " unique entries from "
        logString += str(publicCount) + " PUBLIC lines, " + \
            str(funcCount) + " FUNC lines"
        LogTrace(logString)

        return SymbolInfo(symbolMap)
# --- Ejemplo n.º 16 ---
        nargs="*",
        default=[],
        help="files containing the profiles from before the regression")
    parser.add_argument(
        "-a",
        "--after",
        nargs="+",
        help="files containing the profiles from after the regression")
    parser.add_argument("-o",
                        "--out",
                        default="comparison-profile.txt",
                        help="result filename")

    args = parser.parse_args()

    # Load and JSON-decode both profile sets.
    LogMessage('Reading "before" profiles...')
    profilestrings_before = get_profiles_in_files(args.before)
    profiles_before = [json.loads(s) for s in profilestrings_before]
    LogMessage('Reading "after" profiles...')
    profilestrings_after = get_profiles_in_files(args.after)
    profiles_after = [json.loads(s) for s in profilestrings_after]
    # Weight "before" samples with -1 — presumably so they cancel against
    # the "after" samples in the merged profile (weight_profile is not
    # visible here; confirm against its definition).
    LogMessage('Changing sample weights on "before" profiles to -1...')
    for profile in profiles_before:
        weight_profile(profile, -1)
    LogMessage('Merging profiles...')
    profile = sps.merge_profiles(profiles_before + profiles_after)
    fixup_sample_data(profile)
    # Compress and write the merged comparison profile to --out.
    LogMessage('Compressing result profile...')
    sps.compress_profile(profile)
    sps.save_profile(profile, args.out)
    LogMessage('Created {out}.'.format(out=args.out))
# --- Ejemplo n.º 17 ---
    def PrefetchRecentSymbolFiles(self):
        """Periodically prefetch recently modified symbol files into the cache.

        Re-arms its own timer, scans each PREFETCHED_LIBS directory under the
        FIREFOX symbol path for subdirectories modified within the prefetch
        threshold, loads their .sym files, and inserts the parsed symbols
        into the shared cache under the cache lock.
        """
        global PREFETCHED_LIBS

        LogMessage("Prefetching recent symbol files")
        # Schedule next timer callback
        interval = self.sOptions['prefetchInterval'] * 60 * 60
        self.sCallbackTimer = threading.Timer(interval,
                                              self.PrefetchRecentSymbolFiles)
        self.sCallbackTimer.start()

        # Only look at directories modified within the threshold window.
        thresholdTime = time.time() - \
            self.sOptions['prefetchThreshold'] * 60 * 60
        symDirsToInspect = {}
        for pdbName in PREFETCHED_LIBS:
            symDirsToInspect[pdbName] = []
            topLibPath = self.sOptions['symbolPaths'][
                'FIREFOX'] + os.sep + pdbName

            try:
                for symbolDir in os.listdir(topLibPath):
                    candidatePath = topLibPath + os.sep + symbolDir
                    mtime = os.path.getmtime(candidatePath)
                    if mtime > thresholdTime:
                        symDirsToInspect[pdbName].append(
                            (mtime, candidatePath))
            except Exception as e:
                LogError("Error while pre-fetching: " + str(e))

            LogMessage("Found " + str(len(symDirsToInspect[pdbName])) +
                       " new " + pdbName + " recent dirs")

            # Only prefetch the most recent N entries
            symDirsToInspect[pdbName].sort(reverse=True)
            symDirsToInspect[pdbName] = symDirsToInspect[
                pdbName][:self.sOptions['prefetchMaxSymbolsPerLib']]

        # Don't fetch symbols already in cache.
        # Ideally, mutex would be held from check to insert in self.sCache,
        # but we don't want to hold the lock during I/O. This won't cause
        # inconsistencies.
        self.sCacheLock.acquire()
        try:
            for pdbName in symDirsToInspect:
                # Rebuild the list instead of calling .remove() while
                # iterating the same list — the original skipped the element
                # following each removal.
                symDirsToInspect[pdbName] = [
                    (mtime, symbolDirPath)
                    for (mtime, symbolDirPath) in symDirsToInspect[pdbName]
                    if not (pdbName in self.sCache and
                            os.path.basename(symbolDirPath) in
                            self.sCache[pdbName])
                ]
        finally:
            self.sCacheLock.release()

        # Read all new symbol files in at once
        fetchedSymbols = {}
        fetchedCount = 0
        for pdbName in symDirsToInspect:
            # The corresponding symbol file name ends with .sym
            symFileName = re.sub(r"\.[^\.]+$", ".sym", pdbName)

            for (mtime, symbolDirPath) in symDirsToInspect[pdbName]:
                pdbId = os.path.basename(symbolDirPath)
                symbolFilePath = symbolDirPath + os.sep + symFileName
                symbolInfo = self.FetchSymbolsFromFile(symbolFilePath)
                if symbolInfo:
                    # Stop if the prefetched items are bigger than the cache.
                    # NOTE(review): this only breaks out of the inner loop;
                    # remaining libs are still inspected — confirm intended.
                    if fetchedCount + symbolInfo.GetEntryCount() > \
                            self.sOptions["maxCacheEntries"]:
                        break
                    fetchedSymbols[(pdbName, pdbId)] = symbolInfo
                    fetchedCount += symbolInfo.GetEntryCount()
                else:
                    LogError("Couldn't fetch .sym file symbols for " +
                             symbolFilePath)

        # Insert new symbols into global symbol cache
        self.sCacheLock.acquire()
        try:
            # Make room for the new symbols
            self.MaybeEvict(fetchedCount)

            for (pdbName, pdbId) in fetchedSymbols:
                if pdbName not in self.sCache:
                    self.sCache[pdbName] = {}

                if pdbId in self.sCache[pdbName]:
                    continue

                newSymbolFile = fetchedSymbols[(pdbName, pdbId)]
                self.sCache[pdbName][pdbId] = newSymbolFile
                self.sCacheCount += newSymbolFile.GetEntryCount()

                # Move new symbols to front of MRU list to give them a chance
                self.UpdateMruList(pdbName, pdbId)

        finally:
            self.sCacheLock.release()

        LogMessage("Finished prefetching recent symbol files")
# --- Ejemplo n.º 18 ---
# Parse command-line options (parser is built above this chunk).
args = parser.parse_args()

# Symbolicator instance built from the global symbolication options.
symbolicator = symbolication.ProfileSymbolicator(gSymbolicationOptions)


def load_json_debug(s):
    """Parse JSON from *s*, logging a snippet of the input on failure.

    Re-raises the original exception unchanged.
    """
    try:
        return json.loads(s)
    except Exception:
        LogMessage("Error loading JSON from string %s...%s" %
                   (s[0:100], s[-100:]))
        # Bare raise keeps the original traceback; "raise e" would restart
        # it at this frame.
        raise


for rev in args.rev:
    LogMessage("Listing Talos runs for try revision {rev}...".format(rev=rev))
    push = tryserver.TryserverPush(rev)
    LogMessage("Looking for build directory...")
    dir = push.get_build_dir(args.platform)
    LogMessage("Downloading profiles...")
    profiles = {}
    for talos_zip_url in push.find_talos_zips(args.platform, args.test):
        for subtest, profile in push.get_talos_profiles(talos_zip_url):
            profiles.setdefault(subtest, []).append(profile)
    for subtest in profiles:
        num_exceed_max = max(0, len(profiles[subtest]) - args.max)
        if num_exceed_max > 0:
            LogMessage(
                "Discarding {num_exceed_max} profiles for subtest {subtest} due to --max restriction to {max} profiles."
                .format(num_exceed_max=num_exceed_max,
                        subtest=subtest,