Example #1
 def testLookupLimitedStringOrNone(self):
   dict = {"key":"value"}
   assert("value" == util.lookupLimitedStringOrNone(dict,"key",8))
   assert("val" == util.lookupLimitedStringOrNone(dict,"key",3))
   assert(None == util.lookupLimitedStringOrNone(dict,"not",33))
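
Judging from the assertions above, util.lookupLimitedStringOrNone appears to return the value stored under a key truncated to a maximum length, or None when the key is absent. A minimal sketch of a helper with that contract (an assumption inferred from the test, not the actual util implementation):

  def lookupLimitedStringOrNone(aDict, key, maxLength):
      # hypothetical sketch: truncate the looked-up value to maxLength characters,
      # or return None when the key is missing
      try:
          return aDict[key][:maxLength]
      except KeyError:
          return None
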
Example #2
  def insertCrashProcess (self, threadLocalCursor, reportId, jsonDocument,
                          date_processed, processorErrorMessages):
    """ Electrolysis Support - Optional - jsonDocument may contain a ProcessType
    of plugin. In the future this value could be default, content, maybe even
    Jetpack... This indicates which process was the crashing process.
        plugin - When set to plugin, the jsonDocument MUST also contain
                 PluginFilename, PluginName, and PluginVersion
    """
    crashProcesOutputDict = sutil.DotDict()
    processType = sutil.lookupLimitedStringOrNone(jsonDocument, 'ProcessType',
                                                  10)
    if not processType:
      return crashProcesOutputDict
    crashProcesOutputDict.processType = processType

    #logger.debug('processType %s', processType)
    if "plugin" == processType:
      # Bug#543776 We are actually relaxing the non-null policy... a null
      # filename, name, and version is OK. We'll use empty strings
      pluginFilename = sutil.lookupStringOrEmptyString(jsonDocument,
                                                       'PluginFilename')
      pluginName     = sutil.lookupStringOrEmptyString(jsonDocument,
                                                       'PluginName')
      pluginVersion  = sutil.lookupStringOrEmptyString(jsonDocument,
                                                       'PluginVersion')
      crashProcesOutputDict.pluginFilename = pluginFilename
      crashProcesOutputDict.pluginName = pluginName
      crashProcesOutputDict.pluginVersion = pluginVersion

      try:
        pluginId = self.sdb.singleRowSql(threadLocalCursor,
                                         'select id from plugins '
                                         'where filename = %s '
                                         'and name = %s',
                                         (pluginFilename, pluginName))
        #logger.debug('%s/%s already exists in the database',
                      #pluginFilename, pluginName)
      except sdb.SQLDidNotReturnSingleRow, x:
        self.pluginsTable.insert(threadLocalCursor,
                                 (pluginFilename, pluginName))
        pluginId = self.sdb.singleRowSql(threadLocalCursor,
                                         'select id from plugins '
                                         'where filename = %s '
                                         'and name = %s',
                                         (pluginFilename, pluginName))
        #logger.debug('%s/%s inserted into the database',
                      #pluginFilename, pluginName)

      try:
        self.pluginsReportsTable.insert(threadLocalCursor,
                                        (reportId,
                                         pluginId,
                                         date_processed,
                                         pluginVersion),
                                        self.databaseConnectionPool.connectionCursorPair,
                                        date_processed=date_processed)
      except sdb.db_module.IntegrityError, x:
        logger.error("psycopg2.IntegrityError %s", str(x))
        logger.error("Unable to save record for plugin report. pluginId: %s"
                     "reportId: %s version: %s", pluginId, reportId,
                     pluginVersion)
        processorErrorMessages.append("Detected out of process plugin crash, "
                                      "but unable to record %s %s %s" %
                                      (pluginFilename, pluginName,
                                       pluginVersion))
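
Per the docstring above, a jsonDocument for an out-of-process plugin crash must carry PluginFilename, PluginName, and PluginVersion alongside a ProcessType of "plugin". An illustrative shape for such a document (the field values here are invented for the example, not taken from a real crash report):

  pluginCrashDocument = {
      'ProcessType': 'plugin',               # identifies the crashing process
      'PluginFilename': 'exampleplugin.dll', # required when ProcessType is 'plugin'
      'PluginName': 'Example Plugin',        # required when ProcessType is 'plugin'
      'PluginVersion': '1.0.0.0',            # recorded in the plugins_reports row
  }

Note that insertCrashProcess itself tolerates missing plugin fields (per Bug#543776) by substituting empty strings.
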
Example #3
 def testLookupLimitedStringOrNone(self):
     dict = {"key": "value"}
     assert ("value" == util.lookupLimitedStringOrNone(dict, "key", 8))
     assert ("val" == util.lookupLimitedStringOrNone(dict, "key", 3))
     assert (None == util.lookupLimitedStringOrNone(dict, "not", 33))
Example #4
  def insertReportIntoDatabase(self, threadLocalCursor, uuid, jsonDocument, date_processed, processorErrorMessages):
    """
    This function is run only by a worker thread.
      Create the record for the current job in the 'reports' table
      input parameters:
        threadLocalCursor: a database cursor for exclusive use by the calling thread
        uuid: the unique id identifying the job - corresponds with the uuid column in the 'jobs' and the 'reports' tables
        jsonDocument: an object with a dictionary interface for fetching the components of the json document
        date_processed: when job came in (a key used in partitioning)
        processorErrorMessages: list of strings of error messages
      jsonDocument MUST contain                                      : stored in table reports
        ProductName: Any string with length <= 30                    : in column productdims_id
        Version: Any string with length <= 16                        : in column productdims_id
      jsonDocument SHOULD contain:
        BuildID: 10-character date, as: datetime.strftime('%Y%m%d%H'): build_date (calculated from BuildID) (may also have minutes, seconds)
        CrashTime(preferred), or
        timestamp (deprecated): decimal unix timestamp               : in column client_crash_date
        StartupTime: decimal unix timestamp of 10 or fewer digits    : in column uptime = client_crash_date - startupTime
        InstallTime: decimal unix timestamp of 10 or fewer digits    : in column install_age = client_crash_date - installTime
        SecondsSinceLastCrash: some integer value                    : in column last_crash
      jsonDocument MAY contain:
        Comments: Length <= 500                                      : in column user_comments
        Notes:    Length <= 1000                                     : in column app_notes
        Distributor: Length <= 20                                    : in column distributor
        Distributor_version: Length <= 20                            : in column distributor_version
        HangId: uuid-like                                            : in column hangid
    """
    #logger.debug("starting insertReportIntoDatabase")
    product = Processor.getJsonOrWarn(jsonDocument, 'ProductName', processorErrorMessages, None, 30)
    version = Processor.getJsonOrWarn(jsonDocument, 'Version', processorErrorMessages, None, 16)
    buildID = Processor.getJsonOrWarn(jsonDocument, 'BuildID', processorErrorMessages, None, 16)
    url = sutil.lookupLimitedStringOrNone(jsonDocument, 'URL', 255)
    user_comments = sutil.lookupLimitedStringOrNone(jsonDocument, 'Comments', 500)
    app_notes = sutil.lookupLimitedStringOrNone(jsonDocument, 'Notes', 1000)
    distributor = sutil.lookupLimitedStringOrNone(jsonDocument, 'Distributor', 20)
    distributor_version = sutil.lookupLimitedStringOrNone(jsonDocument, 'Distributor_version', 20)
    defaultCrashTime = int(time.mktime(date_processed.timetuple())) # must have crashed before date processed
    timestampTime = int(jsonDocument.get('timestamp',defaultCrashTime)) # the old name for crash time
    crash_time = int(Processor.getJsonOrWarn(jsonDocument,'CrashTime',processorErrorMessages,timestampTime,10))
    startupTime = int(jsonDocument.get('StartupTime',crash_time)) # must have started up some time before crash
    installTime = int(jsonDocument.get('InstallTime',startupTime)) # must have installed some time before startup
    crash_date = datetime.datetime.fromtimestamp(crash_time, UTC)
    install_age = crash_time - installTime
    email = sutil.lookupLimitedStringOrNone(jsonDocument, 'Email', 100)
    hangid = jsonDocument.get('HangID',None)
    process_type = sutil.lookupLimitedStringOrNone(jsonDocument, 'ProcessType', 10)
    #logger.debug ('hangid: %s', hangid)
    #logger.debug ('Email: %s', str(jsonDocument))
    # userId is now deprecated and replaced with an empty string
    user_id = ""
    uptime = max(0, crash_time - startupTime)
    if crash_time == defaultCrashTime:
      logger.warning("no 'crash_time' calculated in %s: Using date_processed", uuid)
      #sutil.reportExceptionAndContinue(logger, logging.WARNING)
      processorErrorMessages.append("WARNING: No 'client_crash_date' could be determined from the Json file")
    try:
      last_crash = int(jsonDocument['SecondsSinceLastCrash'])
    except (KeyError, ValueError, TypeError):
      last_crash = None

    release_channel = jsonDocument.get('ReleaseChannel','unknown')

    newReportRecordAsTuple = (uuid, crash_date, date_processed, product, version,
                              buildID, url, install_age, last_crash, uptime,
                              email, user_id, user_comments, app_notes,
                              distributor, distributor_version, None, None, None,
                              hangid, process_type, release_channel)
    newReportRecordAsDict = dict(x for x in zip(self.reportsTable.columns, newReportRecordAsTuple))
    if not product or not version:
      msgTemplate = "Skipping report: Missing product&version: ["+", ".join(["%s:%%s"%x for x in self.reportsTable.columns])+"]"
      logger.error(msgTemplate % newReportRecordAsTuple)
      return {}
    try:
      #logger.debug("inserting for %s, %s", uuid, str(date_processed))
      self.reportsTable.insert(threadLocalCursor, newReportRecordAsTuple, self.databaseConnectionPool.connectionCursorPair, date_processed=date_processed)
    except sdb.db_module.IntegrityError, x:
      #logger.debug("psycopg2.IntegrityError %s", str(x))
      logger.debug("replacing record that already exsited: %s", uuid)
      threadLocalCursor.connection.rollback()
      # the following code fragment can prevent a crash from being processed a second time
      #previousTrialWasSuccessful = self.sdb.singleValueSql(threadLocalCursor, "select success from reports where uuid = '%s' and date_processed = timestamp with time zone '%s'" % (uuid, date_processed))
      #if previousTrialWasSuccessful:
        #raise DuplicateEntryException(uuid)
      threadLocalCursor.execute("delete from reports where uuid = '%s' and date_processed = timestamp with time zone '%s'" % (uuid, date_processed))
      processorErrorMessages.append("INFO: This record is a replacement for a previous record with the same uuid")
      self.reportsTable.insert(threadLocalCursor, newReportRecordAsTuple, self.databaseConnectionPool.connectionCursorPair, date_processed=date_processed)
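
The MUST/SHOULD/MAY lists in the docstring translate into a fairly small crash document. An illustrative minimal jsonDocument for insertReportIntoDatabase (all values invented; the SHOULD fields are included only to show how uptime and install_age are derived):

  exampleJsonDocument = {
      'ProductName': 'ExampleProduct',   # MUST, length <= 30
      'Version': '1.0.1',                # MUST, length <= 16
      'BuildID': '2011010203',           # SHOULD, datetime.strftime('%Y%m%d%H')
      'CrashTime': '1296000000',         # SHOULD, decimal unix timestamp
      'StartupTime': '1295999000',       # uptime = CrashTime - StartupTime
      'InstallTime': '1290000000',       # install_age = CrashTime - InstallTime
      'SecondsSinceLastCrash': '86400',  # stored in the last_crash column
  }

Without ProductName and Version the function logs an error and returns an empty dict; when the time fields are absent they fall back to values ultimately derived from date_processed.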