def _create_basic_processed_crash(self, uuid, raw_crash,
                                  submitted_timestamp,
                                  started_timestamp,
                                  processor_notes):
    """Build the skeleton of a processed crash from a raw crash.

    This function is run only by a worker thread.  It creates the record
    for the current job destined for the 'reports' table.

    parameters:
        uuid - the unique id identifying the job; corresponds with the
               uuid column in the 'jobs' and the 'reports' tables
        raw_crash - an object with a dictionary interface for fetching
                    the components of the raw crash json document
        submitted_timestamp - when the job came in (a key used in
                              partitioning)
        started_timestamp - when processing of this job began
        processor_notes - list of strings of error messages; appended to
                          in place as problems are discovered

    returns:
        a DotDict holding the partially populated processed crash
    """
    processed_crash = DotDict()
    processed_crash.success = False
    processed_crash.uuid = uuid
    processed_crash.startedDateTime = started_timestamp
    # string fields are truncated to their 'reports' column widths;
    # *_or_warn variants append to processor_notes when data is missing
    processed_crash.product = self._get_truncate_or_warn(
        raw_crash,
        'ProductName',
        processor_notes,
        None,
        30
    )
    processed_crash.version = self._get_truncate_or_warn(
        raw_crash,
        'Version',
        processor_notes,
        None,
        16
    )
    processed_crash.build = self._get_truncate_or_warn(
        raw_crash,
        'BuildID',
        processor_notes,
        None,
        16
    )
    processed_crash.url = self._get_truncate_or_none(
        raw_crash,
        'URL',
        255
    )
    processed_crash.user_comments = self._get_truncate_or_none(
        raw_crash,
        'Comments',
        500
    )
    processed_crash.app_notes = self._get_truncate_or_none(
        raw_crash,
        'Notes',
        1000
    )
    processed_crash.distributor = self._get_truncate_or_none(
        raw_crash,
        'Distributor',
        20
    )
    processed_crash.distributor_version = self._get_truncate_or_none(
        raw_crash,
        'Distributor_version',
        20
    )
    processed_crash.email = self._get_truncate_or_none(
        raw_crash,
        'Email',
        100
    )
    processed_crash.process_type = self._get_truncate_or_none(
        raw_crash,
        'ProcessType',
        10
    )
    processed_crash.release_channel = raw_crash.get(
        'ReleaseChannel',
        'unknown'
    )
    # userId is now deprecated and replaced with an empty string
    processed_crash.user_id = ""

    # ++++++++++++++++++++
    # date transformations
    processed_crash.date_processed = submitted_timestamp

    # defaultCrashTime: must have crashed before date processed
    submitted_timestamp_as_epoch = int(
        time.mktime(submitted_timestamp.timetuple())
    )
    timestampTime = int(
        raw_crash.get('timestamp', submitted_timestamp_as_epoch)
    )  # the old name for crash time
    crash_time = int(
        self._get_truncate_or_warn(
            raw_crash,
            'CrashTime',
            processor_notes,
            timestampTime,
            10
        )
    )
    processed_crash.crash_time = crash_time
    if crash_time == submitted_timestamp_as_epoch:
        # equality means every fallback was exhausted and we only have
        # the submission time, not a real client crash time
        processor_notes.append(
            "WARNING: No 'client_crash_date' "
            "could be determined from the raw_crash"
        )
    # StartupTime: must have started up some time before crash
    startupTime = int(raw_crash.get('StartupTime', crash_time))
    # InstallTime: must have installed some time before startup
    installTime = int(raw_crash.get('InstallTime', startupTime))
    processed_crash.client_crash_date = datetime.datetime.fromtimestamp(
        crash_time,
        UTC
    )
    processed_crash.install_age = crash_time - installTime
    processed_crash.uptime = max(0, crash_time - startupTime)
    try:
        last_crash = int(raw_crash.SecondsSinceLastCrash)
    except (KeyError, AttributeError, TypeError, ValueError):
        # missing key or a non-numeric value; previously a bare
        # 'except:' which also swallowed SystemExit/KeyboardInterrupt
        last_crash = None
    processed_crash.last_crash = last_crash

    # TODO: not sure how to reimplement this
    #if crash_id in self.priority_job_set:
        #processor_notes.append('Priority Job')
        #self.priority_job_set.remove(crash_id)

    # can't get report id because we don't have the database here
    #reportId = processed_crash["id"]
    processed_crash.dump = ''

    try:
        processed_crash.ReleaseChannel = raw_crash.ReleaseChannel
    except KeyError:
        processed_crash.ReleaseChannel = 'unknown'

    if self.config.collect_addon:
        #logger.debug("collecting Addons")
        # formerly 'insertAdddonsIntoDatabase'
        addons_as_a_list_of_tuples = self._process_list_of_addons(
            raw_crash,
            processor_notes
        )
        processed_crash.addons = addons_as_a_list_of_tuples

    if self.config.collect_crash_process:
        #logger.debug("collecting Crash Process")
        # formerly insertCrashProcess
        processed_crash.update(
            self._add_process_type_to_processed_crash(raw_crash)
        )

    # addons_checked stays None when EMCheckCompatibility is absent
    processed_crash.addons_checked = None
    try:
        addons_checked_txt = raw_crash.EMCheckCompatibility.lower()
        processed_crash.addons_checked = False
        if addons_checked_txt == 'true':
            processed_crash.addons_checked = True
    except KeyError:
        pass  # leaving it as None if not in the document

    # plugin hangs get a synthesized hangid; otherwise take the
    # client-supplied HangID (if any)
    if int(raw_crash.get('PluginHang', False)):
        processed_crash.hangid = 'fake-' + uuid
    else:
        processed_crash.hangid = raw_crash.get('HangID', None)
    # hang_type: 1 = browser hang, -1 = plugin/other hang, 0 = no hang
    if int(raw_crash.get('Hang', False)):
        processed_crash.hang_type = 1
    elif int(raw_crash.get('PluginHang', False)):
        processed_crash.hang_type = -1
    elif processed_crash.hangid:
        processed_crash.hang_type = -1
    else:
        processed_crash.hang_type = 0

    processed_crash.java_stack_trace = \
        raw_crash.setdefault('JavaStackTrace', None)

    return processed_crash
def _create_basic_processed_crash(self, uuid, raw_crash,
                                  submitted_timestamp,
                                  started_timestamp,
                                  processor_notes):
    """Build the skeleton of a processed crash from a raw crash.

    This function is run only by a worker thread.  It creates the record
    for the current job destined for the 'reports' table.

    NOTE(review): this appears to be a second revision of the same
    method (simpler Hang/HangID handling than the earlier copy in this
    file); if both definitions live in the same class, this one shadows
    the other — confirm which is intended.

    parameters:
        uuid - the unique id identifying the job; corresponds with the
               uuid column in the 'jobs' and the 'reports' tables
        raw_crash - an object with a dictionary interface for fetching
                    the components of the raw crash json document
        submitted_timestamp - when the job came in (a key used in
                              partitioning)
        started_timestamp - when processing of this job began
        processor_notes - list of strings of error messages; appended to
                          in place as problems are discovered

    returns:
        a DotDict holding the partially populated processed crash
    """
    processed_crash = DotDict()
    processed_crash.success = False
    processed_crash.uuid = uuid
    processed_crash.startedDateTime = started_timestamp
    # string fields are truncated to their 'reports' column widths;
    # *_or_warn variants append to processor_notes when data is missing
    processed_crash.product = self._get_truncate_or_warn(
        raw_crash, 'ProductName', processor_notes, None, 30)
    processed_crash.version = self._get_truncate_or_warn(
        raw_crash, 'Version', processor_notes, None, 16)
    processed_crash.build = self._get_truncate_or_warn(
        raw_crash, 'BuildID', processor_notes, None, 16)
    processed_crash.url = self._get_truncate_or_none(raw_crash, 'URL', 255)
    processed_crash.user_comments = self._get_truncate_or_none(
        raw_crash, 'Comments', 500)
    processed_crash.app_notes = self._get_truncate_or_none(
        raw_crash, 'Notes', 1000)
    processed_crash.distributor = self._get_truncate_or_none(
        raw_crash, 'Distributor', 20)
    processed_crash.distributor_version = self._get_truncate_or_none(
        raw_crash, 'Distributor_version', 20)
    processed_crash.email = self._get_truncate_or_none(
        raw_crash, 'Email', 100)
    processed_crash.process_type = self._get_truncate_or_none(
        raw_crash, 'ProcessType', 10)
    processed_crash.release_channel = raw_crash.get(
        'ReleaseChannel', 'unknown')
    # userId is now deprecated and replaced with an empty string
    processed_crash.user_id = ""

    # ++++++++++++++++++++
    # date transformations
    processed_crash.date_processed = submitted_timestamp

    # defaultCrashTime: must have crashed before date processed
    submitted_timestamp_as_epoch = int(
        time.mktime(submitted_timestamp.timetuple()))
    timestampTime = int(
        raw_crash.get(
            'timestamp',
            submitted_timestamp_as_epoch))  # the old name for crash time
    crash_time = int(
        self._get_truncate_or_warn(raw_crash, 'CrashTime',
                                   processor_notes, timestampTime, 10))
    processed_crash.crash_time = crash_time
    if crash_time == submitted_timestamp_as_epoch:
        # equality means every fallback was exhausted and we only have
        # the submission time, not a real client crash time
        processor_notes.append("WARNING: No 'client_crash_date' "
                               "could be determined from the raw_crash")
    # StartupTime: must have started up some time before crash
    startupTime = int(raw_crash.get('StartupTime', crash_time))
    # InstallTime: must have installed some time before startup
    installTime = int(raw_crash.get('InstallTime', startupTime))
    processed_crash.client_crash_date = datetime.datetime.fromtimestamp(
        crash_time, UTC)
    processed_crash.install_age = crash_time - installTime
    processed_crash.uptime = max(0, crash_time - startupTime)
    try:
        last_crash = int(raw_crash.SecondsSinceLastCrash)
    except (KeyError, AttributeError, TypeError, ValueError):
        # missing key or a non-numeric value; previously a bare
        # 'except:' which also swallowed SystemExit/KeyboardInterrupt
        last_crash = None
    processed_crash.last_crash = last_crash

    # TODO: not sure how to reimplement this
    #if crash_id in self.priority_job_set:
        #processor_notes.append('Priority Job')
        #self.priority_job_set.remove(crash_id)

    # can't get report id because we don't have the database here
    #reportId = processed_crash["id"]
    processed_crash.dump = ''

    try:
        processed_crash.ReleaseChannel = raw_crash.ReleaseChannel
    except KeyError:
        processed_crash.ReleaseChannel = 'unknown'

    if self.config.collect_addon:
        #logger.debug("collecting Addons")
        # formerly 'insertAdddonsIntoDatabase'
        addons_as_a_list_of_tuples = self._process_list_of_addons(
            raw_crash, processor_notes)
        processed_crash.addons = addons_as_a_list_of_tuples

    if self.config.collect_crash_process:
        #logger.debug("collecting Crash Process")
        # formerly insertCrashProcess
        processed_crash.update(
            self._add_process_type_to_processed_crash(raw_crash))

    # addons_checked stays None when EMCheckCompatibility is absent
    processed_crash.addons_checked = None
    try:
        addons_checked_txt = raw_crash.EMCheckCompatibility.lower()
        processed_crash.addons_checked = False
        if addons_checked_txt == 'true':
            processed_crash.addons_checked = True
    except KeyError:
        pass  # leaving it as None if not in the document

    processed_crash.hangid = raw_crash.get('HangID', None)
    # hang_type: client-supplied Hang value when present, else -1 for
    # any crash carrying a HangID, else 0 (no hang)
    if 'Hang' in raw_crash:
        processed_crash.hang_type = raw_crash.Hang
    elif processed_crash.hangid:
        processed_crash.hang_type = -1
    else:
        processed_crash.hang_type = 0

    processed_crash.java_stack_trace = \
        raw_crash.setdefault('JavaStackTrace', None)

    return processed_crash