Example 1
0
    def __init__(self, app_id, lookback_minutes, new_relic_account,
                 new_relic_app_id, event_type):
        """Build per-version error events from Crittercism error details.

        Populates ``self._events`` with one deep-copied Event per
        occurrence not previously recorded, and ``self._to_save`` with
        (event_type, hash, version, date, occurrences) history rows.
        """
        super(ErrorsByVersionProcessor,
              self).__init__(app_id, lookback_minutes, new_relic_account,
                             new_relic_app_id, event_type)

        self._to_save = []
        events = []
        found_total = 0
        known_total = 0

        for error in self.get_errors_with_details(lookback_minutes):
            date_str = error.current_date().strftime('%Y-%m-%d')

            # Template event carrying the error-level attributes; each
            # version gets its own deep copy below.
            template = Event(new_relic_account, new_relic_app_id, event_type,
                             datetime.now())
            for key, value in error.as_event_dict(app_id).items():
                template.set(key, value)

            for version, occurrences in error.current_date_occurrences().items():
                already_known = CRErrorHistoryDAO.num_previously_known(
                    self._app_id, event_type, error.crittercism_hash(),
                    version, date_str)

                num_new = occurrences - already_known
                found_total += occurrences
                known_total += already_known

                logging.getLogger().debug(
                    'Building event for cr_error=%s version=%s previously_known=%s num_new=%s',
                    error.crittercism_hash(), version, already_known, num_new)

                versioned = copy.deepcopy(template)
                versioned.set(u'version', version)
                # num_new <= 0 yields an empty extension, i.e. no events.
                events.extend([versioned] * num_new)

                self._to_save.append((event_type, error.crittercism_hash(),
                                      version, date_str, occurrences))

        self._events = events
        logging.getLogger().info(
            'AppId=%s Event=%s num_found=%s num_known=%s num_new=%s', app_id,
            event_type, found_total, known_total, len(events))
Example 2
0
    def __init__(self, app_id, lookback_minutes, new_relic_account,
                 new_relic_app_id, event_type):
        """Split aggregate metric volumes into individual Events.

        Creates as many Events as the largest per-series volume total,
        each stamped with a random timestamp inside the queried window,
        then lets each metric's strategy fill in event attributes.
        """
        super(MetricsAggregateSplitter,
              self).__init__(app_id, lookback_minutes, new_relic_account,
                             new_relic_app_id, event_type)

        volumes = self.get_data(self.get_volume_metric())
        self._events = []
        # NOTE(review): values()[0] relies on the Python 2 dict API
        # (values() returning a list) and on a non-empty result — confirm.
        num_events = max(v.total() for v in volumes.values())
        self._start_time, self._end_time = volumes.values()[0].time_range()
        logging.getLogger().debug('start_time: %s end_time: %s',
                                  self._start_time, self._end_time)

        if not num_events:
            logging.getLogger().info('No events to upload.')
            return

        logging.getLogger().debug('Creating events')
        for _ in range(num_events):
            self._events.append(
                Event(new_relic_account, new_relic_app_id, event_type,
                      random_timestamp(self._start_time, self._end_time)))

        logging.getLogger().debug('Extracting ranges and setting volumes.')
        self._ranges = self.get_event_ranges(volumes)
        logging.getLogger().debug(
            'Getting the rest of the data and processing')
        for metric, strategy in self.get_metrics_and_strategies():
            metric_data = self.get_data(metric)
            strategy(self, metric, metric_data)
        logging.getLogger().info('ETL Completed')
Example 3
0
    def __init__(self, app_id, lookback_minutes, new_relic_account, new_relic_app_id, event_type):
        """Split an aggregate volume metric into individual Events.

        Queries the volume metric, subtracts events already uploaded, and
        creates one Event per remaining occurrence with a random timestamp
        inside the queried window; each metric strategy then fills in
        event attributes. Sets self._events, self._start_time,
        self._end_time, self._fudge_ratio and self._ranges.
        """
        super(MetricsAggregateSplitter, self).__init__(app_id, lookback_minutes, new_relic_account, new_relic_app_id, event_type)

        volumes = self.get_data(self.get_volume_metric())
        # NOTE(review): values()[0] relies on the Python 2 dict API
        # (values() returning a list) and on a non-empty result — confirm.
        num_queried_events = volumes.values()[0].total()
        self._start_time, self._end_time = volumes.values()[0].time_range()
        logging.getLogger().debug('start_time: %s end_time: %s', self._start_time, self._end_time)
        num_events = num_queried_events - self.get_num_already_uploaded()

        if not num_events:
            if num_queried_events:
                # Fixed log-message grammar ("were been" -> "were already").
                logging.getLogger().info('All events queried were already uploaded. num_queried=%s', num_queried_events)

            logging.getLogger().info('No new events to upload.')
            self._events = []
            return

        # Force true division: under Python 2 (which values()[0] above
        # implies) integer division truncated this ratio to 0 or 1,
        # defeating its purpose. Identical result under Python 3.
        self._fudge_ratio = float(num_events) / num_queried_events
        logging.getLogger().debug('Creating events')
        self._events = [Event(new_relic_account, new_relic_app_id, event_type,
                              random_timestamp(self._start_time, self._end_time))
                        for _ in range(num_events)]
        logging.getLogger().debug('Extracting ranges and setting volumes.')
        self._ranges = self.get_event_ranges(volumes)
        logging.getLogger().debug('Getting the rest of the data and processing')
        for metric, strategy in self.get_metrics_and_strategies():
            metric_data = self.get_data(metric)
            strategy(self, metric, metric_data)
        logging.getLogger().info('ETL Completed')
    def __init__(self, app_id, lookback_minutes, new_relic_account, new_relic_app_id, event_type):
        """Turn Crittercism error details into per-version New Relic events.

        self._events receives one deep-copied Event per occurrence not yet
        recorded in history; self._to_save receives the history rows to
        persist afterwards.
        """
        super(ErrorsByVersionProcessor, self).__init__(app_id, lookback_minutes,
                                                       new_relic_account, new_relic_app_id, event_type)

        self._to_save = []
        events = []
        found_total, known_total = 0, 0

        for cr_error in self.get_errors_with_details(lookback_minutes):
            day = cr_error.current_date().strftime('%Y-%m-%d')

            # Base event with the error-wide attributes; copied per version.
            base = Event(new_relic_account, new_relic_app_id, event_type, datetime.now())
            for field, value in cr_error.as_event_dict().items():
                base.set(field, value)

            for version, occurrences in cr_error.current_date_occurrences().items():
                already_known = CRErrorHistoryDAO.num_previously_known(self._app_id, event_type,
                                                                       cr_error.crittercism_hash(),
                                                                       version, day)

                num_new = occurrences - already_known
                found_total += occurrences
                known_total += already_known

                logging.getLogger().debug('Building event for cr_error=%s version=%s previously_known=%s num_new=%s',
                                          cr_error.crittercism_hash(), version, already_known, num_new)

                versioned = copy.deepcopy(base)
                versioned.set(u'version', version)
                # A non-positive num_new extends by the empty list.
                events.extend([versioned] * num_new)

                self._to_save.append((event_type, cr_error.crittercism_hash(), version,
                                      day, occurrences))

        self._events = events
        logging.getLogger().info('AppId=%s Event=%s num_found=%s num_known=%s num_new=%s',
                                 app_id, event_type, found_total, known_total, len(events))
Example 5
0
 def make_event(self, trace, group_name):
     """Build an Event from a trace dict, tagged with *group_name*.

     The timestamp is parsed from the trace's u'traceTs' field; every
     key/value pair in the trace is copied onto the event.
     NOTE(review): a u'group' key in the trace would overwrite the
     group_name set below — confirm that is intended.
     """
     when = datetime.strptime(trace[u'traceTs'], '%Y-%m-%dT%H:%M:%S.%fZ')
     event = Event(self._new_relic_account, self._new_relic_app_id,
                   self._event_type, when)
     event.set('group', group_name)
     for field, value in trace.items():
         event.set(field, value)
     return event