Example #1
def get(channel, versions=None, product='Firefox', start_date=None, end_date='today', duration=30, platforms=None):
    if not isinstance(versions, list):
        if isinstance(versions, numbers.Number):
            versions = socorro.ProductVersions.get_active(vnumber=versions, product=product)
        else:
            versions = socorro.ProductVersions.get_active(product=product)
        versions = versions[channel.lower()]

    if start_date:
        _sdate = utils.get_date_ymd(start_date)
        _edate = utils.get_date_ymd(end_date)
        duration = (_edate - _sdate).days

    adi = socorro.ADI.get(version=versions, product=product, end_date=end_date, duration=duration, platforms=platforms)

    data = {}
    for d, n in adi.items():
        data[d] = {'adi': n, 'browser': 0, 'content': 0, 'plugin': 0, 'browser_rate': 0, 'content_rate': 0, 'b+c_rate': 0, 'plugin_rate': 0}

    start_date = utils.get_date(end_date, duration)
    search_date = socorro.SuperSearch.get_search_date(start_date, end_date)

    socorro.SuperSearch(params={'product': product,
                                'version': versions,
                                'release_channel': channel,
                                'date': search_date,
                                '_results_number': 0,
                                '_facets_size': 2,  # 2 is for a facet on plugin and on content
                                '_histogram.date': ['process_type']},
                        handler=__super_search_handler,
                        handlerdata=data).wait()

    return data
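
A minimal usage sketch for the function above; the channel, duration and printed fields are illustrative, and the call assumes the module's socorro/utils imports are available:

data = get('nightly', product='Firefox', end_date='today', duration=7)
for day in sorted(data):
    print(day, data[day]['adi'], data[day]['browser_rate'])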
Example #2
def get(channel,
        versions=None,
        product='Firefox',
        start_date=None,
        end_date='today',
        duration=30,
        platforms=None):
    if not isinstance(versions, list):
        if isinstance(versions, numbers.Number):
            versions = socorro.ProductVersions.get_active(vnumber=versions,
                                                          product=product)
        else:
            versions = socorro.ProductVersions.get_active(product=product)
        versions = versions[channel.lower()]

    if start_date:
        _sdate = utils.get_date_ymd(start_date)
        _edate = utils.get_date_ymd(end_date)
        duration = (_edate - _sdate).days

    adi = socorro.ADI.get(version=versions,
                          product=product,
                          end_date=end_date,
                          duration=duration,
                          platforms=platforms)

    data = {}
    for d, n in adi.items():
        data[d] = {
            'adi': n,
            'browser': 0,
            'content': 0,
            'plugin': 0,
            'browser_rate': 0,
            'content_rate': 0,
            'b+c_rate': 0,
            'plugin_rate': 0
        }

    start_date = utils.get_date(end_date, duration)
    search_date = socorro.SuperSearch.get_search_date(start_date, end_date)

    socorro.SuperSearch(
        params={
            'product': product,
            'version': versions,
            'release_channel': channel,
            'date': search_date,
            '_results_number': 0,
            '_facets_size': 2,  # 2 is for a facet on plugin and on content
            '_histogram.date': ['process_type']
        },
        handler=__super_search_handler,
        handlerdata=data).wait()

    return data
Example #3
def handler_ss(chan, json, data):
    for facets in json['facets']['histogram_date']:
        d = utils.get_date_ymd(facets['term'])
        w = get_past_week(d)
        s = facets['facets']['signature']
        for signature in s:
            count = signature['count']
            sgn = signature['term']
            data[sgn][chan][w] += count
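
For reference, a hypothetical SuperSearch response fragment in the shape handler_ss consumes (the field names come from the code above; the values are made up):

json = {
    'facets': {
        'histogram_date': [
            {'term': '2017-01-02T00:00:00',
             'facets': {'signature': [{'term': 'OOM | small', 'count': 42}]}},
        ]
    }
}
# handler_ss('release', json, data) would add 42 to
# data['OOM | small']['release'][w], where w is the past-week index of the date.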
Example #4
    def history_handler(_history, data):
        bots = ['*****@*****.**', '*****@*****.**']
        bugid = str(_history['id'])
        history = _history['history']
        if history:
            last_change_date = utils.get_guttenberg_death()
            has_patch = False
            has_assignee = False
            is_fixed = False
            resolved = False
            incomplete = False
            for changes in history:
                if changes['who'] not in bots:
                    last_change_date = utils.get_date_ymd(changes['when'])
                for change in changes['changes']:
                    field_name = change.get('field_name', None)
                    if field_name == 'status':
                        if change.get('added', None) == 'RESOLVED':
                            resolved = True
                        elif change.get('removed', None) == 'RESOLVED':
                            resolved = False
                    elif field_name == 'resolution':
                        added = change.get('added', None)
                        removed = change.get('removed', None)
                        if added == 'FIXED':
                            is_fixed = True
                        elif added == 'INCOMPLETE':
                            incomplete = True
                        if removed == 'FIXED':
                            is_fixed = False
                        elif removed == 'INCOMPLETE':
                            incomplete = False
                    elif field_name == 'flagtypes.name':
                        if not has_patch and 'attachment_id' in change and 'added' in change:
                            added = change['added']
                            if added.startswith('review'):
                                has_patch = True
                    elif field_name == 'assigned_to':
                        has_assignee = change.get('added', None) != '*****@*****.**'

            data['bugs'][bugid] = {
                'resolved': resolved,
                'incomplete': incomplete,
                'fixed': is_fixed,
                'patched': has_patch,
                'assigned': has_assignee,
                'last_change': last_change_date.astimezone(pytz.utc).replace(tzinfo=None)
            }
        else:
            data['no_history'].append(bugid)
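
A sketch of the Bugzilla history payload this handler expects; the structure is inferred from the field accesses above and the values are hypothetical:

_history = {
    'id': 123456,
    'history': [
        {'who': 'someone@example.com',
         'when': '2016-05-01T10:00:00Z',
         'changes': [
             {'field_name': 'status', 'added': 'RESOLVED', 'removed': 'NEW'},
             {'field_name': 'resolution', 'added': 'FIXED', 'removed': ''},
         ]},
    ],
}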
Example #5
def get_last_bug(bugids, bugsinfo, min_date=None):
    if not bugids:
        return None

    start_date = utils.get_guttenberg_death().replace(tzinfo=None)
    lasts = {
        'resolved-unfixed': ['', start_date],
        'resolved-fixed-unpatched': ['', start_date],
        'resolved-fixed-patched': ['', start_date],
        'unresolved-assigned': ['', start_date],
        'unresolved-unassigned': ['', start_date]
    }
    _lasts = {
        (True, False, False): lasts['resolved-unfixed'],
        (True, False, True): lasts['resolved-unfixed'],
        (True, True, False): lasts['resolved-fixed-unpatched'],
        (True, True, True): lasts['resolved-fixed-patched'],
        (False, True): lasts['unresolved-assigned'],
        (False, False): lasts['unresolved-unassigned']
    }

    for bugid in bugids:
        bugid = str(bugid)
        if bugid in bugsinfo:
            info = bugsinfo[bugid]
            if not info['incomplete']:
                if info['resolved']:
                    i = _lasts[(True, info['fixed'], info['patched'])]
                else:
                    i = _lasts[(False, info['assigned'])]
                if i[1] < info['last_change']:
                    i[0] = bugid
                    i[1] = info['last_change']

    one_year_ago = utils.get_date_ymd('today') - relativedelta(years=1)

    if lasts['resolved-fixed-patched'][1] >= min_date:  # we have a patch in the last days
        return lasts['resolved-fixed-patched'][0]
    elif lasts['resolved-fixed-unpatched'][1] >= min_date:  # the bug has been fixed without a patch (probably a side effect)
        return lasts['resolved-fixed-unpatched'][0]
    elif lasts['resolved-unfixed'][1] >= min_date:  # the bug has been resolved (but not fixed)
        return lasts['resolved-unfixed'][0]
    elif lasts['unresolved-assigned'][1] >= one_year_ago:  # take the last touched open and assigned bug
        return lasts['unresolved-assigned'][0]
    elif lasts['unresolved-unassigned'][0]:  # take the last touched open and unassigned bug
        return lasts['unresolved-unassigned'][0]
    else:  # we have only closed bugs, all closed before the beginning of the cycle
        return None
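
A hedged usage sketch: the bugsinfo entries mirror the shape produced by the bug/history handlers elsewhere on this page, and the IDs and dates are made up:

bugsinfo = {
    '1000': {'resolved': True, 'fixed': True, 'patched': True,
             'incomplete': False, 'assigned': True,
             'last_change': datetime.datetime(2017, 1, 2)},
    '1001': {'resolved': False, 'fixed': False, 'patched': False,
             'incomplete': False, 'assigned': False,
             'last_change': datetime.datetime(2016, 6, 1)},
}
best = get_last_bug([1000, 1001], bugsinfo,
                    min_date=datetime.datetime(2017, 1, 1))
# -> '1000': the resolved-fixed-patched bug touched after min_date wins.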
Example #6
def bug_handler(bug, data):
    last_change_date = utils.get_date_ymd(bug['last_change_time'])
    data[str(bug['id'])] = {
        'resolved': False,
        'incomplete': False,
        'fixed': False,
        'patched': False,
        'assigned': False,
        'last_change': last_change_date.astimezone(pytz.utc).replace(tzinfo=None)
    }
Example #7
    def test_version_dates(self):
        self.assertEqual(versions.getMajorDate(46),
                         datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('46'),
                         datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('46.0'),
                         datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('46.0.1'),
                         datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('1'),
                         datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('1.0'),
                         datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('1.5'),
                         datetime.datetime(2005, 11, 29, 8, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('14'),
                         datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('14.0'),
                         datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('14.0.1'),
                         datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('33'),
                         datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('33.0'),
                         datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('33.1'),
                         datetime.datetime(2014, 11, 10, 8, 0, tzinfo=tzutc()))

        v = versions.get(base=True)
        self.assertTrue(
            versions.getMajorDate(v['release'])
            <= versions.getMajorDate(v['beta'])
            <= versions.getMajorDate(v['aurora'])
            <= versions.getMajorDate(v['nightly']))

        date = utils.get_date_ymd('2011-08-24T14:52:52Z')
        self.assertEqual(date - versions.getMajorDate('7'),
                         datetime.timedelta(-34, 28372))

        self.assertEqual(
            versions.getCloserMajorRelease(date),
            ('7.0', datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())))
        self.assertEqual(
            versions.getCloserMajorRelease(date, negative=True),
            ('6.0', datetime.datetime(2011, 8, 16, 7, 0, tzinfo=tzutc())))
        self.assertEqual(
            versions.getCloserMajorRelease(date, negative=False),
            ('7.0', datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())))
Example #8
    def test_version_dates(self):
        self.assertEqual(versions.getMajorDate(46), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('46'), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('46.0'), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('46.0.1'), datetime.datetime(2016, 4, 26, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('1'), datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('1.0'), datetime.datetime(2004, 11, 9, 8, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('1.5'), datetime.datetime(2005, 11, 29, 8, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('14'), datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('14.0'), datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('14.0.1'), datetime.datetime(2012, 7, 17, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('33'), datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('33.0'), datetime.datetime(2014, 10, 14, 7, 0, tzinfo=tzutc()))
        self.assertEqual(versions.getMajorDate('33.1'), datetime.datetime(2014, 11, 10, 8, 0, tzinfo=tzutc()))

        v = versions.get(base=True)
        self.assertTrue(versions.getMajorDate(v['release']) <= versions.getMajorDate(v['beta']) <= versions.getMajorDate(v['aurora']) <= versions.getMajorDate(v['nightly']))

        date = utils.get_date_ymd('2011-08-24T14:52:52Z')
        self.assertEqual(date - versions.getMajorDate('7'), datetime.timedelta(-34, 28372))

        self.assertEqual(versions.getCloserMajorRelease(date), ('7.0', datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())))
        self.assertEqual(versions.getCloserMajorRelease(date, negative=True), ('6.0', datetime.datetime(2011, 8, 16, 7, 0, tzinfo=tzutc())))
        self.assertEqual(versions.getCloserMajorRelease(date, negative=False), ('7.0', datetime.datetime(2011, 9, 27, 7, 0, tzinfo=tzutc())))
Example #9
    def test_get_date_ymd(self):
        self.assertIsNotNone(utils.get_date_ymd('today'))
        self.assertIsNotNone(utils.get_date_ymd('today_utc'))
        self.assertIsNotNone(utils.get_date_ymd('yesterday'))
        self.assertIsNotNone(utils.get_date_ymd('yesterday_utc'))
        self.assertIsNotNone(utils.get_date_ymd('tomorrow'))
        self.assertTrue(utils.get_date_ymd('yesterday') < utils.get_date_ymd('today') < utils.get_date_ymd('tomorrow'))
        self.assertTrue(utils.get_date_ymd('yesterday_utc') < utils.get_date_ymd('today_utc'))
        date = datetime.datetime.strptime('1991-04-16', '%Y-%m-%d')
        self.assertEqual(utils.get_date_ymd('1991/04/16'), date)
        self.assertEqual(utils.get_date_ymd('1991-04-16'), date)
        self.assertEqual(utils.get_date_ymd('1991 04 16'), date)
        self.assertEqual(utils.get_date_ymd('04/16/1991'), date)
        self.assertEqual(utils.get_date_ymd('16/04/1991'), date)
        self.assertEqual(utils.get_date_ymd('1991-04-16 12:00:00'), datetime.datetime(1991, 4, 16, 12, 0))

        with self.assertRaises(Exception):
            utils.get_date_ymd('')
        with self.assertRaises(Exception):
            utils.get_date_ymd('marco')
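
The assertions above pin down get_date_ymd's contract fairly precisely. A minimal re-implementation consistent with them might look as follows; this is a sketch, not the library's actual code, and it assumes dateutil and pytz are available:

import datetime
from dateutil import parser as dateparser
import pytz

def get_date_ymd(s):
    now = datetime.datetime.now()
    utcnow = datetime.datetime.now(pytz.utc)
    named = {
        'today': now,
        'tomorrow': now + datetime.timedelta(days=1),
        'yesterday': now - datetime.timedelta(days=1),
        'today_utc': utcnow,
        'yesterday_utc': utcnow - datetime.timedelta(days=1),
    }
    if s in named:
        return named[s]
    # dateutil handles '1991/04/16', '1991-04-16', '1991 04 16',
    # '04/16/1991', '16/04/1991', ... and raises on '' or 'marco'.
    return dateparser.parse(s)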
Example #10
def get(channel, date, product='Firefox', duration=11, tc_limit=50, crash_type='all', startup=False):
    """Get crashes info

    Args:
        channel (str): the channel
        date (str): the final date
        product (Optional[str]): the product
        duration (Optional[int]): the duration to retrieve the data
        tc_limit (Optional[int]): the number of topcrashes to load
        crash_type (Optional[str]): 'all' (default) or 'browser' or 'content' or 'plugin'
        startup (Optional[bool]): if True, restrict the search to startup crashes (uptime <= 60s)

    Returns:
        dict: contains all the info relative to the crashes
    """
    channel = channel.lower()
    version = v[channel]
    versions_info = socorro.ProductVersions.get_version_info(version, channel=channel, product=product)
    versions = versions_info.keys()
    platforms = socorro.Platforms.get_cached_all()

    if crash_type and isinstance(crash_type, six.string_types):
        crash_type = [crash_type]

    throttle = set(map(lambda p: p[1], versions_info.values()))
    if len(throttle) == 1:
        throttle = throttle.pop()
    else:
        return

    _date = utils.get_date_ymd(date)
    start_date = utils.get_date_str(_date - timedelta(duration - 1))
    end_date = utils.get_date_str(_date)

    # First, we get the ADI
    adi = socorro.ADI.get(version=versions, product=product, end_date=end_date, duration=duration, platforms=platforms)
    adi = [adi[key] for key in sorted(adi.keys(), reverse=True)]

    # get the khours
    khours = Redash.get_khours(utils.get_date_ymd(start_date), utils.get_date_ymd(end_date), channel, versions, product)
    khours = [khours[key] for key in sorted(khours.keys(), reverse=True)]

    overall_crashes_by_day = []
    signatures = {}

    def signature_handler(json):
        for signature in json['facets']['signature']:
            signatures[signature['term']] = [signature['count'], 0, 0, 0, 0]

            for platform in signature['facets']['platform']:
                if platform['term'] == 'Linux':
                    signatures[signature['term']][3] = platform['count']
                elif platform['term'] == 'Windows NT':
                    signatures[signature['term']][1] = platform['count']
                elif platform['term'] == 'Mac OS X':
                    signatures[signature['term']][2] = platform['count']

            for uptime in signature['facets']['uptime']:
                if int(uptime['term']) < 60:
                    signatures[signature['term']][4] += uptime['count']

        for facets in json['facets']['histogram_date']:
            overall_crashes_by_day.insert(0, facets['count'])

    params = {
        'product': product,
        'version': versions,
        'date': socorro.SuperSearch.get_search_date(start_date, end_date),
        'release_channel': channel,
        '_aggs.signature': ['platform', 'uptime'],
        '_results_number': 0,
        '_facets_size': tc_limit,
        '_histogram.date': ['product'],
        '_histogram_interval': 1
    }

    if startup:
        params['uptime'] = '<=60'

    socorro.SuperSearch(params=params, handler=signature_handler).wait()

    bug_flags = ['resolution', 'id', 'last_change_time', 'cf_tracking_firefox' + str(version)]
    for i in range(int(version), int(v['nightly']) + 1):
        bug_flags.append('cf_status_firefox' + str(i))

    # TODO: too many requests... should be improved with chunks
    bugs = {}
    # TODO: Use regexps once the Bugzilla bug that prevents them from working is fixed.
    base = {
        'j_top': 'OR',
        'o1': 'substring',
        'f1': 'cf_crash_signature',
        'v1': None,
        'o2': 'substring',
        'f2': 'cf_crash_signature',
        'v2': None,
        'o3': 'substring',
        'f3': 'cf_crash_signature',
        'v3': None,
        'o4': 'substring',
        'f4': 'cf_crash_signature',
        'v4': None,
        'include_fields': bug_flags
    }

    queries = []
    for sgn in signatures.keys():
        cparams = base.copy()
        cparams['v1'] = '[@' + sgn + ']'
        cparams['v2'] = '[@ ' + sgn + ' ]'
        cparams['v3'] = '[@ ' + sgn + ']'
        cparams['v4'] = '[@' + sgn + ' ]'
        bugs[sgn] = []
        queries.append(Query(Bugzilla.API_URL, cparams, __bug_handler, bugs[sgn]))
    res_bugs = Bugzilla(queries=queries)

    # we have stats by signature in signatures;
    # for each signature, get the number of crashes over the last X days
    # to build the signature trend
    trends = {}
    default_trend = {}
    for i in range(duration):
        default_trend[_date - timedelta(i)] = 0

    base = {'product': product,
            'version': versions,
            'signature': None,
            'date': socorro.SuperSearch.get_search_date(start_date, end_date),
            'release_channel': channel,
            '_results_number': 0,
            '_histogram.date': ['signature'],
            '_histogram_interval': 1}

    queries = []
    for sgns in Connection.chunks(list(map(lambda sgn: '=' + sgn, signatures.keys())), 10):
        sgn_group = []
        for sgn in sgns:
            if sum(len(s) for s in sgn_group) >= 1000:
                cparams = base.copy()
                cparams['signature'] = sgn_group
                queries.append(Query(socorro.SuperSearch.URL, cparams, functools.partial(__trend_handler, default_trend), trends))
                sgn_group = []

            sgn_group.append(sgn)

        if len(sgn_group) > 0:
            cparams = base.copy()
            cparams['signature'] = sgn_group
            queries.append(Query(socorro.SuperSearch.URL, cparams, functools.partial(__trend_handler, default_trend), trends))

    socorro.SuperSearch(queries=queries).wait()

    for sgn, trend in trends.items():
        signatures[sgn] = (signatures[sgn], [trend[key] for key in sorted(trend.keys(), reverse=True)])

    _signatures = {}
    # order signatures by crash count
    sorted_signatures = sorted(signatures.items(), key=lambda x: x[1][0][0], reverse=True)
    for i, s in enumerate(sorted_signatures, start=1):
        _signatures[s[0]] = i  # top crash rank

    res_bugs.wait()

    # TODO: In the first query to get the bugs, also get dupe_of and avoid the first query
    #       in follow_dup (so modify follow_dup to accept both a bug ID or a bug object).
    queries = []
    for sgn in signatures.keys():
        duplicate_ids = [bug['id'] for bug in bugs[sgn] if bug['resolution'] == 'DUPLICATE']

        # Remove bugs resolved as DUPLICATE from the list of bugs associated to the signature.
        bugs[sgn] = [bug for bug in bugs[sgn] if bug['id'] not in duplicate_ids]

        # Find duplicates for bugs resolved as DUPLICATE.
        duplicates = {k: v for k, v in Bugzilla.follow_dup(duplicate_ids).items() if v is not None}
        duplicate_targets = [bug_id for bug_id in duplicates.values() if int(bug_id) not in [bug['id'] for bug in bugs[sgn]]]
        if len(duplicate_targets) == 0:
            continue

        # Get info about bugs that the DUPLICATE bugs have been duped to.
        params = {
            'id': ','.join(duplicate_targets),
            'include_fields': bug_flags,
        }
        queries.append(Query(Bugzilla.API_URL, params, __bug_handler, bugs[sgn]))
    Bugzilla(queries=queries).wait()

    for sgn, stats in signatures.items():
        # stats is a 2-tuple: ([count, win_count, mac_count, linux_count, startup_count], trend)
        startup_percent = float(stats[0][4]) / float(stats[0][0])
        _signatures[sgn] = {'tc_rank': _signatures[sgn],
                            'crash_count': stats[0][0],
                            'startup_percent': startup_percent,
                            'crash_by_day': stats[1],
                            'bugs': bugs[sgn]}

    return {
        'start_date': start_date,
        'end_date': end_date,
        'versions': list(versions),
        'adi': adi,
        'khours': khours,
        'crash_by_day': overall_crashes_by_day,
        'signatures': _signatures,
        'throttle': float(throttle)
    }
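
The code above relies on Connection.chunks to split the signature list into groups of 10. Its assumed semantics are plain fixed-size slicing, roughly:

def chunks(lst, n):
    # Yield successive n-sized slices of lst (assumed equivalent of
    # Connection.chunks; the real implementation may differ).
    for i in range(0, len(lst), n):
        yield lst[i:i + n]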
Example #11
def get(channel, date, versions=None, product='Firefox', duration=1):
    """Get stability info

    Args:
        channel (str): the channel
        date (str): the final date
        versions (Optional[List[str]]): the versions to treat
        product (Optional[str]): the product
        duration (Optional[int]): the duration to retrieve the data

    Returns:
        dict: contains all the info relative to stability
    """
    channel = channel.lower()
    cycle = duration <= 0
    versions_info = socorro.ProductVersions.get_version_info(versions, channel=channel, product=product)

    versions = versions_info.keys()
    throttle = set(map(lambda p: p[1], versions_info.values()))
    diff_throttle = len(throttle) != 1
    # normally the throttle is 10% for release and 100% for the other channels
    if not diff_throttle:
        throttle = throttle.pop()

    platforms = socorro.Platforms.get_cached_all()

    end_date_dt = utils.get_date_ymd(date)
    if cycle:
        # get the start date of each version and take the earliest
        start_date_dt = min(map(lambda p: utils.get_date_ymd(p[0]), versions_info.values()))
        duration = (end_date_dt - start_date_dt).days + 1
    else:
        start_date_dt = end_date_dt - timedelta(duration - 1)

    start_date_str = utils.get_date_str(start_date_dt)
    end_date_str = utils.get_date_str(end_date_dt)

    # First, we get the ADI
    adi = socorro.ADI.get(version=versions, product=product, end_date=end_date_str, duration=duration, platforms=platforms)
    adi = [adi[key] for key in sorted(adi.keys(), reverse=False)]

    # Get the khours
    khours = Redash.get_khours(start_date_dt, end_date_dt, channel, versions, product)
    khours = [khours[key] for key in sorted(khours.keys(), reverse=False)]

    # Get the # of crashes (crash pings)
    crash_pings = Redash.get_number_of_crash(start_date_dt, end_date_dt, channel, versions, product)

    crashes = {}
    stats = {'m+c': 0.,
             'main': 0.,
             'content': 0.,
             'plugin': 0.,
             'all': 0.}
    for i in range(duration):
        d = end_date_dt - timedelta(i)
        crashes[d] = {}
        crashes[d]['socorro'] = {'global': stats.copy(), 'startup': stats.copy()}
        crashes[d]['telemetry'] = crash_pings[d]

    base = {'product': product,
            'version': None,
            'date': socorro.SuperSearch.get_search_date(start_date_str, end_date_str),
            'release_channel': channel,
            '_results_number': 1,
            '_histogram.date': ['product', 'process_type'],
            '_facets_size': 3}

    if diff_throttle:
        # in this case each version could have a different throttle so we need to compute stats for each version
        queries = []
        for v, t in versions_info.items():
            cparams = base.copy()
            cparams['version'] = v
            queries.append(Query(socorro.SuperSearch.URL, cparams, functools.partial(__crash_handler, t[1]), crashes))
            cparams = copy.deepcopy(cparams)
            cparams['uptime'] = '<60'
            cparams['_histogram.date'].append('uptime')
            queries.append(Query(socorro.SuperSearch.URL, cparams, functools.partial(__crash_handler, t[1]), crashes))
    else:
        base['version'] = versions
        queries = []
        queries.append(Query(socorro.SuperSearch.URL, base, functools.partial(__crash_handler, throttle), crashes))
        cparams = copy.deepcopy(base)
        cparams['uptime'] = '<60'
        cparams['_histogram.date'].append('uptime')
        queries.append(Query(socorro.SuperSearch.URL, cparams, functools.partial(__crash_handler, throttle), crashes))

    socorro.SuperSearch(queries=queries).wait()
    crashes = [crashes[key] for key in sorted(crashes.keys(), reverse=False)]

    # Now we compute the rates and the averages
    stats = {'m+c': [0., 0., 0., 0.],
             'main': [0., 0., 0., 0.],
             'content': [0., 0., 0., 0.],
             'plugin': [0., 0., 0., 0.],
             'all': [0., 0., 0., 0.]}
    averages = {}
    averages['socorro'] = {'global': stats, 'startup': copy.deepcopy(stats)}
    averages['telemetry'] = copy.deepcopy(stats)
    N = len(adi)

    # sum
    for i in range(N):
        crash_soc = crashes[i]['socorro']
        for k1, v1 in averages['socorro'].items():
            for k2, av in v1.items():
                c = crash_soc[k1][k2]
                # the rate is computed for 100 adi
                x = utils.rate(100. * c, adi[i])
                av[0] += x
                av[1] += x ** 2
                y = utils.rate(c, khours[i])
                av[2] += y
                av[3] += y ** 2
                crash_soc[k1][k2] = (c, x, y)
        crash_tel = crashes[i]['telemetry']
        for k1, av in averages['telemetry'].items():
            c = crash_tel[k1]
            # the rate is computed for 100 adi
            x = utils.rate(100. * c, adi[i])
            av[0] += x
            av[1] += x ** 2
            y = utils.rate(c, khours[i])
            av[2] += y
            av[3] += y ** 2
            crash_tel[k1] = (c, x, y)

    N = float(N)
    averages_old = {'socorro': {}, 'telemetry': {}}
    averages_new = copy.deepcopy(averages_old)

    # mean & standard deviation
    av_new_soc = averages_new['socorro']
    av_old_soc = averages_old['socorro']
    for k1, v1 in averages['socorro'].items():
        d1 = {}
        av_old_soc[k1] = d1
        d2 = {}
        av_new_soc[k1] = d2
        for k2, av in v1.items():
            m = av[0] / N
            d1[k2] = (m, math.sqrt(av[1] / N - m ** 2))
            m = av[2] / N
            d2[k2] = (m, math.sqrt(av[3] / N - m ** 2))

    av_new_tel = averages_new['telemetry']
    av_old_tel = averages_old['telemetry']
    for k1, av in averages['telemetry'].items():
        m = av[0] / N
        av_old_tel[k1] = (m, math.sqrt(av[1] / N - m ** 2))
        m = av[2] / N
        av_new_tel[k1] = (m, math.sqrt(av[3] / N - m ** 2))

    return {'start_date': start_date_str,
            'end_date': end_date_str,
            'versions': versions,
            'adi': adi,
            'khours': khours,
            'crashes': crashes,
            'averages_old': averages_old,
            'averages_new': averages_new}
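
In the averages loops above, each accumulator av holds [Σx, Σx², Σy, Σy²], from which the mean and the population standard deviation are recovered as m = Σx/N and sqrt(Σx²/N - m²). A standalone illustration of that identity:

import math

xs = [1.0, 2.0, 4.0]
N = float(len(xs))
s1 = sum(xs)                     # Σx, as accumulated in av[0]
s2 = sum(x ** 2 for x in xs)     # Σx², as accumulated in av[1]
m = s1 / N                       # 2.333...
sd = math.sqrt(s2 / N - m ** 2)  # ~1.247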
Example #12
        av_new_tel[k1] = (m, math.sqrt(av[3] / N - m ** 2))

    return {'start_date': start_date_str,
            'end_date': end_date_str,
            'versions': versions,
            'adi': adi,
            'khours': khours,
            'crashes': crashes,
            'averages_old': averages_old,
            'averages_new': averages_new}


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Track')
    parser.add_argument('-c', '--channel', action='store', default='release', help='release channel')
    parser.add_argument('-s', '--startdate', action='store', default='', help='the start date')
    parser.add_argument('-e', '--enddate', action='store', default='yesterday', help='the end date')
    parser.add_argument('-D', '--duration', action='store', default=1, type=int, help='the duration')
    parser.add_argument('-p', '--product', action='store', default='Firefox', help='the product')
    parser.add_argument('-v', '--versions', action='store', nargs='+', help='the Firefox versions')
    parser.add_argument('--cycle', action='store_true', help='duration is computed to take into account all the cycle')

    args = parser.parse_args()

    if args.startdate:
        duration = (utils.get_date_ymd(args.enddate) - utils.get_date_ymd(args.startdate)).days + 1
    else:
        duration = -1 if args.cycle else args.duration
    stats = get(args.channel, args.enddate, product=args.product, versions=args.versions, duration=duration)
    pprint(stats)
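
For reference, the CLI above boils down to a programmatic call like the following (the values are hypothetical):

stats = get('beta', 'yesterday', product='Firefox', versions=None, duration=7)
pprint(stats)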
Example #13
    def test_get_date_ymd(self):
        self.assertIsNotNone(utils.get_date_ymd('today'))
        self.assertIsNotNone(utils.get_date_ymd('today_utc'))
        self.assertIsNotNone(utils.get_date_ymd('yesterday'))
        self.assertIsNotNone(utils.get_date_ymd('yesterday_utc'))
        self.assertIsNotNone(utils.get_date_ymd('tomorrow'))
        self.assertTrue(
            utils.get_date_ymd('yesterday') < utils.get_date_ymd('today') <
            utils.get_date_ymd('tomorrow'))
        self.assertTrue(
            utils.get_date_ymd('yesterday_utc') < utils.get_date_ymd('today_utc'))
        date = datetime.datetime.strptime('1991-04-16', '%Y-%m-%d')
        self.assertEqual(utils.get_date_ymd('1991/04/16'), date)
        self.assertEqual(utils.get_date_ymd('1991-04-16'), date)
        self.assertEqual(utils.get_date_ymd('1991 04 16'), date)
        self.assertEqual(utils.get_date_ymd('04/16/1991'), date)
        self.assertEqual(utils.get_date_ymd('16/04/1991'), date)
        self.assertEqual(utils.get_date_ymd('1991-04-16 12:00:00'),
                         datetime.datetime(1991, 4, 16, 12, 0))

        with self.assertRaises(Exception):
            utils.get_date_ymd('')
        with self.assertRaises(Exception):
            utils.get_date_ymd('marco')
Example #14
def get(product='Firefox',
        limit=1000,
        verbose=False,
        search_start_date='',
        signatures=[],
        bug_ids=[],
        max_bugs=-1):
    """Get crashes info

    Args:
        product (Optional[str]): the product
        limit (Optional[int]): the number of crashes to get from tcbs

    Returns:
        dict: contains all the info about how to update flags
    """
    p = product.lower()
    if p == 'firefox':
        product = 'Firefox'
    elif p == 'fennecandroid':
        product = 'FennecAndroid'

    channel = ['release', 'beta', 'aurora', 'nightly']
    if product == 'Firefox':
        channel.append('esr')

    base_versions = clouseau.versions.get(base=True)
    versions_by_channel = socorro.ProductVersions.get_info_from_major(
        base_versions, product=product)
    channel_by_version = {}
    all_versions = []
    start_date_by_channel = {}
    start_date = utils.get_date_ymd('today')
    for chan, versions in versions_by_channel.items():
        start_date_by_channel[chan] = utils.get_date_ymd('tomorrow')
        for v in versions:
            channel_by_version[v['version']] = chan
            d = utils.get_date_ymd(v['start_date'])
            all_versions.append(v['version'])
            if d < start_date:
                start_date = d
            if d < start_date_by_channel[chan]:
                start_date_by_channel[chan] = d

    __warn('Versions: %s' % ', '.join(all_versions), verbose)
    __warn('Start dates: %s' % start_date_by_channel, verbose)

    end_date = utils.get_date('today')
    if search_start_date:
        search_date = socorro.SuperSearch.get_search_date(
            search_start_date, end_date)
    else:
        search_date = socorro.SuperSearch.get_search_date(
            utils.get_date_str(start_date), end_date)

    signatures = __get_signatures(limit, product, all_versions, channel,
                                  search_date, signatures, bug_ids, verbose)

    __warn('Collected signatures: %d' % len(signatures), verbose)

    # get the bugs for each signature
    bugs_by_signature = socorro.Bugs.get_bugs(signatures.keys())

    # if we have some bugs in bug_ids then we must remove the other ones for a given signature
    if bug_ids:
        bids = set(bug_ids)
        for s, bugids in bugs_by_signature.items():
            inter = bids.intersection(bugids)
            if inter:
                bugs_by_signature[s] = inter

    __warn('Collected bugs in Socorro: Ok', verbose)

    # we remove dup bugs,
    # e.g. if we have {1,2,3,4,5} and 2 is a dup of 5, the set is reduced to {1,3,4,5}
    bugs = set()
    for v in bugs_by_signature.values():
        bugs = bugs.union(v)
    dups = Bugzilla.follow_dup(bugs, only_final=False)
    bugs_count = 0
    bugs.clear()
    for s, bugids in bugs_by_signature.items():
        _bugids = set(bugids)
        toremove = set()
        for bugid in bugids:
            chain = dups[str(bugid)]
            if chain:
                elems = []
                for e in chain:
                    e = int(e)
                    if e in _bugids:
                        elems.append(e)
                if elems:
                    elems[-1] = bugid  # we remove the final and put the initial
                    toremove = toremove.union(elems)
        diff = _bugids - toremove
        bugs_by_signature[s] = list(diff)
        bugs_count += len(diff)
        bugs = bugs.union(diff)

    __warn('Remove duplicates: Ok', verbose)
    __warn('Bugs to analyze: %d' % bugs_count, verbose)

    # we filter the bugs to remove meaningless ones
    if not bug_ids:
        bugs = filter_bugs(bugs, product)

    # we get the "best" bug in which to update the info
    bugs_history_info = __get_bugs_info(bugs)

    crashes_to_reopen = []
    bugs.clear()
    tomorrow = utils.get_date_ymd('tomorrow')
    for s, v in bugs_by_signature.items():
        info = signatures[s]
        if v:
            min_date = tomorrow
            for i in info['affected_channels']:
                if i[0] != 'esr':
                    d = start_date_by_channel[i[0]]
                    if d < min_date:
                        min_date = d

            bug_to_touch = get_last_bug(v, bugs_history_info, min_date)
            if not bug_to_touch:
                crashes_to_reopen.append(s)
        else:
            bug_to_touch = None

        info['selected_bug'] = bug_to_touch
        info['bugs'] = v
        if bug_to_touch:
            bugs.add(bug_to_touch)

    __warn('Collected last bugs: %d' % len(bugs), verbose)

    # get bug info
    include_fields = ['status', 'id', 'cf_crash_signature']
    status_flags = {}
    for c, v in base_versions.items():
        v = str(v)
        if c != 'esr':
            f1 = 'cf_status_firefox' + v
        else:
            f1 = 'cf_status_firefox_esr' + v
        include_fields.append(f1)
        status_flags[c] = f1

    bug_info = {}

    def bug_handler(bug, data):
        data[str(bug['id'])] = bug

    Bugzilla(list(bugs),
             include_fields=include_fields,
             bughandler=bug_handler,
             bugdata=bug_info).get_data().wait()

    __warn('Collected bug info: Ok', verbose)

    for info in signatures.values():
        bug = info['selected_bug']
        if bug:
            if bug in bug_info:
                info['selected_bug'] = bug_info[bug]
            else:
                info['selected_bug'] = 'private'

    analysis = __analyze(signatures, status_flags)

    if max_bugs > 0:
        __analysis = {}
        count = 0
        for signature, info in analysis.items():
            if info['firefox']:
                __analysis[signature] = info
                count += 1
                if count == max_bugs:
                    analysis = __analysis
                    break

    __warn('Analysis: Ok', verbose)

    # Now get the number of crashes for each signature
    queries = []
    trends = {}
    signatures_by_chan = {}
    default_trend_by_chan = {}
    today = utils.get_date_ymd('today')
    ref_w = today.isocalendar()[1]

    def get_past_week(date):
        isodate = date.isocalendar()
        w = isodate[1]
        if w > ref_w:
            return ref_w - w + 53
        else:
            return ref_w - w

    for chan in channel:
        past_w = get_past_week(start_date_by_channel[chan])
        default_trend_by_chan[chan] = {i: 0 for i in range(past_w + 1)}

    for signature, info in analysis.items():
        if info['firefox']:
            data = {}
            trends[signature] = data
            # for chan, volume in info['affected']:
            for chan in channel:
                if chan in signatures_by_chan:
                    signatures_by_chan[chan].append(signature)
                else:
                    signatures_by_chan[chan] = [signature]
                data[chan] = default_trend_by_chan[chan].copy()

    def handler_ss(chan, json, data):
        for facets in json['facets']['histogram_date']:
            d = utils.get_date_ymd(facets['term'])
            w = get_past_week(d)
            s = facets['facets']['signature']
            for signature in s:
                count = signature['count']
                sgn = signature['term']
                data[sgn][chan][w] += count

    for chan, signatures in signatures_by_chan.items():
        if search_start_date:
            search_date = socorro.SuperSearch.get_search_date(
                search_start_date, end_date)
        else:
            search_date = socorro.SuperSearch.get_search_date(
                utils.get_date_str(start_date_by_channel[chan]), end_date)

        for sgns in Connection.chunks(signatures, 10):
            queries.append(
                Query(socorro.SuperSearch.URL, {
                    'signature': ['=' + s for s in sgns],
                    'product': product,
                    'version': all_versions,
                    'release_channel': chan,
                    'date': search_date,
                    '_histogram.date': 'signature',
                    '_histogram_interval': 1,
                    '_results_number': 0
                },
                      handler=functools.partial(handler_ss, chan),
                      handlerdata=trends))
    socorro.SuperSearch(queries=queries).wait()

    __warn('Collected trends: Ok\n', verbose)

    # replace dictionary containing trends by a list
    for signature, i in trends.items():
        for chan, trend in i.items():
            i[chan] = [
                trend[week] for week in sorted(trend.keys(), reverse=False)
            ]
        analysis[signature]['trend'] = i

    return {
        'status_flags': status_flags,
        'base_versions': base_versions,
        'start_dates': start_date_by_channel,
        'signatures': analysis
    }
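
A worked example of the get_past_week arithmetic defined above, using a 53-week ISO year so the wrap-around branch is exercised (the dates are hypothetical):

import datetime

today = datetime.date(2016, 1, 12)   # ISO week 2 of 2016
ref_w = today.isocalendar()[1]       # 2
d = datetime.date(2015, 12, 22)      # ISO week 52 of 2015, a 53-week year
w = d.isocalendar()[1]               # 52
past_w = ref_w - w + 53 if w > ref_w else ref_w - w
# past_w == 3: d falls three ISO weeks before today's week.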
Example #15
                        type=int,
                        help='the duration')
    parser.add_argument('-p',
                        '--product',
                        action='store',
                        default='Firefox',
                        help='the product')
    parser.add_argument('-v',
                        '--versions',
                        action='store',
                        nargs='+',
                        help='the Firefox versions')
    parser.add_argument(
        '--cycle',
        action='store_true',
        help='duration is computed to take into account all the cycle')

    args = parser.parse_args()

    if args.startdate:
        duration = (utils.get_date_ymd(args.enddate) -
                    utils.get_date_ymd(args.startdate)).days + 1
    else:
        duration = -1 if args.cycle else args.duration
    stats = get(args.channel,
                args.enddate,
                product=args.product,
                versions=args.versions,
                duration=duration)
    pprint(stats)
Example #16
def get(channel, date, versions=None, product='Firefox', duration=1):
    """Get stability info

    Args:
        channel (str): the channel
        date (str): the final date
        versions (Optional[List[str]]): the versions to treat
        product (Optional[str]): the product
        duration (Optional[int]): the duration to retrieve the data

    Returns:
        dict: contains all the info relative to stability
    """
    channel = channel.lower()
    cycle = duration <= 0
    versions_info = socorro.ProductVersions.get_version_info(versions,
                                                             channel=channel,
                                                             product=product)

    versions = versions_info.keys()
    throttle = set(map(lambda p: p[1], versions_info.values()))
    diff_throttle = len(throttle) != 1
    # normally the throttle is 10% for release and 100% for the other channels
    if not diff_throttle:
        throttle = throttle.pop()

    platforms = socorro.Platforms.get_cached_all()

    end_date_dt = utils.get_date_ymd(date)
    if cycle:
        # get the start date of each version and take the earliest
        start_date_dt = min(
            map(lambda p: utils.get_date_ymd(p[0]), versions_info.values()))
        duration = (end_date_dt - start_date_dt).days + 1
    else:
        start_date_dt = end_date_dt - timedelta(duration - 1)

    start_date_str = utils.get_date_str(start_date_dt)
    end_date_str = utils.get_date_str(end_date_dt)

    # First, we get the ADI
    adi = socorro.ADI.get(version=versions,
                          product=product,
                          end_date=end_date_str,
                          duration=duration,
                          platforms=platforms)
    adi = [adi[key] for key in sorted(adi.keys(), reverse=False)]

    # Get the khours
    khours = Redash.get_khours(start_date_dt, end_date_dt, channel, versions,
                               product)
    khours = [khours[key] for key in sorted(khours.keys(), reverse=False)]

    # Get the # of crashes (crash pings)
    crash_pings = Redash.get_number_of_crash(start_date_dt, end_date_dt,
                                             channel, versions, product)

    crashes = {}
    stats = {'m+c': 0., 'main': 0., 'content': 0., 'plugin': 0., 'all': 0.}
    for i in range(duration):
        d = end_date_dt - timedelta(i)
        crashes[d] = {}
        crashes[d]['socorro'] = {
            'global': stats.copy(),
            'startup': stats.copy()
        }
        crashes[d]['telemetry'] = crash_pings[d]

    base = {
        'product': product,
        'version': None,
        'date': socorro.SuperSearch.get_search_date(start_date_str,
                                                    end_date_str),
        'release_channel': channel,
        '_results_number': 1,
        '_histogram.date': ['product', 'process_type'],
        '_facets_size': 3
    }

    if diff_throttle:
        # in this case each version could have a different throttle so we need to compute stats for each version
        queries = []
        for v, t in versions_info.items():
            cparams = base.copy()
            cparams['version'] = v
            queries.append(
                Query(socorro.SuperSearch.URL, cparams,
                      functools.partial(__crash_handler, t[1]), crashes))
            cparams = copy.deepcopy(cparams)
            cparams['uptime'] = '<60'
            cparams['_histogram.date'].append('uptime')
            queries.append(
                Query(socorro.SuperSearch.URL, cparams,
                      functools.partial(__crash_handler, t[1]), crashes))
    else:
        base['version'] = versions
        queries = []
        queries.append(
            Query(socorro.SuperSearch.URL, base,
                  functools.partial(__crash_handler, throttle), crashes))
        cparams = copy.deepcopy(base)
        cparams['uptime'] = '<60'
        cparams['_histogram.date'].append('uptime')
        queries.append(
            Query(socorro.SuperSearch.URL, cparams,
                  functools.partial(__crash_handler, throttle), crashes))

    socorro.SuperSearch(queries=queries).wait()
    crashes = [crashes[key] for key in sorted(crashes.keys(), reverse=False)]

    # Now we compute the rates and the averages
    stats = {
        'm+c': [0., 0., 0., 0.],
        'main': [0., 0., 0., 0.],
        'content': [0., 0., 0., 0.],
        'plugin': [0., 0., 0., 0.],
        'all': [0., 0., 0., 0.]
    }
    averages = {}
    averages['socorro'] = {'global': stats, 'startup': copy.deepcopy(stats)}
    averages['telemetry'] = copy.deepcopy(stats)
    N = len(adi)

    # sum
    for i in range(N):
        crash_soc = crashes[i]['socorro']
        for k1, v1 in averages['socorro'].items():
            for k2, av in v1.items():
                c = crash_soc[k1][k2]
                # the rate is computed for 100 adi
                x = utils.rate(100. * c, adi[i])
                av[0] += x
                av[1] += x**2
                y = utils.rate(c, khours[i])
                av[2] += y
                av[3] += y**2
                crash_soc[k1][k2] = (c, x, y)
        crash_tel = crashes[i]['telemetry']
        for k1, av in averages['telemetry'].items():
            c = crash_tel[k1]
            # the rate is computed for 100 adi
            x = utils.rate(100. * c, adi[i])
            av[0] += x
            av[1] += x**2
            y = utils.rate(c, khours[i])
            av[2] += y
            av[3] += y**2
            crash_tel[k1] = (c, x, y)

    N = float(N)
    averages_old = {'socorro': {}, 'telemetry': {}}
    averages_new = copy.deepcopy(averages_old)

    # mean & standard deviation
    av_new_soc = averages_new['socorro']
    av_old_soc = averages_old['socorro']
    for k1, v1 in averages['socorro'].items():
        d1 = {}
        av_old_soc[k1] = d1
        d2 = {}
        av_new_soc[k1] = d2
        for k2, av in v1.items():
            m = av[0] / N
            d1[k2] = (m, math.sqrt(av[1] / N - m**2))
            m = av[2] / N
            d2[k2] = (m, math.sqrt(av[3] / N - m**2))

    av_new_tel = averages_new['telemetry']
    av_old_tel = averages_old['telemetry']
    for k1, av in averages['telemetry'].items():
        m = av[0] / N
        av_old_tel[k1] = (m, math.sqrt(av[1] / N - m**2))
        m = av[2] / N
        av_new_tel[k1] = (m, math.sqrt(av[3] / N - m**2))

    return {
        'start_date': start_date_str,
        'end_date': end_date_str,
        'versions': versions,
        'adi': adi,
        'khours': khours,
        'crashes': crashes,
        'averages_old': averages_old,
        'averages_new': averages_new
    }
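
utils.rate, used in the sums above, is assumed to be a ratio guarded against a zero denominator, roughly (assumed semantics, not the library's actual code):

def rate(x, y):
    # Guard so days with no ADI/khours don't raise ZeroDivisionError.
    return x / y if y else float('nan')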
Example #17
def get(channel,
        date,
        product='Firefox',
        duration=11,
        tc_limit=50,
        crash_type='all',
        startup=False):
    """Get crashes info

    Args:
        channel (str): the channel
        date (str): the final date
        product (Optional[str]): the product
        duration (Optional[int]): the duration to retrieve the data
        tc_limit (Optional[int]): the number of topcrashes to load
        crash_type (Optional[str]): 'all' (default) or 'browser' or 'content' or 'plugin'
        startup (Optional[bool]): if True, restrict the search to startup crashes (uptime <= 60s)

    Returns:
        dict: contains all the info relative to the crashes
    """
    channel = channel.lower()
    version = v[channel]
    versions_info = socorro.ProductVersions.get_version_info(version,
                                                             channel=channel,
                                                             product=product)
    versions = versions_info.keys()
    platforms = socorro.Platforms.get_cached_all()

    if crash_type and isinstance(crash_type, six.string_types):
        crash_type = [crash_type]

    throttle = set(map(lambda p: p[1], versions_info.values()))
    if len(throttle) == 1:
        throttle = throttle.pop()
    else:
        return

    _date = utils.get_date_ymd(date)
    start_date = utils.get_date_str(_date - timedelta(duration - 1))
    end_date = utils.get_date_str(_date)

    # First, we get the ADI
    adi = socorro.ADI.get(version=versions,
                          product=product,
                          end_date=end_date,
                          duration=duration,
                          platforms=platforms)
    adi = [adi[key] for key in sorted(adi.keys(), reverse=True)]

    # get the khours
    khours = Redash.get_khours(utils.get_date_ymd(start_date),
                               utils.get_date_ymd(end_date), channel, versions,
                               product)
    khours = [khours[key] for key in sorted(khours.keys(), reverse=True)]

    overall_crashes_by_day = []
    signatures = {}

    def signature_handler(json):
        for signature in json['facets']['signature']:
            signatures[signature['term']] = [signature['count'], 0, 0, 0, 0]

            for platform in signature['facets']['platform']:
                if platform['term'] == 'Linux':
                    signatures[signature['term']][3] = platform['count']
                elif platform['term'] == 'Windows NT':
                    signatures[signature['term']][1] = platform['count']
                elif platform['term'] == 'Mac OS X':
                    signatures[signature['term']][2] = platform['count']

            for uptime in signature['facets']['uptime']:
                if int(uptime['term']) < 60:
                    signatures[signature['term']][4] += uptime['count']

        for facets in json['facets']['histogram_date']:
            overall_crashes_by_day.insert(0, facets['count'])

    params = {
        'product': product,
        'version': versions,
        'date': socorro.SuperSearch.get_search_date(start_date, end_date),
        'release_channel': channel,
        '_aggs.signature': ['platform', 'uptime'],
        '_results_number': 0,
        '_facets_size': tc_limit,
        '_histogram.date': ['product'],
        '_histogram_interval': 1
    }

    if startup:
        params['uptime'] = '<=60'

    socorro.SuperSearch(params=params, handler=signature_handler).wait()

    bug_flags = [
        'resolution', 'id', 'last_change_time',
        'cf_tracking_firefox' + str(version)
    ]
    for i in range(int(version), int(v['nightly']) + 1):
        bug_flags.append('cf_status_firefox' + str(i))

    # TODO: too many requests... should be improved with chunks
    bugs = {}
    # TODO: Use regexps once the Bugzilla bug that prevents them from working is fixed.
    base = {
        'j_top': 'OR',
        'o1': 'substring',
        'f1': 'cf_crash_signature',
        'v1': None,
        'o2': 'substring',
        'f2': 'cf_crash_signature',
        'v2': None,
        'o3': 'substring',
        'f3': 'cf_crash_signature',
        'v3': None,
        'o4': 'substring',
        'f4': 'cf_crash_signature',
        'v4': None,
        'include_fields': bug_flags
    }

    queries = []
    for sgn in signatures.keys():
        cparams = base.copy()
        cparams['v1'] = '[@' + sgn + ']'
        cparams['v2'] = '[@ ' + sgn + ' ]'
        cparams['v3'] = '[@ ' + sgn + ']'
        cparams['v4'] = '[@' + sgn + ' ]'
        bugs[sgn] = []
        queries.append(
            Query(Bugzilla.API_URL, cparams, __bug_handler, bugs[sgn]))
    res_bugs = Bugzilla(queries=queries)

    # we have stats by signature in signatures;
    # for each signature, get the number of crashes over the last X days
    # to build the signature trend
    trends = {}
    default_trend = {}
    for i in range(duration):
        default_trend[_date - timedelta(i)] = 0

    base = {
        'product': product,
        'version': versions,
        'signature': None,
        'date': socorro.SuperSearch.get_search_date(start_date, end_date),
        'release_channel': channel,
        '_results_number': 0,
        '_histogram.date': ['signature'],
        '_histogram_interval': 1
    }

    queries = []
    for sgns in Connection.chunks(
            list(map(lambda sgn: '=' + sgn, signatures.keys())), 10):
        sgn_group = []
        for sgn in sgns:
            if sum(len(s) for s in sgn_group) >= 1000:
                cparams = base.copy()
                cparams['signature'] = sgn_group
                queries.append(
                    Query(socorro.SuperSearch.URL, cparams,
                          functools.partial(__trend_handler, default_trend),
                          trends))
                sgn_group = []

            sgn_group.append(sgn)

        if len(sgn_group) > 0:
            cparams = base.copy()
            cparams['signature'] = sgn_group
            queries.append(
                Query(socorro.SuperSearch.URL, cparams,
                      functools.partial(__trend_handler, default_trend),
                      trends))

    socorro.SuperSearch(queries=queries).wait()

    for sgn, trend in trends.items():
        signatures[sgn] = (signatures[sgn], [
            trend[key] for key in sorted(trend.keys(), reverse=True)
        ])

    _signatures = {}
    # order signatures by crash count
    sorted_signatures = sorted(signatures.items(), key=lambda x: x[1][0][0], reverse=True)
    for i, s in enumerate(sorted_signatures, start=1):
        _signatures[s[0]] = i  # top crash rank

    res_bugs.wait()

    # TODO: In the first query to get the bugs, also get dupe_of and avoid the first query
    #       in follow_dup (so modify follow_dup to accept both a bug ID or a bug object).
    queries = []
    for sgn in signatures.keys():
        duplicate_ids = [
            bug['id'] for bug in bugs[sgn] if bug['resolution'] == 'DUPLICATE'
        ]

        # Remove bugs resolved as DUPLICATE from the list of bugs associated to the signature.
        bugs[sgn] = [
            bug for bug in bugs[sgn] if bug['id'] not in duplicate_ids
        ]

        # Find duplicates for bugs resolved as DUPLICATE.
        duplicates = {
            k: v
            for k, v in Bugzilla.follow_dup(duplicate_ids).items()
            if v is not None
        }
        duplicate_targets = [
            bug_id for bug_id in duplicates.values()
            if int(bug_id) not in [bug['id'] for bug in bugs[sgn]]
        ]
        if len(duplicate_targets) == 0:
            continue

        # Get info about bugs that the DUPLICATE bugs have been duped to.
        params = {
            'id': ','.join(duplicate_targets),
            'include_fields': bug_flags,
        }
        queries.append(
            Query(Bugzilla.API_URL, params, __bug_handler, bugs[sgn]))
    Bugzilla(queries=queries).wait()

    for sgn, stats in signatures.items():
        # stats is a 2-tuple: ([count, win_count, mac_count, linux_count, startup_count], trend)
        startup_percent = float(stats[0][4]) / float(stats[0][0])
        _signatures[sgn] = {
            'tc_rank': _signatures[sgn],
            'crash_count': stats[0][0],
            'startup_percent': startup_percent,
            'crash_by_day': stats[1],
            'bugs': bugs[sgn]
        }

    return {
        'start_date': start_date,
        'end_date': end_date,
        'versions': list(versions),
        'adi': adi,
        'khours': khours,
        'crash_by_day': overall_crashes_by_day,
        'signatures': _signatures,
        'throttle': float(throttle)
    }