Example #1
def functional():
  return count_by(itemgetter('hour'),
                  map(json.loads,
                      filter(None,
                             mapcat(lambda output: output.strip().split('\n'),
                                    map(lambda date: logs[date.strftime('%Y/%m/%d')],
                                        map(lambda days_ago: today - timedelta(days=days_ago),
                                            range(1, days_of_logs + 1)))))))
Example #2
def piped():
  return (_ | range(1, days_of_logs + 1)
           | map(lambda days_ago: today - timedelta(days=days_ago))
           | map(lambda date: logs[date.strftime('%Y/%m/%d')])
           | mapcat(lambda output: output.strip().split('\n'))
           | filter(None)
           | map(json.loads)
           | count_by(itemgetter('hour'))
           | _)
Example #3
def piped():
    return (_ | range(1, days_of_logs + 1)
            | map(lambda days_ago: today - timedelta(days=days_ago))
            | map(lambda date: logs[date.strftime('%Y/%m/%d')])
            | mapcat(lambda output: output.strip().split('\n'))
            | filter(None)
            | map(json.loads)
            | count_by(itemgetter('hour'))
            | _)
Example #4
def functional():
    return count_by(
        itemgetter('hour'),
        map(
            json.loads,
            filter(
                None,
                mapcat(
                    lambda output: output.strip().split('\n'),
                    map(
                        lambda date: logs[date.strftime('%Y/%m/%d')],
                        map(lambda days_ago: today - timedelta(days=days_ago),
                            range(1, days_of_logs + 1)))))))
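Examples #1-#4 show the same day-of-logs pipeline in nested-call and pipe styles (mapcat and count_by are funcy's; logs, today, and days_of_logs come from surrounding code not shown here). A minimal self-contained sketch of the counting step:

import json
from operator import itemgetter
from funcy import count_by

# Toy stand-ins for the parsed log lines the examples produce.
lines = [
    '{"hour": 10, "msg": "a"}',
    '{"hour": 10, "msg": "b"}',
    '{"hour": 11, "msg": "c"}',
]
# count_by(f, seq) returns a defaultdict(int) mapping f(item) -> occurrences.
print(dict(count_by(itemgetter('hour'), map(json.loads, lines))))
# {10: 2, 11: 1}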
Example #5
    def get_data(self):
        all_transactions = filter(
            lambda t: t["type"] in ("request-changes", "accept"),
            cat(pluck("transactions", self.raw_data)),
        )
        accept_transactions, reject_transactions = split(
            lambda t: t["type"] == "accept", all_transactions)
        most_accepting_author, most_accepting_count = Counter(
            count_by(itemgetter("authorPHID"),
                     accept_transactions)).most_common(1)[0]
        most_rejecting_author, most_rejecting_count = Counter(
            count_by(itemgetter("authorPHID"),
                     reject_transactions)).most_common(1)[0]

        return (
            {
                "author": self.users_mapping[most_accepting_author],
                "count": most_accepting_count,
            },
            {
                "author": self.users_mapping[most_rejecting_author],
                "count": most_rejecting_count,
            },
        )
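A self-contained sketch of Example #5's split/count_by pattern on toy transactions (funcy.split makes one pass over the sequence and yields matching and non-matching streams):

from collections import Counter
from operator import itemgetter
from funcy import split, count_by

transactions = [
    {"type": "accept", "authorPHID": "u1"},
    {"type": "accept", "authorPHID": "u1"},
    {"type": "request-changes", "authorPHID": "u2"},
]
accepts, rejects = split(lambda t: t["type"] == "accept", transactions)
# Counter accepts the defaultdict from count_by, unlocking most_common().
print(Counter(count_by(itemgetter("authorPHID"), accepts)).most_common(1))
# [('u1', 2)]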
Example #6
File: ME60.py Project: sjava/weihu
def get_vlan_users(ip, inf):
    def _get_users(child, i):
        rslt = do_some(child, 'disp access-user interface {i} | in /'.format(i=i))
        users = re_all(r'(\d+)/', rslt)
        return users

    try:
        child = telnet(ip)
        infs = do_some(child, r'disp cu interface | in Eth-Trunk{inf}\.'.format(inf=inf))
        infs = re_all(r'interface (\S+)', infs)
        rslt = lmapcat(partial(_get_users, child), infs)
        close(child)
        rslt = count_by(int, rslt)
    except (pexpect.EOF, pexpect.TIMEOUT):
        return ('fail', None, ip)
    return ('success', rslt, ip)
Example #7
File: M6k.py Project: sjava/weihu
def get_vlan_users(ip, inf):
    def _get_users(child, i):
        rslt = do_some(child, 'show subscriber interface {i} | in external-vlan'.format(i=i))
        vlans = re_all(r'external-vlan\s+:(\d+)', rslt)
        return vlans

    try:
        child = telnet(ip)
        rslt = do_some(child, r'show running-config | in smartgroup{inf}\.'.format(inf=inf))
        infs = distinct(re_all(r'(smartgroup\S+)', rslt))
        vlans = lmapcat(partial(_get_users, child), infs)
        close(child)
        vlans = count_by(int, vlans)
    except (pexpect.EOF, pexpect.TIMEOUT):
        return ('fail', None, ip)
    return ('success', vlans, ip)
Example #8
def get_vlan_users(ip, inf):
    def _get_users(child, i):
        rslt = do_some(child,
                       'disp access-user interface {i} | in /'.format(i=i))
        users = re_all(r'(\d+)/', rslt)
        return users

    try:
        child = telnet(ip)
        infs = do_some(
            child, r'disp cu interface | in Eth-Trunk{inf}\.'.format(inf=inf))
        infs = re_all(r'interface (\S+)', infs)
        rslt = lmapcat(partial(_get_users, child), infs)
        close(child)
        rslt = count_by(int, rslt)
    except (pexpect.EOF, pexpect.TIMEOUT):
        return ('fail', None, ip)
    return ('success', rslt, ip)
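In Examples #6-#8 the session helpers (telnet, do_some, close) are local to the sjava/weihu project, while re_all, lmapcat, and count_by come from funcy. The funcy-relevant tail reduces to counting parsed strings as ints, sketched here on canned CLI output:

from funcy import re_all, count_by

output = "vlan 100/1\nvlan 100/2\nvlan 200/1\n"
users = re_all(r'(\d+)/', output)    # ['100', '100', '200']
# The int key function both parses and buckets the matches.
print(dict(count_by(int, users)))    # {100: 2, 200: 1}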
Example #9
def distribute_by_created_on(qs):
    return accumulate(
        count_by(ceil_date, qs.values_list('created_on', flat=True)))
Example #10
def distribute_by_user_id(qs):
    data = group_values(qs.values_list('created_by_id', 'created_on'))
    return walk_values(lambda dates: accumulate(count_by(ceil_date, dates)),
                       data)
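In Examples #9 and #10, group_values and walk_values are funcy's, while ceil_date and accumulate are project-local helpers. Below is a plausible sketch of accumulate, assuming it turns the {bucket: count} dict from count_by into running totals in bucket order (an assumption for illustration, not the project's actual code):

from funcy import count_by

def accumulate(counts):
    # Hypothetical stand-in: cumulative totals over sorted buckets.
    total, out = 0, {}
    for key in sorted(counts):
        total += counts[key]
        out[key] = total
    return out

dates = ['2020-01', '2020-01', '2020-02', '2020-03']
print(accumulate(count_by(None, dates)))
# {'2020-01': 2, '2020-02': 3, '2020-03': 4}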
Example #11
    def handle(self, *args, **options):
        series = {}
        samples = {}

        platform_created_on = join_with(
            min, [{p: ceil_attrs_date(s)
                   for p in s.platforms} for s in Series.objects.all()])
        platform_qs = Platform.objects.annotate(probes_count=Count('probes'))\
            .values('gpl_name', 'probes_count')
        platforms = {}
        platforms_probes = {}

        series_annotations = {}
        sample_annotations = {}
        concordant_series_annotations = {}
        concordant_sample_annotations = {}

        series_tags = {}
        concordant_series_tags = {}
        sample_tags = {}
        concordant_sample_tags = {}

        series_validations = {}
        sample_validations = {}
        concordant_series_validations = {}
        concordant_sample_validations = {}

        for specie in SPECIES.values():
            series[specie] = accumulate(
                count_by(ceil_attrs_date,
                         Series.objects.filter(specie=specie)))

            qs = Sample.objects.filter(platform__specie=specie)
            iterator = tqdm(queryset_iterator(qs, 30000),
                            total=qs.count(),
                            desc='{0} samples'.format(specie))
            samples[specie] = accumulate(count_by(ceil_attrs_date, iterator))

            platforms_data = [[
                platform_created_on[item['gpl_name']], item['probes_count']
            ] for item in platform_qs.filter(specie=specie)]
            platforms[specie] = accumulate(count_by(first, platforms_data))
            group = group_values(platforms_data)
            platforms_probes[specie] = accumulate(walk_values(sum, group))

            qs = SeriesAnnotation.objects.filter(series__specie=specie)
            series_annotations[specie], \
                sample_annotations[specie] = distribute_series_and_sample_annotations(qs)

            concordant_series_annotations[specie], \
                concordant_sample_annotations[specie] = distribute_series_and_sample_annotations(
                    qs.filter(best_cohens_kappa=1))

            qs = SeriesTag.objects.filter(platform__specie=specie,
                                          is_active=True)
            series_tags[specie] = distribute_by_created_on(qs)
            concordant_series_tags[specie] = distribute_by_created_on(
                qs.exclude(agreed=None))

            qs = SampleTag.objects.filter(sample__platform__specie=specie,
                                          is_active=True)
            sample_tags[specie] = distribute_by_created_on(qs)
            concordant_sample_tags[specie] = distribute_by_created_on(
                qs.exclude(series_tag__agreed=None))

            qs = SerieValidation.objects.filter(platform__specie=specie,
                                                ignored=False,
                                                by_incompetent=False)
            series_validations[specie] = distribute_by_created_on(qs)
            concordant_series_validations[specie] = distribute_by_created_on(
                qs.filter(best_kappa=1))

            qs = SampleValidation\
                .objects\
                .filter(sample__platform__specie=specie,
                        serie_validation__ignored=False,
                        serie_validation__by_incompetent=False)
            sample_validations[specie] = distribute_by_created_on(qs)
            concordant_sample_validations[specie] = distribute_by_created_on(
                qs.filter(
                    Q(serie_validation__best_kappa=1) | Q(concordant=True)))

        users = accumulate(
            count_by(ceil_date,
                     User.objects.values_list('date_joined', flat=True)))
        tags = accumulate(
            count_by(ceil_date, Tag.objects.values_list('created_on',
                                                        flat=True)))

        delta = CURRENT_DATE - START_DATE
        keys = sorted(
            set(
                ceil_date(START_DATE + timedelta(days=index * 20))
                for index in range(delta.days // 20 + 1)))

        specie_data = {
            'series': series,
            'samples': samples,
            'platforms': platforms,
            'platforms_probes': platforms_probes,
            'series_annotations': series_annotations,
            'sample_annotations': sample_annotations,
            'concordant_series_annotations': concordant_series_annotations,
            'concordant_sample_annotations': concordant_sample_annotations,
            'series_tags': series_tags,
            'sample_tags': sample_tags,
            'concordant_series_tags': concordant_series_tags,
            'concordant_sample_tags': concordant_sample_tags,
            'series_validations': series_validations,
            'sample_validations': sample_validations,
            'concordant_series_validations': concordant_series_validations,
            'concordant_sample_validations': concordant_sample_validations,
            'series_tags_by_users':
                distribute_by_user_id(SeriesTag.objects.filter(is_active=True)),
            'sample_tags_by_users':
                distribute_by_user_id(SampleTag.objects.filter(is_active=True)),
            'series_validations_by_users':
                distribute_by_user_id(SerieValidation.objects.filter(
                    ignored=False, by_incompetent=False)),
            'sample_validations_by_users':
                distribute_by_user_id(SampleValidation.objects.filter(
                    serie_validation__ignored=False,
                    serie_validation__by_incompetent=False)),
            'series_tag_history': get_series_tag_history(),
        }

        data = {
            'users': users,
            'tags': tags,
        }

        with transaction.atomic():
            HistoricalCounter.objects.filter(
                created_on__lte=CURRENT_DATE).delete()
            HistoricalCounter.objects.bulk_create([
                HistoricalCounter(created_on=key,
                                  counters=merge(
                                      walk_values(get_value(keys, index),
                                                  data),
                                      walk_values(
                                          lambda value: walk_values(
                                              get_value(keys, index), value),
                                          specie_data)))
                for index, key in enumerate(keys)
            ])
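The least familiar funcy call in Example #11 is join_with, which merges a sequence of dicts and resolves key collisions with the given function; here it keeps the earliest date each platform was seen. A minimal illustration on toy data:

from funcy import join_with

dicts = [{'GPL96': '2020-03'}, {'GPL96': '2020-01'}, {'GPL570': '2020-02'}]
# min picks the earliest date wherever a key repeats across dicts.
print(join_with(min, dicts))  # {'GPL96': '2020-01', 'GPL570': '2020-02'}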
Example #12
    def contributors_sorted_by_repos_contributed_in(self, contributors):
        users_by_repos = funcy.count_by(None, contributors).items()
        sorted_users = sorted(users_by_repos, key=itemgetter(1), reverse=True)

        return funcy.map(0, sorted_users)
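Example #12 leans on funcy's extended function semantics: None acts as the identity key function in count_by, and the int 0 acts as itemgetter(0) in funcy.map. A runnable sketch:

import funcy

contributors = ['alice', 'bob', 'alice', 'alice']
users_by_repos = funcy.count_by(None, contributors).items()
ranked = sorted(users_by_repos, key=lambda pair: pair[1], reverse=True)
# funcy.map is lazy, so materialize it to inspect the result.
print(list(funcy.map(0, ranked)))  # ['alice', 'bob']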
Example #13
    def get_data(self):
        counts = Counter(
            count_by(partial(get_in, path=["fields", "authorPHID"]),
                     self.raw_data)).most_common(3)
        return [(self.users_mapping[author], count)
                for author, count in counts]
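Example #13 builds its key function by binding funcy.get_in (which walks nested dicts/lists by path) with functools.partial. A self-contained sketch:

from collections import Counter
from functools import partial
from funcy import get_in, count_by

raw = [
    {"fields": {"authorPHID": "u1"}},
    {"fields": {"authorPHID": "u2"}},
    {"fields": {"authorPHID": "u1"}},
]
# Binding path turns get_in into a one-argument key function for count_by.
key = partial(get_in, path=["fields", "authorPHID"])
print(Counter(count_by(key, raw)).most_common(2))  # [('u1', 2), ('u2', 1)]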