Example #1
def get_shipit_facts(issuewrapper,
                     meta,
                     module_indexer,
                     core_team=[],
                     botnames=[]):
    """ Count shipits by maintainers/community/other """

    # maintainers - people who maintain this file/module
    # community - people who maintain file(s) in the same directory
    # other - anyone else who comments with shipit/+1/LGTM

    iw = issuewrapper
    nmeta = {
        'shipit': False,
        'owner_pr': False,
        'shipit_ansible': False,
        'shipit_community': False,
        'shipit_count_other': False,
        'shipit_count_community': False,
        'shipit_count_maintainer': False,
        'shipit_count_ansible': False,
        'shipit_actors': None,
        'community_usernames': [],
        'notify_community_shipit': False,
    }

    if not iw.is_pullrequest():
        return nmeta

    module_utils_files_owned = 0  # module_utils files for which submitter is maintainer
    if meta['is_module_util']:
        for f in iw.files:
            if (f.startswith('lib/ansible/module_utils')
                    and f in module_indexer.botmeta['files']):
                maintainers = module_indexer.botmeta['files'][f].get('maintainers', [])
                if maintainers and (iw.submitter in maintainers):
                    module_utils_files_owned += 1
        if module_utils_files_owned == len(iw.files):
            nmeta['owner_pr'] = True
            return nmeta

    if not meta['module_match']:
        return nmeta

    # https://github.com/ansible/ansibullbot/issues/722
    if iw.wip:
        logging.debug('WIP PRs do not get shipits')
        return nmeta

    if meta['is_needs_revision'] or meta['is_needs_rebase']:
        logging.debug(
            'PRs with needs_revision or needs_rebase label do not get shipits')
        return nmeta

    maintainers = meta['module_match']['maintainers']
    maintainers = \
        ModuleIndexer.replace_ansible(
            maintainers,
            core_team,
            bots=botnames
        )

    modules_files_owned = 0
    if not meta['is_new_module']:
        for f in iw.files:
            if (f.startswith('lib/ansible/modules')
                    and iw.submitter in module_indexer.modules[f]['maintainers']):
                modules_files_owned += 1
    nmeta['owner_pr'] = (modules_files_owned + module_utils_files_owned) == len(iw.files)

    # community is the other maintainers in the same namespace
    mnamespace = meta['module_match']['namespace']
    community = \
        module_indexer.get_maintainers_for_namespace(mnamespace)
    community = [
        x for x in community
        if x != 'ansible' and x not in core_team and x != 'DEPRECATED'
    ]

    # shipit tallies
    ansible_shipits = 0
    maintainer_shipits = 0
    community_shipits = 0
    other_shipits = 0
    shipit_actors = []
    shipit_actors_other = []

    for event in iw.history.history:

        if event['event'] not in [
                'commented', 'committed', 'review_approved', 'review_comment'
        ]:
            continue
        if event['actor'] in botnames:
            continue

        # commits reset the counters
        if event['event'] == 'committed':
            ansible_shipits = 0
            maintainer_shipits = 0
            community_shipits = 0
            other_shipits = 0
            shipit_actors = []
            shipit_actors_other = []
            continue

        actor = event['actor']
        body = event.get('body', '')
        body = body.strip()
        if not is_approval(body):
            continue
        logging.info('%s shipit' % actor)

        # ansible shipits
        if actor in core_team:
            if actor not in shipit_actors:
                ansible_shipits += 1
                shipit_actors.append(actor)
            continue

        # maintainer shipits
        if actor in maintainers:
            if actor not in shipit_actors:
                maintainer_shipits += 1
                shipit_actors.append(actor)
            continue

        # community shipits
        if actor in community:
            if actor not in shipit_actors:
                community_shipits += 1
                shipit_actors.append(actor)
            continue

        # other shipits
        if actor not in shipit_actors_other:
            other_shipits += 1
            shipit_actors_other.append(actor)
        continue

    # submitters should count if they are core team/maintainers/community
    if iw.submitter in core_team:
        if iw.submitter not in shipit_actors:
            ansible_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in maintainers:
        if iw.submitter not in shipit_actors:
            maintainer_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in community:
        if iw.submitter not in shipit_actors:
            community_shipits += 1
            shipit_actors.append(iw.submitter)

    nmeta['shipit_count_other'] = other_shipits
    nmeta['shipit_count_community'] = community_shipits
    nmeta['shipit_count_maintainer'] = maintainer_shipits
    nmeta['shipit_count_ansible'] = ansible_shipits
    nmeta['shipit_actors'] = shipit_actors
    nmeta['shipit_actors_other'] = shipit_actors_other
    nmeta['community_usernames'] = sorted(community)

    total = community_shipits + maintainer_shipits + ansible_shipits

    # include shipits from other people to push over the edge
    if total == 1 and other_shipits > 2:
        total += other_shipits

    if total > 1:
        nmeta['shipit'] = True
    elif meta['is_new_module'] or \
            (len(maintainers) == 1 and maintainer_shipits == 1):
        if community:
            bpc = iw.history.get_boilerplate_comments()
            if 'community_shipit_notify' not in bpc:
                nmeta['notify_community_shipit'] = True

    logging.info('total shipits: %s' % total)

    return nmeta
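The examples on this page call an is_approval() helper that is not shown. A minimal sketch, assuming it only needs to recognize the shipit/+1/LGTM comment tokens described in the docstring (the older variant in Example #4 inlines the same substring checks):

def is_approval(body):
    """Hypothetical sketch: treat shipit/+1/LGTM style comments as approvals."""
    if not body:
        return False
    body = body.strip().lower()
    # substring checks mirror the inline test used in the older variant below
    return 'shipit' in body or '+1' in body or 'lgtm' in body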
Example #2
def get_shipit_facts(issuewrapper,
                     inmeta,
                     module_indexer,
                     core_team=[],
                     botnames=[]):
    """ Count shipits by maintainers/community/other """

    # supershipit - maintainers with isolated commit access
    # maintainers - people who maintain this file/module
    # community - people who maintain file(s) in the same directory
    # other - anyone else who comments with shipit/+1/LGTM

    meta = inmeta.copy()
    iw = issuewrapper
    nmeta = {
        u'shipit': False,
        u'supershipit': False,
        u'owner_pr': False,
        u'shipit_ansible': False,
        u'shipit_community': False,
        u'shipit_count_other': False,
        u'shipit_count_community': False,
        u'shipit_count_maintainer': False,
        u'shipit_count_ansible': False,
        u'shipit_count_vtotal': False,
        u'shipit_count_historical': False,
        u'shipit_actors': None,
        u'supershipit_actors': None,
        u'community_usernames': [],
        u'notify_community_shipit': False,
        u'is_rebuild_merge': False,
    }

    if not iw.is_pullrequest():
        return nmeta

    # https://github.com/ansible/ansibullbot/issues/1147
    meta[u'component_matches'] = [
        x for x in meta.get(u'component_matches', [])
        if not x[u'repo_filename'].startswith(u'changelogs/fragments/')
    ]

    files = [f for f in iw.files if not f.startswith(u'changelogs/fragments/')]
    module_utils_files_owned = 0  # module_utils files for which submitter is maintainer
    if meta[u'is_module_util']:
        for f in files:
            if (f.startswith(u'lib/ansible/module_utils')
                    and f in module_indexer.botmeta[u'files']):
                maintainers = module_indexer.botmeta[u'files'][f].get(u'maintainers', [])
                if maintainers and (iw.submitter in maintainers):
                    module_utils_files_owned += 1

    modules_files_owned = 0
    if not meta[u'is_new_module']:
        for f in files:
            if (f.startswith(u'lib/ansible/modules')
                    and iw.submitter in meta[u'component_maintainers']):
                modules_files_owned += 1
    nmeta[u'owner_pr'] = (modules_files_owned + module_utils_files_owned) == len(files)

    # https://github.com/ansible/ansibullbot/issues/722
    if iw.wip:
        logging.debug(u'WIP PRs do not get shipits')
        return nmeta

    if meta[u'is_needs_revision'] or meta[u'is_needs_rebase']:
        logging.debug(
            u'PRs with needs_revision or needs_rebase label do not get shipits'
        )
        return nmeta

    supershipiteers_byfile = {}
    supershipiteers_byuser = {}
    for cm in meta.get('component_matches', []):
        _ss = cm.get(u'supershipit', [])
        supershipiteers_byfile[cm[u'repo_filename']] = _ss[:]
        for ss in _ss:
            if ss not in supershipiteers_byuser:
                supershipiteers_byuser[ss] = []
            supershipiteers_byuser[ss].append(cm[u'repo_filename'])

    maintainers = meta.get(u'component_maintainers', [])
    maintainers = \
        ModuleIndexer.replace_ansible(
            maintainers,
            core_team,
            bots=botnames
        )

    # community is the other maintainers in the same namespace
    community = meta.get(u'component_namespace_maintainers', [])
    community = [
        x for x in community
        if x != u'ansible' and x not in core_team and x != u'DEPRECATED'
    ]

    # shipit tallies
    ansible_shipits = 0
    maintainer_shipits = 0
    community_shipits = 0
    other_shipits = 0
    shipit_actors = []
    shipit_actors_other = []
    supershipiteers_voted = set()
    rebuild_merge = False
    shipits_historical = set()

    for event in iw.history.history:

        if event[u'event'] not in [
                u'commented', u'committed', u'review_approved',
                u'review_comment'
        ]:
            continue
        if event[u'actor'] in botnames:
            continue

        # commits reset the counters
        if event[u'event'] == u'committed':
            logging.info(event)
            ansible_shipits = 0
            maintainer_shipits = 0
            community_shipits = 0
            other_shipits = 0
            shipit_actors = []
            shipit_actors_other = []
            supershipiteers_voted = set()
            rebuild_merge = False
            logging.info('commit detected, resetting shipit tallies')
            continue

        actor = event[u'actor']
        body = event.get(u'body', u'')
        body = body.strip()

        if not is_approval(body):
            continue

        # historical shipits (keep track of all of them, even if reset)
        shipits_historical.add(actor)

        if actor in core_team and is_rebuild_merge(body):
            rebuild_merge = True
            logging.info(u'%s shipit [rebuild_merge]' % actor)
        else:
            logging.info(u'%s shipit' % actor)

        # super shipits
        if actor in supershipiteers_byuser:
            supershipiteers_voted.add(actor)

        # ansible shipits
        if actor in core_team:
            if actor not in shipit_actors:
                ansible_shipits += 1
                shipit_actors.append(actor)
            continue

        # maintainer shipits
        if actor in maintainers:
            if actor not in shipit_actors:
                maintainer_shipits += 1
                shipit_actors.append(actor)
            continue

        # community shipits
        if actor in community:
            if actor not in shipit_actors:
                community_shipits += 1
                shipit_actors.append(actor)
            continue

        # other shipits
        if actor not in shipit_actors_other:
            other_shipits += 1
            shipit_actors_other.append(actor)

        continue

    # submitters should count if they are core team/maintainers/community
    if iw.submitter in core_team:
        if iw.submitter not in shipit_actors:
            ansible_shipits += 1
            shipit_actors.append(iw.submitter)
        shipits_historical.add(iw.submitter)
    elif iw.submitter in maintainers:
        if iw.submitter not in shipit_actors:
            maintainer_shipits += 1
            shipit_actors.append(iw.submitter)
        shipits_historical.add(iw.submitter)
    elif iw.submitter in community:
        if iw.submitter not in shipit_actors:
            community_shipits += 1
            shipit_actors.append(iw.submitter)
        shipits_historical.add(iw.submitter)

    nmeta[u'shipit_count_other'] = other_shipits
    nmeta[u'shipit_count_community'] = community_shipits
    nmeta[u'shipit_count_maintainer'] = maintainer_shipits
    nmeta[u'shipit_count_ansible'] = ansible_shipits
    nmeta[u'shipit_actors'] = shipit_actors
    nmeta[u'shipit_actors_other'] = shipit_actors_other
    nmeta[u'community_usernames'] = sorted(community)
    nmeta[u'shipit_count_historical'] = list(shipits_historical)
    nmeta[u'shipit_count_htotal'] = len(list(shipits_historical))

    total = community_shipits + maintainer_shipits + ansible_shipits
    nmeta[u'shipit_count_vtotal'] = total + other_shipits

    if rebuild_merge:
        nmeta['is_rebuild_merge'] = True

    # include shipits from other people to push over the edge
    if total == 1 and other_shipits > 2:
        total += other_shipits

    if total > 1 or rebuild_merge:
        nmeta[u'shipit'] = True
    elif meta[u'is_new_module'] or \
            (len(maintainers) == 1 and maintainer_shipits == 1):
        # don't notify if there is no maintainer or if submitter is the only namespace maintainer
        if set(community) - {iw.submitter}:
            bpc = iw.history.get_boilerplate_comments()
            bpc = [x[0] for x in bpc]
            if u'community_shipit_notify' not in bpc:
                nmeta[u'notify_community_shipit'] = True

    logging.info(u'total shipits: %s' % total)

    # supershipit ...
    #   if a supershipiteer for each file exists and has blessed the PR
    #   on the current commit, then override all shipit tallies and get this PR merged
    if supershipiteers_voted:
        nmeta[u'supershipit_actors'] = list(supershipiteers_voted)
        cm_files = [x[u'repo_filename'] for x in meta[u'component_matches']]
        ss_files = set()
        for ssv in supershipiteers_voted:
            for fn in supershipiteers_byuser[ssv]:
                ss_files.add(fn)

        if sorted(set(cm_files)) == sorted(set(ss_files)):
            logging.info(u'supershipit enabled on %s' % iw.html_url)
            nmeta[u'supershipit'] = True
            nmeta[u'shipit'] = True
        else:
            for cm_file in sorted(cm_files):
                if cm_file not in ss_files:
                    logging.info('%s is not governed by supershipit' % cm_file)

    return nmeta
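Example #2 also relies on an is_rebuild_merge() helper that is not defined on this page. A minimal sketch, under the assumption that core team members trigger the behaviour with a literal rebuild_merge command in their approval comment:

def is_rebuild_merge(body):
    """Hypothetical sketch: detect a rebuild_merge command in a comment body."""
    return 'rebuild_merge' in (body or '').lower()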
Example #3
def main():

    set_logger()

    METAFILES = extract_metafiles()

    SKIP = load_skip()
    EXPECTED = load_expected()
    MATCH_MAP = load_match_map()

    ERRORS = []
    ERRORS_COMPONENTS = []

    start_at = None
    if len(sys.argv) == 2:
        start_at = int(sys.argv[1])

    FI = FileIndexer(checkoutdir=CACHEDIR)
    with open('/tmp/files.json', 'w') as f:
        f.write(json.dumps(FI.files, indent=2))
    GQLC = GithubGraphQLClient(C.DEFAULT_GITHUB_TOKEN)
    MI = ModuleIndexer(cachedir=CACHEDIR,
                       gh_client=GQLC,
                       blames=False,
                       commits=False)

    CM = AnsibleComponentMatcher(cachedir=CACHEDIR)

    for k, v in MI.modules.items():
        if k in MATCH_MAP:
            MATCH_MAP.pop(k, None)
        kname = v.get('name')
        if kname not in MATCH_MAP:
            MATCH_MAP[kname] = v.get('repo_filename')
        if kname + ' module' not in MATCH_MAP:
            MATCH_MAP[kname + ' module'] = v.get('repo_filename')
        if 'module: ' + kname not in MATCH_MAP:
            MATCH_MAP['module: ' + kname] = v.get('repo_filename')
        if 'module ' + kname not in MATCH_MAP:
            MATCH_MAP['module ' + kname] = v.get('repo_filename')

        # /modules/remote_management/foreman/katello.py
        pname = k.replace('lib/ansible', '')
        if pname not in MATCH_MAP:
            MATCH_MAP[pname] = v.get('repo_filename')

        # ansible/modules/packaging/os/rpm_key.py
        pname = k.replace('lib/', '/')
        if pname not in MATCH_MAP:
            MATCH_MAP[pname] = v.get('repo_filename')

        # /ansible/modules/packaging/os/rpm_key.py
        pname = k.replace('lib/', '')
        if pname not in MATCH_MAP:
            MATCH_MAP[pname] = v.get('repo_filename')

        # ansible/lib/ansible/modules/monitoring/monit.py
        pname = 'ansible/' + k
        if pname not in MATCH_MAP:
            MATCH_MAP[pname] = v.get('repo_filename')

        # network/f5/bigip_gtm_wide_ip
        pname = k.replace('lib/ansible/modules/', '')
        pname = pname.replace('.py', '')
        pname = pname.replace('.ps1', '')
        if pname not in MATCH_MAP:
            MATCH_MAP[pname] = v.get('repo_filename')

        # network/f5/bigip_gtm_wide_ip.py
        pname = k.replace('lib/ansible/modules/', '')
        if pname not in MATCH_MAP:
            MATCH_MAP[pname] = v.get('repo_filename')

        # modules/packaging/os/pkgng.py
        pname = k.replace('lib/ansible/', '')
        if pname not in MATCH_MAP:
            MATCH_MAP[pname] = v.get('repo_filename')

    save_match_map(MATCH_MAP)

    total = len(METAFILES)
    for IDMF, MF in enumerate(METAFILES):

        if start_at and IDMF < start_at:
            continue

        with open(MF, 'rb') as f:
            meta = json.loads(f.read())

        if not meta.get('is_issue'):
            continue

        component = meta.get('template_data', {}).get('component_raw')

        #if component != 'Module `synchronize`':
        #if component != 'Module: include_role':
        #    continue

        if component:
            print(f'------------------------------------------ {total}|{IDMF}')
            print(meta['html_url'])
            print(meta['title'])
            print(component)

            hurl = meta['html_url']
            if hurl in SKIP:
                continue

            # bad template or bad template parsing
            if len(component) > 100:
                continue

            iw = IssueWrapperMock(meta)
            if ('module' not in iw.body.lower()
                    and 'module' not in iw.title.lower()):
                continue

            expected_fns = []

            # OLD METHOD
            if hurl not in EXPECTED and component not in MATCH_MAP:
                cmf = get_component_match_facts(iw, meta, FI, MI, LABELS)
                expected_fns = cmf.get('module_match')
                if not isinstance(expected_fns, list):
                    expected_fns = [expected_fns]
                expected_fns = [x['repo_filename'] for x in expected_fns if x]
                if 'component_matches' in cmf:
                    expected_fns = [
                        x['filename'] for x in cmf['component_matches']
                    ]
                expected_fns = sorted(set(expected_fns))

            # NEW METHOD
            cmr = CM.match_components(iw.title, iw.body,
                                      iw.template_data.get('component_raw'))
            cmr_fns = [x['repo_filename'] for x in cmr if x]
            cmr_fns = sorted(set(cmr_fns))

            # VALIDATE FROM EXPECTED IF KNOWN
            if hurl in EXPECTED:
                if EXPECTED[hurl] and not isinstance(EXPECTED[hurl], list):
                    expected_fns = [EXPECTED[hurl]]
                elif EXPECTED[hurl]:
                    expected_fns = EXPECTED[hurl]
                else:
                    expected_fns = []

            # USE THE CACHED MAP
            if component in MATCH_MAP:
                expected_fns = MATCH_MAP[component]
                if not isinstance(expected_fns, list):
                    expected_fns = [expected_fns]
            elif component.lower() in MATCH_MAP:
                expected_fns = MATCH_MAP[component.lower()]
                if not isinstance(expected_fns, list):
                    expected_fns = [expected_fns]
            elif component.startswith(':\n') and component.endswith(' module'):
                mapkey = component.lstrip(':\n')
                if mapkey in MATCH_MAP:
                    expected_fns = MATCH_MAP[mapkey]
                    if not isinstance(expected_fns, list):
                        expected_fns = [expected_fns]

            # OLD CODE USED ACTION PLUGINS INSTEAD OF MODULES
            if expected_fns != cmr_fns and hurl not in EXPECTED:
                if (len(expected_fns) == 1 and len(cmr_fns) == 1
                        and 'plugins/action' in expected_fns[0]):
                    e_bn = os.path.basename(expected_fns[0])
                    c_bn = os.path.basename(cmr_fns[0])
                    if e_bn == c_bn:
                        MATCH_MAP[component] = cmr_fns
                        save_match_map(MATCH_MAP)
                        continue

            # DOCS URLS
            if expected_fns != cmr_fns and hurl not in EXPECTED:
                if len(cmr_fns) == 1 and 'lib/ansible/modules' in cmr_fns[0]:
                    c_bn = os.path.basename(cmr_fns[0])
                    if f'docs.ansible.com/ansible/latest/{c_bn}_module.html' in component:
                        MATCH_MAP[component] = cmr_fns
                        save_match_map(MATCH_MAP)
                        continue
                    elif CM.strategy in ['search_by_regex_urls']:
                        MATCH_MAP[component] = cmr_fns
                        save_match_map(MATCH_MAP)
                        continue

            # NXOS ISSUES HAVE NXOS_VERSION HEADER
            if '- nxos' in component:
                if len(cmr_fns) == 1:
                    if os.path.basename(cmr_fns[0]).replace('.py', '') in component:
                        MATCH_MAP[component] = cmr_fns
                        save_match_map(MATCH_MAP)
                        continue
                #import epdb; epdb.st()

            # ODDBALL MODULE COMPONENTS
            if len(cmr_fns) == 1 and 'lib/ansible/modules' in cmr_fns[0]:
                bn = os.path.basename(cmr_fns[0])
                bn = bn.replace('.py', '')
                bn = bn.replace('.ps1', '')
                if ((bn in component or bn.lstrip('_') in component)
                        and 'module' in component.lower()):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue
                elif component == '- ' + bn:
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue
                elif component == bn + '.py' or component == bn + '.ps1':
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue
                elif component == '_' + bn + '.py' or component == '_' + bn + '.ps1':
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue
                elif component == ':\n' + bn or component == ':\n' + bn.lstrip('_'):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue

            # 'multiple modules', etc ...
            if component in CM.KEYWORDS or component.lower() in CM.KEYWORDS:
                if (component in CM.KEYWORDS
                        and CM.KEYWORDS[component] is None and not cmr_fns):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue
                elif (component.lower() in CM.KEYWORDS
                        and CM.KEYWORDS[component.lower()] is None and not cmr_fns):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue
                elif len(cmr_fns) == 1 and cmr_fns[0] == CM.KEYWORDS.get(component):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue
                elif len(cmr_fns) == 1 and cmr_fns[0] == CM.KEYWORDS.get(component.lower()):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue

            if component.lstrip('-').strip() in CM.KEYWORDS and len(cmr_fns) == 1:
                cname = component.lstrip('-').strip()
                if CM.KEYWORDS[cname] == cmr_fns[0]:
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue

            if (component.endswith(' lookup') and len(cmr_fns) == 1
                    and 'lib/ansible/plugins/lookup' in cmr_fns[0]):
                MATCH_MAP[component] = cmr_fns
                save_match_map(MATCH_MAP)
                continue

            if (component.endswith(' inventory script') and len(cmr_fns) == 1
                    and 'contrib/inventory' in cmr_fns[0]):
                MATCH_MAP[component] = cmr_fns
                save_match_map(MATCH_MAP)
                continue

            if component.startswith('ansible/lib') and len(cmr_fns) == 1:
                fn = cmr_fns[0]
                if 'ansible/' + fn == component:
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue

            if component.endswith(' inventory plugin') and len(cmr_fns) == 1:
                fn = cmr_fns[0]
                if fn.startswith('lib/ansible/plugins/inventory'):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue

            if component == 'ec2.py' and cmr_fns and 'contrib/inventory/ec2.py' in cmr_fns:
                MATCH_MAP[component] = cmr_fns
                save_match_map(MATCH_MAP)
                continue

            if len(expected_fns) == 1 and len(cmr_fns) == 1:
                if os.path.basename(expected_fns[0]) == os.path.basename(cmr_fns[0]):
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)
                    continue

            # COMPARE AND RECORD
            if expected_fns != cmr_fns and hurl not in EXPECTED:

                if component in MATCH_MAP or component.lower() in MATCH_MAP:
                    if component.lower() in MATCH_MAP:
                        mmc = MATCH_MAP[component.lower()]
                    else:
                        mmc = MATCH_MAP[component]
                    if not isinstance(mmc, list):
                        mmc = [mmc]
                    if mmc == cmr_fns:
                        EXPECTED[iw.html_url] = cmr_fns
                        save_expected(EXPECTED)
                        continue

                print('## COMPONENT ...')
                print(component)
                print('## EXPECTED ...')
                pprint(expected_fns)
                print('## RESULT ...')
                pprint(cmr_fns)
                print('## STRATEGIES ..')
                pprint(CM.strategy)
                pprint(CM.strategies)

                print('--------------------------------')
                res = input('Is the result correct? (y/n/s/d): ')
                if res.lower() in ['y', 'yes']:
                    MATCH_MAP[component] = cmr_fns
                    EXPECTED[iw.html_url] = cmr_fns
                    save_expected(EXPECTED)
                    continue
                elif res.lower() in ['s', 'skip']:
                    SKIP.append(hurl)
                    save_skip(SKIP)
                    continue
                elif res.lower() in ['d', 'debug']:
                    import epdb
                    epdb.st()

                ERRORS.append(iw.html_url)
                ERRORS_COMPONENTS.append({
                    'url': iw.html_url,
                    'component': component,
                    'component_raw': iw.template_data.get('component_raw'),
                    'result': cmr_fns,
                    'expected': expected_fns,
                    'strategy': CM.strategy,
                    'strategies': CM.strategies,
                })

            else:

                if component not in MATCH_MAP:
                    MATCH_MAP[component] = cmr_fns
                    save_match_map(MATCH_MAP)

                if hurl not in EXPECTED:
                    EXPECTED[hurl] = cmr_fns
                    save_expected(EXPECTED)

            continue

    pprint(ERRORS)
    fn = os.path.join(FIXTUREDIR, 'component_errors.json')
    with open(fn, 'w') as f:
        f.write(json.dumps(ERRORS_COMPONENTS, indent=2, sort_keys=True))

    clean_metafiles(METAFILES)
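main() above depends on several small persistence helpers (load_match_map, save_match_map, load_expected, save_expected, load_skip, save_skip) that are not shown. A minimal sketch of the match-map pair, assuming a plain JSON file keyed by component string; the path is hypothetical:

import json
import os

MATCH_MAP_FILE = '/tmp/match_map.json'  # assumed location


def load_match_map():
    """Hypothetical sketch: return the cached component -> filenames map."""
    if not os.path.isfile(MATCH_MAP_FILE):
        return {}
    with open(MATCH_MAP_FILE) as f:
        return json.load(f)


def save_match_map(match_map):
    """Hypothetical sketch: persist the map so interactive answers survive restarts."""
    with open(MATCH_MAP_FILE, 'w') as f:
        f.write(json.dumps(match_map, indent=2, sort_keys=True))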
Example #4
def get_shipit_facts(issuewrapper,
                     meta,
                     module_indexer,
                     core_team=[],
                     botnames=[]):
    # shipit/+1/LGTM in comment.body from maintainer

    # AUTOMERGE
    # * New module, existing namespace: require a "shipit" from some
    #   other maintainer in the namespace. (Ideally, identify a maintainer
    #   for the entire namespace.)
    # * New module, new namespace: require discussion with the creator
    #   of the namespace, which will likely be a vendor.
    # * And all new modules, of course, go in as "preview" mode.

    iw = issuewrapper
    nmeta = {
        'shipit': False,
        'owner_pr': False,
        'shipit_ansible': False,
        'shipit_community': False,
        'shipit_count_community': False,
        'shipit_count_maintainer': False,
        'shipit_count_ansible': False,
        'shipit_actors': None,
        'community_usernames': [],
        'notify_community_shipit': False,
    }

    if not iw.is_pullrequest():
        return nmeta
    if not meta['module_match']:
        return nmeta

    maintainers = meta['module_match']['maintainers']
    maintainers = \
        ModuleIndexer.replace_ansible(
            maintainers,
            core_team,
            bots=botnames
        )

    if not meta['is_new_module'] and iw.submitter in maintainers:
        nmeta['owner_pr'] = True

    # community is the other maintainers in the same namespace
    mnamespace = meta['module_match']['namespace']
    community = \
        module_indexer.get_maintainers_for_namespace(mnamespace)
    community = [
        x for x in community
        if x != 'ansible' and x not in core_team and x != 'DEPRECATED'
    ]

    # shipit tallies
    ansible_shipits = 0
    maintainer_shipits = 0
    community_shipits = 0
    shipit_actors = []

    for event in iw.history.history:

        if event['event'] not in ['commented', 'committed']:
            continue
        if event['actor'] in botnames:
            continue

        # commits reset the counters
        if event['event'] == 'committed':
            ansible_shipits = 0
            maintainer_shipits = 0
            community_shipits = 0
            shipit_actors = []
            continue

        actor = event['actor']
        body = event['body']

        # ansible shipits
        if actor in core_team:
            if 'shipit' in body or '+1' in body or 'LGTM' in body:
                logging.info('%s shipit' % actor)
                if actor not in shipit_actors:
                    ansible_shipits += 1
                    shipit_actors.append(actor)
                continue

        # maintainer shipits
        if actor in maintainers:
            if 'shipit' in body or '+1' in body or 'LGTM' in body:
                logging.info('%s shipit' % actor)
                if actor not in shipit_actors:
                    maintainer_shipits += 1
                    shipit_actors.append(actor)
                continue

        # community shipits
        if actor in community:
            if 'shipit' in body or '+1' in body or 'LGTM' in body:
                logging.info('%s shipit' % actor)
                if actor not in shipit_actors:
                    community_shipits += 1
                    shipit_actors.append(actor)
                continue

    # submitters should count if they are maintainers/community
    if iw.submitter in maintainers:
        if iw.submitter not in shipit_actors:
            maintainer_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in community:
        if iw.submitter not in shipit_actors:
            community_shipits += 1
            shipit_actors.append(iw.submitter)

    nmeta['shipit_count_community'] = community_shipits
    nmeta['shipit_count_maintainer'] = maintainer_shipits
    nmeta['shipit_count_ansible'] = ansible_shipits
    nmeta['shipit_actors'] = shipit_actors
    nmeta['community_usernames'] = sorted(community)

    if (community_shipits + maintainer_shipits + ansible_shipits) > 1:
        nmeta['shipit'] = True
    elif meta['is_new_module'] or \
            (len(maintainers) == 1 and maintainer_shipits == 1):
        if community:
            bpc = iw.history.get_boilerplate_comments()
            if 'community_shipit_notify' not in bpc:
                nmeta['notify_community_shipit'] = True

    logging.info('total shipits: %s' %
                 (community_shipits + maintainer_shipits + ansible_shipits))

    return nmeta
Example #5
def get_shipit_facts(issuewrapper, inmeta, module_indexer, core_team=[], botnames=[]):
    """ Count shipits by maintainers/community/other """

    # supershipit - maintainers with isolated commit access
    # maintainers - people who maintain this file/module
    # community - people who maintain file(s) in the same directory
    # other - anyone else who comments with shipit/+1/LGTM

    meta = inmeta.copy()
    iw = issuewrapper
    nmeta = {
        u'shipit': False,
        u'supershipit': False,
        u'owner_pr': False,
        u'shipit_ansible': False,
        u'shipit_community': False,
        u'shipit_count_other': False,
        u'shipit_count_community': False,
        u'shipit_count_maintainer': False,
        u'shipit_count_ansible': False,
        u'shipit_count_vtotal': False,
        u'shipit_actors': None,
        u'supershipit_actors': None,
        u'community_usernames': [],
        u'notify_community_shipit': False,
        u'is_rebuild_merge': False,
    }

    if not iw.is_pullrequest():
        return nmeta

    # https://github.com/ansible/ansibullbot/issues/1147
    meta[u'component_matches'] = [
        x for x in meta.get(u'component_matches', [])
        if not x[u'repo_filename'].startswith(u'changelogs/fragments/')
    ]

    module_utils_files_owned = 0  # module_utils files for which submitter is maintainer
    if meta[u'is_module_util']:
        for f in iw.files:
            if f.startswith(u'lib/ansible/module_utils') and f in module_indexer.botmeta[u'files']:
                maintainers = module_indexer.botmeta[u'files'][f].get(u'maintainers', [])
                if maintainers and (iw.submitter in maintainers):
                    module_utils_files_owned += 1

    modules_files_owned = 0
    if not meta[u'is_new_module']:
        for f in iw.files:
            if f.startswith(u'lib/ansible/modules') and iw.submitter in meta[u'component_maintainers']:
                modules_files_owned += 1
    nmeta[u'owner_pr'] = modules_files_owned + module_utils_files_owned == len(iw.files)

    # https://github.com/ansible/ansibullbot/issues/722
    if iw.wip:
        logging.debug(u'WIP PRs do not get shipits')
        return nmeta

    if meta[u'is_needs_revision'] or meta[u'is_needs_rebase']:
        logging.debug(u'PRs with needs_revision or needs_rebase label do not get shipits')
        return nmeta

    supershipiteers_byfile = {}
    supershipiteers_byuser = {}
    for cm in meta.get('component_matches', []):
        _ss = cm.get(u'supershipit', [])
        supershipiteers_byfile[cm[u'repo_filename']] = _ss[:]
        for ss in _ss:
            if ss not in supershipiteers_byuser:
                supershipiteers_byuser[ss] = []
            supershipiteers_byuser[ss].append(cm[u'repo_filename'])

    maintainers = meta.get(u'component_maintainers', [])
    maintainers = \
        ModuleIndexer.replace_ansible(
            maintainers,
            core_team,
            bots=botnames
        )

    # community is the other maintainers in the same namespace
    community = meta.get(u'component_namespace_maintainers', [])
    community = [x for x in community if x != u'ansible' and
                 x not in core_team and
                 x != u'DEPRECATED']

    # shipit tallies
    ansible_shipits = 0
    maintainer_shipits = 0
    community_shipits = 0
    other_shipits = 0
    shipit_actors = []
    shipit_actors_other = []
    supershipiteers_voted = set()
    rebuild_merge = False

    for event in iw.history.history:

        if event[u'event'] not in [u'commented', u'committed', u'review_approved', u'review_comment']:
            continue
        if event[u'actor'] in botnames:
            continue

        # commits reset the counters
        if event[u'event'] == u'committed':
            logging.info(event)
            ansible_shipits = 0
            maintainer_shipits = 0
            community_shipits = 0
            other_shipits = 0
            shipit_actors = []
            shipit_actors_other = []
            supershipiteers_voted = set()
            rebuild_merge = False
            logging.info('commit detected, resetting shipit tallies')
            continue

        actor = event[u'actor']
        body = event.get(u'body', u'')
        body = body.strip()

        if not is_approval(body):
            continue

        if actor in core_team and is_rebuild_merge(body):
            rebuild_merge = True
            logging.info(u'%s shipit [rebuild_merge]' % actor)
        else:
            logging.info(u'%s shipit' % actor)

        # super shipits
        if actor in supershipiteers_byuser:
            supershipiteers_voted.add(actor)

        # ansible shipits
        if actor in core_team:
            if actor not in shipit_actors:
                ansible_shipits += 1
                shipit_actors.append(actor)
            continue

        # maintainer shipits
        if actor in maintainers:
            if actor not in shipit_actors:
                maintainer_shipits += 1
                shipit_actors.append(actor)
            continue

        # community shipits
        if actor in community:
            if actor not in shipit_actors:
                community_shipits += 1
                shipit_actors.append(actor)
            continue

        # other shipits
        if actor not in shipit_actors_other:
            other_shipits += 1
            shipit_actors_other.append(actor)

        continue

    # submitters should count if they are core team/maintainers/community
    if iw.submitter in core_team:
        if iw.submitter not in shipit_actors:
            ansible_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in maintainers:
        if iw.submitter not in shipit_actors:
            maintainer_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in community:
        if iw.submitter not in shipit_actors:
            community_shipits += 1
            shipit_actors.append(iw.submitter)

    nmeta[u'shipit_count_other'] = other_shipits
    nmeta[u'shipit_count_community'] = community_shipits
    nmeta[u'shipit_count_maintainer'] = maintainer_shipits
    nmeta[u'shipit_count_ansible'] = ansible_shipits
    nmeta[u'shipit_actors'] = shipit_actors
    nmeta[u'shipit_actors_other'] = shipit_actors_other
    nmeta[u'community_usernames'] = sorted(community)

    total = community_shipits + maintainer_shipits + ansible_shipits
    nmeta[u'shipit_count_vtotal'] = total + other_shipits

    if rebuild_merge:
        nmeta['is_rebuild_merge'] = True

    # include shipits from other people to push over the edge
    if total == 1 and other_shipits > 2:
        total += other_shipits

    if total > 1 or rebuild_merge:
        nmeta[u'shipit'] = True
    elif meta[u'is_new_module'] or \
            (len(maintainers) == 1 and maintainer_shipits == 1):
        # don't notify if there is no maintainer or if submitter is the only namespace maintainer
        if set(community) - {iw.submitter}:
            bpc = iw.history.get_boilerplate_comments()
            bpc = [x[0] for x in bpc]
            if u'community_shipit_notify' not in bpc:
                nmeta[u'notify_community_shipit'] = True

    logging.info(u'total shipits: %s' % total)

    # supershipit ...
    #   if a supershipiteer for each file exists and has blessed the PR
    #   on the current commit, then override all shipit tallies and get this PR merged
    if supershipiteers_voted:
        nmeta[u'supershipit_actors'] = list(supershipiteers_voted)
        cm_files = [x[u'repo_filename'] for x in meta[u'component_matches']]
        ss_files = set()
        for ssv in supershipiteers_voted:
            for fn in supershipiteers_byuser[ssv]:
                ss_files.add(fn)

        if sorted(set(cm_files)) == sorted(set(ss_files)):
            logging.info(u'supershipit enabled on %s' % iw.html_url)
            nmeta[u'supershipit'] = True
            nmeta[u'shipit'] = True
        else:
            for cm_file in sorted(cm_files):
                if cm_file not in ss_files:
                    logging.info('%s is not governed by supershipit' % cm_file)

    return nmeta
Example #6
def main():
    pprint(sys.argv)
    dest = sys.argv[1]
    print('dest: %s' % dest)

    # get_valid_labels('ansible/ansible')
    # /home/jtanner/.ansibullbot/cache/ansible/ansible/labels.pickle

    with open(os.path.expanduser('~/.ansibullbot/cache/ansible/ansible/labels.pickle'), 'rb') as f:
        labels = pickle.load(f)
    valid_labels = [x.name for x in labels[1]]

    FILEMAP_FILENAME = 'FILEMAP.json'
    COMPONENTMAP_FILENAME = 'COMPONENTMAP.json'
    FI = FileIndexer(
        checkoutdir=os.path.expanduser(
            '~/.ansibullbot/cache/ansible.files.checkout'
        ),
        cmap=COMPONENTMAP_FILENAME,
    )

    module_cache_file = '/tmp/mi-modules.json'
    if not os.path.isfile(module_cache_file):
        module_maintainers = get_maintainers_mapping()
        MI = ModuleIndexer(maintainers=module_maintainers)
        MI.get_ansible_modules()
        with open(module_cache_file, 'wb') as f:
            f.write(json.dumps(MI.modules, sort_keys=True, indent=2))
        modules = MI.modules
    else:
        with open(module_cache_file, 'rb') as f:
            modules = json.loads(f.read())

    macro_teams = {
        'Qalthos,gundalow,privateip': 'openswitch',
        'Qalthos,ganeshrn,gundalow,privateip,rcarrillocruz,trishnaguha': 'networking',
        'GGabriele,jedelman8,mikewiebe,privateip,rahushen,rcarrillocruz,trishnaguha': 'nxos',
        'emonty,j2sol,juliakreger,rcarrillocruz,shrews,thingee': 'openstack',
        'chrishoffman,manuel-sousa,romanek-adam': 'rabbitmq',
        'alikins,barnabycourt,flossware,vritant': 'rhn',
        'Qalthos,amitsi,gundalow,privateip': 'netvisor',
        'haroldwongms,nitzmahone,tstringer': 'azure',
        'dagwieers,jborean93,jhawkesworth': 'windows',
        'dagwieers,dav1x,jctanner': 'vmware',
        'isharacomix,jrrivers,privateip': 'cumulus',
        'chiradeep,giorgos-nikolopoulos': 'netscaler',
        'ericsysmin,grastogi23,khaltore': 'avi',
        'ghjm,jlaska,matburt,wwitzel3': 'tower',
        'hulquest,lmprice,timuster': 'netapp',
    }

    usermap = {
        'mpdehaan': False
    }
    namemap = {
        'Shrews': 'shrews'
    }
    exclusions = {
        '*': ['chouseknecht', 'Java1Guy', 'franckcuny', 'mhite', 'bennojoy', 'risaacson', 'whenrik'],
        'network/wakeonlan': ['dagwiers'],
    }

    removed = get_removed_maintainers()

    teams = {}
    data = {}
    data['files'] = {}

    # merge the moduleindexer data
    for k,v in modules.items():
        fp = v.get('filepath')
        if not fp or not fp.startswith('lib/ansible'):
            continue
        data['files'][k] = {}
        if v['_maintainers']:
            data['files'][k]['maintainers'] = []
            data['files'][k]['maintainers'] = [x for x in v['_maintainers']]
        if v['authors']:
            if 'maintainers' not in data['files'][k]:
                data['files'][k]['maintainers'] = []
            data['files'][k]['maintainers'] += v['authors']
            data['files'][k]['maintainers'] = sorted(set(data['files'][k]['maintainers']))

        # validate each maintainer exists
        if 'maintainers' in data['files'][k]:
            maintainers = []
            for x in data['files'][k]['maintainers']:

                if x in exclusions['*']:
                    continue

                if x in namemap:
                    x = namemap[x]
                if x in usermap:
                    if usermap[x]:
                        maintainers.append(x)
                else:
                    if x == 'ansible':
                        usermap['ansible'] = True
                        maintainers.append(x)
                        continue
                    res = requests.get('https://github.com/%s' % x)
                    if res.status_code == 200:
                        usermap[x] = True
                        maintainers.append(x)
                    else:
                        usermap[x] = False
            data['files'][k]['maintainers'] = sorted(set(maintainers))
            if not data['files'][k]['maintainers']:
                data['files'][k].pop('maintainers', None)

    # merge the removed people
    for k,v in removed.items():
        k = os.path.join('lib/ansible/modules', k)
        v = sorted(set(v))
        if k in data['files']:
            if 'maintainers' in data['files'][k]:
                for vx in v:
                    if vx in data['files'][k]['maintainers']:
                        data['files'][k]['maintainers'].remove(vx)
                        if 'ignored' not in data['files'][k]:
                            data['files'][k]['ignored'] = []
                        data['files'][k]['ignored'].append(vx)
                if not data['files'][k]['maintainers']:
                    data['files'][k].pop('maintainers', None)
                    #import epdb; epdb.st()

    # merge the fileindexer data
    for k in FI.files:
        #if 'contrib/inventory' in k:
        #    import epdb; epdb.st()
        #print(k)
        try:
            klabels = FI.get_component_labels(valid_labels, [k])
            if klabels:
                klabels = [x for x in klabels if not x.startswith('c:')]
                if not klabels:
                    continue
                if k not in data['files']:
                    data['files'][k] = {}
                if 'labels' not in data['files'][k]:
                    data['files'][k]['labels'] = []
                data['files'][k]['labels'] += klabels
        except UnicodeDecodeError:
            continue

        keywords = FI.get_keywords_for_file(k)
        if keywords:
            if k not in data['files']:
                data['files'][k] = {}
            if 'keywords' not in data['files'][k]:
                data['files'][k]['keywords'] = []
            data['files'][k]['keywords'] += keywords
            #import epdb; epdb.st()

    '''
    # calculate all teams
    for k,v in data['files'].items():
        if not v.get('maintainers'):
            continue
        maintainers = sorted(set(v['maintainers']))
        key = ','.join(maintainers)
        if key not in teams:
            teams[key] = []
        teams[key].append(k)

    # rank and show
    steams = sorted(teams, key=len, reverse=True)
    for x in steams[0:15]:
        if x in macro_teams:
            continue
        pprint(teams[x])
        print(x)
        import epdb; epdb.st()
    import epdb; epdb.st()
    '''

    for k,v in data['files'].items():
        if not v.get('maintainers'):
            continue
        maintainers = v.get('maintainers')
        for idx,x in enumerate(maintainers):
            if x == 'ansible':
                maintainers[idx] = '$team_ansible'
        if maintainers == ['$team_ansible']:
            data['files'][k]['maintainers'] = ' '.join(maintainers)
            continue
        if len(maintainers) == 1:
            data['files'][k]['maintainers'] = ' '.join(maintainers)
            continue
        mkey = ','.join(sorted(set(maintainers)))
        if mkey in macro_teams:
            maintainers = ['$team_%s' % macro_teams[mkey]]
            data['files'][k]['maintainers'] = ' '.join(maintainers)
        else:
            # partial matching
            match = None
            subnames = sorted(set(maintainers))
            for sn in subnames:
                filtered = [x for x in subnames if x != sn]
                fkey = ','.join(filtered)
                if fkey in macro_teams:
                    match = fkey
            if match:
                to_clear = match.split(',')
                maintainers = [x for x in maintainers if x not in to_clear]
                data['files'][k]['maintainers'] = ' '.join(maintainers)

    # fix deprecations
    safe_names = [x for x in FI.files if all(c in string.printable for c in x)]
    remove = []
    for k,v in data['files'].items():
        maintainers = v.get('maintainers')
        if maintainers:
            if 'DEPRECATED' in data['files'][k]['maintainers']:
                data['files'][k].pop('maintainers', None)
                data['files'][k]['deprecated'] = True
        bn = os.path.basename(k)
        if bn.startswith('_') and bn != '__init__.py' and '/modules/' in k:
            '''
            data['files'][k]['deprecated'] = True
            if 'maintainers' in data['files'][k]:
                data['files'][k].pop('maintainers', None)
            '''
            remove.append(k)

        # get rid of files no longer in the repo
        if k not in safe_names:
            remove.append(k)

    for x in remove:
        data['files'].pop(x, None)


    # remove any keys where maintainers == authors
    remove = []
    for k,v in data['files'].items():
        if v.keys() != ['maintainers']:
            continue
        if v['maintainers'] != modules[k]['authors']:
            continue
        remove.append(k)
    for x in remove:
        data['files'].pop(x, None)

    #####################################
    # add special notifies
    #####################################
    data['files']['lib/ansible/modules/cloud/amazon/'] = {
        'notify': ['willthames']
    }

    #####################################
    # reduce to namespace maintainers
    #####################################
    groups = {}
    for k,v in data['files'].items():
        dn = os.path.dirname(k)
        if dn not in groups:
            groups[dn] = {
                'matches': [],
                'values': []
            }
        groups[dn]['matches'].append(k)
        if v not in groups[dn]['values']:
            groups[dn]['values'].append(v)
    for k,v in groups.items():
        if not len(v['values']) == 1:
            continue
        if len(v['matches']) == 1:
            continue
        #print(k)
        #pprint(v)

        newk = k + '/'
        data['files'][newk] = v['values'][0]
        for pf in v['matches']:
            data['files'].pop(pf, None)

        if newk in removed:
            import epdb; epdb.st()


    #####################################
    # make a sorted dict
    #####################################

    files = data['files']
    data['files'] = OrderedDict()
    fkeys = sorted(files.keys())
    fkeys = [x.replace('lib/ansible/modules', '$modules') for x in fkeys]
    fkeys = sorted(set(fkeys))
    for fkey in fkeys:
        if fkey.startswith('$modules'):
            mkey = fkey.replace('$modules', 'lib/ansible/modules')
            data['files'][fkey] = files[mkey]
        else:
            data['files'][fkey] = files[fkey]

    data['macros'] = OrderedDict()
    data['macros']['modules'] = 'lib/ansible/modules'
    macro_items = macro_teams.items()
    macro_items = [[x[1], x[0]] for x in macro_items]
    macro_dict = {}
    for x in macro_items:
        macro_dict[x[0]] = x[1]

    data['macros']['team_ansible'] = []
    keys = macro_dict.keys()
    for k in sorted(keys):
        team = macro_dict[k]
        team = team.split(',')
        if len(team) < 10:
            team = " ".join(team)
        data['macros']['team_%s' % k] = team

    # if maintainers is the only subkey, make the primary value a string
    for k,v in data['files'].items():
        keys = v.keys()
        if keys == ['maintainers']:
            if isinstance(v['maintainers'], list):
                data['files'][k] = " ".join(v['maintainers'])
            else:
                data['files'][k] = v['maintainers']
        for xk in ['ignored', 'notified', 'maintainers']:
            if xk in data['files'][k]:
                if not isinstance(data['files'][k][xk], (str, unicode)):
                    data['files'][k][xk] = " ".join(data['files'][k][xk])


    # write it once with ryaml to make it ordered
    ryaml = rYAML()
    (fo, fn) = tempfile.mkstemp()
    with open(fn, 'wb') as f:
        ryaml.dump(data, f)

    # read it back in
    with open(fn, 'rb') as f:
        ylines = f.readlines()

    phase = None
    for idx,x in enumerate(ylines):
        x = x.rstrip()
        x = x.replace('!!omap', '')
        if x.endswith(' {}'):
            x = x.replace(' {}', '')
        if x.startswith('-'):
            x = x.replace('-', ' ', 1)
        ylines[idx] = x


        if x.startswith(' ') and ':' not in x and '-' not in x:
            ylines[idx-1] += ' ' + x.strip()
            ylines[idx] = ''

    ylines = [x for x in ylines if x.strip()]
    ylines = [HEADER] + ylines

    with open(dest, 'wb') as f:
        f.write('\n'.join(ylines))
Example #7
def indexer(m_update, m_authors, m_modules, m_content):
    module_indexer = ModuleIndexer()
    module_indexer.parse_metadata()
    module_indexer.set_maintainers()
    return module_indexer
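This reads like the body of a pytest fixture (the m_* arguments are presumably mock fixtures that patch out network and checkout access). A hedged sketch of how a test might consume it:

def test_indexer_builds_modules(indexer):
    # illustrative assertion only; assumes the function above is registered
    # with @pytest.fixture and that ModuleIndexer exposes a modules mapping
    assert indexer.modules is not None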
Example #8
def get_shipit_facts(issuewrapper, meta, module_indexer, core_team=[], botnames=[]):
    """ Count shipits by maintainers/community/other """

    # maintainers - people who maintain this file/module
    # community - people who maintain file(s) in the same directory
    # other - anyone else who comments with shipit/+1/LGTM

    iw = issuewrapper
    nmeta = {
        u'shipit': False,
        u'owner_pr': False,
        u'shipit_ansible': False,
        u'shipit_community': False,
        u'shipit_count_other': False,
        u'shipit_count_community': False,
        u'shipit_count_maintainer': False,
        u'shipit_count_ansible': False,
        u'shipit_count_vtotal': False,
        u'shipit_actors': None,
        u'community_usernames': [],
        u'notify_community_shipit': False,
    }

    if not iw.is_pullrequest():
        return nmeta

    module_utils_files_owned = 0  # module_utils files for which submitter is maintainer
    if meta[u'is_module_util']:
        for f in iw.files:
            if f.startswith(u'lib/ansible/module_utils') and f in module_indexer.botmeta[u'files']:
                maintainers = module_indexer.botmeta[u'files'][f].get(u'maintainers', [])
                if maintainers and (iw.submitter in maintainers):
                    module_utils_files_owned += 1

    modules_files_owned = 0
    if not meta[u'is_new_module']:
        for f in iw.files:
            if f.startswith(u'lib/ansible/modules') and iw.submitter in meta[u'component_maintainers']:
                modules_files_owned += 1
    nmeta[u'owner_pr'] = modules_files_owned + module_utils_files_owned == len(iw.files)

    #if not meta['module_match']:
    #    return nmeta

    # https://github.com/ansible/ansibullbot/issues/722
    if iw.wip:
        logging.debug(u'WIP PRs do not get shipits')
        return nmeta

    if meta[u'is_needs_revision'] or meta[u'is_needs_rebase']:
        logging.debug(u'PRs with needs_revision or needs_rebase label do not get shipits')
        return nmeta

    maintainers = meta.get(u'component_maintainers', [])
    maintainers = \
        ModuleIndexer.replace_ansible(
            maintainers,
            core_team,
            bots=botnames
        )

    # community is the other maintainers in the same namespace
    community = meta.get(u'component_namespace_maintainers', [])
    community = [x for x in community if x != u'ansible' and
                 x not in core_team and
                 x != u'DEPRECATED']

    # shipit tallies
    ansible_shipits = 0
    maintainer_shipits = 0
    community_shipits = 0
    other_shipits = 0
    shipit_actors = []
    shipit_actors_other = []

    for event in iw.history.history:

        if event[u'event'] not in [u'commented', u'committed', u'review_approved', u'review_comment']:
            continue
        if event[u'actor'] in botnames:
            continue

        # commits reset the counters
        if event[u'event'] == u'committed':
            ansible_shipits = 0
            maintainer_shipits = 0
            community_shipits = 0
            other_shipits = 0
            shipit_actors = []
            shipit_actors_other = []
            continue

        actor = event[u'actor']
        body = event.get(u'body', u'')
        body = body.strip()
        if not is_approval(body):
            continue
        logging.info(u'%s shipit' % actor)

        # ansible shipits
        if actor in core_team:
            if actor not in shipit_actors:
                ansible_shipits += 1
                shipit_actors.append(actor)
            continue

        # maintainer shipits
        if actor in maintainers:
            if actor not in shipit_actors:
                maintainer_shipits += 1
                shipit_actors.append(actor)
            continue

        # community shipits
        if actor in community:
            if actor not in shipit_actors:
                community_shipits += 1
                shipit_actors.append(actor)
            continue

        # other shipits
        if actor not in shipit_actors_other:
            other_shipits += 1
            shipit_actors_other.append(actor)
        continue

    # submitters should count if they are core team/maintainers/community
    if iw.submitter in core_team:
        if iw.submitter not in shipit_actors:
            ansible_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in maintainers:
        if iw.submitter not in shipit_actors:
            maintainer_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in community:
        if iw.submitter not in shipit_actors:
            community_shipits += 1
            shipit_actors.append(iw.submitter)

    nmeta[u'shipit_count_other'] = other_shipits
    nmeta[u'shipit_count_community'] = community_shipits
    nmeta[u'shipit_count_maintainer'] = maintainer_shipits
    nmeta[u'shipit_count_ansible'] = ansible_shipits
    nmeta[u'shipit_actors'] = shipit_actors
    nmeta[u'shipit_actors_other'] = shipit_actors_other
    nmeta[u'community_usernames'] = sorted(community)

    total = community_shipits + maintainer_shipits + ansible_shipits
    nmeta[u'shipit_count_vtotal'] = total + other_shipits

    # include shipits from other people to push over the edge
    if total == 1 and other_shipits > 2:
        total += other_shipits

    if total > 1:
        nmeta[u'shipit'] = True
    elif meta[u'is_new_module'] or \
            (len(maintainers) == 1 and maintainer_shipits == 1):
        # don't notify if there is no maintainer or if submitter is the only namespace maintainer
        if set(community) - {iw.submitter}:
            bpc = iw.history.get_boilerplate_comments()
            bpc = [x[0] for x in bpc]
            if u'community_shipit_notify' not in bpc:
                nmeta[u'notify_community_shipit'] = True

    logging.info(u'total shipits: %s' % total)

    return nmeta
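This version gates shipit comments through an is_approval() helper that is not included in the excerpt. A plausible stand-in, based on the literal substring checks that Example #10 below applies inline (the real ansibullbot helper may be stricter, e.g. matching whole words only):

# hedged reconstruction of the elided is_approval(); an assumption, not the
# verbatim ansibullbot implementation
def is_approval(body):
    body = body.strip()
    return (u'shipit' in body) or (u'+1' in body) or (u'LGTM' in body)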
def main():
    pprint(sys.argv)
    dest = sys.argv[1]
    print('dest: %s' % dest)

    # get_valid_labels('ansible/ansible')
    # /home/jtanner/.ansibullbot/cache/ansible/ansible/labels.pickle

    with open(os.path.expanduser('~/.ansibullbot/cache/ansible/ansible/labels.pickle'), 'rb') as f:
        labels = pickle_load(f)
    valid_labels = [x.name for x in labels[1]]

    FILEMAP_FILENAME = 'FILEMAP.json'
    COMPONENTMAP_FILENAME = 'COMPONENTMAP.json'
    FI = FileIndexer(
        checkoutdir=os.path.expanduser(
            '~/.ansibullbot/cache/ansible.files.checkout'
        ),
        cmap=COMPONENTMAP_FILENAME,
    )

    module_cache_file = '/tmp/mi-modules.json'
    if not os.path.isfile(module_cache_file):
        module_maintainers = get_maintainers_mapping()
        MI = ModuleIndexer(maintainers=module_maintainers)
        MI.get_ansible_modules()
        with open(module_cache_file, 'wb') as f:
            f.write(json.dumps(MI.modules, sort_keys=True, indent=2))
        modules = MI.modules
    else:
        with open(module_cache_file, 'rb') as f:
            modules = json.loads(f.read())

    macro_teams = {
        'Qalthos,gundalow,privateip': 'openswitch',
        'Qalthos,ganeshrn,gundalow,privateip,rcarrillocruz,trishnaguha': 'networking',
        'GGabriele,jedelman8,mikewiebe,privateip,rahushen,rcarrillocruz,trishnaguha': 'nxos',
        'emonty,j2sol,juliakreger,rcarrillocruz,shrews,thingee': 'openstack',
        'chrishoffman,manuel-sousa,romanek-adam': 'rabbitmq',
        'alikins,barnabycourt,flossware,vritant': 'rhn',
        'Qalthos,amitsi,gundalow,privateip': 'netvisor',
        'haroldwongms,nitzmahone,tstringer': 'azure',
        'dagwieers,jborean93,jhawkesworth': 'windows',
        'dagwieers,dav1x,jctanner': 'vmware',
        'isharacomix,jrrivers,privateip': 'cumulus',
        'chiradeep,giorgos-nikolopoulos': 'netscaler',
        'ericsysmin,grastogi23,khaltore': 'avi',
        'ghjm,jlaska,matburt,wwitzel3': 'tower',
        'hulquest,lmprice,timuster': 'netapp',
    }

    usermap = {
        'mpdehaan': False
    }
    namemap = {
        'Shrews': 'shrews'
    }
    exclusions = {
        '*': ['chouseknecht', 'Java1Guy', 'franckcuny', 'mhite', 'bennojoy', 'risaacson', 'whenrik'],
        'network/wakeonlan': ['dagwiers'],
    }

    removed = get_removed_maintainers()

    teams = {}
    data = {}
    data['files'] = {}

    # merge the moduleindexer data
    for k,v in modules.items():
        fp = v.get('filepath')
        if not fp or not fp.startswith('lib/ansible'):
            continue
        data['files'][k] = {}
        if v['_maintainers']:
            data['files'][k]['maintainers'] = []
            data['files'][k]['maintainers'] = [x for x in v['_maintainers']]
        if v['authors']:
            if 'maintainers' not in data['files'][k]:
                data['files'][k]['maintainers'] = []
            data['files'][k]['maintainers'] += v['authors']
            data['files'][k]['maintainers'] = sorted(set(data['files'][k]['maintainers']))

        # validate each maintainer exists
        if 'maintainers' in data['files'][k]:
            maintainers = []
            for x in data['files'][k]['maintainers']:

                if x in exclusions['*']:
                    continue

                if x in namemap:
                    x = namemap[x]
                if x in usermap:
                    if usermap[x]:
                        maintainers.append(x)
                else:
                    if x == 'ansible':
                        usermap['ansible'] = True
                        maintainers.append(x)
                        continue
                    res = requests.get('https://github.com/%s' % x)
                    if res.status_code == 200:
                        usermap[x] = True
                        maintainers.append(x)
                    else:
                        usermap[x] = False
            data['files'][k]['maintainers'] = sorted(set(maintainers))
            if not data['files'][k]['maintainers']:
                data['files'][k].pop('maintainers', None)

    # merge the removed people
    for k,v in removed.items():
        k = os.path.join('lib/ansible/modules', k)
        v = sorted(set(v))
        if k in data['files']:
            if 'maintainers' in data['files'][k]:
                for vx in v:
                    if vx in data['files'][k]['maintainers']:
                        data['files'][k]['maintainers'].remove(vx)
                        if 'ignored' not in data['files'][k]:
                            data['files'][k]['ignored'] = []
                        data['files'][k]['ignored'].append(vx)
                if not data['files'][k]['maintainers']:
                    data['files'][k].pop('maintainers', None)
                    #import epdb; epdb.st()

    # merge the fileindexer data
    for k in FI.files:
        #if 'contrib/inventory' in k:
        #    import epdb; epdb.st()
        #print(k)
        try:
            klabels = FI.get_component_labels(valid_labels, [k])
            if klabels:
                klabels = [x for x in klabels if not x.startswith('c:')]
                if not klabels:
                    continue
                if k not in data['files']:
                    data['files'][k] = {}
                if 'labels' not in data['files'][k]:
                    data['files'][k]['labels'] = []
                data['files'][k]['labels'] += klabels
        except UnicodeDecodeError:
            continue

        keywords = FI.get_keywords_for_file(k)
        if keywords:
            if k not in data['files']:
                data['files'][k] = {}
            if 'keywords' not in data['files'][k]:
                data['files'][k]['keywords'] = []
            data['files'][k]['keywords'] += keywords
            #import epdb; epdb.st()

    '''
    # calculate all teams
    for k,v in data['files'].items():
        if not v.get('maintainers'):
            continue
        maintainers = sorted(set(v['maintainers']))
        key = ','.join(maintainers)
        if key not in teams:
            teams[key] = []
        teams[key].append(k)

    # rank and show
    steams = sorted(teams, key=len, reverse=True)
    for x in steams[0:15]:
        if x in macro_teams:
            continue
        pprint(teams[x])
        print(x)
        import epdb; epdb.st()
    import epdb; epdb.st()
    '''

    for k,v in data['files'].items():
        if not v.get('maintainers'):
            continue
        maintainers = v.get('maintainers')
        for idx,x in enumerate(maintainers):
            if x == 'ansible':
                maintainers[idx] = '$team_ansible'
        if maintainers == ['$team_ansible']:
            data['files'][k]['maintainers'] = ' '.join(maintainers)
            continue
        if len(maintainers) == 1:
            data['files'][k]['maintainers'] = ' '.join(maintainers)
            continue
        mkey = ','.join(sorted(set(maintainers)))
        if mkey in macro_teams:
            maintainers = ['$team_%s' % macro_teams[mkey]]
            data['files'][k]['maintainers'] = ' '.join(maintainers)
        else:
            # partial matching
            match = None
            subnames = sorted(set(maintainers))
            for sn in subnames:
                filtered = [x for x in subnames if x != sn]
                fkey = ','.join(filtered)
                if fkey in macro_teams:
                    match = fkey
            if match:
                to_clear = match.split(',')
                maintainers = [x for x in maintainers if x not in to_clear]
                data['files'][k]['maintainers'] = ' '.join(maintainers)

    # fix deprecations
    safe_names = [x for x in FI.files if all(c in string.printable for c in x)]
    remove = []
    for k,v in data['files'].items():
        maintainers = v.get('maintainers')
        if maintainers:
            if 'DEPRECATED' in data['files'][k]['maintainers']:
                data['files'][k].pop('maintainers', None)
                data['files'][k]['deprecated'] = True
        bn = os.path.basename(k)
        if bn.startswith('_') and bn != '__init__.py' and '/modules/' in k:
            '''
            data['files'][k]['deprecated'] = True
            if 'maintainers' in data['files'][k]:
                data['files'][k].pop('maintainers', None)
            '''
            remove.append(k)

        # get rid of files no longer in the repo
        if k not in safe_names:
            remove.append(k)

    for x in remove:
        data['files'].pop(x, None)


    # remove any keys where maintainers == authors
    remove = []
    for k,v in data['files'].items():
        if v.keys() != ['maintainers']:
            continue
        if v['maintainers'] != modules[k]['authors']:
            continue
        remove.append(k)
    for x in remove:
        data['files'].pop(x, None)

    #####################################
    # add special notifies
    #####################################
    data['files']['lib/ansible/modules/cloud/amazon/'] = {
        'notify': ['willthames']
    }

    #####################################
    # reduce to namespace maintainers
    #####################################
    groups = {}
    for k,v in data['files'].items():
        dn = os.path.dirname(k)
        if dn not in groups:
            groups[dn] = {
                'matches': [],
                'values': []
            }
        groups[dn]['matches'].append(k)
        if v not in groups[dn]['values']:
            groups[dn]['values'].append(v)
    for k,v in groups.items():
        if not len(v['values']) == 1:
            continue
        if len(v['matches']) == 1:
            continue
        #print(k)
        #pprint(v)

        newk = k + '/'
        data['files'][newk] = v['values'][0]
        for pf in v['matches']:
            data['files'].pop(pf, None)

        if newk in removed:
            import epdb; epdb.st()


    #####################################
    # make a sorted dict
    #####################################

    files = data['files']
    data['files'] = OrderedDict()
    fkeys = sorted(files.keys())
    fkeys = [x.replace('lib/ansible/modules', '$modules') for x in fkeys]
    fkeys = sorted(set(fkeys))
    for fkey in fkeys:
        if fkey.startswith('$modules'):
            mkey = fkey.replace('$modules', 'lib/ansible/modules')
            data['files'][fkey] = files[mkey]
        else:
            data['files'][fkey] = files[fkey]

    data['macros'] = OrderedDict()
    data['macros']['modules'] = 'lib/ansible/modules'
    macro_items = macro_teams.items()
    macro_items = [[x[1],x[0]] for x in macro_items]
    macro_dict = {}
    for x in macro_items:
        macro_dict[x[0]] = x[1]

    data['macros']['team_ansible'] = []
    keys = macro_dict.keys()
    for k in sorted(keys):
        team = macro_dict[k]
        team = team.split(',')
        if len(team) < 10:
            team = " ".join(team)
        data['macros']['team_%s' % k] = team

    # if maintainers is the only subkey, make the primary value a string
    for k,v in data['files'].items():
        keys = v.keys()
        if keys == ['maintainers']:
            if isinstance(v['maintainers'], list):
                data['files'][k] = " ".join(v['maintainers'])
            else:
                data['files'][k] = v['maintainers']
        for xk in ['ignored', 'notified', 'maintainers']:
            if xk in data['files'][k]:
                if not isinstance(data['files'][k][xk], (str, unicode)):
                    data['files'][k][xk] = " ".join(data['files'][k][xk])


    # write it once with ryaml to make it ordered
    ryaml = rYAML()
    (fo, fn) = tempfile.mkstemp()
    with open(fn, 'wb') as f:
        ryaml.dump(data, f)

    # read it back in
    with open(fn, 'rb') as f:
        ylines = f.readlines()

    phase = None
    for idx,x in enumerate(ylines):
        x = x.rstrip()
        x = x.replace('!!omap', '')
        if x.endswith(' {}'):
            x = x.replace(' {}', '')
        if x.startswith('-'):
            x = x.replace('-', ' ', 1)
        ylines[idx] = x


        if x.startswith(' ') and ':' not in x and '-' not in x:
            ylines[idx-1] += ' ' + x.strip()
            ylines[idx] = ''

    ylines = [x for x in ylines if x.strip()]
    ylines = [HEADER] + ylines

    with open(dest, 'wb') as f:
        f.write('\n'.join(ylines))
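One step worth calling out in main() is the maintainer-reduction pass: when a file's full maintainer list matches one of the macro_teams rosters it is collapsed to a single $team_* alias, and when a roster is contained within the list only the extra names are kept. A small self-contained sketch of that reduction with made-up logins:

# illustrative only; the real rosters live in macro_teams inside main()
macro_teams = {'alice,bob,carol': 'examplenet'}

def reduce_maintainers(maintainers, macro_teams):
    key = ','.join(sorted(set(maintainers)))
    if key in macro_teams:
        # exact roster match: use the team alias
        return ['$team_%s' % macro_teams[key]]
    # partial match: drop one name at a time and see if the rest is a roster
    for name in sorted(set(maintainers)):
        rest = ','.join(x for x in sorted(set(maintainers)) if x != name)
        if rest in macro_teams:
            team = set(rest.split(','))
            return [x for x in maintainers if x not in team]
    return maintainers

print(reduce_maintainers(['alice', 'bob', 'carol'], macro_teams))          # ['$team_examplenet']
print(reduce_maintainers(['alice', 'bob', 'carol', 'dave'], macro_teams))  # ['dave']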
Example #10
def get_shipit_facts(issuewrapper, meta, module_indexer, core_team=[], botnames=[]):
    """ Count shipits by maintainers/community/other """

    # maintainers - people who maintain this file/module
    # community - people who maintain file(s) in the same directory
    # other - anyone else who comments with shipit/+1/LGTM

    iw = issuewrapper
    nmeta = {
        'shipit': False,
        'owner_pr': False,
        'shipit_ansible': False,
        'shipit_community': False,
        'shipit_count_other': False,
        'shipit_count_community': False,
        'shipit_count_maintainer': False,
        'shipit_count_ansible': False,
        'shipit_actors': None,
        'community_usernames': [],
        'notify_community_shipit': False,
    }

    if not iw.is_pullrequest():
        return nmeta
    if not meta['module_match']:
        return nmeta

    maintainers = meta['module_match']['maintainers']
    maintainers = \
        ModuleIndexer.replace_ansible(
            maintainers,
            core_team,
            bots=botnames
        )

    if not meta['is_new_module'] and iw.submitter in maintainers:
        nmeta['owner_pr'] = True

    # community is the other maintainers in the same namespace
    mnamespace = meta['module_match']['namespace']
    community = \
        module_indexer.get_maintainers_for_namespace(mnamespace)
    community = [x for x in community if x != 'ansible' and
                 x not in core_team and
                 x != 'DEPRECATED']

    # shipit tallies
    ansible_shipits = 0
    maintainer_shipits = 0
    community_shipits = 0
    other_shipits = 0
    shipit_actors = []
    shipit_actors_other = []

    for event in iw.history.history:

        if event['event'] not in ['commented', 'committed']:
            continue
        if event['actor'] in botnames:
            continue

        # commits reset the counters
        if event['event'] == 'committed':
            ansible_shipits = 0
            maintainer_shipits = 0
            community_shipits = 0
            other_shipits = 0
            shipit_actors = []
            shipit_actors_other = []
            continue

        actor = event['actor']
        body = event['body']
        body = body.strip()
        if 'shipit' not in body and '+1' not in body and 'LGTM' not in body:
            continue
        logging.info('%s shipit' % actor)

        # ansible shipits
        if actor in core_team:
            if actor not in shipit_actors:
                ansible_shipits += 1
                shipit_actors.append(actor)
            continue

        # maintainer shipits
        if actor in maintainers:
            if actor not in shipit_actors:
                maintainer_shipits += 1
                shipit_actors.append(actor)
            continue

        # community shipits
        if actor in community:
            if actor not in shipit_actors:
                community_shipits += 1
                shipit_actors.append(actor)
            continue

        # other shipits
        if actor not in shipit_actors_other:
            other_shipits += 1
            shipit_actors_other.append(actor)
        continue

    # submitters should count if they are maintainers/community
    if iw.submitter in maintainers:
        if iw.submitter not in shipit_actors:
            maintainer_shipits += 1
            shipit_actors.append(iw.submitter)
    elif iw.submitter in community:
        if iw.submitter not in shipit_actors:
            community_shipits += 1
            shipit_actors.append(iw.submitter)

    nmeta['shipit_count_other'] = other_shipits
    nmeta['shipit_count_community'] = community_shipits
    nmeta['shipit_count_maintainer'] = maintainer_shipits
    nmeta['shipit_count_ansible'] = ansible_shipits
    nmeta['shipit_actors'] = shipit_actors
    nmeta['shipit_actors_other'] = shipit_actors_other
    nmeta['community_usernames'] = sorted(community)

    total = community_shipits + maintainer_shipits + ansible_shipits

    # include shipits from other people to push over the edge
    if total == 1 and other_shipits > 2:
        total += other_shipits

    if total > 1:
        nmeta['shipit'] = True
    elif meta['is_new_module'] or \
            (len(maintainers) == 1 and maintainer_shipits == 1):
        if community:
            bpc = iw.history.get_boilerplate_comments()
            if 'community_shipit_notify' not in bpc:
                nmeta['notify_community_shipit'] = True

    logging.info('total shipits: %s' % total)

    return nmeta
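Despite their differences, these variants apply the same threshold: a PR ships once core team, maintainer, and community shipits total more than one, and a single qualified shipit can be pushed over the line by three or more shipits from outside contributors. A tiny worked sketch of that rule:

# distilled from the counting logic shared by the examples above
def shipit_reached(ansible_shipits, maintainer_shipits, community_shipits, other_shipits):
    total = community_shipits + maintainer_shipits + ansible_shipits
    # a lone qualified shipit counts the outside shipits as a tie-breaker
    if total == 1 and other_shipits > 2:
        total += other_shipits
    return total > 1

print(shipit_reached(0, 1, 1, 0))  # True: two qualified shipits
print(shipit_reached(0, 1, 0, 3))  # True: one qualified shipit plus enough outside support
print(shipit_reached(0, 1, 0, 2))  # False: not enough outside support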