Example #1
def get_sha_from_commit_position(commit_position):
    # Resolve a Chromium commit position to a full 40-character commit SHA
    # via `git crrev-parse`; returns None if the lookup fails.
    args = ['crrev-parse', commit_position]
    sha = chromium_git(args)
    if len(sha) == 40:
        return sha
    else:
        return None
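
The helper above relies on a chromium_git function that is not shown on this page. A minimal sketch of what it might look like, assuming it is a thin subprocess wrapper that runs git inside a Chromium checkout and returns the stripped stdout (the len(sha) == 40 check only holds if the trailing newline is removed); the CHROMIUM_SRC path is hypothetical:

import subprocess

# Hypothetical location of the Chromium checkout; adjust for your environment.
CHROMIUM_SRC = '/path/to/chromium/src'

def chromium_git(args):
    # Run `git <args>` inside the Chromium checkout and return stdout as text,
    # with surrounding whitespace stripped.
    command = ['git', '-C', CHROMIUM_SRC] + args
    return subprocess.check_output(command).decode('utf-8').strip()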
Example #2
def get_sha_from_change_id(change_id):
    # Find the commit on origin/master whose Gerrit Change-Id footer matches
    # change_id; returns None if no single matching commit is found.
    grep = '^Change-Id: ' + change_id + '$'
    args = ['log', 'origin/master', '--format=%H', '-1', '--grep=%s' % grep]
    sha = chromium_git(args)
    if len(sha) == 40:
        return sha
    else:
        return None
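
Gerrit Change-Ids start with 'I', so callers can choose between the two helpers based on the kind of identifier they have (Example #3 below does exactly this). A small dispatch sketch; the identifiers in the comments are made up:

def resolve_chromium_sha(identifier):
    # Change-Ids begin with 'I'; anything else is treated as a commit position.
    if identifier.startswith('I'):
        return get_sha_from_change_id(identifier)
    return get_sha_from_commit_position(identifier)

# resolve_chromium_sha('I0123456789abcdef0123456789abcdef01234567')
# resolve_chromium_sha('123456')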
Example #3
def get_latencies(prs):
    # For each exported PR, compute the delay (in minutes) between the Chromium
    # commit time and the time the corresponding WPT pull request was merged,
    # reusing any entries already recorded in LATENCIES_CSV.
    try:
        latencies = ExportLatencyDB(LATENCIES_CSV)
        latencies.read()
        print('Read', len(latencies), 'latency entries from', LATENCIES_CSV)
        print('Processing new PRs')
    except (IOError, AssertionError):
        # No usable CSV yet; start from an empty database.
        latencies = ExportLatencyDB(LATENCIES_CSV)

    skipped = []
    total_prs = len(prs)
    for index, pr in enumerate(prs):
        pr_number = pr['PR']
        print('[{}/{}] PR: https://github.com/web-platform-tests/wpt/pull/{}'.
              format(index + 1, total_prs, pr_number))
        if latencies.get(pr_number):
            continue

        merged_at = dateutil.parser.parse(pr['merged_at'])
        chromium_commit = pr['chromium_commit']
        if chromium_commit.startswith('I'):
            sha = get_sha_from_change_id(chromium_commit)
        else:
            sha = get_sha_from_commit_position(chromium_commit)

        if sha is None:
            print('Unable to find commit. SKIPPING!')
            skipped.append(pr_number)
            continue

        commit_time_str = chromium_git(['show', '-s', '--format=%cI',
                                        sha]).strip()
        commit_time = dateutil.parser.parse(commit_time_str)
        delay = (merged_at - commit_time).total_seconds() / 60

        print('Found Chromium commit {} committed at {}'.format(
            sha, commit_time_str))
        print('Export PR merged at {}'.format(merged_at))
        print('Delay (mins):', delay)
        if delay < 0:
            print('Negative delay. SKIPPING!')
            skipped.append(pr_number)
            continue
        latencies.add({
            'PR': str(pr_number),
            'exported_sha': sha,
            'commit_time': commit_time_str,
            'latency': delay,
        })

    if skipped:
        print('Skipped PRs:', skipped)

    print('Writing file', LATENCIES_CSV)
    latencies.write(order='asc')
    return latencies
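
ExportLatencyDB is defined elsewhere in the module; from the calls above it needs read(), write(order=...), get(), add() and len() support, and read() must raise IOError or AssertionError when the CSV is missing or malformed. A rough stand-in consistent with that usage (the CSV layout and the assertion are assumptions):

import csv

class ExportLatencyDB:
    # Minimal stand-in keyed by PR number, matching how get_latencies uses it.
    FIELDS = ['PR', 'exported_sha', 'commit_time', 'latency']

    def __init__(self, path):
        self.path = path
        self.rows = {}

    def __len__(self):
        return len(self.rows)

    def read(self):
        # Raises IOError if the file is missing and AssertionError if a row
        # lacks the expected columns.
        with open(self.path) as f:
            for row in csv.DictReader(f):
                assert set(self.FIELDS) <= set(row)
                self.rows[row['PR']] = row

    def get(self, pr_number):
        return self.rows.get(str(pr_number))

    def add(self, row):
        self.rows[row['PR']] = row

    def write(self, order='asc'):
        keys = sorted(self.rows, key=int, reverse=(order != 'asc'))
        with open(self.path, 'w') as f:
            writer = csv.DictWriter(f, fieldnames=self.FIELDS)
            writer.writeheader()
            for key in keys:
                writer.writerow(self.rows[key])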
Example #4
def list_imports():
    # List Chromium commits since CUTOFF whose subject line marks a WPT import
    # ('Import wpt@<sha>'), returning one Import record per matching commit.
    output = chromium_git([
        'log',
        'origin/master',
        '--format=%H|%s|%cI',
        '--grep=^[Ii]mport wpt@',
        '--since',
        CUTOFF,
        # Uncomment the line below to count only auto imports.
        # '--author', '*****@*****.**'
    ])
    imports = []
    subject_re = re.compile(r'^[Ii]mport wpt@(\w+)')
    for line in output.split('\n'):
        cr_sha, subject, date = line.split('|')
        match = subject_re.match(subject)
        if not match:
            continue
        wpt_sha = match.groups()[0]
        imports.append(Import(cr_sha, wpt_sha, date))
    return imports
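
Import and CUTOFF come from the surrounding module. From the way they are used here, a namedtuple and an ISO date string are enough; the field names and the cutoff value below are assumptions:

from collections import namedtuple

# Assumed shape of the record built from each matching commit line.
Import = namedtuple('Import', ['chromium_sha', 'wpt_sha', 'date'])

# Assumed cutoff: only count imports committed on or after this date.
CUTOFF = '2018-01-01'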
Example #5
def calculate_pr_delays(prs):
    # For each upstream PR, compute the delay (in minutes) between the Chromium
    # commit it corresponds to and the time the PR was closed. Results are
    # cached in MINS_FILE so the git lookups only run once.
    try:
        with open(MINS_FILE) as f:
            min_differences = json.load(f)
            print('Read', len(min_differences), 'results from', MINS_FILE)
            return min_differences
    except (IOError, ValueError):
        pass

    min_differences = {}
    skipped = []
    total_prs = len(prs)

    for index, pr in enumerate(prs):
        pr_number = pr['number']
        print('[{}/{}] PR: {}'.format(index + 1, total_prs, pr['html_url']))
        pr_closed_at = dateutil.parser.parse(pr['closed_at'])

        match = re.search(r'^Change-Id: (.+)$', pr['body'], re.MULTILINE)

        try:
            change_id = match.groups()[0].strip()
            print('Found Change-Id', change_id)
            sha = get_sha_from_change_id(change_id)
        except AttributeError:
            print('Could not get Change-Id from PR, trying Cr-Commit-Position')
            match = re.search(r'^Cr-Commit-Position: (.+)$', pr['body'],
                              re.MULTILINE)

            try:
                commit_position = match.groups()[0].strip()
                print('Found Cr-Commit-Position', commit_position)
                sha = get_sha_from_commit_position(commit_position)
            except AttributeError:
                sha = None

        if sha is None:
            print('Unable to find commit. SKIPPING!')
            skipped.append(pr_number)
            continue

        print('Found SHA', sha)

        output = chromium_git(['show', '-s', '--format=%cI', sha])
        commit_time = dateutil.parser.parse(output)
        mins_difference = (pr_closed_at - commit_time).total_seconds() / 60

        print('Committed at', commit_time)
        print('PR closed at', pr_closed_at)
        print('Delay (mins):', mins_difference)
        if mins_difference < 0:
            print('Negative delay. SKIPPING!')
            skipped.append(pr_number)
            continue

        datekey = commit_time.strftime('%Y-%m')
        min_differences[pr_number] = {
            'latency': mins_difference,
            'month': datekey,
            'time': commit_time.strftime(_GITHUB_DATE_FORMAT)
        }

    if skipped:
        print('Skipped PRs:', skipped)

    print('Writing file', MINS_FILE)
    with open(MINS_FILE, 'w') as f:
        json.dump(min_differences, f, indent=2)

    return min_differences
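
The returned mapping is keyed by PR number, with 'latency' (minutes), 'month' and 'time' fields per entry, so it is easy to aggregate afterwards. A small illustrative sketch, not part of the original script, that averages the latency per month:

from collections import defaultdict

def mean_latency_by_month(min_differences):
    # Group the per-PR latencies by their 'month' key and average each group.
    buckets = defaultdict(list)
    for entry in min_differences.values():
        buckets[entry['month']].append(entry['latency'])
    return {month: sum(values) / len(values)
            for month, values in sorted(buckets.items())}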