Example #1
def get_builders():
    """Returns the builder list from infradata/hosts with URLs attached."""
    local_path = os.path.abspath(os.path.join(HERE, 'workdir'))
    data_url = 'https://chrome-internal.googlesource.com/infradata/hosts'
    buildermap_path = 'buildermap.json'
    mastermap_path = 'mastermap.json'
    r = Repo(data_url)
    r.repos_dir = local_path
    r.reify()
    r.fetch()
    builder_list = json.loads(
        r.run('cat-file', 'blob', 'refs/heads/master:%s' % buildermap_path))
    master_list = json.loads(
        r.run('cat-file', 'blob', 'refs/heads/master:%s' % mastermap_path))
    master_map = {master['dirname']: master for master in master_list}
    for entry in builder_list:
        master_url = master_map.get(entry['mastername'],
                                    {}).get('buildbot_url')
        if not master_url:
            LOGGER.warning('No master url found for %s/%s',
                           entry['mastername'], entry['builder'])
            url = None
        else:
            url = '%s/builders/%s' % (master_url.rstrip('/'), entry['builder'])
        entry['url'] = url
    return builder_list
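
The function above relies on module-level names that are not shown here: HERE, LOGGER, and the Repo helper. A minimal sketch of that preamble, plus a loop that prints each builder's URL, could look like the following; the infra.libs.git2 import path and the HERE definition are assumptions, not part of the original module.

import json
import logging
import os

# Assumed import path for infra's git2 Repo helper; adjust to wherever Repo
# actually lives in your checkout.
from infra.libs.git2 import Repo

HERE = os.path.dirname(os.path.abspath(__file__))
LOGGER = logging.getLogger(__name__)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    for builder in get_builders():
        print('%s/%s -> %s' % (builder['mastername'], builder['builder'],
                               builder['url']))
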
Example #2
def get_builders():
  local_path = os.path.abspath(os.path.join(HERE, 'workdir'))
  data_url = 'https://chrome-internal.googlesource.com/infradata/hosts'
  buildermap_path = 'buildermap.json'
  mastermap_path = 'mastermap.json'
  r = Repo(data_url)
  r.repos_dir = local_path
  r.reify()
  r.fetch()
  builder_list = json.loads(
      r.run('cat-file', 'blob', 'refs/heads/master:%s' % buildermap_path))
  master_list = json.loads(
      r.run('cat-file', 'blob', 'refs/heads/master:%s' % mastermap_path))
  master_map = {master['dirname']: master for master in master_list}
  for entry in builder_list:
    master_url = master_map.get(entry['mastername'], {}).get('buildbot_url')
    if not master_url:
      LOGGER.warning('No master url found for %s/%s',
                     entry['mastername'], entry['builder'])
      url = None
    else:
      url = '%s/builders/%s' % (master_url.rstrip('/'), entry['builder'])
    entry['url'] = url
  return builder_list
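
Both versions read buildermap.json and mastermap.json straight out of the fetched master ref with git cat-file, so no working-tree checkout is needed. As a rough illustration of what the r.run('cat-file', ...) calls amount to in plain git, here is a small subprocess-based helper; the read_json_blob name and the example paths are hypothetical.

import json
import subprocess


def read_json_blob(git_dir, path, ref='refs/heads/master'):
  # Equivalent of: git --git-dir <git_dir> cat-file blob <ref>:<path>
  out = subprocess.check_output(
      ['git', '--git-dir', git_dir, 'cat-file', 'blob',
       '%s:%s' % (ref, path)])
  return json.loads(out)


# Hypothetical usage; the clone location depends on how Repo lays out workdir.
# builders = read_json_blob('workdir/hosts', 'buildermap.json')
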
Example #3
def main(argv):
  """Builds one synthetic merge commit per release tag and pushes the chain."""
  p = argparse.ArgumentParser()
  p.add_argument(
    '--repo-dir', metavar='DIR', default='zip_release_commits_repos',
    help='The directory to use for git clones (default: %(default)s).')
  logs.add_argparse_options(p)
  opts = p.parse_args(argv)
  logs.process_argparse_options(opts)

  # Get all refs
  r = Repo('https://chromium.googlesource.com/chromium/src')
  r.repos_dir = os.path.abspath(opts.repo_dir)
  r.reify()
  r.fetch()

  all_releases = r['refs/heads/ignore/foo']

  tags = r.run(
    'for-each-ref', '--sort=committerdate', '--format=%(objectname) %(refname)',
    'refs/tags'
  ).splitlines()

  already_have = set()
  try:
    already_have = r.run('rev-list', '--first-parent', '--parents',
                         all_releases.ref).splitlines()
    # The last line is the parentless root of the chain; drop it, then keep
    # each remaining line's last hash, which is the tag commit merged in.
    already_have = set(l.split()[-1] for l in already_have[:-1])
  except CalledProcessError:
    pass

  for hsh_tag in tags:
    hsh, tag = hsh_tag.split()
    if hsh in already_have:
      print 'skipping', tag
      continue

    print 'processing', tag
    c = r.get_commit(hsh)
    if all_releases.commit is INVALID:
      # First run: seed the ref with a parentless root commit dated one second
      # before this first tag's commit.
      cu = c.data.committer
      cu = cu.alter(timestamp=cu.timestamp.alter(secs=cu.timestamp.secs-1))
      au = c.data.author
      au = au.alter(timestamp=au.timestamp.alter(secs=au.timestamp.secs-1))

      all_releases.update_to(c.alter(
        author=au,
        committer=cu,
        parents=(),
        tree=None,
      ))

    # Append a merge of this tag's commit onto the chain, with the tag name as
    # the commit message.
    parents = [all_releases.commit.hsh, c.hsh]
    all_releases.update_to(c.alter(
      author=c.data.committer,
      message_lines=[tag],
      parents=parents,
      tree=None,
    ))

  print all_releases.commit
  r.run('push', 'origin', '%s:%s' % (all_releases.commit.hsh, all_releases.ref))
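
main() above takes its argument list explicitly, so the script still needs an entry point; a minimal one, assuming the usual sys.argv slicing, would be:

import sys

if __name__ == '__main__':
  # Drop argv[0] (the script name) before handing the arguments to argparse.
  sys.exit(main(sys.argv[1:]))
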
Example #4
def main(argv):
    p = argparse.ArgumentParser()
    p.add_argument(
        '--repo-dir',
        metavar='DIR',
        default='zip_release_commits_repos',
        help='The directory to use for git clones (default: %(default)s).')
    logs.add_argparse_options(p)
    opts = p.parse_args(argv)
    logs.process_argparse_options(opts)

    # Get all refs
    r = Repo('https://chromium.googlesource.com/chromium/src')
    r.repos_dir = os.path.abspath(opts.repo_dir)
    r.reify()
    r.fetch()

    all_releases = r['refs/heads/ignore/foo']

    tags = r.run('for-each-ref', '--sort=committerdate',
                 '--format=%(objectname) %(refname)',
                 'refs/tags').splitlines()

    already_have = set()
    try:
        already_have = r.run('rev-list', '--first-parent', '--parents',
                             all_releases.ref).splitlines()
        # The last line is the parentless root of the chain; drop it, then
        # keep each remaining line's last hash (the tag commit merged in).
        already_have = set(l.split()[-1] for l in already_have[:-1])
    except CalledProcessError:
        pass

    for hsh_tag in tags:
        hsh, tag = hsh_tag.split()
        if hsh in already_have:
            print 'skipping', tag
            continue

        print 'processing', tag
        c = r.get_commit(hsh)
        if all_releases.commit is INVALID:
            cu = c.data.committer
            cu = cu.alter(timestamp=cu.timestamp.alter(secs=cu.timestamp.secs -
                                                       1))
            au = c.data.author
            au = au.alter(timestamp=au.timestamp.alter(secs=au.timestamp.secs -
                                                       1))

            all_releases.update_to(
                c.alter(
                    author=au,
                    committer=cu,
                    parents=(),
                    tree=None,
                ))

        parents = [all_releases.commit.hsh, c.hsh]
        all_releases.update_to(
            c.alter(
                author=c.data.committer,
                message_lines=[tag],
                parents=parents,
                tree=None,
            ))

    print all_releases.commit
    r.run('push', 'origin',
          '%s:%s' % (all_releases.commit.hsh, all_releases.ref))
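
The already_have parsing in Examples 3 and 4 depends on the shape of git rev-list --first-parent --parents output: each synthetic merge line ends with the tag commit it pulled in, and the final line is the parentless root of the chain. The short sketch below traces that parsing with invented hashes.

# Hypothetical rev-list output for the zip branch (the hashes are made up).
sample = [
    'aaa111 bbb222 ccc333',  # merge commit, its first parent, merged tag commit
    'bbb222 ddd444 eee555',  # older merge commit and its merged tag commit
    'ddd444',                # root of the chain: no parents, nothing to keep
]
# Same parsing as in main(): drop the root line, keep each line's last hash.
already_have = set(l.split()[-1] for l in sample[:-1])
assert already_have == set(['ccc333', 'eee555'])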