Example #1
    def testMercurialChangeRepo(self):
        # Create a new repo
        old_env = os.environ.copy()
        if 'HG_SHARE_BASE_DIR' in os.environ:
            del os.environ['HG_SHARE_BASE_DIR']

        try:
            repo2 = os.path.join(self.tmpdir, 'repo2')
            run_cmd(['%s/init_hgrepo.sh' % os.path.dirname(__file__), repo2])

            self.assertNotEqual(self.revisions, getRevisions(repo2))

            # Clone the original repo
            mercurial(self.repodir, self.wc)
            self.assertEquals(getRevisions(self.wc), self.revisions)
            open(os.path.join(self.wc, 'test.txt'), 'w').write("hello!")

            # Clone the new one
            mercurial(repo2, self.wc)
            self.assertEquals(getRevisions(self.wc), getRevisions(repo2))
            # Make sure our local file went away
            self.failUnless(
                not os.path.exists(os.path.join(self.wc, 'test.txt')))
        finally:
            os.environ.clear()
            os.environ.update(old_env)
Example #2
    def bump_and_tag(repo, attempt, config, relbranch, revision, tags,
                     defaultBranch):
        relbranchChangesets = len(tags)
        defaultBranchChangesets = 0

        if relbranch in get_branches(reponame):
            update(reponame, revision=relbranch)
        else:
            update(reponame, revision=revision)
            run_cmd(['hg', 'branch', relbranch], cwd=reponame)

        if len(bumpFiles) > 0:
            # Bump files on the relbranch, if necessary
            bump(reponame, bumpFiles, 'version')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'],
                            '-m', getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                relbranchChangesets += 1
                revision = get_revision(reponame)
            except subprocess.CalledProcessError, e:
                # We only want to ignore exceptions caused by having nothing to
                # commit, which are OK. We still want to raise exceptions caused
                # by any other thing.
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise
Example #3
    def testGitShare(self):
        shareBase = os.path.join(self.tmpdir, 'git-repos')
        rev = git.git(self.repodir, self.wc, shareBase=shareBase)
        shareDir = os.path.join(shareBase, git.get_repo_name(self.repodir))
        self.assertEquals(rev, self.revisions[-1])

        # We should see all the revisions
        revs = getRevisions(
            self.wc, branches=['origin/master', 'origin/branch2'])
        shared_revs = getRevisions(
            shareDir, branches=['origin/master', 'origin/branch2'])

        self.assertEquals(revs, shared_revs)
        self.assertEquals(revs, self.revisions)

        # Update to a different rev
        rev = git.git(self.repodir, self.wc,
                      revision=self.revisions[0], shareBase=shareBase)
        self.assertEquals(rev, self.revisions[0])
        self.assertFalse(os.path.exists(os.path.join(self.wc, 'newfile')))

        # Add a commit to the original repo
        newfile = os.path.join(self.repodir, 'newfile')
        touch(newfile)
        run_cmd(['git', 'add', 'newfile'], cwd=self.repodir)
        run_cmd(['git', 'commit', '-q', '-m', 'add newfile'], cwd=self.repodir)
        new_rev = getRevisions(self.repodir)[-1]

        # Update to the new rev
        rev = git.git(
            self.repodir, self.wc, revision=new_rev, shareBase=shareBase)
        self.assertEquals(rev, new_rev)
        self.assertTrue(os.path.exists(os.path.join(self.wc, 'newfile')))
Example #4
File: svn.py Project: pocmo/build-tools
def checkoutSVN(targetSVNDirectory, svnURL):
    """
    Checkout the product details SVN
    """
    if not os.path.isdir(targetSVNDirectory):
        cmd = ["svn", "co", svnURL, targetSVNDirectory]
        run_cmd(cmd)
Example #5
 def testCommitWithUser(self):
     run_cmd(['touch', 'newfile'], cwd=self.repodir)
     run_cmd(['hg', 'add', 'newfile'], cwd=self.repodir)
     rev = commit(self.repodir, user='unittest', msg='new stuff!')
     info = getRevInfo(self.repodir, rev)
     self.assertEquals(info['user'], 'unittest')
     self.assertEquals(info['msg'], 'new stuff!')
Example #6
def check_buildbot():
    """check if buildbot command works"""
    try:
        run_cmd(['buildbot', '--version'])
    except CalledProcessError:
        print "FAIL: buildbot command doesn't work"
        raise
Example #7
File: test_util_hg.py Project: gerva/tools
 def testCommit(self):
     newfile = os.path.join(self.repodir, 'newfile')
     touch(newfile)
     run_cmd(['hg', 'add', 'newfile'], cwd=self.repodir)
     rev = commit(self.repodir, user='******', msg='gooooood')
     info = getRevInfo(self.repodir, rev)
     self.assertEquals(info['msg'], 'gooooood')
Example #8
 def testCommitWithUser(self):
     run_cmd(["touch", "newfile"], cwd=self.repodir)
     run_cmd(["hg", "add", "newfile"], cwd=self.repodir)
     rev = commit(self.repodir, user="******", msg="new stuff!")
     info = getRevInfo(self.repodir, rev)
     self.assertEquals(info["user"], "unittest")
     self.assertEquals(info["msg"], "new stuff!")
Example #9
def recreate_repos(repoSetupConfig):
    hgHost = repoSetupConfig['hgHost']
    repoPath = repoSetupConfig['repoPath']
    hgUserName = repoSetupConfig['hgUserName']
    hgSshKey = path.join(path.expanduser("~"), ".ssh",
                         repoSetupConfig['hgSshKey'])

    for repo in repoSetupConfig['reposToClone'].keys():
        maybe_delete_repo(hgHost, hgUserName, hgSshKey, repo, repoPath)
        clone_repo(hgHost, hgUserName, hgSshKey, repo)

    run_cmd(['sleep', '600'])
    allTags = []

    for repo in repoSetupConfig['reposToClone'].keys():
        if repoSetupConfig['reposToClone'][repo].get('overrides'):
            tags = bump_configs(
                hgHost, hgUserName, hgSshKey, repo, repoPath,
                repoSetupConfig['reposToClone'][repo]['overrides'],
                repoSetupConfig['reposToClone'][repo].get('nobump_overrides', []))
            allTags.extend(tags)
    log.info('Tagging using %s' % ' '.join(allTags))

    for repo in repoSetupConfig['reposToClone'].keys():
        if repoSetupConfig['reposToClone'][repo].get('doTag'):
            tag_repo(hgHost, hgUserName, hgSshKey, repo, repoPath, allTags)
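For orientation, recreate_repos() expects repoSetupConfig to carry exactly the keys read above. A minimal sketch of that structure, where every host, user, key file, and repository name is a hypothetical placeholder:

# Hypothetical repoSetupConfig; all values below are placeholders.
repoSetupConfig = {
    'hgHost': 'hg.example.com',
    'repoPath': 'users/stage-ffxbld',
    'hgUserName': 'stage-ffxbld',
    'hgSshKey': 'id_rsa',  # joined onto ~/.ssh/ by recreate_repos()
    'reposToClone': {
        'buildbot-configs': {
            'overrides': ['staging-overrides.py'],  # triggers bump_configs()
            'nobump_overrides': [],
            'doTag': True,                          # triggers tag_repo()
        },
        'tools': {'doTag': True},
    },
}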
Example #10
def getPreviousBuildID(download_base_url, download_subdir):
    """Figure out what the previous buildid is"""
    if os.path.exists('previous.apk'):
        os.remove('previous.apk')
    run_cmd(['wget', '-O', 'previous.apk',
             '%s/nightly/latest-%s/gecko-unsigned-unaligned.apk' % (download_base_url, download_subdir)])
    return parseApk('previous.apk')[0]
Example #11
    def testOutofSyncMirrorFailingMaster(self):
        # First create the mirror
        mirror = os.path.join(self.tmpdir, "repo2")
        clone(self.repodir, mirror)

        shareBase = os.path.join(self.tmpdir, "share")
        os.mkdir(shareBase)
        mercurial(self.repodir, self.wc, shareBase=shareBase, mirrors=[mirror])

        # Create a bundle
        bundle = os.path.join(self.tmpdir, "bundle")
        run_cmd(["hg", "bundle", "-a", bundle], cwd=self.repodir)

        # Move our repodir out of the way so that pulling/cloning from it fails
        os.rename(self.repodir, self.repodir + "-bad")

        # Try and update to a non-existent revision using our mirror and
        # bundle, with the master failing. We should fail
        self.assertRaises(
            subprocess.CalledProcessError,
            mercurial,
            self.repodir,
            self.wc,
            shareBase=shareBase,
            mirrors=[mirror],
            bundles=[bundle],
            revision="1234567890",
        )
Example #12
def rsyncFilesByPattern(server, userName, sshKey, source_dir, target_dir,
                        pattern):
    cmd = ['rsync', '-e',
           'ssh -l %s -oIdentityFile=%s' % (userName, sshKey),
           '-av', '--include=%s' % pattern, '--include=*/', '--exclude=*',
           '%s:%s' % (server, source_dir), target_dir]
    run_cmd(cmd)
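A hedged usage sketch: the filter order matters because rsync applies the first matching rule, so the file pattern and the */ directory include must come before the catch-all exclude. The server, user, key, and paths below are placeholders.

# Hypothetical call: copy only *.checksums files (plus the directories needed
# to reach them) from the remote source tree into a local directory.
rsyncFilesByPattern('stage.example.com', 'ffxbld', '/home/ffxbld/.ssh/id_rsa',
                    '/pub/firefox/candidates/42.0-candidates/build1/',
                    'checksums/', '*.checksums')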
Example #13
def check_buildbot():
    """check if buildbot command works"""
    try:
        run_cmd(["buildbot", "--version"])
    except CalledProcessError:
        log.error("FAIL: buildbot command doesn't work", exc_info=True)
        raise
Example #14
File: hg.py Project: 70599/Waterfox
def cleanOutgoingRevs(reponame, remote, username, sshKey):
    outgoingRevs = retry(out, kwargs=dict(src=reponame, remote=remote,
                                          ssh_username=username,
                                          ssh_key=sshKey))
    for r in reversed(outgoingRevs):
        run_cmd(['hg', '--config', 'extensions.mq=', 'strip', '-n',
                 r[REVISION]], cwd=reponame)
Example #15
File: test_util_hg.py Project: gerva/tools
    def testShareExtraFiles(self):
        shareBase = os.path.join(self.tmpdir, 'share')
        backup = os.path.join(self.tmpdir, 'backup')

        # Clone the original repo
        mercurial(self.repodir, self.wc, shareBase=shareBase)

        clone(self.repodir, backup)

        # Make the working repo have a new file. We need it to have an earlier
        # timestamp (yesterday) to trigger the odd behavior in hg
        newfile = os.path.join(self.wc, 'newfile')
        touch(newfile, timestamp=yesterday_timestamp())
        run_cmd(['hg', 'add', 'newfile'], cwd=self.wc)
        run_cmd(['hg', 'commit', '-m', '"add newfile"'], cwd=self.wc)

        # Reset the share base to remove the 'add newfile' commit. We
        # overwrite repodir with the backup that doesn't have the commit,
        # then clone the repodir to a throwaway dir to create the new
        # shareBase. Now self.wc still points to shareBase, but the
        # changeset that self.wc was on is lost.
        shutil.rmtree(self.repodir)
        shutil.rmtree(shareBase)
        clone(backup, self.repodir)
        throwaway = os.path.join(self.tmpdir, 'throwaway')
        mercurial(self.repodir, throwaway, shareBase=shareBase)

        # Try and update our working copy
        mercurial(self.repodir, self.wc, shareBase=shareBase)

        self.assertFalse(os.path.exists(os.path.join(self.wc, 'newfile')))
Example #16
 def process_configs(repo, attempt):
     """Helper method that encapsulates all of the things necessary
        to run release runner for all releases."""
     log.info("Bumping %s, attempt #%s" % (repo, attempt))
     for release in rr.new_releases:
         rr.update_status(release, 'Writing configs')
         l10nContents = rr.get_release_l10n(release['name'])
         tags.extend(getTags(
             getBaseTag(release['product'], release['version']),
             release['buildNumber'])
         )
         update(configs_workdir, revision='default')
         cfgFile = getReleaseConfigName(
             release['product'], path.basename(release['branch']),
             release['version'], staging)
         bump_configs(release=release, cfgFile=cfgFile,
                      l10nContents=l10nContents, workdir=configs_workdir,
                      hg_username=hg_username,
                      productionBranch=buildbot_configs_branch)
         rr.update_status(release, 'Running release sanity')
         rs_args = get_release_sanity_args(configs_workdir, release,
                                           cfgFile, masters_json,
                                           buildbot_configs_branch)
         release_sanity_script = "%s/buildbot-helpers/release_sanity.py" % tools_workdir
         run_cmd(['python', release_sanity_script] + rs_args +
                 ['--dry-run'])
         rr.update_status(
             release, 'Waiting for other releases to run release sanity'
         )
Example #17
File: test_util_hg.py Project: gerva/tools
 def testPushWithForce(self):
     clone(self.repodir, self.wc, revision=self.revisions[0],
           clone_by_rev=True)
     newfile = os.path.join(self.wc, 'newfile')
     touch(newfile)
     run_cmd(['hg', 'add', 'newfile'], cwd=self.wc)
     run_cmd(['hg', 'commit', '-m', '"re-add newfile"'], cwd=self.wc)
     push(self.repodir, self.wc, push_new_branches=False, force=True)
Example #18
    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        self.repodir = os.path.join(self.tmpdir, "repo")
        run_cmd(["%s/init_hgrepo.sh" % os.path.dirname(__file__), self.repodir])

        self.revisions = getRevisions(self.repodir)
        self.wc = os.path.join(self.tmpdir, "wc")
        self.pwd = os.getcwd()
Example #19
File: hg.py Project: 70599/Waterfox
def purge(dest):
    """Purge the repository of all untracked and ignored files."""
    try:
        run_cmd(['hg', '--config', 'extensions.purge=', 'purge',
                 '-a', '--all', dest], cwd=dest)
    except subprocess.CalledProcessError, e:
        log.debug('purge failed: %s' % e)
        raise
Example #20
    def testPullDefault(self):
        clone(self.repodir, self.wc)
        run_cmd(["hg", "tag", "-f", "TAG1"], cwd=self.repodir)
        revisions = getRevisions(self.repodir)

        rev = pull(self.repodir, self.wc, revision="default")
        self.assertEquals(rev, revisions[0])
        self.assertEquals(getRevisions(self.wc), revisions)
Example #21
    def testPullDefault(self):
        clone(self.repodir, self.wc)
        run_cmd(['hg', 'tag', '-f', 'TAG1'], cwd=self.repodir)
        revisions = getRevisions(self.repodir)

        rev = pull(self.repodir, self.wc, revision='default')
        self.assertEquals(rev, revisions[0])
        self.assertEquals(getRevisions(self.wc), revisions)
Example #22
 def _run_hg(cls, command, repo=None, want_output=False):
     cmd = [cls.hg, '--config', 'extensions.mq=']
     if repo:
         cmd.extend(["-R", repo])
     if want_output:
         return get_output(cmd + command)
     else:
         run_cmd(cmd + command)
Example #23
File: hg.py Project: 70599/Waterfox
def push(src, remote, push_new_branches=True, force=False, **kwargs):
    cmd = ['hg', 'push']
    cmd.extend(common_args(**kwargs))
    if force:
        cmd.append('-f')
    if push_new_branches:
        cmd.append('--new-branch')
    cmd.append(remote)
    run_cmd(cmd, cwd=src)
Example #24
def downloadNightlyBuild(localeSrcDir, env): 
    run_cmd(["make", "wget-en-US"], cwd=localeSrcDir, env=env)
    run_cmd(["make", "unpack"], cwd=localeSrcDir, env=env)
    output = get_output(["make", "ident"], cwd=localeSrcDir, env=env)
    info = {}
    for line in output.splitlines():
        key, value = line.rstrip().split()
        info[key] = value
    return info
Example #25
File: test_util_hg.py Project: gerva/tools
 def testPushForceFail(self):
     clone(self.repodir, self.wc, revision=self.revisions[0],
           clone_by_rev=True)
     newfile = os.path.join(self.wc, 'newfile')
     touch(newfile)
     run_cmd(['hg', 'add', 'newfile'], cwd=self.wc)
     run_cmd(['hg', 'commit', '-m', '"add newfile"'], cwd=self.wc)
     self.assertRaises(Exception, push, self.repodir, self.wc,
                       push_new_branches=False, force=False)
Example #26
File: svn.py Project: pocmo/build-tools
def exportJSON(targetSVNDirectory):
    """
    Export the PHP product details to json
    """
    retval = os.getcwd()
    try:
        os.chdir(targetSVNDirectory)
        run_cmd(["php", "export_json.php"])
    finally:
        os.chdir(retval)
Example #27
def tagRepo(config, repo, reponame, revision, tags, bumpFiles, relbranch,
            pushAttempts, defaultBranch='default'):
    remote = make_hg_url(HG, repo)
    mercurial(remote, reponame)

    def bump_and_tag(repo, attempt, config, relbranch, revision, tags,
                     defaultBranch):
        # set relbranchChangesets=1 because tag() generates exactly 1 commit
        relbranchChangesets = 1
        defaultBranchChangesets = 0

        if relbranch in get_branches(reponame):
            update(reponame, revision=relbranch)
        else:
            update(reponame, revision=revision)
            run_cmd(['hg', 'branch', relbranch], cwd=reponame)

        if len(bumpFiles) > 0:
            # Bump files on the relbranch, if necessary
            bump(reponame, bumpFiles, 'version')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'],
                            '-m', getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                relbranchChangesets += 1
            except subprocess.CalledProcessError, e:
                # We only want to ignore exceptions caused by having nothing to
                # commit, which are OK. We still want to raise exceptions caused
                # by any other thing.
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise

        # We always want our tags pointing at the tip of the relbranch
        # so we need to grab the current revision after we've switched
        # branches and bumped versions.
        revision = get_revision(reponame)
        # Create the desired tags on the relbranch
        tag(repo, revision, tags, config['hgUsername'])

        # This is the bump of the version on the default branch
        # We do it after the other one in order to get the tip of the
        # repository back on default, thus avoiding confusion.
        if len(bumpFiles) > 0:
            update(reponame, revision=defaultBranch)
            bump(reponame, bumpFiles, 'nextVersion')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'],
                            '-m', getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                defaultBranchChangesets += 1
            except subprocess.CalledProcessError, e:
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise
Example #28
 def testMercurialWithExistingShare(self):
     shareBase = os.path.join(self.tmpdir, 'share')
     sharerepo = os.path.join(shareBase, self.repodir.lstrip("/"))
     os.mkdir(shareBase)
     mercurial(self.repodir, sharerepo)
     open(os.path.join(self.repodir, 'test.txt'), 'w').write('hello!')
     run_cmd(['hg', 'add', 'test.txt'], cwd=self.repodir)
     run_cmd(['hg', 'commit', '-m', 'adding changeset'], cwd=self.repodir)
     mercurial(self.repodir, self.wc, shareBase=shareBase)
     self.assertEquals(getRevisions(self.repodir), getRevisions(self.wc))
     self.assertEquals(getRevisions(self.repodir), getRevisions(sharerepo))
Example #29
    def testUnbundle(self):
        # First create the bundle
        bundle = os.path.join(self.tmpdir, 'bundle')
        run_cmd(['hg', 'bundle', '-a', bundle], cwd=self.repodir)

        # Now unbundle it in a new place
        newdir = os.path.join(self.tmpdir, 'new')
        init(newdir)
        unbundle(bundle, newdir)

        self.assertEquals(self.revisions, getRevisions(newdir))
Example #30
File: hg.py Project: ccooper/build-tools
def share(source, dest, branch=None, revision=None):
    """Creates a new working directory in "dest" that shares history with
       "source" using Mercurial's share extension"""
    if not os.path.exists(dest):
        log.info("mkdir: %s" % str(dest))
        try:
            os.makedirs(dest)
        except OSError:
            log.error("Can't create directory %s!" % dest)
    run_cmd(['hg', 'share', '-U', source, dest])
    return update(dest, branch=branch, revision=revision)
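A minimal usage sketch of share(), assuming a shared store that an earlier clone has already populated; the paths and branch name below are placeholders.

# Hypothetical call: create a lightweight working directory that borrows its
# history from the shared store, then update it to the tip of 'default'.
share('/builds/hg-shared/hg.example.com/build/tools', 'build/tools',
      branch='default')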
Example #31
    def testFetchAll(self):
        # Clone just the main branch
        git.clone(self.repodir, self.wc, update_dest=False)
        # Now pull in branch2
        git.fetch(self.repodir, self.wc, refname='branch2')

        # Change the original repo
        newfile = os.path.join(self.repodir, 'newfile')
        touch(newfile)
        run_cmd(['git', 'add', 'newfile'], cwd=self.repodir)
        run_cmd(['git', 'commit', '-q', '-m', 'add newfile'], cwd=self.repodir)

        # Now pull in everything from master branch
        git.fetch(self.repodir, self.wc, refname='master')

        for branch in 'master', 'branch2':
            self.assertEquals(
                getRevisions(self.wc, branches=['origin/%s' % branch]),
                getRevisions(self.repodir, branches=[branch]))

        # Make sure we actually changed something
        self.assertNotEqual(
            getRevisions(self.repodir, branches=['master', 'branch2']),
            self.revisions)
Example #32
def apply_and_push(localrepo, remote, changer, max_attempts=10,
                   ssh_username=None, ssh_key=None, force=False):
    """This function calls `changer' to make changes to the repo, and tries
       its hardest to get them to the origin repo. `changer' must be a
       callable object that receives two arguments: the directory of the local
       repository, and the attempt number. This function will push ALL
       changesets missing from remote."""
    assert callable(changer)
    branch = get_branch(localrepo)
    changer(localrepo, 1)
    for n in range(1, max_attempts + 1):
        new_revs = []
        try:
            new_revs = out(src=localrepo, remote=remote,
                           ssh_username=ssh_username,
                           ssh_key=ssh_key)
            if len(new_revs) < 1:
                raise HgUtilError("No revs to push")
            push(src=localrepo, remote=remote, ssh_username=ssh_username,
                 ssh_key=ssh_key, force=force)
            return
        except subprocess.CalledProcessError, e:
            log.debug("Hit error when trying to push: %s" % str(e))
            if n == max_attempts:
                log.debug("Tried %d times, giving up" % max_attempts)
                for r in reversed(new_revs):
                    run_cmd(['hg', '--config', 'extensions.mq=', 'strip', '-n',
                             r[REVISION]], cwd=localrepo)
                raise HgUtilError("Failed to push")
            pull(remote, localrepo, update_dest=False,
                 ssh_username=ssh_username, ssh_key=ssh_key)
            # After we successfully rebase or strip away heads the push is
            # attempted again at the start of the loop
            try:
                run_cmd(['hg', '--config', 'ui.merge=internal:merge',
                         'rebase'], cwd=localrepo)
            except subprocess.CalledProcessError, e:
                log.debug("Failed to rebase: %s" % str(e))
                # abort failed rebase
                run_cmd(['hg', 'rebase', '--abort'], cwd=localrepo)
                update(localrepo, branch=branch)
                for r in reversed(new_revs):
                    run_cmd(['hg', '--config', 'extensions.mq=', 'strip', '-n',
                             r[REVISION]], cwd=localrepo)
                changer(localrepo, n + 1)
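As the docstring above spells out, `changer` receives the local repository path and the attempt number, and must leave committed changesets behind for apply_and_push to push; it is invoked again after a failed attempt is stripped. A minimal sketch of such a callable, reusing the run_cmd and commit helpers shown in these examples (file names, commit message, user, and URL are made up):

import os

# Hypothetical changer: commits one new file per attempt so there is always
# something outgoing to push.
def add_marker_file(repo_dir, attempt):
    # 'attempt' starts at 1 and increases each time the push has to be redone.
    name = 'marker-%d.txt' % attempt
    open(os.path.join(repo_dir, name), 'w').write('attempt %d\n' % attempt)
    run_cmd(['hg', 'add', name], cwd=repo_dir)
    commit(repo_dir, msg='add %s' % name, user='ffxbld')

# apply_and_push('local-clone', 'ssh://hg.example.com/some-repo',
#                add_marker_file, max_attempts=5)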
Example #33
def updateRev(targetSVNDirectory, newRev):
    run_cmd([
        "svn", "propset", "svn:externals", "'product-details -r" + newRev,
        "http://svn.mozilla.org/libs/product-details'", "tags/stage/includes"
    ])
    run_cmd([
        "svn", "propset", "svn:externals", "'product-details -r" + newRev,
        "http://svn.mozilla.org/libs/product-details'",
        "tags/productions/includes"
    ])
    run_cmd(["svn", "up"])
Example #34
    def testShareExtraFilesReset(self):
        shareBase = os.path.join(self.tmpdir, 'share')

        # Clone the original repo
        mercurial(self.repodir, self.wc, shareBase=shareBase)

        # Reset the repo
        run_cmd(
            ['%s/init_hgrepo.sh' % os.path.dirname(__file__), self.repodir])

        # Make the working repo have a new file. We need it to have an earlier
        # timestamp to trigger the odd behavior in hg, so use '-d yesterday'
        run_cmd(['touch', '-d', 'yesterday', 'newfile'], cwd=self.wc)
        run_cmd(['hg', 'add', 'newfile'], cwd=self.wc)
        run_cmd(['hg', 'commit', '-m', '"add newfile"'], cwd=self.wc)

        # Try and update our working copy
        mercurial(self.repodir, self.wc, shareBase=shareBase)

        self.assertFalse(os.path.exists(os.path.join(self.wc, 'newfile')))
Example #35
 def testPushForceFail(self):
     clone(self.repodir,
           self.wc,
           revision=self.revisions[0],
           clone_by_rev=True)
     run_cmd(['touch', 'newfile'], cwd=self.wc)
     run_cmd(['hg', 'add', 'newfile'], cwd=self.wc)
     run_cmd(['hg', 'commit', '-m', '"add newfile"'], cwd=self.wc)
     self.assertRaises(Exception,
                       push,
                       self.repodir,
                       self.wc,
                       push_new_branches=False,
                       force=False)
Example #36
    def testGitRev(self):
        rev = git.git(self.repodir, self.wc)
        self.assertEquals(rev, self.revisions[-1])

        # Update to a different rev
        rev = git.git(self.repodir, self.wc, revision=self.revisions[1])
        self.assertEquals(rev, self.revisions[1])

        # Make a new commit in repodir
        run_cmd(['touch', 'newfile'], cwd=self.repodir)
        run_cmd(['git', 'add', 'newfile'], cwd=self.repodir)
        run_cmd(['git', 'commit', '-q', '-m', 'add newfile'], cwd=self.repodir)
        new_rev = getRevisions(self.repodir)[-1]

        rev = git.git(self.repodir, self.wc, revision=new_rev)

        self.assertEquals(new_rev, rev)
Example #37
    def testGitMirrors(self):
        # Create a bad mirror and a good mirror
        mirror1 = os.path.join(self.tmpdir, 'mirror1')
        mirror2 = os.path.join(self.tmpdir, 'mirror2')
        git.git(self.repodir, mirror2, update_dest=False)

        rev = git.git(self.repodir, self.wc, mirrors=[mirror1, mirror2])
        self.assertEquals(rev, self.revisions[-1])

        # Add a commit to the mirror
        run_cmd(['touch', 'newfile'], cwd=mirror2)
        run_cmd(['git', 'add', 'newfile'], cwd=mirror2)
        run_cmd(['git', 'commit', '-q', '-m', 'add newfile'], cwd=mirror2)
        new_rev = getRevisions(mirror2)[-1]

        # Now clone using the mirror. We should get the new commit that's in
        # the mirror
        rev = git.git(self.repodir, self.wc, mirrors=[mirror1, mirror2])
        self.assertEquals(rev, new_rev)
Example #38
    def testPullWithUnrelatedMirror(self):
        mirror = os.path.join(self.tmpdir, 'repo2')
        run_cmd(['%s/init_hgrepo.sh' % os.path.dirname(__file__), mirror])

        # Now clone from the original
        clone(self.repodir, self.wc)

        # Create a new commit in the original repo
        open(os.path.join(self.repodir, 'test.txt'), 'w').write('hello!')
        run_cmd(['hg', 'add', 'test.txt'], cwd=self.repodir)
        run_cmd(['hg', 'commit', '-m', 'adding changeset'], cwd=self.repodir)

        # Pull using the mirror
        pull(self.repodir, self.wc, mirrors=[mirror])

        self.assertEquals(getRevisions(self.wc), getRevisions(self.repodir))
        # We shouldn't have anything from the unrelated mirror
        self.assertEquals(
            set(),
            set(getRevisions(mirror)).intersection(set(getRevisions(self.wc))))

        # Our default path should point to the original repo
        self.assertEquals(self.repodir, path(self.wc))
Example #39
    def testMercurialWithShareAndBundle(self):
        # First create the bundle
        bundle = os.path.join(self.tmpdir, 'bundle')
        run_cmd(['hg', 'bundle', '-a', bundle], cwd=self.repodir)

        # Create a commit
        open(os.path.join(self.repodir, 'test.txt'), 'w').write('hello!')
        run_cmd(['hg', 'add', 'test.txt'], cwd=self.repodir)
        run_cmd(['hg', 'commit', '-m', 'adding changeset'], cwd=self.repodir)

        # Wrap unbundle so we can tell if it got called
        orig_unbundle = unbundle
        try:
            called = []

            def new_unbundle(*args, **kwargs):
                called.append(True)
                return orig_unbundle(*args, **kwargs)

            hg.unbundle = new_unbundle

            shareBase = os.path.join(self.tmpdir, 'share')
            sharerepo = os.path.join(shareBase, self.repodir.lstrip("/"))
            os.mkdir(shareBase)
            mercurial(self.repodir,
                      self.wc,
                      shareBase=shareBase,
                      bundles=[bundle])

            self.assertEquals(called, [True])
            self.assertEquals(getRevisions(self.repodir),
                              getRevisions(self.wc))
            self.assertEquals(getRevisions(self.repodir),
                              getRevisions(sharerepo))
        finally:
            hg.unbundle = orig_unbundle
Example #40
    def testGitShare(self):
        shareBase = os.path.join(self.tmpdir, 'git-repos')
        rev = git.git(self.repodir, self.wc, shareBase=shareBase)
        shareDir = os.path.join(shareBase, git.get_repo_name(self.repodir))
        self.assertEquals(rev, self.revisions[-1])

        # We should see all the revisions
        revs = getRevisions(self.wc,
                            branches=['origin/master', 'origin/branch2'])
        shared_revs = getRevisions(
            shareDir, branches=['origin/master', 'origin/branch2'])

        self.assertEquals(revs, shared_revs)
        self.assertEquals(revs, self.revisions)

        # Update to a different rev
        rev = git.git(self.repodir,
                      self.wc,
                      revision=self.revisions[0],
                      shareBase=shareBase)
        self.assertEquals(rev, self.revisions[0])
        self.assertFalse(os.path.exists(os.path.join(self.wc, 'newfile')))

        # Add a commit to the original repo
        run_cmd(['touch', 'newfile'], cwd=self.repodir)
        run_cmd(['git', 'add', 'newfile'], cwd=self.repodir)
        run_cmd(['git', 'commit', '-q', '-m', 'add newfile'], cwd=self.repodir)
        new_rev = getRevisions(self.repodir)[-1]

        # Update to the new rev
        rev = git.git(self.repodir,
                      self.wc,
                      revision=new_rev,
                      shareBase=shareBase)
        self.assertEquals(rev, new_rev)
        self.assertTrue(os.path.exists(os.path.join(self.wc, 'newfile')))
Example #41
def init(dest):
    """Initializes an empty repo in `dest`"""
    run_cmd(['hg', 'init', dest])
Example #42
def tag(repo, revision, tags, username):
    for tag in tags:
        cmd = ['hg', 'tag', '-u', username, '-r', revision,
               '-m', getTagCommitMessage(revision, tag), '-f', tag]
        run_cmd(cmd, cwd=repo)
Example #43
def share(source, dest, branch=None, revision=None):
    """Creates a new working directory in "dest" that shares history with
       "source" using Mercurial's share extension"""
    run_cmd(['hg', 'share', '-U', source, dest])
    return update(dest, branch=branch, revision=revision)
Example #44
def mercurial(repo, dest, branch=None, revision=None, update_dest=True,
              shareBase=DefaultShareBase, allowUnsharedLocalClones=False,
              clone_by_rev=False, mirrors=None, bundles=None, autoPurge=False):
    """Makes sure that `dest` is has `revision` or `branch` checked out from
    `repo`.

    Do what it takes to make that happen, including possibly clobbering
    dest.

    If allowUnsharedLocalClones is True and using the share extension fails,
    we fall back to cloning from the shared repo into our destination. If it
    is False (the default) and the share extension is unavailable, we simply
    clone from the remote repository.

    If `clone_by_rev` is True, use 'hg clone -r <rev>' instead of 'hg clone'.
    This is slower, but useful when cloning repos with lots of heads.

    If `mirrors` is set, will try and use the mirrors before `repo`.

    If `bundles` is set, will try and download the bundle first and
    unbundle it instead of doing a full clone. If successful, will pull in
    new revisions from mirrors or the master repo. If unbundling fails, will
    fall back to doing a regular clone from mirrors or the master repo.
    """
    dest = os.path.abspath(dest)
    if shareBase is DefaultShareBase:
        shareBase = os.environ.get("HG_SHARE_BASE_DIR", None)

    log.info("Reporting hg version in use")
    cmd = ['hg', '-q', 'version']
    run_cmd(cmd, cwd='.')

    if shareBase:
        # Check that 'hg share' works
        try:
            log.info("Checking if share extension works")
            output = get_output(['hg', 'help', 'share'], dont_log=True)
            if 'no commands defined' in output:
                # Share extension is enabled, but not functional
                log.info("Disabling sharing since share extension doesn't seem to work (1)")
                shareBase = None
            elif 'unknown command' in output:
                # Share extension is disabled
                log.info("Disabling sharing since share extension doesn't seem to work (2)")
                shareBase = None
        except subprocess.CalledProcessError:
            # The command failed, so disable sharing
            log.info("Disabling sharing since share extension doesn't seem to work (3)")
            shareBase = None

    # Check that our default path is correct
    if os.path.exists(os.path.join(dest, '.hg')):
        hgpath = path(dest, "default")

        # Make sure that our default path is correct
        if hgpath != _make_absolute(repo):
            log.info("hg path isn't correct (%s should be %s); clobbering",
                     hgpath, _make_absolute(repo))
            remove_path(dest)

    # If the working directory already exists and isn't using share we update
    # the working directory directly from the repo, ignoring the sharing
    # settings
    if os.path.exists(dest):
        if not os.path.exists(os.path.join(dest, ".hg")):
            log.warning("%s doesn't appear to be a valid hg directory; clobbering", dest)
            remove_path(dest)
        elif not os.path.exists(os.path.join(dest, ".hg", "sharedpath")):
            try:
                if autoPurge:
                    purge(dest)
                return pull(repo, dest, update_dest=update_dest, branch=branch,
                            revision=revision,
                            mirrors=mirrors)
            except subprocess.CalledProcessError:
                log.warning("Error pulling changes into %s from %s; clobbering", dest, repo)
                log.debug("Exception:", exc_info=True)
                remove_path(dest)

    # If that fails for any reason, and sharing is requested, we'll try to
    # update the shared repository, and then update the working directory from
    # that.
    if shareBase:
        sharedRepo = os.path.join(shareBase, get_repo_path(repo))
        dest_sharedPath = os.path.join(dest, '.hg', 'sharedpath')

        if os.path.exists(sharedRepo):
            hgpath = path(sharedRepo, "default")

            # Make sure that our default path is correct
            if hgpath != _make_absolute(repo):
                log.info("hg path isn't correct (%s should be %s); clobbering",
                         hgpath, _make_absolute(repo))
                # we need to clobber both the shared checkout and the dest,
                # since hgrc needs to be in both places
                remove_path(sharedRepo)
                remove_path(dest)

        if os.path.exists(dest_sharedPath):
            # Make sure that the sharedpath points to sharedRepo
            dest_sharedPath_data = os.path.normpath(
                open(dest_sharedPath).read())
            norm_sharedRepo = os.path.normpath(os.path.join(sharedRepo, '.hg'))
            if dest_sharedPath_data != norm_sharedRepo:
                # Clobber!
                log.info("We're currently shared from %s, but are being requested to pull from %s (%s); clobbering",
                         dest_sharedPath_data, repo, norm_sharedRepo)
                remove_path(dest)

        try:
            log.info("Updating shared repo")
            mercurial(repo, sharedRepo, branch=branch, revision=revision,
                      update_dest=False, shareBase=None, clone_by_rev=clone_by_rev,
                      mirrors=mirrors, bundles=bundles, autoPurge=False)
            if os.path.exists(dest):
                if autoPurge:
                    purge(dest)
                return update(dest, branch=branch, revision=revision)

            try:
                log.info("Trying to share %s to %s", sharedRepo, dest)
                return share(sharedRepo, dest, branch=branch, revision=revision)
            except subprocess.CalledProcessError:
                if not allowUnsharedLocalClones:
                    # Re-raise the exception so it gets caught below.
                    # We'll then clobber dest, and clone from original repo
                    raise

                log.warning("Error calling hg share from %s to %s;"
                            "falling back to normal clone from shared repo",
                            sharedRepo, dest)
                # Do a full local clone first, and then update to the
                # revision we want
                # This lets us use hardlinks for the local clone if the OS
                # supports it
                clone(sharedRepo, dest, update_dest=False,
                      mirrors=mirrors, bundles=bundles)
                return update(dest, branch=branch, revision=revision)
        except subprocess.CalledProcessError:
            log.warning(
                "Error updating %s from sharedRepo (%s): ", dest, sharedRepo)
            log.debug("Exception:", exc_info=True)
            remove_path(dest)
    # end if shareBase

    if not os.path.exists(os.path.dirname(dest)):
        os.makedirs(os.path.dirname(dest))

    # Share isn't available or has failed, clone directly from the source
    return clone(repo, dest, branch, revision,
                 update_dest=update_dest, mirrors=mirrors,
                 bundles=bundles, clone_by_rev=clone_by_rev)
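A hedged usage sketch of the behaviour the docstring describes: a share-backed checkout that prefers a local mirror and a pre-downloaded bundle before touching the canonical repository. The URL and paths are placeholders, and the return value is the revision the working copy ends up on.

# Hypothetical call combining shareBase, mirrors and bundles.
rev = mercurial(
    'https://hg.example.com/projects/my-repo',   # canonical repo ('default' path)
    'build/my-repo',                             # working copy to create/update
    branch='default',
    shareBase='/builds/hg-shared',               # shared stores live under here
    mirrors=['/builds/mirrors/my-repo'],         # tried before the canonical repo
    bundles=['/builds/bundles/my-repo.hg'],      # used to seed a fresh clone
)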
Example #45
def strip_outgoing(dest):
    try:
        run_cmd(["hg", "strip", "--no-backup", "outgoing()"], cwd=dest)
    except Exception:
        log.warn("Ignoring strip error in %s", dest)
Example #46
def clone(repo, dest, branch=None, revision=None, update_dest=True,
          clone_by_rev=False, mirrors=None, bundles=None):
    """Clones hg repo and places it at `dest`, replacing whatever else is
    there.  The working copy will be empty.

    If `revision` is set, only the specified revision and its ancestors will
    be cloned.

    If `update_dest` is set, then `dest` will be updated to `revision` if
    set, otherwise to `branch`, otherwise to the head of default.

    If `mirrors` is set, will try and clone from the mirrors before
    cloning from `repo`.

    If `bundles` is set, will try and download the bundle first and
    unbundle it. If successful, will pull in new revisions from mirrors or
    the master repo. If unbundling fails, will fall back to doing a regular
    clone from mirrors or the master repo.

    Regardless of how the repository ends up being cloned, the 'default' path
    will point to `repo`.
    """
    if os.path.exists(dest):
        remove_path(dest)

    if bundles:
        log.info("Attempting to initialize clone with bundles")
        for bundle in bundles:
            if os.path.exists(dest):
                remove_path(dest)
            init(dest)
            log.info("Trying to use bundle %s", bundle)
            try:
                if not unbundle(bundle, dest):
                    remove_path(dest)
                    continue
                adjust_paths(dest, default=repo)
                # Now pull / update
                return pull(repo, dest, update_dest=update_dest,
                            mirrors=mirrors, revision=revision, branch=branch)
            except Exception:
                remove_path(dest)
                log.exception("Problem unbundling/pulling from %s", bundle)
                continue
        else:
            log.info("Using bundles failed; falling back to clone")

    if mirrors:
        log.info("Attempting to clone from mirrors")
        for mirror in mirrors:
            log.info("Cloning from %s", mirror)
            try:
                retval = clone(mirror, dest, branch, revision,
                               update_dest=update_dest, clone_by_rev=clone_by_rev)
                adjust_paths(dest, default=repo)
                return retval
            except:
                log.exception("Problem cloning from mirror %s", mirror)
                continue
        else:
            log.info("Pulling from mirrors failed; falling back to %s", repo)
            # We may have a partial repo here; mercurial() copes with that
            # We need to make sure our paths are correct though
            if os.path.exists(os.path.join(dest, '.hg')):
                adjust_paths(dest, default=repo)
            return mercurial(repo, dest, branch, revision, autoPurge=True,
                             update_dest=update_dest, clone_by_rev=clone_by_rev)

    cmd = ['hg', 'clone']
    if not update_dest:
        cmd.append('-U')

    if clone_by_rev:
        if revision:
            cmd.extend(['-r', revision])
        elif branch:
            # hg >= 1.6 supports -b branch for cloning
            ver = hg_ver()
            if ver >= (1, 6, 0):
                cmd.extend(['-b', branch])

    cmd.extend([repo, dest])
    run_cmd(cmd)

    if update_dest:
        return update(dest, branch, revision)
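A short usage sketch of the clone-by-revision path the docstring mentions: only the named revision and its ancestors are cloned ('hg clone -r'), and the working copy is left unpopulated. The repository URL, destination, revision, and mirror path are placeholders.

# Hypothetical call: clone by revision with no working-copy update; the
# 'default' path of the result still points at the canonical repo.
clone('https://hg.example.com/projects/my-repo', 'my-repo-clone',
      revision='0123456789ab', update_dest=False, clone_by_rev=True,
      mirrors=['/builds/mirrors/my-repo'])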
Example #47
def tagRepo(config,
            repo,
            reponame,
            revision,
            tags,
            bumpFiles,
            relbranch,
            pushAttempts,
            defaultBranch='default'):
    remote = make_hg_url(HG, repo)
    retry(mercurial, args=(remote, reponame))

    def bump_and_tag(repo, attempt, config, relbranch, revision, tags,
                     defaultBranch):
        # set relbranchChangesets=1 because tag() generates exactly 1 commit
        relbranchChangesets = 1
        defaultBranchChangesets = 0

        if relbranch in get_branches(reponame):
            update(reponame, revision=relbranch)
        else:
            update(reponame, revision=revision)
            run_cmd(['hg', 'branch', relbranch], cwd=reponame)

        if len(bumpFiles) > 0:
            # Bump files on the relbranch, if necessary
            bump(reponame, bumpFiles, 'version')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'],
                            '-m', getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                relbranchChangesets += 1
            except subprocess.CalledProcessError, e:
                # We only want to ignore exceptions caused by having nothing to
                # commit, which are OK. We still want to raise exceptions caused
                # by any other thing.
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise

        # We always want our tags pointing at the tip of the relbranch
        # so we need to grab the current revision after we've switched
        # branches and bumped versions.
        revision = get_revision(reponame)
        # Create the desired tags on the relbranch
        tag(repo, revision, tags, config['hgUsername'])

        # This is the bump of the version on the default branch
        # We do it after the other one in order to get the tip of the
        # repository back on default, thus avoiding confusion.
        if len(bumpFiles) > 0:
            update(reponame, revision=defaultBranch)
            bump(reponame, bumpFiles, 'nextVersion')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'],
                            '-m', getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                defaultBranchChangesets += 1
            except subprocess.CalledProcessError, e:
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise
예제 #48
0
def doCommitSVN(commitMSG):
    """
    Actually do the commit (called with retry)
    """
    log.info("svn commit -m " + commitMSG)
    run_cmd(["svn", "commit", "-m", commitMSG])
Example #49
 def testTagFailsIfExists(self):
     run_cmd(['hg', 'tag', '-R', self.repodir, 'tagg'])
     self.assertRaises(subprocess.CalledProcessError, tag, self.repodir,
                       'tagg')
Example #50
    parser.add_argument("--diff-summary", required=True, type=str)

    options = parser.parse_args()
    assert options.chunks and options.thisChunk, "chunks and this-chunk are required"
    assert path.isfile(
        options.verifyConfig), "Update verify config must exist!"
    verifyConfigFile = options.verifyConfig

    fd, configFile = mkstemp()
    # Needs to be opened in "bytes" mode because we perform relative seeks on it
    fh = os.fdopen(fd, "wb")
    try:
        verifyConfig = UpdateVerifyConfig()
        verifyConfig.read(path.join(UPDATE_VERIFY_DIR, verifyConfigFile))
        myVerifyConfig = verifyConfig.getChunk(options.chunks,
                                               options.thisChunk)
        # override the channel if explicitly set
        if options.verify_channel:
            myVerifyConfig.channel = options.verify_channel
        myVerifyConfig.write(fh)
        fh.close()
        run_cmd(["cat", configFile])
        run_cmd(
            UPDATE_VERIFY_COMMAND + [configFile],
            cwd=UPDATE_VERIFY_DIR,
            env={"DIFF_SUMMARY_LOG": path.abspath(options.diff_summary)},
        )
    finally:
        if path.exists(configFile):
            os.unlink(configFile)
Example #51
    parser.add_argument("--chunks", required=True, dest="chunks", type=int)
    parser.add_argument("--this-chunk",
                        required=True,
                        dest="thisChunk",
                        type=int)

    options = parser.parse_args()
    assert options.chunks and options.thisChunk, \
        "chunks and this-chunk are required"
    assert path.isfile(
        options.verifyConfig), "Update verify config must exist!"
    verifyConfigFile = options.verifyConfig

    fd, configFile = mkstemp()
    fh = os.fdopen(fd, "w")
    try:
        verifyConfig = UpdateVerifyConfig()
        verifyConfig.read(path.join(UPDATE_VERIFY_DIR, verifyConfigFile))
        myVerifyConfig = verifyConfig.getChunk(options.chunks,
                                               options.thisChunk)
        # override the channel if explicitly set
        if options.verify_channel:
            myVerifyConfig.channel = options.verify_channel
        myVerifyConfig.write(fh)
        fh.close()
        run_cmd(["cat", configFile])
        run_cmd(UPDATE_VERIFY_COMMAND + [configFile], cwd=UPDATE_VERIFY_DIR)
    finally:
        if path.exists(configFile):
            os.unlink(configFile)
Example #52
def createRepacks(sourceRepo,
                  revision,
                  l10nRepoDir,
                  l10nBaseRepo,
                  mozconfigPath,
                  srcMozconfigPath,
                  objdir,
                  makeDirs,
                  appName,
                  locales,
                  product,
                  version,
                  buildNumber,
                  stageServer,
                  stageUsername,
                  stageSshKey,
                  ftpServer,
                  compareLocalesRepo,
                  merge,
                  platform,
                  brand,
                  appVersion,
                  generatePartials=False,
                  partialUpdates=None,
                  usePymake=False,
                  tooltoolManifest=None,
                  tooltool_script=None,
                  tooltool_urls=None,
                  balrog_submitter=None,
                  balrog_hash="sha512",
                  mozillaDir=None,
                  mozillaSrcDir=None,
                  bucket_prefix=None):
    buildid = retry(getBuildID,
                    args=(platform, product, version, buildNumber,
                          'candidates', ftpServer))
    log.info('Got buildid: %s' % buildid)
    sourceRepoName = path.split(sourceRepo)[-1]
    absObjdir = path.abspath(path.join(sourceRepoName, objdir))
    localeSrcDir = path.join(absObjdir, appName, "locales")
    # Even on Windows we need to use "/" as a separator for this because
    # compare-locales doesn't work any other way
    l10nIni = "/".join([sourceRepoName, appName, "locales", "l10n.ini"])
    env = {
        "MOZ_OBJDIR": objdir,
        "MOZ_MAKE_COMPLETE_MAR": "1",
        "DOWNLOAD_HOST": ftpServer,
        "UPLOAD_HOST": stageServer,
        "UPLOAD_USER": stageUsername,
        "UPLOAD_SSH_KEY": stageSshKey,
        "UPLOAD_TO_TEMP": "1",
        "MOZ_PKG_PRETTYNAMES": "1",
        "MOZILLA_REV": os.getenv('MOZILLA_REV', ''),
        "COMM_REV": os.getenv('COMM_REV', ''),
        "LD_LIBRARY_PATH": os.getenv("LD_LIBRARY_PATH", ""),
        "MBSDIFF_HOOK": os.getenv("MBSDIFF_HOOK", ""),
    }
    if appVersion is None or version != appVersion:
        env["MOZ_PKG_VERSION"] = version
    signed = False
    if os.environ.get('MOZ_SIGN_CMD'):
        env['MOZ_SIGN_CMD'] = os.environ['MOZ_SIGN_CMD']
        signed = True
    env['POST_UPLOAD_CMD'] = postUploadCmdPrefix(
        to_candidates=True,
        product=product,
        version=version,
        buildNumber=buildNumber,
        signed=signed,
        bucket_prefix=bucket_prefix,
    )
    if usePymake:
        env['USE_PYMAKE'] = "1"
        env['MOZILLA_OFFICIAL'] = "1"
        env["MOZ_SIGN_CMD"] = "python " + \
            path.join(os.getcwd(), "scripts", "release", "signing", "signtool.py").replace('\\', '\\\\\\\\') + \
            " --cachedir " + \
            path.join(os.getcwd(), "signing_cache").replace('\\', '\\\\\\\\') + \
            " -t " + \
            path.join(os.getcwd(), "token").replace('\\', '\\\\\\\\') + \
            " -n " + \
            path.join(os.getcwd(), "nonce").replace('\\', '\\\\\\\\') + \
            " -c " + \
            path.join(os.getcwd(), "scripts", "release", "signing", "host.cert").replace('\\', '\\\\\\\\')
        signingServers = os.environ["MOZ_SIGN_CMD"].split("-H",
                                                          1)[1].split("-H")
        for s in signingServers:
            env["MOZ_SIGN_CMD"] += " -H %s" % s.strip()
    build.misc.cleanupObjdir(sourceRepoName, objdir, appName)
    mercurial(sourceRepo, sourceRepoName)
    update(sourceRepoName, revision=revision)
    l10nRepackPrep(sourceRepoName, objdir, mozconfigPath, srcMozconfigPath,
                   l10nRepoDir, makeDirs, env, tooltoolManifest,
                   tooltool_script, tooltool_urls)
    input_env = retry(downloadReleaseBuilds,
                      args=(ftpServer, product, brand, version, buildNumber,
                            platform),
                      kwargs={
                          'signed': signed,
                          'usePymake': usePymake
                      })
    env.update(input_env)

    if product == "thunderbird" and platform == "macosx64":
        # TODO: FIXME: HACK: KILLME:
        # Terrible, terrible, terrible hack to work around bug 1234935 and make
        # the build system happier
        absMozillaSrcDir = path.abspath(
            path.join(sourceRepoName, mozillaSrcDir))
        run_cmd(['ln', '-sf', '../obj-l10n', absMozillaSrcDir])

    failed = []
    for l in locales:
        try:
            if generatePartials:
                for oldVersion in partialUpdates:
                    oldBuildNumber = partialUpdates[oldVersion]['buildNumber']
                    partialUpdates[oldVersion]['mar'] = retry(
                        downloadUpdateIgnore404,
                        args=(ftpServer, product, oldVersion, oldBuildNumber,
                              platform, l))
            checksums_file = repackLocale(
                locale=l,
                l10nRepoDir=l10nRepoDir,
                l10nBaseRepo=l10nBaseRepo,
                revision=revision,
                localeSrcDir=localeSrcDir,
                l10nIni=l10nIni,
                compareLocalesRepo=compareLocalesRepo,
                env=env,
                absObjdir=absObjdir,
                merge=merge,
                productName=product,
                platform=platform,
                version=version,
                partialUpdates=partialUpdates,
                buildNumber=buildNumber,
                stageServer=ftpServer,
                mozillaDir=mozillaDir,
                mozillaSrcDir=mozillaSrcDir)

            if balrog_submitter:
                # TODO: partials, after bug 797033 is fixed
                checksums = parseChecksumsFile(open(checksums_file).read())
                completeInfo = []
                partialInfo = []
                for f, info in checksums.iteritems():
                    if f.endswith('.complete.mar'):
                        completeInfo.append({
                            "size": info["size"],
                            "hash": info["hashes"][balrog_hash],
                        })
                    if f.endswith('.partial.mar'):
                        pathInfo = fileInfo(f, product.lower())
                        previousVersion = pathInfo["previousVersion"]
                        partialInfo.append({
                            "previousVersion": previousVersion,
                            "previousBuildNumber": partialUpdates[previousVersion]['buildNumber'],
                            "size": info["size"],
                            "hash": info["hashes"][balrog_hash],
                        })
                if not completeInfo:
                    raise Exception("Couldn't find complete mar info")
                retry(balrog_submitter.run,
                      kwargs={
                          'platform': platform,
                          'productName': product.capitalize(),
                          'appVersion': appVersion,
                          'version': version,
                          'build_number': buildNumber,
                          'locale': l,
                          'hashFunction': balrog_hash,
                          'extVersion': appVersion,
                          'buildID': buildid,
                          'completeInfo': completeInfo,
                          'partialInfo': partialInfo,
                      })
        except Exception, e:
            print_exc()
            failed.append((l, format_exc()))
Example #53
def merge_via_debugsetparents(dest, old_head, new_head, msg, user=None):
    """Merge 2 heads avoiding non-fastforward commits"""
    cmd = ['hg', 'debugsetparents', new_head, old_head]
    run_cmd(cmd, cwd=dest)
    commit(dest, msg=msg, user=user)
Example #54
def commit(dest, msg, user=None):
    cmd = ['hg', 'commit', '-m', msg]
    if user:
        cmd.extend(['-u', user])
    run_cmd(cmd, cwd=dest)
    return get_revision(dest)
Example #55
def reconfig():
    """reconfig the master in the cwd"""
    run_cmd(['python', RECONFIG_SCRIPT, 'reconfig', os.getcwd()])
Example #56
def repackLocale(locale,
                 l10nRepoDir,
                 l10nBaseRepo,
                 revision,
                 localeSrcDir,
                 l10nIni,
                 compareLocalesRepo,
                 env,
                 absObjdir,
                 merge=True,
                 productName=None,
                 platform=None,
                 version=None,
                 partialUpdates=None,
                 buildNumber=None,
                 stageServer=None):
    repo = "/".join([l10nBaseRepo, locale])
    localeDir = path.join(l10nRepoDir, locale)
    retry(mercurial, args=(repo, localeDir))
    update(localeDir, revision=revision)

    mozillaDir = ''
    if 'thunderbird' in productName:
        mozillaDir = 'mozilla/'

    # It's a bad assumption to make, but the source dir is currently always
    # one level above the objdir.
    absSourceRepoPath = path.split(absObjdir)[0]
    use_pymake = env.get("USE_PYMAKE", False)
    make = getMakeCommand(use_pymake, absSourceRepoPath)

    env["AB_CD"] = locale
    env["LOCALE_MERGEDIR"] = path.abspath(path.join(localeSrcDir, "merged"))
    if sys.platform.startswith('win'):
        if use_pymake:
            env["LOCALE_MERGEDIR"] = msys2windows(env["LOCALE_MERGEDIR"])
        else:
            env["LOCALE_MERGEDIR"] = windows2msys(env["LOCALE_MERGEDIR"])
    if sys.platform.startswith('darwin'):
        env["MOZ_PKG_PLATFORM"] = "mac"
    UPLOAD_EXTRA_FILES = []
    nativeDistDir = path.normpath(
        path.abspath(path.join(localeSrcDir, '../../%sdist' % mozillaDir)))
    posixDistDir = windows2msys(nativeDistDir)
    mar = '%s/host/bin/mar' % posixDistDir
    mbsdiff = '%s/host/bin/mbsdiff' % posixDistDir
    if platform.startswith('win'):
        mar += ".exe"
        mbsdiff += ".exe"
    current = '%s/current' % posixDistDir
    previous = '%s/previous' % posixDistDir
    updateDir = 'update/%s/%s' % (buildbot2ftp(platform), locale)
    updateAbsDir = '%s/%s' % (posixDistDir, updateDir)
    current_mar = '%s/%s-%s.complete.mar' % (updateAbsDir, productName,
                                             version)
    unwrap_full_update = '../../../tools/update-packaging/unwrap_full_update.pl'
    make_incremental_update = '../../tools/update-packaging/make_incremental_update.sh'
    prevMarDir = '../../../../'
    if mozillaDir:
        unwrap_full_update = '../../../../%stools/update-packaging/unwrap_full_update.pl' % mozillaDir
        make_incremental_update = '../../../%stools/update-packaging/make_incremental_update.sh' % mozillaDir
        prevMarDir = '../../../../../'
    env['MAR'] = mar
    env['MBSDIFF'] = mbsdiff

    log.info("Download mar tools")
    if stageServer:
        candidates_dir = makeCandidatesDir(productName,
                                           version,
                                           buildNumber,
                                           protocol="http",
                                           server=stageServer)
        if not path.isfile(msys2windows(mar)):
            marUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mar)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mar=path.basename(mar))
            run_cmd(['mkdir', '-p', path.dirname(mar)])
            log.info("Downloading %s to %s", marUrl, mar)
            urlretrieve(marUrl, msys2windows(mar))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mar])
        if not path.isfile(msys2windows(mbsdiff)):
            mbsdiffUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mbsdiff)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mbsdiff=path.basename(mbsdiff))
            run_cmd(['mkdir', '-p', path.dirname(mbsdiff)])
            log.info("Downloading %s to %s", mbsdiffUrl, mbsdiff)
            urlretrieve(mbsdiffUrl, msys2windows(mbsdiff))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mbsdiff])
    else:
        log.warning('stageServer not set. mar tools will *not* be downloaded.')

    compareLocales(compareLocalesRepo,
                   locale,
                   l10nRepoDir,
                   localeSrcDir,
                   l10nIni,
                   revision=revision,
                   merge=merge)
    run_cmd(make + ["installers-%s" % locale], cwd=localeSrcDir, env=env)

    # Our Windows-native rm from bug 727551 requires Windows-style paths
    run_cmd(['rm', '-rf', msys2windows(current)])
    run_cmd(['mkdir', current])
    run_cmd(['perl', unwrap_full_update, current_mar],
            cwd=path.join(nativeDistDir, 'current'),
            env=env)
    for oldVersion in partialUpdates:
        prevMar = partialUpdates[oldVersion]['mar']
        if prevMar:
            partial_mar_name = '%s-%s-%s.partial.mar' % (productName,
                                                         oldVersion, version)
            partial_mar = '%s/%s' % (updateAbsDir, partial_mar_name)
            UPLOAD_EXTRA_FILES.append('%s/%s' % (updateDir, partial_mar_name))
            # Our Windows-native rm from bug 727551 requires Windows-style paths
            run_cmd(['rm', '-rf', msys2windows(previous)])
            run_cmd(['mkdir', previous])
            run_cmd(
                ['perl', unwrap_full_update,
                 '%s/%s' % (prevMarDir, prevMar)],
                cwd=path.join(nativeDistDir, 'previous'),
                env=env)
            run_cmd(
                ['bash', make_incremental_update, partial_mar, previous, current],
                cwd=nativeDistDir, env=env)
            if os.environ.get('MOZ_SIGN_CMD'):
                run_cmd(
                    ['bash', '-c',
                     '%s -f mar -f gpg "%s"' % (os.environ['MOZ_SIGN_CMD'], partial_mar)],
                    env=env)
                UPLOAD_EXTRA_FILES.append('%s/%s.asc' %
                                          (updateDir, partial_mar_name))
        else:
            log.warning("Skipping partial MAR creation for %s %s" %
                        (oldVersion, locale))

    env['UPLOAD_EXTRA_FILES'] = ' '.join(UPLOAD_EXTRA_FILES)
    retry(run_cmd,
          args=(make + ["upload", "AB_CD=%s" % locale], ),
          kwargs={
              'cwd': localeSrcDir,
              'env': env
          })

    # Return the location of the checksums file, because consumers may want
    # some information about the files that were generated.
    # Some versions of make that we use (at least pymake) imply
    # --print-directory; we need to turn it off to avoid extra output that
    # messes up our parsing of the checksum file path.
    curdir = os.getcwd()
    try:
        os.chdir(localeSrcDir)
        relative_checksums = get_output(
            make + ["--no-print-directory", "echo-variable-CHECKSUM_FILE",
                    "AB_CD=%s" % locale],
            env=env).strip("\"'\n")
        return path.normpath(path.join(localeSrcDir, relative_checksums))
    finally:
        os.chdir(curdir)
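Because repackLocale() takes a long argument list, a call-site sketch may help. Everything below is hypothetical: the paths, URLs, versions, and server name are placeholders, and partialUpdates follows the {oldVersion: {'mar': ..., 'buildNumber': ...}} shape the function iterates over.

# Hypothetical call for a single locale; all values are placeholders.
checksums_file = repackLocale(
    locale='de',
    l10nRepoDir='l10n',
    l10nBaseRepo='https://hg.example.org/l10n-central',
    revision='default',
    localeSrcDir='/builds/obj-l10n/browser/locales',
    l10nIni='browser/locales/l10n.ini',
    compareLocalesRepo='https://hg.example.org/build/compare-locales',
    env=os.environ.copy(),
    absObjdir='/builds/obj-l10n',
    merge=True,
    productName='firefox',
    platform='linux64',
    version='45.0',
    partialUpdates={'44.0': {'mar': 'firefox-44.0.complete.mar', 'buildNumber': 1}},
    buildNumber=1,
    stageServer='stage.example.com')
print(checksums_file)  # absolute path of the generated checksums file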
Example #57
0
def do_tag(repo, tags):
    cmd = ['hg', 'tag', '-f', '-m', 'Automatic preproduction tag'] + tags
    run_cmd(cmd, cwd=repo)
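A hedged example of how do_tag() might be invoked; the repository path and tag names are made up for illustration.

# Hypothetical: force-apply two build tags to a local clone.
do_tag('mozilla-beta', ['FIREFOX_45_0b1_BUILD1', 'FIREFOX_45_0b1_RELEASE'])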
Example #58
0
def main():
    logging.basicConfig(format="%(asctime)s - %(message)s", level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument("--from-dir", default="mozilla-beta",
                        help="Working directory of repo to be merged from")
    parser.add_argument("--from-repo",
                        default="ssh://hg.mozilla.org/releases/mozilla-beta",
                        help="Repo to be merged from")
    parser.add_argument("--to-dir", default="mozilla-release",
                        help="Working directory of repo to be merged to")
    parser.add_argument(
        "--to-repo", default="ssh://hg.mozilla.org/releases/mozilla-release",
        help="Repo to be merged to")
    parser.add_argument("--hg-user", default="ffxbld <*****@*****.**>",
                        help="Mercurial username to be passed to hg -u")
    parser.add_argument("--remove-locale", dest="remove_locales", action="append",
                        required=True,
                        help="Locales to be removed from release shipped-locales")

    args = parser.parse_args()
    from_dir = args.from_dir
    to_dir = args.to_dir
    from_repo = args.from_repo
    to_repo = args.to_repo
    hg_user = args.hg_user

    with retrying(mercurial) as clone:
        for (d, repo) in ((from_dir, from_repo), (to_dir, to_repo)):
            clone(repo, d)
            log.info("Cleaning up %s...", d)
            strip_outgoing(d)
            update(d, branch="default")
    beta_rev = get_revision(from_dir)
    release_rev = get_revision(to_dir)

    now = datetime.datetime.now()
    date = now.strftime("%Y%m%d")
    # TODO: make this tag consistent with other branches
    release_base_tag = "RELEASE_BASE_" + date

    log.info("Tagging %s beta with %s", beta_rev, release_base_tag)
    tag(from_dir, tags=[release_base_tag], rev=beta_rev, user=hg_user,
        msg="Added %s tag for changeset %s. DONTBUILD CLOSED TREE a=release" %
        (release_base_tag, beta_rev))
    new_beta_rev = get_revision(from_dir)
    raw_input("Push mozilla-beta and hit Return")

    pull(from_dir, dest=to_dir)
    merge_via_debugsetparents(
        to_dir, old_head=release_rev, new_head=new_beta_rev, user=hg_user,
        msg="Merge old head via |hg debugsetparents %s %s|. "
        "CLOSED TREE DONTBUILD a=release" % (new_beta_rev, release_rev))

    replace(
        path.join(to_dir, "browser/confvars.sh"),
        "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-beta,firefox-mozilla-release",
        "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-release")
    replace(path.join(to_dir, "browser/confvars.sh"),
            "MAR_CHANNEL_ID=firefox-mozilla-beta",
            "MAR_CHANNEL_ID=firefox-mozilla-release")

    for d in branding_dirs:
        for f in branding_files:
            replace(
                path.join(to_dir, d, f),
                "ac_add_options --with-branding=mobile/android/branding/beta",
                "ac_add_options --with-branding=mobile/android/branding/official")

    if args.remove_locales:
        log.info("Removing locales: %s", args.remove_locales)
        remove_locales(path.join(to_dir, "browser/locales/shipped-locales"),
                       args.remove_locales)

    log.warn("Apply any manual changes, such as disabling features.")
    raw_input("Hit 'return' to display channel, branding, and feature diffs onscreen")
    run_cmd(["hg", "diff"], cwd=to_dir)
    raw_input("If the diff looks good hit return to commit those changes")
    commit(to_dir, user=hg_user,
           msg="Update configs. CLOSED TREE a=release ba=release")
    raw_input("Go ahead and push mozilla-release changes.")
Example #59
0
    def testForcedTag(self):
        run_cmd(['hg', 'tag', '-R', self.repodir, 'tag'])
        tag(self.repodir, ['tag'], force=True)
        self.assertTrue('tag' in getTags(self.repodir))
Example #60
0
    def testSimple(self):
        self.assertEquals(run_cmd(['true']), 0)