def bump_and_tag(repo, attempt, config, relbranch, revision, tags, defaultBranch):
    """Switch to (or create) `relbranch`, bump version files there if needed,
    and commit the bump.

    Reads `reponame` and `bumpFiles` from the enclosing scope (this is a
    retry callback nested inside tagRepo). Commit failures caused by having
    nothing to commit are ignored; anything else is re-raised.
    """
    relbranchChangesets = len(tags)
    defaultBranchChangesets = 0
    if relbranch in get_branches(reponame):
        update(reponame, revision=relbranch)
    else:
        # Branch doesn't exist yet: create it off the requested revision.
        update(reponame, revision=revision)
        run_cmd(['hg', 'branch', relbranch], cwd=reponame)
    if len(bumpFiles) > 0:
        # Bump files on the relbranch, if necessary
        bump(reponame, bumpFiles, 'version')
        # NOTE(review): every other command here uses cwd=reponame; `repo`
        # looks suspicious — confirm against the caller.
        run_cmd(['hg', 'diff'], cwd=repo)
        try:
            get_output(['hg', 'commit', '-u', config['hgUsername'], '-m',
                        getBumpCommitMessage(config['productName'], config['version'])],
                       cwd=reponame)
            relbranchChangesets += 1
            revision = get_revision(reponame)
        # Modern `as` form (works on Python 2.6+/3), consistent with the
        # other tests/helpers in this file.
        except subprocess.CalledProcessError as e:
            # We only want to ignore exceptions caused by having nothing to
            # commit, which are OK. We still want to raise exceptions caused
            # by any other thing.
            if e.returncode != 1 or "nothing changed" not in e.output:
                raise
def testOutputAttachedToError(self):
    """Older versions of CalledProcessError don't attach 'output' to
       themselves. This test is to ensure that get_output always does."""
    try:
        get_output(['bash', '-c', 'echo hello && false'])
    except subprocess.CalledProcessError as e:
        self.assertEquals(e.output, 'hello\n')
    else:
        # Without this guard the test silently passed when get_output
        # failed to raise at all (same pattern as the other timeout tests).
        self.fail("get_output did not raise CalledProcessError")
def bump_and_tag(repo, attempt, config, relbranch, revision, tags):
    """Switch to (or create) `relbranch` and commit a version-file bump there.

    Uses `reponame` and `bumpFiles` from the enclosing scope. "Nothing to
    commit" errors are tolerated; all other commit failures propagate.
    """
    # set relbranchChangesets=1 because tag() generates exactly 1 commit
    relbranchChangesets = 1
    if relbranch in get_branches(reponame):
        update(reponame, revision=relbranch)
    else:
        # Create the release branch off the requested revision.
        update(reponame, revision=revision)
        run_cmd(['hg', 'branch', relbranch], cwd=reponame)
    if len(bumpFiles) > 0:
        # Bump files on the relbranch, if necessary
        bump(reponame, bumpFiles, 'version')
        run_cmd(['hg', 'diff'], cwd=repo)
        try:
            get_output([
                'hg', 'commit', '-u', config['hgUsername'], '-m',
                getBumpCommitMessage(config['productName'], config['version'])
            ], cwd=reponame)
            relbranchChangesets += 1
        # Modern `as` form, consistent with the rest of the file.
        except subprocess.CalledProcessError as e:
            # We only want to ignore exceptions caused by having nothing to
            # commit, which are OK. We still want to raise exceptions caused
            # by any other thing.
            if e.returncode != 1 or "nothing changed" not in e.output:
                raise
def unbundle(bundle, dest):
    """Unbundles the bundle located at `bundle` into `dest`.

    `bundle` can be a local file or remote url. Returns True on success,
    False if `hg unbundle` fails."""
    try:
        get_output(['hg', 'unbundle', bundle], cwd=dest, include_stderr=True)
    except subprocess.CalledProcessError:
        return False
    return True
def tagRepo(config, repo, reponame, revision, tags, bumpFiles, relbranch,
            pushAttempts, defaultBranch='default'):
    """Clone `repo` and prepare a nested callback that bumps version files on
    `relbranch`, tags it, and bumps the next version on `defaultBranch`.
    """
    remote = make_hg_url(HG, repo)
    mercurial(remote, reponame)

    def bump_and_tag(repo, attempt, config, relbranch, revision, tags,
                     defaultBranch):
        # set relbranchChangesets=1 because tag() generates exactly 1 commit
        relbranchChangesets = 1
        defaultBranchChangesets = 0
        if relbranch in get_branches(reponame):
            update(reponame, revision=relbranch)
        else:
            update(reponame, revision=revision)
            run_cmd(['hg', 'branch', relbranch], cwd=reponame)
        if len(bumpFiles) > 0:
            # Bump files on the relbranch, if necessary
            bump(reponame, bumpFiles, 'version')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'], '-m',
                            getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                relbranchChangesets += 1
            # Modern `as` form (Python 2.6+/3 compatible), matching the
            # other except clauses in this file.
            except subprocess.CalledProcessError as e:
                # We only want to ignore exceptions caused by having nothing to
                # commit, which are OK. We still want to raise exceptions caused
                # by any other thing.
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise
        # We always want our tags pointing at the tip of the relbranch
        # so we need to grab the current revision after we've switched
        # branches and bumped versions.
        revision = get_revision(reponame)
        # Create the desired tags on the relbranch
        tag(repo, revision, tags, config['hgUsername'])
        # This is the bump of the version on the default branch
        # We do it after the other one in order to get the tip of the
        # repository back on default, thus avoiding confusion.
        if len(bumpFiles) > 0:
            update(reponame, revision=defaultBranch)
            bump(reponame, bumpFiles, 'nextVersion')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'], '-m',
                            getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                defaultBranchChangesets += 1
            except subprocess.CalledProcessError as e:
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise
def testTerminateAndGetOutput(self):
    """A timed-out command must raise, with stdout (but not stderr)
    captured in the exception's output."""
    expected_out = "this is just a test"
    expected_err = "this should go to the stderr"
    script = 'echo "%s" && echo "%s" 1>&2 && sleep 1 && true' % (expected_out,
                                                                 expected_err)
    try:
        get_output(['bash', '-c', script], timeout=0.5)
    except subprocess.CalledProcessError as error:
        self.assertTrue(expected_out in error.output)
        self.assertTrue(expected_err not in error.output)
    else:
        self.fail("get_output did not raise CalledProcessError")
def tagRepo(config, repo, reponame, revision, tags, bumpFiles, relbranch,
            pushAttempts, defaultBranch='default'):
    """Clone `repo` (with retries) and prepare a nested callback that bumps
    version files on `relbranch`, tags it, and bumps the next version on
    `defaultBranch`.
    """
    remote = make_hg_url(HG, repo)
    retry(mercurial, args=(remote, reponame))

    def bump_and_tag(repo, attempt, config, relbranch, revision, tags,
                     defaultBranch):
        relbranchChangesets = len(tags)
        defaultBranchChangesets = 0
        if relbranch in get_branches(reponame):
            update(reponame, revision=relbranch)
        else:
            update(reponame, revision=revision)
            run_cmd(['hg', 'branch', relbranch], cwd=reponame)
        if len(bumpFiles) > 0:
            # Bump files on the relbranch, if necessary
            bump(reponame, bumpFiles, 'version')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'], '-m',
                            getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                relbranchChangesets += 1
                revision = get_revision(reponame)
            # Modern `as` form, matching the rest of the file.
            except subprocess.CalledProcessError as e:
                # We only want to ignore exceptions caused by having nothing to
                # commit, which are OK. We still want to raise exceptions caused
                # by any other thing.
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise
        # Create the desired tags on the relbranch
        tag(repo, revision, tags, config['hgUsername'])
        # This is the bump of the version on the default branch
        # We do it after the other one in order to get the tip of the
        # repository back on default, thus avoiding confusion.
        if len(bumpFiles) > 0:
            update(reponame, revision=defaultBranch)
            bump(reponame, bumpFiles, 'nextVersion')
            run_cmd(['hg', 'diff'], cwd=repo)
            try:
                get_output(['hg', 'commit', '-u', config['hgUsername'], '-m',
                            getBumpCommitMessage(config['productName'], config['version'])],
                           cwd=reponame)
                defaultBranchChangesets += 1
            except subprocess.CalledProcessError as e:
                if e.returncode != 1 or "nothing changed" not in e.output:
                    raise
def testTerminateAndGetOutput(self):
    """Timeout must raise CalledProcessError carrying stdout only."""
    text_s = "this is just a test"
    text_e = "this should go to the stderr"
    cmd = ['bash', '-c',
           'echo "%s" && echo "%s" 1>&2 && sleep 1 && true' % (text_s, text_e)]
    raised = False
    try:
        get_output(cmd, timeout=0.5)
    except subprocess.CalledProcessError as error:
        raised = True
        self.assertTrue(text_s in error.output)
        self.assertTrue(text_e not in error.output)
    if not raised:
        self.fail("get_output did not raise CalledProcessError")
def get_buildbot_username_param():
    """Return the sendchange flag this buildbot accepts for the user name,
    chosen by inspecting `buildbot sendchange --help` output."""
    help_text = get_output(["buildbot", "sendchange", "--help"])
    return "--who" if "-W, --who=" in help_text else "--username"
def get_buildbot_username_param():
    """Probe `buildbot sendchange --help` and return whichever user-name
    option this installation supports."""
    sendchange_help = get_output(['buildbot', 'sendchange', '--help'])
    if "-W, --who=" not in sendchange_help:
        return "--username"
    return "--who"
def testInsaneOutputStreamTimeout(self):
    """Even with a large output stream, the timeout exception must still
    contain the early marker text."""
    if not has_urandom():
        raise SkipTest
    marker = "this is just a test"
    # create a huge output, check that text_s is in the raised exception
    # output.
    # tested with ~400 MB output: decreasing the size of the output to make
    # tests faster (terminates happens on time, but proc.communicate() can
    # take few seconds)
    shell = ('echo "%s" && dd if=/dev/urandom bs=4096 count=1000 2>&1 '
             '&& sleep 2 && true' % marker)
    try:
        get_output(['bash', '-c', shell], include_stderr=False, timeout=1.0)
    except subprocess.CalledProcessError as error:
        self.assertTrue(marker in error.output)
    else:
        self.fail("get_output did not raise CalledProcessError")
def getRevisions(dest):
    """Return the short node hashes of every changeset in repo `dest`."""
    log_output = get_output(["hg", "log", "-R", dest, "--template",
                             "{node|short}\n"])
    return [rev.strip() for rev in log_output.split("\n") if rev.strip()]
def getRevisions(dest):
    """List the short revision hashes present in the hg repo at `dest`."""
    revisions = []
    raw = get_output(['hg', 'log', '-R', dest, '--template',
                      '{node|short}\n'])
    for entry in raw.split('\n'):
        entry = entry.strip()
        if entry:
            revisions.append(entry)
    return revisions
def _run_hg(cls, command, repo=None, want_output=False):
    """Run hg (with the mq extension force-enabled) against `repo`,
    optionally capturing and returning its output."""
    invocation = [cls.hg, '--config', 'extensions.mq=']
    if repo:
        invocation += ['-R', repo]
    invocation += command
    if not want_output:
        run_cmd(invocation)
        return
    return get_output(invocation)
def downloadNightlyBuild(localeSrcDir, env):
    """Fetch and unpack the en-US nightly build, returning the parsed
    `make ident` key/value pairs as a dict."""
    for target in ("wget-en-US", "unpack"):
        run_cmd(["make", target], cwd=localeSrcDir, env=env)
    ident = get_output(["make", "ident"], cwd=localeSrcDir, env=env)
    info = {}
    for line in ident.splitlines():
        # Each ident line is expected to be "<key> <value>".
        key, value = line.rstrip().split()
        info[key] = value
    return info
def getRevisions(dest):
    """Return all short changeset hashes from the hg repo at `dest`."""
    stripped = (line.strip() for line in get_output(
        ['hg', 'log', '-R', dest, '--template', '{node|short}\n']).split('\n'))
    return [rev for rev in stripped if rev]
def getRevInfo(dest, rev):
    """Return {'user', 'msg', 'tags'} for changeset `rev` in repo `dest`.
    `tags` is empty when hg prints no tag line."""
    lines = get_output([
        'hg', 'log', '-R', dest, '-r', rev, '--template',
        '{author}\n{desc}\n{tags}'
    ]).splitlines()
    details = {'user': lines[0], 'msg': lines[1], 'tags': []}
    if len(lines) > 2:
        details['tags'] = lines[2].split()
    return details
def uploadLog(self, build):
    """Uploads the build log, and returns the URL to it"""
    builder = build.builder
    info = self.getBuildInfo(build)
    branch = info['branch']
    product = info['product'].lower()
    platform = info['platform']
    # Base arguments: 2 retries, 10s(?) timeout, and the status-db master
    # name from config. (Exact -r/-t semantics belong to log_uploader.py —
    # not visible here.)
    upload_args = [
        '-r', '2', '-t', '10', '--master-name',
        self.config['statusdb.master_name']
    ]
    if "nightly" in builder.name:
        upload_args.append("--nightly")
    # Release builders (outside release promotion) also pass
    # "<version>/<build_number>" after --release.
    if builder.name.startswith("release-") and \
            not info['release_promotion']:
        upload_args.append("--release")
        upload_args.append("%s/%s" % (info.get('version'),
                                      info.get('build_number')))
    if branch and 'try' in branch:
        upload_args.append("--try")
    elif branch == 'shadow-central':
        upload_args.append("--shadow")
    if 'l10n' in builder.name and not info['release_promotion']:
        upload_args.append("--l10n")
    if product:
        upload_args.extend(["--product", product])
    if platform:
        upload_args.extend(["--platform", platform])
    else:
        # Fall back to a generic platform tag.
        upload_args.extend(["--platform", 'noarch'])
    if branch:
        upload_args.extend(["--branch", branch])
    upload_args.extend(self.getUploadArgs(build, product))
    upload_args.extend([builder.basedir, str(build.number)])
    # log_uploader.py lives next to this module.
    my_dir = os.path.abspath(os.path.dirname(__file__))
    cmd = [sys.executable, "%s/log_uploader.py" % my_dir] + upload_args
    devnull = open(os.devnull)
    log.info("Running %s", cmd)
    output = get_output(cmd, stdin=devnull)
    # Look for URLs
    url = re.search("http(s)?://\S+", output)
    if url:
        return url.group()
    # No URL found in the uploader's output.
    return None
def getSVNrev(targetSVNDirectory):
    """ Return the svn revision """
    previous_cwd = os.getcwd()
    os.chdir(targetSVNDirectory)
    try:
        return get_output(["svnversion"])
    finally:
        # Always restore the caller's working directory.
        os.chdir(previous_cwd)
def uploadLog(self, build):
    """Uploads the build log, and returns the URL to it"""
    builder = build.builder
    info = self.getBuildInfo(build)
    branch = info['branch']
    product = info['product'].lower()
    platform = info['platform']
    # Base uploader flags plus the status-db master name from config.
    upload_args = ['-r', '2', '-t', '10', '--master-name',
                   self.config['statusdb.master_name']]
    if "nightly" in builder.name:
        upload_args.append("--nightly")
    # Release builders (outside release promotion) also pass
    # "<version>/<build_number>" after --release.
    if builder.name.startswith("release-") and \
            not info['release_promotion']:
        upload_args.append("--release")
        upload_args.append(
            "%s/%s" % (info.get('version'), info.get('build_number')))
    if branch and 'try' in branch:
        upload_args.append("--try")
    elif branch == 'shadow-central':
        upload_args.append("--shadow")
    if 'l10n' in builder.name and not info['release_promotion']:
        upload_args.append("--l10n")
    if product:
        upload_args.extend(["--product", product])
    if platform:
        upload_args.extend(["--platform", platform])
    else:
        # Fall back to a generic platform tag.
        upload_args.extend(["--platform", 'noarch'])
    if branch:
        upload_args.extend(["--branch", branch])
    upload_args.extend(self.getUploadArgs(build, product))
    upload_args.extend([builder.basedir, str(build.number)])
    # log_uploader.py lives next to this module.
    my_dir = os.path.abspath(os.path.dirname(__file__))
    cmd = [sys.executable, "%s/log_uploader.py" % my_dir] + upload_args
    devnull = open(os.devnull)
    log.info("Running %s", cmd)
    output = get_output(cmd, stdin=devnull)
    # Look for URLs
    url = re.search("http(s)?://\S+", output)
    if url:
        return url.group()
    # No URL found in the uploader's output.
    return None
def getRevInfo(dest, rev):
    """Fetch author, commit message and tags for `rev` in repo `dest`."""
    template = '{author}\n{desc}\n{tags}'
    out = get_output(['hg', 'log', '-R', dest, '-r', rev,
                      '--template', template]).splitlines()
    tags = out[2].split() if len(out) > 2 else []
    return {'user': out[0], 'msg': out[1], 'tags': tags}
def testInsaneOutputStreamTimeout(self):
    """The marker printed before a flood of output must survive into the
    timeout exception's captured output."""
    if not has_urandom():
        raise SkipTest
    text_s = "this is just a test"
    # create a huge output, check that text_s is in the raised exception
    # output.
    # tested with ~400 MB output: decreasing the size of the output to make
    # tests faster (terminates happens on time, but proc.communicate() can
    # take few seconds)
    cmd = ['bash', '-c',
           'echo "%s" && dd if=/dev/urandom bs=4096 count=1000 2>&1 '
           '&& sleep 2 && true' % text_s]
    raised = False
    try:
        get_output(cmd, include_stderr=False, timeout=1.0)
    except subprocess.CalledProcessError as error:
        raised = True
        self.assertTrue(text_s in error.output)
    if not raised:
        self.fail("get_output did not raise CalledProcessError")
def checkoutSVN(targetSVNDirectory, svnURL):
    """ Checkout the product details SVN """
    if not os.path.isdir(targetSVNDirectory):
        run_cmd(["svn", "co", svnURL, targetSVNDirectory])
    # Sanity check: the working copy must be clean.
    svnStatus = get_output(["svn", "status", targetSVNDirectory])
    if svnStatus:
        raise Exception("Uncommited changes: " + svnStatus)
def hg_ver():
    """Returns the current version of hg, as a tuple of
    (major, minor, build)"""
    ver_string = get_output(['hg', '-q', 'version'])
    match = re.search(r"\(version ([0-9.]+)\)", ver_string)
    if match:
        bits = match.group(1).split(".")
        # Pad to exactly three components. The old code did
        # `bits += (0,)`, which appends a single zero, so a one-component
        # version like "1" became (1, 0) instead of (1, 0, 0).
        while len(bits) < 3:
            bits.append(0)
        ver = tuple(int(b) for b in bits)
    else:
        # Couldn't parse the version banner at all.
        ver = (0, 0, 0)
    log.debug("Running hg version %s", ver)
    return ver
def get_hg_output(cmd, **kwargs):
    """
    Runs hg with the given arguments and sets HGPLAIN in the environment to
    enforce consistent output.
    Equivalent to:
        env = {}
        env['HGPLAIN'] = '1'
        return get_output(['hg'] + cmd, env=env, **kwargs)
    """
    env = kwargs.pop('env', {})
    env['HGPLAIN'] = '1'
    return get_output(['hg'] + cmd, env=env, **kwargs)
def getRevisions(dest, branches=None):
    """Return the unique commit SHAs of `dest` (a git repo), oldest first,
    optionally restricted to `branches`."""
    cmd = ['git', 'log', '--pretty=oneline']
    if branches:
        cmd.extend(branches)
    shas = []
    for line in get_output(cmd, cwd=dest).split('\n'):
        line = line.strip()
        if not line:
            continue
        # oneline format: "<sha> <subject>"
        sha, _subject = line.split(" ", 1)
        sha = sha.strip()
        if sha and sha not in shas:
            shas.append(sha)
    # git log is newest-first; callers get oldest-first.
    shas.reverse()
    return shas
def update(dest, branch=None, revision=None):
    """Updates working copy `dest` to `branch` or `revision`. If neither is
    set then the working copy will be updated to the latest revision on the
    current branch. Local changes will be discarded."""
    if revision is not None:
        # An explicit revision always wins.
        run_cmd(['hg', 'update', '-C', '-r', revision], cwd=dest)
        return get_revision(dest)
    # Otherwise update -C, naming the branch only when it differs from the
    # branch currently checked out.
    current_branch = get_output(['hg', 'branch'], cwd=dest).strip()
    cmd = ['hg', 'update', '-C']
    if branch and branch != current_branch:
        cmd.append(branch)
    run_cmd(cmd, cwd=dest)
    return get_revision(dest)
def get_hg_output(cmd, timeout=1800, **kwargs):
    """
    Runs hg with the given arguments and sets HGPLAIN in the environment to
    enforce consistent output.
    Equivalent to:
        env = {}
        env['HGPLAIN'] = '1'
        return get_output(['hg'] + cmd, env=env, **kwargs)
    """
    env = kwargs.pop('env', {})
    env['HGPLAIN'] = '1'
    try:
        return get_output(['hg'] + cmd, timeout=timeout, env=env, **kwargs)
    except subprocess.CalledProcessError:
        # because we always want hg debug info
        log.exception("Hit exception running hg:")
        raise
def out(src, remote, **kwargs):
    """Check for outgoing changesets present in a repo.

    Returns a list of (rev, branch) tuples, [] when hg reports nothing
    outgoing, or None when `src` does not exist. `kwargs` are translated
    by common_args() into extra hg options.
    """
    cmd = ['hg', '-q', 'out', '--template', '{node} {branches}\n']
    cmd.extend(common_args(**kwargs))
    cmd.append(remote)
    if os.path.exists(src):
        try:
            revs = []
            for line in get_output(cmd, cwd=src).rstrip().split("\n"):
                try:
                    rev, branch = line.split()
                # Mercurial displays no branch at all if the revision is on
                # "default"
                except ValueError:
                    rev = line.rstrip()
                    branch = "default"
                revs.append((rev, branch))
            return revs
        # Modern `as` form (Python 2.6+/3 compatible), matching the other
        # except clauses in this file.
        except subprocess.CalledProcessError as inst:
            # In some situations, some versions of Mercurial return "1"
            # if no changes are found, so we need to ignore this return code
            if inst.returncode == 1:
                return []
            raise
def repackLocale(locale, l10nRepoDir, l10nBaseRepo, revision, localeSrcDir,
                 l10nIni, compareLocalesRepo, env, absObjdir, merge=True,
                 productName=None, platform=None, version=None,
                 partialUpdates=None, buildNumber=None, stageServer=None):
    """Repackage a single locale: clone/update its l10n repo, run
    compare-locales, build the localized installers, then produce complete
    and partial MAR update files and upload everything.

    Returns the absolute, normalized path of the checksums file produced by
    the build. Heavy side effects: runs hg/make/perl/bash, downloads mar
    tools, and mutates `env` in place (AB_CD, LOCALE_MERGEDIR, MAR, MBSDIFF,
    UPLOAD_EXTRA_FILES, ...).
    """
    repo = "/".join([l10nBaseRepo, locale])
    localeDir = path.join(l10nRepoDir, locale)
    retry(mercurial, args=(repo, localeDir))
    update(localeDir, revision=revision)
    # Thunderbird keeps the mozilla platform code in a subdirectory.
    mozillaDir = ''
    if 'thunderbird' in productName:
        mozillaDir = 'mozilla/'
    # It's a bad assumption to make, but the source dir is currently always
    # one level above the objdir.
    absSourceRepoPath = path.split(absObjdir)[0]
    use_pymake = env.get("USE_PYMAKE", False)
    make = getMakeCommand(use_pymake, absSourceRepoPath)
    env["AB_CD"] = locale
    env["LOCALE_MERGEDIR"] = path.abspath(path.join(localeSrcDir, "merged"))
    if sys.platform.startswith('win'):
        # pymake wants Windows-style paths; GNU make under msys wants
        # msys-style paths.
        if use_pymake:
            env["LOCALE_MERGEDIR"] = msys2windows(env["LOCALE_MERGEDIR"])
        else:
            env["LOCALE_MERGEDIR"] = windows2msys(env["LOCALE_MERGEDIR"])
    if sys.platform.startswith('darwin'):
        env["MOZ_PKG_PLATFORM"] = "mac"
    UPLOAD_EXTRA_FILES = []
    nativeDistDir = path.normpath(
        path.abspath(path.join(localeSrcDir, '../../%sdist' % mozillaDir)))
    posixDistDir = windows2msys(nativeDistDir)
    mar = '%s/host/bin/mar' % posixDistDir
    mbsdiff = '%s/host/bin/mbsdiff' % posixDistDir
    if platform.startswith('win'):
        mar += ".exe"
        mbsdiff += ".exe"
    current = '%s/current' % posixDistDir
    previous = '%s/previous' % posixDistDir
    updateDir = 'update/%s/%s' % (buildbot2ftp(platform), locale)
    updateAbsDir = '%s/%s' % (posixDistDir, updateDir)
    current_mar = '%s/%s-%s.complete.mar' % (updateAbsDir, productName,
                                             version)
    # Relative paths into the tools checkout; deeper when the platform code
    # lives under mozillaDir (Thunderbird).
    unwrap_full_update = '../../../tools/update-packaging/unwrap_full_update.pl'
    make_incremental_update = '../../tools/update-packaging/make_incremental_update.sh'
    prevMarDir = '../../../../'
    if mozillaDir:
        unwrap_full_update = '../../../../%stools/update-packaging/unwrap_full_update.pl' % mozillaDir
        make_incremental_update = '../../../%stools/update-packaging/make_incremental_update.sh' % mozillaDir
        prevMarDir = '../../../../../'
    env['MAR'] = mar
    env['MBSDIFF'] = mbsdiff
    log.info("Download mar tools")
    if stageServer:
        candidates_dir = makeCandidatesDir(productName, version, buildNumber,
                                           protocol="http",
                                           server=stageServer)
        # Fetch mar/mbsdiff binaries only if they aren't already present.
        if not path.isfile(msys2windows(mar)):
            marUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mar)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mar=path.basename(mar))
            run_cmd(['mkdir', '-p', path.dirname(mar)])
            log.info("Downloading %s to %s", marUrl, mar)
            urlretrieve(marUrl, msys2windows(mar))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mar])
        if not path.isfile(msys2windows(mbsdiff)):
            mbsdiffUrl = "%(c_dir)s/mar-tools/%(platform)s/%(mbsdiff)s" % \
                dict(c_dir=candidates_dir, platform=platform,
                     mbsdiff=path.basename(mbsdiff))
            run_cmd(['mkdir', '-p', path.dirname(mbsdiff)])
            log.info("Downloading %s to %s", mbsdiffUrl, mbsdiff)
            urlretrieve(mbsdiffUrl, msys2windows(mbsdiff))
            if not sys.platform.startswith('win'):
                run_cmd(['chmod', '755', mbsdiff])
    else:
        log.warning('stageServer not set. mar tools will *not* be downloaded.')
    compareLocales(compareLocalesRepo, locale, l10nRepoDir, localeSrcDir,
                   l10nIni, revision=revision, merge=merge)
    run_cmd(make + ["installers-%s" % locale], cwd=localeSrcDir, env=env)
    # Our Windows-native rm from bug 727551 requires Windows-style paths
    run_cmd(['rm', '-rf', msys2windows(current)])
    run_cmd(['mkdir', current])
    # Unpack the freshly-built complete MAR into dist/current.
    run_cmd(['perl', unwrap_full_update, current_mar],
            cwd=path.join(nativeDistDir, 'current'), env=env)
    for oldVersion in partialUpdates:
        prevMar = partialUpdates[oldVersion]['mar']
        if prevMar:
            partial_mar_name = '%s-%s-%s.partial.mar' % (productName,
                                                         oldVersion, version)
            partial_mar = '%s/%s' % (updateAbsDir, partial_mar_name)
            UPLOAD_EXTRA_FILES.append('%s/%s' % (updateDir, partial_mar_name))
            # Our Windows-native rm from bug 727551 requires Windows-style
            # paths
            run_cmd(['rm', '-rf', msys2windows(previous)])
            run_cmd(['mkdir', previous])
            # Unpack the old complete MAR, then diff previous -> current.
            run_cmd(
                ['perl', unwrap_full_update, '%s/%s' % (prevMarDir, prevMar)],
                cwd=path.join(nativeDistDir, 'previous'), env=env)
            run_cmd([
                'bash', make_incremental_update, partial_mar, previous,
                current
            ], cwd=nativeDistDir, env=env)
            if os.environ.get('MOZ_SIGN_CMD'):
                run_cmd([
                    'bash', '-c',
                    '%s -f mar -f gpg "%s"' % (os.environ['MOZ_SIGN_CMD'],
                                               partial_mar)
                ], env=env)
                UPLOAD_EXTRA_FILES.append('%s/%s.asc' % (updateDir,
                                                         partial_mar_name))
        else:
            log.warning("Skipping partial MAR creation for %s %s" %
                        (oldVersion, locale))
    env['UPLOAD_EXTRA_FILES'] = ' '.join(UPLOAD_EXTRA_FILES)
    retry(run_cmd, args=(make + ["upload", "AB_CD=%s" % locale], ),
          kwargs={
              'cwd': localeSrcDir,
              'env': env
          })
    # return the location of the checksums file, because consumers may want
    # some information about the files that were generated.
    # Some versions of make that we use (at least pymake) imply --print-directory
    # We need to turn it off to avoid getting extra output that mess up our
    # parsing of the checksum file path.
    curdir = os.getcwd()
    try:
        os.chdir(localeSrcDir)
        relative_checksums = get_output(make + [
            "--no-print-directory", "echo-variable-CHECKSUM_FILE",
            "AB_CD=%s" % locale
        ], env=env).strip("\"'\n")
        return path.normpath(path.join(localeSrcDir, relative_checksums))
    finally:
        os.chdir(curdir)
def get_branches(path):
    """Return the branch names (including closed ones, via -c) of the hg
    repo at `path`."""
    listing = get_output(['hg', 'branches', '-c'], cwd=path)
    return [line.split()[0] for line in listing.splitlines()]
def get_branch(path):
    """Return the name of the branch currently checked out at `path`."""
    branch_name = get_output(['hg', 'branch'], cwd=path)
    return branch_name.strip()
def get_revision(path):
    """Returns which revision directory `path` currently has checked out."""
    cmd = ['hg', 'parent', '--template', '{node|short}']
    return get_output(cmd, cwd=path)
def testOutputTimeout(self):
    """A command killed by timeout must raise CalledProcessError whose
    output starts with the terminated-process marker."""
    try:
        get_output(['bash', '-c', 'sleep 5'], timeout=1)
    except subprocess.CalledProcessError as e:
        self.assertTrue(e.output.startswith(TERMINATED_PROCESS_MSG))
    else:
        # Previously the test passed silently if no exception was raised;
        # guard like the other timeout tests in this file.
        self.fail("get_output did not raise CalledProcessError")
def path(src, name='default'):
    """Returns the remote path associated with "name" """
    try:
        remote = get_output(['hg', 'path', name], cwd=src)
    except subprocess.CalledProcessError:
        # hg exits non-zero when the path isn't configured.
        return None
    return remote.strip()
def mercurial(repo, dest, branch=None, revision=None, update_dest=True,
              shareBase=DefaultShareBase, allowUnsharedLocalClones=False,
              clone_by_rev=False, mirrors=None, bundles=None,
              autoPurge=False):
    """Makes sure that `dest` has `revision` or `branch` checked out from
    `repo`.

    Do what it takes to make that happen, including possibly clobbering
    dest.

    If allowUnsharedLocalClones is True and we're trying to use the share
    extension but fail, then we will be able to clone from the shared repo
    to our destination. If this is False, the default, then if we don't have
    the share extension we will just clone from the remote repository.

    If `clone_by_rev` is True, use 'hg clone -r <rev>' instead of 'hg
    clone'. This is slower, but useful when cloning repos with lots of
    heads.

    If `mirrors` is set, will try and use the mirrors before `repo`.

    If `bundles` is set, will try and download the bundle first and
    unbundle it instead of doing a full clone. If successful, will pull in
    new revisions from mirrors or the master repo. If unbundling fails, will
    fall back to doing a regular clone from mirrors or the master repo.
    """
    dest = os.path.abspath(dest)
    if shareBase is DefaultShareBase:
        # Sentinel default: pick the share base up from the environment.
        shareBase = os.environ.get("HG_SHARE_BASE_DIR", None)
    log.info("Reporting hg version in use")
    cmd = ['hg', '-q', 'version']
    run_cmd(cmd, cwd='.')
    if shareBase:
        # Check that 'hg share' works
        try:
            log.info("Checking if share extension works")
            output = get_output(['hg', 'help', 'share'], dont_log=True)
            if 'no commands defined' in output:
                # Share extension is enabled, but not functional
                log.info("Disabling sharing since share extension doesn't seem to work (1)")
                shareBase = None
            elif 'unknown command' in output:
                # Share extension is disabled
                log.info("Disabling sharing since share extension doesn't seem to work (2)")
                shareBase = None
        except subprocess.CalledProcessError:
            # The command failed, so disable sharing
            log.info("Disabling sharing since share extension doesn't seem to work (3)")
            shareBase = None

    # Check that our default path is correct
    if os.path.exists(os.path.join(dest, '.hg')):
        hgpath = path(dest, "default")
        # Make sure that our default path is correct
        if hgpath != _make_absolute(repo):
            log.info("hg path isn't correct (%s should be %s); clobbering",
                     hgpath, _make_absolute(repo))
            remove_path(dest)

    # If the working directory already exists and isn't using share we update
    # the working directory directly from the repo, ignoring the sharing
    # settings
    if os.path.exists(dest):
        if not os.path.exists(os.path.join(dest, ".hg")):
            log.warning("%s doesn't appear to be a valid hg directory; "
                        "clobbering", dest)
            remove_path(dest)
        elif not os.path.exists(os.path.join(dest, ".hg", "sharedpath")):
            try:
                if autoPurge:
                    purge(dest)
                return pull(repo, dest, update_dest=update_dest,
                            branch=branch, revision=revision, mirrors=mirrors)
            except subprocess.CalledProcessError:
                log.warning("Error pulling changes into %s from %s; "
                            "clobbering", dest, repo)
                log.debug("Exception:", exc_info=True)
                remove_path(dest)

    # If that fails for any reason, and sharing is requested, we'll try to
    # update the shared repository, and then update the working directory
    # from that.
    if shareBase:
        sharedRepo = os.path.join(shareBase, get_repo_path(repo))
        dest_sharedPath = os.path.join(dest, '.hg', 'sharedpath')

        if os.path.exists(sharedRepo):
            hgpath = path(sharedRepo, "default")
            # Make sure that our default path is correct
            if hgpath != _make_absolute(repo):
                log.info("hg path isn't correct (%s should be %s); "
                         "clobbering", hgpath, _make_absolute(repo))
                # we need to clobber both the shared checkout and the dest,
                # since hgrc needs to be in both places
                remove_path(sharedRepo)
                remove_path(dest)

        if os.path.exists(dest_sharedPath):
            # Make sure that the sharedpath points to sharedRepo
            dest_sharedPath_data = os.path.normpath(
                open(dest_sharedPath).read())
            norm_sharedRepo = os.path.normpath(
                os.path.join(sharedRepo, '.hg'))
            if dest_sharedPath_data != norm_sharedRepo:
                # Clobber!
                log.info("We're currently shared from %s, but are being "
                         "requested to pull from %s (%s); clobbering",
                         dest_sharedPath_data, repo, norm_sharedRepo)
                remove_path(dest)

        try:
            log.info("Updating shared repo")
            # Recursive call with shareBase=None so the shared repo itself is
            # a plain clone/pull, never updated in place.
            mercurial(repo, sharedRepo, branch=branch, revision=revision,
                      update_dest=False, shareBase=None,
                      clone_by_rev=clone_by_rev, mirrors=mirrors,
                      bundles=bundles, autoPurge=False)
            if os.path.exists(dest):
                if autoPurge:
                    purge(dest)
                return update(dest, branch=branch, revision=revision)

            try:
                log.info("Trying to share %s to %s", sharedRepo, dest)
                return share(sharedRepo, dest, branch=branch,
                             revision=revision)
            except subprocess.CalledProcessError:
                if not allowUnsharedLocalClones:
                    # Re-raise the exception so it gets caught below.
                    # We'll then clobber dest, and clone from original repo
                    raise

                log.warning("Error calling hg share from %s to %s;"
                            "falling back to normal clone from shared repo",
                            sharedRepo, dest)
                # Do a full local clone first, and then update to the
                # revision we want
                # This lets us use hardlinks for the local clone if the OS
                # supports it
                clone(sharedRepo, dest, update_dest=False, mirrors=mirrors,
                      bundles=bundles)
                return update(dest, branch=branch, revision=revision)
        except subprocess.CalledProcessError:
            log.warning(
                "Error updating %s from sharedRepo (%s): ",
                dest, sharedRepo)
            log.debug("Exception:", exc_info=True)
            remove_path(dest)
    # end if shareBase

    if not os.path.exists(os.path.dirname(dest)):
        os.makedirs(os.path.dirname(dest))

    # Share isn't available or has failed, clone directly from the source
    return clone(repo, dest, branch, revision, update_dest=update_dest,
                 mirrors=mirrors, bundles=bundles, clone_by_rev=clone_by_rev)
def getTags(dest):
    """Return every tag name defined in the hg repo at `dest`."""
    listing = get_output(['hg', 'tags', '-R', dest])
    return [line.split()[0] for line in listing.splitlines()]
def testNoStdErr(self):
    """stderr is not captured by default."""
    result = get_output(['bash', '-c', 'echo hello 1>&2'])
    self.assertEquals(result, '')
def testStdErr(self):
    """With include_stderr=True, stderr text shows up in the result."""
    result = get_output(['bash', '-c', 'echo hello 1>&2'],
                        include_stderr=True)
    self.assertEquals(result, 'hello\n')
def testOutput(self):
    """Plain stdout is captured and returned."""
    result = get_output(['echo', 'hello'])
    self.assertEquals(result, 'hello\n')