def create(self):
    """
    Initialize .git through ``git init-db`` or ``git-clone``.

    Three modes, selected by configuration:
    - parent_repo set: shared clone into 'tmp', move its .git here,
      then soft-reset to the branch point;
    - repository + branch_name set: single-repository mode, create a
      new branch ref from the branch point;
    - otherwise: plain ``git init-db`` in the basedir.
    """
    from os import renames, mkdir
    from os.path import join, exists

    # Nothing to do when the metadata directory is already in place
    if exists(join(self.basedir, self.METADIR)):
        return

    if self.parent_repo:
        # Shared clone (-n: no checkout) into a temporary subdir
        cmd = self.command("clone", "--shared", "-n", self.parent_repo, 'tmp')
        clone = GitExternalCommand(self, cwd=self.basedir, command=cmd)
        clone.execute()
        if clone.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %s" % (str(clone), clone.exit_status))

        # Keep only the .git directory of the clone, moved into basedir
        renames(join(self.basedir, 'tmp', '.git'), join(self.basedir, '.git'))

        # Point HEAD at the branch point without touching the work tree
        cmd = self.command("reset", "--soft", self.branch_point)
        reset = GitExternalCommand(self, cwd=self.basedir, command=cmd)
        reset.execute()
        if reset.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %s" % (str(reset), reset.exit_status))

    elif self.repository and self.branch_name:
        # ...and exists(self.storagedir) ?

        # initialization of a new branch in single-repository mode
        mkdir(join(self.basedir, self.METADIR))

        # Resolve the branch point and seed index and branch ref from it
        bp = self.runCommand(['rev-parse', self.branch_point])[0]
        self.runCommand(['read-tree', bp])
        self.runCommand(['update-ref', self.branch_name, bp])
        #self.runCommand(['checkout-index'])

    else:
        if exists(join(self.basedir, self.storagedir)):
            raise TargetInitializationFailure(
                "Repository %s already exists - "
                "did you forget to set \"branch\" parameter ?" % self.storagedir)

        self.runCommand(['init-db'])
        if self.repository:
            # in this mode, the db is not stored in working dir, so we
            # may have to create .git ourselves
            try:
                mkdir(join(self.basedir, self.METADIR))
            except OSError:
                # already there: that's not a problem
                pass
def create(self):
    """
    Create a new monotone DB, storing the commit keys, if available.

    Does nothing when no repository is configured or the db file already
    exists. After ``mtn db init``, either an explicit keyid is used for
    commits, or a new key is generated (keygenid); lacking both is a
    configuration error.
    """
    if not self.repository or exists(self.repository):
        return

    cmd = self.command("db", "init", "--db", self.repository)
    init = ExternalCommand(command=cmd)
    init.execute(stdout=PIPE, stderr=PIPE)

    if init.exit_status:
        raise TargetInitializationFailure("Was not able to initialize "
                                          "the monotone db at %r"
                                          % self.repository)

    if self.keyid:
        # Explicit key from the keystore: trust it, nothing to generate
        self.log.info("Using key %s for commits" % (self.keyid,))
    else:
        # keystore key id unspecified, look at other options
        if self.keygenid:
            # NOTE(review): assumes keys live under $HOME/.monotone/keys —
            # confirm against the monotone version in use
            keyfile = join(getenv("HOME"), '.monotone', 'keys', self.keygenid)
            if exists(keyfile):
                self.log.info("Key %s exist, don't genkey again"
                              % self.keygenid)
            else:
                # requested a new key
                cmd = self.command("genkey", "--db", self.repository)
                regkey = ExternalCommand(command=cmd)
                if self.passphrase:
                    # genkey asks for the passphrase twice on stdin
                    passp = "%s\n%s\n" % (self.passphrase, self.passphrase)
                else:
                    passp = None
                regkey.execute(self.keygenid, input=passp,
                               stdout=PIPE, stderr=PIPE)
                if regkey.exit_status:
                    raise TargetInitializationFailure(
                        "Was not able to setup "
                        "the monotone initial key at %r"
                        % self.repository)
        else:
            raise TargetInitializationFailure("Can't setup the monotone "
                                              "repository %r. "
                                              "A keyid or keygenid "
                                              "must be provided."
                                              % self.repository)
def setupTagsDirectory(self):
    """
    Ensure the repository's tags directory exists, creating it on demand.

    The outcome is cached in ``self._setupTagsDirectory`` so the remote
    checks run at most once. Returns True when the tags directory is
    usable, False when no module other than '/' is configured.

    Raises TargetInitializationFailure when the directory cannot be
    created.
    """
    if self._setupTagsDirectory is None:  # idiom fix: was `== None`
        self._setupTagsDirectory = False

        if self.module and self.module != '/':  # idiom fix: was `<>`
            # Check the existing tags directory
            cmd = self.command("ls")
            svnls = ExternalCommand(command=cmd)
            svnls.execute(self.repository + self.tags_path)
            if svnls.exit_status:
                # create it, if not exist
                cmd = self.command("mkdir", "-m",
                                   "This directory will host the tags")
                svnmkdir = ExternalCommand(command=cmd)
                svnmkdir.execute(self.repository + self.tags_path)
                if svnmkdir.exit_status:
                    raise TargetInitializationFailure(
                        "Was not able to create tags directory '%s'"
                        % self.tags_path)
            else:
                self.log.debug("Directory '%s' already exists"
                               % self.tags_path)
            self._setupTagsDirectory = True
        else:
            self.log.debug("Tags needs module setup other than '/'")

    return self._setupTagsDirectory
def __checkout_initial_revision(self, fqrev, root, destdir):
    """
    Fetch the fully qualified revision *fqrev* into *destdir*,
    creating the *root* directory first when it does not exist.

    Raises TargetInitializationFailure when the get command fails,
    embedding the command's stderr in the message.
    """
    if not os.path.exists(root):
        os.makedirs(root)

    getcmd = self.repository.command("get", "--no-pristine", fqrev, destdir)
    getter = ExternalCommand(cwd=root, command=getcmd)
    out, err = getter.execute(stdout=PIPE, stderr=PIPE)

    if getter.exit_status:
        diagnostic = err.read()
        raise TargetInitializationFailure(
            "%s returned status %d saying\n%s"
            % (str(getter), getter.exit_status, diagnostic))
def create(self):
    """
    Create a local CVS repository.

    Only ``:local:`` and absolute-path repositories can be created;
    remote ones are silently skipped. When a module is configured, an
    initial ``cvs import`` is performed from a throwaway working copy.

    Raises TargetInitializationFailure on any cvs failure.
    """
    from os import rmdir, makedirs
    from tempfile import mkdtemp
    from os.path import join, exists

    if self.repository.startswith(':local:'):
        path = self.repository[7:]
    elif self.repository.startswith('/'):
        path = self.repository
    else:
        # Remote repository: nothing we can create from here
        return

    if exists(join(path, 'CVSROOT')):
        # Already initialized
        return

    makedirs(path)
    cmd = self.command("-f", "-d", path, "init")
    c = ExternalCommand(command=cmd)
    c.execute()
    if c.exit_status:
        # Bug fix: `path` was passed as a second exception argument
        # instead of being %-interpolated into the message.
        raise TargetInitializationFailure(
            "Could not create CVS repository at %r" % path)

    if self.module:
        # Import an empty module from a temporary working directory
        tempwc = mkdtemp('cvs', 'tailor')
        cmd = self.command("-f", "-d", path, "import",
                           "-m", "This directory will host the "
                           "upstream sources",
                           self.module, "tailor", "start")
        c = ExternalCommand(cwd=tempwc, command=cmd)
        c.execute()
        rmdir(tempwc)
        if c.exit_status:
            raise TargetInitializationFailure(
                "Could not create initial module")
def _initializeWorkingDir(self):
    """
    Add the given directory to an already existing svn working tree.

    Raises TargetInitializationFailure when the basedir is not already
    an SVN working copy.
    """
    from os.path import exists, join

    metadir = join(self.repository.basedir, self.repository.METADIR)
    if not exists(metadir):
        raise TargetInitializationFailure(
            "'%s' needs to be an SVN working copy already under SVN"
            % self.repository.basedir)

    SynchronizableTargetWorkingDir._initializeWorkingDir(self)
def __initial_revision(self, revision):
    """
    Resolve the symbolic revision 'HEAD' or 'INITIAL' into a concrete
    one by querying the archive's revision list, and return it as a
    fully qualified 'repository/module--revision' string.

    Any other revision value is passed through unchanged.
    """
    fqversion = '/'.join([self.repository.repository,
                          self.repository.module])
    if revision in ['HEAD', 'INITIAL']:
        cmd = self.repository.command("revisions")
        if revision == 'HEAD':
            # presumably "-r" reverses the listing so the first line is
            # the newest revision — TODO confirm against the tool's docs
            cmd.append("-r")
        cmd.append(fqversion)
        c = ExternalCommand(command=cmd)
        out, err = c.execute(stdout=PIPE, stderr=PIPE)
        if c.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %d saying\n%s"
                % (str(c), c.exit_status, err.read()))
        # Take the first listed revision (oldest for INITIAL,
        # newest for HEAD given the "-r" above)
        revision = out.readline().strip()
    return '--'.join([fqversion, revision])
def _initializeWorkingDir(self):
    """
    Verify the monotone working copy is in place.

    Tailor does not create it here: the user must set up a monotone
    working directory (or have the config file drive its creation), so
    that a plain 'mtn commit' finds database and branch in the _MTN
    directory.
    """
    mtn_meta = join(self.repository.basedir, '_MTN')
    if not exists(mtn_meta):
        raise TargetInitializationFailure("Please setup '%s' as a "
                                          "monotone working directory"
                                          % self.repository.basedir)

    SynchronizableTargetWorkingDir._initializeWorkingDir(self)
def create(self):
    """
    Execute ``cg init``.

    A no-op when the metadata directory already exists; raises
    TargetInitializationFailure when the command fails.
    """
    from os.path import join, exists

    if exists(join(self.basedir, self.METADIR)):
        return

    init = ExternalCommand(cwd=self.basedir,
                           command=self.command("init", "-I"))
    init.execute()

    if init.exit_status:
        raise TargetInitializationFailure("%s returned status %s"
                                          % (str(init), init.exit_status))
def _initializeWorkingDir(self):
    """
    Verify that the basedir is an ArX working copy.

    The user must have prepared it beforehand; we then rely on plain
    'arx commit', which reads archive and branch from the _arx
    directory.
    """
    from os.path import exists, join

    arx_meta = join(self.repository.basedir, '_arx')
    if not exists(arx_meta):
        raise TargetInitializationFailure(
            "Please setup '%s' as an ArX working directory"
            % self.repository.basedir)

    SynchronizableTargetWorkingDir._initializeWorkingDir(self)
def create(self):
    """
    Initialize the repository in the working directory by executing
    ``cdv init`` there, unless the metadata directory already exists.

    Raises TargetInitializationFailure when the command fails.
    """
    # Cleanup: `normpath` was imported but never used.
    from os.path import join, exists

    if exists(join(self.basedir, self.METADIR)):
        return

    init = ExternalCommand(cwd=self.basedir, command=self.command("init"))
    init.execute()

    if init.exit_status:
        raise TargetInitializationFailure(
            "%s returned status %s" % (str(init), init.exit_status))
def create(self):
    """
    Run ``darcs initialize`` in the basedir, then augment the 'boring'
    prefs file so darcs skips other VCs' metadirs, tailor's own log
    file and its state file.
    """
    from vcpx.dualwd import IGNORED_METADIRS
    from os.path import join

    cmd = self.command("initialize")
    init = ExternalCommand(cwd=self.basedir, command=cmd)
    init.execute()

    if init.exit_status:
        raise TargetInitializationFailure(
            "%s returned status %s" % (str(init), init.exit_status))

    metadir = join(self.basedir, '_darcs')
    prefsdir = join(metadir, 'prefs')
    # NOTE(review): `prefsname` is computed but never used below
    prefsname = join(prefsdir, 'prefs')
    boringname = join(prefsdir, 'boring')

    # Read the regexps darcs already considers boring
    boring = open(boringname, 'rU')
    ignored = boring.read().rstrip().split('\n')
    boring.close()

    # Augment the boring file, that contains a regexp per line
    # with all known VCs metadirs to be skipped.
    ignored.extend(['(^|/)%s($|/)' % re.escape(md)
                    for md in IGNORED_METADIRS])

    # Eventually omit our own log...
    logfile = self.projectref().logfile
    if logfile.startswith(self.basedir):
        ignored.append('^%s$' % re.escape(logfile[len(self.basedir)+1:]))

    # ... and state file
    sfname = self.projectref().state_file.filename
    if sfname.startswith(self.basedir):
        sfrelname = sfname[len(self.basedir)+1:]
        ignored.append('^%s$' % re.escape(sfrelname))
        ignored.append('^%s$' % re.escape(sfrelname+'.old'))
        ignored.append('^%s$' % re.escape(sfrelname+'.journal'))

    # Write the augmented list back
    boring = open(boringname, 'w')
    boring.write('\n'.join(ignored))
    boring.write('\n')
    boring.close()
def _checkoutUpstreamRevision(self, revision):
    """
    Concretely do the checkout of the FIRST upstream revision.

    Performs 'mtn co' unless a _MTN working dir is already in place,
    then returns a MonotoneChangeset describing the checked out
    revision.
    """
    effrev = self._convert_head_initial(self.repository.repository,
                                        self.repository.module, revision,
                                        self.repository.rootdir)
    if not exists(join(self.repository.basedir, '_MTN')):
        # actually check out the revision
        self.log.info("Checking out a working copy")
        if self.shared_basedirs:
            # checkout happens in place, relative to the basedir itself
            basedir = '.'
            cwd = self.repository.basedir
        else:
            # checkout creates the basedir under the root directory
            basedir = self.repository.basedir
            cwd = self.repository.rootdir
        cmd = self.repository.command("co",
                                      "--db", self.repository.repository,
                                      "--revision", effrev,
                                      "--branch", self.repository.module,
                                      basedir)
        mtl = ExternalCommand(cwd=cwd, command=cmd)
        mtl.execute(stdout=PIPE, stderr=PIPE)
        if mtl.exit_status:
            raise TargetInitializationFailure(
                "'mtn co' returned status %s" % mtl.exit_status)
    else:
        self.log.debug("%r already exists, assuming it's a monotone "
                       "working dir already populated",
                       self.repository.basedir)

    # Ok, now the workdir contains the checked out revision. We
    # need to return a changeset describing it. Since this is the
    # first revision checked out, we don't have a (linearized)
    # ancestor, so we must use None as the lin_ancestor parameter
    chset = MonotoneChangeset(None, effrev)

    # now we update the new chset with basic data - without the
    # linearized ancestor, changeset entries will NOT be filled
    mtr = MonotoneRevToCset(repository=self.repository,
                            working_dir=self.repository.basedir,
                            branch=self.repository.module)
    mtr.updateCset(chset)

    return chset
def _checkoutUpstreamRevision(self, revision):
    """
    Concretely do the checkout of the upstream revision.

    First validates that the configured URL is really the repository
    root (unless trust_root is set), then checks out the requested
    revision if no working copy exists yet, and finally returns the
    changeset the working copy is at.
    """
    from os.path import join, exists

    # Verify that the we have the root of the repository: do that
    # iterating an "svn ls" over the hierarchy until one fails
    lastok = self.repository.repository
    if not self.repository.trust_root:
        # Use --non-interactive, so that it fails if credentials
        # are needed.
        cmd = self.repository.command("ls", "--non-interactive")
        svnls = ExternalCommand(command=cmd)

        # First verify that we have a valid repository
        svnls.execute(self.repository.repository)
        if svnls.exit_status:
            lastok = None
        else:
            # Then verify it really points to the root of the
            # repository: this is needed because later the svn log
            # parser needs to know the "offset".

            reporoot = lastok[:lastok.rfind('/')]

            # Even if it would be enough asserting that the uplevel
            # directory is not a repository, find the real root to
            # suggest it in the exception. But don't go too far, that
            # is, stop when you hit schema://...
            while '//' in reporoot:
                svnls.execute(reporoot)
                if svnls.exit_status:
                    break
                lastok = reporoot
                reporoot = reporoot[:reporoot.rfind('/')]

    if lastok is None:
        raise ConfigurationError(
            "%r is not the root of a svn repository." %
            self.repository.repository)
    elif lastok <> self.repository.repository:
        # The configured URL points below the root: tell the user how
        # to split it into repository and module
        module = self.repository.repository[len(lastok):]
        module += self.repository.module
        raise ConfigurationError(
            "Non-root svn repository %r. "
            "Please specify that as 'repository=%s' "
            "and 'module=%s'."
            % (self.repository.repository, lastok, module.rstrip('/')))

    if revision == 'INITIAL':
        # Find the first revision of the module via "svn log"
        initial = True
        cmd = self.repository.command("log", "--verbose", "--xml",
                                      "--non-interactive",
                                      "--stop-on-copy",
                                      "--revision", "1:HEAD")
        if self.repository.use_limit:
            cmd.extend(["--limit", "1"])
        svnlog = ExternalCommand(command=cmd)
        out, err = svnlog.execute("%s%s" % (self.repository.repository,
                                            self.repository.module),
                                  stdout=PIPE, stderr=PIPE)

        if svnlog.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %d saying\n%s"
                % (str(svnlog), svnlog.exit_status, err.read()))

        csets = changesets_from_svnlog(out, self.repository)
        last = csets.next()
        revision = last.revision
    else:
        initial = False

    if not exists(join(self.repository.basedir,
                       self.repository.METADIR)):
        self.log.debug("Checking out a working copy")
        cmd = self.repository.command("co", "--quiet")
        if self.repository.ignore_externals:
            cmd.append("--ignore-externals")
        cmd.extend(["--revision", revision])
        svnco = ExternalCommand(command=cmd)

        out, err = svnco.execute("%s%s@%s" % (self.repository.repository,
                                              self.repository.module,
                                              revision),
                                 self.repository.basedir,
                                 stdout=PIPE, stderr=PIPE)
        if svnco.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %s saying\n%s"
                % (str(svnco), svnco.exit_status, err.read()))
    else:
        self.log.debug("%r already exists, assuming it's "
                       "a svn working dir", self.repository.basedir)

    if not initial:
        # Ask svn log about the revision the working copy is at, to
        # build the changeset to return
        if revision == 'HEAD':
            revision = 'COMMITTED'
        cmd = self.repository.command("log", "--verbose", "--xml",
                                      "--non-interactive",
                                      "--revision", revision)
        svnlog = ExternalCommand(cwd=self.repository.basedir,
                                 command=cmd)
        out, err = svnlog.execute(stdout=PIPE, stderr=PIPE)

        if svnlog.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %d saying\n%s"
                % (str(svnlog), svnlog.exit_status, err.read()))

        csets = changesets_from_svnlog(out, self.repository)
        last = csets.next()

    self.log.debug("Working copy up to svn revision %s", last.revision)

    return last
def _checkoutUpstreamRevision(self, revision):
    """
    Concretely do the checkout of the upstream revision and return
    the last applied changeset.

    'INITIAL' is resolved to the hash of the very first darcs patch;
    an explicit hash is used as-is. The tree is populated either via
    initialize+pull (slow, but required for darcs 2 / shared basedirs /
    pre-existing basedir) or via the faster 'darcs get'.
    """
    from os.path import join, exists
    from os import mkdir
    from vcpx.source import InvocationError

    if not self.repository.repository:
        raise InvocationError("Must specify a the darcs source repository")

    if revision == 'INITIAL' or self.is_hash_rx.match(revision):
        initial = True
        if revision == 'INITIAL':
            # Fetch the oldest patch to learn its hash
            cmd = self.repository.command("changes", "--xml-output",
                                          "--repo",
                                          self.repository.repository,
                                          "--reverse")
            changes = ExternalCommand(command=cmd)
            output = changes.execute(stdout=PIPE)[0]

            if changes.exit_status:
                raise ChangesetApplicationFailure(
                    "%s returned status %d saying\n%s"
                    % (str(changes), changes.exit_status,
                       output and output.read() or ''))

            csets = changesets_from_darcschanges(
                output,
                replace_badchars=self.repository.replace_badchars)
            try:
                changeset = csets.next()
            except StopIteration:
                # No changesets, no party!
                return None

            revision = 'hash %s' % changeset.darcs_hash
        else:
            revision = 'hash %s' % revision
    else:
        initial = False

    # Darcs 2.0 fails with "darcs get --to-match", see issue885
    darcs2 = self.repository.darcs_version.startswith('2')

    if darcs2 or self.repository.subdir == '.' \
             or exists(self.repository.basedir):
        # This is currently *very* slow, compared to the darcs get
        # below!
        if not exists(join(self.repository.basedir, '_darcs')):
            if not exists(self.repository.basedir):
                mkdir(self.repository.basedir)

            cmd = self.repository.command("initialize")
            init = ExternalCommand(cwd=self.repository.basedir,
                                   command=cmd)
            init.execute()

            if init.exit_status:
                raise TargetInitializationFailure(
                    "%s returned status %s"
                    % (str(init), init.exit_status))

        cmd = self.repository.command("pull", "--all", "--quiet")
        if revision and revision<>'HEAD':
            # --match for hash-based revisions, --tag otherwise
            cmd.extend([initial and "--match" or "--tag", revision])
        dpull = ExternalCommand(cwd=self.repository.basedir, command=cmd)
        output = dpull.execute(self.repository.repository,
                               stdout=PIPE, stderr=STDOUT)[0]

        if dpull.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %d saying\n%s"
                % (str(dpull), dpull.exit_status, output.read()))
    else:
        # Use much faster 'darcs get'
        cmd = self.repository.command("get", "--quiet")
        if revision and revision<>'HEAD':
            cmd.extend([initial and "--to-match" or "--tag", revision])
        else:
            cmd.append("--partial")
        dget = ExternalCommand(command=cmd)
        output = dget.execute(self.repository.repository,
                              self.repository.basedir,
                              stdout=PIPE, stderr=STDOUT)[0]

        if dget.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %d saying\n%s"
                % (str(dget), dget.exit_status, output.read()))

    # Ask darcs for the last applied patch, which becomes the
    # changeset we report
    cmd = self.repository.command("changes", "--last", "1",
                                  "--xml-output")
    changes = ExternalCommand(cwd=self.repository.basedir, command=cmd)
    output = changes.execute(stdout=PIPE)[0]

    if changes.exit_status:
        raise ChangesetApplicationFailure(
            "%s returned status %d saying\n%s"
            % (str(changes), changes.exit_status, output.read()))

    try:
        last = changesets_from_darcschanges(
            output,
            replace_badchars=self.repository.replace_badchars).next()
    except StopIteration:
        last = None

    return last
def create(self):
    """
    Create a local SVN repository, if it does not exist, and configure it.

    Creation is only possible for file:/// URLs. When use_propset is on,
    a permissive 'pre-revprop-change' hook is installed. Finally the
    module directory (and possibly a missing branches/ parent) is
    created in the repository.
    """
    from os.path import join, exists
    from sys import platform

    # Verify the existence of repository by listing its root
    cmd = self.command("ls")
    svnls = ExternalCommand(command=cmd)
    svnls.execute(self.repository)

    # Create it if it isn't a valid repository
    if svnls.exit_status:
        if not self.repository.startswith('file:///'):
            raise TargetInitializationFailure("%r does not exist and "
                                              "cannot be created since "
                                              "it's not a local (file:///) "
                                              "repository"
                                              % self.repository)

        # strip the "file://" prefix to get the filesystem path
        repodir = self.repository[7:]
        cmd = self.command("create", "--fs-type", "fsfs", svnadmin=True)
        svnadmin = ExternalCommand(command=cmd)
        svnadmin.execute(repodir)

        if svnadmin.exit_status:
            raise TargetInitializationFailure("Was not able to create a "
                                              "'fsfs' "
                                              "svn repository at %r"
                                              % self.repository)

    if self.use_propset:
        if not self.repository.startswith('file:///'):
            self.log.warning("Repository is remote, cannot verify if it "
                             "has the 'pre-revprop-change' hook active, "
                             "needed "
                             "by 'use-propset=True'. Assuming it does...")
        else:
            repodir = self.repository[7:]
            hookname = join(repodir, 'hooks', 'pre-revprop-change')
            if platform == 'win32':
                hookname += '.bat'
            if not exists(hookname):
                # Install a hook that always allows revprop changes
                prehook = open(hookname, 'w')
                if platform <> 'win32':
                    prehook.write('#!/bin/sh\n')
                prehook.write('exit 0\n')
                prehook.close()
                if platform <> 'win32':
                    from os import chmod
                    # make the hook executable
                    chmod(hookname, 0755)

    if self.module and self.module <> '/':
        cmd = self.command("ls")
        svnls = ExternalCommand(command=cmd)
        svnls.execute(self.repository + self.module)
        if svnls.exit_status:
            paths = []

            # Auto detect missing "branches/"
            if self.module.startswith(self.branches_path + '/'):
                path = self.repository + self.branches_path
                cmd = self.command("ls")
                svnls = ExternalCommand(command=cmd)
                svnls.execute(path)
                if svnls.exit_status:
                    paths.append(path)

            paths.append(self.repository + self.module)
            cmd = self.command("mkdir", "-m",
                               "This directory will host the upstream "
                               "sources")
            svnmkdir = ExternalCommand(command=cmd)
            svnmkdir.execute(paths)
            if svnmkdir.exit_status:
                raise TargetInitializationFailure("Was not able to create "
                                                  "the "
                                                  "module %r, maybe more "
                                                  "than "
                                                  "one level directory?"
                                                  % self.module)
def _checkoutUpstreamRevision(self, revision):
    """
    Concretely do the checkout of the upstream sources. Use `revision` as
    the name of the tag to get, or as a date if it starts with a number.

    Return the last applied changeset.
    """
    from os.path import join, exists, split
    from time import sleep
    from vcpx.repository.cvs import CvsEntries, compare_cvs_revs
    from vcpx.changes import ChangesetEntry

    if not self.repository.module:
        raise InvocationError("Must specify a module name")

    timestamp = None
    if revision is not None:
        # If the revision contains a space, assume it really
        # specify a branch and a timestamp. If it starts with
        # a digit, assume it's a timestamp. Otherwise, it must
        # be a branch name
        if revision[0] in '0123456789' or revision == 'INITIAL':
            timestamp = revision
            revision = None
        elif ' ' in revision:
            revision, timestamp = revision.split(' ', 1)

    csets = self.getPendingChangesets(revision)
    if not csets:
        raise TargetInitializationFailure(
            "Something went wrong: there are no changesets since "
            "revision '%s'" % revision)
    if timestamp == 'INITIAL':
        # Use the first pending changeset's date as the checkout time
        initialcset = csets.next()
        timestamp = initialcset.date.replace(tzinfo=None).isoformat(
            sep=' ')
    else:
        initialcset = None

    if not exists(join(self.repository.basedir, 'CVS')):
        # CVS does not handle "checkout -d multi/level/subdir", so
        # split the basedir and use it's parentdir as cwd below.
        parentdir, subdir = split(self.repository.basedir)
        cmd = self.repository.command("-f",
                                      "-q",
                                      "-d", self.repository.repository,
                                      "checkout",
                                      "-d", subdir)
        if revision:
            cmd.extend(["-r", revision])
        if timestamp:
            cmd.extend(["-D", "%s UTC" % timestamp])
        if self.repository.freeze_keywords:
            cmd.append('-kk')

        checkout = ExternalCommand(cwd=parentdir, command=cmd)

        # Retry the checkout a few times with exponential backoff,
        # since CVS servers may fail transiently
        retry = 0
        while True:
            checkout.execute(self.repository.module)

            if checkout.exit_status:
                retry += 1
                if retry > 3:
                    break
                delay = 2**retry
                self.log.warning("%s returned status %s, "
                                 "retrying in %d seconds...",
                                 str(checkout), checkout.exit_status,
                                 delay)
                # NOTE(review): sleeps `retry` seconds although the
                # logged `delay` is 2**retry — possibly intentional,
                # but looks inconsistent; confirm
                sleep(retry)
            else:
                break

        if checkout.exit_status:
            raise TargetInitializationFailure(
                "%s returned status %s" % (str(checkout),
                                           checkout.exit_status))
    else:
        self.log.info("Using existing %s", self.repository.basedir)

    if self.repository.tag_entries:
        self.__forceTagOnEachEntry()

    entries = CvsEntries(self.repository.basedir)
    youngest_entry = entries.getYoungestEntry()
    if youngest_entry is None:
        raise EmptyRepositoriesFoolsMe("The working copy '%s' of the "
                                       "CVS repository seems empty, "
                                       "don't know how to deal with "
                                       "that." % self.repository.basedir)

    # loop over the changesets and find the last applied, to find
    # out the actual cvsps revision

    found = False

    def already_applied(cs, entries=entries):
        "Loop over changeset entries to determine if it's already applied."

        applied = False

        # applied become True when an entry is DELETED *and* there is
        # no metainfo for it: thus, a changeset that removes a few entries
        # very late in history would be assumed as applied. Prevent that
        # by checking for at least one explicit match on an existing entry.
        onepositive = False

        for m in cs.entries:
            info = entries.getFileInfo(m.name)

            # If the entry's info exists, compare the on-disk
            # version with what we have: the revision is already
            # applied if the former is greater or equal than the
            # latter. The same if the info does not exist and it's
            # a delete event.

            if info:
                odversion = info.cvs_version
                applied = compare_cvs_revs(odversion,
                                           m.new_revision) >= 0

                # If only one "hunk" is not yet applied, the whole
                # changeset is new.
                if not applied:
                    break
                else:
                    onepositive = True
            elif m.action_kind == ChangesetEntry.DELETED:
                applied = True

        return applied and onepositive

    # We cannot stop at the first not-applied cset, because it may
    # old enough to trick already_applied(): an entry may have
    # been moved in the meantime, and thus the getFileInfo()
    # method would return None, for example... So we really have
    # to loop over the whole queue.
    for cset in self.state_file:
        applied = already_applied(cset)
        found = found or applied
        if applied:
            last = cset

    if not found and initialcset:
        found = already_applied(initialcset)
        if found:
            last = initialcset

    if not found:
        raise TargetInitializationFailure(
            "Something went wrong: unable to determine the exact upstream "
            "revision of the checked out tree in '%s'. Either you specified "
            "the wrong initial timestamp, or you are checking out a "
            "composition of 'CVS modules' and Tailor does not support them; "
            "see the option 'trim-module-components' for a possible "
            "workaround." % self.repository.basedir)
    else:
        self.log.info("Working copy up to revision %s", last.revision)

    return last
def _prepareWorkingDirectory(self, source_repo):
    """
    Possibly checkout a working copy of the target VC, that will host the
    upstream source tree, when overriden by subclasses.

    For monotone: run 'mtn setup' for the configured db/branch, write a
    monotonerc with the passphrase and any custom lua, and extend
    .mtn-ignore with tailor's own log and state files.
    """
    from re import escape

    # Nothing to do without a configured repository or when a _MTN
    # working dir is already in place
    if not self.repository.repository or exists(
            join(self.repository.basedir, '_MTN')):
        return

    if not self.repository.module:
        raise TargetInitializationFailure("Monotone needs a module "
                                          "defined (to be used as "
                                          "commit branch)")

    cmd = self.repository.command("setup",
                                  "--db", self.repository.repository,
                                  "--branch", self.repository.module)

    # A keygenid doubles as the commit keyid
    if self.repository.keygenid:
        self.repository.keyid = self.repository.keygenid
    if self.repository.keyid:
        cmd.extend(("--key", self.repository.keyid))

    setup = ExternalCommand(command=cmd)
    setup.execute(self.repository.basedir, stdout=PIPE, stderr=PIPE)

    if self.repository.passphrase or self.repository.custom_lua:
        monotonerc = open(join(self.repository.basedir, '_MTN',
                               'monotonerc'), 'w')
        if self.repository.passphrase:
            monotonerc.write(MONOTONERC % self.repository.passphrase)
        else:
            # NOTE(review): custom_lua alone triggers this raise, so a
            # custom lua script without a passphrase is rejected —
            # confirm this is intended
            raise TargetInitializationFailure(
                "The passphrase must be specified")
        if self.repository.custom_lua:
            self.log.info("Adding custom lua script")
            monotonerc.write(self.repository.custom_lua)
        monotonerc.close()

    # Add the tailor log file and state file to _MTN's list of
    # ignored files
    ignored = []
    logfile = self.repository.projectref().logfile
    if logfile.startswith(self.repository.basedir):
        ignored.append('^%s$' %
                       escape(logfile[len(self.repository.basedir)+1:]))

    sfname = self.repository.projectref().state_file.filename
    if sfname.startswith(self.repository.basedir):
        sfrelname = sfname[len(self.repository.basedir)+1:]
        ignored.append('^%s$' % escape(sfrelname))
        ignored.append('^%s$' % escape(sfrelname+'.old'))
        ignored.append('^%s$' % escape(sfrelname+'.journal'))

    if len(ignored) > 0:
        mt_ignored = open(join(self.repository.basedir, '.mtn-ignore'),
                          'a')
        mt_ignored.write('\n'.join(ignored))
        mt_ignored.close()