Example #1
 def get_file_or_folder(self, mode):
     if mode == 'src':
         curr = self.get_src()
         if os.path.isfile(curr):
             caption = _('Select Source File')
         else:
             caption = _('Select Source Folder')
     else:
         curr = self.get_dest()
         if os.path.isfile(curr):
             caption = _('Select Destination File')
         else:
             caption = _('Select Destination Folder')
     FD = QFileDialog
     if os.path.isfile(curr):
         path = FD.getOpenFileName(parent=self, caption=caption,
                 options=FD.ReadOnly)
     else:
         path = FD.getExistingDirectory(parent=self, caption=caption,
                 options=FD.ShowDirsOnly | FD.ReadOnly)
     if path:
         path = util.normpath(unicode(path))
         pathprefix = util.normpath(hglib.tounicode(self.root)) + '/'
         if not path.startswith(pathprefix):
             return
         relpath = path[len(pathprefix):]
         if mode == 'src':
             self.src_txt.setText(relpath)
         else:
             self.dest_txt.setText(relpath)
Example #2
 def init_data(self, ui, pats):
     """calculate initial values for widgets"""
     fname = ''
     target = ''
     cwd = os.getcwd()
     try:
         self.root = paths.find_root()
         self.repo = thgrepo.repository(ui, path=self.root)
     except (error.RepoError):
         qtlib.ErrorMsgBox(_('Error'),
                 _('Could not find or initialize the repository '
                   'from folder<p>%s</p>' % cwd))
         return ('', '')
     try:
         fname = hglib.canonpath(self.root, cwd, pats[0])
         target = hglib.canonpath(self.root, cwd, pats[1])
     except:
         pass
     os.chdir(self.root)
     fname = hglib.tounicode(util.normpath(fname))
     if target:
         target = hglib.tounicode(util.normpath(target))
     else:
         target = fname
     return (fname, target)
Example #3
 def init_data(self, ui, pats):
     """calculate initial values for widgets"""
     fname = ''
     target = ''
     cwd = os.getcwd()
     try:
         self.root = paths.find_root()
         self.repo = thgrepo.repository(ui, path=self.root)
     except (error.RepoError):
         qtlib.ErrorMsgBox(_('Error'),
                 _('Could not find or initialize the repository '
                   'from folder<p>%s</p>' % cwd))
         return ('', '')
     try:
         fname = scmutil.canonpath(self.root, cwd, pats[0])
         target = scmutil.canonpath(self.root, cwd, pats[1])
     except:
         pass
     os.chdir(self.root)
     fname = hglib.tounicode(util.normpath(fname))
     if target:
         target = hglib.tounicode(util.normpath(target))
     else:
         target = fname
     return (fname, target)
Example #4
 def to_relative_path(self, fullpath):  # unicode or QString
     if not fullpath:
         return
     fullpath = util.normpath(unicode(fullpath))
     pathprefix = util.normpath(hglib.tounicode(self.repo.root)) + '/'
     if not os.path.normcase(fullpath).startswith(os.path.normcase(pathprefix)):
         return
     return fullpath[len(pathprefix):]
Example #5
 def to_relative_path(self, fullpath):  # unicode or QString
     if not fullpath:
         return
     fullpath = util.normpath(unicode(fullpath))
     pathprefix = util.normpath(hglib.tounicode(self.repo.root)) + '/'
     if not os.path.normcase(fullpath).startswith(os.path.normcase(pathprefix)):
         return
     return fullpath[len(pathprefix):]
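The prefix-stripping pattern in the two examples above can be shown with the
standard library alone. This is a minimal sketch, assuming plain str paths and
using os.path.normpath plus a forward-slash replace as a stand-in for
mercurial's util.normpath; repo_root and the sample paths are made up.

import os.path

def to_relative_path(repo_root, fullpath):
    """Return fullpath relative to repo_root, or None if it lies outside."""
    def norm(p):
        # normalize and force forward slashes, roughly what util.normpath does
        return os.path.normpath(p).replace(os.sep, '/')
    prefix = norm(repo_root) + '/'
    fullpath = norm(fullpath)
    # compare case-insensitively for case-insensitive filesystems
    if not os.path.normcase(fullpath).startswith(os.path.normcase(prefix)):
        return None
    return fullpath[len(prefix):]

print(to_relative_path('/home/user/repo', '/home/user/repo/src/main.py'))
# -> 'src/main.py'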
Example #6
    def lockany(self):
        wfile = QFileDialog.getOpenFileName(
            self, _('Open a (nonmergable) file you wish to be locked'),
            self.repo.root, _FILE_FILTER)

        wfile = util.normpath(unicode(wfile))
        pathprefix = util.normpath(hglib.tounicode(self.repo.root)) + '/'
        if not os.path.normcase(wfile).startswith(os.path.normcase(pathprefix)):
            self.showMessage.emit(_('File was not within current repository'))
        wfile = wfile[len(pathprefix):]

        self.showMessage.emit(_('Locking %s') % wfile)
        self.lockrun(['lock', wfile])
Example #7
    def lockany(self):
        wfile, _filter = QFileDialog.getOpenFileName(
            self, _('Open a (nonmergable) file you wish to be locked'),
            self.repo.root, _FILE_FILTER)

        wfile = util.normpath(unicode(wfile))
        pathprefix = util.normpath(hglib.tounicode(self.repo.root)) + '/'
        if not os.path.normcase(wfile).startswith(
                os.path.normcase(pathprefix)):
            self.showMessage.emit(_('File was not within current repository'))
        wfile = wfile[len(pathprefix):]

        self.showMessage.emit(_('Locking %s') % wfile)
        self.lockrun(['lock', wfile])
Example #8
    def _normalizeEntryPaths(self, entry):
        """
        Normalize the name and old_name of an entry.

        This implementation uses ``mercurial.util.normpath()``, since
        at this level hg is expecting UNIX style pathnames, with
        forward slash"/" as separator, also under insane operating systems.
        """

        from mercurial.util import normpath

        entry.name = normpath(self.repository.encode(entry.name))
        if entry.old_name:
            entry.old_name = normpath(self.repository.encode(entry.old_name))
Example #9
    def _normalizeEntryPaths(self, entry):
        """
        Normalize the name and old_name of an entry.

        This implementation uses ``mercurial.util.normpath()``, since
        at this level hg is expecting UNIX style pathnames, with
        forward slash"/" as separator, also under insane operating systems.
        """

        from mercurial.util import normpath

        entry.name = normpath(self.repository.encode(entry.name))
        if entry.old_name:
            entry.old_name = normpath(self.repository.encode(entry.old_name))
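The docstring above relies on mercurial's util.normpath() always producing
forward-slash separators. A tiny illustrative stand-in (not mercurial's
implementation) shows the assumed behavior; ntpath is used so the example
treats backslashes as separators on any platform.

import ntpath

def unix_style_normpath(path):
    """Normalize a Windows-style path and force '/' as the separator."""
    return ntpath.normpath(path).replace('\\', '/')

print(unix_style_normpath('dir\\sub\\..\\file.txt'))  # -> 'dir/file.txt'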
Example #10
 def keyfunc(x):
     l = hglib.fromunicode(x.rootpath())
     try:
         return hgsuborder.index(util.normpath(l))
     except ValueError:
         # If an item is not found, place it at the top
         return 0
Example #11
    def addRepo(self, group, root, row=-1):
        grp = group
        if grp == None:
            grp = self.allreposIndex()
        rgi = grp.internalPointer()
        if row < 0:
            row = rgi.childCount()

        # make sure all paths are properly normalized
        root = os.path.normpath(root)

        # Check whether the repo that we are adding is a subrepo
        # This check could be expensive, particularly for network repositories
        # Thus, only perform this check on network repos if the showNetworkSubrepos
        # flag is set
        itemIsSubrepo = False
        if self.showNetworkSubrepos \
                or not paths.netdrive_status(root):
            outerrepopath = paths.find_root(os.path.dirname(root))
            if outerrepopath:
                # Check whether repo we are adding is a subrepo of
                # its containing (outer) repo
                # This check is currently quite imperfect, since it
                # only checks the current repo revision
                outerrepo = hg.repository(ui.ui(), path=outerrepopath)
                relroot = util.normpath(root[len(outerrepopath) + 1:])
                if relroot in outerrepo['.'].substate:
                    itemIsSubrepo = True

        self.beginInsertRows(grp, row, row)
        if itemIsSubrepo:
            ri = SubrepoItem(root)
        else:
            ri = RepoItem(root)
        rgi.insertChild(row, ri)

        if not self.showSubrepos \
                or (not self.showNetworkSubrepos and paths.netdrive_status(root)):
            self.endInsertRows()
            return

        invalidRepoList = ri.appendSubrepos()

        self.endInsertRows()

        if invalidRepoList:
            if invalidRepoList[0] == root:
                qtlib.WarningMsgBox(
                    _('Could not get subrepository list'),
                    _('It was not possible to get the subrepository list for '
                      'the repository in:<br><br><i>%s</i>') % root)
            else:
                qtlib.WarningMsgBox(
                    _('Could not open some subrepositories'),
                    _('It was not possible to fully load the subrepository '
                      'list for the repository in:<br><br><i>%s</i><br><br>'
                      'The following subrepositories may be missing, broken or '
                      'on an inconsistent state and cannot be accessed:'
                      '<br><br><i>%s</i>') %
                    (root, "<br>".join(invalidRepoList)))
Example #12
def _cwdlist(repo):
    """ List the contents in the current directory. Annotate
    the files in the sparse profile.
    """
    ctx = repo['.']
    mf = ctx.manifest()
    cwd = util.normpath(os.getcwd())

    # Get the root of the repo so that we remove the content of
    # the root from the current working directory
    root = repo.root
    if cwd.startswith(root):
        cwd = cwd[len(root):]
    else:
        raise error.Abort(_("the current working directory should begin " +
            "with the root %s") % root)

    cwd = cwd.strip("/")
    sparsematch = repo.sparsematch(ctx.rev())
    checkedoutentries = set()
    allentries = set()
    cwdlength = len(cwd) + 1
    for filepath in mf:
        if filepath.startswith(cwd):
            tail = filepath[cwdlength:] if cwdlength > 1 else filepath
            entryname = tail.split('/', 1)[0]

            allentries.add(entryname)
            if sparsematch(filepath):
                checkedoutentries.add(entryname)

    ui = repo.ui
    for entry in sorted(allentries):
        marker = ' ' if entry in checkedoutentries else '-'
        ui.status("%s %s\n" % (marker, entry))
Example #13
def _promptvctextension(ui, cw, ext, msg):
    ext_path = _vctextpath(pycompat.sysstr(ext))

    # Do nothing (return) if the user has configured this extension, unless it
    # points to the directory that we manage and that directory is missing.
    users_ext_path = ui.config(b'extensions', ext)
    if users_ext_path != None:
        users_ext_path = pycompat.fsdecode(
            util.normpath(util.expandpath(users_ext_path)))
        if users_ext_path != ext_path or os.path.exists(ext_path):
            return

    # Verify the extension loads before prompting to enable it. This is
    # done out of paranoia.

    # Even if we launch hg.exe, sys.argv[0] is "hg" on Windows. Since "hg" isn't
    # a Windows application, we can't simply run it. So change to the ".exe"
    # variant if necessary.
    hg = sys.argv[0]
    if sys.platform in ('win32', 'msys') and hg.endswith('hg'):
        hg += '.exe'

    result = subprocess.check_output([
        hg, '--config',
        'extensions.testmodule=%s' % ext_path, '--config', 'ui.traceback=true'
    ],
                                     stderr=subprocess.STDOUT)
    if b'Traceback' in result:
        return

    if uipromptchoice(ui, b'%s (Yn) $$ &Yes $$ &No' % msg):
        return

    _enableext(cw, pycompat.sysstr(ext), ext_path)
Example #14
    def sortbyhgsub(self):
        model = self.tview.model()
        index = self.tview.currentIndex()
        ip = index.internalPointer()
        repo = hg.repository(hglib.loadui(),
                             hglib.fromunicode(model.repoRoot(index)))
        ctx = repo['.']
        wfile = '.hgsub'
        if wfile not in ctx:
            return self.sortbypath()
        data = ctx[wfile].data().strip()
        data = data.split('\n')
        getsubpath = lambda x: x.split('=')[0].strip()
        abspath = lambda x: util.normpath(repo.wjoin(x))
        hgsuborder = [abspath(getsubpath(x)) for x in data]

        def keyfunc(x):
            l = hglib.fromunicode(x.rootpath())
            try:
                return hgsuborder.index(util.normpath(l))
            except ValueError:
                # If an item is not found, place it at the top
                return 0

        self.tview.model().sortchilds(ip.childs, keyfunc)
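The keyfunc above sorts subrepo items by their position in the .hgsub file and
pushes anything not listed there to the front. The same idea, reduced to plain
lists with made-up paths:

hgsuborder = ['/repo/libs/core', '/repo/libs/ui', '/repo/tools']

def keyfunc(path):
    try:
        return hgsuborder.index(path)
    except ValueError:
        # unlisted items share key 0, so they sort at the top
        return 0

items = ['/repo/tools', '/repo/extra', '/repo/libs/core']
print(sorted(items, key=keyfunc))
# -> ['/repo/extra', '/repo/libs/core', '/repo/tools']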
Example #15
 def keyfunc(x):
     l = hglib.fromunicode(x.rootpath())
     try:
         return hgsuborder.index(util.normpath(l))
     except ValueError:
         # If an item is not found, place it at the top
         return 0
Example #16
 def _removeDirs(self, names):
     """Remove the names that reference a directory."""
     from os.path import isdir, join, normpath
     return [
         n for n in names
         if not isdir(join(self.repository.basedir, normpath(n)))
     ]
Example #17
    def addRepo(self, group, root, row=-1):
        grp = group
        if grp == None:
            grp = self.allreposIndex()
        rgi = grp.internalPointer()
        if row < 0:
            row = rgi.childCount()

        # make sure all paths are properly normalized
        root = os.path.normpath(root)

        # Check whether the repo that we are adding is a subrepo
        # This check could be expensive, particularly for network repositories
        # Thus, only perform this check on network repos if the showNetworkSubrepos
        # flag is set
        itemIsSubrepo = False
        if self.showNetworkSubrepos \
                or not paths.netdrive_status(root):
            outerrepopath = paths.find_root(os.path.dirname(root))
            if outerrepopath:
                # Check whether repo we are adding is a subrepo of
                # its containing (outer) repo
                # This check is currently quite imperfect, since it
                # only checks the current repo revision
                outerrepo = hg.repository(ui.ui(), path=outerrepopath)
                relroot = util.normpath(root[len(outerrepopath)+1:])
                if relroot in outerrepo['.'].substate:
                    itemIsSubrepo = True

        self.beginInsertRows(grp, row, row)
        if itemIsSubrepo:
            ri = SubrepoItem(root)
        else:
            ri = RepoItem(root)
        rgi.insertChild(row, ri)

        if not self.showSubrepos \
                or (not self.showNetworkSubrepos and paths.netdrive_status(root)):
            self.endInsertRows()
            return

        invalidRepoList = ri.appendSubrepos()

        self.endInsertRows()

        if invalidRepoList:
            if invalidRepoList[0] == root:
                qtlib.WarningMsgBox(_('Could not get subrepository list'),
                    _('It was not possible to get the subrepository list for '
                    'the repository in:<br><br><i>%s</i>') % root)
            else:
                qtlib.WarningMsgBox(_('Could not open some subrepositories'),
                    _('It was not possible to fully load the subrepository '
                    'list for the repository in:<br><br><i>%s</i><br><br>'
                    'The following subrepositories may be missing, broken or '
                    'on an inconsistent state and cannot be accessed:'
                    '<br><br><i>%s</i>')  %
                    (root, "<br>".join(invalidRepoList)))
Example #18
 def init_data(self, pats):
     """calculate initial values for widgets"""
     fname = ''
     target = ''
     root = self.repo.root
     cwd = os.getcwd()
     try:
         fname = scmutil.canonpath(root, cwd, pats[0])
         target = scmutil.canonpath(root, cwd, pats[1])
     except:
         pass
     os.chdir(root)
     fname = hglib.tounicode(util.normpath(fname))
     if target:
         target = hglib.tounicode(util.normpath(target))
     else:
         target = fname
     return (fname, target)
Example #19
def _checkevolve(ui, cw, hg_version):
    if hg_version < (4, 3, 0):
        ui.warn(EVOLVE_INCOMPATIBLE)
        return

    remote_evolve_path = b'https://www.mercurial-scm.org/repo/evolve/'
    # Install to the same dir as v-c-t, unless the mozbuild directory path is passed (testing)
    evolve_clone_dir = ui.config(b'mozilla', b'mozbuild_state_path', _vcthome())

    local_evolve_path = b'%(evolve_clone_dir)s/evolve' % {b'evolve_clone_dir': evolve_clone_dir}
    evolve_config_value = os.path.normpath('%(evolve_path)s/hgext3rd/evolve' % \
                                           {'evolve_path': pycompat.sysstr(local_evolve_path)})

    users_evolve_path = ui.config(b'extensions', b'evolve')
    if users_evolve_path:
        users_evolve_path = os.path.normpath(pycompat.fsdecode(util.normpath(util.expandpath(users_evolve_path))))

    # If evolve is not installed, install it. (If the user's path to evolve is
    # the path that we manage, but it doesn't exist yet, assume that their
    # config file has been copied to a new machine and we need to clone evolve.
    if users_evolve_path == None or \
            (users_evolve_path == evolve_config_value and not os.path.exists(evolve_config_value)):
        if uipromptchoice(ui, EVOLVE_INFO_WARNING):
            return

        try:
            # Clone the evolve extension and enable
            hg.clone(ui, {}, remote_evolve_path, branch=(b'stable',), dest=local_evolve_path)
            _enableext(cw, 'evolve', evolve_config_value)

            ui.write(b'Evolve was downloaded successfully.\n')

        except error.Abort as hg_err:
            ui.write(pycompat.bytestr(hg_err))
            ui.write(EVOLVE_CLONE_ERROR)

        return

    # If evolve is installed and managed by this wizard,
    # update it via pull/update
    if users_evolve_path == evolve_config_value:
        if uipromptchoice(ui, EVOLVE_UPDATE_PROMPT % {b'evolve_dir': local_evolve_path}):
            return

        try:
            local_evolve_repo = hg.repository(ui, local_evolve_path)

            # Pull the latest stable, update to tip
            hgpull(ui, local_evolve_repo, source=remote_evolve_path, branch=(b'stable',))
            hgupdate(ui, local_evolve_repo, rev=b'stable')

            ui.write(b'Evolve was updated successfully.\n')

        except error.Abort as hg_err:
            ui.write(EVOLVE_CLONE_ERROR)
Example #20
def geturl(path):
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        return 'file://%s' % path
    return path
Example #21
def geturl(path):
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        return 'file://%s' % path
    return path
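In the Windows branch of geturl(), prepending '/' to the forward-slash path is
what turns a drive-letter path into a valid file:// URL. A rough sketch with a
hypothetical path (the replace() stands in for util.normpath on Windows):

path = 'C:\\repos\\project'
normalized = '/' + path.replace('\\', '/')
print('file://%s' % normalized)  # -> file:///C:/repos/project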
Example #22
    def __init__(self, ui, path):

        if svn is None:
            raise MissingTool(_('Could not load Subversion python bindings'))
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []
        self.setexec = []
        self.delexec = []
        self.copies = []
        self.wc = None
        self.cwd = os.getcwd()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    ui.status(
                        _('initializing svn repository %r\n') %
                        os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(
                _('initializing svn working copy %r\n') %
                os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = util.opener(self.wc)
        self.wopener = util.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        self.is_exec = util.checkexec(self.wc) and util.is_exec or None

        if created:
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.set_flags(hook, False, True)

        xport = transport.SvnRaTransport(url=geturl(path))
        self.uuid = svn.ra.get_uuid(xport.ra)
Example #23
def getprojrcserverset(ui):
    """Get the list of projrc servers, normalizing paths and character cases"""
    serverlist = ui.configlist('projrc', 'servers')

    for n, server in enumerate(serverlist):
        server = ui.expandpath(server)
        filepath = isfilepath(server)
        if filepath:
            serverlist[n] = os.path.normcase(util.normpath(server))
        else:
            serverlist[n] = server.lower()
    return set(serverlist)
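getprojrcserverset() normalizes local paths with normcase() and normpath() so
later comparisons ignore separator style and letter case, while URLs are only
lowercased. A small sketch of that idea; the server strings are hypothetical
and the scheme test is a simplification of the original isfilepath() check:

import os.path

def normalizeserver(server):
    if '://' in server:          # treat anything with a scheme as a URL
        return server.lower()
    return os.path.normcase(os.path.normpath(server))

servers = ['HTTPS://Example.COM/Repo', 'C:\\Repos\\Shared', 'c:\\repos\\shared']
print(set(normalizeserver(s) for s in servers))
# on Windows the two local spellings collapse to a single entry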
Example #24
    def _renamePathname(self, oldname, newname):
        """Rename an entry"""

        from os.path import join, isdir, normpath

        self.log.info('Renaming %r to %r...', oldname, newname)
        # Check both names, because maybe we are operating in
        # disjunct dirs, and the target may be renamed to a
        # temporary name
        if (isdir(join(self.repository.basedir, normpath(oldname)))
                or isdir(join(self.repository.basedir, normpath(newname)))):
            # Given lack of support for directories in current HG,
            # loop over all files under the old directory and
            # do a copy on them.
            for f in self._walk(oldname):
                oldpath = join(oldname, f)
                self._hgCommand('copy', oldpath, join(newname, f))
                self._hgCommand('remove', oldpath, unlink=True)
        else:
            self._hgCommand('copy', oldname, newname)
            self._hgCommand('remove', oldname, unlink=True)
Example #25
    def _renamePathname(self, oldname, newname):
        """Rename an entry"""

        from os.path import join, isdir, normpath

        self.log.info('Renaming %r to %r...', oldname, newname)
        # Check both names, because maybe we are operating in
        # disjunct dirs, and the target may be renamed to a
        # temporary name
        if (isdir(join(self.repository.basedir, normpath(oldname)))
            or isdir(join(self.repository.basedir, normpath(newname)))):
            # Given lack of support for directories in current HG,
            # loop over all files under the old directory and
            # do a copy on them.
            for f in self._walk(oldname):
                oldpath = join(oldname, f)
                self._hgCommand('copy', oldpath, join(newname, f))
                self._hgCommand('remove', oldpath, unlink=True)
        else:
            self._hgCommand('copy', oldname, newname)
            self._hgCommand('remove', oldname, unlink=True)
Example #26
    def __init__(self, ui, path):

        if svn is None:
            raise MissingTool(_('Could not load Subversion python bindings'))
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []
        self.setexec = []
        self.delexec = []
        self.copies = []
        self.wc = None
        self.cwd = os.getcwd()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    ui.status(_('initializing svn repository %r\n') %
                              os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = util.opener(self.wc)
        self.wopener = util.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        self.is_exec = util.checkexec(self.wc) and util.is_exec or None

        if created:
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.set_flags(hook, False, True)

        xport = transport.SvnRaTransport(url=geturl(path))
        self.uuid = svn.ra.get_uuid(xport.ra)
Example #27
    def __init__(self, ui, path):

        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []
        self.setexec = []
        self.delexec = []
        self.copies = []
        self.wc = None
        self.cwd = os.getcwd()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    ui.status(
                        _('initializing svn repository %r\n') %
                        os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(
                _('initializing svn working copy %r\n') %
                os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = scmutil.opener(self.wc)
        self.wopener = scmutil.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        self.is_exec = util.checkexec(self.wc) and util.isexec or None

        if created:
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.setflags(hook, False, True)

        output = self.run0('info')
        self.uuid = self.uuid_re.search(output).group(1).strip()
Example #28
    def __init__(self, ui, path):
        checktool('svn', debname='subversion')
        checktool('svnadmin', debname='subversion')

        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []
        self.setexec = []
        self.delexec = []
        self.copies = []
        self.wc = None
        self.cwd = os.getcwd()

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            self.wc = os.path.realpath(path)
            self.run0('update')
        else:
            if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
                path = os.path.realpath(path)
                if os.path.isdir(os.path.dirname(path)):
                    if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                        ui.status(_('initializing svn repository %r\n') %
                                  os.path.basename(path))
                        commandline(ui, 'svnadmin').run0('create', path)
                        created = path
                    path = util.normpath(path)
                    if not path.startswith('/'):
                        path = '/' + path
                    path = 'file://' + path

            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = scmutil.opener(self.wc)
        self.wopener = scmutil.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        self.is_exec = util.checkexec(self.wc) and util.isexec or None

        if created:
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.setflags(hook, False, True)

        output = self.run0('info')
        self.uuid = self.uuid_re.search(output).group(1).strip()
Example #29
    def appendSubrepos(self, repo=None):
        self._sharedpath = ''
        invalidRepoList = []
        try:
            sri = None
            if repo is None:
                if not os.path.exists(self._root):
                    self._valid = False
                    return [hglib.fromunicode(self._root)]
                elif (not os.path.exists(os.path.join(self._root, '.hgsub'))
                      and not os.path.exists(
                          os.path.join(self._root, '.hg', 'sharedpath'))):
                    return []  # skip repo creation, which is expensive
                repo = hg.repository(hglib.loadui(),
                                     hglib.fromunicode(self._root))
            if repo.sharedpath != repo.path:
                self._sharedpath = hglib.tounicode(repo.sharedpath)
            wctx = repo['.']
            sortkey = lambda x: os.path.basename(util.normpath(repo.wjoin(x)))
            for subpath in sorted(wctx.substate, key=sortkey):
                sri = None
                abssubpath = repo.wjoin(subpath)
                subtype = wctx.substate[subpath][2]
                sriIsValid = os.path.isdir(abssubpath)
                sri = _newSubrepoItem(hglib.tounicode(abssubpath),
                                      repotype=subtype)
                sri._valid = sriIsValid
                self.appendChild(sri)

                if not sriIsValid:
                    self._valid = False
                    sri._valid = False
                    invalidRepoList.append(repo.wjoin(subpath))
                    return invalidRepoList

                if subtype == 'hg':
                    # Only recurse into mercurial subrepos
                    sctx = wctx.sub(subpath)
                    invalidSubrepoList = sri.appendSubrepos(sctx._repo)
                    if invalidSubrepoList:
                        self._valid = False
                        invalidRepoList += invalidSubrepoList

        except (EnvironmentError, error.RepoError, util.Abort), e:
            # Add the repo to the list of repos/subrepos
            # that could not be open
            self._valid = False
            if sri:
                sri._valid = False
                invalidRepoList.append(abssubpath)
            invalidRepoList.append(hglib.fromunicode(self._root))
Example #30
def geturl(path):
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        return 'file://%s' % urllib.quote(path)
    return path
Example #31
    def appendSubrepos(self, repo=None):
        self._sharedpath = ''
        invalidRepoList = []
        try:
            sri = None
            if repo is None:
                if not os.path.exists(self._root):
                    self._valid = False
                    return [hglib.fromunicode(self._root)]
                elif (not os.path.exists(os.path.join(self._root, '.hgsub'))
                      and not os.path.exists(
                          os.path.join(self._root, '.hg', 'sharedpath'))):
                    return []  # skip repo creation, which is expensive
                repo = hg.repository(ui.ui(), hglib.fromunicode(self._root))
            if repo.sharedpath != repo.path:
                self._sharedpath = hglib.tounicode(repo.sharedpath)
            wctx = repo['.']
            sortkey = lambda x: os.path.basename(util.normpath(repo.wjoin(x)))
            for subpath in sorted(wctx.substate, key=sortkey):
                sri = None
                abssubpath = repo.wjoin(subpath)
                subtype = wctx.substate[subpath][2]
                sriIsValid = os.path.isdir(abssubpath)
                sri = _newSubrepoItem(hglib.tounicode(abssubpath),
                                      repotype=subtype)
                sri._valid = sriIsValid
                self.appendChild(sri)

                if not sriIsValid:
                    self._valid = False
                    sri._valid = False
                    invalidRepoList.append(repo.wjoin(subpath))
                    return invalidRepoList

                if subtype == 'hg':
                    # Only recurse into mercurial subrepos
                    sctx = wctx.sub(subpath)
                    invalidSubrepoList = sri.appendSubrepos(sctx._repo)
                    if invalidSubrepoList:
                        self._valid = False
                        invalidRepoList += invalidSubrepoList

        except (EnvironmentError, error.RepoError, util.Abort), e:
            # Add the repo to the list of repos/subrepos
            # that could not be open
            self._valid = False
            if sri:
                sri._valid = False
                invalidRepoList.append(abssubpath)
            invalidRepoList.append(hglib.fromunicode(self._root))
Example #32
def geturl(path):
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        return 'file://%s' % urllib.quote(path)
    return path
Example #33
 def sortbyhgsub(self):
     ip = self.selitem.internalPointer()
     repo = hg.repository(ui.ui(), ip.rootpath())
     ctx = repo['.']
     wfile = '.hgsub'
     if wfile not in ctx:
         return self.sortbypath()
     data = ctx[wfile].data().strip()
     data = data.split('\n')
     getsubpath = lambda x: x.split('=')[0].strip()
     abspath = lambda x: util.normpath(repo.wjoin(x))
     hgsuborder = [abspath(getsubpath(x)) for x in data]
     def keyfunc(x):
         try:
             return hgsuborder.index(util.normpath(x.rootpath()))
         except:
             # If an item is not found, place it at the top
             return 0
     self.tview.model().sortchilds(ip.childs, keyfunc)
Example #34
def _checkmultiplevct(ui, cw):
    # References to multiple version-control-tools checkouts can confuse
    # version-control-tools since various Mercurial extensions resolve
    # dependencies via __file__. Files from different revisions could lead
    # to unexpected environments and break things.
    seenvct = set()
    for k, v in ui.configitems('extensions'):
        # mercurial.extensions.loadpath() does variable and user expansion.
        # We need to match behavior.
        v = os.path.realpath(util.normpath(util.expandpath(v)))

        if 'version-control-tools' not in v:
            continue
        i = v.index('version-control-tools')
        vct = v[0:i + len('version-control-tools')]
        seenvct.add(vct)

    if len(seenvct) > 1:
        ui.write(MULTIPLE_VCT % cw.path)
Example #35
    def _renamePathname(self, oldname, newname):
        """Rename an entry"""

        from os.path import join, isdir, normpath

        repo = self._getRepo()

        self.log.info('Renaming %r to %r...', oldname, newname)
        if isdir(join(self.repository.basedir, normpath(newname))):
            # Given lack of support for directories in current HG,
            # loop over all files under the old directory and
            # do a copy on them.
            for f in self._walk(oldname):
                oldpath = join(oldname, f)
                repo.copy(oldpath, join(newname, f))
                repo.remove([oldpath], unlink=True)
        else:
            repo.copy(oldname, newname)
            repo.remove([oldname], unlink=True)
Example #36
    def sortbyhgsub(self):
        ip = self.selitem.internalPointer()
        repo = hg.repository(ui.ui(), ip.rootpath())
        ctx = repo['.']
        wfile = '.hgsub'
        if wfile not in ctx:
            return self.sortbypath()
        data = ctx[wfile].data().strip()
        data = data.split('\n')
        getsubpath = lambda x: x.split('=')[0].strip()
        abspath = lambda x: util.normpath(repo.wjoin(x))
        hgsuborder = [abspath(getsubpath(x)) for x in data]

        def keyfunc(x):
            try:
                return hgsuborder.index(util.normpath(x.rootpath()))
            except:
                # If an item is not found, place it at the top
                return 0

        self.tview.model().sortchilds(ip.childs, keyfunc)
Example #37
 def sortbyhgsub(self):
     model = self.tview.model()
     index = self.tview.currentIndex()
     ip = index.internalPointer()
     repo = hg.repository(ui.ui(), hglib.fromunicode(model.repoRoot(index)))
     ctx = repo['.']
     wfile = '.hgsub'
     if wfile not in ctx:
         return self.sortbypath()
     data = ctx[wfile].data().strip()
     data = data.split('\n')
     getsubpath = lambda x: x.split('=')[0].strip()
     abspath = lambda x: util.normpath(repo.wjoin(x))
     hgsuborder = [abspath(getsubpath(x)) for x in data]
     def keyfunc(x):
         l = hglib.fromunicode(x.rootpath())
         try:
             return hgsuborder.index(util.normpath(l))
         except ValueError:
             # If an item is not found, place it at the top
             return 0
     self.tview.model().sortchilds(ip.childs, keyfunc)
Example #38
 def write(self, fd, oldstyle=False):
     """Writes a snapshot file to a file descriptor."""
     counter = 1
     for tree in self.trees:
         fd.write("[tree%s]\n" % counter)
         root = relpath(self.top().root, tree.root)
         if root == os.curdir:
             root = '.'
         root = util.normpath(root)
         fd.write("root = %s\n" % root)
         if tree.revs:
             fd.write("revision = %s\n" % tree.revs[0])
         else:
             fd.write("revision = None\n")
         if not oldstyle:
             for name, path in tree.paths.items():
                 fd.write("path.%s = %s\n" % (name, path))
         else:
             fd.write("\n[tree%s.paths]\n" % counter)
             for name, path in tree.paths.items():
                 fd.write("%s = %s\n" % (name, path))
         fd.write("\n")
         counter += 1
Example #39
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog'''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}

    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []  # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile('RCS file: (.+)$')
    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
    re_03 = re.compile("(Cannot access.+CVSROOT)|"
                       "(can't create temporary directory.+)$")
    re_10 = re.compile('Working file: (.+)$')
    re_20 = re.compile('symbolic names:')
    re_30 = re.compile('\t(.+): ([\\d.]+)$')
    re_31 = re.compile('----------------------------$')
    re_32 = re.compile('======================================='
                       '======================================$')
    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
                       r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
                       r'(\s+commitid:\s+([^;]+);)?'
                       r'(.*mergepoint:\s+([^;]+);)?')
    re_70 = re.compile('branches: (.+);$')

    file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')

    prefix = ''  # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = open(os.path.join('CVS', 'Repository')).read().strip()
            directory = prefix
            if prefix == ".":
                prefix = ""
        except IOError:
            raise logerror(_('not a CVS sandbox'))

        if prefix and not prefix.endswith(os.sep):
            prefix += os.sep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join('CVS', 'Root')).read().strip()
        except IOError:
            pass

    if not root:
        root = os.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

    if cache == 'update':
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile))
            for e in oldlog:
                if not (util.safehasattr(e, 'branchpoints')
                        and util.safehasattr(e, 'commitid')
                        and util.safehasattr(e, 'mergepoint')):
                    ui.status(_('ignoring old cache\n'))
                    oldlog = []
                    break

            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception as e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date  # last commit date as a (time,tz) tuple
            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}  # dictionary of revisions on current file with their tags
    branchmap = {}  # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False  # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = util.popen(' '.join(cmd))
    peek = pfp.readline()
    while True:
        line = peek
        if line == '':
            break
        peek = pfp.readline()
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('must have at least '
                                                'some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs),
                         file=scache(filename),
                         revision=tuple(
                             [int(x) for x in match.group(1).split('.')]),
                         branches=[],
                         parent=None,
                         commitid=None,
                         mergepoint=None,
                         branchpoints=set())

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = util.parsedate(d, [
                '%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'
            ])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7):  # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9):  # cvsnt mergepoint
                myrev = match.group(10).split('.')
                if len(myrev) == 2:  # head
                    e.mergepoint = 'HEAD'
                else:
                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, ('unknown branch: %s' %
                                                e.mergepoint)
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [
                    tuple([int(y) for y in x.strip().split('.')])
                    for x in m.group(1).split(';')
                ]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                cpeek = peek
                if cpeek.endswith('\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2.  Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (store and e.dead and e.revision[-1] == 1 and  # 1.1 or 1.1.x.1
                len(e.comment) == 1 and file_added_re.match(e.comment[0])):
            ui.debug('found synthetic revision in %s: %r\n' %
                     (e.rcs, e.comment[0]))
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in branchmap.iteritems():
                revparts = tuple([int(i) for i in revision.split('.')])
                if len(revparts) < 2:  # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1):  # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(
                    util.ellipsis('%d %s' % (len(log), e.file), 80) + '\n')

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace('/Attic/', '/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(
                    _('log cache overlaps with new log entries,'
                      ' re-run without cache.'))

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, 'w'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    hook.hook(ui, None, "cvslog", True, log=log)

    return log
Example #40
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog'''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}
    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []      # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile('RCS file: (.+)$')
    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
    re_03 = re.compile("(Cannot access.+CVSROOT)|"
                       "(can't create temporary directory.+)$")
    re_10 = re.compile('Working file: (.+)$')
    re_20 = re.compile('symbolic names:')
    re_30 = re.compile('\t(.+): ([\\d.]+)$')
    re_31 = re.compile('----------------------------$')
    re_32 = re.compile('======================================='
                       '======================================$')
    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
                       r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
                       r'(\s+commitid:\s+([^;]+);)?'
                       r'(.*mergepoint:\s+([^;]+);)?')
    re_70 = re.compile('branches: (.+);$')

    file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')

    prefix = ''   # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = open(os.path.join('CVS','Repository')).read().strip()
            directory = prefix
            if prefix == ".":
                prefix = ""
        except IOError:
            raise logerror(_('not a CVS sandbox'))

        if prefix and not prefix.endswith(os.sep):
            prefix += os.sep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join('CVS','Root')).read().strip()
        except IOError:
            pass

    if not root:
        root = os.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))
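        # e.g. a root of ':pserver:user@server:/path' and directory 'module'
        # map to ~/.hg.cvsps/pserver.user-server.path.module.cache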

    if cache == 'update':
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile))
            for e in oldlog:
                if not (util.safehasattr(e, 'branchpoints') and
                        util.safehasattr(e, 'commitid') and
                        util.safehasattr(e, 'mergepoint')):
                    ui.status(_('ignoring old cache\n'))
                    oldlog = []
                    break

            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception as e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
    branchmap = {} # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = util.popen(' '.join(cmd))
    peek = pfp.readline()
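    # read with one line of lookahead: states 7 and 8 peek at the next line
    # to tell a '----------' revision separator from dashes that merely
    # appear inside a commit message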
    while True:
        line = peek
        if line == '':
            break
        peek = pfp.readline()
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
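                    # files deleted on the trunk live in an 'Attic'
                    # subdirectory of the CVS repository; strip it to get
                    # the plain module-relative path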
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
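                # e.g. the branch tag revision 1.2.0.2 denotes branch 1.2.2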
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('must have at least '
                                                'some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs),
                         file=scache(filename),
                         revision=tuple([int(x) for x in
                                         match.group(1).split('.')]),
                         branches=[],
                         parent=None,
                         commitid=None,
                         mergepoint=None,
                         branchpoints=set())

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
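                # two-digit year, e.g. '99/05/01 ...' becomes '1999/05/01 ...'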
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
                                        '%Y/%m/%d %H:%M:%S',
                                        '%Y-%m-%d %H:%M:%S'])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7): # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9): # cvsnt mergepoint
                myrev = match.group(10).split('.')
                if len(myrev) == 2: # head
                    e.mergepoint = 'HEAD'
                else:
                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, ('unknown branch: %s'
                                                % e.mergepoint)
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [tuple([int(y) for y in x.strip().split('.')])
                                for x in m.group(1).split(';')]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                cpeek = peek
                if cpeek.endswith('\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2.  Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (store and
              e.dead and
              e.revision[-1] == 1 and      # 1.1 or 1.1.x.1
              len(e.comment) == 1 and
              file_added_re.match(e.comment[0])):
            ui.debug('found synthetic revision in %s: %r\n'
                     % (e.rcs, e.comment[0]))
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in branchmap.iteritems():
                revparts = tuple([int(i) for i in revision.split('.')])
                if len(revparts) < 2: # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
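                    # e.g. tag revision 1.3.0.2 marks a branch rooted at 1.3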
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1): # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace('/Attic/', '/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(_('log cache overlaps with new log entries,'
                                 ' re-run without cache.'))

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, 'w'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    hook.hook(ui, None, "cvslog", True, log=log)

    return log
Beispiel #41
0
    def appendSubrepos(self, repo=None):
        invalidRepoList = []

        # Mercurial repos are the only ones that can have subrepos
        if self.repotype() == 'hg':
            try:
                sri = None
                if repo is None:
                    if not os.path.exists(self._root):
                        self._valid = False
                        return [self._root]
                    elif not os.path.exists(os.path.join(self._root, '.hgsub')):
                        return []  # skip repo creation, which is expensive
                    repo = hg.repository(ui.ui(), self._root)
                wctx = repo['.']
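                # walk the subrepos ordered by the basename of their
                # normalized absolute path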
                sortkey = lambda x: os.path.basename(util.normpath(repo.wjoin(x)))
                for subpath in sorted(wctx.substate, key=sortkey):
                    sri = None
                    abssubpath = repo.wjoin(subpath)
                    subtype = wctx.substate[subpath][2]
                    sriIsValid = os.path.isdir(abssubpath)
                    sri = SubrepoItem(abssubpath, subtype=subtype)
                    sri._valid = sriIsValid
                    self.appendChild(sri)

                    if not sriIsValid:
                        self._valid = False
                        sri._valid = False
                        invalidRepoList.append(repo.wjoin(subpath))
                        # skip this invalid subrepo, but keep scanning the rest
                        continue

                    if subtype == 'hg':
                        # Only recurse into mercurial subrepos
                        sctx = wctx.sub(subpath)
                        invalidSubrepoList = sri.appendSubrepos(sctx._repo)
                        if invalidSubrepoList:
                            self._valid = False
                            invalidRepoList += invalidSubrepoList

            except (EnvironmentError, error.RepoError, util.Abort), e:
                # Add the repo to the list of repos/subrepos
                # that could not be opened
                self._valid = False
                if sri:
                    sri._valid = False
                    invalidRepoList.append(abssubpath)
                invalidRepoList.append(self._root)
            except Exception, e:
                # If any other sort of exception happens, show the corresponding
                # error message, but do not crash!
                # Note that we will _also_ mark the offending repos as invalid.
                # It is unfortunate that Python 2.4, which we target, does not
                # support combined try/except/finally clauses, forcing us
                # to duplicate some code here
                self._valid = False
                if sri:
                    sri._valid = False
                    invalidRepoList.append(abssubpath)
                invalidRepoList.append(self._root)

                # Show a warning message indicating that there was an error
                if repo:
                    rootpath = repo.root
                else:
                    rootpath = self._root
                warningMessage = (_('An exception happened while loading the ' \
                    'subrepos of:<br><br>"%s"<br><br>') + \
                    _('The exception error message was:<br><br>%s<br><br>') +\
                    _('Click OK to continue or Abort to exit.')) \
                    % (rootpath, e.message)
                res = qtlib.WarningMsgBox(_('Error loading subrepos'),
                                    warningMessage,
                                    buttons = QMessageBox.Ok | QMessageBox.Abort)
                # Let the user abort so that he gets the full exception info
                if res == QMessageBox.Abort:
                    raise
Beispiel #42
0
 def _removeDirs(self, names):
     """Remove the names that reference a directory."""
     from os.path import isdir, join, normpath
     return [n for n in names
             if not isdir(join(self.repository.basedir, normpath(n)))]
Beispiel #43
0
 def keyfunc(x):
     try:
         return hgsuborder.index(util.normpath(x.rootpath()))
     except:
         # If an item is not found, place it at the top
         return 0
Beispiel #44
0
import os
import gtk

from mercurial import util

from tortoisehg.util.i18n import _
from tortoisehg.util import hglib, paths

from tortoisehg.hgtk import dialog, gdialog

def run(ui, *pats, **opts):
    fname, target = '', ''
    cwd = os.getcwd()
    root = paths.find_root(cwd)
    try:
        fname = util.canonpath(root, cwd, pats[0])
        target = util.canonpath(root, cwd, pats[1])
    except util.Abort, e:
        return gdialog.Prompt(_('Invalid path'), str(e), None)
    except IndexError:
        pass
    os.chdir(root)
    fname = util.normpath(fname)
    if target:
        target = hglib.toutf(util.normpath(target))
    else:
        target = hglib.toutf(fname)
    title = _('Rename ') + hglib.toutf(fname)
    dlg = dialog.entry_dialog(None, title, True, target, rename_resp)
    dlg.orig = fname
    return dlg

def rename_resp(dlg, response):
    if response != gtk.RESPONSE_OK:
        dlg.destroy()
        return
    try:
        root = paths.find_root()
Beispiel #45
0
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile))
            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception, e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
Beispiel #46
0
 def sortbypath(self):
     childs = self.selitem.internalPointer().childs
     self.tview.model().sortchilds(
         childs, lambda x: os.path.normcase(util.normpath(x.rootpath())))
Beispiel #47
0
def getremoteprojrc(ui, repo, other):
    """
    Get the contents of a remote projrc and check that they are valid
    
    This function returns a 2-element tuple:
    - The projrc contents as a string (or None if no projrc was found)
    - A boolean indicating whether the data is valid.
    
    Note that it is possible to return (None, True), which simply means
    that no data matching the projrc filter settings was found.
    """
    if not repo.local():
        return None, True

    # Get the list of repos that we are supposed to get a projrc file from
    # (i.e. the projrc "servers")
    projrcserverset = getprojrcserverset(ui)

    try:
        remotepath = other.root
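        # normalize separators and case so the comparison against the
        # projrc server list behaves the same on every platform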
        remotepath = os.path.normcase(util.normpath(remotepath))
    except:
        # Non local repos have no root property
        remotepath = other.url()
        if remotepath.startswith('file:'):
            remotepath = remotepath[5:]

    if '*' not in projrcserverset and \
            not findpatternmatch(remotepath, projrcserverset)[0] and \
            not ("localhost" in projrcserverset and islocalpath(remotepath)):
        # The pull source is not on the projrc server list
        # Note that we keep any existing local projrc file, which may have been
        # transferred from another valid server
        return None, True

    # Get the list of remote keys that we must load from the remote projrc file
    includedkeys, excludedkeys = getallowedkeys(ui)
    if includedkeys or excludedkeys:
        projrc = other.listkeys('projrc')
    else:
        # There are no remote keys to load
        projrc = {} # This ensures that any existing projrc file will be deleted

    data = None
    valid = True
    if 'data' in projrc:
        data = projrc['data'].decode('string-escape')
        if data.startswith("#\\\\ "):
            data = data.decode('string-escape')
        # verify that we can parse the file we got, and filter it according
        # to the local projrc extension settings
        try:
            c = config.config()
            c.parse('projrc', data)
            # Filter the received config, only allowing the sections that
            # the user has specified in any of its hgrc files
            data = ENCODING_CHECK + \
                serializeconfig(c, includedkeys, excludedkeys)
        except error.ParseError, e:
            ui.warn(_("not saving retrieved projrc file: "
                      "parse error at '%s' on %s\n") % e.args)
            valid = False
Beispiel #48
0
 def keyfunc(x):
     try:
         return hgsuborder.index(util.normpath(x.rootpath()))
     except:
         # If an item is not found, place it at the top
         return 0
Beispiel #49
0
 def normpath(path):
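     # an empty path is mapped to the current directory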
     if path:
         return util.normpath(path)
     else:
         return '.'
Beispiel #50
0
 def sortbypath(self):
     childs = self.selitem.internalPointer().childs
     self.tview.model().sortchilds(childs, lambda x: util.normpath(x.rootpath()))
Beispiel #51
0
 def keyfunc(x):
     l = hglib.fromunicode(x.rootpath())
     return os.path.normcase(util.normpath(l))
Beispiel #52
0
                    oldlog = []
                    break

            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception, e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
Beispiel #53
0
 def keyfunc(x):
     l = hglib.fromunicode(x.rootpath())
     return os.path.normcase(util.normpath(l))