Example #1
    def __init__(self, alias, url):
        if hg.islocal(url.encode("utf-8")):
            url = p(url).abspath()
            # Force git to use an absolute path in the future
            remote_name = os.path.basename(sys.argv[0]).replace("git-remote-", "")
            cmd = ["git", "config", "remote.%s.url" % alias, "%s::%s" % (remote_name, url)]
            subprocess.call(cmd)

        # use hash of URL as unique identifier in various places.
        # this has the advantage over 'alias' that it stays constant
        # when the user does a "git remote rename old new".
        if hg_version() >= "4.0.1":
            d = digester(["md5", "sha1"])
            d.update(url.encode("utf-8"))
            self.uuid = d["sha1"]
        else:
            self.uuid = sha1(url.encode("utf-8")).hexdigest()

        gitdir = p(os.environ["GIT_DIR"].decode("utf-8"))
        self.remotedir = gitdir.joinpath("hg", self.uuid)
        self.marks_git_path = self.remotedir.joinpath("marks-git")
        self.marks_hg_path = self.remotedir.joinpath("marks-hg")
        self.marks = HGMarks(self.marks_hg_path)
        self.git_marks = GitMarks(self.marks_git_path)
        self.parsed_refs = {}
        self.blob_marks = {}
        self.branches = {}
        self.bookmarks = {}

        self.prefix = "refs/hg/%s" % alias
        self.alias = alias
        self.url = url
        self.build_repo(url)
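The state directory here is keyed by a hash of the URL rather than the alias, so a "git remote rename" does not orphan the marks files. A minimal, self-contained sketch of that scheme (remote_state_dir is a hypothetical name, hashlib stands in for Mercurial's digester, and the paths are illustrative):

import hashlib
import os

def remote_state_dir(gitdir, url):
    # Stable per-remote identifier: unlike the alias, the URL hash
    # survives "git remote rename old new".
    uuid = hashlib.sha1(url.encode("utf-8")).hexdigest()
    return os.path.join(gitdir, "hg", uuid)

# remote_state_dir("/work/repo/.git", "https://example.org/hg/proj")
# -> "/work/repo/.git/hg/<40-char sha1 hex digest>"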
Example #2
def urltopath(url):
    if url and hg.islocal(url):
        if url.startswith("file://"):
            url = url[7:]
        elif url.startswith("file:"):
            url = url[5:]
    return url
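For local URLs this strips the file: scheme so the result can be used as a filesystem path. A hedged, self-contained illustration of the behaviour (urltopath_demo and its inlined locality check are stand-ins for the real hg.islocal, whose semantics may differ):

def urltopath_demo(url):
    # same prefix stripping as urltopath above, with locality reduced
    # to "no scheme, or a file: scheme" for the purposes of the demo
    is_local = url.startswith("file:") or "://" not in url
    if url and is_local:
        if url.startswith("file://"):
            url = url[7:]
        elif url.startswith("file:"):
            url = url[5:]
    return url

assert urltopath_demo("file:///tmp/repo") == "/tmp/repo"
assert urltopath_demo("file:repo") == "repo"
assert urltopath_demo("https://host/repo") == "https://host/repo"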
Example #3
    def __init__(self, alias, url):
        if hg.islocal(url.encode('utf-8')):
            url = p(url).abspath()
            # Force git to use an absolute path in the future
            cmd = ['git', 'config', 'remote.%s.url' % alias, "gitifyhg::%s" % url]
            subprocess.call(cmd)

        # use hash of URL as unique identifier in various places.
        # this has the advantage over 'alias' that it stays constant
        # when the user does a "git remote rename old new".
        self.uuid = sha1(url.encode('utf-8')).hexdigest()

        gitdir = p(os.environ['GIT_DIR'].decode('utf-8'))
        self.remotedir = gitdir.joinpath('hg', self.uuid)
        self.marks_git_path = self.remotedir.joinpath('marks-git')
        self.marks = HGMarks(self.remotedir.joinpath('marks-hg'))
        self.parsed_refs = {}
        self.blob_marks = {}
        self.branches = {}
        self.bookmarks = {}

        self.prefix = 'refs/hg/%s' % alias
        self.alias = alias
        self.url = url
        self.build_repo(url)
Example #4
    def __init__(self, alias, url):
        if hg.islocal(url.encode('utf-8')):
            url = p(url).abspath()
            # Force git to use an absolute path in the future
            remote_name = os.path.basename(sys.argv[0]).replace(
                "git-remote-", "")
            cmd = [
                'git', 'config',
                'remote.%s.url' % alias,
                "%s::%s" % (remote_name, url)
            ]
            subprocess.call(cmd)

        # use hash of URL as unique identifier in various places.
        # this has the advantage over 'alias' that it stays constant
        # when the user does a "git remote rename old new".
        self.uuid = sha1(url.encode('utf-8')).hexdigest()

        gitdir = p(os.environ['GIT_DIR'].decode('utf-8'))
        self.remotedir = gitdir.joinpath('hg', self.uuid)
        self.marks_git_path = self.remotedir.joinpath('marks-git')
        self.marks_hg_path = self.remotedir.joinpath('marks-hg')
        self.marks = HGMarks(self.marks_hg_path)
        self.git_marks = GitMarks(self.marks_git_path)
        self.parsed_refs = {}
        self.blob_marks = {}
        self.branches = {}
        self.bookmarks = {}

        self.prefix = 'refs/hg/%s' % alias
        self.alias = alias
        self.url = url
        self.build_repo(url)
Example #5
def get_repo(url, alias):
    global peer

    myui = ui.ui()
    myui.setconfig('ui', 'interactive', 'off')
    myui.fout = sys.stderr

    if get_config_bool('remote-hg.insecure'):
        myui.setconfig('web', 'cacerts', '')

    extensions.loadall(myui)

    if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
        repo = hg.repository(myui, url)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
    else:
        shared_path = os.path.join(gitdir, 'hg')

        # check and upgrade old organization
        hg_path = os.path.join(shared_path, '.hg')
        if os.path.exists(shared_path) and not os.path.exists(hg_path):
            repos = os.listdir(shared_path)
            for x in repos:
                local_hg = os.path.join(shared_path, x, 'clone', '.hg')
                if not os.path.exists(local_hg):
                    continue
                if not os.path.exists(hg_path):
                    shutil.move(local_hg, hg_path)
                shutil.rmtree(os.path.join(shared_path, x, 'clone'))

        # setup shared repo (if not there)
        try:
            hg.peer(myui, {}, shared_path, create=True)
        except error.RepoError:
            pass

        if not os.path.exists(dirname):
            os.makedirs(dirname)

        local_path = os.path.join(dirname, 'clone')
        if not os.path.exists(local_path):
            hg.share(myui, shared_path, local_path, update=False)
        else:
            # make sure the shared path is always up-to-date
            util.writefile(os.path.join(local_path, '.hg', 'sharedpath'),
                           hg_path)

        repo = hg.repository(myui, local_path)
        try:
            peer = hg.peer(myui, {}, url)
        except:
            die('Repository error')
        repo.pull(peer, heads=None, force=True)

        updatebookmarks(repo, peer)

    return repo
Example #6
def get_repo(url, alias):
    global peer

    myui = ui.ui()
    myui.setconfig('ui', 'interactive', 'off')
    myui.fout = sys.stderr

    if get_config_bool('remote-hg.insecure'):
        myui.setconfig('web', 'cacerts', '')

    extensions.loadall(myui)

    if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
        repo = hg.repository(myui, url)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
    else:
        shared_path = os.path.join(gitdir, 'hg')

        # check and upgrade old organization
        hg_path = os.path.join(shared_path, '.hg')
        if os.path.exists(shared_path) and not os.path.exists(hg_path):
            repos = os.listdir(shared_path)
            for x in repos:
                local_hg = os.path.join(shared_path, x, 'clone', '.hg')
                if not os.path.exists(local_hg):
                    continue
                if not os.path.exists(hg_path):
                    shutil.move(local_hg, hg_path)
                shutil.rmtree(os.path.join(shared_path, x, 'clone'))

        # setup shared repo (if not there)
        try:
            hg.peer(myui, {}, shared_path, create=True)
        except error.RepoError:
            pass

        if not os.path.exists(dirname):
            os.makedirs(dirname)

        local_path = os.path.join(dirname, 'clone')
        if not os.path.exists(local_path):
            hg.share(myui, shared_path, local_path, update=False)
        else:
            # make sure the shared path is always up-to-date
            util.writefile(os.path.join(local_path, '.hg', 'sharedpath'), hg_path)

        repo = hg.repository(myui, local_path)
        try:
            peer = hg.peer(myui, {}, url)
        except:
            die('Repository error')
        repo.pull(peer, heads=None, force=True)

        updatebookmarks(repo, peer)

    return repo
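Both get_repo variants keep one shared Mercurial store per Git repository ($GIT_DIR/hg/.hg) plus a lightweight per-remote clone whose .hg/sharedpath file points back at it (gitdir and dirname are module globals set up elsewhere in the helper). A rough sketch of just the sharedpath mechanism (share_into is a hypothetical name; real hg.share also records repository requirements, so this is illustration, not a replacement):

import os

def share_into(local_path, hg_path):
    # A shared clone is an ordinary repository whose .hg/sharedpath
    # file names the store it borrows history from; rewriting that
    # file is what keeps the clone pointed at the shared store.
    hgdir = os.path.join(local_path, ".hg")
    if not os.path.exists(hgdir):
        os.makedirs(hgdir)
    with open(os.path.join(hgdir, "sharedpath"), "w") as fp:
        fp.write(hg_path)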
Example #7
def overrideclone(orig, ui, source, dest=None, **opts):
    d = dest
    if d is None:
        d = hg.defaultdest(source)
    if opts.get('all_largefiles') and not hg.islocal(d):
        raise util.Abort(_('--all-largefiles is incompatible with '
                           'non-local destination %s') % d)

    return orig(ui, source, dest, **opts)
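overrideclone follows Mercurial's wrapper convention: the original command arrives as orig and is invoked after the extra validation. A sketch of how such a wrapper is typically registered, using the extensions.wrapcommand hook that largefiles-style extensions rely on:

from mercurial import commands, extensions

def uisetup(ui):
    # put the wrapper in front of the stock clone command; Mercurial
    # passes the original implementation to it as the 'orig' argument
    extensions.wrapcommand(commands.table, 'clone', overrideclone)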
Example #8
def overrideclone(orig, ui, source, dest=None, **opts):
    d = dest
    if d is None:
        d = hg.defaultdest(source)
    if opts.get('all_largefiles') and not hg.islocal(d):
        raise util.Abort(
            _('--all-largefiles is incompatible with non-local '
              'destination %s') % d)

    return orig(ui, source, dest, **opts)
Example #9
def clone(ui, source, dest=None, **opts):
    """make a clone of an existing forest of repositories

    Create a clone of an existing forest in a new directory.

    Look at the help text for the clone command for more information.
    """
    die_on_numeric_revs(opts['rev'])
    source = ui.expandpath(source) or source
    islocalsrc = hg.islocal(source)
    if islocalsrc:
        source = os.path.abspath(urltopath(source))
    if dest:
        if hg.islocal(dest):
            dest = os.path.normpath(dest)
    else:
        dest = hg.defaultdest(source)
    toprepo = hg.repository(ui, source)
    forests = toprepo.forests(walkhgenabled(ui, opts['walkhg']))
    for rpath in forests:
        if rpath == '.':
            rpath = ''
        if islocalsrc:
            srcpath = os.path.join(source, util.localpath(rpath))
        else:
            srcpath = '/'.join((source, rpath))
        if rpath:
            destpath = os.path.join(dest, util.localpath(rpath))
        else:
            destpath = dest
        try:
            qclone(ui=ui,
                   source=srcpath, sroot=source,
                   dest=destpath, rpath=os.path.normpath(rpath),
                   opts=opts)
        except util.Abort, err:
            ui.warn(_("skipped: %s\n") % err)
        ui.status("\n")
Example #10
    def getpath(self, paths):
        assert type(paths) != str
        if paths is None:
            return None
        for path in paths:
            if not hg.islocal(path):
                return path
            result = urltopath(path)
            if os.path.isdir(result):
                return result
            result = urltopath(self.paths.get(path, None))
            if result is not None:
                return result
        return None
Example #11
def cache_cmd(ui, source=None, **opts):
    if source is None and not opts.get('update'):
        raise hg.util.Abort(_("either SOURCE or --update is required"))
    if opts.get('update'):
        for repo_d in os.listdir(CACHE):
            if source is None or repo_d == url_to_filename(source):
                ui.status('updating cache {}\n'.format(repo_d))
                cache_peer = hg.peer(ui, {}, os.path.join(CACHE, repo_d))
                commands.pull(cache_peer.ui, cache_peer.local(), noupdate=True)
    else:
        if hg.islocal(source):
            raise hg.util.Abort(_("not caching local repo {}".format(source)))
        cache_d = os.path.join(CACHE, url_to_filename(source))
        ui.status(_('caching {} to {}\n'.format(source, cache_d)))
        commands.clone(ui, source, cache_d, noupdate=True)
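cache_cmd relies on a url_to_filename helper that is not shown; any injective mapping from URL to a filesystem-safe directory name would work. A hypothetical implementation (the behaviour is an assumption, not the extension's actual code):

try:
    from urllib import quote          # Python 2
except ImportError:
    from urllib.parse import quote    # Python 3

def url_to_filename(url):
    # hypothetical: percent-encode everything, including '/', so each
    # URL maps to a distinct, filesystem-safe directory name
    return quote(url, safe='')

# url_to_filename('https://example.org/hg/proj')
# -> 'https%3A%2F%2Fexample.org%2Fhg%2Fproj'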
Example #12
    def mq_applied(self):
        rpath = urltopath(self.root)
        if not hg.islocal(rpath):
            raise util.Abort(_("'%s' is not a local repository") % rpath)
        rpath = util.localpath(rpath)
        rpath = os.path.join(rpath, ".hg")
        if not os.path.isdir(rpath):
            return False
        for entry in os.listdir(rpath):
            path = os.path.join(rpath, entry)
            if (os.path.isdir(path) and
                    os.path.isfile(os.path.join(path, 'series'))):
                try:
                    s = os.stat(os.path.join(path, "status"))
                    if s.st_size > 0:
                        return path
                except OSError, err:
                    if err.errno != errno.ENOENT:
                        raise
        return False
Example #13
def qclone(ui, source, sroot, dest, rpath, opts):
    """Helper function to clone from a remote repository.

    source is the URL of the source of this repository
    dest is the directory of the destination
    rpath is the relative path of the destination
    opts are a list of options to be passed into the clone
    """
    ui.status("[%s]\n" % rpath)
    assert dest is not None
    destpfx = os.path.normpath(os.path.dirname(dest))
    if not os.path.exists(destpfx):
        os.makedirs(destpfx)
    repo = hg.repository(ui, source)
    mqdir = None
    assert source is not None
    if hg.islocal(source):
        Forest.Tree(repo=repo).die_on_mq(sroot)
    url = urltopath(repo.url())
    ui.note(_("cloning %s to %s\n") % (url, dest))
    commands.clone(ui, url, dest, **opts)
    repo = None
Example #14
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    opts = pycompat.byteskwargs(opts)
    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revisions'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest, source_type='hg', dest_type='hg')
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
Example #15
def clone_cache_cmd(ui, source, dest=None, **opts):
    source_url = url(source)
    if source_url.fragment is not None:
        raise ValueError('Someone is being clever! We are not clever. Bail.')

    orig_source = source
    cache_source = os.path.join(CACHE, url_to_filename(source))
    was_cached = False
    clone_source = source
    if not opts.get('nocache'):
        was_cached = os.path.exists(cache_source)
        if was_cached:
            ui.status('cloning from cache {}\n'.format(cache_source))
            clone_source = cache_source
            if dest is None:
                dest = hg.defaultdest(source)
            if opts.get('rev'):
                ui.status('updating cache {} to rev {}\n'.format(cache_source, opts.get('rev')))
                cache_peer = hg.peer(ui, {}, cache_source)
                commands.pull(cache_peer.ui, cache_peer.local(), noupdate=True, rev=opts.get('rev'))
        else:
            ui.status('no cache found at {}, cloning from source {}\n'.format(
                cache_source, source))
    
    if opts.get('noupdate') and opts.get('updaterev'):
        raise util.Abort(_("cannot specify both --noupdate and --updaterev"))

    r = hg.clone(ui, opts, clone_source, dest,
                 pull=opts.get('pull'),
                 stream=opts.get('uncompressed'),
                 rev=opts.get('rev'),
                 update=opts.get('updaterev') or not opts.get('noupdate'),
                 branch=opts.get('branch'))

    if r is None:
        return True

    source_peer, dest_peer = r

    if was_cached:
        dest_repo = dest_peer.local()
        if dest_repo:
            orig_source = dest_repo.ui.expandpath(orig_source)
            abspath = orig_source
            if hg.islocal(orig_source):
                abspath = os.path.abspath(hg.util.urllocalpath(orig_source))

            u = url(abspath)
            u.passwd = None
            defaulturl = str(u)
            fp = dest_repo.opener("hgrc", "w", text=True)
            fp.write("[paths]\n")
            fp.write("default = %s\n" % defaulturl)
            fp.write('\n')
            fp.write('[clonecache]\n')
            fp.write('cache = %s\n' % cache_source)
            fp.close()

            dest_repo.ui.setconfig('paths', 'default', defaulturl, 'clone')

            commands.pull(dest_repo.ui, dest_repo)

            commands.update(ui, dest_repo)

    return False
Example #16
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Lock destination to prevent modification while it is converted to.
            # Don't need to lock src because we are just reading from its
            # history which can't change.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revisions'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash.keys():
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # Substitute in the largefile data for the hash
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    src = lfsource(ui, source)

                    super(converter, self).__init__(ui, src, dest, revmapfile,
                                                    opts)

            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest)
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
Example #17
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
Example #18
def checkconflict(ui, repo, source=None, **opts):
    """Report whether merging from SOURCE would produce conflicts."""

    check_uncommited_changes(repo)

    cur_dir = repo.root
    local_clone_dir = cur_dir + '-local'
    remote_clone_dir = cur_dir + '-remote'

    # if no source is specified, take the default path from the repo
    # configuration; otherwise resolve the given source

    if source is None:
        clone_source = check_config(repo, 'default')
    else:
        is_URL = bool(urlparse.urlparse(source).netloc)
        if os.path.isdir(source) or is_URL:
            clone_source = source
        else:
            clone_source = check_config(repo, str(source))

    # path to the cache list
    cache_dir = os.path.expanduser('~\\.hg.cache')
    cache_list = os.path.join(cache_dir, 'cache_list.json')

    # if the source is local, then just clone

    if hg.islocal(clone_source):
        clone(repo, clone_source, remote_clone_dir)

    # otherwise, open the cache list and look for the path to the
    # cache of the specified source for the current working repo

    else:
        cache_source = None

        # create the cache list if it does not exist, or recreate it
        # when the clear-cache-list option is set

        if not os.path.exists(cache_list) or opts.get('clear_cache_list'):
            if not os.path.exists(cache_dir):
                make_dir(cache_dir)
            create_cache_list(cache_list)
        else:
            data = read_cache_list(cache_list)
            cache_source = find_cache_src(data, cur_dir, clone_source)

        # if a cache path was found but it no longer exists, is not a
        # repo, or the set-cache-repo option is given, drop its entry
        # from the cache list
        was_cached = cache_source is not None
        if was_cached:
            cache_source = cache_source.encode('cp1251')
            if not is_repo(repo, cache_source) or opts.get('set_cache_repo'):
                cache_data = read_cache_list(cache_list)
                new_cache_data = find_cache_src(cache_data, cur_dir, clone_source, for_remove=True)

                write_cache_list(cache_list, new_cache_data)

                repo.ui.write('\nThe last path to the cache repository is broken.\n')
                cache_source = None

        # if no cache repo is known, prompt for a path:
        # an existing empty folder is cloned into,
        # an existing repo is checked for updates,
        # anything else is rejected

        if cache_source is None:
            cache_source = str(raw_input('Specify the path for the cache repository,\n'
                                         'or leave it empty to use the default ~/.hg.cache path.\n')).replace('\r', '')
            if not cache_source:
                cache_source = default_cache_src(cur_dir, cache_dir)
            if os.path.exists(cache_source):
                if not os.listdir(cache_source):
                    clone(repo, clone_source, cache_source)  # clone from the resource to the cache
                elif is_repo(repo, cache_source):
                    clone_root = commands.identify(repo.ui, repo, rev=0)
                    repo = hg.repository(repo.ui, cache_source)
                    cache_root = commands.identify(repo.ui, repo, rev=0)
                    if clone_root == cache_root:
                        check_update(repo, clone_source)
                        repo = hg.repository(repo.ui, cur_dir)
                    else:
                        repo = hg.repository(repo.ui, cur_dir)
                        repo.ui.write('\nCache-repo and remote-repo do not match.\n')
                        sys.exit()
                else:
                    repo.ui.write('\nYou must select an empty folder or an existing repo folder.\n')
                    sys.exit()
            else:
                make_dir(cache_source)
                clone(repo, clone_source, cache_source)

            note = gen_note(cur_dir, clone_source, cache_source)
            write_cache_list(cache_list, note, add=True)

        # if the cache resource is found,
        # check if new changes can be pulled.
        # if yes, pull and update

        else:
            repo = hg.repository(repo.ui, cache_source)
            check_update(repo, clone_source)
            repo = hg.repository(repo.ui, cur_dir)

        # finally clone from the cache into the remote clone dir
        clone(repo, cache_source, remote_clone_dir)

    # create a local repo clone
    clone(repo, cur_dir, local_clone_dir)

    repo = hg.repository(repo.ui, remote_clone_dir)  # go to remote repo clone
    commands.pull(repo.ui, repo, local_clone_dir)  # pull changes from a local repo clone to it
    commands.update(repo.ui, repo)  # update

    repo.ui.pushbuffer()
    conflict = do_merge(repo)
    deleted_str = repo.ui.popbuffer()
    deleted_list = re.findall('\'(.*)\'', deleted_str)

    # if there is a conflict, list the conflicting files and display
    # them; merge3 marks conflicting lines with special tags

    if conflict:
        repo.ui.pushbuffer()
        commands.resolve(repo.ui, repo, list=True)
        u_files_str = repo.ui.popbuffer()
        u_files_list = re.findall('U (.*)\n', u_files_str)

        if opts.get('check_file'):
            file = opts.get('check_file')
            if file in deleted_list:
                repo.ui.write(
                    '\nfile ' + file + ' was deleted in other [merge rev] but was modified in local [working '
                                       'copy].\n')
            elif file in u_files_list:
                show_file_merge(repo, remote_clone_dir, file)
            else:
                repo.ui.write('\nFile ' + str(file) + ' does not cause conflict.\n'
                                                      'The conflict occurs in the following files:\n')
                show_all_conflicts(repo, u_files_list, deleted_list)

        else:
            show_all_conflicts(repo, u_files_list, deleted_list)

        repo.ui.write('\nYes, here is a conflict\n')

    # if there is no conflict, say it
    else:
        repo.ui.write('\nNo, everything cool\n')

    # go back to our work repo
    repo = hg.repository(repo.ui, cur_dir)

    # delete clones
    remove_clones(local_clone_dir, remote_clone_dir)

    sys.exit()
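The strategy above is a dry-run merge: clone both sides into throwaway repositories, merge there, and report conflicts without touching the working repo. The same idea can be sketched with the hg command line alone (merge_would_conflict is a hypothetical helper and the paths are placeholders; note that a non-zero exit from hg merge can also mean there was nothing to merge, which a real tool would need to distinguish):

import shutil
import subprocess
import tempfile

def merge_would_conflict(repo_path, source):
    # dry-run the merge in a scratch clone so the working repo is
    # never touched; 'hg merge' exits non-zero when conflicts remain
    scratch = tempfile.mkdtemp(prefix='conflict-check-')
    try:
        subprocess.check_call(['hg', 'clone', '-q', repo_path, scratch])
        subprocess.check_call(['hg', 'pull', '-q', '-R', scratch, source])
        rc = subprocess.call(['hg', '-R', scratch, 'merge', '--tool', ':merge'])
        return rc != 0
    finally:
        shutil.rmtree(scratch, ignore_errors=True)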
Example #19
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx]
                for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'),
                            ctx.rev(),
                            unit=_('revision'),
                            total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles,
                                        normalfiles, matcher, size,
                                        lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'),
                            ctx.rev(),
                            unit=_('revision'),
                            total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
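Every lfconvert variant guards both endpoints with hg.islocal because the conversion opens the repositories directly rather than going through a peer. A sketch of the usual command-line invocation, driven from Python (SRC and DEST are placeholders, and the largefiles extension must be available):

import subprocess

# convert SRC into a largefiles-enabled DEST; '--to-normal' reverses
# the conversion, after which DEST no longer needs largefiles
subprocess.check_call([
    'hg', '--config', 'extensions.largefiles=',
    'lfconvert', '--size', '10', 'SRC', 'DEST',
])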