Example #1
 def _writeundo(self):
     """write transaction data for possible future undo call"""
     if self.undoname is None:
         return
     undobackupfile = self.opener.open("%s.backupfiles" % self.undoname,
                                       'w')
     undobackupfile.write('%d\n' % version)
     for l, f, b, c in self._backupentries:
         if not f:  # temporary file
             continue
         if not b:
             u = ''
         else:
             if l not in self._vfsmap and c:
                 self.report("couldn't remote %s: unknown cache location"
                             "%s\n" % (b, l))
                 continue
             vfs = self._vfsmap[l]
             base, name = vfs.split(b)
             assert name.startswith(self.journal), name
             uname = name.replace(self.journal, self.undoname, 1)
             u = vfs.reljoin(base, uname)
             util.copyfile(vfs.join(b), vfs.join(u), hardlink=True)
         undobackupfile.write("%s\0%s\0%s\0%d\n" % (l, f, u, c))
     undobackupfile.close()
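
Each record written above is a NUL-separated quadruple (location, file path, undo-backup path, cache flag), preceded by a single version line. As a hedged sketch only, a reader for that format could look like this; read_undo_backupfiles is a hypothetical helper, not part of the transaction code:

def read_undo_backupfiles(fp):
    # first line carries the integer format version written by _writeundo()
    fileversion = int(fp.readline())
    entries = []
    for line in fp.read().split('\n'):
        if not line:
            continue
        # each record is "location\0file\0undo-backup\0cacheflag"
        l, f, u, c = line.split('\0')
        entries.append((l, f, u, int(c)))
    return fileversion, entries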
Example #2
def _premerge(repo, toolconf, files):
    tool, toolpath, binary, symlink = toolconf
    a, b, c, back = files

    ui = repo.ui

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
    except error.ConfigError:
        premerge = _toolstr(ui, tool, "premerge").lower()
        valid = 'keep'.split()
        if premerge not in valid:
            _valid = ', '.join(["'" + v + "'" for v in valid])
            raise error.ConfigError(
                _("%s.premerge not valid "
                  "('%s' is neither boolean nor %s)") %
                (tool, premerge, _valid))

    if premerge:
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
        if not r:
            ui.debug(" premerge successful\n")
            return 0
        if premerge != 'keep':
            util.copyfile(back, a)  # restore from backup and try again
    return 1  # continue merging
Example #3
 def copybranchcache(fname):
     srcbranchcache = srcrepo.join("cache/%s" % fname)
     dstbranchcache = os.path.join(dstcachedir, fname)
     if os.path.exists(srcbranchcache):
         if not os.path.exists(dstcachedir):
             os.mkdir(dstcachedir)
         util.copyfile(srcbranchcache, dstbranchcache)
Example #4
def _premerge(repo, toolconf, files):
    tool, toolpath, binary, symlink = toolconf
    if symlink:
        return 1
    a, b, c, back = files

    ui = repo.ui

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not binary)
    except error.ConfigError:
        premerge = _toolstr(ui, tool, "premerge").lower()
        valid = 'keep'.split()
        if premerge not in valid:
            _valid = ', '.join(["'" + v + "'" for v in valid])
            raise error.ConfigError(_("%s.premerge not valid "
                                      "('%s' is neither boolean nor %s)") %
                                    (tool, premerge, _valid))

    if premerge:
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
        if not r:
            ui.debug(" premerge successful\n")
            return 0
        if premerge != 'keep':
            util.copyfile(back, a) # restore from backup and try again
    return 1 # continue merging
Example #5
 def backup_app_lib(self):
     dr = self.driver
     dr.find_element_by_name("工具箱").click()
     time.sleep(1)
     WebDriverWait(
         dr, 5).until(lambda d: d.find_element_by_name("备份程序数据")).click()
     time.sleep(1)
     WebDriverWait(dr,
                   5).until(lambda d: d.find_element_by_name("备份")).click()
     time.sleep(5)
     # check that the backup has finished
     WebDriverWait(dr, 30).until(lambda d: d.find_element_by_name("备份"))
     # delete the lib files
     removefile("/sdcard/008backUp/*/*/lib")
     time.sleep(2)
     # move the backup files
     copyfile("/sdcard/008backUp/*", "/sdcard/008backUp2/")
     time.sleep(5)
     removefile("/sdcard/008backUp/*")
     time.sleep(5)
     dr.press_keycode(4)
     time.sleep(1)
     dr.press_keycode(4)
     time.sleep(1)
     return self.do_toolbox_task
Example #6
def _premerge(repo, toolconf, files, labels=None):
    tool, toolpath, binary, symlink = toolconf
    if symlink:
        return 1
    a, b, c, back = files

    ui = repo.ui

    validkeep = ['keep', 'keep-merge3']

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not binary)
    except error.ConfigError:
        premerge = _toolstr(ui, tool, "premerge").lower()
        if premerge not in validkeep:
            _valid = ', '.join(["'" + v + "'" for v in validkeep])
            raise error.ConfigError(
                _("%s.premerge not valid "
                  "('%s' is neither boolean nor %s)") %
                (tool, premerge, _valid))

    if premerge:
        if premerge == 'keep-merge3':
            if not labels:
                labels = _defaultconflictlabels
            if len(labels) < 3:
                labels.append('base')
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels)
        if not r:
            ui.debug(" premerge successful\n")
            return 0
        if premerge not in validkeep:
            util.copyfile(back, a)  # restore from backup and try again
    return 1  # continue merging
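
The premerge value parsed above comes from the per-tool merge configuration: a boolean turns the simplemerge pass on or off, while 'keep' (or 'keep-merge3' in this variant) keeps the premerged result with its conflict markers instead of restoring the backup. As an illustrative sketch, the matching hgrc entry could look like the following; the tool name mymerge and its paths are invented:

[merge-tools]
mymerge.executable = /usr/local/bin/mymerge
mymerge.args = $local $base $other -o $output
# a boolean, 'keep', or 'keep-merge3'; any other string raises the ConfigError above
mymerge.premerge = keep-merge3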
Example #7
def _premerge(repo, toolconf, files, labels=None):
    tool, toolpath, binary, symlink = toolconf
    if symlink:
        return 1
    a, b, c, back = files

    ui = repo.ui

    validkeep = ['keep', 'keep-merge3']

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not binary)
    except error.ConfigError:
        premerge = _toolstr(ui, tool, "premerge").lower()
        if premerge not in validkeep:
            _valid = ', '.join(["'" + v + "'" for v in validkeep])
            raise error.ConfigError(_("%s.premerge not valid "
                                      "('%s' is neither boolean nor %s)") %
                                    (tool, premerge, _valid))

    if premerge:
        if premerge == 'keep-merge3':
            if not labels:
                labels = _defaultconflictlabels
            if len(labels) < 3:
                labels.append('base')
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels)
        if not r:
            ui.debug(" premerge successful\n")
            return 0
        if premerge not in validkeep:
            util.copyfile(back, a) # restore from backup and try again
    return 1 # continue merging
Example #8
    def addbackup(self, file, hardlink=True, location=''):
        """Adds a backup of the file to the transaction

        Calling addbackup() creates a hardlink backup of the specified file
        that is used to recover the file in the event of the transaction
        aborting.

        * `file`: the file path, relative to .hg/store
        * `hardlink`: use a hardlink to quickly create the backup
        """
        if self._queue:
            msg = 'cannot use transaction.addbackup inside "group"'
            raise RuntimeError(msg)

        if file in self.map or file in self._backupmap:
            return
        vfs = self._vfsmap[location]
        dirname, filename = vfs.split(file)
        backupfilename = "%s.backup.%s" % (self.journal, filename)
        backupfile = vfs.reljoin(dirname, backupfilename)
        if vfs.exists(file):
            filepath = vfs.join(file)
            backuppath = vfs.join(backupfile)
            util.copyfile(filepath, backuppath, hardlink=hardlink)
        else:
            backupfile = ''

        self._addbackupentry((location, file, backupfile, False))
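
A minimal usage sketch, assuming tr is an already-open transaction (obtained, for instance, through repo.transaction(), which is not part of this example) and that the files live under the default '' location of the vfs map:

# back up a store file before rewriting it; the copy is created as
# "<journal>.backup.<filename>" alongside the original
tr.addbackup('fncache')
# make a real copy instead of a hardlink
tr.addbackup('phaseroots', hardlink=False)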
Example #9
 def copybranchcache(fname):
     srcbranchcache = srcrepo.join('cache/%s' % fname)
     dstbranchcache = os.path.join(dstcachedir, fname)
     if os.path.exists(srcbranchcache):
         if not os.path.exists(dstcachedir):
             os.mkdir(dstcachedir)
         util.copyfile(srcbranchcache, dstbranchcache)
Example #10
 def recovery_app_lib(self):
     dr = self.driver
     try:
         # fetch the backup files
         copyfile("/sdcard/008backUp2/*__%s" % self.imei,
                  "/sdcard/008backUp/")
         time.sleep(2)
         copyfile("/sdcard/lib", "/sdcard/008backUp/*/*/")
         time.sleep(2)
         removefile("/sdcard/008backUp2/*__%s" % self.imei)
         time.sleep(5)
         WebDriverWait(
             dr,
             5).until(lambda d: d.find_element_by_name("备份程序数据")).click()
         time.sleep(1)
         dr.find_element_by_name(self.imei).click()
         time.sleep(1)
         WebDriverWait(
             dr, 5).until(lambda d: d.find_element_by_name("还原")).click()
         time.sleep(5)
         # check that the restore succeeded
         WebDriverWait(
             dr, 30).until(lambda d: d.find_element_by_name(self.imei))
     except:
         dr.press_keycode(4)
         time.sleep(1)
     dr.press_keycode(4)  # keypress back
     time.sleep(1)
     return self.do_toolbox_task
Example #11
 def recovery_app(self):
     dr = self.driver
     dr.find_element_by_name("工具箱").click()
     time.sleep(1)
     try:
         # fetch the backup files
         copyfile("/sdcard/008backUp2/*__%s" % self.imei,
                  "/sdcard/008backUp/")
         time.sleep(5)
         removefile("/sdcard/008backUp2/*__%s" % self.imei)
         time.sleep(5)
         WebDriverWait(
             dr,
             5).until(lambda d: d.find_element_by_name("备份程序数据")).click()
         time.sleep(1)
         WebDriverWait(
             dr,
             5).until(lambda d: d.find_element_by_name(self.imei)).click()
         time.sleep(1)
         WebDriverWait(
             dr, 5).until(lambda d: d.find_element_by_name("还原")).click()
         time.sleep(1)
         # check that the restore succeeded
         WebDriverWait(
             dr, 30).until(lambda d: d.find_element_by_name(self.imei))
     except TimeoutException:
         pass
     dr.press_keycode(4)
     time.sleep(1)
     dr.press_keycode(4)
     time.sleep(1)
     return self.do_toolbox_task
Example #12
def _playback(journal,
              report,
              opener,
              vfsmap,
              entries,
              backupentries,
              unlink=True):
    for f, o, _ignore in entries:
        if o or not unlink:
            try:
                fp = opener(f, 'a')
                fp.truncate(o)
                fp.close()
            except IOError:
                report(_("failed to truncate %s\n") % f)
                raise
        else:
            try:
                opener.unlink(f)
            except (IOError, OSError) as inst:
                if inst.errno != errno.ENOENT:
                    raise

    backupfiles = []
    for l, f, b, c in backupentries:
        if l not in vfsmap and c:
            report("couldn't handle %s: unknown cache location %s\n" % (b, l))
        vfs = vfsmap[l]
        try:
            if f and b:
                filepath = vfs.join(f)
                backuppath = vfs.join(b)
                try:
                    util.copyfile(backuppath, filepath)
                    backupfiles.append(b)
                except IOError:
                    report(_("failed to recover %s\n") % f)
            else:
                target = f or b
                try:
                    vfs.unlink(target)
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise
        except (IOError, OSError, util.Abort) as inst:
            if not c:
                raise

    opener.unlink(journal)
    backuppath = "%s.backupfiles" % journal
    if opener.exists(backuppath):
        opener.unlink(backuppath)
    try:
        for f in backupfiles:
            if opener.exists(f):
                opener.unlink(f)
    except (IOError, OSError, util.Abort) as inst:
        # only pure backup files remain; it is safe to ignore any error
        pass
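
For orientation, the two sequences consumed above have a fixed shape; the values below are invented purely to illustrate it:

# entries: (file, offset, ignored) -- truncate the file back to offset,
# or unlink it when the offset is 0 and unlink=True
entries = [('data/foo.i', 4096, None), ('data/foo.d', 0, None)]
# backupentries: (location, file, backupfile, cache-flag), as recorded by
# transaction.addbackup(); entries flagged as cache tolerate playback errors
backupentries = [('', 'fncache', 'journal.backup.fncache', False)]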
Example #13
def run():
    print("Creating search")
    util.makedirs("../dest/search")
    util.copyfile("templates/search/search.js", "../dest/search/search.js")
    html = templates.get("search/index")
    html = templates.initial_replace(html, 3)
    html = templates.final_replace(html, "..")
    util.writefile("../dest/search/index.html", html)
Example #14
def _playback(journal, report, opener, vfsmap, entries, backupentries,
              unlink=True):
    for f, o, _ignore in entries:
        if o or not unlink:
            try:
                fp = opener(f, 'a')
                fp.truncate(o)
                fp.close()
            except IOError:
                report(_("failed to truncate %s\n") % f)
                raise
        else:
            try:
                opener.unlink(f)
            except (IOError, OSError) as inst:
                if inst.errno != errno.ENOENT:
                    raise

    backupfiles = []
    for l, f, b, c in backupentries:
        if l not in vfsmap and c:
            report("couldn't handle %s: unknown cache location %s\n"
                        % (b, l))
        vfs = vfsmap[l]
        try:
            if f and b:
                filepath = vfs.join(f)
                backuppath = vfs.join(b)
                try:
                    util.copyfile(backuppath, filepath)
                    backupfiles.append(b)
                except IOError:
                    report(_("failed to recover %s\n") % f)
            else:
                target = f or b
                try:
                    vfs.unlink(target)
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise
        except (IOError, OSError, util.Abort) as inst:
            if not c:
                raise

    opener.unlink(journal)
    backuppath = "%s.backupfiles" % journal
    if opener.exists(backuppath):
        opener.unlink(backuppath)
    try:
        for f in backupfiles:
            if opener.exists(f):
                opener.unlink(f)
    except (IOError, OSError, util.Abort) as inst:
        # only pure backup files remain; it is safe to ignore any error
        pass
Example #15
def joinBshells(imgPath, ref_bvals_file=None, ref_bvals=None, sep_prefix=None):

    if ref_bvals_file:
        print('Reading reference b-shell file ...')
        ref_bvals = read_bvals(ref_bvals_file)

    print('Joining b-shells for', imgPath)

    imgPath = local.path(imgPath)
    img = load(imgPath._path)
    dim = img.header['dim'][1:5]

    inPrefix = abspath(imgPath).split('.nii')[0]
    directory = dirname(inPrefix)
    prefix = basename(inPrefix)

    bvalFile = inPrefix + '.bval'
    bvecFile = inPrefix + '.bvec'

    if sep_prefix:
        harmPrefix = pjoin(directory, sep_prefix + prefix)
    else:
        harmPrefix = inPrefix

    if not isfile(harmPrefix + '.bval'):
        copyfile(bvalFile, harmPrefix + '.bval')
    if not isfile(harmPrefix + '.bvec'):
        copyfile(bvecFile, harmPrefix + '.bvec')

    bvals = np.array(read_bvals(inPrefix + '.bval'))

    joinedDwi = np.zeros((dim[0], dim[1], dim[2], dim[3]), dtype='float32')

    for bval in ref_bvals:

        # ind= np.where(bval==bvals)[0]
        ind = np.where(abs(bval - bvals) <= BSHELL_MIN_DIST)[0]

        if bval == 0.:
            b0Img = load(inPrefix + '_b0.nii.gz')
            b0 = b0Img.get_data()
            for i in ind:
                joinedDwi[:, :, :, i] = b0

        else:
            b0_bshell = load(harmPrefix + f'_b{int(bval)}.nii.gz').get_data()

            joinedDwi[:, :, :, ind] = b0_bshell[:, :, :, 1:]

    if not isfile(harmPrefix + '.nii.gz'):
        save_nifti(harmPrefix + '.nii.gz', joinedDwi, b0Img.affine,
                   b0Img.header)
    else:
        print(harmPrefix + '.nii.gz', 'already exists, not overwritten.')
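
A hedged invocation sketch; the paths and prefix below are invented, and the separated per-shell files (*_b0.nii.gz, *_b<bval>.nii.gz) are assumed to have been produced earlier:

# rebuild the full DWI volume from previously separated b-shells
joinBshells('/data/sub01/dwi.nii.gz',
            ref_bvals_file='/data/sub01/dwi.bval',
            sep_prefix='harmonized_')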
Example #16
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = scmutil.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(
                _('%s: not overwriting - %s collides with %s\n') %
                (reltarget, repo.pathto(abssrc, cwd), repo.pathto(
                    prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') % reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(
                        _('%s: not recording move - %s does not exist\n') %
                        (relsrc, reltarget))
                else:
                    ui.warn(
                        _('%s: not recording copy - %s does not exist\n') %
                        (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(
                        _('%s: cannot copy - %s\n') % (relsrc, inst.strerror))
                    return True  # report a failure
Example #17
def copyfile(src, dst, basedir):
    abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") % dst)

    dstdir = os.path.dirname(absdst)
    if dstdir and not os.path.isdir(dstdir):
        try:
            os.makedirs(dstdir)
        except IOError:
            raise util.Abort(_("cannot create %s: unable to create destination directory") % dst)

    util.copyfile(abssrc, absdst)
Example #18
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = scmutil.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure
Example #19
def copyfile(src, dst, basedir=None):
    if not basedir:
        basedir = os.getcwd()

    abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
    if os.path.exists(absdst):
        raise util.Abort(
            _("cannot create %s: destination already exists") % dst)

    targetdir = os.path.dirname(absdst)
    if not os.path.isdir(targetdir):
        os.makedirs(targetdir)

    util.copyfile(abssrc, absdst)
Example #20
def copyfile(src, dst, basedir=None):
    if not basedir:
        basedir = os.getcwd()

    abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    targetdir = os.path.dirname(absdst)
    if not os.path.isdir(targetdir):
        os.makedirs(targetdir)

    util.copyfile(abssrc, absdst)
Example #21
def copyfile(src, dst, basedir):
    abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    dstdir = os.path.dirname(absdst)
    if dstdir and not os.path.isdir(dstdir):
        try:
            os.makedirs(dstdir)
        except IOError:
            raise util.Abort(
                _("cannot create %s: unable to create destination directory")
                % dst)

    util.copyfile(abssrc, absdst)
Example #22
def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Creates three versions of the files to merge, containing the
    contents of local, other and base. These files can then be used to
    perform a merge manually. If the file to be merged is named
    ``a.txt``, these files will accordingly be named ``a.txt.local``,
    ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
    same directory as ``a.txt``."""
    r = _premerge(repo, toolconf, files, labels=labels)
    if r:
        a, b, c, back = files

        fd = fcd.path()

        util.copyfile(a, a + ".local")
        repo.wwrite(fd + ".other", fco.data(), fco.flags())
        repo.wwrite(fd + ".base", fca.data(), fca.flags())
    return False, r
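
As the docstring says, the dump only lays out material for a manual merge; for a conflicted file path/to/a.txt the working directory ends up holding (sketch of the resulting layout, no new behaviour implied):

# path/to/a.txt        -> still the unresolved working copy
# path/to/a.txt.local  -> local version, copied from the working file
# path/to/a.txt.other  -> other version, written from fco
# path/to/a.txt.base   -> ancestor version, written from fca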
Example #23
def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Creates three versions of the files to merge, containing the
    contents of local, other and base. These files can then be used to
    perform a merge manually. If the file to be merged is named
    ``a.txt``, these files will accordingly be named ``a.txt.local``,
    ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
    same directory as ``a.txt``."""
    r = _premerge(repo, toolconf, files, labels=labels)
    if r:
        a, b, c, back = files

        fd = fcd.path()

        util.copyfile(a, a + ".local")
        repo.wwrite(fd + ".other", fco.data(), fco.flags())
        repo.wwrite(fd + ".base", fca.data(), fca.flags())
    return False, r
Example #24
def run():
    print("Creating search")
    util.makedirs("../search")
    util.copyfile("database/countries.csv", "../search/countries.csv")
    util.copyfile("database/estudiantes.csv", "../search/estudiantes.csv")
    util.copyfile("templates/search/search.js", "../search/search.js")
    util.copyfile("templates/search/asciify.js", "../search/asciify.js")
    html = templates.get("search/index")
    html = templates.initial_replace(html, 3)
    html = templates.final_replace(html, "..")
    util.writefile("../search/index.html", html)
Example #25
 def _writeundo(self):
     """write transaction data for possible future undo call"""
     if self.undoname is None:
         return
     undobackupfile = self.opener.open("%s.backupfiles" % self.undoname, 'w')
     undobackupfile.write('%d\n' % version)
     for l, f, b, c in self._backupentries:
         if not f:  # temporary file
             continue
         if not b:
             u = ''
         else:
             if l not in self._vfsmap and c:
                 self.report("couldn't remote %s: unknown cache location"
                             "%s\n" % (b, l))
                 continue
             vfs = self._vfsmap[l]
             base, name = vfs.split(b)
             assert name.startswith(self.journal), name
             uname = name.replace(self.journal, self.undoname, 1)
             u = vfs.reljoin(base, uname)
             util.copyfile(vfs.join(b), vfs.join(u), hardlink=True)
         undobackupfile.write("%s\0%s\0%s\0%d\n" % (l, f, u, c))
     undobackupfile.close()
Example #26
def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """perform a 3-way merge in the working directory

    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file
    """
    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    if not fco.cmp(fcd):  # files identical?
        return None

    ui = repo.ui
    fd = fcd.path()
    binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
    symlink = 'l' in fcd.flags() + fco.flags()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
    ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
             (tool, fd, binary, symlink))

    if tool in internals:
        func = internals[tool]
        trymerge = func.trymerge
        onfailure = func.onfailure
    else:
        func = _xmerge
        trymerge = True
        onfailure = _("merging %s failed!\n")

    toolconf = tool, toolpath, binary, symlink

    if not trymerge:
        return func(repo, mynode, orig, fcd, fco, fca, toolconf)

    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    back = a + ".orig"
    util.copyfile(a, back)

    if orig != fco.path():
        ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
    else:
        ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    markerstyle = ui.config('ui', 'mergemarkers', 'basic')
    if not labels:
        labels = _defaultconflictlabels
    if markerstyle != 'basic':
        labels = _formatlabels(repo, fcd, fco, fca, labels)

    needcheck, r = func(repo,
                        mynode,
                        orig,
                        fcd,
                        fco,
                        fca,
                        toolconf, (a, b, c, back),
                        labels=labels)
    if not needcheck:
        if r:
            if onfailure:
                ui.warn(onfailure % fd)
        else:
            util.unlink(back)

        util.unlink(b)
        util.unlink(c)
        return r

    if not r and (_toolbool(ui, tool, "checkconflicts")
                  or 'conflicts' in _toollist(ui, tool, "check")):
        if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
                     re.MULTILINE):
            r = 1

    checked = False
    if 'prompt' in _toollist(ui, tool, "check"):
        checked = True
        if ui.promptchoice(
                _("was merge of '%s' successful (yn)?"
                  "$$ &Yes $$ &No") % fd, 1):
            r = 1

    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
                                  'changed' in _toollist(ui, tool, "check")):
        if filecmp.cmp(a, back):
            if ui.promptchoice(
                    _(" output file %s appears unchanged\n"
                      "was merge successful (yn)?"
                      "$$ &Yes $$ &No") % fd, 1):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(a, back)

    if r:
        if onfailure:
            ui.warn(onfailure % fd)
    else:
        util.unlink(back)

    util.unlink(b)
    util.unlink(c)
    return r
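
Read from the code above, the return contract of filemerge() is roughly the following; callers in the merge machinery are assumed to interpret it this way:

# None     -> fcd and fco were already identical, nothing merged
# 0        -> merge (or premerge) succeeded; backup and temp files removed
# non-zero -> conflicts remain; the "<file>.orig" backup is left in place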
Example #27
            except IOError:
                report(_("failed to truncate %s\n") % f)
                raise
        else:
            try:
                opener.unlink(f)
            except (IOError, OSError) as inst:
                if inst.errno != errno.ENOENT:
                    raise

    backupfiles = []
    for f, b, ignore in backupentries:
        filepath = opener.join(f)
        backuppath = opener.join(b)
        try:
            util.copyfile(backuppath, filepath)
            backupfiles.append(b)
        except IOError:
            report(_("failed to recover %s\n") % f)
            raise

    opener.unlink(journal)
    backuppath = "%s.backupfiles" % journal
    if opener.exists(backuppath):
        opener.unlink(backuppath)
    for f in backupfiles:
        opener.unlink(f)

class transaction(object):
    def __init__(self, report, opener, journal, after=None, createmode=None,
            onclose=None, onabort=None):
Example #28
def filemerge(repo, mynode, orig, fcd, fco, fca):
    """perform a 3-way merge in the working directory

    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file
    """

    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    if not fco.cmp(fcd):  # files identical?
        return None

    ui = repo.ui
    fd = fcd.path()
    binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
    symlink = "l" in fcd.flags() + fco.flags()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
    ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" % (tool, fd, binary, symlink))

    if not tool or tool == "internal:prompt":
        tool = "internal:local"
        if ui.promptchoice(
            _(" no tool found to merge %s\n" "keep (l)ocal or take (o)ther?") % fd, (_("&Local"), _("&Other")), 0
        ):
            tool = "internal:other"
    if tool == "internal:local":
        return 0
    if tool == "internal:other":
        repo.wwrite(fd, fco.data(), fco.flags())
        return 0
    if tool == "internal:fail":
        return 1

    # do the actual merge
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    out = ""
    back = a + ".orig"
    util.copyfile(a, back)

    if orig != fco.path():
        ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
    else:
        ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
    except error.ConfigError:
        premerge = _toolstr(ui, tool, "premerge").lower()
        valid = "keep".split()
        if premerge not in valid:
            _valid = ", ".join(["'" + v + "'" for v in valid])
            raise error.ConfigError(
                _("%s.premerge not valid " "('%s' is neither boolean nor %s)") % (tool, premerge, _valid)
            )

    if premerge:
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
        if not r:
            ui.debug(" premerge successful\n")
            os.unlink(back)
            os.unlink(b)
            os.unlink(c)
            return 0
        if premerge != "keep":
            util.copyfile(back, a)  # restore from backup and try again

    env = dict(
        HG_FILE=fd,
        HG_MY_NODE=short(mynode),
        HG_OTHER_NODE=str(fco.changectx()),
        HG_BASE_NODE=str(fca.changectx()),
        HG_MY_ISLINK="l" in fcd.flags(),
        HG_OTHER_ISLINK="l" in fco.flags(),
        HG_BASE_ISLINK="l" in fca.flags(),
    )

    if tool == "internal:merge":
        r = simplemerge.simplemerge(ui, a, b, c, label=["local", "other"])
    elif tool == "internal:dump":
        a = repo.wjoin(fd)
        util.copyfile(a, a + ".local")
        repo.wwrite(fd + ".other", fco.data(), fco.flags())
        repo.wwrite(fd + ".base", fca.data(), fca.flags())
        os.unlink(b)
        os.unlink(c)
        return 1  # unresolved
    else:
        args = _toolstr(ui, tool, "args", "$local $base $other")
        if "$output" in args:
            out, a = a, back  # read input from backup, write to original
        replace = dict(local=a, base=b, other=c, output=out)
        args = util.interpolate(r"\$", replace, args, lambda s: '"%s"' % util.localpath(s))
        r = util.system(toolpath + " " + args, cwd=repo.root, environ=env, out=ui.fout)

    if not r and (_toolbool(ui, tool, "checkconflicts") or "conflicts" in _toollist(ui, tool, "check")):
        if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(), re.MULTILINE):
            r = 1

    checked = False
    if "prompt" in _toollist(ui, tool, "check"):
        checked = True
        if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd, (_("&Yes"), _("&No")), 1):
            r = 1

    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or "changed" in _toollist(ui, tool, "check")):
        if filecmp.cmp(repo.wjoin(fd), back):
            if ui.promptchoice(
                _(" output file %s appears unchanged\n" "was merge successful (yn)?") % fd, (_("&Yes"), _("&No")), 1
            ):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(repo.wjoin(fd), back)

    if r:
        if tool == "internal:merge":
            ui.warn(_("merging %s incomplete! " "(edit conflicts, then use 'hg resolve --mark')\n") % fd)
        else:
            ui.warn(_("merging %s failed!\n") % fd)
    else:
        os.unlink(back)

    os.unlink(b)
    os.unlink(c)
    return r
Example #29
def clone(ui,
          peeropts,
          source,
          dest=None,
          pull=False,
          rev=None,
          update=True,
          stream=False,
          branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository peers, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case or if the
    server prefers streaming

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        srcpeer = peer(ui, peeropts, source)
    else:
        srcpeer = source.peer()  # in case we were called with a localrepo
        branch = (None, branch or [])
        origsource = source = srcpeer.url()
    rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        if dest:
            ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = util.urllocalpath(dest)
    source = util.urllocalpath(source)

    if not dest:
        raise util.Abort(_("empty destination path is not valid"))

    destvfs = scmutil.vfs(dest, expandpath=True)
    if destvfs.lexists():
        if not destvfs.isdir():
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif destvfs.listdir():
            raise util.Abort(_("destination '%s' is not empty") % dest)

    srclock = destlock = cleandir = None
    srcrepo = srcpeer.local()
    try:
        abspath = origsource
        if islocal(origsource):
            abspath = os.path.abspath(util.urllocalpath(origsource))

        if islocal(dest):
            cleandir = dest

        copy = False
        if (srcrepo and srcrepo.cancopy() and islocal(dest)
                and not phases.hassecret(srcrepo)):
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                srclock = srcrepo.lock(wait=False)
            except error.LockError:
                copy = False

        if copy:
            srcrepo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                cleandir = hgdir
            try:
                destpath = hgdir
                util.makedir(destpath, notindexed=True)
            except OSError as inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise util.Abort(
                        _("destination '%s' already exists") % dest)
                raise

            destlock = copystore(ui, srcrepo, destpath)
            # copy bookmarks over
            srcbookmarks = srcrepo.join('bookmarks')
            dstbookmarks = os.path.join(destpath, 'bookmarks')
            if os.path.exists(srcbookmarks):
                util.copyfile(srcbookmarks, dstbookmarks)

            # Recomputing branch cache might be slow on big repos,
            # so just copy it
            def copybranchcache(fname):
                srcbranchcache = srcrepo.join('cache/%s' % fname)
                dstbranchcache = os.path.join(dstcachedir, fname)
                if os.path.exists(srcbranchcache):
                    if not os.path.exists(dstcachedir):
                        os.mkdir(dstcachedir)
                    util.copyfile(srcbranchcache, dstbranchcache)

            dstcachedir = os.path.join(destpath, 'cache')
            # In local clones we're copying all nodes, not just served
            # ones. Therefore copy all branch caches over.
            copybranchcache('branch2')
            for cachename in repoview.filtertable:
                copybranchcache('branch2-%s' % cachename)

            # we need to re-init the repo after manually copying the data
            # into it
            destpeer = peer(srcrepo, peeropts, dest)
            srcrepo.hook('outgoing',
                         source='clone',
                         node=node.hex(node.nullid))
        else:
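
A minimal call sketch built from the documented arguments; the paths are invented and peeropts is left empty:

# copy a local repository and update the new working directory;
# the function returns the (source, destination) peer pair
srcpeer, destpeer = clone(ui, {}, '/srv/repos/project',
                          dest='/home/alice/project-clone',
                          pull=False, update=True)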
Example #30
def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """perform a 3-way merge in the working directory

    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file
    """

    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    if not fco.cmp(fcd): # files identical?
        return None

    ui = repo.ui
    fd = fcd.path()
    binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
    symlink = 'l' in fcd.flags() + fco.flags()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
    ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
               (tool, fd, binary, symlink))

    if tool in internals:
        func = internals[tool]
        trymerge = func.trymerge
        onfailure = func.onfailure
    else:
        func = _xmerge
        trymerge = True
        onfailure = _("merging %s failed!\n")

    toolconf = tool, toolpath, binary, symlink

    if not trymerge:
        return func(repo, mynode, orig, fcd, fco, fca, toolconf)

    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    back = a + ".orig"
    util.copyfile(a, back)

    if orig != fco.path():
        ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
    else:
        ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    markerstyle = ui.config('ui', 'mergemarkers', 'basic')
    if not labels:
        labels = _defaultconflictlabels
    if markerstyle != 'basic':
        labels = _formatlabels(repo, fcd, fco, fca, labels)

    needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf,
                        (a, b, c, back), labels=labels)
    if not needcheck:
        if r:
            if onfailure:
                ui.warn(onfailure % fd)
        else:
            util.unlink(back)

        util.unlink(b)
        util.unlink(c)
        return r

    if not r and (_toolbool(ui, tool, "checkconflicts") or
                  'conflicts' in _toollist(ui, tool, "check")):
        if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
                     re.MULTILINE):
            r = 1

    checked = False
    if 'prompt' in _toollist(ui, tool, "check"):
        checked = True
        if ui.promptchoice(_("was merge of '%s' successful (yn)?"
                             "$$ &Yes $$ &No") % fd, 1):
            r = 1

    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
                                  'changed' in _toollist(ui, tool, "check")):
        if filecmp.cmp(a, back):
            if ui.promptchoice(_(" output file %s appears unchanged\n"
                                 "was merge successful (yn)?"
                                 "$$ &Yes $$ &No") % fd, 1):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(a, back)

    if r:
        if onfailure:
            ui.warn(onfailure % fd)
    else:
        util.unlink(back)

    util.unlink(b)
    util.unlink(c)
    return r
Example #31
def filemerge(repo, mynode, orig, fcd, fco, fca):
    """perform a 3-way merge in the working directory

    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file
    """
    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    def isbin(ctx):
        try:
            return util.binary(ctx.data())
        except IOError:
            return False

    if not fco.cmp(fcd.data()):  # files identical?
        return None

    if fca == fco:  # backwards, use working dir parent as ancestor
        fca = fcd.parents()[0]

    ui = repo.ui
    fd = fcd.path()
    binary = isbin(fcd) or isbin(fco) or isbin(fca)
    symlink = 'l' in fcd.flags() + fco.flags()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
    ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
             (tool, fd, binary, symlink))

    if not tool or tool == 'internal:prompt':
        tool = "internal:local"
        if ui.promptchoice(
                _(" no tool found to merge %s\n"
                  "keep (l)ocal or take (o)ther?") % fd,
            (_("&Local"), _("&Other")), 0):
            tool = "internal:other"
    if tool == "internal:local":
        return 0
    if tool == "internal:other":
        repo.wwrite(fd, fco.data(), fco.flags())
        return 0
    if tool == "internal:fail":
        return 1

    # do the actual merge
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    out = ""
    back = a + ".orig"
    util.copyfile(a, back)

    if orig != fco.path():
        ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
    else:
        ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    # do we attempt to simplemerge first?
    if _toolbool(ui, tool, "premerge", not (binary or symlink)):
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
        if not r:
            ui.debug(" premerge successful\n")
            os.unlink(back)
            os.unlink(b)
            os.unlink(c)
            return 0
        util.copyfile(back, a)  # restore from backup and try again

    env = dict(HG_FILE=fd,
               HG_MY_NODE=short(mynode),
               HG_OTHER_NODE=str(fco.changectx()),
               HG_BASE_NODE=str(fca.changectx()),
               HG_MY_ISLINK='l' in fcd.flags(),
               HG_OTHER_ISLINK='l' in fco.flags(),
               HG_BASE_ISLINK='l' in fca.flags())

    if tool == "internal:merge":
        r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
    elif tool == 'internal:dump':
        a = repo.wjoin(fd)
        util.copyfile(a, a + ".local")
        repo.wwrite(fd + ".other", fco.data(), fco.flags())
        repo.wwrite(fd + ".base", fca.data(), fca.flags())
        return 1  # unresolved
    else:
        args = _toolstr(ui, tool, "args", '$local $base $other')
        if "$output" in args:
            out, a = a, back  # read input from backup, write to original
        replace = dict(local=a, base=b, other=c, output=out)
        args = re.sub(
            "\$(local|base|other|output)",
            lambda x: '"%s"' % util.localpath(replace[x.group()[1:]]), args)
        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)

    if not r and _toolbool(ui, tool, "checkconflicts"):
        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()):
            r = 1

    if not r and _toolbool(ui, tool, "checkchanged"):
        if filecmp.cmp(repo.wjoin(fd), back):
            if ui.promptchoice(
                    _(" output file %s appears unchanged\n"
                      "was merge successful (yn)?") % fd,
                (_("&Yes"), _("&No")), 1):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(repo.wjoin(fd), back)

    if r:
        ui.warn(_("merging %s failed!\n") % fd)
    else:
        os.unlink(back)

    os.unlink(b)
    os.unlink(c)
    return r
Example #32
def filemerge(repo, mynode, orig, fcd, fco, fca):
    """perform a 3-way merge in the working directory

    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file
    """
    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    if not fco.cmp(fcd):  # files identical?
        return None

    ui = repo.ui
    fd = fcd.path()
    binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
    symlink = 'l' in fcd.flags() + fco.flags()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
    ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
             (tool, fd, binary, symlink))

    if not tool or tool == 'internal:prompt':
        tool = "internal:local"
        if ui.promptchoice(
                _(" no tool found to merge %s\n"
                  "keep (l)ocal or take (o)ther?") % fd,
            (_("&Local"), _("&Other")), 0):
            tool = "internal:other"
    if tool == "internal:local":
        return 0
    if tool == "internal:other":
        repo.wwrite(fd, fco.data(), fco.flags())
        return 0
    if tool == "internal:fail":
        return 1

    # do the actual merge
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    out = ""
    back = a + ".orig"
    util.copyfile(a, back)

    if orig != fco.path():
        ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
    else:
        ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
    except error.ConfigError:
        premerge = _toolstr(ui, tool, "premerge").lower()
        valid = 'keep'.split()
        if premerge not in valid:
            _valid = ', '.join(["'" + v + "'" for v in valid])
            raise error.ConfigError(
                _("%s.premerge not valid "
                  "('%s' is neither boolean nor %s)") %
                (tool, premerge, _valid))

    if premerge:
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
        if not r:
            ui.debug(" premerge successful\n")
            os.unlink(back)
            os.unlink(b)
            os.unlink(c)
            return 0
        if premerge != 'keep':
            util.copyfile(back, a)  # restore from backup and try again

    env = dict(HG_FILE=fd,
               HG_MY_NODE=short(mynode),
               HG_OTHER_NODE=str(fco.changectx()),
               HG_BASE_NODE=str(fca.changectx()),
               HG_MY_ISLINK='l' in fcd.flags(),
               HG_OTHER_ISLINK='l' in fco.flags(),
               HG_BASE_ISLINK='l' in fca.flags())

    if tool == "internal:merge":
        r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
    elif tool == 'internal:dump':
        a = repo.wjoin(fd)
        util.copyfile(a, a + ".local")
        repo.wwrite(fd + ".other", fco.data(), fco.flags())
        repo.wwrite(fd + ".base", fca.data(), fca.flags())
        os.unlink(b)
        os.unlink(c)
        return 1  # unresolved
    else:
        args = _toolstr(ui, tool, "args", '$local $base $other')
        if "$output" in args:
            out, a = a, back  # read input from backup, write to original
        replace = dict(local=a, base=b, other=c, output=out)
        args = util.interpolate(r'\$', replace, args,
                                lambda s: '"%s"' % util.localpath(s))
        r = util.system(toolpath + ' ' + args,
                        cwd=repo.root,
                        environ=env,
                        out=ui.fout)

    if not r and (_toolbool(ui, tool, "checkconflicts")
                  or 'conflicts' in _toollist(ui, tool, "check")):
        if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
                     re.MULTILINE):
            r = 1

    checked = False
    if 'prompt' in _toollist(ui, tool, "check"):
        checked = True
        if ui.promptchoice(
                _("was merge of '%s' successful (yn)?") % fd,
            (_("&Yes"), _("&No")), 1):
            r = 1

    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
                                  'changed' in _toollist(ui, tool, "check")):
        if filecmp.cmp(repo.wjoin(fd), back):
            if ui.promptchoice(
                    _(" output file %s appears unchanged\n"
                      "was merge successful (yn)?") % fd,
                (_("&Yes"), _("&No")), 1):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(repo.wjoin(fd), back)

    if r:
        if tool == "internal:merge":
            ui.warn(
                _("merging %s incomplete! "
                  "(edit conflicts, then use 'hg resolve --mark')\n") % fd)
        else:
            ui.warn(_("merging %s failed!\n") % fd)
    else:
        os.unlink(back)

    os.unlink(b)
    os.unlink(c)
    return r
Example #33
def filemerge(repo, fw, fd, fo, wctx, mctx):
    """perform a 3-way merge in the working directory

    fw = original filename in the working directory
    fd = destination filename in the working directory
    fo = filename in other parent
    wctx, mctx = working and merge changecontexts
    """

    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    def isbin(ctx):
        try:
            return util.binary(ctx.data())
        except IOError:
            return False

    fco = mctx.filectx(fo)
    if not fco.cmp(wctx.filectx(fd).data()): # files identical?
        return None

    ui = repo.ui
    fcm = wctx.filectx(fw)
    fca = fcm.ancestor(fco) or repo.filectx(fw, fileid=nullrev)
    binary = isbin(fcm) or isbin(fco) or isbin(fca)
    symlink = fcm.islink() or fco.islink()
    tool, toolpath = _picktool(repo, ui, fw, binary, symlink)
    ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
               (tool, fw, binary, symlink))

    if not tool:
        tool = "internal:local"
        if ui.prompt(_(" no tool found to merge %s\n"
                       "keep (l)ocal or take (o)ther?") % fw,
                     _("[lo]"), _("l")) != _("l"):
            tool = "internal:other"
    if tool == "internal:local":
        return 0
    if tool == "internal:other":
        repo.wwrite(fd, fco.data(), fco.fileflags())
        return 0
    if tool == "internal:fail":
        return 1

    # do the actual merge
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    out = ""
    back = a + ".orig"
    util.copyfile(a, back)

    if fw != fo:
        repo.ui.status(_("merging %s and %s\n") % (fw, fo))
    else:
        repo.ui.status(_("merging %s\n") % fw)
    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))

    # do we attempt to simplemerge first?
    if _toolbool(ui, tool, "premerge", not (binary or symlink)):
        r = simplemerge.simplemerge(a, b, c, quiet=True)
        if not r:
            ui.debug(_(" premerge successful\n"))
            os.unlink(back)
            os.unlink(b)
            os.unlink(c)
            return 0
        util.copyfile(back, a) # restore from backup and try again

    env = dict(HG_FILE=fd,
               HG_MY_NODE=str(wctx.parents()[0]),
               HG_OTHER_NODE=str(mctx),
               HG_MY_ISLINK=fcm.islink(),
               HG_OTHER_ISLINK=fco.islink(),
               HG_BASE_ISLINK=fca.islink())

    if tool == "internal:merge":
        r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
    else:
        args = _toolstr(ui, tool, "args", '$local $base $other')
        if "$output" in args:
            out, a = a, back # read input from backup, write to original
        replace = dict(local=a, base=b, other=c, output=out)
        args = re.sub("\$(local|base|other|output)",
                      lambda x: '"%s"' % replace[x.group()[1:]], args)
        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)

    if not r and _toolbool(ui, tool, "checkconflicts"):
        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcm.data()):
            r = 1

    if not r and _toolbool(ui, tool, "checkchanged"):
        if filecmp.cmp(repo.wjoin(fd), back):
            if ui.prompt(_(" output file %s appears unchanged\n"
                "was merge successful (yn)?") % fd,
                _("[yn]"), _("n")) != _("y"):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(repo.wjoin(fd), back)

    if r:
        repo.ui.warn(_("merging %s failed!\n") % fd)
    else:
        os.unlink(back)

    os.unlink(b)
    os.unlink(c)
    return r
Example #34
def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
          update=True, stream=False, branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository peers, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        srcpeer = peer(ui, peeropts, source)
    else:
        srcpeer = source.peer() # in case we were called with a localrepo
        branch = (None, branch or [])
        origsource = source = srcpeer.url()
    rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = util.urllocalpath(dest)
    source = util.urllocalpath(source)

    if not dest:
        raise util.Abort(_("empty destination path is not valid"))
    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif os.listdir(dest):
            raise util.Abort(_("destination '%s' is not empty") % dest)

    srclock = destlock = cleandir = None
    srcrepo = srcpeer.local()
    try:
        abspath = origsource
        if islocal(origsource):
            abspath = os.path.abspath(util.urllocalpath(origsource))

        if islocal(dest):
            cleandir = dest

        copy = False
        if (srcrepo and srcrepo.cancopy() and islocal(dest)
            and not phases.hassecret(srcrepo)):
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                srclock = srcrepo.lock(wait=False)
            except error.LockError:
                copy = False

        if copy:
            srcrepo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                cleandir = hgdir
            try:
                destpath = hgdir
                util.makedir(destpath, notindexed=True)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            destlock = copystore(ui, srcrepo, destpath)

            # Recomputing branch cache might be slow on big repos,
            # so just copy it
            dstcachedir = os.path.join(destpath, 'cache')
            srcbranchcache = srcrepo.sjoin('cache/branchheads')
            dstbranchcache = os.path.join(dstcachedir, 'branchheads')
            if os.path.exists(srcbranchcache):
                if not os.path.exists(dstcachedir):
                    os.mkdir(dstcachedir)
                util.copyfile(srcbranchcache, dstbranchcache)

            # we need to re-init the repo after manually copying the data
            # into it
            destpeer = peer(srcrepo, peeropts, dest)
            srcrepo.hook('outgoing', source='clone',
                          node=node.hex(node.nullid))
        else:
Ejemplo n.º 35
0
            except (IOError, OSError), inst:
                if inst.errno != errno.ENOENT:
                    raise

    backupfiles = []
    for l, f, b, c in backupentries:
        if l not in vfsmap and c:
            report("couldn't handle %s: unknown cache location %s\n"
                        % (b, l))
        vfs = vfsmap[l]
        try:
            if f and b:
                filepath = vfs.join(f)
                backuppath = vfs.join(b)
                try:
                    util.copyfile(backuppath, filepath)
                    backupfiles.append(b)
                except IOError:
                    report(_("failed to recover %s\n") % f)
            else:
                target = f or b
                try:
                    vfs.unlink(target)
                except (IOError, OSError), inst:
                    if inst.errno != errno.ENOENT:
                        raise
        except (IOError, OSError, util.Abort), inst:
            if not c:
                raise

    opener.unlink(journal)
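
The recovery fragment above walks (location, file, backup, cache) tuples: when both a file and a backup are recorded, the backup is copied over the file; otherwise the stray entry is unlinked, and failures on cache entries are tolerated. A hedged, filesystem-only sketch of the same loop, without Mercurial's vfs layer (the tuple layout mirrors the excerpt; the root argument is an assumption):

import errno
import os
import shutil

def replay_backups(entries, root):
    """Restore files from (location, file, backup, cache) backup entries under root."""
    restored = []
    for location, name, backup, cache in entries:
        try:
            if name and backup:
                # copy the backup over the possibly damaged file
                shutil.copyfile(os.path.join(root, backup),
                                os.path.join(root, name))
                restored.append(backup)
            else:
                # no backup recorded: the entry names a file that should not exist
                try:
                    os.unlink(os.path.join(root, name or backup))
                except OSError as inst:
                    if inst.errno != errno.ENOENT:
                        raise
        except OSError:
            if not cache:              # only cache files may fail silently
                raise
    return restored
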
Ejemplo n.º 36
0
 def begin(self):
     dr = self.driver
     dr.press_keycode(3)
     time.sleep(1)
     # registration rate
     sign_rate = random.randint(1, 10000)
     if sign_rate <= 8500:
         removefile("/sdcard/2/*.jpg")
         removefile("/sdcard/*.jpg")
         time.sleep(1)
         copyfile(
             "/sdcard/1/\.1touxiang/PIC\ \(%s\).jpg" %
             random.randint(1, 1400), "/sdcard/")
         time.sleep(0.5)
         for _ in range(5):
             copyfile(
                 "/sdcard/1/\.1xiangce/PIC\ \(%s\).jpg" %
                 random.randint(1, 1400), "/sdcard/2/")
             time.sleep(0.5)
         WebDriverWait(
             dr,
             30).until(lambda d: d.find_element_by_name("文件管理")).click()
         time.sleep(1)
         try:
             WebDriverWait(
                 dr,
                 30).until(lambda d: d.find_element_by_name("手机")).click()
             time.sleep(1)
         except TimeoutException:
             dr.press_keycode(4)
             time.sleep(1)
             dr.press_keycode(4)
             time.sleep(1)
             dr.press_keycode(4)
             time.sleep(1)
             return self.begin
         WebDriverWait(
             dr, 30).until(lambda d: d.find_element_by_name("2")).click()
         time.sleep(1)
         dr.swipe(600, 400, 590, 400, 50)
         time.sleep(1)
         try:
             WebDriverWait(
                 dr,
                 5).until(lambda d: d.find_element_by_name("剪切")).click()
             time.sleep(1)
         except TimeoutException:
             WebDriverWait(
                 dr,
                 30).until(lambda d: d.find_element_by_name("更多")).click()
             time.sleep(2)
             WebDriverWait(
                 dr,
                 30).until(lambda d: d.find_element_by_name("剪切")).click()
             time.sleep(1)
         WebDriverWait(
             dr, 30).until(lambda d: d.find_element_by_name("粘贴")).click()
         time.sleep(1)
         dr.press_keycode(4)
         time.sleep(1)
         dr.press_keycode(4)
         time.sleep(1)
         dr.press_keycode(4)
         time.sleep(1)
         dr.press_keycode(4)
         time.sleep(1)
         WebDriverWait(
             dr,
             30).until(lambda d: d.find_element_by_name("文件管理")).click()
         time.sleep(1)
         WebDriverWait(
             dr, 30).until(lambda d: d.find_element_by_name("图片")).click()
         time.sleep(5)
         dr.press_keycode(4)
         time.sleep(1)
         dr.press_keycode(4)
         time.sleep(1)
         # speed-up tool (GMD Speed Time)
         WebDriverWait(dr, 10).until(
             lambda d: d.find_element_by_name("GMD Speed Time")).click()
         time.sleep(1)
         try:
             WebDriverWait(dr, 10).until(lambda d: d.find_element_by_id(
                 "com.gmd.speedtime:id/buttonStop")).click()
             time.sleep(1)
             WebDriverWait(dr, 10).until(lambda d: d.find_element_by_id(
                 "com.gmd.speedtime:id/buttonStart")).click()
             time.sleep(1)
         except TimeoutException:
             WebDriverWait(dr, 5).until(
                 lambda d: d.find_element_by_name("No, thanks")).click()
             time.sleep(1)
             WebDriverWait(dr, 10).until(lambda d: d.find_element_by_id(
                 "com.gmd.speedtime:id/buttonStop")).click()
             time.sleep(1)
             WebDriverWait(dr, 10).until(lambda d: d.find_element_by_id(
                 "com.gmd.speedtime:id/buttonStart")).click()
             time.sleep(1)
         dr.press_keycode(3)
         time.sleep(1)
         WebDriverWait(dr, 30).until(
             lambda d: d.find_element_by_name(self.appname)).click()
         time.sleep(5)
         # confirm the app has been entered
         WebDriverWait(dr, 60).until(lambda d: d.find_element_by_id(
             "com.tujiaapp.tujia:id/btn_register"))
         self.begintime = "开始:%s:%s:%s" % (time.localtime().tm_hour,
                                           time.localtime().tm_min,
                                           time.localtime().tm_sec)
         time.sleep(1)
         return self.login_code_platform
     # speed-up tool (GMD Speed Time)
     WebDriverWait(dr, 10).until(
         lambda d: d.find_element_by_name("GMD Speed Time")).click()
     time.sleep(1)
     WebDriverWait(dr, 10).until(lambda d: d.find_element_by_id(
         "com.gmd.speedtime:id/buttonStop")).click()
     time.sleep(1)
     WebDriverWait(dr, 10).until(lambda d: d.find_element_by_id(
         "com.gmd.speedtime:id/buttonStart")).click()
     time.sleep(1)
     dr.press_keycode(3)
     time.sleep(1)
     WebDriverWait(
         dr,
         30).until(lambda d: d.find_element_by_name(self.appname)).click()
     time.sleep(5)
     # confirm the app has been entered
     WebDriverWait(dr, 60).until(lambda d: d.find_element_by_id(
         "com.tujiaapp.tujia:id/btn_register"))
     self.begintime = "开始:%s:%s:%s" % (time.localtime().tm_hour,
                                       time.localtime().tm_min,
                                       time.localtime().tm_sec)
     time.sleep(random.randint(30, 60))
     return self.ends
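
The flow above repeats one idiom many times: wait up to N seconds for an element by name, click it, then sleep. A small helper in that spirit, assuming the same Selenium/Appium Python client used throughout these excerpts (the names and timeouts are placeholders):

import time

from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait

def click_by_name(driver, name, timeout=30, pause=1):
    """Wait for an element by name, click it, then pause; return False on timeout."""
    try:
        WebDriverWait(driver, timeout).until(
            lambda d: d.find_element_by_name(name)).click()
        time.sleep(pause)
        return True
    except TimeoutException:
        return False

With such a helper, a sequence like the "文件管理" / "图片" block above collapses to two calls, e.g. click_by_name(dr, "文件管理") followed by click_by_name(dr, "图片").
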
Ejemplo n.º 37
0
def clone(ui, peeropts, source, dest=None, pull=False, rev=None, update=True, stream=False, branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository peers, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case or if the
    server prefers streaming

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        srcpeer = peer(ui, peeropts, source)
    else:
        srcpeer = source.peer()  # in case we were called with a localrepo
        branch = (None, branch or [])
        origsource = source = srcpeer.url()
    rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        if dest:
            ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = util.urllocalpath(dest)
    source = util.urllocalpath(source)

    if not dest:
        raise util.Abort(_("empty destination path is not valid"))

    destvfs = scmutil.vfs(dest, expandpath=True)
    if destvfs.lexists():
        if not destvfs.isdir():
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif destvfs.listdir():
            raise util.Abort(_("destination '%s' is not empty") % dest)

    srclock = destlock = cleandir = None
    srcrepo = srcpeer.local()
    try:
        abspath = origsource
        if islocal(origsource):
            abspath = os.path.abspath(util.urllocalpath(origsource))

        if islocal(dest):
            cleandir = dest

        copy = False
        if srcrepo and srcrepo.cancopy() and islocal(dest) and not phases.hassecret(srcrepo):
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                srclock = srcrepo.lock(wait=False)
            except error.LockError:
                copy = False

        if copy:
            srcrepo.hook("preoutgoing", throw=True, source="clone")
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                cleandir = hgdir
            try:
                destpath = hgdir
                util.makedir(destpath, notindexed=True)
            except OSError as inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise util.Abort(_("destination '%s' already exists") % dest)
                raise

            destlock = copystore(ui, srcrepo, destpath)
            # copy bookmarks over
            srcbookmarks = srcrepo.join("bookmarks")
            dstbookmarks = os.path.join(destpath, "bookmarks")
            if os.path.exists(srcbookmarks):
                util.copyfile(srcbookmarks, dstbookmarks)

            # Recomputing branch cache might be slow on big repos,
            # so just copy it
            def copybranchcache(fname):
                srcbranchcache = srcrepo.join("cache/%s" % fname)
                dstbranchcache = os.path.join(dstcachedir, fname)
                if os.path.exists(srcbranchcache):
                    if not os.path.exists(dstcachedir):
                        os.mkdir(dstcachedir)
                    util.copyfile(srcbranchcache, dstbranchcache)

            dstcachedir = os.path.join(destpath, "cache")
            # In local clones we're copying all nodes, not just served
            # ones. Therefore copy all branch caches over.
            copybranchcache("branch2")
            for cachename in repoview.filtertable:
                copybranchcache("branch2-%s" % cachename)

            # we need to re-init the repo after manually copying the data
            # into it
            destpeer = peer(srcrepo, peeropts, dest)
            srcrepo.hook("outgoing", source="clone", node=node.hex(node.nullid))
        else:
            try:
                destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
                # only pass ui when no srcrepo
            except OSError as inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise util.Abort(_("destination '%s' already exists") % dest)
                raise

            revs = None
            if rev:
                if not srcpeer.capable("lookup"):
                    raise util.Abort(
                        _(
                            "src repository does not support "
                            "revision lookup and so doesn't "
                            "support clone by revision"
                        )
                    )
                revs = [srcpeer.lookup(r) for r in rev]
                checkout = revs[0]
            if destpeer.local():
                if not stream:
                    if pull:
                        stream = False
                    else:
                        stream = None
                destpeer.local().clone(srcpeer, heads=revs, stream=stream)
            elif srcrepo:
                exchange.push(srcrepo, destpeer, revs=revs, bookmarks=srcrepo._bookmarks.keys())
            else:
                raise util.Abort(_("clone from remote to remote not supported"))

        cleandir = None

        destrepo = destpeer.local()
        if destrepo:
            template = uimod.samplehgrcs["cloned"]
            fp = destrepo.vfs("hgrc", "w", text=True)
            u = util.url(abspath)
            u.passwd = None
            defaulturl = str(u)
            fp.write(template % defaulturl)
            fp.close()

            destrepo.ui.setconfig("paths", "default", defaulturl, "clone")

            if update:
                if update is not True:
                    checkout = srcpeer.lookup(update)
                uprev = None
                status = None
                if checkout is not None:
                    try:
                        uprev = destrepo.lookup(checkout)
                    except error.RepoLookupError:
                        pass
                if uprev is None:
                    try:
                        uprev = destrepo._bookmarks["@"]
                        update = "@"
                        bn = destrepo[uprev].branch()
                        if bn == "default":
                            status = _("updating to bookmark @\n")
                        else:
                            status = _("updating to bookmark @ on branch %s\n") % bn
                    except KeyError:
                        try:
                            uprev = destrepo.branchtip("default")
                        except error.RepoLookupError:
                            uprev = destrepo.lookup("tip")
                if not status:
                    bn = destrepo[uprev].branch()
                    status = _("updating to branch %s\n") % bn
                destrepo.ui.status(status)
                _update(destrepo, uprev)
                if update in destrepo._bookmarks:
                    bookmarks.activate(destrepo, update)
    finally:
        release(srclock, destlock)
        if cleandir is not None:
            shutil.rmtree(cleandir, True)
        if srcpeer is not None:
            srcpeer.close()
    return srcpeer, destpeer
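
The docstring above spells out the calling contract; a minimal usage sketch of that contract could look like the following (the ui object, URL and directory name are assumptions, not taken from the excerpt):

# hedged usage sketch; ui is assumed to be an existing Mercurial ui object
srcpeer, destpeer = clone(ui, {}, "https://example.com/repo", dest="local-copy",
                          pull=True,      # force the pull path even for a local source
                          update=True)    # update the new working directory afterwards
destrepo = destpeer.local()               # a local repository object when dest is local
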
Ejemplo n.º 38
0
def filemerge(repo, mynode, orig, fcd, fco, fca):
    """perform a 3-way merge in the working directory

    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file
    """

    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    def isbin(ctx):
        try:
            return util.binary(ctx.data())
        except IOError:
            return False

    if not fco.cmp(fcd.data()): # files identical?
        return None

    ui = repo.ui
    fd = fcd.path()
    binary = isbin(fcd) or isbin(fco) or isbin(fca)
    symlink = 'l' in fcd.flags() + fco.flags()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
    ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
               (tool, fd, binary, symlink))

    if not tool or tool == 'internal:prompt':
        tool = "internal:local"
        if ui.prompt(_(" no tool found to merge %s\n"
                       "keep (l)ocal or take (o)ther?") % fd,
                     (_("&Local"), _("&Other")), _("l")) != _("l"):
            tool = "internal:other"
    if tool == "internal:local":
        return 0
    if tool == "internal:other":
        repo.wwrite(fd, fco.data(), fco.flags())
        return 0
    if tool == "internal:fail":
        return 1

    # do the actual merge
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    out = ""
    back = a + ".orig"
    util.copyfile(a, back)

    if orig != fco.path():
        ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
    else:
        ui.status(_("merging %s\n") % fd)

    ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))

    # do we attempt to simplemerge first?
    if _toolbool(ui, tool, "premerge", not (binary or symlink)):
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
        if not r:
            ui.debug(_(" premerge successful\n"))
            os.unlink(back)
            os.unlink(b)
            os.unlink(c)
            return 0
        util.copyfile(back, a) # restore from backup and try again

    env = dict(HG_FILE=fd,
               HG_MY_NODE=short(mynode),
               HG_OTHER_NODE=str(fco.changectx()),
               HG_MY_ISLINK='l' in fcd.flags(),
               HG_OTHER_ISLINK='l' in fco.flags(),
               HG_BASE_ISLINK='l' in fca.flags())

    if tool == "internal:merge":
        r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
    elif tool == 'internal:dump':
        a = repo.wjoin(fd)
        util.copyfile(a, a + ".local")
        repo.wwrite(fd + ".other", fco.data(), fco.flags())
        repo.wwrite(fd + ".base", fca.data(), fca.flags())
        return 1 # unresolved
    else:
        args = _toolstr(ui, tool, "args", '$local $base $other')
        if "$output" in args:
            out, a = a, back # read input from backup, write to original
        replace = dict(local=a, base=b, other=c, output=out)
        args = re.sub("\$(local|base|other|output)",
                      lambda x: '"%s"' % replace[x.group()[1:]], args)
        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)

    if not r and _toolbool(ui, tool, "checkconflicts"):
        if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()):
            r = 1

    if not r and _toolbool(ui, tool, "checkchanged"):
        if filecmp.cmp(repo.wjoin(fd), back):
            if ui.prompt(_(" output file %s appears unchanged\n"
                "was merge successful (yn)?") % fd,
                (_("&Yes"), _("&No")), _("n")) != _("y"):
                r = 1

    if _toolbool(ui, tool, "fixeol"):
        _matcheol(repo.wjoin(fd), back)

    if r:
        ui.warn(_("merging %s failed!\n") % fd)
    else:
        os.unlink(back)

    os.unlink(b)
    os.unlink(c)
    return r
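
For external tools, the args template ('$local $base $other', plus '$output' when the tool supports it) is expanded with re.sub into a quoted command fragment. The same expansion as a standalone sketch (the file names below are placeholders):

import re

def expand_tool_args(template, local, base, other, output=""):
    """Expand $local/$base/$other/$output placeholders into quoted arguments."""
    replace = dict(local=local, base=base, other=other, output=output)
    return re.sub(r"\$(local|base|other|output)",
                  lambda m: '"%s"' % replace[m.group(1)], template)

# expand_tool_args("$local $base $other", "a.txt", "a.txt~base", "a.txt~other")
# -> '"a.txt" "a.txt~base" "a.txt~other"'
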
Ejemplo n.º 39
0
 def after_signup(self):
     dr = self.driver
     try:
         WebDriverWait(dr, 30).until(lambda d: d.find_element_by_id(
             "com.shwatch.news:id/user_info")).click()
         time.sleep(1)
         # change the nickname
         if random.randint(0, 1):
             WebDriverWait(dr, 60).until(lambda d: d.find_element_by_id(
                 "com.shwatch.news:id/nickname_layout")).click()
             time.sleep(1)
             # enter the nickname
             edts = WebDriverWait(
                 dr, 15).until(lambda d: d.find_element_by_class_name(
                     "android.widget.EditText"))
             edts.send_keys(self.get_filemessage("name.txt"))
             time.sleep(1)
             WebDriverWait(dr, 30).until(lambda d: d.find_element_by_id(
                 "com.shwatch.news:id/comfirmed_change")).click()
             time.sleep(1)
         # change the avatar
         if random.randint(0, 9) == 0:
             copyfile(
                 "/sdcard/1/1touxiang/PIC\ \(%s\).jpg" %
                 random.randint(1, 1000), "/sdcard/2/")
             time.sleep(1)
             WebDriverWait(dr, 15).until(lambda d: d.find_element_by_id(
                 "com.shwatch.news:id/mainpage_user_headimg")).click()
             time.sleep(1)
             WebDriverWait(dr, 15).until(lambda d: d.find_element_by_id(
                 "com.shwatch.news:id/mainpage_btn_picgroup")).click()
             time.sleep(1)
             WebDriverWait(
                 dr,
                 5).until(lambda d: d.find_element_by_name("文件管理")).click()
             time.sleep(1)
             WebDriverWait(
                 dr,
                 15).until(lambda d: d.find_element_by_name("2")).click()
             time.sleep(1)
             self.select_one_by_id("com.android.fileexplorer:id/file_image")
             time.sleep(1)
             WebDriverWait(
                 dr,
                 15).until(lambda d: d.find_element_by_name("应用")).click()
             time.sleep(5)
         WebDriverWait(dr, 15).until(lambda d: d.find_element_by_id(
             "com.shwatch.news:id/btn_callback")).click()
         time.sleep(1)
         for x in range(10):
             dr.press_keycode(4)
             time.sleep(1)
             try:
                 WebDriverWait(dr, 2).until(
                     lambda d: d.find_element_by_name("取消")).click()
                 time.sleep(1)
                 try:
                     dr.tap(random.randint(800, 1000),
                            random.randint(100, 1600))
                 except:
                     dr.tap(random.randint(550, 700),
                            random.randint(100, 1000))
                 time.sleep(5)
                 break
             except TimeoutException:
                 pass
     except:
         for x in range(10):
             dr.press_keycode(4)
             time.sleep(1)
             try:
                 WebDriverWait(dr, 2).until(
                     lambda d: d.find_element_by_name("取消")).click()
                 time.sleep(1)
                 try:
                     dr.tap(random.randint(800, 1000),
                            random.randint(100, 1600))
                 except:
                     dr.tap(random.randint(550, 700),
                            random.randint(100, 1000))
                 time.sleep(5)
                 break
             except TimeoutException:
                 pass
     return self.do
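
Both branches above end with the same escape sequence: press BACK up to ten times and dismiss a possible cancel dialog along the way. A trimmed helper sketch of that loop, assuming the same Appium Python client (the random tap on the page is left out to keep it short):

import time

from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait

def back_out(driver, attempts=10):
    """Press the Android BACK key repeatedly, clicking a cancel dialog if one appears."""
    for _ in range(attempts):
        driver.press_keycode(4)        # Android BACK key
        time.sleep(1)
        try:
            # "取消" is the Cancel button shown by the app's exit dialog
            WebDriverWait(driver, 2).until(
                lambda d: d.find_element_by_name("取消")).click()
            time.sleep(1)
            return True                # dialog handled, we are out
        except TimeoutException:
            pass                       # nothing to dismiss yet, keep backing out
    return False
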
Ejemplo n.º 40
0
def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
          update=True, stream=False, branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository peers, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case or if the
    server prefers streaming

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        srcpeer = peer(ui, peeropts, source)
    else:
        srcpeer = source.peer() # in case we were called with a localrepo
        branch = (None, branch or [])
        origsource = source = srcpeer.url()
    rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        if dest:
            ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = util.urllocalpath(dest)
    source = util.urllocalpath(source)

    if not dest:
        raise util.Abort(_("empty destination path is not valid"))

    destvfs = scmutil.vfs(dest, expandpath=True)
    if destvfs.lexists():
        if not destvfs.isdir():
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif destvfs.listdir():
            raise util.Abort(_("destination '%s' is not empty") % dest)

    srclock = destlock = cleandir = None
    srcrepo = srcpeer.local()
    try:
        abspath = origsource
        if islocal(origsource):
            abspath = os.path.abspath(util.urllocalpath(origsource))

        if islocal(dest):
            cleandir = dest

        copy = False
        if (srcrepo and srcrepo.cancopy() and islocal(dest)
            and not phases.hassecret(srcrepo)):
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                srclock = srcrepo.lock(wait=False)
            except error.LockError:
                copy = False

        if copy:
            srcrepo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                cleandir = hgdir
            try:
                destpath = hgdir
                util.makedir(destpath, notindexed=True)
            except OSError as inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            destlock = copystore(ui, srcrepo, destpath)
            # copy bookmarks over
            srcbookmarks = srcrepo.join('bookmarks')
            dstbookmarks = os.path.join(destpath, 'bookmarks')
            if os.path.exists(srcbookmarks):
                util.copyfile(srcbookmarks, dstbookmarks)

            # Recomputing branch cache might be slow on big repos,
            # so just copy it
            def copybranchcache(fname):
                srcbranchcache = srcrepo.join('cache/%s' % fname)
                dstbranchcache = os.path.join(dstcachedir, fname)
                if os.path.exists(srcbranchcache):
                    if not os.path.exists(dstcachedir):
                        os.mkdir(dstcachedir)
                    util.copyfile(srcbranchcache, dstbranchcache)

            dstcachedir = os.path.join(destpath, 'cache')
            # In local clones we're copying all nodes, not just served
            # ones. Therefore copy all branch caches over.
            copybranchcache('branch2')
            for cachename in repoview.filtertable:
                copybranchcache('branch2-%s' % cachename)

            # we need to re-init the repo after manually copying the data
            # into it
            destpeer = peer(srcrepo, peeropts, dest)
            srcrepo.hook('outgoing', source='clone',
                          node=node.hex(node.nullid))
        else:
            try:
                destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
                                # only pass ui when no srcrepo
            except OSError as inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            revs = None
            if rev:
                if not srcpeer.capable('lookup'):
                    raise util.Abort(_("src repository does not support "
                                       "revision lookup and so doesn't "
                                       "support clone by revision"))
                revs = [srcpeer.lookup(r) for r in rev]
                checkout = revs[0]
            if destpeer.local():
                if not stream:
                    if pull:
                        stream = False
                    else:
                        stream = None
                destpeer.local().clone(srcpeer, heads=revs, stream=stream)
            elif srcrepo:
                exchange.push(srcrepo, destpeer, revs=revs,
                              bookmarks=srcrepo._bookmarks.keys())
            else:
                raise util.Abort(_("clone from remote to remote not supported"))

        cleandir = None

        destrepo = destpeer.local()
        if destrepo:
            template = uimod.samplehgrcs['cloned']
            fp = destrepo.vfs("hgrc", "w", text=True)
            u = util.url(abspath)
            u.passwd = None
            defaulturl = str(u)
            fp.write(template % defaulturl)
            fp.close()

            destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')

            if update:
                if update is not True:
                    checkout = srcpeer.lookup(update)
                uprev = None
                status = None
                if checkout is not None:
                    try:
                        uprev = destrepo.lookup(checkout)
                    except error.RepoLookupError:
                        pass
                if uprev is None:
                    try:
                        uprev = destrepo._bookmarks['@']
                        update = '@'
                        bn = destrepo[uprev].branch()
                        if bn == 'default':
                            status = _("updating to bookmark @\n")
                        else:
                            status = (_("updating to bookmark @ on branch %s\n")
                                       % bn)
                    except KeyError:
                        try:
                            uprev = destrepo.branchtip('default')
                        except error.RepoLookupError:
                            uprev = destrepo.lookup('tip')
                if not status:
                    bn = destrepo[uprev].branch()
                    status = _("updating to branch %s\n") % bn
                destrepo.ui.status(status)
                _update(destrepo, uprev)
                if update in destrepo._bookmarks:
                    bookmarks.activate(destrepo, update)
    finally:
        release(srclock, destlock)
        if cleandir is not None:
            shutil.rmtree(cleandir, True)
        if srcpeer is not None:
            srcpeer.close()
    return srcpeer, destpeer
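
After the clone, the update target is chosen by a fallback chain: the requested revision if it resolves, otherwise the '@' bookmark, otherwise the tip of the 'default' branch, otherwise 'tip'. A standalone restatement of that chain, with the repository lookups stubbed as callables (the function and parameter names are assumptions, not Mercurial APIs):

def pick_update_rev(lookup, bookmarks, branchtip, checkout=None):
    """Return (node, label) following the fallback order used in the excerpt above.

    lookup and branchtip are callables that raise LookupError on failure;
    bookmarks is a mapping from bookmark name to node.
    """
    if checkout is not None:
        try:
            return lookup(checkout), str(checkout)
        except LookupError:
            pass
    if "@" in bookmarks:
        return bookmarks["@"], "@"              # prefer the '@' bookmark
    try:
        return branchtip("default"), "default"  # then the default branch tip
    except LookupError:
        return lookup("tip"), "tip"             # last resort: repository tip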