Example #1
1
    def processCommitHook(self, req):
        self.env.log.debug("processCommitHook")
        status = self.closestatus
        if not status:
            status = 'closed'

        data = req.args.get('payload')
        branches = parse_qs(req.query_string).get('branches', [self.branches])[0].split(',')
        self.env.log.debug("Using branches: %s", branches)

        if data:
            jsondata = simplejson.loads(data)
            ref = jsondata['ref'].split('/')[-1]

            if ref in branches or 'all' in branches:
                for i in jsondata['commits']:
                    self.hook.process(i, status, jsondata)
            else:
                self.env.log.debug("Not running hook, ref %s is not in %s", ref, branches)

        if self.autofetch:
            repo = Git(self.repo)

            try:
                repo.execute(['git', 'fetch'])
            except Exception:
                self.env.log.debug("git fetch failed!")
Example #2
0
def prepare_repository(project):
    debug.message("Checkout commit %s" % project["commit"], indent=1)

    repo = Git(repository.get_path(project), repository.get_origin_url(project))

    # Check out the correct commit and create a reference to it (deployed)
    repo.checkout_commit(project["commit"])
Example #3
0
def build_command(config_file, strict, site_dir, tags, default, latest):
    """Build the MkDocs documentation"""

    cli.configure_logging(level=logging.INFO)

    g = Git()
    tags = tags or g.tag().splitlines()

    log.info("Building %s to /", default)
    g.checkout(default)
    _build(_load_config(config_file, strict, site_dir), default, tags)

    log.info("Building %s to /latest", latest)
    g.checkout(default)
    _build(_load_config(config_file, strict, site_dir), latest, tags, 'latest')

    for tag in sorted(tags):

        g.checkout(tag)

        if not os.path.exists("mkdocs.yml"):
            log.warning("Unable to build %s, as no mkdocs.yml was found", tag)
            continue

        site_dir = "v{0}".format(tag)
        log.info("Building %s to /%s", tag, site_dir)
        _build(_load_config(config_file, strict, site_dir), tag, tags, site_dir)

    g.checkout('master')
Example #4
0
 def full_remote(self):
     # TODO(alex): wrap the calls to git commit
     repo = Git(self.path)
     cmd = ['git', 'remote', '-v']
     remote = repo.execute(cmd).split('(fetch)')[0]
     remote = remote or ''
     return remote.strip()
Example #5
0
    def processCommitHook(self, req):
        self.env.log.debug("processCommitHook")
        status = self.closestatus
        if not status:
            status = 'closed'

        if self.autofetch:
            repodir = RepositoryManager(self.env).repository_dir
            if not os.path.isabs(repodir):
                repodir = os.path.join(self.env.path, repodir)
            # TODO: This was the previous code; the repo option is probably unnecessary now.
            # repodir = "%s/%s" % (self.repo, reponame)
            self.env.log.debug("Autofetching: %s" % repodir)
            repo = Git(repodir)

            try:
                self.env.log.debug("Fetching repo %s" % self.repo)
                repo.execute(['git', 'fetch'])
                try:
                    self.env.log.debug("Resyncing local repo")
                    self.env.get_repository('').sync()
                except Exception:
                    self.env.log.error("git sync failed!")
            except Exception:
                self.env.log.error("git fetch failed!")

        data = req.args.get('payload')
         
        if data:
            jsondata = simplejson.loads(data)
            reponame = jsondata['repository']['name']

            for i in jsondata['commits']:
                self.hook.process(i, status, self.enable_revmap, reponame)
Example #6
0
def pull(ctx):
    """Pull dotfile changes from GitHub"""

    VerboseLog('Running pull()', ctx)
    check_init(ctx)

    if not os.path.exists(trackfile_path()):
        click.secho('Pssst...you aren\'t tracking any files.\nTry running `dot track [filename]`.\n', fg='yellow', bold=True)
        ctx.abort()

    with open(trackfile_path(), 'r') as trf:
        for line in trf:
            line = line.rstrip()
            VerboseLog('Copying ' + dot_dir_path() + '/' + line + ' to ' + home() + '/' + line, ctx)
            if os.path.exists(dot_dir_path() + '/' + line):
                shutil.copyfile(dot_dir_path() + '/' + line, home() + '/' + line)

    VerboseLog('Creating Git class object, running git.pull()', ctx)
    git = Git(home(), Conf('options', 'gitname'), Conf('options', 'reponame'))
    return_code = git.pull()

    VerboseLog('git.pull() return code was ' + str(return_code), ctx)

    if return_code == 0:
        click.secho('\ndotfiles pulled.\n', fg='green')
    else:
        click.secho('\nThere was an error pulling the dotfiles.\n', fg='red')
Example #7
0
File: pool.py Project: qrntz/pool
    def register(self, stock):
        dir, branch = self._parse_stock(stock)
        if not isdir(dir):
            raise Error("not a directory `%s'" % dir)

        try:
            git = Git(dir)
        except Git.Error:
            git = None

        if (not git and branch) or (git and branch and not git.show_ref(branch)):
            raise Error("no such branch `%s' at `%s'" % (branch, dir))

        if git and not branch:
            branch = basename(git.symbolic_ref("HEAD"))

        stock_name = basename(abspath(dir))
        if branch:
            stock_name += "#" + branch

        if stock_name in self.stocks:
            raise Error("stock already registered under name `%s'" % stock_name)

        stock_path = join(self.path, stock_name)
        Stock.create(stock_path, dir)
        self._load_stock(stock_path)
Example #8
0
def push(ctx):
    """Push dotfile changes to GitHub"""

    VerboseLog('Running push()', ctx)
    check_init(ctx)

    if not os.path.exists(trackfile_path()):
        VerboseLog('.trackfile does not exist.', ctx)
        click.secho('Pssst...you aren\'t tracking any files.\nTry running `dot track [filename]`.\n', fg='yellow', bold=True)
        ctx.abort()

    with open(trackfile_path(), 'r') as trf:
        for line in trf:
            line = line.rstrip()
            VerboseLog('Copying ' + line + ' to ' + dot_dir_path() + '/' + line, ctx)
            if os.path.exists(home() + '/' + line):
                shutil.copyfile(home() + '/' + line, dot_dir_path() + '/' + line)

    VerboseLog('Creating Git class object, running git.push()', ctx)
    git = Git(home(), Conf('options', 'gitname'), Conf('options', 'reponame'))
    return_code = git.push(Conf('options', 'time'))

    VerboseLog('git.push() return codes were ' +
                str(return_code[0]) + ' ' + 
                str(return_code[1]) + ' ' + 
                str(return_code[2]), ctx)

    if return_code[1] != 0 and return_code[2] == 0:
        click.echo('No dotfile changes to push.')
    elif return_code[2] != 0:
        click.secho('\nThere was a problem pushing the changes.\n', fg='red')
    else:
        click.secho('\ndotfiles pushed.\n', fg='green')
Example #9
def branches():
    g = Git(PROJECT_DIR)
    send('Re-fetching remote branches...')
    g.fetch(REMOTE_NAME)
    send('Fetch succeeded')
    branch_names = g.branch('-a').split('\n')
    return jsonify(branch_names)
Example #10
def previous(rel_ver):
    """
    Given a release version, find the previous version based on the latest Git
    tag that is strictly a lower version than the given release version.
    """
    if DEBUG:
        print 'Calculating previous release version (option -p was specified).'
    version_loose = LooseVersion('0.0.0')
    rel_ver_loose = LooseVersion(rel_ver)
    gexc = Git('.')
    tags = gexc.execute(['git', 'tag',
                         '--list', '1.*',
                         '--sort', '-version:refname'])
    for tag in tags.splitlines():
        previous_tag_match = PREVIOUS_TAG_RE.match(tag)
        if previous_tag_match:
            version_new = {}
            version_new['major'] = int(previous_tag_match.group('vermaj'))
            version_new['minor'] = int(previous_tag_match.group('vermin'))
            version_new['patch'] = int(previous_tag_match.group('verpatch'))
            new_version_loose = LooseVersion(str(version_new['major']) + '.' +
                                             str(version_new['minor']) + '.' +
                                             str(version_new['patch']))
            if new_version_loose < rel_ver_loose:
                version_loose = new_version_loose
                if DEBUG:
                    print 'Found new best version "' + str(version_loose) \
                            + '" from tag "' + tag + '"'
                return str(version_loose)

    return str(version_loose)
Example #11
0
def watch(build_queue):
    while True:
        debug.set_prefix("repository_watcher")
        debug.message("Retrieving projects")
        try:
            projects = Api.get_projects()

            for project in projects:
                debug.message("Check repository status for project %s" % project["Name"])
                repository = Git(get_path(project), get_origin_url(project))
                commit_count = handle_incoming_commits(repository.check_for_new_commits_on_origin(), project,
                                                       repository, build_queue)
                if commit_count > 0:
                    repository.merge_origin()

                # Add <initial_nr_commits> commits if this is a new repository
                if project["Commits"] is None or len(project["Commits"]) == 0:
                    handle_incoming_commits(
                        repository.get_commits(registry.config["repositories"]["initial_nr_commits"]), project,
                        repository, build_queue)

        except ValueError, e:
            debug.exception("Error retrieving projects", e)
        except GitError, e:
            debug.exception("Error with Git repository", e)
Example #12
0
    def merged_refs(self, skip=[]):
        """
        Returns a list of remote refs that have been merged into the master
        branch.

        The "master" branch may have a different name than master. The value of
        ``self.master_name`` is used to determine what this name is.
        """
        origin = self._origin

        master = self._master_ref(origin)
        refs = self._filtered_remotes(
            origin, skip=['HEAD', self.master_branch] + skip)
        merged = []

        for ref in refs:
            upstream = '{origin}/{master}'.format(
                origin=origin.name, master=master.remote_head)
            head = '{origin}/{branch}'.format(
                origin=origin.name, branch=ref.remote_head)
            cmd = Git(self.repo.working_dir)
            # Drop to the git binary to do this, it's just easier to work with
            # at this level.
            (retcode, stdout, stderr) = cmd.execute(
                ['git', 'cherry', upstream, head],
                with_extended_output=True, with_exceptions=False)
            if retcode == 0 and not stdout:
                # This means there are no commits in the branch that are not
                # also in the master branch. This is ready to be deleted.
                merged.append(ref)

        return merged
Example #13
0
        def remote_repo_creator(self):
            rw_daemon_repo_dir = tempfile.mktemp(prefix="daemon_repo-%s-" % func.__name__)
            rw_repo_dir = tempfile.mktemp(prefix="daemon_cloned_repo-%s-" % func.__name__)

            rw_daemon_repo = self.rorepo.clone(rw_daemon_repo_dir, shared=True, bare=True)
            # recursive alternates info ?
            rw_repo = rw_daemon_repo.clone(rw_repo_dir, shared=True, bare=False, n=True)
            try:
                rw_repo.head.commit = working_tree_ref
                rw_repo.head.reference.checkout()

                # prepare for git-daemon
                rw_daemon_repo.daemon_export = True

                # this thing is just annoying !
                with rw_daemon_repo.config_writer() as crw:
                    section = "daemon"
                    try:
                        crw.add_section(section)
                    except Exception:
                        pass
                    crw.set(section, "receivepack", True)

                # Initialize the remote - first do it as local remote and pull, then
                # we change the url to point to the daemon.
                d_remote = Remote.create(rw_repo, "daemon_origin", rw_daemon_repo_dir)
                d_remote.fetch()

                base_daemon_path, rel_repo_dir = osp.split(rw_daemon_repo_dir)

                remote_repo_url = Git.polish_url("git://localhost:%s/%s" % (GIT_DAEMON_PORT, rel_repo_dir))
                with d_remote.config_writer as cw:
                    cw.set('url', remote_repo_url)

                with git_daemon_launched(Git.polish_url(base_daemon_path, is_cygwin=False),  # No daemon in Cygwin.
                                         '127.0.0.1',
                                         GIT_DAEMON_PORT):
                    # Try listing remotes, to diagnose whether the daemon is up.
                    rw_repo.git.ls_remote(d_remote)

                    with cwd(rw_repo.working_dir):
                        try:
                            return func(self, rw_repo, rw_daemon_repo)
                        except:
                            log.info("Keeping repos after failure: \n  rw_repo_dir: %s \n  rw_daemon_repo_dir: %s",
                                     rw_repo_dir, rw_daemon_repo_dir)
                            rw_repo_dir = rw_daemon_repo_dir = None
                            raise

            finally:
                rw_repo.git.clear_cache()
                rw_daemon_repo.git.clear_cache()
                del rw_repo
                del rw_daemon_repo
                import gc
                gc.collect()
                if rw_repo_dir:
                    rmtree(rw_repo_dir)
                if rw_daemon_repo_dir:
                    rmtree(rw_daemon_repo_dir)
Example #14
0
    def processCommitHook(self, req):
        self.env.log.debug("processCommitHook")
        status = self.closestatus
        if not status:
            status = 'closed'

        data = req.args.get('payload')
        jsondata = simplejson.loads(data)
        repoName = jsondata['repository']['name']

        if self.autofetch:
            self.env.log.debug(repoName)
            repo = Git(self.gitreposdir + repoName + "/.git")

            try:
                self.env.log.debug("Fetching repo %s" % repoName)
                repo.execute(['git', 'fetch'])
                try:
                    self.env.log.debug("Resyncing local repo")
                    self.env.get_repository(repoName).sync()
                except Exception:
                    self.env.log.error("git sync failed!")
            except Exception:
                self.env.log.error("git fetch failed!")

        if jsondata:
            if jsondata['ref'] == "refs/heads/master" or re.search('-stable$', jsondata['ref']):
                for i in jsondata['commits']:
                    self.hook.process(i, status)
Example #15
0
 def post(self, project_name):
     try:
         project = Projects(project_name)
         git = Git(project)
         git.clear_repo()
         self.write(cyclone.escape.json_encode({"status": "ok"}))
     except Exception, e:
         self.write(cyclone.escape.json_encode({"status": "fail", "error": str(e)}))
Example #16
0
def test_package():
    current_dir = os.path.dirname(__file__)
    deploy_dir = os.path.join(current_dir,
        'test_repository/deploy_git_project/export')
   
    scm = Git(repository_url='git://github.com/kevinanew/webdeploy.git',
        deploy_dir=deploy_dir)
    scm.package()
Example #17
0
 def post(self, project_name):
     try:
         project = Projects(project_name)
         git = Git(project)
         git.clear_repo()
         self.write(cyclone.escape.json_encode({'status': 'ok'}))
     except Exception, e:
         self.write(cyclone.escape.json_encode({'status': 'fail', 'error': str(e)}))
Example #18
0
 def create(cls, path=None, mk_dir=False):
     """
     Initialise the repository WITH a working copy
     """
     if not os.path.exists(path) and mk_dir:
         os.mkdir(path)
     _git = Git(path or os.curdir)
     _git.init()
     return cls(path=path)
Example #19
0
    def command(self, command):
        """
        Runs the Git command in self.repo
        """
        args = split(command)

        cmd = Git(self.repodir)

        cmd.execute(args)
Example #20
0
    def __init__(self):
        # Prevent working as root
        uname = puke.utils.env.get("PUKE_OS", puke.system.uname).lower()
        id = puke.utils.env.get("PUKE_LOGIN", puke.system.login)
        if id == "root":
            puke.display.error("Root detected! Panic!")
            puke.log.critical(
                "Running puke as root without a PUKE_LOGIN is frowned upon")

        # Load chained config files
        r = Config(
            {}, "~/.pukerc", ["package.json", "package-%s-%s.json" % (id, uname)])

        self.man = r
        r = r.content
        # Map to older format for laziness reasons :)
        clean = re.sub('[.]git$', '', r.repository["url"])

        r.package = {
            "name": r.name,
            "version": r.version
        }

        r.rights = {
            "license": '<a href="%s">%s</a>' % (r.licenses[0]["url"], r.licenses[0]["type"]),
            "copyright": 'All rights reserved <a href="http://www.webitup.fr">copyright %s</a>' % r.author,
            "author": r.author
        }

        r.git = {
            "root": '%s/blob/master/src' % clean
        }
        r.paths = r.directories
        r.config = r.config

        # Git in the yanks
        try:
            g = Git()
            r.git.root = r.git.root.replace(
                '/master/', '/%s/' % g.branch())
            r.git.revision = '#' + g.nb() + '-' + g.hash()
        except:
            r.git.revision = '#no-git-information'
            puke.display.warning(
                "FAILED fetching git information - locations won't be accurate")

        for (key, path) in r.paths.items():
            puke.fs.mkdir(path)

        self.config = r

        # Bower wrapping
        try:
            self.bower = Bower(self.config.bower)
        except Exception as e:
            puke.sh.npm.install()
            self.bower = Bower(self.config.bower)
Example #21
0
class Worker:
    """The worker Class."""

    def __init__(self, args):
        """Constructor."""
        self.args = args
        self.git = Git(args)

    def branches_to_be_deleted(self):
        """Return branches to be deleted."""
        branches = self.branches_to_be_deleted_excluding_skipped()
        if len(branches) > 0:
            Formatter.print_pretty_warn_message("Following local branches would \
                be deleted:")
            return branches
        else:
            Formatter.print_pretty_fail_message("No branches to be deleted")
            return []

    def branches_to_be_deleted_excluding_skipped(self):
        """Return branches to be deleted except pattern matching branches."""
        branches = list(
            set(
                self.git.merged_branches()
            ).difference(
                self.git.pattern_matching_branches())
            )
        self.exclude_current_branch(branches)
        return branches

    def delete_branches(self, remote=False):
        """Delete the branches."""
        """If Remote=True, deletes remote branches as well."""
        if len(self.branches_to_be_deleted()) > 0:
            self.delete_local_branches()
            if remote:
                self.delete_remote_branches()
            Formatter.print_pretty_ok_message("Cleaned Successfully!!")

    def delete_remote_branches(self):
        """Delete remote branches."""
        try:
            os.popen("git push origin --delete " + " ".join(
                self.branches_to_be_deleted())
            )
        except Exception:
            print "There was an error deleting remote branches: ", sys.exc_info()[0]

    def delete_local_branches(self):
        """Delete local branches."""
        os.popen("git branch -D " + " ".join(self.branches_to_be_deleted()))

    def exclude_current_branch(self, branches):
        """Exclude current branch from list of branches to be deleted."""
        if self.git.current_branch() in branches:
            branches.remove(self.git.current_branch())
Example #22
class _GitWrapperCommon(object):
    '''
    Wrap git module to provide a more stable interface across versions
    '''
    def __init__(self, repo_path):
        self.git = Git()
        self.repo = Repo(os.path.abspath('.'))

    def is_file_managed_by_git(self, path):
        '''
        :param path: Path to check
        :returns: True if path is managed by git
        '''
        status, _stdout, _stderr = self.git.execute(
            ['git', 'ls-files', path, '--error-unmatch'],
            with_extended_output=True,
            with_exceptions=False)
        return status == 0

    def is_file_modified(self, path):
        '''
        Does a file have local changes not yet committed

        :returns: True if file has local changes
        '''
        status, _stdout, _stderr = self.git.execute(
            ['git', 'diff', '--quiet', 'HEAD', path],
            with_extended_output=True,
            with_exceptions=False)
        return status != 0

    def get_commits_following(self, path):
        '''
        Get all commits including path following the file through
        renames

        :param path: Path which we will find commits for
        :returns: Sequence of commit objects. Newest to oldest
        '''
        commit_shas = self.git.log(
            '--pretty=%H', '--follow', '--', path).splitlines()
        return map(self.repo.commit, commit_shas)

    def get_commits(self, path, follow=False):
        '''
        Get all commits including path

        :param path: Path which we will find commits for
        :param bool follow: If True we will follow path through renames

        :returns: Sequence of commit objects. Newest to oldest
        '''
        if follow:
            return self.get_commits_following(path)
        else:
            return self._get_commits(path)
Example #23
0
def sync(repos_gh, gitlab):
    git = Git()
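    # Note: repo_exists/repo_create/repo_sync below are not GitPython Git methods; this Git
    # appears to be a project-specific wrapper exposing that interface.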

    for repo_gh in repos_gh:
        name = repo_gh['name']
        repo_gl = gitlab.get_repo(name)
        print("Repository " + name + ":")
        if not git.repo_exists(name):
            print(git.repo_create(repo_gh, repo_gl))
        print(git.repo_sync(repo_gh))
Example #24
 def checkout_source(self, git_path, checkout_options=None):
     '''
     Checkout source and switch to the right tag
     '''
     shutil.rmtree(self.workspace, ignore_errors=True)
     os.mkdir(self.workspace)
     os.chdir(self.workspace)
     Repo.clone_from(git_path, self.target_name)
     g = Git(self.source_path)
     g.checkout(checkout_options)    
Example #25
0
def get_changed_files(diff_spec):
    files = []
    git_ = Git(os.getcwd())
    changed_files = git_.diff(diff_spec, '--name-only').strip().split('\n')

    # Changed files will contain a single empty string if no files were changed
    if len(changed_files) == 1 and not changed_files[0]:
        return []
    files.extend(changed_files)
    return files
Example #26
0
    def plant_seed(self, seed_id=None, target_dir=None):
        if not seed_id or not target_dir:
            self.rage_quit("Missing arguments, seed plant failed")

        seeds = self.get_seeds()
        tagref = seeds.get(seed_id, None)
        if not tagref:
            self.rage_quit("Seed id {} not found".format(seed_id))

        git = Git(tagref.repo.working_dir)
        current_commit = str(Repo(tagref.repo.working_dir).commit())
        cprint("Current commit: {}".format(current_commit), Fore.GREEN)
        dirty = tagref.repo.is_dirty()

        cprint("Working directory {}".format('dirty' if dirty else 'clean'), Fore.GREEN)
        if dirty:
            cprint("--> git stash", Fore.YELLOW)
            git.stash()

        cprint("--> git checkout {}".format(seed_id), Fore.YELLOW)
        git.checkout(seed_id)

        try:
            cprint("Copying seed directory: {}".format(tagref.repo.working_dir), Fore.GREEN)
            call(["cp", "-r", tagref.repo.working_dir, target_dir])
        except OSError as error:
            cprint("Copying directory failed:\n{}".format(error), Fore.RED)
        finally:
            if dirty:
                cprint("--> git stash apply", Fore.YELLOW)
                git.stash('apply')

            cprint("--> git checkout {}".format(current_commit), Fore.YELLOW)
            git.checkout(current_commit)
Example #27
0
 def test_update(self):
     git = Git(root_dir_local, test_repo_dir)
     git.clone()
     self.assertFalse(os.path.exists(os.path.join(test_repo_dir, "ANOTHER_FILE.txt")))
     os.chdir(test_repo_dir)
     with open("ANOTHER_FILE.txt", "a") as the_file:
         the_file.write("Another file")
     lib.call_and_exit_if_failed("git add .")
     lib.call_and_exit_if_failed('git commit -m "another commit"')
     git.update()
     self.assertTrue(os.path.exists(os.path.join(test_repo_dir, "ANOTHER_FILE.txt")))
Example #28
0
 def delete(self, name):
     try:
         project = Projects(name)
         git = Git(project)
         git.clear_repo()
         project.clear_branches()
         project.delete()
         self.write(cyclone.escape.json_encode({'status': 'project deleted'}))
     except Exception, e:
         log.err(e)
         self.write(cyclone.escape.json_encode({'status': 'failed to delete %s' % str(e)}))
Example #29
0
class _GitWrapperCommon(object):
    """
    Wrap git module to provide a more stable interface across versions
    """

    def __init__(self, repo_path):
        self.git = Git()
        self.repo = Repo(os.path.abspath("."))

    def is_file_managed_by_git(self, path):
        """
        :param path: Path to check
        :returns: True if path is managed by git
        """
        status, _stdout, _stderr = self.git.execute(
            ["git", "ls-files", path, "--error-unmatch"], with_extended_output=True, with_exceptions=False
        )
        return status == 0

    def is_file_modified(self, path):
        """
        Does a file have local changes not yet committed

        :returns: True if file has local changes
        """
        status, _stdout, _stderr = self.git.execute(
            ["git", "diff", "--quiet", "HEAD", path], with_extended_output=True, with_exceptions=False
        )
        return status != 0

    def get_commits_following(self, path):
        """
        Get all commits including path following the file through
        renames

        :param path: Path which we will find commits for
        :returns: Sequence of commit objects. Newest to oldest
        """
        commit_shas = self.git.log("--pretty=%H", "--follow", "--", path).splitlines()
        return [self.repo.commit(shas) for shas in commit_shas]

    def get_commits(self, path, follow=False):
        """
        Get all commits including path

        :param path: Path which we will find commits for
        :param bool follow: If True we will follow path through renames

        :returns: Sequence of commit objects. Newest to oldest
        """
        if follow:
            return self.get_commits_following(path)
        else:
            return self._get_commits(path)
Example #30
0
    def describe(self):
        repo = Git(self.path)
        cmd = ['git', 'describe', '--tags']
        result = repo.execute(cmd).split('-')

        if len(result) == 1:
            return '', 0, ''
        else:
            howmany, sha = result[-2:]
            branch = '-'.join(result[0:len(result) - 2])
            return branch, howmany, sha
Example #31
0
        def remote_repo_creator(self):
            remote_repo_dir = _mktemp("remote_repo_%s" % func.__name__)
            repo_dir = _mktemp("remote_clone_non_bare_repo")

            rw_remote_repo = self.rorepo.clone(remote_repo_dir,
                                               shared=True,
                                               bare=True)
            # recursive alternates info ?
            rw_repo = rw_remote_repo.clone(repo_dir,
                                           shared=True,
                                           bare=False,
                                           n=True)
            rw_repo.head.commit = working_tree_ref
            rw_repo.head.reference.checkout()

            # prepare for git-daemon
            rw_remote_repo.daemon_export = True

            # this thing is just annoying !
            crw = rw_remote_repo.config_writer()
            section = "daemon"
            try:
                crw.add_section(section)
            except Exception:
                pass
            crw.set(section, "receivepack", True)
            # release lock
            crw.release()
            del (crw)

            # initialize the remote - first do it as local remote and pull, then
            # we change the url to point to the daemon. The daemon should be started
            # by the user, not by us
            d_remote = Remote.create(rw_repo, "daemon_origin", remote_repo_dir)
            d_remote.fetch()
            remote_repo_url = "git://localhost:%s%s" % (GIT_DAEMON_PORT,
                                                        remote_repo_dir)

            d_remote.config_writer.set('url', remote_repo_url)

            temp_dir = osp(_mktemp())
            # On windows, this will fail ... we deal with failures anyway and default to telling the user to do it
            try:
                gd = Git().daemon(temp_dir,
                                  enable='receive-pack',
                                  listen='127.0.0.1',
                                  port=GIT_DAEMON_PORT,
                                  as_process=True)
                # yes, I know ... fortunately, this is always going to work if sleep time is just large enough
                time.sleep(0.5)
            except Exception:
                gd = None
            # end

            # try to list remotes to diagnose whether the server is up
            try:
                rw_repo.git.ls_remote(d_remote)
            except GitCommandError as e:
                # We assume in good faith that we didn't start the daemon - but make sure we kill it anyway
                # Of course we expect it to work here already, but maybe there are timing constraints
                # on some platforms ?
                if gd is not None:
                    os.kill(gd.proc.pid, 15)
                print(str(e))
                if os.name == 'nt':
                    msg = "git-daemon needs to run this test, but windows does not have one. "
                    msg += 'Otherwise, run: git-daemon "%s"' % temp_dir
                    raise AssertionError(msg)
                else:
                    msg = 'Please start a git-daemon to run this test, execute: git daemon --enable=receive-pack "%s". '
                    msg += 'You can also run the daemon on a different port by passing --port=<port> '
                    msg += 'and setting the environment variable GIT_PYTHON_TEST_GIT_DAEMON_PORT to <port>'
                    msg %= temp_dir
                    raise AssertionError(msg)
                # END make assertion
            # END catch ls remote error

            # adjust working dir
            prev_cwd = os.getcwd()
            os.chdir(rw_repo.working_dir)
            try:
                try:
                    return func(self, rw_repo, rw_remote_repo)
                except:
                    print(
                        "Keeping repos after failure: repo_dir = %s, remote_repo_dir = %s"
                        % (repo_dir, remote_repo_dir),
                        file=sys.stderr)
                    repo_dir = remote_repo_dir = None
                    raise
            finally:
                # gd.proc.kill() ... no idea why that doesn't work
                if gd is not None:
                    os.kill(gd.proc.pid, 15)

                os.chdir(prev_cwd)
                rw_repo.git.clear_cache()
                rw_remote_repo.git.clear_cache()
                if repo_dir:
                    shutil.rmtree(repo_dir, onerror=_rmtree_onerror)
                if remote_repo_dir:
                    shutil.rmtree(remote_repo_dir, onerror=_rmtree_onerror)

                if gd is not None:
                    gd.proc.wait()
Example #32
0
 def _small_repo_url(self):
     """:return" a path to a small, clonable repository"""
     from git.cmd import Git
     return Git.polish_url(
         osp.join(self.rorepo.working_tree_dir,
                  'git/ext/gitdb/gitdb/ext/smmap'))
Example #33
0
def git_checkout(path, branch):
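    # Bind GitPython's command wrapper to `path` and run `git checkout <branch>` there.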
    Git(path).checkout(branch)
Example #34
0
class CreateAction(ConsoleAction):
    def __init__(self, args, name: str = None):
        if name:
            self.name = name

    @staticmethod
    def register(parser: ArgumentParser):
        pass

    @Lazy
    def name(self) -> str:
        name_to_skill = {skill.name: skill for skill in self.msm.list()}
        while True:
            name = ask_input(
                'Enter a short unique skill name (ie. "siren alarm" or "pizza orderer"):',
                lambda x: re.match(r'^[a-zA-Z \-]+$', x),
                'Please use only letters and spaces.').strip(
                    ' -').lower().replace(' ', '-')
            skill = name_to_skill.get(
                name, name_to_skill.get('{}-skill'.format(name)))
            if skill:
                print('The skill {} {}already exists'.format(
                    skill.name,
                    'by {} '.format(skill.author) * bool(skill.author)))
                if ask_yes_no('Remove it? (y/N)', False):
                    rmtree(skill.path)
                else:
                    continue
            class_name = '{}Skill'.format(to_camel(name.replace('-', '_')))
            repo_name = '{}-skill'.format(name)
            print()
            print('Class name:', class_name)
            print('Repo name:', repo_name)
            print()
            alright = ask_yes_no('Looks good? (Y/n)', True)
            if alright:
                return name

    path = Lazy(lambda s: join(s.msm.skills_dir, s.name + '-skill'))
    git = Lazy(lambda s: Git(s.path))
    short_description = Lazy(lambda s: ask_input(
        'Enter a one line description for your skill (ie. Orders fresh pizzas from the store):',
    ).capitalize())
    author = Lazy(lambda s: ask_input('Enter author:'))
    intent_lines = Lazy(lambda s: [
        i.capitalize() for i in ask_input_lines(
            'Enter some example phrases to trigger your skill:', '-')
    ])
    dialog_lines = Lazy(lambda s: [
        i.capitalize() for i in ask_input_lines(
            'Enter what your skill should say to respond:', '-')
    ])
    intent_entities = Lazy(lambda s: set(
        re.findall(r'(?<={)[a-z_A-Z]*(?=})', '\n'.join(
            i for i in s.intent_lines))))
    dialog_entities = Lazy(lambda s: set(
        re.findall(r'(?<={)[a-z_A-Z]*(?=})', '\n'.join(s.dialog_lines))))
    long_description = Lazy(
        lambda s: '\n\n'.join(ask_input_lines('Enter a long description:', '>')
                              ).strip().capitalize())
    readme = Lazy(lambda s: readme_template.format(
        title_name=s.name.replace('-', ' ').title(),
        short_description=s.short_description,
        long_description=s.long_description,
        examples=''.join(' - "{}"\n'.format(i) for i in s.intent_lines),
        credits=credits_template.format(author=s.author)))
    init_file = Lazy(lambda s: init_template.format(
        class_name=to_camel(s.name.replace('-', '_')),
        handler_name=s.intent_name.replace('.', '_'),
        handler_code='\n'.join(' ' * 8 * bool(i) + i for i in [
            "{ent} = message.data['{ent}']".format(ent=entity)
            for entity in sorted(s.intent_entities)
        ] + [
            "{ent} = ''".format(ent=entity)
            for entity in sorted(s.dialog_entities - s.intent_entities)
        ] + [''] * bool(
            s.dialog_entities | s.intent_entities
        ) + "self.speak_dialog('{intent}'{args})".format(
            intent=s.intent_name,
            args=", data={{\n{}\n}}".format(
                ',\n'.join("    '{ent}': {ent}".format(ent=entity)
                           for entity in s.dialog_entities | s.intent_entities)
            ) * bool(s.dialog_entities | s.intent_entities)).split('\n')),
        intent_name=s.intent_name))
    intent_name = Lazy(lambda s: '.'.join(reversed(s.name.split('-'))))

    def add_vocab(self):
        makedirs(join(self.path, 'vocab', self.lang))
        with open(
                join(self.path, 'vocab', self.lang,
                     self.intent_name + '.intent'), 'w') as f:
            f.write('\n'.join(self.intent_lines + ['']))

    def add_dialog(self):
        makedirs(join(self.path, 'dialog', self.lang))
        with open(
                join(self.path, 'dialog', self.lang,
                     self.intent_name + '.dialog'), 'w') as f:
            f.write('\n'.join(self.dialog_lines + ['']))

    def initialize_template(self, files: set = None):
        git = Git(self.path)

        skill_template = [
            ('', lambda: makedirs(self.path)), ('vocab', self.add_vocab),
            ('dialog', self.add_dialog),
            ('__init__.py', lambda: self.init_file),
            ('README.md', lambda: self.readme),
            ('.gitignore', lambda: gitignore_template),
            ('settingsmeta.json', lambda: settingsmeta_template.format(
                capital_desc=self.name.replace('-', ' ').capitalize())),
            ('.git', lambda: git.init())
        ]

        def cleanup():
            rmtree(self.path)

        if not isdir(self.path):
            atexit.register(cleanup)
        for file, handler in skill_template:
            if files and file not in files:
                continue
            if not exists(join(self.path, file)):
                result = handler()
                if isinstance(result,
                              str) and not exists(join(self.path, file)):
                    with open(join(self.path, file), 'w') as f:
                        f.write(result)
        atexit.unregister(cleanup)

    def commit_changes(self):
        if self.git.rev_parse('HEAD', with_exceptions=False) == 'HEAD':
            self.git.add('.')
            self.git.commit(message='Initial commit')

    def create_github_repo(self,
                           get_repo_name: Callable = None
                           ) -> Optional[Repository]:
        if 'origin' not in Git(self.path).remote().split('\n'):
            if ask_yes_no(
                    'Would you like to create a GitHub repo for it? (Y/n)',
                    True):
                repo_name = (get_repo_name
                             and get_repo_name()) or (self.name + '-skill')
                try:
                    repo = self.user.create_repo(repo_name,
                                                 self.short_description)
                except GithubException as e:
                    if e.status == 422:
                        raise GithubRepoExists(repo_name) from e
                    raise
                self.git.remote('add', 'origin', repo.html_url)
                call(['git', 'push', '-u', 'origin', 'master'],
                     cwd=self.git.working_dir)
                print('Created GitHub repo:', repo.html_url)
                return repo
        return None

    def perform(self):
        self.initialize_template()
        self.commit_changes()
        with print_error(GithubRepoExists):
            self.create_github_repo()
        print('Created skill at:', self.path)
Example #35
0
  def __init__(self, admin_repository):
    self.path = Path(admin_repository)
    self.git = Git(admin_repository)

    if not self.path.isdir():
      raise ValueError('Admin repository path should point to directory')
Example #36
0
 def test_call_unpack_args(self):
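     # Git._Git__unpack_args is the name-mangled spelling of the private Git.__unpack_args helper.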
     args = Git._Git__unpack_args(['git', 'log', '--', 'Unicode€™'])
     mangled_value = 'Unicode\u20ac\u2122'
     self.assertEqual(args, ['git', 'log', '--', mangled_value])
Example #37
0
 def setUpClass(cls):
     super(TestGit, cls).setUpClass()
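     # Bind a Git command wrapper to the working directory of the shared read-only test repository.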
     cls.git = Git(cls.rorepo.working_dir)
Example #38
0
 def __init__(self, repo_path):
     self.git = Git()
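     # Isolate git from user/system configuration: GIT_CONFIG_NOSYSTEM skips /etc/gitconfig,
     # and pointing HOME and XDG_CONFIG_HOME at the cwd hides any ~/.gitconfig.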
     self.git.update_environment(GIT_CONFIG_NOSYSTEM='true',
                                 HOME=os.getcwd(),
                                 XDG_CONFIG_HOME=os.getcwd())
     self.repo = Repo(os.path.abspath('.'))
Example #39
0
class _GitWrapperCommon(object):
    '''
    Wrap git module to provide a more stable interface across versions
    '''
    def __init__(self, repo_path):
        self.git = Git()
        self.git.update_environment(GIT_CONFIG_NOSYSTEM='true',
                                    HOME=os.getcwd(),
                                    XDG_CONFIG_HOME=os.getcwd())
        self.repo = Repo(os.path.abspath('.'))

    def is_file_managed_by_git(self, path):
        '''
        :param path: Path to check
        :returns: True if path is managed by git
        '''
        status, _stdout, _stderr = self.git.execute(
            ['git', 'ls-files', path, '--error-unmatch'],
            with_extended_output=True,
            with_exceptions=False)
        return status == 0

    def is_file_modified(self, path):
        '''
        Does a file have local changes not yet committed

        :returns: True if file has local changes
        '''
        status, _stdout, _stderr = self.git.execute(
            ['git', 'diff', '--quiet', 'HEAD', path],
            with_extended_output=True,
            with_exceptions=False)
        return status != 0

    def get_commits_following(self, path):
        '''
        Get all commits including path following the file through
        renames

        :param path: Path which we will find commits for
        :returns: Sequence of commit objects. Newest to oldest
        '''
        return [commit for commit, _ in self.get_commits_and_names_iter(path)]

    def get_commits_and_names_iter(self, path):
        '''
        Get all commits including a given path following renames
        '''
        log_result = self.git.log('--pretty=%H', '--follow', '--name-only',
                                  '--', path).splitlines()
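        # --pretty=%H with --name-only emits three lines per commit: the sha, a blank
        # line, and the (possibly renamed) file path, hence grouper(log_result, 3).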

        for commit_sha, _, filename in grouper(log_result, 3):
            yield self.repo.commit(commit_sha), filename

    def get_commits(self, path, follow=False):
        '''
        Get all commits including path

        :param path: Path which we will find commits for
        :param bool follow: If True we will follow path through renames

        :returns: Sequence of commit objects. Newest to oldest
        '''
        if follow:
            return self.get_commits_following(path)
        else:
            return self._get_commits(path)
Example #40
0
 def __git_init(self):
     """ Initialize git repository in the project infrastructure path """
     if self._git_repo:
         return Git().clone(self._git_repo, self._repository_directory)
     else:
         return Repo.init(self._repository_directory)
Example #41
0
 def test_call_unpack_args(self):
     args = Git._Git__unpack_args(['git', 'log', '--', u'Unicode' + unichr(40960)])
     assert_equal(args, ['git', 'log', '--', 'Unicode\xea\x80\x80'])
Example #42
0
 def test_call_unpack_args_unicode(self):
     args = Git._Git__unpack_args(u'Unicode' + unichr(40960))
     assert_equal(args, ['Unicode\xea\x80\x80'])
Example #43
0
def build_command(config_file, strict, site_dir, tags, default, latest):
    """Build the MkDocs documentation"""

    cli.configure_logging(level=logging.INFO)

    g = Git()
    tags = tags or g.tag().splitlines()

    log.info("Building %s to /", default)
    g.checkout(default)
    _build(_load_config(config_file, strict, site_dir), default, tags)

    log.info("Building %s to /latest", latest)
    g.checkout(default)
    _build(_load_config(config_file, strict, site_dir), latest, tags, 'latest')

    for tag in sorted(tags):

        g.checkout(tag)

        if not os.path.exists("mkdocs.yml"):
            log.warning("Unable to build %s, as no mkdocs.yml was found", tag)
            continue

        site_dir = "v{0}".format(tag)
        log.info("Building %s to /%s", tag, site_dir)
        _build(_load_config(config_file, strict, site_dir), tag, tags,
               site_dir)

    g.checkout('master')
Example #44
0
def build_command(config_file, strict, site_dir, branches, default_branch,
                  latest, logging_level):
    """Build the MkDocs documentation"""

    #    cli.configure_logging(level=logging.INFO)
    global release_branches

    logging.basicConfig(
        stream=sys.stdout,
        level=get_logging_level(logging_level),
        format=
        '%(asctime)s %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] %(message)s'
    )

    g = Git()
    repo = Repo()

    branches = branches or g.branch('-r').splitlines()
    all_branch_names = list(
        map(lambda branch: branch.split("origin/")[1], branches))

    active_branch = repo.active_branch.name
    print("Active branch %s", active_branch)
    print("Default branch %s", default_branch)
    print("Latest branch %s", latest)

    start_stashes_count = len(
        re.findall("stash@{[0-9]{1,3}}:", repo.git.stash("list")))
    repo.git.stash("save")

    if active_branch != latest:
        print("Checkout Default %s", active_branch)
        g.checkout(default_branch)

    default_config = _load_config(config_file, strict, site_dir)

    versions = default_config.get("extra").get("versions")

    formatedCSVersions = {}
    jelasticVersions = []

    for version in versions:
        formatedCSVersions[unicode(version)] = versions[version]

    if formatedCSVersions is not None:
        release_branches = formatedCSVersions.keys()
        jelasticVersions = formatedCSVersions.values()

    if release_branches is not None:
        release_branches = sorted(release_branches,
                                  key=functools.cmp_to_key(version_compare))
        jelasticVersions = sorted(jelasticVersions,
                                  key=functools.cmp_to_key(version_compare))

        default_version = next(iter(release_branches),
                               None)  # release_branches[-1]

        print("Default version %s", default_version)
        print("Building %s to /", default_version)

        _build(default_config, default_version, release_branches,
               jelasticVersions)

        for branch in release_branches:
            if branch in all_branch_names:  #branch != default_version and
                g.checkout(branch)
                g.pull()

                if not os.path.exists("mkdocs.yml"):
                    log.warning(
                        "Unable to build %s, as no mkdocs.yml was found",
                        branch)
                    print("Unable to build %s, as no mkdocs.yml was found",
                          branch)
                    continue

                site_dir = "{0}".format(branch)
                log.info("Building %s to /%s", branch, "site/" + site_dir)
                print("Building %s to /%s", branch, "site/" + site_dir)
                _build(_load_config(config_file, strict, site_dir), branch,
                       release_branches, jelasticVersions, "site/" + site_dir)

        # print("Selected Branches %s", default_config.get("versions").get("releases"))

    print("Checkout branch %s", active_branch)
    g.checkout("master")

    end_stashes_count = len(
        re.findall("stash@{[0-9]{1,3}}:", repo.git.stash("list")))

    if end_stashes_count > start_stashes_count:
        repo.git.stash("pop")
        print("pop latest stash")
Example #45
0
def main(debug=False):
  lines = []
  print()
  lines.append(u' *******************************************')
  lines.append(u' **     Taichi Programming Language       **')
  lines.append(u' *******************************************')
  if debug:
    lines.append(u' *****************Debug Mode****************')
    os.environ['TI_DEBUG'] = '1'
  print(u'\n'.join(lines))
  print()
  import taichi as ti

  ti.tc_core.set_core_debug(debug)

  argc = len(sys.argv)
  if argc == 1 or sys.argv[1] == 'help':
    print(
        "    Usage: ti run [task name]        |-> Run a specific task\n"
        "           ti benchmark              |-> Run performance benchmark\n"
        "           ti test                   |-> Run all tests\n"
        "           ti test_verbose           |-> Run all tests with verbose outputs\n"
        "           ti test_python            |-> Run python tests\n"
        "           ti test_cpp               |-> Run cpp tests\n"
        "           ti format                 |-> Reformat modified source files\n"
        "           ti format_all             |-> Reformat all source files\n"
        "           ti build                  |-> Build C++ files\n"
        "           ti video                  |-> Make a video using *.png files in the current folder\n"
        "           ti video_scale            |-> Scale video resolution \n"
        "           ti video_crop             |-> Crop video\n"
        "           ti video_speed            |-> Speed up video\n"
        "           ti gif                    |-> Convert mp4 file to gif\n"
        "           ti doc                    |-> Build documentation\n"
        "           ti release                |-> Make source code release\n"
        "           ti debug [script.py]      |-> Debug script\n")
    exit(0)
  mode = sys.argv[1]

  t = time.time()
  if mode.endswith('.py'):
    import subprocess
    subprocess.call([sys.executable] + sys.argv[1:])
  elif mode == "run":
    if argc <= 2:
      print("Please specify [task name], e.g. test_math")
      exit(-1)
    name = sys.argv[2]
    task = ti.Task(name)
    task.run(*sys.argv[3:])
  elif mode == "debug":
    ti.core.set_core_trigger_gdb_when_crash(True)
    if argc <= 2:
      print("Please specify [file name], e.g. render.py")
      exit(-1)
    name = sys.argv[2]
    with open(name) as script:
      script = script.read()
    exec(script, {'__name__': '__main__'})
  elif mode == "test_python":
    return test_python()
  elif mode == "test_cpp":
    return test_cpp()
  elif mode == "test":
    if test_python() != 0:
      return -1
    return test_cpp()
  elif mode == "test_verbose":
    if test_python(True) != 0:
      return -1
    return test_cpp()
  elif mode == "build":
    ti.core.build()
  elif mode == "format":
    ti.core.format()
  elif mode == "format_all":
    ti.core.format(all=True)
  elif mode == "statement":
    exec(sys.argv[2])
  elif mode == "update":
    ti.core.update(True)
    ti.core.build()
  elif mode == "asm":
    fn = sys.argv[2]
    os.system(r"sed '/^\s*\.\(L[A-Z]\|[a-z]\)/ d' {0} > clean_{0}".format(fn))
  elif mode == "interpolate":
    interpolate_frames('.')
  elif mode == "amal":
    cwd = os.getcwd()
    os.chdir(ti.get_repo_directory())
    with open('misc/amalgamate.py') as script:
      script = script.read()
    exec(script, {'__name__': '__main__'})
    os.chdir(cwd)
    shutil.copy(
        os.path.join(ti.get_repo_directory(), 'build/taichi.h'), './taichi.h')
  elif mode == "doc":
    os.system('cd {}/docs && sphinx-build -b html . build'.format(ti.get_repo_directory()))
  elif mode == "video":
    files = sorted(os.listdir('.'))
    files = list(filter(lambda x: x.endswith('.png'), files))
    if len(sys.argv) >= 3:
      frame_rate = int(sys.argv[2])
    else:
      frame_rate = 24
    if len(sys.argv) >= 4:
      trunc = int(sys.argv[3])
      files = files[:trunc]
    ti.info('Making video using {} png files...', len(files))
    ti.info("frame_rate={}", frame_rate)
    output_fn = 'video.mp4'
    make_video(files, output_path=output_fn, frame_rate=frame_rate)
    ti.info('Done! Output video file = {}', output_fn)
  elif mode == "video_scale":
    input_fn = sys.argv[2]
    assert input_fn[-4:] == '.mp4'
    output_fn = input_fn[:-4] + '-scaled.mp4'
    ratiow = float(sys.argv[3])
    if len(sys.argv) >= 5:
      ratioh = float(sys.argv[4])
    else:
      ratioh = ratiow
    scale_video(input_fn, output_fn, ratiow, ratioh)
  elif mode == "video_crop":
    if len(sys.argv) != 7:
      print('Usage: ti video_crop fn x_begin x_end y_begin y_end')
      exit(-1)
    input_fn = sys.argv[2]
    assert input_fn[-4:] == '.mp4'
    output_fn = input_fn[:-4] + '-cropped.mp4'
    x_begin = float(sys.argv[3])
    x_end = float(sys.argv[4])
    y_begin = float(sys.argv[5])
    y_end = float(sys.argv[6])
    crop_video(input_fn, output_fn, x_begin, x_end, y_begin, y_end)
  elif mode == "video_speed":
    if len(sys.argv) != 4:
      print('Usage: ti video_speed fn speed_up_factor')
      exit(-1)
    input_fn = sys.argv[2]
    assert input_fn[-4:] == '.mp4'
    output_fn = input_fn[:-4] + '-sped.mp4'
    speed = float(sys.argv[3])
    accelerate_video(input_fn, output_fn, speed)
  elif mode == "gif":
    input_fn = sys.argv[2]
    assert input_fn[-4:] == '.mp4'
    output_fn = input_fn[:-4] + '.gif'
    ti.info('Converting {} to {}'.format(input_fn, output_fn))
    framerate = 24
    mp4_to_gif(input_fn, output_fn, framerate)
  elif mode == "convert":
    # http://www.commandlinefu.com/commands/view/3584/remove-color-codes-special-characters-with-sed
    # TODO: Windows support
    for fn in sys.argv[2:]:
      print("Converting logging file: {}".format(fn))
      tmp_fn = '/tmp/{}.{:05d}.backup'.format(fn, random.randint(0, 10000))
      shutil.move(fn, tmp_fn)
      command = r'sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"'
      os.system('{} {} > {}'.format(command, tmp_fn, fn))
  elif mode == "release":
    from git import Git
    import zipfile
    import hashlib
    g = Git(ti.get_repo_directory())
    g.init()
    with zipfile.ZipFile('release.zip', 'w') as zip:
      files = g.ls_files().split('\n')
      os.chdir(ti.get_repo_directory())
      for f in files:
        if not os.path.isdir(f):
          zip.write(f)
    ver = ti.__version__
    md5 = hashlib.md5()
    with open('release.zip', "rb") as f:
      for chunk in iter(lambda: f.read(4096), b""):
        md5.update(chunk)
    md5 = md5.hexdigest()
    commit = ti.core.get_commit_hash()[:8]
    fn = f'taichi-src-v{ver[0]}-{ver[1]}-{ver[2]}-{commit}-{md5}.zip'
    import shutil
    shutil.move('release.zip', fn)
  else:
    name = sys.argv[1]
    print('Running task [{}]...'.format(name))
    task = ti.Task(name)
    task.run(*sys.argv[2:])
  print()
  print(">>> Running time: {:.2f}s".format(time.time() - t))
Example #46
def main():
    # If the git directory is empty, clone into it; otherwise open the existing repositories
    if not os.listdir(gitdirectory):
        repo = git.Repo.clone_from(giturl, gitdirectory, branch='master')

    else:
        repo = git.Repo(gitdirectory)
        repo2 = git.Repo(localdirectory)
    originalrepo = Git(gitdirectory)
    commits = list(repo.iter_commits("master", max_count=1000000))
    tree = repo.head.commit.tree

    print(originalrepo.branch())

    stay = True
    current_original_commit = 0

    print(
        "Commands: \"quit\" to quit, \"current\" to show the current commit sha, "
        "\"<\" to step one commit back, \">\" to step one commit forward, "
        "a number to jump to that commit, \"complete\" to go back to a commit and "
        "replay commits up to the newest, and \"100commit\" to do the same, pausing every 100 commits"
    )
    iterate = 0
    iterate100 = 0
    iteratevalue = 0
    while stay:
        if iterate == 0 and iterate100 == 0:
            userinput = input("input: ")
        if userinput == "quit":
            stay = False
        elif userinput == "current":
            print(repo.head.commit)
            print(repo.head.commit.committed_date)
            print("commits in the past = " + str(current_original_commit))
            continue
        elif userinput == ">":
            if current_original_commit > 0:
                current_original_commit -= 1
            else:
                print(
                    "you tried to go out of range, this is the newest commit")
                continue
        elif userinput == "<":
            if current_original_commit < len(commits) - 1:
                current_original_commit += 1
            else:
                print(
                    "you tried to go out of range, this is the oldest commit")
                continue
        elif userinput.isdigit():
            if int(userinput) < len(commits) and int(userinput) >= 0:
                current_original_commit = int(userinput)
            else:
                print("you tried to go out of range, max range is: " +
                      str(len(commits)))
                continue
        elif userinput == "complete" or iterate == 1:
            if iterate == 0:
                firstcommitnumber = int(
                    input(
                        "How far back would you like to go in commits? Input: "
                    ))
                if firstcommitnumber > len(commits):
                    print(
                        "sorry, you have gone out of the scope of the project. There are "
                        + str(len(commits)) + " total commits")
                else:
                    start_time = time.time()
                    current_original_commit = firstcommitnumber - 1
                    iterate = 1
            if iterate == 1:
                if current_original_commit > 0:
                    current_original_commit -= 1
                else:
                    time_elapsed = time.time() - start_time
                    print(
                        "You have reached the final newest commit (shown below) in "
                        + str(time_elapsed))
                    iterate = 0
        elif userinput == "100commit" or iterate100 == 1:
            if iterate100 == 0:
                firstcommitnumber = int(
                    input(
                        "How far back would you like to go in commits? Input: "
                    ))
                if firstcommitnumber > len(commits):
                    print(
                        "sorry, you have gone out of the scope of the project. There are "
                        + str(len(commits)) + " total commits")
                else:
                    start_time = time.time()
                    current_original_commit = firstcommitnumber - 1
                    iterate100 = 1
            if iterate100 == 1:
                if current_original_commit > 0:
                    if iteratevalue < 100:
                        current_original_commit -= 1
                        iteratevalue += 1
                    else:
                        confirmation = input(
                            "type \"confirm\" to run the next 50 commits: ")
                        if confirmation == "confirm":
                            iteratevalue = 0
                            current_original_commit -= 1
                else:
                    time_elapsed = time.time() - start_time
                    print(
                        "You have reached the final newest commit (shown below) in "
                        + str(time_elapsed))
                    iterate100 = 0

        else:
            print("sorry, not recognised try again")
            continue
        originalrepo.checkout(commits[current_original_commit])

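        # Mirror the checked-out tree into localdirectory; add_diff_files,
        # delete_diff_files and merge_diff_files are helpers defined elsewhere in
        # this script that copy, remove and overwrite files based on dcmp.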
        dcmp = dircmp(localdirectory, gitdirectory)
        #print("files and folders added:")
        add_diff_files(dcmp)
        #print("files and folders removed:")
        delete_diff_files(dcmp)
        #print("files and folders replaced:")
        merge_diff_files(dcmp)
        #print("DIFFERENCES" + str(dcmp.left_only) + str(dcmp.right_only) +str(dcmp.diff_files))
        print("changes complete, starting commit number: " +
              str(current_original_commit) +
              " commit(s) from the newest commit. hash: " +
              str(commits[current_original_commit]))
        #try:
        repo2 = Repo(localdirectory)
        #repo2.git.push(force=True)
        #repo2.index.add('.')
        #repo2.git.add(update=True)
        repo2.git.add("-A")
        repo2.index.commit(str(current_original_commit))
        #repo2.git.commit('-m', 'test commit', author='*****@*****.**')
        origin = repo2.remote(name='origin')
        origin.push()
        print("commit successful, pushing")
Example #47
0
 def test_call_unpack_args_unicode(self):
     args = Git._Git__unpack_args('Unicode€™')
     mangled_value = 'Unicode\u20ac\u2122'
     self.assertEqual(args, [mangled_value])
Example #48
0
class TestRepo(TestBase):

    def setUp(self):
        _rm_lock_files()

    def tearDown(self):
        for lfp in glob.glob(_tc_lock_fpaths):
            if osp.isfile(lfp):
                raise AssertionError('Previous TC left hanging git-lock file: {}'.format(lfp))
        import gc
        gc.collect()

    @raises(InvalidGitRepositoryError)
    def test_new_should_raise_on_invalid_repo_location(self):
        Repo(tempfile.gettempdir())

    @raises(NoSuchPathError)
    def test_new_should_raise_on_non_existent_path(self):
        Repo("repos/foobar")

    @with_rw_repo('0.3.2.1')
    def test_repo_creation_from_different_paths(self, rw_repo):
        r_from_gitdir = Repo(rw_repo.git_dir)
        self.assertEqual(r_from_gitdir.git_dir, rw_repo.git_dir)
        assert r_from_gitdir.git_dir.endswith('.git')
        assert not rw_repo.git.working_dir.endswith('.git')
        self.assertEqual(r_from_gitdir.git.working_dir, rw_repo.git.working_dir)

    @with_rw_repo('0.3.2.1')
    def test_repo_creation_pathlib(self, rw_repo):
        if pathlib is None:  # Pythons below 3.4 don't have pathlib
            raise SkipTest("pathlib was introduced in 3.4")

        r_from_gitdir = Repo(pathlib.Path(rw_repo.git_dir))
        self.assertEqual(r_from_gitdir.git_dir, rw_repo.git_dir)

    def test_description(self):
        txt = "Test repository"
        self.rorepo.description = txt
        assert_equal(self.rorepo.description, txt)

    def test_heads_should_return_array_of_head_objects(self):
        for head in self.rorepo.heads:
            assert_equal(Head, head.__class__)

    def test_heads_should_populate_head_data(self):
        for head in self.rorepo.heads:
            assert head.name
            self.assertIsInstance(head.commit, Commit)
        # END for each head

        self.assertIsInstance(self.rorepo.heads.master, Head)
        self.assertIsInstance(self.rorepo.heads['master'], Head)

    def test_tree_from_revision(self):
        tree = self.rorepo.tree('0.1.6')
        self.assertEqual(len(tree.hexsha), 40)
        self.assertEqual(tree.type, "tree")
        self.assertEqual(self.rorepo.tree(tree), tree)

        # try from invalid revision that does not exist
        self.failUnlessRaises(BadName, self.rorepo.tree, 'hello world')

    def test_pickleable(self):
        pickle.loads(pickle.dumps(self.rorepo))

    def test_commit_from_revision(self):
        commit = self.rorepo.commit('0.1.4')
        self.assertEqual(commit.type, 'commit')
        self.assertEqual(self.rorepo.commit(commit), commit)

    def test_commits(self):
        mc = 10
        commits = list(self.rorepo.iter_commits('0.1.6', max_count=mc))
        self.assertEqual(len(commits), mc)

        c = commits[0]
        assert_equal('9a4b1d4d11eee3c5362a4152216376e634bd14cf', c.hexsha)
        assert_equal(["c76852d0bff115720af3f27acdb084c59361e5f6"], [p.hexsha for p in c.parents])
        assert_equal("ce41fc29549042f1aa09cc03174896cf23f112e3", c.tree.hexsha)
        assert_equal("Michael Trier", c.author.name)
        assert_equal("*****@*****.**", c.author.email)
        assert_equal(1232829715, c.authored_date)
        assert_equal(5 * 3600, c.author_tz_offset)
        assert_equal("Michael Trier", c.committer.name)
        assert_equal("*****@*****.**", c.committer.email)
        assert_equal(1232829715, c.committed_date)
        assert_equal(5 * 3600, c.committer_tz_offset)
        assert_equal("Bumped version 0.1.6\n", c.message)

        c = commits[1]
        self.assertIsInstance(c.parents, tuple)

    def test_trees(self):
        mc = 30
        num_trees = 0
        for tree in self.rorepo.iter_trees('0.1.5', max_count=mc):
            num_trees += 1
            self.assertIsInstance(tree, Tree)
        # END for each tree
        self.assertEqual(num_trees, mc)

    def _assert_empty_repo(self, repo):
        # test all kinds of things with an empty, freshly initialized repo.
        # It should throw good errors

        # entries should be empty
        self.assertEqual(len(repo.index.entries), 0)

        # head is accessible
        assert repo.head
        assert repo.head.ref
        assert not repo.head.is_valid()

        # we can change the head to some other ref
        head_ref = Head.from_path(repo, Head.to_full_path('some_head'))
        assert not head_ref.is_valid()
        repo.head.ref = head_ref

        # is_dirty can handle all kwargs
        for args in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
            assert not repo.is_dirty(*args)
        # END for each arg

        # we can add a file to the index ( if we are not bare )
        if not repo.bare:
            pass
        # END test repos with working tree

    @with_rw_directory
    def test_clone_from_keeps_env(self, rw_dir):
        original_repo = Repo.init(osp.join(rw_dir, "repo"))
        environment = {"entry1": "value", "another_entry": "10"}

        cloned = Repo.clone_from(original_repo.git_dir, osp.join(rw_dir, "clone"), env=environment)

        assert_equal(environment, cloned.git.environment())

    @with_rw_directory
    def test_date_format(self, rw_dir):
        repo = Repo.init(osp.join(rw_dir, "repo"))
        # @-timestamp is the format used by git commit hooks
        repo.index.commit("Commit messages", commit_date="@1400000000 +0000")

    @with_rw_directory
    def test_clone_from_pathlib(self, rw_dir):
        if pathlib is None:  # Pythons below 3.4 don't have pathlib
            raise SkipTest("pathlib was introduced in 3.4")

        original_repo = Repo.init(osp.join(rw_dir, "repo"))

        Repo.clone_from(original_repo.git_dir, pathlib.Path(rw_dir) / "clone_pathlib")

    @with_rw_directory
    def test_clone_from_pathlib_withConfig(self, rw_dir):
        if pathlib is None:  # Pythons below 3.4 don't have pathlib
            raise SkipTest("pathlib was introduced in 3.4")

        original_repo = Repo.init(osp.join(rw_dir, "repo"))

        cloned = Repo.clone_from(original_repo.git_dir, pathlib.Path(rw_dir) / "clone_pathlib_withConfig",
                                 multi_options=["--recurse-submodules=repo",
                                                "--config core.filemode=false",
                                                "--config submodule.repo.update=checkout"])

        assert_equal(cloned.config_reader().get_value('submodule', 'active'), 'repo')
        assert_equal(cloned.config_reader().get_value('core', 'filemode'), False)
        assert_equal(cloned.config_reader().get_value('submodule "repo"', 'update'), 'checkout')

    def test_clone_from_with_path_contains_unicode(self):
        with tempfile.TemporaryDirectory() as tmpdir:
            unicode_dir_name = '\u0394'
            path_with_unicode = os.path.join(tmpdir, unicode_dir_name)
            os.makedirs(path_with_unicode)

            try:
                Repo.clone_from(
                    url=self._small_repo_url(),
                    to_path=path_with_unicode,
                )
            except UnicodeEncodeError:
                self.fail('Raised UnicodeEncodeError')

    @with_rw_repo('HEAD')
    def test_max_chunk_size(self, repo):
        class TestOutputStream(object):
            def __init__(self, max_chunk_size):
                self.max_chunk_size = max_chunk_size

            def write(self, b):
                assert_true(len(b) <= self.max_chunk_size)

        for chunk_size in [16, 128, 1024]:
            repo.git.status(output_stream=TestOutputStream(chunk_size), max_chunk_size=chunk_size)

        repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), max_chunk_size=None)
        repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), max_chunk_size=-10)
        repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE))

    def test_init(self):
        prev_cwd = os.getcwd()
        os.chdir(tempfile.gettempdir())
        git_dir_rela = "repos/foo/bar.git"
        del_dir_abs = osp.abspath("repos")
        git_dir_abs = osp.abspath(git_dir_rela)
        try:
            # with specific path
            for path in (git_dir_rela, git_dir_abs):
                r = Repo.init(path=path, bare=True)
                self.assertIsInstance(r, Repo)
                assert r.bare is True
                assert not r.has_separate_working_tree()
                assert osp.isdir(r.git_dir)

                self._assert_empty_repo(r)

                # test clone
                clone_path = path + "_clone"
                rc = r.clone(clone_path)
                self._assert_empty_repo(rc)

                try:
                    rmtree(clone_path)
                except OSError:
                    # when relative paths are used, the clone may actually be inside
                    # of the parent directory
                    pass
                # END exception handling

                # try again, this time with the absolute version
                rc = Repo.clone_from(r.git_dir, clone_path)
                self._assert_empty_repo(rc)

                rmtree(git_dir_abs)
                try:
                    rmtree(clone_path)
                except OSError:
                    # when relative paths are used, the clone may actually be inside
                    # of the parent directory
                    pass
                # END exception handling

            # END for each path

            os.makedirs(git_dir_rela)
            os.chdir(git_dir_rela)
            r = Repo.init(bare=False)
            assert r.bare is False
            assert not r.has_separate_working_tree()

            self._assert_empty_repo(r)
        finally:
            try:
                rmtree(del_dir_abs)
            except OSError:
                pass
            os.chdir(prev_cwd)
        # END restore previous state

    def test_bare_property(self):
        self.rorepo.bare

    def test_daemon_export(self):
        orig_val = self.rorepo.daemon_export
        self.rorepo.daemon_export = not orig_val
        self.assertEqual(self.rorepo.daemon_export, (not orig_val))
        self.rorepo.daemon_export = orig_val
        self.assertEqual(self.rorepo.daemon_export, orig_val)

    def test_alternates(self):
        cur_alternates = self.rorepo.alternates
        # empty alternates
        self.rorepo.alternates = []
        self.assertEqual(self.rorepo.alternates, [])
        alts = ["other/location", "this/location"]
        self.rorepo.alternates = alts
        self.assertEqual(alts, self.rorepo.alternates)
        self.rorepo.alternates = cur_alternates

    def test_repr(self):
        assert repr(self.rorepo).startswith('<git.Repo ')

    def test_is_dirty_with_bare_repository(self):
        orig_value = self.rorepo._bare
        self.rorepo._bare = True
        assert_false(self.rorepo.is_dirty())
        self.rorepo._bare = orig_value

    def test_is_dirty(self):
        self.rorepo._bare = False
        for index in (0, 1):
            for working_tree in (0, 1):
                for untracked_files in (0, 1):
                    assert self.rorepo.is_dirty(index, working_tree, untracked_files) in (True, False)
                # END untracked files
            # END working tree
        # END index
        orig_val = self.rorepo._bare
        self.rorepo._bare = True
        assert self.rorepo.is_dirty() is False
        self.rorepo._bare = orig_val

    @with_rw_repo('HEAD')
    def test_is_dirty_with_path(self, rwrepo):
        assert rwrepo.is_dirty(path="git") is False

        with open(osp.join(rwrepo.working_dir, "git", "util.py"), "at") as f:
            f.write("junk")
        assert rwrepo.is_dirty(path="git") is True
        assert rwrepo.is_dirty(path="doc") is False

        rwrepo.git.add(Git.polish_url(osp.join("git", "util.py")))
        assert rwrepo.is_dirty(index=False, path="git") is False
        assert rwrepo.is_dirty(path="git") is True

        with open(osp.join(rwrepo.working_dir, "doc", "no-such-file.txt"), "wt") as f:
            f.write("junk")
        assert rwrepo.is_dirty(path="doc") is False
        assert rwrepo.is_dirty(untracked_files=True, path="doc") is True

    def test_head(self):
        self.assertEqual(self.rorepo.head.reference.object, self.rorepo.active_branch.object)

    def test_index(self):
        index = self.rorepo.index
        self.assertIsInstance(index, IndexFile)

    def test_tag(self):
        assert self.rorepo.tag('refs/tags/0.1.5').commit

    def test_archive(self):
        tmpfile = tempfile.mktemp(suffix='archive-test')
        with open(tmpfile, 'wb') as stream:
            self.rorepo.archive(stream, '0.1.6', path='doc')
            assert stream.tell()
        os.remove(tmpfile)

    @patch.object(Git, '_call_process')
    def test_should_display_blame_information(self, git):
        git.return_value = fixture('blame')
        b = self.rorepo.blame('master', 'lib/git.py')
        assert_equal(13, len(b))
        assert_equal(2, len(b[0]))
        # assert_equal(25, reduce(lambda acc, x: acc + len(x[-1]), b))
        assert_equal(hash(b[0][0]), hash(b[9][0]))
        c = b[0][0]
        assert_true(git.called)

        assert_equal('634396b2f541a9f2d58b00be1a07f0c358b999b3', c.hexsha)
        assert_equal('Tom Preston-Werner', c.author.name)
        assert_equal('*****@*****.**', c.author.email)
        assert_equal(1191997100, c.authored_date)
        assert_equal('Tom Preston-Werner', c.committer.name)
        assert_equal('*****@*****.**', c.committer.email)
        assert_equal(1191997100, c.committed_date)
        self.assertRaisesRegexp(ValueError, "634396b2f541a9f2d58b00be1a07f0c358b999b3 missing", lambda: c.message)

        # test the 'lines per commit' entries
        tlist = b[0][1]
        assert_true(tlist)
        assert_true(isinstance(tlist[0], string_types))
        assert_true(len(tlist) < sum(len(t) for t in tlist))               # test for single-char bug

        # BINARY BLAME
        git.return_value = fixture('blame_binary')
        blames = self.rorepo.blame('master', 'rps')
        self.assertEqual(len(blames), 2)

    def test_blame_real(self):
        c = 0
        nml = 0   # amount of multi-lines per blame
        for item in self.rorepo.head.commit.tree.traverse(
                predicate=lambda i, d: i.type == 'blob' and i.path.endswith('.py')):
            c += 1

            for b in self.rorepo.blame(self.rorepo.head, item.path):
                nml += int(len(b[1]) > 1)
        # END for each item to traverse
        assert c, "Should have executed at least one blame command"
        assert nml, "There should at least be one blame commit that contains multiple lines"

    @patch.object(Git, '_call_process')
    def test_blame_incremental(self, git):
        # loop over two fixtures, create a test fixture for 2.11.1+ syntax
        for git_fixture in ('blame_incremental', 'blame_incremental_2.11.1_plus'):
            git.return_value = fixture(git_fixture)
            blame_output = self.rorepo.blame_incremental('9debf6b0aafb6f7781ea9d1383c86939a1aacde3', 'AUTHORS')
            blame_output = list(blame_output)
            self.assertEqual(len(blame_output), 5)

            # Check all outputted line numbers
            ranges = flatten([entry.linenos for entry in blame_output])
            self.assertEqual(ranges, flatten([range(2, 3), range(14, 15), range(1, 2), range(3, 14), range(15, 17)]))

            commits = [entry.commit.hexsha[:7] for entry in blame_output]
            self.assertEqual(commits, ['82b8902', '82b8902', 'c76852d', 'c76852d', 'c76852d'])

            # Original filenames
            self.assertSequenceEqual([entry.orig_path for entry in blame_output], [u'AUTHORS'] * len(blame_output))

            # Original line numbers
            orig_ranges = flatten([entry.orig_linenos for entry in blame_output])
            self.assertEqual(orig_ranges, flatten([range(2, 3), range(14, 15), range(1, 2), range(2, 13), range(13, 15)]))   # noqa E501

    @patch.object(Git, '_call_process')
    def test_blame_complex_revision(self, git):
        git.return_value = fixture('blame_complex_revision')
        res = self.rorepo.blame("HEAD~10..HEAD", "README.md")
        self.assertEqual(len(res), 1)
        self.assertEqual(len(res[0][1]), 83, "Unexpected amount of parsed blame lines")

    @skipIf(HIDE_WINDOWS_KNOWN_ERRORS and Git.is_cygwin(),
            """FIXME: File "C:\\projects\\gitpython\\git\\cmd.py", line 671, in execute
                    raise GitCommandError(command, status, stderr_value, stdout_value)
                GitCommandError: Cmd('git') failed due to: exit code(128)
                  cmdline: git add 1__çava verböten 1_test _myfile 1_test_other_file
                          1_çava-----verböten
                  stderr: 'fatal: pathspec '"1__çava verböten"' did not match any files'
                """)
    @with_rw_repo('HEAD', bare=False)
    def test_untracked_files(self, rwrepo):
        for run, (repo_add, is_invoking_git) in enumerate((
                (rwrepo.index.add, False),
                (rwrepo.git.add, True),
        )):
            base = rwrepo.working_tree_dir
            files = (join_path_native(base, u"%i_test _myfile" % run),
                     join_path_native(base, "%i_test_other_file" % run),
                     join_path_native(base, u"%i__çava verböten" % run),
                     join_path_native(base, u"%i_çava-----verböten" % run))

            num_recently_untracked = 0
            for fpath in files:
                with open(fpath, "wb"):
                    pass
            untracked_files = rwrepo.untracked_files
            num_recently_untracked = len(untracked_files)

            # assure we have all names - they are relative to the git-dir
            num_test_untracked = 0
            for utfile in untracked_files:
                num_test_untracked += join_path_native(base, utfile) in files
            self.assertEqual(len(files), num_test_untracked)

            if is_win and not PY3 and is_invoking_git:
                ## On Windows, shell needed when passing unicode cmd-args.
                #
                repo_add = fnt.partial(repo_add, shell=True)
                untracked_files = [win_encode(f) for f in untracked_files]
            repo_add(untracked_files)
            self.assertEqual(len(rwrepo.untracked_files), (num_recently_untracked - len(files)))
        # end for each run

    def test_config_reader(self):
        reader = self.rorepo.config_reader()                # all config files
        assert reader.read_only
        reader = self.rorepo.config_reader("repository")    # single config file
        assert reader.read_only

    def test_config_writer(self):
        for config_level in self.rorepo.config_level:
            try:
                with self.rorepo.config_writer(config_level) as writer:
                    self.assertFalse(writer.read_only)
            except IOError:
                # it's okay not to get a writer for some configuration files if we
                # have no permissions
                pass

    def test_config_level_paths(self):
        for config_level in self.rorepo.config_level:
            assert self.rorepo._get_config_path(config_level)

    def test_creation_deletion(self):
        # just a very quick test to assure it generally works. There are
        # specialized cases in the test_refs module
        head = self.rorepo.create_head("new_head", "HEAD~1")
        self.rorepo.delete_head(head)

        try:
            tag = self.rorepo.create_tag("new_tag", "HEAD~2")
        finally:
            self.rorepo.delete_tag(tag)
        with self.rorepo.config_writer():
            pass
        try:
            remote = self.rorepo.create_remote("new_remote", "git@server:repo.git")
        finally:
            self.rorepo.delete_remote(remote)

    def test_comparison_and_hash(self):
        # this is only a preliminary test, more testing done in test_index
        self.assertEqual(self.rorepo, self.rorepo)
        self.assertFalse(self.rorepo != self.rorepo)
        self.assertEqual(len({self.rorepo, self.rorepo}), 1)

    @with_rw_directory
    def test_tilde_and_env_vars_in_repo_path(self, rw_dir):
        ph = os.environ.get('HOME')
        try:
            os.environ['HOME'] = rw_dir
            Repo.init(osp.join('~', 'test.git'), bare=True)

            os.environ['FOO'] = rw_dir
            Repo.init(osp.join('$FOO', 'test.git'), bare=True)
        finally:
            if ph:
                os.environ['HOME'] = ph
                del os.environ['FOO']
        # end assure HOME gets reset to what it was

    def test_git_cmd(self):
        # test CatFileContentStream, just to be very sure we have no fencepost errors
        # last \n is the terminating newline that it expects
        l1 = b"0123456789\n"
        l2 = b"abcdefghijklmnopqrstxy\n"
        l3 = b"z\n"
        d = l1 + l2 + l3 + b"\n"

        l1p = l1[:5]

        # full size
        # size is without terminating newline
        def mkfull():
            return Git.CatFileContentStream(len(d) - 1, BytesIO(d))

        ts = 5

        def mktiny():
            return Git.CatFileContentStream(ts, BytesIO(d))

        # readlines no limit
        s = mkfull()
        lines = s.readlines()
        self.assertEqual(len(lines), 3)
        self.assertTrue(lines[-1].endswith(b'\n'), lines[-1])
        self.assertEqual(s._stream.tell(), len(d))  # must have scrubbed to the end

        # readlines line limit
        s = mkfull()
        lines = s.readlines(5)
        self.assertEqual(len(lines), 1)

        # readlines on tiny sections
        s = mktiny()
        lines = s.readlines()
        self.assertEqual(len(lines), 1)
        self.assertEqual(lines[0], l1p)
        self.assertEqual(s._stream.tell(), ts + 1)

        # readline no limit
        s = mkfull()
        self.assertEqual(s.readline(), l1)
        self.assertEqual(s.readline(), l2)
        self.assertEqual(s.readline(), l3)
        self.assertEqual(s.readline(), b'')
        self.assertEqual(s._stream.tell(), len(d))

        # readline limit
        s = mkfull()
        self.assertEqual(s.readline(5), l1p)
        self.assertEqual(s.readline(), l1[5:])

        # readline on tiny section
        s = mktiny()
        self.assertEqual(s.readline(), l1p)
        self.assertEqual(s.readline(), b'')
        self.assertEqual(s._stream.tell(), ts + 1)

        # read no limit
        s = mkfull()
        self.assertEqual(s.read(), d[:-1])
        self.assertEqual(s.read(), b'')
        self.assertEqual(s._stream.tell(), len(d))

        # read limit
        s = mkfull()
        self.assertEqual(s.read(5), l1p)
        self.assertEqual(s.read(6), l1[5:])
        self.assertEqual(s._stream.tell(), 5 + 6)  # it's not yet done

        # read tiny
        s = mktiny()
        self.assertEqual(s.read(2), l1[:2])
        self.assertEqual(s._stream.tell(), 2)
        self.assertEqual(s.read(), l1[2:ts])
        self.assertEqual(s._stream.tell(), ts + 1)

    def _assert_rev_parse_types(self, name, rev_obj):
        rev_parse = self.rorepo.rev_parse

        if rev_obj.type == 'tag':
            rev_obj = rev_obj.object

        # tree and blob type
        obj = rev_parse(name + '^{tree}')
        self.assertEqual(obj, rev_obj.tree)

        obj = rev_parse(name + ':CHANGES')
        self.assertEqual(obj.type, 'blob')
        self.assertEqual(obj.path, 'CHANGES')
        self.assertEqual(rev_obj.tree['CHANGES'], obj)

    def _assert_rev_parse(self, name):
        """tries multiple different rev-parse syntaxes with the given name
        :return: parsed object"""
        rev_parse = self.rorepo.rev_parse
        orig_obj = rev_parse(name)
        if orig_obj.type == 'tag':
            obj = orig_obj.object
        else:
            obj = orig_obj
        # END deref tags by default

        # try history
        rev = name + "~"
        obj2 = rev_parse(rev)
        self.assertEqual(obj2, obj.parents[0])
        self._assert_rev_parse_types(rev, obj2)

        # history with number
        ni = 11
        history = [obj.parents[0]]
        for pn in range(ni):
            history.append(history[-1].parents[0])
        # END get given amount of commits

        for pn in range(11):
            rev = name + "~%i" % (pn + 1)
            obj2 = rev_parse(rev)
            self.assertEqual(obj2, history[pn])
            self._assert_rev_parse_types(rev, obj2)
        # END history check

        # parent ( default )
        rev = name + "^"
        obj2 = rev_parse(rev)
        self.assertEqual(obj2, obj.parents[0])
        self._assert_rev_parse_types(rev, obj2)

        # parent with number
        for pn, parent in enumerate(obj.parents):
            rev = name + "^%i" % (pn + 1)
            self.assertEqual(rev_parse(rev), parent)
            self._assert_rev_parse_types(rev, parent)
        # END for each parent

        return orig_obj

    @with_rw_repo('HEAD', bare=False)
    def test_rw_rev_parse(self, rwrepo):
        # verify it does not confuse branches with hexsha ids
        ahead = rwrepo.create_head('aaaaaaaa')
        assert(rwrepo.rev_parse(str(ahead)) == ahead.commit)

    def test_rev_parse(self):
        rev_parse = self.rorepo.rev_parse

        # try special case: this one failed at some point, make sure it's fixed
        self.assertEqual(rev_parse("33ebe").hexsha, "33ebe7acec14b25c5f84f35a664803fcab2f7781")

        # start from reference
        num_resolved = 0

        for ref_no, ref in enumerate(Reference.iter_items(self.rorepo)):
            path_tokens = ref.path.split("/")
            for pt in range(len(path_tokens)):
                path_section = '/'.join(path_tokens[-(pt + 1):])
                try:
                    obj = self._assert_rev_parse(path_section)
                    self.assertEqual(obj.type, ref.object.type)
                    num_resolved += 1
                except (BadName, BadObject):
                    print("failed on %s" % path_section)
                    # is fine, in case we have something like 112, which belongs to remotes/rname/merge-requests/112
                # END exception handling
            # END for each token
            if ref_no == 3 - 1:
                break
        # END for each reference
        assert num_resolved

        # it works with tags !
        tag = self._assert_rev_parse('0.1.4')
        self.assertEqual(tag.type, 'tag')

        # try full sha directly ( including type conversion )
        self.assertEqual(tag.object, rev_parse(tag.object.hexsha))
        self._assert_rev_parse_types(tag.object.hexsha, tag.object)

        # multiple tree types result in the same tree: HEAD^{tree}^{tree}:CHANGES
        rev = '0.1.4^{tree}^{tree}'
        self.assertEqual(rev_parse(rev), tag.object.tree)
        self.assertEqual(rev_parse(rev + ':CHANGES'), tag.object.tree['CHANGES'])

        # try to get parents from first revision - it should fail as no such revision
        # exists
        first_rev = "33ebe7acec14b25c5f84f35a664803fcab2f7781"
        commit = rev_parse(first_rev)
        self.assertEqual(len(commit.parents), 0)
        self.assertEqual(commit.hexsha, first_rev)
        self.failUnlessRaises(BadName, rev_parse, first_rev + "~")
        self.failUnlessRaises(BadName, rev_parse, first_rev + "^")

        # short SHA1
        commit2 = rev_parse(first_rev[:20])
        self.assertEqual(commit2, commit)
        commit2 = rev_parse(first_rev[:5])
        self.assertEqual(commit2, commit)

        # todo: dereference tag into a blob 0.1.7^{blob} - quite a special one
        # needs a tag which points to a blob

        # ref^0 returns commit being pointed to, same with ref~0, and ^{}
        tag = rev_parse('0.1.4')
        for token in (('~0', '^0', '^{}')):
            self.assertEqual(tag.object, rev_parse('0.1.4%s' % token))
        # END handle multiple tokens

        # try partial parsing
        max_items = 40
        for i, binsha in enumerate(self.rorepo.odb.sha_iter()):
            self.assertEqual(rev_parse(bin_to_hex(binsha)[:8 - (i % 2)].decode('ascii')).binsha, binsha)
            if i > max_items:
                # this is rather slow currently, as rev_parse returns an object
                # that requires accessing packs, so it has some additional overhead
                break
        # END for each binsha in repo

        # missing closing brace commit^{tree
        self.failUnlessRaises(ValueError, rev_parse, '0.1.4^{tree')

        # missing starting brace
        self.failUnlessRaises(ValueError, rev_parse, '0.1.4^tree}')

        # REVLOG
        #######
        head = self.rorepo.head

        # need to specify a ref when using the @ syntax
        self.failUnlessRaises(BadObject, rev_parse, "%s@{0}" % head.commit.hexsha)

        # uses HEAD.ref by default
        self.assertEqual(rev_parse('@{0}'), head.commit)
        if not head.is_detached:
            refspec = '%s@{0}' % head.ref.name
            self.assertEqual(rev_parse(refspec), head.ref.commit)
            # all additional specs work as well
            self.assertEqual(rev_parse(refspec + "^{tree}"), head.commit.tree)
            self.assertEqual(rev_parse(refspec + ":CHANGES").type, 'blob')
        # END operate on non-detached head

        # position doesn't exist
        self.failUnlessRaises(IndexError, rev_parse, '@{10000}')

        # currently, nothing more is supported
        self.failUnlessRaises(NotImplementedError, rev_parse, "@{1 week ago}")

        # the last position
        assert rev_parse('@{1}') != head.commit

    def test_repo_odbtype(self):
        target_type = GitCmdObjectDB
        self.assertIsInstance(self.rorepo.odb, target_type)

    def test_submodules(self):
        self.assertEqual(len(self.rorepo.submodules), 1)  # non-recursive
        self.assertGreaterEqual(len(list(self.rorepo.iter_submodules())), 2)

        self.assertIsInstance(self.rorepo.submodule("gitdb"), Submodule)
        self.failUnlessRaises(ValueError, self.rorepo.submodule, "doesn't exist")

    @with_rw_repo('HEAD', bare=False)
    def test_submodule_update(self, rwrepo):
        # fails in bare mode
        rwrepo._bare = True
        self.failUnlessRaises(InvalidGitRepositoryError, rwrepo.submodule_update)
        rwrepo._bare = False

        # test create submodule
        sm = rwrepo.submodules[0]
        sm = rwrepo.create_submodule("my_new_sub", "some_path", join_path_native(self.rorepo.working_tree_dir, sm.path))
        self.assertIsInstance(sm, Submodule)

        # note: the rest of this functionality is tested in test_submodule

    @with_rw_repo('HEAD')
    def test_git_file(self, rwrepo):
        # Move the .git directory to another location and create the .git file.
        real_path_abs = osp.abspath(join_path_native(rwrepo.working_tree_dir, '.real'))
        os.rename(rwrepo.git_dir, real_path_abs)
        git_file_path = join_path_native(rwrepo.working_tree_dir, '.git')
        with open(git_file_path, 'wb') as fp:
            fp.write(fixture('git_file'))

        # Create a repo and make sure it's pointing to the relocated .git directory.
        git_file_repo = Repo(rwrepo.working_tree_dir)
        self.assertEqual(osp.abspath(git_file_repo.git_dir), real_path_abs)

        # Test using an absolute gitdir path in the .git file.
        with open(git_file_path, 'wb') as fp:
            fp.write(('gitdir: %s\n' % real_path_abs).encode('ascii'))
        git_file_repo = Repo(rwrepo.working_tree_dir)
        self.assertEqual(osp.abspath(git_file_repo.git_dir), real_path_abs)

    def test_file_handle_leaks(self):
        def last_commit(repo, rev, path):
            commit = next(repo.iter_commits(rev, path, max_count=1))
            commit.tree[path]

        # This is based on this comment
        # https://github.com/gitpython-developers/GitPython/issues/60#issuecomment-23558741
        # And we expect to set max handles to a low value, like 64
        # You should set ulimit -n X, see .travis.yml
        # The loops below would easily create 500 handles if these would leak (4 pipes + multiple mapped files)
        for _ in range(64):
            for repo_type in (GitCmdObjectDB, GitDB):
                repo = Repo(self.rorepo.working_tree_dir, odbt=repo_type)
                last_commit(repo, 'master', 'git/test/test_base.py')
            # end for each repository type
        # end for each iteration

    def test_remote_method(self):
        self.failUnlessRaises(ValueError, self.rorepo.remote, 'foo-blue')
        self.assertIsInstance(self.rorepo.remote(name='origin'), Remote)

    @with_rw_directory
    def test_empty_repo(self, rw_dir):
        """Assure we can handle empty repositories"""
        r = Repo.init(rw_dir, mkdir=False)
        # It's ok not to be able to iterate a commit, as there is none
        self.failUnlessRaises(ValueError, r.iter_commits)
        self.assertEqual(r.active_branch.name, 'master')
        assert not r.active_branch.is_valid(), "Branch is yet to be born"

        # actually, when trying to create a new branch without a commit, git itself fails
        # We should, however, not fail ungracefully
        self.failUnlessRaises(BadName, r.create_head, 'foo')
        self.failUnlessRaises(BadName, r.create_head, 'master')
        # It's expected to not be able to access a tree
        self.failUnlessRaises(ValueError, r.tree)

        new_file_path = osp.join(rw_dir, "new_file.ext")
        touch(new_file_path)
        r.index.add([new_file_path])
        r.index.commit("initial commit\nBAD MESSAGE 1\n")

        # Now a branch should be creatable
        nb = r.create_head('foo')
        assert nb.is_valid()

        with open(new_file_path, 'w') as f:
            f.write('Line 1\n')

        r.index.add([new_file_path])
        r.index.commit("add line 1\nBAD MESSAGE 2\n")

        with open('%s/.git/logs/refs/heads/master' % (rw_dir,), 'r') as f:
            contents = f.read()

        assert 'BAD MESSAGE' not in contents, 'log is corrupt'

    def test_merge_base(self):
        repo = self.rorepo
        c1 = 'f6aa8d1'
        c2 = repo.commit('d46e3fe')
        c3 = '763ef75'
        self.failUnlessRaises(ValueError, repo.merge_base)
        self.failUnlessRaises(ValueError, repo.merge_base, 'foo')

        # two commit merge-base
        res = repo.merge_base(c1, c2)
        self.assertIsInstance(res, list)
        self.assertEqual(len(res), 1)
        self.assertIsInstance(res[0], Commit)
        self.assertTrue(res[0].hexsha.startswith('3936084'))

        for kw in ('a', 'all'):
            res = repo.merge_base(c1, c2, c3, **{kw: True})
            self.assertIsInstance(res, list)
            self.assertEqual(len(res), 1)
        # end for each keyword signalling all merge-bases to be returned

        # Test for a missing merge base: 'ffffff' is not a valid revision, so the command fails
        self.failUnlessRaises(GitCommandError, repo.merge_base, c1, 'ffffff')

    def test_is_ancestor(self):
        git = self.rorepo.git
        if git.version_info[:3] < (1, 8, 0):
            raise SkipTest("git merge-base --is-ancestor feature unsupported")

        repo = self.rorepo
        c1 = 'f6aa8d1'
        c2 = '763ef75'
        self.assertTrue(repo.is_ancestor(c1, c1))
        self.assertTrue(repo.is_ancestor("master", "master"))
        self.assertTrue(repo.is_ancestor(c1, c2))
        self.assertTrue(repo.is_ancestor(c1, "master"))
        self.assertFalse(repo.is_ancestor(c2, c1))
        self.assertFalse(repo.is_ancestor("master", c1))
        for i, j in itertools.permutations([c1, 'ffffff', ''], r=2):
            self.assertRaises(GitCommandError, repo.is_ancestor, i, j)

    @with_rw_directory
    def test_git_work_tree_dotgit(self, rw_dir):
        """Check that we find .git as a worktree file and find the worktree
        based on it."""
        git = Git(rw_dir)
        if git.version_info[:3] < (2, 5, 1):
            raise SkipTest("worktree feature unsupported")

        rw_master = self.rorepo.clone(join_path_native(rw_dir, 'master_repo'))
        branch = rw_master.create_head('aaaaaaaa')
        worktree_path = join_path_native(rw_dir, 'worktree_repo')
        if Git.is_cygwin():
            worktree_path = cygpath(worktree_path)
        rw_master.git.worktree('add', worktree_path, branch.name)

        # this ensures that we can read the repo's gitdir correctly
        repo = Repo(worktree_path)
        self.assertIsInstance(repo, Repo)

        # this ensures we're able to actually read the refs in the tree, which
        # means we can read commondir correctly.
        commit = repo.head.commit
        self.assertIsInstance(commit, Object)

        # this ensures we can read the remotes, which confirms we're reading
        # the config correctly.
        origin = repo.remotes.origin
        self.assertIsInstance(origin, Remote)

        self.assertIsInstance(repo.heads['aaaaaaaa'], Head)

    @with_rw_directory
    def test_git_work_tree_env(self, rw_dir):
        """Check that we yield to GIT_WORK_TREE"""
        # clone a repo
        # move .git directory to a subdirectory
        # set GIT_DIR and GIT_WORK_TREE appropriately
        # check that repo.working_tree_dir == rw_dir
        self.rorepo.clone(join_path_native(rw_dir, 'master_repo'))

        repo_dir = join_path_native(rw_dir, 'master_repo')
        old_git_dir = join_path_native(repo_dir, '.git')
        new_subdir = join_path_native(repo_dir, 'gitdir')
        new_git_dir = join_path_native(new_subdir, 'git')
        os.mkdir(new_subdir)
        os.rename(old_git_dir, new_git_dir)

        oldenv = os.environ.copy()
        os.environ['GIT_DIR'] = new_git_dir
        os.environ['GIT_WORK_TREE'] = repo_dir

        try:
            r = Repo()
            self.assertEqual(r.working_tree_dir, repo_dir)
            self.assertEqual(r.working_dir, repo_dir)
        finally:
            os.environ = oldenv

    @with_rw_directory
    def test_rebasing(self, rw_dir):
        r = Repo.init(rw_dir)
        fp = osp.join(rw_dir, 'hello.txt')
        r.git.commit("--allow-empty", message="init",)
        with open(fp, 'w') as fs:
            fs.write("hello world")
        r.git.add(Git.polish_url(fp))
        r.git.commit(message="English")
        self.assertEqual(r.currently_rebasing_on(), None)
        r.git.checkout("HEAD^1")
        with open(fp, 'w') as fs:
            fs.write("Hola Mundo")
        r.git.add(Git.polish_url(fp))
        r.git.commit(message="Spanish")
        commitSpanish = r.commit()
        try:
            r.git.rebase("master")
        except GitCommandError:
            pass
        self.assertEqual(r.currently_rebasing_on(), commitSpanish)
Example #49
0
 def _get_branches(self):
     c_git = Git(str(self._conf.get('path_to_repo')))
     branches = set()
     for branch in set(c_git.branch('--contains', self.hash).split('\n')):
         branches.add(branch.strip().replace('* ', ''))
     return branches
Example #50
0
from flask import Flask, render_template
from git import Git

app = Flask(__name__)
git = Git('')
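# Note: get_conflict() is not part of GitPython's Git API; the call is turned into
# a `git get-conflict` subprocess, so this presumably relies on a custom git
# command or alias being available.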


@app.route('/')
def index():
    git.get_conflict()
    return render_template("index.html")


if __name__ == '__main__':
    app.run()
Example #51
0
 def mktiny():
     return Git.CatFileContentStream(ts, BytesIO(d))
Example #52
0
def git_clone(path, url, branch='master'):
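    # git_environ() is assumed to be a context manager defined elsewhere that sets
    # up the environment (e.g. credentials or GIT_* variables) for the clone.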
    with git_environ():
        Git(path).clone(url, branch=branch, recursive=True)
Example #53
0
def main():
    os.chdir(ABS_PATH_OF_TOP_LEVEL_DIR)

    print("Copying all README.md files to docs.")
    with open("README.md") as f:
        readme_content = f.readlines()
    readme_content = [x.replace("docs/", "") for x in readme_content]
    with open("docs/index.md", "w") as f:
        f.writelines(readme_content)

    project_readmes = []
    for readme_file_path in glob.glob("projects/**/README.md", recursive=True):
        if "docs/" not in readme_file_path:
            new_path = os.path.join("docs", readme_file_path)
            os.makedirs(os.path.dirname(new_path), exist_ok=True)
            shutil.copy(readme_file_path, new_path)
            project_readmes.append(new_path)

    print("Copying LICENSE file to docs.")
    shutil.copy("LICENSE", "docs/LICENSE.md")

    print("Copying CONTRIBUTING.md file to docs.")
    shutil.copy("CONTRIBUTING.md", "docs/CONTRIBUTING.md")

    # print("Copying CNAME file to docs.")
    # shutil.copy("CNAME", "docs/CNAME")

    print("Building the docs.")
    parent_folder_path = Path(__file__).parent.parent
    yaml_path = parent_folder_path / "mkdocs.yml"
    source_path = parent_folder_path
    docs_dir = str(parent_folder_path / "docs" / "api")
    if not os.path.exists(docs_dir):
        os.mkdir(docs_dir)

    # Adding project readmes to the yaml
    yaml = YAML()
    mkdocs_yaml = yaml.load(yaml_path)
    site_nav = mkdocs_yaml["nav"]
    # TODO Find a way to do the following in a way that results in nice titles.
    # projects_key = "Projects using allenact"
    # nav_obj = None
    # for obj in site_nav:
    #     if projects_key in obj:
    #         nav_obj = obj
    #         break
    # nav_obj[projects_key] = project_readme_paths_to_nav_structure(project_readmes)

    with open(yaml_path, "w") as f:
        yaml.dump(mkdocs_yaml, f)

    # Collect the directories that contain git-tracked files, then drop the ones to ignore
    git_dirs = set(
        os.path.abspath(os.path.split(p)[0]) for p in Git(".").ls_files().split("\n")
    )
    ignore_rel_dirs = ["docs", "scripts", "experiments", "src", ".pip_src"]
    ignore_abs_dirs = set(
        os.path.abspath(os.path.join(str(parent_folder_path), rel_dir))
        for rel_dir in ignore_rel_dirs
    )
    for d in ignore_abs_dirs:
        if d in git_dirs:
            git_dirs.remove(d)

    threads: List = []
    nav_entries = build_docs(
        parent_folder_path,
        source_path,
        docs_dir,
        threads=threads,
        allowed_dirs=git_dirs,
    )
    nav_entries.sort(key=lambda x: list(x)[0], reverse=False)

    for thread in threads:
        thread.join()

    nav_entries = pruned_nav_entries(nav_entries)

    docs_key = "API"

    # Find the yaml corresponding to the API
    nav_obj = None
    for obj in site_nav:
        if docs_key in obj:
            nav_obj = obj
            break

    nav_obj[docs_key] = nav_entries

    with open(yaml_path, "w") as f:
        yaml.dump(mkdocs_yaml, f)
Example #54
0
        def remote_repo_creator(self):
            rw_daemon_repo_dir = tempfile.mktemp(prefix="daemon_repo-%s-" %
                                                 func.__name__)
            rw_repo_dir = tempfile.mktemp(prefix="daemon_cloned_repo-%s-" %
                                          func.__name__)

            rw_daemon_repo = self.rorepo.clone(rw_daemon_repo_dir,
                                               shared=True,
                                               bare=True)
            # recursive alternates info ?
            rw_repo = rw_daemon_repo.clone(rw_repo_dir,
                                           shared=True,
                                           bare=False,
                                           n=True)
            try:
                rw_repo.head.commit = working_tree_ref
                rw_repo.head.reference.checkout()

                # prepare for git-daemon
                rw_daemon_repo.daemon_export = True

                # this thing is just annoying !
                with rw_daemon_repo.config_writer() as crw:
                    section = "daemon"
                    try:
                        crw.add_section(section)
                    except Exception:
                        pass
                    crw.set(section, "receivepack", True)

                # Initialize the remote - first do it as local remote and pull, then
                # we change the url to point to the daemon.
                d_remote = Remote.create(rw_repo, "daemon_origin",
                                         rw_daemon_repo_dir)
                d_remote.fetch()

                base_daemon_path, rel_repo_dir = osp.split(rw_daemon_repo_dir)

                remote_repo_url = Git.polish_url(
                    "git://localhost:%s/%s" % (GIT_DAEMON_PORT, rel_repo_dir))
                with d_remote.config_writer as cw:
                    cw.set('url', remote_repo_url)

                with git_daemon_launched(
                        Git.polish_url(
                            base_daemon_path,
                            is_cygwin=False),  # No daemon in Cygwin.
                        '127.0.0.1',
                        GIT_DAEMON_PORT):
                    # Try listing remotes, to diagnose whether the daemon is up.
                    rw_repo.git.ls_remote(d_remote)

                    with cwd(rw_repo.working_dir):
                        try:
                            return func(self, rw_repo, rw_daemon_repo)
                        except:
                            log.info(
                                "Keeping repos after failure: \n  rw_repo_dir: %s \n  rw_daemon_repo_dir: %s",
                                rw_repo_dir, rw_daemon_repo_dir)
                            rw_repo_dir = rw_daemon_repo_dir = None
                            raise

            finally:
                rw_repo.git.clear_cache()
                rw_daemon_repo.git.clear_cache()
                del rw_repo
                del rw_daemon_repo
                gc.collect()
                gitdb.util.mman.collect()
                gc.collect()
                if rw_repo_dir:
                    rmtree(rw_repo_dir)
                if rw_daemon_repo_dir:
                    rmtree(rw_daemon_repo_dir)
Example #55
0
 def _open_git(self) -> Git:
     return Git(str(self.path))
Example #56
0
        help=
        "Always generate at least the most recent schema, regardless of the commit file."
    )
    # Options, parse 'em
    (options, cmd_input) = optparser.parse_args()

    if options.verbose:
        logging.getLogger().setLevel(logging.DEBUG)

    # Do some standard initialization
    dt_path = os.path.join(root_dir, "data_types.csv")
    dictionary_dir = os.path.join(root_dir, 'nmr-star-dictionary')

    # Pull changes
    if not os.path.exists(dictionary_dir):
        Git(root_dir).clone(
            'https://github.com/uwbmrb/nmr-star-dictionary.git')
    repo = Repo(dictionary_dir)
    repo.remotes.origin.pull()
    most_recent_commit = repo.commit()

    schema_dir = os.path.join(root_dir, "schema_data")
    if not os.path.exists(schema_dir):
        os.mkdir(schema_dir)

    # Quit early if there aren't any new commits
    last_commit_file = os.path.join(schema_dir, 'last_commit')
    if os.path.exists(last_commit_file) and open(last_commit_file, 'r').read() == str(most_recent_commit) and \
            not options.force and not options.full:
        print('Schemas already up to date according to git commit stored.')
        sys.exit(0)
Example #57
0
 def mkfull():
     return Git.CatFileContentStream(len(d) - 1, BytesIO(d))
Example #58
0
    assert len(tagged_commits) == 0


@pytest.mark.parametrize('path', ['test-repos/szz/'])
def test_get_commits_last_modified_lines_hyper_blame(repo):

    buggy_commits = repo.get_commits_last_modified_lines(repo.get_commit(
        'e6d3b38a9ef683e8184eac10a0471075c2808bbd'))

    assert len(buggy_commits) == 1
    assert '540c7f31c18664a38190fafb6721b5174ff4a166' in buggy_commits[
        'B.java']


@pytest.mark.skipif(Git().version_info < (2, 23),
                    reason="requires 2.23 or higher")
@pytest.mark.parametrize('path', ['test-repos/szz/'])
def test_get_commits_last_modified_lines_hyper_blame_unblamable(tmp_path,
                                                                repo):
    p = tmp_path / "ignore.txt"
    p.write_text("540c7f31c18664a38190fafb6721b5174ff4a166")


    buggy_commits = repo.get_commits_last_modified_lines(repo.get_commit(
        'e6d3b38a9ef683e8184eac10a0471075c2808bbd'),
        hashes_to_ignore_path=str(p))

    assert len(buggy_commits) == 0

Example #59
0
import git
from git import Git
from shutil import copyfile

# Grab todays date
now = datetime.datetime.now()
now = now.strftime("%Y-%m-%d")

# Cloning Tracy's repo
git.Git().clone("https://github.com/triketora/women-in-software-eng.git",
                "/women-in-tech-datasets/triketora")

# Our repo
cloned_repo = git.cmd.Git("/women-in-tech-datasets/triketora")
cloned_repo1 = Git("/women-in-tech-datasets/triketora")

# Getting sha for historic commits
loginfo = cloned_repo.log('--format=format:%H', '--', '--', 'data.txt')
# Converting it into an array
loginfo_array = loginfo.split('\n')
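# Check out each historical revision and keep a per-commit snapshot of data.txt,
# recording the snapshot file names for later parsing.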

ntp = open("/datasets/tracy_data/new_to_parse.txt", "a")
for hexsha in loginfo_array:
    cloned_repo1.checkout(hexsha)
    copyfile("/women-in-tech-datasets/triketora/data.txt",
             "/datasets/tracy_data/data_%s.txt" % hexsha)
    ntp.write("data_%s.txt\n" % hexsha)
ntp.close()

f = open("/datasets/tracy_data/success_runDate.txt")
Example #60
0
def git_daemon_launched(base_path, ip, port):
    from git import Git  # Avoid circular deps.

    gd = None
    try:
        if is_win:
            ## On MINGW-git, daemon exists in .\Git\mingw64\libexec\git-core\,
            #  but if invoked as 'git daemon', it detaches from parent `git` cmd,
            #  and then CANNOT DIE!
            #  So, invoke it as a single command.
            ## Cygwin-git has no daemon.  But it can use MINGW's.
            #
            daemon_cmd = [
                'git-daemon', '--enable=receive-pack',
                '--listen=%s' % ip,
                '--port=%s' % port,
                '--base-path=%s' % base_path, base_path
            ]
            gd = Git().execute(daemon_cmd, as_process=True)
        else:
            gd = Git().daemon(base_path,
                              enable='receive-pack',
                              listen=ip,
                              port=port,
                              base_path=base_path,
                              as_process=True)
        # yes, I know ... fortunately, this is always going to work if sleep time is just large enough
        time.sleep(0.5 * (1 + is_win))
    except Exception as ex:
        msg = textwrap.dedent("""
        Launching git-daemon failed due to: %s
          Probably test will fail subsequently.

          BUT you may start *git-daemon* manually with this command:"
                git daemon --enable=receive-pack  --listen=%s --port=%s --base-path=%s  %s
          You may also run the daemon on a different port by passing --port=<port>"
          and setting the environment variable GIT_PYTHON_TEST_GIT_DAEMON_PORT to <port>
        """)
        if is_win:
            msg += textwrap.dedent("""

            On Windows,
              the `git-daemon.exe` must be in PATH.
              For MINGW, look into .\Git\mingw64\libexec\git-core\), but problems with paths might appear.
              CYGWIN has no daemon, but if one exists, it gets along fine (but has also paths problems)."""
                                   )
        log.warning(msg, ex, ip, port, base_path, base_path, exc_info=1)

        yield  # OK, assume daemon started manually.

    else:
        yield  # Yield outside try, to avoid catching
    finally:
        if gd:
            try:
                log.debug("Killing git-daemon...")
                gd.proc.kill()
            except Exception as ex:
                ## Either it has died (and we're here), or it won't die, again here...
                log.debug("Hidden error while Killing git-daemon: %s",
                          ex,
                          exc_info=1)