Example #1
    def _prepare_output_git(self, branch, input_work_dir, output_work_dir,
                            output_reference):
        start_commit = self.output_branch_name(branch)
        new_branch = False
        try:
            git.clone(
                output_reference,
                output_work_dir,
                options=['--branch',
                         self.output_branch_name(branch), '-q'])
            git.set_origin_url(self._output_tree, output_work_dir)
            try:
                git.commit_env_vars(self.output_branch_name(branch),
                                    tree=output_work_dir)
            except git.GitError:
                shutil.rmtree(output_work_dir)
                self.debug('clone incorrect - new branch?')
                raise Exception('clone incorrect - new branch?')
        except:
            # assume error was due to branch not found
            new_branch = True
            start_commit = self._check_new_branch(branch, input_work_dir,
                                                  output_work_dir,
                                                  output_reference)
            git.reset(['--hard', start_commit], tree=output_work_dir)

        try:
            git.fetch('refs/notes/mirror:refs/notes/mirror',
                      tree=output_work_dir)
        except:
            pass

        return start_commit, new_branch
Example #2
def benchmark(git_repo,
              src_dir,
              branches,
              git_origin,
              images,
              qmake_file,
              binary,
              clone_url=None):
    if not os.path.exists(git_repo):
        if clone_url is not None:
            git.clone_repo(clone_url, git_repo)
            git.fetch(git_repo)
            git.replace_remote(git_repo, clone_url)
        else:
            git.clone_repo(git_origin, git_repo)
    git.fetch_all(git_repo)
    results = {"computer_name": computer_name()}
    for branch in branches:
        git.checkout_branch(git_repo, branch)
        git.clean(src_dir)
        compile.compile(qmake_file, cwd=src_dir)

        r = {"revision": git.current_revision(git_repo)}
        r["results"] = run(src_dir + "/" + binary, images)
        results[branch] = r

    return results
Example #3
def get_git(scheme, url, target, overwrite, tag):
    import git

    if os.path.exists(target + '/.git'):
        if not overwrite: return
    else:
        if len(scheme) == 1: giturl = url
        else: giturl = url[4:]
        git.clone(giturl, target)

    fs.goto(target)
    git.fetch()
    out = git.branch()
    for line in out.split('\n'):
        if not line: continue
        if line[0] != '*': continue
        out = line.split()[1]
        break
    #print out,tag
    if out != tag:
        lbranches,rbranches = git.branches()
        if tag in lbranches:
            git.checkout(tag)
        else:
            # git 1.5 does not put remotes/ like 1.6 does
            from exception import CommandFailure
            try:
                git.checkout('origin/'+tag,tag)
            except CommandFailure:
                git.checkout('remotes/origin/'+tag,tag)
    git.pull()
    fs.goback()
    return
Example #4
def get_branch_files(repository, check_upstream=False):
    if check_upstream and repository.github.upstream is not None:
        git.fetch(
            'upstream', '--no-tags', '%s:refs/remotes/upstream/%s' % \
                (repository.github.branch, repository.github.branch))

        diff_output = git.diff(
            '--name-only', 'origin/%s..upstream/%s' % \
                (repository.github.branch, repository.github.branch),
            strip=False)

        git.rebase('upstream/%s' % repository.github.branch)
    else:
        diff_output = '\n'.join([
            line[3:] for line in git.status('status', '-s', strip=False)
            if len(line) > 3
        ])

    new_files = get_eligible_files(repository, diff_output, 'en')

    lstree_output = git.ls_tree('-r',
                                '--name-only',
                                repository.github.branch,
                                strip=False)
    all_files = get_eligible_files(repository, lstree_output, 'en')

    return new_files, all_files
Example #5
def git_update_remote_heads():
    git.fetch()

    remote_branch_heads = git.get_all_remote_branch_heads(branch_regexp=config.branch_name_regexp)
    dbcon.executemany(
        "insert or ignore into branch(branch, git_remote_head_remote) values (?, ?);", remote_branch_heads
    )
    dbcon.executemany(
        "update branch set git_remote_head_remote=? where branch=?;",
        map(lambda (branch, head): (head, branch), remote_branch_heads),
    )

    git_master_head_remote = git.get_head("remotes/origin/master")
    dbcon.execute("""update branch set git_master_head_remote=?;""", (git_master_head_remote,))
Example #6
def benchmark(git_repo, src_dir, branches, git_origin, images, qmake_file, binary, clone_url=None):
    if not os.path.exists(git_repo):
        if clone_url is not None:
            git.clone_repo(clone_url, git_repo)
            git.fetch(git_repo)
            git.replace_remote(git_repo, clone_url)
        else:
            git.clone_repo(git_origin, git_repo)
    git.fetch_all(git_repo)
    results = {"computer_name": computer_name()}
    for branch in branches:
        git.checkout_branch(git_repo, branch)
        git.clean(src_dir)
        compile.compile(qmake_file,cwd=src_dir)

        r = {"revision": git.current_revision(git_repo)}
        r["results"] = run(src_dir+"/"+binary, images)
        results[branch] = r

    return results
Example #7
	def __call__(self):
		self.log.write('New changesets have been pushed.\n')

		# See if the push changed something in the master branch
		git.reset('origin/master')
		current_id = git.id()
		git.fetch()
		git.reset(self.revision)

		if current_id != self.revision:
			self.log.write('The master branch has new commits.\n')

			# Make a new development snapshot
			builder = snapshotbuilder.SnapshotBuilder(self.amqp_connection, self.revision, self.log, 'openclonk', False)
			# TODO: Remove all other snapshot builders from the queue
			self.queue.put(50, builder)

			# Also make a new mape build. In principle we could do this only if something in the
			# mape directory or any of the other files used by mape change, but let's keep it simple here.
			builder = snapshotbuilder.SnapshotBuilder(self.amqp_connection, self.revision, self.log, 'mape', False)
			# TODO: Remove all other snapshot builders from the queue
			self.queue.put(70, builder)

			# See if something in the docs directory has changed
			log = git.log('docs', current_id, self.revision, 'oneline')
			if len(log) > 1 or (len(log) == 1 and log[0] != current_id):
				# TODO: Remove all other doc builders from the queue
				builder = docbuilder.DocBuilder(self.revision, self.log)
				self.queue.put(80, builder)

		else:
			self.log.write('The master branch has no new commits.\n')

		# TODO: Make a release if the version changed

		return True
Example #8
def fetch_and_reset(repository_path,
                    b_verbose=False,
                    revision='origin/master',
                    remote='origin'):
    """
    cd to repository_path
    git fetch
    git reset origin/master
    return to original path
    """

    org_path = repo_path.cd(repository_path)

    clean_repo_before_update(b_verbose=b_verbose,
                             caller_name='fetch_and_reset')

    git.checkout('master', b_verbose=b_verbose)

    stdout, stderr = git.fetch(remote)

    clean_repo_after_error(
        stdout,
        stderr,
        'fetch_and_reset__fetch',
        b_verbose=b_verbose,
    )

    stdout, stderr = git.reset_hard_revision(revision)

    clean_repo_after_error(
        stdout,
        stderr,
        'fetch_and_reset__reset',
        b_verbose=b_verbose,
    )

    os.chdir(org_path)
Example #9
    def _config(self, remote, conf, groups):
        """
        Builds the groups file and project.config file for a project.

        @param remote - gerrit.Remote object
        @param conf - Dict containing git config information
        @param groups - List of groups

        """
        if not self.config:
            return

        msg = "Project %s: Configuring." % self.name
        logger.info(msg)
        print msg

        repo_dir = '~/tmp'
        repo_dir = os.path.expanduser(repo_dir)
        repo_dir = os.path.abspath(repo_dir)

        uuid_dir = str(uuid4())
        repo_dir = os.path.join(repo_dir, uuid_dir)

        # Make Empty directory - We want this to stop and fail on OSError
        logger.debug(
            "Project %s: Creating directory %s" % (self.name, repo_dir)
        )
        os.makedirs(repo_dir)

        # Save the current working directory
        old_cwd = os.getcwd()

        origin = 'origin'

        try:
            # Change cwd to that repo
            os.chdir(repo_dir)

            # Git init empty directory
            git.init()

            # Add remote origin
            ssh_url = 'ssh://%s@%s:%s/%s' % (
                remote.username,
                remote.host,
                remote.port,
                self.name
            )

            git.add_remote(origin, ssh_url)

            # Fetch refs/meta/config for project
            refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
            git.fetch(origin, refspec)

            # Checkout refs/meta/config
            git.checkout_branch('meta/config')

            # Get md5 of existing config
            _file = os.path.join(repo_dir, 'project.config')
            contents = ''
            try:
                with open(_file, 'r') as f:
                    contents = f.read()
            except IOError:
                pass
            existing_md5 = hashlib.md5(contents).hexdigest()

            # Get md5 of new config
            with open(self.config, 'r') as f:
                contents = f.read()
            new_md5 = hashlib.md5(contents).hexdigest()

            msg = "Project %s: Md5 comparison\n%s\n%s"
            msg = msg % (self.name, existing_md5, new_md5)
            logger.debug(msg)
            print msg

            # Only alter if checksums do not match
            if existing_md5 != new_md5:

                logger.debug(
                    "Project %s: config md5's are different." % self.name
                )

                # Update project.config file
                _file = os.path.join(repo_dir, 'project.config')
                with open(_file, 'w') as f:
                    f.write(contents)

                # Update groups file
                group_contents = groups_file_contents(groups)
                _file = os.path.join(repo_dir, 'groups')
                with open(_file, 'w') as f:
                    f.write(group_contents)

                # Git config user.email
                git.set_config('user.email', conf['git-config']['email'])

                # Git config user.name
                git.set_config('user.name', conf['git-config']['name'])

                # Add groups and project.config
                git.add(['groups', 'project.config'])

                # Git commit
                git.commit(message='Setting up %s' % self.name)

                # Git push
                git.push(origin, refspecs='meta/config:refs/meta/config')
                logger.info("Project %s: pushed configuration." % self.name)

            else:
                msg = "Project %s: config unchanged." % self.name
                logger.info(msg)
                print msg

        finally:
            # Change to old current working directory
            os.chdir(old_cwd)

            # Attempt to clean up created directory
            shutil.rmtree(repo_dir)
Example #10
base_dockerfile = "/Users/ahonnecke/Code/repos/web-batch/docker/Dockerfile.base"

chrome_version = get_chrome_driver_version()
print(f'Latest chrome version {chrome_version}')

bionic_tag = get_bionic_version()
print(f'Latest bionic version {bionic_tag}')

web_repo = git.Repo("/Users/ahonnecke/Code/repos/web-batch/")
git = web_repo.git

git.reset('--hard', 'upstream/master')
git.checkout('upstream/master')

call(['git', 'fetch', '--all'])
git.fetch('--all')

with open(base_dockerfile, 'r') as reader:
    content = reader.read()
    content_new = re.sub(
        'FROM ubuntu:.*',
        r'FROM ubuntu:' + str(bionic_tag),
        content,
        flags=re.M
    )

with open(base_dockerfile, "w") as writer:
    writer.write(content_new)
    writer.close()

with open(base_dockerfile, 'r') as reader:
Example #11
forward_branch = "forward/" + source_branch
git = repo.git

# We do not want to use any pre-existing branch.
try:
    git.branch('-D', forward_branch)
except:
    pass

git.checkout('HEAD', b=forward_branch)

# If using a local repository we want to fetch from the remote.
if options.path_to_repo:
    print "Fetching from remote repository."
    git.fetch()

# TODO: Add merging behavior, see:
# http://gitpython.readthedocs.io/en/stable/tutorial.html#using-git-directly

# TODO: Add continue option. This should be used after conflicts are resolved.
# This will commit and push the changes.
if options.continue_fp:
    sys.exit("The continue option is not yet supported.")
    # TODO: It would be nice to then view the diff here (or be provided with a
    # link of the diff) and then enable one to open a pull request locally. PR
    # should be named something like 'Fwdport forward/rel/6.2.5 to 6.3'. It
    # should be opened against the target branch.

# TODO: Cleanup any cloned repository.
Example #12
    def _config(self, remote, conf, groups):
        """
        Builds the groups file and project.config file for a project.

        @param remote - gerrit.Remote object
        @param conf - Dict containing git config information
        @param groups - List of groups

        """
        if not self.config:
            return

        msg = "Project %s: Configuring." % self.name
        logger.info(msg)
        print msg

        repo_dir = '~/tmp'
        repo_dir = os.path.expanduser(repo_dir)
        repo_dir = os.path.abspath(repo_dir)

        uuid_dir = str(uuid4())
        repo_dir = os.path.join(repo_dir, uuid_dir)

        # Make Empty directory - We want this to stop and fail on OSError
        logger.debug("Project %s: Creating directory %s" %
                     (self.name, repo_dir))
        os.makedirs(repo_dir)

        # Save the current working directory
        old_cwd = os.getcwd()

        origin = 'origin'

        try:
            # Change cwd to that repo
            os.chdir(repo_dir)

            # Git init empty directory
            git.init()

            # Add remote origin
            ssh_url = 'ssh://%s@%s:%s/%s' % (remote.username, remote.host,
                                             remote.port, self.name)

            git.add_remote(origin, ssh_url)

            # Fetch refs/meta/config for project
            refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
            git.fetch(origin, refspec)

            # Checkout refs/meta/config
            git.checkout_branch('meta/config')

            # Get md5 of existing config
            _file = os.path.join(repo_dir, 'project.config')
            contents = ''
            try:
                with open(_file, 'r') as f:
                    contents = f.read()
            except IOError:
                pass
            existing_md5 = hashlib.md5(contents).hexdigest()

            # Get md5 of new config
            with open(self.config, 'r') as f:
                contents = f.read()
            new_md5 = hashlib.md5(contents).hexdigest()

            msg = "Project %s: Md5 comparison\n%s\n%s"
            msg = msg % (self.name, existing_md5, new_md5)
            logger.debug(msg)
            print msg

            # Only alter if checksums do not match
            if existing_md5 != new_md5:

                logger.debug("Project %s: config md5's are different." %
                             self.name)

                # Update project.config file
                _file = os.path.join(repo_dir, 'project.config')
                with open(_file, 'w') as f:
                    f.write(contents)

                # Update groups file
                group_contents = groups_file_contents(groups)
                _file = os.path.join(repo_dir, 'groups')
                with open(_file, 'w') as f:
                    f.write(group_contents)

                # Git config user.email
                git.set_config('user.email', conf['git-config']['email'])

                # Git config user.name
                git.set_config('user.name', conf['git-config']['name'])

                # Add groups and project.config
                git.add(['groups', 'project.config'])

                # Git commit
                git.commit(message='Setting up %s' % self.name)

                # Git push
                git.push(origin, refspecs='meta/config:refs/meta/config')
                logger.info("Project %s: pushed configuration." % self.name)

            else:
                msg = "Project %s: config unchanged." % self.name
                logger.info(msg)
                print msg

        finally:
            # Change to old current working directory
            os.chdir(old_cwd)

            # Attempt to clean up created directory
            shutil.rmtree(repo_dir)
Example #13
            os.unlink(ssh_wrapper_path)

    remote = f'git@github.com:{os.environ["GITHUB_REPOSITORY"]}'
else:

    @contextmanager
    def git_auth():
        """
        No-op context manager.
        """
        yield

    remote = f'https://{token}@github.com/{os.environ["GITHUB_REPOSITORY"]}'
git.remote('add', 'gh-token', remote)
with git_auth():
    git.fetch('gh-token')
    # reset will fail if the 'generated-documentation' branch doesn't yet exist.
    # That's fine, it will exist after our push. Just note the error and move on.
    try:
        git.reset('gh-token/generated-documentation')
    except GitCommandError:
        print(NOTICE + "Couldn't git reset generated-documentation." + ENDC)
        print(NOTICE +
              "This error is expected if the branch doesn't exist yet." + ENDC)

    print(INFO + "Running 'mkdocs gh-deploy'." + ENDC)
    os.chdir(docs_build_dir)
    rslt = os.system(
        'mkdocs gh-deploy --verbose --clean --remote-name gh-token '
        '--remote-branch generated-documentation')
if rslt != 0:
Example #14
	def __call__(self):
		if self.dry_release:
			self.log.write('Dry-Releasing revision %s...\n' % self.revision)
		else:
			self.log.write('Releasing revision %s...\n' % self.revision)

		# Update to revision and get hexadecimal ID.
		# TODO: Reset back to 'origin/master' afterwards

		# TODO: The following could be checked easier maybe...
		prefix = ''
		if not self.revision.startswith('v'):
			for x in 'ghijklmnopqrstuvwxyz':
				if len(prefix) == 0 and x in self.revision:
					prefix = 'origin/'

		git.fetch()
		git.reset('%s%s' % (prefix, self.revision))
		revision = git.id()[:12]

		(major, minor) = self.parse_version_file('Version.txt')

		self.log.write('==> Version %d.%d\n' % (major, minor))

		dry_suffix = ''
		if self.dry_release: dry_suffix = '-dry'
		archive = os.path.join(self.archive_dir, '%d.%d%s' % (major, minor, dry_suffix))

		if os.path.exists(archive):
			self.log.write('Archive directory %s exists already. Clearing...\n' % archive)
			shutil.rmtree(archive)

		os.mkdir(archive)

		# Copy game content to archive
		self.log.write('Copying and packing game content to archive...\n')
		content = [] # game content
		others  = [] # other misc. non-architecture dependent files
		for filename, stream in contentiter.ContentIter(groupcontent.release):
			self.log.write('%s...\n' % filename)
			if contentiter.ContentIter.is_group_file(filename):
				content.append(filename)
			else:
				others.append(filename)

			destination = os.path.join(archive, filename)
			_create_and_open(destination).write(stream.read()) # TODO: copyfileobj?

		# Create architecture specific files:
		all_files = {}
		for arch in arches.arches:
			# Obtain clonk and c4group binaries and dependencies and save in archive/$arch
			self.log.write('Creating architecture dependent files for %s...\n' % arch)

			archdir = os.path.join(archive, arch)
			os.mkdir(archdir)

			binaries = []
			if arch.startswith('darwin'):
				result, uuid = autobuild.obtain(self.amqp_connection, revision, arch, ['openclonk'])
				filename, stream = result[0]
				_create_and_open(os.path.join(archdir, filename)).write(stream.read())
				binaries.append(filename)
			else:
				# Copy both binaries and dependencies into archive. 
				for filename, stream in architer.ArchIter(self.amqp_connection, arch, revision, 'openclonk'):
					_create_and_open(os.path.join(archdir, filename)).write(stream.read())
					if architer.ArchIter.is_executable(filename):
						os.chmod(os.path.join(archdir, filename), 0755)
					binaries.append(filename)

			# Create distribution directory and copy both common and
			# architecture dependent files there.
			distdir = os.path.join(archdir, 'openclonk-%d.%d' % (major, minor))
			os.mkdir(distdir)
			if not arch.startswith('darwin'):
				for filename in content + others:
					_ensure_dir_exists(os.path.dirname(os.path.join(distdir, filename)))
					shutil.copy(os.path.join(archive, filename), os.path.join(distdir, filename))
			for filename in binaries:
				_ensure_dir_exists(os.path.dirname(os.path.join(distdir, filename)))
				shutil.copy(os.path.join(archdir, filename), os.path.join(distdir, filename))

			# Create full installation package
			file = self.pack_full_installation(distdir, arch)
			all_files[arch] = file

		# TODO: Create a source tarball

		uploader = upload.Uploader(self.log, self.dry_release)
		
		# TODO uncomment when source tarball created
		#uploader.release_file(source_package_filename, (major, minor))
		
		for arch,file in all_files.items():
			uploader.release_binaries(file, arch, (major, minor))
			os.unlink(file)

		# Remove the archive if this was a dry release
		if self.dry_release:
			shutil.rmtree(archive)
		return True