Example #1
  def apply(self, reverse=False, stop_on_error=True, commit=False):
    all_patches_applied = True
    failed_patches = []

    for patch in self.patches:
      # Even if `commit` is True, we don't commit individual patches:
      # that takes too much time in the Chromium repo (applying all
      # commits takes about 10 minutes (!) on a fast dev machine).
      # Instead, we only add all changes to the index and commit them
      # all at once later.
      applied_successfully = patch.apply(reverse=reverse, index=commit, commit=False)

      if not applied_successfully:
        all_patches_applied = False
        failed_patches.append(patch)

      should_stop_now = not applied_successfully and stop_on_error
      if should_stop_now:
        break

    if commit and not all_patches_applied:
      git.reset(self.repo_path)

    if commit and all_patches_applied:
      author = 'Electron Build Process <*****@*****.**>'
      message = 'Apply Electron patches'
      git.commit(self.repo_path, author=author, message=message)

    return (all_patches_applied, failed_patches)
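The batching strategy in the comment maps onto plain git commands roughly as follows. This is an illustrative sketch using subprocess rather than the project's own git wrapper; `repo_path` and `patch_files` are hypothetical names:

import subprocess

def apply_patches_batched(repo_path, patch_files):
    # `git apply --index` applies each patch and stages it without committing.
    for patch_file in patch_files:
        subprocess.check_call(['git', 'apply', '--index', patch_file],
                              cwd=repo_path)
    # One commit for all staged patches, as the comment above describes.
    subprocess.check_call(['git', 'commit', '-m', 'Apply patches'],
                          cwd=repo_path)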
Example #2
    def _prepare_output_git(self, branch, input_work_dir, output_work_dir,
                            output_reference):
        start_commit = self.output_branch_name(branch)
        new_branch = False
        try:
            git.clone(
                output_reference,
                output_work_dir,
                options=['--branch',
                         self.output_branch_name(branch), '-q'])
            git.set_origin_url(self._output_tree, output_work_dir)
            try:
                git.commit_env_vars(self.output_branch_name(branch),
                                    tree=output_work_dir)
            except git.GitError:
                shutil.rmtree(output_work_dir)
                self.debug('clone incorrect - new branch?')
                raise Exception('clone incorrect - new branch?')
        except Exception:
            # assume error was due to branch not found
            new_branch = True
            start_commit = self._check_new_branch(branch, input_work_dir,
                                                  output_work_dir,
                                                  output_reference)
            git.reset(['--hard', start_commit], tree=output_work_dir)

        try:
            git.fetch('refs/notes/mirror:refs/notes/mirror',
                      tree=output_work_dir)
        except Exception:
            pass

        return start_commit, new_branch
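The final fetch pulls the refs/notes/mirror notes ref that the mirroring code uses. Assuming the wrapper forwards the refspec to the default remote, the equivalent direct invocation would be roughly (a sketch, not the project's code):

import subprocess

# Fetch the mirror notes from origin into the local notes namespace.
subprocess.check_call(
    ['git', 'fetch', 'origin', 'refs/notes/mirror:refs/notes/mirror'],
    cwd=output_work_dir)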
Example #3
    def __call__(self):
        # TODO: Exception safety
        self.log.write('updating from repository... ')
        starttime = datetime.datetime.now()
        git.reset(self.revision)
        timedelta = datetime.datetime.now() - starttime
        self.log.write('done. (took ' + str(total_seconds(timedelta)) +
                       's) \n')

        # Build the documentation
        self.log.write('building docs... ')
        starttime = datetime.datetime.now()
        make.make('docs')
        make.run('docs')
        timedelta = datetime.datetime.now() - starttime
        self.log.write('done. (took ' + str(total_seconds(timedelta)) +
                       's) \n')
        git.reset('master')  # Revert possible modifications to .po files

        username = '******'
        with open('../passwd/docs.txt', 'r') as passwd_file:
            passwd = passwd_file.read().strip()
        ftphost = FTPHostFix.connect('ftp.openclonk.org',
                                     user=username,
                                     password=passwd)

        # upload to /new
        self.log.write('uploading new docs... \n')
        starttime = datetime.datetime.now()
        self.__upload_new_files(ftphost)
        timedelta = datetime.datetime.now() - starttime
        self.log.write('done. (took ' + str(total_seconds(timedelta)) +
                       's) \n')

        # copy script files over to /new
        self.log.write('copying script files... \n')
        starttime = datetime.datetime.now()
        self.__copy_old_script_files(ftphost)
        timedelta = datetime.datetime.now() - starttime
        self.log.write('done. (took ' + str(total_seconds(timedelta)) +
                       's) \n')

        # move everything in / to /old (except just uploaded new directory)
        # move everything in /new to /
        self.log.write('replacing docs... ')
        self.__replace_files(ftphost)
        self.log.write('done. \n')

        # at this point, the new version is online.

        # delete /old folder
        # walk reverse cause the bottom-most folders need to be deleted first
        self.log.write('deleting old docs... \n')
        starttime = datetime.datetime.now()
        self.__remove_old_files(ftphost)
        timedelta = datetime.datetime.now() - starttime
        self.log.write('done. (took ' + str(total_seconds(timedelta)) +
                       's) \n')

        return True
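The repeated start-time/duration logging in this example could be factored into a small context manager. A minimal sketch, assuming `total_seconds` is the script's compatibility shim for timedelta.total_seconds():

import contextlib
import datetime

@contextlib.contextmanager
def timed(log, label):
    # Writes '<label>... done. (took Ns)' around the wrapped block,
    # matching the repeated pattern in the example above.
    log.write(label + '... ')
    start = datetime.datetime.now()
    yield
    delta = datetime.datetime.now() - start
    log.write('done. (took ' + str(total_seconds(delta)) + 's) \n')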
Example #4
 def _checkout(self, commit, input_work_dir):
     git.reset(opts=['--hard', commit], tree=input_work_dir)
     git.clean(opts=['-fdxq'], tree=input_work_dir)
     git.submodule(['--quiet', 'sync'], tree=input_work_dir)
     git.submodule(['--quiet', 'update'], tree=input_work_dir)
     git.submodule(['--quiet', 'foreach', 'git', 'reset', '-q', '--hard'],
                   tree=input_work_dir)
     git.submodule(['--quiet', 'foreach', 'git', 'clean', '-fdxq'],
                   tree=input_work_dir)
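The same reset-and-clean sequence, including submodules, expressed as direct git invocations (a sketch; the wrapper's `opts` and `tree` keywords are assumed to map to argv and the working directory):

import subprocess

def checkout_clean(commit, work_dir):
    for argv in (
            ['git', 'reset', '--hard', commit],
            ['git', 'clean', '-fdxq'],
            ['git', 'submodule', '--quiet', 'sync'],
            ['git', 'submodule', '--quiet', 'update'],
            ['git', 'submodule', '--quiet', 'foreach',
             'git', 'reset', '-q', '--hard'],
            ['git', 'submodule', '--quiet', 'foreach',
             'git', 'clean', '-fdxq'],
    ):
        subprocess.check_call(argv, cwd=work_dir)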
Example #5
    def __call__(self):
        # TODO: Exception safety
        # TODO: If a branch name is given, checkout the branch from remote
        # TODO: Reset back to 'origin/master' afterwards
        git.reset(self.revision)
        revhash = git.id()

        # TODO: Use same content streams for all architectures
        upload_jobs = []
        for arch in arches.arches:
            date = time.strftime('%Y%m%d')
            filename = '%s-snapshot-%s-%s-%s' % (self.build_type, date,
                                                 revhash[:10], arch)

            try:
                archive_stream = StringIO.StringIO()
                archive_obj = archive.Archive(arch, archive_stream)

                if arch.startswith('darwin'):
                    result, uuid = autobuild.obtain(self.amqp_connection,
                                                    revhash, arch,
                                                    [self.build_type])
                    name, stream = result[0]
                    archive_obj.add(name, stream.read())
                else:
                    if self.build_type == 'openclonk':
                        for name, stream in contentiter.ContentIter(
                                groupcontent.snapshot):
                            archive_obj.add(name, stream.read())

                    arch_iter = architer.ArchIter(self.amqp_connection, arch,
                                                  revhash, self.build_type)
                    for name, stream in arch_iter:
                        archive_obj.add(name, stream.read())
                    uuid = arch_iter.uuid

                archive_filename = archive_obj.get_filename(filename)
                archive_obj.close()
                archive_stream.seek(0)

                upload_jobs.append(
                    (archive_filename, archive_stream, uuid, arch))
            except autobuild.AutobuildException as ex:
                # make an entry for "failed build"
                archive_filename = archive_obj.get_filename(filename)
                upload_jobs.append((archive_filename, None, ex.uuid, arch))

        uploader = upload.Uploader(self.log, self.dry_release)
        for archive_filename, archive_stream, uuid, arch in upload_jobs:
            if archive_stream is not None:  # Needed to skip mape osx build(?)
                uploader.nightly_file(self.build_type, archive_filename,
                                      archive_stream, uuid, revhash[:10], arch)

        return True
Example #6
def resetGitRepo( repoPath ):
    repo = Repo( repoPath, odbt=GitCmdObjectDB )
    branchName = str( repo.active_branch )

    print 'resetting ' + repoPath + ' to remote HEAD...'

    git = repo.git
    # If we checked out from a tag, branchName is a tag name in that case
    if branchName[:4] == 'tag_':
        git.reset( '--hard', branchName[4:] )
    else:
        git.reset( '--hard', 'origin/' + branchName)

    return
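GitPython also exposes the same operation through its object API instead of the `repo.git` command passthrough. A sketch of an equivalent hard reset (hypothetical function name; the tag handling above is omitted):

from git import Repo

def reset_to_remote_head(repo_path):
    repo = Repo(repo_path)
    branch_name = repo.active_branch.name
    # Equivalent of `git reset --hard origin/<branch>`.
    repo.head.reset('origin/' + branch_name, index=True, working_tree=True)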
Example #7
 def refresh(self):
     """
     Refresh (git pull) the repository, retrying the pull a few times.
     """
     git = self.__getGitCmd()
     # git.fetch()
     git.reset('--hard', 'HEAD')
     counter = 5
     while counter > 0:
         try:
             git.pull()
             return
         except Exception:
             counter -= 1
     self.__getLog().warning("Problem with pulling of "
                             "the repository '%s'" % self.name)
Example #8
 def _commit(self,
             commit,
             msg,
             env,
             output_work_dir,
             tree_id=None,
             parents=None,
             add_id=True):
     msg = self.filter_message(msg)
     if add_id:
         msg += '%s: %s' % (self._commit_id_prefix, commit.tree_id)
     if tree_id:
         assert parents is not None
         new_commit = git.commit_tree(tree_id,
                                      parents,
                                      msg,
                                      tree=output_work_dir,
                                      env=env)
         git.reset(opts=['--hard', new_commit], tree=output_work_dir)
     else:
         assert parents is None
         git.commit(msg, env=env, opts=['-a', '-q'], tree=output_work_dir)
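The tree_id branch above uses commit plumbing: it writes a commit object for a prepared tree, then moves the branch to it. In plain git terms this is roughly (a sketch with subprocess and a single parent; the project's wrapper presumably does the same):

import subprocess

def commit_tree_and_reset(tree_id, parent, message, work_dir, env):
    # `git commit-tree` creates the commit object and prints its hash.
    new_commit = subprocess.check_output(
        ['git', 'commit-tree', tree_id, '-p', parent, '-m', message],
        cwd=work_dir, env=env).decode().strip()
    # Point the current branch and working tree at the new commit.
    subprocess.check_call(['git', 'reset', '--hard', new_commit],
                          cwd=work_dir)
    return new_commit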
Example #9
	def __call__(self):
		self.log.write('New changesets have been pushed.\n')

		# See if the push changed something in the master branch
		git.reset('origin/master')
		current_id = git.id()
		git.fetch()
		git.reset(self.revision)

		if current_id != self.revision:
			self.log.write('The master branch has new commits.\n')

			# Make a new development snapshot
			builder = snapshotbuilder.SnapshotBuilder(self.amqp_connection, self.revision, self.log, 'openclonk', False)
			# TODO: Remove all other snapshot builders from the queue
			self.queue.put(50, builder)

			# Also make a new mape build. In principle we could do this only if
			# something in the mape directory or any of the other files used by
			# mape changed, but let's keep it simple here.
			builder = snapshotbuilder.SnapshotBuilder(self.amqp_connection, self.revision, self.log, 'mape', False)
			# TODO: Remove all other snapshot builders from the queue
			self.queue.put(70, builder)

			# See if something in the docs directory has changed
			log = git.log('docs', current_id, self.revision, 'oneline')
			if len(log) > 1 or (len(log) == 1 and log[0] != current_id):
				# TODO: Remove all other doc builders from the queue
				builder = docbuilder.DocBuilder(self.revision, self.log)
				self.queue.put(80, builder)

		else:
			self.log.write('The master branch has no new commits.\n')

		# TODO: Make a release if the version changed

		return True
Example #10
    return bionic_tags[-1]['name']


docker_tag = "ahonnecke/dad-base"
base_dockerfile = "/Users/ahonnecke/Code/repos/web-batch/docker/Dockerfile.base"

chrome_version = get_chrome_driver_version()
print(f'Latest chrome version {chrome_version}')

bionic_tag = get_bionic_version()
print(f'Latest bionic version {bionic_tag}')

web_repo = git.Repo("/Users/ahonnecke/Code/repos/web-batch/")
git = web_repo.git

git.reset('--hard', 'upstream/master')
git.checkout('upstream/master')

call(['git', 'fetch', '--all'])
git.fetch('--all')

with open(base_dockerfile, 'r') as reader:
    content = reader.read()
    content_new = re.sub(
        'FROM ubuntu:.*',
        r'FROM ubuntu:' + str(bionic_tag),
        content,
        flags=re.M
    )

with open(base_dockerfile, "w") as writer:
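    # The snippet is truncated at the final write; presumably it writes
    # content_new back to the Dockerfile, i.e. something like:
    writer.write(content_new)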
Example #11
chrome_version = get_chrome_driver_version()
print(f'Latest chrome version {chrome_version}')

ubuntu_tag = get_bionic_version()
# ubuntu_tag = get_ubuntu_version()
print(f'Latest version {ubuntu_tag}')

ts = int(time.time())
tag = f'{IMGBASENAME}-{ubuntu_tag}-{chrome_version}-{ts}'
remote_tag = f'{GITUSERNAME}/{tag}'

repo_instance = git.Repo(repo_path)
git = repo_instance.git

if not args.keep:
    git.reset('--hard', SRCPATH)
    git.checkout(SRCPATH)

os.chdir(repo_path)
call(['git', 'fetch', '--all'])
git.fetch('--all')


def update_base_dockerfile(dockerfile):
    print('Updating the base dockerfile')
    with open(dockerfile, 'r') as reader:
        content = reader.read()
        content_new = re.sub(
            'FROM ubuntu:.* as chrome',
            r'FROM ubuntu:' + str(ubuntu_tag) + ' as chrome',
            content,
Example #12
else:

    @contextmanager
    def git_auth():
        """
        No-op context manager.
        """
        yield

    remote = f'https://{token}@github.com/{os.environ["GITHUB_REPOSITORY"]}'
git.remote('add', 'gh-token', remote)
with git_auth():
    git.fetch('gh-token')
    # reset will fail if the 'generated-documentation' branch doesn't
    # exist yet. That's fine, it will exist after our push. Just note
    # the error and move on.
    try:
        git.reset('gh-token/generated-documentation')
    except GitCommandError:
        print(NOTICE + "Couldn't git reset generated-documentation." + ENDC)
        print(NOTICE +
              "This error is expected if the branch doesn't exist yet." + ENDC)

    print(INFO + "Running 'mkdocs gh-deploy'." + ENDC)
    os.chdir(docs_build_dir)
    rslt = os.system(
        'mkdocs gh-deploy --verbose --clean --remote-name gh-token '
        '--remote-branch generated-documentation')
if rslt != 0:
    print(ERROR + "'mkdocs gh-deploy' failed." + ENDC)
    sys.exit(1)
Example #13
	def __call__(self):
		if self.dry_release:
			self.log.write('Dry-Releasing revision %s...\n' % self.revision)
		else:
			self.log.write('Releasing revision %s...\n' % self.revision)

		# Update to revision and get hexadecimal ID.
		# TODO: Reset back to 'origin/master' afterwards

		# TODO: The following could be checked more easily, maybe...
		# If the revision contains a letter that cannot appear in a hex
		# commit hash, treat it as a branch name and prefix 'origin/'.
		prefix = ''
		if not self.revision.startswith('v'):
			for x in 'ghijklmnopqrstuvwxyz':
				if len(prefix) == 0 and x in self.revision:
					prefix = 'origin/'

		git.fetch()
		git.reset('%s%s' % (prefix, self.revision))
		revision = git.id()[:12]

		(major, minor) = self.parse_version_file('Version.txt')

		self.log.write('==> Version %d.%d\n' % (major, minor))

		dry_suffix = ''
		if self.dry_release: dry_suffix = '-dry'
		archive = os.path.join(self.archive_dir, '%d.%d%s' % (major, minor, dry_suffix))

		if os.path.exists(archive):
			self.log.write('Archive directory %s exists already. Clearing...\n' % archive)
			shutil.rmtree(archive)

		os.mkdir(archive)

		# Copy game content to archive
		self.log.write('Copying and packing game content to archive...\n')
		content = [] # game content
		others  = [] # other misc. non-architecture dependent files
		for filename, stream in contentiter.ContentIter(groupcontent.release):
			self.log.write('%s...\n' % filename)
			if contentiter.ContentIter.is_group_file(filename):
				content.append(filename)
			else:
				others.append(filename)

			destination = os.path.join(archive, filename)
			_create_and_open(destination).write(stream.read()) # TODO: copyfileobj?

		# Create architecture specific files:
		all_files = {}
		for arch in arches.arches:
			# Obtain clonk and c4group binaries and dependencies and save in archive/$arch
			self.log.write('Creating architecture dependent files for %s...\n' % arch)

			archdir = os.path.join(archive, arch)
			os.mkdir(archdir)

			binaries = []
			if arch.startswith('darwin'):
				result, uuid = autobuild.obtain(self.amqp_connection, revision, arch, ['openclonk'])
				filename, stream = result[0]
				_create_and_open(os.path.join(archdir, filename)).write(stream.read())
				binaries.append(filename)
			else:
				# Copy both binaries and dependencies into archive. 
				for filename, stream in architer.ArchIter(self.amqp_connection, arch, revision, 'openclonk'):
					_create_and_open(os.path.join(archdir, filename)).write(stream.read())
					if architer.ArchIter.is_executable(filename):
						os.chmod(os.path.join(archdir, filename), 0755)
					binaries.append(filename)

			# Create distribution directory and copy both common and
			# architecture dependent files there.
			distdir = os.path.join(archdir, 'openclonk-%d.%d' % (major, minor))
			os.mkdir(distdir)
			if not arch.startswith('darwin'):
				for filename in content + others:
					_ensure_dir_exists(os.path.dirname(os.path.join(distdir, filename)))
					shutil.copy(os.path.join(archive, filename), os.path.join(distdir, filename))
			for filename in binaries:
				_ensure_dir_exists(os.path.dirname(os.path.join(distdir, filename)))
				shutil.copy(os.path.join(archdir, filename), os.path.join(distdir, filename))

			# Create full installation package
			file = self.pack_full_installation(distdir, arch)
			all_files[arch] = file

		# TODO: Create a source tarball

		uploader = upload.Uploader(self.log, self.dry_release)
		
		# TODO uncomment when source tarball created
		#uploader.release_file(source_package_filename, (major, minor))
		
		for arch,file in all_files.items():
			uploader.release_binaries(file, arch, (major, minor))
			os.unlink(file)

		# Remove the archive if this was a dry release
		if self.dry_release:
			shutil.rmtree(archive)
		return True
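_create_and_open and _ensure_dir_exists are helpers defined elsewhere in the same script. A minimal sketch of what they presumably do:

import os

def _ensure_dir_exists(path):
    if path and not os.path.isdir(path):
        os.makedirs(path)

def _create_and_open(filename):
    # Create the parent directories, then open the file for writing.
    _ensure_dir_exists(os.path.dirname(filename))
    return open(filename, 'wb')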