def update_submodules(location, git_remote=None, use_upstream=False, reference=None):
    """Update git submodules on target machines.

    :param location: repo directory to operate in
    :param git_remote: remote to remap submodules to (required unless
        ``use_upstream`` is set)
    :param use_upstream: keep the upstream submodule URLs as-is
    :param reference: optional local repo to borrow objects from
    :raises ValueError: if neither a remote nor upstream mode is given
    """
    if not use_upstream and git_remote is None:
        raise ValueError("Must set git_remote if not using upstream")
    ensure_dir(location)
    logger = utils.get_logger()
    with utils.cd(location):
        logger.debug('Fetch submodules')
        if use_upstream:
            logger.debug('Using upstream submodules')
        else:
            logger.debug('Remapping submodule %s to %s', location, git_remote)
            remap_submodules(location, git_remote)
        cmd = append_jobs_arg(['update', '--init', '--recursive'])
        # --reference needs git >= 2.x to behave with submodules
        if reference is not None and GIT_VERSION[0] > 1:
            logger.debug('Using --reference repository: %s', reference)
            ensure_dir(reference)
            cmd.extend(['--reference', reference])
        git.submodule(*cmd)
def clean_tags(location, max_tags):
    """Make sure there aren't more than max_tags."""
    ensure_dir(location)
    with utils.cd(location):
        tags = git('for-each-ref', '--sort=taggerdate',
                   '--format=%(refname)', 'refs/tags').splitlines()
        old_tags = []
        # Oldest tags come first; drain until only max_tags remain.
        while len(tags) > max_tags:
            tag = tags.pop(0)
            if tag.startswith('refs/tags/'):
                tag = tag[len('refs/tags/'):]
            # Don't delete tags that aren't ours
            if not tag.startswith(TAG_PREFIX):
                continue
            old_tags.append(tag)
        # Nothing of ours to delete — bail early
        if not old_tags:
            return
        git.tag('-d', *old_tags)
def update_submodules(location, git_remote=None, use_upstream=False, reference=None):
    """Update git submodules on target machines."""
    if git_remote is None and not use_upstream:
        raise ValueError("Must set git_remote if not using upstream")
    ensure_dir(location)
    log = utils.get_logger()
    with utils.cd(location):
        log.debug('Fetch submodules')
        if not use_upstream:
            log.debug('Remapping submodule %s to %s', location, git_remote)
            remap_submodules(location, git_remote)
        else:
            log.debug('Using upstream submodules')
        args = append_jobs_arg(['update', '--init', '--recursive'])
        wants_reference = reference is not None and GIT_VERSION[0] > 1
        if wants_reference:
            log.debug('Using --reference repository: %s', reference)
            ensure_dir(reference)
            args += ['--reference', reference]
        git.submodule(*args)
def update_submodule_update_strategy(self, path):
    """Configure every submodule under *path* to update via rebase."""
    with utils.cd(path):
        # Build the per-submodule git-config command; $name is expanded
        # by `git submodule foreach`, not by the Python format call.
        config_cmd = '/usr/bin/git -C %s config ' % path
        config_cmd += 'submodule.$name.update rebase'
        foreach_cmd = "/usr/bin/git submodule foreach --recursive '{}'".format(
            config_cmd)
        subprocess.call(foreach_cmd, shell=True)
def git_am(self, patchfile, dirname):
    """Apply a patch"""
    with utils.cd(dirname):
        # Dry-run first; return the failing result untouched so the
        # caller can inspect the exit code.
        check = sh.git('apply', '--no-3way', '--check', patchfile)
        if check.exit_code != 0:
            return check
        return sh.git('am', '--no-3way', patchfile)
def default_ignore(location):
    """Create a default .gitignore file."""
    remove_all_ignores(location)
    contents = '\n'.join(DEFAULT_IGNORE)
    with utils.cd(location):
        with open('.gitignore', 'w+') as fh:
            fh.write(contents)
def main(self, *extra_args):
    """ Apply security patches """
    logger = self.get_logger()
    branch = self.arguments.branch
    # Normalise 'php-1.xx' style input down to the bare branch name
    if branch.startswith('php-'):
        branch = branch[4:]
    branch = branch.rstrip('/')
    stage_dir = self.config.get('stage_dir', './')
    self.patchdir = os.path.join('/srv/patches/', branch)
    self.checkdir('patch', self.patchdir)
    self.branchdir = os.path.join(stage_dir, 'php-%s' % branch)
    self.checkdir('branch', self.branchdir)
    with utils.cd(self.branchdir):
        for base, path, filename in self.get_patches():
            repo_dir = './' if path == 'core' else os.path.join('./', path)
            patchfile = os.path.join(base, path, filename)
            try:
                self.apply_patch(patchfile, repo_dir)
            except PatchError as patch_err:
                # Park the failing patch aside so re-runs skip it
                os.rename(patch_err.patch, patch_err.patch + ".failed")
                logger.exception(patch_err)
def main(self, *extra_args): """Do all kinds of weird stuff for beta.""" # These are all joined with stage_dir, so '' becomes # /srv/mediawiki-staging/, etc. # php-master is like php-1.xx.y-wmf.z we use in production but is # tracking...master ;-) pull_paths = [ '', 'php-master', 'php-master/extensions', 'php-master/skins', 'php-master/vendor', ] stage_dir = self.config['stage_dir'] for path in pull_paths: with utils.cd(os.path.join(stage_dir, path)): subprocess.check_call(['/usr/bin/git', 'pull']) subprocess.check_call([ '/usr/bin/git', 'submodule', 'update', '--init', '--recursive', '--jobs', '8', '--rebase' ]) subprocess.check_call( ['/usr/bin/git', 'submodule', 'update', '--remote', 'portals'])
def default_ignore(location):
    """Create a default .gitignore file."""
    remove_all_ignores(location)
    body = '\n'.join(DEFAULT_IGNORE)
    with utils.cd(location):
        with open('.gitignore', 'w+') as gitignore:
            gitignore.write(body)
def main(self, *extra_args):
    """ Apply security patches """
    logger = self.get_logger()
    branch = self.arguments.branch
    if branch.startswith('php-'):
        branch = branch[4:]
    branch = branch.rstrip('/')
    stage_dir = self.config.get('stage_dir', './')
    # Resolve and validate the patch and branch directories up front
    self.patchdir = os.path.join('/srv/patches/', branch)
    self.checkdir('patch', self.patchdir)
    self.branchdir = os.path.join(stage_dir, 'php-%s' % branch)
    self.checkdir('branch', self.branchdir)
    with utils.cd(self.branchdir):
        for base, path, filename in self.get_patches():
            # 'core' applies at the checkout root; everything else is a
            # sub-repo path relative to it.
            repo_dir = './' if path == 'core' else os.path.join('./', path)
            patchfile = os.path.join(base, path, filename)
            try:
                self.apply_patch(patchfile, repo_dir)
            except PatchError as failure:
                os.rename(failure.patch, failure.patch + ".failed")
                logger.exception(failure)
def master_stuff(dest_dir):
    """If we're operating on a master branch, do some extra weird stuff."""
    repos = {
        'extensions': 'mediawiki/extensions',
        'vendor': 'mediawiki/vendor',
        'skins': 'mediawiki/skins',
    }
    for dest, upstream in repos.items():
        path = os.path.join(dest_dir, dest)
        url = SOURCE_URL + upstream
        # After a plain core checkout these exist as empty submodule
        # placeholder dirs; turn them into real repos pointing at origin.
        if os.path.exists(path):
            with utils.cd(path):
                subprocess.check_call(['/usr/bin/git', 'init'])
                subprocess.check_call(
                    ['/usr/bin/git', 'remote', 'add', 'origin', url])
        git.fetch(path, url)
        git.checkout(path, 'master')
        git.update_submodules(path, use_upstream=True)
        update_update_strategy(path)
def update_update_strategy(path):
    """For all submodules, update the merge strategy."""
    with utils.cd(path):
        # $name is substituted by `git submodule foreach`, not Python
        config_cmd = '/usr/bin/git -C %s config ' % path
        config_cmd += 'submodule.$name.update rebase'
        subprocess.call(
            "/usr/bin/git submodule foreach --recursive '%s'" % config_cmd,
            shell=True)
def update_update_strategy(path):
    """For all submodules, update the merge strategy."""
    with utils.cd(path):
        inner = ('/usr/bin/git -C %s config ' % path) + \
            'submodule.$name.update rebase'
        foreach = "/usr/bin/git submodule foreach --recursive '%s'" % inner
        subprocess.call(foreach, shell=True)
def clean_tags(location, max_tags):
    """Make sure there aren't more than max_tags."""
    ensure_dir(location)
    with utils.cd(location):
        refs = git('for-each-ref', '--sort=taggerdate',
                   '--format=%(refname)', 'refs/tags').splitlines()
        doomed = []
        while len(refs) > max_tags:
            ref = refs.pop(0)
            if ref.startswith('refs/tags/'):
                ref = ref[10:]
            # Only our own deploy tags are eligible for deletion
            if ref.startswith(TAG_PREFIX):
                doomed.append(ref)
        if doomed:
            git.tag('-d', *doomed)
def cleanup_branch(self, branch, keep_static):
    """Remove a deployed branch from masters and apaches.

    :param branch: branch name (e.g. '1.31.0-wmf.2')
    :param keep_static: when truthy, skip gerrit branch pruning and pass
        through to clean_command so static assets are preserved
    """
    stage_dir = os.path.join(self.config['stage_dir'], 'php-%s' % branch)
    deploy_dir = os.path.join(self.config['deploy_dir'], 'php-%s' % branch)
    if not keep_static:
        gerrit_prune_cmd = [
            'git', 'push', 'origin', '--delete', 'wmf/%s' % branch
        ]
        logger = self.get_logger()
        with log.Timer('prune-git-branches', self.get_stats()):
            # Prune all the submodules' remote branches
            for submodule in git.list_submodules(stage_dir):
                # list_submodules lines look like ' <sha> <path>';
                # take the path component
                submodule_path = submodule.lstrip(' ').split(' ')[1]
                with utils.cd(os.path.join(stage_dir, submodule_path)):
                    if subprocess.call(gerrit_prune_cmd) != 0:
                        logger.info(
                            'Failed to prune submodule branch for %s' %
                            submodule)
            # Prune core last
            with utils.cd(stage_dir):
                if subprocess.call(gerrit_prune_cmd) != 0:
                    logger.info('Failed to prune core branch')
    # Prune cache junk from masters used by l10nupdate
    self.execute_remote('clean-masters-l10nupdate-cache',
                        self._get_master_list(),
                        ['sudo', '-u', 'l10nupdate', 'rm', '-fR',
                         '/var/lib/l10nupdate/caches/cache-%s' % branch])
    # Prune junk from masters owned by l10nupdate
    self.execute_remote('clean-masters-l10nupdate',
                        self._get_master_list(),
                        ['sudo', '-u', 'l10nupdate', 'find', stage_dir,
                         '-user', 'l10nupdate', '-delete'])
    # Update masters
    self.execute_remote('clean-masters', self._get_master_list(),
                        self.clean_command(stage_dir, keep_static))
    # Update apaches
    self.execute_remote('clean-apaches', self._get_target_list(),
                        self.clean_command(deploy_dir, keep_static))
def cleanup_branch(self, branch, delete):
    """
    Given a branch, go through the cleanup proccess on the master.

    (1) Prune git branches [if deletion]
    (2) Remove l10nupdate cache
    (3) Remove l10n cache
    (4) Remove l10n bootstrap file
    (5) Remove some branch files [all if deletion]
    (6) Remove security patches [if deletion]
    """
    if not os.path.isdir(self.branch_stage_dir):
        raise ValueError('No such branch exists, aborting')
    with log.Timer('clean-l10nupdate-cache', self.get_stats()):
        utils.sudo_check_call(
            'www-data',
            'rm -fR /var/lib/l10nupdate/caches/cache-%s' % branch
        )
    with log.Timer('clean-l10nupdate-owned-files', self.get_stats()):
        utils.sudo_check_call(
            'l10nupdate',
            'find %s -user l10nupdate -delete' % self.branch_stage_dir
        )
    # NOTE(review): unlike the other timers this one omits
    # self.get_stats() — confirm whether that is intentional.
    with log.Timer('clean-ExtensionMessages'):
        ext_msg = os.path.join(self.config['stage_dir'], 'wmf-config',
                               'ExtensionMessages-%s.php' % branch)
        self._maybe_delete(ext_msg)
    logger = self.get_logger()
    if delete:
        # Moved behind a feature flag until T218750 is resolved
        if self.arguments.delete_gerrit_branch:
            git_prune_cmd = [
                'git', 'push', 'origin', '--quiet', '--delete',
                'wmf/%s' % branch
            ]
            with log.Timer('prune-git-branches', self.get_stats()):
                # Prune all the submodules' remote branches
                with utils.cd(self.branch_stage_dir):
                    # '||:' makes each per-submodule push a no-op on
                    # failure so the foreach keeps going
                    submodule_cmd = 'git submodule foreach "{} ||:"'.format(
                        ' '.join(git_prune_cmd))
                    subprocess.check_output(submodule_cmd, shell=True)
                    if subprocess.call(git_prune_cmd) != 0:
                        logger.info('Failed to prune core branch')
        with log.Timer('removing-local-copy'):
            self._maybe_delete(self.branch_stage_dir)
        with log.Timer('cleaning-unused-patches', self.get_stats()):
            self._maybe_delete(os.path.join('/srv/patches', branch))
    else:
        with log.Timer('cleaning-unused-files', self.get_stats()):
            for rmdir in DELETABLE_DIRS:
                self._maybe_delete(
                    os.path.join(self.branch_stage_dir, rmdir)
                )
def init(location):
    """Initialize a git repository at *location*, creating the directory
    if needed; returns the stripped output of ``git init``."""
    if not os.path.exists(location):
        utils.mkdir_p(location)
    if not os.path.isdir(location):
        raise IOError(errno.ENOENT, 'Location is not a directory', location)
    with utils.cd(location):
        return git.init().strip()
def init(location):
    """Run ``git init`` in *location* and return its stripped output."""
    if not os.path.exists(location):
        utils.mkdir_p(location)
    if not os.path.isdir(location):
        raise IOError(errno.ENOENT, 'Location is not a directory', location)
    with utils.cd(location):
        output = git.init()
    return output.strip()
def last_deploy_tag(location):
    """Finds the last tag to use for this deployment"""
    ensure_dir(location)
    with utils.cd(location):
        tags = git.tag('--list', os.path.join(TAG_PREFIX, '*')).splitlines()
    # Lexicographic max == first element of a reverse sort
    if tags:
        return max(tags)
    return None
def last_deploy_tag(location):
    """Finds the last tag to use for this deployment"""
    ensure_dir(location)
    with utils.cd(location):
        tags = git.tag('--list', os.path.join(TAG_PREFIX, '*')).splitlines()
        tags.sort(reverse=True)
        return tags[0] if tags else None
def apply_patch(self, patch, repo_dir='./'):
    """Check and (unless --check-only) apply *patch* in *repo_dir*."""
    with utils.cd(repo_dir):
        self.check_patch(patch)
        # In check-only mode the dry-run above is all we do
        if self.arguments.check_only:
            return
        self.get_logger().info('In %s, Applying patch: %s'
                               % (repo_dir, patch))
        try:
            subprocess.check_call(['git', 'am', '-3', patch])
        except subprocess.CalledProcessError as ex:
            raise PatchError(
                'Patch %s failed to apply: %s' % (patch, ex.output), patch)
def apply_patch(self, patch, repo_dir='./'):
    """Dry-run then apply a security patch inside *repo_dir*."""
    with utils.cd(repo_dir):
        self.check_patch(patch)
        if not self.arguments.check_only:
            logger = self.get_logger()
            logger.info('In %s, Applying patch: %s' % (repo_dir, patch))
            try:
                subprocess.check_call(['git', 'am', '-3', patch])
            except subprocess.CalledProcessError as ex:
                failure_msg = 'Patch %s failed to apply: %s' % (patch,
                                                                ex.output)
                raise PatchError(failure_msg, patch)
def fat_isinitialized(location):
    """Returns whether git-fat has been initialized for the given directory."""
    with utils.cd(location):
        with open(os.devnull, 'w') as devnull:
            # The config lookup exits non-zero when the filter is unset
            try:
                git.config('--local', '--get', 'filter.fat.smudge',
                           _out=devnull)
            except ErrorReturnCode:
                return False
            return True
def tag_repo(deploy_info, location=None):
    """Create (force-move) the deploy tag in the deploy repo.

    :param deploy_info: dict with 'user', 'timestamp', 'tag' and 'commit'
    :param location: git directory; defaults to the current working
        directory *at call time*
    """
    # BUG FIX: the old default ``location=os.getcwd()`` was evaluated
    # once at import time, silently pinning the default to whatever
    # directory the module happened to be imported from.  Resolve the
    # cwd lazily instead.
    if location is None:
        location = os.getcwd()
    ensure_dir(location)
    with utils.cd(location):
        cmd = """
/usr/bin/git tag -fa \\
-m 'user {0}' \\
-m 'timestamp {1}' -- \\
{2} {3}
""".format(deploy_info['user'], deploy_info['timestamp'],
           deploy_info['tag'], deploy_info['commit'])
        subprocess.check_call(cmd, shell=True)
def fat_isinitialized(location):
    """Returns whether git-fat has been initialized for the given directory."""
    with utils.cd(location):
        with open(os.devnull, 'w') as sink:
            try:
                git.config('--local', '--get', 'filter.fat.smudge',
                           _out=sink)
                return True
            except ErrorReturnCode:
                return False
def largefile_pull(location, implementor):
    """Syncs all git-fat or git-lfs objects for the given repo directory.

    :param location: Repository to work in
    :param implementor: What implementation to pull with (git-lfs, git-fat)
    :raises ValueError: for any other implementor
    """
    with utils.cd(location):
        if implementor == FAT:
            fat_init(location)
            git.fat('pull')
        elif implementor == LFS:
            git.lfs('pull')
        else:
            raise ValueError('Must be passed one of lfs or fat')
def largefile_pull(location, implementor):
    """Syncs all git-fat or git-lfs objects for the given repo directory.

    :param location: Repository to work in
    :param implementor: What implementation to pull with (git-lfs, git-fat)
    """
    with utils.cd(location):
        if implementor == LFS:
            git.lfs('pull')
            return
        if implementor == FAT:
            # git-fat must be initialized before the first pull
            fat_init(location)
            git.fat('pull')
            return
        raise ValueError('Must be passed one of lfs or fat')
def update_deploy_head(deploy_info, location):
    """updates .git/DEPLOY_HEAD file

    :param deploy_info: current deploy info to write to file as YAML
    :param location: git directory location
    """
    logger = utils.get_logger()
    ensure_dir(location)
    with utils.cd(location):
        deploy_file = os.path.join(location, '.git', 'DEPLOY_HEAD')
        logger.debug('Creating %s', deploy_file)
        # block style (default_flow_style=False) keeps the file readable
        with open(deploy_file, 'w+') as deployfile:
            deployfile.write(yaml.dump(deploy_info, default_flow_style=False))
        # FIX: removed the redundant explicit close(); the context
        # manager already closes the file on exit.
def update_deploy_head(deploy_info, location):
    """updates .git/DEPLOY_HEAD file

    :param deploy_info: current deploy info to write to file as YAML
    :param location: git directory location
    """
    logger = utils.get_logger()
    ensure_dir(location)
    with utils.cd(location):
        deploy_file = os.path.join(location, '.git', 'DEPLOY_HEAD')
        logger.debug('Creating %s', deploy_file)
        with open(deploy_file, 'w+') as deployfile:
            deployfile.write(yaml.dump(deploy_info, default_flow_style=False))
        # FIX: dropped the explicit deployfile.close(); closing is the
        # with-block's job and the extra call was dead weight.
def master_stuff(self):
    """Set up master-tracking checkouts of extensions/vendor/skins."""
    repos = {"extensions": "mediawiki/extensions",
             "vendor": "mediawiki/vendor",
             "skins": "mediawiki/skins"}
    for dest, upstream in repos.items():
        path = os.path.join(self.dest_dir, dest)
        url = self.gerrit + upstream
        if os.path.exists(path):
            with utils.cd(path):
                # BUG FIX: these calls previously passed a list together
                # with shell=True, which makes /bin/sh execute only the
                # first element ("/usr/bin/git") and drop the remaining
                # arguments.  Run the argument vectors directly.
                subprocess.check_call(["/usr/bin/git", "init"])
                subprocess.check_call(
                    ["/usr/bin/git", "remote", "add", "origin", url])
        git.fetch(path, url)
        git.checkout(path, "master")
        git.update_submodules(path, use_upstream=True)
        self.update_submodule_update_strategy(path)
def main(self, *extra_args):
    """ Checkout next MediaWiki """
    self.branch = self.arguments.branch
    self.dest_dir = os.path.join(
        self.config['stage_dir'],
        '{}{}'.format(self.arguments.prefix, self.branch)
    )
    # NOTE(review): dict.keys()[0] only works on Python 2; on Python 3
    # dict views are not indexable — confirm the target interpreter.
    old_branch = self.active_wikiversions().keys()[0]
    copy_dir = os.path.join(
        self.config['stage_dir'],
        '{}{}'.format(self.arguments.prefix, old_branch)
    )
    if os.path.isdir(self.dest_dir):
        self.get_logger().info('Version already checked out')
        return 0
    # copy_dir is passed as the third argument to git.fetch — presumably
    # a --reference/copy source to speed up the clone; confirm in git.fetch.
    git.fetch(self.dest_dir, self.gerrit + 'mediawiki/core', copy_dir)
    with utils.cd(self.dest_dir):
        if subprocess.call(['/usr/bin/git', 'config',
                            'branch.autosetuprebase', 'always']) != 0:
            self.get_logger().warn('Unable to setup auto-rebase')
        checkout_version = 'master'
        if self.branch != 'master':
            checkout_version = 'wmf/%s' % self.branch
        git.checkout(self.dest_dir, checkout_version)
        if checkout_version == 'master':
            # Beta Cluster: wire up master-tracking sibling repos
            self.master_stuff()
        else:
            # Production: plain upstream submodules, rebase strategy
            git.update_submodules(self.dest_dir, use_upstream=True)
            self.update_submodule_update_strategy(self.dest_dir)
        self.write_localsettings()
        self.create_startprofiler_symlink()
        # l10n cache dir must be writable by the l10nupdate user
        cache_dir = os.path.join(self.dest_dir, 'cache')
        os.chmod(cache_dir, 0o777)
        utils.sudo_check_call(
            'l10nupdate', 'mkdir "%s"' % os.path.join(cache_dir, 'l10n'))
        self.get_logger().info(
            'MediaWiki %s successfully checked out.' % checkout_version)
def tag_repo(deploy_info, location=None):
    """creates new tag in deploy repo

    :param deploy_info: dict with 'user', 'timestamp', 'tag' and 'commit'
    :param location: git directory; defaults to the cwd at call time
    """
    # BUG FIX: ``location=os.getcwd()`` as a default is evaluated once,
    # at function-definition time, so every defaulted call used the
    # import-time working directory.  Use a None sentinel instead.
    if location is None:
        location = os.getcwd()
    ensure_dir(location)
    with utils.cd(location):
        cmd = """
/usr/bin/git tag -fa \\
-m 'user {0}' \\
-m 'timestamp {1}' -- \\
{2} {3}
""".format(
            deploy_info['user'],
            deploy_info['timestamp'],
            deploy_info['tag'],
            deploy_info['commit']
        )
        subprocess.check_call(cmd, shell=True)
def main(self, *extra_args):
    """Pull the master-tracking checkouts and refresh their submodules."""
    pull_paths = [
        'portal-master',
        'php-master',
        'php-master/extensions',
        'php-master/skins',
        'php-master/vendor',
    ]
    for rel in pull_paths:
        path = os.path.join(self.config['stage_dir'], rel)
        with utils.cd(path):
            subprocess.check_call('/usr/bin/git pull', shell=True)
    for submodule in ['extensions', 'skins']:
        path = os.path.join(self.config['stage_dir'], 'php-master',
                            submodule)
        # FIX: removed the stray trailing comma that turned this call
        # statement into a one-element tuple expression (harmless at
        # runtime, but clearly unintended).
        git.update_submodules(path, use_upstream=True)
def main(self, *extra_args):
    """Pull master checkouts, then update extension/skin submodules."""
    pull_paths = [
        'portal-master',
        'php-master',
        'php-master/extensions',
        'php-master/skins',
        'php-master/vendor',
    ]
    for path in pull_paths:
        path = os.path.join(self.config['stage_dir'], path)
        with utils.cd(path):
            subprocess.check_call('/usr/bin/git pull', shell=True)
    for submodule in ['extensions', 'skins']:
        path = os.path.join(self.config['stage_dir'], 'php-master',
                            submodule)
        # FIX: dropped the spurious trailing comma after this call,
        # which wrapped the (discarded) return value in a tuple.
        git.update_submodules(path, use_upstream=True)
def check_patch_files(version, cfg):
    """Check to see if there are unmerged patch files from /srv/patches
    for a given revision.

    :param version: MediaWiki version string (e.g., '1.27.0-wmf.8')
    :param cfg: Scap configuration dict
    """
    logger = logging.getLogger('check_patch_files')
    # Patches should live in /srv/patches/[version]
    patch_path = cfg['patch_path']
    if patch_path is None:
        return
    version_base = os.path.join(patch_path, version)
    ext_dir = os.path.join(version_base, 'extensions')
    _, extensions, _ = next(os.walk(ext_dir))
    patches = utils.get_patches(['core'], version_base)
    patches.update(utils.get_patches(extensions, ext_dir))
    # A reverse dry-run succeeds only if the patch is already applied
    git_patch_check = ['/usr/bin/git', 'apply', '--check', '--reverse']
    version_dir = 'php-{}'.format(version)
    base_dir = os.path.join(cfg['stage_dir'], version_dir)
    for extension, diffs in patches.items():
        diff = '\n'.join(diffs)
        # BUG FIX: apply_dir used to be re-joined onto itself across
        # iterations, so the second and later non-core extensions were
        # checked inside a bogus nested path.  Compute it fresh per entry.
        if extension == 'core':
            apply_dir = base_dir
        else:
            apply_dir = os.path.join(base_dir, 'extensions', extension)
        with utils.cd(apply_dir):
            p = subprocess.Popen(git_patch_check,
                                 stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE)
            p.communicate(diff)
            if p.returncode > 0:
                logger.warning('Patch(s) for %s have not been applied.',
                               apply_dir)
def check_patch_files(version, cfg):
    """Check to see if there are unmerged patch files from /srv/patches
    for a given revision.

    :param version: MediaWiki version string (e.g., '1.27.0-wmf.8')
    :param cfg: Scap configuration dict
    """
    logger = logging.getLogger('check_patch_files')
    # Patches should live in /srv/patches/[version]
    patch_path = cfg['patch_path']
    if patch_path is None:
        return
    version_base = os.path.join(patch_path, version)
    ext_dir = os.path.join(version_base, 'extensions')
    _, extensions, _ = next(os.walk(ext_dir))
    patches = utils.get_patches(['core'], version_base)
    patches.update(utils.get_patches(extensions, ext_dir))
    git_patch_check = ['/usr/bin/git', 'apply', '--check', '--reverse']
    version_dir = 'php-{}'.format(version)
    core_dir = os.path.join(cfg['stage_dir'], version_dir)
    for extension, diffs in patches.items():
        diff = '\n'.join(diffs)
        # BUG FIX: previously the loop mutated apply_dir in place
        # (apply_dir = join(apply_dir, 'extensions', ext)), compounding
        # paths across iterations; derive the directory per extension.
        apply_dir = core_dir if extension == 'core' else os.path.join(
            core_dir, 'extensions', extension)
        with utils.cd(apply_dir):
            p = subprocess.Popen(
                git_patch_check, stdin=subprocess.PIPE,
                stdout=subprocess.PIPE)
            p.communicate(diff)
            if p.returncode > 0:
                logger.warning('Patch(s) for %s have not been applied.',
                               apply_dir)
def master_stuff(self):
    """Prepare master-tracking checkouts of the sibling mediawiki repos."""
    repos = {
        'extensions': 'mediawiki/extensions',
        'vendor': 'mediawiki/vendor',
        'skins': 'mediawiki/skins',
    }
    for dest, upstream in repos.items():
        path = os.path.join(self.dest_dir, dest)
        url = self.gerrit + upstream
        # Submodule placeholder dirs may already exist; turn them into
        # real repositories with an 'origin' remote.
        if os.path.exists(path):
            with utils.cd(path):
                subprocess.check_call(['/usr/bin/git', 'init'])
                subprocess.check_call(
                    ['/usr/bin/git', 'remote', 'add', 'origin', url])
        git.fetch(path, url)
        git.checkout(path, 'master')
        git.update_submodules(path, use_upstream=True)
        self.update_submodule_update_strategy(path)
def master_stuff(dest_dir):
    """If we're operating on a master branch, do some extra weird stuff."""
    repos = {
        'extensions': 'mediawiki/extensions',
        'vendor': 'mediawiki/vendor',
        'skins': 'mediawiki/skins',
    }
    for dest, upstream in repos.items():
        checkout_path = os.path.join(dest_dir, dest)
        remote_url = SOURCE_URL + upstream
        if os.path.exists(checkout_path):
            with utils.cd(checkout_path):
                subprocess.check_call(['/usr/bin/git', 'init'])
                subprocess.check_call(
                    ['/usr/bin/git', 'remote', 'add', 'origin', remote_url])
        git.fetch(checkout_path, remote_url)
        git.checkout(checkout_path, 'master')
        git.update_submodules(checkout_path, use_upstream=True)
        update_update_strategy(checkout_path)
def main(self, *extra_args):
    """Run deploy-mediawiki."""
    # Flatten local into git repo
    self.get_logger().info('scap deploy-mediawiki')
    deploy_dir = self.config['deploy_dir']
    git.default_ignore(deploy_dir)
    git.add_all(deploy_dir, message=self.arguments.message)
    git.garbage_collect(deploy_dir)
    scap = self.get_script_path()
    options = {'git_repo': deploy_dir}
    cmd = [scap, 'deploy', '-v']
    cmd += ['-D{}:{}'.format(key, val) for key, val in options.items()]
    cmd.append('--init')
    with utils.cd(deploy_dir):
        subprocess.check_call(cmd)
def main(self, *extra_args):
    """Run deploy-mediawiki."""
    # Flatten local into git repo
    self.get_logger().info('scap deploy-mediawiki')
    git.default_ignore(self.config['deploy_dir'])
    git.add_all(self.config['deploy_dir'], message=self.arguments.message)
    git.garbage_collect(self.config['deploy_dir'])
    scap = self.get_script_path()
    options = {
        'git_repo': self.config['deploy_dir'],
    }
    option_list = ['-D{}:{}'.format(x, y) for x, y in options.items()]
    # Assemble: scap deploy -v -Dgit_repo:<dir> --init
    cmd = [scap, 'deploy', '-v'] + option_list + ['--init']
    with utils.cd(self.config['deploy_dir']):
        subprocess.check_call(cmd)
def main(self, *extra_args):
    """ Checkout next MediaWiki """
    self.branch = self.arguments.branch
    self.dest_dir = os.path.join(self.config["stage_dir"],
                                 "{}{}".format(self.arguments.prefix,
                                               self.branch))
    if os.path.isdir(self.dest_dir):
        self.get_logger().info("Version already checked out")
        return 0
    git.fetch(self.dest_dir, self.gerrit + "mediawiki/core")
    with utils.cd(self.dest_dir):
        if subprocess.call(["/usr/bin/git", "config",
                            "branch.autosetuprebase", "always"]) != 0:
            self.get_logger().warn("Unable to setup auto-rebase")
        checkout_version = "master"
        if self.branch != "master":
            checkout_version = "wmf/%s" % self.branch
        git.checkout(self.dest_dir, checkout_version)
        if checkout_version == "master":
            # Beta Cluster: wire up master-tracking sibling repos
            self.master_stuff()
        else:
            # Production: upstream submodules with rebase update strategy
            git.update_submodules(self.dest_dir, use_upstream=True)
            self.update_submodule_update_strategy(self.dest_dir)
        self.write_localsettings()
        self.create_startprofiler_symlink()
        # cache/ must be world-writable so the l10nupdate user can
        # populate cache/l10n
        cache_dir = os.path.join(self.dest_dir, "cache")
        os.chmod(cache_dir, 0o777)
        utils.sudo_check_call("l10nupdate",
                              'mkdir "%s"' % os.path.join(cache_dir, "l10n"))
        self.get_logger().info("MediaWiki %s successfully checked out."
                               % checkout_version)
def main(self, *extra_args): """Do all kinds of weird stuff for beta.""" # These are all joined with stage_dir, so '' becomes # /srv/mediawiki-staging/, etc. # php-master is like php-1.xx.y-wmf.z we use in production but is # tracking...master ;-) pull_paths = [ '', 'php-master', 'php-master/extensions', 'php-master/skins', 'php-master/vendor', ] stage_dir = self.config['stage_dir'] for path in pull_paths: with utils.cd(os.path.join(stage_dir, path)): subprocess.check_call(['/usr/bin/git', 'pull']) subprocess.check_call(['/usr/bin/git', 'submodule', 'update', '--init', '--recursive', '--jobs', '8', '--rebase']) subprocess.check_call(['/usr/bin/git', 'submodule', 'update', '--remote', 'portals'])
def remote_exists(location, remote):
    """Check if remote exists in location"""
    ensure_dir(location)
    with utils.cd(location):
        # git config exits 0 only when the remote URL key is set
        return subprocess.call(
            '/usr/bin/git config --local --get remote.{}.url'.format(remote),
            shell=True) == 0
def main(self, *extra_args):
    """Checkout next MediaWiki."""
    dest_dir = os.path.join(
        self.config['stage_dir'],
        '{}{}'.format(self.arguments.prefix, self.arguments.branch)
    )
    checkout_version = 'master'
    if self.arguments.branch != 'master':
        checkout_version = 'wmf/%s' % self.arguments.branch
    reference_dir = None
    if checkout_version != 'master':
        # active_wikiversions() is already sorted by loose-version number,
        # we want the latest version if there's more than 1
        # NOTE(review): .keys()[-1] is a Python 2 idiom; dict views are
        # not indexable on Python 3 — confirm target interpreter.
        old_branch = self.active_wikiversions().keys()[-1]
        old_branch_dir = os.path.join(
            self.config['stage_dir'],
            '{}{}'.format(self.arguments.prefix, old_branch)
        )
        reference_dir = None
        if (os.path.exists(old_branch_dir)):
            reference_dir = old_branch_dir
        # Seed this branch's security patches from the previous branch
        patch_path = os.path.join('/srv/patches', self.arguments.branch)
        if not os.path.exists(patch_path):
            if os.path.exists(os.path.join('/srv/patches', old_branch)):
                shutil.copytree(
                    os.path.join('/srv/patches', old_branch),
                    os.path.join(patch_path)
                )
    if os.path.isdir(dest_dir):
        self.get_logger().info('Version already checked out')
        return 0
    self.get_logger().info('Fetching core to {}'.format(dest_dir))
    git.fetch(dest_dir, SOURCE_URL + 'mediawiki/core', reference_dir)
    with utils.cd(dest_dir):
        if subprocess.call(['/usr/bin/git', 'config',
                            'branch.autosetuprebase', 'always']) != 0:
            self.get_logger().warn('Unable to setup auto-rebase')
        # Parallel submodule fetches, capped at half the CPUs
        num_procs = str(max(multiprocessing.cpu_count() / 2, 1))
        if subprocess.call(['/usr/bin/git', 'config',
                            'submodule.fetchJobs', num_procs]) != 0:
            self.get_logger().warn('Unable to setup submodule fetch jobs')
    self.get_logger().info('Checkout {} in {}'.format(checkout_version,
                                                      dest_dir))
    git.checkout(dest_dir, checkout_version)
    if checkout_version == 'master':
        # Specific to Beta Cluster
        master_stuff(dest_dir)
    else:
        # Specific to production
        self.get_logger().info('Update submodules for {}'.format(dest_dir))
        git.update_submodules(dest_dir, use_upstream=True)
        update_update_strategy(dest_dir)
    self.get_logger().info('Creating LocalSettings.php stub')
    write_settings_stub(os.path.join(dest_dir, 'LocalSettings.php'))
    self.get_logger().info('Creating l10n cache dir')
    # cache/ must be writable by the l10nupdate user
    cache_dir = os.path.join(dest_dir, 'cache')
    os.chmod(cache_dir, 0o777)
    utils.sudo_check_call('l10nupdate',
                          'mkdir "%s"' % os.path.join(cache_dir, 'l10n'))
    self.get_logger().info('MediaWiki %s successfully checked out.'
                           % checkout_version)
def sha(location, rev):
    """Returns SHA1 for things like HEAD or HEAD~~"""
    ensure_dir(location)
    with utils.cd(location):
        resolved = git('rev-parse', '--verify', rev)
    return resolved.strip()
def main(self, *extra_args):
    """Checkout next MediaWiki."""
    dest_dir = os.path.join(
        self.config['stage_dir'],
        '{}{}'.format(self.arguments.prefix, self.arguments.branch))
    checkout_version = 'master'
    if self.arguments.branch != 'master':
        checkout_version = 'wmf/%s' % self.arguments.branch
    reference_dir = None
    if checkout_version != 'master':
        # NOTE(review): .keys()[0] is a Python 2 idiom (views are not
        # indexable on Python 3) — confirm target interpreter.
        old_branch = self.active_wikiversions().keys()[0]
        reference_dir = os.path.join(
            self.config['stage_dir'],
            '{}{}'.format(self.arguments.prefix, old_branch))
        # NOTE(review): unlike later revisions of this code there is no
        # existence check on the old patch dir; copytree raises if
        # /srv/patches/<old_branch> is missing — confirm intended.
        patch_path = os.path.join('/srv/patches', self.arguments.branch)
        if not os.path.exists(patch_path):
            shutil.copytree(os.path.join('/srv/patches', old_branch),
                            os.path.join(patch_path))
    if os.path.isdir(dest_dir):
        self.get_logger().info('Version already checked out')
        return 0
    git.fetch(dest_dir, SOURCE_URL + 'mediawiki/core', reference_dir)
    with utils.cd(dest_dir):
        if subprocess.call(
                ['/usr/bin/git', 'config', 'branch.autosetuprebase',
                 'always']) != 0:
            self.get_logger().warn('Unable to setup auto-rebase')
        # Parallel submodule fetches, capped at half the CPUs
        num_procs = str(max(multiprocessing.cpu_count() / 2, 1))
        if subprocess.call([
                '/usr/bin/git', 'config', 'submodule.fetchJobs', num_procs
        ]) != 0:
            self.get_logger().warn('Unable to setup submodule fetch jobs')
    git.checkout(dest_dir, checkout_version)
    if checkout_version == 'master':
        # Specific to Beta Cluster
        master_stuff(dest_dir)
        write_settings_stub(
            os.path.join(dest_dir, 'StartProfiler.php'),
            os.path.join(self.config['deploy_dir'], 'wmf-config',
                         'StartProfiler-labs.php'))
    else:
        # Specific to production
        git.update_submodules(dest_dir, use_upstream=True)
        update_update_strategy(dest_dir)
        write_settings_stub(
            os.path.join(dest_dir, 'StartProfiler.php'),
            os.path.join(self.config['deploy_dir'], 'wmf-config',
                         'StartProfiler.php'))
    write_settings_stub(
        os.path.join(dest_dir, 'LocalSettings.php'),
        os.path.join(self.config['deploy_dir'], 'wmf-config',
                     'CommonSettings.php'))
    # cache/ must be writable by the l10nupdate user
    cache_dir = os.path.join(dest_dir, 'cache')
    os.chmod(cache_dir, 0o777)
    utils.sudo_check_call('l10nupdate',
                          'mkdir "%s"' % os.path.join(cache_dir, 'l10n'))
    self.get_logger().info('MediaWiki %s successfully checked out.'
                           % checkout_version)
def fat_init(location):
    """Run ``git fat init`` inside *location* to enable git-fat there."""
    with utils.cd(location):
        git.fat('init')
def describe(location):
    """Returns a convenient label for the current state of the git repo."""
    ensure_dir(location)
    with utils.cd(location):
        label = git.describe('--always').strip()
    return label
def fat_init(location):
    """Initializes the given directory for git-fat use."""
    # Equivalent to running `git fat init` in *location*
    with utils.cd(location):
        git.fat('init')
def main(self, *extra_args):
    """Checkout next MediaWiki."""
    dest_dir = os.path.join(
        self.config['stage_dir'],
        '{}{}'.format(self.arguments.prefix, self.arguments.branch))
    checkout_version = 'master'
    if self.arguments.branch != 'master':
        checkout_version = 'wmf/%s' % self.arguments.branch
    reference_dir = None
    if checkout_version != 'master':
        # active_wikiversions() is already sorted by loose-version number,
        # we want the latest version if there's more than 1
        # NOTE(review): .keys()[-1] is a Python 2 idiom — dict views are
        # not indexable on Python 3; confirm target interpreter.
        old_branch = self.active_wikiversions().keys()[-1]
        old_branch_dir = os.path.join(
            self.config['stage_dir'],
            '{}{}'.format(self.arguments.prefix, old_branch))
        reference_dir = None
        if (os.path.exists(old_branch_dir)):
            reference_dir = old_branch_dir
        # Seed this branch's security patches from the previous branch
        # and commit the new tree to the patches repo
        patch_base_dir = '/srv/patches'
        patch_path = os.path.join(patch_base_dir, self.arguments.branch)
        if not os.path.exists(patch_path):
            if os.path.exists(os.path.join(patch_base_dir, old_branch)):
                shutil.copytree(os.path.join(patch_base_dir, old_branch),
                                os.path.join(patch_path))
            srv_patches_git_message = 'Scap prep for "{}"'.format(
                self.arguments.branch)
            git.add_all(patch_base_dir, message=srv_patches_git_message)
    if os.path.isdir(dest_dir):
        self.get_logger().info('Version already checked out')
        return 0
    self.get_logger().info('Fetching core to {}'.format(dest_dir))
    git.fetch(dest_dir, SOURCE_URL + 'mediawiki/core', reference_dir)
    with utils.cd(dest_dir):
        if subprocess.call(
                ['/usr/bin/git', 'config', 'branch.autosetuprebase',
                 'always']) != 0:
            self.get_logger().warn('Unable to setup auto-rebase')
        # Parallel submodule fetches, capped at half the CPUs
        num_procs = str(max(multiprocessing.cpu_count() / 2, 1))
        if subprocess.call([
                '/usr/bin/git', 'config', 'submodule.fetchJobs', num_procs
        ]) != 0:
            self.get_logger().warn('Unable to setup submodule fetch jobs')
    self.get_logger().info('Checkout {} in {}'.format(
        checkout_version, dest_dir))
    git.checkout(dest_dir, checkout_version)
    if checkout_version == 'master':
        # Specific to Beta Cluster
        master_stuff(dest_dir)
    else:
        # Specific to production
        self.get_logger().info('Update submodules for {}'.format(dest_dir))
        git.update_submodules(dest_dir, use_upstream=True)
        update_update_strategy(dest_dir)
    self.get_logger().info('Creating LocalSettings.php stub')
    write_settings_stub(os.path.join(dest_dir, 'LocalSettings.php'))
    self.get_logger().info('Creating l10n cache dir')
    # cache/ must be writable by the l10nupdate user
    cache_dir = os.path.join(dest_dir, 'cache')
    os.chmod(cache_dir, 0o777)
    utils.sudo_check_call('l10nupdate',
                          'mkdir "%s"' % os.path.join(cache_dir, 'l10n'))
    self.get_logger().info('MediaWiki %s successfully checked out.'
                           % checkout_version)
def update_submodule_update_strategy(self, path):
    """Set every submodule under *path* to the rebase update strategy."""
    with utils.cd(path):
        # $name is expanded by `git submodule foreach` at run time
        per_module = ("/usr/bin/git -C %s config " % path) + \
            "submodule.$name.update rebase"
        subprocess.call(
            "/usr/bin/git submodule foreach --recursive '{}'".format(
                per_module),
            shell=True)
def describe(location):
    """Returns a convenient label for the current state of the git repo."""
    ensure_dir(location)
    # --always falls back to the abbreviated commit when no tag matches
    with utils.cd(location):
        return git.describe('--always').strip()
def sha(location, rev):
    """Returns SHA1 for things like HEAD or HEAD~~"""
    ensure_dir(location)
    # --verify makes git error out instead of echoing an unknown rev
    with utils.cd(location):
        return git('rev-parse', '--verify', rev).strip()
def cleanup_branch(self, branch, delete):
    """
    Given a branch, go through the cleanup proccess on the master:

    (1) Prune git branches [if deletion]
    (2) Remove l10nupdate cache
    (3) Remove l10n cache
    (4) Remove l10n bootstrap file
    (5) Remove some branch files [all if deletion]
    (6) Remove security patches [if deletion]
    """
    stage_dir = os.path.join(self.config['stage_dir'], 'php-%s' % branch)
    if not os.path.isdir(stage_dir):
        raise ValueError('No such branch exists, aborting')
    # Build up [name, argv] pairs to run (best-effort) at the end
    command_list = []
    command_list.append([
        'clean-l10nupdate-cache',
        [
            'sudo', '-u', 'www-data', 'rm', '-fR',
            '/var/lib/l10nupdate/caches/cache-%s' % branch
        ]
    ])
    command_list.append([
        'clean-l10nupdate-owned-files',
        [
            'sudo', '-u', 'l10nupdate', 'find', stage_dir, '-user',
            'l10nupdate', '-delete'
        ]
    ])
    command_list.append([
        'clean-l10n-bootstrap',
        [
            'rm', '-fR',
            os.path.join(self.config['stage_dir'], 'wmf-config',
                         'ExtensionMessages-%s.php' % branch)
        ]
    ])
    logger = self.get_logger()
    if delete:
        gerrit_prune_cmd = [
            'git', 'push', 'origin', '--quiet', '--delete',
            'wmf/%s' % branch
        ]
        with log.Timer('prune-git-branches', self.get_stats()):
            # Prune all the submodules' remote branches
            for submodule in git.list_submodules(stage_dir):
                # list_submodules lines look like ' <sha> <path>'
                submodule_path = submodule.lstrip(' ').split(' ')[1]
                with utils.cd(os.path.join(stage_dir, submodule_path)):
                    if subprocess.call(gerrit_prune_cmd) != 0:
                        logger.info(
                            'Failed to prune submodule branch for %s' %
                            submodule)
            # Prune core last
            with utils.cd(stage_dir):
                if subprocess.call(gerrit_prune_cmd) != 0:
                    logger.info('Failed to prune core branch')
        command_list.append(['cleaning-branch', ['rm', '-fR', stage_dir]])
        command_list.append([
            'cleaning-patches',
            ['rm', '-fR', os.path.join('/srv/patches', branch)]
        ])
    else:
        # Keep the branch but delete regenerable file types
        regex = r'".*\.?({0})$"'.format('|'.join(DELETABLE_TYPES))
        command_list.append([
            'cleaning-branch',
            [
                'find', stage_dir, '-type', 'f', '-regextype',
                'posix-extended', '-regex', regex, '-delete'
            ]
        ])
    # Run everything collected above; failures are logged, not fatal
    for command_signature in command_list:
        name = command_signature[0]
        command = command_signature[1]
        with log.Timer(name + '-' + branch, self.get_stats()):
            try:
                subprocess.check_call(command)
            except (subprocess.CalledProcessError, OSError):
                logger.warning('Command failed [%s]: %s'
                               % (name, ' '.join(command)))
def remote_exists(location, remote):
    """Check if remote exists in location"""
    ensure_dir(location)
    with utils.cd(location):
        cmd = '/usr/bin/git config --local --get remote.{}.url'.format(remote)
        # Exit status 0 <=> the remote's URL key is configured
        exists = subprocess.call(cmd, shell=True) == 0
    return exists