def main(self, *extra_args):
    rsync_args = ['--delete-excluded'] if self.arguments.delete_excluded else []
    tasks.sync_common(
        self.config,
        include=self.arguments.include,
        sync_from=self.arguments.servers,
        verbose=self.verbose,
        rsync_args=rsync_args)
    if self.arguments.update_l10n:
        with log.Timer('scap-cdb-rebuild', self.get_stats()):
            utils.sudo_check_call(
                'mwdeploy',
                self.get_script_path() + ' cdb-rebuild --no-progress')

    # Invalidate opcache
    # TODO deduplicate this from AbstractSync._invalidate_opcache()
    php7_admin_port = self.config.get('php7-admin-port')
    if php7_admin_port:
        om = opcache_manager.OpcacheManager(php7_admin_port)
        failed = om.invalidate([socket.gethostname()], None)
        if failed:
            self.get_logger().warning('Opcache invalidation failed. '
                                      'Consider performing it manually.')

    if self.arguments.php_restart:
        fpm = php_fpm.PHPRestart(self.config)
        self.get_logger().info('Checking if php-fpm restart needed')
        failed = fpm.restart_self()
        if failed:
            self.get_logger().warning('php-fpm restart failed!')

    return 0

def main(self, *extra_args):
    rsync_args = ['--delete-excluded'] if self.arguments.delete_excluded else []
    tasks.sync_common(
        self.config,
        include=self.arguments.include,
        sync_from=self.arguments.servers,
        verbose=self.verbose,
        rsync_args=rsync_args
    )
    if self.arguments.update_l10n:
        with log.Timer('scap-cdb-rebuild', self.get_stats()):
            utils.sudo_check_call(
                'mwdeploy',
                self.get_script_path() + ' cdb-rebuild --no-progress'
            )
        tasks.clear_message_blobs()

    # Invalidate opcache
    # TODO deduplicate this from AbstractSync._invalidate_opcache()
    php7_admin_port = self.config.get('php7-admin-port')
    if not php7_admin_port:
        return 0
    om = opcache_manager.OpcacheManager(php7_admin_port)
    failed = om.invalidate([socket.gethostname()], None)
    if failed:
        self.get_logger().warning(
            'Opcache invalidation failed. Consider performing it manually.'
        )
    return 0

def _git_repo(self): """Flatten deploy directory into shared git repo.""" if self.config['scap3_mediawiki']: self.get_logger().info('Setting up deploy git directory') cmd = '{} deploy-mediawiki -v "{}"'.format(self.get_script_path(), self.arguments.message) utils.sudo_check_call('mwdeploy', cmd)
def _git_repo(self): """Flatten deploy directory into shared git repo.""" if self.config['scap3_mediawiki']: self.get_logger().info('Setting up deploy git directory') cmd = '{} deploy-mediawiki -v "{}"'.format( self.get_script_path(), self.arguments.message) utils.sudo_check_call('mwdeploy', cmd)
def clear_message_blobs(logger=None):
    """Clear MessageBlobStore cache on all wikis"""
    logger.info('Running refreshMessageBlobs.php for each wiki')
    utils.sudo_check_call(
        'www-data',
        '/usr/local/bin/foreachwiki '
        'extensions/WikimediaMaintenance/refreshMessageBlobs.php'
    )

def cleanup_branch(self, branch, delete):
    """
    Given a branch, go through the cleanup process on the master.

    (1) Prune git branches [if deletion]
    (2) Remove l10nupdate cache
    (3) Remove l10n cache
    (4) Remove l10n bootstrap file
    (5) Remove some branch files [all if deletion]
    (6) Remove security patches [if deletion]
    """
    if not os.path.isdir(self.branch_stage_dir):
        raise ValueError('No such branch exists, aborting')

    with log.Timer('clean-l10nupdate-cache', self.get_stats()):
        utils.sudo_check_call(
            'www-data',
            'rm -fR /var/lib/l10nupdate/caches/cache-%s' % branch
        )
    with log.Timer('clean-l10nupdate-owned-files', self.get_stats()):
        utils.sudo_check_call(
            'l10nupdate',
            'find %s -user l10nupdate -delete' % self.branch_stage_dir
        )
    with log.Timer('clean-ExtensionMessages'):
        ext_msg = os.path.join(self.config['stage_dir'], 'wmf-config',
                               'ExtensionMessages-%s.php' % branch)
        self._maybe_delete(ext_msg)

    logger = self.get_logger()

    if delete:
        # Moved behind a feature flag until T218750 is resolved
        if self.arguments.delete_gerrit_branch:
            git_prune_cmd = [
                'git', 'push', 'origin', '--quiet', '--delete',
                'wmf/%s' % branch
            ]
            with log.Timer('prune-git-branches', self.get_stats()):
                # Prune all the submodules' remote branches
                with utils.cd(self.branch_stage_dir):
                    submodule_cmd = 'git submodule foreach "{} ||:"'.format(
                        ' '.join(git_prune_cmd))
                    subprocess.check_output(submodule_cmd, shell=True)
                    if subprocess.call(git_prune_cmd) != 0:
                        logger.info('Failed to prune core branch')
        with log.Timer('removing-local-copy'):
            self._maybe_delete(self.branch_stage_dir)
        with log.Timer('cleaning-unused-patches', self.get_stats()):
            self._maybe_delete(os.path.join('/srv/patches', branch))
    else:
        with log.Timer('cleaning-unused-files', self.get_stats()):
            for rmdir in DELETABLE_DIRS:
                self._maybe_delete(
                    os.path.join(self.branch_stage_dir, rmdir)
                )

def clear_message_blobs(logger=None):
    """
    Clear MessageBlobStore cache on all wikis

    :param logger: logger instance
    """
    logger.info('Running refreshMessageBlobs.php for each wiki')
    # This script is wiki-agnostic (affects all wikis)
    utils.sudo_check_call(
        'www-data',
        '/usr/local/bin/mwscript '
        'extensions/WikimediaMaintenance/refreshMessageBlobs.php')

def _after_sync_common(self):
    super(Scap, self)._after_sync_common()

    # Bug 63659: Compile deploy_dir/wikiversions.json to cdb
    cmd = '{} wikiversions-compile'.format(self.get_script_path())
    utils.sudo_check_call('mwdeploy', cmd)

    # Update list of extension message files and regenerate the
    # localisation cache.
    with log.Timer('l10n-update', self.get_stats()):
        for version, wikidb in self.active_wikiversions().items():
            tasks.update_localization_cache(version, wikidb,
                                            self.verbose, self.config)

def _after_sync_common(self):
    super(Scap, self)._after_sync_common()

    # Bug 63659: Compile deploy_dir/wikiversions.json to cdb
    cmd = '{} wikiversions-compile'.format(self.get_script_path())
    utils.sudo_check_call('mwdeploy', cmd)

    # Update list of extension message files and regenerate the
    # localisation cache.
    with log.Timer('l10n-update', self.get_stats()):
        for version, wikidb in self.active_wikiversions().items():
            tasks.update_localization_cache(
                version, wikidb, self.verbose, self.config)

def main(self, *extra_args): """ Checkout next MediaWiki """ self.branch = self.arguments.branch self.dest_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, self.branch) ) old_branch = self.active_wikiversions().keys()[0] copy_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, old_branch) ) if os.path.isdir(self.dest_dir): self.get_logger().info('Version already checked out') return 0 git.fetch(self.dest_dir, self.gerrit + 'mediawiki/core', copy_dir) with utils.cd(self.dest_dir): if subprocess.call(['/usr/bin/git', 'config', 'branch.autosetuprebase', 'always']) != 0: self.get_logger().warn('Unable to setup auto-rebase') checkout_version = 'master' if self.branch != 'master': checkout_version = 'wmf/%s' % self.branch git.checkout(self.dest_dir, checkout_version) if checkout_version == 'master': self.master_stuff() else: git.update_submodules(self.dest_dir, use_upstream=True) self.update_submodule_update_strategy(self.dest_dir) self.write_localsettings() self.create_startprofiler_symlink() cache_dir = os.path.join(self.dest_dir, 'cache') os.chmod(cache_dir, 0o777) utils.sudo_check_call('l10nupdate', 'mkdir "%s"' % os.path.join(cache_dir, 'l10n')) self.get_logger().info('MediaWiki %s successfully checked out.' % checkout_version)
def main(self, *extra_args): """ Checkout next MediaWiki """ self.branch = self.arguments.branch self.dest_dir = os.path.join(self.config["stage_dir"], "{}{}".format(self.arguments.prefix, self.branch)) if os.path.isdir(self.dest_dir): self.get_logger().info("Version already checked out") return 0 git.fetch(self.dest_dir, self.gerrit + "mediawiki/core") with utils.cd(self.dest_dir): if subprocess.call(["/usr/bin/git", "config", "branch.autosetuprebase", "always"]) != 0: self.get_logger().warn("Unable to setup auto-rebase") checkout_version = "master" if self.branch != "master": checkout_version = "wmf/%s" % self.branch git.checkout(self.dest_dir, checkout_version) if checkout_version == "master": self.master_stuff() else: git.update_submodules(self.dest_dir, use_upstream=True) self.update_submodule_update_strategy(self.dest_dir) self.write_localsettings() self.create_startprofiler_symlink() cache_dir = os.path.join(self.dest_dir, "cache") os.chmod(cache_dir, 0o777) utils.sudo_check_call("l10nupdate", 'mkdir "%s"' % os.path.join(cache_dir, "l10n")) self.get_logger().info("MediaWiki %s successfully checked out." % checkout_version)
def _call_rebuildLocalisationCache(
        wikidb, out_dir, use_cores=1, lang=None, force=False, quiet=False):
    """
    Helper for update_localization_cache.

    :param wikidb: Wiki running given version
    :param out_dir: The output directory
    :param use_cores: The number of cores to run in
    :param lang: The --lang option, or None to omit
    :param force: Whether to pass --force
    :param quiet: Whether to pass --quiet
    """
    with utils.sudo_temp_dir('www-data', 'scap_l10n_') as temp_dir:
        # Seed the temporary directory with the current CDB files
        if glob.glob('%s/*.cdb' % out_dir):
            utils.sudo_check_call(
                'www-data',
                "cp '%(out_dir)s/'*.cdb '%(temp_dir)s'" % {
                    'temp_dir': temp_dir,
                    'out_dir': out_dir
                })
        # Generate the files into a temporary directory as www-data
        utils.sudo_check_call(
            'www-data',
            '/usr/local/bin/mwscript rebuildLocalisationCache.php '
            '--wiki="%(wikidb)s" --outdir="%(temp_dir)s" '
            '--threads=%(use_cores)s %(lang)s %(force)s %(quiet)s' % {
                'wikidb': wikidb,
                'temp_dir': temp_dir,
                'use_cores': use_cores,
                'lang': '--lang ' + lang if lang else '',
                'force': '--force' if force else '',
                'quiet': '--quiet' if quiet else ''
            })
        # Copy the files into the real directory as l10nupdate
        utils.sudo_check_call(
            'l10nupdate',
            'cp -r "%(temp_dir)s"/* "%(out_dir)s"' % {
                'temp_dir': temp_dir,
                'out_dir': out_dir
            })

def main(self, *extra_args): """Checkout next MediaWiki.""" dest_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, self.arguments.branch)) checkout_version = 'master' if self.arguments.branch != 'master': checkout_version = 'wmf/%s' % self.arguments.branch reference_dir = None if checkout_version != 'master': old_branch = self.active_wikiversions().keys()[0] reference_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, old_branch)) patch_path = os.path.join('/srv/patches', self.arguments.branch) if not os.path.exists(patch_path): shutil.copytree(os.path.join('/srv/patches', old_branch), os.path.join(patch_path)) if os.path.isdir(dest_dir): self.get_logger().info('Version already checked out') return 0 git.fetch(dest_dir, SOURCE_URL + 'mediawiki/core', reference_dir) with utils.cd(dest_dir): if subprocess.call( ['/usr/bin/git', 'config', 'branch.autosetuprebase', 'always' ]) != 0: self.get_logger().warn('Unable to setup auto-rebase') num_procs = str(max(multiprocessing.cpu_count() / 2, 1)) if subprocess.call([ '/usr/bin/git', 'config', 'submodule.fetchJobs', num_procs ]) != 0: self.get_logger().warn('Unable to setup submodule fetch jobs') git.checkout(dest_dir, checkout_version) if checkout_version == 'master': # Specific to Beta Cluster master_stuff(dest_dir) write_settings_stub( os.path.join(dest_dir, 'StartProfiler.php'), os.path.join(self.config['deploy_dir'], 'wmf-config', 'StartProfiler-labs.php')) else: # Specific to production git.update_submodules(dest_dir, use_upstream=True) update_update_strategy(dest_dir) write_settings_stub( os.path.join(dest_dir, 'StartProfiler.php'), os.path.join(self.config['deploy_dir'], 'wmf-config', 'StartProfiler.php')) write_settings_stub( os.path.join(dest_dir, 'LocalSettings.php'), os.path.join(self.config['deploy_dir'], 'wmf-config', 'CommonSettings.php')) cache_dir = os.path.join(dest_dir, 'cache') os.chmod(cache_dir, 0o777) utils.sudo_check_call('l10nupdate', 'mkdir "%s"' % os.path.join(cache_dir, 'l10n')) self.get_logger().info('MediaWiki %s successfully checked out.' % checkout_version)
def update_localization_cache(version, wikidb, verbose, cfg, logger=None):
    """
    Update the localization cache for a given MW version.

    :param version: MediaWiki version
    :param wikidb: Wiki running given version
    :param verbose: Provide verbose output
    :param cfg: Global configuration
    """
    # Calculate the number of parallel threads
    # Leave a couple of cores free for other stuff
    use_cores = utils.cpus_for_jobs()

    verbose_messagelist = ''
    force_rebuild = False
    quiet_rebuild = True
    if verbose:
        verbose_messagelist = '--verbose'
        quiet_rebuild = False

    extension_messages = os.path.join(
        cfg['stage_dir'], 'wmf-config',
        'ExtensionMessages-%s.php' % version)

    if not os.path.exists(extension_messages):
        # Touch the extension_messages file to prevent php require errors
        logger.info('Creating empty %s', extension_messages)
        open(extension_messages, 'a').close()

    cache_dir = os.path.join(
        cfg['stage_dir'], 'php-%s' % version, 'cache', 'l10n')

    if not os.path.exists(os.path.join(cache_dir, 'l10n_cache-en.cdb')):
        # mergeMessageFileList.php needs a l10n file
        logger.info('Bootstrapping l10n cache for %s', version)
        _call_rebuildLocalisationCache(
            wikidb, cache_dir, use_cores, lang='en', quiet=True)
        # Force subsequent cache rebuild to overwrite bootstrap version
        force_rebuild = True

    logger.info('Updating ExtensionMessages-%s.php', version)
    new_extension_messages = subprocess.check_output(
        'sudo -u www-data -n -- /bin/mktemp', shell=True).strip()

    # attempt to read extension-list from the branch instead of wmf-config
    ext_list = os.path.join(
        cfg['stage_dir'], "php-%s" % version, "extension-list")

    if not os.path.isfile(ext_list):
        # fall back to the old location in wmf-config
        ext_list = "%s/wmf-config/extension-list" % cfg['stage_dir']

    utils.sudo_check_call(
        'www-data',
        '/usr/local/bin/mwscript mergeMessageFileList.php '
        '--wiki="%s" --list-file="%s" '
        '--output="%s" %s' % (
            wikidb, ext_list, new_extension_messages, verbose_messagelist))

    utils.sudo_check_call('www-data',
                          'chmod 0664 "%s"' % new_extension_messages)
    logger.debug('Copying %s to %s' % (
        new_extension_messages, extension_messages))
    shutil.copyfile(new_extension_messages, extension_messages)
    utils.sudo_check_call('www-data', 'rm "%s"' % new_extension_messages)

    # Update ExtensionMessages-*.php in the local copy.
    deploy_dir = os.path.realpath(cfg['deploy_dir'])
    stage_dir = os.path.realpath(cfg['stage_dir'])
    if stage_dir != deploy_dir:
        logger.debug('Copying ExtensionMessages-*.php to local copy')
        utils.sudo_check_call(
            'mwdeploy',
            'cp "%s" "%s/wmf-config/"' % (
                extension_messages, cfg['deploy_dir']))

    # Rebuild all the CDB files for each language
    logger.info(
        'Updating LocalisationCache for %s '
        'using %s thread(s)' % (version, use_cores))
    _call_rebuildLocalisationCache(
        wikidb, cache_dir, use_cores, force=force_rebuild,
        quiet=quiet_rebuild)

    # Include JSON versions of the CDB files and add MD5 files
    logger.info('Generating JSON versions and md5 files')
    scap_path = os.path.join(os.path.dirname(sys.argv[0]), 'scap')
    utils.sudo_check_call(
        'l10nupdate',
        '%s cdb-json-refresh '
        '--directory="%s" --threads=%s %s' % (
            scap_path, cache_dir, use_cores, verbose_messagelist))

def main(self, *extra_args): """Checkout next MediaWiki.""" dest_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, self.arguments.branch)) checkout_version = 'master' if self.arguments.branch != 'master': checkout_version = 'wmf/%s' % self.arguments.branch reference_dir = None if checkout_version != 'master': # active_wikiversions() is already sorted by loose-version number, # we want the latest version if there's more than 1 old_branch = self.active_wikiversions().keys()[-1] old_branch_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, old_branch)) reference_dir = None if (os.path.exists(old_branch_dir)): reference_dir = old_branch_dir patch_base_dir = '/srv/patches' patch_path = os.path.join(patch_base_dir, self.arguments.branch) if not os.path.exists(patch_path): if os.path.exists(os.path.join(patch_base_dir, old_branch)): shutil.copytree(os.path.join(patch_base_dir, old_branch), os.path.join(patch_path)) srv_patches_git_message = 'Scap prep for "{}"'.format( self.arguments.branch) git.add_all(patch_base_dir, message=srv_patches_git_message) if os.path.isdir(dest_dir): self.get_logger().info('Version already checked out') return 0 self.get_logger().info('Fetching core to {}'.format(dest_dir)) git.fetch(dest_dir, SOURCE_URL + 'mediawiki/core', reference_dir) with utils.cd(dest_dir): if subprocess.call( ['/usr/bin/git', 'config', 'branch.autosetuprebase', 'always' ]) != 0: self.get_logger().warn('Unable to setup auto-rebase') num_procs = str(max(multiprocessing.cpu_count() / 2, 1)) if subprocess.call([ '/usr/bin/git', 'config', 'submodule.fetchJobs', num_procs ]) != 0: self.get_logger().warn('Unable to setup submodule fetch jobs') self.get_logger().info('Checkout {} in {}'.format( checkout_version, dest_dir)) git.checkout(dest_dir, checkout_version) if checkout_version == 'master': # Specific to Beta Cluster master_stuff(dest_dir) else: # Specific to production self.get_logger().info('Update submodules for {}'.format(dest_dir)) git.update_submodules(dest_dir, use_upstream=True) update_update_strategy(dest_dir) self.get_logger().info('Creating LocalSettings.php stub') write_settings_stub(os.path.join(dest_dir, 'LocalSettings.php')) self.get_logger().info('Creating l10n cache dir') cache_dir = os.path.join(dest_dir, 'cache') os.chmod(cache_dir, 0o777) utils.sudo_check_call('l10nupdate', 'mkdir "%s"' % os.path.join(cache_dir, 'l10n')) self.get_logger().info('MediaWiki %s successfully checked out.' % checkout_version)
def update_localization_cache(version, wikidb, verbose, cfg, logger=None):
    """
    Update the localization cache for a given MW version.

    :param version: MediaWiki version
    :param wikidb: Wiki running given version
    :param verbose: Provide verbose output
    :param cfg: Global configuration
    """
    # Calculate the number of parallel threads
    # Leave a couple of cores free for other stuff
    use_cores = utils.cpus_for_jobs()

    verbose_messagelist = ''
    force_rebuild = False
    quiet_rebuild = True
    if verbose:
        verbose_messagelist = '--verbose'
        quiet_rebuild = False

    extension_messages = os.path.join(cfg['stage_dir'], 'wmf-config',
                                      'ExtensionMessages-%s.php' % version)

    if not os.path.exists(extension_messages):
        # Touch the extension_messages file to prevent php require errors
        logger.info('Creating empty %s', extension_messages)
        open(extension_messages, 'a').close()

    cache_dir = os.path.join(cfg['stage_dir'], 'php-%s' % version,
                             'cache', 'l10n')

    if not os.path.exists(os.path.join(cache_dir, 'l10n_cache-en.cdb')):
        # mergeMessageFileList.php needs a l10n file
        logger.info('Bootstrapping l10n cache for %s', version)
        _call_rebuildLocalisationCache(wikidb, cache_dir, use_cores,
                                       lang='en', quiet=True)
        # Force subsequent cache rebuild to overwrite bootstrap version
        force_rebuild = True

    logger.info('Updating ExtensionMessages-%s.php', version)
    new_extension_messages = subprocess.check_output(
        'sudo -u www-data -n -- /bin/mktemp', shell=True).strip()

    # attempt to read extension-list from the branch instead of wmf-config
    ext_list = os.path.join(cfg['stage_dir'], "php-%s" % version,
                            "extension-list")

    if not os.path.isfile(ext_list):
        # fall back to the old location in wmf-config
        ext_list = "%s/wmf-config/extension-list" % cfg['stage_dir']

    utils.sudo_check_call(
        'www-data',
        '/usr/local/bin/mwscript mergeMessageFileList.php '
        '--wiki="%s" --list-file="%s" '
        '--output="%s" %s' % (wikidb, ext_list, new_extension_messages,
                              verbose_messagelist))

    utils.sudo_check_call('www-data',
                          'chmod 0664 "%s"' % new_extension_messages)
    logger.debug('Copying %s to %s' % (new_extension_messages,
                                       extension_messages))
    shutil.copyfile(new_extension_messages, extension_messages)
    utils.sudo_check_call('www-data', 'rm "%s"' % new_extension_messages)

    # Update ExtensionMessages-*.php in the local copy.
    deploy_dir = os.path.realpath(cfg['deploy_dir'])
    stage_dir = os.path.realpath(cfg['stage_dir'])
    if stage_dir != deploy_dir:
        logger.debug('Copying ExtensionMessages-*.php to local copy')
        utils.sudo_check_call(
            'mwdeploy',
            'cp "%s" "%s/wmf-config/"' % (extension_messages,
                                          cfg['deploy_dir']))

    # Rebuild all the CDB files for each language
    logger.info('Updating LocalisationCache for %s '
                'using %s thread(s)' % (version, use_cores))
    _call_rebuildLocalisationCache(wikidb, cache_dir, use_cores,
                                   force=force_rebuild, quiet=quiet_rebuild)

    # Include JSON versions of the CDB files and add MD5 files
    logger.info('Generating JSON versions and md5 files')
    scap_path = os.path.join(os.path.dirname(sys.argv[0]), 'scap')
    utils.sudo_check_call(
        'l10nupdate',
        '%s cdb-json-refresh '
        '--directory="%s" --threads=%s %s' % (scap_path, cache_dir,
                                              use_cores, verbose_messagelist))

def _call_rebuildLocalisationCache(wikidb, out_dir, use_cores=1,
                                   lang=None, force=False, quiet=False):
    """
    Helper for update_localization_cache.

    :param wikidb: Wiki running given version
    :param out_dir: The output directory
    :param use_cores: The number of cores to run in
    :param lang: The --lang option, or None to omit
    :param force: Whether to pass --force
    :param quiet: Whether to pass --quiet
    """
    with utils.sudo_temp_dir('www-data', 'scap_l10n_') as temp_dir:
        # Seed the temporary directory with the current CDB files
        if glob.glob('%s/*.cdb' % out_dir):
            utils.sudo_check_call(
                'www-data',
                "cp '%(out_dir)s/'*.cdb '%(temp_dir)s'" % {
                    'temp_dir': temp_dir,
                    'out_dir': out_dir
                })
        # Generate the files into a temporary directory as www-data
        utils.sudo_check_call(
            'www-data',
            '/usr/local/bin/mwscript rebuildLocalisationCache.php '
            '--wiki="%(wikidb)s" --outdir="%(temp_dir)s" '
            '--store-class=LCStoreCDB '
            '--threads=%(use_cores)s %(lang)s %(force)s %(quiet)s' % {
                'wikidb': wikidb,
                'temp_dir': temp_dir,
                'use_cores': use_cores,
                'lang': '--lang ' + lang if lang else '',
                'force': '--force' if force else '',
                'quiet': '--quiet' if quiet else ''
            })
        # Copy the files into the real directory as l10nupdate
        utils.sudo_check_call(
            'l10nupdate',
            'cp -r "%(temp_dir)s"/* "%(out_dir)s"' % {
                'temp_dir': temp_dir,
                'out_dir': out_dir
            })

    # Doing it all over again, with php array instead.
    # The cdb calls will be gone soon: T99740
    with utils.sudo_temp_dir('www-data', 'scap_l10n_array_') as temp_dir:
        # Seed the temporary directory with the current php files
        if glob.glob('%s/*.php' % out_dir):
            utils.sudo_check_call(
                'www-data',
                "cp '%(out_dir)s/'*.php '%(temp_dir)s'" % {
                    'temp_dir': temp_dir,
                    'out_dir': out_dir
                })
        # Generate the files into a temporary directory as www-data
        utils.sudo_check_call(
            'www-data',
            '/usr/local/bin/mwscript rebuildLocalisationCache.php '
            '--wiki="%(wikidb)s" --outdir="%(temp_dir)s" '
            '--store-class=LCStoreStaticArray '
            '--threads=%(use_cores)s %(lang)s %(force)s %(quiet)s' % {
                'wikidb': wikidb,
                'temp_dir': temp_dir,
                'use_cores': use_cores,
                'lang': '--lang ' + lang if lang else '',
                'force': '--force' if force else '',
                'quiet': '--quiet' if quiet else ''
            })
        # Copy the files into the real directory as l10nupdate
        utils.sudo_check_call(
            'l10nupdate',
            'cp -r "%(temp_dir)s"/* "%(out_dir)s"' % {
                'temp_dir': temp_dir,
                'out_dir': out_dir
            })

def main(self, *extra_args): """Checkout next MediaWiki.""" dest_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, self.arguments.branch) ) checkout_version = 'master' if self.arguments.branch != 'master': checkout_version = 'wmf/%s' % self.arguments.branch reference_dir = None if checkout_version != 'master': # active_wikiversions() is already sorted by loose-version number, # we want the latest version if there's more than 1 old_branch = self.active_wikiversions().keys()[-1] old_branch_dir = os.path.join( self.config['stage_dir'], '{}{}'.format(self.arguments.prefix, old_branch) ) reference_dir = None if (os.path.exists(old_branch_dir)): reference_dir = old_branch_dir patch_path = os.path.join('/srv/patches', self.arguments.branch) if not os.path.exists(patch_path): if os.path.exists(os.path.join('/srv/patches', old_branch)): shutil.copytree( os.path.join('/srv/patches', old_branch), os.path.join(patch_path) ) if os.path.isdir(dest_dir): self.get_logger().info('Version already checked out') return 0 self.get_logger().info('Fetching core to {}'.format(dest_dir)) git.fetch(dest_dir, SOURCE_URL + 'mediawiki/core', reference_dir) with utils.cd(dest_dir): if subprocess.call(['/usr/bin/git', 'config', 'branch.autosetuprebase', 'always']) != 0: self.get_logger().warn('Unable to setup auto-rebase') num_procs = str(max(multiprocessing.cpu_count() / 2, 1)) if subprocess.call(['/usr/bin/git', 'config', 'submodule.fetchJobs', num_procs]) != 0: self.get_logger().warn('Unable to setup submodule fetch jobs') self.get_logger().info('Checkout {} in {}'.format(checkout_version, dest_dir)) git.checkout(dest_dir, checkout_version) if checkout_version == 'master': # Specific to Beta Cluster master_stuff(dest_dir) else: # Specific to production self.get_logger().info('Update submodules for {}'.format(dest_dir)) git.update_submodules(dest_dir, use_upstream=True) update_update_strategy(dest_dir) self.get_logger().info('Creating LocalSettings.php stub') write_settings_stub(os.path.join(dest_dir, 'LocalSettings.php')) self.get_logger().info('Creating l10n cache dir') cache_dir = os.path.join(dest_dir, 'cache') os.chmod(cache_dir, 0o777) utils.sudo_check_call('l10nupdate', 'mkdir "%s"' % os.path.join(cache_dir, 'l10n')) self.get_logger().info('MediaWiki %s successfully checked out.' % checkout_version)