def main(): """Parse arguments and return an exit code (e.g. 0 for success, >=1 for failure)""" parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('config', help='The name of the release config file to load') parser.add_argument('--job-prefix', action='append', dest='prefixes', help='Prefix of jobs to wait to finish and succeed before promoting, ' 'can be specified multiple times') parser.add_argument('--dry-run', action='store_true', default=False, help='Run remote rsync as a dry-run, not making any actual changes.') parser.add_argument('--force', action='store_true', default=False, help='Force promotion, ignoring ') opts = parser.parse_args() configuration = builder.load_config(opts.config) # These must exist in the release config, KeyError here if they don't. repo_source_dir = configuration['rsync-target-dir'] repo_tested_dir = configuration['rsync-tested-dir'] if opts.force: print('Force specified, skipping promotion checks and immediately promoting build.') else: # We've only got the one check function now, # but we should easily be able to add more here if we want to for check_func in [job_status_check]: print("Running check function: {}".format(check_func.__name__)) if not check_func(opts.prefixes): print("Promotion blocked by check function.") # Returning 0 here, since this is a "normal" script exit. Jenkins scripting # can be used to parse the output and change the build to Unstable, if desired, # based on the printing of this message, as mentioned in the script's docstring. 
return 0 # Rsync the repos from the source dir to the target dir # source dir trailing slash is needed: it ensures that the *contents* of the # source dir are copied into the destination dir, not the source dir itself remote_source_dir = os.path.join(UPLOAD_BASE_DIR, repo_source_dir) + os.sep remote_tested_dir = os.path.join(UPLOAD_BASE_DIR, repo_tested_dir) # instead of rsyncing from the local mash to the target dir like build-all does, rsync from # the remote target dir to the remote tested dir command = ("ssh -o StrictHostKeyChecking=no [email protected]" " rsync -avz --recursive --delete %s %s") % (remote_source_dir, remote_tested_dir) if opts.dry_run: print("Simulating promotion of {} repo from {} to {}".format( opts.config, repo_source_dir, repo_tested_dir)) command += ' --dry-run' else: print("Promoting {} repo from {} to {}".format( opts.config, repo_source_dir, repo_tested_dir)) # Finally, return the rsync exit code, so this fails if rsync fails return subprocess.check_call(command, shell=True)
def main():
    """Entry point: load the release config and run the version-update /
    merge step for every component it lists."""
    args = parse_args()

    # Read the release configuration named on the command line.
    config = builder.load_config(args.config)

    # Start from a clean working directory.
    builder.ensure_dir(WORKING_DIR, clean=True)

    print("Getting git repos")
    for comp in builder.components(config):
        update_version_and_merge_for_component(comp, args)
def main():
    """Build the Pulp documentation for a release and publish it to OpenShift.

    Clones every component repo for the release, assembles the plugin docs
    under the platform docs tree, runs ``make html``, then rsync/scp's the
    result to the doc host. Raises RuntimeError if any build or push step
    returns a nonzero exit code.
    """
    # Parse the args
    parser = argparse.ArgumentParser()
    parser.add_argument("--release", required=True,
                        help="Build the docs for a given release.")
    opts = parser.parse_args()
    is_pulp3 = opts.release.startswith('3')

    configuration = builder.load_config(opts.release)

    # Get platform build version; the config must have a 'pulp' entry.
    repo_list = builder.components(configuration)
    try:
        pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0]
    except IndexError:
        raise RuntimeError("config file does not have an entry for 'pulp'")
    version = pulp_dict['version']

    # Map the version suffix to the publish channel.
    # (beta and rc both publish to 'testing' — merged from two identical branches.)
    if version.endswith('alpha'):
        build_type = 'nightly'
    elif version.endswith(('beta', 'rc')):
        build_type = 'testing'
    else:
        build_type = 'ga'

    x_y_version = '.'.join(version.split('.')[:2])

    builder.ensure_dir(WORKING_DIR, clean=True)

    # use the version update scripts to check out git repos and ensure correct versions
    for component in repo_list:
        builder.clone_branch(component)

    # install any apidoc dependencies that exist for pulp 3 docs
    if is_pulp3:
        for repo, packages in APIDOC_PACKAGES.items():
            for package in packages:
                package_dir = os.path.join(WORKING_DIR, repo, package)
                if os.path.exists(package_dir):
                    subprocess.check_call(['python', 'setup.py', 'develop'],
                                          cwd=package_dir)

    plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins'])
    builder.ensure_dir(plugins_dir, clean=False)

    # Symlink each plugin's docs into the platform docs tree; the platform
    # repo itself gets its version stamped instead, and pulp_deb is skipped.
    for component in repo_list:
        if component['name'] == 'pulp':
            promote.update_versions(os.path.join(WORKING_DIR, 'pulp'),
                                    *version.split('-'))
            continue
        if component['name'] == 'pulp_deb':
            continue
        src = os.sep.join([WORKING_DIR, component['name'], 'docs'])
        dst = os.sep.join([plugins_dir, component['name']])
        os.symlink(src, dst)

    if is_pulp3:
        src_index_path = 'docs/pulp_index_pulp3.rst'
        src_all_content_path = 'docs/all_content_index_pulp3.rst'
    else:
        src_index_path = 'docs/pulp_index.rst'
        src_all_content_path = 'docs/all_content_index.rst'

    # copy in the plugin_index.rst file for Pulp 2 only
    # (currently Pulp 3 has its own plugins/index.rst without a need of managing it here,
    # outside of platform code)
    # NOTE(review): despite the comment above, this copy runs unconditionally
    # for both Pulp 2 and 3 — confirm whether it should be guarded by is_pulp3.
    plugin_index_rst = os.sep.join([plugins_dir, 'index.rst'])
    copyfile('docs/plugin_index.rst', plugin_index_rst)

    # copy in the pulp_index.rst file
    pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst'])
    copyfile(src_index_path, pulp_index_rst)

    # copy in the all_content_index.rst file
    all_content_index_rst = os.sep.join(
        [WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst'])
    copyfile(src_all_content_path, all_content_index_rst)

    # make the _templates dir
    layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates'])
    os.makedirs(layout_dir)

    # copy in the layout.html file for analytics
    layout_html_path = os.sep.join(
        [WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html'])
    copyfile('docs/layout.html', layout_html_path)

    # build the docs via the Pulp project itself
    print("Building the docs")
    docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs'])
    make_command = ['make', 'html']
    exit_code = subprocess.call(make_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while building the docs.')

    # rsync the docs to the root if it's GA of latest
    if build_type == 'ga' and x_y_version == LATEST:
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = ['rsync', '-avzh', '--delete', '--exclude', 'en',
                         local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError(
                'An error occurred while pushing latest docs to OpenShift.')

    # rsync the nightly "master" docs to an unversioned "nightly" dir for
    # easy linking to in-development docs: /en/nightly/
    if build_type == 'nightly' and opts.release == 'master':
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT,
                                              build_type)
        # --rsync-path lets us mkdir -p the destination on the remote side first.
        path_option_arg = 'mkdir -p %sen/%s/ && rsync' % (SITE_ROOT, build_type)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg,
                         '--delete', local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError(
                'An error occurred while pushing nightly docs to OpenShift.')

    # rsync the docs to OpenShift
    local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
    remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT,
                                          x_y_version)
    if build_type != 'ga':
        # Non-GA builds go into a build-type subdir, e.g. /en/2.13/testing/.
        remote_path_arg += build_type + '/'
        path_option_arg = 'mkdir -p %sen/%s/%s/ && rsync' % (
            SITE_ROOT, x_y_version, build_type)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg,
                         '--delete', local_path_arg, remote_path_arg]
    else:
        # GA publishes to /en/<x.y>/ but must not wipe the nightly/testing subdirs.
        path_option_arg = 'mkdir -p %sen/%s/ && rsync' % (SITE_ROOT, x_y_version)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg,
                         '--delete', '--exclude', 'nightly', '--exclude', 'testing',
                         local_path_arg, remote_path_arg]
    exit_code = subprocess.call(rsync_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing docs to OpenShift.')

    # rsync the robots.txt to OpenShift
    local_path_arg = 'docs/robots.txt'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError(
            'An error occurred while pushing robots.txt to OpenShift.')

    # rsync the testrubyserver.rb to OpenShift
    local_path_arg = 'docs/testrubyserver.rb'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError(
            'An error occurred while pushing testrubyserver.rb to OpenShift.')

    # add symlink for latest
    symlink_cmd = ['ssh', '%s@%s' % (USERNAME, HOSTNAME),
                   'ln -sfn %sen/%s %sen/latest' % (SITE_ROOT, LATEST, SITE_ROOT)]
    exit_code = subprocess.call(symlink_cmd)
    if exit_code != 0:
        # Fixed: the original message contained stray copy-pasted text
        # ("symlink testrubyserver.rb to OpenShift").
        raise RuntimeError(
            "An error occurred while creating the 'latest' symlink on OpenShift.")
remainder = spec_dir while True: remainder, dir_name = os.path.split(remainder) for project in component_list: if project == dir_name: return project if remainder == '/': return None builder.init_koji() # Build our working_dir working_dir = WORKING_DIR print working_dir # Load the config file configuration = builder.load_config(opts.config) koji_prefix = configuration['koji-target-prefix'] # Source extract all the components parent_branches = {} merge_forward = {} component_list = [] spec_project_map = {} print "Getting git repos" for component in get_components(configuration): print "Cloning from github: %s" % component.get('git_url') branch_name = component['git_branch'] parent_branch = component.get('parent_branch', None) command = ['git', 'clone', component.get('git_url'), '--branch', branch_name]
def project_name_from_spec_dir(spec_dir): remainder = spec_dir while True: remainder, dir_name = os.path.split(remainder) for project in component_list: if project == dir_name: return project if remainder == '/': return None builder.init_koji() # Load the config file configuration = builder.load_config(opts.config) koji_prefix = configuration['koji-target-prefix'] nightly_build = opts.config.endswith('-dev') # Source extract all the components parent_branches = {} merge_forward = {} component_list = [] spec_project_map = {} # these get set in the "for component" loop a few lines down platform_version = None # el6 is supported when the platform version is 2.11 or lower el6_supported = None
def main(): # Parse the args parser = argparse.ArgumentParser() parser.add_argument("--release", required=True, help="Build the docs for a given release.") opts = parser.parse_args() is_pulp3 = opts.release.startswith('3') configuration = builder.load_config(opts.release) # Get platform build version repo_list = builder.components(configuration) try: pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0] except IndexError: raise RuntimeError("config file does not have an entry for 'pulp'") version = pulp_dict['version'] if version.endswith('alpha') or is_pulp3: build_type = 'nightly' elif version.endswith('beta'): build_type = 'testing' elif version.endswith('rc'): build_type = 'testing' else: build_type = 'ga' x_y_version = '.'.join(version.split('.')[:2]) builder.ensure_dir(WORKING_DIR, clean=True) # use the version update scripts to check out git repos and ensure correct versions for component in repo_list: builder.clone_branch(component) # install any apidoc dependencies that exist for pulp 3 docs if is_pulp3: for repo, packages in APIDOC_PACKAGES.items(): for package in packages: package_dir = os.path.join(WORKING_DIR, repo, package) if os.path.exists(package_dir): subprocess.check_call(['pip', 'install', '-e', '.'], cwd=package_dir) plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins']) builder.ensure_dir(plugins_dir, clean=False) for component in repo_list: if component['name'] == 'pulp': promote.update_versions(os.path.join(WORKING_DIR, 'pulp'), *version.split('-')) continue if component['name'] == 'pulp_deb': continue src = os.sep.join([WORKING_DIR, component['name'], 'docs']) dst = os.sep.join([plugins_dir, component['name']]) os.symlink(src, dst) if is_pulp3: src_index_path = 'docs/pulp_index_pulp3.rst' src_all_content_path = 'docs/all_content_index_pulp3.rst' else: src_index_path = 'docs/pulp_index.rst' src_all_content_path = 'docs/all_content_index.rst' # copy in the plugin_index.rst file for Pulp 2 only # (currently Pulp 3 
has its own plugins/index.rst without a need of managing it here, # outside of platform code) plugin_index_rst = os.sep.join([plugins_dir, 'index.rst']) copyfile('docs/plugin_index.rst', plugin_index_rst) # copy in the pulp_index.rst file pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst']) copyfile(src_index_path, pulp_index_rst) # copy in the all_content_index.rst file all_content_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst']) copyfile(src_all_content_path, all_content_index_rst) # make the _templates dir layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates']) os.makedirs(layout_dir) # copy in the layout.html file for analytics layout_html_path = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html']) copyfile('docs/layout.html', layout_html_path) # build the docs via the Pulp project itself print("Building the docs") docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs']) # Get the latest api.yaml file to build the rest api docs if is_pulp3: with urllib.request.urlopen("http://*****:*****@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT) rsync_command = ['rsync', '-avzh', '--delete', '--exclude', 'en', '--omit-dir-times', local_path_arg, remote_path_arg] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError('An error occurred while pushing latest docs.') # Also publish to the /en/latest/ directory make_directory_with_rsync(['en', 'latest']) local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep remote_path_arg = '%s@%s:%sen/latest/' % (USERNAME, HOSTNAME, SITE_ROOT) rsync_command = ['rsync', '-avzh', '--delete', local_path_arg, remote_path_arg] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError("An error occurred while pushing the 'latest' directory.") # rsync the nightly "2-master" docs to an unversioned "nightly" dir for # easy linking to in-development docs: 
/en/nightly/ if build_type == 'nightly' and opts.release == '2-master': local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, build_type) make_directory_with_rsync(['en', build_type]) rsync_command = ['rsync', '-avzh', '--delete', local_path_arg, remote_path_arg] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError('An error occurred while pushing nightly docs.') # rsync the docs local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, x_y_version) if build_type != 'ga': remote_path_arg += build_type + '/' make_directory_with_rsync(['en', x_y_version, build_type]) rsync_command = ['rsync', '-avzh', '--delete', local_path_arg, remote_path_arg] else: make_directory_with_rsync(['en', x_y_version]) rsync_command = ['rsync', '-avzh', '--delete', '--exclude', 'nightly', '--exclude', 'testing', local_path_arg, remote_path_arg] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError('An error occurred while pushing docs.')
def main(): # Parse the args parser = argparse.ArgumentParser() parser.add_argument("--release", required=True, help="Build the docs for a given release.") opts = parser.parse_args() is_pulp3 = opts.release.startswith('3') configuration = builder.load_config(opts.release) # Get platform build version repo_list = builder.components(configuration) try: pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0] except IndexError: raise RuntimeError("config file does not have an entry for 'pulp'") version = pulp_dict['version'] if version.endswith('alpha') or is_pulp3: build_type = 'nightly' elif version.endswith('beta'): build_type = 'testing' elif version.endswith('rc'): build_type = 'testing' else: build_type = 'ga' x_y_version = '.'.join(version.split('.')[:2]) builder.ensure_dir(WORKING_DIR, clean=True) # use the version update scripts to check out git repos and ensure correct versions for component in repo_list: builder.clone_branch(component) # install any apidoc dependencies that exist for pulp 3 docs if is_pulp3: for repo, packages in APIDOC_PACKAGES.items(): for package in packages: package_dir = os.path.join(WORKING_DIR, repo, package) if os.path.exists(package_dir): subprocess.check_call(['pip', 'install', '-e', '.'], cwd=package_dir) plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins']) builder.ensure_dir(plugins_dir, clean=False) for component in repo_list: if component['name'] == 'pulp': promote.update_versions(os.path.join(WORKING_DIR, 'pulp'), *version.split('-')) continue if component['name'] == 'pulp_deb': continue src = os.sep.join([WORKING_DIR, component['name'], 'docs']) dst = os.sep.join([plugins_dir, component['name']]) os.symlink(src, dst) if is_pulp3: src_index_path = 'docs/pulp_index_pulp3.rst' src_all_content_path = 'docs/all_content_index_pulp3.rst' else: src_index_path = 'docs/pulp_index.rst' src_all_content_path = 'docs/all_content_index.rst' # copy in the plugin_index.rst file for Pulp 2 only # (currently Pulp 3 
has its own plugins/index.rst without a need of managing it here, # outside of platform code) plugin_index_rst = os.sep.join([plugins_dir, 'index.rst']) copyfile('docs/plugin_index.rst', plugin_index_rst) # copy in the pulp_index.rst file pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst']) copyfile(src_index_path, pulp_index_rst) # copy in the all_content_index.rst file all_content_index_rst = os.sep.join( [WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst']) copyfile(src_all_content_path, all_content_index_rst) # make the _templates dir layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates']) os.makedirs(layout_dir) # copy in the layout.html file for analytics layout_html_path = os.sep.join( [WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html']) copyfile('docs/layout.html', layout_html_path) # build the docs via the Pulp project itself print("Building the docs") docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs']) # Get the latest api.yaml file to build the rest api docs if is_pulp3: with urllib.request.urlopen("http://*****:*****@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT) rsync_command = [ 'rsync', '-avzh', '--delete', '--exclude', 'en', '--omit-dir-times', local_path_arg, remote_path_arg ] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError('An error occurred while pushing latest docs.') # Also publish to the /en/latest/ directory make_directory_with_rsync(['en', 'latest']) local_path_arg = os.sep.join([docs_directory, '_build', 'html' ]) + os.sep remote_path_arg = '%s@%s:%sen/latest/' % (USERNAME, HOSTNAME, SITE_ROOT) rsync_command = [ 'rsync', '-avzh', '--delete', local_path_arg, remote_path_arg ] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError( "An error occurred while pushing the 'latest' directory.") # rsync the nightly "2-master" docs to an unversioned "nightly" dir for # easy linking to in-development docs: 
/en/nightly/ if build_type == 'nightly' and opts.release == '2-master': local_path_arg = os.sep.join([docs_directory, '_build', 'html' ]) + os.sep remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, build_type) make_directory_with_rsync(['en', build_type]) rsync_command = [ 'rsync', '-avzh', '--delete', local_path_arg, remote_path_arg ] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError('An error occurred while pushing nightly docs.') # rsync the docs local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, x_y_version) if build_type != 'ga': remote_path_arg += build_type + '/' make_directory_with_rsync(['en', x_y_version, build_type]) rsync_command = [ 'rsync', '-avzh', '--delete', local_path_arg, remote_path_arg ] else: make_directory_with_rsync(['en', x_y_version]) rsync_command = [ 'rsync', '-avzh', '--delete', '--exclude', 'nightly', '--exclude', 'testing', local_path_arg, remote_path_arg ] exit_code = subprocess.call(rsync_command, cwd=docs_directory) if exit_code != 0: raise RuntimeError('An error occurred while pushing docs.')