Example #1
def main():
    opts = parse_args()

    # Load the config file
    configuration = builder.load_config(opts.config)

    # Ensure the working dir exists
    builder.ensure_dir(WORKING_DIR, clean=True)

    print("Getting git repos")
    for component in builder.components(configuration):
        update_version_and_merge_for_component(component, opts)
Example #2
parser.add_argument("project", help="The name of the project to build (pulp, pulp_rpm, crane, ...)")
parser.add_argument("--branch", default="master",
                    help="The branch to build. Defaults to master.")
parser.add_argument("--koji_prefix", default="pulp-2.7",
                    help="The prefix for the koji build taret to use. Defaults to pulp-2.7")
parser.add_argument("--release", action="store_true", default=False,
                    help="Perform a release build. If a release build is requested the "
                         "branch will be verified to be in the form x.y-(dev|testing|release) "
                         "or 'master'. In addition the version will be tagged & the commit "
                         "will be promoted through to master")

opts = parser.parse_args()


builder.ensure_dir(WORKING_DIR, clean=True)
TITO_DIR = os.path.join(WORKING_DIR, 'tito')
MASH_DIR = os.path.join(WORKING_DIR, 'mash')
builder.ensure_dir(TITO_DIR, clean=True)
builder.ensure_dir(MASH_DIR, clean=True)

# Initialize our connection to koji
builder.init_koji()

# Build our working_dir
working_dir = WORKING_DIR
print(working_dir)

# Get the project to build from git

git_repo = "[email protected]:pulp/{project}.git".format(project=opts.project)
Example #3
def main():
    # Parse the args
    parser = argparse.ArgumentParser()
    parser.add_argument("--release", required=True, help="Build the docs for a given release.")
    opts = parser.parse_args()
    is_pulp3 = opts.release.startswith('3')

    configuration = load_config(opts.release)

    # Get platform build version
    repo_list = configuration['repositories']
    try:
        pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0]
    except IndexError:
        raise RuntimeError("config file does not have an entry for 'pulp'")
    version = pulp_dict['version']

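    # Map the version suffix to a publish channel: 'alpha' builds are nightlies,
    # 'beta' and 'rc' go to testing, and anything else is treated as GA.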
    if version.endswith('alpha'):
        build_type = 'nightly'
    elif version.endswith('beta'):
        build_type = 'testing'
    elif version.endswith('rc'):
        build_type = 'testing'
    else:
        build_type = 'ga'

    x_y_version = '.'.join(version.split('.')[:2])

    builder.ensure_dir(WORKING_DIR, clean=True)

    # use the version update scripts to check out git repos and ensure correct versions
    update_version(opts.release)

    # install any apidoc dependencies that exist for pulp 3 docs
    if is_pulp3:
        for repo, packages in APIDOC_PACKAGES.items():
            for package in packages:
                package_dir = os.path.join(WORKING_DIR, repo, package)
                if os.path.exists(package_dir):
                    subprocess.check_call(['python', 'setup.py', 'develop'], cwd=package_dir)

    plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins'])
    builder.ensure_dir(plugins_dir)

    for component in get_components(configuration):
        if component['name'] == 'pulp':
            continue

        src = os.sep.join([WORKING_DIR, component['name'], 'docs'])
        dst = os.sep.join([plugins_dir, component['name']])
        os.symlink(src, dst)

    # copy in the pulp_index.rst file
    if is_pulp3:
        src_path = 'docs/pulp_index_pulp3.rst'
    else:
        src_path = 'docs/pulp_index.rst'
    pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst'])
    copyfile(src_path, pulp_index_rst)

    # copy in the plugin_index.rst file
    plugin_index_rst = os.sep.join([plugins_dir, 'index.rst'])
    copyfile('docs/plugin_index.rst', plugin_index_rst)

    # copy in the all_content_index.rst file
    all_content_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst'])
    if is_pulp3:
        copyfile('docs/all_content_index_pulp3.rst', all_content_index_rst)
    else:
        copyfile('docs/all_content_index.rst', all_content_index_rst)

    # make the _templates dir
    layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates'])
    os.makedirs(layout_dir)

    # copy in the layout.html file for analytics
    layout_html_path = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html'])
    copyfile('docs/layout.html', layout_html_path)

    # build the docs via the Pulp project itself
    print("Building the docs")
    docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs'])
    make_command = ['make', 'html']
    exit_code = subprocess.call(make_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while building the docs.')

    # rsync the docs to the root if it's GA of latest
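    # ('en' is excluded so the per-version trees under /en/ are not touched by --delete)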
    if build_type == 'ga' and x_y_version == LATEST:
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = ['rsync', '-avzh', '--delete', '--exclude', 'en',
                         local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError('An error occurred while pushing latest docs to OpenShift.')

    # rsync the nightly "master" docs to an unversioned "nightly" dir for
    # easy linking to in-development docs: /en/nightly/
    if build_type == 'nightly' and opts.release == 'master':
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, build_type)
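        # --rsync-path prefixes the remote rsync invocation with 'mkdir -p' so the
        # destination directory exists before the files are copied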
        path_option_arg = 'mkdir -p %sen/%s/ && rsync' % (SITE_ROOT, build_type)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg, '--delete',
                         local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError('An error occurred while pushing nightly docs to OpenShift.')

    # rsync the docs to OpenShift
    local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
    remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, x_y_version)
    if build_type != 'ga':
        remote_path_arg += build_type + '/'
        path_option_arg = 'mkdir -p %sen/%s/%s/ && rsync' % (SITE_ROOT, x_y_version, build_type)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg, '--delete',
                         local_path_arg, remote_path_arg]
    else:
        path_option_arg = 'mkdir -p %sen/%s/ && rsync' % (SITE_ROOT, x_y_version)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg, '--delete',
                         '--exclude', 'nightly', '--exclude', 'testing',
                         local_path_arg, remote_path_arg]
    exit_code = subprocess.call(rsync_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing docs to OpenShift.')

    # scp the robots.txt to OpenShift
    local_path_arg = 'docs/robots.txt'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing robots.txt to OpenShift.')

    # scp the testrubyserver.rb to OpenShift
    local_path_arg = 'docs/testrubyserver.rb'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing testrubyserver.rb to OpenShift.')

    # add symlink for latest
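    # 'ln -sfn' force-replaces the symlink so /en/latest always points at the LATEST x.y docs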
    symlink_cmd = [
        'ssh',
        '%s@%s' % (USERNAME, HOSTNAME),
        'ln -sfn %sen/%s %sen/latest' % (SITE_ROOT, LATEST, SITE_ROOT)
    ]
    exit_code = subprocess.call(symlink_cmd)
    if exit_code != 0:
        raise RuntimeError("An error occurred while creating the 'latest' symlink "
                           "testrubyserver.rb to OpenShift.")
Example #4
def main():
    # Parse the args
    parser = argparse.ArgumentParser()
    parser.add_argument("--release",
                        required=True,
                        help="Build the docs for a given release.")
    opts = parser.parse_args()
    is_pulp3 = opts.release.startswith('3')

    configuration = builder.load_config(opts.release)

    # Get platform build version
    repo_list = builder.components(configuration)
    try:
        pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0]
    except IndexError:
        raise RuntimeError("config file does not have an entry for 'pulp'")
    version = pulp_dict['version']

    if version.endswith('alpha'):
        build_type = 'nightly'
    elif version.endswith('beta'):
        build_type = 'testing'
    elif version.endswith('rc'):
        build_type = 'testing'
    else:
        build_type = 'ga'

    x_y_version = '.'.join(version.split('.')[:2])

    builder.ensure_dir(WORKING_DIR, clean=True)

    # use the version update scripts to check out git repos and ensure correct versions
    for component in repo_list:
        builder.clone_branch(component)

    # install any apidoc dependencies that exist for pulp 3 docs
    if is_pulp3:
        for repo, packages in APIDOC_PACKAGES.items():
            for package in packages:
                package_dir = os.path.join(WORKING_DIR, repo, package)
                if os.path.exists(package_dir):
                    subprocess.check_call(['python', 'setup.py', 'develop'],
                                          cwd=package_dir)

    plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins'])
    builder.ensure_dir(plugins_dir, clean=False)

    for component in repo_list:
        if component['name'] == 'pulp':
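            # the platform repo gets its version strings updated in place and is not
            # symlinked into the plugins/ directory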
            promote.update_versions(os.path.join(WORKING_DIR, 'pulp'),
                                    *version.split('-'))
            continue

        if component['name'] == 'pulp_deb':
            continue

        src = os.sep.join([WORKING_DIR, component['name'], 'docs'])
        dst = os.sep.join([plugins_dir, component['name']])
        os.symlink(src, dst)

    if is_pulp3:
        src_index_path = 'docs/pulp_index_pulp3.rst'
        src_all_content_path = 'docs/all_content_index_pulp3.rst'
    else:
        src_index_path = 'docs/pulp_index.rst'
        src_all_content_path = 'docs/all_content_index.rst'

        # copy in the plugin_index.rst file for Pulp 2 only
        # (currently Pulp 3 has its own plugins/index.rst, so there is no need to manage it
        # here, outside of the platform code)
        plugin_index_rst = os.sep.join([plugins_dir, 'index.rst'])
        copyfile('docs/plugin_index.rst', plugin_index_rst)

    # copy in the pulp_index.rst file
    pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst'])
    copyfile(src_index_path, pulp_index_rst)

    # copy in the all_content_index.rst file
    all_content_index_rst = os.sep.join(
        [WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst'])
    copyfile(src_all_content_path, all_content_index_rst)

    # make the _templates dir
    layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates'])
    os.makedirs(layout_dir)

    # copy in the layout.html file for analytics
    layout_html_path = os.sep.join(
        [WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html'])
    copyfile('docs/layout.html', layout_html_path)

    # build the docs via the Pulp project itself
    print("Building the docs")
    docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs'])
    make_command = ['make', 'html']
    exit_code = subprocess.call(make_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while building the docs.')

    # rsync the docs to the root if it's GA of latest
    if build_type == 'ga' and x_y_version == LATEST:
        local_path_arg = os.sep.join([docs_directory, '_build', 'html'
                                      ]) + os.sep
        remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = [
            'rsync', '-avzh', '--delete', '--exclude', 'en', local_path_arg,
            remote_path_arg
        ]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError(
                'An error occurred while pushing latest docs to OpenShift.')

    # rsync the nightly "master" docs to an unversioned "nightly" dir for
    # easy linking to in-development docs: /en/nightly/
    if build_type == 'nightly' and opts.release == 'master':
        local_path_arg = os.sep.join([docs_directory, '_build', 'html'
                                      ]) + os.sep
        remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT,
                                              build_type)
        path_option_arg = 'mkdir -p %sen/%s/ && rsync' % (SITE_ROOT,
                                                          build_type)
        rsync_command = [
            'rsync', '-avzh', '--rsync-path', path_option_arg, '--delete',
            local_path_arg, remote_path_arg
        ]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError(
                'An error occurred while pushing nightly docs to OpenShift.')

    # rsync the docs to OpenShift
    local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
    remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT,
                                          x_y_version)
    if build_type != 'ga':
        remote_path_arg += build_type + '/'
        path_option_arg = 'mkdir -p %sen/%s/%s/ && rsync' % (
            SITE_ROOT, x_y_version, build_type)
        rsync_command = [
            'rsync', '-avzh', '--rsync-path', path_option_arg, '--delete',
            local_path_arg, remote_path_arg
        ]
    else:
        path_option_arg = 'mkdir -p %sen/%s/ && rsync' % (SITE_ROOT,
                                                          x_y_version)
        rsync_command = [
            'rsync', '-avzh', '--rsync-path', path_option_arg, '--delete',
            '--exclude', 'nightly', '--exclude', 'testing', local_path_arg,
            remote_path_arg
        ]
    exit_code = subprocess.call(rsync_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError(
            'An error occurred while pushing docs to OpenShift.')

    # scp the robots.txt to OpenShift
    local_path_arg = 'docs/robots.txt'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError(
            'An error occurred while pushing robots.txt to OpenShift.')

    # scp the testrubyserver.rb to OpenShift
    local_path_arg = 'docs/testrubyserver.rb'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError(
            'An error occurred while pushing testrubyserver.rb to OpenShift.')

    # add symlink for latest
    symlink_cmd = [
        'ssh',
        '%s@%s' % (USERNAME, HOSTNAME),
        'ln -sfn %sen/%s %sen/latest' % (SITE_ROOT, LATEST, SITE_ROOT)
    ]
    exit_code = subprocess.call(symlink_cmd)
    if exit_code != 0:
        raise RuntimeError(
            "An error occurred while creating the 'latest' symlink "
            "testrubyserver.rb to OpenShift.")
Example #5
import argparse
import os
import sys

import yaml

from lib import builder
from lib import promote
from lib.builder import WORKSPACE, TITO_DIR, MASH_DIR, WORKING_DIR, CI_DIR


# Parse the args and run the program
parser = argparse.ArgumentParser()
parser.add_argument("config", help="The name of the config file to load from config/releases")
parser.add_argument("--push", action="store_true", default=False,
                    help="Don't push to GitHub")

opts = parser.parse_args()
push_to_github = opts.push
builder.ensure_dir(WORKING_DIR, clean=True)

def load_config(config_name):
    # Get the config
    config_file = os.path.join(os.path.dirname(__file__),
                               'config', 'releases', '%s.yaml' % config_name)
    if not os.path.exists(config_file):
        print "Error: %s not found. " % config_file
        sys.exit(1)
    with open(config_file, 'r') as config_handle:
        config = yaml.safe_load(config_handle)
    return config

def get_components(configuration):
    repos = configuration['repositories']
    for component in repos:
Example #6
import argparse
import os
import sys

import yaml

from lib import builder
from lib import promote
from lib.builder import WORKSPACE, TITO_DIR, MASH_DIR, WORKING_DIR, CI_DIR


# Parse the args and run the program
parser = argparse.ArgumentParser()
parser.add_argument("config", help="The name of the config file to load from config/releases")
parser.add_argument("--push", action="store_true", default=False,
                    help="Don't push to GitHub")

opts = parser.parse_args()
push_to_github = opts.push
builder.ensure_dir(WORKING_DIR, clean=True)

def load_config(config_name):
    # Get the config
    config_file = os.path.join(os.path.dirname(__file__),
                               'config', 'releases', '%s.yaml' % config_name)
    if not os.path.exists(config_file):
        print "Error: %s not found. " % config_file
        sys.exit(1)
    with open(config_file, 'r') as config_handle:
        config = yaml.safe_load(config_handle)
    return config

def get_components(configuration):
    repos = configuration['repositories']
    for component in repos:
Example #7
parser.add_argument("--release", action="store_true", default=False,
                    help="Perform a release build. A scratch build will be performed first to "
                         "validate the spec files. ")
parser.add_argument("--disable-push", action="store_true", default=False,
                    help="Don't push to fedorapeople")
parser.add_argument("--rpmsig", help="The rpm signature hash to use when downloading RPMs. "
                                     "Using this flag will cause a failure if any component "
                                     "has not been built already.")
parser.add_argument("--show-versions", action="store_true", default=False,
                    help="Exit after printing out the required versions of each package.")

opts = parser.parse_args()
release_build = opts.release
rpm_signature = opts.rpmsig
# clean the TITO & MASH_DIR
builder.ensure_dir(TITO_DIR, clean=True)
builder.ensure_dir(MASH_DIR, clean=True)
builder.ensure_dir(WORKING_DIR, clean=True)


def load_config(config_name):
    # Get the config
    config_file = os.path.join(os.path.dirname(__file__),
                               'config', 'releases', '%s.yaml' % config_name)
    if not os.path.exists(config_file):
        print "Error: %s not found. " % config_file
        sys.exit(1)
    with open(config_file, 'r') as config_handle:
        config = yaml.safe_load(config_handle)
    return config
Example #8
def main():
    # Parse the args
    parser = argparse.ArgumentParser()
    parser.add_argument("--release", required=True, help="Build the docs for a given release.")
    opts = parser.parse_args()

    configuration = load_config(opts.release)

    # Get platform build version
    repo_list = configuration["repositories"]
    try:
        pulp_dict = list(filter(lambda x: x["name"] == "pulp", repo_list))[0]
    except IndexError:
        raise RuntimeError("config file does not have an entry for 'pulp'")
    version = pulp_dict["version"]

    if version.endswith("alpha"):
        build_type = "nightly"
    elif version.endswith("beta"):
        build_type = "testing"
    elif version.endswith("rc"):
        build_type = "testing"
    else:
        build_type = "ga"

    x_y_version = ".".join(version.split(".")[:2])

    builder.ensure_dir(WORKING_DIR, clean=True)

    print "Getting git repos"
    for component in get_components(configuration):
        # clone the repos
        branch_name = component["git_branch"]
        print "Cloning from github: %s" % component.get("git_url")
        print "Switching to branch %s" % branch_name
        clone_command = ["git", "clone", component.get("git_url"), "--branch", branch_name]
        exit_code = subprocess.call(clone_command, cwd=WORKING_DIR)
        if exit_code != 0:
            raise RuntimeError("An error occurred while cloning the repo.")

    plugins_dir = os.sep.join([WORKING_DIR, "pulp", "docs", "plugins"])
    builder.ensure_dir(plugins_dir)

    for component in get_components(configuration):
        if component["name"] == "pulp":
            continue

        src = os.sep.join([WORKING_DIR, component["name"], "docs"])
        dst = os.sep.join([plugins_dir, component["name"]])
        os.symlink(src, dst)

    # copy in the pulp_index.rst file
    src_path = "docs/pulp_index.rst"
    pulp_index_rst = os.sep.join([WORKING_DIR, "pulp", "docs", "index.rst"])
    copyfile(src_path, pulp_index_rst)

    # copy in the plugin_index.rst file
    plugin_index_rst = os.sep.join([plugins_dir, "index.rst"])
    copyfile("docs/plugin_index.rst", plugin_index_rst)

    # copy in the all_content_index.rst file
    all_content_index_rst = os.sep.join([WORKING_DIR, "pulp", "docs", "all_content_index.rst"])
    copyfile("docs/all_content_index.rst", all_content_index_rst)

    # make the _templates dir
    layout_dir = os.sep.join([WORKING_DIR, "pulp", "docs", "_templates"])
    os.makedirs(layout_dir)

    # copy in the layout.html file for analytics
    layout_html_path = os.sep.join([WORKING_DIR, "pulp", "docs", "_templates", "layout.html"])
    copyfile("docs/layout.html", layout_html_path)

    # build the docs via the Pulp project itself
    print "Building the docs"
    docs_directory = os.sep.join([WORKING_DIR, "pulp", "docs"])
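    # SPHINXOPTS=-Wn makes sphinx-build treat warnings as errors (-W) and run in nit-picky mode (-n)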
    make_command = ["make", "html", "SPHINXOPTS=-Wn"]
    exit_code = subprocess.call(make_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError("An error occurred while building the docs.")

    # rsync the docs to the root if it's GA of latest
    if build_type == "ga" and x_y_version == LATEST:
        local_path_arg = os.sep.join([docs_directory, "_build", "html"]) + os.sep
        remote_path_arg = "%s@%s:%s" % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = ["rsync", "-avzh", "--delete", "--exclude", "en", local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError("An error occurred while pushing docs to OpenShift.")

    # rsync the docs to OpenShift
    local_path_arg = os.sep.join([docs_directory, "_build", "html"]) + os.sep
    remote_path_arg = "%s@%s:%sen/%s/" % (USERNAME, HOSTNAME, SITE_ROOT, x_y_version)
    if build_type != "ga":
        remote_path_arg += build_type + "/"
        path_option_arg = "mkdir -p %sen/%s/%s/ && rsync" % (SITE_ROOT, x_y_version, build_type)
        rsync_command = ["rsync", "-avzh", "--rsync-path", path_option_arg, "--delete", local_path_arg, remote_path_arg]
    else:
        path_option_arg = "mkdir -p %sen/%s/ && rsync" % (SITE_ROOT, x_y_version)
        rsync_command = [
            "rsync",
            "-avzh",
            "--rsync-path",
            path_option_arg,
            "--delete",
            "--exclude",
            "nightly",
            "--exclude",
            "testing",
            local_path_arg,
            remote_path_arg,
        ]
    exit_code = subprocess.call(rsync_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError("An error occurred while pushing docs to OpenShift.")

    # scp the robots.txt to OpenShift
    local_path_arg = "docs/robots.txt"
    remote_path_arg = "%s@%s:%s" % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ["scp", local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError("An error occurred while pushing robots.txt to OpenShift.")

    # scp the testrubyserver.rb to OpenShift
    local_path_arg = "docs/testrubyserver.rb"
    remote_path_arg = "%s@%s:%s" % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ["scp", local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError("An error occurred while pushing testrubyserver.rb to OpenShift.")

    # add symlink for latest
    symlink_cmd = [
        "ssh",
        "%s@%s" % (USERNAME, HOSTNAME),
        "ln -sfn %sen/%s %sen/latest" % (SITE_ROOT, LATEST, SITE_ROOT),
    ]
    exit_code = subprocess.call(symlink_cmd)
    if exit_code != 0:
        raise RuntimeError("An error occurred while creating the 'latest' symlink testrubyserver.rb to OpenShift.")
Example #9
def main():
    # Parse the args
    parser = argparse.ArgumentParser()
    parser.add_argument("--release", required=True, help="Build the docs for a given release.")
    opts = parser.parse_args()

    configuration = load_config(opts.release)

    # Get platform build version
    repo_list = configuration['repositories']
    try:
        pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0]
    except IndexError:
        raise RuntimeError("config file does not have an entry for 'pulp'")
    version = pulp_dict['version']

    if version.endswith('alpha'):
        build_type = 'nightly'
    elif version.endswith('beta'):
        build_type = 'testing'
    elif version.endswith('rc'):
        build_type = 'testing'
    else:
        build_type = 'ga'

    x_y_version = '.'.join(version.split('.')[:2])

    builder.ensure_dir(WORKING_DIR, clean=True)

    print "Getting git repos"
    for component in get_components(configuration):
        #clone the repos
        branch_name = component['git_branch']
        print "Cloning from github: %s" % component.get('git_url')
        print "Switching to branch %s" % branch_name
        clone_command = ['git', 'clone', component.get('git_url'), '--branch', branch_name]
        exit_code = subprocess.call(clone_command, cwd=WORKING_DIR)
        if exit_code != 0:
            raise RuntimeError('An error occurred while cloning the repo.')

    plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins'])
    builder.ensure_dir(plugins_dir)

    for component in get_components(configuration):
        if component['name'] == 'pulp':
            continue

        src = os.sep.join([WORKING_DIR, component['name'], 'docs'])
        dst = os.sep.join([plugins_dir, component['name']])
        os.symlink(src, dst)

    # copy in the pulp_index.rst file
    if x_y_version == '2.8' and build_type == 'ga':
        # This is a temporary codepath and should be removed when 2.8.5 is GA.
        # On 2.8.5+ the installation index page was moved and the pulp_index_2_8.rst
        # is the version that is compatible with 2.8.4 and earlier. Once 2.8.5 is GA
        # this if statement branch and the pulp_index_2_8.rst file should be removed.
        src_path = 'docs/pulp_index_2_8.rst'
    else:
        src_path = 'docs/pulp_index.rst'
    pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst'])
    copyfile(src_path, pulp_index_rst)

    # copy in the plugin_index.rst file
    plugin_index_rst = os.sep.join([plugins_dir, 'index.rst'])
    copyfile('docs/plugin_index.rst', plugin_index_rst)

    # copy in the all_content_index.rst file
    all_content_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst'])
    copyfile('docs/all_content_index.rst', all_content_index_rst)

    # make the _templates dir
    layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates'])
    os.makedirs(layout_dir)

    # copy in the layout.html file for analytics
    layout_html_path = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html'])
    copyfile('docs/layout.html', layout_html_path)

    # build the docs via the Pulp project itself
    print "Building the docs"
    docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs'])
    make_command = ['make', 'html', 'SPHINXOPTS=-Wn']
    exit_code = subprocess.call(make_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while building the docs.')

    # rsync the docs to the root if it's GA of latest
    if build_type == 'ga' and x_y_version == LATEST:
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = ['rsync', '-avzh', '--delete', '--exclude', 'en', local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError('An error occurred while pushing docs to OpenShift.')

    # rsync the docs to OpenShift
    local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
    remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, x_y_version)
    if build_type != 'ga':
        remote_path_arg += build_type + '/'
        path_option_arg = 'mkdir -p %sen/%s/%s/ && rsync' % (SITE_ROOT, x_y_version, build_type)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg, '--delete', local_path_arg, remote_path_arg]
    else:
        path_option_arg = 'mkdir -p %sen/%s/ && rsync' % (SITE_ROOT, x_y_version)
        rsync_command = ['rsync', '-avzh', '--rsync-path', path_option_arg, '--delete', '--exclude', 'nightly', '--exclude', 'testing', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(rsync_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing docs to OpenShift.')

    # scp the robots.txt to OpenShift
    local_path_arg = 'docs/robots.txt'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing robots.txt to OpenShift.')

    # scp the testrubyserver.rb to OpenShift
    local_path_arg = 'docs/testrubyserver.rb'
    remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
    scp_command = ['scp', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(scp_command)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing testrubyserver.rb to OpenShift.')

    # add symlink for latest
    symlink_cmd = [
        'ssh',
        '%s@%s' % (USERNAME, HOSTNAME),
        'ln -sfn %sen/%s %sen/latest' % (SITE_ROOT, LATEST, SITE_ROOT)
    ]
    exit_code = subprocess.call(symlink_cmd)
    if exit_code != 0:
        raise RuntimeError("An error occurred while creating the 'latest' symlink testrubyserver.rb to OpenShift.")
Example #10
def main():
    # Parse the args
    parser = argparse.ArgumentParser()
    parser.add_argument("--release", required=True, help="Build the docs for a given release.")
    opts = parser.parse_args()
    is_pulp3 = opts.release.startswith('3')

    configuration = builder.load_config(opts.release)

    # Get platform build version
    repo_list = builder.components(configuration)
    try:
        pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0]
    except IndexError:
        raise RuntimeError("config file does not have an entry for 'pulp'")
    version = pulp_dict['version']

    if version.endswith('alpha') or is_pulp3:
        build_type = 'nightly'
    elif version.endswith('beta'):
        build_type = 'testing'
    elif version.endswith('rc'):
        build_type = 'testing'
    else:
        build_type = 'ga'

    x_y_version = '.'.join(version.split('.')[:2])

    builder.ensure_dir(WORKING_DIR, clean=True)

    # use the version update scripts to check out git repos and ensure correct versions
    for component in repo_list:
        builder.clone_branch(component)

    # install any apidoc dependencies that exist for pulp 3 docs
    if is_pulp3:
        for repo, packages in APIDOC_PACKAGES.items():
            for package in packages:
                package_dir = os.path.join(WORKING_DIR, repo, package)
                if os.path.exists(package_dir):
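                    # install the checked-out package in editable mode so it is
                    # importable while the API docs are generated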
                    subprocess.check_call(['pip', 'install', '-e', '.'], cwd=package_dir)

    plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins'])
    builder.ensure_dir(plugins_dir, clean=False)

    for component in repo_list:
        if component['name'] == 'pulp':
            promote.update_versions(os.path.join(WORKING_DIR, 'pulp'), *version.split('-'))
            continue

        if component['name'] == 'pulp_deb':
            continue

        src = os.sep.join([WORKING_DIR, component['name'], 'docs'])
        dst = os.sep.join([plugins_dir, component['name']])
        os.symlink(src, dst)

    if is_pulp3:
        src_index_path = 'docs/pulp_index_pulp3.rst'
        src_all_content_path = 'docs/all_content_index_pulp3.rst'
    else:
        src_index_path = 'docs/pulp_index.rst'
        src_all_content_path = 'docs/all_content_index.rst'

        # copy in the plugin_index.rst file for Pulp 2 only
        # (currently Pulp 3 has its own plugins/index.rst, so there is no need to manage it
        # here, outside of the platform code)
        plugin_index_rst = os.sep.join([plugins_dir, 'index.rst'])
        copyfile('docs/plugin_index.rst', plugin_index_rst)

    # copy in the pulp_index.rst file
    pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst'])
    copyfile(src_index_path, pulp_index_rst)

    # copy in the all_content_index.rst file
    all_content_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst'])
    copyfile(src_all_content_path, all_content_index_rst)

    # make the _templates dir
    layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates'])
    os.makedirs(layout_dir)

    # copy in the layout.html file for analytics
    layout_html_path = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html'])
    copyfile('docs/layout.html', layout_html_path)

    # build the docs via the Pulp project itself
    print("Building the docs")
    docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs'])

    # Get the latest api.yaml file to build the rest api docs
    # (the URL is masked in this snippet and the docs-build code that followed is not shown;
    #  reading the response body is an assumption)
    if is_pulp3:
        with urllib.request.urlopen("http://*****:*****") as response:
            api_yaml = response.read()

    # rsync the docs to the root if it's GA of latest
    if build_type == 'ga' and x_y_version == LATEST:
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = ['rsync', '-avzh', '--delete', '--exclude', 'en',
                         '--omit-dir-times', local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError('An error occurred while pushing latest docs.')

        # Also publish to the /en/latest/ directory
        make_directory_with_rsync(['en', 'latest'])
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%sen/latest/' % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = ['rsync', '-avzh', '--delete', local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError("An error occurred while pushing the 'latest' directory.")

    # rsync the nightly "2-master" docs to an unversioned "nightly" dir for
    # easy linking to in-development docs: /en/nightly/
    if build_type == 'nightly' and opts.release == '2-master':
        local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
        remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, build_type)
        make_directory_with_rsync(['en', build_type])
        rsync_command = ['rsync', '-avzh', '--delete', local_path_arg, remote_path_arg]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError('An error occurred while pushing nightly docs.')

    # rsync the docs
    local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
    remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT, x_y_version)
    if build_type != 'ga':
        remote_path_arg += build_type + '/'

        make_directory_with_rsync(['en', x_y_version, build_type])
        rsync_command = ['rsync', '-avzh', '--delete', local_path_arg, remote_path_arg]
    else:
        make_directory_with_rsync(['en', x_y_version])
        rsync_command = ['rsync', '-avzh', '--delete', '--exclude', 'nightly', '--exclude',
                         'testing', local_path_arg, remote_path_arg]
    exit_code = subprocess.call(rsync_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing docs.')
Example #11
def main():
    # Parse the args
    parser = argparse.ArgumentParser()
    parser.add_argument("--release",
                        required=True,
                        help="Build the docs for a given release.")
    opts = parser.parse_args()
    is_pulp3 = opts.release.startswith('3')

    configuration = builder.load_config(opts.release)

    # Get platform build version
    repo_list = builder.components(configuration)
    try:
        pulp_dict = list(filter(lambda x: x['name'] == 'pulp', repo_list))[0]
    except IndexError:
        raise RuntimeError("config file does not have an entry for 'pulp'")
    version = pulp_dict['version']

    if version.endswith('alpha') or is_pulp3:
        build_type = 'nightly'
    elif version.endswith('beta'):
        build_type = 'testing'
    elif version.endswith('rc'):
        build_type = 'testing'
    else:
        build_type = 'ga'

    x_y_version = '.'.join(version.split('.')[:2])

    builder.ensure_dir(WORKING_DIR, clean=True)

    # use the version update scripts to check out git repos and ensure correct versions
    for component in repo_list:
        builder.clone_branch(component)

    # install any apidoc dependencies that exist for pulp 3 docs
    if is_pulp3:
        for repo, packages in APIDOC_PACKAGES.items():
            for package in packages:
                package_dir = os.path.join(WORKING_DIR, repo, package)
                if os.path.exists(package_dir):
                    subprocess.check_call(['pip', 'install', '-e', '.'],
                                          cwd=package_dir)

    plugins_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'plugins'])
    builder.ensure_dir(plugins_dir, clean=False)

    for component in repo_list:
        if component['name'] == 'pulp':
            promote.update_versions(os.path.join(WORKING_DIR, 'pulp'),
                                    *version.split('-'))
            continue

        if component['name'] == 'pulp_deb':
            continue

        src = os.sep.join([WORKING_DIR, component['name'], 'docs'])
        dst = os.sep.join([plugins_dir, component['name']])
        os.symlink(src, dst)

    if is_pulp3:
        src_index_path = 'docs/pulp_index_pulp3.rst'
        src_all_content_path = 'docs/all_content_index_pulp3.rst'
    else:
        src_index_path = 'docs/pulp_index.rst'
        src_all_content_path = 'docs/all_content_index.rst'

        # copy in the plugin_index.rst file for Pulp 2 only
        # (currently Pulp 3 has its own plugins/index.rst, so there is no need to manage it
        # here, outside of the platform code)
        plugin_index_rst = os.sep.join([plugins_dir, 'index.rst'])
        copyfile('docs/plugin_index.rst', plugin_index_rst)

    # copy in the pulp_index.rst file
    pulp_index_rst = os.sep.join([WORKING_DIR, 'pulp', 'docs', 'index.rst'])
    copyfile(src_index_path, pulp_index_rst)

    # copy in the all_content_index.rst file
    all_content_index_rst = os.sep.join(
        [WORKING_DIR, 'pulp', 'docs', 'all_content_index.rst'])
    copyfile(src_all_content_path, all_content_index_rst)

    # make the _templates dir
    layout_dir = os.sep.join([WORKING_DIR, 'pulp', 'docs', '_templates'])
    os.makedirs(layout_dir)

    # copy in the layout.html file for analytics
    layout_html_path = os.sep.join(
        [WORKING_DIR, 'pulp', 'docs', '_templates', 'layout.html'])
    copyfile('docs/layout.html', layout_html_path)

    # build the docs via the Pulp project itself
    print("Building the docs")
    docs_directory = os.sep.join([WORKING_DIR, 'pulp', 'docs'])

    # Get the latest api.yaml file to build the rest api docs
    # (the URL is masked in this snippet and the docs-build code that followed is not shown;
    #  reading the response body is an assumption)
    if is_pulp3:
        with urllib.request.urlopen("http://*****:*****") as response:
            api_yaml = response.read()

    # rsync the docs to the root if it's GA of latest
    if build_type == 'ga' and x_y_version == LATEST:
        local_path_arg = os.sep.join([docs_directory, '_build', 'html'
                                      ]) + os.sep
        remote_path_arg = '%s@%s:%s' % (USERNAME, HOSTNAME, SITE_ROOT)
        rsync_command = [
            'rsync', '-avzh', '--delete', '--exclude', 'en',
            '--omit-dir-times', local_path_arg, remote_path_arg
        ]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError('An error occurred while pushing latest docs.')

        # Also publish to the /en/latest/ directory
        make_directory_with_rsync(['en', 'latest'])
        local_path_arg = os.sep.join([docs_directory, '_build', 'html'
                                      ]) + os.sep
        remote_path_arg = '%s@%s:%sen/latest/' % (USERNAME, HOSTNAME,
                                                  SITE_ROOT)
        rsync_command = [
            'rsync', '-avzh', '--delete', local_path_arg, remote_path_arg
        ]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError(
                "An error occurred while pushing the 'latest' directory.")

    # rsync the nightly "2-master" docs to an unversioned "nightly" dir for
    # easy linking to in-development docs: /en/nightly/
    if build_type == 'nightly' and opts.release == '2-master':
        local_path_arg = os.sep.join([docs_directory, '_build', 'html'
                                      ]) + os.sep
        remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT,
                                              build_type)
        make_directory_with_rsync(['en', build_type])
        rsync_command = [
            'rsync', '-avzh', '--delete', local_path_arg, remote_path_arg
        ]
        exit_code = subprocess.call(rsync_command, cwd=docs_directory)
        if exit_code != 0:
            raise RuntimeError('An error occurred while pushing nightly docs.')

    # rsync the docs
    local_path_arg = os.sep.join([docs_directory, '_build', 'html']) + os.sep
    remote_path_arg = '%s@%s:%sen/%s/' % (USERNAME, HOSTNAME, SITE_ROOT,
                                          x_y_version)
    if build_type != 'ga':
        remote_path_arg += build_type + '/'

        make_directory_with_rsync(['en', x_y_version, build_type])
        rsync_command = [
            'rsync', '-avzh', '--delete', local_path_arg, remote_path_arg
        ]
    else:
        make_directory_with_rsync(['en', x_y_version])
        rsync_command = [
            'rsync', '-avzh', '--delete', '--exclude', 'nightly', '--exclude',
            'testing', local_path_arg, remote_path_arg
        ]
    exit_code = subprocess.call(rsync_command, cwd=docs_directory)
    if exit_code != 0:
        raise RuntimeError('An error occurred while pushing docs.')