Example No. 1
def create_job(os_name, job_name, template_file, additional_dict):
    # `data`, `os_configs`, `jenkins`, and `jenkins_kwargs` are closure
    # variables from the enclosing scope; this is a nested helper.
    job_data = dict(data)
    job_data['os_name'] = os_name
    job_data.update(os_configs[os_name])
    job_data.update(additional_dict)
    job_config = expand_template(template_file, job_data)
    configure_job(jenkins, job_name, job_config, **jenkins_kwargs)
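Every example on this page funnels a data dict into expand_template. The real helper lives in ros_buildfarm.templates; purely for orientation, a minimal sketch of such a function, assuming the empy (`em`) templating library that the .em template names imply, could look like the following. This is not the project's actual implementation, which also handles template lookup and hooks.

# Minimal sketch only -- assumes the `em` (empy) package is installed.
from io import StringIO

import em


def expand_template(template_path, data):
    output = StringIO()
    # expand the .em template with `data` as its global namespace
    interpreter = em.Interpreter(output=output, globals=dict(data))
    try:
        with open(template_path) as h:
            interpreter.file(h)
        return output.getvalue()
    finally:
        interpreter.shutdown()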
Example No. 2
def get_job_config(args, config):
    template_name = 'misc/rosdistro_cache_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    reconfigure_job_names = []
    build_files = get_release_build_files(config, args.rosdistro_name)
    for release_build_name in sorted(build_files.keys()):
        group_name = get_release_job_prefix(
            args.rosdistro_name, release_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_job_names.append(job_name)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,

        'repository_args': repository_args,

        'reconfigure_job_names': reconfigure_job_names,

        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],

        'git_ssh_credential_id': config.git_ssh_credential_id,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
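Several of these helpers start from copy.deepcopy(args.__dict__) before layering template-specific keys on top. The copy keeps the parsed argparse namespace untouched; a small self-contained illustration (the attribute value is invented for the demo):

import argparse
import copy

args = argparse.Namespace(rosdistro_name='noetic')  # hypothetical namespace
job_data = copy.deepcopy(vars(args))  # vars(args) is args.__dict__
job_data['rosdistro_name'] = 'rolling'
assert args.rosdistro_name == 'noetic'  # parsed arguments stay untouched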
Example No. 3
def get_job_config(args, config):
    template_name = 'status/repos_status_page_job.xml.em'

    targets_by_repo = get_targets_by_repo(config, args.rosdistro_name)
    status_pages = {}
    for name, repo_urls in config.status_page_repositories.items():
        data = get_status_page_data(repo_urls, targets_by_repo)
        if data is not None:
            status_pages[name] = data
        else:
            print(
                ("Skipping repos status page '%s' since no repository URLs " +
                 'match any of the release build files') % name)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository':
        get_repository(),
        'status_pages':
        status_pages,
        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 4
def _get_job_config(
    args, config, recipients, template_name, additional_data=None
):
    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = dict(additional_data) if additional_data is not None else {}
    job_data.update({
        'script_generating_key_files': script_generating_key_files,

        'config_url': args.config_url,
        'rosdistro_name': args.rosdistro_name,
        'release_build_name': args.release_build_name,
        'repository_args': repository_args,

        'ros_buildfarm_repository': get_repository(),

        'credentials_src': os.path.join(
            '~', os.path.dirname(get_relative_credential_path())),
        'credentials_dst': os.path.join(
            '/home/buildfarm',
            os.path.dirname(get_relative_credential_path())),

        'recipients': recipients,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 5
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="Run the 'sourcerpm' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_sourcepkg_dir(parser)
    args = parser.parse_args(argv)

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls':
        args.distribution_repository_urls,
        'distribution_repository_keys':
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'target_repository':
        None,
        'uid':
        get_user_id(),
    })
    create_dockerfile('release/rpm/sourcepkg_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    with open(os.path.join(args.dockerfile_dir, 'mock_config.cfg'),
              'w') as mock_cfg:
        mock_cfg.write(expand_template('release/rpm/mock_config.cfg.em', data))
Example No. 6
def _get_doc_metadata_job_config(
        config, config_url, rosdistro_name, doc_build_name, build_file):
    template_name = 'doc/doc_metadata_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = {
        'job_priority': build_file.jenkins_job_priority,
        'node_label': get_node_label(
            build_file.jenkins_job_label,
            get_default_node_label('%s_%s_%s' % (
                rosdistro_name, 'doc', doc_build_name))),

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'config_url': config_url,
        'rosdistro_name': rosdistro_name,
        'doc_build_name': doc_build_name,
        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,

        'timeout_minutes': build_file.jenkins_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 7
def _get_job_config(args,
                    config,
                    recipients,
                    template_name,
                    additional_data=None):
    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = dict(additional_data) if additional_data is not None else {}
    job_data.update({
        'script_generating_key_files':
        script_generating_key_files,
        'config_url':
        args.config_url,
        'rosdistro_name':
        args.rosdistro_name,
        'release_build_name':
        args.release_build_name,
        'repository_args':
        repository_args,
        'ros_buildfarm_repository':
        get_repository(),
        'credentials_src':
        os.path.join('~', os.path.dirname(get_relative_credential_path())),
        'credentials_dst':
        os.path.join('/home/buildfarm',
                     os.path.dirname(get_relative_credential_path())),
        'recipients':
        recipients,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 8
def get_reconfigure_jobs_job_config(args, config, build_file):
    template_name = 'doc/doc_reconfigure-jobs_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = {
        'script_generating_key_files': script_generating_key_files,

        'config_url': args.config_url,
        'rosdistro_name': args.rosdistro_name,
        'doc_build_name': args.doc_build_name,
        'repository_args': repository_args,

        'ros_buildfarm_repository': get_repository(),

        'credentials_src': os.path.join(
            '~', os.path.dirname(get_relative_credential_path())),
        'credentials_dst': os.path.join(
            '/home/buildfarm',
            os.path.dirname(get_relative_credential_path())),

        'recipients': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 9
def _get_sync_packages_to_main_job_config(rosdistro_name, build_file,
                                          package_format):
    sync_targets = set()
    for os_name, os_versions in build_file.targets.items():
        for os_code_name, os_arches in os_versions.items():
            for os_arch in os_arches.keys():
                sync_targets.add((os_name, os_code_name, os_arch))

    template_name = 'release/%s/sync_packages_to_main_job.xml.em' % package_format
    job_data = {
        'ros_buildfarm_repository':
        get_repository(),
        'rosdistro_name':
        rosdistro_name,
        'deb_sync_to_main_job_name':
        get_sync_packages_to_main_job_name(rosdistro_name, 'deb'),
        'sync_targets':
        sync_targets,
        'notify_emails':
        build_file.notify_emails,
        'credential_id':
        build_file.upload_credential_id,
        'dest_credential_id':
        build_file.upload_destination_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 10
def get_reconfigure_jobs_job_config(args, config, build_file):
    template_name = 'doc/doc_reconfigure-jobs_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = {
        'script_generating_key_files':
        script_generating_key_files,
        'config_url':
        args.config_url,
        'rosdistro_name':
        args.rosdistro_name,
        'doc_build_name':
        args.doc_build_name,
        'repository_args':
        repository_args,
        'ros_buildfarm_repository':
        get_repository(),
        'credentials_src':
        os.path.join('~', os.path.dirname(get_relative_credential_path())),
        'credentials_dst':
        os.path.join('/home/buildfarm',
                     os.path.dirname(get_relative_credential_path())),
        'recipients':
        build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 11
def get_job_config(notification_emails):
    template_name = 'misc/check_agents_job.xml.em'
    job_data = {
        'notification_emails': notification_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 12
def _get_job_config(args, config, build_file, template_name):
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    apt_mirror_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config, build_file)

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'script_generating_key_files': script_generating_key_files,

        'config_url': args.config_url,
        'rosdistro_name': args.rosdistro_name,
        'release_build_name': args.release_build_name,
        'apt_mirror_args': apt_mirror_args,

        'ros_buildfarm_url': get_repository_url('.'),

        'credentials_src': os.path.join(
            '/var/lib/jenkins',
            os.path.dirname(get_relative_credential_path())),
        'credentials_dst': os.path.join(
            '/home/buildfarm',
            os.path.dirname(get_relative_credential_path())),

        'recipients': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
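The now_str pattern above (and in several later examples) uses datetime.utcnow(), which returns a naive datetime. An equivalent timezone-aware spelling that produces the same 'Z'-suffixed string would be:

from datetime import datetime, timezone

now_str = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')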
Example No. 13
def get_job_config(args, config):
    template_name = 'status/release_status_page_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'template_name': template_name,
        'now_str': now_str,

        'ros_buildfarm_url': get_repository_url('.'),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,

        'repository_args': repository_args,

        'notification_emails': config.notify_emails,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 14
def _get_doc_independent_job_config(
        config, config_url, doc_build_name, build_file):
    template_name = 'doc/doc_independent_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = {
        'job_priority': build_file.jenkins_job_priority,
        'node_label': build_file.jenkins_job_label,

        'ros_buildfarm_repository': get_repository(),

        'doc_repositories': build_file.doc_repositories,

        'script_generating_key_files': script_generating_key_files,

        'config_url': config_url,
        'doc_build_name': doc_build_name,
        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,

        'timeout_minutes': build_file.jenkins_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 15
def _get_doc_independent_job_config(config, config_url, doc_build_name,
                                    build_file):
    template_name = 'doc/doc_independent_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = {
        'job_priority': build_file.jenkins_job_priority,
        'node_label': get_node_label(build_file.jenkins_job_label),
        'ros_buildfarm_repository': get_repository(),
        'doc_repositories': build_file.doc_repositories,
        'script_generating_key_files': script_generating_key_files,
        'config_url': config_url,
        'doc_build_name': doc_build_name,
        'repository_args': repository_args,
        'upload_user': build_file.upload_user,
        'upload_host': build_file.upload_host,
        'upload_root': build_file.upload_root,
        'notify_emails': build_file.notify_emails,
        'timeout_minutes': build_file.jenkins_job_timeout,
        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 16
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'upload_main' and 'upload_testing' jobs.")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    template_name = 'release/trigger_upload_repo_job.xml.em'

    config = get_index(args.config_url)
    jenkins = connect(config.jenkins_url)

    for repo in ['main', 'testing']:
        job_name = 'upload_%s' % repo
        block_when_upstream_building = 'true'
        if repo == 'testing':
            block_when_upstream_building = 'false'
        job_config = expand_template(
            template_name, {
                'block_when_upstream_building': block_when_upstream_building,
                'repo': repo,
                'upstream_job_names': get_upstream_job_names(config, repo),
                'recipients': config.notify_emails
            })

        configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
Example No. 17
def _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file):
    template_name = 'release/sync_packages_to_testing_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config, build_file)

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'ros_buildfarm_url': get_repository_url('.'),

        'script_generating_key_files': script_generating_key_files,

        'config_url': config_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 18
def _get_binarydeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name, arch,
        release_repo_spec, pkg_name, append_timestamp,
        repo_name, dist_cache=None, upstream_job_names=None):
    template_name = 'release/binarydeb_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    apt_mirror_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config, build_file)

    binarydeb_files = [
        'binarydeb/*.changes',
        'binarydeb/*.deb',
    ]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'job_priority': build_file.jenkins_job_priority,

        'upstream_projects': upstream_job_names,

        'release_repo_spec': release_repo_spec,

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'apt_mirror_args': apt_mirror_args,

        'append_timestamp': append_timestamp,

        'binarydeb_files': binarydeb_files,

        'import_package_job_name': get_import_package_job_name(
            rosdistro_name, release_build_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'notify_emails': set(config.notify_emails + build_file.notify_emails),
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,

        'timeout_minutes': build_file.jenkins_binarydeb_job_timeout,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 19
def get_job_config(ros_distro):
    template_name = 'misc/check_failing_jobs.xml.em'

    job_data = {
        'ros_distro': ros_distro,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 20
def _get_import_package_job_config(build_file):
    template_name = 'release/import_package_job.xml.em'
    job_data = {
        'abi_incompatibility_assumed': build_file.abi_incompatibility_assumed,
        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 21
def _get_sync_packages_to_main_job_config(rosdistro_name, build_file):
    template_name = 'release/sync_packages_to_main_job.xml.em'
    job_data = {
        'rosdistro_name': rosdistro_name,
        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 22
def get_job_config(args, config):
    template_name = 'status/bloom_status_job.xml.em'
    job_data = {
        'ros_buildfarm_repository': get_repository(),
        'rosdistro_index_url': config.rosdistro_index_url,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 23
def _get_import_package_job_config(build_file):
    template_name = 'release/import_package_job.xml.em'
    job_data = {
        'abi_incompatibility_assumed': build_file.abi_incompatibility_assumed,
        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 24
def _get_doc_job_config(
        config, config_url, rosdistro_name, doc_build_name,
        build_file, os_name, os_code_name, arch, doc_repo_spec,
        repo_name, dist_cache=None, is_disabled=False):
    template_name = 'doc/doc_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    maintainer_emails = set([])
    if build_file.notify_maintainers and dist_cache and repo_name:
        # add maintainers listed in latest release to recipients
        repo = dist_cache.distribution_file.repositories[repo_name]
        if repo.release_repository:
            for pkg_name in repo.release_repository.package_names:
                if pkg_name not in dist_cache.release_package_xmls:
                    continue
                pkg_xml = dist_cache.release_package_xmls[pkg_name]
                pkg = parse_package_string(pkg_xml)
                for m in pkg.maintainers:
                    maintainer_emails.add(m.email)

    job_data = {
        'github_url': get_github_project_url(doc_repo_spec.url),

        'job_priority': build_file.jenkins_job_priority,
        'node_label': build_file.jenkins_job_label,

        'doc_repo_spec': doc_repo_spec,

        'disabled': is_disabled,

        'github_orgunit': git_github_orgunit(doc_repo_spec.url),

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'config_url': config_url,
        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'doc_build_name': doc_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,

        'timeout_minutes': build_file.jenkins_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 25
def _get_sourcedeb_job_config(
        config_url, rosdistro_name, release_build_name,
        config, build_file, os_name, os_code_name,
        pkg_name, repo_name, release_repository, dist_cache=None,
        is_disabled=False):
    template_name = 'release/sourcedeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    sourcedeb_files = [
        'sourcedeb/*.debian.tar.gz',
        'sourcedeb/*.debian.tar.xz',
        'sourcedeb/*.dsc',
        'sourcedeb/*.orig.tar.gz',
        'sourcedeb/*_source.changes',
    ]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),

        'job_priority': build_file.jenkins_source_job_priority,
        'node_label': build_file.jenkins_source_job_label,

        'disabled': is_disabled,

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'repository_args': repository_args,

        'sourcedeb_files': sourcedeb_files,

        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(
            rosdistro_name, pkg_name),

        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,

        'timeout_minutes': build_file.jenkins_source_job_timeout,

        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 26
def create_dockerlibrary(template_name, data, dockerlibrary_path, verbose=False):
    data['template_name'] = template_name
    data['wrapper_scripts'] = get_wrapper_scripts()
    content = expand_template(template_name, data)
    if verbose:
        for line in content.splitlines():
            print(' ', line)
    with open(dockerlibrary_path, 'w') as h:
        h.write(content)
Example No. 27
def _get_sync_packages_to_main_job_config(rosdistro_name, build_file):
    template_name = 'release/sync_packages_to_main_job.xml.em'
    job_data = {
        'rosdistro_name': rosdistro_name,

        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 28
def get_trigger_jobs_job_config(group_name, build_file):
    template_name = 'snippet/trigger-jobs_job.xml.em'
    job_data = {
        'has_force_parameter': True,
        'project_name_pattern': '%s__.*' % group_name,
        'recipients': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
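Configs returned by helpers like this one are typically pushed to Jenkins with configure_job, as the main() examples elsewhere on this page do. A hypothetical call site follows; `group_name`, `build_file`, and `config` would come from the surrounding configuration code, and the job name pattern is illustrative only. `connect` is imported from ros_buildfarm.jenkins (see Example No. 56); `configure_job` is assumed to live alongside it.

jenkins = connect(config.jenkins_url)
job_config = get_trigger_jobs_job_config(group_name, build_file)
job_name = '%s_%s' % (group_name, 'trigger-jobs')  # illustrative naming
configure_job(jenkins, job_name, job_config, dry_run=False)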
Example No. 29
def get_trigger_jobs_job_config(group_name, build_file):
    template_name = 'snippet/trigger-jobs_job.xml.em'
    job_data = {
        'has_force_parameter': True,
        'project_name_pattern': '%s__.*' % group_name,
        'recipients': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 30
def _get_doc_job_config(config,
                        config_url,
                        rosdistro_name,
                        doc_build_name,
                        build_file,
                        os_name,
                        os_code_name,
                        arch,
                        doc_repo_spec,
                        repo_name,
                        dist_cache=None,
                        is_disabled=False):
    template_name = 'doc/doc_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    maintainer_emails = set([])
    if build_file.notify_maintainers and dist_cache and repo_name:
        # add maintainers listed in latest release to recipients
        repo = dist_cache.distribution_file.repositories[repo_name]
        if repo.release_repository:
            for pkg_name in repo.release_repository.package_names:
                if pkg_name not in dist_cache.release_package_xmls:
                    continue
                pkg_xml = dist_cache.release_package_xmls[pkg_name]
                pkg = parse_package_string(pkg_xml)
                for m in pkg.maintainers:
                    maintainer_emails.add(m.email)

    job_data = {
        'github_url': get_github_project_url(doc_repo_spec.url),
        'job_priority': build_file.jenkins_job_priority,
        'node_label': build_file.jenkins_job_label,
        'doc_repo_spec': doc_repo_spec,
        'disabled': is_disabled,
        'github_orgunit': git_github_orgunit(doc_repo_spec.url),
        'ros_buildfarm_repository': get_repository(),
        'script_generating_key_files': script_generating_key_files,
        'config_url': config_url,
        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'doc_build_name': doc_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,
        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,
        'timeout_minutes': build_file.jenkins_job_timeout,
        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 31
def create_job(os_name, job_name, template_file, additional_dict):
    # `args`, `data`, `os_configs`, `os_config_overrides`, `jenkins`, and
    # `jenkins_kwargs` are closure variables from the enclosing scope;
    # this is a nested helper.
    if args.select_jobs_regexp and not args.pattern_select_jobs_regexp.match(job_name):
        return
    job_data = dict(data)
    job_data['os_name'] = os_name
    job_data.update(os_configs[os_name])
    job_data.update(additional_dict)
    job_data.update(os_config_overrides.get(os_name, {}))
    job_config = expand_template(template_file, job_data)
    configure_job(jenkins, job_name, job_config, **jenkins_kwargs)
Example No. 32
def _get_sourcedeb_job_config(config_url,
                              rosdistro_name,
                              release_build_name,
                              config,
                              build_file,
                              os_name,
                              os_code_name,
                              pkg_name,
                              repo_name,
                              release_repository,
                              dist_cache=None,
                              is_disabled=False):
    template_name = 'release/sourcedeb_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    sourcedeb_files = [
        'sourcedeb/*.debian.tar.gz',
        'sourcedeb/*.debian.tar.xz',
        'sourcedeb/*.dsc',
        'sourcedeb/*.orig.tar.gz',
        'sourcedeb/*_source.changes',
    ]

    maintainer_emails = get_maintainer_emails(dist_cache, repo_name) \
        if build_file.notify_maintainers \
        else set([])

    job_data = {
        'github_url': get_github_project_url(release_repository.url),
        'job_priority': build_file.jenkins_source_job_priority,
        'node_label': build_file.jenkins_source_job_label,
        'disabled': is_disabled,
        'ros_buildfarm_repository': get_repository(),
        'script_generating_key_files': script_generating_key_files,
        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'pkg_name': pkg_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'repository_args': repository_args,
        'sourcedeb_files': sourcedeb_files,
        'import_package_job_name': get_import_package_job_name(rosdistro_name),
        'debian_package_name': get_debian_package_name(rosdistro_name,
                                                       pkg_name),
        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'timeout_minutes': build_file.jenkins_source_job_timeout,
        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 33
def _get_import_package_job_config(build_file, package_format):
    template_name = 'release/%s/import_package_job.xml.em' % package_format
    job_data = {
        'target_queue': build_file.target_queue,
        'abi_incompatibility_assumed': build_file.abi_incompatibility_assumed,
        'notify_emails': build_file.notify_emails,
        'ros_buildfarm_repository': get_repository(),
        'credential_id': build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 34
def _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, source_repo_spec,
        repo_name, dist_cache=None):
    template_name = 'devel/devel_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config, build_file)

    maintainer_emails = set([])
    if build_file.notify_maintainers and dist_cache:
        # add maintainers listed in latest release to recipients
        repo = dist_cache.distribution_file.repositories[repo_name]
        if repo.release_repository:
            for pkg_name in repo.release_repository.package_names:
                if pkg_name not in dist_cache.release_package_xmls:
                    continue
                pkg_xml = dist_cache.release_package_xmls[pkg_name]
                pkg = parse_package_string(pkg_xml)
                for m in pkg.maintainers:
                    maintainer_emails.add(m.email)

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'job_priority': build_file.jenkins_job_priority,

        'source_repo_spec': source_repo_spec,

        'script_generating_key_files': script_generating_key_files,

        'ros_buildfarm_repo': config.ros_buildfarm_repo,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,
        'source_build_name': source_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'notify_emails': set(config.notify_emails + build_file.notify_emails),
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,

        'timeout_minutes': build_file.jenkins_job_timeout,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 35
def get_job_config(args, config):
    template_name = 'misc/rosdistro_cache_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    reconfigure_job_names = []
    build_files = get_release_build_files(config, args.rosdistro_name)
    for release_build_name in sorted(build_files.keys()):
        group_name = get_release_job_prefix(args.rosdistro_name,
                                            release_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_job_names.append(job_name)

    reconfigure_doc_job_names = []
    build_files = get_doc_build_files(config, args.rosdistro_name)
    for doc_build_name in sorted(build_files.keys()):
        group_name = get_doc_view_name(args.rosdistro_name, doc_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_doc_job_names.append(job_name)

    reconfigure_source_job_names = []
    build_files = get_source_build_files(config, args.rosdistro_name)
    for source_build_name in sorted(build_files.keys()):
        group_name = get_devel_view_name(args.rosdistro_name,
                                         source_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_source_job_names.append(job_name)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository':
        get_repository(),
        'script_generating_key_files':
        script_generating_key_files,
        'rosdistro_index_url':
        config.rosdistro_index_url,
        'repository_args':
        repository_args,
        'reconfigure_job_names':
        reconfigure_job_names,
        'reconfigure_doc_job_names':
        reconfigure_doc_job_names,
        'reconfigure_source_job_names':
        reconfigure_source_job_names,
        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],
        'git_ssh_credential_id':
        config.git_ssh_credential_id,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 36
def _get_import_package_job_config(build_file):
    template_name = 'release/import_package_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 37
def get_job_config(notification_emails):
    template_name = 'misc/dashboard_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'notification_emails': notification_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 38
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        jenkins=False, views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts},
        options={BANGPATH_OPT: False})
    value = value.replace('python3', sys.executable)
    print(value)
Example No. 39
def _get_trigger_broken_with_non_broken_upstream_job_config(
        rosdistro_name, release_build_name, build_file):
    template_name = \
        'release/release_trigger-broken-with-non-broken-upstream_job.xml.em'
    job_data = {
        'source_project_name_prefix':
        get_release_source_view_prefix(rosdistro_name),
        'binary_project_name_prefix':
        get_release_binary_view_prefix(rosdistro_name, release_build_name),
        'recipients':
        build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 40
def _get_trigger_broken_with_non_broken_upstream_job_config(
        rosdistro_name, release_build_name, build_file):
    template_name = \
        'release/release_trigger-broken-with-non-broken-upstream_job.xml.em'
    job_data = {
        'source_project_name_prefix': get_release_source_view_prefix(
            rosdistro_name),
        'binary_project_name_prefix': get_release_binary_view_prefix(
            rosdistro_name, release_build_name),

        'recipients': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 41
def main(argv=sys.argv[1:]):
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            super(IncludeHook, self).__init__()
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    templates.template_hooks = [hook]

    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        jenkins=False, views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts},
        options={BANGPATH_OPT: False})
    print(value)
Example No. 42
def _get_doc_independent_job_config(
        config, config_url, doc_build_name, build_file):

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = {
        'job_priority': build_file.jenkins_job_priority,
        'node_label': get_node_label(build_file.jenkins_job_label),

        'ros_buildfarm_repository': get_repository(),

        'doc_repositories': build_file.doc_repositories,

        'script_generating_key_files': script_generating_key_files,

        'config_url': config_url,
        'doc_build_name': doc_build_name,
        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,

        'timeout_minutes': build_file.jenkins_job_timeout,
    }

    if build_file.documentation_type == 'make_target':
        template_name = 'doc/doc_independent_job.xml.em'
        job_data.update({
            'install_apt_packages': build_file.install_apt_packages,
            'install_pip_packages': build_file.install_pip_packages,
            'upload_user': build_file.upload_user,
            'upload_host': build_file.upload_host,
            'upload_root': build_file.upload_root,
            'credential_id': build_file.upload_credential_id
        })
    elif build_file.documentation_type == 'docker_build':
        template_name = 'doc/doc_independent_docker_job.xml.em'
        job_data.update({
            'upload_repository_url': build_file.upload_repository_url,
            'upload_repository_branch': build_file.upload_repository_branch,
            'upload_credential_id': build_file.upload_credential_id,
        })
    else:
        raise JobValidationError(
            'Not independent documentation_type: ' +
            build_file.documentation_type
        )
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 43
def get_trigger_jobs_job_config(group_name, build_file):
    template_name = 'devel/devel_trigger-jobs_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    job_data = {
        'template_name': template_name,
        'now_str': now_str,

        'project_name_pattern': '%s__.*' % group_name,

        'recipients': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 44
def get_job_config(args, config):
    template_name = 'status/repos_status_page_job.xml.em'
    now = datetime.utcnow()
    now_str = now.strftime('%Y-%m-%dT%H:%M:%SZ')

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'template_name': template_name,
        'now_str': now_str,

        'ros_buildfarm_url': get_repository_url('.'),

        'notification_emails': config.notify_emails,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 45
def _get_sync_packages_to_main_job_config(rosdistro_name, build_file,
                                          package_format):
    template_name = 'release/%s/sync_packages_to_main_job.xml.em' % package_format
    job_data = {
        'ros_buildfarm_repository':
        get_repository(),
        'rosdistro_name':
        rosdistro_name,
        'deb_sync_to_main_job_name':
        get_sync_packages_to_main_job_name(rosdistro_name, 'deb'),
        'notify_emails':
        build_file.notify_emails,
        'credential_id':
        build_file.upload_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 46
def get_job_config(args, config):
    template_name = "misc/rosdistro_cache_job.xml.em"

    repository_args, script_generating_key_files = get_repositories_and_script_generating_key_files(config=config)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update(
        {
            "ros_buildfarm_repository": get_repository(),
            "script_generating_key_files": script_generating_key_files,
            "rosdistro_index_url": config.rosdistro_index_url,
            "repository_args": repository_args,
            "notification_emails": config.distributions[args.rosdistro_name]["notification_emails"],
            "git_ssh_credential_id": config.git_ssh_credential_id,
        }
    )
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 47
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="Run the 'binarypkg' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_target_repository(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_skip_download_sourcepkg(parser)
    add_argument_append_timestamp(parser)
    add_argument_env_vars(parser)
    add_argument_binarypkg_dir(parser)
    args = parser.parse_args(argv)

    data = copy.deepcopy(args.__dict__)
    data.update({
        'uid':
        get_user_id(),
        'distribution_repository_urls':
        args.distribution_repository_urls,
        'distribution_repository_keys':
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'target_repository':
        os.path.join(args.target_repository, args.os_code_name, 'SRPMS'),
        'skip_download_sourcepkg':
        args.skip_download_sourcepkg,
        'sourcepkg_dir':
        os.path.join(args.binarypkg_dir, 'source'),
        'build_environment_variables':
        args.env_vars,
    })
    create_dockerfile('release/rpm/binarypkg_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    with open(os.path.join(args.dockerfile_dir, 'mock_config.cfg'),
              'w') as mock_cfg:
        mock_cfg.write(expand_template('release/rpm/mock_config.cfg.em', data))
Example No. 48
def main(argv=sys.argv[1:]):
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    scms = []
    scripts = []

    def template_hook(template_name, data, content):
        if template_name == 'snippet/scm.xml.em':
            scms.append((data['repo_spec'], data['path']))
        if template_name == 'snippet/builder_shell.xml.em':
            scripts.append(data['script'])
    templates.template_hook = template_hook

    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        jenkins=False, view=False)

    templates.template_hook = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': scms,
            'scripts': scripts},
        options={BANGPATH_OPT: False})
    print(value)
Example No. 49
def get_job_config(args, config):
    template_name = 'status/release_compare_page_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository':
        get_repository(),
        'script_generating_key_files':
        script_generating_key_files,
        'rosdistro_index_url':
        config.rosdistro_index_url,
        'repository_args':
        repository_args,
        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 50
def get_job_config(args, config):
    template_name = 'status/release_status_page_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,

        'repository_args': repository_args,

        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 51
def _get_sync_packages_to_testing_job_config(config_url, rosdistro_name,
                                             release_build_name, os_code_name,
                                             arch, config, build_file):
    template_name = 'release/sync_packages_to_testing_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    job_data = {
        'ros_buildfarm_repository': get_repository(),
        'script_generating_key_files': script_generating_key_files,
        'config_url': config_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,
        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 52
def _get_sync_packages_to_testing_job_config(config_url, rosdistro_name,
                                             release_build_name, os_name,
                                             os_code_name, arch, config,
                                             build_file):
    package_format = package_format_mapping[os_name]
    template_name = 'release/%s/sync_packages_to_testing_job.xml.em' % package_format

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    job_data = {
        'ros_buildfarm_repository':
        get_repository(),
        'script_generating_key_files':
        script_generating_key_files,
        'config_url':
        config_url,
        'rosdistro_name':
        rosdistro_name,
        'release_build_name':
        release_build_name,
        'os_name':
        os_name,
        'os_code_name':
        os_code_name,
        'arch':
        arch,
        'repository_args':
        repository_args,
        'import_package_job_name':
        get_import_package_job_name(rosdistro_name, package_format),
        'notify_emails':
        build_file.notify_emails,
        'credential_id':
        build_file.upload_credential_id,
        'dest_credential_id':
        build_file.upload_destination_credential_id,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 53
def get_job_config(args, config):
    template_name = 'status/repos_status_page_job.xml.em'

    targets_by_repo = get_targets_by_repo(config, args.rosdistro_name)
    status_pages = {}
    for name, repo_urls in config.status_page_repositories.items():
        data = get_status_page_data(repo_urls, targets_by_repo)
        if data is not None:
            status_pages[name] = data
        else:
            print(("Skipping repos status page '%s' since no repository URL" +
                   'matches any of the release build files') % name)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository': get_repository(),

        'status_pages': status_pages,

        'notification_emails':
        config.distributions[args.rosdistro_name]['notification_emails'],
    })
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 54
def _get_sync_packages_to_testing_job_config(
        config_url, rosdistro_name, release_build_name, os_code_name, arch,
        config, build_file):
    template_name = 'release/sync_packages_to_testing_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    job_data = {
        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'config_url': config_url,
        'rosdistro_name': rosdistro_name,
        'release_build_name': release_build_name,
        'os_code_name': os_code_name,
        'arch': arch,
        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
Example No. 55
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate the 'upload_main' and 'upload_testing' jobs.")
    add_argument_config_url(parser)
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    template_name = 'release/trigger_upload_repo_job.xml.em'

    config = get_index(args.config_url)
    jenkins = connect(config.jenkins_url)

    for repo in ['main', 'testing']:
        job_name = 'upload_%s' % repo
        block_when_upstream_building = 'true'
        if repo == 'testing':
            block_when_upstream_building = 'false'
        job_config = expand_template(template_name, {
            'block_when_upstream_building': block_when_upstream_building,
            'repo': repo,
            'upstream_job_names': get_upstream_job_names(config, repo),
            'recipients': config.notify_emails})

        configure_job(jenkins, job_name, job_config, dry_run=args.dry_run)
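Note that block_when_upstream_building is deliberately the string 'true' or 'false' rather than a Python bool, since the value is substituted verbatim into the Jenkins job XML. The if/else above could be collapsed to a one-liner with identical behavior:

block_when_upstream_building = 'false' if repo == 'testing' else 'true'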
Example No. 56
def configure_devel_jobs(config_url,
                         rosdistro_name,
                         source_build_name,
                         groovy_script=None,
                         dry_run=False,
                         whitelist_repository_names=None):
    """
    Configure all Jenkins devel jobs.

    L{configure_release_job} will be invoked for each source repository and
    target that matches the build file criteria.
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print('  -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    devel_view_name = get_devel_view_name(rosdistro_name,
                                          source_build_name,
                                          pull_request=False)
    pull_request_view_name = get_devel_view_name(rosdistro_name,
                                                 source_build_name,
                                                 pull_request=True)

    # all further configuration will be handled by either the Jenkins API
    # or by a generated groovy script
    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url) if groovy_script is None else False

    view_configs = {}
    views = {}
    if build_file.test_commits_force is not False:
        views[devel_view_name] = configure_devel_view(jenkins,
                                                      devel_view_name,
                                                      dry_run=dry_run)
    if build_file.test_pull_requests_force is not False:
        views[pull_request_view_name] = configure_devel_view(
            jenkins, pull_request_view_name, dry_run=dry_run)
    if not jenkins:
        view_configs.update(views)
    groovy_data = {
        'dry_run': dry_run,
        'expected_num_views': len(view_configs),
    }

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    devel_job_names = []
    pull_request_job_names = []
    job_configs = OrderedDict()
    for repo_name in sorted(repo_names):
        if whitelist_repository_names:
            if repo_name not in whitelist_repository_names:
                print(
                    "Skipping repository '%s' not in explicitly passed list" %
                    repo_name,
                    file=sys.stderr)
                continue

        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        job_types = []
        # check for testing commits
        if build_file.test_commits_force is False:
            print(("Skipping repository '%s': 'test_commits' is forced to " +
                   "false in the build file") % repo_name)
        elif repo.source_repository.test_commits is False:
            print(("Skipping repository '%s': 'test_commits' of the " +
                   "repository set to false") % repo_name)
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            print(("Skipping repository '%s': 'test_commits' defaults to " +
                   "false in the build file") % repo_name)
        else:
            job_types.append('commit')

        if not is_disabled:
            # check for testing pull requests
            if build_file.test_pull_requests_force is False:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "is forced to false in the build file") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is False:
                # print(("Skipping repository '%s': 'test_pull_requests' of " +
                #        "the repository set to false") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is None and \
                    not build_file.test_pull_requests_default:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "defaults to false in the build file") % repo_name)
                pass
            else:
                print("Pull request job for repository '%s'" % repo_name)
                job_types.append('pull_request')

        for job_type in job_types:
            pull_request = job_type == 'pull_request'
            for os_name, os_code_name, arch in targets:
                try:
                    job_name, job_config = configure_devel_job(
                        config_url,
                        rosdistro_name,
                        source_build_name,
                        repo_name,
                        os_name,
                        os_code_name,
                        arch,
                        pull_request,
                        config=config,
                        build_file=build_file,
                        index=index,
                        dist_file=dist_file,
                        dist_cache=dist_cache,
                        jenkins=jenkins,
                        views=views,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script,
                        dry_run=dry_run)
                    if not pull_request:
                        devel_job_names.append(job_name)
                    else:
                        pull_request_job_names.append(job_name)
                    if groovy_script is not None:
                        print("Configuration for job '%s'" % job_name)
                        job_configs[job_name] = job_config
                except JobValidationError as e:
                    print(e.message, file=sys.stderr)

    groovy_data['expected_num_jobs'] = len(job_configs)
    groovy_data['job_prefixes_and_names'] = {}

    devel_job_prefix = '%s__' % devel_view_name
    pull_request_job_prefix = '%s__' % pull_request_view_name
    if not whitelist_repository_names:
        groovy_data['job_prefixes_and_names']['devel'] = \
            (devel_job_prefix, devel_job_names)
        groovy_data['job_prefixes_and_names']['pull_request'] = \
            (pull_request_job_prefix, pull_request_job_names)

        if groovy_script is None:
            # delete obsolete jobs in these views
            from ros_buildfarm.jenkins import remove_jobs
            print('Removing obsolete devel jobs')
            remove_jobs(jenkins,
                        devel_job_prefix,
                        devel_job_names,
                        dry_run=dry_run)
            print('Removing obsolete pull request jobs')
            remove_jobs(jenkins,
                        pull_request_job_prefix,
                        pull_request_job_names,
                        dry_run=dry_run)
    if groovy_script is not None:
        print(
            "Writing groovy script '%s' to reconfigure %d views and %d jobs" %
            (groovy_script, len(view_configs), len(job_configs)))
        content = expand_template('snippet/reconfigure_jobs.groovy.em',
                                  groovy_data)
        write_groovy_script_and_configs(groovy_script,
                                        content,
                                        job_configs,
                                        view_configs=view_configs)
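
A hedged invocation sketch for configure_devel_jobs; all values are placeholders. Passing groovy_script makes the function write a reconfiguration script instead of talking to the Jenkins API, and whitelist_repository_names restricts the run to the named repositories, as handled in the loop above.

# Sketch only: placeholder config URL, distro, build name, and repository.
configure_devel_jobs(
    'https://example.com/buildfarm_config/index.yaml',
    'noetic', 'default',
    groovy_script='reconfigure_jobs.groovy',
    dry_run=True,
    whitelist_repository_names=['example_repo'])
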
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'trusty')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api', pkg_name,
                    'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping, no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'update rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(
                pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(
            args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
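                # publish_string renders the reST to HTML and returns bytes;
                # decode to str so the regex post-processing below can run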
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tag file to ourselves;
            # bad things happen when we do
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = 'http://docs.ros.org/%s/api/%s/html' % \
                (args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" %
                  pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()
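        # the apt cache above is used with get_binary_package_versions()
        # below to look up the exact versions of the resolved Debian packages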

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc', _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which depend on unreleased packages
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        build_files = get_doc_build_files(config, args.rosdistro_name)
        build_file = build_files[args.doc_build_name]

        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
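            # the last <export><rosdoc content="..."/> entry wins; the
            # '${prefix}' placeholder expands to the package directory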
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,

            'canonical_base_url': build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
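
A hedged command-line sketch for the entry point above. The flag spellings for the add_argument_* helpers are assumptions inferred from the attribute names used in the function body, and every value is a placeholder.

# Sketch only: assumed flag names and placeholder values.
main([
    '--config-url', 'https://example.com/buildfarm_config/index.yaml',
    '--rosdistro-name', 'noetic',
    '--doc-build-name', 'default',
    '--workspace-root', '/tmp/ws',
    '--rosdoc-lite-dir', '/tmp/rosdoc_lite',
    '--catkin-sphinx-dir', '/tmp/catkin-sphinx',
    '--rosdoc-index-dir', '/tmp/rosdoc_index',
    '--repository-name', 'example_repo',
    '--os-name', 'ubuntu',
    '--os-code-name', 'focal',
    '--arch', 'amd64',
    # vcs info is 'TYPE VERSION URL', matching the split(' ', 2) above
    '--vcs-info', 'git master https://example.com/example_repo.git',
    '--output-dir', '/tmp/out',
    '--dockerfile-dir', '/tmp/docker',
])
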
def main(argv=sys.argv[1:]):
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'catkin_workspace/src/%s' % k)
            for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            print(template_path, file=sys.stderr)
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    templates.template_hooks = [hook]

    # use an arbitrary source repo to pass to the devel job template
    source_repository = deepcopy(list(repositories.values())[0])
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace the mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py, which needs to find packages in
        #   both the underlay and the overlay workspace
        # - catkin_make_isolated_and_test.py, which needs to source the
        #   underlay environment before building the overlay
        mount_volume = '-v $WORKSPACE/catkin_workspace:/tmp/catkin_workspace'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'catkin_workspace_overlay:/tmp/catkin_workspace_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__))})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')