Example #1
def document_necessary(workspace,
                       docspace,
                       ros_distro,
                       repo,
                       rosdoc_lite_version,
                       jenkins_scripts_version,
                       force_doc=False):
    append_pymodules_if_needed()
    print "Working on distro %s and repo %s" % (ros_distro, repo)

    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    #Install the repository
    install_repo(docspace, workspace, repo, doc_conf, depends_conf)

    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)

    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(
        repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print "REPO HASHES: %s" % repo_hashes

    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print "There were no changes to any of the repositories we document. Not running documentation."
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()
        return False

    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {
        'doc_conf': doc_conf,
        'depends_conf': depends_conf,
        'tags_db': tags_db
    }
def document_necessary(workspace, docspace, ros_distro, repo,
                       rosdoc_lite_version, jenkins_scripts_version, force_doc=False):
    append_pymodules_if_needed()
    print "Working on distro %s and repo %s" % (ros_distro, repo)

    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    #Install the repository
    install_repo(docspace, workspace, repo, doc_conf, depends_conf)

    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)

    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print "REPO HASHES: %s" % repo_hashes

    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print "There were no changes to any of the repositories we document. Not running documentation."
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()
        return False

    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {'doc_conf': doc_conf, 'depends_conf': depends_conf, 'tags_db': tags_db}
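
# The snippets above reduce to a single decision: regenerate documentation when
# any tracked repository revision changed, when force_doc is set, or when the
# rosdoc_lite / jenkins_scripts hashes recorded in the tags database no longer
# match the current versions. A minimal, self-contained sketch of that check
# follows; doc_run_needed, repo_changed, stored_hashes and current_versions are
# hypothetical stand-ins for illustration, not part of the jenkins_scripts API.


def doc_run_needed(repo_changed, stored_hashes, current_versions, force_doc=False):
    """Return True when the documentation job should actually run."""
    changes = repo_changed or force_doc
    tools_unchanged = all(
        stored_hashes.get(key) == current_versions.get(key)
        for key in ('rosdoc_lite-sys', 'jenkins_scripts-sys')
    )
    # Skip only when nothing changed AND the tool versions are identical.
    return changes or not tools_unchanged


if __name__ == '__main__':
    stored = {'rosdoc_lite-sys': 'abc123', 'jenkins_scripts-sys': 'def456'}
    current = {'rosdoc_lite-sys': 'abc123', 'jenkins_scripts-sys': 'fff000'}
    # jenkins_scripts changed since the last run, so docs must be rebuilt
    # even though no documented repository changed.
    print(doc_run_needed(repo_changed=False, stored_hashes=stored,
                         current_versions=current))
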
Example #3
def document_necessary(workspace, docspace, ros_distro, repo,
                       rosdoc_lite_version, jenkins_scripts_version, force_doc=False):
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))

    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    #Install the repository
    try:
        install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    except BuildException:
        # checkout failed, try to get default branches of repos to notify the maintainers
        print('Failed to checkout repositories, trying to checkout default branches to collect maintainer information for notification about failure')
        for entry in doc_conf:
            for repo_config in entry.values():
                repo_config['version'] = None
        install_repo(docspace, workspace, repo, doc_conf, [])
        extract_notification_recipients(docspace, doc_conf)
        raise

    extract_notification_recipients(docspace, doc_conf)

    #Load information about existing tags
    jenkins_scripts_path = os.path.join(workspace, 'jenkins_scripts')
    if not os.path.exists(jenkins_scripts_path):
        # if jenkins_scripts has not been checked out in the workspace
        # expect that the user calls doc from within a jenkins_scripts checkout
        jenkins_scripts_path = os.getcwd()
    rosdoc_tag_index_path = os.path.join(workspace, 'rosdoc_tag_index')
    tags_db = TagsDb(ros_distro, jenkins_scripts_path, rosdoc_tag_index_path)

    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print("REPO HASHES: %s" % repo_hashes)

    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()

        # create marker files for all packages and upload them
        doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
        if os.path.exists(doc_path):
            shutil.rmtree(doc_path)
        repo_path = os.path.realpath("%s" % (docspace))
        stacks, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, depends_conf)
        folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
        if folders:
            dsts = ['%s/api/%s/stamp' % (doc_path, f) for f in folders]
            for dst in dsts:
                os.makedirs(os.path.dirname(dst))
                with open(dst, 'w'):
                    pass
            print("Uploading marker files to identify that documentation is up-to-date.")
            command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s/api/ [email protected]:/home/rosbot/docs/%s/api' % (doc_path, ros_distro)]
            call_with_list(command)

        return False

    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {'doc_conf': doc_conf, 'depends_conf': depends_conf, 'tags_db': tags_db}
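
# When Example #3 skips regeneration it still creates one empty "stamp" file
# per package folder under <doc_path>/api/<folder>/ and rsyncs them to the docs
# host, so the server can tell that the existing documentation is still current.
# Below is a self-contained sketch of just the stamp-file step; the folder names
# and the temporary doc_path are made up for illustration and the rsync upload
# is omitted.

import os
import tempfile


def write_stamp_files(doc_path, folders):
    """Create an empty api/<folder>/stamp marker for every package folder."""
    for folder in folders:
        stamp = os.path.join(doc_path, 'api', folder, 'stamp')
        os.makedirs(os.path.dirname(stamp))
        with open(stamp, 'w'):
            pass


if __name__ == '__main__':
    demo_doc_path = tempfile.mkdtemp(prefix='doc_demo_')
    write_stamp_files(demo_doc_path, ['pkg_a', 'pkg_b'])
    for root, _, files in os.walk(demo_doc_path):
        for name in files:
            print(os.path.join(root, name))
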
Example #4
def document_repo(workspace, docspace, ros_distro, repo,
                  platform, arch, homepage,
                  rosdoc_lite_version, jenkins_scripts_version):
    append_pymodules_if_needed()
    doc_job = "doc-%s-%s" % (ros_distro, repo)
    print "Working on distro %s and repo %s" % (ros_distro, repo)

    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    #Install the repository
    install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    repo_path = os.path.realpath("%s" % (docspace))
    print "Repo path %s" % repo_path

    #Walk through the installed repositories and find old-style packages, new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    print "Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys())
    print "Catkin packages: %s" % catkin_packages
    print "Manifest packages: %s" % manifest_packages
    print "Stacks: %s" % stacks

    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)

    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = False
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print "REPO HASHES: %s" % repo_hashes

    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print "There were no changes to any of the repositories we document. Not running documentation."
        copy_test_results(workspace, docspace)
        return

    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)

    #Get any non local apt dependencies
    ros_dep = RosDepResolver(ros_distro)
    import rosdistro
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print "Apt dependencies: %s" % apt_deps

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print "Build order that honors deps:\n%s" % build_order

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    print "Installing all dependencies for %s" % repo
    if apt_deps:
        call("apt-get install %s --yes" % (' '.join(apt_deps)))
    print "Done installing dependencies"

    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append('export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH')
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #We assume that there will be no build errors to start
    build_errors = []

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    if catkin_packages \
       and not 'rosdoc_lite' in catkin_packages.keys() and not 'catkin' in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin the messages aren't generated, TODO
    #to come back and fix this if necessary
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    repo_tags = document_packages(manifest_packages, catkin_packages, build_order,
                                  repos_to_doc, sources, tags_db, full_apt_deps,
                                  ros_dep, repo_map, repo_path, docspace, ros_distro,
                                  homepage, doc_job)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))

    #Copy the files to the appropriate place
    #call("rsync -e \"ssh -o StrictHostKeyChecking=no\" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite" % (doc_path))
    command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite' % doc_path]
    call_with_list(command)

    #Remove the autogenerated doc files since they take up a lot of space if left on the server
    shutil.rmtree(doc_path)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)

    #Tell jenkins that we've succeeded
    print "Preparing xml test results"
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print "Created test results directory"
    except OSError:
        # directory already exists
        pass

    if build_errors:
        copy_test_results(workspace, docspace,
                          """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below,
you can update information the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/tree/master/doc/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s""" % (build_errors,
                              ros_distro,
                              repo,
                              repo,
                              ros_distro,
                              yaml.safe_dump(doc_conf, default_flow_style=False),
                              yaml.safe_dump(depends_conf, default_flow_style=False)),
                          "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)
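
# document_repo above builds a local dependency graph and asks
# get_dependency_build_order() for an ordering that honors dependencies before
# documenting each package. A minimal, self-contained sketch of that idea using
# a plain topological sort (assuming an acyclic graph) follows; the graph
# contents and the helper name dependency_build_order are illustrative only,
# not the jenkins_scripts implementation.

def dependency_build_order(dep_graph):
    """Return package names so that dependencies come before their dependents.

    dep_graph maps each package to the set of local packages it depends on.
    """
    ordered = []
    visited = set()

    def visit(pkg):
        if pkg in visited:
            return
        visited.add(pkg)
        for dep in sorted(dep_graph.get(pkg, ())):
            visit(dep)
        ordered.append(pkg)

    for pkg in sorted(dep_graph):
        visit(pkg)
    return ordered


if __name__ == '__main__':
    # nav_core depends on costmap_2d, which depends on tf (all hypothetical here)
    graph = {'nav_core': {'costmap_2d'}, 'costmap_2d': {'tf'}, 'tf': set()}
    print(dependency_build_order(graph))  # ['tf', 'costmap_2d', 'nav_core']
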
def document_necessary(
    workspace, docspace, ros_distro, repo, rosdoc_lite_version, jenkins_scripts_version, force_doc=False
):
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))

    # Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    # Install the repository
    try:
        install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    except BuildException:
        # checkout failed, try to get default branches of repos to notify the maintainers
        print(
            "Failed to checkout repositories, trying to checkout default branches to collect maintainer information for notification about failure"
        )
        for entry in doc_conf:
            for repo_config in entry.values():
                repo_config["version"] = None
        install_repo(docspace, workspace, repo, doc_conf, [])
        extract_notification_recipients(docspace, doc_conf)
        raise

    extract_notification_recipients(docspace, doc_conf)

    # Load information about existing tags
    jenkins_scripts_path = os.path.join(workspace, "jenkins_scripts")
    if not os.path.exists(jenkins_scripts_path):
        # if jenkins_scripts has not been checked out in the workspace
        # expect that the user calls doc from within a jenkins_scripts checkout
        jenkins_scripts_path = os.getcwd()
    rosdoc_tag_index_path = os.path.join(workspace, "rosdoc_tag_index")
    tags_db = TagsDb(ros_distro, jenkins_scripts_path, rosdoc_tag_index_path)

    # Check to see if we need to document this repo list by checking if any of
    # the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [("%s" % repo, doc_conf), ("%s_depends" % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    # We also want to make sure that we run documentation generation anytime
    # jenkins_scripts or rosdoc_lite has changed since the last time this job was
    # run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get("rosdoc_lite-sys", None)
    old_jenkins_scripts_hash = repo_hashes.get("jenkins_scripts-sys", None)
    print("REPO HASHES: %s" % repo_hashes)

    if (
        not changes
        and old_rosdoc_lite_hash == rosdoc_lite_version
        and old_jenkins_scripts_hash == jenkins_scripts_version
    ):
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()

        # create marker files for all packages and upload them
        doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
        if os.path.exists(doc_path):
            shutil.rmtree(doc_path)
        repo_path = os.path.realpath("%s" % (docspace))
        stacks, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, depends_conf)
        folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
        if folders:
            dsts = ["%s/api/%s/stamp" % (doc_path, f) for f in folders]
            for dst in dsts:
                os.makedirs(os.path.dirname(dst))
                with open(dst, "w"):
                    pass
            print("Uploading marker files to identify that documentation is up-to-date.")
            command = [
                "bash",
                "-c",
                'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s/api/ [email protected]:/home/rosbot/docs/%s/api'
                % (doc_path, ros_distro),
            ]
            call_with_list(command)

        return False

    # Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes["rosdoc_lite-sys"] = rosdoc_lite_version
    repo_hashes["jenkins_scripts-sys"] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {"doc_conf": doc_conf, "depends_conf": depends_conf, "tags_db": tags_db}