Code example #1
    def __init__(self, distro_name, jenkins_scripts_path,
                 rosdoc_tag_index_path):
        self.distro_name = distro_name
        self.jenkins_scripts_path = jenkins_scripts_path
        self.path = os.path.abspath(rosdoc_tag_index_path)
        self.delete_tag_index_repo()

        command = [
            'bash', '-c',
            'export GIT_SSH="%s/git_ssh" \
                   && git clone [email protected]:ros-infrastructure/rosdoc_tag_index.git %s'
            % (self.jenkins_scripts_path, self.path)
        ]

        call_with_list(command)

        self.tags = self.read_folder('tags')

        self.forward_deps = self.read_folder('deps')
        self.build_reverse_deps()

        self.metapackages = self.read_folder('metapackages')
        self.build_metapackage_index()

        self.rosinstall_hashes = self.read_folder('rosinstall_hashes')
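The constructor in example #1 sets GIT_SSH to a wrapper script and clones the rosdoc_tag_index repository through an inline bash export. Below is a minimal sketch of the same pattern that passes GIT_SSH through the subprocess environment instead of a shell export; the wrapper path and destination directory are placeholders, not values from the source.

import os
import subprocess

def clone_with_git_ssh(git_ssh_wrapper, dest):
    # git consults the GIT_SSH environment variable and runs that script
    # instead of plain ssh, which lets Jenkins inject its own key/options.
    env = dict(os.environ)
    env['GIT_SSH'] = git_ssh_wrapper
    subprocess.check_call(
        ['git', 'clone',
         '[email protected]:ros-infrastructure/rosdoc_tag_index.git', dest],
        env=env)

# Illustrative call (paths are hypothetical):
# clone_with_git_ssh('/path/to/jenkins_scripts/git_ssh', '/tmp/rosdoc_tag_index')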
Code example #2
    def commit_db(self, exclude=[]):
        if not 'tags' in exclude:
            self.write_folder('tags', self.tags)
        if not 'deps' in exclude:
            self.write_folder('deps', self.forward_deps)
        if not 'metapackages' in exclude:
            self.write_folder('metapackages', self.metapackages)
        if not 'rosinstall_hashes' in exclude:
            self.write_folder('rosinstall_hashes', self.rosinstall_hashes)

        old_dir = os.getcwd()
        os.chdir(self.path)
        changes = check_output('git status -s').strip()
        if changes:
            print("Commiting changes to tags and deps lists....")
            call("git add %s" % os.path.join(self.path, self.distro_name))
            command = [
                'git', '-c', 'user.name=jenkins.ros.org', 'commit', '-m',
                'Updating tags and deps lists for %s' % (self.distro_name)
            ]
            call_with_list(command)

            env = os.environ
            env['GIT_SSH'] = "%s/git_ssh" % self.jenkins_scripts_path

            #Have some tolerance for things committing to the db at the same time
            num_retries = 3
            i = 0
            while True:
                try:
                    call("git fetch origin", env)
                    call("git merge origin/master", env)
                    call("git push origin master", env)
                except BuildException as e:
                    print("Failed to fetch and merge...")
                    if i >= num_retries:
                        raise e
                    time.sleep(2)
                    i += 1
                    print("Trying again attempt %d of %d..." %
                          (i, num_retries))
                    continue

                break
        else:
            print('No changes to tags and deps lists')

        os.chdir(old_dir)
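commit_db pushes to a repository shared by several jobs, so the fetch/merge/push sequence above is wrapped in a small retry loop to tolerate concurrent writers. Below is a self-contained sketch of the same bounded-retry idea using subprocess directly, rather than the call/BuildException helpers from this project; the retry count and delay mirror the example but are otherwise illustrative.

import subprocess
import time

def push_with_retries(repo_dir, num_retries=3, delay=2.0):
    # Try fetch/merge/push; if another writer raced us, wait and retry a few times.
    for attempt in range(num_retries + 1):
        try:
            subprocess.check_call(['git', 'fetch', 'origin'], cwd=repo_dir)
            subprocess.check_call(['git', 'merge', 'origin/master'], cwd=repo_dir)
            subprocess.check_call(['git', 'push', 'origin', 'master'], cwd=repo_dir)
            return
        except subprocess.CalledProcessError:
            if attempt == num_retries:
                raise
            time.sleep(delay)
            print("Trying again, attempt %d of %d..." % (attempt + 1, num_retries))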
Code example #3
    def commit_db(self, exclude=[]):
        if not 'tags' in exclude:
            self.write_folder('tags', self.tags)
        if not 'deps' in exclude:
            self.write_folder('deps', self.forward_deps)
        if not 'metapackages' in exclude:
            self.write_folder('metapackages', self.metapackages)
        if not 'rosinstall_hashes' in exclude:
            self.write_folder('rosinstall_hashes', self.rosinstall_hashes)

        old_dir = os.getcwd()
        os.chdir(self.path)
        changes = check_output('git status -s').strip()
        if changes:
            print("Commiting changes to tags and deps lists....")
            call("git add %s" % os.path.join(self.path, self.distro_name))
            command = ['git', '-c', 'user.name=jenkins.ros.org', 'commit', '-m', 'Updating tags and deps lists for %s' % (self.distro_name)]
            call_with_list(command)

            env = os.environ
            env['GIT_SSH'] = "%s/git_ssh" % self.jenkins_scripts_path

            #Have some tolerance for things committing to the db at the same time
            num_retries = 3
            i = 0
            while True:
                try:
                    call("git fetch origin", env)
                    call("git merge origin/master", env)
                    call("git push origin master", env)
                except BuildException as e:
                    print("Failed to fetch and merge...")
                    if i >= num_retries:
                        raise e
                    time.sleep(2)
                    i += 1
                    print("Trying again attempt %d of %d..." % (i, num_retries))
                    continue

                break
        else:
            print('No changes to tags and deps lists')

        os.chdir(old_dir)
Code example #4
    def __init__(self, distro_name, jenkins_scripts_path, rosdoc_tag_index_path):
        self.distro_name = distro_name
        self.jenkins_scripts_path = jenkins_scripts_path
        self.path = os.path.abspath(rosdoc_tag_index_path)
        self.delete_tag_index_repo()

        command = ['bash', '-c', 'export GIT_SSH="%s/git_ssh" \
                   && git clone [email protected]:ros-infrastructure/rosdoc_tag_index.git %s'
                   % (self.jenkins_scripts_path, self.path)]

        call_with_list(command)

        self.tags = self.read_folder('tags')

        self.forward_deps = self.read_folder('deps')
        self.build_reverse_deps()

        self.metapackages = self.read_folder('metapackages')
        self.build_metapackage_index()

        self.rosinstall_hashes = self.read_folder('rosinstall_hashes')
Code example #5
File: tags_db.py Project: adamantivm/jenkins_scripts
    def __init__(self, distro_name, workspace):
        self.workspace = workspace
        self.distro_name = distro_name
        self.path  = os.path.abspath(os.path.join(self.workspace, 'rosdoc_tag_index'))
        if os.path.exists(self.path):
            shutil.rmtree(self.path)

        command = ['bash', '-c', 'export GIT_SSH="%s/jenkins_scripts/git_ssh" \
                   && git clone [email protected]:ros-infrastructure/rosdoc_tag_index.git %s' \
                   %(workspace, self.path) ]

        call_with_list(command)

        self.tags = self.read_folder('tags')

        self.forward_deps = self.read_folder('deps')
        self.build_reverse_deps()

        self.metapackages = self.read_folder('metapackages')
        self.build_metapackage_index()

        self.rosinstall_hashes = self.read_folder('rosinstall_hashes')
Code example #6
def document_repo(workspace, docspace, ros_distro, repo,
                  platform, arch, homepage, no_chroot, skip_garbage,
                  doc_conf, depends_conf, tags_db):
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print("Repo path %s" % repo_path)

    #Walk through the installed repositories and find old-style packages, new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    if ros_distro == 'indigo':
        if stacks or manifest_packages:
            print("Ignoring dry packages and stacks in '%s'" % ros_distro)
            stacks = {}
            manifest_packages = {}
        if not catkin_packages:
            raise BuildException('No catkin packages found')
    print("Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys()))
    #print "Catkin packages: %s" % catkin_packages
    #print "Manifest packages: %s" % manifest_packages
    #print "Stacks: %s" % stacks

    #Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro, no_chroot=no_chroot)
    import rosdistro
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print("Apt dependencies: %s" % apt_deps)

    #Get rosdistro release file if there are catkin packages to get status
    if catkin_packages and ros_distro not in ['electric', 'fuerte']:
        print("Fetch rosdistro files for: %s" % ros_distro)
        index = rosdistro.get_index(rosdistro.get_index_url())
        rosdistro_release_file = rosdistro.get_release_file(index, ros_distro)
        rosdistro_source_file = rosdistro.get_source_file(index, ros_distro)
    else:
        rosdistro_release_file = None
        rosdistro_source_file = None

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
    if os.path.exists(doc_path):
        shutil.rmtree(doc_path)

    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print("Build order that honors deps:\n%s" % build_order)

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    if not no_chroot:
        print("Installing all dependencies for %s" % repo)

        # XXX this is a really ugly hack to make the hydro doc job for ros_comm pass
        # otherwise roslisp pulls in the rosgraph_msgs package as a Debian dependency
        # which then breaks catkin_basic since it includes the msgs CMake files multiple times
        # resulting in duplicate target names (https://github.com/ros/ros_comm/issues/471)
        if repo == 'ros_comm' and 'ros-hydro-roslisp' in apt_deps:
            apt_deps.remove('ros-hydro-roslisp')

        if apt_deps:
            call("apt-get install %s --yes" % (' '.join(apt_deps)))
        print("Done installing dependencies")

    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append('export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH')
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #We assume that there will be no build errors to start
    build_errors = []

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    local_install_path = os.path.join(docspace, 'local_installs')
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)

    #Make sure to create some subfolders under the local install path
    def makedirs(path):
        if not os.path.exists(path):
            os.makedirs(path)

    makedirs(os.path.join(local_install_path, 'bin'))
    makedirs(os.path.join(local_install_path, 'lib/python2.7/dist-packages'))
    makedirs(os.path.join(local_install_path, 'share'))

    if catkin_packages \
       and not 'rosdoc_lite' in catkin_packages.keys() and not 'catkin' in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    #For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path)
    build_errors.extend(errs)
    sources.append(source)

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin, we have to build all the code and
    #install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    #We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    if os.path.exists(tags_location):
        shutil.rmtree(tags_location)
    command = ['bash', '-c',
               'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/home/rosbot/docs/%s/tags %s' % (ros_distro, tags_location)]
    call_with_list(command)

    repo_tags = document_packages(manifest_packages, catkin_packages, build_order,
                                  repos_to_doc, sources, tags_db, full_apt_deps,
                                  ros_dep, repo_map, repo_path, docspace, ros_distro,
                                  homepage, doc_job, tags_location, doc_path,
                                  rosdistro_release_file, rosdistro_source_file)

    #Copy the files to the appropriate place
    folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
    if folders:
        dsts = ['%s/api/%s' % (doc_path, f) for f in folders]
        for dst in dsts:
            with open(os.path.join(dst, 'stamp'), 'w'):
                pass
        command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr --delete %s [email protected]:/home/rosbot/docs/%s/api' % (' '.join(dsts), ros_distro)]
        call_with_list(command)
    folders = ['%s/changelogs' % doc_path, '%s/tags' % doc_path]
    folders = [f for f in folders if os.path.exists(f)]
    if folders:
        command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/home/rosbot/docs/%s' % (' '.join(folders), ros_distro)]
        call_with_list(command)

    if not skip_garbage:
        #Remove the autogenerated doc files since they take up a lot of space if left on the server
        shutil.rmtree(tags_location)
        shutil.rmtree(doc_path)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    #Tell jenkins that we've succeeded
    print("Preparing xml test results")
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print("Created test results directory")
    except Exception:
        pass

    if build_errors:
        import yaml
        copy_test_results(workspace, docspace,
                          """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below,
you can update information in the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s""" % (build_errors,
                              ros_distro,
                              repo,
                              repo,
                              ros_distro,
                              yaml.safe_dump(doc_conf, default_flow_style=False),
                              yaml.safe_dump(depends_conf, default_flow_style=False)),
                          "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)
Code example #7
def document_necessary(workspace, docspace, ros_distro, repo,
                       rosdoc_lite_version, jenkins_scripts_version, force_doc=False):
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))

    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    #Install the repository
    try:
        install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    except BuildException:
        # checkout failed, try to get default branches of repos to notify the maintainers
        print('Failed to checkout repositories, trying to checkout default branches to collect maintainer information for notification about failure')
        for tuple in doc_conf:
            for repo in tuple.values():
                repo['version'] = None
        install_repo(docspace, workspace, repo, doc_conf, [])
        extract_notification_recipients(docspace, doc_conf)
        raise

    extract_notification_recipients(docspace, doc_conf)

    #Load information about existing tags
    jenkins_scripts_path = os.path.join(workspace, 'jenkins_scripts')
    if not os.path.exists(jenkins_scripts_path):
        # if jenkins_scripts has not been checked out in the workspace
        # expect that the user calls doc from within a jenkins_scripts checkout
        jenkins_scripts_path = os.getcwd()
    rosdoc_tag_index_path = os.path.join(workspace, 'rosdoc_tag_index')
    tags_db = TagsDb(ros_distro, jenkins_scripts_path, rosdoc_tag_index_path)

    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print("REPO HASHES: %s" % repo_hashes)

    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()

        # create marker files for all packages and upload them
        doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
        if os.path.exists(doc_path):
            shutil.rmtree(doc_path)
        repo_path = os.path.realpath("%s" % (docspace))
        stacks, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, depends_conf)
        folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
        if folders:
            dsts = ['%s/api/%s/stamp' % (doc_path, f) for f in folders]
            for dst in dsts:
                os.makedirs(os.path.dirname(dst))
                with open(dst, 'w'):
                    pass
            print("Uploading marker files to identify that documentation is up-to-date.")
            command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s/api/ [email protected]:/home/rosbot/docs/%s/api' % (doc_path, ros_distro)]
            call_with_list(command)

        return False

    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {'doc_conf': doc_conf, 'depends_conf': depends_conf, 'tags_db': tags_db}
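document_necessary either returns False (nothing changed, marker files already uploaded) or a dict whose keys match the trailing parameters of the newer document_repo variant above (the one taking no_chroot and skip_garbage). Below is a hypothetical driver sketch under that assumption; the surrounding variables (workspace, platform, and so on) are stand-ins and do not come from the source.

# Sketch only: wire document_necessary into document_repo when work is needed.
necessary = document_necessary(workspace, docspace, ros_distro, repo,
                               rosdoc_lite_version, jenkins_scripts_version)
if necessary:
    # doc_conf, depends_conf and tags_db are unpacked from the returned dict.
    document_repo(workspace, docspace, ros_distro, repo, platform, arch,
                  homepage, no_chroot, skip_garbage, **necessary)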
Code example #8
def document_repo(workspace, docspace, ros_distro, repo,
                  platform, arch, homepage,
                  doc_conf, depends_conf, tags_db):
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinsall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print "Repo path %s" % repo_path

    #Walk through the installed repositories and find old-style packages, new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    print "Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys())
    #print "Catkin packages: %s" % catkin_packages
    #print "Manifest packages: %s" % manifest_packages
    #print "Stacks: %s" % stacks

    #Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro)
    import rosdistro
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print "Apt dependencies: %s" % apt_deps

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print "Build order that honors deps:\n%s" % build_order

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    print "Installing all dependencies for %s" % repo
    if apt_deps:
        call("apt-get install %s --yes" % (' '.join(apt_deps)))
    print "Done installing dependencies"

    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append('export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH')
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #We assume that there will be no build errors to start
    build_errors = []

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    if catkin_packages \
       and not 'rosdoc_lite' in catkin_packages.keys() and not 'catkin' in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    #For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(stacks, ros_distro, os.path.join(docspace, 'local_installs'))
    build_errors.extend(errs)
    sources.append(source)

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin, we have to build all the code and
    #install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    #We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    command = ['bash', '-c',
               'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/var/www/www.ros.org/html/doc/%s/tags %s' % (ros_distro, tags_location)]
    call_with_list(command)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))

    repo_tags = document_packages(manifest_packages, catkin_packages, build_order,
                                  repos_to_doc, sources, tags_db, full_apt_deps,
                                  ros_dep, repo_map, repo_path, docspace, ros_distro,
                                  homepage, doc_job, tags_location, doc_path)

    #Copy the files to the appropriate place
    #call("rsync -e \"ssh -o StrictHostKeyChecking=no\" -qr %s [email protected]:/var/www/www.ros.org/html/rosdoclite" % (doc_path))
    command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/var/www/www.ros.org/html/rosdoclite' % doc_path]
    call_with_list(command)

    #Remove the autogenerated doc files since they take up a lot of space if left on the server
    shutil.rmtree(tags_location)
    shutil.rmtree(doc_path)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    #Tell jenkins that we've succeeded
    print "Preparing xml test results"
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print "Created test results directory"
    except Exception:
        pass

    if build_errors:
        copy_test_results(workspace, docspace,
                          """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below,
you can update information in the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s""" % (build_errors,
                              ros_distro,
                              repo,
                              repo,
                              ros_distro,
                              yaml.safe_dump(doc_conf, default_flow_style=False),
                              yaml.safe_dump(depends_conf, default_flow_style=False)),
                          "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)
Code example #9
def document_repo(
    workspace,
    docspace,
    ros_distro,
    repo,
    platform,
    arch,
    homepage,
    no_chroot,
    skip_garbage,
    doc_conf,
    depends_conf,
    tags_db,
):
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    # Get the list of repositories that should have documentation run on them
    # These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print("Repo path %s" % repo_path)

    # Walk through the installed repositories and find old-style packages, new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    if ros_distro == "indigo":
        if stacks or manifest_packages:
            print("Ignoring dry packages and stacks in '%s'" % ros_distro)
            stacks = {}
            manifest_packages = {}
        if not catkin_packages:
            raise BuildException("No catkin packages found")
    print("Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys()))
    # print "Catkin packages: %s" % catkin_packages
    # print "Manifest packages: %s" % manifest_packages
    # print "Stacks: %s" % stacks

    # Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro, no_chroot=no_chroot)
    import rosdistro

    if ros_distro == "electric":
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print("Apt dependencies: %s" % apt_deps)

    # Get rosdistro release file if there are catkin packages to get status
    if catkin_packages and ros_distro not in ["electric", "fuerte"]:
        print("Fetch rosdistro files for: %s" % ros_distro)
        index = rosdistro.get_index(rosdistro.get_index_url())
        rosdistro_release_file = rosdistro.get_release_file(index, ros_distro)
        rosdistro_source_file = rosdistro.get_source_file(index, ros_distro)
    else:
        rosdistro_release_file = None
        rosdistro_source_file = None

    # Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
    if os.path.exists(doc_path):
        shutil.rmtree(doc_path)

    # Write stack manifest files for all stacks, we can just do this off the
    # stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    # Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print("Build order that honors deps:\n%s" % build_order)

    # We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    if not no_chroot:
        print("Installing all dependencies for %s" % repo)

        # XXX this is a really ugly hack to make the hydro doc job for ros_comm pass
        # otherwise roslisp pulls in the rosgraph_msgs package as a Debian dependency
        # which then breaks catkin_basic since it includes the msgs CMake files multiple times
        # resulting in duplicate target names (https://github.com/ros/ros_comm/issues/471)
        if repo == "ros_comm" and "ros-hydro-roslisp" in apt_deps:
            apt_deps.remove("ros-hydro-roslisp")

        if apt_deps:
            call("apt-get install %s --yes" % (" ".join(apt_deps)))
        print("Done installing dependencies")

    # Set up the list of things that need to be sourced to run rosdoc_lite
    # TODO: Hack for electric
    if ros_distro == "electric":
        # lucid doesn't have /usr/local on the path by default... weird
        sources = ["export PATH=/usr/local/sbin:/usr/local/bin:$PATH"]
        sources.append("source /opt/ros/fuerte/setup.bash")
        sources.append("export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH")
    else:
        sources = ["source /opt/ros/%s/setup.bash" % ros_distro]

    # We assume that there will be no build errors to start
    build_errors = []

    # Everything that is after fuerte supports catkin workspaces, so everything
    # that has packages with package.xml files
    local_install_path = os.path.join(docspace, "local_installs")
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)

    # Make sure to create some subfolders under the local install path
    def makedirs(path):
        if not os.path.exists(path):
            os.makedirs(path)

    makedirs(os.path.join(local_install_path, "bin"))
    makedirs(os.path.join(local_install_path, "lib/python2.7/dist-packages"))
    makedirs(os.path.join(local_install_path, "share"))

    if catkin_packages and not "rosdoc_lite" in catkin_packages.keys() and not "catkin" in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    # For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path)
    build_errors.extend(errs)
    sources.append(source)

    # For all our manifest packages (dry or fuerte catkin) we want to build
    # messages. Note, for fuerte catkin, we have to build all the code and
    # install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    # We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    if os.path.exists(tags_location):
        shutil.rmtree(tags_location)
    command = [
        "bash",
        "-c",
        'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/home/rosbot/docs/%s/tags %s'
        % (ros_distro, tags_location),
    ]
    call_with_list(command)

    repo_tags = document_packages(
        manifest_packages,
        catkin_packages,
        build_order,
        repos_to_doc,
        sources,
        tags_db,
        full_apt_deps,
        ros_dep,
        repo_map,
        repo_path,
        docspace,
        ros_distro,
        homepage,
        doc_job,
        tags_location,
        doc_path,
        rosdistro_release_file,
        rosdistro_source_file,
    )

    # Copy the files to the appropriate place
    folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
    if folders:
        dsts = ["%s/api/%s" % (doc_path, f) for f in folders]
        for dst in dsts:
            with open(os.path.join(dst, "stamp"), "w"):
                pass
        command = [
            "bash",
            "-c",
            'rsync -e "ssh -o StrictHostKeyChecking=no" -qr --delete %s [email protected]:/home/rosbot/docs/%s/api'
            % (" ".join(dsts), ros_distro),
        ]
        call_with_list(command)
    folders = ["%s/changelogs" % doc_path, "%s/tags" % doc_path]
    folders = [f for f in folders if os.path.exists(f)]
    if folders:
        command = [
            "bash",
            "-c",
            'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/home/rosbot/docs/%s'
            % (" ".join(folders), ros_distro),
        ]
        call_with_list(command)

    if not skip_garbage:
        # Remove the autogenerated doc files since they take up a lot of space if left on the server
        shutil.rmtree(tags_location)
        shutil.rmtree(doc_path)

    # Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        # Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace("_", "-"))

        # We only want to write tags for packages that have a valid deb name
        # For others, the only way to get cross referencing is to document everything
        # together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    # Make sure to write changes to tag files and deps
    # We don't want to write hashes on an unsuccessful build
    excludes = ["rosinstall_hashes"] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    # Tell jenkins that we've succeeded
    print("Preparing xml test results")
    try:
        os.makedirs(os.path.join(workspace, "test_results"))
        print("Created test results directory")
    except Exception:
        pass

    if build_errors:
        import yaml

        copy_test_results(
            workspace,
            docspace,
            """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below,
you can update information in the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s"""
            % (
                build_errors,
                ros_distro,
                repo,
                repo,
                ros_distro,
                yaml.safe_dump(doc_conf, default_flow_style=False),
                yaml.safe_dump(depends_conf, default_flow_style=False),
            ),
            "message_generation_failure",
        )
    else:
        copy_test_results(workspace, docspace)
Code example #10
def document_necessary(
    workspace, docspace, ros_distro, repo, rosdoc_lite_version, jenkins_scripts_version, force_doc=False
):
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))

    # Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    # Install the repository
    try:
        install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    except BuildException:
        # checkout failed, try to get default branches of repos to notify the maintainers
        print(
            "Failed to checkout repositories, trying to checkout default branches to collect maintainer information for notification about failure"
        )
        for tuple in doc_conf:
            for repo in tuple.values():
                repo["version"] = None
        install_repo(docspace, workspace, repo, doc_conf, [])
        extract_notification_recipients(docspace, doc_conf)
        raise

    extract_notification_recipients(docspace, doc_conf)

    # Load information about existing tags
    jenkins_scripts_path = os.path.join(workspace, "jenkins_scripts")
    if not os.path.exists(jenkins_scripts_path):
        # if jenkins_scripts has not been checked out in the workspace
        # expect that the user calls doc from within a jenkins_scripts checkout
        jenkins_scripts_path = os.getcwd()
    rosdoc_tag_index_path = os.path.join(workspace, "rosdoc_tag_index")
    tags_db = TagsDb(ros_distro, jenkins_scripts_path, rosdoc_tag_index_path)

    # Check to see if we need to document this repo list by checking if any of
    # the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [("%s" % repo, doc_conf), ("%s_depends" % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    # We also want to make sure that we run documentation generation anytime
    # jenkins_scripts or rosdoc_lite has changed since the last time this job was
    # run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get("rosdoc_lite-sys", None)
    old_jenkins_scripts_hash = repo_hashes.get("jenkins_scripts-sys", None)
    print("REPO HASHES: %s" % repo_hashes)

    if (
        not changes
        and old_rosdoc_lite_hash == rosdoc_lite_version
        and old_jenkins_scripts_hash == jenkins_scripts_version
    ):
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()

        # create marker files for all packages and upload them
        doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
        if os.path.exists(doc_path):
            shutil.rmtree(doc_path)
        repo_path = os.path.realpath("%s" % (docspace))
        stacks, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, depends_conf)
        folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
        if folders:
            dsts = ["%s/api/%s/stamp" % (doc_path, f) for f in folders]
            for dst in dsts:
                os.makedirs(os.path.dirname(dst))
                with open(dst, "w"):
                    pass
            print("Uploading marker files to identify that documentation is up-to-date.")
            command = [
                "bash",
                "-c",
                'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s/api/ [email protected]:/home/rosbot/docs/%s/api'
                % (doc_path, ros_distro),
            ]
            call_with_list(command)

        return False

    # Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes["rosdoc_lite-sys"] = rosdoc_lite_version
    repo_hashes["jenkins_scripts-sys"] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {"doc_conf": doc_conf, "depends_conf": depends_conf, "tags_db": tags_db}
Code example #11
def document_repo(workspace, docspace, ros_distro, repo, platform, arch,
                  homepage, doc_conf, depends_conf, tags_db):
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print "Repo path %s" % repo_path

    #Walk through the installed repositories and find old-style packages, new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(
        repo_path, doc_conf, depends_conf)
    print "Running documentation generation on\npackages: %s" % (
        manifest_packages.keys() + catkin_packages.keys())
    #print "Catkin packages: %s" % catkin_packages
    #print "Manifest packages: %s" % manifest_packages
    #print "Stacks: %s" % stacks

    #Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro)
    import rosdistro
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks,
                            manifest_packages)
    print "Apt dependencies: %s" % apt_deps

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages,
                                                   manifest_packages)

    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db,
                          doc_job, homepage)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print "Build order that honors deps:\n%s" % build_order

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    print "Installing all dependencies for %s" % repo
    if apt_deps:
        call("apt-get install %s --yes" % (' '.join(apt_deps)))
    print "Done installing dependencies"

    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append(
            'export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH'
        )
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #We assume that there will be no build errors to start
    build_errors = []

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    if catkin_packages \
       and not 'rosdoc_lite' in catkin_packages.keys() and not 'catkin' in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace,
                                           ros_distro)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    #For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(
        stacks, ros_distro, os.path.join(docspace, 'local_installs'))
    build_errors.extend(errs)
    sources.append(source)

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin, we have to build all the code and
    #install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order,
                                                ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    #We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    command = [
        'bash', '-c',
        'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/var/www/www.ros.org/html/doc/%s/tags %s'
        % (ros_distro, tags_location)
    ]
    call_with_list(command)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))

    repo_tags = document_packages(manifest_packages, catkin_packages,
                                  build_order, repos_to_doc, sources, tags_db,
                                  full_apt_deps, ros_dep, repo_map, repo_path,
                                  docspace, ros_distro, homepage, doc_job,
                                  tags_location, doc_path)

    #Copy the files to the appropriate place
    #call("rsync -e \"ssh -o StrictHostKeyChecking=no\" -qr %s [email protected]:/var/www/www.ros.org/html/rosdoclite" % (doc_path))
    command = [
        'bash', '-c',
        'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/var/www/www.ros.org/html/rosdoclite'
        % doc_path
    ]
    call_with_list(command)

    #Remove the autogenerated doc files since they take up a lot of space if left on the server
    shutil.rmtree(tags_location)
    shutil.rmtree(doc_path)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    #Tell jenkins that we've succeeded
    print "Preparing xml test results"
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print "Created test results directory"
    except Exception:
        pass

    if build_errors:
        copy_test_results(
            workspace, docspace,
            """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below,
you can update information in the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s""" %
            (build_errors, ros_distro, repo, repo, ros_distro,
             yaml.safe_dump(doc_conf, default_flow_style=False),
             yaml.safe_dump(depends_conf, default_flow_style=False)),
            "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)