Example #1
    def setUp(self):
        self.maxDiff = None

        self.cp = cob_pipe.CobPipe()
        self.cp.load_config_from_url('fmw-jk', 'jenkins-test-server', 'test-user')

        self.repo_test_dict = {'cob_extern': {'type': 'git', 'url': 'git://github.com/ipa320/cob_extern.git',
                                              'version': 'master', 'poll': True, 'ros_distro': ['groovy'],
                                              'prio_ubuntu_distro': 'oneiric', 'prio_arch': 'amd64',
                                              'regular_matrix': None, 'dependencies': None,
                                              'jobs': None, 'robots': None}}
        self.pipe_config_test_dict = {'user_name': 'test-user', 'server_name': 'test-server',
                                      'email': '*****@*****.**', 'committer_email_enabled': False,
                                      'repositories': self.repo_test_dict}
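
As a usage note, the same configuration can presumably be loaded offline without the URL round-trip, as Example #2 does (a sketch assuming the CobPipe API shown in these examples):

cp = cob_pipe.CobPipe()
cp.load_config_from_dict(pipe_config_test_dict)  # same dict as self.pipe_config_test_dict above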
Example #2
    def test__get_custom_dependencies__return_dependency_dict(self):
        dep_test_dict = {'dep_1': {'type': 'git', 'url': 'git://github.com/ipa320/dep_1.git',
                                   'poll': True},
                         'dep_2': {'type': 'git', 'url': 'git://github.com/ipa320/dep_2.git',
                                   'poll': False}}
        repo_test_dict = {'cob_extern': {'type': 'git', 'url': 'git://github.com/ipa320/cob_extern.git',
                                         'version': 'master', 'poll': True, 'ros_distro': ['groovy'],
                                         'prio_ubuntu_distro': 'oneiric', 'prio_arch': 'amd64',
                                         'regular_matrix': None, 'dependencies': dep_test_dict,
                                         'jobs': None, 'robots': None}}
        self.pipe_config_test_dict['repositories'] = repo_test_dict
        self.cp = cob_pipe.CobPipe()
        self.cp.load_config_from_dict(self.pipe_config_test_dict)
        result = self.cp.get_custom_dependencies()
        self.assertEqual(result, {'dep_1': ['cob_extern'], 'dep_2': ['cob_extern']})
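
The assertion above pins down the contract of get_custom_dependencies: it inverts the repository -> dependencies mapping into dependency -> [repositories]. A minimal sketch of that inversion, assuming repositories are exposed as objects carrying a dependencies dict (the real implementation lives in cob_pipe):

def invert_dependencies(repositories):
    # Map each user-defined dependency to the repositories that declare it.
    deps = {}
    for repo_name, repo in repositories.items():
        for dep_name in (repo.dependencies or {}):
            deps.setdefault(dep_name, []).append(repo_name)
    return deps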
Example #3
    def setUp(self):
        self.maxDiff = None

        #self.test_dict = common.get_buildpipeline_configs('jenkins-test-server', 'test-user')
        self.test_pipe_inst = cob_pipe.CobPipe()
        self.test_pipe_inst.load_config_from_url('fmw-jk',
                                                 'jenkins-test-server',
                                                 'test-user')

        self.job_type_test_list = [
            'pipe_starter', 'prio_build', 'regular_build'
        ]

        with open(
                os.path.expanduser('~/jenkins-config/slave_config.yaml')) as f:
            info = yaml.load(f)
        self.jenkins_instance = jenkins.Jenkins(info['master_url'],
                                                info['jenkins_login'],
                                                info['jenkins_pw'])
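
The setUp above expects ~/jenkins-config/slave_config.yaml to provide master_url, jenkins_login and jenkins_pw. A hypothetical minimal file with placeholder values could be written like this (assumes ~/jenkins-config exists; real deployments hold actual credentials):

import os
import yaml

config = {'master_url': 'http://jenkins-test-server:8080',  # placeholder URL
          'jenkins_login': 'test-user',                      # placeholder login
          'jenkins_pw': 'secret'}                            # placeholder password
with open(os.path.expanduser('~/jenkins-config/slave_config.yaml'), 'w') as f:
    yaml.safe_dump(config, f, default_flow_style=False)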
Example #4
def main():
    #########################
    ### parsing arguments ###
    #########################
    time_parsing = datetime.datetime.now()
    print "=====> entering argument parsing step at", time_parsing

    # parse parameter values
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose', action='store_true', default=False)
    (options, args) = parser.parse_args()

    if len(args) < 5:
        print "Usage: %s pipeline_repos_owner server_name user_name ros_distro build_repo" % sys.argv[
            0]
        raise common.BuildException("Wrong arguments for build script")

    # get arguments
    pipeline_repos_owner = args[0]
    server_name = args[1]
    user_name = args[2]
    ros_distro = args[3]
    build_identifier = args[4]  # repository + suffix
    build_repo = build_identifier.split('__')[0]  # only repository to build
    # environment variables
    workspace = os.environ['WORKSPACE']

    # cob_pipe object
    cp_instance = cob_pipe.CobPipe()
    cp_instance.load_config_from_file(pipeline_repos_owner,
                                      server_name,
                                      user_name,
                                      file_location=os.environ["WORKSPACE"])
    pipe_repos = cp_instance.repositories
    common.output("Pipeline configuration successfully loaded", blankline='b')

    # (debug) output
    print "\n", 50 * 'X'
    print "\nTesting on ros distro: %s" % ros_distro
    print "Testing repository: %s" % build_repo
    if build_repo != build_identifier:
        print "       with suffix: %s" % build_identifier.split('__')[1]
    print "Using source: %s" % pipe_repos[build_identifier].url
    print "Testing branch/version: %s" % pipe_repos[build_identifier].version
    print "\n", 50 * 'X'

    # for hardware build: only support hydro; skip groovy builds (exit cleanly)
    if ros_distro == "groovy":
        sys.exit(False)

    # for hardware build: remove old build artifacts
    limit = 3  # limit amount of old builds
    print workspace
    while len(os.listdir(workspace + "/..")) > limit + 1:  # we will not count the "current" sym link
        old_builds = os.listdir(workspace + "/..")
        shutil.rmtree(workspace + "/../" + sorted(old_builds)[0])

    # set up directories variables
    tmpdir = workspace
    repo_checkoutspace = os.path.join(
        tmpdir, 'checkout'
    )  # location to store repositories in (will be separated in wet and dry later on)
    os.makedirs(repo_checkoutspace)
    repo_sourcespace = os.path.join(tmpdir,
                                    'src')  # location to build repositories in
    os.makedirs(repo_sourcespace)
    repo_sourcespace_wet = os.path.join(repo_sourcespace, 'wet',
                                        'src')  # wet (catkin) repositories
    os.makedirs(repo_sourcespace_wet)
    repo_sourcespace_dry = os.path.join(repo_sourcespace,
                                        'dry')  # dry (rosbuild) repositories
    os.makedirs(repo_sourcespace_dry)
    repo_build_logs = os.path.join(tmpdir,
                                   'build_logs')  # location for build logs
    os.makedirs(repo_build_logs)

    # init catkin workspace
    ros_env_repo = common.get_ros_env(
        '/opt/ros/%s/setup.bash' %
        ros_distro)  # source ros_distro (needed to do a catkin_init_workspace)
    # for hardware build: merge workspace with robot account #FIXME: this should be parameterisable in plugin (select a path to a setup.bash file in the admin config and mark a checkbox for the user config)
    if os.path.isdir("/u/robot/git/care-o-bot/devel"):
        ros_env_repo = common.get_ros_env(
            '/u/robot/git/care-o-bot/devel/setup.bash')
    common.call("catkin_init_workspace %s" % repo_sourcespace_wet,
                ros_env_repo,
                verbose=False)  # init wet workspace
    common.call("catkin_make --directory %s/wet" % repo_sourcespace,
                ros_env_repo,
                verbose=False)  # build wet workspace to generate a setup.bash

    ################
    ### checkout ###
    ################
    time_checkout = datetime.datetime.now()
    print "=====> entering checkout step at", time_checkout

    # download build_repo from source #
    print "Creating rosinstall file for repository %s" % build_repo
    rosinstall = ""
    if build_identifier in pipe_repos:  # check if triggering identifier is really present in pipeline config
        rosinstall += pipe_repos[build_identifier].get_rosinstall()
    else:
        err_msg = "Pipeline was triggered by repository %s which is not in pipeline config!" % build_identifier
        raise common.BuildException(err_msg)

    # write rosinstall file
    print "Rosinstall file for repository: \n %s" % rosinstall
    with open(os.path.join(workspace, 'repo.rosinstall'), 'w') as f:
        f.write(rosinstall)
    print "Install repository from source:"
    # rosinstall repos
    common.call("rosinstall -j 8 --verbose %s %s/repo.rosinstall /opt/ros/%s" %
                (repo_checkoutspace, workspace, ros_distro))
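    # The repo.rosinstall written above presumably follows the standard
    # rosinstall YAML layout, e.g. (illustration only; actual values come
    # from the pipeline config):
    #   - git:
    #       local-name: cob_extern
    #       uri: git://github.com/ipa320/cob_extern.git
    #       version: master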

    # get the repositories build dependencies
    print "Get build dependencies of repo"

    # get all packages in checkoutspace
    (catkin_packages, stacks,
     manifest_packages) = common.get_all_packages(repo_checkoutspace)

    # check if build_repo is wet or dry and get all corresponding deps
    build_repo_type = ''
    if build_repo in catkin_packages:  # wet repo with metapackage
        print "repo %s is wet" % build_repo
        build_repo_type = 'wet'
        repo_build_dependencies = common.get_nonlocal_dependencies(
            catkin_packages, {}, {}, build_depends=True, test_depends=False)
    elif (build_repo
          not in catkin_packages) and (build_repo not in stacks) and (
              catkin_packages != []):  # wet repo without metapackage
        print "repo %s is wet without metapackage" % build_repo
        build_repo_type = 'wet'
        repo_build_dependencies = common.get_nonlocal_dependencies(
            catkin_packages, {}, {}, build_depends=True, test_depends=False)
    elif build_repo in stacks:  # dry repo with stack
        print "repo %s is dry" % build_repo
        build_repo_type = 'dry'
        repo_build_dependencies = common.get_nonlocal_dependencies({}, stacks,
                                                                   {})
    #TODO elif : # dry repo without stack
    else:  # build_repo is neither wet nor dry
        raise common.BuildException(
            "Repository %s to build not found in checkoutspace" % build_repo)

    # install user-defined/customized dependencies of build_repo from source
    rosinstall = ''
    fulfilled_deps = []
    for dep in repo_build_dependencies:
        if dep in pipe_repos[build_identifier].dependencies:
            print "Install user-defined build dependency %s from source" % dep
            rosinstall += pipe_repos[build_identifier].dependencies[
                dep].get_rosinstall()
            fulfilled_deps.append(dep)

    # install additional, indirect user-defined dependencies
    for dep in pipe_repos[build_identifier].dependencies:
        if dep not in fulfilled_deps:
            print "Install additional user-defined build dependency %s from source" % dep
            rosinstall += pipe_repos[build_identifier].dependencies[
                dep].get_rosinstall()
            fulfilled_deps.append(dep)

    # check if all user-defined/customized dependencies are satisfied
    if sorted(fulfilled_deps) != sorted(
            pipe_repos[build_identifier].dependencies):
        print "Not all user-defined build dependencies are fulfilled"
        print "User-defined build dependencies:\n - %s" % '\n - '.join(
            pipe_repos[build_identifier].dependencies)
        print "Fulfilled dependencies:\n - %s" % '\n - '.join(fulfilled_deps)
        raise common.BuildException(
            "Not all user-defined build dependencies are fulfilled")

    if rosinstall != '':
        # write .rosinstall file
        print "Rosinstall file for user-defined build dependencies: \n %s" % rosinstall
        with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
            f.write(rosinstall)
        print "Install user-defined build dependencies from source"
        # rosinstall depends
        common.call(
            "rosinstall -j 8 --verbose %s %s/repo.rosinstall /opt/ros/%s" %
            (repo_checkoutspace, workspace, ros_distro))

        # get also deps of just installed user-defined/customized dependencies
        (catkin_packages, stacks,
         manifest_packages) = common.get_all_packages(repo_checkoutspace)

        if build_repo_type == 'wet':
            if stacks != {}:
                raise common.BuildException(
                    "Catkin (wet) package %s depends on (dry) stack(s):\n%s" %
                    (build_repo, '- ' + '\n- '.join(stacks)))
            # take only wet packages
            repo_build_dependencies = common.get_nonlocal_dependencies(
                catkin_packages, {}, {},
                build_depends=True,
                test_depends=False)
        else:  # dry build repo
            # take all packages
            repo_build_dependencies = common.get_nonlocal_dependencies(
                catkin_packages,
                stacks, {},
                build_depends=True,
                test_depends=False)
        repo_build_dependencies = [
            dep for dep in repo_build_dependencies if dep not in fulfilled_deps
        ]

    print ""
    print "Found the following packages"
    print "  wet packages:     ", catkin_packages.keys()
    print "  dry stacks:       ", stacks.keys()
    print "  dry packages:     ", manifest_packages.keys()
    print ""

    # separate installed repos in wet and dry
    print "Separate installed repositories in wet and dry"
    # get all folders in repo_checkoutspace
    checkoutspace_dirs = [
        name for name in os.listdir(repo_checkoutspace)
        if os.path.isdir(os.path.join(repo_checkoutspace, name))
    ]
    for dir in checkoutspace_dirs:
        if dir in catkin_packages.keys():  # wet repo with metapackage
            shutil.move(os.path.join(repo_checkoutspace, dir),
                        os.path.join(repo_sourcespace_wet, dir))
        elif build_repo_type == 'wet' and dir == build_repo:  # wet repo without metapackage
            shutil.move(os.path.join(repo_checkoutspace, dir),
                        os.path.join(repo_sourcespace_wet, dir))
        elif dir in stacks.keys():  # dry repo with stack
            shutil.move(os.path.join(repo_checkoutspace, dir),
                        os.path.join(repo_sourcespace_dry, dir))
        #TODO elif: # dry repo without stack
        #else:
        #    raise common.BuildException("Could not separate %s into wet or dry sourcespace." %dir)
    # remove checkout dir
    common.call("rm -rf %s" % repo_checkoutspace)

    # setup ros workspace
    print "Set up ros workspace and setup environment variables"
    ros_env_repo = common.get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    common.call("rosws init %s /opt/ros/%s" % (repo_sourcespace, ros_distro),
                verbose=False)  # init workspace pointing to ros_distro

    # for hardware build: merge workspace with robot account #FIXME: this should be parameterisable in plugin (select a path to a setup.bash file in the admin config and mark a checkbox for the user config)
    if os.path.isdir("/u/robot/git/care-o-bot"):
        common.call("rosws merge -t %s /u/robot/git/care-o-bot" %
                    repo_sourcespace,
                    verbose=False)  # merge robot account workspace

    common.call("rosws merge -t %s %s/wet/devel" %
                (repo_sourcespace, repo_sourcespace),
                verbose=False)  # merge wet workspace
    common.call("rosws merge -t %s %s/dry" %
                (repo_sourcespace, repo_sourcespace),
                verbose=False)  # merge dry workspace
    ros_env_repo = common.get_ros_env(
        repo_sourcespace + '/setup.bash')  # source wet and dry workspace

    ############################
    ### install dependencies ###
    ############################
    time_install = datetime.datetime.now()
    print "=====> entering dependency installation step at", time_install

    # Create rosdep object
    rosdep_resolver = None
    print "Create rosdep object"
    try:
        rosdep_resolver = rosdep.RosDepResolver(ros_distro)
    except:  # when init fails the first time
        from time import sleep
        sleep(10)
        rosdep_resolver = rosdep.RosDepResolver(ros_distro)

    print "Install build dependencies: %s" % (
        ', '.join(repo_build_dependencies))
    missing_packages = common.apt_get_check_also_nonrosdep(
        repo_build_dependencies, ros_distro, rosdep_resolver)
    if len(missing_packages) > 0:
        raise common.BuildException(
            "Some dependencies are missing. Please ask your administrator to install the following packages: %s"
            % missing_packages)
    print "All denendencies already installed."

    #############
    ### build ###
    #############
    time_build = datetime.datetime.now()
    print "=====> entering build step at", time_build

    # get amount of cores available on host system
    cores = multiprocessing.cpu_count()

    ### catkin repositories
    if catkin_packages != {}:
        print "Build wet packages: ", catkin_packages.keys()
        try:
            common.call("catkin_make --directory %s/wet" % repo_sourcespace,
                        ros_env_repo)
        except common.BuildException as ex:
            print ex.msg
            raise common.BuildException(
                "Failed to catkin_make wet repositories")

    ### rosbuild repositories
    if build_repo_type == 'dry':
        # build dry repositories
        print "Build dry stacks:   ", stacks.keys()
        print "Build dry packages: ", manifest_packages.keys()
        packages_to_build = " ".join(stacks.keys()) + " ".join(
            manifest_packages.keys())
        try:
            common.call(
                "rosmake -rV --skip-blacklist --profile --pjobs=%s --output=%s %s"
                % (cores + 1, repo_build_logs, packages_to_build),
                ros_env_repo)
        except common.BuildException as ex:
            try:
                shutil.move(repo_build_logs,
                            os.path.join(workspace, "build_logs"))
            finally:
                print ex.msg
                raise common.BuildException(
                    "Failed to rosmake dry repositories")

    ###########
    ### end ###
    ###########
    # for hardware builds: create sym link to new build
    common.call("rm -f %s/../current" % workspace)
    common.call("ln -sf %s %s/../current" % (workspace, workspace))

    # steps: parsing, checkout, install, analysis, build, finish
    time_finish = datetime.datetime.now()
    print "=====> finished script at", time_finish
    print "durations:"
    print "parsing arguments in       ", (time_checkout - time_parsing)
    print "checkout in                ", (time_install - time_checkout)
    print "install dependencies in    ", (time_build - time_install)
    print "build in                   ", (time_finish - time_build)
    print "total                      ", (time_finish - time_parsing)
    print ""
    print "For manual testing, please run the following line in your testing terminal"
    print "source " + repo_sourcespace + "/setup.bash"
    print ""
Example #5
def main():
    """
    Create a build and test pipeline on a Jenkins CI server.
    Starting point is the pipeline configuration file (pipeline_config.yaml) of
    the given user stored on github. For this configuration a set of Jenkins
    projects/jobs will be generated.
    """

    # parse options
    usage = "Usage: %prog [masterURL login password configFolder | jenkinsConfigFile] pipelineReposOwner username"
    parser = optparse.OptionParser(usage)
    parser.add_option("-m", "--masterURL", action="store", type="string", dest="master_url",
                      metavar="URL", help="URL of Jenkins server/master")
    parser.add_option("-l", "--login", action="store", type="string", dest="jenkins_login",
                      metavar="LOGIN", help="Login name of Jenkins Admin which has rights to create and delete jobs")
    parser.add_option("-p", "--password", action="store", type="string", dest="jenkins_pw",
                      metavar="PASSWORD", help="Jenkins Admin password")
    parser.add_option("-c", "--configFolder", action="store", type="string", dest="config_folder",
                      metavar="CONFIGFOLDER", help="Folder where '.gitconfig', '.ssh', 'jenkins_setup' and 'jenkins_config' are stored")
    parser.add_option("-t", "--tarballLocation", action="store", type="string", dest="tarball_location",
                      metavar="USER@SERVERADDRESS:PATH", help="Place where the Tarballs are located") #TODO: not used any more: delete

    parser.add_option("--jenkinsConfigFile", action="store", type="string", dest="jenkinsConfigFile",
                      metavar="FILE", help="YAML file that replaces the Jenkins config options and contains values for:\
                                            masterURL, login, password, configFolder")

    parser.add_option("-o", "--pipelineReposOwner", action="store", type="string", dest="pipeline_repos_owner",
                      metavar="PIPELINE_REPOS_OWNER", help="Owner of the GitHub repositories 'jenkins_setup' and 'jenkins_config'")
    parser.add_option("-u", "--username", action="store", type="string", dest="username",
                      metavar="USERNAME", help="Name of user to generate pipeline for")
    parser.add_option("-d", "--delete", action="store_true", default=False,
                      help="Delete")
    (options, args) = parser.parse_args()

    if len(args) != 0:
        print "Usage: %s [masterURL login password configFolder | jenkinsConfigFile] pipelineReposOwner username" % (sys.argv[0])
        sys.exit()

    if options.jenkinsConfigFile:
        # load jenkins config from file
        with open(os.path.expanduser(options.jenkinsConfigFile)) as f:
            jenkins_conf = yaml.load(f)

        master_name = get_master_name(jenkins_conf['masterURL'])

        # create jenkins instance
        jenkins_instance = jenkins.Jenkins(jenkins_conf['masterURL'], jenkins_conf['login'],
                                           jenkins_conf['password'])

    elif options.master_url and options.jenkins_login and options.jenkins_pw:
        master_name = get_master_name(options.master_url)

        # create jenkins instance
        jenkins_instance = jenkins.Jenkins(options.master_url, options.jenkins_login, options.jenkins_pw)

    else:
        print "Usage: %s [masterURL login password configFolder | jenkinsConfigFolder] pipelineReposOwner username" % (sys.argv[0])
        sys.exit()

    if not options.pipeline_repos_owner or not options.username:
        print "Usage: %s [masterURL login password configFolder | jenkinsConfigFolder] pipelineReposOwner username" % (sys.argv[0])
        sys.exit()

    # get all existent jobs for user
    existent_user_jobs = []
    for job in jenkins_instance.get_jobs():
        job_owner = job['name'].split('__')[0]
        if options.username == job_owner:
            existent_user_jobs.append(job['name'])
    modified_jobs = []

    # get pipeline configs object from url
    plc_instance = cob_pipe.CobPipe()
    plc_instance.load_config_from_file(options.pipeline_repos_owner, master_name, options.username, file_location = options.config_folder)
    plc_instance.config_folder = options.config_folder

    # get jobs to create
    job_type_dict = plc_instance.get_jobs_to_create()

############################
### create pipeline jobs ###
############################
    ### scm pipe starter
    # create scm pipe starter for all scm triggers (all repos and polled deps)
    scm_triggers = plc_instance.get_scm_triggers()
    for scm_trigger_name, scm_trigger in scm_triggers.items():
        job_creator_instance = jenkins_job_creator.PipeStarterSCMJob(jenkins_instance, plc_instance, scm_trigger_name, scm_trigger)
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### manual pipe starter
    # this pipe starter job won't poll any repository; it has to be started
    # manually. It triggers the priority build job with a repository chosen
    # from a pull-down menu as parameter
    job_creator_instance = jenkins_job_creator.PipeStarterManualJob(jenkins_instance, plc_instance, plc_instance.repositories.keys())
    if options.delete:
        modified_jobs.append(job_creator_instance.delete_job())
    else:
        manual_pipe_starter_name = job_creator_instance.create_job()
        modified_jobs.append(manual_pipe_starter_name)

    ### manual all pipe starter
    # this pipe starter job won't poll any repository; it has to be started
    # manually. It triggers the priority build job with all defined
    # repositories as parameters
    job_creator_instance = jenkins_job_creator.PipeStarterManualAllJob(jenkins_instance, plc_instance, plc_instance.repositories.keys())
    if options.delete:
        modified_jobs.append(job_creator_instance.delete_job())
    else:
        manual_all_pipe_starter_name = job_creator_instance.create_job()
        modified_jobs.append(manual_all_pipe_starter_name)

    ### priority build
    job_creator_instance = jenkins_job_creator.PriorityBuildJob(jenkins_instance, plc_instance, plc_instance.repositories.keys())
    if options.delete:
        modified_jobs.append(job_creator_instance.delete_job())
    else:
        modified_jobs.append(job_creator_instance.create_job())

    ### regular build
    if 'regular_build' in job_type_dict:
        job_creator_instance = jenkins_job_creator.RegularBuildJob(jenkins_instance, plc_instance, job_type_dict['regular_build'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### priority nongraphics test
    if 'nongraphics_test' in job_type_dict:
        job_creator_instance = jenkins_job_creator.PriorityNongraphicsTestJob(jenkins_instance, plc_instance, job_type_dict['nongraphics_test'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### regular nongraphics test
    if 'nongraphics_test' in job_type_dict and 'regular_build' in job_type_dict:
        job_creator_instance = jenkins_job_creator.RegularNongraphicsTestJob(jenkins_instance, plc_instance, job_type_dict['nongraphics_test'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### priority graphics test
    if 'graphics_test' in job_type_dict:
        job_creator_instance = jenkins_job_creator.PriorityGraphicsTestJob(jenkins_instance, plc_instance, job_type_dict['graphics_test'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### regular graphics test
    if 'graphics_test' in job_type_dict and 'regular_build' in job_type_dict:
        job_creator_instance = jenkins_job_creator.RegularGraphicsTestJob(jenkins_instance, plc_instance, job_type_dict['graphics_test'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### hardware build and test
    if 'hardware_build' in job_type_dict:
        job_creator_instance = jenkins_job_creator.HardwareBuildTrigger(jenkins_instance, plc_instance, job_type_dict['hardware_build'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

        job_creator_instance = jenkins_job_creator.HardwareBuildJob(jenkins_instance, plc_instance, job_type_dict['hardware_build'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

        job_creator_instance = jenkins_job_creator.HardwareTestTrigger(jenkins_instance, plc_instance, job_type_dict['hardware_build'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

        job_creator_instance = jenkins_job_creator.HardwareTestJob(jenkins_instance, plc_instance, job_type_dict['hardware_build'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### deployment job
    #FIXME, TODO: add check if deployment job is activated --> needs to be an option in the plugin
    #job_creator_instance = jenkins_job_creator.DeploymentJob(jenkins_instance, plc_instance)
    #if options.delete:
    #    modified_jobs.append(job_creator_instance.delete_job())
    #else:
    #    modified_jobs.append(job_creator_instance.create_job())

    ### release job
    # TODO fix if statement
    #if ('release' and 'nongraphics_test' and 'graphics_test'
    #        and 'hardware_build' and 'interactive_hw_test' in job_type_dict):
    #    print "Create release job"
        # TODO

    ### clean up
    # TODO

    # delete old and no more required jobs
    delete_msg = ""
    for job in [job for job in existent_user_jobs if job not in modified_jobs]:
        jenkins_instance.delete_job(job)
        delete_msg += "- %s\n" % job

    if delete_msg != "":
        print "Delete old and no more required jobs:\n" + delete_msg
Example #6
def main():
    #########################
    ### parsing arguments ###
    #########################
    time_parsing = datetime.datetime.now()
    print "=====> entering argument parsing step at", time_parsing

    # parse parameter values
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose', action='store_true', default=False)
    (options, args) = parser.parse_args()

    if len(args) < 5:
        print "Usage: %s pipeline_repos_owner server_name user_name ros_distro build_repo" % sys.argv[
            0]
        raise common.BuildException("Wrong arguments for build script")

    # get arguments
    pipeline_repos_owner = args[0]
    server_name = args[1]
    user_name = args[2]
    ros_distro = args[3]
    build_identifier = args[4]  # repository + suffix
    build_repo = build_identifier.split('__')[0]  # only repository to build
    # environment variables
    workspace = os.environ['WORKSPACE']

    # cob_pipe object
    cp_instance = cob_pipe.CobPipe()
    cp_instance.load_config_from_file(pipeline_repos_owner,
                                      server_name,
                                      user_name,
                                      file_location=os.environ["WORKSPACE"])
    pipe_repos = cp_instance.repositories
    common.output("Pipeline configuration successfully loaded", blankline='b')

    # (debug) output
    print "\n", 50 * 'X'
    print "\nTesting on ros distro: %s" % ros_distro
    print "Testing repository: %s" % build_repo
    if build_repo != build_identifier:
        print "       with suffix: %s" % build_identifier.split('__')[1]
    print "Using source: %s" % pipe_repos[build_identifier].url
    print "Testing branch/version: %s" % pipe_repos[build_identifier].version
    print "\n", 50 * 'X'

    # set up directories variables
    tmpdir = '/tmp'
    repo_sourcespace = os.path.join(tmpdir,
                                    'src')  # location to build repositories in
    repo_sourcespace_wet = os.path.join(repo_sourcespace, 'wet',
                                        'src')  # wet (catkin) repositories
    repo_sourcespace_dry = os.path.join(repo_sourcespace,
                                        'dry')  # dry (rosbuild) repositories
    repo_test_results = os.path.join(
        tmpdir, 'test_results')  # location for test results
    if not os.path.exists(repo_test_results):
        os.makedirs(repo_test_results)
    repo_build_logs = os.path.join(tmpdir,
                                   'build_logs')  # location for build logs

    # source wet and dry workspace
    ros_env_repo = common.get_ros_env(repo_sourcespace + '/setup.bash')
    ros_env_repo['ROS_TEST_RESULTS_DIR'] = repo_test_results
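    # ROS_TEST_RESULTS_DIR tells the ROS test tooling where to write its
    # result XML files, so the copy steps below can collect them in one place.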

    ############
    ### test ###
    ############
    time_test = datetime.datetime.now()
    print "=====> entering testing step at", time_test

    # get amount of cores available on host system
    cores = multiprocessing.cpu_count()

    # get current repository name
    user, repo_name = cp_instance.split_github_url(
        pipe_repos[build_identifier].data['url'])

    ### catkin repositories
    (catkin_packages, stacks,
     manifest_packages) = common.get_all_packages(repo_sourcespace_wet)
    if (len(catkin_packages) > 0) and (repo_name in catkin_packages):
        os.environ['ROS_TEST_RESULTS_DIR'] = os.environ[
            'CATKIN_TEST_RESULTS_DIR']

        # get list of dependencies to test
        test_repos_list_wet = []

        # add all packages in main repository
        (catkin_test_packages, stacks, manifest_packages
         ) = common.get_all_packages(catkin_packages[repo_name] + '/..')
        for pkg_name, pkg_dir in catkin_test_packages.items():
            test_repos_list_wet.append(pkg_name)

        # add all packages in dep repositories (if "test" is set to True)
        for dep in pipe_repos[build_identifier].dependencies.keys():
            if pipe_repos[build_identifier].dependencies[dep].test:
                (catkin_test_dep_packages, stacks, manifest_packages
                 ) = common.get_all_packages(catkin_packages[dep] + '/..')
                for pkg_name, pkg_dir in catkin_test_dep_packages.items():
                    test_repos_list_wet.append(pkg_name)

        # test wet repositories
        print "Testing the following wet repositories %s" % test_repos_list_wet
        test_list = ' '.join(test_repos_list_wet)
        try:
            common.call(
                "/opt/VirtualGL/bin/vglrun catkin_make --directory %s/wet test --pkg %s"
                % (repo_sourcespace, test_list), ros_env_repo)
        finally:
            # clean and copy test xml files
            common.clean_and_copy_test_results(
                repo_sourcespace + "/wet/build/test_results",
                workspace + "/test_results"
            )  # FIXME: is there a way to let catkin write test results to repo_test_results

    ### rosbuild repositories
    (catkin_packages, stacks,
     manifest_packages) = common.get_all_packages(repo_sourcespace_dry)
    if (len(stacks) > 0) and (repo_name in stacks):
        # get list of dependencies to test
        test_repos_list_dry = [build_repo]
        for dep, depObj in pipe_repos[build_identifier].dependencies.items():
            if depObj.test and dep in stacks:
                test_repos_list_dry.append(dep)

        # test dry repositories
        packages_to_test_list = test_repos_list_dry
        for repo in test_repos_list_dry:
            (catkin_packages, stacks, manifest_packages
             ) = common.get_all_packages(repo_sourcespace_dry + "/" + repo)
            packages_to_test_list = packages_to_test_list + manifest_packages.keys()
        print "Test dry packages: ", packages_to_test_list
        packages_to_test = " ".join(test_repos_list_dry) + " " + " ".join(
            packages_to_test_list)
        common.call(
            "/opt/VirtualGL/bin/vglrun rosmake -rV --skip-blacklist --profile --pjobs=%s --test-only --output=%s %s"
            % (cores, repo_build_logs, packages_to_test), ros_env_repo)

        # clean and copy test xml files
        common.clean_and_copy_test_results(repo_test_results,
                                           workspace + "/test_results")

    # in case no tests were executed (neither wet nor dry), generate a dummy test result
    common.clean_and_copy_test_results(repo_test_results,
                                       workspace + "/test_results")

    ###########
    ### end ###
    ###########
    # steps: parsing, test
    time_finish = datetime.datetime.now()
    print "=====> finished script at", time_finish
    print "durations:"
    print "parsing arguments in       ", (time_test - time_parsing)
    print "test in                    ", (time_finish - time_test)
    print "total                      ", (time_finish - time_parsing)
    print ""
Example #7
def main():
    #########################
    ### parsing arguments ###
    #########################
    time_parsing = datetime.datetime.now()
    print "=====> entering argument parsing step at", time_parsing

    # parse parameter values
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose', action='store_true', default=False)
    (options, args) = parser.parse_args()

    if len(args) < 5:
        print "Usage: %s pipeline_repos_owner server_name user_name ros_distro build_repo" % sys.argv[
            0]
        raise common.BuildException("Wrong arguments for build script")

    # get arguments
    pipeline_repos_owner = args[0]
    server_name = args[1]
    user_name = args[2]
    ros_distro = args[3]
    build_identifier = args[4]  # repository + suffix
    build_repo = build_identifier.split('__')[0]  # only repository to build
    # environment variables
    workspace = os.environ['WORKSPACE']
    ros_package_path = os.environ['ROS_PACKAGE_PATH']

    # cob_pipe object
    cp_instance = cob_pipe.CobPipe()
    cp_instance.load_config_from_file(pipeline_repos_owner,
                                      server_name,
                                      user_name,
                                      file_location=os.environ["WORKSPACE"])
    pipe_repos = cp_instance.repositories
    common.output("Pipeline configuration successfully loaded", blankline='b')

    # (debug) output
    print "\n", 50 * 'X'
    print "\nTesting on ros distro:  %s" % ros_distro
    print "Testing repository: %s" % build_repo
    if build_repo != build_identifier:
        print "       with suffix: %s" % build_identifier.split('__')[1]
    print "Using source: %s" % pipe_repos[build_identifier].url
    print "Testing branch/version: %s" % pipe_repos[build_identifier].version
    print "\n", 50 * 'X'

    # set up directories variables
    tmpdir = os.path.join('/tmp', 'test_repositories')
    repo_sourcespace = os.path.join(
        tmpdir, 'src_repository')  # location to store repositories in
    repo_sourcespace_wet = os.path.join(tmpdir, 'src_repository', 'wet',
                                        'src')  # wet (catkin) repositories
    repo_sourcespace_dry = os.path.join(tmpdir, 'src_repository',
                                        'dry')  # dry (rosbuild) repositories
    repo_test_results_dry = os.path.join(
        tmpdir, 'src_repository',
        'test_results')  # location for dry test results
    repo_test_results_wet = os.path.join(
        tmpdir, 'src_repository', 'wet', 'build',
        'test_results')  # location for wet test results
    #repo_buildspace = os.path.join(tmpdir, 'build_repository')                                        # location for build output
    dry_build_logs = os.path.join(repo_sourcespace_dry,
                                  'build_logs')  # location for build logs

    # get environment variables
    ros_env_repo = common.get_ros_env(
        os.path.join(repo_sourcespace, 'wet', 'devel', 'setup.bash'))
    ros_env_repo['ROS_PACKAGE_PATH'] = ':'.join(
        [repo_sourcespace, ros_package_path])

    ############
    ### test ###
    ############
    time_test = datetime.datetime.now()
    print "=====> entering testing step at", time_test

    # FIXME: HACK BEGIN

    common.call("ls", envir=ros_env_repo, verbose=False)
    common.call("roslaunch fiad_scenario_system system.launch",
                envir=ros_env_repo,
                verbose=False)

    # FIXME: HACK END

    # the end (steps: parsing, test)
    time_finish = datetime.datetime.now()
    print "=====> finished script at", time_finish
    print "durations:"
    print "parsing arguments in       ", (time_test - time_parsing)
    print "test in                    ", (time_finish - time_test)
    print "total                      ", (time_finish - time_parsing)
    print ""
Example #8
def main():
    """
    Create a build and test pipeline on a Jenkins CI server.
    Starting point is the pipeline configuration file (pipeline_config.yaml) of
    the given user stored on github. For this configuration a set of Jenkins
    projects/jobs will be generated.
    """

    # parse options
    usage = "Usage: %prog [masterURL login password tarballLocation | jenkinsConfig] pipelineReposOwner username"
    parser = optparse.OptionParser(usage)
    parser.add_option("-m",
                      "--masterURL",
                      action="store",
                      type="string",
                      dest="master_url",
                      metavar="URL",
                      help="URL of Jenkins server/master")
    parser.add_option(
        "-l",
        "--login",
        action="store",
        type="string",
        dest="jenkins_login",
        metavar="LOGIN",
        help=
        "Login name of Jenkins Admin which has rights to create and delete jobs"
    )
    parser.add_option("-p",
                      "--password",
                      action="store",
                      type="string",
                      dest="jenkins_pw",
                      metavar="PASSWORD",
                      help="Jenkins Admin password")
    parser.add_option("-t",
                      "--tarballLocation",
                      action="store",
                      type="string",
                      dest="tarball_location",
                      metavar="USER@SERVERADDRESS:PATH",
                      help="Place where the Tarballs are located")
    parser.add_option(
        "--jenkinsConfig",
        action="store",
        type="string",
        dest="jenkinsConfigFile",
        metavar="FILE",
        help=
        "YAML file that replaces the Jenkins config options and contains values for:\
                                            masterURL, login, password, tarballLocation"
    )
    parser.add_option(
        "-o",
        "--pipelineReposOwner",
        action="store",
        type="string",
        dest="pipeline_repos_owner",
        metavar="PIPELINE_REPOS_OWNER",
        help=
        "Owner of the GitHub repositories 'jenkins_setup' and 'jenkins_config'"
    )
    parser.add_option("-u",
                      "--username",
                      action="store",
                      type="string",
                      dest="username",
                      metavar="USERNAME",
                      help="Name of user to generate pipeline for")
    parser.add_option("-d",
                      "--delete",
                      action="store_true",
                      default=False,
                      help="Delete")
    (options, args) = parser.parse_args()

    if len(args) != 0:
        print "Usage: %s [masterURL login password tarballLocation | jenkinsConfig] pipelineReposOwner username" % (
            sys.argv[0])
        sys.exit()

    if options.jenkinsConfigFile:
        # load jenkins config from file
        with open(os.path.expanduser(options.jenkinsConfigFile)) as f:
            jenkins_conf = yaml.load(f)

        master_name = get_master_name(jenkins_conf['masterURL'])
        tarball_location = jenkins_conf['tarballLocation']

        # create jenkins instance
        jenkins_instance = jenkins.Jenkins(jenkins_conf['masterURL'],
                                           jenkins_conf['login'],
                                           jenkins_conf['password'])

    elif options.master_url and options.jenkins_login and options.jenkins_pw and options.tarball_location:
        master_name = get_master_name(options.master_url)
        tarball_location = options.tarball_location

        # create jenkins instance
        jenkins_instance = jenkins.Jenkins(options.master_url,
                                           options.jenkins_login,
                                           options.jenkins_pw)

    else:
        print "Usage: %s [masterURL login password tarballLocation | jenkinsConfig] pipelineReposOwner username" % (
            sys.argv[0])
        sys.exit()

    if not options.pipeline_repos_owner or not options.username:
        print "Usage: %s [masterURL login password tarballLocation | jenkinsConfig] pipelineReposOwner username" % (
            sys.argv[0])
        sys.exit()

    # get all existent jobs for user
    existent_user_jobs = []
    for job in jenkins_instance.get_jobs():
        job_owner = job['name'].split('__')[0]
        if options.username == job_owner:
            existent_user_jobs.append(job['name'])
    modified_jobs = []

    # get pipeline configs object from url
    plc_instance = cob_pipe.CobPipe()
    plc_instance.load_config_from_url(options.pipeline_repos_owner,
                                      master_name, options.username)

    # get jobs to create
    job_type_dict = plc_instance.get_jobs_to_create()

    ### create pipeline jobs TODO
    ### pipe starter
    # for each repository and each polled user-defined dependency a pipe
    # starter job will be generated
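    # (polls_dict maps each polled dependency to the list of repositories it
    # should trigger; cf. the get_custom_dependencies contract in Example #2)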
    polls_dict = plc_instance.get_custom_dependencies(polled_only=True)
    pipe_repo_list = plc_instance.repositories.keys()
    for poll, starts_repo_list in polls_dict.iteritems():
        if poll in pipe_repo_list:
            pipe_repo_list.remove(poll)
        job_creator_instance = jenkins_job_creator.PipeStarterJob(
            jenkins_instance, plc_instance, starts_repo_list, poll)
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())
    for repo in pipe_repo_list:
        job_creator_instance = jenkins_job_creator.PipeStarterJob(
            jenkins_instance, plc_instance, [repo], repo)
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### general pipe starter
    # this pipe starter job won't poll any repository; it has to be started
    # manually. It triggers the priority build job with all defined
    # repositories as parameters
    job_creator_instance = jenkins_job_creator.PipeStarterGeneralJob(
        jenkins_instance, plc_instance, plc_instance.repositories.keys())
    if options.delete:
        modified_jobs.append(job_creator_instance.delete_job())
    else:
        general_pipe_starter_name = job_creator_instance.create_job()
        modified_jobs.append(general_pipe_starter_name)

    ### priority build
    job_creator_instance = jenkins_job_creator.PriorityBuildJob(
        jenkins_instance, plc_instance, tarball_location,
        plc_instance.repositories.keys())
    if options.delete:
        modified_jobs.append(job_creator_instance.delete_job())
    else:
        modified_jobs.append(job_creator_instance.create_job())

    ### regular build
    if 'regular_build' in job_type_dict:
        job_creator_instance = jenkins_job_creator.RegularBuildJob(
            jenkins_instance, plc_instance, tarball_location)
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### downstream build
    if 'downstream_build' in job_type_dict:
        job_creator_instance = jenkins_job_creator.DownstreamBuildJob(
            jenkins_instance, plc_instance, tarball_location,
            job_type_dict['downstream_build'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### nongraphics test
    if 'downstream_build' in job_type_dict and 'nongraphics_test' in job_type_dict:
        job_creator_instance = jenkins_job_creator.NongraphicsTestJob(
            jenkins_instance, plc_instance, tarball_location,
            job_type_dict['nongraphics_test'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### graphics test
    if 'downstream_build' in job_type_dict and 'graphics_test' in job_type_dict:
        job_creator_instance = jenkins_job_creator.GraphicsTestJob(
            jenkins_instance, plc_instance, tarball_location,
            job_type_dict['graphics_test'])
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### hardware build
    if 'hardware_build' in job_type_dict:
        job_creator_instance = jenkins_job_creator.HardwareBuildJob(
            jenkins_instance, plc_instance)
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### automatic hardware test
    if 'hardware_build' in job_type_dict and 'automatic_hw_test' in job_type_dict:
        job_creator_instance = jenkins_job_creator.AutomaticHWTestJob(
            jenkins_instance, plc_instance)
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### interactive hardware test
    if 'hardware_build' in job_type_dict and 'interactive_hw_test' in job_type_dict:
        job_creator_instance = jenkins_job_creator.InteractiveHWTestJob(
            jenkins_instance, plc_instance)
        if options.delete:
            modified_jobs.append(job_creator_instance.delete_job())
        else:
            modified_jobs.append(job_creator_instance.create_job())

    ### release job
    if all(key in job_type_dict for key in
           ('release', 'downstream_build', 'nongraphics_test', 'graphics_test',
            'hardware_build', 'automatic_hw_test', 'interactive_hw_test')):
        print "Create release job"
        # TODO

    ### clean up
    # TODO

    # delete old and no more required jobs
    delete_msg = ""
    for job in [job for job in existent_user_jobs if job not in modified_jobs]:
        jenkins_instance.delete_job(job)
        delete_msg += "- %s\n" % job

    if delete_msg != "":
        print "Delete old and no more required jobs:\n" + delete_msg
Example #9
def main():
    # parse parameter values
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose', action='store_true', default=False)
    (options, args) = parser.parse_args()

    if len(args) < 5:
        print "Usage: %s pipeline_repos_owner server_name user_name ros_distro build_repo" % sys.argv[0]
        raise common.BuildException("Wrong arguments for build script")

    # get arguments
    pipeline_repos_owner = args[0]
    server_name = args[1]
    user_name = args[2]
    ros_distro = args[3]
    build_identifier = args[4]                      # repository + suffix
    build_repo = build_identifier.split('__')[0]    # only repository to build
    # environment variables
    workspace = os.environ['WORKSPACE']
    ros_package_path = os.environ['ROS_PACKAGE_PATH']

    # (debug) output
    print "\n", 50 * 'X'
    print datetime.datetime.now()
    print "\nTesting on ros distro:  %s" % ros_distro
    print "Testing repository: %s" % build_repo
    if build_repo != build_identifier:
        print "       with suffix: %s" % build_identifier.split('__')[1]
    print "\n", 50 * 'X'

    # update sourcelist and upgrade installed basic packages
    #common.output("Updating chroot enviroment")
    #common.call("apt-get update")
    #common.call("apt-get dist-upgrade -y")

    #common.output("Updating rosinstall")  # TODO run install frequently in chroot_tarball_updater an remove here
    #common.call("pip install -U rosinstall")

    # install packages needed for execution (depending on ros_distro)
    #common.output("Installing necessary packages:", decoration='')
    #if ros_distro == 'electric':
        #print "  catkin-pkg and rospkg"
        #common.call("pip install -U catkin-pkg rospkg")
    #elif ros_distro == 'fuerte':
        #print "  rospkg and rosdep"
        #common.call("pip install -U rospkg rosdep")
    #elif ros_distro == 'groovy':
        #print "  python-catkin-pkg and python-rosdistro"
        #common.call("apt-get install python-catkin-pkg python-rosdistro --yes")

    # cob_pipe object
    cp_instance = cob_pipe.CobPipe()
    cp_instance.load_config_from_url(pipeline_repos_owner, server_name, user_name)
    pipe_repos = cp_instance.repositories
    common.output("Pipeline configuration successfully loaded", blankline='b')

    # (debug) output
    print "\n", 50 * 'X'
    print datetime.datetime.now()
    print "\nTesting on ros distro:  %s" % ros_distro
    print "Testing repository: %s" % build_repo
    if build_repo != build_identifier:
        print "       with suffix: %s" % build_identifier.split('__')[1]
    print "Using source: %s" % pipe_repos[build_identifier].url
    print "Testing branch/version: %s" % pipe_repos[build_identifier].version
    print "\n", 50 * 'X'

    # set up directories variables
    tmpdir = os.path.join('/tmp', 'test_repositories')
    repo_sourcespace = os.path.join(tmpdir, 'src_repository')               # location to store repositories in
    repo_sourcespace_wet = os.path.join(tmpdir, 'src_repository', 'wet')    # wet (catkin) repositories
    repo_sourcespace_dry = os.path.join(tmpdir, 'src_repository', 'dry')    # dry (rosbuild) repositories
    repo_buildspace = os.path.join(tmpdir, 'build_repository')              # location for build output
    dry_build_logs = os.path.join(repo_sourcespace_dry, 'build_logs')       # location for build logs

    # download build_repo from source
    print "Creating rosinstall file for repository %s" % build_repo
    rosinstall = ""
    if build_identifier in pipe_repos:  # check if triggering identifier is really present in pipeline config
        rosinstall += pipe_repos[build_identifier].get_rosinstall()
    else:
        err_msg = "Pipeline was triggered by repository %s which is not in pipeline config!" % build_identifier
        raise common.BuildException(err_msg)

    # write rosinstall file
    print "Rosinstall file for repository: \n %s" % rosinstall
    with open(os.path.join(workspace, 'repo.rosinstall'), 'w') as f:
        f.write(rosinstall)
    print "Install repository from source:"
    # create repo sourcespace directory 'src_repository'
    os.makedirs(repo_sourcespace)
    # rosinstall repos
    common.call("rosinstall --verbose --continue-on-error %s %s/repo.rosinstall /opt/ros/%s"
                % (repo_sourcespace, workspace, ros_distro))

    # get the repositories build dependencies
    print "Get build dependencies of repo"

    # get all packages in sourcespace
    (catkin_packages, stacks, manifest_packages) = common.get_all_packages(repo_sourcespace)
    if ros_distro == 'electric' and catkin_packages != {}:
        raise common.BuildException("Found wet packages while building in ros electric")

    # (debug) output
    if options.verbose:
        print "Packages in %s:" % repo_sourcespace
        print "Catkin: ", catkin_packages
        print "Rosbuild:\n  Stacks: ", stacks
        print "  Packages: ", manifest_packages

        # get deps directly for catkin (like in willow code)
        try:
            print "Found wet build dependencies:\n%s" % '- ' + '\n- '.join(sorted(common.get_dependencies(repo_sourcespace, build_depends=True, test_depends=False)))
        except:
            pass
        # deps catkin
        repo_build_dependencies = common.get_nonlocal_dependencies(catkin_packages, {}, {}, build_depends=True, test_depends=False)
        print "Found wet dependencies:\n%s" % '- ' + '\n- '.join(sorted(repo_build_dependencies))
        # deps stacks
        repo_build_dependencies = common.get_nonlocal_dependencies({}, stacks, {})
        print "Found dry dependencies:\n%s" % '- ' + '\n- '.join(sorted(repo_build_dependencies))

    # check if build_repo is wet or dry and get all corresponding deps
    build_repo_type = ''
    if build_repo in catkin_packages:
        build_repo_type = 'wet'
        repo_build_dependencies = common.get_nonlocal_dependencies(catkin_packages, {}, {}, build_depends=True, test_depends=False)
    elif build_repo in stacks:
        build_repo_type = 'dry'
        repo_build_dependencies = common.get_nonlocal_dependencies({}, stacks, {})
    else:
        # build_repo is neither wet nor dry
        raise common.BuildException("Repository %s to build not found in sourcespace" % build_repo)

    # install user-defined/customized dependencies of build_repo from source
    rosinstall = ''
    fulfilled_deps = []
    for dep in repo_build_dependencies:
        if dep in pipe_repos[build_identifier].dependencies:
            print "Install user-defined build dependency %s from source" % dep
            rosinstall += pipe_repos[build_identifier].dependencies[dep].get_rosinstall()
            fulfilled_deps.append(dep)

    # install additional, indirect user-defined dependencies
    for dep in pipe_repos[build_identifier].dependencies:
        if dep not in fulfilled_deps:
            print "Install additional user-defined build dependency %s from source" % dep
            rosinstall += pipe_repos[build_identifier].dependencies[dep].get_rosinstall()
            fulfilled_deps.append(dep)

    # check if all user-defined/customized dependencies are satisfied
    if sorted(fulfilled_deps) != sorted(pipe_repos[build_identifier].dependencies):
        print "Not all user-defined build dependencies are fulfilled"
        print "User-defined build dependencies:\n - %s" % '\n - '.join(pipe_repos[build_identifier].dependencies)
        print "Fulfilled dependencies:\n - %s" % '\n - '.join(fulfilled_deps)
        raise common.BuildException("Not all user-defined build dependencies are fulfilled")

    if rosinstall != '':
        # write .rosinstall file
        print "Rosinstall file for user-defined build dependencies: \n %s" % rosinstall
        with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
            f.write(rosinstall)
        print "Install user-defined build dependencies from source"
        # rosinstall depends
        common.call("rosinstall --verbose --continue-on-error %s %s/repo.rosinstall /opt/ros/%s"
                    % (repo_sourcespace, workspace, ros_distro))

        # also fetch the dependencies of the user-defined/customized dependencies that were just installed
        (catkin_packages, stacks, manifest_packages) = common.get_all_packages(repo_sourcespace)
        if build_repo_type == 'wet':
            if stacks != {}:
                raise common.BuildException("Catkin (wet) package %s depends on (dry) stack(s):\n%s"
                                            % (build_repo, '- ' + '\n- '.join(stacks)))
            # take only wet packages
            repo_build_dependencies = common.get_nonlocal_dependencies(catkin_packages, {}, {}, build_depends=True, test_depends=False)
        else:  # dry build repo
            # take all packages
            repo_build_dependencies = common.get_nonlocal_dependencies(catkin_packages, stacks, {}, build_depends=True, test_depends=False)
        repo_build_dependencies = [dep for dep in repo_build_dependencies if dep not in fulfilled_deps]

    rosdep_resolver = None
    if ros_distro != 'electric':
        # Create rosdep object
        print "Create rosdep object"
        try:
            rosdep_resolver = rosdep.RosDepResolver(ros_distro)
        except Exception:  # retry once; rosdep initialization sometimes fails on the first attempt
            from time import sleep
            sleep(10)
            rosdep_resolver = rosdep.RosDepResolver(ros_distro)
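    # RosDepResolver initialization presumably hits the network (rosdep database),
    # so a transient failure is retried once after a 10 second pause. A more
    # general sketch of the same pattern (hypothetical, not used here):
    #
    #   for attempt in range(3):
    #       try:
    #           rosdep_resolver = rosdep.RosDepResolver(ros_distro)
    #           break
    #       except Exception:
    #           sleep(10)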

    print datetime.datetime.now()
    print "Install build dependencies: %s" % (', '.join(repo_build_dependencies))
    common.apt_get_install_also_nonrosdep(repo_build_dependencies, ros_distro, rosdep_resolver)
    print datetime.datetime.now()

    # separate the installed repositories into wet and dry
    print "Separate installed repositories into wet and dry"
    os.makedirs(repo_sourcespace_wet)
    os.makedirs(repo_sourcespace_dry)
    # get all folders in repo_sourcespace
    sourcespace_dirs = [name for name in os.listdir(repo_sourcespace) if os.path.isdir(os.path.join(repo_sourcespace, name))]
    for dir_name in sourcespace_dirs:
        if dir_name in catkin_packages.keys():
            shutil.move(os.path.join(repo_sourcespace, dir_name), os.path.join(repo_sourcespace_wet, dir_name))
        elif dir_name in stacks.keys():
            shutil.move(os.path.join(repo_sourcespace, dir_name), os.path.join(repo_sourcespace_dry, dir_name))
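    # Resulting layout (illustrative):
    #   <tmpdir>/src_repository/       anything neither wet nor dry stays here
    #   <tmpdir>/src_repository/wet/   catkin packages, built below with cmake/make
    #   <tmpdir>/src_repository/dry/   rosbuild stacks, built below with rosmake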

    # env
    print "Set up ros environment variables"
    ros_env = common.get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    if options.verbose:
        common.call("env", ros_env)

    ### catkin repositories
    print datetime.datetime.now()
    if catkin_packages != {}:
        # set up catkin workspace
        if ros_distro == 'fuerte':
            if 'catkin' not in catkin_packages.keys():
                # add catkin package to rosinstall
                rosinstall = "\n- git: {local-name: catkin, uri: 'git://github.com/ros/catkin.git', version: fuerte-devel}"
                print "Install catkin"
                # write the catkin entry to the rosinstall file so the call below actually installs it
                with open(os.path.join(workspace, 'repo.rosinstall'), 'w') as f:
                    f.write(rosinstall)
                # rosinstall catkin
                common.call("rosinstall --verbose %s %s/repo.rosinstall /opt/ros/%s"
                            % (repo_sourcespace_wet, workspace, ros_distro))

            print "Create a CMakeLists.txt for catkin packages"
            common.call("ln -s %s %s" % (os.path.join(repo_sourcespace_wet, 'catkin', 'cmake', 'toplevel.cmake'),
                                         os.path.join(repo_sourcespace_wet, 'CMakeLists.txt')))
        else:
            common.call("catkin_init_workspace %s" % repo_sourcespace_wet, ros_env)

        os.mkdir(repo_buildspace)
        os.chdir(repo_buildspace)
        try:
            common.call("cmake %s" % repo_sourcespace_wet + '/', ros_env)
        except common.BuildException as ex:
            print ex.msg
            raise common.BuildException("Failed to cmake wet repositories")
        #ros_env_repo = common.get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))

        # build repositories and tests
        print "Build wet repository list"
        try:
            common.call("make", ros_env)
        except common.BuildException as ex:
            print ex.msg
            raise common.BuildException("Failed to make wet packages")

        test_error_msg = None
        try:
            common.call("make tests", ros_env)
        except common.BuildException as ex:
            print ex.msg
            test_error_msg = ex.msg

        # get wet repositories test and run dependencies
        print "Get test and run dependencies of repo list"
        (catkin_packages, stacks, manifest_packages) = common.get_all_packages(repo_sourcespace_wet)
        if stacks != {}:
            raise common.BuildException("Catkin (wet) package %s depends on (dry) stack(s):\n%s"
                                        % (build_repo, '- ' + '\n- '.join(stacks)))
        # take only wet packages
        repo_test_dependencies = common.get_nonlocal_dependencies(catkin_packages, {}, {}, build_depends=False, test_depends=True)
        if repo_test_dependencies != [] and test_error_msg is None:
            print "Install test and run dependencies of repository list: %s" % (', '.join(repo_test_dependencies))
            common.apt_get_install_also_nonrosdep(repo_test_dependencies, ros_distro, rosdep_resolver)

            # run tests
            print "Test repository list"
            try:
                common.call("make run_tests", ros_env)
            except common.BuildException as ex:
                print ex.msg
                test_error_msg = ex.msg

        # copy test results
        common.copy_test_results(workspace, repo_buildspace, test_error_msg)
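        # copy_test_results is assumed to collect the junit XML files into the
        # Jenkins workspace and, if test_error_msg is set, to surface the earlier
        # 'make tests' failure so the build is still marked as failed.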

    ### rosbuild repositories
    print datetime.datetime.now()
    if build_repo_type == 'dry':
        # get list of dependencies to test
        test_repos_list = []
        for dep in pipe_repos[build_identifier].dependencies:
            if dep in stacks:
                test_repos_list.append(dep)

        ros_env_repo = common.get_ros_env(os.path.join(repo_sourcespace, 'setup.bash'))
        ros_env_repo['ROS_PACKAGE_PATH'] = ':'.join([repo_sourcespace, ros_package_path])
        if options.verbose:
            common.call("env", ros_env_repo)

        if ros_distro == 'electric':
            print "Rosdep"
            common.call("rosmake rosdep", ros_env)
        for stack in stacks.keys():
            common.call("rosdep install -y %s" % stack, ros_env_repo)

        # build dry repositories and tests
        print "Build repository %s" % build_repo
        try:
            common.call("rosmake -rV --profile --pjobs=8 --output=%s %s" %
                        (dry_build_logs, build_repo), ros_env_repo)
        except common.BuildException as ex:
            try:
                shutil.move(dry_build_logs, os.path.join(workspace, "build_logs"))
            finally:
                print ex.msg
                raise common.BuildException("Failed to rosmake %s" % build_repo)
        try:
            common.call("rosmake -rV --profile --pjobs=8 --test-only --output=%s %s" %
                        (dry_build_logs, " ".join(test_repos_list + [build_repo])), ros_env_repo)
        except common.BuildException as ex:
            print ex.msg
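        # rosmake flags as used above: -r continues past individual package
        # failures, -V prints full build output, --profile reports per-package
        # build times, --pjobs=8 builds up to 8 packages in parallel,
        # --test-only restricts the second run to executing tests, and
        # --output redirects the per-package logs into dry_build_logs.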

        # copy test results
        common.call("rosrun rosunit clean_junit_xml.py", ros_env_repo)
        for result_file in os.listdir(os.path.join(repo_sourcespace, "test_results")):
            file_path = os.path.join(repo_sourcespace, "test_results", result_file)
            print file_path
            try:
                # keep only the aggregated '_hudson' result files
                if not result_file.startswith("_hudson"):
                    shutil.rmtree(file_path)
            except Exception as e:
                print e
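        # rosunit's clean_junit_xml.py is expected to aggregate the rosbuild test
        # results into files prefixed with '_hudson'; the loop above prunes
        # everything else so only Jenkins-readable XML remains before
        # copy_test_results runs.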

        common.copy_test_results(workspace, repo_sourcespace)
        print datetime.datetime.now()
        try:
            shutil.move(dry_build_logs, os.path.join(workspace, "build_logs"))
        except IOError as ex:
            print "No build logs found: %s" % ex