Example #1
def build_repo_structure(repo_path, doc_conf, depends_conf):
    stacks = {}
    manifest_packages = {}
    catkin_packages = {}
    repo_map = {}

    local_info = []
    for install_item in doc_conf + depends_conf:
        key = install_item.keys()[0]
        local_info.append(
            {
                "type": key,
                "name": install_item[key]["local-name"],
                "url": install_item[key]["uri"],
                "version": install_item[key].get("version", None),
            }
        )

    # Get any stacks, manifest packages, or catkin packages (package.xml) in each repo
    for item in local_info:
        local_name = item["name"]
        local_path = os.path.join(repo_path, local_name)
        print "Looking for the following packages in %s" % local_path
        local_stacks = get_repo_manifests(local_path, manifest="stack")
        local_manifest_packages = get_repo_manifests(local_path, manifest="package")
        local_catkin_packages = get_repo_packages(local_path)

        # Since rospkg is kind of screwed up and always finds package.xml files, we
        # need to filter out packages that are catkin_packages but still listed in
        # manifest or stack packages
        for name in local_catkin_packages.iterkeys():
            if name in local_stacks:
                del local_stacks[name]
            if name in local_manifest_packages:
                del local_manifest_packages[name]

        # Now, we need to update our repo map
        for name in local_stacks.keys() + local_manifest_packages.keys() + local_catkin_packages.keys():
            repo_map[name] = item

        # Finally, we'll merge these dictionaries into our global dicts
        stacks.update(local_stacks)
        manifest_packages.update(local_manifest_packages)
        catkin_packages.update(local_catkin_packages)

    return (stacks, manifest_packages, catkin_packages, repo_map)
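
A minimal usage sketch (hypothetical data; the entry layout mirrors the rosinstall-style keys the function reads above: a single vcs-type key mapping to 'local-name', 'uri', and an optional 'version'):

# Hypothetical rosinstall-style entries; repo names and URLs are placeholders.
doc_conf = [
    {'git': {'local-name': 'my_repo',
             'uri': 'https://github.com/example/my_repo.git',
             'version': 'master'}},
]
depends_conf = [
    {'hg': {'local-name': 'dep_repo',
            'uri': 'https://example.com/hg/dep_repo'}},
]
stacks, manifest_packages, catkin_packages, repo_map = \
    build_repo_structure('/tmp/docspace', doc_conf, depends_conf)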
Example #2
def build_repo_structure(repo_path, doc_conf, depends_conf):
    stacks = {}
    manifest_packages = {}
    catkin_packages = {}
    repo_map = {}

    local_info = []
    for install_item in doc_conf + depends_conf:
        key = install_item.keys()[0]
        local_info.append({'type': key, 'name': install_item[key]['local-name'], 'url': install_item[key]['uri'], 'version': install_item[key].get('version', None)})

    #Get any stacks, manifest packages, or catkin packages (package.xml) in each repo
    for item in local_info:
        local_name = item['name']
        local_path = os.path.join(repo_path, local_name)
        print("Looking for the packages in %s" % local_path)
        local_stacks = get_repo_manifests(local_path, manifest='stack')
        local_manifest_packages = get_repo_manifests(local_path, manifest='package')
        local_catkin_packages = get_repo_packages(local_path)

        #Since rospkg is kind of screwed up and always finds package.xml files, we
        #need to filter out packages that are catkin_packages but still listed in
        #manifest or stack packages
        for name in local_catkin_packages.iterkeys():
            if name in local_stacks:
                del local_stacks[name]
            if name in local_manifest_packages:
                del local_manifest_packages[name]

        #Now, we need to update our repo map
        for name in local_stacks.keys() + local_manifest_packages.keys() + local_catkin_packages.keys():
            repo_map[name] = item

        #Finally, we'll merge these dictionaries into our global dicts
        stacks.update(local_stacks)
        manifest_packages.update(local_manifest_packages)
        catkin_packages.update(local_catkin_packages)

    return (stacks, manifest_packages, catkin_packages, repo_map)
Example #3
def document_repo(workspace, docspace, ros_distro, repo, platform, arch):
    append_pymodules_if_needed()
    print "Working on distro %s and repo %s" % (ros_distro, repo)
    try:
        repo_url = 'https://raw.github.com/ros/rosdistro/master/doc/%s/%s.rosinstall'%(ros_distro, repo)
        f = urllib2.urlopen(repo_url)
        if f.code != 200:
            raise BuildException("Could not find a valid rosinstall file for %s at %s" % (repo, repo_url))
        conf = yaml.load(f.read())
    except (urllib2.URLError, urllib2.HTTPError) as e:
        raise BuildException("Could not find a valid rosinstall file for %s at %s" % (repo, repo_url))

    depends_conf = []
    try:
        depends_repo_url = 'https://raw.github.com/ros/rosdistro/master/doc/%s/%s_depends.rosinstall'%(ros_distro, repo)
        f = urllib2.urlopen(depends_repo_url)
        if f.code == 200:
            print "Found a depends rosinstall file for %s" % repo
            depends_conf = yaml.load(f.read())
    except (urllib2.URLError, urllib2.HTTPError) as e:
        print "Did not find a depends rosinstall file for %s" % repo

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(conf)

    #TODO: Change this or parameterize or whatever
    homepage = 'http://ros.org/rosdoclite'

    #Select the appropriate rosinstall file
    rosinstall = yaml.dump(conf + depends_conf, default_style=False)

    print "Rosinstall for repo %s:\n%s"%(repo, rosinstall)
    with open(workspace+"/repo.rosinstall", 'w') as f:
        f.write(rosinstall)
    print "Created rosinstall file for repo %s, installing repo..."%repo
    #TODO Figure out why rosinstall insists on having ROS available when called with nobuild, but not catkin
    call("rosinstall %s %s/repo.rosinstall --nobuild --catkin" % (docspace, workspace))

    repo_path = os.path.abspath("%s" % (docspace))
    print "Repo path %s" % repo_path

    stacks = {}
    manifest_packages = {}
    catkin_packages = {}
    repo_map = {}

    local_info = []
    for install_item in conf + depends_conf:
        key = install_item.keys()[0]
        local_info.append({'type': key, 'name': install_item[key]['local-name'], 'url': install_item[key]['uri']})

    #Get any stacks, manifest packages, or catkin packages (package.xml) in each repo
    for item in local_info:
        local_name = item['name']
        local_path = os.path.join(repo_path, local_name)
        print "Looking for the following packages in %s" % local_path
        local_stacks = get_repo_manifests(local_path, manifest='stack')
        local_manifest_packages = get_repo_manifests(local_path, manifest='package')
        local_catkin_packages = get_repo_packages(local_path)

        #Since rospkg is kind of screwed up and always finds package.xml files, we
        #need to filter out packages that are catkin_packages but still listed in
        #manifest or stack packages
        for name in local_catkin_packages.iterkeys():
            if name in local_stacks:
                del local_stacks[name]
            if name in local_manifest_packages:
                del local_manifest_packages[name]

        #Now, we need to update our repo map
        for name in local_stacks.keys() + local_manifest_packages.keys() + local_catkin_packages.keys():
            repo_map[name] = item

        #Finally, we'll merge these dictionaries into our global dicts
        stacks.update(local_stacks)
        manifest_packages.update(local_manifest_packages)
        catkin_packages.update(local_catkin_packages)

    print "Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys())

    print "Catkin packages: %s" % catkin_packages
    print "Manifest packages: %s" % manifest_packages
    print "Stacks: %s" % stacks

    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)

    #Get any non local dependencies and install them
    apt_deps = []
    ros_dep = RosDepResolver(ros_distro)
    apt = AptDepends(platform, arch)
    deps = get_nonlocal_dependencies(catkin_packages, stacks, manifest_packages)
    print "Dependencies: %s" % deps
    for dep in deps:
        if ros_dep.has_ros(dep):
            apt_dep = ros_dep.to_apt(dep)
            apt_deps.extend(apt_dep)
        else:
            apt_dep = "ros-%s-%s" % (ros_distro, dep.replace('_', '-'))
            if apt.has_package(apt_dep):
                apt_deps.append(apt_dep)
            else:
                print "WARNING, could not find dependency %s, not adding to list" % dep


    print "Apt dependencies: %s" % apt_deps

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    #Write stack manifest files for all stacks; we can just do this off the
    #stack.xml files
    for stack, path in stacks.iteritems():
        import rospkg
        #Get the dependencies of a dry stack from the stack.xml
        stack_manifest = rospkg.parse_manifest_file(path, rospkg.STACK_FILE)
        stack_packages = get_repo_manifests(path, manifest='package').keys()
        deps = [d.name for d in stack_manifest.depends]
        stack_relative_doc_path = "%s/doc/%s/api/%s" % (docspace, ros_distro, stack)
        stack_doc_path = os.path.abspath(stack_relative_doc_path)
        write_stack_manifest(stack_doc_path, stack, stack_manifest, repo_map[stack]['type'], repo_map[stack]['url'], "%s/%s/api/%s/html" %(homepage, ros_distro, stack), stack_packages, tags_db)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print "Build order that honors deps:\n%s" % build_order

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = copy.deepcopy(apt_deps)
    for dep in apt_deps:
        print "Getting dependencies for %s" % dep
        full_apt_deps.extend(apt.depends(dep))

    #Make sure that we don't have any duplicates
    full_apt_deps = list(set(full_apt_deps))

    print "Installing all dependencies for %s" % repo
    if apt_deps:
        call("apt-get install %s --yes" % (' '.join(apt_deps)))
    print "Done installing dependencies"

    #Set up the list of things that need to be sourced to run rosdoc_lite
    sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    if catkin_packages and not 'rosdoc_lite' in catkin_packages.keys():
        sources.append(build_repo_messages(docspace, ros_distro))

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin the messages aren't generated, TODO
    #to come back and fix this if necessary
    sources.append(build_repo_messages_manifest(manifest_packages, build_order, ros_distro))

    repo_tags = {}
    for package in build_order:
        #don't document packages that we're supposed to build but not supposed to document
        if not repo_map[package]['name'] in repos_to_doc:
            print "Package: %s, in repo: %s, is not supposed to be documented. Skipping." % (package, repo_map[package]['name'])
            continue

        #Pull the package from the correct place
        if package in catkin_packages:
            package_path = catkin_packages[package]
        else:
            package_path = manifest_packages[package]

        #Build a tagfile list from dependencies for use by rosdoc
        build_tagfile(full_apt_deps, tags_db, 'rosdoc_tags.yaml', package, build_order, docspace, ros_distro)

        relative_doc_path = "%s/doc/%s/api/%s" % (docspace, ros_distro, package)
        pkg_doc_path = os.path.abspath(relative_doc_path)
        relative_tags_path = "%s/api/%s/tags/%s.tag" % (ros_distro, package, package)
        tags_path = os.path.abspath("%s/doc/%s" % (docspace, relative_tags_path))
        print "Documenting %s [%s]..." % (package, package_path)
        #Generate the command we'll use to document the stack
        command = ['bash', '-c', '%s \
                   && export ROS_PACKAGE_PATH=%s:$ROS_PACKAGE_PATH \
                   && rosdoc_lite %s -o %s -g %s -t rosdoc_tags.yaml' \
                   %(' && '.join(sources), repo_path, package_path, pkg_doc_path, tags_path) ]
        #proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        proc = subprocess.Popen(command)
        proc.communicate()

        #Some doc runs won't generate tag files, so we need to check if they
        #exist before adding them to the list
        if(os.path.exists(tags_path)):
            package_tags = {'location':'%s/%s'%(homepage, relative_tags_path), 
                                 'docs_url':'../../../api/%s/html'%(package), 
                                 'package':'%s'%package}

            #If the package has a deb name, then we'll store the tags for it
            #alongside that name
            if ros_dep.has_ros(package):
                pkg_deb_name = ros_dep.to_apt(package)[0]
                tags_db.set_tags(pkg_deb_name, [package_tags])
            #Otherwise, we'll store tags for it alongside its repo, which we
            #assume can be made into a deb name
            else:
                repo_tags.setdefault(repo_map[package]['name'], []).append(package_tags)

        #We also need to add information to each package manifest that we only
        #have available in this script, like vcs location and type
        write_distro_specific_manifest(os.path.join(pkg_doc_path, 'manifest.yaml'),
                                       package, repo_map[package]['type'], repo_map[package]['url'], "%s/%s/api/%s/html" %(homepage, ros_distro, package),
                                       tags_db)

        print "Done"

    doc_path = os.path.abspath("%s/doc/%s" % (docspace, ros_distro))

    #Copy the files to the appropriate place
    #call("rsync -e \"ssh -o StrictHostKeyChecking=no\" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite" % (doc_path))
    command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite' % doc_path]
    call_with_list(command)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    tags_db.commit_db()

    #Tell jenkins that we've succeeded
    print "Preparing xml test results"
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print "Created test results directory"
    except:
        pass

    call("cp %s %s"%(os.path.join(workspace, 'buildfarm/templates/junit_dummy_ouput_template.xml'),
                     os.path.join(workspace, 'test_results/')))
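
A minimal invocation sketch, assuming the function is driven by a Jenkins doc job (the paths, repo, platform, and arch values below are hypothetical placeholders; only the argument names come from the signature above):

# Hypothetical call; in practice the arguments come from the Jenkins job configuration.
document_repo(workspace='/tmp/workspace', docspace='/tmp/workspace/doc_stacks',
              ros_distro='groovy', repo='navigation',
              platform='precise', arch='amd64')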
Example #4
def document_repo(workspace, docspace, ros_distro, repo, platform, arch):
    append_pymodules_if_needed()
    print "Working on distro %s and repo %s" % (ros_distro, repo)
    try:
        repo_url = 'https://raw.github.com/ros/rosdistro/master/doc/%s/%s.rosinstall' % (
            ros_distro, repo)
        f = urllib2.urlopen(repo_url)
        if f.code != 200:
            raise BuildException(
                "Could not find a valid rosinstall file for %s at %s" %
                (repo, repo_url))
        conf = yaml.load(f.read())
    except (urllib2.URLError, urllib2.HTTPError) as e:
        raise BuildException(
            "Could not find a valid rosinstall file for %s at %s" %
            (repo, repo_url))

    depends_conf = []
    try:
        depends_repo_url = 'https://raw.github.com/ros/rosdistro/master/doc/%s/%s_depends.rosinstall' % (
            ros_distro, repo)
        f = urllib2.urlopen(depends_repo_url)
        if f.code == 200:
            print "Found a depends rosinstall file for %s" % repo
            depends_conf = yaml.load(f.read())
    except (urllib2.URLError, urllib2.HTTPError) as e:
        print "Did not find a depends rosinstall file for %s" % repo

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(conf)

    #TODO: Change this or parameterize or whatever
    homepage = 'http://ros.org/rosdoclite'

    #Select the appropriate rosinstall file
    rosinstall = yaml.dump(conf + depends_conf, default_style=False)

    print "Rosinstall for repo %s:\n%s" % (repo, rosinstall)
    with open(workspace + "/repo.rosinstall", 'w') as f:
        f.write(rosinstall)
    print "Created rosinstall file for repo %s, installing repo..." % repo
    #TODO Figure out why rosinstall insists on having ROS available when called with nobuild, but not catkin
    call("rosinstall %s %s/repo.rosinstall --nobuild --catkin" %
         (docspace, workspace))

    repo_path = os.path.abspath("%s" % (docspace))
    print "Repo path %s" % repo_path

    stacks = {}
    manifest_packages = {}
    catkin_packages = {}
    repo_map = {}

    local_info = []
    for install_item in conf + depends_conf:
        key = install_item.keys()[0]
        local_info.append({
            'type': key,
            'name': install_item[key]['local-name'],
            'url': install_item[key]['uri']
        })

    #Get any stacks, manifest packages, or catkin packages (package.xml) in each repo
    for item in local_info:
        local_name = item['name']
        local_path = os.path.join(repo_path, local_name)
        print "Looking for the following packages in %s" % local_path
        local_stacks = get_repo_manifests(local_path, manifest='stack')
        local_manifest_packages = get_repo_manifests(local_path,
                                                     manifest='package')
        local_catkin_packages = get_repo_packages(local_path)

        #Since rospkg is kind of screwed up and always finds package.xml files, we
        #need to filter out packages that are catkin_packages but still listed in
        #manifest or stack packages
        for name in local_catkin_packages.iterkeys():
            if name in local_stacks:
                del local_stacks[name]
            if name in local_manifest_packages:
                del local_manifest_packages[name]

        #Now, we need to update our repo map
        for name in local_stacks.keys() + local_manifest_packages.keys(
        ) + local_catkin_packages.keys():
            repo_map[name] = item

        #Finally, we'll merge these dictionaries into our global dicts
        stacks.update(local_stacks)
        manifest_packages.update(local_manifest_packages)
        catkin_packages.update(local_catkin_packages)

    print "Running documentation generation on\npackages: %s" % (
        manifest_packages.keys() + catkin_packages.keys())

    print "Catkin packages: %s" % catkin_packages
    print "Manifest packages: %s" % manifest_packages
    print "Stacks: %s" % stacks

    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)

    #Get any non local dependencies and install them
    apt_deps = []
    ros_dep = RosDepResolver(ros_distro)
    apt = AptDepends(platform, arch)
    deps = get_nonlocal_dependencies(catkin_packages, stacks,
                                     manifest_packages)
    print "Dependencies: %s" % deps
    for dep in deps:
        if ros_dep.has_ros(dep):
            apt_dep = ros_dep.to_apt(dep)
            apt_deps.extend(apt_dep)
        else:
            apt_dep = "ros-%s-%s" % (ros_distro, dep.replace('_', '-'))
            if apt.has_package(apt_dep):
                apt_deps.append(apt_dep)
            else:
                print "WARNING, could not find dependency %s, not adding to list" % dep

    print "Apt dependencies: %s" % apt_deps

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages,
                                                   manifest_packages)

    #Write stack manifest files for all stacks; we can just do this off the
    #stack.xml files
    for stack, path in stacks.iteritems():
        import rospkg
        #Get the dependencies of a dry stack from the stack.xml
        stack_manifest = rospkg.parse_manifest_file(path, rospkg.STACK_FILE)
        stack_packages = get_repo_manifests(path, manifest='package').keys()
        deps = [d.name for d in stack_manifest.depends]
        stack_relative_doc_path = "%s/doc/%s/api/%s" % (docspace, ros_distro,
                                                        stack)
        stack_doc_path = os.path.abspath(stack_relative_doc_path)
        write_stack_manifest(
            stack_doc_path, stack, stack_manifest, repo_map[stack]['type'],
            repo_map[stack]['url'],
            "%s/%s/api/%s/html" % (homepage, ros_distro, stack),
            stack_packages, tags_db)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print "Build order that honors deps:\n%s" % build_order

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = copy.deepcopy(apt_deps)
    for dep in apt_deps:
        print "Getting dependencies for %s" % dep
        full_apt_deps.extend(apt.depends(dep))

    #Make sure that we don't have any duplicates
    full_apt_deps = list(set(full_apt_deps))

    print "Installing all dependencies for %s" % repo
    if apt_deps:
        call("apt-get install %s --yes" % (' '.join(apt_deps)))
    print "Done installing dependencies"

    #Set up the list of things that need to be sourced to run rosdoc_lite
    sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    if catkin_packages and not 'rosdoc_lite' in catkin_packages.keys():
        sources.append(build_repo_messages(docspace, ros_distro))

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin the messages aren't generated, TODO
    #to come back and fix this if necessary
    sources.append(
        build_repo_messages_manifest(manifest_packages, build_order,
                                     ros_distro))

    repo_tags = {}
    for package in build_order:
        #don't document packages that we're supposed to build but not supposed to document
        if not repo_map[package]['name'] in repos_to_doc:
            print "Package: %s, in repo: %s, is not supposed to be documented. Skipping." % (
                package, repo_map[package]['name'])
            continue

        #Pull the package from the correct place
        if package in catkin_packages:
            package_path = catkin_packages[package]
        else:
            package_path = manifest_packages[package]

        #Build a tagfile list from dependencies for use by rosdoc
        build_tagfile(full_apt_deps, tags_db, 'rosdoc_tags.yaml', package,
                      build_order, docspace, ros_distro)

        relative_doc_path = "%s/doc/%s/api/%s" % (docspace, ros_distro,
                                                  package)
        pkg_doc_path = os.path.abspath(relative_doc_path)
        relative_tags_path = "%s/api/%s/tags/%s.tag" % (ros_distro, package,
                                                        package)
        tags_path = os.path.abspath("%s/doc/%s" %
                                    (docspace, relative_tags_path))
        print "Documenting %s [%s]..." % (package, package_path)
        #Generate the command we'll use to document the stack
        command = ['bash', '-c', '%s \
                   && export ROS_PACKAGE_PATH=%s:$ROS_PACKAGE_PATH \
                   && rosdoc_lite %s -o %s -g %s -t rosdoc_tags.yaml' \
                   %(' && '.join(sources), repo_path, package_path, pkg_doc_path, tags_path) ]
        #proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        proc = subprocess.Popen(command)
        proc.communicate()

        #Some doc runs won't generate tag files, so we need to check if they
        #exist before adding them to the list
        if (os.path.exists(tags_path)):
            package_tags = {
                'location': '%s/%s' % (homepage, relative_tags_path),
                'docs_url': '../../../api/%s/html' % (package),
                'package': '%s' % package
            }

            #If the package has a deb name, then we'll store the tags for it
            #alongside that name
            if ros_dep.has_ros(package):
                pkg_deb_name = ros_dep.to_apt(package)[0]
                tags_db.set_tags(pkg_deb_name, [package_tags])
            #Otherwise, we'll store tags for it alongside its repo, which we
            #assume can be made into a deb name
            else:
                repo_tags.setdefault(repo_map[package]['name'],
                                     []).append(package_tags)

        #We also need to add information to each package manifest that we only
        #have available in this script, like vcs location and type
        write_distro_specific_manifest(
            os.path.join(pkg_doc_path, 'manifest.yaml'), package,
            repo_map[package]['type'], repo_map[package]['url'],
            "%s/%s/api/%s/html" % (homepage, ros_distro, package), tags_db)

        print "Done"

    doc_path = os.path.abspath("%s/doc/%s" % (docspace, ros_distro))

    #Copy the files to the appropriate place
    #call("rsync -e \"ssh -o StrictHostKeyChecking=no\" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite" % (doc_path))
    command = [
        'bash', '-c',
        'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite'
        % doc_path
    ]
    call_with_list(command)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    tags_db.commit_db()

    #Tell jenkins that we've succeeded
    print "Preparing xml test results"
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print "Created test results directory"
    except:
        pass

    call("cp %s %s" % (os.path.join(
        workspace, 'buildfarm/templates/junit_dummy_ouput_template.xml'),
                       os.path.join(workspace, 'test_results/')))