Example #1
    def shard_caselist(self, caselist_fn, shard):
        if shard == "0":
            return

        assert(":" in shard)
        shardargs = shard.split(":")
        shardno = int(shardargs[0])
        shardcount = int(shardargs[1])
        assert(shardno <= shardcount)

        shard_tests = []
        test_no = 0
        test_list = open(caselist_fn).readlines()
        for a_test in test_list:
            if (test_no % shardcount) + 1 == shardno:
                shard_tests.append(a_test)
            test_no = test_no + 1

        bs.rmtree(caselist_fn)
        caselist_fh = open(caselist_fn, "w")
        for a_test in shard_tests:
            caselist_fh.write(a_test)
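The shard argument above is a string of the form "N:M", meaning "run shard N of M" with N counted from 1; "0" disables sharding. The same selection arithmetic, reduced to a standalone sketch (the function name is illustrative, not part of build_support):

def select_shard(tests, shard):
    # "0" means no sharding: keep the full list
    if shard == "0":
        return list(tests)
    shardno, shardcount = [int(s) for s in shard.split(":")]
    assert shardno <= shardcount
    # keep every shardcount-th test, offset by the 1-based shard number
    return [t for i, t in enumerate(tests) if (i % shardcount) + 1 == shardno]

# shard "2:4" keeps the 2nd, 6th, 10th, ... entries
print(select_shard(["a", "b", "c", "d", "e", "f"], "2:4"))  # ['b', 'f']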
Example #2
    def build(self):
        has_vulkan = os.path.exists(self._src_dir + "/external/spirv-tools")
        if has_vulkan:
            spirvtools = self._src_dir + "/external/spirv-tools/src"
            if not os.path.islink(spirvtools):
                bs.rmtree(spirvtools)
            if not os.path.exists(spirvtools):
                os.symlink("../../../spirvtools", spirvtools)
            glslang = self._src_dir + "/external/glslang/src"
            if not os.path.islink(glslang):
                bs.rmtree(glslang)
            if not os.path.exists(glslang):
                os.symlink("../../../glslang", glslang)
            spirvheaders_dir = self._src_dir + "/external/spirv-headers"
            if not os.path.exists(spirvheaders_dir):
                os.makedirs(spirvheaders_dir)
            spirvheaders = spirvheaders_dir + "/src"
            if not os.path.islink(spirvheaders):
                bs.rmtree(spirvheaders)
            if not os.path.exists(spirvheaders):
                os.symlink("../../../spirvheaders", spirvheaders)

            # change spirv-tools and glslang to use the commits specified
            # in the vulkancts sources
            sys.path = [os.path.abspath(os.path.normpath(s)) for s in sys.path]
            sys.path = [
                gooddir for gooddir in sys.path if "deqp" not in gooddir
            ]
            sys.path.append(self._src_dir + "/external/")
            fetch_sources = importlib.import_module("fetch_sources", ".")
            for package in fetch_sources.PACKAGES:
                try:
                    if not isinstance(package, fetch_sources.GitRepo):
                        continue
                except:
                    continue
                repo_path = self._src_dir + "/external/" + package.baseDir + "/src/"
                print "Cleaning: " + repo_path + " : " + package.revision
                savedir = os.getcwd()
                os.chdir(repo_path)
                bs.run_batch_command(["git", "clean", "-xfd"])
                bs.run_batch_command(["git", "reset", "--hard", "HEAD"])
                os.chdir(savedir)
                print "Checking out: " + repo_path + " : " + package.revision
                repo = git.Repo(repo_path)
                repo.git.checkout(package.revision, force=True)

        bs.CMakeBuilder.build(self)
        dest = self._pm.build_root() + "/opt/deqp/"
        if not os.path.exists(dest):
            os.makedirs(dest)
        bs.run_batch_command([
            "rsync", "-rlptD",
            self._pm.project_source_dir() + "/build_" + self._o.arch +
            "/modules", dest
        ])
        bs.Export().export()
Example #3
print "mesa revisions under bisection:"
found = False
for commit in mesa_repo.iter_commits(max_count=5000):
    mesa_commits.append(commit)
    print commit.hexsha
    if good_revisions["mesa"] in commit.hexsha:
        found = True
        break
assert (found)

# retest build, in case expected failures has been updated
# copy build root to bisect directory
bisect_dir = results_dir + "/bisect/" + hash_dir
cmd = ["rsync", "-rlptD", "/".join(dirnames[:-1]) + "/", bisect_dir]
bs.run_batch_command(cmd)
bs.rmtree(bisect_dir + "/test")
bs.rmtree(bisect_dir + "/piglit-test")
bs.rmtree(bisect_dir + "/deqp-test")
bs.rmtree(bisect_dir + "/cts-test")

j = bs.Jenkins(_revspec, bisect_dir)
o = bs.Options(["bisect_all.py"])
o.result_path = bisect_dir
o.retest_path = args.result_path
depGraph = bs.DependencyGraph(["piglit-gpu-all"], o)

print "Retesting mesa to: " + bisect_dir
j.build_all(depGraph, print_summary=False)

# make sure there is enough time for the test files to sync to nfs
time.sleep(40)
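Several of the retest and bisect scripts in this listing repeat the same pattern: rsync the previous build root into a scratch directory, then remove the per-suite result directories so they get regenerated. A minimal helper capturing that pattern with plain subprocess/shutil instead of the build_support wrappers (the helper name and suite list are illustrative):

import os
import shutil
import subprocess

def prepare_retest_dir(src_root, dest_dir,
                       suites=("test", "piglit-test", "deqp-test", "cts-test")):
    # copy the previous build root, preserving permissions and timestamps
    subprocess.check_call(["rsync", "-rlptD", src_root.rstrip("/") + "/", dest_dir])
    # drop stale per-suite results so the retest recreates them
    for suite in suites:
        stale = os.path.join(dest_dir, suite)
        if os.path.exists(stale):
            shutil.rmtree(stale)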
Example #4
    def build(self):
        save_dir = os.getcwd()
        os.chdir(self._src_dir)
        try:
            bs.run_batch_command([
                "patch", "-p1", "CMakeLists.txt",
                self._pm.project_build_dir("vulkancts") + "/0001-Fix-PNG.patch"
            ])
        except:
            print "WARN: failed to apply PNG patch"
        try:
            bs.run_batch_command([
                "patch", "-p1",
                "external/vulkancts/modules/vulkan/vktTestPackage.cpp",
                self._pm.project_build_dir("vulkancts") +
                "/0002-Attempt-to-load-prebuilt-spirv-from-cache.patch"
            ])
        except:
            print "WARN: failed to apply prebuilt patch"
        os.chdir(save_dir)
        spirvtools = self._src_dir + "/external/spirv-tools/src"
        if not os.path.islink(spirvtools):
            bs.rmtree(spirvtools)
        if not os.path.exists(spirvtools):
            os.symlink("../../../spirvtools", spirvtools)
        spirvheaders_dir = self._src_dir + "/external/spirv-headers"
        if not os.path.exists(spirvheaders_dir):
            os.makedirs(spirvheaders_dir)
        spirvheaders = spirvheaders_dir + "/src"
        if not os.path.islink(spirvheaders):
            bs.rmtree(spirvheaders)
        if not os.path.exists(spirvheaders):
            os.symlink("../../../spirvheaders", spirvheaders)
        glslang = self._src_dir + "/external/glslang/src"
        if not os.path.islink(glslang):
            bs.rmtree(glslang)
        if not os.path.exists(glslang):
            os.symlink("../../../glslang", glslang)

        # change spirv-tools and glslang to use the commits specified
        # in the vulkancts sources
        sys.path = [os.path.abspath(os.path.normpath(s)) for s in sys.path]
        sys.path = [
            gooddir for gooddir in sys.path if "vulkancts" not in gooddir
        ]
        sys.path.append(self._src_dir + "/external/")
        fetch_sources = importlib.import_module("fetch_sources", ".")
        for package in fetch_sources.PACKAGES:
            if not isinstance(package, fetch_sources.GitRepo):
                continue
            repo_path = self._src_dir + "/external/" + package.baseDir + "/src/"
            print "Cleaning: " + repo_path + " : " + package.revision
            savedir = os.getcwd()
            os.chdir(repo_path)
            bs.run_batch_command(["git", "clean", "-xfd"])
            bs.run_batch_command(["git", "reset", "--hard", "HEAD"])
            os.chdir(savedir)
            print "Checking out: " + repo_path + " : " + package.revision
            repo = git.Repo(repo_path)
            repo.git.checkout(package.revision, force=True)

        btype = "Release"
        # Vulkan cts is twice as slow for RelDeb builds, which impacts
        # the CI throughput.  For this reason, we unconditionally
        # optimize the build.
        # if self._options.config == "debug":
        #    btype = "RelDeb"
        flags = "-m64"
        if self._options.arch == "m32":
            flags = "-m32"
        cmd = [
            "cmake", "-GNinja", "-DCMAKE_BUILD_TYPE=" + btype,
            "-DCMAKE_C_COMPILER_LAUNCHER=ccache",
            "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache", "-DCMAKE_C_FLAGS=" + flags,
            "-DCMAKE_CXX_FLAGS=" + flags, "-DCMAKE_C_COMPILER=clang",
            "-DCMAKE_CXX_COMPILER=clang++",
            "-DCMAKE_INSTALL_PREFIX:PATH=" + self._build_root, ".."
        ]
        if not os.path.exists(self._build_dir):
            os.makedirs(self._build_dir)
        os.chdir(self._build_dir)
        bs.run_batch_command(cmd)
        bs.run_batch_command(["ninja", "vk-build-programs"])
        save_dir = os.getcwd()
        os.chdir("external/vulkancts/modules/vulkan")
        out_dir = os.path.join(self._src_dir, "external", "vulkancts", "data",
                               "vulkan", "prebuilt")
        print "Pre-building spir-v binaries: vk-build-programs -d " + out_dir
        bs.run_batch_command(["./vk-build-programs", "-d", out_dir],
                             quiet=True,
                             streamedOutput=False)
        os.chdir(save_dir)
        bs.run_batch_command(["ninja"])
        bin_dir = self._build_root + "/opt/deqp/"
        if not os.path.exists(bin_dir):
            os.makedirs(bin_dir)

        bs.run_batch_command([
            "rsync", "-rlptD", self._build_dir + "/external/vulkancts/modules",
            bin_dir
        ])

        bs.Export().export()
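The symlink handling above (remove the path only when it is a real directory, then create the link only when nothing is there) is repeated for spirv-tools, glslang and spirv-headers. A small helper showing the same dance in isolation; the helper and the relative target are assumptions, not part of the vulkancts or build_support code:

import os
import shutil

def link_external(src_dir, name, target):
    # e.g. link_external(src_dir, "spirv-tools", "../../../spirvtools")
    path = os.path.join(src_dir, "external", name, "src")
    if os.path.isdir(path) and not os.path.islink(path):
        shutil.rmtree(path)      # a stale real checkout would block the symlink
    if not os.path.exists(path):
        os.symlink(target, path)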
Example #5
    def build(self):
        save_dir = os.getcwd()
        os.chdir(self._src_dir)
        try:
            bs.run_batch_command(["patch", "-p1", "CMakeLists.txt",
                                  self._pm.project_build_dir("vulkancts") + "/0001-Fix-PNG.patch"])
        except:
            print "WARN: failed to apply PNG patch"
        try:
            bs.run_batch_command(["patch", "-p1", "external/vulkancts/modules/vulkan/vktTestPackage.cpp",
                                  self._pm.project_build_dir("vulkancts") + "/0002-Attempt-to-load-prebuilt-spirv-from-cache.patch"])
        except:
            print "WARN: failed to apply prebuilt patch"
        os.chdir(save_dir)
        spirvtools = self._src_dir + "/external/spirv-tools/src"
        if not os.path.islink(spirvtools):
            bs.rmtree(spirvtools)
        if not os.path.exists(spirvtools):
            os.symlink("../../../spirvtools", spirvtools)
        glslang = self._src_dir + "/external/glslang/src"
        if not os.path.islink(glslang):
            bs.rmtree(glslang)
        if not os.path.exists(glslang):
            os.symlink("../../../glslang", glslang)

        # change spirv-tools and glslang to use the commits specified
        # in the vulkancts sources
        sys.path = [os.path.abspath(os.path.normpath(s)) for s in sys.path]
        sys.path = [gooddir for gooddir in sys.path if "vulkancts" not in gooddir]
        sys.path.append(self._src_dir + "/external/")
        fetch_sources = importlib.import_module("fetch_sources", ".")
        for package in fetch_sources.PACKAGES:
            if not isinstance(package, fetch_sources.GitRepo):
                continue
            repo_path = self._src_dir + "/external/" + package.baseDir + "/src/"
            print "Cleaning: " + repo_path + " : " + package.revision
            savedir = os.getcwd()
            os.chdir(repo_path)
            bs.run_batch_command(["git", "clean", "-xfd"])
            bs.run_batch_command(["git", "reset", "--hard", "HEAD"])
            os.chdir(savedir)
            print "Checking out: " + repo_path + " : " + package.revision
            repo = git.Repo(repo_path)
            repo.git.checkout(package.revision, force=True)
        
        btype = "Release"
        # Vulkan cts is twice as slow for RelDeb builds, which impacts
        # the CI throughput.  For this reason, we unconditionally
        # optimize the build.
        # if self._options.config == "debug":
        #    btype = "RelDeb"
        flags = "-m64"
        if self._options.arch == "m32":
            flags = "-m32"
        cmd = ["cmake", "-GNinja", "-DCMAKE_BUILD_TYPE=" + btype,
               "-DCMAKE_C_COMPILER_LAUNCHER=ccache",
               "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache",
               "-DCMAKE_C_FLAGS=" + flags, "-DCMAKE_CXX_FLAGS=" + flags,
               "-DCMAKE_C_COMPILER=clang-3.7", "-DCMAKE_CXX_COMPILER=clang++-3.7",
               "-DCMAKE_INSTALL_PREFIX:PATH=" + self._build_root, ".."]
        if not os.path.exists(self._build_dir):
            os.makedirs(self._build_dir)
        os.chdir(self._build_dir)
        bs.run_batch_command(cmd)
        bs.run_batch_command(["ninja", "vk-build-programs"])
        save_dir = os.getcwd()
        os.chdir("external/vulkancts/modules/vulkan")
        out_dir = os.path.join(self._src_dir, "external", "vulkancts", "data", "vulkan", "prebuilt")
        print "Pre-building spir-v binaries: vk-build-programs -d " + out_dir
        bs.run_batch_command(["./vk-build-programs", "-d", out_dir],
                             quiet=True,
                             streamedOutput=False)
        os.chdir(save_dir)
        bs.run_batch_command(["ninja"])
        bin_dir = self._build_root + "/opt/deqp/"
        if not os.path.exists(bin_dir):
            os.makedirs(bin_dir)

        bs.run_batch_command(["rsync", "-rlptD",
                              self._build_dir + "/external/vulkancts/modules",
                              bin_dir])

        bs.Export().export()
Example #6
def main():

    # reuse the options from the gasket
    o = bs.Options([sys.argv[0]])
    description = "builds a component on jenkins"
    parser = argparse.ArgumentParser(description=description,
                                     parents=[o._parser],
                                     conflict_handler="resolve")

    parser.add_argument('--branch', type=str, default="mesa_master",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")

    parser.add_argument('--revision',
                        type=str,
                        default="",
                        help="specific set of revisions to build.")

    parser.add_argument('--test', type=str, default=None,
                        help="Name of test to execute.  Arch/hardware suffix "\
                        "will override those options")

    args = parser.parse_args()
    branch = args.branch
    revision = args.revision
    test = args.test

    # some build_local params are not handled by the Options, which is
    # used by other modules.  This code strips out incompatible args
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["branch"]
    del vdict["revision"]
    del vdict["test"]

    # override hardware/arch with suffix if available
    if not test:
        print "ERROR: --test argument required"
        sys.exit(-1)

    test_suffix = test.split(".")[-1]
    if test_suffix[-3:] in ["m32", "m64"]:
        vdict["arch"] = test_suffix[-3:]
        vdict["hardware"] = test_suffix[:-3]
    else:
        if vdict["hardware"] == "builder":
            # can't run tests on a builder
            vdict["hardware"] = "bdwgt2"
        # set the suffix in the way that piglit-test expects, eg "ilkm32"
        test = test + "." + vdict["hardware"] + vdict["arch"]

    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    # check out the branch, refined by any manually-specfied revisions
    bspec = bs.BuildSpecification()
    bspec.checkout(branch)
    if (revision):
        revspec = bs.RevisionSpecification.from_cmd_line_param(
            revision.split())
        revspec.checkout()

    revspec = bs.RevisionSpecification()
    print "Building revision: " + revspec.to_cmd_line_param()

    # create a result_path that is unique for this set of builds
    spec_xml = bs.ProjectMap().build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([
        results_dir, branch,
        revspec.to_cmd_line_param().replace(" ", "_"), "single_test"
    ])
    o.result_path = result_path

    # allow re-execution of tests (if different test was specified)
    bs.rmtree(result_path + "/test")

    depGraph = bs.DependencyGraph("piglit-test", o)
    bi = bs.ProjectInvoke(project="piglit-test", options=o)

    # remove the test build, because we want to build it directly
    depGraph.build_complete(bi)
    bi.set_info("status", "single-test-rebuild")

    jen = bs.Jenkins(result_path=result_path, revspec=revspec)
    jen.build_all(depGraph)
    jen.build(bi, extra_arg="--piglit_test=" + test)
    jen.wait_for_build()
    time.sleep(10)

    pm = bs.ProjectMap()
    out_test_dir = pm.output_dir()
    if os.path.exists(out_test_dir):
        bs.rmtree(out_test_dir)
    os.makedirs(out_test_dir)
    collate_tests(result_path, out_test_dir)
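The suffix handling above packs hardware and architecture into the last component of the test name (e.g. "bdwgt2m32"). The same parsing as a standalone sketch with an illustrative name:

def split_test_suffix(test):
    # returns (hardware, arch) when the test name carries a suffix, else (None, None)
    suffix = test.split(".")[-1]
    if suffix[-3:] in ("m32", "m64"):
        return suffix[:-3], suffix[-3:]
    return None, None

print(split_test_suffix("piglit.some.test.bdwgt2m32"))  # ('bdwgt2', 'm32')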
Example #7
 def clean(self):
     pm = bs.ProjectMap()
     bs.git_clean(pm.project_source_dir("mesa"))
     bs.rmtree(self._build_dir)
Example #8
 def clean(self):
     bs.run_batch_command(["git", "clean", "-xfd"])
     bs.run_batch_command(["git", "reset", "--hard", "HEAD"])
     bs.rmtree("repos")
Example #9
_revspec.checkout()
_revspec = bs.RevisionSpecification()

pm = bs.ProjectMap()
spec_xml = pm.build_spec()
results_dir = spec_xml.find("build_master").attrib["results_dir"]
hashstr = _revspec.to_cmd_line_param().replace(" ", "_")
bisect_dir = results_dir + "/update/" + hashstr
if os.path.exists(bisect_dir):
    print "Removing existing retest."
    mvdir = os.path.normpath(bisect_dir + "/../" + datetime.datetime.now().isoformat())
    os.rename(bisect_dir, mvdir)
    
cmd = ["rsync", "-rlptD", "/".join(dirnames[:-1]) +"/", bisect_dir]
bs.run_batch_command(cmd)
bs.rmtree(bisect_dir + "/test")
bs.rmtree(bisect_dir + "/piglit-test")
bs.rmtree(bisect_dir + "/deqp-test")
bs.rmtree(bisect_dir + "/cts-test")
bs.rmtree(bisect_dir + "/crucible-test")

j = bs.Jenkins(_revspec, bisect_dir)
o = bs.Options(["bisect_all.py"])
o.result_path = bisect_dir
o.retest_path = args.result_path
depGraph = bs.DependencyGraph(["piglit-gpu-all"], o)

print "Retesting mesa to: " + bisect_dir
try:
    j.build_all(depGraph, print_summary=False)
except bs.BuildFailure:
Example #10


import sys
import os
import time
import stat
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), ".."))
import build_support as bs


# from http://stackoverflow.com/questions/6879364/print-file-age-in-seconds-using-python
def file_age_in_seconds(pathname):
    return time.time() - os.stat(pathname)[stat.ST_MTIME]

def file_age_in_days(pathname):
    return file_age_in_seconds(pathname) / (60*60*24)

result_path = "/mnt/jenkins/results/"

for a_dir in os.listdir(result_path):
    if a_dir == "traceValidator":
        continue
    sub_dir = result_path + a_dir
    for a_build_dir in os.listdir(sub_dir):
        build_dir = sub_dir + "/" + a_build_dir
        if os.path.islink(build_dir):
            continue
        if file_age_in_days(build_dir) > 20:
            bs.rmtree(build_dir)
Example #12
def main():
    # reuse the options from the gasket
    o = bs.Options([sys.argv[0]])
    description = "builds a component on jenkins"
    parser = argparse.ArgumentParser(description=description,
                                     parents=[o._parser],
                                     conflict_handler="resolve")
    parser.add_argument('--project', dest='project', type=str, default="",
                        help='Project to build. Default project is specified '\
                        'for the branch in build_specification.xml')

    parser.add_argument('--branch', type=str, default="mesa_master",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")

    parser.add_argument('--revision',
                        type=str,
                        default="",
                        help="specific set of revisions to build.")

    parser.add_argument('--rebuild',
                        type=str,
                        default="false",
                        choices=['true', 'false'],
                        help="specific set of revisions to build."
                        "(default: %(default)s)")
    parser.add_argument("--tar",
                        help="generate tar for email notification",
                        action="store_true")
    parser.add_argument('--results_subdir',
                        type=str,
                        default="",
                        help="Subdirectory under results_dir to place results."
                        " Use this to prevent conflicts when running"
                        "multiple concurrent tests on the same branch.")

    args = parser.parse_args()
    projects = []
    if args.project:
        projects = args.project.split(",")
    branch = args.branch
    revision = args.revision
    rebuild = args.rebuild
    results_subdir = args.results_subdir or branch

    # some build_local params are not handled by the Options, which is
    # used by other modules.  This code strips out incompatible args
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["project"]
    del vdict["branch"]
    del vdict["revision"]
    del vdict["rebuild"]
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    bspec = bs.BuildSpecification()

    pm = bs.ProjectMap()
    bs.rmtree(pm.source_root() + "/test_summary.txt")
    bs.rmtree(pm.source_root() + "/results/test/results.tgz")

    # start with the specified branch, then layer any revision spec on
    # top of it
    bspec.checkout(branch)
    revspec = None
    if (revision):
        revspec = bs.RevisionSpecification.from_cmd_line_param(
            revision.split())
        revspec.checkout()

    revspec = bs.RevisionSpecification()
    print "Building revision: " + revspec.to_cmd_line_param()

    hashstr = revspec.to_cmd_line_param().replace(" ", "_")

    # create a result_path that is unique for this set of builds
    spec_xml = pm.build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([results_dir, results_subdir, hashstr, o.type])
    o.result_path = result_path

    if rebuild == "true" and os.path.exists(result_path):
        print "Removing existing results."
        mvdir = os.path.normpath(result_path + "/../" +
                                 datetime.datetime.now().isoformat())
        os.rename(result_path, mvdir)

    if not projects:
        branchspec = bspec.branch_specification(branch)
        projects = [branchspec.project]

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    global jen

    jen = bs.Jenkins(result_path=result_path, revspec=revspec)

    depGraph = bs.DependencyGraph(projects, o)

    out_test_dir = pm.output_dir()
    if os.path.exists(out_test_dir):
        bs.rmtree(out_test_dir)
    os.makedirs(out_test_dir)

    # to collate all logs in the scheduler
    out_log_dir = pm.output_dir()
    if os.path.exists(out_log_dir):
        bs.rmtree(out_log_dir)
    os.makedirs(out_log_dir)

    # Add a revisions.xml file
    if not os.path.exists(result_path):
        os.makedirs(result_path)
    revspec.to_elementtree().write(os.path.join(result_path, 'revisions.xml'))

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    try:
        jen.build_all(depGraph, branch=branch)
    finally:
        collate_tests(result_path, out_test_dir, make_tar=args.tar)
Example #13
print "Piglit revisions under bisection:"
found = False
for commit in piglit_repo.iter_commits(max_count=1000):
    piglit_commits.append(commit)
    print commit.hexsha
    if good_revisions["piglit"] in commit.hexsha:
        found = True
        break
assert(found)

# retest build, in case expected failures has been updated
# copy build root to bisect directory
bisect_dir = results_dir + "/bisect/" + hash_dir
cmd = ["rsync", "-rlptD", "/".join(dirnames[:-1]) +"/", bisect_dir]
bs.run_batch_command(cmd)
bs.rmtree(bisect_dir + "/test")
bs.rmtree(bisect_dir + "/piglit-test")
bs.rmtree(bisect_dir + "/deqp-test")
bs.rmtree(bisect_dir + "/cts-test")

j = bs.Jenkins(_revspec, bisect_dir)
o = bs.Options(["bisect_all.py"])
o.result_path = bisect_dir
o.retest_path = args.result_path
depGraph = bs.DependencyGraph(["piglit-gpu-all"], o)
print "Retesting piglit to: " + bisect_dir
j.build_all(depGraph, print_summary=False)

# make sure there is enough time for the test files to sync to nfs
time.sleep(40)
new_failures = bs.TestLister(bisect_dir + "/test/")
Example #14
def main():

    # reuse the options from the gasket
    o = bs.Options([sys.argv[0]])
    description = "builds a component on jenkins"
    parser = argparse.ArgumentParser(description=description,
                                     parents=[o._parser],
                                     conflict_handler="resolve")

    parser.add_argument('--branch', type=str, default="mesa_master",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")

    parser.add_argument('--revision', type=str, default="",
                        help="specific set of revisions to build.")

    parser.add_argument('--test', type=str, default=None,
                        help="Name of test to execute.  Arch/hardware suffix "\
                        "will override those options")

    args = parser.parse_args()
    branch = args.branch
    revision = args.revision
    test = args.test

    # some build_local params are not handled by the Options, which is
    # used by other modules.  This code strips out incompatible args
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["branch"]
    del vdict["revision"]
    del vdict["test"]

    # override hardware/arch with suffix if available
    if not test:
        print "ERROR: --test argument required"
        sys.exit(-1)
        
    test_suffix = test.split(".")[-1]
    if test_suffix[-3:] in ["m32", "m64"]:
        vdict["arch"] = test_suffix[-3:]
        vdict["hardware"] = test_suffix[:-3]
    else:
        if vdict["hardware"] == "builder":
            # can't run tests on a builder
            vdict["hardware"] = "bdwgt2"
        # set the suffix in the way that piglit-test expects, eg "ilkm32"
        test = test + "." + vdict["hardware"] + vdict["arch"]
        
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_string().split()

    # check out the branch, refined by any manually-specfied revisions
    bspec = bs.BuildSpecification()
    bspec.checkout(branch)
    if (revision):
        revspec = bs.RevisionSpecification(from_cmd_line=revision.split())
        revspec.checkout()

    revspec = bs.RevisionSpecification()
    print "Building revision: " + revspec.to_cmd_line_param()

    # create a result_path that is unique for this set of builds
    spec_xml = bs.ProjectMap().build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([results_dir, branch,
                            revspec.to_cmd_line_param().replace(" ", "_"), "single_test"])
    o.result_path = result_path

    # allow re-execution of tests (if different test was specified)
    bs.rmtree(result_path + "/test")

    depGraph = bs.DependencyGraph("piglit-test", o)
    bi = bs.ProjectInvoke(project="piglit-test", 
                          options=o)

    # remove the test build, because we want to build it directly
    depGraph.build_complete(bi)
    bi.set_info("status", "single-test-rebuild")

    jen = bs.Jenkins(result_path=result_path,
                     revspec=revspec)
    jen.build_all(depGraph)
    jen.build(bi, extra_arg="--piglit_test=" + test)
    jen.wait_for_build()
    time.sleep(10)

    pm = bs.ProjectMap()
    out_test_dir = pm.output_dir()
    if os.path.exists(out_test_dir):
        bs.rmtree(out_test_dir)
    os.makedirs(out_test_dir)
    collate_tests(result_path, out_test_dir)
Example #15
def main():
    # reuse the options from the gasket
    o = bs.Options([sys.argv[0]])
    description = "builds a component on jenkins"
    parser = argparse.ArgumentParser(description=description,
                                     parents=[o._parser],
                                     conflict_handler="resolve")
    parser.add_argument('--project', dest='project', type=str, default="",
                        help='Project to build. Default project is specified '\
                        'for the branch in build_specification.xml')

    parser.add_argument('--revision', type=str, default="",
                        help="mesa revision to test.")

    args = parser.parse_args()
    projects = []
    if args.project:
        projects = args.project.split(",")
    revision = args.revision

    bspec = bs.BuildSpecification()
    bspec.checkout("mesa_perf")
    mesa_repo = git.Repo(bs.ProjectMap().project_source_dir("mesa"))

    if ":" in revision:
        (start_rev, end_rev) = revision.split(":")
        if not end_rev:
            # user selected the last point in a plot.  Build current master
            revision = "mesa=" + mesa_repo.git.rev_parse("HEAD", short=True)
        elif not start_rev:
            print "ERROR: user-generated perf builds cannot add older data points to the plot"
            sys.exit(-1)
        else:
            commits = []
            start_commit = mesa_repo.commit(start_rev)
            found = False
            for commit in mesa_repo.iter_commits(end_rev, max_count=8000):
                if commit == start_commit:
                    found = True
                    break
                commits.append(commit.hexsha)
            if not found:
                print "ERROR: " + start_rev + " not found in history of " + end_rev
                sys.exit(-1)
            revision = "mesa=" + commits[len(commits)/2]

    # some build_local params are not handled by the Options, which is
    # used by other modules.  This code strips out incompatible args
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["project"]
    del vdict["revision"]
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    pm = bs.ProjectMap()
    bs.rmtree(pm.source_root() + "/test_summary.txt")

    # checkout the desired revision on top of recent revisions
    if not revision:
        # randomly select a commit post 11.2
        branch_commit = mesa_repo.tags["17.0-branchpoint"].commit.hexsha
        commits = []
        for commit in mesa_repo.iter_commits('origin/master', max_count=8000):
            if commit.hexsha == branch_commit:
                break
            commits.append(commit.hexsha)
        revision = "mesa=" + str(commits[int(random.random() * len(commits))])
        
    revspec = bs.RevisionSpecification(from_cmd_line=revision.split())
    revspec.checkout()

    revspec = bs.RevisionSpecification()
    hashstr = "mesa=" + revspec.revision("mesa")
    print "Building revision: " + hashstr

    # create a result_path that is unique for this set of builds
    spec_xml = pm.build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([results_dir, "perf", hashstr])
    o.result_path = result_path

    if not projects:
        projects = ["perf-all"]

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    global jen

    jen = bs.Jenkins(result_path=result_path,
                     revspec=revspec)

    depGraph = bs.DependencyGraph(projects, o)
    for i in depGraph.all_builds():
        if i.project != "mesa-perf":
            i.set_info("status", "rebuild")

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    try:
        jen.build_all(depGraph, branch="mesa_master")
    except Exception as e:
        print "ERROR: encountered failure: " + str(e)
        raise
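When a "start:end" range is given above, the script walks history from end_rev back to start_rev and rebuilds the midpoint commit, so repeated manual triggers converge on a regression much like git bisect. The range-to-midpoint step in isolation, using GitPython as the example does (the helper name is an assumption):

import git

def midpoint_revision(repo_path, start_rev, end_rev, max_count=8000):
    repo = git.Repo(repo_path)
    start_commit = repo.commit(start_rev)
    commits = []
    for commit in repo.iter_commits(end_rev, max_count=max_count):
        if commit == start_commit:
            # midpoint of the commits strictly newer than start_rev
            return commits[len(commits) // 2] if commits else start_commit.hexsha
        commits.append(commit.hexsha)
    raise ValueError(start_rev + " not found in history of " + end_rev)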
Example #16
_revspec = bs.RevisionSpecification()

pm = bs.ProjectMap()
spec_xml = pm.build_spec()
results_dir = spec_xml.find("build_master").attrib["results_dir"]
hashstr = _revspec.to_cmd_line_param().replace(" ", "_")
bisect_dir = results_dir + "/update/" + hashstr
if os.path.exists(bisect_dir):
    print "Removing existing retest."
    mvdir = os.path.normpath(bisect_dir + "/../" +
                             datetime.datetime.now().isoformat())
    os.rename(bisect_dir, mvdir)

cmd = ["rsync", "-rlptD", "/".join(dirnames[:-1]) + "/", bisect_dir]
bs.run_batch_command(cmd)
bs.rmtree(bisect_dir + "/test")
bs.rmtree(bisect_dir + "/piglit-test")
bs.rmtree(bisect_dir + "/deqp-test")
bs.rmtree(bisect_dir + "/cts-test")
bs.rmtree(bisect_dir + "/crucible-test")

j = bs.Jenkins(_revspec, bisect_dir)
o = bs.Options(["bisect_all.py"])
o.result_path = bisect_dir
o.retest_path = args.result_path
depGraph = bs.DependencyGraph(["piglit-gpu-all"], o)

print "Retesting mesa to: " + bisect_dir
try:
    j.build_all(depGraph, print_summary=False)
except bs.BuildFailure:
Example #18
import sys
import os
import time
import stat
sys.path.append(
    os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), "..", "repos",
                 "mesa_ci"))
import build_support as bs


# from http://stackoverflow.com/questions/6879364/print-file-age-in-seconds-using-python
def file_age_in_seconds(pathname):
    return time.time() - os.stat(pathname)[stat.ST_MTIME]


def file_age_in_days(pathname):
    return file_age_in_seconds(pathname) / (60 * 60 * 24)


result_path = "/mnt/jenkins/results/"

for a_dir in os.listdir(result_path):
    if a_dir == "traceValidator":
        continue
    if a_dir == "perf_win":
        continue
    sub_dir = result_path + a_dir
    for a_build_dir in os.listdir(sub_dir):
        build_dir = sub_dir + "/" + a_build_dir
        if os.path.islink(build_dir):
            continue
        if file_age_in_days(build_dir) > 20:
            bs.rmtree(build_dir)
Example #19
def bisect(project, args, commits):
    if not commits:
        return
    current_build = len(commits) / 2
    rev = project + "=" + commits[current_build].hexsha
    print "Range: " + commits[0].hexsha + " - " + commits[-1].hexsha
    print "Building revision: " + rev

    # remove inadvertent whitespace, which is easy to add when
    # triggering builds on jenkins
    test_name = args.test_name.strip()

    hw_arch = test_name.split(".")[-1]
    o = bs.Options(args=["ignore"])
    o.type = "developer"
    o.config = "debug"
    o.arch = hw_arch[-3:]
    o.hardware = hw_arch[:-3]
    o.action = ["build", "test"]

    revspec = bs.RevisionSpecification(from_cmd_line=[rev])
    revspec.checkout()
    revspec = bs.RevisionSpecification()
    hashstr = revspec.to_cmd_line_param().replace(" ", "_")
    spec_xml = bs.ProjectMap().build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([results_dir, "mesa_master", hashstr, "bisect"])
    o.result_path = result_path
    bs.rmtree(result_path + "/test")

    global jen
    jen = bs.Jenkins(result_path=result_path,
                     revspec=revspec)
    
    depGraph = bs.DependencyGraph("piglit-test", o)
    # remove test build from graph, because we always want to build
    # it.
    bi = bs.ProjectInvoke(project="piglit-test", 
                          options=o)
    bi.set_info("status", "bisect-rebuild")

    depGraph.build_complete(bi)
    try:
        jen.build_all(depGraph, triggered_builds_str, "bisect")
        print "Starting: " + bi.to_short_string()
        test_name_good_chars = re.sub('[_ !:]', ".", test_name)
        jen.build(bi, branch="mesa_master", extra_arg="--piglit_test=" + test_name_good_chars)
        jen.wait_for_build()
    except bs.BuildFailure:
        print "BUILD FAILED - exception: " + rev
        if current_build + 1 == len(commits):
            print "FIRST DETECTED FAILURE: " + rev
            return
        return bisect(project, args, commits[current_build+1:])

    test_result = "/".join([result_path, "test", "piglit-test_" + 
                            o.hardware + "_" + o.arch + ".xml"])
    iteration = 0
    while not os.path.exists(test_result):
        if iteration < 40:
            time.sleep(1)
            iteration = iteration + 1
            continue
        print "BUILD FAILED - no test results: " + rev + " : " + test_result
        if current_build + 1 == len(commits):
            print "FIRST DETECTED FAILURE: " + rev
            return
        return bisect(project, args, commits[current_build + 1:])

    result = ET.parse(test_result)
    for testcase in result.findall("./testsuite/testcase"):
        testname = testcase.attrib["classname"] + "." + testcase.attrib["name"]
        if testname != test_name:
            continue
        if testcase.findall("skipped"):
            print "ERROR: the target test was skipped"
        if testcase.findall("failure") or testcase.findall("error"):
            print "TEST FAILED: " + rev
            if current_build + 1 == len(commits):
                print "FIRST DETECTED FAILURE: " + rev
                return
            return bisect(project, args, commits[current_build + 1:])

        print "TEST PASSED: " + rev
        if current_build == 0:
            print "LAST DETECTED SUCCESS: " + rev
            return
        return bisect(project, args, commits[:current_build])

    print "ERROR -- TEST NOT FOUND: " + test_name
    if current_build == 0:
        print "LAST DETECTED SUCCESS: " + rev
        return
    return bisect(project, args, commits[:current_build])
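The recursion above halves the commit list each round: a failure keeps the older half (commits[current_build + 1:]), a pass keeps the newer half (commits[:current_build]). Stripped of the Jenkins plumbing, the search reduces to an ordinary binary search for the boundary; a sketch with an illustrative predicate, assuming commits are ordered newest-first with failures contiguous at the front:

def first_bad_commit(commits, is_bad):
    # returns the oldest failing commit (the suspected regression), or None
    lo, hi = 0, len(commits)          # the boundary index lies in [lo, hi]
    while lo < hi:
        mid = (lo + hi) // 2
        if is_bad(commits[mid]):
            lo = mid + 1              # mid fails, so the boundary is older
        else:
            hi = mid                  # mid passes, so the boundary is at mid or newer
    return commits[lo - 1] if lo > 0 else None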
Example #20
def main():
    parser = argparse.ArgumentParser(description='Build projects locally.')

    # TODO: provide a pull action to update the repos
    parser.add_argument('--action', type=str, default=["build"],
                        choices=CsvChoice('fetch', 'build', 'clean', 'test'),
                        action=CsvAction,
                        help="Action to recurse with. 'build', 'clean' "\
                        "or 'test'. (default: %(default)s)")

    parser.add_argument('--project',
                        dest='project',
                        type=str,
                        default="mesa",
                        help='project to build. (default: %(default)s)')
    parser.add_argument('--arch',
                        dest='arch',
                        type=str,
                        default='m64',
                        choices=['m64', 'm32'],
                        help='arch to build. (default: %(default)s)')
    parser.add_argument('--config',
                        type=str,
                        default="release",
                        choices=['release', 'debug'],
                        help="Release or Debug build. (default: %(default)s)")

    parser.add_argument('--type', type=str, default="developer",
                        choices=['developer', 'percheckin',
                                 'daily', 'release'],
                        help="category of tests to run. "\
                        "(default: %(default)s)")

    parser.add_argument('--branch', type=str, default="none",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")
    parser.add_argument(
        '--env',
        type=str,
        default="",
        help="If specified, overrides environment variable settings"
        "EG: 'LIBGL_DEBUG=1 INTEL_DEBUG=perf'")
    parser.add_argument('--hardware',
                        type=str,
                        default='builder',
                        help="The hardware to be targeted for test "
                        "('builder', 'snbgt1', 'ivb', 'hsw', 'bdw'). "
                        "(default: %(default)s)")

    args = parser.parse_args()
    project = args.project

    if "fetch" in args.action:
        # fetch not supported by build.py scripts, which will parse argv
        bs.RepoSet().fetch()
    branch = args.branch
    if (branch != "none"):
        bs.BuildSpecification().checkout(branch)

    # some build_local params are not handled by the Options, which is
    # used by other modules
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["project"]
    del vdict["branch"]
    if "fetch" in vdict["action"]:
        vdict["action"].remove("fetch")
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    if "clean" in args.action:
        bs.rmtree(bs.ProjectMap().build_root())

    graph = bs.DependencyGraph(project, o)
    ready = graph.ready_builds()
    pm = bs.ProjectMap()
    while ready:
        for bi in ready:
            graph.build_complete(bi)
            proj_build_dir = pm.project_build_dir(bi.project)
            script = proj_build_dir + "/build.py"
            if os.path.exists(script):
                bs.run_batch_command([sys.executable, script] + o.to_list())
        ready = graph.ready_builds()
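The local scheduler above drives bs.DependencyGraph through a small protocol: ready_builds() yields the invokes whose dependencies are satisfied, and build_complete() marks one finished so its dependents become ready. A toy stand-in showing the same protocol (this is not the real class, just an illustration of the loop):

class ToyDependencyGraph(object):
    def __init__(self, deps):
        # deps maps project name -> list of projects it depends on
        self._pending = dict((k, set(v)) for k, v in deps.items())

    def ready_builds(self):
        return [p for p, d in self._pending.items() if not d]

    def build_complete(self, project):
        del self._pending[project]
        for remaining in self._pending.values():
            remaining.discard(project)

graph = ToyDependencyGraph({"mesa": [], "piglit-build": [],
                            "piglit-test": ["mesa", "piglit-build"]})
while graph.ready_builds():
    for project in graph.ready_builds():
        graph.build_complete(project)   # a real scheduler runs the project's build.py here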
Example #21
def main():
    signal.signal(signal.SIGINT, abort_builds)
    signal.signal(signal.SIGABRT, abort_builds)
    signal.signal(signal.SIGTERM, abort_builds)

    # reuse the options from the gasket
    o = bs.Options([sys.argv[0]])
    description = "builds a component on jenkins"
    parser = argparse.ArgumentParser(description=description,
                                     parents=[o._parser],
                                     conflict_handler="resolve")
    parser.add_argument('--project', dest='project', type=str, default="",
                        help='Project to build. Default project is specified '\
                        'for the branch in build_specification.xml')

    parser.add_argument('--branch', type=str, default="mesa_master",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")

    parser.add_argument('--revision', type=str, default="",
                        help="specific set of revisions to build.")

    parser.add_argument('--rebuild', type=str, default="false",
                        choices=['true', 'false'], 
                        help="specific set of revisions to build."
                        "(default: %(default)s)")


    args = parser.parse_args()
    projects = []
    if args.project:
        projects = args.project.split(",")
    branch = args.branch
    revision = args.revision
    rebuild = args.rebuild

    # some build_local params are not handled by the Options, which is
    # used by other modules.  This code strips out incompatible args
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["project"]
    del vdict["branch"]
    del vdict["revision"]
    del vdict["rebuild"]
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_string().split()

    bspec = bs.BuildSpecification()

    # start with the specified branch, then layer any revision spec on
    # top of it
    bspec.checkout(branch)
    revspec = None
    if (revision):
        revspec = bs.RevisionSpecification(from_cmd_line=revision.split())
        revspec.checkout()

    revspec = bs.RevisionSpecification()
    print "Building revision: " + revspec.to_cmd_line_param()

    hashstr = revspec.to_cmd_line_param().replace(" ", "_")

    # create a result_path that is unique for this set of builds
    spec_xml = bs.ProjectMap().build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([results_dir, branch, hashstr, o.type])
    o.result_path = result_path

    if rebuild == "true" and os.path.exists(result_path):
        print "Removing existing results."
        bs.rmtree(result_path)

    pm = bs.ProjectMap()
    if not projects:
        branchspec = bspec.branch_specification(branch)
        projects = [branchspec.project]

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    global jen

    jen = bs.Jenkins(result_path=result_path,
                     revspec=revspec)


    depGraph = bs.DependencyGraph(projects, o)

    ready_for_build = depGraph.ready_builds()
    assert(ready_for_build)

    completed_builds = []
    failure_builds = []

    success = True

    out_test_dir = pm.output_dir()
    if os.path.exists(out_test_dir):
        bs.rmtree(out_test_dir)
    os.makedirs(out_test_dir)

    # to collate all logs in the scheduler
    out_log_dir = pm.output_dir()
    if os.path.exists(out_log_dir):
        bs.rmtree(out_log_dir)
    os.makedirs(out_log_dir)

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    global triggered_builds_str
    while success:
        jen.print_builds()
        builds_in_round = 0
        for an_invoke in ready_for_build:
            status = an_invoke.get_info("status", block=False)

            if status == "success" or status == "unstable":
                # don't rebuild if we have a good build, or just
                # because some tests failure
                completed_builds.append(an_invoke)
                depGraph.build_complete(an_invoke)
                builds_in_round += 1
                print "Already built: " + an_invoke.to_short_string()
                continue

            proj_build_dir = pm.project_build_dir(an_invoke.project)
            script = proj_build_dir + "/build.py"
            if not os.path.exists(script):
                depGraph.build_complete(an_invoke)
                continue

            try:
                print "Starting: " + an_invoke.to_short_string()
                jen.build(an_invoke, branch=branch)
                an_invoke.set_info("trigger_time", time.time())
                triggered_builds_str.append(str(an_invoke))
            except(bs.BuildInProgress) as e:
                print e
                success = False
                break

        if not success:
            break

        finished = None
        try:
            finished = jen.wait_for_build()
            if finished:
                builds_in_round += 1
        except(bs.BuildFailure) as failure:
            failure.invoke.set_info("status", "failure")
            url = failure.url
            job_name = url.split("/")[-3]
            build_number = url.split("/")[-2]
            build_directory = "/var/lib/jenkins/jobs/" \
                              "{0}/builds/{1}".format(job_name.lower(), 
                                                      build_number)
            if os.path.exists(build_directory):
                log_file = os.path.join(build_directory, "log")
                shutil.copy(log_file, out_log_dir)

            # abort the builds, but let daily/release builds continue
            # as far as possible
            if o.type == "percheckin" or o.type == "developer":
                time.sleep(6)  # quiet period
                for an_invoke_str in triggered_builds_str:
                    print "Aborting: " + an_invoke_str
                    pi = bs.ProjectInvoke(from_string=an_invoke_str)
                    jen.abort(pi)
                    failure_builds.append(pi)
                #CleanServer(o).clean()
                bs.write_summary(pm.source_root(), 
                                 failure_builds + completed_builds, 
                                 jen, 
                                 failure=True)
                raise

            # else for release/daily builds, continue waiting for the
            # rest of the builds.
            print "Build failure: " + failure.url
            print "Build failure: " + str(failure.invoke)
            failure_builds.append(failure.invoke)
            builds_in_round += 1

        if finished:
            finished.invoke.set_info("status", finished.status)
            print "Build finished: " + finished.url
            print "Build finished: " + finished.invoke.to_short_string()

            completed_builds.append(finished.invoke)
            depGraph.build_complete(finished.invoke)

        elif not builds_in_round:
            # nothing was built, and there was no failure => the last
            # project is built

            #stub_test_results(out_test_dir, o.hardware)
            # CleanServer(o).clean()
            bs.write_summary(pm.source_root(), 
                             failure_builds + completed_builds, 
                             jen)
            if failure_builds:
                raise bs.BuildFailure(failure_builds[0], "")

            collate_tests(result_path, out_test_dir)

            return
            
        ready_for_build = depGraph.ready_builds()

        # filter out builds that have already been triggered
        ready_for_build = [j for j in ready_for_build 
                           if str(j) not in triggered_builds_str]
Example #22
def main():
    # reuse the options from the gasket
    o = bs.Options([sys.argv[0]])
    description = "builds a component on jenkins"
    parser = argparse.ArgumentParser(description=description,
                                     parents=[o._parser],
                                     conflict_handler="resolve")
    parser.add_argument('--project', dest='project', type=str, default="",
                        help='Project to build. Default project is specified '\
                        'for the branch in build_specification.xml')

    parser.add_argument('--revision', type=str, default="",
                        help="mesa revision to test.")

    args = parser.parse_args()
    projects = []
    if args.project:
        projects = args.project.split(",")
    revision = args.revision

    bspec = bs.BuildSpecification()
    bspec.checkout("mesa_perf")
    mesa_repo = git.Repo(bs.ProjectMap().project_source_dir("mesa"))

    if ":" in revision:
        (start_rev, end_rev) = revision.split(":")
        if not end_rev:
            # user selected the last point in a plot.  Build current master
            revision = "mesa=" + mesa_repo.git.rev_parse("HEAD", short=True)
        elif not start_rev:
            print "ERROR: user-generated perf builds cannot add older data points to the plot"
            sys.exit(-1)
        else:
            commits = []
            start_commit = mesa_repo.commit(start_rev)
            found = False
            for commit in mesa_repo.iter_commits(end_rev, max_count=8000):
                if commit == start_commit:
                    found = True
                    break
                commits.append(commit.hexsha)
            if not found:
                print "ERROR: " + start_rev + " not found in history of " + end_rev
                sys.exit(-1)
            revision = "mesa=" + commits[len(commits)/2]

    # some build_local params are not handled by the Options, which is
    # used by other modules.  This code strips out incompatible args
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["project"]
    del vdict["revision"]
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    pm = bs.ProjectMap()
    bs.rmtree(pm.source_root() + "/test_summary.txt")

    # checkout the desired revision on top of recent revisions
    if not revision:
        # randomly select a commit post 11.2
        branch_commit = mesa_repo.tags["11.2-branchpoint"].commit.hexsha
        commits = []
        for commit in mesa_repo.iter_commits('origin/master', max_count=8000):
            if commit.hexsha == branch_commit:
                break
            commits.append(commit.hexsha)
        revision = "mesa=" + str(commits[int(random.random() * len(commits))])
        
    revspec = bs.RevisionSpecification(from_cmd_line=revision.split())
    revspec.checkout()

    revspec = bs.RevisionSpecification()
    hashstr = "mesa=" + revspec.revision("mesa")
    print "Building revision: " + hashstr

    # create a result_path that is unique for this set of builds
    spec_xml = pm.build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([results_dir, "perf", hashstr])
    o.result_path = result_path

    if not projects:
        projects = ["perf-all"]

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    global jen

    jen = bs.Jenkins(result_path=result_path,
                     revspec=revspec)

    depGraph = bs.DependencyGraph(projects, o)
    for i in depGraph.all_builds():
        if i.project != "mesa-perf":
            i.set_info("status", "rebuild")

    # use a global, so signal handler can abort builds when scheduler
    # is interrupted
    try:
        jen.build_all(depGraph, branch="mesa_master")
    except Exception as e:
        print "ERROR: encountered failure: " + str(e)
        raise
Example #23
def main():
    parser = argparse.ArgumentParser(description='Build projects locally.')

    # TODO: provide a pull action to update the repos
    parser.add_argument('--action', type=str, default=["build"],
                        choices=CsvChoice('fetch', 'build', 'clean', 'test'),
                        action=CsvAction,
                        help="Action to recurse with. 'build', 'clean' "\
                        "or 'test'. (default: %(default)s)")

    parser.add_argument('--project', dest='project', type=str, default="mesa",
                        help='project to build. (default: %(default)s)')
    parser.add_argument('--arch', dest='arch', type=str, 
                        default='m64', choices=['m64', 'm32'],
                        help='arch to build. (default: %(default)s)')
    parser.add_argument('--config', type=str, default="release", 
                        choices=['release', 'debug'],
                        help="Release or Debug build. (default: %(default)s)")

    parser.add_argument('--type', type=str, default="developer",
                        choices=['developer', 'percheckin', 
                                 'daily', 'release'],
                        help="category of tests to run. "\
                        "(default: %(default)s)")

    parser.add_argument('--branch', type=str, default="none",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")
    parser.add_argument('--env', type=str, default="",
                        help="If specified, overrides environment variable settings"
                        "EG: 'LIBGL_DEBUG=1 INTEL_DEBUG=perf'")
    parser.add_argument('--hardware', type=str, default='builder',
                        help="The hardware to be targeted for test "
                        "('builder', 'snbgt1', 'ivb', 'hsw', 'bdw'). "
                        "(default: %(default)s)")

    args = parser.parse_args()
    project = args.project

    if "fetch" in args.action:
        # fetch not supported by build.py scripts, which will parse argv
        bs.RepoSet().fetch()
    branch = args.branch
    if (branch != "none"):
        bs.BuildSpecification().checkout(branch)

    # some build_local params are not handled by the Options, which is
    # used by other modules
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["project"]
    del vdict["branch"]
    if "fetch" in vdict["action"]:
        vdict["action"].remove("fetch")
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    if "clean" in args.action:
        bs.rmtree(bs.ProjectMap().build_root())

    graph = bs.DependencyGraph(project, o)
    ready = graph.ready_builds()
    pm = bs.ProjectMap()
    while ready:
        for bi in ready:
            graph.build_complete(bi)
            if bi.options.hardware != "builder":
                # we cannot hope to successfully run byt/ilk tests on
                # a developer's machine.
                print "Skipping build for specific hardware: " + str(bi)
                continue
            proj_build_dir = pm.project_build_dir(bi.project)
            script = proj_build_dir + "/build.py"
            if os.path.exists(script):
                bs.run_batch_command([sys.executable, 
                                      script] +  
                                     o.to_list())
        ready = graph.ready_builds()