Example #1
0
    def build(self):
        """Configure and build mesa once per target platform.

        Each platform (skl/bdw/bsw) gets its own build directory and its
        own CFLAGS from self._flags, and installs into a per-platform
        DESTDIR under /tmp/build_root.  Results are exported at the end.
        """
        configure_options = [
            "--enable-glx-tls",
            "--enable-gles1",
            "--enable-gles2",
            "--with-dri-drivers=i965,i915",

            # video drivers disabled: commit
            # bbe6f7f865cd4316b5f885507ee0b128a20686eb caused a build
            # failure unrelated to the intel mesa team.
            "--disable-xvmc",
            "--disable-vdpau",

            # gallium is covered by mesa-buildtest
            "--without-gallium-drivers"]

        original_cwd = os.getcwd()
        for platform in ["skl", "bdw", "bsw"]:
            platform_build_dir = self._build_dir + "/" + platform
            if not os.path.exists(platform_build_dir):
                os.makedirs(platform_build_dir)
            os.chdir(platform_build_dir)

            platform_flags = " ".join(self._flags[platform])
            env_settings = ["CFLAGS=-O2 " + platform_flags,
                            "CXXFLAGS=-O2 " + platform_flags,
                            "CC=ccache gcc",
                            "CXX=ccache g++"]
            autogen = self._src_dir + "/autogen.sh"
            bs.run_batch_command([autogen] + env_settings
                                 + configure_options)

            destdir = "/tmp/build_root/" + self.opts.arch + "/" + platform
            bs.run_batch_command(
                ["make", "-j", str(bs.cpu_count()), "install"],
                env={"DESTDIR": destdir})
        os.chdir(original_cwd)
        bs.Export().export_perf()
Example #2
0
    def tests(self, env=None):
        """Return a DeqpTrie of the GL CTS caselists for this hardware.

        NOTE(review): the incoming ``env`` parameter is replaced
        wholesale with a fresh environment built here.
        """
        br = self.pm.build_root()
        env = {"MESA_GLES_VERSION_OVERRIDE": "3.2",
               "LD_LIBRARY_PATH": bs.get_libdir(),
               "LIBGL_DRIVERS_PATH": bs.get_libgl_drivers(),
               "MESA_GL_VERSION_OVERRIDE": "4.6",
               "MESA_GLSL_VERSION_OVERRIDE": "460"}
        self.o.update_env(env)

        previous_dir = os.getcwd()
        os.chdir(self.pm.build_root() + "/bin/gl/modules")
        # glcts emits the *-cases.xml caselist files into the cwd
        bs.run_batch_command(["./glcts", "--deqp-runmode=xml-caselist"],
                             env=env)
        all_tests = bs.DeqpTrie()
        # Enable GL33 tests for supporting hw.
        # Note: ilk, g45, etc are all < GL30 and not supported in glcts.
        if self.o.hardware in ['snb', 'ivb', 'byt']:
            caselists = ["KHR-GL33-cases.xml",
                         "GTF-GL33-cases.xml"]
        else:
            caselists = ["KHR-GL46-cases.xml",
                         "GTF-GL46-cases.xml",
                         "KHR-NoContext-cases.xml"]
        for caselist in caselists:
            all_tests.add_xml(caselist)
        os.chdir(previous_dir)
        return all_tests
Example #3
0
    def build(self):
        """Configure and build mesa for the skl platform only.

        Installs into a skl DESTDIR under /tmp/build_root and exports
        the perf build results.
        """
        configure_options = [
            "--enable-glx-tls",
            "--enable-gles1",
            "--enable-gles2",
            "--with-dri-drivers=i965,i915",

            # video drivers disabled: commit
            # bbe6f7f865cd4316b5f885507ee0b128a20686eb caused a build
            # failure unrelated to the intel mesa team.
            "--disable-xvmc",
            "--disable-vdpau",

            # gallium is covered by mesa-buildtest
            "--without-gallium-drivers"]

        original_cwd = os.getcwd()
        skl_build_dir = self._build_dir + "/skl"
        if not os.path.exists(skl_build_dir):
            os.makedirs(skl_build_dir)
        os.chdir(skl_build_dir)

        joined_flags = " ".join(self._flags)
        env_settings = ["CFLAGS=-O2 " + joined_flags,
                        "CXXFLAGS=-O2 " + joined_flags,
                        "CC=ccache gcc",
                        "CXX=ccache g++"]
        autogen = self._src_dir + "/autogen.sh"
        bs.run_batch_command([autogen] + env_settings + configure_options)
        bs.run_batch_command(
            ["make", "-j", str(bs.cpu_count()), "install"],
            env={"DESTDIR": "/tmp/build_root/" + self.opts.arch + "/skl"})
        os.chdir(original_cwd)
        bs.Export().export_perf()
Example #4
0
 def tests(self, env):
     """Return the deqp caselist for this binary, filtered by the
     Android CTS master whitelist for the matching suite.

     Suites are matched by substring of self.binary; entries are applied
     in order, so "gles31" deliberately overrides "gles3" when both
     substrings match (preserving the original if-chain behavior).
     """
     # don't execute tests that are part of the other suite
     suite_table = [
         ("gles2", "/android/cts/master/gles2-master.txt",
          "dEQP-GLES2-cases.xml"),
         ("gles3", "/android/cts/master/gles3-master.txt",
          "dEQP-GLES3-cases.xml"),
         ("gles31", "/android/cts/master/gles31-master.txt",
          "dEQP-GLES31-cases.xml"),
         ("egl", "/android/cts/master/egl-master.txt",
          "dEQP-EGL-cases.xml"),
     ]
     whitelist_txt = None
     cases_xml = None
     for token, txt_suffix, xml_name in suite_table:
         if token in self.binary:
             whitelist_txt = (self.pm.project_source_dir("deqp") +
                              txt_suffix)
             cases_xml = xml_name
     os.chdir(os.path.dirname(self.binary))
     bs.run_batch_command([self.binary, "--deqp-runmode=xml-caselist"],
                          env=env)
     all_tests = bs.DeqpTrie()
     all_tests.add_xml(cases_xml)
     whitelist = bs.DeqpTrie()
     whitelist.add_txt(whitelist_txt)
     all_tests.filter_whitelist(whitelist)
     os.chdir(self.pm.project_build_dir())
     return all_tests
Example #5
0
def robust_clone(url, directory):
    """Mirror-clone *url* into *directory*, retrying until git succeeds.

    Loops forever on failure, sleeping 10s between attempts.
    """
    while True:
        try:
            bs.run_batch_command(["/usr/local/bin/git", "clone",
                                  "--mirror", url, directory])
            return
        except subprocess.CalledProcessError:
            print("Error: could not clone " + url, file=sys.stderr)
            time.sleep(10)
Example #6
0
 def build(self):
     """Build crucible via AutoBuilder, stage the binary and data
     directory into the build root, and export the results."""
     bs.AutoBuilder.build(self)
     bin_dir = self._build_root + "/bin/"
     if not os.path.exists(bin_dir):
         os.makedirs(bin_dir)
     # cp -n: never overwrite files already staged in the build root
     staging = [(self._build_dir + "/bin/crucible", bin_dir),
                (self._build_dir + "/data/", self._build_root)]
     for source, destination in staging:
         bs.run_batch_command(["cp", "-a", "-n", source, destination])
     bs.Export().export()
Example #7
0
def robust_clone(url, directory):
    """Mirror-clone *url* into *directory*; retry every 10s on failure."""
    cloned = False
    while not cloned:
        clone_cmd = ["/usr/local/bin/git", "clone", "--mirror",
                     url, directory]
        try:
            bs.run_batch_command(clone_cmd)
            cloned = True
        except subprocess.CalledProcessError:
            print("Error: could not clone " + url, file=sys.stderr)
            time.sleep(10)
Example #8
0
 def build(self):
     savedir = os.getcwd()
     pm = bs.ProjectMap()
     os.chdir(pm.project_source_dir())
     f = open(pm.project_build_dir("mesa") + "/deqp_hack.patch", "r")
     try:
         bs.run_batch_command(["patch", "-p1"], stdinput=f)
     except:
         print "WARN: failed to apply deqp patch"
     bs.AutoBuilder.build(self)
Example #9
0
def collate_tests(result_path, out_test_dir):
    src_test_dir = result_path + "/test"
    print "collecting tests from " + src_test_dir
    if not os.path.exists(src_test_dir):
        time.sleep(10)
    if not os.path.exists(src_test_dir):
        print "no test directory found: " + src_test_dir
        return

    cmd = ["cp", "-a", "-n", src_test_dir, out_test_dir]
    bs.run_batch_command(cmd)
Example #10
0
 def build(self):
     """Build crucible and stage its binary and data into the build
     root, then export."""
     bs.AutoBuilder.build(self)
     bin_dir = self._build_root + "/bin/"
     if not os.path.exists(bin_dir):
         os.makedirs(bin_dir)
     crucible_binary = self._build_dir + "/bin/crucible"
     # cp -n: never overwrite already-staged files
     bs.run_batch_command(["cp", "-a", "-n", crucible_binary, bin_dir])
     data_dir = self._build_dir + "/data/"
     bs.run_batch_command(["cp", "-a", "-n", data_dir, self._build_root])
     bs.Export().export()
def collate_tests(result_path, out_test_dir):
    src_test_dir = result_path + "/test"
    print "collecting tests from " + src_test_dir
    if not os.path.exists(src_test_dir):
        time.sleep(10)
    if not os.path.exists(src_test_dir):
        print "no test directory found: " + src_test_dir
        return
        
    cmd = ["cp", "-a", "-n",
           src_test_dir,
           out_test_dir]
    bs.run_batch_command(cmd)
Example #12
0
    def tests(self, env=None):
        """Build a DeqpTrie of the GLES CTS suites supported by this
        hardware, filtering each suite by its khronos mustpass list.

        NOTE(review): the incoming ``env`` parameter is replaced
        wholesale with a fresh environment built here.
        """
        br = self.pm.build_root()
        env = {"MESA_GLES_VERSION_OVERRIDE": "3.2",
               "LD_LIBRARY_PATH": bs.get_libdir(),
               "MESA_GL_VERSION_OVERRIDE": "4.6",
               "MESA_GLSL_VERSION_OVERRIDE": "460",
               "LIBGL_DRIVERS_PATH": bs.get_libgl_drivers()}
        self.o.update_env(env)

        savedir = os.getcwd()
        os.chdir(self.pm.build_root() + "/bin/es/modules")
        # glcts writes the *-cases.xml caselists into the cwd
        bs.run_batch_command(["./glcts", "--deqp-runmode=xml-caselist"],
                             env=env)

        must_pass_root = br + "/bin/es/modules/gl_cts/data/mustpass/"
        # caselist -> mustpass file (None means no filtering)
        must_pass_lookup = {
            "KHR-GLES2-cases.xml":
            "gles/khronos_mustpass/3.2.4.x/gles2-khr-master.txt",
            "KHR-GLES3-cases.xml":
            "gles/khronos_mustpass/3.2.4.x/gles3-khr-master.txt",
            "KHR-GLES31-cases.xml":
            "gles/khronos_mustpass/3.2.4.x/gles31-khr-master.txt",
            "KHR-GLES32-cases.xml":
            "gles/khronos_mustpass/3.2.4.x/gles32-khr-master.txt",
            "KHR-GLESEXT-cases.xml": None
        }

        suites = ["KHR-GLES2-cases.xml", "KHR-GLES3-cases.xml"]
        if self.supports_gles_31():
            suites.append("KHR-GLES31-cases.xml")
        if self.supports_gles_32():
            suites += ["KHR-GLES32-cases.xml", "KHR-GLESEXT-cases.xml"]

        all_tests = bs.DeqpTrie()
        for xml_name in suites:
            suite_trie = bs.DeqpTrie()
            suite_trie.add_xml(xml_name)
            must_pass = must_pass_lookup[xml_name]
            if must_pass:
                whitelist = bs.DeqpTrie()
                whitelist.add_txt(must_pass_root + must_pass)
                suite_trie.filter_whitelist(whitelist)
            all_tests.merge(suite_trie)

        os.chdir(savedir)
        return all_tests
Example #13
0
    def build(self):
        """Configure the cts with cmake/ninja, build it, and copy the
        resulting cts tree into the build root's bin directory."""
        pm = bs.ProjectMap()
        if not os.path.exists(self._build_dir):
            os.makedirs(self._build_dir)

        savedir = os.getcwd()
        os.chdir(self._build_dir)

        # default to a 32-bit build unless m64 was requested
        arch_flag = "-m64" if self._options.arch == "m64" else "-m32"
        env = {"CC": "ccache gcc",
               "CXX": "ccache g++",
               "CFLAGS": arch_flag,
               "CXXFLAGS": arch_flag}
        self._options.update_env(env)

        generate_cmd = (["cmake", "-GNinja", self._src_dir]
                        + self._extra_definitions)
        bs.run_batch_command(generate_cmd, env=env)

        bs.run_batch_command(["ninja", "-j" + str(bs.cpu_count())],
                             env=env)

        bs.run_batch_command(["mkdir", "-p", pm.build_root() + "/bin"])
        bs.run_batch_command(["cp", "-a", self._build_dir + "/cts",
                              pm.build_root() + "/bin"])

        os.chdir(savedir)

        bs.Export().export()
Example #14
0
    def tests(self, env):
        """Provide a DeqpTrie with all vulkan cts tests, filtered by the
        1.0.2 mustpass list."""
        deqp_dir = os.path.dirname(self.binary())
        os.chdir(deqp_dir)
        caselist_cmd = ["./" + os.path.basename(self.binary()),
                        "--deqp-runmode=xml-caselist"]
        bs.run_batch_command(caselist_cmd, env=env)
        trie = bs.DeqpTrie()
        trie.add_xml("dEQP-VK-cases.xml")
        os.chdir(self.pm.project_build_dir())

        whitelist = bs.DeqpTrie()
        mustpass = (self.pm.project_source_dir("vulkancts") +
                    "/external/vulkancts/mustpass/1.0.2/vk-default.txt")
        whitelist.add_txt(mustpass)
        trie.filter_whitelist(whitelist)

        return trie
Example #15
0
    def build(self):
        """Build glslc with cmake/ninja (clang toolchain) and stage the
        binary into the build root."""
        # shaderc expects its dependencies checked out under third_party/;
        # point them at the shared sibling checkouts instead.
        third_party_links = [
            (self._src_dir + "/third_party/glslang", "../../glslang"),
            (self._src_dir + "/third_party/gmock-1.7.0", "../../gmock"),
            (self._src_dir + "/third_party/gtest", "../../gtest"),
        ]
        for link_path, target in third_party_links:
            if not os.path.exists(link_path):
                os.symlink(target, link_path)

        if not os.path.exists(self._build_dir):
            os.makedirs(self._build_dir)
        os.chdir(self._build_dir)
        btype = "RelDeb" if self._options.type == "debug" else "Release"
        flags = "-m32" if self._options.arch == "m32" else "-m64"
        bs.run_batch_command(["cmake", "-GNinja",
                              "-DCMAKE_BUILD_TYPE=" + btype,
                              "-DCMAKE_C_FLAGS=" + flags,
                              "-DCMAKE_CXX_FLAGS=" + flags,
                              "-DCMAKE_C_COMPILER=clang",
                              "-DCMAKE_CXX_COMPILER=clang++",
                              "-DCMAKE_INSTALL_PREFIX:PATH=" + self._build_root,
                              ".."])
        bs.run_batch_command(["ninja"])
        bin_dir = self._build_root + "/bin/"
        if not os.path.exists(bin_dir):
            os.makedirs(bin_dir)

        bs.run_batch_command(["cp", "-a", "-n",
                              self._build_dir + "/glslc/glslc",
                              bin_dir])
        bs.Export().export()
Example #16
0
    def build(self):
        """Build deqp/vulkancts with CMakeBuilder and rsync the modules
        tree into the build root's /opt/deqp/ for export."""
        # vulkan support is inferred from the presence of spirv-tools
        has_vulkan = os.path.exists(self._src_dir + "/external/spirv-tools")
        if has_vulkan:
            # Replace any non-symlink checkout of each external dep with
            # a symlink to the shared sibling checkout.  rmtree only runs
            # on real directories; symlink only if nothing is there.
            spirvtools = self._src_dir + "/external/spirv-tools/src"
            if not os.path.islink(spirvtools):
                bs.rmtree(spirvtools)
            if not os.path.exists(spirvtools):
                os.symlink("../../../spirvtools", spirvtools)
            glslang = self._src_dir + "/external/glslang/src"
            if not os.path.islink(glslang):
                bs.rmtree(glslang)
            if not os.path.exists(glslang):
                os.symlink("../../../glslang", glslang)
            spirvheaders_dir = self._src_dir + "/external/spirv-headers"
            if not os.path.exists(spirvheaders_dir):
                os.makedirs(spirvheaders_dir)
            spirvheaders = spirvheaders_dir + "/src"
            if not os.path.islink(spirvheaders):
                bs.rmtree(spirvheaders)
            if not os.path.exists(spirvheaders):
                os.symlink("../../../spirvheaders", spirvheaders)

            # change spirv-tools and glslang to use the commits specified
            # in the vulkancts sources
            sys.path = [os.path.abspath(os.path.normpath(s)) for s in sys.path]
            # drop deqp entries so the import below resolves to the
            # fetch_sources module appended from this source tree
            sys.path = [
                gooddir for gooddir in sys.path if "deqp" not in gooddir
            ]
            sys.path.append(self._src_dir + "/external/")
            fetch_sources = importlib.import_module("fetch_sources", ".")
            for package in fetch_sources.PACKAGES:
                try:
                    # only git-backed packages carry a revision to pin
                    if not isinstance(package, fetch_sources.GitRepo):
                        continue
                except:
                    continue
                repo_path = self._src_dir + "/external/" + package.baseDir + "/src/"
                print "Cleaning: " + repo_path + " : " + package.revision
                savedir = os.getcwd()
                os.chdir(repo_path)
                # discard all local changes/untracked files before checkout
                bs.run_batch_command(["git", "clean", "-xfd"])
                bs.run_batch_command(["git", "reset", "--hard", "HEAD"])
                os.chdir(savedir)
                print "Checking out: " + repo_path + " : " + package.revision
                repo = git.Repo(repo_path)
                repo.git.checkout(package.revision, force=True)

        bs.CMakeBuilder.build(self)
        dest = self._pm.build_root() + "/opt/deqp/"
        if not os.path.exists(dest):
            os.makedirs(dest)
        bs.run_batch_command([
            "rsync", "-rlptD",
            self._pm.project_source_dir() + "/build_" + self._o.arch +
            "/modules", dest
        ])
        bs.Export().export()
Example #17
0
    def build(self):
        """Run a scons build of the source tree, bracketed by git clean.

        The scons build is broken and will occasionally fail if
        temporaries are still around, so use git's nuclear clean method
        instead of the clean targets — both before and after the build.
        """
        previous_dir = os.getcwd()
        os.chdir(self.src_dir)

        bs.run_batch_command(["git", "clean", "-dfx"])

        scons_cmd = ["scons", "-j", str(bs.cpu_count())]
        bs.run_batch_command(scons_cmd)

        bs.run_batch_command(["git", "clean", "-dfx"])
        os.chdir(previous_dir)
Example #18
0
    def build(self):
        """Scons build with the options environment applied, bracketed
        by git clean.

        The scons build is broken and will occasionally fail if
        temporaries are still around, so git's nuclear clean method is
        used instead of the clean targets.
        """
        previous_dir = os.getcwd()
        os.chdir(self.src_dir)

        bs.run_batch_command(["git", "clean", "-dfx"])

        build_env = {}
        bs.Options().update_env(build_env)
        bs.run_batch_command(["scons", "-j", str(bs.cpu_count())],
                             env=build_env)

        bs.run_batch_command(["git", "clean", "-dfx"])
        os.chdir(previous_dir)
Example #19
0
    def build(self):
        """Build glslc with cmake/ninja (default toolchain, tests
        skipped) and stage the binary into the build root."""
        # shaderc expects its dependencies under third_party/; link them
        # to the shared sibling checkouts.  Order matters: spirv-tools
        # must exist before the nested spirv-headers link is created.
        third_party_links = [
            ("/third_party/glslang", "../../glslang"),
            ("/third_party/gmock-1.7.0", "../../gmock"),
            ("/third_party/gtest", "../../gtest"),
            ("/third_party/spirv-tools", "../../spirvtools"),
            ("/third_party/spirv-tools/external/spirv-headers",
             "../../spirvheaders"),
        ]
        for suffix, target in third_party_links:
            link_path = self._src_dir + suffix
            if not os.path.exists(link_path):
                os.symlink(target, link_path)

        if not os.path.exists(self._build_dir):
            os.makedirs(self._build_dir)
        os.chdir(self._build_dir)
        btype = "RelDeb" if self._options.type == "debug" else "Release"
        flags = "-m32" if self._options.arch == "m32" else "-m64"
        bs.run_batch_command(["cmake", "-GNinja",
                              "-DCMAKE_BUILD_TYPE=" + btype,
                              "-DSHADERC_SKIP_TESTS=1",
                              "-DCMAKE_C_FLAGS=" + flags,
                              "-DCMAKE_CXX_FLAGS=" + flags,
                              "-DCMAKE_INSTALL_PREFIX:PATH=" + self._build_root,
                              ".."])
        bs.run_batch_command(["ninja"])
        bin_dir = self._build_root + "/bin/"
        if not os.path.exists(bin_dir):
            os.makedirs(bin_dir)

        bs.run_batch_command(["cp", "-a", "-n",
                              self._build_dir + "/glslc/glslc", bin_dir])
        bs.Export().export()
Example #20
0
# Walk back through history until the known-good revision is found; the
# collected commits bound the bisection range.
for commit in proj_repo.iter_commits(max_count=5000):
    commits.append(commit)
    print commit.hexsha
    if good_rev in commit.hexsha:
        found = True
        break
assert(found)

# retest build, in case expected failures has been updated
# copy build root to bisect directory
bisect_dir = args.dir
if bisect_dir == "":
    bisect_dir = results_dir + "/bisect/" + datetime.datetime.now().isoformat()

cmd = ["rsync", "-rlptD", "--exclude", "/*test/", "/".join(dirnames[:-1]) +"/", bisect_dir]
bs.run_batch_command(cmd)

if not bs.retest_failures(args.result_path, bisect_dir):
    print "ERROR: retest failed"

# make sure there is enough time for the test files to sync to nfs
time.sleep(20)
new_failures = bs.TestLister(bisect_dir + "/test/")

# if the retest produced no failures, there is nothing to bisect
if not new_failures.Tests():
    print "All tests fixed"
    sys.exit(0)

print "Found failures:"
new_failures.Print()
    def run(self):
        """Service loop: keep the local git mirrors fresh, restarting
        the inner loop whenever build_specification.xml changes on
        disk (detected via checksum)."""
        signal.signal(signal.SIGALRM, signal_handler)
        signal.signal(signal.SIGINT, signal_handler_quit)
        signal.signal(signal.SIGTERM, signal_handler_quit)

        # running a service through intel's proxy requires some
        # annoying settings.
        os.environ["GIT_PYTHON_GIT_EXECUTABLE"] = "/usr/local/bin/git"
        # without this, git-remote-https spins at 100%
        os.environ["http_proxy"] = "http://proxy.jf.intel.com:911/"
        os.environ["https_proxy"] = "http://proxy.jf.intel.com:911/"

        # if ProjectMap cannot be constructed from the current cwd, fall
        # back to a known checkout location
        try:
            bs.ProjectMap()
        except:
            sys.argv[0] = "/var/lib/git/mesa_jenkins/foo.py"

        pm = bs.ProjectMap()
        spec_file = pm.source_root() + "/build_specification.xml"
        new_spec_hash = None

        while True:
            orig_spec_hash = self.file_checksum(spec_file)
            # None only on the first pass; afterwards reaching this point
            # means the spec file's checksum changed
            if new_spec_hash is not None:
                print "Build Specification updated"
            new_spec_hash = self.file_checksum(spec_file)

            # fetch repeatedly until the spec file changes
            while new_spec_hash == orig_spec_hash:
                buildspec = bs.ProjectMap().build_spec()

                repo_dir = "/var/lib/git/"

                # build up a list of git repo objects for all known repos.  If the
                # origin or the remotes are not already cloned, clone them.
                repos = []
                repo_tags = buildspec.find("repos")
                for tag in repo_tags:
                    url = tag.attrib["repo"]
                    project = tag.tag
                    origin_dir = repo_dir + project + "/origin"
                    if not os.path.exists(origin_dir):
                        self.robust_clone(url, origin_dir)
                        bs.run_batch_command(["touch", origin_dir + "/git-daemon-export-ok"])
                    repos.append(git.Repo(origin_dir))
                    for a_remote in tag.findall("remote"):
                        remote_dir = repo_dir + project + "/" + a_remote.attrib["name"]
                        if not os.path.exists(remote_dir):
                            self.robust_clone(a_remote.attrib["repo"], remote_dir)
                            bs.run_batch_command(["touch", remote_dir + "/git-daemon-export-ok"])
                        repos.append(git.Repo(remote_dir))

                # fetch each repo, bounded by a 5 minute alarm so a hung
                # remote cannot stall the service; failures are logged
                # and ignored
                for repo in repos:
                    try:
                        signal.alarm(300)   # 5 minutes
                        repo.git.fetch()
                        signal.alarm(0)
                    except git.GitCommandError as e:
                        print "error fetching, ignoring: " + str(e)
                        signal.alarm(0)
                    except AssertionError as e:
                        print "assertion while fetching: " + str(e)
                        signal.alarm(0)
                    except TimeoutException as e:
                        print str(e)
                        signal.alarm(0)
                # pause a bit before fetching the next round
                time.sleep(20)
                self.robust_update()
                new_spec_hash = self.file_checksum(spec_file)
Example #22
0
    print "ERROR: acceptable projects: " + ",".join(rev_hash.keys())
    sys.exit(-1)

pm = bs.ProjectMap()
spec_xml = pm.build_spec()
results_dir = spec_xml.find("build_master").attrib["results_dir"]
retest_dir = args.dir
# default to a timestamped directory under the results tree
if retest_dir == "":
    retest_dir = results_dir + "/update/" + datetime.datetime.now().isoformat()

if rev_hash[blame[0]] == blame[1]:
    # rsync to save build if the blame is the same as the build
    src_dir = "/".join(dirnames[:-1]) + "/"
    dest_dir = bs.convert_rsync_path(retest_dir)
    cmd = ["rsync", "-rlptD", "--exclude", "/*test/", src_dir, dest_dir]
    bs.run_batch_command(cmd)
else:
    rev_hash[blame[0]] = blame[1]

# retest the set of failed tests on the specified blame revision
repos = bs.RepoSet()
_revspec = bs.RevisionSpecification.from_xml_file(
    os.path.join(os.path.abspath(args.result_path), 'revisions.xml'))
_revspec.checkout()
# re-read the specification after checkout to pick up current state
_revspec = bs.RevisionSpecification()

# use the full sha for the blame, so it can be looked up in a map when
# processing the config file
blame[1] = str(repos.repo(blame[0]).git.rev_parse(blame[1]))

if not bs.retest_failures(args.result_path, retest_dir):
Example #23
0
    def test(self):
        """Run the GL CTS through piglit, filter the junit results, and
        publish them where jenkins can read them."""
        # todo: now that there is more than one component that needs
        # to call mesa_version, it should be moved to a more sharable
        # location
        o = bs.Options()
        pm = bs.ProjectMap()
        conf_file = bs.get_conf_file(o.hardware, o.arch, "cts-test")

        savedir = os.getcwd()
        cts_dir = self.build_root + "/bin/cts"
        os.chdir(cts_dir)

        # invoke piglit
        self.env["PIGLIT_CTS_BIN"] = cts_dir + "/glcts"
        out_dir = self.build_root + "/test/" + o.hardware

        include_tests = []
        if o.retest_path:
            # limit the run to the tests that failed in the referenced
            # result set, translated back into piglit test names
            testlist = bs.TestLister(o.retest_path + "/test/")
            for atest in testlist.Tests(project="cts-test"):
                test_name_good_chars = re.sub('[_ !:=]', ".", atest.test_name)
                # drop the spec
                test_name = ".".join(test_name_good_chars.split(".")[1:])
                include_tests = include_tests + ["--include-tests", test_name]
            if not include_tests:
                # we were supposed to retest failures, but there were none
                return

        # older platforms cannot run the es3 suite
        extra_excludes = []
        if ("ilk" in o.hardware or "g33" in o.hardware
            or "g45" in o.hardware or "g965" in o.hardware):
            extra_excludes = extra_excludes + ["--exclude-tests", "es3-cts"]
        cmd = [self.build_root + "/bin/piglit",
               "run",
               #"-p", "gbm",
               "-b", "junit",
               "--config", conf_file,
               "-c",
               "--exclude-tests", "es31-cts",
               "--exclude-tests", "esext-cts",
               "--junit_suffix", "." + o.hardware + o.arch] + \
               extra_excludes + \
               include_tests + ["cts", out_dir]

        # expected_return_code=None: test failures must not abort the run
        bs.run_batch_command(cmd, env=self.env,
                             expected_return_code=None,
                             streamedOutput=True)
        os.chdir(savedir)
        single_out_dir = self.build_root + "/../test"
        if not os.path.exists(single_out_dir):
            os.makedirs(single_out_dir)

        if os.path.exists(out_dir + "/results.xml"):
            # Uniquely name all test files in one directory, for
            # jenkins
            filename_components = ["/piglit-cts",
                                   o.hardware,
                                   o.arch]
            if o.shard != "0":
                # only put the shard suffix on for non-zero shards.
                # Having _0 suffix interferes with bisection.
                filename_components.append(o.shard)

            revisions = bs.RepoSet().branch_missing_revisions()
            print "INFO: filtering tests from " + out_dir + "/results.xml"
            self.filter_tests(revisions,
                              out_dir + "/results.xml",
                              single_out_dir + "_".join(filename_components) + ".xml")

            # create a copy of the test xml in the source root, where
            # jenkins can access it.
            cmd = ["cp", "-a", "-n",
                   self.build_root + "/../test", pm.source_root()]
            bs.run_batch_command(cmd)
            bs.Export().export_tests()
        else:
            print "ERROR: no results at " + out_dir + "/results.xml"

        bs.PiglitTester().check_gpu_hang()
Example #24
0
def main():
    """Service entry point: mirror all repos from the build
    specification, refetching until the spec file changes on disk."""
    # Write the PID file
    with open('/var/run/fetch_mesa_mirrors.pid', 'w') as f:
        f.write(str(os.getpid()))

    signal.signal(signal.SIGALRM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler_quit)
    signal.signal(signal.SIGTERM, signal_handler_quit)

    # running a service through intel's proxy requires some
    # annoying settings.
    os.environ["GIT_PYTHON_GIT_EXECUTABLE"] = "/usr/local/bin/git"
    # without this, git-remote-https spins at 100%
    os.environ["http_proxy"] = "http://proxy.jf.intel.com:911/"
    os.environ["https_proxy"] = "http://proxy.jf.intel.com:911/"

    # if ProjectMap cannot be constructed from the current cwd, fall
    # back to a known checkout location
    try:
        bs.ProjectMap()
    except:
        sys.argv[0] = "/var/lib/git/mesa_jenkins/foo.py"

    pm = bs.ProjectMap()
    spec_file = pm.source_root() + "/build_specification.xml"
    new_spec_hash = None

    while True:
        orig_spec_hash = file_checksum(spec_file)
        # None only on the first pass; afterwards reaching this point
        # means the spec file's checksum changed
        if new_spec_hash is not None:
            print("Build Specification updated")
        new_spec_hash = file_checksum(spec_file)

        # fetch repeatedly until the spec file changes
        while new_spec_hash == orig_spec_hash:
            buildspec = bs.ProjectMap().build_spec()

            repo_dir = "/var/lib/git/"

            # build up a list of git repo objects for all known repos.  If the
            # origin or the remotes are not already cloned, clone them.
            repos = []
            repo_tags = buildspec.find("repos")
            for tag in repo_tags:
                url = tag.attrib["repo"]
                project = tag.tag
                origin_dir = repo_dir + project + "/origin"
                if not os.path.exists(origin_dir):
                    robust_clone(url, origin_dir)
                    bs.run_batch_command(["touch", origin_dir + "/git-daemon-export-ok"])
                repos.append(git.Repo(origin_dir))
                for a_remote in tag.findall("remote"):
                    remote_dir = repo_dir + project + "/" + a_remote.attrib["name"]
                    if not os.path.exists(remote_dir):
                        robust_clone(a_remote.attrib["repo"], remote_dir)
                        bs.run_batch_command(["touch", remote_dir + "/git-daemon-export-ok"])
                    repos.append(git.Repo(remote_dir))

            # fetch each repo, bounded by a 5 minute alarm so a hung
            # remote cannot stall the service; failures are logged and
            # ignored
            for repo in repos:
                try:
                    signal.alarm(300)   # 5 minutes
                    repo.git.fetch()
                    signal.alarm(0)
                except git.GitCommandError as e:
                    print("error fetching, ignoring: " + str(e), file=sys.stderr)
                    signal.alarm(0)
                except AssertionError as e:
                    print("assertion while fetching: " + str(e), file=sys.stderr)
                    signal.alarm(0)
                except TimeoutException as e:
                    print (str(e), file=sys.stderr)
                    signal.alarm(0)
            # pause a bit before fetching the next round
            time.sleep(20)
            robust_update()
            new_spec_hash = file_checksum(spec_file)
Example #25
0
    def test(self):
        """Run the crucible vulkan test suite and export junit results.

        Known-flaky tests and per-platform broken tests are excluded;
        older platforms (ivb/byt) also run single-threaded.
        """
        pm = bs.ProjectMap()
        build_root = pm.build_root()
        env = { "LD_LIBRARY_PATH" : build_root + "/lib"}
        o = bs.Options()
        o.update_env(env)
        br = bs.ProjectMap().build_root()
        out_dir = br + "/../test"
        if not path.exists(out_dir):
            os.makedirs(out_dir)
        out_xml = out_dir + "/piglit-crucible_" + o.hardware + "_" + o.arch + ".xml"
        include_tests = []
        if o.retest_path:
            include_tests = bs.TestLister(o.retest_path + "/test/").RetestIncludes("crucible-test")

        # flaky
        excludes = ["!func.query.timestamp"]
        parallelism = []

        if "hsw" in o.hardware:
            # issue 4
            excludes += ["!func.copy.copy-buffer.large",
                         "!func.interleaved-cmd-buffers.end1*",
                         "!func.miptree.d32-sfloat.aspect-depth.view*",
                         "!func.miptree.r8g8b8a8-unorm.aspect-color.view*",
                         "!func.miptree.s8-uint.aspect-stencil*",
                         "!func.renderpass.clear.color08",
                         "!func.ssbo.interleve"]
        if "ivb" in o.hardware:
            # issue 5
            excludes += ["!func.depthstencil*",
                         "!func.miptree.r8g8b8a8-unorm.aspect-color.view*",
                         "!func.miptree.s8-uint.aspect-stencil*",
                         "!func.miptree.d32-sfloat.aspect-depth.view*",
                         "!stress.lots-of-surface-state.fs.static"]
            parallelism = ['-j', '1']

        if "byt" in o.hardware:
            # issue 6
            excludes += ["!func.miptree.d32-sfloat.aspect-depth.view-3d.levels0*",
                         "!func.depthstencil*",
                         "!func.miptree.s8-uint.aspect-stencil*",
                         "!stress.lots-of-surface-state.fs.static"]
            parallelism = ['-j', '1']

        # Fix: this condition was previously the string literal
        # "bsw in o.hardware", which is always truthy, so the exclude
        # was silently applied on every platform instead of only bsw.
        if "bsw" in o.hardware:
            excludes += ["!func.event.cmd_buffer"] # intermittent fail/crash

        if "bxt" in o.hardware:
            excludes += ["!func.miptree.s8-uint.aspect-stencil*",
                         "!stress.lots-of-surface-state.fs.static"]

        # expected_return_code=None: test failures must not abort the run
        bs.run_batch_command([ br + "/bin/crucible",
                               "run", "--fork", "--log-pids",
                               "--junit-xml=" + out_xml] + parallelism + include_tests + excludes,
                             env=env,
                             expected_return_code=None)
        post_process_results(out_xml)
        bs.run_batch_command(["cp", "-a", "-n",
                              out_dir, pm.source_root()])

        bs.check_gpu_hang()
        bs.Export().export_tests()
Example #26
0
def main():
    """Command-line entry point for building/testing projects locally.

    Parses arguments, optionally fetches repos and checks out a branch,
    merges the parsed options into a bs.Options object, then repeatedly
    builds whatever the dependency graph reports as ready.
    """
    parser = argparse.ArgumentParser(description='Build projects locally.')

    # TODO: provide a pull action to update the repos
    parser.add_argument('--action', type=str, default=["build"],
                        choices=CsvChoice('fetch', 'build', 'clean', 'test'),
                        action=CsvAction,
                        help="Action to recurse with. 'build', 'clean' "\
                        "or 'test'. (default: %(default)s)")

    parser.add_argument('--project', dest='project', type=str, default="mesa",
                        help='project to build. (default: %(default)s)')
    parser.add_argument('--arch', dest='arch', type=str, 
                        default='m64', choices=['m64', 'm32'],
                        help='arch to build. (default: %(default)s)')
    parser.add_argument('--config', type=str, default="release", 
                        choices=['release', 'debug'],
                        help="Release or Debug build. (default: %(default)s)")

    parser.add_argument('--type', type=str, default="developer",
                        choices=['developer', 'percheckin', 
                                 'daily', 'release'],
                        help="category of tests to run. "\
                        "(default: %(default)s)")

    parser.add_argument('--branch', type=str, default="none",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")
    parser.add_argument('--env', type=str, default="",
                        help="If specified, overrides environment variable settings"
                        "EG: 'LIBGL_DEBUG=1 INTEL_DEBUG=perf'")
    parser.add_argument('--hardware', type=str, default='builder',
                        help="The hardware to be targeted for test "
                        "('builder', 'snbgt1', 'ivb', 'hsw', 'bdw'). "
                        "(default: %(default)s)")

    args = parser.parse_args()
    project = args.project

    if "fetch" in args.action:
        # fetch not supported by build.py scripts, which will parse argv
        bs.RepoSet().fetch()
    branch = args.branch
    if (branch != "none"):
        bs.BuildSpecification().checkout(branch)

    # some build_local params are not handled by the Options, which is
    # used by other modules
    o = bs.Options(["bogus"])
    vdict = vars(args)
    # project/branch were consumed above; fetch was handled here, so
    # remove them before handing the remaining options downstream.
    del vdict["project"]
    del vdict["branch"]
    if "fetch" in vdict["action"]:
        vdict["action"].remove("fetch")
    o.__dict__.update(vdict)
    # child build.py scripts re-parse sys.argv, so rewrite it with the
    # merged option list.
    sys.argv = ["bogus"] + o.to_list()

    if "clean" in args.action:
        bs.rmtree(bs.ProjectMap().build_root())

    # Walk the dependency graph: each pass runs every build that became
    # ready, until no more builds remain.
    graph = bs.DependencyGraph(project, o)
    ready = graph.ready_builds()
    pm = bs.ProjectMap()
    while ready:
        for bi in ready:
            graph.build_complete(bi)
            if bi.options.hardware != "builder":
                # we cannot hope to successfully run byt/ilk tests on
                # a developer's machine.
                print "Skipping build for specific hardware: " + str(bi)
                continue
            proj_build_dir = pm.project_build_dir(bi.project)
            script = proj_build_dir + "/build.py"
            # projects without a build.py are silently skipped
            if os.path.exists(script):
                bs.run_batch_command([sys.executable, 
                                      script] +  
                                     o.to_list())
        ready = graph.ready_builds()
Example #27
0
 def clean(self):
     """Return the checkout to a pristine state and drop cloned repos."""
     # Remove untracked files first, then discard local modifications;
     # the git command order matches the original workflow.
     for git_cmd in (["git", "clean", "-xfd"],
                     ["git", "reset", "--hard", "HEAD"]):
         bs.run_batch_command(git_cmd)
     bs.rmtree("repos")
Example #28
0
    def test(self):
        """Run the (gles) CTS via piglit and publish junit results.

        Skips unsupported hardware/mesa combinations, builds a piglit
        command line with per-hardware and per-mesa-version exclusions,
        runs it, then filters and copies the results where Jenkins can
        read them.
        """
        o = bs.Options()
        pm = bs.ProjectMap()

        mesa_version = bs.PiglitTester().mesa_version()
        if o.hardware == "bxt" or o.hardware == "kbl":
            if "11.0" in mesa_version:
                print "WARNING: bxt/kbl not supported by stable mesa"
                return

        conf_file = bs.get_conf_file(o.hardware, o.arch, "cts-test")

        # piglit must be invoked from the cts binary directory; restore
        # the cwd after the run.
        savedir = os.getcwd()
        cts_dir = self.build_root + "/bin/cts"
        os.chdir(cts_dir)

        # invoke piglit
        self.env["PIGLIT_CTS_BIN"] = cts_dir + "/glcts"
        out_dir = self.build_root + "/test/" + o.hardware

        include_tests = []
        if o.retest_path:
            testlist = bs.TestLister(o.retest_path + "/test/")
            include_tests = testlist.RetestIncludes(project="cts-test")
            if not include_tests:
                # we were supposed to retest failures, but there were none
                return

        # hardware too old for the es3/es31 suites is excluded from them
        extra_excludes = []
        if ("ilk" in o.hardware or "g33" in o.hardware
            or "g45" in o.hardware or "g965" in o.hardware):
            extra_excludes += ["--exclude-tests", "es3-cts",
                               "--exclude-tests", "es31-cts"]

        if ("snb" in o.hardware or
            "ivb" in o.hardware or
            "byt" in o.hardware or
            "hsw" in o.hardware):
            extra_excludes += ["--exclude-tests", "es31-cts"]

        if "11.1" in mesa_version or "11.0" in mesa_version:
            extra_excludes += ["--exclude-tests", "es31-cts"]

        cmd = [self.build_root + "/bin/piglit",
               "run",
               #"-p", "gbm",
               "-b", "junit",
               "--config", conf_file,
               "-c",
               "--exclude-tests", "esext-cts",
               "--junit_suffix", "." + o.hardware + o.arch] + \
               extra_excludes + \
               include_tests + ["cts", out_dir]

        # piglit's exit status is ignored; failures are reported via the
        # junit xml instead.
        bs.run_batch_command(cmd, env=self.env,
                             expected_return_code=None,
                             streamedOutput=True)
        os.chdir(savedir)
        single_out_dir = self.build_root + "/../test"
        if not os.path.exists(single_out_dir):
            os.makedirs(single_out_dir)

        if os.path.exists(out_dir + "/results.xml"):
            # Uniquely name all test files in one directory, for
            # jenkins
            filename_components = ["/piglit-cts",
                                   o.hardware,
                                   o.arch]
            if o.shard != "0":
                # only put the shard suffix on for non-zero shards.
                # Having _0 suffix interferes with bisection.
                filename_components.append(o.shard)

            revisions = bs.RepoSet().branch_missing_revisions()
            print "INFO: filtering tests from " + out_dir + "/results.xml"
            self.filter_tests(revisions,
                              out_dir + "/results.xml",
                              single_out_dir + "_".join(filename_components) + ".xml")

            # create a copy of the test xml in the source root, where
            # jenkins can access it.
            cmd = ["cp", "-a", "-n",
                   self.build_root + "/../test", pm.source_root()]
            bs.run_batch_command(cmd)
            bs.Export().export_tests()
        else:
            print "ERROR: no results at " + out_dir + "/results.xml"

        bs.PiglitTester().check_gpu_hang()
Example #29
0
def collate_tests(result_path, out_test_dir, make_tar=False):
    src_test_dir = result_path + "/test"
    print "collecting tests from " + src_test_dir
    i = 0
    while i < 10 and not os.path.exists(src_test_dir):
        i += 1
        print "sleeping, waiting for test directory: " + src_test_dir
        time.sleep(10)
    if not os.path.exists(src_test_dir):
        print "no test directory found: " + src_test_dir
        return

    cmd = ["cp", "-a", "-n", src_test_dir, out_test_dir]
    bs.run_batch_command(cmd)

    # Junit files must have a recent time stamp or else Jenkins will
    # not parse them.
    for a_file in os.listdir(out_test_dir + "/test"):
        os.utime(out_test_dir + "/test/" + a_file, None)

    revisions_path = bs.ProjectMap().source_root() + "/revisions.txt"
    with open(revisions_path, "w") as revs:
        revs.write(create_revision_table())

    if not make_tar:
        return

    # else generate a results.tgz that can be used with piglit summary
    save_dir = os.getcwd()
    os.chdir("/tmp/")
    tar = tarfile.open(out_test_dir + "/test/results.tar", "w:")
    shards = {}
    for a_file in os.listdir(out_test_dir + "/test"):
        if "piglit" not in a_file:
            continue
        if ":" in a_file:
            shard_base_name = "_".join(a_file.split("_")[:-1])
            if not shards.has_key(shard_base_name):
                shards[shard_base_name] = []
            shards[shard_base_name].append(a_file)
            continue
        t = et.parse(out_test_dir + "/test/" + a_file)
        r = t.getroot()
        strip_passes(r)
        t.write(a_file)
        # occasionally, t.write() finishes, but the file is not available
        t = None
        for _ in range(0, 5):
            try:
                tar.add(a_file)
                break
            except:
                print "WARN: failed to add file: " + a_file
                time.sleep(10)
        os.unlink(a_file)
    for (shard, files) in shards.items():
        t = et.parse(out_test_dir + "/test/" + files[0])
        r = t.getroot()
        strip_passes(r)
        suite = r.find("testsuite")
        for shards in files[1:]:
            st = et.parse(out_test_dir + "/test/" + shards)
            sr = st.getroot()
            strip_passes(sr)
            for a_suite in sr.findall("testsuite"):
                for a_test in a_suite.findall("testcase"):
                    suite.append(a_test)
        shard_file = shard + ".xml"
        t.write(shard_file)
        # occasionally, t.write() finishes, but the file is not available
        t = None
        for _ in range(0, 5):
            try:
                tar.add(shard_file)
                break
            except:
                print "WARN: failed to add file: " + shard_file
                time.sleep(10)
        os.unlink(shard_file)

    if os.path.exists(out_test_dir + "/test/logs"):
        save_dir = os.getcwd()
        os.chdir(out_test_dir + "/test")
        tar.add("logs")
        os.chdir(save_dir)

    tar.close()
    bs.run_batch_command(["xz", "-9", out_test_dir + "/test/results.tar"])
    os.chdir(save_dir)

    tl = bs.TestLister(out_test_dir + "/test")
    tests = tl.Tests()
    if tests:
        with open("test_summary.txt", "w") as fh:
            for atest in tests:
                atest.PrettyPrint(fh)
            fh.flush()
            # end users report that sometimes the summary is empty
            os.fsync(fh.fileno())
            fh.close()
        time.sleep(10)

        tc = bs.TestLister(out_test_dir + "/test", include_passes=True)
        all_tests = len(tc.Tests())

        failed_tests = 0
        tf = bs.TestLister(out_test_dir + "/test")
        failed_tests = len(tf.Tests())
        passed_tests = all_tests - failed_tests
        percent = (passed_tests * 100) / all_tests
        percentage = format(percent, '.2f')
        if all_tests:
            with open("test_summary.txt", "a") as fh:
                fh.write("""

        Tests passed: {} / {} ({}%)
                """.format(passed_tests, all_tests, percentage))
Example #30
0
    def test(self):
        """Run the crucible Vulkan test suite and export junit results.

        Configures the anv ICD environment for the build arch, assembles
        per-hardware exclusion filters, runs crucible, post-processes
        the junit xml, and exports the results.
        """
        project_map = bs.ProjectMap()
        root = project_map.build_root()

        # Pick the ICD manifest matching the build architecture.
        arch_opts = bs.Options()
        if arch_opts.arch == "m32":
            icd_name = "intel_icd.i686.json"
        elif arch_opts.arch == "m64":
            icd_name = "intel_icd.x86_64.json"
        test_env = {
            "LD_LIBRARY_PATH": root + "/lib",
            "VK_ICD_FILENAMES": root + "/share/vulkan/icd.d/" + icd_name,
            "ANV_ABORT_ON_DEVICE_LOSS": "true"
        }
        o = bs.Options()
        o.update_env(test_env)

        br = bs.ProjectMap().build_root()
        out_dir = br + "/../test"
        if not path.exists(out_dir):
            os.makedirs(out_dir)
        out_xml = "%s/piglit-crucible_%s_%s.xml" % (out_dir, o.hardware, o.arch)

        include_tests = []
        if o.retest_path:
            lister = bs.TestLister(o.retest_path + "/test/")
            include_tests = lister.RetestIncludes("crucible-test")

        # flaky tests are always excluded
        skip_filters = ["!func.query.timestamp", "!func.ssbo.interleve"]
        job_args = []

        if "hsw" in o.hardware:
            # issue 4
            skip_filters += ["!func.copy.copy-buffer.large",
                             "!func.interleaved-cmd-buffers.end1*",
                             "!func.miptree.d32-sfloat.aspect-depth.view*",
                             "!func.miptree.r8g8b8a8-unorm.aspect-color.view*",
                             "!func.miptree.s8-uint.aspect-stencil*",
                             "!func.renderpass.clear.color08",
                             "!func.ssbo.interleve"]

        if "ivb" in o.hardware:
            # issue 5
            skip_filters += ["!func.depthstencil*",
                             "!func.miptree.r8g8b8a8-unorm.aspect-color.view*",
                             "!func.miptree.s8-uint.aspect-stencil*",
                             "!func.miptree.d32-sfloat.aspect-depth.view*",
                             "!stress.lots-of-surface-state.fs.static"]
            job_args = ["-j", "1"]

        if "byt" in o.hardware:
            # issue 6
            skip_filters += ["!func.miptree.d32-sfloat.aspect-depth.view-3d.levels0*",
                             "!func.depthstencil*",
                             "!func.miptree.s8-uint.aspect-stencil*",
                             "!stress.lots-of-surface-state.fs.static"]
            job_args = ["-j", "1"]

        if "bsw" in o.hardware:
            skip_filters += ["!func.event.cmd_buffer"]  # intermittent fail/crash

        if "bxt" in o.hardware:
            skip_filters += ["!func.miptree.s8-uint.aspect-stencil*",
                             "!stress.lots-of-surface-state.fs.static"]

        crucible_cmd = ([br + "/bin/crucible",
                         "run", "--fork", "--log-pids",
                         "--junit-xml=" + out_xml]
                        + job_args + include_tests + skip_filters)
        # crucible's exit status is ignored; failures live in the xml
        bs.run_batch_command(crucible_cmd,
                             env=test_env,
                             expected_return_code=None)
        post_process_results(out_xml)
        bs.run_batch_command(["cp", "-a", "-n", out_dir, project_map.source_root()])

        bs.check_gpu_hang()
        bs.Export().export_tests()
Example #31
0
    def build(self):
        """Build the Vulkan CTS with cmake/ninja and export the binaries.

        Applies local patches (best-effort), wires the spirv-tools and
        glslang checkouts in as symlinks, pins each external repo to the
        revision listed in the CTS's fetch_sources module, then builds,
        pre-generates spir-v binaries, and rsyncs the modules into the
        deqp install tree.
        """
        save_dir = os.getcwd()
        os.chdir(self._src_dir)
        # patch failures are tolerated: the patch may already be applied
        # to this checkout.
        try:
            bs.run_batch_command(["patch", "-p1", "CMakeLists.txt",
                                  self._pm.project_build_dir("vulkancts") + "/0001-Fix-PNG.patch"])
        except:
            print "WARN: failed to apply PNG patch"
        try:
            bs.run_batch_command(["patch", "-p1", "external/vulkancts/modules/vulkan/vktTestPackage.cpp",
                                  self._pm.project_build_dir("vulkancts") + "/0002-Attempt-to-load-prebuilt-spirv-from-cache.patch"])
        except:
            print "WARN: failed to apply prebuilt patch"
        os.chdir(save_dir)
        # Replace any real external checkouts with symlinks to the
        # sibling spirvtools/glslang repos.
        spirvtools = self._src_dir + "/external/spirv-tools/src"
        if not os.path.islink(spirvtools):
            bs.rmtree(spirvtools)
        if not os.path.exists(spirvtools):
            os.symlink("../../../spirvtools", spirvtools)
        glslang = self._src_dir + "/external/glslang/src"
        if not os.path.islink(glslang):
            bs.rmtree(glslang)
        if not os.path.exists(glslang):
            os.symlink("../../../glslang", glslang)

        # change spirv-tools and glslang to use the commits specified
        # in the vulkancts sources
        # (drop any stale vulkancts entries from sys.path so the CTS's
        # own fetch_sources module is the one imported)
        sys.path = [os.path.abspath(os.path.normpath(s)) for s in sys.path]
        sys.path = [gooddir for gooddir in sys.path if "vulkancts" not in gooddir]
        sys.path.append(self._src_dir + "/external/")
        fetch_sources = importlib.import_module("fetch_sources", ".")
        for package in fetch_sources.PACKAGES:
            if not isinstance(package, fetch_sources.GitRepo):
                continue
            repo_path = self._src_dir + "/external/" + package.baseDir + "/src/"
            print "Cleaning: " + repo_path + " : " + package.revision
            savedir = os.getcwd()
            os.chdir(repo_path)
            bs.run_batch_command(["git", "clean", "-xfd"])
            bs.run_batch_command(["git", "reset", "--hard", "HEAD"])
            os.chdir(savedir)
            print "Checking out: " + repo_path + " : " + package.revision
            repo = git.Repo(repo_path)
            repo.git.checkout(package.revision, force=True)
        
        btype = "Release"
        # Vulkan cts is twice as slow for RelDeb builds, which impacts
        # the CI throughput.  For this reason, we unconditionally
        # optimize the build.
        # if self._options.config == "debug":
        #    btype = "RelDeb"
        flags = "-m64"
        if self._options.arch == "m32":
            flags = "-m32"
        cmd = ["cmake", "-GNinja", "-DCMAKE_BUILD_TYPE=" + btype,
               "-DCMAKE_C_COMPILER_LAUNCHER=ccache",
               "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache",
               "-DCMAKE_C_FLAGS=" + flags, "-DCMAKE_CXX_FLAGS=" + flags,
               "-DCMAKE_C_COMPILER=clang-3.7", "-DCMAKE_CXX_COMPILER=clang++-3.7",
               "-DCMAKE_INSTALL_PREFIX:PATH=" + self._build_root, ".."]
        if not os.path.exists(self._build_dir):
            os.makedirs(self._build_dir)
        os.chdir(self._build_dir)
        bs.run_batch_command(cmd)
        bs.run_batch_command(["ninja", "vk-build-programs"])
        save_dir = os.getcwd()
        os.chdir("external/vulkancts/modules/vulkan")
        out_dir = os.path.join(self._src_dir, "external", "vulkancts", "data", "vulkan", "prebuilt")
        # pre-generate spir-v so test runs can load it from the cache
        # (see the 0002 patch applied above)
        print "Pre-building spir-v binaries: vk-build-programs -d " + out_dir
        bs.run_batch_command(["./vk-build-programs", "-d", out_dir],
                             quiet=True,
                             streamedOutput=False)
        os.chdir(save_dir)
        bs.run_batch_command(["ninja"])
        bin_dir = self._build_root + "/opt/deqp/"
        if not os.path.exists(bin_dir):
            os.makedirs(bin_dir)

        bs.run_batch_command(["rsync", "-rlptD",
                              self._build_dir + "/external/vulkancts/modules",
                              bin_dir])

        bs.Export().export()
Example #32
0
def main():
    """Command-line entry point for building/testing projects locally.

    Parses arguments, optionally fetches repos and checks out a branch,
    merges the parsed options into a bs.Options object, then repeatedly
    builds whatever the dependency graph reports as ready.
    """
    parser = argparse.ArgumentParser(description='Build projects locally.')

    # TODO: provide a pull action to update the repos
    parser.add_argument(
        '--action', type=str, default=["build"],
        choices=CsvChoice('fetch', 'build', 'clean', 'test'),
        action=CsvAction,
        help="Action to recurse with. 'build', 'clean' or 'test'. (default: %(default)s)")
    parser.add_argument(
        '--project', dest='project', type=str, default="mesa",
        help='project to build. (default: %(default)s)')
    parser.add_argument(
        '--arch', dest='arch', type=str, default='m64',
        choices=['m64', 'm32'],
        help='arch to build. (default: %(default)s)')
    parser.add_argument(
        '--config', type=str, default="release",
        choices=['release', 'debug'],
        help="Release or Debug build. (default: %(default)s)")
    parser.add_argument(
        '--type', type=str, default="developer",
        choices=['developer', 'percheckin', 'daily', 'release'],
        help="category of tests to run. (default: %(default)s)")
    parser.add_argument(
        '--branch', type=str, default="none",
        help="Branch specification to build.  See build_specification.xml/branches")
    parser.add_argument(
        '--env', type=str, default="",
        help="If specified, overrides environment variable settingsEG: 'LIBGL_DEBUG=1 INTEL_DEBUG=perf'")
    parser.add_argument(
        '--hardware', type=str, default='builder',
        help="The hardware to be targeted for test "
        "('builder', 'snbgt1', 'ivb', 'hsw', 'bdw'). (default: %(default)s)")

    args = parser.parse_args()
    target_project = args.project

    if "fetch" in args.action:
        # fetch not supported by build.py scripts, which will parse argv
        bs.RepoSet().fetch()
    requested_branch = args.branch
    if requested_branch != "none":
        bs.BuildSpecification().checkout(requested_branch)

    # some build_local params are not handled by the Options, which is
    # used by other modules
    options = bs.Options(["bogus"])
    arg_dict = vars(args)
    del arg_dict["project"]
    del arg_dict["branch"]
    if "fetch" in arg_dict["action"]:
        arg_dict["action"].remove("fetch")
    options.__dict__.update(arg_dict)
    # child build.py scripts re-parse sys.argv, so rewrite it with the
    # merged option list.
    sys.argv = ["bogus"] + options.to_list()

    if "clean" in args.action:
        bs.rmtree(bs.ProjectMap().build_root())

    # Walk the dependency graph: each pass runs every build that became
    # ready, until no more builds remain.
    graph = bs.DependencyGraph(target_project, options)
    project_map = bs.ProjectMap()
    ready = graph.ready_builds()
    while ready:
        for build_item in ready:
            graph.build_complete(build_item)
            build_script = project_map.project_build_dir(build_item.project) + "/build.py"
            # projects without a build.py are silently skipped
            if os.path.exists(build_script):
                bs.run_batch_command([sys.executable, build_script] + options.to_list())
        ready = graph.ready_builds()
Example #33
0
    def test(self):
        """Run the GL 4.5 CTS via piglit and publish junit results.

        Skips unsupported mesa versions, builds a piglit command line
        with suite/per-hardware exclusions, runs it, then filters and
        copies the results where Jenkins can read them.
        """
        o = bs.Options()
        pm = bs.ProjectMap()

        if not self.version:
            self.version = bs.mesa_version()

        conf_file = bs.get_conf_file(o.hardware, o.arch, "cts-test")

        # invoke piglit
        self.env["PIGLIT_CTS_GL_BIN"] = self.build_root + "/bin/gl/cts/glcts"
        out_dir = self.build_root + "/test/" + o.hardware

        include_tests = []
        if o.retest_path:
            testlist = bs.TestLister(o.retest_path + "/test/")
            include_tests = testlist.RetestIncludes(project="cts-test")
            if not include_tests:
                # we were supposed to retest failures, but there were none
                return

        # this test is flaky in glcts.  It passes enough for
        # submission, but as per Ken, no developer will want to look
        # at it to figure out why the test is flaky.
        extra_excludes = ["packed_depth_stencil.packed_depth_stencil_copyteximage"]

        suite_names = []
        # disable gl cts on stable versions of mesa, which do not
        # support the feature set.
        if "13.0" in self.version or "12.0" in self.version:
            return
        suite_names.append("cts_gl")
        # as per Ian, only run gl45
        extra_excludes += ["gl30-cts",
                           "gl31-cts",
                           "gl32-cts",
                           "gl33-cts",
                           "gl40-cts",
                           "gl41-cts",
                           "gl42-cts",
                           "gl43-cts",
                           "gl44-cts"]
        if "hsw" in o.hardware:
            # flaky cts_gl tests
            extra_excludes += ["shader_image_load_store.multiple-uniforms",
                               "shader_image_size.basic-nonms-fs",
                               "shader_image_size.advanced-nonms-fs",
                               "texture_gather.gather-tesselation-shader",
                               "vertex_attrib_binding.basic-inputl-case1",
                               "gpu_shader_fp64.named_uniform_blocks",
                               # gpu hang
                               "gl45-cts.tessellation_shader.vertex_spacing",
                               "gl45-cts.tessellation_shader.vertex_ordering",
                               "gl45-cts.tessellation_shader.tessellation_control_to_tessellation_evaluation.gl_maxpatchvertices_position_pointsize"]

        # expand each exclusion into a piglit --exclude-tests pair
        exclude_tests = []
        for  a in extra_excludes:
            exclude_tests += ["--exclude-tests", a]
        # NOTE(review): "cts_gl" is appended unconditionally above, so
        # this guard appears unreachable as written - confirm intent.
        if not suite_names:
            # for master, on old hardware, this component will not
            # test anything.  The gles tests are instead targeted with
            # the gles32 cts, in the glescts-test component
            return
        cmd = [self.build_root + "/bin/piglit",
               "run",
               #"-p", "gbm",
               "-b", "junit",
               "--config", conf_file,
               "-c",
               "--exclude-tests", "esext-cts",
               "--junit_suffix", "." + o.hardware + o.arch] + \
               exclude_tests + \
               include_tests + suite_names + [out_dir]

        # piglit's exit status is ignored; failures are reported via the
        # junit xml instead.
        bs.run_batch_command(cmd, env=self.env,
                             expected_return_code=None,
                             streamedOutput=True)
        single_out_dir = self.build_root + "/../test"
        if not os.path.exists(single_out_dir):
            os.makedirs(single_out_dir)

        if os.path.exists(out_dir + "/results.xml"):
            # Uniquely name all test files in one directory, for
            # jenkins
            filename_components = ["/piglit-cts",
                                   o.hardware,
                                   o.arch]
            if o.shard != "0":
                # only put the shard suffix on for non-zero shards.
                # Having _0 suffix interferes with bisection.
                filename_components.append(o.shard)

            revisions = bs.RepoSet().branch_missing_revisions()
            print "INFO: filtering tests from " + out_dir + "/results.xml"
            self.filter_tests(revisions,
                              out_dir + "/results.xml",
                              single_out_dir + "_".join(filename_components) + ".xml")

            # create a copy of the test xml in the source root, where
            # jenkins can access it.
            cmd = ["cp", "-a", "-n",
                   self.build_root + "/../test", pm.source_root()]
            bs.run_batch_command(cmd)
            bs.Export().export_tests()
        else:
            print "ERROR: no results at " + out_dir + "/results.xml"

        bs.check_gpu_hang()
Example #34
0
    def build(self):
        """Build the Vulkan CTS with cmake/ninja and export the binaries.

        Applies local patches (best-effort), wires the spirv-tools,
        spirv-headers and glslang checkouts in as symlinks, pins each
        external repo to the revision listed in the CTS's fetch_sources
        module, then builds, pre-generates spir-v binaries, and rsyncs
        the modules into the deqp install tree.
        """
        save_dir = os.getcwd()
        os.chdir(self._src_dir)
        # patch failures are tolerated: the patch may already be applied
        # to this checkout.
        try:
            bs.run_batch_command([
                "patch", "-p1", "CMakeLists.txt",
                self._pm.project_build_dir("vulkancts") + "/0001-Fix-PNG.patch"
            ])
        except:
            print "WARN: failed to apply PNG patch"
        try:
            bs.run_batch_command([
                "patch", "-p1",
                "external/vulkancts/modules/vulkan/vktTestPackage.cpp",
                self._pm.project_build_dir("vulkancts") +
                "/0002-Attempt-to-load-prebuilt-spirv-from-cache.patch"
            ])
        except:
            print "WARN: failed to apply prebuilt patch"
        os.chdir(save_dir)
        # Replace any real external checkouts with symlinks to the
        # sibling spirvtools/spirvheaders/glslang repos.
        spirvtools = self._src_dir + "/external/spirv-tools/src"
        if not os.path.islink(spirvtools):
            bs.rmtree(spirvtools)
        if not os.path.exists(spirvtools):
            os.symlink("../../../spirvtools", spirvtools)
        spirvheaders_dir = self._src_dir + "/external/spirv-headers"
        if not os.path.exists(spirvheaders_dir):
            os.makedirs(spirvheaders_dir)
        spirvheaders = spirvheaders_dir + "/src"
        if not os.path.islink(spirvheaders):
            bs.rmtree(spirvheaders)
        if not os.path.exists(spirvheaders):
            os.symlink("../../../spirvheaders", spirvheaders)
        glslang = self._src_dir + "/external/glslang/src"
        if not os.path.islink(glslang):
            bs.rmtree(glslang)
        if not os.path.exists(glslang):
            os.symlink("../../../glslang", glslang)

        # change spirv-tools and glslang to use the commits specified
        # in the vulkancts sources
        # (drop any stale vulkancts entries from sys.path so the CTS's
        # own fetch_sources module is the one imported)
        sys.path = [os.path.abspath(os.path.normpath(s)) for s in sys.path]
        sys.path = [
            gooddir for gooddir in sys.path if "vulkancts" not in gooddir
        ]
        sys.path.append(self._src_dir + "/external/")
        fetch_sources = importlib.import_module("fetch_sources", ".")
        for package in fetch_sources.PACKAGES:
            if not isinstance(package, fetch_sources.GitRepo):
                continue
            repo_path = self._src_dir + "/external/" + package.baseDir + "/src/"
            print "Cleaning: " + repo_path + " : " + package.revision
            savedir = os.getcwd()
            os.chdir(repo_path)
            bs.run_batch_command(["git", "clean", "-xfd"])
            bs.run_batch_command(["git", "reset", "--hard", "HEAD"])
            os.chdir(savedir)
            print "Checking out: " + repo_path + " : " + package.revision
            repo = git.Repo(repo_path)
            repo.git.checkout(package.revision, force=True)

        btype = "Release"
        # Vulkan cts is twice as slow for RelDeb builds, which impacts
        # the CI throughput.  For this reason, we unconditionally
        # optimize the build.
        # if self._options.config == "debug":
        #    btype = "RelDeb"
        flags = "-m64"
        if self._options.arch == "m32":
            flags = "-m32"
        cmd = [
            "cmake", "-GNinja", "-DCMAKE_BUILD_TYPE=" + btype,
            "-DCMAKE_C_COMPILER_LAUNCHER=ccache",
            "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache", "-DCMAKE_C_FLAGS=" + flags,
            "-DCMAKE_CXX_FLAGS=" + flags, "-DCMAKE_C_COMPILER=clang",
            "-DCMAKE_CXX_COMPILER=clang++",
            "-DCMAKE_INSTALL_PREFIX:PATH=" + self._build_root, ".."
        ]
        if not os.path.exists(self._build_dir):
            os.makedirs(self._build_dir)
        os.chdir(self._build_dir)
        bs.run_batch_command(cmd)
        bs.run_batch_command(["ninja", "vk-build-programs"])
        save_dir = os.getcwd()
        os.chdir("external/vulkancts/modules/vulkan")
        out_dir = os.path.join(self._src_dir, "external", "vulkancts", "data",
                               "vulkan", "prebuilt")
        # pre-generate spir-v so test runs can load it from the cache
        # (see the 0002 patch applied above)
        print "Pre-building spir-v binaries: vk-build-programs -d " + out_dir
        bs.run_batch_command(["./vk-build-programs", "-d", out_dir],
                             quiet=True,
                             streamedOutput=False)
        os.chdir(save_dir)
        bs.run_batch_command(["ninja"])
        bin_dir = self._build_root + "/opt/deqp/"
        if not os.path.exists(bin_dir):
            os.makedirs(bin_dir)

        bs.run_batch_command([
            "rsync", "-rlptD", self._build_dir + "/external/vulkancts/modules",
            bin_dir
        ])

        bs.Export().export()
Example #35
0
    def test(self):
        """Run the OpenGL 4.5 conformance suite (cts_gl) via piglit.

        Bails out early when the mesa version under test does not
        support the CTS feature set, or when a retest was requested but
        produced no tests to rerun.  On completion, filters the junit
        results against missing branch revisions, uniquely names the
        output per hardware/arch/shard, and copies it where jenkins can
        access it.
        """
        o = bs.Options()
        pm = bs.ProjectMap()

        # Lazily resolve the mesa version under test if the caller did
        # not supply one.
        if not self.version:
            self.version = bs.mesa_version()

        conf_file = bs.get_conf_file(o.hardware, o.arch, "cts-test")

        # invoke piglit
        self.env["PIGLIT_CTS_GL_BIN"] = self.build_root + "/bin/gl/cts/glcts"
        out_dir = self.build_root + "/test/" + o.hardware

        include_tests = []
        if o.retest_path:
            # A retest run: restrict execution to the tests that failed
            # in the referenced previous run.
            testlist = bs.TestLister(o.retest_path + "/test/")
            include_tests = testlist.RetestIncludes(project="cts-test")
            if not include_tests:
                # we were supposed to retest failures, but there were none
                return

        # this test is flaky in glcts.  It passes enough for
        # submission, but as per Ken, no developer will want to look
        # at it to figure out why the test is flaky.
        extra_excludes = [
            "packed_depth_stencil.packed_depth_stencil_copyteximage"
        ]

        suite_names = []
        # disable gl cts on stable versions of mesa, which do not
        # support the feature set.
        if "13.0" in self.version:
            return
        if "17.0" in self.version and "glk" in o.hardware:
            # glk not supported by stable mesa
            return
        suite_names.append("cts_gl")
        # as per Ian, only run gl45
        extra_excludes += [
            "gl30-cts", "gl31-cts", "gl32-cts", "gl33-cts", "gl40-cts",
            "gl41-cts", "gl42-cts", "gl43-cts", "gl44-cts"
        ]
        if "hsw" in o.hardware:
            # flaky cts_gl tests
            extra_excludes += [
                "shader_image_load_store.multiple-uniforms",
                "shader_image_size.basic-nonms-fs",
                "shader_image_size.advanced-nonms-fs",
                "texture_gather.gather-tesselation-shader",
                "vertex_attrib_binding.basic-inputl-case1",
                "gpu_shader_fp64.named_uniform_blocks",
                # gpu hang
                "gl45-cts.tessellation_shader.vertex_spacing",
                "gl45-cts.tessellation_shader.vertex_ordering",
                "gl45-cts.tessellation_shader.tessellation_control_to_tessellation_evaluation.gl_maxpatchvertices_position_pointsize"
            ]

        # Expand the exclude names into piglit's repeated-flag form:
        # ["--exclude-tests", name, "--exclude-tests", name, ...]
        exclude_tests = []
        for a in extra_excludes:
            exclude_tests += ["--exclude-tests", a]
        if not suite_names:
            # for master, on old hardware, this component will not
            # test anything.  The gles tests are instead targeted with
            # the gles32 cts, in the glescts-test component
            return
        cmd = [self.build_root + "/bin/piglit",
               "run",
               #"-p", "gbm",
               "-b", "junit",
               "--config", conf_file,
               "-c",
               "--exclude-tests", "esext-cts",
               "--junit_suffix", "." + o.hardware + o.arch] + \
               exclude_tests + \
               include_tests + suite_names + [out_dir]

        # Piglit's return code is ignored (expected_return_code=None):
        # test failures are reported through the junit xml, not the
        # exit status.
        bs.run_batch_command(cmd,
                             env=self.env,
                             expected_return_code=None,
                             streamedOutput=True)
        single_out_dir = self.build_root + "/../test"
        if not os.path.exists(single_out_dir):
            os.makedirs(single_out_dir)

        if os.path.exists(out_dir + "/results.xml"):
            # Uniquely name all test files in one directory, for
            # jenkins
            filename_components = ["/piglit-cts", o.hardware, o.arch]
            if o.shard != "0":
                # only put the shard suffix on for non-zero shards.
                # Having _0 suffix interferes with bisection.
                filename_components.append(o.shard)

            # Drop results for tests tied to revisions missing from the
            # current branch before publishing.
            revisions = bs.RepoSet().branch_missing_revisions()
            print "INFO: filtering tests from " + out_dir + "/results.xml"
            self.filter_tests(
                revisions, out_dir + "/results.xml",
                single_out_dir + "_".join(filename_components) + ".xml")

            # create a copy of the test xml in the source root, where
            # jenkins can access it.
            cmd = [
                "cp", "-a", "-n", self.build_root + "/../test",
                pm.source_root()
            ]
            bs.run_batch_command(cmd)
        else:
            print "ERROR: no results at " + out_dir + "/results.xml"

        # Always check for gpu hangs and export, even when piglit
        # produced no results.
        bs.check_gpu_hang()
        bs.Export().export_tests()
Example #36
0
 def clean(self):
     """Restore the checkout to a pristine state.

     Removes untracked files and directories, discards local
     modifications, and deletes any cached repository clones.
     """
     for git_cmd in (["git", "clean", "-xfd"],
                     ["git", "reset", "--hard", "HEAD"]):
         bs.run_batch_command(git_cmd)
     bs.rmtree("repos")
Example #37
0
    def test(self):
        """Run dEQP gles2/gles3 through piglit with per-platform skips.

        Builds a case list for each module by generating the full test
        xml from the deqp binary, subtracting platform expectation
        files and known-intermittent tests, sharding the remainder, and
        then invoking piglit with the resulting caselist files.
        Results are filtered and copied for jenkins, mirroring the
        cts-test flow.
        """
        # todo: now that there is more than one component that needs
        # to call mesa_version, it should be moved to a more sharable
        # location
        mesa_version = bs.PiglitTester().mesa_version()
        if "10.5" in mesa_version or "10.6" in mesa_version:
            print "WARNING: deqp not supported on 10.6 and earlier."
            return

        o = bs.Options()
        pm = bs.ProjectMap()
        src_dir = pm.project_source_dir(pm.current_project())
        savedir = os.getcwd()

        # Options passed when generating the caselist; the binary name
        # is replaced per module below (argv[0] slot).
        deqp_options = ["./deqp-gles2",
                        "--deqp-surface-type=fbo",
                        "--deqp-log-images=disable",
                        "--deqp-surface-width=256",
                        "--deqp-surface-height=256"]


        expectations_dir = None
        # identify platform
        # byt/ivb/snb use chromiumos-autotest expectation files from
        # the source tree; bdw/hsw/bsw use expectations kept in this
        # project's build dir.
        if "byt" in o.hardware:
            expectations_dir = src_dir + "/chromiumos-autotest/graphics_dEQP/expectations/baytrail"
        elif "bdw" in o.hardware:
            expectations_dir = pm.project_build_dir(pm.current_project()) + "/bdw_expectations"
        elif "hsw" in o.hardware:
            expectations_dir = pm.project_build_dir(pm.current_project()) + "/hsw_expectations"
        elif "ivb" in o.hardware:
            expectations_dir = src_dir + "/chromiumos-autotest/graphics_dEQP/expectations/ivybridge"
        elif "snb" in o.hardware:
            expectations_dir = src_dir + "/chromiumos-autotest/graphics_dEQP/expectations/sandybridge"
        elif "bsw" in o.hardware:
            expectations_dir = pm.project_build_dir(pm.current_project()) + "/bsw_expectations"

        conf_file = bs.get_conf_file(o.hardware, o.arch, "deqp-test")

        for module in ["gles2", "gles3"]:
            skip = DeqpTrie()
            # for each skip list, parse into skip trie
            if expectations_dir and os.path.exists(expectations_dir):
                for askipfile in os.listdir(expectations_dir):
                    # Expectation files are module-specific; match on
                    # the module name appearing in the filename.
                    if module not in askipfile.lower():
                        continue
                    skip.add_txt(expectations_dir + "/" + askipfile)
            else:
                # NOTE(review): placeholder entry presumably keeps the
                # trie non-empty so filtering below is a no-op — confirm
                # against DeqpTrie.filter semantics.
                skip._trie["empty"] = None

            # create test trie
            os.chdir(self.build_root + "/opt/deqp/modules/" + module)
            # generate list of tests
            bs.run_batch_command(["./deqp-" + module] + deqp_options + ["--deqp-runmode=xml-caselist"],
                                 env=self.env)
            outfile = "dEQP-" + module.upper() + "-cases.xml"
            assert(os.path.exists(outfile))
            testlist = DeqpTrie()
            testlist.add_xml(outfile)

            # filter skip trie from testlist trie
            testlist.filter(skip)

            # filter intermittent tests
            # TODO(janesma) : write bug
            skips = ["functional.fragment_ops.interaction.basic_shader",
                     "functional.shaders.random.basic_expression.combined",
                     "functional.shaders.random.conditionals.combined",
                     # fails intermittently on at least bdw and hsw
                     "functional.flush_finish.flush",
                     "functional.flush_finish.finish",
                     "functional.flush_finish.finish_wait"]

            if "snb" in o.hardware:
                skips = skips + ["functional.shaders.random.texture.vertex.45",
                                 "functional.shaders.random.texture.vertex.1",
                                 "functional.shaders.random.texture.vertex.34"]

            # Prefix each skip with the module's top-level group name
            # (e.g. "dEQP-GLES2.") before filtering.
            intermittent = DeqpTrie()
            for skip in skips:
                intermittent.add_line("dEQP-" + module.upper() + "." + skip)
            testlist.filter(intermittent)

            # generate testlist file
            caselist_fn = module + "-cases.txt"
            caselist = open(caselist_fn, "w")
            testlist.write_caselist(caselist)
            caselist.close()
            # Reduce the caselist to this invocation's shard.
            self.shard_caselist(caselist_fn, o.shard)

        os.chdir(savedir)

        # invoke piglit
        # Point piglit at the deqp binaries and the sharded caselists
        # built above.
        self.env["PIGLIT_DEQP_GLES2_BIN"] = self.build_root + "/opt/deqp/modules/gles2/deqp-gles2"
        self.env["PIGLIT_DEQP_GLES2_EXTRA_ARGS"] =  ("--deqp-surface-type=fbo "
                                                     "--deqp-log-images=disable "
                                                     '--deqp-surface-width=256 '
                                                     '--deqp-surface-height=256 '
                                                     "--deqp-caselist-file=" +
                                                     self.build_root + "/opt/deqp/modules/gles2/gles2-cases.txt")
        self.env["PIGLIT_DEQP_GLES3_EXE"] = self.build_root + "/opt/deqp/modules/gles3/deqp-gles3"
        self.env["PIGLIT_DEQP_GLES3_EXTRA_ARGS"] = ("--deqp-surface-type=fbo "
                                                    "--deqp-log-images=disable "
                                                    '--deqp-surface-width=256 '
                                                    '--deqp-surface-height=256 '
                                                    "--deqp-caselist-file=" +
                                                    self.build_root + "/opt/deqp/modules/gles3/gles3-cases.txt")
        out_dir = self.build_root + "/test/" + o.hardware

        include_tests = []
        if o.retest_path:
            # A retest run: translate previously-failed test names into
            # piglit --include-tests arguments.
            testlist = bs.TestLister(o.retest_path + "/test/")
            for atest in testlist.Tests(project="deqp-test"):
                # Normalize separators piglit/junit may have used back
                # into dEQP's dotted form.
                test_name_good_chars = re.sub('[_ !:=]', ".", atest.test_name)
                # drop the spec
                test_name = ".".join(test_name_good_chars.split(".")[1:])
                include_tests = include_tests + ["--include-tests", test_name]
            if not include_tests:
                # we were supposed to retest failures, but there were none
                return

        cmd = [self.build_root + "/bin/piglit",
               "run",
               "-p", "gbm",
               "-b", "junit",
               "--config", conf_file,
               "-c",
               "--junit_suffix", "." + o.hardware + o.arch] + \
            include_tests + \
            ["deqp_gles2", "deqp_gles3", out_dir ]

        # Piglit's exit status is ignored; failures are carried in the
        # junit xml.
        bs.run_batch_command(cmd, env=self.env,
                             expected_return_code=None,
                             streamedOutput=True)

        single_out_dir = self.build_root + "/../test"
        if not os.path.exists(single_out_dir):
            os.makedirs(single_out_dir)

        if os.path.exists(out_dir + "/results.xml"):
            # Uniquely name all test files in one directory, for
            # jenkins
            filename_components = ["/piglit-deqp",
                                   o.hardware,
                                   o.arch]
            if o.shard != "0":
                # only put the shard suffix on for non-zero shards.
                # Having _0 suffix interferes with bisection.
                filename_components.append(o.shard)

            # Drop results for tests tied to revisions missing from the
            # current branch before publishing.
            revisions = bs.RepoSet().branch_missing_revisions()
            print "INFO: filtering tests from " + out_dir + "/results.xml"
            self.filter_tests(revisions,
                              out_dir + "/results.xml",
                              single_out_dir + "_".join(filename_components) + ".xml")

            # create a copy of the test xml in the source root, where
            # jenkins can access it.
            cmd = ["cp", "-a", "-n",
                   self.build_root + "/../test", pm.source_root()]
            bs.run_batch_command(cmd)
            bs.Export().export_tests()
        else:
            print "ERROR: no results at " + out_dir + "/results.xml"

        bs.PiglitTester().check_gpu_hang()