Example #1
 def __init__(self, binary):
     self.binary = binary
     self.o = bs.Options()
     self.pm = bs.ProjectMap()
     self.blacklist_txt = None
     self.version = None
     bd = self.pm.project_build_dir()
     hw_prefix = self.o.hardware[:3]
     if self.o.hardware == "g965":
         hw_prefix = self.o.hardware
     # Note: bsw has known failures that need to be resolved
     # first: https://bugs.freedesktop.org/show_bug.cgi?id=104981
     if hw_prefix in ['g33', 'g45', 'ilk', 'g965', 'hsw', 'byt', 'bsw']:
         raise Exception(("ERROR: This platform is not supported by "
                          "this test"))
     if "gles2" in self.binary:
         self.blacklist_txt = (bd + hw_prefix +
                               "_expectations/gles2_unstable_tests.txt")
     if "gles3" in self.binary:
         self.blacklist_txt = (bd + hw_prefix +
                               "_expectations/gles3_unstable_tests.txt")
     if "gles31" in self.binary:
         self.blacklist_txt = (bd + hw_prefix +
                               "_expectations/gles31_unstable_tests.txt")
     if "egl" in self.binary:
         self.blacklist_txt = (bd + hw_prefix +
                               "_expectations/egl_unstable_tests.txt")
Example #2
    def build(self):
        all_scores = {}
        for a_score_file in glob.glob(self.opts.result_path +
                                      "/../*/m64/scores/*/*/*.json"):
            with open(a_score_file, "r") as f:
                a_score = json.load(f)
            self.merge_scores(all_scores, a_score)
        scale_file = bs.ProjectMap().project_build_dir(
            "sixonix") + "/scale.yml"
        with open(scale_file, 'r') as inf:
            scale = yaml.load(inf)
        for benchmark, platform in all_scores.iteritems():
            for platform_name, pscores in platform.iteritems():
                for _, series in pscores.iteritems():
                    runs = []
                    for run in series:
                        runs += [r for r in run["score"]]
                    mean = numpy.mean(runs, dtype=numpy.float64)
                    scale[benchmark][platform_name] = float(mean)
                    print benchmark + " " + platform_name + ": " + str(
                        mean) + " " + str(
                            numpy.std(runs, dtype=numpy.float64) / mean)

        with open(scale_file, 'w') as of:
            yaml.dump(scale, of)
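
A note on this example: self.merge_scores is not shown. Assuming each per-run json file mirrors the nested benchmark/platform/commit layout that the loops above (and in Example #17) iterate over, a hypothetical stand-alone sketch of that merge step could look like:

def merge_scores(all_scores, a_score):
    # Hypothetical sketch: fold one score file into the accumulator,
    # assuming the json is shaped {benchmark: {platform: {commit: [runs]}}}.
    for benchmark, platforms in a_score.items():
        for platform, commits in platforms.items():
            for commit, runs in commits.items():
                dest = all_scores.setdefault(benchmark, {})
                dest = dest.setdefault(platform, {})
                dest.setdefault(commit, []).extend(runs)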
Example #3
    def __init__(self):
        self.opts = bs.Options()

        if self.opts.config == 'debug':
            print "ERROR: perf not supported for debug"
            assert (False)

        pm = bs.ProjectMap()
        self._src_dir = pm.project_source_dir("mesa")
        self._build_dir = "/tmp/mesa-perf/build_" + self.opts.arch

        # generated with `gcc -E -v -march=native - < /dev/null 2>&1 | grep cc1`
        self._flags = [
            '-march=silvermont', '-mmmx', '-mno-3dnow', '-msse', '-msse2',
            '-msse3', '-mssse3', '-mno-sse4a', '-mcx16', '-msahf', '-mmovbe',
            '-maes', '-msha', '-mpclmul', '-mpopcnt', '-mno-abm', '-mno-lwp',
            '-mno-fma', '-mno-fma4', '-mno-xop', '-mno-bmi', '-mno-bmi2',
            '-mno-tbm', '-mno-avx', '-mno-avx2', '-msse4.2', '-msse4.1',
            '-mno-lzcnt', '-mno-rtm', '-mno-hle', '-mrdrnd', '-mno-f16c',
            '-mfsgsbase', '-mrdseed', '-mprfchw', '-mno-adx', '-mfxsr',
            '-mno-xsave', '-mno-xsaveopt', '-mno-avx512f', '-mno-avx512er',
            '-mno-avx512cd', '-mno-avx512pf', '-mno-prefetchwt1',
            '-mclflushopt', '-mno-xsavec', '-mno-xsaves', '-mno-avx512dq',
            '-mno-avx512bw', '-mno-avx512vl', '-mno-avx512ifma',
            '-mno-avx512vbmi', '-mno-clwb', '-mno-mwaitx', '-mno-clzero',
            '-mno-pku', '--param', 'l1-cache-size=24', '--param',
            'l1-cache-line-size=64', '--param', 'l2-cache-size=1024',
            '-mtune=generic'
        ]
Example #4
 def __init__(self, extra_definitions=None, compiler="gcc"):
     bs.CMakeBuilder.__init__(self,
                              extra_definitions=extra_definitions,
                              compiler=compiler,
                              install=False)
     self._o = bs.Options()
     self._pm = bs.ProjectMap()
Example #5
    def __init__(self):
        o = bs.Options()
        pm = bs.ProjectMap()
        self.build_root = pm.build_root()
        libdir = "x86_64-linux-gnu"
        self.version = None
        if o.arch == "m32":
            libdir = "i386-linux-gnu"
        self.env = { "LD_LIBRARY_PATH" : self.build_root + "/lib:" + \
                     self.build_root + "/lib/" + libdir + ":" + self.build_root + "/lib/dri",
                     "LIBGL_DRIVERS_PATH" : self.build_root + "/lib/dri",
                     "GBM_DRIVERS_PATH" : self.build_root + "/lib/dri",
                     # fixes dxt subimage tests that fail due to a
                     # combination of unreasonable tolerances and possibly
                     # bugs in debian's s2tc library.  Recommended by nroberts
                     "S2TC_DITHER_MODE" : "NONE",

                     # without this, Xorg limits frame rate to 1 FPS
                     # when the display sleeps, cratering test execution
                     "vblank_mode": "0"
        }

        self.env["MESA_GL_VERSION_OVERRIDE"] = "4.5"
        self.env["MESA_GLSL_VERSION_OVERRIDE"] = "450"
        o.update_env(self.env)
Example #6
    def __init__(self):
        global_opts = bs.Options()

        options = []
        options = options + [
            "--enable-gbm",
            "--with-egl-platforms=x11,drm",
            "--enable-glx-tls",
            "--enable-gles1",
            "--enable-gles2",
            "--with-dri-drivers=i965,swrast,i915",

            # disable video drivers:
            # bbe6f7f865cd4316b5f885507ee0b128a20686eb
            # caused build failure unrelated to intel mesa
            # team.
            "--disable-xvmc",
            "--disable-vdpau",

            # gallium tested with mesa-buildtest
            "--without-gallium-drivers"
        ]
        if os.path.exists(bs.ProjectMap().project_source_dir() +
                          "/src/intel/vulkan"):
            options.append("--with-vulkan-drivers=intel")

        if global_opts.config == 'debug':
            options.append('--enable-debug')

        # always enable optimizations in mesa because tests are too slow
        # without them.
        bs.AutoBuilder.__init__(self,
                                configure_options=options,
                                opt_flags="-O2")
Example #7
def main():
    pm = bs.ProjectMap()
    sd = pm.project_source_dir(pm.current_project())
    if os.path.exists(os.path.join(sd, 'meson.build')):
        meson_build()
    else:
        bs.build(MesaBuilder())
Example #8
 def __init__(self):
     self._pm = bs.ProjectMap()
     glslc = "GLSLC=" + self._pm.build_root() + "/bin/glslc"
     mesa_lib = "MESA_LDFLAGS=-L" + self._pm.build_root() + "/lib"
     mesa_include = "MESA_CPPFLAGS=-I" + os.path.abspath(
         self._pm.project_source_dir() + "/../mesa/include")
     bs.AutoBuilder.__init__(
         self, configure_options=[glslc, mesa_lib, mesa_include])
     self._build_dir = self._src_dir
Example #9
    def __init__(self):
        self.o = bs.Options()
        self.pm = bs.ProjectMap()

        self.env = {"MESA_GLES_VERSION_OVERRIDE" : ""}
        if self._gles_32():
            self.env["MESA_GLES_VERSION_OVERRIDE"] = "3.2"
        elif self._gles_31():
            self.env["MESA_GLES_VERSION_OVERRIDE"] = "3.1"
Example #10
def main():
    # Disable test if using < Mesa 18.0
    pm = bs.ProjectMap()
    sd = pm.project_source_dir("mesa")
    if not os.path.exists(os.path.join(sd,
                                       'src/mesa/drivers/osmesa/meson.build')):
        return 0

    bs.build(bs.AndroidBuilder(src_location="~/android-ia",
                               module="libGLES_mesa"))
Example #11
def main():
    parser = argparse.ArgumentParser(
        description="Choose a random revision of mesa")
    parser.add_argument(
        '--revision',
        type=str,
        default="",
        help="bounds for mesa revision to test, start:[finish]")

    args = parser.parse_args()
    revision = args.revision
    bspec = bs.BuildSpecification()
    bspec.checkout("mesa_master")
    mesa_repo = git.Repo(bs.ProjectMap().project_source_dir("mesa"))
    if ":" in revision:
        (start_rev, end_rev) = revision.split(":")
        if not end_rev:
            # user selected the last point in a plot.  Build current master
            revision = "mesa=" + mesa_repo.git.rev_parse("HEAD", short=True)
        elif not start_rev:
            print "ERROR: user-generated perf builds cannot add older data points to the plot"
            sys.exit(-1)
        else:
            commits = []
            start_commit = mesa_repo.commit(start_rev)
            found = False
            for commit in mesa_repo.iter_commits(end_rev, max_count=8000):
                if commit == start_commit:
                    found = True
                    break
                commits.append(commit.hexsha)
            if not found:
                print "ERROR: " + start_rev + " not found in history of " + end_rev
                sys.exit(-1)
            revision = "mesa=" + str(commits[len(commits) / 2])
        print revision
        sys.exit(0)

    # else choose random revision
    branch_commit = mesa_repo.tags["17.1-branchpoint"].commit.hexsha
    commits = []
    for commit in mesa_repo.iter_commits('origin/master', max_count=8000):
        if commit.hexsha == branch_commit:
            break
        commits.append(commit.hexsha)
    revision = "mesa=" + str(commits[int(random.random() * len(commits))])
    print revision
Example #12
 def __init__(self, binary, cts_tests):
     self.binary = binary
     self.o = bs.Options()
     self.pm = bs.ProjectMap()
     self.blacklist_txt = None
     self.cts_blacklist = cts_tests
     self.version = None
     bd = self.pm.project_build_dir()
     if "glk" in self.o.hardware:
         bd = self.pm.project_source_dir("prerelease") + "/deqp-test/"
     if "gles2" in self.binary:
         self.blacklist_txt = bd + self.o.hardware[:3] + "_expectations/gles2_unstable_tests.txt"
     if "gles3" in self.binary:
         self.blacklist_txt = bd + self.o.hardware[:3] + "_expectations/gles3_unstable_tests.txt"
     if "gles31" in self.binary:
         self.blacklist_txt = bd + self.o.hardware[:3] + "_expectations/gles31_unstable_tests.txt"
     if "egl" in self.binary:
         self.blacklist_txt = bd + self.o.hardware[:3] + "_expectations/egl_unstable_tests.txt"
Example #13
 def __init__(self, binary):
     self.binary = binary
     self.o = bs.Options()
     self.pm = bs.ProjectMap()
     self.blacklist_txt = None
     self.version = None
     bd = self.pm.project_build_dir()
     hw_prefix = self.o.hardware[:3]
     if self.o.hardware == "g965":
         hw_prefix = self.o.hardware
     if "gles2" in self.binary:
         self.blacklist_txt = bd + hw_prefix + "_expectations/gles2_unstable_tests.txt"
     if "gles3" in self.binary:
         self.blacklist_txt = bd + hw_prefix + "_expectations/gles3_unstable_tests.txt"
     if "gles31" in self.binary:
         self.blacklist_txt = bd + hw_prefix + "_expectations/gles31_unstable_tests.txt"
     if "egl" in self.binary:
         self.blacklist_txt = bd + hw_prefix + "_expectations/egl_unstable_tests.txt"
Example #14
 def test(self):
     pm = bs.ProjectMap()
     global_opts = bs.Options()
     if global_opts.arch == "m64":
         icd_name = "intel_icd.x86_64.json"
     elif global_opts.arch == "m32":
         icd_name = "intel_icd.i686.json"
     env = {"VK_ICD_FILENAMES" : pm.build_root() + \
            "/share/vulkan/icd.d/" + icd_name,
            "ANV_ABORT_ON_DEVICE_LOSS" : "true"}
     tester = bs.DeqpTester()
     binary = pm.build_root() + "/opt/deqp/modules/vulkan/deqp-vk"
     results = tester.test(binary,
                           VulkanTestList(), ["--deqp-surface-type=fbo"],
                           env=env)
     o = bs.Options()
     config = bs.get_conf_file(o.hardware,
                               o.arch,
                               project=pm.current_project())
     tester.generate_results(results, bs.ConfigFilter(config, o))
Example #15
class CaseConfig(ConfigParser.SafeConfigParser):
    def optionxform(self, optionstr):
        return optionstr


# put the commit that needs to be removed
blame_test = "piglit 84c528875537237af5e942c4b9864cdbdc2aa782"

tests = [
    "crucible-test", "cts-test", "deqp-test", "glescts-test", "piglit-test",
    "vulkancts-test"
]
# tests = ["deqp-test"]

source_root = bs.ProjectMap().source_root()


def remove_tests(sections):
    for a_test in tests:
        path_to_test = source_root + "/" + a_test
        configs = []
        b_configs = os.listdir(path_to_test)
        for j in b_configs:
            if re.search(r"\w.*\.conf", str(j)):
                configs.append(j)


#        print(configs)

        for config in configs:
Example #16
for a_rev in revs:
    proj = a_rev.split("=")[0]
    rev = a_rev.split("=")[1]
    rev_hash[proj] = rev

blame = args.blame_revision.split("=")
if len(blame) != 2:
    print "ERROR: --blame_revision must be in the format: project=rev"
    sys.exit(-1)

if not rev_hash.has_key(blame[0]):
    print "ERROR: invalid project in --blame_revision: " + blame[0]
    print "ERROR: acceptable projects: " + ",".join(rev_hash.keys())
    sys.exit(-1)

pm = bs.ProjectMap()
spec_xml = pm.build_spec()
results_dir = spec_xml.find("build_master").attrib["results_dir"]
retest_dir = args.dir
if retest_dir == "":
    retest_dir = results_dir + "/update/" + datetime.datetime.now().isoformat()

if rev_hash[blame[0]] == blame[1]:
    # rsync to save build if the blame is the same as the build
    src_dir = "/".join(dirnames[:-1]) + "/"
    dest_dir = bs.convert_rsync_path(retest_dir)
    cmd = ["rsync", "-rlptD", "--exclude", "/*test/", src_dir, dest_dir]
    bs.run_batch_command(cmd)
else:
    rev_hash[blame[0]] = blame[1]
Example #17
    def test(self):
        # create per-benchmark-per-platform files containing all
        # scores to date

        # at a later point, we may want to combine data sets so
        # developers can see all skylake benchmarks together, for
        # example.

        # canonical path is
        # /mnt/jenkins/results/perf/{rev_spec}/{type}/m64/scores/{benchmark}/{platform}/{date}.json:
        all_scores = {}
        score_glob = "/mnt/jenkins/results/mesa_master/*/*/m64/scores/*/*/*json"
        for a_score_file in glob.glob(score_glob):
            with open(a_score_file, "r") as f:
                a_score = json.load(f)
            self.merge_scores(all_scores, a_score)
        if not all_scores:
            print "WARN: no results to merge"
            return

        # canonical windows path is
        # /mnt/jenkins/results/perf_win/scores/{benchmark}/{platform}/{date}.json:
        for a_score_file in glob.glob("/mnt/jenkins/results/perf_win/scores/*/*/*.json"):
            with open(a_score_file, "r") as f:
                a_score = json.load(f)
            self.merge_scores(all_scores, a_score)

        pm = bs.ProjectMap()
        mesa_repo = git.Repo(pm.project_source_dir("mesa"))

        # add mean score and date to data set
        for benchmark, platform in all_scores.iteritems():
            for platform_name, pscores in platform.iteritems():
                scores_by_date = {}
                UFO_score = None
                for commit, series in pscores.iteritems():
                    accumulated_score = {}
                    runs = []
                    for run in series:
                        runs += run["score"]
                    if not runs:
                        continue
                    accumulated_score["score"] = numpy.mean(runs, dtype=numpy.float64)
                    accumulated_score["deviation"] = numpy.std(runs, dtype=numpy.float64) / accumulated_score["score"]
                    accumulated_score["commit"] = commit
                    if "UFO" in commit:
                        UFO_score = accumulated_score["score"]
                        continue
                    mesa_commit = None
                    try:
                        mesa_commit = mesa_repo.commit(commit.split("=")[1])
                    except:
                        print "WARN: commit not found: " + commit.split("=")[1]
                        continue
                    date = mesa_commit.committed_date
                    accumulated_score["date"] = date
                    pscores[commit] = accumulated_score
                    scores_by_date[date] = accumulated_score
                dates = scores_by_date.keys()
                dates.sort()
                platform[platform_name] = {"mesa": [scores_by_date[d] for d in dates]}
                if UFO_score:
                    platform[platform_name]["UFO"] = UFO_score

        with open("/mnt/jenkins/results/mesa_master/scores.json", "w") as of:
            json.dump(all_scores, fp=of)
Example #18
 def __init__(self):
     self.src_dir = bs.ProjectMap().source_root() + "/repos/mesa"
Example #19
def main():
    # Write the PID file
    with open('/var/run/fetch_mesa_mirrors.pid', 'w') as f:
        f.write(str(os.getpid()))

    signal.signal(signal.SIGALRM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler_quit)
    signal.signal(signal.SIGTERM, signal_handler_quit)

    # running a service through intel's proxy requires some
    # annoying settings.
    os.environ["GIT_PYTHON_GIT_EXECUTABLE"] = "/usr/local/bin/git"
    # without this, git-remote-https spins at 100%
    os.environ["http_proxy"] = "http://proxy.jf.intel.com:911/"
    os.environ["https_proxy"] = "http://proxy.jf.intel.com:911/"

    try:
        bs.ProjectMap()
    except:
        sys.argv[0] = "/var/lib/git/mesa_jenkins/foo.py"

    pm = bs.ProjectMap()
    spec_file = pm.source_root() + "/build_specification.xml"
    new_spec_hash = None

    while True:
        orig_spec_hash = file_checksum(spec_file)
        if new_spec_hash is not None:
            print("Build Specification updated")
        new_spec_hash = file_checksum(spec_file)

        while new_spec_hash == orig_spec_hash:
            buildspec = bs.ProjectMap().build_spec()

            repo_dir = "/var/lib/git/"

            # build up a list of git repo objects for all known repos.  If the
            # origin or the remotes are not already cloned, clone them.
            repos = []
            repo_tags = buildspec.find("repos")
            for tag in repo_tags:
                url = tag.attrib["repo"]
                project = tag.tag
                origin_dir = repo_dir + project + "/origin"
                if not os.path.exists(origin_dir):
                    robust_clone(url, origin_dir)
                    bs.run_batch_command(["touch", origin_dir + "/git-daemon-export-ok"])
                repos.append(git.Repo(origin_dir))
                for a_remote in tag.findall("remote"):
                    remote_dir = repo_dir + project + "/" + a_remote.attrib["name"]
                    if not os.path.exists(remote_dir):
                        robust_clone(a_remote.attrib["repo"], remote_dir)
                        bs.run_batch_command(["touch", remote_dir + "/git-daemon-export-ok"])
                    repos.append(git.Repo(remote_dir))

            for repo in repos:
                try:
                    signal.alarm(300)   # 5 minutes
                    repo.git.fetch()
                    signal.alarm(0)
                except git.GitCommandError as e:
                    print("error fetching, ignoring: " + str(e), file=sys.stderr)
                    signal.alarm(0)
                except AssertionError as e:
                    print("assertion while fetching: " + str(e), file=sys.stderr)
                    signal.alarm(0)
                except TimeoutException as e:
                    print (str(e), file=sys.stderr)
                    signal.alarm(0)
            # pause a bit before fetching the next round
            time.sleep(20)
            robust_update()
            new_spec_hash = file_checksum(spec_file)
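
This script references helpers that are defined elsewhere in it (signal_handler, TimeoutException, file_checksum). A minimal, hypothetical sketch of the SIGALRM-based fetch timeout pattern it appears to rely on, using those assumed names, might look like:

import hashlib
import signal


class TimeoutException(Exception):
    # Raised by the alarm handler when a fetch exceeds its time budget.
    pass


def signal_handler(signum, frame):
    # Intended as the SIGALRM handler installed in main(); it fires only if
    # signal.alarm() is not cancelled before the interval elapses.
    raise TimeoutException("timed out while fetching remote repository")


def file_checksum(path):
    # Hash the build specification so changes can be detected between loops.
    with open(path, "rb") as f:
        return hashlib.md5(f.read()).hexdigest()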
Example #20
def main():

    # reuse the options from the gasket
    o = bs.Options([sys.argv[0]])
    description = "builds a component on jenkins"
    parser = argparse.ArgumentParser(description=description,
                                     parents=[o._parser],
                                     conflict_handler="resolve")

    parser.add_argument('--branch', type=str, default="mesa_master",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")

    parser.add_argument('--revision',
                        type=str,
                        default="",
                        help="specific set of revisions to build.")

    parser.add_argument('--test', type=str, default=None,
                        help="Name of test to execute.  Arch/hardware suffix "\
                        "will override those options")

    args = parser.parse_args()
    branch = args.branch
    revision = args.revision
    test = args.test

    # some build_local params are not handled by the Options, which is
    # used by other modules.  This code strips out incompatible args
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["branch"]
    del vdict["revision"]
    del vdict["test"]

    # override hardware/arch with suffix if available
    if not test:
        print "ERROR: --test argument required"
        sys.exit(-1)

    test_suffix = test.split(".")[-1]
    if test_suffix[-3:] in ["m32", "m64"]:
        vdict["arch"] = test_suffix[-3:]
        vdict["hardware"] = test_suffix[:-3]
    else:
        if vdict["hardware"] == "builder":
            # can't run tests on a builder
            vdict["hardware"] = "bdwgt2"
        # set the suffix in the way that piglit-test expects, eg "ilkm32"
        test = test + "." + vdict["hardware"] + vdict["arch"]

    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    # check out the branch, refined by any manually-specified revisions
    bspec = bs.BuildSpecification()
    bspec.checkout(branch)
    if (revision):
        revspec = bs.RevisionSpecification.from_cmd_line_param(
            revision.split())
        revspec.checkout()

    revspec = bs.RevisionSpecification()
    print "Building revision: " + revspec.to_cmd_line_param()

    # create a result_path that is unique for this set of builds
    spec_xml = bs.ProjectMap().build_spec()
    results_dir = spec_xml.find("build_master").attrib["results_dir"]
    result_path = "/".join([
        results_dir, branch,
        revspec.to_cmd_line_param().replace(" ", "_"), "single_test"
    ])
    o.result_path = result_path

    # allow re-execution of tests (if different test was specified)
    bs.rmtree(result_path + "/test")

    depGraph = bs.DependencyGraph("piglit-test", o)
    bi = bs.ProjectInvoke(project="piglit-test", options=o)

    # remove the test build, because we want to build it directly
    depGraph.build_complete(bi)
    bi.set_info("status", "single-test-rebuild")

    jen = bs.Jenkins(result_path=result_path, revspec=revspec)
    jen.build_all(depGraph)
    jen.build(bi, extra_arg="--piglit_test=" + test)
    jen.wait_for_build()
    time.sleep(10)

    pm = bs.ProjectMap()
    out_test_dir = pm.output_dir()
    if os.path.exists(out_test_dir):
        bs.rmtree(out_test_dir)
    os.makedirs(out_test_dir)
    collate_tests(result_path, out_test_dir)
Example #21
    def test(self):
        pm = bs.ProjectMap()
        build_root = pm.build_root()
        global_opts = bs.Options()
        if global_opts.arch == "m64":
            icd_name = "intel_icd.x86_64.json"
        elif global_opts.arch == "m32":
            icd_name = "intel_icd.i686.json"
        env = {
            "LD_LIBRARY_PATH": build_root + "/lib",
            "VK_ICD_FILENAMES": build_root + "/share/vulkan/icd.d/" + icd_name,
            "ANV_ABORT_ON_DEVICE_LOSS": "true"
        }
        o = bs.Options()
        o.update_env(env)
        br = bs.ProjectMap().build_root()
        out_dir = br + "/../test"
        if not path.exists(out_dir):
            os.makedirs(out_dir)
        out_xml = out_dir + "/piglit-crucible_" + o.hardware + "_" + o.arch + ".xml"
        include_tests = []
        if o.retest_path:
            include_tests = bs.TestLister(
                o.retest_path + "/test/").RetestIncludes("crucible-test")

        # flaky
        excludes = ["!func.query.timestamp", "!func.ssbo.interleve"]
        parallelism = []

        if "hsw" in o.hardware:
            # issue 4
            excludes += [
                "!func.copy.copy-buffer.large",
                "!func.interleaved-cmd-buffers.end1*",
                "!func.miptree.d32-sfloat.aspect-depth.view*",
                "!func.miptree.r8g8b8a8-unorm.aspect-color.view*",
                "!func.miptree.s8-uint.aspect-stencil*",
                "!func.renderpass.clear.color08", "!func.ssbo.interleve"
            ]
        if "ivb" in o.hardware:
            # issue 5
            excludes += [
                "!func.depthstencil*",
                "!func.miptree.r8g8b8a8-unorm.aspect-color.view*",
                "!func.miptree.s8-uint.aspect-stencil*",
                "!func.miptree.d32-sfloat.aspect-depth.view*",
                "!stress.lots-of-surface-state.fs.static"
            ]
            parallelism = ['-j', '1']

        if "byt" in o.hardware:
            # issue 6
            excludes += [
                "!func.miptree.d32-sfloat.aspect-depth.view-3d.levels0*",
                "!func.depthstencil*", "!func.miptree.s8-uint.aspect-stencil*",
                "!stress.lots-of-surface-state.fs.static"
            ]
            parallelism = ['-j', '1']

        if "bsw" in o.hardware:
            excludes += ["!func.event.cmd_buffer"]  # intermittent fail/crash

        if "bxt" in o.hardware:
            excludes += [
                "!func.miptree.s8-uint.aspect-stencil*",
                "!stress.lots-of-surface-state.fs.static"
            ]

        bs.run_batch_command([
            br + "/bin/crucible", "run", "--fork", "--log-pids",
            "--junit-xml=" + out_xml
        ] + parallelism + include_tests + excludes,
                             env=env,
                             expected_return_code=None)
        post_process_results(out_xml)
        bs.run_batch_command(["cp", "-a", "-n", out_dir, pm.source_root()])

        bs.check_gpu_hang()
        bs.Export().export_tests()
Example #22
def main():
    parser = argparse.ArgumentParser(description='Build projects locally.')

    # TODO: provide a pull action to update the repos
    parser.add_argument('--action', type=str, default=["build"],
                        choices=CsvChoice('fetch', 'build', 'clean', 'test'),
                        action=CsvAction,
                        help="Action to recurse with. 'build', 'clean' "\
                        "or 'test'. (default: %(default)s)")

    parser.add_argument('--project',
                        dest='project',
                        type=str,
                        default="mesa",
                        help='project to build. (default: %(default)s)')
    parser.add_argument('--arch',
                        dest='arch',
                        type=str,
                        default='m64',
                        choices=['m64', 'm32'],
                        help='arch to build. (default: %(default)s)')
    parser.add_argument('--config',
                        type=str,
                        default="release",
                        choices=['release', 'debug'],
                        help="Release or Debug build. (default: %(default)s)")

    parser.add_argument('--type', type=str, default="developer",
                        choices=['developer', 'percheckin',
                                 'daily', 'release'],
                        help="category of tests to run. "\
                        "(default: %(default)s)")

    parser.add_argument('--branch', type=str, default="none",
                        help="Branch specification to build.  "\
                        "See build_specification.xml/branches")
    parser.add_argument(
        '--env',
        type=str,
        default="",
        help="If specified, overrides environment variable settings"
        "EG: 'LIBGL_DEBUG=1 INTEL_DEBUG=perf'")
    parser.add_argument('--hardware',
                        type=str,
                        default='builder',
                        help="The hardware to be targeted for test "
                        "('builder', 'snbgt1', 'ivb', 'hsw', 'bdw'). "
                        "(default: %(default)s)")

    args = parser.parse_args()
    project = args.project

    if "fetch" in args.action:
        # fetch not supported by build.py scripts, which will parse argv
        bs.RepoSet().fetch()
    branch = args.branch
    if (branch != "none"):
        bs.BuildSpecification().checkout(branch)

    # some build_local params are not handled by the Options, which is
    # used by other modules
    o = bs.Options(["bogus"])
    vdict = vars(args)
    del vdict["project"]
    del vdict["branch"]
    if "fetch" in vdict["action"]:
        vdict["action"].remove("fetch")
    o.__dict__.update(vdict)
    sys.argv = ["bogus"] + o.to_list()

    if "clean" in args.action:
        bs.rmtree(bs.ProjectMap().build_root())

    graph = bs.DependencyGraph(project, o)
    ready = graph.ready_builds()
    pm = bs.ProjectMap()
    while ready:
        for bi in ready:
            graph.build_complete(bi)
            proj_build_dir = pm.project_build_dir(bi.project)
            script = proj_build_dir + "/build.py"
            if os.path.exists(script):
                bs.run_batch_command([sys.executable, script] + o.to_list())
        ready = graph.ready_builds()
Example #23
    def test(self):
        o = bs.Options()
        pm = bs.ProjectMap()

        if not self.version:
            self.version = bs.mesa_version()

        conf_file = bs.get_conf_file(o.hardware, o.arch, "cts-test")

        # invoke piglit
        self.env["PIGLIT_CTS_GL_BIN"] = self.build_root + "/bin/gl/cts/glcts"
        out_dir = self.build_root + "/test/" + o.hardware

        include_tests = []
        if o.retest_path:
            testlist = bs.TestLister(o.retest_path + "/test/")
            include_tests = testlist.RetestIncludes(project="cts-test")
            if not include_tests:
                # we were supposed to retest failures, but there were none
                return

        # this test is flaky in glcts.  It passes enough for
        # submission, but as per Ken, no developer will want to look
        # at it to figure out why the test is flaky.
        extra_excludes = [
            "packed_depth_stencil.packed_depth_stencil_copyteximage"
        ]

        suite_names = []
        # disable gl cts on stable versions of mesa, which do not
        # support the feature set.
        if "13.0" in self.version:
            return
        if "17.0" in self.version and "glk" in o.hardware:
            # glk not supported by stable mesa
            return
        suite_names.append("cts_gl")
        # as per Ian, only run gl45
        extra_excludes += [
            "gl30-cts", "gl31-cts", "gl32-cts", "gl33-cts", "gl40-cts",
            "gl41-cts", "gl42-cts", "gl43-cts", "gl44-cts"
        ]
        if "hsw" in o.hardware:
            # flaky cts_gl tests
            extra_excludes += [
                "shader_image_load_store.multiple-uniforms",
                "shader_image_size.basic-nonms-fs",
                "shader_image_size.advanced-nonms-fs",
                "texture_gather.gather-tesselation-shader",
                "vertex_attrib_binding.basic-inputl-case1",
                "gpu_shader_fp64.named_uniform_blocks",
                # gpu hang
                "gl45-cts.tessellation_shader.vertex_spacing",
                "gl45-cts.tessellation_shader.vertex_ordering",
                "gl45-cts.tessellation_shader.tessellation_control_to_tessellation_evaluation.gl_maxpatchvertices_position_pointsize"
            ]

        exclude_tests = []
        for a in extra_excludes:
            exclude_tests += ["--exclude-tests", a]
        if not suite_names:
            # for master, on old hardware, this component will not
            # test anything.  The gles tests are instead targeted with
            # the gles32 cts, in the glescts-test component
            return
        cmd = [self.build_root + "/bin/piglit",
               "run",
               #"-p", "gbm",
               "-b", "junit",
               "--config", conf_file,
               "-c",
               "--exclude-tests", "esext-cts",
               "--junit_suffix", "." + o.hardware + o.arch] + \
               exclude_tests + \
               include_tests + suite_names + [out_dir]

        bs.run_batch_command(cmd,
                             env=self.env,
                             expected_return_code=None,
                             streamedOutput=True)
        single_out_dir = self.build_root + "/../test"
        if not os.path.exists(single_out_dir):
            os.makedirs(single_out_dir)

        if os.path.exists(out_dir + "/results.xml"):
            # Uniquely name all test files in one directory, for
            # jenkins
            filename_components = ["/piglit-cts", o.hardware, o.arch]
            if o.shard != "0":
                # only put the shard suffix on for non-zero shards.
                # Having _0 suffix interferes with bisection.
                filename_components.append(o.shard)

            revisions = bs.RepoSet().branch_missing_revisions()
            print "INFO: filtering tests from " + out_dir + "/results.xml"
            self.filter_tests(
                revisions, out_dir + "/results.xml",
                single_out_dir + "_".join(filename_components) + ".xml")

            # create a copy of the test xml in the source root, where
            # jenkins can access it.
            cmd = [
                "cp", "-a", "-n", self.build_root + "/../test",
                pm.source_root()
            ]
            bs.run_batch_command(cmd)
        else:
            print "ERROR: no results at " + out_dir + "/results.xml"

        bs.check_gpu_hang()
        bs.Export().export_tests()
Example #24
def main():
    pm = bs.ProjectMap()
    sd = pm.project_source_dir(pm.current_project())
    if not os.path.exists(
            os.path.join(sd, 'src/mesa/drivers/osmesa/meson.build')):
        return 0

    save_dir = os.getcwd()

    global_opts = bs.Options()

    options = [
        '-Dbuild-tests=true',
        '-Dgallium-drivers=r300,r600,radeonsi,nouveau,swrast,swr,freedreno,vc4,pl111,etnaviv,imx,svga,virgl',
        '-Dgallium-vdpau=true',
        '-Dgallium-xvmc=true',
        '-Dgallium-xa=true',
        '-Dgallium-va=true',
        '-Dgallium-nine=true',
        '-Dgallium-opencl=standalone',
    ]

    # the knob for omx changed during the 18.1 cycle: if tizonia support is
    # present we need to use bellagio, otherwise we need true.
    with open(os.path.join(sd, 'meson_options.txt')) as f:
        for l in f:
            if 'tizonia' in l:
                options.append('-Dgallium-omx=bellagio')
                break
        else:
            options.append('-Dgallium-omx=true')
    if global_opts.config != 'debug':
        options.extend(['-Dbuildtype=release', '-Db_ndebug=true'])
    b = bs.builders.MesonBuilder(extra_definitions=options, install=False)

    b.tests += [
        # TODO: These need runtime discovery, probably using `find` or to point
        # at the DSOs in the install directory
        #
        #'es1-ABI-check',
        #'es2-ABI-check',
        #'gbm-symbols-check',
        #'wayland-egl-symbols-check',
        #'wayland-egl-abi-check',
        #'egl-symbols-check',
        #'egl-entrypoint-check',
        'anv_block_pool_no_free',
        'anv_state_pool',
        'anv_state_pool_free_list_only',
        'anv_state_pool_no_free',
        'blob_test',
        'cache_test',
        'clear',
        'collision',
        'delete_and_lookup',
        'delete_management',
        'destroy_callback',
        'eu_compact',
        'glx-dispatch-index-check',
        'insert_and_lookup',
        'insert_many',
        'isl_surf_get_image_offset',
        'lp_test_arit',
        'lp_test_blend',
        'lp_test_conv',
        'lp_test_format',
        'lp_test_printf',
        'mesa-sha1',
        'null_destroy',
        'random_entry',
        'remove_null',
        'replacement',
        'roundeven',
        'u_atomic',
    ]

    b.gtests += [
        'eu_validate',
        'fs_cmod_propagation',
        'fs_copy_propagation',
        'fs_saturate_propagation',
        'general_ir_test',
        'glx-test',
        'main-test',
        'nir_control_flow',
        'sampler_types_test',
        'shared-glapi-test',
        'string_buffer',
        'uniform_initializer_test',
        'vec4_cmod_propagation',
        'vec4_copy_propagation',
        'vec4_register_coalesce',
        'vf_float_conversions',
    ]

    try:
        bs.build(b)
    except subprocess.CalledProcessError as e:
        # build may have taken us to a place where ProjectMap doesn't work
        os.chdir(save_dir)
        bs.Export().create_failing_test("mesa-meson-buildtest", str(e))
Example #25
 def __init__(self):
     self.pm = bs.ProjectMap()
Example #26
 def __init__(self):
     self._pm = bs.ProjectMap()
     self._options = bs.Options()
     self._src_dir = self._pm.project_source_dir()
     self._build_dir = self._src_dir + "/build_" + self._options.arch
     self._build_root = self._pm.build_root()
Example #27
 def clean(self):
     pm = bs.ProjectMap()
     bs.git_clean(pm.project_source_dir("mesa"))
     bs.rmtree(self._build_dir)
Example #28
def collate_tests(result_path, out_test_dir, make_tar=False):
    src_test_dir = result_path + "/test"
    print "collecting tests from " + src_test_dir
    i = 0
    while i < 10 and not os.path.exists(src_test_dir):
        i += 1
        print "sleeping, waiting for test directory: " + src_test_dir
        time.sleep(10)
    if not os.path.exists(src_test_dir):
        print "no test directory found: " + src_test_dir
        return

    cmd = ["cp", "-a", "-n", src_test_dir, out_test_dir]
    bs.run_batch_command(cmd)

    # Junit files must have a recent time stamp or else Jenkins will
    # not parse them.
    for a_file in os.listdir(out_test_dir + "/test"):
        os.utime(out_test_dir + "/test/" + a_file, None)

    revisions_path = bs.ProjectMap().source_root() + "/revisions.txt"
    with open(revisions_path, "w") as revs:
        revs.write(create_revision_table())

    if not make_tar:
        return

    # else generate a results.tgz that can be used with piglit summary
    save_dir = os.getcwd()
    os.chdir("/tmp/")
    tar = tarfile.open(out_test_dir + "/test/results.tar", "w:")
    shards = {}
    for a_file in os.listdir(out_test_dir + "/test"):
        if "piglit" not in a_file:
            continue
        if ":" in a_file:
            shard_base_name = "_".join(a_file.split("_")[:-1])
            if not shards.has_key(shard_base_name):
                shards[shard_base_name] = []
            shards[shard_base_name].append(a_file)
            continue
        t = et.parse(out_test_dir + "/test/" + a_file)
        r = t.getroot()
        strip_passes(r)
        t.write(a_file)
        # occasionally, t.write() finishes, but the file is not available
        t = None
        for _ in range(0, 5):
            try:
                tar.add(a_file)
                break
            except:
                print "WARN: failed to add file: " + a_file
                time.sleep(10)
        os.unlink(a_file)
    for (shard, files) in shards.items():
        t = et.parse(out_test_dir + "/test/" + files[0])
        r = t.getroot()
        strip_passes(r)
        suite = r.find("testsuite")
        for a_shard_file in files[1:]:
            st = et.parse(out_test_dir + "/test/" + a_shard_file)
            sr = st.getroot()
            strip_passes(sr)
            for a_suite in sr.findall("testsuite"):
                for a_test in a_suite.findall("testcase"):
                    suite.append(a_test)
        shard_file = shard + ".xml"
        t.write(shard_file)
        # occasionally, t.write() finishes, but the file is not available
        t = None
        for _ in range(0, 5):
            try:
                tar.add(shard_file)
                break
            except:
                print "WARN: failed to add file: " + shard_file
                time.sleep(10)
        os.unlink(shard_file)

    if os.path.exists(out_test_dir + "/test/logs"):
        # use a separate variable here so save_dir still holds the caller's
        # original working directory for the final os.chdir() below
        logs_cwd = os.getcwd()
        os.chdir(out_test_dir + "/test")
        tar.add("logs")
        os.chdir(logs_cwd)

    tar.close()
    bs.run_batch_command(["xz", "-9", out_test_dir + "/test/results.tar"])
    os.chdir(save_dir)

    tl = bs.TestLister(out_test_dir + "/test")
    tests = tl.Tests()
    if tests:
        with open("test_summary.txt", "w") as fh:
            for atest in tests:
                atest.PrettyPrint(fh)
            fh.flush()
            # end users report that sometimes the summary is empty
            os.fsync(fh.fileno())
            fh.close()
        time.sleep(10)

        tc = bs.TestLister(out_test_dir + "/test", include_passes=True)
        all_tests = len(tc.Tests())

        failed_tests = 0
        tf = bs.TestLister(out_test_dir + "/test")
        failed_tests = len(tf.Tests())
        passed_tests = all_tests - failed_tests
        percent = (passed_tests * 100.0) / all_tests
        percentage = format(percent, '.2f')
        if all_tests:
            with open("test_summary.txt", "a") as fh:
                fh.write("""

        Tests passed: {} / {} ({}%)
                """.format(passed_tests, all_tests, percentage))
Example #29
#!/usr/bin/python

import os
import sys
import urllib2
import ast
import time
sys.path.append(
    os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), ".."))
import build_support as bs
server = bs.ProjectMap().build_spec().find("build_master").attrib["host"]

url = "http://" + server + "/computer/api/python"
f = urllib2.urlopen(url)
host_dict = ast.literal_eval(f.read())


def is_excluded():
    if ("builder" in host or host == "master" or "win" in host):
        return True


for a_host in host_dict['computer']:
    host = a_host['displayName']
    if is_excluded():
        continue
    url = "http://" + server + "/job/Leeroy/buildWithParameters?token=noauth&label=" + host + "&project=clean-workspace"
    urllib2.urlopen(url)
    time.sleep(5)
Example #30
 def __init__(self):
     self.pm = bs.ProjectMap()
     self.o = bs.Options()
     self.env = {}
     self.version = None