Example #1
def test_DataPath_different_working_dir():
  """Test that DataPath is not affected by current working dir."""
  p = bazelutil.DataPath('phd/labm8/data/test/hello_world')
  with fs.chdir('/tmp'):
    assert bazelutil.DataPath('phd/labm8/data/test/hello_world') == p
  with tempfile.TemporaryDirectory() as d:
    with fs.chdir(d):
      assert bazelutil.DataPath('phd/labm8/data/test/hello_world') == p
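A note on the pattern used throughout these examples: DataPath returns a path-like object rooted in the Bazel runfiles tree, so callers typically wrap it in str() before passing it to a subprocess or storing it in a proto string field. A minimal sketch, reusing the hello_world data file from this example:

from labm8 import bazelutil

# Resolve the runfile once; as the test above shows, the result does not
# depend on the current working directory.
hello_world = bazelutil.DataPath('phd/labm8/data/test/hello_world')
# Convert with str() when an API expects a plain string path.
print(str(hello_world))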
Example #2
def test_DataPath_path_not_found():
  """Test that FileNotFoundError is raised if the file is not found."""
  with pytest.raises(FileNotFoundError) as e_info:
    bazelutil.DataPath('')
  assert f"No such file or directory: ''" in str(e_info)

  with pytest.raises(FileNotFoundError) as e_info:
    bazelutil.DataPath('/not/a/real/path')
  assert f"No such file or directory: '/not/a/real/path'" in str(e_info)
Example #3
def test_config_is_valid():
    """Test that config proto is valid."""
    with tempfile.TemporaryDirectory() as d:
        config = pbutil.FromFile(
            bazelutil.DataPath(
                'phd/deeplearning/clgen/tests/data/c99/config.pbtxt'),
            clgen_pb2.Instance())
        # Change the working directory and corpus path to our bazel run dir.
        config.working_dir = d
        config.model.corpus.local_directory = str(
            bazelutil.DataPath('phd/deeplearning/clgen/tests/data/c99/src/'))
        clgen.Instance(config)
Example #4
class _SingleSource_Benchmarks_McGill(object):
    """The McGill benchmarks."""
    queens = benchmarks_pb2.Benchmark(
        name='queens',
        usage='queens [-ac] <n>',
        binary=str(
            bazelutil.DataPath(
                'llvm_test_suite/SingleSource_Benchmarks_McGill_queens')),
        srcs=[
            str(
                bazelutil.DataPath(
                    'llvm_test_suite/SingleSource/Benchmarks/McGill/queens.c')
            ),
        ],
    )
Example #5
def test_DataPath_missing_data_dep():
  """FileNotFoundError is raised if the file exists is not in target data."""
  # The file //labm8/data/test/diabetes.csv exists, but is not a data
  # dependency of this test target, so is not found.
  with pytest.raises(FileNotFoundError) as e_info:
    bazelutil.DataPath('phd/labm8/data/test/diabetes.csv')
  assert ("No such file or directory: "
          "'phd/labm8/data/test/diabetes.csv'") in str(e_info)
Example #6
def ProcessBudgetJsonFile(path: pathlib.Path) -> me_pb2.SeriesCollection:
    if not path.is_file():
        raise FileNotFoundError(str(path))
    try:
        return pbutil.RunProcessMessageInPlace([
            str(
                bazelutil.DataPath(
                    'phd/datasets/me_db/providers/ynab/json_budget_worker'))
        ], me_pb2.SeriesCollection(source=str(path)))
    except subprocess.CalledProcessError as e:
        raise importers.ImporterError('LifeCycle', path, str(e)) from e
Example #7
def EnumerateLanguageInstanceConfigs(
    language: typing.Dict[str, typing.List[str]]
) -> typing.List[clgen_pb2.Instance]:
    """Enumerate the options for a language."""
    configs = []
    for corpus, model, sampler in itertools.product(language['corpuses'],
                                                    EnumerateModels(),
                                                    language['samplers']):
        instance_config = clgen_pb2.Instance()
        instance_config.working_dir = FLAGS.working_dir
        instance_config.model.CopyFrom(model)
        instance_config.model.corpus.CopyFrom(
            pbutil.FromFile(
                bazelutil.DataPath(
                    f'phd/experimental/deeplearning/polyglot/corpuses/{corpus}.pbtxt'
                ), corpus_pb2.Corpus()))
        instance_config.sampler.CopyFrom(
            pbutil.FromFile(
                bazelutil.DataPath(
                    f'phd/experimental/deeplearning/polyglot/samplers/{sampler}.pbtxt'
                ), sampler_pb2.Sampler()))
        configs.append(instance_config)
    return configs
Example #8
    def __init__(self, data_path: str):
        """Constructor.

    Args:
      data_path: The path to the data, including the name of the workspace.

    Raises:
      FileNotFoundError: If path is not a file.
    """
        super(BazelPy3Image, self).__init__()
        self.data_path = data_path
        self.tar_path = bazelutil.DataPath(f'phd/{data_path}.tar')

        components = self.data_path.split('/')
        self.image_name = f'bazel/{"/".join(components[:-1])}:{components[-1]}'
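A hypothetical usage sketch for the constructor above; the target path is a placeholder, and the DataPath lookup would only succeed if a matching phd/<path>.tar file were declared as a data dependency:

# 'tools/docker/my_image' is a hypothetical workspace-relative target.
image = BazelPy3Image('tools/docker/my_image')
# The image name is derived from the path: the package becomes the repository
# and the final component becomes the tag.
assert image.image_name == 'bazel/tools/docker:my_image'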
Example #9
def ExpandConfigPath(path: str, path_prefix: str = None) -> pathlib.Path:
    """Resolve an absolute path from a config proto string field.

  This performs shell-style expansion of $VARS, and prefixes the
  --clgen_local_path_prefix flag value, if it is set.

  Args:
    path: The string value as it appears in the proto.
    path_prefix: An optional string to prepend to the resolved path.

  Returns:
    An absolute path.
  """
    # Set a useful variable for expansion.
    if 'HOME' not in os.environ:
        os.environ['HOME'] = str(pathlib.Path('~').expanduser())
    os.environ['BAZEL_RUNFILES'] = str(bazelutil.DataPath('.'))
    return pathlib.Path(os.path.expandvars((path_prefix or '') +
                                           path)).expanduser().absolute()
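A small usage sketch of the expansion rules above, assuming the code runs inside a Bazel target, HOME is /home/me, and --clgen_local_path_prefix is unset:

print(ExpandConfigPath('$HOME/corpus'))      # -> /home/me/corpus
print(ExpandConfigPath('~/corpus'))          # -> /home/me/corpus
print(ExpandConfigPath('corpus', '/data/'))  # -> /data/corpus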
Example #10
def MapNativeProcessingBinaries(
    binaries: typing.List[str],
    input_protos: typing.List[pbutil.ProtocolBuffer],
    output_proto_classes: typing.List[typing.Type],
    pool: typing.Optional[multiprocessing.Pool] = None,
    num_processes: typing.Optional[int] = None) -> typing.Iterator[_MapWorker]:
  """Run a protocol buffer processing binary over a set of inputs.

  Args:
    binaries: A list of paths of binaries to execute, as provided to
      bazelutil.DataPath(). One binary per input proto.
    input_protos: A list of input protos, one per binary.
    output_proto_classes: A list of output proto classes, one per input proto.
    pool: The multiprocessing pool to use.
    num_processes: The number of processes for the multiprocessing pool.

  Returns:
    A generator of _MapWorker instances. The order is random.
  """
  if len(binaries) != len(input_protos):
    raise ValueError('Number of binaries does not equal number of input protos')

  cmds = [[bazelutil.DataPath(b)] for b in binaries]

  # Read all inputs to a list. We need the inputs in a list so that we can
  # map an input's position in the list to a _MapWorker.id.
  input_protos = list(input_protos)
  output_proto_classes = list(output_proto_classes)

  # Create the multiprocessing pool to use, if not provided.
  pool = pool or multiprocessing.Pool(processes=num_processes)

  map_worker_iterator = (
    _MapWorker(id, cmd, input_proto) for
    id, (cmd, input_proto) in enumerate(zip(cmds, input_protos)))

  for map_worker in pool.imap_unordered(
      _RunNativeProtoProcessingWorker, map_worker_iterator):
    map_worker.SetProtos(
        input_protos[map_worker.id], output_proto_classes[map_worker.id])
    yield map_worker
Example #11
def MapNativeProtoProcessingBinary(
    binary_data_path: str, input_protos: typing.List[pbutil.ProtocolBuffer],
    output_proto_class: typing.Type,
    binary_args: typing.Optional[typing.List[str]] = None,
    pool: typing.Optional[multiprocessing.Pool] = None,
    num_processes: typing.Optional[int] = None) -> typing.Iterator[_MapWorker]:
  """Run a protocol buffer processing binary over a set of inputs.

  Args:
    binary_data_path: The path of the binary to execute, as provided to
      bazelutil.DataPath().
    input_protos: An iterable list of input protos.
    output_proto_class: The proto class of the output.
    binary_args: An optional list of additional arguments to pass to binaries.
    pool: The multiprocessing pool to use.
    num_processes: The number of processes for the multiprocessing pool.

  Returns:
    A generator of _MapWorker instances. The order is random.
  """
  binary_path = bazelutil.DataPath(binary_data_path)
  binary_args = binary_args or []
  cmd = [str(binary_path)] + binary_args

  # Read all inputs to a list. We need the inputs in a list so that we can
  # map an input's position in the list to a _MapWorker.id.
  input_protos = list(input_protos)

  # Create the multiprocessing pool to use, if not provided.
  pool = pool or multiprocessing.Pool(processes=num_processes)

  map_worker_iterator = (
    _MapWorker(i, cmd, input_proto) for
    i, input_proto in enumerate(input_protos))

  for map_worker in pool.imap_unordered(
      _RunNativeProtoProcessingWorker, map_worker_iterator):
    map_worker.SetProtos(input_protos[map_worker.id], output_proto_class)
    yield map_worker
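A hypothetical usage sketch; the worker data path and proto messages below are placeholders rather than real targets from the repository:

workers = MapNativeProtoProcessingBinary(
    'phd/some/package/proto_worker',   # hypothetical data path
    input_protos=input_protos,         # a list of protocol buffer messages
    output_proto_class=my_pb2.Output)  # hypothetical output proto class
for worker in workers:                 # results are yielded in arbitrary order
  print('worker finished for input', worker.id)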
Example #12
def ProcessXmlFile(path: pathlib.Path) -> me_pb2.SeriesCollection:
    """Process a HealthKit XML data export.

  Args:
    path: Path of the XML file.

  Returns:
    A SeriesCollection message.

  Raises:
    FileNotFoundError: If the requested file is not found.
  """
    if not path.is_file():
        raise FileNotFoundError(str(path))
    try:
        return pbutil.RunProcessMessageInPlace([
            str(
                bazelutil.DataPath(
                    'phd/datasets/me_db/providers/health_kit/xml_export_worker'
                ))
        ], me_pb2.SeriesCollection(source=str(path)))
    except subprocess.CalledProcessError as e:
        raise importers.ImporterError('HealthKit', path, str(e)) from e
Example #13
def ProcessCsvFile(path: pathlib.Path) -> me_pb2.SeriesCollection:
    """Process a LifeCycle CSV data export.

  Args:
    path: Path of the CSV file.

  Returns:
    A SeriesCollection message.

  Raises:
    FileNotFoundError: If the requested file is not found.
  """
    if not path.is_file():
        raise FileNotFoundError(str(path))
    try:
        return pbutil.RunProcessMessageInPlace([
            str(
                bazelutil.DataPath(
                    'phd/datasets/me_db/providers/life_cycle/lc_export_csv_worker'
                ))
        ], me_pb2.SeriesCollection(source=str(path)))
    except subprocess.CalledProcessError as e:
        raise importers.ImporterError('LifeCycle', path, str(e)) from e
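Examples #6, #12 and #13 share the same structure, which could be factored into a helper; a sketch, with the worker's data path passed as an argument:

def _RunNativeWorker(worker_data_path: str,
                     path: pathlib.Path) -> me_pb2.SeriesCollection:
  """Run a native worker binary (a declared data dependency) on an export file."""
  if not path.is_file():
    raise FileNotFoundError(str(path))
  return pbutil.RunProcessMessageInPlace(
      [str(bazelutil.DataPath(worker_data_path))],
      me_pb2.SeriesCollection(source=str(path)))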
Example #14
from compilers.llvm import llvm
from labm8 import bazelutil
from labm8 import system


FLAGS = flags.FLAGS

flags.DEFINE_integer(
    'llvm_as_timeout_seconds', 60,
    'The maximum number of seconds to allow process to run.')

_LLVM_REPO = 'llvm_linux' if system.is_linux() else 'llvm_mac'

# Path to llvm-as binary.
LLVM_AS = bazelutil.DataPath(f'{_LLVM_REPO}/bin/llvm-as')


class LlvmAsError(llvm.LlvmError):
  """An error from llvm-as."""
  pass


def Exec(args: typing.List[str],
         stdin: typing.Optional[str] = None,
         timeout_seconds: int = 60) -> subprocess.Popen:
  """Run llvm-as.

  Args:
    args: A list of arguments to pass to binary.
    stdin: Optional input string to pass to binary.
Example #15
"""Python entry point to the clang_rewriter binary."""
import os
import subprocess
import tempfile
import typing

from absl import flags
from absl import logging

from clgen import errors
from labm8 import bazelutil

FLAGS = flags.FLAGS

CLGEN_REWRITER = bazelutil.DataPath('clgen/preprocessors/clang_rewriter.cpp')
assert CLGEN_REWRITER.is_file()

# On Linux we must preload the LLVM shared libraries.
CLGEN_REWRITER_ENV = os.environ.copy()
if bazelutil.DataPath('llvm_linux', must_exist=False).is_dir():
    libclang = bazelutil.DataPath('llvm_linux/lib/libclang.so')
    liblto = bazelutil.DataPath('llvm_linux/lib/libLTO.so')
    CLGEN_REWRITER_ENV['LD_PRELOAD'] = f'{libclang}:{liblto}'


def NormalizeIdentifiers(text: str,
                         suffix: str,
                         cflags: typing.List[str],
                         timeout_seconds: int = 60) -> str:
    """Normalize identifiers in source code.
Example #16
"""Preprocessor passes for the OpenCL programming language."""
import typing

from absl import flags

from deeplearning.clgen.preprocessors import clang
from deeplearning.clgen.preprocessors import normalizer
from deeplearning.clgen.preprocessors import public
from labm8 import bazelutil

FLAGS = flags.FLAGS

LIBCLC = bazelutil.DataPath('phd/third_party/libclc/generic/include')
OPENCL_H = bazelutil.DataPath('phd/deeplearning/clgen/data/include/opencl.h')
SHIMFILE = bazelutil.DataPath(
    'phd/deeplearning/clgen/data/include/opencl-shim.h')


def GetClangArgs(use_shim: bool) -> typing.List[str]:
    """Get the arguments to pass to clang for handling OpenCL.

  Args:
    use_shim: If true, inject the shim OpenCL header.
    error_limit: The number of errors to print before aborting.

  Returns:
    A list of command line arguments to pass to Popen().
  """
    args = [
        '-I' + str(LIBCLC), '-include',
        str(OPENCL_H), '-target', 'nvptx64-nvidia-nvcl', f'-ferror-limit=1',
Example #17
import platform
import subprocess
import sys
import typing
from typing import Iterator

from gpu.clinfo.proto import clinfo_pb2
from gpu.oclgrind import oclgrind
from labm8 import bazelutil
from labm8 import pbutil

CLINFO = bazelutil.DataPath('phd/gpu/clinfo/clinfo')


class OpenCLEnvironment(object):
    def __init__(self, device: clinfo_pb2.OpenClDevice):
        self.name = device.name
        self.platform_name = device.platform_name
        self.device_name = device.device_name
        self.driver_version = device.driver_version
        self.opencl_version = device.opencl_version
        self.device_type = device.device_type
        self.platform_id = device.platform_id
        self.device_id = device.device_id
        self.opencl_opt = device.opencl_opt

    def ids(self) -> typing.Tuple[int, int]:
        """Return platform and device ID numbers.

    The ID numbers can be used to index into the list of platforms and
    devices. Note that the stability of these IDs is *not* guaranteed
Example #18
FLAGS = flags.FLAGS

# The set of standard headers available in C99.
C99_HEADERS = {
  'assert.h', 'complex.h', 'ctype.h', 'errno.h', 'fenv.h', 'float.h',
  'inttypes.h', 'iso646.h', 'limits.h', 'locale.h', 'math.h', 'setjmp.h',
  'signal.h', 'stdalign.h', 'stdarg.h', 'stdatomic.h', 'stdbool.h', 'stddef.h',
  'stdint.h', 'stdio.h', 'stdlib.h', 'stdnoreturn.h', 'string.h', 'tgmath.h',
  'threads.h', 'time.h', 'uchar.h', 'wchar.h', 'wctype.h',
}

# The set of headers in the C++ standard library.
_UNAME = 'mac' if sys.platform == 'darwin' else 'linux'
CXX_HEADERS = set(
    public.GetAllFilesRelativePaths(
        bazelutil.DataPath(f'libcxx_{_UNAME}/include/c++/v1'),
        follow_symlinks=True) +
    public.GetAllFilesRelativePaths(
        bazelutil.DataPath(f'libcxx_{_UNAME}/lib/clang/6.0.0/include'),
        follow_symlinks=True)
)


@public.dataset_preprocessor
def CxxHeaders(import_root: pathlib.Path, file_relpath: str, text: str,
               all_file_relpaths: typing.List[str]) -> typing.List[str]:
  """Inline C++ includes.

  Searches for occurrences of '#include <$file>' and attempts to resolve $file
  to a path within import_root. If successful, the include directive is
  replaced.
Example #19
  def __init__(self):
    self._src_tree_root = pathlib.Path(bazelutil.DataPath("linux_srcs"))
    self.generated_hdrs_root = pathlib.Path(
        bazelutil.DataPath("phd/datasets/linux/generated_headers"))
Example #20
import tempfile

import pytest
from absl import flags

from datasets.me_db import me_db
from labm8 import bazelutil

FLAGS = flags.FLAGS
flags.DEFINE_string(
    'integration_tests_inbox', None,
    'If set, this sets the inbox path to be used by the '
    'integration tests. This overrides the default in '
    '//datasets/me_db/integration_tests/inbox.')

TEST_INBOX_PATH = bazelutil.DataPath('phd/datasets/me_db/tests/test_inbox')


@pytest.fixture(scope='function')
def mutable_db() -> me_db.Database:
    """Returns a populated database for the scope of the function."""
    with tempfile.TemporaryDirectory(prefix='phd_') as d:
        db = me_db.Database(f'sqlite:///{d}/me.db')
        db.ImportMeasurementsFromInboxImporters(TEST_INBOX_PATH)
        yield db


@pytest.fixture(scope='session')
def db() -> me_db.Database:
    """Returns a populated database that is reused for all tests.
Example #21
import typing

from compilers.llvm import llvm
from labm8 import app
from labm8 import bazelutil
from labm8 import system

FLAGS = app.FLAGS

app.DEFINE_integer('clang_timeout_seconds', 60,
                   'The maximum number of seconds to allow process to run.')

_LLVM_REPO = 'llvm_linux' if system.is_linux() else 'llvm_mac'

# Path to clang binary.
CLANG = bazelutil.DataPath(f'{_LLVM_REPO}/bin/clang')

# Valid optimization levels.
OPTIMIZATION_LEVELS = {"-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz"}

# A structured representation of the output of clang's bisect debugging, e.g.
#     $ clang foo.c -mllvm -opt-bisect-limit=-1.
# The output is of the form:
#     BISECT: running pass (<number>) <name> on <target_type> (<target>)
#
# See ClangBisectMessageToInvocation() for the conversion.
OptPassRunInvocation = collections.namedtuple(
    'OptPassRunInvocation', ['name', 'target', 'target_type'])


class ClangException(llvm.LlvmError):
Example #22
from absl import flags
from absl import logging

from compilers.llvm import llvm
from labm8 import bazelutil
from labm8 import system

FLAGS = flags.FLAGS

flags.DEFINE_integer('opt_timeout_seconds', 60,
                     'The maximum number of seconds to allow process to run.')

_LLVM_REPO = 'llvm_linux' if system.is_linux() else 'llvm_mac'

# Path to opt binary.
OPT = bazelutil.DataPath(f'{_LLVM_REPO}/bin/opt')

# The list of LLVM opt transformation passes.
# See: https://llvm.org/docs/Passes.html#transform-passes
TRANSFORM_PASSES = {
    '-aa',
    '-aa-eval',
    '-aarch64-a57-fp-load-balancing',
    '-aarch64-ccmp',
    '-aarch64-collect-loh',
    '-aarch64-condopt',
    '-aarch64-copyelim',
    '-aarch64-dead-defs',
    '-aarch64-expand-pseudo',
    '-aarch64-fix-cortex-a53-835769-pass',
    '-aarch64-ldst-opt',
Example #23
import tempfile
import typing

from datasets.github.scrape_repos.preprocessors import extractors
from deeplearning.clgen import errors
from deeplearning.clgen.preprocessors import clang
from deeplearning.clgen.preprocessors import public
from labm8 import app
from labm8 import bazelutil

FLAGS = app.FLAGS

CLASS_NAME_RE = re.compile(r'public\s+class\s+(\w+)')

# Path to the compiled java rewriter.
JAVA_REWRITER = bazelutil.DataPath(
    'phd/deeplearning/clgen/preprocessors/JavaRewriter')


@public.clgen_preprocessor
def ClangFormat(text: str) -> str:
    """Run clang-format on a source to enforce code style.

  Args:
    text: The source code to run through clang-format.

  Returns:
    The output of clang-format.

  Raises:
    ClangFormatException: In case of an error.
    ClangTimeout: If clang-format does not complete before timeout_seconds.
Example #24
import subprocess
import sys

import pytest
from absl import app
from absl import flags
from absl import logging

import deeplearning.clgen
from deeplearning.clgen import errors
from deeplearning.clgen.preprocessors import opencl
from labm8 import bazelutil

FLAGS = flags.FLAGS

SHIMFILE = bazelutil.DataPath(
    'phd/deeplearning/clgen/data/include/opencl-shim.h')


class MockProcess(object):
    """Mock class for subprocess.Popen() return."""
    def __init__(self, returncode):
        self.returncode = returncode

    def communicate(self, *args):
        del args
        return '', ''


# GetClangArgs() tests.

Example #25
import re
import sys

from compilers.llvm import clang as clanglib
from compilers.llvm import llvm
from deeplearning.clgen import errors
from deeplearning.clgen.preprocessors import clang
from deeplearning.clgen.preprocessors import normalizer
from deeplearning.clgen.preprocessors import public
from labm8 import app
from labm8 import bazelutil

FLAGS = app.FLAGS

_UNAME = 'mac' if sys.platform == 'darwin' else 'linux'
LIBCXX_HEADERS = bazelutil.DataPath(f'libcxx_{_UNAME}/include/c++/v1')
CLANG_HEADERS = bazelutil.DataPath(f'libcxx_{_UNAME}/lib/clang/6.0.0/include')

C_COMMENT_RE = re.compile(
    r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
    re.DOTALL | re.MULTILINE)

# Flags to compile C99 files with. I've replicated the default search path,
# but substituted the sandboxed header locations in place of the defaults.
#   bazel-phd/bazel-out/*-py3-opt/bin/deeplearning/clgen/preprocessors/\
#     cxx_test.runfiles/llvm_mac/bin/clang -xc++ -E - -v
#
# Note: Since C++ is a proper superset of C99, the CXX flags work just fine
CLANG_ARGS = [
    '-xc++', '-isystem',
    str(LIBCXX_HEADERS), '-isystem', '/usr/local/include', '-isystem',
Example #26
from absl import app
from absl import flags
from absl import logging

from deeplearning.deepsmith.generators import clgen
from deeplearning.deepsmith.proto import generator_pb2
from labm8 import bazelutil
from labm8 import crypto
from labm8 import pbutil

FLAGS = flags.FLAGS

flags.DEFINE_string(
    'generator',
    str(
        bazelutil.DataPath('phd/docs/2018_07_issta/artifact_evaluation/'
                           'data/clgen.pbtxt')),
    'The path of the generator config proto.')
flags.DEFINE_integer('num_testcases', 1024,
                     'The number of testcases to generate.')
flags.DEFINE_string(
    'output_directory', '/tmp/phd/docs/2018_07_issta/artifact_evaluation',
    'The directory to write generated programs and testcases to.')


def GenerateTestcases(generator_config: generator_pb2.ClgenGenerator,
                      output_directory: pathlib.Path,
                      num_testcases: int) -> None:
    logging.info('Writing output to %s', output_directory)
    (output_directory / 'generated_kernels').mkdir(parents=True, exist_ok=True)
    (output_directory / 'generated_testcases').mkdir(parents=True,
                                                     exist_ok=True)
Example #27
"""Get a baseline reading of CLgen OpenCL models."""
from absl import app
from absl import flags

from deeplearning.clgen import clgen
from labm8 import bazelutil

FLAGS = flags.FLAGS

PROTOS = [
    bazelutil.DataPath('phd/experimental/clgen/keras/opencl_baseline_a.pbtxt'),
    bazelutil.DataPath('phd/experimental/clgen/keras/opencl_baseline_b.pbtxt'),
]


def main(argv):
    del argv
    for proto in PROTOS:
        instance = clgen.Instance.FromFile(proto)
        instance.Sample(min_num_samples=1000)


if __name__ == '__main__':
    app.run(main)
Example #28
from deeplearning.deepsmith.proto import harness_pb2_grpc
from deeplearning.deepsmith.proto import service_pb2
from gpu.cldrive import cgen
from gpu.cldrive import data
from gpu.cldrive import driver
from gpu.cldrive import env
from labm8 import bazelutil
from labm8 import fs
from labm8 import labdate
from labm8 import system

FLAGS = flags.FLAGS

_UNAME = 'linux' if system.is_linux() else 'mac'
# Path to clang binary.
CLANG_PATH = bazelutil.DataPath(f'llvm_{_UNAME}/bin/clang')
# Flags for compiling with libcxx.
LIBCXX_LIB_DIR = bazelutil.DataPath(f'llvm_{_UNAME}/lib')
# Path to OpenCL headers.
OPENCL_HEADERS_DIR = bazelutil.DataPath('opencl_120_headers')
if system.is_linux():
    LIBOPENCL_DIR = bazelutil.DataPath('libopencl')


class DriverCompilationError(OSError):
    """Exception raised in case driver compilation fails."""
    pass


class CldriveHarness(harness.HarnessBase,
                     harness_pb2_grpc.HarnessServiceServicer):
Example #29
"""Access to the build information."""

import config_pb2
import datetime
import functools
import re
import typing

from labm8 import bazelutil
from labm8 import pbutil

# Path to the proto file generated by bazel that contains build information.
_BUILD_INFO = bazelutil.DataPath("phd/build_info.pbtxt")


@functools.lru_cache()
def GetBuildInfo() -> config_pb2.BuildInfo:
    """Return the build state."""
    return pbutil.FromFile(_BUILD_INFO,
                           config_pb2.BuildInfo(),
                           uninitialized_okay=False)


def GetGithubCommitUrl(
        remote_url: typing.Optional[str] = None,
        commit_hash: typing.Optional[str] = None) -> typing.Optional[str]:
    """Calculate the GitHub URL for a commit."""
    build_info = GetBuildInfo()
Example #30
import subprocess
import sys
import typing

from absl import app
from absl import flags

from gpu.clinfo.proto import clinfo_pb2
from labm8 import bazelutil
from labm8 import system

FLAGS = flags.FLAGS

_OCLGRIND_PKG = 'oclgrind_linux' if system.is_linux() else 'oclgrind_mac'
# The path to the oclgrind binary.
OCLGRIND_PATH = bazelutil.DataPath(f'{_OCLGRIND_PKG}/bin/oclgrind')
# The clinfo description of the local Oclgrind binary.
CLINFO_DESCRIPTION = clinfo_pb2.OpenClDevice(
    name='Emulator|Oclgrind|Oclgrind_Simulator|Oclgrind_18.3|1.2',
    platform_name='Oclgrind',
    device_name='Oclgrind Simulator',
    driver_version='Oclgrind 18.3',
    opencl_version='1.2',
    device_type='Emulator',
    platform_id=0,
    device_id=0,
)


def Exec(argv: typing.List[str],
         env: typing.Dict[str, str] = None) -> subprocess.Popen: