Example #1
def validate_fragment(type_name, fragment):
    """Validate a dictionary representing a JSON/YAML fragment against an Envoy API proto3 type.

  Raises Protobuf errors on parsing failures; successful validation produces
  no result.

  Args:
    type_name: a string providing the type name, e.g.
      envoy.config.bootstrap.v3.Bootstrap.
    fragment: a dictionary representing the parsed JSON/YAML configuration
      fragment.
  """
    json_fragment = json.dumps(fragment)

    r = runfiles.Create()
    all_protos_pb_text_path = r.Rlocation(
        'envoy/tools/type_whisperer/all_protos_with_ext_pb_text.pb_text')
    file_desc_set = descriptor_pb2.FileDescriptorSet()
    text_format.Parse(pathlib.Path(all_protos_pb_text_path).read_text(),
                      file_desc_set,
                      allow_unknown_extension=True)

    pool = descriptor_pool.DescriptorPool()
    for f in file_desc_set.file:
        pool.Add(f)
    desc = pool.FindMessageTypeByName(type_name)
    msg = message_factory.MessageFactory(pool=pool).GetPrototype(desc)()
    json_format.Parse(json_fragment, msg, descriptor_pool=pool)
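These snippets assume the Bazel runfiles helper is imported as shown in Example #29 below, i.e. `from bazel_tools.tools.python.runfiles import runfiles`. As a hedged usage sketch for `validate_fragment` above (the YAML file name is hypothetical):

import yaml

# Hypothetical caller: parse a YAML config, then validate it against the
# Envoy Bootstrap proto. A mismatch raises a Protobuf parsing error.
with open('bootstrap.yaml') as f:
    fragment = yaml.safe_load(f.read())
validate_fragment('envoy.config.bootstrap.v3.Bootstrap', fragment)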
Example #2
def symlink_input(filegroup_resource_path, temp_dir, strip_prefix=None):
    """Symlinks a rule's input data into a temporary directory.

    This is useful both to create a hermetic set of inputs to pass to a
    documentation builder and to adjust the input data before passing it
    along.

    Args:
        filegroup_resource_path: Names a file created by enumerate_filegroup
          (in defs.bzl) which contains resource paths.
        temp_dir: Destination directory, which must already exist.
        strip_prefix: Optional; a list[str] of candidate strings to remove
          from the resource path when linking into temp_dir.  The first match
          wins, and it is valid for no prefixes to match.
    """
    assert os.path.isdir(temp_dir)
    manifest = runfiles.Create()
    with open(manifest.Rlocation(filegroup_resource_path)) as f:
        input_filenames = f.read().splitlines()
    for name in input_filenames:
        orig_name = manifest.Rlocation(name)
        assert os.path.exists(orig_name), name
        dest_name = name
        for prefix in (strip_prefix or []):
            if dest_name.startswith(prefix):
                dest_name = dest_name[len(prefix):]
                break
        temp_name = join(temp_dir, dest_name)
        os.makedirs(os.path.dirname(temp_name), exist_ok=True)
        os.symlink(orig_name, temp_name)
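A hedged usage sketch for `symlink_input`, reusing the filegroup resource path seen in Example #15 and a hypothetical prefix to strip:

import tempfile

# Mirror the rule's inputs into a scratch directory, dropping a common
# prefix from each linked path.
scratch = tempfile.mkdtemp()
symlink_input("drake/doc/doxygen_cxx/doxygen_input.txt", scratch,
              strip_prefix=["drake/doc/"])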
Example #3
File: build.py Project: IndianBoy42/drake
def main():
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument(
        "--out_dir",
        type=str,
        metavar="DIR",
        required=True,
        help="Output directory. Must be an absolute path and must not exist.")

    args = parser.parse_args()
    out_dir = args.out_dir
    if not os.path.isabs(out_dir):
        parser.error(f"--out_dir={out_dir} is not an absolute path")
    if os.path.exists(out_dir):
        parser.error(f"--out_dir={out_dir} already exists")

    manifest = runfiles.Create()
    gen_sphinx = manifest.Rlocation("drake/doc/pydrake/gen_sphinx")
    gen_jekyll = manifest.Rlocation("drake/doc/gen_jekyll")
    doxygen = manifest.Rlocation("drake/doc/doxygen_cxx/build")
    styleguide_build = manifest.Rlocation("drake/doc/styleguide/build")
    for item in [gen_sphinx, gen_jekyll, doxygen, styleguide_build]:
        assert os.path.exists(item), item

    _check_call([gen_jekyll, f"--out_dir={out_dir}"])
    _check_call([gen_sphinx, f"--out_dir={out_dir}/pydrake"])
    _check_call([styleguide_build, f"--out_dir={out_dir}/styleguide"])
    doxygen_scratch = f"{out_dir}/doxygen_scratch"
    _check_call([doxygen, f"--out_dir={doxygen_scratch}"])
    print(f"+ mv {doxygen_scratch}/html {out_dir}/doxygen_cxx")
    os.rename(f"{doxygen_scratch}/html", f"{out_dir}/doxygen_cxx")
    print(f"+ rm -rf {doxygen_scratch}")
    shutil.rmtree(doxygen_scratch)

    _build_sitemap(out_dir)
Example #4
    def test_enum_cross_check(self):
        """Checks that the Drake-created flavor of nlopt.hpp (via a patch file)
        is consistent with the upstream-generated flavor of same (via CMake).

        If this test fails during an NLopt version pin upgrade, you will need
        to update patches/gen_enums.patch with the reported differences.
        """
        # Load both input files.
        # "actual" refers to the Drake-created flavor (via a patch file).
        # "expected" refers to the upstream-generated flavor (via CMake).
        manifest = runfiles.Create()
        actual_file = manifest.Rlocation(
            "nlopt_internal/genrule/nlopt.hpp")
        with open(actual_file) as f:
            actual = f.read()
        expected_file = manifest.Rlocation(
            "drake/tools/workspace/nlopt_internal/test/nlopt-upstream.hpp")
        with open(expected_file) as f:
            expected = f.read()

        # When CMake is processing the header file, it removes blank lines.
        # We will do the same to our actual file to prep for comparison.
        actual = actual.replace("\n\n", "\n")

        # CMake also does something inexplicable to tab-spaced macro line
        # endings. Canonicalize those in both files for comparison.
        actual = re.sub(r'\s+\\', r' \\', actual)
        expected = re.sub(r'\s+\\', r' \\', expected)

        # Compare.
        self.assertMultiLineEqual(expected, actual)
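The `re.sub(r'\s+\\', r' \\', ...)` calls above collapse any run of whitespace before a line-continuation backslash into a single space. A small illustration on a made-up macro line:

import re

line = "#define NLOPT_EXAMPLE(x) \t\\"   # hypothetical macro continuation
print(re.sub(r'\s+\\', r' \\', line))    # -> "#define NLOPT_EXAMPLE(x) \"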
Example #5
 def setUp(self):
     filename = runfiles.Create().Rlocation(
         "drake/tools/skylark/pathutils.bzl")
     with open(filename, "r", encoding="utf-8") as f:
         bzl_contents = f.read()
     self.bzl_globals = dict()
     exec(bzl_contents, self.bzl_globals)
Example #6
 def setUp(self):
     super(PkgDebTest, self).setUp()
     self.runfiles = runfiles.Create()
     # Note: Rlocation requires forward slashes. os.path.join() will not work.
     self.deb_path = self.runfiles.Rlocation(
         'rules_pkg/tests/deb/fizzbuzz_test_all.deb')
     self.deb_file = DebInspect(self.deb_path)
Example #7
File: foo.py Project: zzmp/bazel
def main():
    print("Hello Python Foo!")
    r = runfiles.Create()
    print("rloc=%s" % r.Rlocation("foo_ws/foo/datadep/hello.txt"))

    # Run a subprocess, propagate the runfiles envvar to it. The subprocess will
    # use this process's runfiles manifest or runfiles directory.
    if IsWindows():
        env = {"SYSTEMROOT": os.environ["SYSTEMROOT"]}
    else:
        env = {}
    env.update(r.EnvVars())
    for lang in ["py", "java", "sh", "cc"]:
        p = subprocess.Popen([r.Rlocation(ChildBinaryName(lang))],
                             env=env,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        out = SplitToLines(out)
        if len(out) >= 2:
            print(out[0])  # e.g. "Hello Python Bar!"
            print(out[1])  # e.g. "rloc=/tmp/foo_ws/bar/bar-py-data.txt"
        else:
            raise Exception("ERROR: error running bar-%s: %s" %
                            (lang, SplitToLines(err)))
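The key hand-off in this example is `r.EnvVars()`, which returns the environment variables (runfiles manifest and/or directory locations) a child process needs to resolve the same runfiles. A minimal sketch of just that pattern, with `runfiles` imported as in the surrounding examples and a hypothetical child binary path:

import os
import subprocess

r = runfiles.Create()
env = dict(os.environ)
env.update(r.EnvVars())  # propagate the runfiles variables to the child
# "my_ws/tools/child" is a hypothetical runfiles path.
subprocess.check_call([r.Rlocation("my_ws/tools/child")], env=env)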
Example #8
def main():
    drake_workspace = os.path.dirname(
        runfiles.Create().Rlocation("drake/.bazelproject"))
    assert os.path.exists(drake_workspace), drake_workspace
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('--quick',
                        action='store_true',
                        default=False,
                        help="Disable slow features (e.g., all graphs).")
    parser.add_argument(
        "--out_dir",
        type=str,
        metavar="DIR",
        default=os.path.join(drake_workspace, "build/drake/doc/doxygen_cxx"),
        help="Output directory. Does not have to exist beforehand.")
    parser.add_argument(
        'inputs',
        nargs='*',
        help="Process only these files and/or directories; e.g., "
        "'bazel-bin/doc/doxygen --quick systems/framework' "
        "or using shell globbing, e.g., "
        "'bazel-bin/doc/doxygen --quick systems/framework/*leaf*.h'.")
    args = parser.parse_args()
    for x in args.inputs:
        if not os.path.exists(x):
            print("Inputs must be files and/or directories, but "
                  "'{}' does not exist".format(x))
            sys.exit(1)
    _run_doxygen(drake_workspace, args)
Example #9
def main():
    # Make sure we have access to the data file.
    resolver = runfiles.Create()
    location = resolver.Rlocation(
        'rules_contest/tests/simple_judge/solution.data')
    assert location and os.path.exists(location), location

    print(int(input()) * 42)
Example #10
File: protodoc.py Project: zhenshub/envoy
 def __init__(self):
   r = runfiles.Create()
   with open(r.Rlocation('envoy/docs/protodoc_manifest.yaml'), 'r') as f:
     # Load as YAML, emit as JSON and then parse as proto to provide type
     # checking.
     protodoc_manifest_untyped = yaml.safe_load(f.read())
     self.protodoc_manifest = manifest_pb2.Manifest()
     json_format.Parse(json.dumps(protodoc_manifest_untyped), self.protodoc_manifest)
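The YAML-to-JSON-to-proto round trip is what provides the type checking: `json_format.Parse` rejects fields that do not exist on the target message. A self-contained sketch of the same pattern, using protobuf's well-known Struct type as a stand-in for `manifest_pb2.Manifest()` so it runs without the Envoy protos:

import json
import yaml
from google.protobuf import json_format, struct_pb2

untyped = yaml.safe_load("title: example\ncount: 1\n")
msg = struct_pb2.Struct()  # stand-in for manifest_pb2.Manifest()
json_format.Parse(json.dumps(untyped), msg)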
Example #11
    def setUp(self):
        manifest = runfiles.Create()
        self._stub_path = manifest.Rlocation(
            "drake/lcm/initialization_sequence_test_stub")

        # We need a non-memq URL for this test to be meaningful.  (By default,
        # our configuration for the "bazel test" environment uses "memq://".)
        self._lcm_url = "udpm://239.255.76.67:7671"
Example #12
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--multiplier', type=int, default=11)
    options = parser.parse_args()

    # Make sure we have access to the data file.
    resolver = runfiles.Create()
    location = resolver.Rlocation(
        'rules_contest/tests/dataset_derive/deriver.data')
    assert location and os.path.exists(location), location

    value = int(input())
    print(value * options.multiplier)
Example #13
def main():
    # Make sure we have access to the data file.
    resolver = runfiles.Create()
    location = resolver.Rlocation(
        'rules_contest/tests/dataset_test/validator.data')
    assert location and os.path.exists(location), location

    if len(sys.argv) < 2:
        n = int(input())
    else:
        with open(sys.argv[1]) as f:
            n = int(f.read().strip())

    assert n % 2 == 0
Example #14
    def assertTarFilesAreAlmostNew(self, file_name):
        """Assert that tarfile contains files with an mtime of roughly now.

    This is used to prove that the test data file, which was presumably
    built with 'stamp=1' or ('stamp=-1' and --stamp), contains files which
    all have a fairly recent mtime, thus indicating they reflect the "current"
    time rather than the epoch or some other time.

    Args:
        file_name: the path to the TAR file to test.
    """
        file_path = runfiles.Create().Rlocation('rules_pkg/tests/' + file_name)
        with tarfile.open(file_path, 'r:*') as f:
            for info in f:
                self.check_mtime(info.mtime, file_path, info.name)
Example #15
def _build(*, out_dir, temp_dir, modules, quick):
    """Generates into out_dir; writes scratch files into temp_dir.
    As a precondition, both directories must already exist and be empty.
    """
    manifest = runfiles.Create()

    # Find drake's sources.
    drake_workspace = os.path.dirname(
        os.path.realpath(manifest.Rlocation("drake/.bazelproject")))
    assert os.path.exists(drake_workspace), drake_workspace
    assert os.path.exists(join(drake_workspace, "WORKSPACE")), drake_workspace

    # Find doxygen.
    doxygen = manifest.Rlocation("doxygen/doxygen")
    assert os.path.exists(doxygen), doxygen

    # Find dot.
    dot = "/usr/bin/dot"
    assert os.path.exists(dot), dot

    # Configure doxygen.
    doxyfile = _generate_doxyfile(manifest=manifest,
                                  out_dir=out_dir,
                                  temp_dir=temp_dir,
                                  dot=(dot if not quick else ""))

    # Prepare our input.
    symlink_input("drake/doc/doxygen_cxx/doxygen_input.txt", temp_dir)
    _symlink_headers(drake_workspace=drake_workspace,
                     temp_dir=temp_dir,
                     modules=modules)

    # Run doxygen.
    check_call([doxygen, doxyfile], cwd=temp_dir)

    # Post-process its log, and check for errors. If we are building only a
    # subset of the docs, we are likely to encounter errors due to the missing
    # sections, so we'll only enable the promotion of warnings to errors when
    # we're building all of the C++ documentation.
    check_for_errors = (len(modules) == 0)
    with open(f"{temp_dir}/doxygen.log", encoding="utf-8") as f:
        lines = [
            line.strip().replace(f"{temp_dir}/", "") for line in f.readlines()
        ]
    _postprocess_doxygen_log(lines, check_for_errors)

    # The nominal pages to offer for preview.
    return ["", "classes.html", "modules.html"]
Example #16
    def testBuild(self):
        # Set up a fresh Bazel workspace using the currently built repo.
        tempdir = os.path.join(os.environ['TEST_TMPDIR'], 'build')
        if not os.path.exists(tempdir):
            os.makedirs(tempdir)
        with open(os.path.join(tempdir, 'WORKSPACE'), 'w') as workspace:
            file_name = release_tools.package_basename(self.source_repo,
                                                       self.version)
            local_path = runfiles.Create().Rlocation(
                os.path.join('rules_pkg', 'distro', file_name))
            sha256 = release_tools.get_package_sha256(local_path)
            workspace_content = '\n'.join(
                ('workspace(name = "test_rules_pkg_packaging")',
                 release_tools.workspace_content(
                     'file://%s' % local_path,
                     self.source_repo,
                     sha256,
                     rename_repo=self.dest_repo,
                     deps_method='rules_pkg_dependencies')))
            workspace.write(workspace_content)
            if _VERBOSE:
                print('=== WORKSPACE ===')
                print(workspace_content)

        # We do a little dance of renaming *.tmpl to *, mostly so that we do not
        # have a BUILD file in testdata, which would create a package boundary.
        def CopyTestFile(source_name, dest_name):
            source_path = self.data_files.Rlocation(
                os.path.join('rules_pkg', 'distro', 'testdata', source_name))
            with open(source_path) as inp:
                with open(os.path.join(tempdir, dest_name), 'w') as out:
                    content = inp.read()
                    out.write(content)

        CopyTestFile('BUILD.tmpl', 'BUILD')

        os.chdir(tempdir)
        build_result = subprocess.check_output(
            ['bazel', 'build', ':dummy_tar'])
        if _VERBOSE:
            print('=== Build Result ===')
            print(build_result)

        # TODO(aiuto): Find tar in a disciplined way
        content = subprocess.check_output(
            ['tar', 'tzf', 'bazel-bin/dummy_tar.tar.gz'])
        self.assertEqual(b'./\n./BUILD\n', content)
Example #17
def main():
    notebooks = sys.argv[1:]
    manifest = runfiles.Create()

    num_errors = 0
    for item in notebooks:
        filename = manifest.Rlocation(f"drake/tutorials/{item}")
        with open(filename, encoding="utf-8") as f:
            contents = f.read().splitlines()
        name = f"tutorials/{item}"
        num_errors += _check_preamble(name, contents)
        num_errors += _check_matplotlib(name, contents)
        num_errors += _check_katex(name, contents)
        num_errors += _check_cell_outputs(name, contents)

    if num_errors > 0:
        sys.exit(1)
Example #18
 def test_show(self):
     """Test that show_model doesn't crash."""
     manifest = runfiles.Create()
     model_runpaths = [
         # Simple SDFormat file.
         "drake/multibody/benchmarks/acrobot/acrobot.sdf",
         # Simple URDF file.
         "drake/multibody/benchmarks/acrobot/acrobot.urdf",
         # Nested SDFormat file.
         "drake/manipulation/util/test/simple_nested_model.sdf",
         # SDFormat world file with multiple models.
         "drake/manipulation/util/test/simple_world_with_two_models.sdf",
     ]
     bin = manifest.Rlocation("drake/manipulation/util/show_model")
     for model_runpath in model_runpaths:
         print(model_runpath)
         model_file = manifest.Rlocation(model_runpath)
         subprocess.check_call([bin, model_file])
Example #19
    def assertTarFileContent(self, file_name, content, verbose=False):
        """Assert that the tarfile contains exactly the entries described by `content`.

    Args:
        file_name: the path to the TAR file to test.
        content: an array describing the expected content of the TAR file.
            Each entry in that list should be a dictionary where each field
            is a field to test in the corresponding TarInfo. To test only
            for the presence of a file "x", the entry can simply be
            `{'name': 'x'}`; missing fields are ignored. To match the content
            of a file entry, use the key 'data'.
    """
        # NOTE: This is portable to Windows. os.path.join('rules_pkg', 'tests',
        # filename) is not.
        file_path = runfiles.Create().Rlocation('rules_pkg/tests/tar/' +
                                                file_name)
        got = []
        with tarfile.open(file_path, 'r:*') as f:
            i = 0
            for info in f:
                if verbose:
                    print('  >> from tar file:', info.name)
                error_msg = 'Extraneous file at end of archive %s: %s' % (
                    file_path, info.name)
                self.assertLess(i, len(content), error_msg)
                for k, v in content[i].items():
                    if k == 'data':
                        value = f.extractfile(info).read()
                    elif k == 'isdir':
                        value = info.isdir()
                    else:
                        value = getattr(info, k)
                    error_msg = ' '.join([
                        'Value `%s` for key `%s` of file' % (value, k),
                        '%s in archive %s does' % (info.name, file_path),
                        'not match expected value `%s`' % v
                    ])
                    self.assertEqual(value, v, error_msg)
                    if value != v:
                        print(error_msg)
                i += 1
            if i < len(content):
                self.fail('Missing file %s in archive %s of [%s]' %
                          (content[i], file_path, ',\n    '.join(got)))
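For illustration, a hypothetical call to `assertTarFileContent` from inside a test method, checking an archive that holds one directory and one file whose payload must match exactly (the archive name and its entries are made up):

self.assertTarFileContent('test_tar_basic.tar', [
    {'name': './etc', 'isdir': True},
    {'name': './etc/motd', 'data': b'hello\n'},
])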
Example #20
    def test_contents(self):
        """Ensure that .pyi files contain 'reasonable' contents.

        For now, this is more or less just a smoke test, with a very cursory
        check on the contents.
        """
        manifest = runfiles.Create()

        # Get the base directory where our data files can be found.
        output_dir = manifest.Rlocation('drake/bindings/pydrake')

        # Find some of the expected output and look for an expected function.
        expected = os.path.join(output_dir, 'pydrake', '__init__.pyi')
        found_expected_decl = False
        for line in open(expected, 'r'):
            if line.startswith('def getDrakePath():'):
                found_expected_decl = True
                break
        self.assertTrue(found_expected_decl)
Example #21
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('input_file')
    parser.add_argument('output_file')
    parser.add_argument('answer_file')
    options = parser.parse_args()

    # Make sure we have access to the data file.
    resolver = runfiles.Create()
    location = resolver.Rlocation(
        'rules_contest/tests/simple_judge/comparator.data')
    assert location and os.path.exists(location), location

    with open(options.input_file) as f:
        input_num = int(f.read().strip())
    with open(options.output_file) as f:
        output_num = int(f.read().strip())

    assert output_num == input_num * 42
Example #22
def collect_signals(words):
  """Collect signals for tokens."""
  global FEMALE_NAMES
  global MALE_NAMES

  r = runfiles.Create()
  if not FEMALE_NAMES:
    with open(r.Rlocation('contrack/data/female_names.txt'), 'r') as f:
      FEMALE_NAMES = f.read().splitlines()
  if not MALE_NAMES:
    with open(r.Rlocation('contrack/data/male_names.txt'), 'r') as f:
      MALE_NAMES = f.read().splitlines()

  result = []
  for word in words:
    signals = []
    if word in MALE_NAMES or word in FEMALE_NAMES:
      signals.append('first_name')
    result.append(signals)

  return result
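A hedged usage sketch: assuming 'alice' appears in the female-names data file and 'ran' does not, the call would return a 'first_name' signal only for the first token.

signals = collect_signals(['alice', 'ran'])
# e.g. [['first_name'], []] under the assumption above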
Example #23
class ContentManifestTest(unittest.TestCase):
    """Test harness to see if we wrote the content manifest correctly."""

    run_files = runfiles.Create()

    def assertManifestsMatch(self, expected, got):
        """Check two manifest files for equality.

    Args:
        expected: The path to the content we expect.
        got: The path to the content we got.
    """
        e_file = ContentManifestTest.run_files.Rlocation(
            'rules_pkg/tests/mappings/' + expected)
        with open(e_file, mode='rb') as e_fp:
            expected = json.load(e_fp)
        g_file = ContentManifestTest.run_files.Rlocation(
            'rules_pkg/tests/mappings/' + got)
        with open(g_file, mode='rb') as g_fp:
            got = json.load(g_fp)
        self.assertEqual(expected, got)
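A hypothetical invocation from inside a test method, where both manifest names are made up and must resolve to JSON files under rules_pkg/tests/mappings/ in the runfiles:

self.assertManifestsMatch('expected_manifest.json', 'generated_manifest.json')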
Example #24
def _build(*, out_dir, temp_dir, modules, quick):
    """Generates into out_dir; writes scratch files into temp_dir.
    As a precondition, both directories must already exist and be empty.
    """
    manifest = runfiles.Create()

    # Find drake's sources.
    drake_workspace = os.path.dirname(os.path.realpath(
        manifest.Rlocation("drake/.bazelproject")))
    assert os.path.exists(drake_workspace), drake_workspace
    assert os.path.exists(join(drake_workspace, "WORKSPACE")), drake_workspace

    # Find doxygen.
    doxygen = manifest.Rlocation("doxygen/doxygen")
    assert os.path.exists(doxygen), doxygen

    # Find dot.
    dot = "/usr/bin/dot"
    assert os.path.exists(dot), dot

    # Configure doxygen.
    doxyfile = _generate_doxyfile(
        manifest=manifest,
        out_dir=out_dir,
        temp_dir=temp_dir,
        dot=(dot if not quick else ""))

    # Prepare our input.
    symlink_input(
        "drake/doc/doxygen_cxx/doxygen_input.txt", temp_dir)
    _symlink_headers(
        drake_workspace=drake_workspace,
        temp_dir=temp_dir,
        modules=modules)

    # Run doxygen.
    check_call([doxygen, doxyfile], cwd=temp_dir)

    # The nominal pages to offer for preview.
    return ["", "classes.html", "modules.html"]
Example #25
def main():
    # Make sure we have access to the data file.
    resolver = runfiles.Create()
    location = resolver.Rlocation(
        'rules_contest/tests/interactive_judge/solution.data')
    assert location and os.path.exists(location), location

    print('SOLUTION: output: q 0', file=sys.stderr)
    print('q 0')
    sys.stdout.flush()
    b = int(input())
    print('SOLUTION: input: %d' % b, file=sys.stderr)

    print('SOLUTION: output: q 1', file=sys.stderr)
    print('q 1')
    sys.stdout.flush()
    ab = int(input())
    print('SOLUTION: input: %d' % ab, file=sys.stderr)

    a = ab - b

    print('SOLUTION: output: a %d %d' % (a, b), file=sys.stderr)
    print('a %d %d' % (a, b))
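The arithmetic behind this solution: the judge (Example #27 below) answers each query 'q x' with a*x + b, so 'q 0' yields b, 'q 1' yields a + b, and therefore a = (a + b) - b. A standalone check of that recovery, with made-up hidden values:

a, b = 7, 3                  # hypothetical hidden values held by the judge
query = lambda x: a * x + b  # what the judge computes per query
assert query(0) == b and query(1) - query(0) == a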
Example #26
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--passphrase', required=True)
    parser.add_argument('--mode', required=True)
    options = parser.parse_args()

    assert options.passphrase == 'a b c', 'passphrase is %r' % options.passphrase

    # Make sure we have access to the data file.
    resolver = runfiles.Create()
    location = resolver.Rlocation('rules_contest/tests/dataset_generate/generator.data')
    assert location and os.path.exists(location), location

    out_dir = os.environ.get('OUTPUT_DIR')
    assert os.path.isdir(out_dir), 'OUTPUT_DIR does not exist'
    if options.mode == 'default':
        with open(os.path.join(out_dir, 'data1.in'), 'w'):
            pass
        with open(os.path.join(out_dir, 'data2.ans'), 'w'):
            pass
    elif options.mode == 'empty':
        pass
    else:
        assert False, '--mode=%s' % options.mode
Example #27
def main():
    # Make sure we have access to the data file.
    resolver = runfiles.Create()
    location = resolver.Rlocation(
        'rules_contest/tests/interactive_judge/server.data')
    assert location and os.path.exists(location), location

    with open(sys.argv[1]) as f:
        a, b = map(int, f.read().split())

    print('SERVER: init: a=%d, b=%d' % (a, b), file=sys.stderr)

    while True:
        line = input()
        print('SERVER: input: %s' % line, file=sys.stderr)
        if line.startswith('q '):
            x = int(line.split()[1])
            print('SERVER: output: %d' % (a * x + b), file=sys.stderr)
            print(a * x + b)
            sys.stdout.flush()
        elif line.startswith('a '):
            p, q = map(int, line.split()[1:])
            assert (p, q) == (a, b)
            break
Example #28
    def assertZipFilesAreAlmostNew(self, file_name):
        """Assert that zipfile contains files with an mtime of roughly now.

    This is used to prove that the test data file, which was presumably
    built with 'stamp=1' or ('stamp=-1' and --stamp), contains files which
    all have a fairly recent mtime, thus indicating they reflect the "current"
    time rather than the epoch or some other time.

    Args:
        file_name: the path to the ZIP file to test.
    """
        file_path = runfiles.Create().Rlocation('rules_pkg/tests/' + file_name)
        target_mtime = int(time.time())
        with zipfile.ZipFile(file_path, mode='r') as f:
            for info in f.infolist():
                d = info.date_time
                dt = datetime.datetime(d[0],
                                       d[1],
                                       d[2],
                                       d[3],
                                       d[4],
                                       d[5],
                                       tzinfo=datetime.timezone.utc)
                self.check_mtime(int(dt.timestamp()), file_path, info.filename)
Example #29
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import os
import re
import subprocess
import tempfile
import unittest

from bazel_tools.tools.python.runfiles import runfiles

RUNFILES = runfiles.Create()
MOTEUS_TOOL = RUNFILES.Rlocation(
    "com_github_mjbots_moteus/utils/moteus_tool")
DYNAMOMETER_DRIVE = RUNFILES.Rlocation(
    "com_github_mjbots_moteus/utils/dynamometer_drive")
TORQUE_RIPPLE = RUNFILES.Rlocation(
    "com_github_mjbots_moteus/utils/dyno_static_torque_ripple")

def dyno(*args, keep_log=False):
    tmp = tempfile.NamedTemporaryFile(
        prefix='{}-moteus_firmware_validate-'.format(
            datetime.datetime.now().isoformat()),
        delete = False)

    try:
        subprocess.run(args = [DYNAMOMETER_DRIVE,
Example #30
    'used in deployments where both the downstream and upstream are '
    'trusted.',
    'data_plane_agnostic':
    'This extension does not operate on the data plane and hence is intended to be robust against untrusted traffic.',
}

# A map from the extension status value to a human readable text for extension
# docs.
EXTENSION_STATUS_VALUES = {
    'alpha':
    'This extension is functional but has not had substantial production burn time, use only with this caveat.',
    'wip':
    'This extension is work-in-progress. Functionality is incomplete and it is not intended for production use.',
}

r = runfiles.Create()

EXTENSION_DB = utils.from_yaml(
    r.Rlocation("envoy/source/extensions/extensions_metadata.yaml"))
CONTRIB_EXTENSION_DB = utils.from_yaml(
    r.Rlocation("envoy/contrib/extensions_metadata.yaml"))


# create an index of extension categories from extension db
def build_categories(extensions_db):
    ret = {}
    for _k, _v in extensions_db.items():
        for _cat in _v['categories']:
            ret.setdefault(_cat, []).append(_k)
    return ret
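A small sketch of what `build_categories` returns, using a made-up two-entry extension database:

example_db = {
    'envoy.filters.http.buffer': {'categories': ['envoy.filters.http']},
    'envoy.filters.http.router': {'categories': ['envoy.filters.http']},
}
print(build_categories(example_db))
# {'envoy.filters.http': ['envoy.filters.http.buffer', 'envoy.filters.http.router']}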