Example #1
import os
import tempfile
from textwrap import dedent

from polygraphy.backend.trt import Algorithm, TacticReplayData
from polygraphy.json import save_json


# NOTE: fake_context() and fake_algo() are helpers defined elsewhere in the
# original test suite. Given the use of request.param below, this appears to be
# a parametrized pytest fixture; its @pytest.fixture(params=...) decorator is
# not shown in this excerpt.
def replay_dir(request):
    def make_replay(tactic):
        return TacticReplayData().add(
            "layer0",
            Algorithm.from_trt(fake_context("layer0"), fake_algo(0, tactic)))

    with tempfile.TemporaryDirectory() as dir:

        def make_path(prefix, *args):
            path = os.path.join(dir, prefix)
            if request.param:
                path = os.path.join(path, request.param)
            path = os.path.join(path, *args)
            return path

        # Good tactics
        save_json(make_replay(0), make_path("good", "0.json"))
        save_json(make_replay(1), make_path("good", "1.json"))

        # Bad tactics
        save_json(make_replay(1), make_path("bad", "0.json"))
        save_json(make_replay(2), make_path("bad", "1.json"))

        EXPECTED_OUTPUT = dedent("""
        [I] Loaded 2 good tactic replays.
        [I] Loaded 2 bad tactic replays.
        [I] Found potentially bad tactics:
        [I] Layer: layer0
                Algorithms: ["(Implementation: 0, Tactic: 2) | Inputs: (('TensorFormat.LINEAR', 'DataType.FLOAT'),) | Outputs: (('TensorFormat.LINEAR', 'DataType.FLOAT'),)"]
        """)
        yield dir, EXPECTED_OUTPUT
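
The fixture above combines three reusable patterns: a parametrized pytest fixture, a self-cleaning tempfile.TemporaryDirectory, and JSON files written into it before the fixture yields. Here is a minimal, self-contained sketch of the same shape using only the standard library and pytest; the fixture name, the parameter values, and the file contents are illustrative assumptions, not the original test's values.

import json
import os
import tempfile

import pytest


@pytest.fixture(params=["", "nested"])  # hypothetical params: optional subdirectory
def json_dir(request):
    with tempfile.TemporaryDirectory() as tmp:
        # Mirror make_path() above: optionally nest files one level deeper.
        base = os.path.join(tmp, request.param) if request.param else tmp
        os.makedirs(base, exist_ok=True)
        for idx in range(2):
            with open(os.path.join(base, "{}.json".format(idx)), "w") as f:
                json.dump({"tactic": idx}, f)
        yield base  # the directory and its files are removed on teardown


def test_files_exist(json_dir):
    assert sorted(os.listdir(json_dir)) == ["0.json", "1.json"]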
Example #2
def __init__(self, arr):
    """
    Args:
        arr (np.ndarray): The NumPy array.
    """
    self.arr = None
    self.tmpfile = None
    # Swap the array out to a temporary JSON file when it exceeds the configured
    # threshold; a negative ARRAY_SWAP_THRESHOLD_MB disables swapping, and
    # `<< 20` converts MiB to bytes.
    if config.ARRAY_SWAP_THRESHOLD_MB >= 0 and arr.nbytes > (config.ARRAY_SWAP_THRESHOLD_MB << 20):
        self.tmpfile = tempfile.NamedTemporaryFile(mode="w+", suffix=".json")
        G_LOGGER.extra_verbose("Evicting large array ({:.3f} MiB) from memory and saving to {:}".format(
            arr.nbytes / (1024.0 ** 2), self.tmpfile.name))
        save_json(arr, self.tmpfile)
    else:
        self.arr = arr
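
The same swap-to-disk idea can be shown standalone, with Polygraphy's config and logger replaced by a plain constant and the JSON serialization replaced by np.save/np.load; the class name and threshold below are assumptions for illustration.

import tempfile

import numpy as np

SWAP_THRESHOLD_MB = 8  # hypothetical threshold; the snippet reads it from config


class SwappableArray:
    """Keeps small arrays in memory and spills large ones to a temp file."""

    def __init__(self, arr):
        self.arr = None
        self.tmpfile = None
        if arr.nbytes > (SWAP_THRESHOLD_MB << 20):  # << 20 converts MiB to bytes
            self.tmpfile = tempfile.NamedTemporaryFile(suffix=".npy")
            np.save(self.tmpfile, arr)
        else:
            self.arr = arr

    def numpy(self):
        # Reload from disk on demand; small arrays are returned directly.
        if self.arr is not None:
            return self.arr
        self.tmpfile.seek(0)
        return np.load(self.tmpfile)


small = SwappableArray(np.zeros(4))         # stays in memory
large = SwappableArray(np.zeros(16 << 20))  # 128 MiB of float64: spilled to disk
assert large.numpy().shape == (16 << 20,)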
Example #3
    def run(self, args):
        import pickle

        import polygraphy
        from polygraphy.comparator.struct import RunResults

        # Older Polygraphy versions pickled RunResults as a plain list
        # subclass. pickle.load() resolves classes by dotted module path,
        # so re-pointing the module attribute at a compatible stand-in lets
        # those legacy objects deserialize; they are converted to the current
        # RunResults below before being written out as JSON.
        class LegacyRunResults(list):
            pass

        polygraphy.comparator.struct.RunResults = LegacyRunResults

        with open(args.pickle_data, "rb") as f:
            data = pickle.load(f)

            if isinstance(data, LegacyRunResults):
                data = RunResults(list(data))

            save_json(data, args.output)
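
The interesting move above is the monkey-patch: pickle resolves classes by their dotted module path at load time, so re-pointing the old name at a stand-in class is enough to deserialize objects written by an older library version. Below is a self-contained demonstration of that mechanism; the oldlib module and Record class are fabricated for the example.

import pickle
import sys
import types

# Fabricate a module standing in for an older library version whose Record
# class was a plain list subclass (analogous to the old RunResults).
oldlib = types.ModuleType("oldlib")
sys.modules["oldlib"] = oldlib


class Record(list):
    pass


Record.__module__ = "oldlib"  # make pickle record the class as oldlib.Record
oldlib.Record = Record

payload = pickle.dumps(Record([1, 2, 3]))  # bytes an old version would have written

# A newer version no longer ships that class...
del oldlib.Record


# ...so loading only works if a compatible stand-in is registered under the
# old dotted name, exactly what the snippet does for RunResults.
class LegacyRecord(list):
    pass


oldlib.Record = LegacyRecord
data = pickle.loads(payload)
assert isinstance(data, LegacyRecord) and list(data) == [1, 2, 3]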
Example #4
    def set_input_metadata(self, input_metadata):
        """
        Set the input metadata for the data loader.

        Args:
            input_metadata (TensorMetadata):
                    Input metadata, including shape and type information. The cache may attempt to transform inputs to
                    match the specified input_metadata when data already in the cache does not exactly match.
        """
        self.input_metadata = input_metadata
        # Forward the metadata to the wrapped data loader; objects that do not
        # allow setting the attribute (e.g. generators) are skipped.
        with contextlib.suppress(AttributeError):
            self.data_loader.input_metadata = input_metadata

        if not self.cache:
            G_LOGGER.verbose("Loading inputs from data loader")
            self.cache = list(self.data_loader)
            if not self.cache:
                G_LOGGER.warning("Data loader did not yield any input data.")

            # Only save inputs the first time the cache is generated
            if self.save_inputs_path is not None:
                save_json(self.cache, self.save_inputs_path, "inference input data")
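
A reduced model of this caching wrapper makes the control flow easier to see: metadata is forwarded only when the wrapped loader accepts attribute assignment, and the loader, which may be a one-shot generator, is drained into a list exactly once so it can be replayed. The class and method names here are illustrative, not Polygraphy's.

import contextlib


class CachingLoader:
    """Drains a one-shot iterable of feed dicts into a reusable cache."""

    def __init__(self, data_loader):
        self.data_loader = data_loader
        self.cache = []
        self.input_metadata = None

    def set_input_metadata(self, input_metadata):
        self.input_metadata = input_metadata
        # Plain iterables (e.g. generators) reject attribute assignment,
        # so the forwarding is best-effort, as in the method above.
        with contextlib.suppress(AttributeError):
            self.data_loader.input_metadata = input_metadata

        if not self.cache:
            # Generators can only be consumed once, so snapshot them here.
            self.cache = list(self.data_loader)

    def __iter__(self):
        return iter(self.cache)


feed_dicts = ({"x": [i]} for i in range(3))           # a one-shot generator
loader = CachingLoader(feed_dicts)
loader.set_input_metadata({"x": ((1,), "float32")})   # hypothetical metadata format
assert list(loader) == list(loader)                   # replayable via the cache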