Example #1
    def _download_worker(self, context: DownloadWork):
        '''Download and extract a test vector

        Downloads context.test_vector into
        <out_dir>/<test_suite_name>/<vector name>/, verifies its checksum
        and, when the downloaded file is an archive, extracts it.
        Raises Exception on checksum mismatch or download failure.
        '''
        test_vector = context.test_vector
        dest_dir = os.path.join(
            context.out_dir, context.test_suite_name, test_vector.name)
        dest_path = os.path.join(
            dest_dir, os.path.basename(test_vector.source))
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        if context.verify and os.path.exists(dest_path) and \
                test_vector.source_checksum == utils.file_checksum(dest_path):
            # Already downloaded and verified. Only remove the file when it
            # is an extractable archive: otherwise we would be deleting the
            # original file we want to work with on every re-run of the
            # download subcommand.
            if utils.is_extractable(dest_path) and not context.keep_file:
                os.remove(dest_path)
            return
        print(f'\tDownloading test vector {test_vector.name} from {dest_dir}')
        # Re-raise download errors as plain Exceptions so multiprocessing can
        # pickle them back to the parent process (the original exception may
        # hold unpicklable members such as an '_io.BufferedReader').
        try:
            utils.download(test_vector.source, dest_dir)
        except Exception as ex:
            raise Exception(str(ex)) from ex
        checksum = utils.file_checksum(dest_path)
        if test_vector.source_checksum != checksum:
            raise Exception(
                f'Checksum error for test vector \'{test_vector.name}\': \'{checksum}\' instead of '
                f'\'{test_vector.source_checksum}\'')

        if utils.is_extractable(dest_path):
            print(
                f'\tExtracting test vector {test_vector.name} to {dest_dir}')
            utils.extract(
                dest_path, dest_dir, file=test_vector.input_file if not context.extract_all else None)
            if not context.keep_file:
                os.remove(dest_path)
Example #2
 def decode(self, input_filepath: str, output_filepath: str,
            output_format: PixelFormat, timeout: int, verbose: bool) -> str:
     '''Decode input_filepath into output_filepath and return its checksum'''
     cmd = shlex.split(
         self.gen_pipeline(input_filepath, output_filepath, output_format))
     run_command(cmd, timeout=timeout, verbose=verbose)
     return file_checksum(output_filepath)
Example #3
 def decode(self, input_filepath: str, output_filepath: str,
            output_format: PixelFormat, timeout: int, verbose: bool) -> str:
     '''Decodes input_filepath in output_filepath'''
     # Build the decoder command line: -b selects the bitstream, -o the output
     command = [self.binary, '-b', input_filepath, '-o', output_filepath]
     run_command(command, timeout=timeout, verbose=verbose)
     return file_checksum(output_filepath)
Example #4
 def decode(self, input_filepath: str, output_filepath: str, output_format: PixelFormat, timeout: int,
            verbose: bool) -> str:
     '''Decodes input_filepath in output_filepath'''
     # Fill the ffmpeg command template, then split it into argv form
     command_line = FFMPEG_TPL.format(
         self.cmd, input_filepath, str(output_format.value), output_filepath)
     run_command(shlex.split(command_line), timeout=timeout, verbose=verbose)
     return file_checksum(output_filepath)
Example #5
 def decode(self, input_filepath: str, output_filepath: str,
            output_format: OutputFormat, timeout: int, verbose: bool) -> str:
     """Return the checksum of the input file (no decoding is performed)."""
     return file_checksum(input_filepath)
Example #6
    def _download_worker(self, ctx: DownloadWork) -> None:
        """Download and extract a test vector"""
        vector = ctx.test_vector
        dest_dir = os.path.join(ctx.out_dir, ctx.test_suite_name, vector.name)
        dest_path = os.path.join(dest_dir, os.path.basename(vector.source))
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        already_verified = (
            ctx.verify
            and os.path.exists(dest_path)
            and vector.source_checksum == utils.file_checksum(dest_path)
        )
        if already_verified:
            # Remove the file only when it is an extractable archive;
            # otherwise we would be deleting the original file we want to
            # work with every time the download subcommand runs.
            if utils.is_extractable(dest_path) and not ctx.keep_file:
                os.remove(dest_path)
            return
        print(f"\tDownloading test vector {vector.name} from {dest_dir}")
        # Catch the exception that download may throw to make sure pickle can serialize it properly
        # This avoids:
        # Error sending result: '<multiprocessing.pool.ExceptionWithTraceback object at 0x7fd7811ecee0>'.
        # Reason: 'TypeError("cannot pickle '_io.BufferedReader' object")'
        try:
            utils.download(vector.source, dest_dir)
        except Exception as ex:
            raise Exception(str(ex)) from ex
        if vector.source_checksum != "__skip__":
            checksum = utils.file_checksum(dest_path)
            if checksum != vector.source_checksum:
                raise Exception(
                    f"Checksum error for test vector '{vector.name}': '{checksum}' instead of "
                    f"'{vector.source_checksum}'"
                )

        if utils.is_extractable(dest_path):
            print(f"\tExtracting test vector {vector.name} to {dest_dir}")
            extract_target = None if ctx.extract_all else vector.input_file
            utils.extract(dest_path, dest_dir, file=extract_target)
            if not ctx.keep_file:
                os.remove(dest_path)
Example #7
 def decode(self, input_filepath: str, output_filepath: str,
            output_format: OutputFormat, timeout: int,
            verbose: bool) -> str:
     '''Decodes input_filepath in output_filepath'''
     # Addition of .pcm as extension is a must. If it is something else, e.g. ".out" the decoder will output a
     # ".wav", which is undesirable.
     pcm_filepath = output_filepath + ".pcm"
     cmd = [self.binary, input_filepath, pcm_filepath]
     run_command(cmd, timeout=timeout, verbose=verbose)
     return file_checksum(pcm_filepath)
Example #8
    def generate(self, download, jobs):
        '''Generates the test suite and saves it to a file

        Scrapes the bitstream URL list from the site, builds one TestVector
        per bitstream, optionally downloads them all, fills in checksums and
        reference results, and writes the suite to a JSON file.
        '''
        output_filepath = os.path.join(self.suite_name + '.json')
        test_suite = TestSuite(output_filepath, 'resources',
                               self.suite_name, self.codec, self.description, dict())

        hparser = HREFParser()
        print(f'Download list of bitstreams from {self.site + self.name}')
        with urllib.request.urlopen(self.site + self.name) as resp:
            data = str(resp.read())
            hparser.feed(data)

        for url in hparser.links[1:]:
            # The first item in the AVCv1 list is a readme file
            if '00readme_H' in url:
                continue
            file_url = os.path.basename(url)
            name = os.path.splitext(file_url)[0]
            file_input = f'{name}.bin'
            test_vector = TestVector(
                name, url, '', file_input, PixelFormat.YUV420P, '')
            test_suite.test_vectors[name] = test_vector

        if download:
            test_suite.download(jobs=jobs, out_dir=test_suite.resources_dir, verify=False,
                                extract_all=True, keep_file=True)

        for test_vector in test_suite.test_vectors.values():
            dest_dir = os.path.join(
                test_suite.resources_dir, test_suite.name, test_vector.name)
            dest_path = os.path.join(
                dest_dir, os.path.basename(test_vector.source))
            test_vector.input_file = self._find_by_ext(
                dest_dir, BITSTREAM_EXTS)
            # Check for a missing bitstream BEFORE stripping the directory
            # prefix: _find_by_ext may return None, and calling .replace on
            # None would raise AttributeError instead of this clear error.
            if not test_vector.input_file:
                raise Exception(f'Bitstream file not found in {dest_dir}')
            # Make input_file relative to the vector's destination directory
            test_vector.input_file = test_vector.input_file.replace(
                dest_dir + os.sep, '')
            test_vector.source_checksum = utils.file_checksum(dest_path)
            if 'main10' in test_vector.name.lower():
                test_vector.output_format = PixelFormat.YUV420P10LE

            if self.codec == Codec.H265:
                self._fill_checksum_h265(test_vector, dest_dir)
            elif self.codec == Codec.H264:
                self._fill_checksum_h264(test_vector, dest_dir)

        test_suite.to_json_file(output_filepath)
        print('Generate new test suite: ' + test_suite.name + '.json')
Example #9
 def decode(
     self,
     input_filepath: str,
     output_filepath: str,
     output_format: OutputFormat,
     timeout: int,
     verbose: bool,
 ) -> str:
     """Decodes input_filepath in output_filepath"""
     # --i420 forces I420 raw output; -o selects the destination file
     cmd = [self.binary, "--i420", input_filepath, "-o", output_filepath]
     run_command(cmd, timeout=timeout, verbose=verbose)
     return file_checksum(output_filepath)
Example #10
 def _fill_checksum_h264(self, test_vector, dest_dir):
     '''Store the checksum of the RAW reference file found in dest_dir'''
     reference = self._find_by_ext(dest_dir, RAW_EXTS)
     if reference is None:
         raise Exception(f'RAW file not found in {dest_dir}')
     test_vector.result = utils.file_checksum(reference)
Example #11
    def generate(self, download, jobs):
        """Generates the test suite and saves it to a file

        Lists the bucket contents via its XML index, creates one TestVector
        per valid bitstream, optionally downloads everything, computes the
        source checksums and reference results, and serializes the suite to
        JSON.
        """
        output_filepath = os.path.join(self.suite_name + ".json")
        test_suite = TestSuite(
            output_filepath,
            "resources",
            self.suite_name,
            self.codec,
            self.description,
            dict(),
        )

        print(f"Download list of bitstreams from {self.site + self.name}")
        opener = urllib.request.build_opener()
        # Close the HTTP response as soon as it has been parsed so the
        # underlying socket is not leaked (the original never closed it).
        with opener.open(self.site) as resp:
            root = ET.parse(resp).getroot()
        opener.close()

        for entry in root.findall("{*}Contents"):
            if "Contents" not in entry.tag:
                continue
            fname = entry.find("{*}Key").text
            # Strip the 4-character extension (e.g. ".obu") to get the name
            name = fname[:-4]
            if fname[-4:] not in BITSTREAM_EXTS or "invalid" in fname:
                continue

            file_url = f"{AV1_URL}/{entry.find('{*}Key').text}"
            # "__skip__" disables source-checksum verification on download
            test_vector = TestVector(name, file_url, "__skip__", fname,
                                     OutputFormat.YUV420P, "")
            test_suite.test_vectors[name] = test_vector

        if download:
            test_suite.download(
                jobs=jobs,
                out_dir=test_suite.resources_dir,
                verify=False,
                extract_all=True,
                keep_file=True,
            )

        for test_vector in test_suite.test_vectors.values():
            dest_dir = os.path.join(test_suite.resources_dir, test_suite.name,
                                    test_vector.name)
            dest_path = os.path.join(dest_dir,
                                     os.path.basename(test_vector.source))
            # Strip the destination-directory prefix to get a relative path
            test_vector.input_file = dest_path.replace(dest_dir + os.sep, "")
            if not test_vector.input_file:
                raise Exception(f"Bitstream file not found in {dest_dir}")
            test_vector.source_checksum = utils.file_checksum(dest_path)
            out420 = f"{dest_path}.i420"
            # Run the libaom av1 decoder to get the checksum as the .md5 files are per-frame
            test_vector.result = self.decoder.decode(dest_path, out420,
                                                     test_vector.output_format,
                                                     30, False)
            os.remove(out420)

        test_suite.to_json_file(output_filepath)
        print("Generate new test suite: " + test_suite.name + ".json")
Example #12
    def generate(self, download, jobs):
        """Generates the test suite and saves it to a file

        Builds a TestVector for each known bitstream of the configured bit
        depth, optionally downloads them, computes checksums and reference
        results, and serializes the suite to JSON. Does nothing for an
        unsupported bit depth.
        """
        output_filepath = os.path.join(self.suite_name + ".json")
        test_suite = TestSuite(
            output_filepath,
            "resources",
            self.suite_name,
            self.codec,
            self.description,
            dict(),
        )

        print(f"Download list of bitstreams from {DOWNLOAD_URL}")

        # Pick the file list matching the configured bit depth
        if self.bpp == 10:
            tests = TESTS_10BPP
        elif self.bpp == 8:
            tests = TESTS_8BPP
        else:
            # Unsupported bit depth: nothing to generate
            return

        for test in tests:
            file_url = f"{DOWNLOAD_URL}/{test}"
            # Raw string: "\d" in a plain string is an invalid escape and
            # warns on modern Python. Strips the trailing "_<digits>" part.
            name = re.sub(r"_[\d]*", "", test)

            # "__skip__" disables source-checksum verification on download
            test_vector = TestVector(name, file_url, "__skip__", test,
                                     OutputFormat.YUV420P, "")

            test_suite.test_vectors[name] = test_vector

        if download:
            test_suite.download(
                jobs=jobs,
                out_dir=test_suite.resources_dir,
                verify=False,
                extract_all=True,
                keep_file=True,
            )

        for test_vector in test_suite.test_vectors.values():
            dest_dir = os.path.join(test_suite.resources_dir, test_suite.name,
                                    test_vector.name)
            dest_path = os.path.join(dest_dir,
                                     os.path.basename(test_vector.source))
            # Strip the destination-directory prefix to get a relative path
            test_vector.input_file = dest_path.replace(dest_dir + os.sep, "")

            if not test_vector.input_file:
                raise Exception(f"Bitstream file not found in {dest_dir}")
            test_vector.source_checksum = utils.file_checksum(dest_path)
            out420 = f"{dest_path}.i420"
            # Run the libaom av1 decoder to get the checksum as the .md5 in the JSONs are per-frame
            test_vector.result = self.decoder.decode(dest_path, out420,
                                                     test_vector.output_format,
                                                     30, False)
            os.remove(out420)

        test_suite.to_json_file(output_filepath)
        print("Generate new test suite: " + test_suite.name + ".json")