def test_valid_bitstream(self):
        """
        Serialise every option set produced by the bitstream generator and
        verify that (1) each dictionary contains exactly the expected fields
        and (2) it encodes the options it claims to.  This indirectly
        exercises all of the contributing option-generating functions.
        """
        base_video_format = BaseVideoFormats.hd1080p_50
        video_parameters = set_source_defaults(base_video_format)
        level_constraints_dict = defaultdict(AnyValue)

        all_option_sets = list(
            iter_source_parameter_options(
                video_parameters, video_parameters, level_constraints_dict
            )
        )

        for option_set in all_option_sets:
            state = State()
            writer = BitstreamWriter(BytesIO())

            with Serialiser(writer, option_set) as ser:
                produced_video_parameters = source_parameters(
                    ser,
                    state,
                    base_video_format,
                )

            # Every encoding must round-trip to the original parameters
            assert produced_video_parameters == video_parameters
    def sanity_check(self, slice_size_scaler, slice):
        """
        Check that the provided HQ slice dictionary serialises without error.
        """
        # Fill in the optional fields with empty values where absent
        for optional_field, empty_value in [
            ("prefix_bytes", bytes()),
            ("y_block_padding", bitarray()),
            ("c1_block_padding", bitarray()),
            ("c2_block_padding", bitarray()),
        ]:
            slice.setdefault(optional_field, empty_value)

        # A trivial 1x1-slice state sized to match the supplied transform data
        state = State(
            slice_prefix_bytes=0,
            slice_size_scaler=slice_size_scaler,
            dwt_depth=0,
            dwt_depth_ho=0,
            luma_width=len(slice["y_transform"]),
            luma_height=1,
            color_diff_width=len(slice["c1_transform"]),
            color_diff_height=1,
            slices_x=1,
            slices_y=1,
        )

        f = BytesIO()
        with Serialiser(BitstreamWriter(f), slice) as ser:
            hq_slice(ser, state, 0, 0)
def serialise(context, pseudocode, state=None, file=None, *args, **kwargs):
    """
    Serialise ``context`` by running the given ``pseudocode`` function under a
    :py:class:`Serialiser`, returning whatever the pseudocode returns.

    ``state`` and ``file`` default to a fresh :py:class:`State` and
    :py:class:`BytesIO` respectively; extra arguments are forwarded to the
    pseudocode function.
    """
    if state is None:
        state = State()
    if file is None:
        file = BytesIO()

    writer = BitstreamWriter(file)
    with Serialiser(writer, context, vc2_default_values) as ser:
        return pseudocode(ser, state, *args, **kwargs)
def test_generate_exp_golomb_numbers_with_ascending_lengths(sign):
    first_128 = islice(generate_exp_golomb_with_ascending_lengths(sign), 128)
    for expected_length, number in first_128:
        # Cross-check the reported length against a known-good signed
        # exp-Golomb encoder.
        writer = BitstreamWriter(BytesIO())
        writer.write_sint(number)
        assert to_bit_offset(*writer.tell()) == expected_length

        # The generated number's sign must match the requested sign
        if sign < 0:
            assert number <= 0
        else:
            assert number >= 0
def test_source_parameters_encodings():
    """
    Check that the test cases produced by ``source_parameters_encodings``
    collectively exercise every base video format (with matching
    top-field-first setting) and both settings of every custom-override flag,
    while always decoding to an identical codec configuration.
    """
    codec_features = MINIMAL_CODEC_FEATURES

    test_cases = list(source_parameters_encodings(codec_features))

    # Accumulators filled in while walking every generated sequence header
    base_video_formats = set()
    flag_states = defaultdict(set)
    decoder_states_and_parameters = []

    for test_case in test_cases:
        for sequence in test_case.value["sequences"]:
            sh = sequence["data_units"][0]["sequence_header"]

            # Capture all base video formats
            base_video_formats.add(sh["base_video_format"])

            # Capture all flag values (breadth-first walk over nested dicts,
            # collecting every field whose name ends in "_flag")
            to_visit = [sh]
            while to_visit:
                d = to_visit.pop(0)
                for field, value in d.items():
                    if field.endswith("_flag"):
                        flag_states[field].add(value)
                    if isinstance(value, dict):
                        to_visit.append(value)

            # Capture actual resulting codec configuration by serialising the
            # header and recording the decoder state it produces
            state = State()
            with Serialiser(BitstreamWriter(BytesIO()), sh,
                            vc2_default_values) as ser:
                video_parameters = sequence_header(ser, state)
            decoder_states_and_parameters.append((state, video_parameters))

    # Special cases: our MINIMAL_CODEC_FEATURES configuration overrides the
    # frame size, frame rate and clean area fields with tiny sizes and so these
    # options are always set to custom
    assert flag_states.pop("custom_dimensions_flag") == set([True])
    assert flag_states.pop("custom_clean_area_flag") == set([True])
    assert flag_states.pop("custom_frame_rate_flag") == set([True])

    # Expect both settings of every remaining flag to have been exercised
    assert all(value == set([True, False]) for value in flag_states.values())

    # Expect all base video formats with matching top-field-first setting to be
    # tried
    assert base_video_formats == set(
        base_video_format for base_video_format in BaseVideoFormats
        if (codec_features["video_parameters"]["top_field_first"] ==
            BASE_VIDEO_FORMAT_PARAMETERS[base_video_format].top_field_first))

    # Final decoder states must all be identical (i.e. encoded values must be
    # identical)
    assert all(state_and_params == decoder_states_and_parameters[0]
               for state_and_params in decoder_states_and_parameters[1:])
# Example #6
def make_dummy_end_of_sequence(previous_parse_offset=PARSE_INFO_HEADER_BYTES):
    """
    Serialise an end-of-sequence data unit suitable for placing inside a
    padding data unit, returning the raw bytes.
    """
    end_of_sequence = ParseInfo(
        parse_code=ParseCodes.end_of_sequence,
        next_parse_offset=0,
        previous_parse_offset=previous_parse_offset,
    )

    buffer = BytesIO()
    state = State()
    with Serialiser(BitstreamWriter(buffer), end_of_sequence,
                    vc2_default_values) as ser:
        parse_info(ser, state)

    return buffer.getvalue()
    def test_adds_dangling_data(self, profile):
        """
        Check that ``dangling_bounded_block_data`` yields test cases in which
        a transform value genuinely dangles past the end of its bounded
        block, and that the number of dangling bits matches each subcase
        name.
        """
        codec_features = MINIMAL_CODEC_FEATURES.copy()
        codec_features["profile"] = profile

        test_cases = list(dangling_bounded_block_data(codec_features))
        # Four dangling types per picture component; presumably HQ streams
        # have three components (y/c1/c2) and LD two (y/c) -- matches the
        # *_transform fields used elsewhere in this file.
        assert len(test_cases) == (4 * 3 if profile == Profiles.high_quality
                                   else 4 * 2)

        for test_case in test_cases:
            # Use a MonitoredSerialiser to log where dangling values occur and
            # make sure they occur in the right places.
            #
            # {"?_transform": set([num_dangling_bits, ...]), ...}
            dangling_values = defaultdict(set)
            last_bits_remaining = [0]

            def monitor(ser, target, value):
                # Record the moment a *_transform value first crosses the end
                # of its bounded block (bits_remaining going from >= 0, or
                # unbounded, to negative).
                if (target.endswith("_transform")
                        and (last_bits_remaining[0] is None
                             or last_bits_remaining[0] >= 0)
                        and ser.io.bits_remaining < 0):
                    dangling_values[target].add(-ser.io.bits_remaining)
                last_bits_remaining[0] = ser.io.bits_remaining

            with MonitoredSerialiser(
                    monitor,
                    BitstreamWriter(BytesIO()),
                    test_case.value,
                    vc2_default_values,
            ) as ser:
                parse_stream(ser, State())

            # Map the subcase name prefix onto the expected number of bits
            # dangling past the end of the block
            if test_case.subcase_name.startswith("zero_dangling"):
                expected_bits = 1
            elif test_case.subcase_name.startswith("sign_dangling"):
                expected_bits = 1
            elif test_case.subcase_name.startswith("stop_and_sign_dangling"):
                expected_bits = 2
            elif test_case.subcase_name.startswith(
                    "lsb_stop_and_sign_dangling"):
                expected_bits = 3
            else:
                # Fail loudly (rather than with a NameError on expected_bits
                # below) if an unrecognised subcase type is ever added
                raise AssertionError(
                    "Unexpected subcase name: {}".format(
                        test_case.subcase_name))

            component = test_case.subcase_name.rpartition("_")[2]
            assert dangling_values["{}_transform".format(
                component.lower())] == set([expected_bits])
    def sanity_check(self, slice):
        """
        Check that the provided LD slice dictionary serialises without error.
        """
        # Fill in optional padding fields where absent
        slice.setdefault("y_block_padding", bitarray())
        slice.setdefault("c_block_padding", bitarray())

        # Extend the base state with dimensions matching the supplied data
        # (colour-diff width halved: c_transform interleaves two components)
        state = self.make_state()
        state.update(
            State(
                dwt_depth=0,
                dwt_depth_ho=0,
                luma_width=len(slice["y_transform"]),
                luma_height=1,
                color_diff_width=len(slice["c_transform"]) // 2,
                color_diff_height=1,
            ))

        f = BytesIO()
        with Serialiser(BitstreamWriter(f), slice) as ser:
            ld_slice(ser, state, 0, 0)
 def sanity_check(self, slice_size_scaler, slice):
     """
     Check that the provided HQ slice dictionary serialises without error.
     """
     # A trivial 1x1-slice state sized to match the supplied transform data
     state = State(
         slice_prefix_bytes=0,
         slice_size_scaler=slice_size_scaler,
         dwt_depth=0,
         dwt_depth_ho=0,
         luma_width=len(slice["y_transform"]),
         luma_height=1,
         color_diff_width=len(slice["c1_transform"]),
         color_diff_height=1,
         slices_x=1,
         slices_y=1,
     )

     f = BytesIO()
     with Serialiser(BitstreamWriter(f), slice, vc2_default_values) as ser:
         hq_slice(ser, state, 0, 0)
 def sanity_check(self, slice):
     """
     Check that the provided LD slice dictionary serialises without error.
     """
     # A trivial 4-byte, 1x1-slice state sized to the supplied transform data
     state = State(
         slice_bytes_numerator=4,
         slice_bytes_denominator=1,
         dwt_depth=0,
         dwt_depth_ho=0,
         luma_width=len(slice["y_transform"]),
         luma_height=1,
         color_diff_width=len(slice["c_transform"]),
         color_diff_height=1,
         slices_x=1,
         slices_y=1,
     )

     f = BytesIO()
     with Serialiser(BitstreamWriter(f), slice, vc2_default_values) as ser:
         ld_slice(ser, state, 0, 0)
    def test_finalizer_works(self):
        """
        Serialise a stream containing every data unit type with automatically
        numbered parse offsets, run the autofill finaliser, then deserialise
        the result and check that the offsets were filled in consistently.
        """
        f = BytesIO()
        w = BitstreamWriter(f)

        # Sequence with every data unit type and fully automatic numbers
        stream = Stream(
            sequences=[
                Sequence(
                    data_units=[
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.sequence_header
                            ),
                            sequence_header=SequenceHeader(
                                parse_parameters=ParseParameters(major_version=3),
                                video_parameters=SourceParameters(
                                    # Tiny custom frame-size used to reduce test suite
                                    # runtime
                                    frame_size=FrameSize(
                                        custom_dimensions_flag=True,
                                        frame_width=4,
                                        frame_height=4,
                                    )
                                ),
                            ),
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.high_quality_picture
                            ),
                            picture_parse=PictureParse(
                                picture_header=PictureHeader(picture_number=0)
                            ),
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.low_delay_picture
                            ),
                            picture_parse=PictureParse(
                                picture_header=PictureHeader(picture_number=0)
                            ),
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.high_quality_picture_fragment
                            ),
                            fragment_parse=FragmentParse(
                                fragment_header=FragmentHeader(picture_number=0)
                            ),
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.high_quality_picture_fragment
                            ),
                            fragment_parse=FragmentParse(
                                fragment_header=FragmentHeader(picture_number=0)
                            ),
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.padding_data
                            ),
                            padding=Padding(bytes=b"123"),
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.auxiliary_data
                            ),
                            auxiliary_data=AuxiliaryData(bytes=b"123"),
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.end_of_sequence
                            ),
                        ),
                    ]
                )
            ]
        )

        (
            next_parse_offsets_to_autofill,
            previous_parse_offsets_to_autofill,
        ) = autofill_parse_offsets(stream)

        with Serialiser(w, stream, vc2_default_values_with_auto) as serdes:
            vc2.parse_stream(serdes, State())
        w.flush()

        # Finalisation must patch the offsets in place without moving the
        # write pointer
        offset_before = w.tell()
        autofill_parse_offsets_finalize(
            w,
            serdes.context,
            next_parse_offsets_to_autofill,
            previous_parse_offsets_to_autofill,
        )
        assert w.tell() == offset_before

        # Deserialise the finalised bitstream to observe the filled-in offsets
        f.seek(0)
        r = BitstreamReader(f)
        with Deserialiser(r) as serdes:
            vc2.parse_stream(serdes, State())

        parse_infos = [
            data_unit["parse_info"]
            for sequence in serdes.context["sequences"]
            for data_unit in sequence["data_units"]
        ]

        # Check for start/end offsets being zero
        assert parse_infos[0]["previous_parse_offset"] == 0
        assert parse_infos[-1]["next_parse_offset"] == 0

        # Check for consistency and plausibility of offsets (each must exceed
        # 13 -- presumably the parse_info header size; cf.
        # PARSE_INFO_HEADER_BYTES)
        for pi1, pi2 in zip(parse_infos, parse_infos[1:]):
            assert pi1["next_parse_offset"] > 13
            assert pi2["previous_parse_offset"] > 13

            assert pi1["next_parse_offset"] == pi2["previous_parse_offset"]
    def test_works_on_multiple_sequences(self):
        """
        Check that parse offset autofill and finalisation work independently
        for each sequence in a multi-sequence stream.
        """
        f = BytesIO()
        w = BitstreamWriter(f)

        # Two sequences of padding data units with fully automatic offsets
        stream = Stream(
            sequences=[
                Sequence(
                    data_units=[
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.padding_data
                            )
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.padding_data
                            )
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.end_of_sequence
                            )
                        ),
                    ]
                ),
                Sequence(
                    data_units=[
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.padding_data
                            )
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.padding_data
                            )
                        ),
                        DataUnit(
                            parse_info=ParseInfo(
                                parse_code=tables.ParseCodes.end_of_sequence
                            )
                        ),
                    ]
                ),
            ]
        )

        (
            next_parse_offsets_to_autofill,
            previous_parse_offsets_to_autofill,
        ) = autofill_parse_offsets(stream)

        # NOTE: a stray debugging ``print(stream)`` was removed here
        with Serialiser(w, stream, vc2_default_values_with_auto) as serdes:
            vc2.parse_stream(serdes, State())
        w.flush()

        autofill_parse_offsets_finalize(
            w,
            serdes.context,
            next_parse_offsets_to_autofill,
            previous_parse_offsets_to_autofill,
        )

        # Deserialise the finalised bitstream to observe the filled-in offsets
        f.seek(0)
        r = BitstreamReader(f)
        with Deserialiser(r) as serdes:
            vc2.parse_stream(serdes, State())

        parse_infos = [
            [data_unit["parse_info"] for data_unit in sequence["data_units"]]
            for sequence in serdes.context["sequences"]
        ]

        # Check for start/end offsets being zero (independently per sequence)
        for sequence_pis in parse_infos:
            assert sequence_pis[0]["previous_parse_offset"] == 0
            assert sequence_pis[-1]["next_parse_offset"] == 0

            # Check for offset correctness: empty padding units are exactly
            # 13 bytes apart (the parse_info header size)
            for pi1, pi2 in zip(sequence_pis, sequence_pis[1:]):
                assert pi1["next_parse_offset"] == 13
                assert pi2["previous_parse_offset"] == 13
    def test_happy_cases(self, block_bits, num_values, magnitude):
        """
        Check that ``generate_dangling_transform_values`` produces, for each
        dangle type, a value set which exactly fills a bounded block of
        ``block_bits`` bits and whose final value dangles past the end of the
        block in the expected way.
        """
        value_sets = {
            dangle_type: generate_dangling_transform_values(
                block_bits,
                num_values,
                dangle_type,
                magnitude,
            )
            for dangle_type in DanglingTransformValueType
        }

        values_and_bits_beyond_ends = {}
        for description, values in value_sets.items():
            # Should all have required number of values
            assert len(values) == num_values

            # Should correctly encode into bounded block
            f = BytesIO()
            w = BitstreamWriter(f)
            w.bounded_block_begin(block_bits)
            for value in values:
                w.write_sint(value)

            # Should completely fill the block
            length_used = to_bit_offset(*w.tell())
            assert length_used == block_bits

            # Check we actually wrote 'beyond' the end of the block
            assert w.bits_remaining < 0

            # Work out which value and how many bits actually first crossed
            # the end-of-block boundary (checked against expectations below)
            w.flush()
            f.seek(0)
            r = BitstreamReader(f)
            r.bounded_block_begin(block_bits)
            value_beyond_end = None
            bits_beyond_end = None
            while r.bits_remaining >= 0:
                value_beyond_end = r.read_sint()
                bits_beyond_end = -r.bits_remaining
            values_and_bits_beyond_ends[description] = (
                value_beyond_end,
                bits_beyond_end,
            )

        # Check that the dangling value dangles in the expected way
        v, b = values_and_bits_beyond_ends[
            DanglingTransformValueType.zero_dangling]
        assert v == 0
        assert b == 1

        v, b = values_and_bits_beyond_ends[
            DanglingTransformValueType.sign_dangling]
        assert v != 0
        assert (-v).bit_length() == magnitude
        assert b == 1

        v, b = values_and_bits_beyond_ends[
            DanglingTransformValueType.stop_and_sign_dangling]
        assert v != 0
        assert (-v).bit_length() == magnitude
        assert b == 2

        v, b = values_and_bits_beyond_ends[
            DanglingTransformValueType.lsb_stop_and_sign_dangling]
        assert v != 0
        # NB: Larger due to exp-Golomb codes needing to end in a 1 bit
        assert (-v).bit_length() == magnitude + 1
        assert b == 3