Example #1
    def __init__(self, input: ImageStream, width, height):
        self.input = input
        self.output = ImageStream(input.payload.shape(),
                                  name="wavelet_2D_output")

        self.height = height
        self.width = width
Example #2
    def __init__(self, input: ImageStream, width, height, direction_y):
        self.input = input
        self.output = ImageStream(input.payload.shape(),
                                  name="wavelet_1D_output")

        self.height = height
        self.width = width
        self.direction_y = direction_y
Example #3
    def test_stream_splitter(self):
        image = imageio.imread(join(dirname(__file__), "che_128.png"))
        h, w = image.shape
        platform = SimPlatform()
        m = Module()

        input = ImageStream(8)
        transformer = m.submodules.transformer = Wavelet2D(input, w, h)
        splitter = m.submodules.splitter = ImageSplitter(transformer.output, w, h)

        def write_process():
            yield from write_frame_to_stream(input, image, pause=False)
            yield Passive()
            yield from write_frame_to_stream(input, image, pause=False)
            while True:
                yield from write_to_stream(input, line_last=0, frame_last=0, payload=0)
        platform.add_process(write_process, "sync")

        for i, stream in enumerate(splitter.outputs):
            # factory function so every read process captures its own index and stream
            def gen_read_process(i_captured, stream_captured):
                def read_process():
                    image = (yield from read_frame_from_stream(stream_captured, timeout=1000, pause=False))
                    imageio.imsave(platform.output_filename_base + "_output_{}.png".format(i_captured), image)
                return read_process
            platform.add_process(gen_read_process(i, stream), "sync")

        platform.add_sim_clock("sync", 100e6)
        platform.sim(m)
Example #4
    def test_wavelet_2d(self):
        image = imageio.imread(join(dirname(__file__), "che_128.png"))
        h, w = image.shape
        platform = SimPlatform()
        m = Module()

        input = ImageStream(8)
        transformer = m.submodules.transformer = Wavelet2D(input, w, h)

        def write_process():
            yield from write_frame_to_stream(input, image, pause=False)
            yield Passive()
            while True:
                yield from write_to_stream(input, line_last=0, frame_last=0, payload=0)
        platform.add_process(write_process, "sync")

        def read_process():
            image = (yield from read_frame_from_stream(transformer.output, timeout=1000, pause=False))
            target_image = np.copy(image)
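            # de-interleave the wavelet output: even (lf) pixels are gathered in the
            # first half of each axis, odd (hf) pixels in the second half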
            for y, row in enumerate(image):
                for x, px in enumerate(row):
                    target_image[y // 2 + ((y % 2) * len(image) // 2)][x // 2 + ((x % 2) * len(row) // 2)] = px
            imageio.imsave(platform.output_filename_base + ".png", target_image)
        platform.add_process(read_process, "sync")

        platform.add_sim_clock("sync", 1000e6)
        platform.sim(m)
Example #5
    def __init__(self, input: ImageStream, width, height, stages, level=1):
        self.input = input
        self.output = input.clone(name="wavelet_level{}_output".format(level))
        self.output.is_hf = Signal() @ DOWNWARDS

        self.width = width
        self.level = level
        self.height = height
        self.stages = stages
        self.fifos = []
Example #6
    def __init__(self, resource):
        self.resource = resource

        self.blanking_threshold = ControlSignal(16, reset=(480 * 16))

        self.measured_width = StatusSignal(16)
        self.measured_height = StatusSignal(16)

        self.width = ControlSignal(16, reset=1440)
        self.height = ControlSignal(16, reset=480)

        self.blank_r = ControlSignal()
        self.blank_g = ControlSignal()
        self.blank_b = ControlSignal()

        self.stable_lines_needed = 2000
        self.lines_stable = StatusSignal(range(self.stable_lines_needed + 1))
        self.frames_stable = StatusSignal(32)
        self.stable = StatusSignal()
        self.always_valid = ControlSignal()

        self.output_not_ready = StatusSignal(32)

        self.output = ImageStream(24)
Example #7
    def check_non_moving_xy(self,
                            transformer_function,
                            crop_top=0,
                            crop_left=0,
                            crop_bottom=0,
                            crop_right=0):
        m = Module()

        width, height = 9, 9
        input = ImageStream(32)
        transformer = m.submodules.transformer = ImageConvoluter(
            input, transformer_function, width, height)

        def write_process():
            testdata = [[x * y for x in range(width)] for y in range(height)]
            yield from write_frame_to_stream(input, testdata, pause=True)
            yield from write_frame_to_stream(input, testdata, pause=True)
            yield from write_frame_to_stream(input, testdata, pause=True)
            yield Passive()
            while True:
                yield from write_to_stream(input,
                                           line_last=0,
                                           frame_last=0,
                                           payload=0)

        def read_process():
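            # discard the first output frame, then check that two consecutive output
            # frames of the static test pattern are identical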
            (yield from read_frame_from_stream(transformer.output, pause=True))
            first = crop((yield from read_frame_from_stream(transformer.output,
                                                            pause=True)),
                         left=crop_left,
                         right=crop_right,
                         bottom=crop_bottom,
                         top=crop_top)
            second = crop(
                (yield from read_frame_from_stream(transformer.output,
                                                   pause=True)),
                left=crop_left,
                right=crop_right,
                bottom=crop_bottom,
                top=crop_top)
            self.assertEqual(first, second)

        platform = SimPlatform()
        platform.add_sim_clock("sync", 100e6)
        platform.add_process(write_process, "sync")
        platform.sim(m, read_process)
Example #8
    def check_move_transformer(self,
                               transform_xy,
                               testdata,
                               testdata_transformed,
                               crop_top=0,
                               crop_left=0,
                               crop_bottom=0,
                               crop_right=0):
        m = Module()
        tx, ty = transform_xy

        def transformer_function(x, y, image):
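            # the "kernel" simply picks the pixel translated by (tx, ty)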
            return image[x + tx, y + ty]

        input = ImageStream(32)
        transformer = m.submodules.transformer = ImageConvoluter(
            input, transformer_function, 10, 10)

        def write_process():
            yield from write_frame_to_stream(input, testdata, pause=True)
            yield Passive()
            while True:
                yield from write_to_stream(input,
                                           line_last=0,
                                           frame_last=0,
                                           payload=0)

        def read_process():
            self.assertEqual(
                crop((yield from read_frame_from_stream(transformer.output,
                                                        pause=True)),
                     left=crop_left,
                     right=crop_right,
                     bottom=crop_bottom,
                     top=crop_top), testdata_transformed)

        platform = SimPlatform()
        platform.add_sim_clock("sync", 100e6)
        platform.add_process(write_process, "sync")
        platform.sim(m, read_process)
Example #9
    def check_multistage(self, n):
        image = imageio.imread(join(dirname(__file__), "che_64.png"))
        h, w = image.shape
        platform = SimPlatform()
        m = Module()


        input = ImageStream(8)
        wavelet = m.submodules.wavelet = MultiStageWavelet2D(input, w, h, stages=n)

        def write_process():
            yield from write_frame_to_stream(input, image, pause=False, timeout=10000)
            yield Passive()
            while True:
                yield from write_frame_to_stream(input, image, pause=False, timeout=10000)
        platform.add_process(write_process, "sync")

        fifo_levels = defaultdict(lambda: defaultdict(int))
        def find_maximum_fifo_level():
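            # walk the chain of wavelet stages and record the peak read-side fill
            # level of every fifo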
            def find_max_levels(wavelet, level=1):
                for i, fifo in enumerate(wavelet.fifos):
                    current_level = yield fifo.r_level
                    fifo_levels[level][i] = max(current_level, fifo_levels[level][i])
                if hasattr(wavelet, 'next_stage'):
                    yield from find_max_levels(wavelet.next_stage, level + 1)
            yield Passive()
            while True:
                yield from find_max_levels(wavelet)
                yield
        platform.add_process(find_maximum_fifo_level, "sync")

        def read_process():
            for i in range(2):
                image = (yield from read_frame_from_stream(wavelet.output, timeout=1000, pause=False))
                imageio.imsave(platform.output_filename_base + str(i) + ".png", image)
        platform.add_process(read_process, "sync")

        platform.add_sim_clock("sync", 100e6)
        platform.sim(m)
        print("fifo levels:", list(fifo_levels.items()))
Example #10
class Wavelet2D(Elaboratable):
    def __init__(self, input: ImageStream, width, height):
        self.input = input
        self.output = ImageStream(input.payload.shape(),
                                  name="wavelet_2D_output")

        self.height = height
        self.width = width

    def elaborate(self, platform):
        m = Module()

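        # separable 2D transform: run the 1D wavelet in x direction, then in y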
        wavelet_x = m.submodules.wavelet_x = Wavelet1D(self.input,
                                                       self.width,
                                                       self.height,
                                                       direction_y=False)
        wavelet_y = m.submodules.wavelet_y = Wavelet1D(wavelet_x.output,
                                                       self.width,
                                                       self.height,
                                                       direction_y=True)
        m.d.comb += self.output.connect_upstream(wavelet_y.output)

        return m
Example #11
class Wavelet1D(Elaboratable):
    def __init__(self, input: ImageStream, width, height, direction_y):
        self.input = input
        self.output = ImageStream(input.payload.shape(),
                                  name="wavelet_1D_output")

        self.height = height
        self.width = width
        self.direction_y = direction_y

    def elaborate(self, platform):
        m = Module()

        def transformer_function(x, y, image_proxy):
            output = Signal.like(image_proxy[x, y])

            def px(shift):
                if self.direction_y:
                    return image_proxy[x, y + shift]
                else:
                    return image_proxy[x + shift, y]

            # even pixels are lf while odd pixels are hf
            with m.If((y if self.direction_y else x) % 2 == 0):
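                # low-pass: average of the even/odd pixel pair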
                m.d.comb += output.eq((px(0) + px(1)) // 2)
            with m.Else():
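                # high-pass: pair difference plus a correction term from the
                # neighbouring pixels, offset by half the payload range so the
                # signed result fits into an unsigned pixel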
                m.d.comb += output.eq((
                    (px(0) - px(1) +
                     (-px(-2) - px(-1) + px(2) + px(3)) // 8) // 2) +
                                      (2**len(self.input.payload) // 2))
            return output

        video_transformer = m.submodules.video_transformer = ImageConvoluter(
            self.input, transformer_function, self.width, self.height)
        m.d.comb += self.output.connect_upstream(video_transformer.output)

        return m
Example #12
    def test_image(self):
        platform = SimPlatform()
        m = Module()

        input = ImageStream(8)
        transformer = m.submodules.transformer = ImageSplitter2(
            input, 16, 4, 80)
        image = imageio.imread(join(dirname(__file__), "wavelet/che_64.png"))

        def write_process():
            for i in range(2):
                yield from write_frame_to_stream(input, image, pause=False)
            yield Passive()
            yield from do_nothing(100)

        platform.add_process(write_process, "sync")

        for i in range(4):

            def makefunc(i):
                def read_process():
                    for n in range(2):
                        frame = (yield from
                                 read_frame_from_stream(transformer.outputs[i],
                                                        timeout=1000,
                                                        pause=False))
                        imageio.imsave(
                            platform.output_filename_base + f"_{i}_{n}.png",
                            frame)

                return read_process

            platform.add_process(makefunc(i), "sync")

        platform.add_sim_clock("sync", 100e6)
        platform.sim(m)
Example #13
class HdmiStreamSource(Elaboratable):
    def __init__(self, resource):
        self.resource = resource

        self.blanking_threshold = ControlSignal(16, reset=(480 * 16))

        self.measured_width = StatusSignal(16)
        self.measured_height = StatusSignal(16)

        self.width = ControlSignal(16, reset=1440)
        self.height = ControlSignal(16, reset=480)

        self.blank_r = ControlSignal()
        self.blank_g = ControlSignal()
        self.blank_b = ControlSignal()

        self.stable_lines_needed = 2000
        self.lines_stable = StatusSignal(range(self.stable_lines_needed + 1))
        self.frames_stable = StatusSignal(32)
        self.stable = StatusSignal()
        self.always_valid = ControlSignal()

        self.output_not_ready = StatusSignal(32)

        self.output = ImageStream(24)

    def elaborate(self, platform):
        m = Module()

        resource = self.resource

        self.lane_b = m.submodules.lane_b = HdmiRxLane(resource.b, "hdmi_eclk",
                                                       "hdmi_qdr")
        self.lane_g = m.submodules.lane_g = HdmiRxLane(resource.g, "hdmi_eclk",
                                                       "hdmi_qdr")
        self.lane_r = m.submodules.lane_r = HdmiRxLane(resource.r, "hdmi_eclk",
                                                       "hdmi_qdr")

        m.d.comb += self.stable.eq(
            self.lines_stable > self.stable_lines_needed - 1)

        de = Signal()
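        # data enable: majority vote over the three lanes' data_enable signals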
        m.d.comb += de.eq((self.lane_b.data_enable + self.lane_r.data_enable +
                           self.lane_g.data_enable) > 1)

        ce = Signal()
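        # control period: majority of the lanes have data_enable deasserted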
        m.d.comb += ce.eq(
            (~self.lane_b.data_enable + ~self.lane_r.data_enable +
             ~self.lane_g.data_enable) > 1)

        x_ctr = Signal(16)
        y_ctr = Signal(16)

        long_blanking = Signal()
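        # count consecutive control cycles; a run reaching blanking_threshold marks
        # the vertical blanking between frames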
        blanking_ctr = Signal.like(self.blanking_threshold)
        with m.If(ce):
            with m.If(blanking_ctr < self.blanking_threshold):
                m.d.sync += blanking_ctr.eq(blanking_ctr + 1)
            with m.If(blanking_ctr == (self.blanking_threshold - 1)):
                m.d.comb += long_blanking.eq(1)
        with m.Else():
            m.d.sync += blanking_ctr.eq(0)

        probe(m, self.lane_b.data_enable, "de_b")
        probe(m, self.lane_g.data_enable, "de_g")
        probe(m, self.lane_r.data_enable, "de_r")
        probe(m, long_blanking)
        trigger(m, long_blanking)

        output = self.output.clone()
        line_started = Signal()

        with m.If(de | (x_ctr > 0)):
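            # active video: count pixels and drive the output stream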
            m.d.sync += x_ctr.eq(x_ctr + 1)

            with m.If(x_ctr < self.width):

                with m.If(~output.ready):
                    m.d.sync += self.output_not_ready.eq(self.output_not_ready + 1)

                m.d.comb += output.valid.eq(self.stable | self.always_valid)
                m.d.comb += output.payload.eq(
                    Cat(
                        self.lane_r.data & Repl(~self.blank_r, 8),
                        self.lane_g.data & Repl(~self.blank_g, 8),
                        self.lane_b.data & Repl(~self.blank_b, 8),
                    ))

                m.d.comb += output.line_last.eq(x_ctr == self.width - 1)
                m.d.comb += output.frame_last.eq((x_ctr == self.width - 1)
                                                 & (y_ctr == self.height - 1))

        with m.If(ce & ((x_ctr >= self.width) | (x_ctr == 0))):
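            # end of an active line: latch the measured geometry and count how many
            # consecutive lines and frames stayed stable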
            m.d.sync += x_ctr.eq(0)
            with m.If(x_ctr > 128):
                m.d.sync += y_ctr.eq(y_ctr + 1)
                m.d.sync += self.measured_width.eq(x_ctr)
                with m.If(x_ctr == self.measured_width):
                    with m.If(self.lines_stable < self.stable_lines_needed):
                        m.d.sync += self.lines_stable.eq(self.lines_stable + 1)
                with m.Else():
                    m.d.sync += self.frames_stable.eq(0)
                    m.d.sync += self.lines_stable.eq(0)

            with m.If(long_blanking):
                m.d.sync += y_ctr.eq(0)
                with m.If(y_ctr > 128):
                    m.d.sync += self.measured_height.eq(y_ctr)
                    with m.If(y_ctr == self.height):
                        m.d.sync += self.frames_stable.eq(self.frames_stable +
                                                          1)
                    with m.Else():
                        m.d.sync += self.frames_stable.eq(0)

        buffer = m.submodules.buffer = StreamBuffer(output)
        m.d.comb += self.output.connect_upstream(buffer.output)

        return m

    @driver_method
    def train(self):
        print("training hdmi")
        print("tranining lane b...")
        _, delay, alignment = self.lane_b.train()
        self.set_delay(delay)
        self.lane_g.select.offset = alignment
        self.lane_r.select.offset = alignment

    @driver_method
    def set_delay(self, delay):
        self.lane_b.delayf.set_delay(delay)
        self.lane_g.delayf.set_delay(delay)
        self.lane_r.delayf.set_delay(delay)