Example #1
import lzf
import numpy
from PIL import Image


def main():
    img = Image.open("splashscreen.gif")

    # Convert to an array of gray values - data[row, column]
    data = numpy.asarray(img.convert('L'))

    # Flip
    data = data[::-1]

    # Invert
    data = numpy.subtract(255, data)

    # Split up into pages - pages[page, page_row:8, column]
    pages = numpy.array_split(data, range(8, img.size[1], 8), 0)

    # Reverse order of pages, too
    pages = pages[::-1]

    # Rearrange each page to be column-major - pages[page, column, page_row:8]
    pages = numpy.transpose(pages, (0, 2, 1))

    # Unpack into bits - pages[page, column, page_row:8, pixel_bit:8]
    pages = numpy.expand_dims(pages, 3)
    pages = numpy.unpackbits(pages, 3)

    # Truncate to 4bpp - pages[page, column, page_row:8, pixel_bit:4]
    pages = pages[:, :, :, :4]

    # Transpose into bit planes - pages[page, column, pixel_bit:4, page_row:8]
    pages = numpy.transpose(pages, (0, 1, 3, 2))

    # Collapse bit-planes - pages[page, column, pixel_bit:4, value:1]
    pages = numpy.packbits(pages, 3)

    # Flatten into pages - pages[page, byte]
    pages = numpy.reshape(pages, (pages.shape[0], -1))

    # Compress each page - pages[page]
    # A 2x output buffer keeps lzf.compress from returning None when a page
    # does not shrink (LZF output is at most ~104% of the input)
    pages = [lzf.compress(page.tobytes(), len(page) * 2) for page in pages]

    # Neatly format the image for output
    indexes = [0]
    for i in range(len(pages)):
        indexes.append(indexes[i] + len(pages[i]))

    out = open('splashscreen.c', 'w')
    out.write('#include "splashscreen.h"\n\n')
    out.write("const uint8 splashscreen_data[] = {\n")
    for i, page in enumerate(pages):
        out.write("    // %d-%d: Page %d\n" % (indexes[i], indexes[i + 1] - 1, i))
        for line in range(0, len(page), 10):
            value = ''.join('0x%02X, ' % x for x in page[line:line + 10]).strip()
            out.write("    %s\n" % (value,))
    out.write("};\n\n")

    out.write("const int16 splashscreen_indexes[] = {%s};\n" % (', '.join(str(x) for x in indexes)))
    out.close()

    print "Image output in %d bytes" % (sum(len(page) + 2 for page in pages) + 2,)
Example #2
def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
    """ Write pointcloud as .pcd to fileobj.
    If data_compression is not None it overrides pc.data.
    """
    metadata = pc.get_metadata()
    if data_compression is not None:
        data_compression = data_compression.lower()
        assert (data_compression in ('ascii', 'binary', 'binary_compressed'))
        metadata['data'] = data_compression

    header = write_header(metadata).encode('utf-8')
    fileobj.write(header)
    if metadata['data'].lower() == 'ascii':
        fmtstr = build_ascii_fmtstr(pc)
        if pc.pc_data.size == 1:
            np.savetxt(fileobj, pc.pc_data.reshape((1, )), fmt=fmtstr)
        else:
            np.savetxt(fileobj, pc.pc_data, fmt=fmtstr)

    elif metadata['data'].lower() == 'binary':
        fileobj.write(pc.pc_data.tobytes())
    elif metadata['data'].lower() == 'binary_compressed':
        # TODO
        # a '_' field is ignored by pcl and breaks compressed point clouds.
        # changing '_' to '_padding' or other name fixes this.
        # admittedly padding shouldn't be compressed in the first place.
        # reorder to column-by-column
        uncompressed_lst = []
        for fieldname in pc.pc_data.dtype.names:
            column = np.ascontiguousarray(pc.pc_data[fieldname]).tobytes()
            uncompressed_lst.append(column)
        uncompressed = b''.join(uncompressed_lst)
        uncompressed_size = len(uncompressed)
        # print("uncompressed_size = %r"%(uncompressed_size))
        buf = lzf.compress(uncompressed)
        if buf is None:
            # compression didn't shrink the file
            # TODO what to do in this case when reading?
            buf = uncompressed
            compressed_size = uncompressed_size
        else:
            compressed_size = len(buf)
        fmt = 'II'
        fileobj.write(struct.pack(fmt, compressed_size, uncompressed_size))
        fileobj.write(buf)
    else:
        raise ValueError('unknown DATA type')
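
Reading the binary_compressed body back inverts the branch above: unpack the two uint32 sizes, then hand the stored uncompressed size to lzf.decompress. A minimal sketch under those assumptions (an illustrative helper, not the library's actual reader; the size-equality test mirrors the writer's None fallback and shares its TODO ambiguity):

import struct
import lzf

def read_binary_compressed_body(fileobj):
    # Sizes first, then payload, matching the writer above
    compressed_size, uncompressed_size = struct.unpack('II', fileobj.read(8))
    buf = fileobj.read(compressed_size)
    if compressed_size == uncompressed_size:
        return buf  # writer fell back to storing the raw data
    return lzf.decompress(buf, uncompressed_size)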
Example #3
    def write_binary_compressed(self, file, cloud):
        '''
        Save point cloud data to a PCD file containing n-D points, in compressed binary format

        # Parameters
            file : file-like object or str
                The output file or name of it.
            cloud : PointCloud
                The point cloud data that needs saving
        '''
        import struct
        try:
            import lzf
        except ImportError:
            raise ImportError(
                'lzf compression lib is required to write compressed .pcd files. '
                'lzf can be installed from setup.py in https://github.com/teepark/python-lzf')

        file, own = _check_file(file, 0, 'pcd', 'wb')

        try:
            file.write(self.generate_header(cloud, 'binary_compressed').encode('ascii'))

            uncompressed_lst = []
            for field in cloud.names:
                column = np.ascontiguousarray(cloud.data[field]).tobytes()
                uncompressed_lst.append(column)
            uncompressed = b''.join(uncompressed_lst)
            uncompressed_size = len(uncompressed)
            buf = lzf.compress(uncompressed)
            if buf is None:
                logging.getLogger('pcl.io.PCDReader.write_binary_compressed')\
                    .warning("compression didn't shrink the file during "
                             "processing, which may cause data loss")
                buf = uncompressed
                compressed_size = uncompressed_size
            else:
                compressed_size = len(buf)
            fmt = 'II'
            file.write(struct.pack(fmt, compressed_size, uncompressed_size))
            file.write(buf)
        finally:
            if own:
                file.close()
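
If the warning branch is undesirable, lzf.compress accepts an explicit output-buffer size; since LZF output is bounded at roughly 104% of the input (see the comment in Example #11 below), an oversized buffer makes compression always return data instead of None, at the cost of occasionally storing a block larger than the original. A hedged sketch of that variant:

import lzf

def compress_always(uncompressed):
    # Oversized output buffer, so compress() never returns None even
    # when the payload does not shrink
    return lzf.compress(uncompressed, len(uncompressed) * 2)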
Example #4
def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
    """ write pointcloud as .pcd to fileobj.
    if data_compression is not None it overrides pc.data.
    """
    metadata = pc.get_metadata()
    if data_compression is not None:
        data_compression = data_compression.lower()
        assert(data_compression in ('ascii', 'binary', 'binary_compressed'))
        metadata['data'] = data_compression

    header = write_header(metadata).encode('utf-8')
    fileobj.write(header)
    if metadata['data'].lower() == 'ascii':
        fmtstr = build_ascii_fmtstr(pc)
        np.savetxt(fileobj, pc.pc_data, fmt=fmtstr)
    elif metadata['data'].lower() == 'binary':
        fileobj.write(pc.pc_data.tobytes())
    elif metadata['data'].lower() == 'binary_compressed':
        # TODO
        # a '_' field is ignored by pcl and breaks compressed point clouds.
        # changing '_' to '_padding' or other name fixes this.
        # admittedly padding shouldn't be compressed in the first place.
        # reorder to column-by-column
        uncompressed_lst = []
        for fieldname in pc.pc_data.dtype.names:
            column = np.ascontiguousarray(pc.pc_data[fieldname]).tobytes()
            uncompressed_lst.append(column)
        uncompressed = b''.join(uncompressed_lst)
        uncompressed_size = len(uncompressed)
        # print("uncompressed_size = %r"%(uncompressed_size))
        buf = lzf.compress(uncompressed)
        if buf is None:
            # compression didn't shrink the file
            # TODO what to do in this case when reading?
            buf = uncompressed
            compressed_size = uncompressed_size
        else:
            compressed_size = len(buf)
        fmt = 'II'
        fileobj.write(struct.pack(fmt, compressed_size, uncompressed_size))
        fileobj.write(buf)
    else:
        raise ValueError('unknown DATA type')
Example #5
def pure_python_dumps(item, default=None, depth=0, compress=True):
    "serialize a native python object into a mummy string"
    if default and not hasattr(default, "__call__"):
        raise TypeError("default must be callable or None")
    if depth >= MAX_DEPTH:
        raise ValueError("max depth exceeded")
    try:
        kind = _get_type_code(item)
    except ValueError:
        if default is None:
            raise TypeError("unserializable type")
        item = default(item)
        kind = _get_type_code(item)
    data = _dumpers[kind](item, depth, default)
    datalen = len(data)
    if compress and lzf and datalen > 5:
        compressed = lzf.compress(data, datalen - 5)
        if compressed:
            data = struct.pack("!i", datalen) + compressed
            kind = kind | 0x80
    kind = _dump_char(kind)

    return kind + data
Example #6
def pure_python_dumps(item, default=None, depth=0, compress=True):
    """serialize a native python object into a mummy string
    
    :param object: the python object to serialize
    :param function default:
        If the 'object' parameter is not serializable and this parameter is
        provided, this function will be used to generate a fallback value to
        serialize. It should take one argument (the original object), and
        return something serilizable.
    :param bool compress:
        whether or not to attempt to compress the serialized data (default
        True)

    :returns: the bytestring of the serialized data
    """
    if default and not hasattr(default, "__call__"):
        raise TypeError("default must be callable or None")
    if depth >= MAX_DEPTH:
        raise ValueError("max depth exceeded")
    try:
        kind = _get_type_code(item)
    except ValueError:
        if default is None:
            raise TypeError("unserializable type")
        item = default(item)
        kind = _get_type_code(item)
    data = _dumpers[kind](item, depth, default)
    datalen = len(data)
    if compress and lzf and datalen > 5:
        compressed = lzf.compress(data, datalen - 5)
        if compressed:
            data = struct.pack("!i", datalen) + compressed
            kind = kind | 0x80
    kind = _dump_char(kind)

    return kind + data
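
The compressed layout written here is one type-code byte with the high bit (0x80) set, a big-endian int32 holding the original length, then the LZF payload. A minimal sketch of peeling that layer off on the load side, assuming python-lzf's decompress(data, expected_len); split_compressed is a hypothetical helper, not mummy's API:

import struct
import lzf

def split_compressed(blob):
    # Returns (type_code, payload) with the optional LZF layer removed
    kind, data = blob[0], blob[1:]
    if kind & 0x80:
        datalen, = struct.unpack("!i", data[:4])
        data = lzf.decompress(data[4:], datalen)
        kind &= 0x7F
    return kind, data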
Example #7
def lzf_encode(data, level=None, header=False, out=None):
    """Compress LZF."""
    return lzf.compress(data)
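
The matching decoder cannot be a one-liner, because python-lzf's decompress needs the expected output size. A sketch under that assumption, reusing the out parameter as the uncompressed byte count (a simplification; this lzf_decode is illustrative, not a library API):

import lzf

def lzf_decode(data, header=False, out=None):
    """Decompress LZF; out is taken here as the uncompressed size."""
    if out is None:
        raise ValueError('lzf_decode needs the uncompressed size')
    return lzf.decompress(data, out)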
Example #8
 def compress(self, chunk):
     """Compresses a block of data using LZF compression."""
     return lzf.compress(chunk)
Example #9
def convert_graphic_to_c(graphic_fname, output_filename, compress, chunksize,
                         fmt_rgb565, fmt_4bpp, fmt_2bpp, fmt_1bpp):
    print("Converting %s to gfx-%s.c/h" % (graphic_fname, output_filename))
    sane_name = re.sub(r"[^a-zA-Z0-9]", "_", output_filename)
    graphic_image = Image.open(graphic_fname)

    if compress:
        if chunksize < 64:
            raise Exception("Chunk size must be >= 64")
        if chunksize > 512:
            raise Exception("Chunk size must be <= 512")

    # Get image dimensions
    (width, height) = graphic_image.size

    # Convert image to rgb565 byte list
    if fmt_rgb565:
        graphic_data = image_to_rgb565(graphic_image)
        newline_counter = int(width * 2)
        image_format = "IMAGE_FORMAT_RGB565"
        format_name = "rgb565"
    elif fmt_4bpp:
        graphic_data = image_to_mono4bpp(graphic_image)
        newline_counter = int(width / 2)
        image_format = "IMAGE_FORMAT_MONO4BPP"
        format_name = "4bpp"
    elif fmt_2bpp:
        graphic_data = image_to_mono2bpp(graphic_image)
        newline_counter = int(width / 4)
        image_format = "IMAGE_FORMAT_MONO2BPP"
        format_name = "2bpp"
    elif fmt_1bpp:
        graphic_data = image_to_mono1bpp(graphic_image)
        newline_counter = int(width / 8)
        image_format = "IMAGE_FORMAT_MONO1BPP"
        format_name = "1bpp"
    else:
        raise ValueError("no image format specified")

    # Generate the output filenames
    gfx_source_filename = "gfx-%s.c" % (output_filename)
    gfx_header_filename = "gfx-%s.h" % (output_filename)

    # Generate the C source file
    gfx_source_file = open(gfx_source_filename, "w")
    gfx_source_file.write(fileHeader)

    gfx_source_file.write("/* generated from %s */\n\n" % (graphic_fname))

    gfx_source_file.write("#include <stdint.h>\n")
    gfx_source_file.write("#include <qp.h>\n")
    gfx_source_file.write("#include <qp_common.h>\n\n")
    gfx_source_file.write("// clang-format off\n\n")

    if compress:
        compressed_data = []
        compressed_chunk_offsets = []
        uncompressed_graphic_data = graphic_data.copy()
        while len(uncompressed_graphic_data) > 0:
            chunk_size = min(chunksize, len(uncompressed_graphic_data))
            uncompressed_chunk = uncompressed_graphic_data[0:chunk_size]
            uncompressed_graphic_data = uncompressed_graphic_data[chunk_size:]
            compressed = lzf.compress(bytes(uncompressed_chunk),
                                      int(len(uncompressed_chunk) * 2))
            compressed_chunk_offsets.append(
                (len(compressed_data),
                 len(compressed)))  # keep track of where this chunk starts
            compressed_data.extend(compressed)

        # Write out the compressed chunk offsets
        gfx_source_file.write("const uint32_t gfx_%s_chunk_offsets[%d] = {\n" %
                              (sane_name, len(compressed_chunk_offsets)))
        for n in range(0, len(compressed_chunk_offsets)):
            gfx_source_file.write(
                "  %4d,  // chunk %-4d // compressed size: %4d / %6.2f%%\n" %
                (compressed_chunk_offsets[n][0], n,
                 compressed_chunk_offsets[n][1],
                 (100 * compressed_chunk_offsets[n][1] / chunksize)))
        gfx_source_file.write("};\n\n")

        # Write out the compressed chunk data
        gfx_source_file.write(
            "static const uint8_t gfx_%s_chunk_data[%d] = {\n " %
            (sane_name, len(compressed_data)))
        count = 0
        for j in compressed_data:
            gfx_source_file.write(" 0b{0:08b}".format(j))
            count += 1
            if count < len(compressed_data):
                gfx_source_file.write(",")
                if count % 16 == 0:  # wrap the output line every 16 bytes
                    gfx_source_file.write("\n ")
        gfx_source_file.write("\n};\n\n")

        # Write out the image descriptor
        gfx_source_file.write(
            "const painter_compressed_image_descriptor_t gfx_%s_compressed = {"
            % (sane_name))
        gfx_source_file.write("\n  .base = {")
        gfx_source_file.write("\n    .image_format = %s," % (image_format))
        gfx_source_file.write("\n    .compression  = IMAGE_COMPRESSED_LZF,")
        gfx_source_file.write("\n    .width        = %d," % (width))
        gfx_source_file.write("\n    .height       = %d" % (height))
        gfx_source_file.write("\n  },")
        gfx_source_file.write("\n  .chunk_count     = %d," %
                              (len(compressed_chunk_offsets)))
        gfx_source_file.write("\n  .chunk_size      = %d," % (chunksize))
        gfx_source_file.write("\n  .chunk_offsets   = gfx_%s_chunk_offsets," %
                              (sane_name))
        gfx_source_file.write("\n  .compressed_data = gfx_%s_chunk_data," %
                              (sane_name))
        gfx_source_file.write(
            "\n  .compressed_size = %d  // original = %d bytes (%s) / %6.2f%% of original // rgb24 = %d bytes / %6.2f%% of rgb24"
            % (len(compressed_data), len(graphic_data), format_name,
               (100 * len(compressed_data) / len(graphic_data)),
               (3 * width * height),
               (100 * len(compressed_data) / (3 * width * height))))
        gfx_source_file.write("\n};\n\n")
        gfx_source_file.write(
            "painter_image_t gfx_%s = (painter_image_t)&gfx_%s_compressed;\n\n"
            % (sane_name, sane_name))

    else:
        # Generate image data lookup table
        gfx_source_file.write("static const uint8_t gfx_%s_data[%d] = {\n " %
                              (sane_name, len(graphic_data)))
        count = 0
        for j in graphic_data:
            gfx_source_file.write(" 0b{0:08b}".format(j))
            count += 1
            if count < len(graphic_data):
                gfx_source_file.write(",")
                if count % newline_counter == 0:  # one image row of bytes per line
                    gfx_source_file.write("\n ")
        gfx_source_file.write("\n};\n\n")

        # Write out the image descriptor
        gfx_source_file.write(
            "const painter_raw_image_descriptor_t gfx_%s_raw = {" %
            (sane_name))
        gfx_source_file.write("\n  .base = {")
        gfx_source_file.write("\n    .image_format = %s," % (image_format))
        gfx_source_file.write("\n    .compression  = IMAGE_UNCOMPRESSED,")
        gfx_source_file.write("\n    .width        = %d," % (width))
        gfx_source_file.write("\n    .height       = %d" % (height))
        gfx_source_file.write("\n  },")
        gfx_source_file.write("\n  .byte_count   = %d," % (len(graphic_data)))
        gfx_source_file.write("\n  .image_data   = gfx_%s_data," % (sane_name))
        gfx_source_file.write("\n};\n\n")
        gfx_source_file.write(
            "painter_image_t gfx_%s = (painter_image_t)&gfx_%s_raw;\n\n" %
            (sane_name, sane_name))

    gfx_source_file.write("// clang-format on\n")
    gfx_source_file.close()

    # Generate the C header file
    gfx_header_file = open(gfx_header_filename, "w")
    gfx_header_file.write(fileHeader)
    gfx_header_file.write("/* generated from %s */\n\n" % (graphic_fname))
    gfx_header_file.write("#pragma once\n\n")
    gfx_header_file.write("#include <qp.h>\n\n")
    gfx_header_file.write("extern painter_image_t gfx_%s;\n" % (sane_name))
    gfx_header_file.close()
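
On the consuming side each chunk is located through the emitted offset table and inflated independently; the same walk can be written in Python to sanity-check the generated tables. A minimal sketch, assuming the (offset, compressed_length) pairs, chunksize, and original data length from the function above:

import lzf

def decompress_chunks(chunk_data, chunk_offsets, chunksize, total_size):
    # chunk_offsets: (offset, compressed_length) pairs as written above;
    # every chunk inflates to chunksize bytes except possibly the last
    out = b''
    for offset, length in chunk_offsets:
        expected = min(chunksize, total_size - len(out))
        out += lzf.decompress(bytes(chunk_data[offset:offset + length]), expected)
    return out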
Example #10
    def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
        """ write pointcloud as .pcd to fileobj.
        if data_compression is not None it overrides pc.data.
        """

        def write_header(_metadata, rename_padding=False):
            """ given metadata as dictionary return a string header.
            """
            template = """\
        VERSION {version}
        FIELDS {fields}
        SIZE {size}
        TYPE {type}
        COUNT {count}
        WIDTH {width}
        HEIGHT {height}
        VIEWPOINT {viewpoint}
        POINTS {points}
        DATA {data}
        """
            str_metadata = _metadata.copy()

            if not rename_padding:
                str_metadata['fields'] = ' '.join(_metadata['fields'])
            else:
                new_fields = []
                for f in _metadata['fields']:
                    if f == '_':
                        new_fields.append('padding')
                    else:
                        new_fields.append(f)
                str_metadata['fields'] = ' '.join(new_fields)
            str_metadata['size'] = ' '.join(map(str, _metadata['size']))
            str_metadata['type'] = ' '.join(_metadata['type'])
            str_metadata['count'] = ' '.join(map(str, _metadata['count']))
            str_metadata['width'] = str(_metadata['width'])
            str_metadata['height'] = str(_metadata['height'])
            str_metadata['viewpoint'] = ' '.join(map(str, _metadata['viewpoint']))
            str_metadata['points'] = str(_metadata['points'])
            tmpl = template.format(**str_metadata)
            return tmpl

        def build_ascii_fmtstr(pc_):
            """ make a format string for printing to ascii, using fields
            %.8f minimum for rgb
            %.10f for more general use?
            """
            fmtstr = []
            for t, cnt in zip(pc_.type, pc_.count):
                if t == 'F':
                    fmtstr.extend(['%.10f'] * cnt)
                elif t == 'I':
                    fmtstr.extend(['%d'] * cnt)
                elif t == 'U':
                    fmtstr.extend(['%u'] * cnt)
                else:
                    raise ValueError("don't know about type %s" % t)
            return fmtstr

        metadata = pc.get_metadata()
        if data_compression is not None:
            data_compression = data_compression.lower()
            assert (data_compression in ('ascii', 'binary', 'binary_compressed'))
            metadata['data'] = data_compression

        header = write_header(metadata).encode('utf-8')
        fileobj.write(header)
        if metadata['data'].lower() == 'ascii':
            fmtstr = build_ascii_fmtstr(pc)
            np.savetxt(fileobj, pc.pc_data, fmt=fmtstr)
        elif metadata['data'].lower() == 'binary':
            fileobj.write(pc.pc_data.tobytes())
        elif metadata['data'].lower() == 'binary_compressed':
            # TODO
            # a '_' field is ignored by pcl and breaks compressed point clouds.
            # changing '_' to '_padding' or other name fixes this.
            # admittedly padding shouldn't be compressed in the first place.
            # reorder to column-by-column
            uncompressed_lst = []
            for fieldname in pc.pc_data.dtype.names:
                column = np.ascontiguousarray(pc.pc_data[fieldname]).tobytes()
                uncompressed_lst.append(column)
            uncompressed = b''.join(uncompressed_lst)
            uncompressed_size = len(uncompressed)
            # print("uncompressed_size = %r"%(uncompressed_size))
            buf = lzf.compress(uncompressed)
            if buf is None:
                # compression didn't shrink the file
                # TODO what to do in this case when reading?
                buf = uncompressed
                compressed_size = uncompressed_size
            else:
                compressed_size = len(buf)
            fmt = 'II'
            fileobj.write(struct.pack(fmt, compressed_size, uncompressed_size))
            fileobj.write(buf)
        else:
            raise ValueError('unknown DATA type')
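
write_header's inverse is a line-oriented parse of the same ten keys. A simplified sketch that rebuilds the metadata dict (illustrative only; a real PCD reader also has to tolerate comments and version quirks):

def parse_header(lines):
    # One 'KEY value ...' pair per line, mirroring the template above
    metadata = {}
    for line in lines:
        key, _, value = line.strip().partition(' ')
        key = key.lower()
        if key in ('version', 'data'):
            metadata[key] = value
        elif key in ('fields', 'type'):
            metadata[key] = value.split()
        elif key in ('size', 'count'):
            metadata[key] = [int(v) for v in value.split()]
        elif key == 'viewpoint':
            metadata[key] = [float(v) for v in value.split()]
        elif key in ('width', 'height', 'points'):
            metadata[key] = int(value)
    return metadata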
Example #11
 def compress(self, text):
     # lzf guarantees that even if the compressed version is longer, it is
     # within 104% of the original size (rounded up), so this should work
     return lzf.compress(text, len(text) * 2)
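
With the 2x buffer the only state a caller must keep is the original length, which lzf.decompress requires. A quick round-trip under that assumption:

import lzf

text = b'hello world ' * 64
packed = lzf.compress(text, len(text) * 2)  # guaranteed non-None
assert lzf.decompress(packed, len(text)) == text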
Example #12
def compress(data):
    # 2-byte little-endian length prefix, then the LZF payload
    return bytes((len(data) % 256, len(data) // 256)) + lzf.compress(data)
Example #13
 def compress(data, *args, **kwargs):
     return lzf.compress(data, *args, **kwargs)
Example #14
def compress(data):
    # 16-bit little-endian length prefix, then the LZF payload
    return struct.pack('<H', len(data)) + lzf.compress(data)
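
The matching inverse reads the little-endian uint16 prefix back to learn the expected size; a minimal sketch (note the 2-byte prefix caps payloads at 65535 bytes):

import struct
import lzf

def decompress(blob):
    # First two bytes: original length (little-endian); rest: LZF payload
    size, = struct.unpack('<H', blob[:2])
    return lzf.decompress(blob[2:], size)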