def split_image(img):
    """Rearrange the image pixels into 32x32 chunk order (in place).

    Full 32x32 chunks are emitted row by row, followed by the right-edge
    remainder of each chunk row, then the bottom remainder rows and the
    bottom-right corner.
    """
    pixels = []
    width, height = img.size
    loaded_img = img.load()

    chunk_size = 32
    x_chunks_count = width // chunk_size
    y_chunks_count = height // chunk_size
    x_rest = width % chunk_size
    y_rest = height % chunk_size

    for y_chunk in range(y_chunks_count):
        # Full 32x32 chunks of this chunk row.
        for x_chunk in range(x_chunks_count):
            for y in range(chunk_size):
                for x in range(chunk_size):
                    pixels.append(loaded_img[x + x_chunk * chunk_size,
                                             y + y_chunk * chunk_size])

        # Right-edge remainder columns of this chunk row.
        for y in range(chunk_size):
            for x in range(x_rest):
                pixels.append(loaded_img[x + (width - x_rest),
                                         y + y_chunk * chunk_size])

        Console.progress_bar(locale.split_pic, y_chunk, y_chunks_count)

    # Bottom remainder rows under the full chunks.
    for x_chunk in range(x_chunks_count):
        for y in range(y_rest):
            for x in range(chunk_size):
                pixels.append(loaded_img[x + x_chunk * chunk_size,
                                         y + (height - y_rest)])

    # Bottom-right corner remainder.
    for y in range(y_rest):
        for x in range(x_rest):
            pixels.append(loaded_img[x + (width - x_rest),
                                     y + (height - y_rest)])

    img.putdata(pixels)
def rgba2bytes(sc, img, _type):
    """Serialize the image pixels into the SC texture stream for the given pixel type."""
    write_pixel = None

    if _type in (0, 1):  # RGBA8888
        def write_pixel(pixel):
            return struct.pack('4B', *pixel)

    if _type == 2:  # RGBA4444
        def write_pixel(pixel):
            r, g, b, a = pixel
            return struct.pack(
                '<H', a >> 4 | b >> 4 << 4 | g >> 4 << 8 | r >> 4 << 12)

    if _type == 3:  # RGBA5551
        def write_pixel(pixel):
            r, g, b, a = pixel
            return struct.pack(
                '<H', a >> 7 | b >> 3 << 1 | g >> 3 << 6 | r >> 3 << 11)

    if _type == 4:  # RGB565
        def write_pixel(pixel):
            r, g, b = pixel
            return struct.pack('<H', b >> 3 | g >> 2 << 5 | r >> 3 << 11)

    if _type == 6:  # LA88, stored alpha-first
        def write_pixel(pixel):
            return struct.pack('2B', *pixel[::-1])

    if _type == 10:  # L8 (grayscale)
        def write_pixel(pixel):
            return struct.pack('B', pixel)

    if write_pixel is not None:
        width, height = img.size
        pix = img.getdata()
        point = -1
        for y in range(height):
            for x in range(width):
                sc.write(write_pixel(pix[y * width + x]))

            curr = Console.percent(y, height)
            if curr > point:
                Console.progress_bar(locale.writing_pic, y, height)
                point = curr
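# A minimal, standalone sketch of the RGB565 packing used above for pixel
# type 4, kept separate from the real writer so it can be run on its own.
# The function name and the sample colour are illustrative only; the bit
# layout (5 bits red, 6 bits green, 5 bits blue, little-endian) mirrors the
# write_pixel branch for _type == 4.
def _rgb565_packing_sketch():
    import struct

    r, g, b = 255, 128, 64
    packed = struct.pack('<H', b >> 3 | g >> 2 << 5 | r >> 3 << 11)

    # 255 >> 3 = 31 (0b11111), 128 >> 2 = 32 (0b100000), 64 >> 3 = 8 (0b01000)
    # -> 0b11111_100000_01000 = 0xFC08, stored little-endian as b'\x08\xfc'.
    assert packed == b'\x08\xfc'
    return packed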
def bytes2rgba(data: Reader, _type, img, pix):
    """Decode pixels from the SC texture stream into the image for the given pixel type."""
    read_pixel = None

    if _type in (0, 1):  # RGBA8888
        def read_pixel():
            return (data.read_ubyte(), data.read_ubyte(),
                    data.read_ubyte(), data.read_ubyte())
    elif _type == 2:  # RGBA4444
        def read_pixel():
            p = data.read_uint16()
            return ((p >> 12 & 15) << 4, (p >> 8 & 15) << 4,
                    (p >> 4 & 15) << 4, (p >> 0 & 15) << 4)
    elif _type == 3:  # RGBA5551; alpha is a single bit
        def read_pixel():
            p = data.read_uint16()
            return ((p >> 11 & 31) << 3, (p >> 6 & 31) << 3,
                    (p >> 1 & 31) << 3, (p & 1) << 7)
    elif _type == 4:  # RGB565
        def read_pixel():
            p = data.read_uint16()
            return (p >> 11 & 31) << 3, (p >> 5 & 63) << 2, (p & 31) << 3
    elif _type == 6:  # LA88, stored alpha-first
        def read_pixel():
            return (data.read_ubyte(), data.read_ubyte())[::-1]
    elif _type == 10:  # L8 (grayscale)
        def read_pixel():
            return data.read_ubyte()

    if read_pixel is not None:
        width, height = img.size
        point = -1
        for y in range(height):
            for x in range(width):
                pix.append(read_pixel())

            curr = Console.percent(y, height)
            if curr > point:
                Console.progress_bar(locale.crt_pic, y, height)
                point = curr
        print()
        img.putdata(pix)
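# A small round-trip sketch for the RGBA4444 case (pixel type 2), showing
# that packing and unpacking with the bit arithmetic above quantizes each
# channel to its top four bits. The function name and the sample pixel are
# illustrative; only struct from the standard library is used.
def _rgba4444_roundtrip_sketch():
    import struct

    r, g, b, a = 250, 130, 60, 255
    packed, = struct.unpack(
        '<H',
        struct.pack('<H', a >> 4 | b >> 4 << 4 | g >> 4 << 8 | r >> 4 << 12))
    restored = ((packed >> 12 & 15) << 4, (packed >> 8 & 15) << 4,
                (packed >> 4 & 15) << 4, (packed & 15) << 4)

    # The low nibble of every channel is lost:
    # (250, 130, 60, 255) -> (240, 128, 48, 240).
    assert restored == (240, 128, 48, 240)
    return restored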
def join_image(img, pixels):
    """Write chunk-ordered pixels back to their row-major positions (inverse of split_image)."""
    width, height = img.size
    loaded_img = img.load()
    pixel_index = 0

    chunk_size = 32
    x_chunks_count = width // chunk_size
    y_chunks_count = height // chunk_size
    x_rest = width % chunk_size
    y_rest = height % chunk_size

    for y_chunk in range(y_chunks_count):
        # Full 32x32 chunks of this chunk row.
        for x_chunk in range(x_chunks_count):
            for y in range(chunk_size):
                for x in range(chunk_size):
                    loaded_img[x_chunk * chunk_size + x,
                               y_chunk * chunk_size + y] = pixels[pixel_index]
                    pixel_index += 1

        # Right-edge remainder columns of this chunk row.
        for y in range(chunk_size):
            for x in range(x_rest):
                loaded_img[(width - x_rest) + x,
                           y_chunk * chunk_size + y] = pixels[pixel_index]
                pixel_index += 1

        Console.progress_bar(locale.join_pic, y_chunk, y_chunks_count)

    # Bottom remainder rows under the full chunks.
    for x_chunk in range(x_chunks_count):
        for y in range(y_rest):
            for x in range(chunk_size):
                loaded_img[x_chunk * chunk_size + x,
                           (height - y_rest) + y] = pixels[pixel_index]
                pixel_index += 1

    # Bottom-right corner remainder.
    for y in range(y_rest):
        for x in range(x_rest):
            loaded_img[x + (width - x_rest),
                       y + (height - y_rest)] = pixels[pixel_index]
            pixel_index += 1
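# A self-contained round-trip check for split_image/join_image, assuming
# Pillow is installed and that this module's Console/locale objects are
# available (both functions print progress while running). The 70x40 size
# is arbitrary but deliberately not a multiple of 32, so the right and
# bottom remainder branches are exercised as well. The function name is
# illustrative and not part of the tool itself.
def _split_join_roundtrip_sketch():
    from PIL import Image

    original = Image.new('RGBA', (70, 40))
    original.putdata([(x % 256, y % 256, (x * y) % 256, 255)
                      for y in range(40) for x in range(70)])
    reference = list(original.getdata())

    # split_image reorders the pixels into 32x32 chunk order in place;
    # reading them back row-major yields exactly the list join_image expects.
    split_image(original)
    join_image(original, list(original.getdata()))

    assert list(original.getdata()) == reference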
def cut_sprites(swf: SupercellSWF, folder_export):
    """Render every shape and region to PNG and record their metadata in the xcod stream.

    The xcod stream layout is: shape count, then per shape its id and region
    count, and per region the texture id, point count, sheet points,
    mirroring flag and rotation (in 90-degree steps).
    """
    os.makedirs(f'{folder_export}/overwrite', exist_ok=True)
    os.makedirs(f'{folder_export}/shapes', exist_ok=True)

    shapes_count = len(swf.shapes)
    swf.xcod_writer.write_uint16(shapes_count)
    for shape_index in range(shapes_count):
        Console.progress_bar(
            locale.cut_sprites_process % (shape_index + 1, shapes_count),
            shape_index, shapes_count)

        shape = swf.shapes[shape_index]

        rendered_shape = shape.render(swf)
        rendered_shape.save(f'{folder_export}/shapes/{shape.id}.png')

        regions_count = len(shape.regions)
        swf.xcod_writer.write_uint16(shape.id)
        swf.xcod_writer.write_uint16(regions_count)

        for region_index in range(regions_count):
            region = shape.regions[region_index]

            swf.xcod_writer.write_ubyte(region.texture_id)
            swf.xcod_writer.write_ubyte(region.points_count)
            for point in region.sheet_points:
                swf.xcod_writer.write_uint16(int(point.x))
                swf.xcod_writer.write_uint16(int(point.y))
            swf.xcod_writer.write_ubyte(1 if region.mirroring else 0)
            swf.xcod_writer.write_ubyte(region.rotation // 90)

            rendered_region = region.render(swf)
            rendered_region.save(
                f'{folder_export}/shape_{shape.id}_{region_index}.png')
    print()
def compile_sc(_dir, from_memory=None, img_data=None, folder_export=None):
    """Pack the PNG textures of a decoded sprite sheet back into a .sc file."""
    sc_data = None

    name = _dir.split('/')[-2]
    if from_memory:
        files = from_memory
    else:
        files = [i for i in os.listdir(_dir) if i.endswith('.png')]
        files.sort()

        if not files:
            logger.info(locale.dir_empty % name)
            return

        files = [Image.open(f'{_dir}{i}') for i in files]

    logger.info(locale.collecting_inf)
    sc = Writer()

    has_xcod = False
    use_lzham = False
    if from_memory:
        use_lzham = img_data['use_lzham']
    else:
        try:
            # The .xcod file stores the compression flag and, per texture,
            # the file type, pixel type and dimensions.
            sc_data = open(f'{_dir}/{name}.xcod', 'rb')
            sc_data.read(4)
            use_lzham, = struct.unpack('?', sc_data.read(1))
            sc_data.read(1)
            has_xcod = True
        except OSError:
            logger.info(locale.not_xcod)
            logger.info(locale.default_types)

    for picture_index in range(len(files)):
        img = files[picture_index]
        print()

        if from_memory:
            file_type = img_data['data'][picture_index]['file_type']
            pixel_type = img_data['data'][picture_index]['pixel_type']
        else:
            if has_xcod:
                file_type, pixel_type, width, height = struct.unpack(
                    '>BBHH', sc_data.read(6))

                if (width, height) != img.size:
                    logger.info(locale.illegal_size %
                                (width, height, img.width, img.height))

                    if Console.question(locale.resize_qu):
                        logger.info(locale.resizing)
                        img = img.resize((width, height), Image.ANTIALIAS)
            else:
                file_type, pixel_type = 1, 0

        width, height = img.size
        pixel_size = get_pixel_size(pixel_type)

        # Composite onto an almost fully transparent black canvas so that
        # transparent areas get consistent RGB values, then convert to the
        # mode matching the target pixel type.
        img = img.convert('RGBA')
        x = Image.new('RGBA', img.size, (0, 0, 0, 1))
        x.paste(img, (0, 0), img)
        img = x
        img = img.convert(pixel_type2str(pixel_type))

        file_size = width * height * pixel_size + 5

        logger.info(locale.about_sc %
                    (name, picture_index, pixel_type, width, height))

        # Texture header: file type, data length, pixel type, width, height.
        sc.write(
            struct.pack('<BIBHH', file_type, file_size, pixel_type, width,
                        height))

        if file_type in (27, 28):
            split_image(img)
            print()

        rgba2bytes(sc, img, pixel_type)
        print()

    sc.write(bytes(5))
    print()
    write_sc(f'{folder_export}/{name}.sc', sc.getvalue(), use_lzham)
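# A hedged sketch of the per-texture header written by compile_sc above,
# using the same '<BIBHH' layout: file type, data length, pixel type, width,
# height. The 4-byte-per-pixel size assumes pixel type 0/1 (RGBA8888); other
# pixel types use 2 or 1 bytes per pixel, and get_pixel_size is the project's
# helper for that. The function name and sample values are illustrative only.
def _texture_header_sketch():
    import struct

    file_type, pixel_type = 1, 0
    width, height = 512, 512
    pixel_size = 4  # RGBA8888
    file_size = width * height * pixel_size + 5

    header = struct.pack('<BIBHH', file_type, file_size, pixel_type, width,
                         height)
    assert len(header) == 10
    return header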
@logger.catch()
def refill_menu():
    """Rebuild the menu, enabling only the features whose dependencies are installed."""
    menu.categories.clear()

    try:
        # sc-compression is required for both the SC and CSV features.
        import sc_compression
        del sc_compression

        from system.lib.features.csv.compress import compress_csv
        from system.lib.features.csv.decompress import decompress_csv

        try:
            # Pillow is required for texture decoding/encoding.
            import PIL
            del PIL

            from system.lib.features.sc.assembly_encode import sc1_encode
            from system.lib.features.sc.decode import sc_decode
            from system.lib.features.sc.decode_and_cut import sc1_decode
            from system.lib.features.sc.sc_encode import sc_encode

            sc_category = Menu.Category(0, locale.sc_label)
            sc_category.add(
                Menu.Item(locale.decode_sc, locale.decode_sc_description,
                          sc_decode))
            sc_category.add(
                Menu.Item(locale.encode_sc, locale.encode_sc_description,
                          sc_encode))
            sc_category.add(
                Menu.Item(locale.decode_by_parts,
                          locale.decode_by_parts_description, sc1_decode))
            sc_category.add(
                Menu.Item(locale.encode_by_parts,
                          locale.encode_by_parts_description, sc1_encode))
            sc_category.add(
                Menu.Item(locale.overwrite_by_parts,
                          locale.overwrite_by_parts_description,
                          lambda: sc1_encode(True)))
            menu.add_category(sc_category)
        except ImportError:
            logger.warning(locale.install_to_unlock % 'PILLOW')

        csv_category = Menu.Category(1, locale.csv_label)
        csv_category.add(
            Menu.Item(locale.decompress_csv,
                      locale.decompress_csv_description, decompress_csv))
        csv_category.add(
            Menu.Item(locale.compress_csv, locale.compress_csv_description,
                      compress_csv))
        menu.add_category(csv_category)
    except ImportError:
        logger.warning(locale.install_to_unlock % 'sc-compression')

    other = Menu.Category(10, locale.other_features_label)
    try:
        import requests
        del requests

        other.add(
            Menu.Item(locale.check_update, locale.version % config.version,
                      check_update))
    except ImportError:
        logger.warning(locale.install_to_unlock % 'requests')
    other.add(Menu.Item(locale.check_for_outdated, None, check_for_outdated))
    other.add(
        Menu.Item(locale.reinit, locale.reinit_description,
                  lambda: (initialize(), refill_menu())))
    other.add(
        Menu.Item(
            locale.change_language,
            locale.change_lang_description % config.language,
            lambda: (config.change_language(locale.change()), refill_menu())))
    other.add(
        Menu.Item(
            locale.clear_directories, locale.clean_dirs_description,
            lambda: clear_directories()
            if Console.question(locale.clear_qu) else -1))
    other.add(
        Menu.Item(locale.toggle_update_auto_checking,
                  locale.enabled if config.auto_update else locale.disabled,
                  lambda: (config.toggle_auto_update(), refill_menu())))
    other.add(Menu.Item(locale.exit, None, lambda: (clear(), exit())))
    menu.add_category(other)
locale.load(config.language)

try:
    import requests
    del requests

    # Check for updates at most once a week.
    if config.auto_update and time.time() - config.last_update > 60 * 60 * 24 * 7:
        check_update()
        config.last_update = int(time.time())
        config.dump()

    if config.has_update:
        logger.opt(colors=True).info(f'<green>{locale.update_done % ""}</green>')

        if Console.question(locale.done_qu):
            latest_tag = get_tags('vorono4ka', 'xcoder')[0]
            latest_tag_name = latest_tag['name'][1:]

            config.has_update = False
            config.version = latest_tag_name
            config.last_update = int(time.time())
            config.dump()
        else:
            exit()
except ImportError:
    pass