Example #1
    def proceed(self):
        chunks = []
        for i, model in enumerate(self.models):

            cm.check_dict(i, "model", model, 
            {
                "name": (cm.def_string_comp, True),
                "fn": (cm.def_string_comp, True),

                "normalize_x": (cm.def_bool_comp, False),
                "normalize_y": (cm.def_bool_comp, False),
                "normalize_z": (cm.def_bool_comp, False),
                "normalize_scale": (cm.def_bool_comp, False),
                "norm_sym": (cm.def_bool_comp, False),
                "compression": (cm.def_bool_comp, False),

                "index": (cm.def_int_comp, False),
            })

            files = [model["fn"]]
            id = cm.get_id(self.path, model)
            if cm.is_file_cached("model", i, id, self.path, files):
                chunks += cm.get_cached_chunk(id)
                print("[{}/{}]: Model \"{}\" already cached".format(i + 1, len(self.models), model["name"]))
                continue

            data = ""
            path = self.path + model["fn"]
            zip_path = "tmp/" + model["fn"].split('.')[0] + ".obj"

            with ZipFile(path, 'r') as zip_ref:
                zip_ref.extractall("tmp")

            with open(zip_path) as file:
                data = file.read()
            
            rmtree("tmp")

            print("[{}/{}]: Packing model \"{}\" ({} bytes)".format(i + 1, len(self.models), model["name"], len(data)))

            index = i
            if "index" in model:
                index = model["index"]

            archive = self.default_archive
            if "arhive" in model:
                archive = model["arhive"]

            norm_x = self.default_normalize_x
            if "normalize_x" in model:
                norm_x = model["normalize_x"]

            norm_y = self.default_normalize_y
            if "normalize_y" in model:
                norm_y = model["normalize_y"]

            norm_z = self.default_normalize_z
            if "normalize_z" in model:
                norm_z = model["normalize_z"]

            norm_scale = self.default_normalize_scale
            if "normalize_scale" in model:
                norm_scale = model["normalize_scale"]

            norm_sym = self.default_norm_sym
            if "norm_sym" in model:
                norm_sym = model["norm_sym"]

            normBitField = 0
            if norm_x:     normBitField |= 0x1
            if norm_y:     normBitField |= 0x2
            if norm_z:     normBitField |= 0x4
            if norm_scale: normBitField |= 0x8
            if norm_sym:   normBitField |= 0x10


            chunk = []
            chunk += cm.int32tobytes(index)
            chunk += cm.int16tobytes(len(model["name"]))
            chunk += model["name"].encode("utf-8")
            chunk += cm.int8tobytes(normBitField)
            chunk += [1 if archive else 0]

            if archive:
                compressed = compress(data.encode("utf-8"))
                chunk += cm.int32tobytes(len(compressed))
                chunk += cm.int32tobytes(len(data))
                chunk += compressed
            else:
                chunk += cm.int32tobytes(len(data))
                chunk += cm.int32tobytes(len(data))
                chunk += data.encode("utf-8")

            chunk = cm.create_chunk(chunk, cm.MODEL_CHUNK_TYPE)
            chunks += chunk

            cm.cache_chunk(id, chunk)

        return (chunks, len(self.models))
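
The `cm.int8tobytes` / `cm.int16tobytes` / `cm.int32tobytes` helpers used throughout these examples are not shown on this page. A minimal sketch of what they plausibly look like, assuming little-endian, unsigned encoding and a list-of-ints return value (so that `chunk += ...` concatenates):

    import struct

    # Plausible shape of the cm.*tobytes helpers -- an assumption, not the
    # project's real code. Little-endian, unsigned; each helper returns a
    # list of byte values so that `chunk += ...` extends the chunk list.
    def int8tobytes(v):
        return list(struct.pack("<B", v))

    def int16tobytes(v):
        return list(struct.pack("<H", v))

    def int32tobytes(v):
        return list(struct.pack("<I", v))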
Example #2
    def proceed(self):
        chunks = []
        for i, material in enumerate(self.materials):

            cm.check_dict(
                i, "material", material, {
                    "name": (cm.def_string_comp, True),
                    "fn": (cm.def_string_comp, True),
                    "index": (cm.def_int_comp, False),
                })

            files = [material["fn"]]
            id = cm.get_id(self.path, material)
            if cm.is_file_cached("material", i, id, self.path, files):
                chunks += cm.get_cached_chunk(id)
                print("[{}/{}]: Material \"{}\" already cached".format(
                    i + 1, len(self.materials), material["name"]))
                continue

            print("[{0}/{1}]: Packing material \"{2}\"".format(
                i + 1, len(self.materials), material["name"]))

            mat_json = cm.loadJSON(self.path + material["fn"])

            cm.check_dict(
                i, "mat_json", mat_json, {
                    "mode": (check_mode, False),
                    "ambient": (check_vector, False),
                    "map_ambient": (cm.def_string_comp, False),
                    "diffuse": (check_vector, False),
                    "map_diffuse": (cm.def_string_comp, False),
                    "shininess": (cm.def_int_comp, False),
                    "specular": (check_vector, False),
                    "map_specular": (cm.def_string_comp, False),
                    "transparent": (cm.def_int_comp, False),
                    "map_transparent": (cm.def_string_comp, False),
                    "map_normal": (cm.def_string_comp, False),
                })

            mode = self.default_mode
            if "mode" in mat_json:
                mode = mat_json["mode"]

            ambient = self.default_ambient
            if "ambient" in mat_json:
                ambient = mat_json["ambient"]

            map_ambient = self.default_map_ambient
            if "map_ambient" in mat_json:
                map_ambient = mat_json["map_ambient"]

            diffuse = self.default_diffuse
            if "diffuse" in mat_json:
                diffuse = mat_json["diffuse"]

            map_diffuse = self.default_map_diffuse
            if "map_diffuse" in mat_json:
                map_diffuse = mat_json["map_diffuse"]

            shininess = self.default_shininess
            if "shininess" in mat_json:
                shininess = mat_json["shininess"]

            specular = self.default_specular
            if "specular" in mat_json:
                specular = mat_json["specular"]

            map_specular = self.default_map_specular
            if "map_specular" in mat_json:
                map_specular = mat_json["map_specular"]

            transparent = self.default_transparent
            if "transparent" in mat_json:
                transparent = mat_json["transparent"]

            map_transparent = self.default_map_transparent
            if "map_transparent" in mat_json:
                map_transparent = mat_json["map_transparent"]

            map_normal = self.default_map_normal
            if "map_normal" in mat_json:
                map_normal = mat_json["map_normal"]

            index = i
            if "index" in material:
                index = material["index"]

            chunk = []
            chunk += cm.int32tobytes(index)
            chunk += cm.int16tobytes(len(material["name"]))
            chunk += material["name"].encode("utf-8")
            chunk += cm.int8tobytes(mode)

            chunk += cm.float32Arraytobytes(ambient)
            chunk += cm.int16tobytes(len(map_ambient))
            chunk += map_ambient.encode("utf-8")

            chunk += cm.float32Arraytobytes(diffuse)
            chunk += cm.int16tobytes(len(map_diffuse))
            chunk += map_diffuse.encode("utf-8")

            chunk += cm.float32tobytes(shininess)
            chunk += cm.float32Arraytobytes(specular)
            chunk += cm.int16tobytes(len(map_specular))
            chunk += map_specular.encode("utf-8")

            chunk += cm.float32tobytes(transparent)
            chunk += cm.int16tobytes(len(map_transparent))
            chunk += map_transparent.encode("utf-8")

            chunk += cm.int16tobytes(len(map_normal))
            chunk += map_normal.encode("utf-8")

            chunk = cm.create_chunk(chunk, cm.MATERIAL_CHUNK_TYPE)
            cm.cache_chunk(id, chunk)
            chunks += chunk

        return (chunks, len(self.materials))
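
A side note on the long run of lookups above: each `value = default; if key in mat_json: value = mat_json[key]` block is equivalent to a single `dict.get` call. A small self-contained demo with made-up values:

    # dict.get returns the default when the key is absent, so one line
    # replaces each "default unless overridden" block above.
    defaults = {"mode": 1, "ambient": [0.2, 0.2, 0.2]}   # made-up values
    mat_json = {"ambient": [1.0, 1.0, 1.0]}

    mode = mat_json.get("mode", defaults["mode"])            # absent -> default
    ambient = mat_json.get("ambient", defaults["ambient"])   # present -> override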
Example #3
    def proceed(self):
        chunks = []
        for i, shader in enumerate(self.shaders):

            cm.check_dict(i, "shader", shader, 
            {
                "name": (cm.def_string_comp, True),
                "vertex": (cm.def_string_comp, False),
                "fragment": (cm.def_string_comp, False),
                "geometry": (cm.def_string_comp, False),
                "compression": (cm.def_bool_comp, False),
                "index": (cm.def_int_comp, False),
            })

            files = []
            if "vertex" in shader: files.append(shader["vertex"])
            if "fragment" in shader: files.append(shader["fragment"])
            if "geometry" in shader: files.append(shader["geometry"])

            id = cm.get_id(self.path, shader)
            if cm.is_file_cached("shader", i, id, self.path, files):
                chunks += cm.get_cached_chunk(id)
                print("[{}/{}]: Shader \"{}\" already cached".format(i + 1, len(self.shaders), shader["name"]))
                continue

            compression = self.default_compression
            if "compression" in shader:
                compression = shader["compression"]

            lists = []
            shader_flag = 0

            if "vertex" in shader:
                with open(self.path + shader["vertex"]) as file:
                    lists.append(file.read())
                shader_flag |= 0x1

            if "fragment" in shader:
                with open(self.path + shader["fragment"]) as file:
                    lists.append(file.read())
                shader_flag |= 0x2

            if "geometry" in shader:
                with open(self.path + shader["geometry"]) as file:
                    lists.append(file.read())
                shader_flag |= 0x4

            str = "[{}/{}]: Packing shader \"{}\" (".format(i + 1, len(self.shaders), shader["name"])
            for list_i, list in enumerate(lists):
                str += "{} bytes{}".format(len(list), ", " if list_i != len(lists) - 1 else ")")
            print(str)

            index = i
            if "index" in shader:
                index = shader["index"]

            chunk = []
            chunk += cm.int32tobytes(index)            
            chunk += cm.int32tobytes(len(shader["name"]))            
            chunk += shader["name"].encode("utf-8")
            chunk += cm.int8tobytes(shader_flag)
            chunk += cm.int8tobytes(compression)
            for src in lists:
                src += '\n'
                if compression:
                    compressed = compress(src.encode("utf-8"))
                    chunk += cm.int32tobytes(len(compressed))
                    chunk += cm.int32tobytes(len(src))
                    chunk += compressed
                else:
                    chunk += cm.int32tobytes(len(src))
                    chunk += cm.int32tobytes(len(src))
                    chunk += src.encode("utf-8")

            chunk = cm.create_chunk(chunk, cm.SHADER_CHUNK_TYPE)
            chunks += chunk
            cm.cache_chunk(id, chunk)

        return (chunks, len(self.shaders))
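
The `compress` called here and in Example #1 is presumably `zlib.compress` (the import is not shown). Under that assumption each source is framed as stored size, raw size, then the bytes, and a reader could unpack it like this sketch (little-endian, matching the helper sketch under Example #1):

    import struct
    import zlib

    def read_payload(buf, offset, compressed):
        # One (stored size, raw size, data) payload as framed above.
        # Little-endian, unsigned framing assumed.
        stored_len, raw_len = struct.unpack_from("<II", buf, offset)
        offset += 8
        data = buf[offset:offset + stored_len]
        if compressed:
            data = zlib.decompress(data)
        assert len(data) == raw_len  # raw size is stored alongside
        return data, offset + stored_len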
Example #4
    def proceed(self):
        chunks = []
        max_tex = self.get_max_texture_index()

        for i, font in enumerate(self.fonts):

            cm.check_dict(
                i, "font", font, {
                    "name": (cm.def_string_comp, True),
                    "fn": (cm.def_string_comp, True),
                    "shader": (cm.def_string_comp, True),
                    "min_filter": (tp.check_min, False),
                    "mag_filter": (tp.check_mag, False),
                    "compression": (tp.check_comp, False),
                    "index": (cm.def_int_comp, False),
                })

            files = [font["fn"]]
            id = cm.get_id(self.path, font)
            if cm.is_file_cached("font", i, id, self.path, files):
                chunks += cm.get_cached_chunk(id)
                print("[{}/{}]: Font \"{}\" already cached".format(
                    i + 1, len(self.fonts), font["name"]))
                continue

            print("[{0}/{1}]: Packing font \"{2}\"".format(
                i + 1, len(self.fonts), font["name"]))

            font_fn = self.path + font["fn"]
            font_image = FONTBM_OUTPUT + "_0.png"
            font_file = FONTBM_OUTPUT + ".fnt"

            os.system(FONTBM_PATH + " " +
                      FONTBM_ARG.format(font_fn, FONTBM_OUTPUT))
            os.system(IMAGE_MAGIC_ARG.format(font_image))

            index = i
            if "index" in font:
                index = font["index"]

            name = font["name"]

            min_filter = self.default_font_min
            if "min_filter" in font:
                min_filter = font["min_filter"]

            mag_filter = self.default_font_mag
            if "mag_filter" in font:
                mag_filter = font["mag_filter"]

            compression = self.default_font_compression
            if "compression" in font:
                compression = font["compression"]

            shader = font["shader"]

            chunk = []
            chunk += cm.int32tobytes(index)
            chunk += cm.int16tobytes(len(name))
            chunk += name.encode("utf-8")

            with open(font_file, mode="rb") as ff:
                data = ff.read()
                chunk += cm.int32tobytes(len(data))
                chunk += data

            chunk += cm.int32tobytes(max_tex + i + 1)
            chunk += cm.int16tobytes(len(shader))
            chunk += shader.encode("utf-8")

            tex = tp.texture_packer.create_chunk(FONT_WRAPPING, min_filter,
                                                 mag_filter, 0, compression,
                                                 font_image, max_tex + i + 1,
                                                 "___font_tex_" + name)

            os.unlink(font_image)
            os.unlink(font_file)

            chunk = cm.create_chunk(chunk, cm.FONT_CHUNK_TYPE)

            chunks += tex
            chunks += chunk

            cm.cache_chunk(id, tex + chunk)

        return (chunks, len(self.fonts) * 2)
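
The fontbm and ImageMagick invocations above go through `os.system` with a formatted string, which ignores exit codes and breaks on paths containing spaces. A sketch of the fontbm call via `subprocess.run` instead; the flag names are hypothetical, since the real ones live in `FONTBM_ARG`, which is not shown in the source:

    import subprocess

    # Hypothetical flags -- the real ones live in FONTBM_ARG, not shown here.
    # An argument list avoids shell quoting; check=True raises on failure.
    subprocess.run(
        [FONTBM_PATH, "--font-file", font_fn, "--output", FONTBM_OUTPUT],
        check=True)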
Example #5
    def proceed(self):
        chunks = []
        for i, cubemap in enumerate(self.cubemaps):

            cm.check_dict(
                i, "cubemap", cubemap, {
                    "name": (cm.def_string_comp, True),
                    "fns": (check_fns, True),
                    "wrapping": (check_wrap, False),
                    "min_filter": (check_min, False),
                    "mag_filter": (check_mag, False),
                    "flip": (check_flip, False),
                    "compression": (check_comp, False),
                    "index": (cm.def_int_comp, False),
                })

            files = cubemap["fns"]
            id = cm.get_id(self.path, cubemap)
            if cm.is_file_cached("cubemap", i, id, self.path, files):
                chunks += cm.get_cached_chunk(id)
                print("[{}/{}]: Cubemap \"{}\" already cached".format(
                    i + 1, len(self.cubemaps), cubemap["name"]))
                continue

            wrapping = self.default_wrapping
            if "wrapping" in cubemap:
                wrapping = cubemap["wrapping"]

            min_filter = self.default_min
            if "min_filter" in cubemap:
                min_filter = cubemap["min_filter"]

            mag_filter = self.default_mag
            if "mag_filter" in cubemap:
                mag_filter = cubemap["mag_filter"]

            flip = self.default_flip
            if "flip" in cubemap:
                flip = cubemap["flip"]

            compress = self.default_compression
            if "compression" in cubemap:
                compress = cubemap["compression"]

            if compress == "dds_no":
                compress == "no"

            if compress == "png":
                tmps = [
                    'tmp{}.png'.format(i)
                    for i in range(0, len(cubemap["fns"]))
                ]
            else:
                tmps = [
                    'tmp{}.dds'.format(i)
                    for i in range(0, len(cubemap["fns"]))
                ]

            imsize = ()
            for j, fn in enumerate(cubemap["fns"]):
                with image.Image(filename=self.path + fn) as img:
                    imsize = img.size
                    if compress != "png":
                        img.compression = compress

                    img.save(filename=tmps[j])

            print("[{}/{}]: Packing cubemap \"{}\" ({}x{}, {} maps) as {}".
                  format(i + 1, len(self.cubemaps), cubemap["name"], imsize[0],
                         imsize[1], len(cubemap["fns"]), compress.upper()))

            index = i
            if "index" in cubemap:
                index = cubemap["index"]

            chunk = []
            chunk += cm.int32tobytes(index)
            chunk += cm.int8tobytes(len(cubemap["fns"]))
            chunk += cm.int16tobytes(len(cubemap["name"]))
            chunk += cubemap["name"].encode("utf-8")
            chunk += cm.int32tobytes(imsize[0])
            chunk += cm.int32tobytes(imsize[1])
            chunk += cm.int8tobytes(compression_dict[compress])
            chunk += cm.int8tobytes(texture_wrapping_dict[wrapping])
            chunk += cm.int8tobytes(min_dict[min_filter])
            chunk += cm.int8tobytes(mag_dict[mag_filter])
            chunk += cm.int8tobytes(flip)

            for tmp in tmps:
                with open(tmp, mode='rb') as file:
                    b = file.read()
                    chunk += cm.int32tobytes(len(b))
                    chunk += b
                os.remove(tmp)

            chunk = cm.create_chunk(chunk, cm.CUBEMAP_CHUNK_TYPE)
            chunks += chunk

            cm.cache_chunk(id, chunk)

        return (chunks, len(self.cubemaps))
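
Since the writer above fully determines the cubemap chunk body layout, it can be decoded symmetrically. A reader sketch, assuming little-endian, unsigned fields as in the earlier helper sketch (the `compression_dict` and related tables map to project-specific ints, so those values are left raw here):

    import struct

    def read_cubemap_body(buf):
        # Parse one cubemap chunk body as written above (sketch only;
        # little-endian, unsigned fields assumed, enum values left raw).
        off = 0
        index, = struct.unpack_from("<I", buf, off)
        off += 4
        face_count, = struct.unpack_from("<B", buf, off)
        off += 1
        name_len, = struct.unpack_from("<H", buf, off)
        off += 2
        name = buf[off:off + name_len].decode("utf-8")
        off += name_len
        width, height = struct.unpack_from("<II", buf, off)
        off += 8
        compression, wrapping, min_f, mag_f, flip = struct.unpack_from("<5B", buf, off)
        off += 5
        faces = []
        for _ in range(face_count):
            size, = struct.unpack_from("<I", buf, off)
            off += 4
            faces.append(buf[off:off + size])
            off += size
        return index, name, (width, height), flip, faces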
Example #6
    def proceed(self):
        chunks = []
        for i, texture in enumerate(self.textures):

            cm.check_dict(
                i, "texture", texture, {
                    "name": (cm.def_string_comp, True),
                    "fn": (cm.def_string_comp, True),
                    "wrapping": (check_wrap, False),
                    "min_filter": (check_min, False),
                    "mag_filter": (check_mag, False),
                    "flip": (check_flip, False),
                    "compression": (check_comp, False),
                    "index": (cm.def_int_comp, False),
                })

            files = [texture["fn"]]
            id = cm.get_id(self.path, texture)
            if cm.is_file_cached("texture", i, id, self.path, files):
                chunks += cm.get_cached_chunk(id)
                print("[{}/{}]: Texture \"{}\" already cached".format(
                    i + 1, len(self.textures), texture["name"]))
                continue

            wrapping = self.default_wrapping
            if "wrapping" in texture:
                wrapping = texture["wrapping"]

            min_filter = self.default_min
            if "min_filter" in texture:
                min_filter = texture["min_filter"]

            mag_filter = self.default_mag
            if "mag_filter" in texture:
                mag_filter = texture["mag_filter"]

            flip = self.default_flip
            if "flip" in texture:
                flip = texture["flip"]

            compress = self.default_compression
            if "compression" in texture:
                compress = texture["compression"]

            index = i
            if "index" in texture:
                index = texture["index"]

            if compress == "dds_no":
                compress == "no"

            name = texture["name"]
            full_path = self.path + texture["fn"]

            imsize = (0, 0)  #todo: ?
            print("[{}/{}]: Packing texture \"{}\" ({}x{}) as {}".format(
                i + 1, len(self.textures), texture["name"], imsize[0],
                imsize[1], compress.upper()))

            chunk = self.create_chunk(wrapping, min_filter, mag_filter, flip,
                                      compress, full_path, index, name)
            chunks += chunk

            cm.cache_chunk(id, chunk)

        return (chunks, len(self.textures))
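
The `imsize` placeholder above is left at `(0, 0)` with its todo intact. If real dimensions are wanted for the log line, they could be read the way Example #5 does with Wand; an assumption, not part of the original code:

    from wand import image

    # Possible fill for the imsize todo, mirroring Example #5's use of
    # wand (sketch; not in the original code).
    with image.Image(filename=full_path) as img:
        imsize = img.size  # (width, height)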