Exemple #1
0
def extract_gif_chunk():
    """
    Extract all of the GIF images from gifchunk.bin as PNGs.

    gifchunk.bin is just raw gifs that were found lying around in the wdb.
    They aren't specifically assigned to any models, and are likely just
    loaded into memory when Lego Island loads.
    """
    # Context manager guarantees the handle is closed even if
    # get_formatted_data or export_gif raises (the original leaked it).
    with open(SETTINGS["gif_path"] + "/gifchunk.bin", "rb") as bin_file:
        data = get_formatted_data(bin_file, "wdb", "gifchunk")

        # export_gif reads pixel data straight from bin_file, so the handle
        # must stay open for the whole loop.
        for image in data["images"]:
            export_gif(image, SETTINGS["gif_path"], bin_file)
Exemple #2
0
def extract_gif_chunk():
    """
    Extract all of the GIF images from gifchunk.bin as PNGs.

    gifchunk.bin is just raw gifs that were found lying around in the wdb.
    They aren't specifically assigned to any models, and are likely just
    loaded into memory when Lego Island loads.
    """
    # Context manager guarantees the handle is closed even if
    # get_formatted_data or export_gif raises (the original leaked it).
    with open(SETTINGS["gif_path"] + "/gifchunk.bin", "rb") as bin_file:
        data = get_formatted_data(bin_file, "wdb", "gifchunk")

        # export_gif reads pixel data straight from bin_file, so the handle
        # must stay open for the whole loop.
        for image in data["images"]:
            export_gif(image, SETTINGS["gif_path"], bin_file)
Exemple #3
0
def extract_wdb():
    """
    Open WORLD.WDB and write all the sections as *.bin files.

    Open WORLD.WDB as specified by the structure found in the wdb format
    file. Export each section of the wdb as a bin file in the folder
    hierarchy found in the header of the wdb file.
    """
    # Context managers close every handle on all paths — the original never
    # closed the archive or any of the per-section output files.
    with open(SETTINGS["wdb_path"], "rb") as bin_file:
        data = get_formatted_data(bin_file, "wdb", "wdb")

        for each_group in data["groups"]:
            # Titles appear to be terminator-padded; [:-1] below strips the
            # trailing byte — TODO confirm against the wdb format file.
            group_title = get_raw(each_group["group_title"], bin_file)
            # Subgroups have no stored names, so synthesize "sub0", "sub1", …
            for i, each_subgroup in enumerate(each_group["subgroups"]):
                subgroup_title = "sub" + str(i)
                for each_sub_item in each_subgroup["sub_items"]:
                    sub_item_title = get_raw(each_sub_item["sub_item_title"],
                                             bin_file)
                    item_offset = get_raw(each_sub_item["item_offset"],
                                          bin_file)
                    size_of_item = get_raw(each_sub_item["size_of_item"],
                                           bin_file)

                    directory = create_dir(SETTINGS["bin_path"] + "/" +
                                           group_title[:-1] + "/" +
                                           subgroup_title + "/")

                    # "wb" already truncates, so the explicit truncate() the
                    # original called was redundant.
                    bin_file.seek(item_offset)
                    with open(directory + "/" + sub_item_title[:-1] + ".bin",
                              "wb") as write_file:
                        write_file.write(bin_file.read(size_of_item))

        #write gif chunk to be extracted by extract_gif_chunk()
        # data["gif_chunk_size"] is (format, offset); the size field itself is
        # 4 bytes, so the chunk payload starts at offset + 4.  The size is
        # read before seeking so the read position cannot be disturbed.
        directory = create_dir(SETTINGS["gif_path"])
        chunk_size = get_raw(data["gif_chunk_size"], bin_file)
        bin_file.seek(data["gif_chunk_size"][1] + 4)
        with open(directory + "/gifchunk.bin", "wb") as write_file:
            write_file.write(bin_file.read(chunk_size))

        #write model chunk to be extracted by extract_model_chunk()
        directory = create_dir(SETTINGS["gif_path"])
        chunk_size = get_raw(data["model_chunk_size"], bin_file)
        bin_file.seek(data["model_chunk_size"][1] + 4)
        with open(directory + "/modelchunk.bin", "wb") as write_file:
            write_file.write(bin_file.read(chunk_size))
Exemple #4
0
def extract_model_chunk():
    """
    Split modelchunk.bin into one .bin file per embedded model.

    modelchunk.bin (written by extract_wdb()) is a series of back-to-back
    records; each record's header stores the offset where the record ends,
    which is how each item's size is recovered.
    """
    # Context managers close every handle on all paths (the original leaked
    # both the input and each output file).
    with open(SETTINGS["gif_path"] + "/modelchunk.bin", "rb") as bin_file:
        data = get_formatted_data(bin_file, "wdb", "modelchunk")
        bin_file.seek(0)
        for binn in data["bins"]:
            end_bin_offset = get_raw(binn["end_bin_offset"], bin_file)
            # The current position is the start of this record, so the
            # record's size is the distance to its stored end offset.
            size_of_item = end_bin_offset - bin_file.tell()

            # "wb" already truncates, so the original's truncate() call was
            # redundant.
            out_path = (SETTINGS["bin_path"] + "/" +
                        get_raw(binn["bin_name"], bin_file) + ".bin")
            with open(out_path, "wb") as write_file:
                write_file.write(bin_file.read(size_of_item))

            # Jump to the start of the next record.
            bin_file.seek(end_bin_offset)
Exemple #5
0
def extract_wdb():
    """
    Open WORLD.WDB and write all the sections as *.bin files.

    Open WORLD.WDB as specified by the structure found in the wdb format
    file. Export each section of the wdb as a bin file in the folder
    hierarchy found in the header of the wdb file.
    """
    # Context managers close every handle on all paths — the original never
    # closed the archive or any of the per-section output files.
    with open(SETTINGS["wdb_path"], "rb") as bin_file:
        data = get_formatted_data(bin_file, "wdb", "wdb")

        for each_group in data["groups"]:
            # Titles appear to be terminator-padded; [:-1] below strips the
            # trailing byte — TODO confirm against the wdb format file.
            group_title = get_raw(each_group["group_title"], bin_file)
            # Subgroups have no stored names, so synthesize "sub0", "sub1", …
            for i, each_subgroup in enumerate(each_group["subgroups"]):
                subgroup_title = "sub" + str(i)
                for each_sub_item in each_subgroup["sub_items"]:
                    sub_item_title = get_raw(each_sub_item["sub_item_title"], bin_file)
                    item_offset = get_raw(each_sub_item["item_offset"], bin_file)
                    size_of_item = get_raw(each_sub_item["size_of_item"], bin_file)

                    directory = create_dir(SETTINGS["bin_path"] + "/" + group_title[:-1] + "/" + subgroup_title + "/")

                    # "wb" already truncates, so the explicit truncate() the
                    # original called was redundant.
                    bin_file.seek(item_offset)
                    with open(directory + "/" + sub_item_title[:-1] + ".bin", "wb") as write_file:
                        write_file.write(bin_file.read(size_of_item))

        #write gif chunk to be extracted by extract_gif_chunk()
        # data["gif_chunk_size"] is (format, offset); the size field itself is
        # 4 bytes, so the chunk payload starts at offset + 4.  The size is
        # read before seeking so the read position cannot be disturbed.
        directory = create_dir(SETTINGS["gif_path"])
        chunk_size = get_raw(data["gif_chunk_size"], bin_file)
        bin_file.seek(data["gif_chunk_size"][1] + 4)
        with open(directory + "/gifchunk.bin", "wb") as write_file:
            write_file.write(bin_file.read(chunk_size))

        #write model chunk to be extracted by extract_model_chunk()
        directory = create_dir(SETTINGS["gif_path"])
        chunk_size = get_raw(data["model_chunk_size"], bin_file)
        bin_file.seek(data["model_chunk_size"][1] + 4)
        with open(directory + "/modelchunk.bin", "wb") as write_file:
            write_file.write(bin_file.read(chunk_size))
Exemple #6
0
def extract_model_chunk():
    """
    Split modelchunk.bin into one .bin file per embedded model.

    modelchunk.bin (written by extract_wdb()) is a series of back-to-back
    records; each record's header stores the offset where the record ends,
    which is how each item's size is recovered.
    """
    # Context managers close every handle on all paths (the original leaked
    # both the input and each output file).
    with open(SETTINGS["gif_path"] + "/modelchunk.bin", "rb") as bin_file:
        data = get_formatted_data(bin_file, "wdb", "modelchunk")
        bin_file.seek(0)
        for binn in data["bins"]:
            end_bin_offset = get_raw(binn["end_bin_offset"], bin_file)
            # The current position is the start of this record, so the
            # record's size is the distance to its stored end offset.
            size_of_item = end_bin_offset - bin_file.tell()

            # "wb" already truncates, so the original's truncate() call was
            # redundant.
            out_path = (SETTINGS["bin_path"] + "/" +
                        get_raw(binn["bin_name"], bin_file) + ".bin")
            with open(out_path, "wb") as write_file:
                write_file.write(bin_file.read(size_of_item))

            # Jump to the start of the next record.
            bin_file.seek(end_bin_offset)
Exemple #7
0
def extract_pattern(file_path, pattern):
    """
    Attempt to extract data from a .bin file and return the progress made.

    There are five steps to extraction:
     * Interpret data from the wdb using the wdb format file
     * Export the object files using the data
     * Export textures
     * Export materials
     * Export the material file mtl
    These steps are represented by the list of five items returned by the
    function that signify if that step was successful. '_' means success,
    and 'X' means failure.
    """
    global MATERIALS
    global STATS

    #reset materials to empty
    MATERIALS = {}

    progress = ["X", "X", "X", "X", "X"]

    # The context manager closes the handle on every return path (the
    # original leaked it), and the bare ``except:`` clauses are narrowed to
    # ``Exception`` so KeyboardInterrupt/SystemExit still propagate.
    with open(file_path, "rb") as bin_file:
        #INTERPRET FORMAT FROM FILE
        try:
            data = get_formatted_data(bin_file, "wdb", pattern)
            progress[0] = "_"
        except Exception:
            trace_error()
            return progress

        #EXPORT OBJ
        try:
            file_name = get_raw(data["file_name"], bin_file)

            for component in data["components"]:
                component_name = get_raw(component["component_name"], bin_file)
                if "models" in component:
                    # model_index counts down so that 0 marks the last model
                    # in the list (treated as the highest LOD by SETTINGS).
                    model_index = len(component["models"])
                    for model in component["models"]:
                        model_index -= 1
                        #determine whether or not to export this LOD model based on SETTINGS
                        export = (not SETTINGS["highest_lod_only"]
                                  or model_index == 0)

                        if export:
                            if SETTINGS["highest_lod_only"] and not SETTINGS["lod_labels"]:
                                end_string = ""
                            else:
                                end_string = "_lod" + str(model_index)
                            file_path = file_path.replace("\\", "/")
                            obj_path = create_dir(SETTINGS["obj_path"] + "/" + file_path[file_path.find("/", 3):file_path.rfind("/")] + "/" + file_name + "/")
                            export_obj(data, model, bin_file, obj_path + "/" + component_name + end_string)
                else:
                    #no models in this component, only the component header
                    pass
            progress[1] = "_"
        except Exception:
            trace_error()
            return progress

        found_materials = []
        #EXPORT TEXTURES
        # NOTE(review): obj_path is only assigned while exporting models, so
        # if no component had models this step fails with NameError and is
        # reported as an 'X' — behaviour inherited from the original.
        try:
            # Value is unused, but get_raw is still called in case reading
            # the field matters for bin_file state — TODO confirm droppable.
            num_images = get_raw(data["num_images"], bin_file)

            #export textures embedded in this bin group as .png
            for image in data["images"]:
                #normal gif
                export_gif(image, obj_path, bin_file)
                #special hidden gif, only seen on isle and isle_hi gifs
                if "extra_images" in image:
                    image["extra_images"][0]["gif_name"] = image["gif_name"]
                    export_gif(image["extra_images"][0], obj_path, bin_file, pretext="hidden_")
                # Strip the 4-character ".gif" extension for the material name.
                found_materials.append(get_raw(image["gif_name"], bin_file)[:-4])
            progress[2] = "_"
        except Exception:
            trace_error()
            return progress

        #EXPORT MATERIALS
        try:
            #export materials without textures as .png, just their rgb on a 4x4 texture
            for material in MATERIALS:
                if material not in found_materials:
                    found_materials.append(material)
                    #write 4x4 png of color c: each row is r,g,b repeated
                    #for the 4 pixels of that row
                    c = MATERIALS[material]
                    rows = [[c[0], c[1], c[2]] * 4 for _ in range(4)]
                    with open(obj_path + "/" + material + ".png", "wb") as f:
                        w = png.Writer(4, 4)
                        w.write(f, rows)

            #statistics for materials
            for material in found_materials:
                found_duplicate = False
                for row in STATS["csv"]["materials"]:
                    if row[0] == material:
                        found_duplicate = True
                        row[2] += 1
                if not found_duplicate:
                    STATS["csv"]["materials"].append([material, "No", 1])

            progress[3] = "_"
        except Exception:
            trace_error()
            return progress

        #EXPORT MTL FILE
        try:
            export_mtl(data, obj_path, bin_file)
            progress[4] = "_"
        except Exception:
            trace_error()
            return progress

    return progress
Exemple #8
0
def main():
    """
    Run the interactive binary-structure editor (pygame event loop).

    Opens SETTINGS["filename"], optionally pre-populates type groups from
    the pattern described by SETTINGS["format"]/SETTINGS["pattern"], then
    loops forever handling keyboard and mouse input until the window is
    closed.
    """
    # NOTE(review): bin_file is deliberately left open for the program's
    # lifetime — Editor and every Group keep reading from it.
    bin_file = open(SETTINGS["filename"], "rb")

    editor = Editor(bin_file)
    overlay = Overlay()

    bin_file.seek(0)
    if SETTINGS["ignore_pattern"] == False:
        data = get_formatted_data(bin_file, SETTINGS["format"], SETTINGS["pattern"])

        #tuple_data is (key, (data_string, offset))
        # Sorted by file offset so the SPACE key (below) steps through the
        # file front to back.
        nested_values = sorted(get_nested_values(data), key=lambda x: x[1][1])
        nested_index = 0

        for tuple_data in nested_values:
            key = tuple_data[0]
            data_string = tuple_data[1][0]
            offset = tuple_data[1][1]

            new_group = Group(key, data_string, offset, bin_file)
            editor.groups.append(new_group)

    selected_data = None
    # NOTE(review): nested_values/nested_index are only bound when
    # ignore_pattern is False — pressing SPACE otherwise raises NameError.
    # The tagging keys below also assume selected_data is not None.
    while(True):
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                #editor.save()
                sys.exit(0)
            if event.type == pygame.KEYDOWN:
                INFO["update_display"] = True
                # i/h/b/f/s tag the selected offset with a type code; these
                # look like struct-style format codes — confirm against Group.
                if event.key == pygame.K_i:
                    editor.groups.append(Group("UNDEFINED", "i", selected_data.offset, bin_file))
                if event.key == pygame.K_h:
                    editor.groups.append(Group("UNDEFINED", "h", selected_data.offset, bin_file))
                if event.key == pygame.K_b:
                    editor.groups.append(Group("UNDEFINED", "B", selected_data.offset, bin_file))
                if event.key == pygame.K_f:
                    editor.groups.append(Group("UNDEFINED", "f", selected_data.offset, bin_file))
                if event.key == pygame.K_s:
                    # String groups need a user-supplied length.
                    length = get_num()
                    editor.groups.append(Group("UNDEFINED", "s"+str(length), selected_data.offset, bin_file))

                # BACKSPACE removes any group at the selected offset.
                if event.key == pygame.K_BACKSPACE:
                    for each_group in editor.groups:
                        if each_group.offset == selected_data.offset:
                            editor.groups.remove(each_group)

                # g prompts for an offset and scrolls the view to its row.
                if event.key == pygame.K_g:
                    offset = get_num()
                    INFO["y_offset"] = -int(offset/SETTINGS["cells_per_row"])
                    editor.update_rects()
                    editor.populate()

                # SPACE reveals the next pattern value (strings are skipped).
                if event.key == pygame.K_SPACE:
                    if nested_index < len(nested_values)-1:
                        tuple_data = nested_values[nested_index]
                        key = tuple_data[0]
                        data_string = tuple_data[1][0]
                        offset = tuple_data[1][1]

                        if not data_string.startswith("s"):
                            new_group = Group(key, data_string, offset, bin_file)
                            editor.groups.append(new_group)

                        nested_index += 1
                    else:
                        print("NO MORE NESTED VALUES")

            if event.type == pygame.MOUSEBUTTONDOWN:
                INFO["update_display"] = True
                # Buttons 4/5 are the scroll wheel; scrolling clears the
                # current selection because the rows are repopulated.
                if event.button == 4:
                    if INFO["y_offset"] < 0:
                        INFO["y_offset"] += 1
                        editor.update_rects()
                        editor.populate()
                        selected_data = None
                elif event.button == 5:
                    INFO["y_offset"] -= 1
                    editor.update_rects()
                    editor.populate()
                    selected_data = None

        #select data with the mouse
        mouse_pos = pygame.mouse.get_pos()
        mouse_pressed = pygame.mouse.get_pressed()
        if mouse_pressed[0]:
            for each_data in editor.data:
                if each_data.is_in(mouse_pos):
                    INFO["update_display"] = True
                    selected_data = each_data

        #hover over groups with the mouse
        # Drop last frame's tooltip; the try/except swallows the KeyError
        # raised when no tooltip was shown.
        try:
            del(overlay.icons["mouse_icon"])
        except:
            pass
        for each_group in editor.groups:
            if each_group.is_in(mouse_pos):
                INFO["update_overlay"] = True
                # Render the group name on a white box, clamped so it never
                # runs off the right edge of the screen.
                name_text = main_font.render(each_group.name, True, (50, 50, 50))
                name_rect = pygame.surface.Surface((name_text.get_width(), name_text.get_height()))
                name_rect.fill((255, 255, 255))
                name_rect.blit(name_text, (0, 0))
                name_pos = [mouse_pos[0], mouse_pos[1]+10]
                if name_pos[0]+name_rect.get_width() > SETTINGS["screen_x"]:
                    name_pos[0] = SETTINGS["screen_x"]-name_rect.get_width()
                overlay.icons["mouse_icon"] = Icon(name_rect, name_pos)

        #only redraw editor if required
        if INFO["update_display"]:
            INFO["update_display"] = False

            #draw editor and selected_data
            editor.draw()
            if selected_data != None:
                selected_data.draw(True)
            #save a bg_image so that it does not have to be redrawn with every overlay update
            INFO["bg_image"] = pygame.surface.Surface((screen.get_width(), screen.get_height()))
            INFO["bg_image"].blit(screen, (0, 0))

        if INFO["update_overlay"]:
            screen.blit(INFO["bg_image"], (0, 0))
            overlay.draw()


        pygame.display.flip()
Exemple #9
0
def extract_pattern(file_path, pattern):
    """
    Attempt to extract data from a .bin file and return the progress made.

    There are five steps to extraction:
     * Interpret data from the wdb using the wdb format file
     * Export the object files using the data
     * Export textures
     * Export materials
     * Export the material file mtl
    These steps are represented by the list of five items returned by the
    function that signify if that step was successful. '_' means success,
    and 'X' means failure.
    """
    global MATERIALS
    global STATS

    #reset materials to empty
    MATERIALS = {}

    progress = ["X", "X", "X", "X", "X"]

    # The context manager closes the handle on every return path (the
    # original leaked it), and the bare ``except:`` clauses are narrowed to
    # ``Exception`` so KeyboardInterrupt/SystemExit still propagate.
    with open(file_path, "rb") as bin_file:
        #INTERPRET FORMAT FROM FILE
        try:
            data = get_formatted_data(bin_file, "wdb", pattern)
            progress[0] = "_"
        except Exception:
            trace_error()
            return progress

        #EXPORT OBJ
        try:
            file_name = get_raw(data["file_name"], bin_file)

            for component in data["components"]:
                component_name = get_raw(component["component_name"], bin_file)
                if "models" in component:
                    # model_index counts down so that 0 marks the last model
                    # in the list (treated as the highest LOD by SETTINGS).
                    model_index = len(component["models"])
                    for model in component["models"]:
                        model_index -= 1
                        #determine whether or not to export this LOD model based on SETTINGS
                        export = (not SETTINGS["highest_lod_only"]
                                  or model_index == 0)

                        if export:
                            if SETTINGS["highest_lod_only"] and not SETTINGS[
                                    "lod_labels"]:
                                end_string = ""
                            else:
                                end_string = "_lod" + str(model_index)
                            file_path = file_path.replace("\\", "/")
                            obj_path = create_dir(
                                SETTINGS["obj_path"] + "/" +
                                file_path[file_path.find("/", 3):file_path.
                                          rfind("/")] + "/" + file_name + "/")
                            export_obj(
                                data, model, bin_file,
                                obj_path + "/" + component_name + end_string)
                else:
                    #no models in this component, only the component header
                    pass
            progress[1] = "_"
        except Exception:
            trace_error()
            return progress

        found_materials = []
        #EXPORT TEXTURES
        # NOTE(review): obj_path is only assigned while exporting models, so
        # if no component had models this step fails with NameError and is
        # reported as an 'X' — behaviour inherited from the original.
        try:
            # Value is unused, but get_raw is still called in case reading
            # the field matters for bin_file state — TODO confirm droppable.
            num_images = get_raw(data["num_images"], bin_file)

            #export textures embedded in this bin group as .png
            for image in data["images"]:
                #normal gif
                export_gif(image, obj_path, bin_file)
                #special hidden gif, only seen on isle and isle_hi gifs
                if "extra_images" in image:
                    image["extra_images"][0]["gif_name"] = image["gif_name"]
                    export_gif(image["extra_images"][0],
                               obj_path,
                               bin_file,
                               pretext="hidden_")
                # Strip the 4-character ".gif" extension for the material name.
                found_materials.append(get_raw(image["gif_name"], bin_file)[:-4])
            progress[2] = "_"
        except Exception:
            trace_error()
            return progress

        #EXPORT MATERIALS
        try:
            #export materials without textures as .png, just their rgb on a 4x4 texture
            for material in MATERIALS:
                if material not in found_materials:
                    found_materials.append(material)
                    #write 4x4 png of color c: each row is r,g,b repeated
                    #for the 4 pixels of that row
                    c = MATERIALS[material]
                    rows = [[c[0], c[1], c[2]] * 4 for _ in range(4)]
                    with open(obj_path + "/" + material + ".png", "wb") as f:
                        w = png.Writer(4, 4)
                        w.write(f, rows)

            #statistics for materials
            for material in found_materials:
                found_duplicate = False
                for row in STATS["csv"]["materials"]:
                    if row[0] == material:
                        found_duplicate = True
                        row[2] += 1
                if not found_duplicate:
                    STATS["csv"]["materials"].append([material, "No", 1])

            progress[3] = "_"
        except Exception:
            trace_error()
            return progress

        #EXPORT MTL FILE
        try:
            export_mtl(data, obj_path, bin_file)
            progress[4] = "_"
        except Exception:
            trace_error()
            return progress

    return progress
Exemple #10
0
def main():
    """
    Extract every file packed inside a .crp archive.

    Parses the archive header, then writes each contained file into an
    output directory named after the mod.  Unity Texture2D entries are
    saved as .dds, preview images as .png, and everything else verbatim.
    Whatever metadata precedes each payload is collected into
    metadata.json next to the extracted files.
    """
    args = parser.parse_args()

    file_name = args.file.name
    bin_file = args.file

    data = get_formatted_data(bin_file, "crp", "crp")

    name_of_mod = get_raw(data.get("name_of_mod", ""), bin_file)
    if name_of_mod == "":
        # NOTE(review): this fallback yields a str while the normal path
        # appears to yield bytes, so decode() below would raise — confirm
        # get_raw's behaviour for a missing field.
        name_of_mod = file_name[:-4]
    output_path = os.path.join(args.output_dir, name_of_mod.decode('utf-8'))
    # exist_ok avoids the check-then-create race of the original.
    os.makedirs(output_path, exist_ok=True)

    end_header_offset = get_raw(data["end_header_offset"], bin_file)

    metadata = {}

    #go through each file found
    for file_header in data["file_headers"]:
        file_name = get_raw(file_header["file_name"], bin_file).decode('utf-8')
        offset_from_header = get_raw(file_header["offset_from_header"],
                                     bin_file)
        file_size = get_raw(file_header["file_size"], bin_file)

        # Payload offsets are stored relative to the end of the header.
        absolute_offset = offset_from_header + end_header_offset

        bin_file.seek(absolute_offset)
        try:
            # Peek at the first 48 bytes to identify the asset type.
            id_string = str(unpack(bin_file, "s", 48)).lower()
        except Exception:
            # Unreadable/short entry: fall through to the generic branch.
            id_string = ""

        #manually search for PNG header in data
        png_header = [137, 80, 78, 71, 13, 10, 26, 10]
        bin_file.seek(absolute_offset)
        found_header = True
        for i in range(8):
            if unpack(bin_file, "B") != png_header[i]:
                found_header = False

        #TEXTURE2D
        if "unityengine.texture2d" in id_string:
            print("found texture2d")
            bin_file.seek(absolute_offset)

            dds_string = ""
            #find "DDS " in the file to mark the start of the dds
            # NOTE(review): if no "DDS " magic exists, this loop only stops
            # when unpack fails at EOF — inherited from the original.
            while (True):
                value = unpack(bin_file, "B")
                if value == 68:
                    dds_string += "D"
                elif value == 83:
                    dds_string += "S"
                elif value == 32:
                    dds_string += " "
                else:
                    dds_string = ""

                if dds_string == "DDS ":
                    dds_offset = bin_file.tell() - 4

                    # Everything between the payload start and the magic is
                    # per-file metadata.
                    meta_offset = absolute_offset
                    meta_size = dds_offset - absolute_offset

                    final_path = os.path.join(output_path, file_name + '.dds')
                    final_offset = dds_offset
                    final_size = file_size - meta_size
                    break

        #STEAM PREVIEW PNG (AND RANDOM PNGS)
        elif "icolossalframework.importers.image" in id_string or found_header:
            print("found png")
            bin_file.seek(absolute_offset)

            png_string = ""
            #find "PNG" in the file to mark the start of the png
            while (True):
                value = unpack(bin_file, "B")
                if value == 137:
                    png_string += "89"
                elif value == 80:
                    png_string += "50"
                elif value == 78:
                    png_string += "4E"
                elif value == 71:
                    png_string += "47"
                else:
                    png_string = ""

                if png_string == "89504E47":
                    png_offset = bin_file.tell() - 4

                    meta_offset = absolute_offset
                    meta_size = png_offset - absolute_offset

                    final_path = os.path.join(output_path, file_name + ".png")
                    final_offset = png_offset
                    final_size = file_size - meta_size
                    break

        #GENERIC
        else:
            print("found generic")
            meta_offset = absolute_offset
            meta_size = 0

            final_path = os.path.join(output_path, file_name)
            final_offset = absolute_offset
            final_size = file_size

        #add metadata to the metadata dictionary
        if meta_size == 0:
            metadata[final_path] = ""
        else:
            bin_file.seek(meta_offset)
            metadata[final_path] = unpack(bin_file, "s",
                                          meta_size).decode('utf-8', 'ignore')

        #write file — the with-block closes the handle even if a read fails
        #(the original leaked it on exception; "wb" also already truncates).
        bin_file.seek(final_offset)
        with open(final_path, "wb") as write_file:
            write_file.write(bin_file.read(final_size))

    #save the metadata dictionary using json
    with open(os.path.join(output_path, "metadata.json"), "w") as f:
        json.dump(metadata, f, indent=4, sort_keys=True)