def export(armature, bone_list, filepath, filename):
    """Exports PIA animation for the active action of the given armature.

    Gathers bone and custom channels from the armature's active action,
    assembles PIA sections and writes them either next to *filepath* or to
    the action's custom export location (if it is a valid directory).

    :param armature: armature object whose active action gets exported (presumably bpy.types.Object - TODO confirm)
    :type armature: bpy.types.Object
    :param bone_list: bones to export channels for (shape/order defined by caller - TODO confirm)
    :type bone_list: list
    :param filepath: path to export; its directory is used as default output location
    :type filepath: str
    :param filename: name of exported file; ".pis" is appended to reference the skeleton
    :type filename: str
    :return: Blender operator result set
    :rtype: set[str]
    """
    scs_globals = _get_scs_globals()

    # anim_file_name = os.path.splitext(os.path.split(filepath)[1])[0]

    print("\n************************************")
    print("** SCS PIA Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA GATHERING
    # skeleton file name referenced from the PIA "global" section
    skeleton_file = str(filename + ".pis")
    action = armature.animation_data.action
    total_time = action.scs_props.action_length
    anim_export_filepath = action.scs_props.anim_export_filepath
    bone_channels = _get_bone_channels(bone_list, action, scs_globals.export_scale)
    custom_channels = _get_custom_channels(action)

    # DATA CREATION
    header_section = _fill_header_section(action, scs_globals.sign_export)
    custom_channel_sections = _fill_channel_sections(custom_channels, "CustomChannel")
    bone_channel_sections = _fill_channel_sections(bone_channels, "BoneChannel")
    global_section = _fill_global_section(skeleton_file, total_time, len(bone_channels), len(custom_channels))

    # DATA ASSEMBLING
    pia_container = [header_section, global_section]
    for section in custom_channel_sections:
        pia_container.append(section)
    for section in bone_channel_sections:
        pia_container.append(section)

    # EXPORT PIA TO CUSTOM LOCATION
    # pia_filepath = str(filepath[:-1] + "a")
    dir_path = os.path.dirname(filepath)
    # custom per-action export location overrides default, but only if it already exists
    if anim_export_filepath:
        if os.path.isdir(anim_export_filepath):
            dir_path = anim_export_filepath
        else:
            pass  # TODO: Create location?

    # FILE EXPORT
    ind = " "
    pia_filepath = os.path.join(dir_path, str(action.name + ".pia"))
    _pix_container.write_data_to_file(pia_container, pia_filepath, ind)

    # print("************************************")
    return {'FINISHED'}
def write_file(container, filepath, ind, postfix=""):
    """Write a data container out as a PIx text file.

    :param container: List of SCS PIx Sections
    :type container: list[structures.SectionData]
    :param filepath: Config absolute Filepath
    :type filepath: str
    :param ind: Indentation string for the File
    :type ind: str
    :param postfix: Postfix to be added to the end of the filename (optional)
    :type postfix: str
    """
    # splice the postfix between file stem and extension
    stem, extension = os.path.splitext(filepath)
    target_path = stem + postfix + extension
    _pix.write_data_to_file(container, target_path, ind)
def write_file(container, filepath, ind, postfix=""):
    """Export a container of PIx sections into a file.

    :param container: List of SCS PIx Sections
    :type container: list[structures.SectionData]
    :param filepath: Config absolute Filepath
    :type filepath: str
    :param ind: Indentation string for the File
    :type ind: str
    :param postfix: Postfix to be added to the end of the filename (optional)
    :type postfix: str
    """
    parts = os.path.splitext(filepath)
    # rebuild the path with the optional postfix inserted before the extension
    _pix.write_data_to_file(container, parts[0] + postfix + parts[1], ind)
def export(bone_list, filepath, filename):
    """Export the given bones into a PIS skeleton file at ``filepath + ".pis"``.

    :param bone_list: bones to be written into the skeleton file
    :type bone_list: list
    :param filepath: export path without extension
    :type filepath: str
    :param filename: name written into the PIS header
    :type filename: str
    :return: result reported by the PIx container writer
    """
    globs = _get_scs_globals()

    print("\n************************************")
    print("** SCS PIS Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # build individual PIS sections
    # TODO: SORT "bone_list"...
    header = _fill_header_section(filename, globs.sign_export)
    bones = _fill_bones_sections(bone_list, globs.export_scale)
    globals_section = _fill_global_section(len(bone_list))

    # assemble the container and write it out
    container = [header, globals_section, bones]
    return _pix_container.write_data_to_file(container, filepath + ".pis", " ")
def export(scs_root_obj, armature, scs_animation, dirpath, skeleton_filepath):
    """Exports PIA animation

    :param scs_root_obj: root object of current animation
    :type scs_root_obj: bpy.types.Object
    :param armature: armature object of current animation
    :type armature: bpy.types.Object
    :param scs_animation: animation which should get exported
    :type scs_animation: io_scs_tools.properties.object.ObjectAnimationInventory
    :param dirpath: path to export
    :type dirpath: str
    :param skeleton_filepath: name of skeleton file that this animation works on
    :type skeleton_filepath: str
    :return: False if pre/post checks fail; otherwise whatever the container writer reports (presumably bool - TODO confirm)
    """

    # safety checks
    if scs_animation.action not in bpy.data.actions:
        lprint(str("E Action %r requested by %r animation doesn't exists. Animation won't be exported!\n\t "
                   "Make sure proper action is assigned to SCS Animation."), (scs_animation.action, scs_animation.name))
        return False

    scs_globals = _get_scs_globals()

    print("\n************************************")
    print("** SCS PIA Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA GATHERING
    total_time = scs_animation.length
    action = bpy.data.actions[scs_animation.action]
    bone_channels = _get_bone_channels(scs_root_obj, armature, scs_animation, action, scs_globals.export_scale)
    custom_channels = _get_custom_channels(scs_animation, action)

    # DATA CREATION
    header_section = _fill_header_section(scs_animation.name, scs_globals.sign_export)
    custom_channel_sections = _fill_channel_sections(custom_channels, "CustomChannel")
    bone_channel_sections = _fill_channel_sections(bone_channels, "BoneChannel")
    global_section = _fill_global_section(skeleton_filepath, total_time, len(bone_channels), len(custom_channels))

    # post creation safety checks
    # an animation that produced no channels at all would be an empty, useless file
    if len(bone_channels) + len(custom_channels) == 0:
        lprint(str("E PIA file won't be exported, as SCS Animation %r\n\t "
                   "doesn't effect armature or it's bones or data are invalid."), (scs_animation.name,))
        return False

    # DATA ASSEMBLING
    pia_container = [header_section, global_section]
    for section in custom_channel_sections:
        pia_container.append(section)
    for section in bone_channel_sections:
        pia_container.append(section)

    # FILE EXPORT
    ind = " "
    filepath = os.path.join(dirpath, scs_animation.name + ".pia")

    # print("************************************")
    return _pix_container.write_data_to_file(pia_container, filepath, ind)
def export(bone_list, filepath, filename):
    """Write a PIS skeleton file for *bone_list* to ``filepath + ".pis"``.

    :param bone_list: bones to be exported
    :type bone_list: list
    :param filepath: destination path without extension
    :type filepath: str
    :param filename: name stored in the PIS header
    :type filename: str
    :return: result of the underlying PIx file writer
    """
    scs_globals = _get_scs_globals()

    for banner_line in ("\n************************************",
                        "** SCS PIS Exporter **",
                        "** (c)2014 SCS Software **",
                        "************************************\n"):
        print(banner_line)

    # TODO: SORT "bone_list"...

    # section creation
    header_section = _fill_header_section(filename, scs_globals.sign_export)
    bones_section = _fill_bones_sections(bone_list, scs_globals.export_scale)
    global_section = _fill_global_section(len(bone_list))

    # assemble & export
    pis_container = [header_section, global_section, bones_section]
    result = _pix_container.write_data_to_file(pis_container, filepath + ".pis", " ")
    return result
def export(collision_locator_list, filepath, filename, used_parts):
    """Exports PIC colliders

    :param collision_locator_list: collision locators to export (presumably bpy.types.Object items - TODO confirm)
    :type collision_locator_list: list
    :param filepath: destination path without extension; ".pic" gets appended
    :type filepath: str
    :param filename: name written into the PIC header
    :type filename: str
    :param used_parts: dictionary of used parts for current game object (it will get extended if some part from pic is not yet in)
    :type: dict
    :return: result reported by the PIx container writer (presumably bool - TODO confirm)
    :rtype: bool
    """
    # scene = context.scene
    scs_globals = _get_scs_globals()
    # output_type = scs_globals.output_type  # TODO: UNUSED!

    print("\n************************************")
    print("** SCS PIC Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA CREATION
    header_section = _fill_header_section(filename, scs_globals.sign_export)
    piece_sections = []
    materials = 0
    len_vertices = 0
    len_faces = 0
    # only "Convex" colliders carry geometry; they share one collision material
    convex_coll_locators = [loc for loc in collision_locator_list if loc.scs_props.locator_collider_type == "Convex"]
    if convex_coll_locators:
        len_vertices, len_faces, piece_sections = _fill_piece_sections(convex_coll_locators, scs_globals.export_scale)
        materials += 1
    part_sections = _fill_part_sections(collision_locator_list, used_parts)
    collision_locator_sections = _fill_collision_locator_sections(collision_locator_list)
    global_section = _fill_global_section(len_vertices, len_faces, materials, len(piece_sections), len(part_sections),
                                          len(collision_locator_sections))

    # DATA ASSEMBLING
    pic_container = [header_section, global_section]
    if convex_coll_locators:
        material_section = _fill_coll_material_section()
        pic_container.append(material_section)
    if piece_sections:
        for section in piece_sections:
            pic_container.append(section)
    for section in part_sections:
        pic_container.append(section)
    for section in collision_locator_sections:
        pic_container.append(section)

    # print(' pic_container:\n%s' % str(pic_container))

    # FILE EXPORT
    ind = " "
    pic_filepath = str(filepath + ".pic")
    result = _pix_container.write_data_to_file(pic_container, pic_filepath, ind)

    # print("************************************")
    return result
def update_item_in_file(item_pointer, new_value): """Resaves config file with updated given item to a new value. The "item_pointer" variable must be in form of 'SectionName.PropertyName', example: 'Paths.ProjectPath'.""" # interrupt if config update is locked if _get_scs_globals().config_update_lock: return False # interrupt if settings storage place is set to blend file (except when config storage place itself is being updated) if _get_scs_globals( ).config_storage_place == "BlendFile" and not item_pointer == "Header.ConfigStoragePlace": return False filepath = get_config_filepath() ind = ' ' config_container = _pix.get_data_from_file(filepath, ind) # if config container is still none, there had to be permission denied # by it's creation, so no sense to try to write it again if config_container is None: return False new_settings_container = [] new_value_changed = False item_pointer_split = item_pointer.split('.', 1) for section in config_container: new_section = _SectionData(section.type) for prop in section.props: if section.type == item_pointer_split[0] and prop[ 0] == item_pointer_split[1]: new_section.props.append((prop[0], new_value)) new_value_changed = True else: new_section.props.append((prop[0], prop[1])) # append new properties if they are not yet there if not new_value_changed and section.type == item_pointer_split[0]: new_section.props.append((item_pointer_split[1], new_value)) new_settings_container.append(new_section) _pix.write_data_to_file(new_settings_container, filepath, ind) return True
def new_config_file(filepath):
    """Creates a new config file at given location and name."""
    container = gather_default()
    # write default settings; report the path on success, None otherwise
    if _pix.write_data_to_file(container, filepath, " "):
        return filepath
    return None
def new_config_file(filepath):
    """Create a new configuration file at the given location and name."""
    ind = " "
    written = _pix.write_data_to_file(gather_default(), filepath, ind)
    # None signals the file could not be written
    return filepath if written else None
def update_item_in_file(item_pointer, new_value):
    """Resaves config file with updated given item to a new value.
    The "item_pointer" variable must be in form of 'SectionName.PropertyName',
    example: 'Paths.ProjectPath'.

    :return: True when the config file was rewritten, False otherwise
    :rtype: bool
    """

    # bail out while config updates are locked
    if _get_scs_globals().config_update_lock:
        return False

    # bail out when settings live in the blend file, unless the storage place itself is the item being changed
    if _get_scs_globals().config_storage_place == "BlendFile" and item_pointer != "Header.ConfigStoragePlace":
        return False

    config_path = get_config_filepath()
    indent = ' '
    container = _pix.get_data_from_file(config_path, indent)

    # a missing container means creation was already denied (permissions),
    # so rewriting would fail the same way
    if container is None:
        return False

    section_name, prop_name = item_pointer.split('.', 1)

    rebuilt = []
    value_written = False
    for old_section in container:
        section = _SectionData(old_section.type)
        for prop in old_section.props:
            if old_section.type == section_name and prop[0] == prop_name:
                # found the addressed property: replace its value
                section.props.append((prop[0], new_value))
                value_written = True
            else:
                section.props.append((prop[0], prop[1]))

        # property not present yet in its section -> append it
        if not value_written and old_section.type == section_name:
            section.props.append((prop_name, new_value))

        rebuilt.append(section)

    _pix.write_data_to_file(rebuilt, config_path, indent)

    return True
def export(collision_locator_list, filepath, filename, used_parts):
    """Exports PIC colliders

    :param collision_locator_list: collision locators to export
    :type collision_locator_list: list
    :param filepath: destination path without extension; ".pic" is appended
    :type filepath: str
    :param filename: name written into the PIC header
    :type filename: str
    :param used_parts: parts transitional structure for storing used parts inside this PIC export
    :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
    :return: result of the underlying PIx file writer
    """
    scs_globals = _get_scs_globals()

    print("\n************************************")
    print("** SCS PIC Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # header
    header_section = _fill_header_section(filename, scs_globals.export_write_signature)

    # only "Convex" colliders contribute geometry pieces and a single shared material
    convex_locators = [loc for loc in collision_locator_list if loc.scs_props.locator_collider_type == "Convex"]
    piece_sections = []
    material_count = 0
    vertex_count = 0
    face_count = 0
    if convex_locators:
        vertex_count, face_count, piece_sections = _fill_piece_sections(convex_locators, scs_globals.export_scale)
        material_count += 1

    part_sections = _fill_part_sections(collision_locator_list, used_parts)
    locator_sections = _fill_collision_locator_sections(collision_locator_list)
    global_section = _fill_global_section(vertex_count, face_count, material_count, len(piece_sections),
                                          len(part_sections), len(locator_sections))

    # assemble the container in PIC section order
    pic_container = [header_section, global_section]
    if convex_locators:
        pic_container.append(_fill_coll_material_section())
    if piece_sections:
        pic_container.extend(piece_sections)
    pic_container.extend(part_sections)
    pic_container.extend(locator_sections)

    # write the file out
    result = _pix_container.write_data_to_file(pic_container, filepath + ".pic", " ")
    return result
def export(collision_locator_list, filepath, filename, used_parts):
    """Exports PIC colliders

    :param collision_locator_list: collision locators to export (presumably bpy.types.Object items - TODO confirm)
    :type collision_locator_list: list
    :param filepath: destination path without extension; ".pic" gets appended
    :type filepath: str
    :param filename: name written into the PIC header
    :type filename: str
    :param used_parts: parts transitional structure for storing used parts inside this PIC export
    :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
    :return: result reported by the PIx container writer (presumably bool - TODO confirm)
    """
    scs_globals = _get_scs_globals()

    print("\n************************************")
    print("** SCS PIC Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA CREATION
    header_section = _fill_header_section(filename, scs_globals.sign_export)
    piece_sections = []
    materials = 0
    len_vertices = 0
    len_faces = 0
    # only "Convex" colliders carry geometry; they share one collision material
    convex_coll_locators = [loc for loc in collision_locator_list if loc.scs_props.locator_collider_type == "Convex"]
    if convex_coll_locators:
        len_vertices, len_faces, piece_sections = _fill_piece_sections(convex_coll_locators, scs_globals.export_scale)
        materials += 1
    part_sections = _fill_part_sections(collision_locator_list, used_parts)
    collision_locator_sections = _fill_collision_locator_sections(collision_locator_list)
    global_section = _fill_global_section(len_vertices, len_faces, materials, len(piece_sections), len(part_sections),
                                          len(collision_locator_sections))

    # DATA ASSEMBLING
    pic_container = [header_section, global_section]
    if convex_coll_locators:
        material_section = _fill_coll_material_section()
        pic_container.append(material_section)
    if piece_sections:
        for section in piece_sections:
            pic_container.append(section)
    for section in part_sections:
        pic_container.append(section)
    for section in collision_locator_sections:
        pic_container.append(section)

    # print(' pic_container:\n%s' % str(pic_container))

    # FILE EXPORT
    ind = " "
    pic_filepath = str(filepath + ".pic")
    result = _pix_container.write_data_to_file(pic_container, pic_filepath, ind)

    # print("************************************")
    return result
def new_config_file(filepath):
    """Create a new config file at the given location and name.

    :return: the written file path on success, None on failure
    """
    defaults = gather_default()
    try:
        written = _pix.write_data_to_file(defaults, filepath, " ")
    except PermissionError:
        # config.txt is crucial for running blender tools, so warn the user loudly
        # (even in 3D viewport) and include the fix in the message itself
        lprint("E Cannot create configuration file (permission denied), please ensure read/write permissions for:\n\t %r\n\n\t "
               "Without configuration file Blender Tools might not work as expected!",
               (os.path.dirname(filepath),),
               report_errors=1,
               report_warnings=1)
        return None

    if written:
        return filepath
    return None
def new_config_file(filepath):
    """Creates a new config file at given location and name.

    :param filepath: absolute path of the config file to create
    :type filepath: str
    :return: filepath on success; None if nothing was written or permission was denied
    """
    config_container = gather_default()
    ind = " "
    try:
        if _pix.write_data_to_file(config_container, filepath, ind):
            return filepath
    except PermissionError:
        # NOTE: as config.txt is crucial for running blender tools we have to warn user (even in 3D viewport)
        # so he can not miss the problem with creation of config file; solution also provided in message
        lprint(
            "E Cannot create configuration file (permission denied), please ensure read/write permissions for:\n\t %r\n\n\t "
            "Without configuration file Blender Tools might not work as expected!",
            (os.path.dirname(filepath), ),
            report_errors=1,
            report_warnings=1)

    return None
def export(filepath, scs_root_obj, armature_object, used_bones):
    """Export the armature skeleton into a PIS file at *filepath*.

    :param filepath: full destination path of the PIS file
    :type filepath: str
    :param scs_root_obj: SCS root object the skeleton belongs to
    :param armature_object: armature holding the exported bones
    :param used_bones: bones that should end up in the skeleton
    :return: result of the underlying PIx file writer
    """
    print("\n************************************")
    print("** SCS PIS Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    globs = _get_scs_globals()

    # section creation (header first, then bones, then global - matching original call order)
    header = _fill_header_section(scs_root_obj.name, globs.sign_export)
    bones = _fill_bones_sections(scs_root_obj, armature_object, used_bones, globs.export_scale)
    globals_section = _fill_global_section(len(used_bones))

    # assemble in PIS section order and write out
    return _pix_container.write_data_to_file([header, globals_section, bones], filepath, " ")
def export(root_object, used_parts, used_materials, scene, filepath):
    """Exports PIT trait file (looks, materials, parts and variants) for given root object.

    :param root_object: SCS root object to export traits for (presumably bpy.types.Object - TODO confirm)
    :param used_parts: used parts for current game object (presumably dict - TODO confirm against caller)
    :param used_materials: materials gathered during mesh export (names or bpy materials - see loop below)
    :param scene: Blender scene used to access shader presets inventory
    :param filepath: destination path without extension; ".pit" gets appended
    :type filepath: str
    :return: result reported by the PIx container writer (presumably bool - TODO confirm)
    """
    scs_globals = _get_scs_globals()
    output_type = scs_globals.output_type

    file_name = root_object.name

    print("\n************************************")
    print("** SCS PIT Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA GATHERING
    look_list = []
    variant_list = []

    # remember active look so it can be restored after iterating all looks below
    saved_active_look = root_object.scs_props.active_scs_look
    looks_inventory = root_object.scs_object_look_inventory
    looks_count = len(looks_inventory)
    # even with no looks defined we still export one (default) look
    if looks_count <= 0:
        looks_count = 1

    for i in range(0, looks_count):

        # apply each look from inventory first
        if len(looks_inventory) > 0:
            root_object.scs_props.active_scs_look = i

            # actually write values to material because Blender might not refresh data yet
            _looks.apply_active_look(root_object)

            curr_look_name = looks_inventory[i].name
        else:  # if no looks create default
            curr_look_name = "default"

        material_dict = {}
        material_list = []
        # get materials data
        for material in used_materials:
            if material is not None:
                # if material in ("_empty_slot_", "_empty_material_"):
                # NOTE: only workaround until module doesn't gets rewritten
                # material may arrive as a name string; resolve it to the datablock if possible
                if material in bpy.data.materials:
                    material = bpy.data.materials[material]

                if isinstance(material, str):
                    # unresolved name -> export a default material for it
                    material_name = str(material + "-_default_settings_")

                    # DEFAULT MATERIAL
                    material_export_data = _default_material(material_name)
                    material_list.append(material_name)
                else:
                    # print('material name: %r' % material.name)
                    material_name = material.name
                    material_list.append(material)

                    # SUBSTANCE
                    if material.scs_props.substance != 'None':
                        lprint('D material.name: %r\tmat.scs_props.substance: "%s"', (material.name, str(material.scs_props.substance)))
                        # TODO: Substance Export...

                    # MATERIAL EFFECT
                    # shader_data = material.get("scs_shader_attributes", {})
                    # effect_name = shader_data.get('effect', "NO EFFECT")
                    effect_name = material.scs_props.mat_effect_name

                    # CgFX SHADERS
                    # NOTE: an obsolete, fully commented-out CgFX export-data debug dump lived here;
                    # condensed away as dead code (it only printed cgfx attributes/textures).

                    # PRESET SHADERS
                    preset_found = False
                    alias = "NO SHADER"
                    def_cnt = attribute_cnt = texture_cnt = 0
                    def_sections = []
                    attribute_sections = []
                    texture_sections = []
                    active_shader_preset_name = material.scs_props.active_shader_preset_name
                    # print(' active_shader_preset_name: %r' % active_shader_preset_name)

                    for preset_i, preset in enumerate(scene.scs_shader_presets_inventory):
                        # print(' preset[%i]: %r' % (preset_i, preset.name))
                        if preset.name == active_shader_preset_name:
                            # print(' - material %r - %r' % (material.name, preset.name))

                            # LOAD PRESET
                            shader_presets_abs_path = _path_utils.get_abs_path(scs_globals.shader_presets_filepath)
                            # shader_presets_filepath = _get_scs_globals().shader_presets_filepath
                            # print('shader_presets_filepath: %r' % shader_presets_filepath)
                            # if shader_presets_filepath.startswith(str(os.sep + os.sep)):  # RELATIVE PATH
                            #     shader_presets_abs_path = get_abs_path(shader_presets_filepath)
                            # else:
                            #     shader_presets_abs_path = shader_presets_filepath

                            if os.path.isfile(shader_presets_abs_path):
                                presets_container = _pix_container.get_data_from_file(shader_presets_abs_path, ' ')

                                # FIND THE PRESET IN FILE
                                if presets_container:
                                    for section in presets_container:
                                        if section.type == "Shader":
                                            section_properties = _get_properties(section)
                                            if 'PresetName' in section_properties:
                                                preset_name = section_properties['PresetName']
                                                if preset_name == active_shader_preset_name:
                                                    alias = material.name
                                                    # print(' + preset name: %r' % preset_name)

                                                    # COLLECT ATTRIBUTES AND TEXTURES
                                                    for item in section.sections:

                                                        # DATA EXCHANGE FORMAT ATRIBUTE
                                                        if item.type == "DataExchangeFormat":
                                                            def_data = _SectionData("DataExchangeFormat")
                                                            for rec in item.props:
                                                                def_data.props.append((rec[0], rec[1]))
                                                            def_sections.append(def_data)
                                                            def_cnt += 1

                                                        # ATTRIBUTES
                                                        if item.type == "Attribute":
                                                            # print(' Attribute:')
                                                            attribute_data = _SectionData("Attribute")
                                                            for rec in item.props:
                                                                # print(' rec: %r' % str(rec))
                                                                if rec[0] == "Format":
                                                                    attribute_data.props.append((rec[0], rec[1]))
                                                                elif rec[0] == "Tag":
                                                                    # tag_prop = rec[1].replace("[", "").replace("]", "")
                                                                    # attribute_data.props.append((rec[0], tag_prop))
                                                                    attribute_data.props.append((rec[0], rec[1]))
                                                                elif rec[0] == "Value":
                                                                    format_prop = item.get_prop("Format")[1]
                                                                    tag_prop = item.get_prop("Tag")[1]
                                                                    tag_prop = tag_prop.replace("[", "").replace("]", "")
                                                                    # print(' format_prop: %r' % str(format_prop))
                                                                    # print(' tag_prop: %r' % str(tag_prop))
                                                                    # aux attributes hold multiple values; plain ones a single property value
                                                                    if "aux" in tag_prop:
                                                                        aux_props = getattr(material.scs_props, "shader_attribute_" + tag_prop)
                                                                        value = []
                                                                        for aux_prop in aux_props:
                                                                            value.append(aux_prop.value)
                                                                    else:
                                                                        value = getattr(material.scs_props, "shader_attribute_" + tag_prop, "NO TAG")
                                                                    # print(' value: %s' % str(value))
                                                                    if format_prop == 'FLOAT':
                                                                        attribute_data.props.append((rec[0], ["&&", (value,)]))
                                                                    else:
                                                                        attribute_data.props.append((rec[0], ["i", tuple(value)]))
                                                            attribute_sections.append(attribute_data)
                                                            attribute_cnt += 1

                                                        # TEXTURES
                                                        elif item.type == "Texture":
                                                            # print(' Texture:')
                                                            texture_data = _SectionData("Texture")
                                                            for rec in item.props:
                                                                # print(' rec: %r' % str(rec))
                                                                if rec[0] == "Tag":
                                                                    tag_prop = rec[1].split(":")[1]
                                                                    tag = str("texture[" + str(texture_cnt) + "]:" + tag_prop)
                                                                    texture_data.props.append((rec[0], tag))
                                                                elif rec[0] == "Value":
                                                                    tag_prop = item.get_prop("Tag")[1].split(":")[1]
                                                                    # print(' tag_prop: %r' % str(tag_prop))
                                                                    # create and get path to tobj
                                                                    tobj_rel_path = _get_texture_path_from_material(material, tag_prop,
                                                                                                                    os.path.dirname(filepath))
                                                                    texture_data.props.append((rec[0], tobj_rel_path))
                                                            texture_sections.append(texture_data)
                                                            texture_cnt += 1

                                                    preset_found = True
                                                    break
                            else:
                                lprint('\nW The file path "%s" is not valid!', (shader_presets_abs_path,))

                        if preset_found:
                            break

                    if preset_found:
                        material_export_data = _SectionData("Material")
                        material_export_data.props.append(("Alias", alias))
                        material_export_data.props.append(("Effect", effect_name))
                        material_export_data.props.append(("Flags", 0))
                        # data exchange format sections are written only for "def" output type
                        if output_type.startswith('def'):
                            material_export_data.props.append(("DataExchangeFormatCount", def_cnt))
                        material_export_data.props.append(("AttributeCount", attribute_cnt))
                        material_export_data.props.append(("TextureCount", texture_cnt))
                        if output_type.startswith('def'):
                            for def_section in def_sections:
                                material_export_data.sections.append(def_section)
                        for attribute in attribute_sections:
                            material_export_data.sections.append(attribute)
                        for texture in texture_sections:
                            material_export_data.sections.append(texture)

                    elif active_shader_preset_name == "<imported>":
                        # material came from import -> replay its stored raw attributes/textures
                        material_attributes = material['scs_shader_attributes']['attributes'].to_dict().values()
                        material_textures = material['scs_shader_attributes']['textures'].to_dict().values()

                        material_export_data = _SectionData("Material")
                        material_export_data.props.append(("Alias", material.name))
                        material_export_data.props.append(("Effect", effect_name))
                        material_export_data.props.append(("Flags", 0))
                        material_export_data.props.append(("AttributeCount", len(material_attributes)))
                        material_export_data.props.append(("TextureCount", len(material_textures)))

                        for attribute_dict in material_attributes:
                            attribute_section = _SectionData("Attribute")

                            format_value = ""
                            for attr_prop in sorted(attribute_dict.keys()):

                                # get the format of current attribute (we assume that "Format" attribute is before "Value" attribute in this for loop)
                                if attr_prop == "Format":
                                    format_value = attribute_dict[attr_prop]

                                if attr_prop == "Value" and ("FLOAT" in format_value or "STRING" in format_value):
                                    attribute_section.props.append((attr_prop, ["i", tuple(attribute_dict[attr_prop])]))
                                elif attr_prop == "Tag" and "aux" in attribute_dict[attr_prop]:
                                    attribute_section.props.append((attr_prop, "aux[" + attribute_dict[attr_prop][3:] + "]"))
                                else:
                                    attribute_section.props.append((attr_prop, attribute_dict[attr_prop]))

                            material_export_data.sections.append(attribute_section)

                        for texture_dict in material_textures:
                            texture_section = _SectionData("Texture")

                            tag_id_string = ""
                            for tex_prop in sorted(texture_dict.keys()):

                                if tex_prop == "Tag":
                                    tag_id_string = texture_dict[tex_prop].split(':')[1]

                                if tex_prop == "Value" and tag_id_string != "":
                                    tobj_rel_path = _get_texture_path_from_material(material, tag_id_string, os.path.dirname(filepath))
                                    texture_section.props.append((tex_prop, tobj_rel_path))
                                else:
                                    texture_section.props.append((tex_prop, texture_dict[tex_prop]))

                            material_export_data.sections.append(texture_section)
                    else:
                        # DEFAULT MATERIAL
                        material_name = str("_" + material_name + "_-_default_settings_")
                        material_export_data = _default_material(material_name)

                material_dict[material_name] = material_export_data

        # create materials sections for looks
        material_sections = _fill_material_sections(material_list, material_dict)
        look_data = {
            "name": curr_look_name,
            "material_sections": material_sections
        }
        look_list.append(look_data)

    # restore look applied before export
    root_object.scs_props.active_scs_look = saved_active_look

    # PARTS AND VARIANTS...
    part_list_cnt = len(used_parts.keys())
    if len(root_object.scs_object_variant_inventory) == 0:
        # If there is no Variant, add the Default one...
        part_list = _fill_part_list(root_object.scs_object_part_inventory, used_parts, all_parts=True)
        variant_list.append((_VARIANT_consts.default_name, part_list), )
    else:
        for variant in root_object.scs_object_variant_inventory:
            part_list = _fill_part_list(variant.parts, used_parts)
            variant_list.append((variant.name, part_list), )

    # DATA CREATION
    header_section = _fill_header_section(file_name, scs_globals.sign_export)
    look_section = _fill_look_sections(look_list)
    # part_sections = fill_part_section(part_list)
    variant_section = _fill_variant_sections(variant_list)
    comment_header_section = _fill_comment_header_section(look_list, variant_list)
    global_section = _fill_global_section(len(look_list), len(variant_list), part_list_cnt, len(used_materials))

    # DATA ASSEMBLING
    pit_container = [comment_header_section, header_section, global_section]
    for section in look_section:
        pit_container.append(section)
    for section in variant_section:
        pit_container.append(section)

    # FILE EXPORT
    ind = " "
    pit_filepath = str(filepath + ".pit")
    result = _pix_container.write_data_to_file(pit_container, pit_filepath, ind)

    # print("************************************")
    return result
def export(root_object, filepath, used_materials, used_parts):
    """Export PIT.

    :param root_object: SCS root object
    :type root_object: bpy.types.Object
    :param filepath: PIT file path
    :type filepath: str
    :param used_materials: materials transitional structure for accessing stored materials from PIM
    :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans
    :param used_parts: parts transitional structure for accessing stored parts from PIM, PIC and PIP
    :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
    :return: True if successful; False otherwise;
    :rtype: bool
    """
    scs_globals = _get_scs_globals()
    output_type = scs_globals.output_type

    file_name = root_object.name

    print("\n************************************")
    print("** SCS PIT Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA GATHERING
    look_list = []
    variant_list = []

    saved_active_look = root_object.scs_props.active_scs_look
    looks_inventory = root_object.scs_object_look_inventory
    looks_count = len(looks_inventory)
    # even with an empty look inventory one "default" look is exported
    if looks_count <= 0:
        looks_count = 1

    used_materials_pairs = used_materials.get_as_pairs()

    for i in range(0, looks_count):

        # apply each look from inventory first
        if len(looks_inventory) > 0:
            root_object.scs_props.active_scs_look = i

            # actually write values to material because Blender might not refresh data yet
            _looks.apply_active_look(root_object)

            curr_look_name = looks_inventory[i].name
        else:  # if no looks create default
            curr_look_name = "default"

        material_dict = {}
        material_list = []
        # get materials data
        for material_name, material in used_materials_pairs:

            if material is None:
                material_name = str("_default_material_-_default_settings_")

                # DEFAULT MATERIAL
                material_export_data = _default_material(material_name)
                material_list.append(material_name)

            else:
                # print('material name: %r' % material.name)
                material_list.append(material)

                # MATERIAL EFFECT
                effect_name = material.scs_props.mat_effect_name

                # PRESET SHADERS
                flags = 0
                def_cnt = attribute_cnt = texture_cnt = 0
                def_sections = []
                attribute_sections = []
                texture_sections = []
                active_shader_preset_name = material.scs_props.active_shader_preset_name

                # SUBSTANCE
                substance_value = material.scs_props.substance
                # only write substance to material if it's assigned
                if substance_value != "None" and substance_value != "":
                    substance_data = _SectionData("Attribute")
                    substance_data.props.append(("Format", "STRING"))
                    substance_data.props.append(("Tag", "substance"))
                    substance_data.props.append(("Value", ["i", (substance_value,)]))
                    attribute_sections.append(substance_data)
                    attribute_cnt += 1

                if active_shader_preset_name in _get_shader_presets_inventory() and active_shader_preset_name != "<none>":

                    preset = _get_shader_presets_inventory()[active_shader_preset_name]
                    # everything after the preset base effect name is the flavors string
                    flavors_str = effect_name[len(preset.effect):]
                    section = _shader_presets_cache.get_section(preset, flavors_str)

                    # FLAGS
                    # NOTE(review): preset "Flags" value itself is ignored; the exported flags
                    # reflect only the material aliasing toggle — confirm intended
                    for prop in section.props:
                        if prop[0] == "Flags":
                            flags = int(not material.scs_props.enable_aliasing)
                            break

                    # COLLECT ATTRIBUTES AND TEXTURES
                    for item in section.sections:

                        # DATA EXCHANGE FORMAT ATTRIBUTE
                        if item.type == "DataExchangeFormat":
                            def_data = _SectionData("DataExchangeFormat")
                            for rec in item.props:
                                def_data.props.append((rec[0], rec[1]))
                            def_sections.append(def_data)
                            def_cnt += 1

                        # if attribute is hidden in shader preset ignore it on export
                        # this is useful for flavor hiding some attributes from original material
                        # eg: airbrush on "truckpaint" hides R G B aux attributes which are not present
                        # when using airbrush flavor
                        hidden = item.get_prop_value("Hide")
                        if hidden and hidden == "True":
                            continue

                        preview_only = item.get_prop_value("PreviewOnly")
                        if preview_only and preview_only == "True":
                            continue

                        # ATTRIBUTES
                        if item.type == "Attribute":
                            # print(' Attribute:')
                            attribute_data = _SectionData("Attribute")
                            for rec in item.props:
                                # print(' rec: %r' % str(rec))
                                if rec[0] == "Format":
                                    attribute_data.props.append((rec[0], rec[1]))
                                elif rec[0] == "Tag":
                                    # tag_prop = rec[1].replace("[", "").replace("]", "")
                                    # attribute_data.props.append((rec[0], tag_prop))
                                    attribute_data.props.append((rec[0], rec[1]))
                                elif rec[0] == "Value":
                                    format_prop = item.get_prop("Format")[1]
                                    tag_prop = item.get_prop("Tag")[1]
                                    tag_prop = tag_prop.replace("[", "").replace("]", "")
                                    # print(' format_prop: %r' % str(format_prop))
                                    # print(' tag_prop: %r' % str(tag_prop))
                                    if "aux" in tag_prop:
                                        # aux attributes are stored as collection of single-value items
                                        aux_props = getattr(material.scs_props, "shader_attribute_" + tag_prop)
                                        value = []
                                        for aux_prop in aux_props:
                                            value.append(aux_prop.value)

                                        # extract list if there is only one value inside and tagged as FLOAT
                                        # otherwise it gets saved as: "Value: ( [0.0] )" instead of: "Value: ( 0.0 )"
                                        if len(value) == 1 and format_prop == "FLOAT":
                                            value = value[0]
                                    else:
                                        value = getattr(material.scs_props, "shader_attribute_" + tag_prop, "NO TAG")
                                    # print(' value: %s' % str(value))
                                    if format_prop == 'FLOAT':
                                        attribute_data.props.append((rec[0], ["&&", (value,)]))
                                    else:
                                        attribute_data.props.append((rec[0], ["i", tuple(value)]))
                            attribute_sections.append(attribute_data)
                            attribute_cnt += 1

                        # TEXTURES
                        elif item.type == "Texture":
                            # print(' Texture:')
                            texture_data = _SectionData("Texture")
                            for rec in item.props:
                                # print(' rec: %r' % str(rec))
                                if rec[0] == "Tag":
                                    # re-tag with running texture index: "texture[<i>]:<tag>"
                                    tag_prop = rec[1].split(":")[1]
                                    tag = str("texture[" + str(texture_cnt) + "]:" + tag_prop)
                                    texture_data.props.append((rec[0], tag))
                                elif rec[0] == "Value":
                                    tag_prop = item.get_prop("Tag")[1].split(":")[1]
                                    # print(' tag_prop: %r' % str(tag_prop))

                                    # create and get path to tobj
                                    tobj_rel_path = _get_texture_path_from_material(material, tag_prop,
                                                                                    os.path.dirname(filepath))
                                    texture_data.props.append((rec[0], tobj_rel_path))
                            texture_sections.append(texture_data)
                            texture_cnt += 1

                    # assemble material section from collected attribute/texture sections
                    material_export_data = _SectionData("Material")
                    material_export_data.props.append(("Alias", material.name))
                    material_export_data.props.append(("Effect", effect_name))
                    material_export_data.props.append(("Flags", flags))
                    if output_type.startswith('def'):
                        material_export_data.props.append(("DataExchangeFormatCount", def_cnt))
                    material_export_data.props.append(("AttributeCount", attribute_cnt))
                    material_export_data.props.append(("TextureCount", texture_cnt))
                    if output_type.startswith('def'):
                        for def_section in def_sections:
                            material_export_data.sections.append(def_section)
                    for attribute in attribute_sections:
                        material_export_data.sections.append(attribute)
                    for texture in texture_sections:
                        material_export_data.sections.append(texture)

                elif active_shader_preset_name == "<imported>":

                    # imported material: attributes/textures come from custom ID properties
                    material_attributes = material['scs_shader_attributes']['attributes'].to_dict().values()
                    material_textures = material['scs_shader_attributes']['textures'].to_dict().values()

                    material_export_data = _SectionData("Material")
                    material_export_data.props.append(("Alias", material.name))
                    material_export_data.props.append(("Effect", effect_name))
                    material_export_data.props.append(("Flags", int(not material.scs_props.enable_aliasing)))
                    material_export_data.props.append(("AttributeCount", len(material_attributes)))
                    material_export_data.props.append(("TextureCount", len(material_textures)))

                    for attribute_dict in material_attributes:
                        attribute_section = _SectionData("Attribute")

                        format_value = ""
                        for attr_prop in sorted(attribute_dict.keys()):

                            # get the format of current attribute
                            # (we assume that "Format" attribute is before "Value" attribute in this for loop)
                            if attr_prop == "Format":
                                format_value = attribute_dict[attr_prop]

                            if attr_prop == "Value" and ("FLOAT" in format_value or "STRING" in format_value):

                                tag_prop = attribute_dict["Tag"].replace("[", "").replace("]", "")

                                if "aux" in tag_prop:
                                    aux_props = getattr(material.scs_props, "shader_attribute_" + tag_prop)
                                    value = []
                                    for aux_prop in aux_props:
                                        value.append(aux_prop.value)
                                else:
                                    value = getattr(material.scs_props, "shader_attribute_" + tag_prop, None)
                                    if isinstance(value, float):
                                        value = [value]

                                # fall back to imported raw value when material has no such attribute
                                if value is None:
                                    attribute_section.props.append((attr_prop, ["i", tuple(attribute_dict[attr_prop])]))
                                else:
                                    attribute_section.props.append((attr_prop, ["i", tuple(value)]))

                            elif attr_prop == "Tag" and "aux" in attribute_dict[attr_prop]:
                                # restore square-bracket notation stripped on import: "auxN" -> "aux[N]"
                                attribute_section.props.append((attr_prop, "aux[" + attribute_dict[attr_prop][3:] + "]"))
                            else:
                                attribute_section.props.append((attr_prop, attribute_dict[attr_prop]))

                        material_export_data.sections.append(attribute_section)

                    for texture_dict in material_textures:
                        texture_section = _SectionData("Texture")

                        tag_id_string = ""
                        for tex_prop in sorted(texture_dict.keys()):

                            # "Tag" is expected before "Value" in sorted key order
                            if tex_prop == "Tag":
                                tag_id_string = texture_dict[tex_prop].split(':')[1]

                            if tex_prop == "Value" and tag_id_string != "":
                                tobj_rel_path = _get_texture_path_from_material(material, tag_id_string,
                                                                                os.path.dirname(filepath))
                                texture_section.props.append((tex_prop, tobj_rel_path))
                            else:
                                texture_section.props.append((tex_prop, texture_dict[tex_prop]))

                        material_export_data.sections.append(texture_section)

                else:
                    # DEFAULT MATERIAL
                    material_name = str("_" + material_name + "_-_default_settings_")
                    material_export_data = _default_material(material_name)

            material_dict[material_name] = material_export_data

        # create materials sections for looks
        material_sections = _fill_material_sections(material_list, material_dict)

        look_data = {
            "name": curr_look_name,
            "material_sections": material_sections
        }
        look_list.append(look_data)

    # restore look applied before export
    root_object.scs_props.active_scs_look = saved_active_look

    # PARTS AND VARIANTS...
    used_parts_names = used_parts.get_as_list()
    part_list_cnt = len(used_parts_names)
    if len(root_object.scs_object_variant_inventory) == 0:
        # If there is no Variant, add the Default one...
        part_list = _fill_part_list(root_object.scs_object_part_inventory, used_parts_names, all_parts=True)
        variant_list.append((_VARIANT_consts.default_name, part_list), )
    else:
        for variant in root_object.scs_object_variant_inventory:
            part_list = _fill_part_list(variant.parts, used_parts_names)
            variant_list.append((variant.name, part_list), )

    # DATA CREATION
    header_section = _fill_header_section(file_name, scs_globals.sign_export)
    look_section = _fill_look_sections(look_list)
    # part_sections = fill_part_section(part_list)
    variant_section = _fill_variant_sections(variant_list)
    comment_header_section = _fill_comment_header_section(look_list, variant_list)
    global_section = _fill_global_section(len(look_list), len(variant_list), part_list_cnt, len(used_materials_pairs))

    # DATA ASSEMBLING
    pit_container = [comment_header_section, header_section, global_section]
    for section in look_section:
        pit_container.append(section)
    for section in variant_section:
        pit_container.append(section)

    # FILE EXPORT
    ind = " "
    pit_filepath = str(filepath + ".pit")
    result = _pix_container.write_data_to_file(pit_container, pit_filepath, ind)

    # print("************************************")
    return result
def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_points):
    """Exports PIP file from given locator list.

    :param prefab_locator_list:
    :type prefab_locator_list: list of bpy.types.Object
    :param dirpath: directory export path
    :type dirpath: str
    :param filename: name of PIP file
    :type filename: str
    :param offset_matrix: offset matrix for locators
    :type offset_matrix: mathutils.Matrix
    :param used_terrain_points: terrain points transitional structure for accessing terrain points stored during PIM export
    :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans
    :return: True if successfull; otherwise False
    :rtype: bool
    """

    # CLEANUP CONNECTIONS DATA
    _connections_group_wrapper.cleanup_on_export()

    print("\n************************************")
    print("** SCS PIP Exporter **")
    print("** (c)2015 SCS Software **")
    print("************************************\n")

    (control_node_locs, nav_point_locs, sign_locs, spawn_point_locs,
     semaphore_locs, map_point_locs, trigger_point_locs) = __sort_locators_by_type__(prefab_locator_list)

    pip_header = Header(2, filename)
    pip_global = Globall()

    pip_nodes = OrderedDict()
    """:type: dict[int,Node]"""
    pip_curves = OrderedDict()
    """:type: dict[int, Curve]"""
    pip_signs = []
    """:type: list[Sign]"""
    pip_spawn_points = []
    """:type: list[SpawnPoint]"""
    pip_semaphores = []
    """:type: list[Semaphore]"""
    pip_map_points = OrderedDict()
    """:type: dict[str, MapPoint]"""
    pip_trigger_points = OrderedDict()
    """:type: dict[str, TriggerPoint]"""
    # index 0 -> fork, 1 -> joint, 2 -> cross; keyed by stringified intersection point
    pip_intersections = [OrderedDict(), OrderedDict(), OrderedDict()]
    """:type: list[dict[str, list[Intersection]]]"""

    # nodes creation
    for locator in control_node_locs.values():
        locator_scs_props = locator.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""

        curr_node_i = int(locator_scs_props.locator_prefab_con_node_index)
        if curr_node_i not in pip_nodes:
            pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world)
            # convert rotation into forward direction vector
            rot = Quaternion(rot) * Vector((0, 0, -1))

            # create node with position and direction
            cn = Node(curr_node_i, pos, rot)

            # add terrain points
            terrain_points = used_terrain_points.get(curr_node_i)
            for variant_i in terrain_points:

                # ensure variant entry for no terrain points case
                cn.ensure_variant(variant_i)

                for tp_entry in terrain_points[variant_i]:
                    cn.add_terrain_point(tp_entry.position, tp_entry.normal, variant_i)

            pip_nodes[curr_node_i] = cn
        else:
            lprint("W Multiple Control Nodes with same index detected, only one per index will be exported!\n\t "
                   "Check Control Nodes in SCS Game Object with Root: %r", (filename,))

    # curves creation
    curves_dict = _connections_group_wrapper.get_curves(nav_point_locs.values())
    for key, curve_entry in curves_dict.items():

        loc0 = nav_point_locs[curves_dict[key].start]
        loc0_scs_props = loc0.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""
        loc1 = nav_point_locs[curves_dict[key].end]
        loc1_scs_props = loc1.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""

        # create curve and set properties
        curve = __get_curve__(pip_curves, curve_entry.index, loc0.name)

        pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * loc0.matrix_world)
        curve.set_start(pos, rot)
        pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * loc1.matrix_world)
        curve.set_end(pos, rot)

        curve.set_input_boundaries(loc0_scs_props)
        curve.set_output_boundaries(loc1_scs_props)
        curve.set_flags(loc0.scs_props, True)
        curve.set_flags(loc1.scs_props, False)

        # semaphore id comes from start locator, traffic rule from end locator
        curve.set_semaphore_id(int(loc0_scs_props.locator_prefab_np_traffic_semaphore))
        curve.set_traffic_rule(loc1_scs_props.locator_prefab_np_traffic_rule)

        # set next/prev curves
        for next_key in curve_entry.next_curves:
            next_curve = __get_curve__(pip_curves, curves_dict[next_key].index, curves_dict[next_key].start)
            assert curve.add_next_curve(next_curve)

        for prev_key in curve_entry.prev_curves:
            prev_curve = __get_curve__(pip_curves, curves_dict[prev_key].index, curves_dict[prev_key].start)
            assert curve.add_prev_curve(prev_curve)

        # sync nodes input lanes
        boundary_node_i = curve.get_input_node_index()
        if 0 <= boundary_node_i < _PL_consts.PREFAB_NODE_COUNT_MAX:
            if boundary_node_i in pip_nodes:
                assert pip_nodes[boundary_node_i].set_input_lane(curve.get_input_lane_index(), curve.get_index())
            else:
                lprint("E None existing Boundary Node with index: %s used in Navigation Point: %r",
                       (boundary_node_i, loc0.name,))

        # sync nodes output lanes
        boundary_node_i = curve.get_output_node_index()
        if 0 <= boundary_node_i < _PL_consts.PREFAB_NODE_COUNT_MAX:
            if boundary_node_i in pip_nodes:
                assert pip_nodes[boundary_node_i].set_output_lane(curve.get_output_lane_index(), curve.get_index())
            else:
                lprint("E None existing Boundary Node with index: %s used in Navigation Point: %r",
                       (boundary_node_i, loc1.name,))

    Curve.prepare_curves(pip_curves.values())

    # signs creation
    for locator in sign_locs.values():
        locator_scs_props = locator.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""

        # create sign and set properties
        sign = Sign(locator.name, locator_scs_props.scs_part)

        pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world)
        sign.set_position(pos)
        sign.set_rotation(rot)

        # model value is stored as "<id>:<model>"; export only the model part
        if ":" in locator_scs_props.locator_prefab_sign_model:
            sign.set_model(locator_scs_props.locator_prefab_sign_model.split(":")[1].strip())
        else:
            lprint("W Invalid Sign Model: %r on locator: %r",
                   (locator_scs_props.locator_prefab_sign_model, locator.name))

        pip_signs.append(sign)

    # spawn points creation
    for locator in spawn_point_locs.values():
        locator_scs_props = locator.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""

        # create spawn point and set properties
        spawn_point = SpawnPoint(locator.name)

        pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world)
        spawn_point.set_position(pos)
        spawn_point.set_rotation(rot)

        spawn_point.set_type(int(locator_scs_props.locator_prefab_spawn_type))

        pip_spawn_points.append(spawn_point)

    # semaphores creation
    for locator in semaphore_locs.values():
        locator_scs_props = locator.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""

        # create semaphore and set properties
        semaphore = Semaphore(int(locator_scs_props.locator_prefab_tsem_type))

        pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world)
        semaphore.set_position(pos)
        semaphore.set_rotation(rot)

        semaphore.set_semaphore_id(int(locator_scs_props.locator_prefab_tsem_id))

        if ":" in locator_scs_props.locator_prefab_tsem_profile:
            semaphore.set_profile(locator_scs_props.locator_prefab_tsem_profile.split(":")[1].strip())
        else:
            lprint("W Invalid Profile: %r on Traffic Semaphore locator: %r",
                   (locator_scs_props.locator_prefab_tsem_profile, locator.name))

        # intervals: green, orange after green, red, orange after red
        semaphore.set_intervals((locator_scs_props.locator_prefab_tsem_gs, locator_scs_props.locator_prefab_tsem_os1,
                                 locator_scs_props.locator_prefab_tsem_rs, locator_scs_props.locator_prefab_tsem_os2))
        semaphore.set_cycle(locator_scs_props.locator_prefab_tsem_cyc_delay)

        pip_semaphores.append(semaphore)

    # map points creation
    for locator in map_point_locs.values():
        locator_scs_props = locator.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""

        # create map point and set properties
        map_point = __get_map_point__(pip_map_points, locator.name)

        pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world)
        map_point.set_position(pos)
        map_point.set_flags(locator_scs_props)

        for neighbour_name in _connections_group_wrapper.get_neighbours(locator):
            assert map_point.add_neighbour(__get_map_point__(pip_map_points, neighbour_name))

    MapPoint.test_map_points(pip_map_points.values())
    MapPoint.auto_generate_map_points(pip_map_points, pip_nodes)

    # trigger points creation
    for locator in trigger_point_locs.values():
        locator_scs_props = locator.scs_props
        """:type: io_scs_tools.properties.object.ObjectSCSTools"""

        # create trigger point and set properties
        trigger_point = __get_trigger_point__(pip_trigger_points, locator.name)

        pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world)
        trigger_point.set_position(pos)

        if ":" in locator_scs_props.locator_prefab_tp_action:
            trigger_point.set_action(locator_scs_props.locator_prefab_tp_action.split(":")[1].strip())
        else:
            lprint("W Invalid Action: %r on Trigger Point locator: %r",
                   (locator_scs_props.locator_prefab_tp_action, locator.name))

        trigger_point.set_trigger_range(locator_scs_props.locator_prefab_tp_range)
        trigger_point.set_reset_delay(locator_scs_props.locator_prefab_tp_reset_delay)
        trigger_point.set_flags(locator_scs_props)

        for neighbour_name in _connections_group_wrapper.get_neighbours(locator):
            assert trigger_point.add_neighbour(__get_trigger_point__(pip_trigger_points, neighbour_name))

    TriggerPoint.prepare_trigger_points(pip_trigger_points.values())

    # intersections creation
    for c0_i, c0 in enumerate(sorted(pip_curves.values())):
        for c1_i, c1 in enumerate(sorted(pip_curves.values())):

            if c1_i <= c0_i:  # only search each pair of curves once
                continue

            # get the intersection point and curves coefficient positions
            intersect_p, c0_pos, c1_pos = Intersection.get_intersection(c0, c1)
            if intersect_p:

                intersect_p_str = str(intersect_p)  # Format: '<Vector (0.0000, 0.0000, 0.0000)>'

                is_start = c0_pos == 0 and c0_pos == c1_pos
                is_end = c1_pos == 1 and c0_pos == c1_pos
                if is_start:
                    inter_type = 0  # fork
                elif is_end:
                    inter_type = 1  # joint
                else:
                    inter_type = 2  # cross

                    # if there is indication of cross intersection filter out intersections with common fork and joint
                    # NOTE: this condition might not be sufficient, so if anyone will have problems,
                    # this is the point that has to be improved
                    if Intersection.have_common_fork(c0, c1) or Intersection.have_common_joint(c0, c1):
                        continue

                # calculate radius for the same directions on curves
                forward_radius = Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, 1, 1)
                backward_radius = Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, -1, -1)
                final_radius = max(forward_radius, backward_radius)

                # special calculations only for cross intersections
                if inter_type == 2:

                    # calculate radius also for opposite directions
                    final_radius = max(final_radius, Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, 1, -1))
                    final_radius = max(final_radius, Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, -1, 1))

                    # calculate position of intersection point on curves with better precision
                    c0_pos = c0.get_closest_point(intersect_p)
                    c1_pos = c1.get_closest_point(intersect_p)

                    lprint("D Found cross intersection point: %r", (intersect_p,))

                # creating intersection class instances
                intersection = Intersection(c0.get_index(), c0.get_ui_name(), c0_pos * c0.get_length())
                intersection1 = Intersection(c1.get_index(), c1.get_ui_name(), c1_pos * c1.get_length())

                # init list of intersections for current intersecting point
                if intersect_p_str not in pip_intersections[inter_type]:
                    pip_intersections[inter_type][intersect_p_str] = []

                # append intersections to list and calculate new siblings
                new_siblings = 2
                if intersection not in pip_intersections[inter_type][intersect_p_str]:
                    pip_intersections[inter_type][intersect_p_str].append(intersection)
                else:
                    del intersection
                    new_siblings -= 1

                if intersection1 not in pip_intersections[inter_type][intersect_p_str]:
                    pip_intersections[inter_type][intersect_p_str].append(intersection1)
                else:
                    del intersection1
                    new_siblings -= 1

                # always set flags on first entry in current intersection point list
                # this way siblings count is getting updated properly
                pip_intersections[inter_type][intersect_p_str][0].set_flags(is_start, is_end, new_siblings)

                # update radius on all of intersection in the same intersecting point
                for inter in pip_intersections[inter_type][intersect_p_str]:
                    # NOTE(review): the first set_radius is immediately followed by set_radius(final_radius);
                    # confirm whether syncing to the first entry's radius is still needed here
                    inter.set_radius(pip_intersections[inter_type][intersect_p_str][0].get_radius())
                    inter.set_radius(final_radius)

    # create container
    pip_container = [pip_header.get_as_section(), pip_global.get_as_section()]
    for node in pip_nodes.values():
        pip_container.append(node.get_as_section())
    for curve_key in sorted(pip_curves):
        pip_container.append(pip_curves[curve_key].get_as_section())
    for sign in pip_signs:
        pip_container.append(sign.get_as_section())
    for spawn_point in pip_spawn_points:
        pip_container.append(spawn_point.get_as_section())
    for semaphore in pip_semaphores:
        pip_container.append(semaphore.get_as_section())
    for map_point in pip_map_points.values():
        pip_container.append(map_point.get_as_section())
    for trigger_point in pip_trigger_points.values():
        pip_container.append(trigger_point.get_as_section())
    for inter_type in range(3):
        for intersect_p_str in pip_intersections[inter_type]:
            for intersection in pip_intersections[inter_type][intersect_p_str]:
                pip_container.append(intersection.get_as_section())

    # write to file
    ind = " "
    pip_filepath = path.join(dirpath, str(filename + ".pip"))
    result = _pix_container.write_data_to_file(pip_container, pip_filepath, ind)

    return result
def export(context, root_object, used_parts, used_materials, object_list, model_locator_list, bone_list, vg_list, filepath):
    """
    :param context: Blender Context
    :type context: bpy.types.Context
    :param root_object: SCS Root Object
    :type root_object: bpy.types.Object
    :param used_parts: dictionary of used parts for current game object (it will get extended if some part from pic is not yet in)
    :type: dict
    :param used_materials: All Materials used in 'SCS Game Object'
    :type used_materials: list
    :param object_list: Objects for export
    :type object_list: list
    :param model_locator_list: Locators for export
    :type model_locator_list: list
    :param bone_list: Bones for export
    :type bone_list: list
    :param vg_list: ...
    :type vg_list: list
    :param filepath: ...
    :type filepath: str
    :return: Return state statuses (Usually 'FINISHED')
    :rtype: dict
    """
    scene = context.scene
    scs_globals = _get_scs_globals()
    output_type = scs_globals.output_type

    file_name = root_object.name
    offset_matrix = root_object.matrix_world

    print("\n************************************")
    print("** SCS PIM Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # NEW SORTING
    # for piece_key in piece_keys_sorted:
    #     piece_data = piece_dict[piece_key]
    #     print('\n%r' % piece_key)
    #     print(' piece_data: %s' % str(piece_data))
    #     print(' obj: %r' % str(piece_data[0]))
    #     print(' mat: %r' % str(piece_data[1]))
    #     print(' uvs: %r' % str(piece_data[2]))
    #     print(' vcl: %r' % str(piece_data[3]))
    #     print(' arm: %r' % str(piece_data[4]))

    # SKELETONS AND ANIMATIONS
    # bones are exported only for animated game objects
    # if scs_globals.export_anim_file != 'anim':
    if root_object.scs_props.scs_root_animated != 'anim':
        bone_list = []

    # DATA CREATION
    header_section = _fill_header_section(file_name, output_type)
    material_sections = _fill_material_sections(used_materials)
    piece_sections = []  # container for all "Pieces"
    global_vertex_count = 0
    global_face_count = 0
    global_edge_count = 0
    piece_index_obj = {}
    skin_list2 = []
    skin_weights_cnt = 0
    skin_clones_cnt = 0
    bones_section = None
    skin_section = None
    # pieces get filled by format version: 5 for regular output, 7 for "def" (data exchange) output
    if not output_type.startswith('def'):
        (piece_sections, global_vertex_count, global_face_count, piece_index_obj,
         skin_list2, skin_weights_cnt, skin_clones_cnt) = _fill_piece_sections_5(root_object, object_list, bone_list,
                                                                                 scene, used_materials, offset_matrix,
                                                                                 scs_globals)
        global_edge_count = 0
    else:
        (piece_sections, global_vertex_count, global_face_count, global_edge_count, piece_index_obj,
         skin_list2, skin_weights_cnt, skin_clones_cnt) = _fill_piece_sections_7(root_object, object_list, bone_list,
                                                                                 scene, vg_list, used_materials,
                                                                                 offset_matrix, scs_globals, output_type)
    locator_sections = _fill_locator_sections(model_locator_list)
    part_sections = _pix_container.fill_part_sections(piece_index_obj, model_locator_list, used_parts)
    if bone_list:
        bones_section = _fill_bones_section(bone_list)
        skin_section = _fill_skin_section(skin_list2, skin_weights_cnt, skin_clones_cnt)
    global_section = _fill_global_section(file_name, global_vertex_count, global_face_count, global_edge_count,
                                          len(material_sections), len(piece_sections), len(part_sections),
                                          len(bone_list), len(locator_sections), output_type)

    # DATA ASSEMBLING
    pim_container = [header_section, global_section]
    for section in material_sections:
        pim_container.append(section)
    for section in piece_sections:
        pim_container.append(section)
    for section in part_sections:
        pim_container.append(section)
    for section in locator_sections:
        pim_container.append(section)
    if bone_list:
        pim_container.append(bones_section)
        # for section in skin_section: pim_container.append(section)
        pim_container.append(skin_section)

    # FILE EXPORT
    ind = " "
    pim_filepath = str(filepath + ".pim")
    result = _pix_container.write_data_to_file(pim_container, pim_filepath, ind)

    # print("************************************")
    return result, piece_index_obj
def export(armature, bone_list, filepath, filename):
    """Exports PIA animation of the armature's active action.

    :param armature: armature object whose active action is exported
    :type armature: bpy.types.Object
    :param bone_list: bones for which channels are gathered
    :type bone_list: list
    :param filepath: path of the model this animation belongs to
    :type filepath: str
    :param filename: name of exported file (used to reference the skeleton)
    :type filename: str
    :return: Blender operator status set
    :rtype: set
    """
    scs_globals = _get_scs_globals()

    print("\n************************************")
    print("** SCS PIA Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA GATHERING
    action = armature.animation_data.action
    skeleton_file = filename + ".pis"
    anim_length = action.scs_props.action_length
    custom_export_dir = action.scs_props.anim_export_filepath

    channels_per_bone = _get_bone_channels(bone_list, action, scs_globals.export_scale)
    channels_custom = _get_custom_channels(action)

    # DATA CREATION (same call order as section order in the file)
    header_section = _fill_header_section(action, scs_globals.sign_export)
    custom_channel_sections = _fill_channel_sections(channels_custom, "CustomChannel")
    bone_channel_sections = _fill_channel_sections(channels_per_bone, "BoneChannel")
    global_section = _fill_global_section(skeleton_file, anim_length,
                                          len(channels_per_bone), len(channels_custom))

    # DATA ASSEMBLING
    pia_container = [header_section, global_section, *custom_channel_sections, *bone_channel_sections]

    # EXPORT PIA TO CUSTOM LOCATION when a valid directory is configured on the action
    dir_path = os.path.dirname(filepath)
    if custom_export_dir and os.path.isdir(custom_export_dir):
        dir_path = custom_export_dir
    # TODO: create the configured location when it does not exist?

    # FILE EXPORT
    pia_filepath = os.path.join(dir_path, action.name + ".pia")
    _pix_container.write_data_to_file(pia_container, pia_filepath, " ")

    return {'FINISHED'}
def export(prefab_locator_list, filepath, filename, offset_matrix):
    """Exports legacy PIP file from the given prefab locators.

    :param prefab_locator_list: prefab locators to export
    :type prefab_locator_list: list
    :param filepath: export path without extension
    :type filepath: str
    :param filename: name written into the PIP header
    :type filename: str
    :param offset_matrix: offset matrix applied to locator transformations
    :type offset_matrix: mathutils.Matrix
    :return: result of the PIX container write
    """
    scs_globals = _get_scs_globals()

    # CLEANUP CONNECTIONS DATA
    _connections_group_wrapper.cleanup_on_export()

    print("\n************************************")
    print("** SCS PIP Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA GATHERING - bucket locators by their prefab type
    node_list = []
    terrain_point_list = []  # never filled here; only its (zero) count is exported
    sign_list = []
    spawn_point_list = []
    traffic_light_list = []
    nav_point_list = []
    map_point_list = []
    trigger_point_list = []

    buckets = {
        'Control Node': node_list,
        'Sign': sign_list,
        'Spawn Point': spawn_point_list,
        'Traffic Semaphore': traffic_light_list,
        'Navigation Point': nav_point_list,
        'Map Point': map_point_list,
        'Trigger Point': trigger_point_list,
    }
    for locator in prefab_locator_list:
        bucket = buckets.get(locator.scs_props.locator_prefab_type)
        if bucket is not None:
            bucket.append(locator)

    # DATA CREATION
    header_section = _fill_header_section(filename, scs_globals.sign_export)
    node_sections = _fill_node_sections(node_list, offset_matrix)
    sign_sections = _fill_sign_sections(sign_list, scs_globals.scs_sign_model_inventory)
    spawn_point_sections = _fill_spawn_point_sections(spawn_point_list)
    traffic_light_sections = _fill_semaphore_sections(traffic_light_list, scs_globals.scs_tsem_profile_inventory)
    nav_curve_sections = _fill_nav_curve_sections(nav_point_list, offset_matrix)
    map_point_sections = _fill_map_point_sections(map_point_list, offset_matrix)
    trigger_point_sections = _fill_trigger_point_sections(trigger_point_list, offset_matrix)
    # nav_curve_intersections_sections = _fill_nav_curve_intersections_sections(nav_curve_sections)
    global_section = _fill_global_section(
        len(node_list), len(terrain_point_list), len(sign_list), len(spawn_point_list),
        len(traffic_light_list), len(nav_curve_sections), len(map_point_list), len(trigger_point_list),
        0  # len(nav_curve_intersections_sections)
    )

    # DATA ASSEMBLING
    pip_container = [header_section, global_section]
    for group in (node_sections, nav_curve_sections, sign_sections, spawn_point_sections,
                  traffic_light_sections, map_point_sections, trigger_point_sections):
        pip_container.extend(group)
    # for section in nav_curve_intersections_sections:
    #     pip_container.append(section)

    # FILE EXPORT
    pip_filepath = filepath + ".pip"
    result = _pix_container.write_data_to_file(pip_container, pip_filepath, " ")

    return result
def export(prefab_locator_list, filepath, filename, offset_matrix):
    """Exports PIP file from given prefab locators.

    :param prefab_locator_list: Blender locator objects to be sorted by prefab type and exported
    :type prefab_locator_list: list
    :param filepath: absolute export file path without the ".pip" extension
    :type filepath: str
    :param filename: name of exported file, written into the PIP header
    :type filename: str
    :param offset_matrix: matrix handed to node/curve/map/trigger section builders
    :return: result of PIX container file write
    """
    scs_globals = _get_scs_globals()

    # CLEANUP CONNECTIONS DATA
    _connections_group_wrapper.cleanup_on_export()

    print("\n************************************")
    print("** SCS PIP Exporter **")
    print("** (c)2014 SCS Software **")
    print("************************************\n")

    # DATA GATHERING: one list per recognized prefab locator type;
    # anything not listed here is skipped
    sorted_locators = {
        'Control Node': [],
        'Sign': [],
        'Spawn Point': [],
        'Traffic Semaphore': [],
        'Navigation Point': [],
        'Map Point': [],
        'Trigger Point': [],
    }
    terrain_point_list = []  # never filled here; only its count goes into the global section

    for locator in prefab_locator_list:
        prefab_type = locator.scs_props.locator_prefab_type
        if prefab_type in sorted_locators:
            sorted_locators[prefab_type].append(locator)

    node_list = sorted_locators['Control Node']
    sign_list = sorted_locators['Sign']
    spawn_point_list = sorted_locators['Spawn Point']
    traffic_light_list = sorted_locators['Traffic Semaphore']
    nav_point_list = sorted_locators['Navigation Point']
    map_point_list = sorted_locators['Map Point']
    trigger_point_list = sorted_locators['Trigger Point']

    # DATA CREATION
    header_section = _fill_header_section(filename, scs_globals.sign_export)
    node_sections = _fill_node_sections(node_list, offset_matrix)
    sign_sections = _fill_sign_sections(sign_list, scs_globals.scs_sign_model_inventory)
    spawn_point_sections = _fill_spawn_point_sections(spawn_point_list)
    traffic_light_sections = _fill_semaphore_sections(traffic_light_list, scs_globals.scs_tsem_profile_inventory)
    nav_curve_sections = _fill_nav_curve_sections(nav_point_list, offset_matrix)
    map_point_sections = _fill_map_point_sections(map_point_list, offset_matrix)
    trigger_point_sections = _fill_trigger_point_sections(trigger_point_list, offset_matrix)
    # nav_curve_intersections_sections = _fill_nav_curve_intersections_sections(nav_curve_sections)
    global_section = _fill_global_section(
        len(node_list),
        len(terrain_point_list),
        len(sign_list),
        len(spawn_point_list),
        len(traffic_light_list),
        len(nav_curve_sections),
        len(map_point_list),
        len(trigger_point_list),
        0  # len(nav_curve_intersections_sections)
    )

    # DATA ASSEMBLING: header and globals first, then section groups in PIP order
    pip_container = [header_section, global_section]
    for section_group in (node_sections,
                          nav_curve_sections,
                          sign_sections,
                          spawn_point_sections,
                          traffic_light_sections,
                          map_point_sections,
                          trigger_point_sections):
        for section in section_group:
            pip_container.append(section)
    # NOTE: nav curve intersection sections are currently not exported
    # (see the commented _fill_nav_curve_intersections_sections call above)

    # FILE EXPORT
    ind = " "
    pip_filepath = str(filepath + ".pip")
    result = _pix_container.write_data_to_file(pip_container, pip_filepath, ind)

    return result
def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators,
            used_parts, used_materials, used_bones, used_terrain_points):
    """Executes export of PIM file for given data.

    :param dirpath: directory path for PIM file
    :type dirpath: str
    :param root_object: Blender SCS Root empty object
    :type root_object: bpy.types.Object
    :param armature_object: Blender Armature object belonging to this SCS game object
    :type armature_object: bpy.types.Object
    :param skeleton_filepath: relative file path of PIS file
    :type skeleton_filepath: str
    :param mesh_objects: all the meshes which should be exported for current game object
    :type mesh_objects: list of bpy.types.Object
    :param model_locators: all Blender empty objects which represent model locators and should be exported for current game object
    :type model_locators: list of bpy.types.Object
    :param used_parts: parts transitional structure for storing used parts inside this PIM export
    :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
    :param used_materials: materials transitional structure for storing used materials inside this PIM export
    :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans
    :param used_bones: bones transitional structure for storing used bones inside this PIM export
    :type used_bones: io_scs_tools.exp.transition_structs.bones.BonesTrans
    :param used_terrain_points: terrain points transitional structure for storing used terrain points
    :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans
    :return: True if export was successful; False otherwise
    :rtype: bool
    """
    print("\n************************************")
    print("** SCS PIM Exporter **")
    print("** (c)2015 SCS Software **")
    print("************************************\n")

    scs_globals = _get_scs_globals()

    # output type "5" selects the newer format revision; anything else exports legacy "def" format
    if scs_globals.output_type == "5":
        format_version = 5
        format_type = ""
    else:
        format_version = 1
        format_type = "def"

    # skinning data is exported only when an armature exists AND the root is flagged as animated
    is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim")

    pim_header = Header(format_type, format_version, root_object.name)
    pim_global = Globall(skeleton_filepath)

    pim_materials = collections.OrderedDict()  # dict of Material class instances representing used materials
    """:type: dict[str, Material]"""
    pim_pieces = []  # list of Piece class instances representing mesh pieces
    """:type: list[Piece]"""
    pim_parts = {}  # list of Part class instances representing used parts
    """:type: dict[str, Part]"""
    pim_locators = []  # list of Locator class instances representing model locators
    """:type: list[Locator]"""

    objects_with_default_material = {}  # stores object names which has no material set
    missing_mappings_data = {}  # indicates if material doesn't have set any uv layer for export

    bones = skin = skin_stream = None
    if is_skin_used:
        # create bones data section
        bones = Bones()
        for bone in armature_object.data.bones:
            bones.add_bone(bone.name)
            used_bones.add(bone.name)

        # create skin data section
        skin_stream = SkinStream(SkinStream.Types.POSITION)
        skin = Skin(skin_stream)

    # create mesh object data sections
    for mesh_obj in mesh_objects:

        vert_groups = mesh_obj.vertex_groups

        mesh_pieces = collections.OrderedDict()

        # calculate faces flip state from all ancestors of current object
        # (an odd number of negative scale axes mirrors the mesh, so winding must be flipped)
        scale_sign = 1
        parent = mesh_obj
        while parent and parent.scs_props.empty_object_type != "SCS_Root":
            for scale_axis in parent.scale:
                scale_sign *= scale_axis
            parent = parent.parent
        face_flip = scale_sign < 0

        # calculate transformation matrix for current object (root object transforms are always subtracted!)
        mesh_transf_mat = root_object.matrix_world.inverted() * mesh_obj.matrix_world

        # calculate transformation matrices for this object
        pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) * _scs_to_blend_matrix().inverted())

        nor_transf_mat = _scs_to_blend_matrix().inverted()

        # get initial mesh and vertex groups for it
        mesh = _object_utils.get_mesh(mesh_obj)
        _mesh_utils.bm_prepare_mesh_for_export(mesh, mesh_transf_mat, face_flip)
        mesh.calc_normals_split()

        missing_uv_layers = {}  # stores missing uvs specified by materials of this object
        missing_vcolor = False  # indicates if object is missing vertex color layer
        missing_vcolor_a = False  # indicates if object is missing vertex color alpha layer

        for poly in mesh.polygons:

            mat_index = poly.material_index

            # check material existence and decide what material name and effect has to be used
            if mat_index >= len(mesh_obj.material_slots) or mesh_obj.material_slots[mat_index].material is None:  # no material or invalid index
                material = None
                pim_mat_name = "_not_existing_material_"
                pim_mat_effect = "eut2.dif"
                objects_with_default_material[mesh_obj.name] = 1
            else:
                material = mesh_obj.material_slots[mat_index].material
                pim_mat_name = material.name
                pim_mat_effect = material.scs_props.mat_effect_name

            # create new pim material if material with that name doesn't yet exists
            if pim_mat_name not in pim_materials:
                pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material)
                pim_materials[pim_mat_name] = pim_material
                used_materials.add(pim_mat_name, material)

            # create new piece if piece with this material doesn't exists yet -> split to pieces by material
            if pim_mat_name not in mesh_pieces:
                mesh_pieces[pim_mat_name] = Piece(len(pim_pieces) + len(mesh_pieces), pim_materials[pim_mat_name])

                nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name()

                # if there is uv layer used for normal maps then calculate tangents on it
                if nmap_uv_layer:
                    mesh.calc_tangents(uvmap=nmap_uv_layer)

            mesh_piece = mesh_pieces[pim_mat_name]
            """:type: Piece"""

            piece_vert_indices = []
            for loop_i in poly.loop_indices:

                loop = mesh.loops[loop_i]
                """:type: bpy.types.MeshLoop"""
                vert_i = loop.vertex_index

                # get data of current vertex
                # 1. position -> mesh.vertices[loop.vertex_index].co
                position = tuple(pos_transf_mat * mesh.vertices[vert_i].co)

                # 2. normal -> loop.normal -> calc_normals_split() has to be called before
                normal = nor_transf_mat * loop.normal
                normal = tuple(Vector(normal).normalized())

                # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv
                uvs = []
                uvs_aliases = []
                tex_coord_alias_map = pim_materials[pim_mat_name].get_tex_coord_map()
                if len(tex_coord_alias_map) < 1:  # no textures or none uses uv mapping in current material effect
                    uvs.append((0.0, 0.0))
                    uvs_aliases.append(["_TEXCOORD0"])

                    # report missing mappings only on actual materials with textures using uv mappings
                    if material and pim_materials[pim_mat_name].uses_textures_with_uv():
                        if material.name not in missing_mappings_data:
                            missing_mappings_data[material.name] = {}

                        if mesh_obj.name not in missing_mappings_data[material.name]:
                            missing_mappings_data[material.name][mesh_obj.name] = 1
                else:
                    for uv_lay_name in tex_coord_alias_map:

                        if uv_lay_name not in mesh.uv_layers:
                            uvs.append((0.0, 0.0))

                            # properly report missing uv layers where name of uv layout is key and materials that misses it are values
                            if uv_lay_name not in missing_uv_layers:
                                missing_uv_layers[uv_lay_name] = []

                            if pim_mat_name not in missing_uv_layers[uv_lay_name]:  # add material if not already there
                                missing_uv_layers[uv_lay_name].append(pim_mat_name)
                        else:
                            uv_lay = mesh.uv_layers[uv_lay_name]
                            uvs.append(_change_to_scs_uv_coordinates(uv_lay.data[loop_i].uv))

                        aliases = []
                        for alias_index in tex_coord_alias_map[uv_lay_name]:
                            aliases.append("_TEXCOORD" + str(alias_index))

                        uvs_aliases.append(aliases)

                # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color
                # NOTE(review): RGB and averaged alpha are doubled here — presumably the PIM
                # format treats 0.5 as neutral vertex color; confirm against format spec
                vcol_multi = mesh_obj.data.scs_props.vertex_color_multiplier
                if _MESH_consts.default_vcol not in mesh.vertex_colors:  # get RGB component of RGBA
                    vcol = (1.0,) * 3
                    missing_vcolor = True
                else:
                    color = mesh.vertex_colors[_MESH_consts.default_vcol].data[loop_i].color
                    vcol = (color[0] * 2 * vcol_multi, color[1] * 2 * vcol_multi, color[2] * 2 * vcol_multi)

                if _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix not in mesh.vertex_colors:  # get A component of RGBA
                    vcol += (1.0,)
                    missing_vcolor_a = True
                else:
                    alpha = mesh.vertex_colors[_MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix].data[loop_i].color
                    vcol += ((alpha[0] + alpha[1] + alpha[2]) / 3.0 * 2 * vcol_multi,)  # take avg of colors for alpha

                # 5. tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before
                if pim_materials[pim_mat_name].get_nmap_uv_name():  # calculate tangents only if needed
                    tangent = tuple(nor_transf_mat * loop.tangent)
                    tangent = tuple(Vector(tangent).normalized())
                    tangent = (tangent[0], tangent[1], tangent[2], loop.bitangent_sign)
                else:
                    tangent = None

                # save internal vertex index to array to be able to construct triangle afterwards
                piece_vert_index = mesh_piece.add_vertex(vert_i, position, normal, uvs, uvs_aliases, vcol, tangent)
                piece_vert_indices.append(piece_vert_index)

                if is_skin_used:
                    # get skinning data for vertex and save it to skin stream
                    bone_weights = {}
                    for v_group_entry in mesh.vertices[vert_i].groups:
                        bone_indx = bones.get_bone_index(vert_groups[v_group_entry.group].name)
                        bone_weight = v_group_entry.weight

                        # proceed only if bone exists in our armature
                        if bone_indx != -1:
                            bone_weights[bone_indx] = bone_weight

                    skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights)
                    skin_stream.add_entry(skin_entry)

                # save to terrain points storage if present in correct vertex group
                for group in mesh.vertices[vert_i].groups:

                    curr_vg_name = mesh_obj.vertex_groups[group.group].name

                    # if vertex group name doesn't match prescribed one ignore this vertex group
                    if not match(_OP_consts.TerrainPoints.vg_name_regex, curr_vg_name):
                        continue

                    # if node index is not in bounds ignore this vertex group
                    # NOTE(review): assumes the regex guarantees the last character is a digit
                    # and node count never exceeds 10 — confirm against vg_name_regex definition
                    node_index = int(curr_vg_name[-1])
                    if node_index >= _PL_consts.PREFAB_NODE_COUNT_MAX:
                        continue

                    # if no variants defined add globally (without variant block)
                    if len(root_object.scs_object_variant_inventory) == 0:
                        used_terrain_points.add(-1, node_index, position, normal)
                        continue

                    # finally iterate variant parts entries to find where this part is included
                    # and add terrain points to transitional structure
                    #
                    # NOTE: variant index is donated by direct order of variants in inventory
                    # so export in PIT has to use the same order otherwise variant
                    # indices will be misplaced
                    for variant_i, variant in enumerate(root_object.scs_object_variant_inventory):

                        used_terrain_points.ensure_entry(variant_i, node_index)

                        for variant_part in variant.parts:
                            if variant_part.name == mesh_obj.scs_props.scs_part and variant_part.include:
                                used_terrain_points.add(variant_i, node_index, position, normal)
                                break

            mesh_piece.add_triangle(tuple(piece_vert_indices[::-1]))  # invert indices because of normals flip

        # free normals calculations
        _mesh_utils.cleanup_mesh(mesh)

        # create part if it doesn't exists yet
        part_name = mesh_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)

        mesh_pieces = mesh_pieces.values()
        for piece in mesh_pieces:
            # put pieces of current mesh to global list
            pim_pieces.append(piece)

            # add pieces of current mesh to part
            pim_part = pim_parts[part_name]
            pim_part.add_piece(piece)

        # report missing data for each object
        if len(missing_uv_layers) > 0:
            for uv_lay_name in missing_uv_layers:
                lprint("W Object '%s' is missing UV layer '%s' specified by materials: %s\n",
                       (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name]))
        if missing_vcolor:
            lprint("W Object %r is missing vertex color layer with name %r! Default RGB color will be exported (0.5, 0.5, 0.5)!",
                   (mesh_obj.name, _MESH_consts.default_vcol))
        if missing_vcolor_a:
            lprint("W Object %r is missing vertex color alpha layer with name %r! Default alpha will be exported (0.5)",
                   (mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix))

    # report missing data for whole model
    if len(missing_mappings_data) > 0:
        for material_name in missing_mappings_data:
            lprint("W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t %s",
                   (material_name, list(missing_mappings_data[material_name].keys())))
    if len(objects_with_default_material) > 0:
        lprint("W Some objects don't use any material. Default material and UV mapping is used on them:\n\t %s",
               (list(objects_with_default_material.keys()),))

    # create locators data sections
    for loc_obj in model_locators:

        pos, qua, sca = _get_scs_transformation_components(root_object.matrix_world.inverted() * loc_obj.matrix_world)

        # negatively scaled locators are rejected (mirrored locators are not representable)
        if sca[0] * sca[1] * sca[2] < 0:
            lprint("W Model locator %r inside SCS Root Object %r not exported because of invalid scale.\n\t " +
                   "Model locators must have positive scale!", (loc_obj.name, root_object.name))
            continue

        name = _name_utils.tokenize_name(loc_obj.name)
        hookup_string = loc_obj.scs_props.locator_model_hookup
        # hookup string is expected in "<label> : <id>" form; only the id part is exported
        if hookup_string != "" and ":" in hookup_string:
            hookup = hookup_string.split(':', 1)[1].strip()
        else:
            if hookup_string != "":
                lprint("W The Hookup %r has no expected value!", hookup_string)
            hookup = None

        # create locator object for export
        locator = Locator(len(pim_locators), name, hookup)
        locator.set_position(pos)
        locator.set_rotation(qua)
        locator.set_scale(sca)

        # create part if it doesn't exists yet
        part_name = loc_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)

        # add locator to part
        pim_part = pim_parts[part_name]
        pim_part.add_locator(locator)

        # add locator to locator list
        pim_locators.append(locator)

    # create container
    pim_container = [pim_header.get_as_section(), pim_global.get_as_section()]

    for mat_name in pim_materials:
        pim_container.append(pim_materials[mat_name].get_as_section())

    for pim_piece in pim_pieces:
        pim_container.append(pim_piece.get_as_section())

    for part_name in used_parts.get_as_list():

        # export all parts even empty ones gathered from PIC and PIP
        if part_name in pim_parts:
            pim_container.append(pim_parts[part_name].get_as_section())
        else:
            pim_container.append(Part(part_name).get_as_section())

    for locator in pim_locators:
        pim_container.append(locator.get_as_section())

    if is_skin_used:
        pim_container.append(bones.get_as_section())
        pim_container.append(skin.get_as_section())

    # write to file
    ind = " "
    pim_filepath = os.path.join(dirpath, root_object.name + ".pim")
    return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
def execute(dirpath, root_object, mesh_objects, model_locators, used_parts, used_materials):
    """Executes export of PIM file for given data.

    :param dirpath: directory path for PIM file
    :type dirpath: str
    :param root_object: Blender SCS Root empty object
    :type root_object: bpy.types.Object
    :param mesh_objects: all the meshes which should be exported for current game object
    :type mesh_objects: list of bpy.types.Object
    :param model_locators: all Blender empty objects which represent model locators and should be exported for current game object
    :type model_locators: list of bpy.types.Object
    :param used_parts: dictionary of used part names; newly used parts are added here with value 1
    :type used_parts: dict
    :param used_materials: list of used material names; newly used materials are appended here
    :type used_materials: list
    :return: True if export was successful; False otherwise
    :rtype: bool
    """
    print("\n************************************")
    print("** SCS PIM Exporter **")
    print("** (c)2015 SCS Software **")
    print("************************************\n")

    scs_globals = _get_scs_globals()

    # output type "5" selects the newer format revision; anything else exports legacy "def" format
    if scs_globals.output_type == "5":
        format_version = 5
        format_type = ""
    else:
        format_version = 1
        format_type = "def"

    pim_header = Header(format_type, format_version, root_object.name)
    pim_global = Globall(root_object.name + ".pis")

    pim_materials = collections.OrderedDict()  # dict of Material class instances representing used materials
    """:type: dict of Material"""
    pim_pieces = []  # list of Piece class instances representing mesh pieces
    """:type: list of Piece"""
    pim_parts = collections.OrderedDict()  # list of Part class instances representing used parts
    """:type: dict of Part"""
    pim_locators = []  # list of Locator class instances representing model locators
    """:type: list of Locator"""

    objects_with_default_material = {}  # stores object names which has no material set
    missing_mappings_data = {}  # indicates if material doesn't have set any uv layer for export

    # create mesh object data sections
    for mesh_obj in mesh_objects:

        mesh_pieces = collections.OrderedDict()

        # get initial mesh
        mesh = _object_utils.get_mesh(mesh_obj)
        _mesh_utils.bm_triangulate(mesh)
        mesh.calc_normals_split()

        # calculate transformation matrices for this object
        # (export scale + SCS axis conversion, with root transform subtracted)
        pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) *
                          _scs_to_blend_matrix().inverted() *
                          root_object.matrix_world.inverted() *
                          mesh_obj.matrix_world)

        # normals use only the rotational part of the transforms (no scale/translation)
        nor_transf_mat = (_scs_to_blend_matrix().inverted() *
                          root_object.matrix_world.inverted().to_quaternion().to_matrix().to_4x4() *
                          mesh_obj.matrix_world.to_quaternion().to_matrix().to_4x4())

        missing_uv_layers = {}  # stores missing uvs specified by materials of this object
        missing_vcolor = False  # indicates if object is missing vertex colors

        for poly in mesh.polygons:

            mat_index = poly.material_index

            # check material existence and decide what material name and effect has to be used
            if mat_index >= len(mesh_obj.material_slots) or mesh_obj.material_slots[mat_index].material is None:  # no material or invalid index
                material = None
                pim_mat_name = "_not_existing_material_"
                pim_mat_effect = "eut2.dif"
                objects_with_default_material[mesh_obj.name] = 1
            else:
                material = mesh_obj.material_slots[mat_index].material
                pim_mat_name = material.name
                pim_mat_effect = material.scs_props.mat_effect_name

            # create new pim material if material with that name doesn't yet exists
            if pim_mat_name not in pim_materials:
                pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material)
                pim_materials[pim_mat_name] = pim_material
                used_materials.append(pim_mat_name)

            # create new piece if piece with this material doesn't exists yet -> split to pieces by material
            if pim_mat_name not in mesh_pieces:
                mesh_pieces[pim_mat_name] = Piece(len(pim_pieces) + len(mesh_pieces), pim_materials[pim_mat_name])

                nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name()

                # if there is uv layer used for normal maps then calculate tangents on it
                if nmap_uv_layer:
                    mesh.calc_tangents(uvmap=nmap_uv_layer)

            mesh_piece = mesh_pieces[pim_mat_name]
            """:type: Piece"""

            piece_vert_indices = []
            for loop_i in poly.loop_indices:

                loop = mesh.loops[loop_i]
                """:type: bpy.types.MeshLoop"""
                vert_i = loop.vertex_index

                # get data of current vertex
                # 1. position -> mesh.vertices[loop.vertex_index].co
                position = tuple(pos_transf_mat * mesh.vertices[vert_i].co)

                # 2. normal -> loop.normal -> calc_normals_split() has to be called before
                normal = nor_transf_mat * loop.normal
                normal = tuple(Vector(normal).normalized())

                # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv
                uvs = []
                uvs_aliases = []
                tex_coord_alias_map = pim_materials[pim_mat_name].get_tex_coord_map()
                if len(tex_coord_alias_map) < 1:  # no textures for current material effect
                    uvs.append((0.0, 0.0))
                    uvs_aliases.append(["_TEXCOORD0"])

                    # report missing mappings only on actual materials with texture entries
                    if material and pim_materials[pim_mat_name].uses_textures():
                        if material.name not in missing_mappings_data:
                            missing_mappings_data[material.name] = {}

                        if mesh_obj.name not in missing_mappings_data[material.name]:
                            missing_mappings_data[material.name][mesh_obj.name] = 1
                else:
                    for uv_lay_name in tex_coord_alias_map:

                        if uv_lay_name not in mesh.uv_layers:
                            uvs.append((0.0, 0.0))

                            # properly report missing uv layers where name of uv layout is key and materials that misses it are values
                            if uv_lay_name not in missing_uv_layers:
                                missing_uv_layers[uv_lay_name] = []

                            if pim_mat_name not in missing_uv_layers[uv_lay_name]:  # add material if not already there
                                missing_uv_layers[uv_lay_name].append(pim_mat_name)
                        else:
                            uv_lay = mesh.uv_layers[uv_lay_name]
                            uvs.append(_change_to_scs_uv_coordinates(uv_lay.data[loop_i].uv))

                        aliases = []
                        for alias_index in tex_coord_alias_map[uv_lay_name]:
                            aliases.append("_TEXCOORD" + str(alias_index))

                        uvs_aliases.append(aliases)

                # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color
                # NOTE(review): only the first vertex color layer is used and alpha is fixed to 1.0
                if len(mesh.vertex_colors) < 1:
                    vcol = (1.0, 1.0, 1.0, 1.0)
                    missing_vcolor = True
                else:
                    multiplier = mesh_obj.data.scs_props.vertex_color_multiplier
                    color = mesh.vertex_colors[0].data[loop_i].color
                    vcol = (color[0] * multiplier, color[1] * multiplier, color[2] * multiplier, 1.0)

                # 5. tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before
                if pim_materials[pim_mat_name].get_nmap_uv_name():  # calculate tangents only if needed
                    tangent = tuple(nor_transf_mat * loop.tangent)
                    tangent = tuple(Vector(tangent).normalized())
                    tangent = (tangent[0], tangent[1], tangent[2], loop.bitangent_sign)
                else:
                    tangent = None

                # save internal vertex index to array to be able to construct triangle afterwards
                piece_vert_index = mesh_piece.add_vertex(vert_i, position, normal, uvs, uvs_aliases, vcol, tangent)
                piece_vert_indices.append(piece_vert_index)

            mesh_piece.add_triangle(tuple(piece_vert_indices[::-1]))  # invert indices because of normals flip

        # create part if it doesn't exists yet
        part_name = mesh_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)
            used_parts[part_name] = 1

        mesh_pieces = mesh_pieces.values()
        for piece in mesh_pieces:
            # put pieces of current mesh to global list
            pim_pieces.append(piece)

            # add pieces of current mesh to part
            pim_part = pim_parts[part_name]
            pim_part.add_piece(piece)

        # report missing data for each object
        if len(missing_uv_layers) > 0:
            for uv_lay_name in missing_uv_layers:
                lprint("W Object '%s' is missing UV layer '%s' specified by materials: %s\n",
                       (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name]))
        if missing_vcolor:
            lprint("W Object '%s' is missing vertex color layer! Default color will be exported (1, 1, 1, 1)!",
                   (mesh_obj.name, ))

    # report missing data for whole model
    if len(missing_mappings_data) > 0:
        for material_name in missing_mappings_data:
            lprint("W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t %s",
                   (material_name, list(missing_mappings_data[material_name].keys())))
    if len(objects_with_default_material) > 0:
        lprint("W Some objects don't use any material. Default material and UV mapping is used on them:\n\t %s",
               (list(objects_with_default_material.keys()), ))

    # create locators data sections
    for loc_obj in model_locators:

        name = _name_utils.tokenize_name(loc_obj.name)
        hookup_string = loc_obj.scs_props.locator_model_hookup
        # hookup string is expected in "<label> : <id>" form; only the id part is exported
        if hookup_string != "" and ":" in hookup_string:
            hookup = hookup_string.split(':', 1)[1].strip()
        else:
            if hookup_string != "":
                lprint("W The Hookup %r has no expected value!", hookup_string)
            hookup = None

        # NOTE(review): unlike the skinned exporter variant, root object transform is NOT
        # subtracted from locator matrices here — confirm this is intentional
        pos, qua, sca = _get_scs_transformation_components(loc_obj.matrix_world)

        # create locator object for export
        locator = Locator(len(pim_locators), name, hookup)
        locator.set_position(pos)
        locator.set_rotation(qua)
        locator.set_scale(sca)

        # create part if it doesn't exists yet
        part_name = loc_obj.scs_props.scs_part
        if part_name not in pim_parts:
            pim_parts[part_name] = Part(part_name)
            used_parts[part_name] = 1

        # add locator to part
        pim_part = pim_parts[part_name]
        pim_part.add_locator(locator)

        # add locator to locator list
        pim_locators.append(locator)

    # create container
    pim_container = [pim_header.get_as_section(), pim_global.get_as_section()]

    for mat_name in pim_materials:
        pim_container.append(pim_materials[mat_name].get_as_section())

    for pim_piece in pim_pieces:
        pim_container.append(pim_piece.get_as_section())

    # NOTE(review): assumes every key in used_parts has a matching Part in pim_parts;
    # a stale entry from a previous export would raise KeyError — confirm caller passes a fresh dict
    for part_name in used_parts:
        pim_container.append(pim_parts[part_name].get_as_section())

    for locator in pim_locators:
        pim_container.append(locator.get_as_section())

    # write to file
    ind = " "
    pim_filepath = dirpath + os.sep + root_object.name + ".pim"
    return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)