def execute(dirpath, filename, name_suffix, prefab_locator_list, offset_matrix, used_parts, used_terrain_points): """Exports PIP file from given locator list. :param prefab_locator_list: :type prefab_locator_list: list of bpy.types.Object :param dirpath: directory export path :type dirpath: str :param filename: name of PIP file :type filename: str :param name_suffix: file name suffix :type name_suffix: str :param offset_matrix: offset matrix for locators :type offset_matrix: mathutils.Matrix :param used_parts: parts transitional structure for storing used parts inside this PIP export :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans :param used_terrain_points: terrain points transitional structure for accessing terrain points stored during PIM export :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans :return: True if successfull; otherwise False :rtype: bool """ # CLEANUP CONNECTIONS DATA _connections_group_wrapper.cleanup_on_export() print("\n************************************") print("** SCS PIP Exporter **") print("** (c)2015 SCS Software **") print("************************************\n") (control_node_locs, nav_point_locs, sign_locs, spawn_point_locs, semaphore_locs, map_point_locs, trigger_point_locs) = __sort_locators_by_type__(prefab_locator_list) pip_header = Header(2, filename) pip_global = Globall() pip_nodes = OrderedDict() """:type: dict[int,Node]""" pip_curves = OrderedDict() """:type: dict[int, Curve]""" pip_signs = [] """:type: list[Sign]""" pip_spawn_points = [] """:type: list[SpawnPoint]""" pip_semaphores = [] """:type: list[Semaphore]""" pip_map_points = OrderedDict() """:type: dict[str, MapPoint]""" pip_trigger_points = OrderedDict() """:type: dict[str, TriggerPoint]""" pip_intersections = [OrderedDict(), OrderedDict(), OrderedDict()] """:type: list[dict[str, list[Intersection]]]""" # nodes creation for locator in control_node_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" curr_node_i = int(locator_scs_props.locator_prefab_con_node_index) if curr_node_i not in pip_nodes: pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) rot = Quaternion(rot) * Vector((0, 0, -1)) # create node with position and direction cn = Node(curr_node_i, pos, rot) # add terrain points terrain_points = used_terrain_points.get(curr_node_i) for variant_i in terrain_points: # ensure variant entry for no terrain points case cn.ensure_variant(variant_i) for tp_entry in terrain_points[variant_i]: cn.add_terrain_point(tp_entry.position, tp_entry.normal, variant_i) pip_nodes[curr_node_i] = cn else: lprint("W Multiple Control Nodes with same index detected, only one per index will be exported!\n\t " "Check Control Nodes in SCS Game Object with Root: %r", (filename,)) # curves creation curves_dict = _connections_group_wrapper.get_curves(nav_point_locs.values()) for key, curve_entry in curves_dict.items(): loc0 = nav_point_locs[curves_dict[key].start] loc0_scs_props = loc0.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" loc1 = nav_point_locs[curves_dict[key].end] loc1_scs_props = loc1.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create curve and set properties curve = __get_curve__(pip_curves, curve_entry.index, loc0.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * loc0.matrix_world) curve.set_start(pos, rot) pos, rot, scale = 
_get_scs_transformation_components(offset_matrix.inverted() * loc1.matrix_world) curve.set_end(pos, rot) curve.set_input_boundaries(loc0_scs_props) curve.set_output_boundaries(loc1_scs_props) curve.set_flags(loc0.scs_props, True) curve.set_flags(loc1.scs_props, False) curve.set_semaphore_id(int(loc0_scs_props.locator_prefab_np_traffic_semaphore)) curve.set_traffic_rule(loc0_scs_props.locator_prefab_np_traffic_rule) # set next/prev curves for next_key in curve_entry.next_curves: next_curve = __get_curve__(pip_curves, curves_dict[next_key].index, curves_dict[next_key].start) assert curve.add_next_curve(next_curve) for prev_key in curve_entry.prev_curves: prev_curve = __get_curve__(pip_curves, curves_dict[prev_key].index, curves_dict[prev_key].start) assert curve.add_prev_curve(prev_curve) # sync nodes input lanes boundary_node_i = curve.get_input_node_index() if 0 <= boundary_node_i < _PL_consts.PREFAB_NODE_COUNT_MAX: if boundary_node_i in pip_nodes: assert pip_nodes[boundary_node_i].set_input_lane(curve.get_input_lane_index(), curve.get_index()) else: lprint("E None existing Boundary Node with index: %s used in Navigation Point: %r", (boundary_node_i, loc0.name,)) # sync nodes output lanes boundary_node_i = curve.get_output_node_index() if 0 <= boundary_node_i < _PL_consts.PREFAB_NODE_COUNT_MAX: if boundary_node_i in pip_nodes: assert pip_nodes[boundary_node_i].set_output_lane(curve.get_output_lane_index(), curve.get_index()) else: lprint("E None existing Boundary Node with index: %s used in Navigation Point: %r", (boundary_node_i, loc1.name,)) Curve.prepare_curves(pip_curves.values()) # signs creation for locator in sign_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create sign and set properties sign = Sign(locator.name, used_parts.ensure_part(locator)) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) sign.set_position(pos) sign.set_rotation(rot) if ":" in locator_scs_props.locator_prefab_sign_model: sign.set_model(locator_scs_props.locator_prefab_sign_model.split(":")[1].strip()) else: lprint("W Invalid Sign Model: %r on locator: %r", (locator_scs_props.locator_prefab_sign_model, locator.name)) pip_signs.append(sign) # spawn points creation for locator in spawn_point_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create spawn point and set properties spawn_point = SpawnPoint(locator.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) spawn_point.set_position(pos) spawn_point.set_rotation(rot) spawn_point.set_type(int(locator_scs_props.locator_prefab_spawn_type)) pip_spawn_points.append(spawn_point) # semaphores creation for locator in semaphore_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create semaphore and set properties semaphore = Semaphore(int(locator_scs_props.locator_prefab_tsem_type)) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) semaphore.set_position(pos) semaphore.set_rotation(rot) semaphore.set_semaphore_id(int(locator_scs_props.locator_prefab_tsem_id)) if ":" in locator_scs_props.locator_prefab_tsem_profile: semaphore.set_profile(locator_scs_props.locator_prefab_tsem_profile.split(":")[1].strip()) else: lprint("W Invalid Profile: %r on Traffic Semaphore locator: %r", 
(locator_scs_props.locator_prefab_tsem_profile, locator.name)) semaphore.set_intervals((locator_scs_props.locator_prefab_tsem_gs, locator_scs_props.locator_prefab_tsem_os1, locator_scs_props.locator_prefab_tsem_rs, locator_scs_props.locator_prefab_tsem_os2)) semaphore.set_cycle(locator_scs_props.locator_prefab_tsem_cyc_delay) pip_semaphores.append(semaphore) # map points creation for locator in map_point_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create map point and set properties map_point = __get_map_point__(pip_map_points, locator.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) map_point.set_position(pos) map_point.set_flags(locator_scs_props) for neighbour_name in _connections_group_wrapper.get_neighbours(locator): assert map_point.add_neighbour(__get_map_point__(pip_map_points, neighbour_name)) MapPoint.calc_segment_extensions(pip_map_points.values()) MapPoint.test_map_points(pip_map_points.values()) MapPoint.auto_generate_map_points(pip_map_points, pip_nodes) # trigger points creation for locator in trigger_point_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create trigger point and set properties trigger_point = __get_trigger_point__(pip_trigger_points, locator.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) trigger_point.set_position(pos) if ":" in locator_scs_props.locator_prefab_tp_action: trigger_point.set_action(locator_scs_props.locator_prefab_tp_action.split(":")[1].strip()) else: lprint("W Invalid Action: %r on Trigger Point locator: %r", (locator_scs_props.locator_prefab_tp_action, locator.name)) trigger_point.set_trigger_range(locator_scs_props.locator_prefab_tp_range) trigger_point.set_reset_delay(locator_scs_props.locator_prefab_tp_reset_delay) trigger_point.set_flags(locator_scs_props) for neighbour_name in _connections_group_wrapper.get_neighbours(locator): assert trigger_point.add_neighbour(__get_trigger_point__(pip_trigger_points, neighbour_name)) TriggerPoint.prepare_trigger_points(pip_trigger_points.values()) # intersections creation for c0_i, c0 in enumerate(sorted(pip_curves.values())): for c1_i, c1 in enumerate(sorted(pip_curves.values())): if c1_i <= c0_i: # only search each pair of curves once continue # get the intersection point and curves coefficient positions intersect_p, c0_pos, c1_pos = Intersection.get_intersection(c0, c1) if intersect_p: intersect_p_str = str(intersect_p) # Format: '<Vector (0.0000, 0.0000, 0.0000)>' is_start = c0_pos == 0 and c0_pos == c1_pos is_end = c1_pos == 1 and c0_pos == c1_pos is_split_sharp = False if is_start: inter_type = 0 # fork elif is_end: inter_type = 1 # joint else: inter_type = 2 # cross # if there is indication of cross intersection filter out intersections with common fork and joint # NOTE: this condition might not be sufficient, so if anyone will have problems, # this is the point that has to be improved if Intersection.have_common_fork(c0, c1) or Intersection.have_common_joint(c0, c1): continue # calculate radius for the same directions on curves forward_radius = Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, 1, 1) backward_radius = Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, -1, -1) final_radius = max(forward_radius, backward_radius) # special calculations only for cross intersections if inter_type == 2: # calculate radius also 
for opposite directions final_radius = max(final_radius, Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, 1, -1)) final_radius = max(final_radius, Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, -1, 1)) # calculate position of intersection point on curves with better precision c0_pos = c0.get_closest_point(intersect_p) c1_pos = c1.get_closest_point(intersect_p) # calculate if split cross intersection is too sharp for allowing of smother traffic flow c0_dir = c0.get_curve_tangent_at_position(c0_pos) c1_dir = c1.get_curve_tangent_at_position(c1_pos) is_split_sharp = c0_dir.dot(c1_dir) >= _PL_consts.CURVE_SPLIT_CROSS_DOT lprint("D Found cross intersection point: %r", (intersect_p,)) # creating intersection class instances intersection = Intersection(c0.get_index(), c0.get_ui_name(), c0_pos * c0.get_length()) intersection1 = Intersection(c1.get_index(), c1.get_ui_name(), c1_pos * c1.get_length()) # init list of intersections for current intersecting point if intersect_p_str not in pip_intersections[inter_type]: pip_intersections[inter_type][intersect_p_str] = [] # append intersections to list and calculate new siblings new_siblings = 2 if intersection not in pip_intersections[inter_type][intersect_p_str]: pip_intersections[inter_type][intersect_p_str].append(intersection) else: del intersection new_siblings -= 1 if intersection1 not in pip_intersections[inter_type][intersect_p_str]: pip_intersections[inter_type][intersect_p_str].append(intersection1) else: del intersection1 new_siblings -= 1 # always set flags on first entry in current intersection point list # this way siblings count is getting updated properly pip_intersections[inter_type][intersect_p_str][0].set_flags(is_start, is_end, is_split_sharp, new_siblings) # update radius on all of intersection in the same intersecting point for inter in pip_intersections[inter_type][intersect_p_str]: inter.set_radius(pip_intersections[inter_type][intersect_p_str][0].get_radius()) inter.set_radius(final_radius) # create container pip_container = [pip_header.get_as_section(), pip_global.get_as_section()] for node in pip_nodes.values(): pip_container.append(node.get_as_section()) for curve_key in sorted(pip_curves): pip_container.append(pip_curves[curve_key].get_as_section()) for sign in pip_signs: pip_container.append(sign.get_as_section()) for spawn_point in pip_spawn_points: pip_container.append(spawn_point.get_as_section()) for semaphore in pip_semaphores: pip_container.append(semaphore.get_as_section()) for map_point in pip_map_points.values(): pip_container.append(map_point.get_as_section()) for trigger_point in pip_trigger_points.values(): pip_container.append(trigger_point.get_as_section()) for inter_type in range(3): for intersect_p_str in pip_intersections[inter_type]: for intersection in pip_intersections[inter_type][intersect_p_str]: pip_container.append(intersection.get_as_section()) # write to file ind = " " pip_filepath = path.join(dirpath, str(filename + ".pip" + name_suffix)) result = _pix_container.write_data_to_file(pip_container, pip_filepath, ind) return result
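
# NOTE: the intersection export above relies on one detail worth spelling out: all Intersection
# entries that share the same intersecting point live in one list, and set_flags() is only ever
# called on the first entry, so the sibling count accumulates on a single object per point (see the
# "always set flags on first entry" comment). A minimal, self-contained sketch of that accumulation
# pattern follows; the class below is a hypothetical simplification for illustration only, not the
# exporter's actual Intersection API.


class _PointSiblingCounter:
    """Owns the shared sibling counter for one intersecting point."""

    def __init__(self):
        self.siblings = 0

    def add_siblings(self, new_siblings):
        # called once per intersecting curve pair found in this point
        self.siblings += new_siblings


# usage: three curves meeting in one point are discovered as three curve pairs;
# the first pair contributes 2 new entries, the second 1 and the third 0 -> 3 siblings total
_counters = {}
for _point_str, _new_siblings in (("(0, 0, 0)", 2), ("(0, 0, 0)", 1), ("(0, 0, 0)", 0)):
    _counters.setdefault(_point_str, _PointSiblingCounter()).add_siblings(_new_siblings)
assert _counters["(0, 0, 0)"].siblings == 3
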
def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators, used_parts, used_materials, used_bones, used_terrain_points): """Executes export of PIM file for given data. :param dirpath: directory path for PIM file :type dirpath: str :param name_suffix: file name suffix :type name_suffix: str :param root_object: Blender SCS Root empty object :type root_object: bpy.types.Object :param armature_object: Blender Armature object belonging to this SCS game object :type armature_object: bpy.types.Object :param skeleton_filepath: relative file path of PIS file :type skeleton_filepath: str :param mesh_objects: all the meshes which should be exported for current game object :type mesh_objects: list of bpy.types.Object :param model_locators: all Blender empty objects which represent model locators and should be exported for current game object :type model_locators: list of bpy.types.Object :param used_parts: parts transitional structure for storing used parts inside this PIM export :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans :param used_materials: materials transitional structure for storing used materials inside this PIM export :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans :param used_bones: bones transitional structure for storing used bones inside this PIM export :type used_bones: io_scs_tools.exp.transition_structs.bones.BonesTrans :param used_terrain_points: terrain points transitional structure for storing used terrain points :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans :return: True if export was successful; False otherwise :rtype: bool """ print("\n************************************") print("** SCS PIM.EF Exporter **") print("** (c)2017 SCS Software **") print("************************************\n") scs_globals = _get_scs_globals() format_version = 1 is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim") pim_header = Header("", format_version, root_object.name) pim_global = Globall(used_parts.count(), skeleton_filepath) pim_materials = collections.OrderedDict() # dict of Material class instances representing used materials """:type: dict[str, Material]""" pim_pieces = [] # list of Piece class instances representing mesh pieces """:type: list[Piece]""" pim_parts = {} # dict of Part class instances representing used parts """:type: dict[str, Part]""" pim_locators = [] # list of Locator class instances representing model locators """:type: list[Locator]""" objects_with_default_material = {} # stores object names which have no material set missing_mappings_data = {} # indicates if material doesn't have any uv layer set for export bones = skin = skin_stream = None if is_skin_used: # create bones data section bones = Bones() for bone in armature_object.data.bones: bones.add_bone(bone.name) used_bones.add(bone.name) # create skin data section skin_stream = SkinStream(SkinStream.Types.POSITION) skin = Skin(skin_stream) # create mesh object data sections for mesh_obj in mesh_objects: vert_groups = mesh_obj.vertex_groups # calculate faces flip state from all ancestors of current object scale_sign = 1 parent = mesh_obj while parent and parent.scs_props.empty_object_type != "SCS_Root": for scale_axis in parent.scale: scale_sign *= scale_axis parent = parent.parent winding_order = 1 if scale_sign < 0: winding_order = -1 # calculate transformation matrix for current object (root object transforms are always
subtracted!) mesh_transf_mat = root_object.matrix_world.inverted() * mesh_obj.matrix_world """:type: mathutils.Matrix""" # calculate vertex position transformation matrix for this object pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) * _scs_to_blend_matrix().inverted()) """:type: mathutils.Matrix""" # calculate vertex normals transformation matrix for this object # NOTE: as normals will be read from a mesh that was not prepared for export, we have to add rotation and scale from mesh transformation matrix _, rot, scale = mesh_transf_mat.decompose() scale_matrix_x = Matrix.Scale(scale.x, 3, Vector((1, 0, 0))).to_4x4() scale_matrix_y = Matrix.Scale(scale.y, 3, Vector((0, 1, 0))).to_4x4() scale_matrix_z = Matrix.Scale(scale.z, 3, Vector((0, 0, 1))).to_4x4() nor_transf_mat = (_scs_to_blend_matrix().inverted() * rot.to_matrix().to_4x4() * scale_matrix_x * scale_matrix_y * scale_matrix_z) """:type: mathutils.Matrix""" # get initial mesh and vertex groups for it mesh = _object_utils.get_mesh(mesh_obj) _mesh_utils.bm_prepare_mesh_for_export(mesh, mesh_transf_mat) # get extra mesh only for normals mesh_for_normals = _object_utils.get_mesh(mesh_obj) mesh_for_normals.calc_normals_split() missing_uv_layers = {} # stores missing uvs specified by materials of this object missing_vcolor = False # indicates if object is missing vertex color layer missing_vcolor_a = False # indicates if object is missing vertex color alpha layer missing_skinned_verts = set() # indicates if object has only partial skin, which is not allowed in our models has_unnormalized_skin = False # indicates if object has vertices whose bone weight sum is smaller than one hard_edges = set() mesh_piece = Piece(len(pim_pieces)) """:type: Piece""" for poly in mesh.polygons: mat_index = poly.material_index # check material existence and decide what material name and effect has to be used if mat_index >= len(mesh_obj.material_slots) or mesh_obj.material_slots[mat_index].material is None: # no material or invalid index material = None pim_mat_name = "_default_material_-_default_settings_" pim_mat_effect = "eut2.dif" objects_with_default_material[mesh_obj.name] = 1 else: material = mesh_obj.material_slots[mat_index].material pim_mat_name = material.name pim_mat_effect = material.scs_props.mat_effect_name # create new pim material if material with that name doesn't exist yet if pim_mat_name not in pim_materials: pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material) pim_materials[pim_mat_name] = pim_material used_materials.add(pim_mat_name, material) piece_vert_indices = [] vert_normals = [] vert_uvs = [] uvs_aliases = [] uvs_names = collections.OrderedDict() vert_rgbas = [] rgbas_names = collections.OrderedDict() tex_coord_alias_map = pim_materials[pim_mat_name].get_tex_coord_map() for loop_i in poly.loop_indices: loop = mesh.loops[loop_i] """:type: bpy.types.MeshLoop""" vert_i = loop.vertex_index # as we are already looping, first find out if edge is hard and put it into the set if mesh.edges[loop.edge_index].use_edge_sharp: hard_edges.add(loop.edge_index) # get data of current vertex # 1. position -> mesh.vertices[loop.vertex_index].co position = tuple(pos_transf_mat * mesh.vertices[vert_i].co) # 2. normal -> loop.normal -> calc_normals_split() has to be called before normal = nor_transf_mat * mesh_for_normals.loops[loop_i].normal normal = tuple(Vector(normal).normalized()) vert_normals.append(normal) # 3.
uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv uvs = [] uvs_aliases = [] if len(tex_coord_alias_map) < 1: # no textures or none uses uv mapping in current material effect uvs.append((0.0, 0.0)) uvs_names["generated"] = True uvs_aliases.append(["_TEXCOORD0"]) # report missing mappings only on actual materials with textures using uv mappings if material and pim_materials[pim_mat_name].uses_textures_with_uv(): if material.name not in missing_mappings_data: missing_mappings_data[material.name] = {} if mesh_obj.name not in missing_mappings_data[material.name]: missing_mappings_data[material.name][mesh_obj.name] = 1 else: for uv_lay_name in mesh.uv_layers.keys(): uv_lay = mesh.uv_layers[uv_lay_name] uvs.append(_change_to_scs_uv_coordinates(uv_lay.data[loop_i].uv)) uvs_names[uv_lay_name] = True aliases = [] if uv_lay_name in tex_coord_alias_map: for alias_index in tex_coord_alias_map[uv_lay_name]: aliases.append("_TEXCOORD" + str(alias_index)) uvs_aliases.append(aliases) vert_uvs.append(uvs) # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color rgbas = [] vcol_multi = mesh_obj.data.scs_props.vertex_color_multiplier if _MESH_consts.default_vcol not in mesh.vertex_colors: # get RGB component of RGBA vcol = (1.0,) * 3 missing_vcolor = True else: color = mesh.vertex_colors[_MESH_consts.default_vcol].data[loop_i].color vcol = (color[0] * 2 * vcol_multi, color[1] * 2 * vcol_multi, color[2] * 2 * vcol_multi) if _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix not in mesh.vertex_colors: # get A component of RGBA vcol += (1.0,) missing_vcolor_a = True else: alpha = mesh.vertex_colors[_MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix].data[loop_i].color vcol += ((alpha[0] + alpha[1] + alpha[2]) / 3.0 * 2 * vcol_multi,) # take avg of colors for alpha rgbas.append(vcol) rgbas_names[_MESH_consts.default_vcol] = True # export rest of the vertex colors too, but do not apply extra multiplies of SCS exporter # as rest of the layers are just artist layers for vcol_layer in mesh.vertex_colors: # we already computed thoose so ignore them if vcol_layer.name in [_MESH_consts.default_vcol, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix]: continue color = vcol_layer.data[loop_i].color vcol = (color[0], color[1], color[2], 1.0) rgbas.append(vcol) rgbas_names[vcol_layer.name] = True vert_rgbas.append(rgbas) # save internal vertex index to array to be able to construct triangle afterwards piece_vert_index = mesh_piece.add_vertex(vert_i, position) piece_vert_indices.append(piece_vert_index) if is_skin_used: # get skinning data for vertex and save it to skin stream bone_weights = {} bone_weights_sum = 0 for v_group_entry in mesh.vertices[vert_i].groups: bone_indx = bones.get_bone_index(vert_groups[v_group_entry.group].name) bone_weight = v_group_entry.weight # proceed only if bone exists in our armature if bone_indx != -1: bone_weights[bone_indx] = bone_weight bone_weights_sum += bone_weight skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights, bone_weights_sum) skin_stream.add_entry(skin_entry) # report un-skinned vertices (no bones or zero sum weight) or badly skinned model if bone_weights_sum <= 0: missing_skinned_verts.add(vert_i) elif bone_weights_sum < 1: has_unnormalized_skin = True # save to terrain points storage if present in correct vertex group for group in mesh.vertices[vert_i].groups: # if current object doesn't have vertex group found in mesh data, then ignore that group # This can happen if multiple objects are using 
same mesh and # some of them have vertex groups, but others not. if group.group >= len(mesh_obj.vertex_groups): continue curr_vg_name = mesh_obj.vertex_groups[group.group].name # if vertex group name doesn't match prescribed one ignore this vertex group if not match(_OP_consts.TerrainPoints.vg_name_regex, curr_vg_name): continue # if node index is not in bounds ignore this vertex group node_index = int(curr_vg_name[-1]) if node_index >= _PL_consts.PREFAB_NODE_COUNT_MAX: continue # if no variants defined add globally (without variant block) if len(root_object.scs_object_variant_inventory) == 0: used_terrain_points.add(-1, node_index, position, normal) continue # finally iterate variant parts entries to find where this part is included # and add terrain points to transitional structure # # NOTE: variant index is donated by direct order of variants in inventory # so export in PIT has to use the same order otherwise variant # indices will be misplaced for variant_i, variant in enumerate(root_object.scs_object_variant_inventory): used_terrain_points.ensure_entry(variant_i, node_index) for variant_part in variant.parts: if variant_part.name == mesh_obj.scs_props.scs_part and variant_part.include: used_terrain_points.add(variant_i, node_index, position, normal) break assert mesh_piece.add_face(pim_materials[pim_mat_name], tuple(piece_vert_indices[::winding_order * -1]), # invert indices because of conversion to scs system tuple(vert_normals[::winding_order]), tuple(vert_uvs[::winding_order]), list(uvs_names.keys()), uvs_aliases, tuple(vert_rgbas[::winding_order]), list(rgbas_names.keys()) ) # as we captured all hard edges collect them now and put it into Piece for hard_edge in hard_edges: (vert1_i, vert2_i) = mesh.edges[hard_edge].vertices assert mesh_piece.add_edge(vert1_i, vert2_i, blender_mesh_indices=True) # free normals calculations and eventually remove mesh object _mesh_utils.cleanup_mesh(mesh) _mesh_utils.cleanup_mesh(mesh_for_normals) # create part if it doesn't exists yet part_name = mesh_obj.scs_props.scs_part if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) # put pieces of current mesh to global list pim_pieces.append(mesh_piece) # add pieces of current mesh to part pim_part = pim_parts[part_name] pim_part.add_piece(mesh_piece) # report missing data for each object if len(missing_uv_layers) > 0: for uv_lay_name in missing_uv_layers: lprint("W Object '%s' is missing UV layer '%s' specified by materials: %s\n", (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name])) if missing_vcolor: lprint("W Object %r is missing vertex color layer with name %r! Default RGB color will be exported (0.5, 0.5, 0.5)!", (mesh_obj.name, _MESH_consts.default_vcol)) if missing_vcolor_a: lprint("W Object %r is missing vertex color alpha layer with name %r! 
Default alpha will be exported (0.5)", (mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix)) if len(missing_skinned_verts) > 0: lprint("E Object %r from SCS Root %r has %s vertices which are not skinned to any bone, expect errors during conversion!", (mesh_obj.name, root_object.name, len(missing_skinned_verts))) if has_unnormalized_skin: lprint("W Object %r from SCS Root %r has unormalized skinning, exporting normalized weights!\n\t " "You can normalize weights by selecting object & executing 'Normalize All Vertex Groups'.", (mesh_obj.name, root_object.name)) # report missing data for whole model if len(missing_mappings_data) > 0: for material_name in missing_mappings_data: lprint("W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t %s", (material_name, list(missing_mappings_data[material_name].keys()))) if len(objects_with_default_material) > 0: lprint("W Some objects don't use any material. Default material and UV mapping is used on them:\n\t %s", (list(objects_with_default_material.keys()),)) # create locators data sections for loc_obj in model_locators: pos, qua, sca = _get_scs_transformation_components(root_object.matrix_world.inverted() * loc_obj.matrix_world) if sca[0] * sca[1] * sca[2] < 0: lprint("W Model locator %r inside SCS Root Object %r not exported because of invalid scale.\n\t " + "Model locators must have positive scale!", (loc_obj.name, root_object.name)) continue name = _name_utils.tokenize_name(loc_obj.name) hookup_string = loc_obj.scs_props.locator_model_hookup if hookup_string != "" and ":" in hookup_string: hookup = hookup_string.split(':', 1)[1].strip() else: if hookup_string != "": lprint("W The Hookup %r has no expected value!", hookup_string) hookup = None # create locator object for export locator = Locator(len(pim_locators), name, hookup) locator.set_position(pos) locator.set_rotation(qua) locator.set_scale(sca) # create part if it doesn't exists yet part_name = loc_obj.scs_props.scs_part if part_name not in pim_parts: assert used_parts.is_present(part_name) pim_parts[part_name] = Part(part_name) # add locator to part pim_part = pim_parts[part_name] pim_part.add_locator(locator) # add locator to locator list pim_locators.append(locator) # create container pim_container = [pim_header.get_as_section(), pim_global.get_as_section()] for mat_name in pim_materials: pim_container.append(pim_materials[mat_name].get_as_section()) for pim_piece in pim_pieces: pim_container.append(pim_piece.get_as_section()) for part_name in used_parts.get_as_list(): # export all parts even empty ones gathered from PIC and PIP if part_name in pim_parts: pim_container.append(pim_parts[part_name].get_as_section()) else: pim_container.append(Part(part_name).get_as_section()) for locator in pim_locators: pim_container.append(locator.get_as_section()) if is_skin_used: pim_container.append(bones.get_as_section()) pim_container.append(skin.get_as_section()) # write to file ind = " " pim_filepath = os.path.join(dirpath, root_object.name + ".pim" + name_suffix) return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators, used_parts, used_materials, used_bones, used_terrain_points): """Executes export of PIM file for given data. :param dirpath: directory path for PIM file :type dirpath: str :param root_object: Blender SCS Root empty object :type root_object: bpy.types.Object :param armature_object: Blender Aramture object belonging to this SCS game object :type armature_object: bpy.types.Object :param skeleton_filepath: relative file path of PIS file :type skeleton_filepath: str :param mesh_objects: all the meshes which should be exported for current game object :type mesh_objects: list of bpy.types.Object :param model_locators: all Blender empty objecs which represents model locators and should be exported for current game object :type model_locators: list of bpy.types.Object :param used_parts: parts transitional structure for storing used parts inside this PIM export :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans :param used_materials: materials transitional structure for storing used materials inside this PIM export :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans :param used_bones: bones transitional structure for storing used bones inside this PIM export :type used_bones: io_scs_tools.exp.transition_structs.bones.BonesTrans :param used_terrain_points: terrain points transitional structure for storing used terrain points :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans :return: True if export was successfull; False otherwise :rtype: bool """ print("\n************************************") print("** SCS PIM Exporter **") print("** (c)2015 SCS Software **") print("************************************\n") scs_globals = _get_scs_globals() if scs_globals.output_type == "5": format_version = 5 format_type = "" else: format_version = 1 format_type = "def" is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim") pim_header = Header(format_type, format_version, root_object.name) pim_global = Globall(skeleton_filepath) pim_materials = collections.OrderedDict() # dict of Material class instances representing used materials """:type: dict[str, Material]""" pim_pieces = [] # list of Piece class instances representing mesh pieces """:type: list[Piece]""" pim_parts = {} # list of Part class instances representing used parts """:type: dict[str, Part]""" pim_locators = [] # list of Locator class instances representing model locators """:type: list[Locator]""" objects_with_default_material = {} # stores object names which has no material set missing_mappings_data = {} # indicates if material doesn't have set any uv layer for export bones = skin = skin_stream = None if is_skin_used: # create bones data section bones = Bones() for bone in armature_object.data.bones: bones.add_bone(bone.name) used_bones.add(bone.name) # create skin data section skin_stream = SkinStream(SkinStream.Types.POSITION) skin = Skin(skin_stream) # create mesh object data sections for mesh_obj in mesh_objects: vert_groups = mesh_obj.vertex_groups mesh_pieces = collections.OrderedDict() # calculate faces flip state from all ancestors of current object scale_sign = 1 parent = mesh_obj while parent and parent.scs_props.empty_object_type != "SCS_Root": for scale_axis in parent.scale: scale_sign *= scale_axis parent = parent.parent face_flip = scale_sign < 0 # calculate transformation matrix for current object (root object 
transforms are always subtracted!) mesh_transf_mat = root_object.matrix_world.inverted() * mesh_obj.matrix_world # calculate transformation matrices for this object pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) * _scs_to_blend_matrix().inverted()) nor_transf_mat = _scs_to_blend_matrix().inverted() # get initial mesh and vertex groups for it mesh = _object_utils.get_mesh(mesh_obj) _mesh_utils.bm_prepare_mesh_for_export(mesh, mesh_transf_mat, face_flip) mesh.calc_normals_split() missing_uv_layers = {} # stores missing uvs specified by materials of this object missing_vcolor = False # indicates if object is missing vertex color layer missing_vcolor_a = False # indicates if object is missing vertex color alpha layer for poly in mesh.polygons: mat_index = poly.material_index # check material existence and decide what material name and effect has to be used if mat_index >= len(mesh_obj.material_slots) or mesh_obj.material_slots[mat_index].material is None: # no material or invalid index material = None pim_mat_name = "_not_existing_material_" pim_mat_effect = "eut2.dif" objects_with_default_material[mesh_obj.name] = 1 else: material = mesh_obj.material_slots[mat_index].material pim_mat_name = material.name pim_mat_effect = material.scs_props.mat_effect_name # create new pim material if material with that name doesn't yet exists if pim_mat_name not in pim_materials: pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material) pim_materials[pim_mat_name] = pim_material used_materials.add(pim_mat_name, material) # create new piece if piece with this material doesn't exists yet -> split to pieces by material if pim_mat_name not in mesh_pieces: mesh_pieces[pim_mat_name] = Piece(len(pim_pieces) + len(mesh_pieces), pim_materials[pim_mat_name]) nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name() if nmap_uv_layer: # if there is uv layer used for normal maps then calculate tangents on it mesh.calc_tangents(uvmap=nmap_uv_layer) mesh_piece = mesh_pieces[pim_mat_name] """:type: Piece""" piece_vert_indices = [] for loop_i in poly.loop_indices: loop = mesh.loops[loop_i] """:type: bpy.types.MeshLoop""" vert_i = loop.vertex_index # get data of current vertex # 1. position -> mesh.vertices[loop.vertex_index].co position = tuple(pos_transf_mat * mesh.vertices[vert_i].co) # 2. normal -> loop.normal -> calc_normals_split() has to be called before normal = nor_transf_mat * loop.normal normal = tuple(Vector(normal).normalized()) # 3. 
uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv uvs = [] uvs_aliases = [] tex_coord_alias_map = pim_materials[pim_mat_name].get_tex_coord_map() if len(tex_coord_alias_map) < 1: # no textures or none uses uv mapping in current material effect uvs.append((0.0, 0.0)) uvs_aliases.append(["_TEXCOORD0"]) # report missing mappings only on actual materials with textures using uv mappings if material and pim_materials[pim_mat_name].uses_textures_with_uv(): if material.name not in missing_mappings_data: missing_mappings_data[material.name] = {} if mesh_obj.name not in missing_mappings_data[material.name]: missing_mappings_data[material.name][mesh_obj.name] = 1 else: for uv_lay_name in tex_coord_alias_map: if uv_lay_name not in mesh.uv_layers: uvs.append((0.0, 0.0)) # properly report missing uv layers where name of uv layout is key and materials that misses it are values if uv_lay_name not in missing_uv_layers: missing_uv_layers[uv_lay_name] = [] if pim_mat_name not in missing_uv_layers[uv_lay_name]: # add material if not already there missing_uv_layers[uv_lay_name].append(pim_mat_name) else: uv_lay = mesh.uv_layers[uv_lay_name] uvs.append(_change_to_scs_uv_coordinates(uv_lay.data[loop_i].uv)) aliases = [] for alias_index in tex_coord_alias_map[uv_lay_name]: aliases.append("_TEXCOORD" + str(alias_index)) uvs_aliases.append(aliases) # 4. vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color vcol_multi = mesh_obj.data.scs_props.vertex_color_multiplier if _MESH_consts.default_vcol not in mesh.vertex_colors: # get RGB component of RGBA vcol = (1.0,) * 3 missing_vcolor = True else: color = mesh.vertex_colors[_MESH_consts.default_vcol].data[loop_i].color vcol = (color[0] * 2 * vcol_multi, color[1] * 2 * vcol_multi, color[2] * 2 * vcol_multi) if _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix not in mesh.vertex_colors: # get A component of RGBA vcol += (1.0,) missing_vcolor_a = True else: alpha = mesh.vertex_colors[_MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix].data[loop_i].color vcol += ((alpha[0] + alpha[1] + alpha[2]) / 3.0 * 2 * vcol_multi,) # take avg of colors for alpha # 5. 
tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before if pim_materials[pim_mat_name].get_nmap_uv_name(): # calculate tangents only if needed tangent = tuple(nor_transf_mat * loop.tangent) tangent = tuple(Vector(tangent).normalized()) tangent = (tangent[0], tangent[1], tangent[2], loop.bitangent_sign) else: tangent = None # save internal vertex index to array to be able to construct triangle afterwards piece_vert_index = mesh_piece.add_vertex(vert_i, position, normal, uvs, uvs_aliases, vcol, tangent) piece_vert_indices.append(piece_vert_index) if is_skin_used: # get skinning data for vertex and save it to skin stream bone_weights = {} for v_group_entry in mesh.vertices[vert_i].groups: bone_indx = bones.get_bone_index(vert_groups[v_group_entry.group].name) bone_weight = v_group_entry.weight # proceed only if bone exists in our armature if bone_indx != -1: bone_weights[bone_indx] = bone_weight skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights) skin_stream.add_entry(skin_entry) # save to terrain points storage if present in correct vertex group for group in mesh.vertices[vert_i].groups: curr_vg_name = mesh_obj.vertex_groups[group.group].name # if vertex group name doesn't match prescribed one ignore this vertex group if not match(_OP_consts.TerrainPoints.vg_name_regex, curr_vg_name): continue # if node index is not in bounds ignore this vertex group node_index = int(curr_vg_name[-1]) if node_index >= _PL_consts.PREFAB_NODE_COUNT_MAX: continue # if no variants defined add globally (without variant block) if len(root_object.scs_object_variant_inventory) == 0: used_terrain_points.add(-1, node_index, position, normal) continue # finally iterate variant parts entries to find where this part is included # and add terrain points to transitional structure # # NOTE: variant index is donated by direct order of variants in inventory # so export in PIT has to use the same order otherwise variant # indices will be misplaced for variant_i, variant in enumerate(root_object.scs_object_variant_inventory): used_terrain_points.ensure_entry(variant_i, node_index) for variant_part in variant.parts: if variant_part.name == mesh_obj.scs_props.scs_part and variant_part.include: used_terrain_points.add(variant_i, node_index, position, normal) break mesh_piece.add_triangle(tuple(piece_vert_indices[::-1])) # invert indices because of normals flip # free normals calculations _mesh_utils.cleanup_mesh(mesh) # create part if it doesn't exists yet part_name = mesh_obj.scs_props.scs_part if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) mesh_pieces = mesh_pieces.values() for piece in mesh_pieces: # put pieces of current mesh to global list pim_pieces.append(piece) # add pieces of current mesh to part pim_part = pim_parts[part_name] pim_part.add_piece(piece) # report missing data for each object if len(missing_uv_layers) > 0: for uv_lay_name in missing_uv_layers: lprint("W Object '%s' is missing UV layer '%s' specified by materials: %s\n", (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name])) if missing_vcolor: lprint("W Object %r is missing vertex color layer with name %r! Default RGB color will be exported (0.5, 0.5, 0.5)!", (mesh_obj.name, _MESH_consts.default_vcol)) if missing_vcolor_a: lprint("W Object %r is missing vertex color alpha layer with name %r! 
Default alpha will be exported (0.5)", (mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix)) # report missing data for whole model if len(missing_mappings_data) > 0: for material_name in missing_mappings_data: lprint("W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t %s", (material_name, list(missing_mappings_data[material_name].keys()))) if len(objects_with_default_material) > 0: lprint("W Some objects don't use any material. Default material and UV mapping is used on them:\n\t %s", (list(objects_with_default_material.keys()),)) # create locators data sections for loc_obj in model_locators: pos, qua, sca = _get_scs_transformation_components(root_object.matrix_world.inverted() * loc_obj.matrix_world) if sca[0] * sca[1] * sca[2] < 0: lprint("W Model locator %r inside SCS Root Object %r not exported because of invalid scale.\n\t " + "Model locators must have positive scale!", (loc_obj.name, root_object.name)) continue name = _name_utils.tokenize_name(loc_obj.name) hookup_string = loc_obj.scs_props.locator_model_hookup if hookup_string != "" and ":" in hookup_string: hookup = hookup_string.split(':', 1)[1].strip() else: if hookup_string != "": lprint("W The Hookup %r has no expected value!", hookup_string) hookup = None # create locator object for export locator = Locator(len(pim_locators), name, hookup) locator.set_position(pos) locator.set_rotation(qua) locator.set_scale(sca) # create part if it doesn't exists yet part_name = loc_obj.scs_props.scs_part if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) # add locator to part pim_part = pim_parts[part_name] pim_part.add_locator(locator) # add locator to locator list pim_locators.append(locator) # create container pim_container = [pim_header.get_as_section(), pim_global.get_as_section()] for mat_name in pim_materials: pim_container.append(pim_materials[mat_name].get_as_section()) for pim_piece in pim_pieces: pim_container.append(pim_piece.get_as_section()) for part_name in used_parts.get_as_list(): # export all parts even empty ones gathered from PIC and PIP if part_name in pim_parts: pim_container.append(pim_parts[part_name].get_as_section()) else: pim_container.append(Part(part_name).get_as_section()) for locator in pim_locators: pim_container.append(locator.get_as_section()) if is_skin_used: pim_container.append(bones.get_as_section()) pim_container.append(skin.get_as_section()) # write to file ind = " " pim_filepath = os.path.join(dirpath, root_object.name + ".pim") return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators, used_parts, used_materials, used_bones, used_terrain_points): """Executes export of PIM file for given data. :param dirpath: directory path for PIM file :type dirpath: str :param name_suffix: file name suffix :type name_suffix: str :param root_object: Blender SCS Root empty object :type root_object: bpy.types.Object :param armature_object: Blender Aramture object belonging to this SCS game object :type armature_object: bpy.types.Object :param skeleton_filepath: relative file path of PIS file :type skeleton_filepath: str :param mesh_objects: all the meshes which should be exported for current game object :type mesh_objects: list of bpy.types.Object :param model_locators: all Blender empty objecs which represents model locators and should be exported for current game object :type model_locators: list of bpy.types.Object :param used_parts: parts transitional structure for storing used parts inside this PIM export :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans :param used_materials: materials transitional structure for storing used materials inside this PIM export :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans :param used_bones: bones transitional structure for storing used bones inside this PIM export :type used_bones: io_scs_tools.exp.transition_structs.bones.BonesTrans :param used_terrain_points: terrain points transitional structure for storing used terrain points :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans :return: True if export was successfull; False otherwise :rtype: bool """ print("\n************************************") print("** SCS PIM Exporter **") print("** (c)2017 SCS Software **") print("************************************\n") scs_globals = _get_scs_globals() format_version = 5 is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim") pim_header = Header("", format_version, root_object.name) pim_global = Globall(used_parts.count(), skeleton_filepath) pim_materials = collections.OrderedDict( ) # dict of Material class instances representing used materials """:type: dict[str, Material]""" pim_pieces = [] # list of Piece class instances representing mesh pieces """:type: list[Piece]""" pim_parts = {} # list of Part class instances representing used parts """:type: dict[str, Part]""" pim_locators = [ ] # list of Locator class instances representing model locators """:type: list[Locator]""" objects_with_default_material = { } # stores object names which has no material set missing_mappings_data = { } # indicates if material doesn't have set any uv layer for export invalid_objects_for_tangents = set( ) # stores object names which tangents calculation failed because of N-gons existence bones = skin = skin_stream = None if is_skin_used: invalid_bone_names = set( ) # set for saving bones with invalid names, they are used for reporting to user # create bones data section bones = Bones() for bone in armature_object.data.bones: bones.add_bone(bone.name) used_bones.add(bone.name) # do bones name checks if _name_utils.tokenize_name(bone.name) != bone.name: invalid_bone_names.add(bone.name) # create skin data section skin_stream = SkinStream(SkinStream.Types.POSITION) skin = Skin(skin_stream) # report invalid bone names if len(invalid_bone_names) > 0: lprint( "W Invalid bone names detected, max. 
length of valid bone name is 12 and must consists from [a-z, 0-9 and _ ] characters.\n\t " "Conversion will generalize names, however expect problems by re-import! List of invalid bone names for %r:\n\t " "%r", (armature_object.name, list(invalid_bone_names))) # create mesh object data sections for mesh_obj in mesh_objects: lprint("I Preparing mesh object: %r ...", (mesh_obj.name, )) vert_groups = mesh_obj.vertex_groups mesh_pieces = collections.OrderedDict() # calculate faces flip state from all ancestors of current object scale_sign = 1 parent = mesh_obj while parent and parent.scs_props.empty_object_type != "SCS_Root": for scale_axis in parent.scale: scale_sign *= scale_axis parent = parent.parent face_flip = scale_sign < 0 # calculate transformation matrix for current object (root object transforms are always subtracted!) mesh_transf_mat = root_object.matrix_world.inverted( ) * mesh_obj.matrix_world """:type: mathutils.Matrix""" # calculate vertex position transformation matrix for this object pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) * _scs_to_blend_matrix().inverted()) """:type: mathutils.Matrix""" # calculate vertex normals transformation matrix for this object # NOTE: as normals will be read from none export prepared mesh we have to add rotation and scale from mesh transformation matrix _, rot, scale = mesh_transf_mat.decompose() scale_matrix_x = Matrix.Scale(scale.x, 3, Vector((1, 0, 0))).to_4x4() scale_matrix_y = Matrix.Scale(scale.y, 3, Vector((0, 1, 0))).to_4x4() scale_matrix_z = Matrix.Scale(scale.z, 3, Vector((0, 0, 1))).to_4x4() nor_transf_mat = (_scs_to_blend_matrix().inverted() * rot.to_matrix().to_4x4() * scale_matrix_x * scale_matrix_y * scale_matrix_z) """:type: mathutils.Matrix""" tangent_transf_mat = _scs_to_blend_matrix().inverted() """:type: mathutils.Matrix""" # get initial mesh & extra copy of the mesh for normals mesh = _object_utils.get_mesh(mesh_obj) mesh_for_normals = mesh.copy() # prepare meshes faces_mapping = _mesh_utils.bm_prepare_mesh_for_export( mesh, mesh_transf_mat, triangulate=True) mesh_for_normals.calc_normals_split() missing_uv_layers = { } # stores missing uvs specified by materials of this object missing_vcolor = False # indicates if object is missing vertex color layer missing_vcolor_a = False # indicates if object is missing vertex color alpha layer missing_skinned_verts = set( ) # indicates if object is having only partial skin, which is not allowed in our models has_unnormalized_skin = False # indicates if object has vertices which bones weight sum is smaller then one for poly in mesh.polygons: mat_index = poly.material_index # check material existence and decide what material name and effect has to be used if mat_index >= len( mesh_obj.material_slots ) or mesh_obj.material_slots[ mat_index].material is None: # no material or invalid index material = None pim_mat_name = "_default_material_-_default_settings_" pim_mat_effect = "eut2.dif" objects_with_default_material[mesh_obj.name] = 1 else: material = mesh_obj.material_slots[mat_index].material pim_mat_name = material.name pim_mat_effect = material.scs_props.mat_effect_name # create new pim material if material with that name doesn't yet exists if pim_mat_name not in pim_materials: pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material) pim_materials[pim_mat_name] = pim_material used_materials.add(pim_mat_name, material) # create new piece if piece with this material doesn't exists yet -> split to pieces by material if pim_mat_name not in 
mesh_pieces: mesh_pieces[pim_mat_name] = Piece( len(pim_pieces) + len(mesh_pieces), pim_materials[pim_mat_name]) nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name() # if there is uv layer used for normal maps and that uv layer exists on mesh then calculate tangents on it otherwise report warning if nmap_uv_layer: if nmap_uv_layer in mesh.uv_layers: try: mesh.calc_tangents(uvmap=nmap_uv_layer) except RuntimeError: invalid_objects_for_tangents.add(mesh_obj.name) else: lprint( "W Unable to calculate normal map tangents for object %r,\n\t " "as it's missing UV layer with name: %r, expect problems!", (mesh_obj.name, nmap_uv_layer)) mesh_piece = mesh_pieces[pim_mat_name] """:type: Piece""" # get polygon loop indices for normals depending on mapped triangulated face if poly.index in faces_mapping: normals_poly_loop_indices = list(mesh_for_normals.polygons[ faces_mapping[poly.index]].loop_indices) else: normals_poly_loop_indices = list( mesh_for_normals.polygons[poly.index].loop_indices) # vertex data triangle_pvert_indices = [ ] # storing vertex indices for this polygon triangle for loop_i in poly.loop_indices: loop = mesh.loops[loop_i] """:type: bpy.types.MeshLoop""" vert_i = loop.vertex_index # get data of current vertex # 1. position -> mesh.vertices[loop.vertex_index].co position = tuple(pos_transf_mat * mesh.vertices[vert_i].co) # 2. normal -> mesh_for_normals.loops[loop_i].normal -> calc_normals_split() has to be called before normal = (0, 0, 0) for i, normals_poly_loop_i in enumerate( normals_poly_loop_indices): normal_loop = mesh_for_normals.loops[normals_poly_loop_i] # match by vertex index as triangle will for sure have three unique vertices if vert_i == normal_loop.vertex_index: normal = nor_transf_mat * normal_loop.normal normal = tuple(Vector(normal).normalized()) del normals_poly_loop_indices[i] break else: lprint( "E Normals data gathering went wrong, expect corrupted mesh! Shouldn't happen..." ) # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv uvs = [] uvs_aliases = [] tex_coord_alias_map = pim_materials[ pim_mat_name].get_tex_coord_map() if len( tex_coord_alias_map ) < 1: # no textures or none uses uv mapping in current material effect uvs.append((0.0, 0.0)) uvs_aliases.append(["_TEXCOORD0"]) # report missing mappings only on actual materials with textures using uv mappings if material and pim_materials[ pim_mat_name].uses_textures_with_uv(): if material.name not in missing_mappings_data: missing_mappings_data[material.name] = {} if mesh_obj.name not in missing_mappings_data[ material.name]: missing_mappings_data[material.name][ mesh_obj.name] = 1 else: for uv_lay_name in tex_coord_alias_map: if uv_lay_name not in mesh.uv_layers: uvs.append((0.0, 0.0)) # properly report missing uv layers where name of uv layout is key and materials that misses it are values if uv_lay_name not in missing_uv_layers: missing_uv_layers[uv_lay_name] = [] if pim_mat_name not in missing_uv_layers[ uv_lay_name]: # add material if not already there missing_uv_layers[uv_lay_name].append( pim_mat_name) else: uv_lay = mesh.uv_layers[uv_lay_name] uvs.append( _change_to_scs_uv_coordinates( uv_lay.data[loop_i].uv)) aliases = [] for alias_index in tex_coord_alias_map[uv_lay_name]: aliases.append("_TEXCOORD" + str(alias_index)) uvs_aliases.append(aliases) # 4. 
vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color vcol_multi = mesh_obj.data.scs_props.vertex_color_multiplier if _MESH_consts.default_vcol not in mesh.vertex_colors: # get RGB component of RGBA vcol = (1.0, ) * 3 missing_vcolor = True else: color = mesh.vertex_colors[ _MESH_consts.default_vcol].data[loop_i].color vcol = (color[0] * 2 * vcol_multi, color[1] * 2 * vcol_multi, color[2] * 2 * vcol_multi) if _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix not in mesh.vertex_colors: # get A component of RGBA vcol += (1.0, ) missing_vcolor_a = True else: alpha = mesh.vertex_colors[ _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix].data[loop_i].color vcol += ((alpha[0] + alpha[1] + alpha[2]) / 3.0 * 2 * vcol_multi, ) # take avg of colors for alpha # 5. tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before if pim_materials[pim_mat_name].get_nmap_uv_name( ): # calculate tangents only if needed tangent = tuple(tangent_transf_mat * loop.tangent) tangent = tuple(Vector(tangent).normalized()) tangent = (tangent[0], tangent[1], tangent[2], loop.bitangent_sign) else: tangent = None # 6. There we go, vertex data collected! Now create internal vertex index, for triangle and skin stream construction piece_vert_index = mesh_piece.add_vertex( vert_i, position, normal, uvs, uvs_aliases, vcol, tangent) # 7. Add vertex to triangle creation list triangle_pvert_indices.append(piece_vert_index) # 8. Get skinning data for vertex and save it to skin stream if is_skin_used: bone_weights = {} bone_weights_sum = 0 for v_group_entry in mesh.vertices[vert_i].groups: bone_indx = bones.get_bone_index( vert_groups[v_group_entry.group].name) bone_weight = v_group_entry.weight # proceed only if bone exists in our armature if bone_indx != -1: bone_weights[bone_indx] = bone_weight bone_weights_sum += bone_weight skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights, bone_weights_sum) skin_stream.add_entry(skin_entry) # report un-skinned vertices (no bones or zero sum weight) or badly skinned model if bone_weights_sum <= 0: missing_skinned_verts.add(vert_i) elif bone_weights_sum < 1: has_unnormalized_skin = True # Addition - Terrain Points: save vertex to terrain points storage, if present in correct vertex group for group in mesh.vertices[vert_i].groups: # if current object doesn't have vertex group found in mesh data, then ignore that group # This can happen if multiple objects are using same mesh and # some of them have vertex groups, but others not. 
if group.group >= len(mesh_obj.vertex_groups): continue curr_vg_name = mesh_obj.vertex_groups[group.group].name # if vertex group name doesn't match prescribed one ignore this vertex group if not match(_OP_consts.TerrainPoints.vg_name_regex, curr_vg_name): continue # if node index is not in bounds ignore this vertex group node_index = int(curr_vg_name[-1]) if node_index >= _PL_consts.PREFAB_NODE_COUNT_MAX: continue # if no variants defined add globally (without variant block) if len(root_object.scs_object_variant_inventory) == 0: used_terrain_points.add(-1, node_index, position, normal) continue # finally iterate variant parts entries to find where this part is included # and add terrain points to transitional structure # # NOTE: variant index is dictated by the direct order of variants in inventory # so export in PIT has to use the same order otherwise variant # indices will be misplaced for variant_i, variant in enumerate( root_object.scs_object_variant_inventory): used_terrain_points.ensure_entry(variant_i, node_index) for variant_part in variant.parts: if variant_part.name == mesh_obj.scs_props.scs_part and variant_part.include: used_terrain_points.add( variant_i, node_index, position, normal) break # triangles if face_flip: mesh_piece.add_triangle(tuple(triangle_pvert_indices)) else: mesh_piece.add_triangle( tuple(triangle_pvert_indices[::-1] )) # yes it's weird, but it simply works the other way around # free normals calculations _mesh_utils.cleanup_mesh(mesh) _mesh_utils.cleanup_mesh(mesh_for_normals) # create part if it doesn't exist yet part_name = used_parts.ensure_part(mesh_obj) if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) mesh_pieces = mesh_pieces.values() for piece in mesh_pieces: # now that pieces are created we can check them for flaws if piece.get_vertex_count() > 65536: lprint( "E Object %r has exceeded maximum vertex count (65536), expect errors during conversion!", (mesh_obj.name, )) # put pieces of current mesh to global list pim_pieces.append(piece) # add pieces of current mesh to part pim_part = pim_parts[part_name] pim_part.add_piece(piece) # report missing data for each object if len(missing_uv_layers) > 0: for uv_lay_name in missing_uv_layers: lprint( "W Object %r is missing UV layer %r specified by materials: %r", (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name])) if missing_vcolor: lprint( "W Object %r is missing vertex color layer with name %r! Default RGB color will be exported (0.5, 0.5, 0.5)!", (mesh_obj.name, _MESH_consts.default_vcol)) if missing_vcolor_a: lprint( "W Object %r is missing vertex color alpha layer with name %r! Default alpha will be exported (0.5)!", (mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix)) if len(missing_skinned_verts) > 0: lprint( "E Object %r from SCS Root %r has %s vertices which are not skinned to any bone, expect errors during conversion!", (mesh_obj.name, root_object.name, len(missing_skinned_verts))) if has_unnormalized_skin: lprint( "W Object %r from SCS Root %r has unnormalized skinning, exporting normalized weights!\n\t " "You can normalize weights by selecting object & executing 'Normalize All Vertex Groups'.", (mesh_obj.name, root_object.name)) # report missing data for whole model if len(missing_mappings_data) > 0: for material_name in missing_mappings_data: lprint( "W Material '%s' is missing mapping data! 
Objects using it are exported with default UV:\n\t %s", (material_name, list(missing_mappings_data[material_name].keys()))) if len(objects_with_default_material) > 0: lprint( "W Some objects don't use any material. Default material and UV mapping is used on them:\n\t %s", (list(objects_with_default_material.keys()), )) if len(invalid_objects_for_tangents) > 0: lprint( "E N-gons present in some objects, thus normal map tangent calculation failed.\n\t " "Visualization in game will be distorted for this objects:\n\t %s", (list(invalid_objects_for_tangents), )) # create locators data sections for loc_obj in model_locators: pos, qua, sca = _get_scs_transformation_components( root_object.matrix_world.inverted() * loc_obj.matrix_world) if sca[0] * sca[1] * sca[2] < 0: lprint( "W Model locator %r inside SCS Root Object %r not exported because of invalid scale.\n\t " + "Model locators must have positive scale!", (loc_obj.name, root_object.name)) continue name = _name_utils.tokenize_name(loc_obj.name) hookup_string = loc_obj.scs_props.locator_model_hookup hookup_id = None if hookup_string != "": hookup_id = _hookup_name_to_hookup_id(hookup_string) if hookup_id is None: lprint("W Model locator %r has unexpected hookup value %r.", (loc_obj.name, loc_obj.scs_props.locator_model_hookup)) # create locator object for export locator = Locator(len(pim_locators), name, hookup_id) locator.set_position(pos) locator.set_rotation(qua) locator.set_scale(sca) # create part if it doesn't exists yet part_name = used_parts.ensure_part(loc_obj) if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) # add locator to part pim_part = pim_parts[part_name] pim_part.add_locator(locator) # add locator to locator list pim_locators.append(locator) # create container pim_container = [pim_header.get_as_section(), pim_global.get_as_section()] for mat_name in pim_materials: pim_container.append(pim_materials[mat_name].get_as_section()) for pim_piece in pim_pieces: pim_container.append(pim_piece.get_as_section()) for part_name in used_parts.get_as_list(): # export all parts even empty ones used only in PIC and/or PIP if part_name in pim_parts: pim_container.append(pim_parts[part_name].get_as_section()) else: pim_container.append(Part(part_name).get_as_section()) for locator in pim_locators: pim_container.append(locator.get_as_section()) if is_skin_used: pim_container.append(bones.get_as_section()) pim_container.append(skin.get_as_section()) # write to file ind = " " pim_filepath = os.path.join(dirpath, root_object.name + ".pim" + name_suffix) return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
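# Minimal illustration of the vertex color encoding used in the loop above: the default color layer is
# doubled and scaled by the per-mesh multiplier, while alpha is read from a separate "<default>_a" layer
# as the average of its RGB channels. The helper name and its standalone form are illustrative only and
# not part of the exporter.
def _example_scs_vcol(rgb, alpha_rgb, multiplier=1.0):
    """Return the RGBA tuple that would be exported for one loop, mirroring the math above."""
    r, g, b = (c * 2 * multiplier for c in rgb)
    a = sum(alpha_rgb) / 3.0 * 2 * multiplier
    return r, g, b, a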
def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_points): """Exports PIP file from given locator list. :param prefab_locator_list: :type prefab_locator_list: list of bpy.types.Object :param dirpath: directory export path :type dirpath: str :param filename: name of PIP file :type filename: str :param offset_matrix: offset matrix for locators :type offset_matrix: mathutils.Matrix :param used_terrain_points: terrain points transitional structure for accessing terrain points stored during PIM export :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans :return: True if successfull; otherwise False :rtype: bool """ # CLEANUP CONNECTIONS DATA _connections_group_wrapper.cleanup_on_export() print("\n************************************") print("** SCS PIP Exporter **") print("** (c)2015 SCS Software **") print("************************************\n") (control_node_locs, nav_point_locs, sign_locs, spawn_point_locs, semaphore_locs, map_point_locs, trigger_point_locs) = __sort_locators_by_type__(prefab_locator_list) pip_header = Header(2, filename) pip_global = Globall() pip_nodes = OrderedDict() """:type: dict[int,Node]""" pip_curves = OrderedDict() """:type: dict[int, Curve]""" pip_signs = [] """:type: list[Sign]""" pip_spawn_points = [] """:type: list[SpawnPoint]""" pip_semaphores = [] """:type: list[Semaphore]""" pip_map_points = OrderedDict() """:type: dict[str, MapPoint]""" pip_trigger_points = OrderedDict() """:type: dict[str, TriggerPoint]""" pip_intersections = [OrderedDict(), OrderedDict(), OrderedDict()] """:type: list[dict[str, list[Intersection]]]""" # nodes creation for locator in control_node_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" curr_node_i = int(locator_scs_props.locator_prefab_con_node_index) if curr_node_i not in pip_nodes: pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) rot = Quaternion(rot) * Vector((0, 0, -1)) # create node with position and direction cn = Node(curr_node_i, pos, rot) # add terrain points terrain_points = used_terrain_points.get(curr_node_i) for variant_i in terrain_points: # ensure variant entry for no terrain points case cn.ensure_variant(variant_i) for tp_entry in terrain_points[variant_i]: cn.add_terrain_point(tp_entry.position, tp_entry.normal, variant_i) pip_nodes[curr_node_i] = cn else: lprint("W Multiple Control Nodes with same index detected, only one per index will be exported!\n\t " "Check Control Nodes in SCS Game Object with Root: %r", (filename,)) # curves creation curves_dict = _connections_group_wrapper.get_curves(nav_point_locs.values()) for key, curve_entry in curves_dict.items(): loc0 = nav_point_locs[curves_dict[key].start] loc0_scs_props = loc0.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" loc1 = nav_point_locs[curves_dict[key].end] loc1_scs_props = loc1.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create curve and set properties curve = __get_curve__(pip_curves, curve_entry.index, loc0.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * loc0.matrix_world) curve.set_start(pos, rot) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * loc1.matrix_world) curve.set_end(pos, rot) curve.set_input_boundaries(loc0_scs_props) curve.set_output_boundaries(loc1_scs_props) curve.set_flags(loc0.scs_props, True) curve.set_flags(loc1.scs_props, False) 
curve.set_semaphore_id(int(loc0_scs_props.locator_prefab_np_traffic_semaphore)) curve.set_traffic_rule(loc1_scs_props.locator_prefab_np_traffic_rule) # set next/prev curves for next_key in curve_entry.next_curves: next_curve = __get_curve__(pip_curves, curves_dict[next_key].index, curves_dict[next_key].start) assert curve.add_next_curve(next_curve) for prev_key in curve_entry.prev_curves: prev_curve = __get_curve__(pip_curves, curves_dict[prev_key].index, curves_dict[prev_key].start) assert curve.add_prev_curve(prev_curve) # sync nodes input lanes boundary_node_i = curve.get_input_node_index() if 0 <= boundary_node_i < _PL_consts.PREFAB_NODE_COUNT_MAX: if boundary_node_i in pip_nodes: assert pip_nodes[boundary_node_i].set_input_lane(curve.get_input_lane_index(), curve.get_index()) else: lprint("E None existing Boundary Node with index: %s used in Navigation Point: %r", (boundary_node_i, loc0.name,)) # sync nodes output lanes boundary_node_i = curve.get_output_node_index() if 0 <= boundary_node_i < _PL_consts.PREFAB_NODE_COUNT_MAX: if boundary_node_i in pip_nodes: assert pip_nodes[boundary_node_i].set_output_lane(curve.get_output_lane_index(), curve.get_index()) else: lprint("E None existing Boundary Node with index: %s used in Navigation Point: %r", (boundary_node_i, loc1.name,)) Curve.prepare_curves(pip_curves.values()) # signs creation for locator in sign_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create sign and set properties sign = Sign(locator.name, locator_scs_props.scs_part) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) sign.set_position(pos) sign.set_rotation(rot) if ":" in locator_scs_props.locator_prefab_sign_model: sign.set_model(locator_scs_props.locator_prefab_sign_model.split(":")[1].strip()) else: lprint("W Invalid Sign Model: %r on locator: %r", (locator_scs_props.locator_prefab_sign_model, locator.name)) pip_signs.append(sign) # spawn points creation for locator in spawn_point_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create spawn point and set properties spawn_point = SpawnPoint(locator.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) spawn_point.set_position(pos) spawn_point.set_rotation(rot) spawn_point.set_type(int(locator_scs_props.locator_prefab_spawn_type)) pip_spawn_points.append(spawn_point) # semaphores creation for locator in semaphore_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create semaphore and set properties semaphore = Semaphore(int(locator_scs_props.locator_prefab_tsem_type)) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) semaphore.set_position(pos) semaphore.set_rotation(rot) semaphore.set_semaphore_id(int(locator_scs_props.locator_prefab_tsem_id)) if ":" in locator_scs_props.locator_prefab_tsem_profile: semaphore.set_profile(locator_scs_props.locator_prefab_tsem_profile.split(":")[1].strip()) else: lprint("W Invalid Profile: %r on Traffic Semaphore locator: %r", (locator_scs_props.locator_prefab_tsem_profile, locator.name)) semaphore.set_intervals((locator_scs_props.locator_prefab_tsem_gs, locator_scs_props.locator_prefab_tsem_os1, locator_scs_props.locator_prefab_tsem_rs, locator_scs_props.locator_prefab_tsem_os2)) 
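# the four interval values map one to one onto the locator properties tsem_gs, tsem_os1, tsem_rs and tsem_os2 (presumably the green, orange, red and closing orange phases of the semaphore cycle)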
semaphore.set_cycle(locator_scs_props.locator_prefab_tsem_cyc_delay) pip_semaphores.append(semaphore) # map points creation for locator in map_point_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create map point and set properties map_point = __get_map_point__(pip_map_points, locator.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) map_point.set_position(pos) map_point.set_flags(locator_scs_props) for neighbour_name in _connections_group_wrapper.get_neighbours(locator): assert map_point.add_neighbour(__get_map_point__(pip_map_points, neighbour_name)) MapPoint.test_map_points(pip_map_points.values()) MapPoint.auto_generate_map_points(pip_map_points, pip_nodes) # trigger points creation for locator in trigger_point_locs.values(): locator_scs_props = locator.scs_props """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create trigger point and set properties trigger_point = __get_trigger_point__(pip_trigger_points, locator.name) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) trigger_point.set_position(pos) if ":" in locator_scs_props.locator_prefab_tp_action: trigger_point.set_action(locator_scs_props.locator_prefab_tp_action.split(":")[1].strip()) else: lprint("W Invalid Action: %r on Trigger Point locator: %r", (locator_scs_props.locator_prefab_tp_action, locator.name)) trigger_point.set_trigger_range(locator_scs_props.locator_prefab_tp_range) trigger_point.set_reset_delay(locator_scs_props.locator_prefab_tp_reset_delay) trigger_point.set_flags(locator_scs_props) for neighbour_name in _connections_group_wrapper.get_neighbours(locator): assert trigger_point.add_neighbour(__get_trigger_point__(pip_trigger_points, neighbour_name)) TriggerPoint.prepare_trigger_points(pip_trigger_points.values()) # intersections creation for c0_i, c0 in enumerate(sorted(pip_curves.values())): for c1_i, c1 in enumerate(sorted(pip_curves.values())): if c1_i <= c0_i: # only search each pair of curves once continue # get the intersection point and curves coefficient positions intersect_p, c0_pos, c1_pos = Intersection.get_intersection(c0, c1) if intersect_p: intersect_p_str = str(intersect_p) # Format: '<Vector (0.0000, 0.0000, 0.0000)>' is_start = c0_pos == 0 and c0_pos == c1_pos is_end = c1_pos == 1 and c0_pos == c1_pos if is_start: inter_type = 0 # fork elif is_end: inter_type = 1 # joint else: inter_type = 2 # cross # if there is indication of cross intersection filter out intersections with common fork and joint # NOTE: this condition might not be sufficient, so if anyone will have problems, # this is the point that has to be improved if Intersection.have_common_fork(c0, c1) or Intersection.have_common_joint(c0, c1): continue # calculate radius for the same directions on curves forward_radius = Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, 1, 1) backward_radius = Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, -1, -1) final_radius = max(forward_radius, backward_radius) # special calculations only for cross intersections if inter_type == 2: # calculate radius also for opposite directions final_radius = max(final_radius, Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, 1, -1)) final_radius = max(final_radius, Intersection.get_intersection_radius(c0, c1, c0_pos, c1_pos, -1, 1)) # calculate position of intersection point on curves with better precision c0_pos = 
c0.get_closest_point(intersect_p) c1_pos = c1.get_closest_point(intersect_p) lprint("D Found cross intersection point: %r", (intersect_p,)) # creating intersection class instances intersection = Intersection(c0.get_index(), c0.get_ui_name(), c0_pos * c0.get_length()) intersection1 = Intersection(c1.get_index(), c1.get_ui_name(), c1_pos * c1.get_length()) # init list of intersections for current intersecting point if intersect_p_str not in pip_intersections[inter_type]: pip_intersections[inter_type][intersect_p_str] = [] # append intersections to list and calculate new siblings new_siblings = 2 if intersection not in pip_intersections[inter_type][intersect_p_str]: pip_intersections[inter_type][intersect_p_str].append(intersection) else: del intersection new_siblings -= 1 if intersection1 not in pip_intersections[inter_type][intersect_p_str]: pip_intersections[inter_type][intersect_p_str].append(intersection1) else: del intersection1 new_siblings -= 1 # always set flags on first entry in current intersection point list # this way siblings count is getting updated properly pip_intersections[inter_type][intersect_p_str][0].set_flags(is_start, is_end, new_siblings) # update radius on all of intersection in the same intersecting point for inter in pip_intersections[inter_type][intersect_p_str]: inter.set_radius(pip_intersections[inter_type][intersect_p_str][0].get_radius()) inter.set_radius(final_radius) # create container pip_container = [pip_header.get_as_section(), pip_global.get_as_section()] for node in pip_nodes.values(): pip_container.append(node.get_as_section()) for curve_key in sorted(pip_curves): pip_container.append(pip_curves[curve_key].get_as_section()) for sign in pip_signs: pip_container.append(sign.get_as_section()) for spawn_point in pip_spawn_points: pip_container.append(spawn_point.get_as_section()) for semaphore in pip_semaphores: pip_container.append(semaphore.get_as_section()) for map_point in pip_map_points.values(): pip_container.append(map_point.get_as_section()) for trigger_point in pip_trigger_points.values(): pip_container.append(trigger_point.get_as_section()) for inter_type in range(3): for intersect_p_str in pip_intersections[inter_type]: for intersection in pip_intersections[inter_type][intersect_p_str]: pip_container.append(intersection.get_as_section()) # write to file ind = " " pip_filepath = path.join(dirpath, str(filename + ".pip")) result = _pix_container.write_data_to_file(pip_container, pip_filepath, ind) return result
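# Minimal restatement of the intersection classification used above; the helper name is illustrative only
# and not part of the exporter. Curve positions are the coefficients returned by Intersection.get_intersection(),
# where 0 lies at the curve start and 1 at the curve end.
def _example_intersection_type(c0_pos, c1_pos):
    if c0_pos == 0 and c0_pos == c1_pos:
        return 0  # fork: both curves leave the same point
    if c1_pos == 1 and c0_pos == c1_pos:
        return 1  # joint: both curves arrive at the same point
    return 2  # cross: curves meet somewhere along their length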
def execute(dirpath, root_object, mesh_objects, model_locators, used_parts, used_materials): """Executes export of PIM file for given data. :param dirpath: directory path for PIM file :type dirpath: str :param root_object: Blender SCS Root empty object :type root_object: bpy.types.Object :param mesh_objects: all the meshes which should be exported for current game object :type mesh_objects: list of bpy.types.Object :param model_locators: all Blender empty objects which represent model locators and should be exported for current game object :type model_locators: list of bpy.types.Object :param used_parts: dictionary for storing names of parts used inside this PIM export :type used_parts: dict :param used_materials: list for storing names of materials used inside this PIM export :type used_materials: list :return: True if export was successful; False otherwise :rtype: bool """ print("\n************************************") print("**        SCS PIM Exporter        **") print("**      (c)2015 SCS Software      **") print("************************************\n") scs_globals = _get_scs_globals() if scs_globals.output_type == "5": format_version = 5 format_type = "" else: format_version = 1 format_type = "def" pim_header = Header(format_type, format_version, root_object.name) pim_global = Globall(root_object.name + ".pis") pim_materials = collections.OrderedDict( ) # dict of Material class instances representing used materials """:type: dict of Material""" pim_pieces = [] # list of Piece class instances representing mesh pieces """:type: list of Piece""" pim_parts = collections.OrderedDict( ) # dict of Part class instances representing used parts """:type: dict of Part""" pim_locators = [ ] # list of Locator class instances representing model locators """:type: list of Locator""" objects_with_default_material = { } # stores object names which have no material set missing_mappings_data = { } # indicates if material doesn't have any uv layer set for export # create mesh object data sections for mesh_obj in mesh_objects: mesh_pieces = collections.OrderedDict() # get initial mesh mesh = _object_utils.get_mesh(mesh_obj) _mesh_utils.bm_triangulate(mesh) mesh.calc_normals_split() # calculate transformation matrices for this object pos_transf_mat = (Matrix.Scale(scs_globals.export_scale, 4) * _scs_to_blend_matrix().inverted() * root_object.matrix_world.inverted() * mesh_obj.matrix_world) nor_transf_mat = ( _scs_to_blend_matrix().inverted() * root_object.matrix_world.
inverted().to_quaternion().to_matrix().to_4x4() * mesh_obj.matrix_world.to_quaternion().to_matrix().to_4x4()) missing_uv_layers = { } # stores missing uvs specified by materials of this object missing_vcolor = False # indicates if object is missing vertex colors for poly in mesh.polygons: mat_index = poly.material_index # check material existance and decide what material name and effect has to be used if mat_index >= len( mesh_obj.material_slots ) or mesh_obj.material_slots[ mat_index].material is None: # no material or invalid index material = None pim_mat_name = "_not_existing_material_" pim_mat_effect = "eut2.dif" objects_with_default_material[mesh_obj.name] = 1 else: material = mesh_obj.material_slots[mat_index].material pim_mat_name = material.name pim_mat_effect = material.scs_props.mat_effect_name # create new pim material if material with that name doesn't yet exists if pim_mat_name not in pim_materials: pim_material = Material(len(pim_materials), pim_mat_name, pim_mat_effect, material) pim_materials[pim_mat_name] = pim_material used_materials.append(pim_mat_name) # create new piece if piece with this material doesn't exists yet -> split to pieces by material if pim_mat_name not in mesh_pieces: mesh_pieces[pim_mat_name] = Piece( len(pim_pieces) + len(mesh_pieces), pim_materials[pim_mat_name]) nmap_uv_layer = pim_materials[pim_mat_name].get_nmap_uv_name() if nmap_uv_layer: # if there is uv layer used for normal maps then calculate tangents on it mesh.calc_tangents(uvmap=nmap_uv_layer) mesh_piece = mesh_pieces[pim_mat_name] """:type: Piece""" piece_vert_indices = [] for loop_i in poly.loop_indices: loop = mesh.loops[loop_i] """:type: bpy.types.MeshLoop""" vert_i = loop.vertex_index # get data of current vertex # 1. position -> mesh.vertices[loop.vertex_index].co position = tuple(pos_transf_mat * mesh.vertices[vert_i].co) # 2. normal -> loop.normal -> calc_normals_split() has to be called before normal = nor_transf_mat * loop.normal normal = tuple(Vector(normal).normalized()) # 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv uvs = [] uvs_aliases = [] tex_coord_alias_map = pim_materials[ pim_mat_name].get_tex_coord_map() if len(tex_coord_alias_map ) < 1: # no textures for current material effect uvs.append((0.0, 0.0)) uvs_aliases.append(["_TEXCOORD0"]) # report missing mappings only on actual materials with texture entries if material and pim_materials[pim_mat_name].uses_textures( ): if material.name not in missing_mappings_data: missing_mappings_data[material.name] = {} if mesh_obj.name not in missing_mappings_data[ material.name]: missing_mappings_data[material.name][ mesh_obj.name] = 1 else: for uv_lay_name in tex_coord_alias_map: if uv_lay_name not in mesh.uv_layers: uvs.append((0.0, 0.0)) # properly report missing uv layers where name of uv layout is key and materials that misses it are values if uv_lay_name not in missing_uv_layers: missing_uv_layers[uv_lay_name] = [] if pim_mat_name not in missing_uv_layers[ uv_lay_name]: # add material if not already there missing_uv_layers[uv_lay_name].append( pim_mat_name) else: uv_lay = mesh.uv_layers[uv_lay_name] uvs.append( _change_to_scs_uv_coordinates( uv_lay.data[loop_i].uv)) aliases = [] for alias_index in tex_coord_alias_map[uv_lay_name]: aliases.append("_TEXCOORD" + str(alias_index)) uvs_aliases.append(aliases) # 4. 
vcol -> vcol_lay = mesh.vertex_colors[0].data; vcol_lay[loop_i].color if len(mesh.vertex_colors) < 1: vcol = (1.0, 1.0, 1.0, 1.0) missing_vcolor = True else: multiplier = mesh_obj.data.scs_props.vertex_color_multiplier color = mesh.vertex_colors[0].data[loop_i].color vcol = (color[0] * multiplier, color[1] * multiplier, color[2] * multiplier, 1.0) # 5. tangent -> loop.tangent; loop.bitangent_sign -> calc_tangents() has to be called before if pim_materials[pim_mat_name].get_nmap_uv_name( ): # calculate tangents only if needed tangent = tuple(nor_transf_mat * loop.tangent) tangent = tuple(Vector(tangent).normalized()) tangent = (tangent[0], tangent[1], tangent[2], loop.bitangent_sign) else: tangent = None # save internal vertex index to array to be able to construct triangle afterwards piece_vert_index = mesh_piece.add_vertex( vert_i, position, normal, uvs, uvs_aliases, vcol, tangent) piece_vert_indices.append(piece_vert_index) mesh_piece.add_triangle(tuple(piece_vert_indices[::-1]) ) # invert indices because of normals flip # create part if it doesn't exist yet part_name = mesh_obj.scs_props.scs_part if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) used_parts[part_name] = 1 mesh_pieces = mesh_pieces.values() for piece in mesh_pieces: # put pieces of current mesh to global list pim_pieces.append(piece) # add pieces of current mesh to part pim_part = pim_parts[part_name] pim_part.add_piece(piece) # report missing data for each object if len(missing_uv_layers) > 0: for uv_lay_name in missing_uv_layers: lprint( "W Object '%s' is missing UV layer '%s' specified by materials: %s\n", (mesh_obj.name, uv_lay_name, missing_uv_layers[uv_lay_name])) if missing_vcolor: lprint( "W Object '%s' is missing vertex color layer! Default color will be exported (1, 1, 1, 1)!", (mesh_obj.name, )) # report missing data for whole model if len(missing_mappings_data) > 0: for material_name in missing_mappings_data: lprint( "W Material '%s' is missing mapping data! Objects using it are exported with default UV:\n\t %s", (material_name, list(missing_mappings_data[material_name].keys()))) if len(objects_with_default_material) > 0: lprint( "W Some objects don't use any material. 
Default material and UV mapping is used on them:\n\t %s", (list(objects_with_default_material.keys()), )) # create locators data sections for loc_obj in model_locators: name = _name_utils.tokenize_name(loc_obj.name) hookup_string = loc_obj.scs_props.locator_model_hookup if hookup_string != "" and ":" in hookup_string: hookup = hookup_string.split(':', 1)[1].strip() else: if hookup_string != "": lprint("W The Hookup %r has an unexpected value!", (hookup_string,)) hookup = None pos, qua, sca = _get_scs_transformation_components( loc_obj.matrix_world) # create locator object for export locator = Locator(len(pim_locators), name, hookup) locator.set_position(pos) locator.set_rotation(qua) locator.set_scale(sca) # create part if it doesn't exist yet part_name = loc_obj.scs_props.scs_part if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) used_parts[part_name] = 1 # add locator to part pim_part = pim_parts[part_name] pim_part.add_locator(locator) # add locator to locator list pim_locators.append(locator) # create container pim_container = [pim_header.get_as_section(), pim_global.get_as_section()] for mat_name in pim_materials: pim_container.append(pim_materials[mat_name].get_as_section()) for pim_piece in pim_pieces: pim_container.append(pim_piece.get_as_section()) for part_name in used_parts: pim_container.append(pim_parts[part_name].get_as_section()) for locator in pim_locators: pim_container.append(locator.get_as_section()) # write to file ind = " " pim_filepath = dirpath + os.sep + root_object.name + ".pim" return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
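# Minimal illustration of the hookup parsing convention used above; the helper name is illustrative only
# and not part of the exporter. The hookup property presumably stores "<display name> : <hookup id>" and
# only the part after the first colon ends up in the PIM locator.
def _example_extract_hookup(hookup_string):
    if hookup_string and ":" in hookup_string:
        return hookup_string.split(":", 1)[1].strip()
    return None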