Exemplo n.º 1
0
def get_data_from_file(filepath):
    """Returns entire data in data container from specified raw material file.

    :rtype: MatContainer | None
    """

    # guard: no path means nothing to read
    if not filepath:
        lprint('\nI No MAT file path provided!')
        return None

    # guard: path must point to an existing ".mat" file
    if not (os.path.isfile(filepath) and filepath.lower().endswith(".mat")):
        lprint('\nW Invalid MAT file path %r!', (_path_utils.readable_norm(filepath),))
        return None

    data_dict, effect = _mat.read_data(filepath)

    # nothing parsed -> report empty file and bail out
    if not data_dict or len(data_dict) < 1:
        lprint('\nI MAT file "%s" is empty!', (_path_utils.readable_norm(filepath),))
        return None

    return MatContainer(data_dict, effect)
Exemplo n.º 2
0
def get_data_from_file(filepath, is_sui=False):
    """Returns entire data in data container from specified SII definition file.

    :param filepath: absolute file path where SII should be read from
    :type filepath: str
    :param is_sui: True if file should be read as SUI, in that case only one unit will be returned
    :type is_sui: bool
    :return: list of SII Units if parsing succeded; otherwise None
    :rtype: list[io_scs_tools.internals.structure.UnitData] | None
    """

    # guard: nothing to do without a path
    if not filepath:
        lprint('I No SII file path provided!')
        return None

    # guard: path has to point to an existing file
    if not os.path.isfile(filepath):
        lprint('W Invalid SII file path %r!',
               (_path_utils.readable_norm(filepath), ))
        return None

    container = _sii_reader.parse_file(filepath, is_sui=is_sui)

    # failed or empty parse -> report and bail out
    if not container or len(container) < 1:
        lprint('D SII file "%s" is empty!',
               (_path_utils.readable_norm(filepath), ))
        return None

    return container
Exemplo n.º 3
0
def load(locator):
    """Makes a preview model for a locator and link it to it
    NOTE: locator preview model path must be set

    :param locator: locator object to which preview model should be set
    :type locator: bpy.types.Object
    :return: True if preview model was set; False otherwise
    :rtype: bool
    """

    load_model = True
    abs_filepath = ""
    if not locator.scs_props.locator_show_preview_model:
        # preview models are switched off for this locator
        load_model = False
    else:
        filepath = locator.scs_props.locator_preview_model_path
        if filepath:
            if filepath.lower().endswith(".pim"):
                abs_filepath = _path_utils.get_abs_path(filepath,
                                                        skip_mod_check=True)
                if not os.path.isfile(abs_filepath):
                    lprint(
                        "W Locator %r has invalid path to Preview Model PIM file: %r",
                        (locator.name,
                         _path_utils.readable_norm(abs_filepath)))
                    load_model = False
            else:
                lprint(
                    "W Locator %r has invalid path to Preview Model PIM file: %r",
                    (locator.name, _path_utils.readable_norm(filepath)))
                load_model = False
        else:
            load_model = False

    if load_model:

        unload(locator)

        prem_name = str("prem_" + locator.name)
        obj = _get_model_mesh(locator, prem_name)

        if not obj:
            from io_scs_tools.imp import pim as _pim_import

            obj = _pim_import.load_pim_file(bpy.context,
                                            abs_filepath,
                                            preview_model=True)

            # guard against PIM files without any mesh: load_pim_file returns
            # None in that case and the attribute assignments below would raise
            # AttributeError. Reset the path so possible callbacks trying to fix
            # a missing preview model don't retry the same broken file forever.
            if not obj:
                lprint("E Selected PIM model doesn't have any mesh inside, "
                       "so it can not be used as a preview model.")
                locator.scs_props.locator_preview_model_path = ""
                return False

            obj.name = prem_name
            obj.data.name = prem_name
            obj.data.scs_props.locator_preview_model_path = locator.scs_props.locator_preview_model_path
            obj.select = False

        link(locator, obj)

        return True
    else:
        return False
Exemplo n.º 4
0
def get_data_from_file(filepath, ind, print_info=False):
    """Returns entire data in data container from specified PIX file.

    :param filepath: File path to be read
    :type filepath: str
    :param ind: Indentation which is expected in the file
    :type ind: str
    :param print_info: Whether to print the debug printouts
    :type print_info: bool
    :return: PIX Section Object Data
    :rtype: list of SectionData
    """

    # nothing can be read from a 'None' path
    if filepath is None:
        lprint("D Aborting PIX file read, 'None' file!")
        return None

    # parser state is not used here, only the container matters
    pix_container, _ = _pix_parser.read_data(filepath, ind, print_info)

    if len(pix_container) < 1:
        lprint('\nE File "%s" is empty!', (_path_utils.readable_norm(filepath),))
        return None

    return pix_container
Exemplo n.º 5
0
def write_data_to_file(filepath, container, is_sui=False, create_dirs=False):
    """Write given unit data container into SII file.

    :param filepath: absolute file path where SII should be written to
    :type filepath: str
    :param container: iterable of unit data objects to be written
    :type container: tuple[io_scs_tools.internals.structure.UnitData]|list[io_scs_tools.internals.structure.UnitData]
    :param is_sui: True if unit should be written as SUI, meaning without SiiNunit header
    :type is_sui: bool
    :param create_dirs: True if directories should be created before export
    :type create_dirs: bool
    :return: True if container was successfully written; otherwise False
    :rtype: bool
    """

    file_type = "SUI" if is_sui else "SII"

    # guard: no destination path -> nothing to write
    if not filepath:
        lprint('I No %s file path provided!', (file_type, ))
        return False

    # guard: empty container -> abort before touching the file system
    if not container:
        lprint("W Empty %s container, abort file write: %r!", (
            file_type,
            _path_utils.readable_norm(filepath),
        ))
        return False

    return _sii_writer.write_data(filepath,
                                  container,
                                  is_sui=is_sui,
                                  create_dirs=create_dirs)
Exemplo n.º 6
0
def get_data_from_file(filepath, ind, print_info=False):
    """Returns entire data in data container from specified PIX file.

    :param filepath: File path to be read
    :type filepath: str
    :param ind: Indentation which is expected in the file
    :type ind: str
    :param print_info: Whether to print the debug printouts
    :type print_info: bool
    :return: PIX Section Object Data
    :rtype: list of SectionData
    """

    if filepath is None:
        lprint("D Aborting PIX file read, 'None' file!")
        return None

    # second item is the parser state, which this function doesn't need
    parsed_container, _state = _pix_parser.read_data(filepath, ind, print_info)

    # positive path first: a non-empty container is the result
    if len(parsed_container) >= 1:
        return parsed_container

    lprint('\nE File "%s" is empty!',
           (_path_utils.readable_norm(filepath), ))
    return None
Exemplo n.º 7
0
def write_data_to_file(container, filepath, ind, print_info=False):
    """Exports given container in given filepath.

    :param container:
    :type container:
    :param filepath: path to file where container should be exported
    :type filepath: str
    :param ind: intendention for printout
    :type ind: str
    :param print_info: should infos be printed
    :type print_info: bool
    :return: True if export was successfull, otherwise False
    :rtype: bool
    """

    # normalize the path up-front so any logged file path is readable
    # even on windows, without mixed back and forward slashes
    filepath = _path_utils.readable_norm(filepath)

    # success path first, failure path as the fall-through
    if _pix_writer.write_data(container,
                              filepath,
                              ind,
                              print_info=print_info) == {'FINISHED'}:
        lprint("I File created!")
        return True

    lprint(
        "E Unable to export data into file:\n\t   %r\n\t   For details check printouts above.",
        (filepath, ))
    return False
Exemplo n.º 8
0
def write_data_to_file(container, filepath, ind, print_info=False):
    """Exports given container in given filepath.

    :param container:
    :type container:
    :param filepath: path to file where container should be exported
    :type filepath: str
    :param ind: intendention for printout
    :type ind: str
    :param print_info: should infos be printed
    :type print_info: bool
    :return: True if export was successfull, otherwise False
    :rtype: bool
    """

    # normalize path so logged file paths read consistently on all platforms
    # (no mixed back and forward slashes on windows)
    norm_filepath = _path_utils.readable_norm(filepath)

    write_result = _pix_writer.write_data(container, norm_filepath, ind, print_info=print_info)

    if write_result == {'FINISHED'}:
        lprint("I File created!")
        return True

    lprint("E Unable to export data into file:\n\t   %r\n\t   For details check printouts above.", (norm_filepath,))
    return False
Exemplo n.º 9
0
def get_data_from_file(filepath):
    """Returns entire data in data container from specified SII definition file."""

    # guard: missing path
    if not filepath:
        lprint('I No SII file path provided!')
        return None

    # guard: path doesn't point to an existing file
    if not os.path.isfile(filepath):
        lprint('W Invalid SII file path %r!', (_path_utils.readable_norm(filepath),))
        return None

    container = _sii.parse_file(filepath)

    # failed or empty parse -> report and bail out
    if not container or len(container) < 1:
        lprint('D SII file "%s" is empty!', (_path_utils.readable_norm(filepath),))
        return None

    return container
Exemplo n.º 10
0
def get_data_from_file(filepath):
    """Returns entire data in data container from specified SII definition file."""

    # bail out early on a missing path
    if not filepath:
        lprint('I No SII file path provided!')
        return None

    # bail out early on a path that isn't an existing file
    if not os.path.isfile(filepath):
        lprint('W Invalid SII file path %r!',
               (_path_utils.readable_norm(filepath), ))
        return None

    parsed = _sii_reader.parse_file(filepath)

    # positive path first: non-empty parse result is the answer
    if parsed and len(parsed) >= 1:
        return parsed

    lprint('D SII file "%s" is empty!',
           (_path_utils.readable_norm(filepath), ))
    return None
Exemplo n.º 11
0
def parse_file(filepath, print_info=False):
    """Reads data from TOBJ file and returns it's records as list.

    :param filepath: tobj file path
    :type filepath: str
    :param print_info: switch for printing parsing info
    :type print_info: bool
    :return: all TOBJ entries as list in the order they are saved in the file
    :rtype: list[str]
    """

    if print_info:
        print('** TOBJ Parser ...')
        print('   filepath: %r' % str(filepath))

    data = []
    # 'with' closes the file on exit; the redundant explicit close() was removed
    with open(filepath, encoding="utf8") as f:

        try:

            for line in f:
                line_split = line.strip().split()

                # skip blank lines and comment lines (C-like "//" or "#")
                if not line_split or line_split[0].startswith(("//", "#")):
                    continue

                data.append(line_split[0]) if False else None  # placeholder removed
                data.extend(line_split)

        except UnicodeDecodeError as e:

            # someone wants to open binary tobj, no go
            # NOTE: other decode failure reasons are silently ignored on purpose,
            # returning whatever was parsed so far (best-effort behavior)
            if e.reason == "invalid start byte":
                lprint(
                    "E Can't read TOBJ, most probably it's in binary version: %r",
                    (_path.readable_norm(filepath), ))

    return data
Exemplo n.º 12
0
def batch_export(operator_instance, init_obj_list, menu_filepath=None):
    """This function calls other sorting functions and depending on the resulting output
    dictionary it exports all available 'SCS Game Objects' into specified locations.

    :param operator_instance: operator from within this function is called (used for report)
    :type operator_instance: bpy.types.Operator
    :param init_obj_list: initial object list which should be exported
    :type init_obj_list: tuple of Blender objects
    :param menu_filepath: filepath used from menu export
    :type menu_filepath: str
    :return: {'FINISHED'} when export ran through; {'CANCELLED'} on invalid path or nothing to export
    :rtype: set
    """

    lprint("", report_errors=-1, report_warnings=-1
           )  # Clear the 'error_messages' and 'warning_messages'
    game_objects_dict = _object_utils.sort_out_game_objects_for_export(
        init_obj_list)

    # exclude game objects that were manually omitted from export by property
    game_objects_dict = _object_utils.exclude_switched_off(game_objects_dict)

    if game_objects_dict:
        scs_game_objects_exported = []
        scs_game_objects_rejected = []

        # default destination, used when neither menu nor custom path is given
        global_filepath = _path_utils.get_global_export_path()

        for root_object in game_objects_dict:

            # update root object location to invoke update tagging on it and
            # then update scene to make sure all children objects will have all transforms up to date
            # NOTE: needed because Blender doesn't update objects on invisible layers on it's own
            root_object.location = root_object.location
            for scene in bpy.data.scenes:
                scene.update()

            game_object_list = game_objects_dict[root_object]

            # GET CUSTOM FILE PATH
            custom_filepath = _path_utils.get_custom_scs_root_export_path(
                root_object)

            # MAKE FINAL FILEPATH
            # precedence: path picked in file browser > per-root custom path > global default
            if menu_filepath:
                filepath = _path_utils.readable_norm(menu_filepath)
                filepath_message = "Export path selected in file browser:\n\t   \"" + filepath + "\""
            elif custom_filepath:
                filepath = _path_utils.readable_norm(custom_filepath)
                filepath_message = "Custom export path used for \"" + root_object.name + "\" is:\n\t   \"" + filepath + "\""
            else:
                filepath = _path_utils.readable_norm(global_filepath)
                filepath_message = "Default export path used for \"" + root_object.name + "\":\n\t   \"" + filepath + "\""

            # export is only allowed into an existing directory inside the SCS Project Base Path
            scs_project_path = _path_utils.readable_norm(
                _get_scs_globals().scs_project_path)
            if os.path.isdir(filepath) and _path_utils.startswith(
                    filepath, scs_project_path) and scs_project_path != "":

                # EXPORT ENTRY POINT
                export_success = pix.export(filepath, root_object,
                                            game_object_list)

                if export_success:
                    scs_game_objects_exported.append("> \"" +
                                                     root_object.name +
                                                     "\" exported to: '" +
                                                     filepath + "'")
                else:
                    scs_game_objects_rejected.append("> \"" +
                                                     root_object.name + "\"")

            else:
                if filepath:
                    message = (
                        "No valid export path found!\n\t   " +
                        "Export path does not exists or it's not inside SCS Project Base Path.\n\t   "
                        + "SCS Project Base Path:\n\t   \"" +
                        scs_project_path + "\"\n\t   " + filepath_message)
                else:
                    message = "No valid export path found! Please check \"SCS Project Base Path\" first."
                lprint('E ' + message)
                operator_instance.report({'ERROR'},
                                         message.replace("\t", "").replace(
                                             "   ", ""))
                return {'CANCELLED'}

        # NOTE(review): lprint with report flags appears to return truthy when
        # errors/warnings were collected — success info is only shown otherwise; confirm
        if not lprint(
                "\nI Export procces completed, summaries are printed below!",
                report_errors=True,
                report_warnings=True):
            operator_instance.report(
                {'INFO'},
                "Export successfully completed, exported %s game object(s)!" %
                len(scs_game_objects_exported))
            bpy.ops.wm.show_3dview_report(
                'INVOKE_DEFAULT',
                abort=True)  # abort 3d view reporting operator

        # print summary of exported game objects
        if len(scs_game_objects_exported) > 0:
            message = "EXPORTED GAME OBJECTS (" + str(
                len(scs_game_objects_exported)
            ) + "):\n\t   " + "=" * 26 + "\n\t   "
            for scs_game_object_export_message in scs_game_objects_exported:
                message += scs_game_object_export_message + "\n\t   "
            message += "=" * 26
            lprint("I " + message)

        # print summary of rejected game objects
        if len(scs_game_objects_rejected) > 0:
            message = "REJECTED GAME OBJECTS (" + str(
                len(scs_game_objects_rejected)
            ) + "):\n\t   " + "=" * 26 + "\n\t   "
            for scs_game_object_export_message in scs_game_objects_rejected:
                message += scs_game_object_export_message + "\n\t   "
            message += "=" * 26
            lprint("I " + message)

        # neither exported nor rejected -> there was nothing eligible at all
        if len(scs_game_objects_exported) + len(
                scs_game_objects_rejected) == 0:
            message = "Nothing to export! Please set at least one 'SCS Root Object'."
            lprint('E ' + message)
            operator_instance.report({'ERROR'}, message)
            return {'CANCELLED'}
    else:
        message = "No 'SCS Root Object' present or all of them were manually exluded from export in their settings.\n\t   " \
                  "(For more information, please refer to 'SCS Blender Tools' documentation.)"
        lprint('E ' + message)
        operator_instance.report({'ERROR'}, message.replace("\n\t   ", "\n"))
        return {'CANCELLED'}

    return {'FINISHED'}
Exemplo n.º 13
0
def load(locator, deep_reload=False):
    """Makes a preview model for a locator and link it to it
    NOTE: locator preview model path must be set

    :param locator: locator object to which preview model should be set
    :type locator: bpy.types.Object
    :param deep_reload: should model be reloaded completely? Use in case model mesh should be freshly loaded from disc
    :type deep_reload: bool
    :return: True if preview model was set; False otherwise
    :rtype: bool
    """

    # validate that preview models are enabled and the path points to an existing PIM file
    load_model = True
    abs_filepath = ""
    if not locator.scs_props.locator_show_preview_model:
        load_model = False
    else:
        filepath = locator.scs_props.locator_preview_model_path
        if filepath:
            if filepath.lower().endswith(".pim"):
                abs_filepath = _path_utils.get_abs_path(filepath, skip_mod_check=True)
                if not os.path.isfile(abs_filepath):
                    lprint("W Locator %r has invalid path to Preview Model PIM file: %r",
                           (locator.name, _path_utils.readable_norm(abs_filepath)))
                    load_model = False
            else:
                lprint("W Locator %r has invalid path to Preview Model PIM file: %r",
                       (locator.name, _path_utils.readable_norm(filepath)))
                load_model = False
        else:
            load_model = False

    if load_model:

        # drop any existing preview model first; deep reload also unlinks the
        # mesh data so it gets freshly re-read from disk below
        unload(locator, do_mesh_unlink=deep_reload)

        prem_name = str("prem_" + locator.name)
        obj = _get_model_mesh(locator, prem_name)

        # no cached mesh found -> import the PIM file from disk
        if not obj:
            from io_scs_tools.imp import pim as _pim_import

            obj = _pim_import.load_pim_file(bpy.context, abs_filepath, preview_model=True)

            # in case used preview model doesn't have any mesh, abort loading, report error and reset path
            # Path has to be reset to prevent loading preview model over and over again
            # from possible callbacks trying to fix not present preview model
            if not obj:
                message = "Selected PIM model doesn't have any mesh inside, so it can not be used as a preview model."
                bpy.ops.wm.show_warning_message('INVOKE_DEFAULT', is_modal=True, title="Preview Model Load Error!", message=message,
                                                width=500,
                                                height=100)
                lprint("E " + message)
                locator.scs_props.locator_preview_model_path = ""
                return False

            obj.name = prem_name
            obj.data.name = prem_name
            obj.data.scs_props.locator_preview_model_path = locator.scs_props.locator_preview_model_path
            obj.select = False

        link(locator, obj)

        return True
    else:
        return False
Exemplo n.º 14
0
def load(context, filepath, name_suffix="", suppress_reports=False):
    """Imports a complete SCS game object from PIX files sharing the given base filepath.

    :param context: Blender Context currently used for window_manager.update_progress and bpy_object_utils.object_data_add
    :type context: bpy.types.Context
    :param filepath: File path to be imported
    :type filepath: str
    :param name_suffix: files name suffix (exchange format is using .ef)
    :type name_suffix: str
    :param suppress_reports: True if you don't want for reports to be flushed & summaries to be printed out; False otherwise
    :type suppress_reports: bool
    :return: True (NOTE(review): docstring previously claimed a set of state statuses, but the function always returns True)
    :rtype: bool
    """
    import time

    t = time.time()
    bpy.context.window.cursor_modal_set('WAIT')
    scs_globals = _get_scs_globals()

    if not suppress_reports:
        lprint("", report_errors=-1, report_warnings=-1
               )  # Clear the 'error_messages' and 'warning_messages'

    collision_locators = []
    prefab_locators = []
    loaded_variants = []
    loaded_looks = []
    objects = []
    locators = []
    mats_info = []
    scs_root_object = skeleton = bones = armature = None

    # TRANSITIONAL STRUCTURES
    terrain_points = TerrainPntsTrans()

    # IMPORT PIP -> has to be loaded before PIM because of terrain points
    if scs_globals.import_pip_file:
        lprint("I Importing PIP ...")
        pip_filepath = filepath + ".pip" + name_suffix
        if os.path.isfile(pip_filepath):
            lprint('\nD PIP filepath:\n  %s', (pip_filepath, ))
            # print('PIP filepath:\n  %s' % pip_filepath)
            result, prefab_locators = _pip.load(pip_filepath, terrain_points)
        else:
            lprint('\nI No PIP file.')
            # print('INFO - No PIP file.')

    # IMPORT PIM
    # NOTE: also entered when only PIS import is requested, since PIM provides the skeleton name
    if scs_globals.import_pim_file or scs_globals.import_pis_file:
        lprint("I Importing PIM ...")
        pim_filepath = filepath + ".pim" + name_suffix
        if pim_filepath:
            if os.path.isfile(pim_filepath):
                lprint('\nD PIM filepath:\n  %s',
                       (_path_utils.readable_norm(pim_filepath), ))

                # dispatch by extension: plain ".pim" vs exchange format ".pim.ef"
                if pim_filepath.endswith(".pim"):
                    result, objects, locators, armature, skeleton, mats_info = _pim.load(
                        context,
                        pim_filepath,
                        terrain_points_trans=terrain_points)
                elif pim_filepath.endswith(".pim.ef"):
                    result, objects, locators, armature, skeleton, mats_info = _pim_ef.load(
                        context,
                        pim_filepath,
                        terrain_points_trans=terrain_points)
                else:
                    lprint(
                        "\nE Unknown PIM file extension! Shouldn't happen...")
            else:
                lprint('\nI No file found at %r!' %
                       (_path_utils.readable_norm(pim_filepath), ))
        else:
            lprint('\nI No filepath provided!')

    # IMPORT PIT
    bpy.context.view_layer.objects.active = None
    if scs_globals.import_pit_file:
        lprint("I Importing PIT ...")
        pit_filepath = filepath + ".pit" + name_suffix
        if os.path.isfile(pit_filepath):
            lprint('\nD PIT filepath:\n  %s', (pit_filepath, ))
            # print('PIT filepath:\n  %s' % pit_filepath)
            result, loaded_variants, loaded_looks = _pit.load(pit_filepath)
        else:
            lprint('\nI No PIT file.')
            # print('INFO - No PIT file.')

    # IMPORT PIC
    if scs_globals.import_pic_file:
        lprint("I Importing PIC ...")
        pic_filepath = filepath + ".pic" + name_suffix
        if os.path.isfile(pic_filepath):
            lprint('\nD PIC filepath:\n  %s', (pic_filepath, ))
            # print('PIC filepath:\n  %s' % pic_filepath)
            result, collision_locators = _pic.load(pic_filepath)
        else:
            lprint('\nI No PIC file.')
            # print('INFO - No PIC file.')

    # SETUP 'SCS GAME OBJECTS'
    # collision and prefab locators are merged into one locator list for the root setup
    lprint("I Setup of SCS game object ...")
    for item in collision_locators:
        locators.append(item)
    for item in prefab_locators:
        locators.append(item)
    path, filename = os.path.split(filepath)
    if objects or locators or (armature and skeleton):
        scs_root_object = _create_scs_root_object(filename, loaded_variants,
                                                  loaded_looks, mats_info,
                                                  objects, locators, armature)

        # Additionally if user wants to have automatically set custom export path, then let him have it :P
        if scs_globals.import_preserve_path_for_export:
            relative_export_path = _path_utils.relative_path(
                scs_globals.scs_project_path, path)
            if path.startswith(scs_globals.scs_project_path
                               ) and relative_export_path != path:
                scs_root_object.scs_props.scs_root_object_export_filepath = relative_export_path
                scs_root_object.scs_props.scs_root_object_allow_custom_path = True
            else:
                lprint(
                    "W Can not preserve import path for export on import SCS Root %r, "
                    "as import was done from outside of current SCS Project Base Path!",
                    (scs_root_object.name, ))

    # IMPORT PIS
    if scs_globals.import_pis_file:
        lprint("I Importing PIS ...")
        # pis file path is created from directory of pim file and skeleton definition inside pim header
        # NOTE(review): 'skeleton' may still be None here if PIM import failed or found no skeleton —
        # the concatenation below would raise TypeError in that case; confirm upstream guarantees
        pis_filepath = os.path.dirname(filepath) + os.sep + skeleton
        if os.path.isfile(pis_filepath):
            lprint('\nD PIS filepath:\n  %s', (pis_filepath, ))

            # strip off name suffix from skeleton path
            skeleton = skeleton[:-len(name_suffix)]

            # fill in custom data if PIS file is from other directory
            if skeleton[:-4] != scs_root_object.name:
                armature.scs_props.scs_skeleton_custom_export_dirpath = "//" + os.path.relpath(
                    os.path.dirname(pis_filepath),
                    scs_globals.scs_project_path)
                armature.scs_props.scs_skeleton_custom_name = os.path.basename(
                    skeleton[:-4])

            bones = _pis.load(pis_filepath, armature)
        else:
            bones = None
            lprint('\nI No PIS file.')

        # IMPORT PIA
        # animations are only loaded when bones were successfully imported from PIS
        if scs_globals.import_pia_file and bones:
            lprint("I Importing PIAs ...")
            basepath = os.path.dirname(filepath)
            # Search for PIA files in model's directory and its subdirectiories...
            lprint('\nD Searching the directory for PIA files:\n   %s',
                   (basepath, ))
            # print('\nSearching the directory for PIA files:\n   %s' % str(basepath))
            pia_files = []
            index = 0
            for root, dirs, files in os.walk(basepath):
                # when subdirectory search is off, only the first os.walk entry (basepath itself) is used
                if not scs_globals.import_include_subdirs_for_pia:
                    if index > 0:
                        break
                # print('  root: %s - dirs: %s - files: %s' % (str(root), str(dirs), str(files)))
                for file in files:
                    if file.endswith(".pia" + name_suffix):
                        pia_filepath = os.path.join(root, file)
                        pia_files.append(pia_filepath)
                index += 1

            if len(pia_files) > 0:
                lprint('D PIA files found:')
                for pia_filepath in pia_files:
                    lprint('D %r', pia_filepath)
                # print('armature: %s\nskeleton: %r\nbones: %s\n' % (str(armature), str(skeleton), str(bones)))
                _pia.load(scs_root_object, pia_files, armature, pis_filepath,
                          bones)
            else:
                lprint('\nI No PIA files.')

    # fix scene objects count so it won't trigger copy cycle
    bpy.context.scene.scs_cached_num_objects = len(bpy.context.scene.objects)

    # FINAL FEEDBACK
    bpy.context.window.cursor_modal_restore()
    if suppress_reports:
        lprint('\nI Import compleeted in %.3f sec.', time.time() - t)
    else:
        lprint('\nI Import compleeted in %.3f sec.',
               time.time() - t,
               report_errors=True,
               report_warnings=True)

    return True
Exemplo n.º 15
0
    def read_data_from_file(cls, filepath, skip_validation=False):
        """Reads data from given TOBJ file path, validates it and returns container with data.
        If validation process fails nothing will be returned,
        except if skipping of validation is requested container will be returned as it is.

        :param filepath: absolute TOBJ filepath from which to read data
        :type filepath: str
        :param skip_validation: True if reading should skip validation process
        :type skip_validation: bool
        :return: TOBJ container if everything is valid and TOBJ file exists; None otherwise
        :rtype: TobjContainer | None
        """

        if not filepath:
            lprint("I No TOBJ file path provided!")
            return None

        if not (os.path.isfile(filepath) and filepath.lower().endswith(".tobj")):
            lprint("W Invalid TOBJ file path %r!", (_path_utils.readable_norm(filepath),))
            return None

        records = _tobj.parse_file(filepath)

        # FIX: emptiness must be tested before calling len()/iter(); the old code
        # computed len(records) first, so a None result from the parser raised
        # TypeError before the "records is None" guard could ever run.
        if not records:
            lprint("I TOBJ file %r is empty!", (_path_utils.readable_norm(filepath),))
            return None

        records_iter = iter(records)

        container = cls()
        container.filepath = os.path.normpath(filepath)
        for curr_rec in records_iter:

            # if "map" then we are expecting map type and texture/textures
            if curr_rec.lower() == "map":

                try:
                    curr_rec = next(records_iter).lower()
                except StopIteration:
                    break

                if curr_rec not in cls.MAP_TYPES:
                    break

                container.map_type = curr_rec
                tex_count = 6 if container.map_type == "cube" else 1  # only cube map has 6 textures
                not_enough_records = False
                while tex_count > 0:

                    try:
                        container.map_names.append(next(records_iter))
                    except StopIteration:
                        not_enough_records = True
                        break

                    tex_count -= 1

                if not_enough_records:
                    break

            # if "addr" we are expecting address types
            elif curr_rec.lower() == "addr":

                # cube maps carry 3 address records; for "1d"/"2d"/... maps the
                # leading digit of the map type gives the address record count
                addr_count = 3 if container.map_type == "cube" else int(container.map_type[0])
                not_enough_records = False
                while addr_count > 0:

                    try:
                        container.addr.append(next(records_iter).lower())
                    except StopIteration:
                        not_enough_records = True
                        break

                    addr_count -= 1

                if not_enough_records:
                    break

            # if "border_color" we are expecting 9 records after it, each second is actual value
            elif curr_rec.lower() == "border_color":

                not_enough_records = False
                for i in range(9):

                    try:
                        curr_rec = next(records_iter)
                        if i % 2 == 1:
                            container.border_color.append(curr_rec)
                    except StopIteration:
                        not_enough_records = True
                        break

                if not_enough_records:
                    break

            # if "filter" we are expecting two values after it
            elif curr_rec.lower() == "filter":

                not_enough_records = False
                for i in range(2):

                    try:
                        container.filter.append(next(records_iter).lower())
                    except StopIteration:
                        not_enough_records = True
                        break

                if not_enough_records:
                    break

            # single-value attributes: next record is the attribute value
            elif curr_rec.lower() in ("bias", "target", "color_space", "usage"):

                try:
                    setattr(container, curr_rec.lower(), next(records_iter).lower())
                except StopIteration:
                    break

            # ignore obsolete "quality"
            elif curr_rec == "quality":

                try:
                    _ = next(records_iter)
                except StopIteration:
                    break

                continue

            # set any other known flag attribute to True
            elif hasattr(container, curr_rec.lower()):

                setattr(container, curr_rec.lower(), True)

            else:

                lprint("D Unknown TOBJ attribute: %r; TOBJ file:\n\t   %r", (curr_rec, container.filepath))

        # validate collected settings unless caller explicitly opted out
        if not skip_validation and not container.validate():
            lprint("E TOBJ file: %r settings reading aborted, check printings above.", (container.filepath,))
            return None

        return container
Exemplo n.º 16
0
    def read_data_from_file(cls, filepath, skip_validation=False):
        """Reads data from given TOBJ file path, validates it and returns container with data.
        If validation process fails nothing will be returned,
        except if skipping of validation is requested container will be returned as it is.

        :param filepath: absolute TOBJ filepath from which to read data
        :type filepath: str
        :param skip_validation: True if reading should skip validation process
        :type skip_validation: bool
        :return: TOBJ container if everything is valid and TOBJ file exists; None otherwise
        :rtype: TobjContainer | None
        """

        if not filepath:
            lprint("I No TOBJ file path provided!")
            return None

        if not (os.path.isfile(filepath)
                and filepath.lower().endswith(".tobj")):
            lprint("W Invalid TOBJ file path %r!",
                   (_path_utils.readable_norm(filepath), ))
            return None

        records = _tobj.parse_file(filepath)

        # FIX: emptiness must be tested before calling len()/iter(); the old code
        # computed len(records) first, so a None result from the parser raised
        # TypeError before the "records is None" guard could ever run.
        if not records:
            lprint("I TOBJ file %r is empty!",
                   (_path_utils.readable_norm(filepath), ))
            return None

        records_iter = iter(records)

        container = cls()
        container.filepath = os.path.normpath(filepath)
        for curr_rec in records_iter:

            # if "map" then we are expecting map type and texture/textures
            if curr_rec.lower() == "map":

                try:
                    curr_rec = next(records_iter).lower()
                except StopIteration:
                    break

                if curr_rec not in cls.MAP_TYPES:
                    break

                container.map_type = curr_rec
                tex_count = 6 if container.map_type == "cube" else 1  # only cube map has 6 textures
                not_enough_records = False
                while tex_count > 0:

                    try:
                        container.map_names.append(next(records_iter))
                    except StopIteration:
                        not_enough_records = True
                        break

                    tex_count -= 1

                if not_enough_records:
                    break

            # if "addr" we are expecting address types
            elif curr_rec.lower() == "addr":

                # cube maps carry 3 address records; for "1d"/"2d"/... maps the
                # leading digit of the map type gives the address record count
                addr_count = 3 if container.map_type == "cube" else int(
                    container.map_type[0])
                not_enough_records = False
                while addr_count > 0:

                    try:
                        container.addr.append(next(records_iter).lower())
                    except StopIteration:
                        not_enough_records = True
                        break

                    addr_count -= 1

                if not_enough_records:
                    break

            # if "border_color" we are expecting 9 records after it, each second is actual value
            elif curr_rec.lower() == "border_color":

                not_enough_records = False
                for i in range(9):

                    try:
                        curr_rec = next(records_iter)
                        if i % 2 == 1:
                            container.border_color.append(curr_rec)
                    except StopIteration:
                        not_enough_records = True
                        break

                if not_enough_records:
                    break

            # if "filter" we are expecting two values after it
            elif curr_rec.lower() == "filter":

                not_enough_records = False
                for i in range(2):

                    try:
                        container.filter.append(next(records_iter).lower())
                    except StopIteration:
                        not_enough_records = True
                        break

                if not_enough_records:
                    break

            # single-value attributes: next record is the attribute value
            elif curr_rec.lower() in ("bias", "target", "color_space",
                                      "usage"):

                try:
                    setattr(container, curr_rec.lower(),
                            next(records_iter).lower())
                except StopIteration:
                    break

            # ignore obsolete "quality"
            elif curr_rec == "quality":

                try:
                    _ = next(records_iter)
                except StopIteration:
                    break

                continue

            # set any other known flag attribute to True
            elif hasattr(container, curr_rec.lower()):

                setattr(container, curr_rec.lower(), True)

            else:

                lprint("D Unknown TOBJ attribute: %r; TOBJ file:\n\t   %r",
                       (curr_rec, container.filepath))

        # validate collected settings unless caller explicitly opted out
        if not skip_validation and not container.validate():
            lprint(
                "E TOBJ file: %r settings reading aborted, check printings above.",
                (container.filepath, ))
            return None

        return container
Exemplo n.º 17
0
def get_texture_image(texture_path, texture_type, report_invalid=False):
    """Creates and returns image for given texture path and type.

    :param texture_path: Texture path
    :type texture_path: str
    :param texture_type: Texture type keyword
    :type texture_type: str
    :param report_invalid: flag indicating if invalid texture should be reported in 3d view
    :type report_invalid: bool
    :return: loaded image datablock to be used in SCS material; None if texture is missing/unsupported
    :rtype: bpy.types.Image | None
    """

    # reflection textures are special: they get rendered into a cubemap image
    if texture_path.endswith(".tobj") and texture_type == "reflection":
        return get_reflection_image(texture_path, report_invalid=report_invalid)

    # CREATE TEXTURE/IMAGE ID NAME
    teximag_id_name = _path.get_filename(texture_path, with_ext=False)

    # CREATE ABSOLUTE FILEPATH
    abs_texture_filepath = _path.get_abs_path(texture_path)

    # return None on non-existing texture file path
    if not abs_texture_filepath or not os.path.isfile(abs_texture_filepath):
        return None

    if abs_texture_filepath.endswith(".tobj"):
        abs_texture_filepath = _path.get_texture_path_from_tobj(abs_texture_filepath)

        # if not existing or none supported file
        if abs_texture_filepath is None or abs_texture_filepath[-4:] not in (".tga", ".png", ".dds"):

            if report_invalid:
                lprint("", report_warnings=-1, report_errors=-1)

            # take care of none existing paths referenced in tobj texture names
            if abs_texture_filepath:

                lprint("W Texture can't be displayed as TOBJ file: %r is referencing non texture file:\n\t   %r",
                       (texture_path, _path.readable_norm(abs_texture_filepath)))

            else:

                lprint("W Texture can't be displayed as TOBJ file: %r is referencing non existing texture file.",
                       (texture_path,))

            if report_invalid:
                lprint("", report_warnings=1, report_errors=1)

            return None

    image = None
    if abs_texture_filepath and os.path.isfile(abs_texture_filepath):

        # Reuse an existing image datablock that already points to the same file,
        # walking postfixed name variants ("name", "name.001", ...) until a free
        # name is found. NOTE: the previous implementation repeated this exact
        # search a second time before loading; since nothing changes between the
        # two passes the second search could never find anything new, so the
        # duplicate loop was removed.
        postfix = 0
        postfixed_tex = teximag_id_name
        while postfixed_tex in bpy.data.images:

            if _path.repair_path(bpy.data.images[postfixed_tex].filepath) == _path.repair_path(abs_texture_filepath):
                image = bpy.data.images[postfixed_tex]
                break

            postfix += 1
            postfixed_tex = teximag_id_name + "." + str(postfix).zfill(3)

        # if image wasn't found load it
        if not image:
            image = bpy.data.images.load(abs_texture_filepath)
            image.name = teximag_id_name
            image.alpha_mode = 'CHANNEL_PACKED'

            # try to get relative path to the Blender file and set it to the image
            if bpy.data.filepath != '':  # empty file path means blender file is not saved
                try:
                    rel_path = _path.relative_path(os.path.dirname(bpy.data.filepath), abs_texture_filepath)
                except ValueError:  # catch different mount paths: "path is on mount 'C:', start on mount 'E:'"
                    rel_path = None

                if rel_path:
                    image.filepath = rel_path

    # final fallback warning for TOBJ-referenced textures that failed to load
    if image is None and texture_path.endswith(".tobj"):
        if report_invalid:
            lprint("", report_warnings=-1, report_errors=-1)

        lprint("W Texture can't be displayed as TOBJ file: %r is referencing non existing texture file:\n\t   %r",
               (texture_path, _path.readable_norm(abs_texture_filepath)))

        if report_invalid:
            lprint("", report_warnings=1, report_errors=1)

    return image
Exemplo n.º 18
0
def load(context, filepath):
    """Imports an SCS game object from the given PIM file path, pulling in
    sibling PIP/PIT/PIC/PIS/PIA files as enabled by the SCS globals import flags.

    Sibling file paths are derived from the PIM path by swapping its last
    extension character (e.g. ``*.pim`` -> ``*.pip``/``*.pit``/``*.pic``).

    :param context: Blender Context currently used for window_manager.update_progress and bpy_object_utils.object_data_add
    :type context: bpy.types.Context
    :param filepath: File path to be imported
    :type filepath: str
    :return: True when import routine finished (warnings/errors are reported via lprint)
    :rtype: bool
    """
    import time

    t = time.time()
    bpy.context.window.cursor_modal_set('WAIT')
    scs_globals = _get_scs_globals()
    lprint("", report_errors=-1, report_warnings=-1
           )  # Clear the 'error_messages' and 'warning_messages'

    collision_locators = []
    prefab_locators = []
    loaded_variants = []
    loaded_looks = []
    objects = []
    locators = []
    mats_info = []
    scs_root_object = skeleton = bones = armature = None

    # TRANSITIONAL STRUCTURES
    terrain_points = TerrainPntsTrans()

    # IMPORT PIP -> has to be loaded before PIM because of terrain points
    if scs_globals.import_pip_file:
        pip_filepath = str(filepath[:-1] + 'p')
        if os.path.isfile(pip_filepath):
            lprint('\nD PIP filepath:\n  %s', (pip_filepath, ))
            # print('PIP filepath:\n  %s' % pip_filepath)
            # NOTE(review): 'result' status is currently ignored here and below
            result, prefab_locators = _pip.load(pip_filepath, terrain_points)
        else:
            lprint('\nI No PIP file.')
            # print('INFO - No PIP file.')

    # IMPORT PIM
    # PIM is also loaded when only PIS import is requested, because the
    # skeleton file name is read from the PIM header.
    if scs_globals.import_pim_file or scs_globals.import_pis_file:
        if filepath:
            if os.path.isfile(filepath):
                lprint('\nD PIM filepath:\n  %s',
                       (_path_utils.readable_norm(filepath), ))
                result, objects, locators, armature, skeleton, mats_info = _pim.load(
                    context, filepath, terrain_points_trans=terrain_points)
                # print('  armature:\n%s\n  skeleton:\n%s' % (str(armature), str(skeleton)))
            else:
                lprint('\nI No file found at %r!' %
                       (_path_utils.readable_norm(filepath), ))
        else:
            lprint('\nI No filepath provided!')

    # IMPORT PIT
    bpy.context.scene.objects.active = None
    if scs_globals.import_pit_file:
        pit_filepath = str(filepath[:-1] + 't')
        if os.path.isfile(pit_filepath):
            lprint('\nD PIT filepath:\n  %s', (pit_filepath, ))
            # print('PIT filepath:\n  %s' % pit_filepath)
            result, loaded_variants, loaded_looks = _pit.load(pit_filepath)
        else:
            lprint('\nI No PIT file.')
            # print('INFO - No PIT file.')

    # IMPORT PIC
    if scs_globals.import_pic_file:
        pic_filepath = str(filepath[:-1] + 'c')
        if os.path.isfile(pic_filepath):
            lprint('\nD PIC filepath:\n  %s', (pic_filepath, ))
            # print('PIC filepath:\n  %s' % pic_filepath)
            result, collision_locators = _pic.load(pic_filepath)
        else:
            lprint('\nI No PIC file.')
            # print('INFO - No PIC file.')

    # SETUP 'SCS GAME OBJECTS'
    for item in collision_locators:
        locators.append(item)
    for item in prefab_locators:
        locators.append(item)
    path, file = os.path.split(filepath)
    # print('  path: %r\n  file: %r' % (path, file))
    lod_name, ext = os.path.splitext(file)
    if objects or locators or (armature and skeleton):
        scs_root_object = _create_scs_root_object(lod_name, loaded_variants,
                                                  loaded_looks, mats_info,
                                                  objects, locators, armature)

        # Additionally if user wants to have automatically set custom export path, then let him have it :P
        if scs_globals.import_preserve_path_for_export:
            relative_export_path = _path_utils.relative_path(
                scs_globals.scs_project_path, path)
            if path.startswith(scs_globals.scs_project_path
                               ) and relative_export_path != path:
                scs_root_object.scs_props.scs_root_object_export_filepath = relative_export_path
                scs_root_object.scs_props.scs_root_object_allow_custom_path = True
            else:
                lprint(
                    "W Can not preserve import path for export on import SCS Root %r, "
                    "as import was done from outside of current SCS Project Base Path!",
                    (scs_root_object.name, ))

    # IMPORT PIS
    if scs_globals.import_pis_file:
        # pis file path is created from directory of pim file and skeleton definition inside pim header
        # NOTE(review): assumes PIM import above succeeded and set 'skeleton';
        # if PIM was missing, 'skeleton' is still None here — confirm upstream guards
        pis_filepath = os.path.dirname(filepath) + os.sep + skeleton
        if os.path.isfile(pis_filepath):
            lprint('\nD PIS filepath:\n  %s', (pis_filepath, ))

            # fill in custom data if PIS file is from other directory
            if skeleton[:-4] != scs_root_object.name:
                armature.scs_props.scs_skeleton_custom_export_dirpath = "//" + os.path.relpath(
                    os.path.dirname(pis_filepath),
                    scs_globals.scs_project_path)
                armature.scs_props.scs_skeleton_custom_name = os.path.basename(
                    skeleton[:-4])

            bones = _pis.load(pis_filepath, armature)
        else:
            bones = None
            lprint('\nI No PIS file.')

        # IMPORT PIA
        if scs_globals.import_pia_file and bones:
            basepath = os.path.dirname(filepath)
            # Search for PIA files in model's directory and its subdirectiories...
            lprint('\nD Searching the directory for PIA files:\n   %s',
                   (basepath, ))
            # print('\nSearching the directory for PIA files:\n   %s' % str(basepath))
            pia_files = []
            index = 0
            for root, dirs, files in os.walk(basepath):
                # when subdirectory search is disabled, stop after the first
                # (top-level) directory yielded by os.walk
                if not scs_globals.import_include_subdirs_for_pia:
                    if index > 0:
                        break
                # print('  root: %s - dirs: %s - files: %s' % (str(root), str(dirs), str(files)))
                for file in files:
                    if file.endswith(".pia"):
                        pia_filepath = os.path.join(root, file)
                        pia_files.append(pia_filepath)
                index += 1

            if len(pia_files) > 0:
                lprint('D PIA files found:')
                for pia_filepath in pia_files:
                    lprint('D %r', pia_filepath)
                # print('armature: %s\nskeleton: %r\nbones: %s\n' % (str(armature), str(skeleton), str(bones)))
                _pia.load(scs_root_object, pia_files, armature, pis_filepath,
                          bones)
            else:
                lprint('\nI No PIA files.')

    # fix scene objects count so it won't trigger copy cycle
    bpy.context.scene.scs_cached_num_objects = len(bpy.context.scene.objects)

    # Turn on Textured Solid in 3D view...
    for bl_screen in bpy.data.screens:
        for bl_area in bl_screen.areas:
            for bl_space in bl_area.spaces:
                if bl_space.type == 'VIEW_3D':
                    bl_space.show_textured_solid = True

    # Turn on GLSL in 3D view...
    bpy.context.scene.game_settings.material_mode = 'GLSL'

    # Turn on "Frame Dropping" for animation playback...
    bpy.context.scene.use_frame_drop = True

    # FINAL FEEDBACK
    bpy.context.window.cursor_modal_restore()
    lprint('\nI Import compleeted in %.3f sec.',
           time.time() - t,
           report_errors=True,
           report_warnings=True)
    return True
Exemplo n.º 19
0
def load(context, filepath):
    """Imports an SCS game object from the given PIM file path, pulling in
    sibling PIP/PIT/PIC/PIS/PIA files as enabled by the SCS globals import flags.

    Sibling file paths are derived from the PIM path by swapping its last
    extension character (e.g. ``*.pim`` -> ``*.pip``/``*.pit``/``*.pic``).

    :param context: Blender Context currently used for window_manager.update_progress and bpy_object_utils.object_data_add
    :type context: bpy.types.Context
    :param filepath: File path to be imported
    :type filepath: str
    :return: True when import routine finished (warnings/errors are reported via lprint)
    :rtype: bool
    """
    import time

    t = time.time()
    bpy.context.window.cursor_modal_set('WAIT')
    scs_globals = _get_scs_globals()
    lprint("", report_errors=-1, report_warnings=-1)  # Clear the 'error_messages' and 'warning_messages'

    collision_locators = []
    prefab_locators = []
    loaded_variants = []
    loaded_looks = []
    objects = []
    locators = []
    mats_info = []
    scs_root_object = skeleton = bones = armature = None

    # TRANSITIONAL STRUCTURES
    terrain_points = TerrainPntsTrans()

    # IMPORT PIP -> has to be loaded before PIM because of terrain points
    if scs_globals.import_pip_file:
        pip_filepath = str(filepath[:-1] + 'p')
        if os.path.isfile(pip_filepath):
            lprint('\nD PIP filepath:\n  %s', (pip_filepath,))
            # print('PIP filepath:\n  %s' % pip_filepath)
            # NOTE(review): 'result' status is currently ignored here and below
            result, prefab_locators = _pip.load(pip_filepath, terrain_points)
        else:
            lprint('\nI No PIP file.')
            # print('INFO - No PIP file.')

    # IMPORT PIM
    # PIM is also loaded when only PIS import is requested, because the
    # skeleton file name is read from the PIM header.
    if scs_globals.import_pim_file or scs_globals.import_pis_file:
        if filepath:
            if os.path.isfile(filepath):
                lprint('\nD PIM filepath:\n  %s', (_path_utils.readable_norm(filepath),))
                result, objects, locators, armature, skeleton, mats_info = _pim.load(
                    context,
                    filepath,
                    terrain_points_trans=terrain_points
                )
                # print('  armature:\n%s\n  skeleton:\n%s' % (str(armature), str(skeleton)))
            else:
                lprint('\nI No file found at %r!' % (_path_utils.readable_norm(filepath),))
        else:
            lprint('\nI No filepath provided!')

    # IMPORT PIT
    bpy.context.scene.objects.active = None
    if scs_globals.import_pit_file:
        pit_filepath = str(filepath[:-1] + 't')
        if os.path.isfile(pit_filepath):
            lprint('\nD PIT filepath:\n  %s', (pit_filepath,))
            # print('PIT filepath:\n  %s' % pit_filepath)
            result, loaded_variants, loaded_looks = _pit.load(pit_filepath)
        else:
            lprint('\nI No PIT file.')
            # print('INFO - No PIT file.')

    # IMPORT PIC
    if scs_globals.import_pic_file:
        pic_filepath = str(filepath[:-1] + 'c')
        if os.path.isfile(pic_filepath):
            lprint('\nD PIC filepath:\n  %s', (pic_filepath,))
            # print('PIC filepath:\n  %s' % pic_filepath)
            result, collision_locators = _pic.load(pic_filepath)
        else:
            lprint('\nI No PIC file.')
            # print('INFO - No PIC file.')

    # SETUP 'SCS GAME OBJECTS'
    for item in collision_locators:
        locators.append(item)
    for item in prefab_locators:
        locators.append(item)
    path, file = os.path.split(filepath)
    # print('  path: %r\n  file: %r' % (path, file))
    lod_name, ext = os.path.splitext(file)
    if objects or locators or (armature and skeleton):
        scs_root_object = _create_scs_root_object(lod_name, loaded_variants, loaded_looks, mats_info, objects, locators, armature)

    # IMPORT PIS
    if scs_globals.import_pis_file:
        # pis file path is created from directory of pim file and skeleton definition inside pim header
        # NOTE(review): assumes PIM import above succeeded and set 'skeleton';
        # if PIM was missing, 'skeleton' is still None here — confirm upstream guards
        pis_filepath = os.path.dirname(filepath) + os.sep + skeleton
        if os.path.isfile(pis_filepath):
            lprint('\nD PIS filepath:\n  %s', (pis_filepath,))

            # fill in custom data if PIS file is from other directory
            if skeleton[:-4] != scs_root_object.name:
                armature.scs_props.scs_skeleton_custom_export_dirpath = "//" + os.path.relpath(os.path.dirname(pis_filepath),
                                                                                               scs_globals.scs_project_path)
                armature.scs_props.scs_skeleton_custom_name = os.path.basename(skeleton[:-4])

            bones = _pis.load(pis_filepath, armature)
        else:
            bones = None
            lprint('\nI No PIS file.')

        # IMPORT PIA
        if scs_globals.import_pia_file and bones:
            basepath = os.path.dirname(filepath)
            # Search for PIA files in model's directory and its subdirectiories...
            lprint('\nD Searching the directory for PIA files:\n   %s', (basepath,))
            # print('\nSearching the directory for PIA files:\n   %s' % str(basepath))
            pia_files = []
            index = 0
            for root, dirs, files in os.walk(basepath):
                # when subdirectory search is disabled, stop after the first
                # (top-level) directory yielded by os.walk
                if not scs_globals.include_subdirs_for_pia:
                    if index > 0:
                        break
                # print('  root: %s - dirs: %s - files: %s' % (str(root), str(dirs), str(files)))
                for file in files:
                    if file.endswith(".pia"):
                        pia_filepath = os.path.join(root, file)
                        pia_files.append(pia_filepath)
                index += 1

            if len(pia_files) > 0:
                lprint('D PIA files found:')
                for pia_filepath in pia_files:
                    lprint('D %r', pia_filepath)
                # print('armature: %s\nskeleton: %r\nbones: %s\n' % (str(armature), str(skeleton), str(bones)))
                _pia.load(scs_root_object, pia_files, armature, pis_filepath, bones)
            else:
                lprint('\nI No PIA files.')

    # fix scene objects count so it won't trigger copy cycle
    bpy.context.scene.scs_cached_num_objects = len(bpy.context.scene.objects)

    # Turn on Textured Solid in 3D view...
    for bl_screen in bpy.data.screens:
        for bl_area in bl_screen.areas:
            for bl_space in bl_area.spaces:
                if bl_space.type == 'VIEW_3D':
                    bl_space.show_textured_solid = True

    # Turn on GLSL in 3D view...
    bpy.context.scene.game_settings.material_mode = 'GLSL'

    # Turn on "Frame Dropping" for animation playback...
    bpy.context.scene.use_frame_drop = True

    # FINAL FEEDBACK
    bpy.context.window.cursor_modal_restore()
    lprint('\nI Import compleeted in %.3f sec.', time.time() - t, report_errors=True, report_warnings=True)
    return True
Exemplo n.º 20
0
def get_reflection_image(texture_path, report_invalid=False):
    """Gets reflection image for given texture path.

    Builds an equirectangular environment image by projecting the six cubemap
    faces referenced by the TOBJ through a temporary Cycles panorama render.

    1. gets all textures names and check existance
    2. create image objects for all planes
    3. setup scene, create planes, create camera projector and assign images
    4. render & save image
    5. cleanup & scene restoring
    6. load temp image and pack it
    7. set filepath to TOBJ

    :param texture_path: Texture path
    :type texture_path: str
    :param report_invalid: flag indicating if invalid texture should be reported in 3d view
    :type report_invalid: bool
    :return: loaded image datablock to be used in SCS material; None if TOBJ/textures are missing or unsupported
    :rtype: bpy.types.Image | None
    """

    # CREATE TEXTURE/IMAGE ID NAME
    # "_cubemap" suffix keeps the rendered result distinct from plain textures
    teximag_id_name = _path.get_filename(texture_path, with_ext=False) + "_cubemap"

    # CREATE ABSOLUTE FILEPATH
    abs_tobj_filepath = _path.get_abs_path(texture_path)

    # return None on non-existing TOBJ
    if not abs_tobj_filepath or not os.path.isfile(abs_tobj_filepath):
        return None

    # check existance of this cubemap
    # (reuse previous render only if it originates from the same TOBJ)
    if teximag_id_name in bpy.data.images:

        if _path.get_abs_path(bpy.data.images[teximag_id_name].filepath) == abs_tobj_filepath:
            return bpy.data.images[teximag_id_name]

        bpy.data.images.remove(bpy.data.images[teximag_id_name])

    # 1. get all textures file paths and check their existance

    abs_texture_filepaths = _path.get_texture_paths_from_tobj(abs_tobj_filepath)

    # should be a cubemap with six images
    if not abs_texture_filepaths or len(abs_texture_filepaths) != 6:
        return None

    # all six images have to exist
    for abs_texture_filepath in abs_texture_filepaths:

        if abs_texture_filepath[-4:] not in (".tga", ".png", ".dds"):  # none supported file

            if report_invalid:
                lprint("", report_warnings=-1, report_errors=-1)

            lprint("W Texture can't be displayed as TOBJ file: %r is referencing non texture file:\n\t   %r",
                   (texture_path, _path.readable_norm(abs_texture_filepath)))

            if report_invalid:
                lprint("", report_warnings=1, report_errors=1)

            return None

        elif not os.path.isfile(abs_texture_filepath):  # none existing file

            if report_invalid:
                lprint("", report_warnings=-1, report_errors=-1)

            # take care of none existing paths referenced in tobj texture names
            lprint("W Texture can't be displayed as TOBJ file: %r is referencing non existing texture file:\n\t   %r",
                   (texture_path, _path.readable_norm(abs_texture_filepath)))

            if report_invalid:
                lprint("", report_warnings=1, report_errors=1)

            return None

    # 2. create image objects for all planes

    images = []
    for abs_texture_filepath in abs_texture_filepaths:
        images.append(bpy.data.images.load(abs_texture_filepath))

    # 3. setup scene, create planes, create camera projector and assign images

    # work in a throw-away scene so the user's scene is untouched
    old_scene = bpy.context.window.scene
    tmp_scene = bpy.data.scenes.new("cubemap")
    bpy.context.window.scene = tmp_scene

    meshes = []
    materials = []
    objects = []
    for i, plane in enumerate(("x+", "x-", "y+", "y-", "z+", "z-")):
        # mesh creation
        bm = bmesh.new(use_operators=True)

        bmesh.ops.create_grid(bm, x_segments=1, y_segments=1, size=1, calc_uvs=True)

        mesh = bpy.data.meshes.new(plane)
        bm.to_mesh(mesh)
        bm.free()

        mesh.uv_layers.new()

        meshes.append(mesh)

        # material creation: emission shader so face textures render unlit
        material = bpy.data.materials.new(plane)
        material.use_nodes = True
        material.node_tree.nodes.clear()

        out_node = material.node_tree.nodes.new("ShaderNodeOutputMaterial")
        emission_node = material.node_tree.nodes.new("ShaderNodeEmission")
        tex_node = material.node_tree.nodes.new("ShaderNodeTexImage")
        tex_node.image = images[i]

        material.node_tree.links.new(emission_node.inputs['Color'], tex_node.outputs['Color'])
        material.node_tree.links.new(out_node.inputs['Surface'], emission_node.outputs['Emission'])

        mesh.materials.append(material)

        materials.append(material)

        # object creation: position/orient each unit plane as one cube face
        # around the origin, where the panorama camera sits
        obj = bpy.data.objects.new(mesh.name, mesh)
        obj.location = (0,) * 3
        obj.rotation_euler = (0,) * 3
        if plane == "x+":
            obj.rotation_euler.x = pi * 0.5
            obj.location.y = 1
        elif plane == "x-":
            obj.rotation_euler.x = pi * 0.5
            obj.rotation_euler.z = pi
            obj.location.y = -1
        elif plane == "y+":
            obj.rotation_euler.x = pi
            obj.rotation_euler.z = pi * 0.5
            obj.location.z = 1
        elif plane == "y-":
            obj.rotation_euler.z = pi * 0.5
            obj.location.z = -1
        elif plane == "z+":
            obj.rotation_euler.x = pi * 0.5
            obj.rotation_euler.z = pi * 0.5
            obj.location.x = -1
        elif plane == "z-":
            obj.rotation_euler.x = pi * 0.5
            obj.rotation_euler.z = -pi * 0.5
            obj.location.x = 1

        tmp_scene.collection.objects.link(obj)
        objects.append(obj)

    # camera creation: equirectangular panorama covering the full sphere
    camera = bpy.data.cameras.new("projector")
    camera.type = "PANO"
    camera.lens = 5
    camera.sensor_width = 32
    camera.cycles.panorama_type = "EQUIRECTANGULAR"
    camera.cycles.latitude_min = -pi * 0.5
    camera.cycles.latitude_max = pi * 0.5
    camera.cycles.longitude_min = pi
    camera.cycles.longitude_max = -pi

    cam_obj = bpy.data.objects.new(camera.name, camera)
    cam_obj.location = (0,) * 3
    cam_obj.rotation_euler = (pi * 0.5, 0, 0)

    tmp_scene.collection.objects.link(cam_obj)

    # 4. render & save image

    final_image_path = os.path.join(tempfile.gettempdir(), teximag_id_name + ".tga")

    # one sample is enough: the emission materials need no light bounces
    tmp_scene.render.engine = "CYCLES"
    tmp_scene.cycles.samples = 1
    tmp_scene.camera = cam_obj
    tmp_scene.render.image_settings.file_format = "TARGA"
    tmp_scene.render.image_settings.color_mode = "RGBA"
    tmp_scene.render.resolution_percentage = 100
    tmp_scene.render.resolution_x = images[0].size[0] * 4
    tmp_scene.render.resolution_y = images[0].size[1] * 2
    tmp_scene.render.filepath = final_image_path
    bpy.ops.render.render(write_still=True, scene=tmp_scene.name)

    # 5. cleanup & scene restoring

    for obj in objects:
        bpy.data.objects.remove(obj)

    for mesh in meshes:
        bpy.data.meshes.remove(mesh)

    for material in materials:
        bpy.data.materials.remove(material)

    for image in images:
        bpy.data.images.remove(image)

    bpy.data.objects.remove(cam_obj)
    bpy.data.cameras.remove(camera)

    bpy.context.window.scene = old_scene
    bpy.data.scenes.remove(tmp_scene)

    # 6. load temp image and pack it
    # packing embeds pixels into the blend file, so the temp file is disposable

    final_image = bpy.data.images.load(final_image_path)
    final_image.name = teximag_id_name
    final_image.alpha_mode = 'CHANNEL_PACKED'
    final_image.pack()

    # 7. set filepath to original image
    # (points back at the TOBJ so the reuse check above can match it later)
    final_image.filepath = abs_tobj_filepath

    return final_image
Exemplo n.º 21
0
def get_texture(texture_path, texture_type, report_invalid=False):
    """Creates and setup Texture and Image data on active Material.

    :param texture_path: Texture path
    :type texture_path: str
    :param texture_type: Texture type keyword
    :type texture_type: str
    :param report_invalid: flag indicating if invalid texture should be reported in 3d view
    :type report_invalid: bool
    :return: texture with properly set image or None if texture file is missing/unsupported
    :rtype: bpy.types.Texture | None
    """

    # CREATE TEXTURE/IMAGE ID NAME
    teximag_id_name = _path.get_filename(texture_path, with_ext=False)

    # CREATE ABSOLUTE FILEPATH
    abs_texture_filepath = _path.get_abs_path(texture_path)

    # return None on non-existing texture file path
    if not abs_texture_filepath or not os.path.isfile(abs_texture_filepath):
        return None

    if abs_texture_filepath.endswith(".tobj"):
        abs_texture_filepath = _path.get_texture_path_from_tobj(
            abs_texture_filepath)

        # if not existing or none supported file
        if abs_texture_filepath is None or abs_texture_filepath[-4:] not in (
                ".tga", ".png", ".dds"):

            if report_invalid:
                # reset report counters so 3d view report shows only this problem
                lprint("", report_warnings=-1, report_errors=-1)

            # take care of none existing paths referenced in tobj texture names
            if abs_texture_filepath:

                lprint(
                    "W Texture can't be displayed as TOBJ file: %r is referencing non texture file:\n\t   %r",
                    (texture_path, _path.readable_norm(abs_texture_filepath)))

            else:

                lprint(
                    "W Texture can't be displayed as TOBJ file: %r is referencing non existing texture file.",
                    (texture_path, ))

            if report_invalid:
                lprint("", report_warnings=1, report_errors=1)

            return None

    texture = None
    if abs_texture_filepath and os.path.isfile(abs_texture_filepath):

        # normalize searched path once; it's invariant for both lookup loops below
        repaired_filepath = _path.repair_path(abs_texture_filepath)

        # find existing texture with this image
        if teximag_id_name in bpy.data.textures:

            # reuse existing image texture if possible
            postfix = 0
            postfixed_tex = teximag_id_name
            while postfixed_tex in bpy.data.textures:

                img_exists = bpy.data.textures[postfixed_tex].image is not None
                if img_exists and _path.repair_path(
                        bpy.data.textures[postfixed_tex].image.filepath
                ) == repaired_filepath:
                    texture = bpy.data.textures[postfixed_tex]
                    break

                postfix += 1
                postfixed_tex = teximag_id_name + "." + str(postfix).zfill(3)

        # if texture wasn't found create new one
        if not texture:

            texture = bpy.data.textures.new(teximag_id_name, 'IMAGE')
            image = None

            # reuse existing image if possible
            postfix = 0
            postfixed_img = teximag_id_name
            while postfixed_img in bpy.data.images:

                if _path.repair_path(
                        bpy.data.images[postfixed_img].filepath
                ) == repaired_filepath:
                    image = bpy.data.images[postfixed_img]
                    break

                postfix += 1
                postfixed_img = teximag_id_name + "." + str(postfix).zfill(3)

            # if image wasn't found load it
            if not image:
                image = bpy.data.images.load(abs_texture_filepath)
                image.name = teximag_id_name

                # try to get relative path to the Blender file and set it to the image
                if bpy.data.filepath != '':  # empty file path means blender file is not saved
                    try:
                        rel_path = _path.relative_path(
                            os.path.dirname(bpy.data.filepath),
                            abs_texture_filepath)
                    except ValueError:  # catch different mount paths: "path is on mount 'C:', start on mount 'E:'"
                        rel_path = None

                    if rel_path:
                        image.filepath = rel_path

            # finally link image to texture
            texture.image = image
            # NOTE(review): Image.use_alpha was removed in Blender 2.80 (replaced by
            # alpha_mode); confirm this module targets pre-2.80 API
            image.use_alpha = True

        # set proper color space depending on texture type
        if texture_type == "nmap":
            # For TGA normal maps texture use Non-Color color space as it should be,
            # but for 16-bits PNG normal maps texture sRGB has to be used
            # otherwise Blender completely messes up normals calculation
            if texture.image.filepath.endswith(".tga"):
                texture.image.colorspace_settings.name = "Non-Color"
            else:
                texture.image.colorspace_settings.name = "sRGB"
        else:
            texture.image.colorspace_settings.name = "sRGB"

        # set usage of normal map if texture type is correct
        texture.use_normal_map = (texture_type == "nmap")

    # TOBJ resolved to a supported extension, but the file itself is missing on disk
    if texture is None and texture_path.endswith(".tobj"):
        if report_invalid:
            lprint("", report_warnings=-1, report_errors=-1)

        lprint(
            "W Texture can't be displayed as TOBJ file: %r is referencing non existing texture file:\n\t   %r",
            (texture_path, _path.readable_norm(abs_texture_filepath)))

        if report_invalid:
            lprint("", report_warnings=1, report_errors=1)

    return texture
Exemplo n.º 22
0
def batch_export(operator_instance,
                 init_obj_list,
                 name_suffix="",
                 menu_filepath=None):
    """This function calls other sorting functions and depending on the resulting output
    dictionary it exports all available 'SCS Game Objects' into specified locations.

    :param operator_instance: operator from within this function is called (used for report)
    :type operator_instance: bpy.types.Operator
    :param init_obj_list: initial object list which should be exported
    :type init_obj_list: tuple of Blender objects
    :param name_suffix: files name suffix (exchange format is using .ef)
    :type name_suffix: str
    :param menu_filepath: filepath used from menu export
    :type menu_filepath: str
    :return: {'FINISHED'} when export procedure went through; {'CANCELLED'} on invalid export path or nothing to export
    :rtype: set
    """

    lprint("", report_errors=-1, report_warnings=-1
           )  # Clear the 'error_messages' and 'warning_messages'
    game_objects_dict = _object_utils.sort_out_game_objects_for_export(
        init_obj_list)

    # exclude game objects that were manually omitted from export by property
    game_objects_dict = _object_utils.exclude_switched_off(game_objects_dict)

    if game_objects_dict:
        scs_game_objects_exported = []
        scs_game_objects_rejected = []

        # default export destination inside SCS Project Base Path
        global_filepath = _path_utils.get_global_export_path()

        for root_object in game_objects_dict:

            # reject roots whose name can not be used as a game file name
            if not _name_utils.is_valid_scs_root_object_name(root_object.name):
                lprint(
                    "E Rejecting Game Object with invalid SCS Root Object name: %r.\n\t   "
                    "Only a-z, A-Z, 0-9 and \"._-\" characters can be used." %
                    root_object.name)
                scs_game_objects_rejected.append("> \"" + root_object.name +
                                                 "\"")
                continue

            game_object_list = game_objects_dict[root_object]
            if len(game_object_list) == 0:
                lprint(
                    "E Rejecting empty Game Object with SCS Root Object name: %r\n\t   "
                    +
                    "Game Object has to have at least one mesh object or model locator!",
                    (root_object.name, ))
                scs_game_objects_rejected.append("> \"" + root_object.name +
                                                 "\"")
                continue

            # GET CUSTOM FILE PATH
            custom_filepath = _path_utils.get_custom_scs_root_export_path(
                root_object)

            # MAKE FINAL FILEPATH (priority: file browser path > root custom path > global path)
            if menu_filepath:
                filepath = _path_utils.readable_norm(menu_filepath)
                filepath_message = "Export path selected in file browser:\n\t   \"" + filepath + "\""
            elif custom_filepath:
                filepath = _path_utils.readable_norm(custom_filepath)
                filepath_message = "Custom export path used for \"" + root_object.name + "\" is:\n\t   \"" + filepath + "\""
            else:
                filepath = _path_utils.readable_norm(global_filepath)
                filepath_message = "Default export path used for \"" + root_object.name + "\":\n\t   \"" + filepath + "\""

            # export is allowed only into existing directories inside the SCS Project Base Path
            scs_project_path = _path_utils.readable_norm(
                _get_scs_globals().scs_project_path)
            if os.path.isdir(filepath) and _path_utils.startswith(
                    filepath, scs_project_path) and scs_project_path != "":

                # EXPORT ENTRY POINT
                export_success = pix.export(filepath, name_suffix, root_object,
                                            game_object_list)

                if export_success:
                    scs_game_objects_exported.append("> \"" +
                                                     root_object.name +
                                                     "\" exported to: '" +
                                                     filepath + "'")
                else:
                    scs_game_objects_rejected.append("> \"" +
                                                     root_object.name + "\"")

            else:
                if filepath:
                    message = (
                        "No valid export path found!\n\t   " +
                        "Export path does not exists or it's not inside SCS Project Base Path.\n\t   "
                        + "SCS Project Base Path:\n\t   \"" +
                        scs_project_path + "\"\n\t   " + filepath_message)
                else:
                    message = "No valid export path found! Please check 'SCS Project Base Path' first."
                lprint('E ' + message)
                operator_instance.report({'ERROR'},
                                         message.replace("\t", "").replace(
                                             "   ", ""))
                return {'CANCELLED'}

        # NOTE(review): lprint with report_* flags appears to return truthy when any
        # error/warning was printed, so success is reported only on a clean run — confirm
        if not lprint(
                "\nI Export procces completed, summaries are printed below!",
                report_errors=True,
                report_warnings=True):
            operator_instance.report(
                {'INFO'},
                "Export successfully completed, exported %s game object(s)!" %
                len(scs_game_objects_exported))
            bpy.ops.wm.scs_tools_show_3dview_report(
                'INVOKE_DEFAULT',
                abort=True)  # abort 3d view reporting operator

        if len(scs_game_objects_exported) > 0:
            message = "EXPORTED GAME OBJECTS (" + str(
                len(scs_game_objects_exported)
            ) + "):\n\t   " + "=" * 26 + "\n\t   "
            for scs_game_object_export_message in scs_game_objects_exported:
                message += scs_game_object_export_message + "\n\t   "
            message += "=" * 26
            lprint("I " + message)

        if len(scs_game_objects_rejected) > 0:
            message = "REJECTED GAME OBJECTS (" + str(
                len(scs_game_objects_rejected)
            ) + "):\n\t   " + "=" * 26 + "\n\t   "
            for scs_game_object_export_message in scs_game_objects_rejected:
                message += scs_game_object_export_message + "\n\t   "
            message += "=" * 26
            lprint("I " + message)

        # nothing was exported nor rejected -> there was no root object at all
        if len(scs_game_objects_exported) + len(
                scs_game_objects_rejected) == 0:
            message = "Nothing to export! Please setup at least one SCS Root Object."
            lprint('E ' + message)
            operator_instance.report({'ERROR'}, message)
            return {'CANCELLED'}
    else:
        message = "No Game Objects to export because:\n\t   " \
                  "1. Selection export is used and none of selected objects belongs to any SCS Game Object or\n\t   " \
                  "2. all of the SCS Root Objects were manually exluded from export or\n\t   " \
                  "3. there is no SCS Root Objects in the scene."
        lprint('E ' + message)
        operator_instance.report({'ERROR'}, message.replace("\n\t   ", "\n"))
        return {'CANCELLED'}

    return {'FINISHED'}
Exemplo n.º 23
0
def load(root_object, pia_files, armature, pis_filepath=None, bones=None):
    """Imports SCS animation (PIA) files, creates actions on given armature
    and registers resulting animations on given SCS Root Object.

    :param root_object: SCS Root Object the imported animations are added to
    :type root_object: bpy.types.Object
    :param pia_files: absolute file paths of PIA files to import
    :type pia_files: iterable of str
    :param armature: armature object the animation actions are bound to
    :type armature: bpy.types.Object
    :param pis_filepath: absolute path of already imported PIS skeleton file;
           if None a matching skeleton is searched per PIA file instead
    :type pis_filepath: str | None
    :param bones: bones data from PIS import (bone name -> (parent name, rest matrix));
           if None it gets loaded from the matched skeleton file
    :type bones: dict | None
    :return: number of successfully imported animations
    :rtype: int
    """
    scs_globals = _get_scs_globals()

    print("\n************************************")
    print("**      SCS PIA Importer          **")
    print("**      (c)2014 SCS Software      **")
    print("************************************\n")

    import_scale = scs_globals.import_scale
    ind = '    '
    imported_count = 0
    for pia_filepath in pia_files:
        # Check if PIA file is for the actual skeleton...
        if pis_filepath and bones:
            skeleton_match = _pix_container.fast_check_for_pia_skeleton(
                pia_filepath, pis_filepath)
        else:
            # no skeleton given: search for one referenced by the PIA file itself
            skeleton_match, pia_skeleton = _pix_container.utter_check_for_pia_skeleton(
                pia_filepath, armature)

            if skeleton_match:

                path = os.path.split(pia_filepath)[0]
                pia_skeleton = os.path.join(path, pia_skeleton)
                if os.path.isfile(pia_skeleton):
                    bones = _pis.load(pia_skeleton, armature, get_only=True)
                else:
                    lprint("\nE The filepath %r doesn't exist!",
                           (_path_utils.readable_norm(pia_skeleton), ))

            else:
                lprint(
                    str("E Animation doesn't match the skeleton. Animation won't be loaded!\n\t   "
                        "Animation file: %r"), (pia_filepath, ))

        if skeleton_match:
            lprint('I ++ "%s" IMPORTING animation data...',
                   (os.path.basename(pia_filepath), ))
            pia_container = _pix_container.get_data_from_file(
                pia_filepath, ind)
            if not pia_container:
                lprint('\nE File "%s" is empty!',
                       (_path_utils.readable_norm(pia_filepath), ))
                continue

            # TEST PRINTOUTS
            # ind = '  '
            # for section in pia_container:
            # print('SEC.: "%s"' % section.type)
            # for prop in section.props:
            # print('%sProp: %s' % (ind, prop))
            # for data in section.data:
            # print('%sdata: %s' % (ind, data))
            # for sec in section.sections:
            # print_section(sec, ind)
            # print('\nTEST - Source: "%s"' % pia_container[0].props[1][1])
            # print('')

            # TEST EXPORT
            # path, file = os.path.splitext(pia_filepath)
            # export_filepath = str(path + '_reex' + file)
            # result = pix_write.write_data(pia_container, export_filepath, ind)
            # if result == {'FINISHED'}:
            # Print(dump_level, '\nI Test export succesful! The new file:\n  "%s"', export_filepath)
            # else:
            # Print(dump_level, '\nE Test export failed! File:\n  "%s"', export_filepath)

            # LOAD HEADER
            format_version, source, f_type, animation_name, source_filename, author = _get_header(
                pia_container)
            # only version 3 "Animation" files are supported; silently skip others
            if format_version != 3 or f_type != "Animation":
                continue

            # LOAD GLOBALS
            skeleton, total_time, bone_channel_count, custom_channel_count = _get_globals(
                pia_container)

            # CREATE ANIMATION ACTIONS
            anim_action = bpy.data.actions.new(animation_name + "_action")
            anim_action.use_fake_user = True
            anim_data = armature.animation_data if armature.animation_data else armature.animation_data_create(
            )
            anim_data.action = anim_action

            # LOAD BONE CHANNELS
            bone_channels = _get_anim_channels(pia_container,
                                               section_name="BoneChannel")
            if len(bone_channels) > 0:

                for bone_name in bone_channels:

                    if bone_name in armature.data.bones:
                        '''
                        NOTE: skipped for now as no data needs to be readed
                        stream_count = bone_channels[bone_name][0]
                        keyframe_count = bone_channels[bone_name][1]
                        '''
                        streams = bone_channels[bone_name][2]

                        # CREATE ANIMATION GROUP
                        anim_group = anim_action.groups.new(bone_name)
                        armature.pose.bones[
                            bone_name].rotation_mode = 'XYZ'  # Set rotation mode.

                        # use pose bone scale set on PIS import
                        init_scale = Vector((1, 1, 1))
                        if _BONE_consts.init_scale_key in armature.pose.bones[
                                bone_name]:
                            init_scale = armature.pose.bones[bone_name][
                                _BONE_consts.init_scale_key]

                        # CREATE FCURVES
                        (pos_fcurves, rot_fcurves,
                         sca_fcurves) = _create_fcurves(anim_action,
                                                        anim_group,
                                                        str('pose.bones["' +
                                                            bone_name + '"]'),
                                                        rot_euler=True)

                        # GET BONE REST POSITION MATRIX
                        bone_rest_matrix_scs = bones[bone_name][1].transposed()
                        parent_bone_name = bones[bone_name][0]
                        if parent_bone_name in bones:
                            parent_bone_rest_matrix_scs = bones[
                                parent_bone_name][1].transposed()
                        else:
                            # root bone: parent rest is identity
                            parent_bone_rest_matrix_scs = Matrix()
                            parent_bone_rest_matrix_scs.identity()

                        for key_time_i, key_time in enumerate(streams[0]):
                            # keyframes are written 1-based
                            keyframe = key_time_i + 1

                            # GET BONE ANIMATION MATRIX
                            bone_animation_matrix_scs = streams[1][
                                key_time_i].transposed()

                            # CREATE DELTA MATRIX
                            delta_matrix = _get_delta_matrix(
                                bone_rest_matrix_scs,
                                parent_bone_rest_matrix_scs,
                                bone_animation_matrix_scs, import_scale)

                            # DECOMPOSE ANIMATION MATRIX
                            location, rotation, scale = delta_matrix.decompose(
                            )

                            # CALCULATE CURRENT SCALE - subtract difference between initial bone scale and current scale from 1
                            # NOTE: if imported PIS had initial bone scale different than 1,
                            # initial scale was saved into pose bones custom properties and
                            # has to be used here as bones after import in Blender always have scale of 1
                            scale = Vector((1 + scale[0] - init_scale[0],
                                            1 + scale[1] - init_scale[1],
                                            1 + scale[2] - init_scale[2]))

                            # NOTE: this scaling rotation switch came from UK variants which had scale -1
                            loc, rot, sca = bone_rest_matrix_scs.decompose()
                            if sca.y < 0:
                                rotation.y *= -1
                            if sca.z < 0:
                                rotation.z *= -1

                            rotation = rotation.to_euler('XYZ')

                            # BUILD TRANSFORMATION CURVES
                            for i in range(0, 3):
                                pos_fcurves[i].keyframe_points.insert(
                                    frame=float(keyframe),
                                    value=location[i],
                                    options={'FAST'})
                                rot_fcurves[i].keyframe_points.insert(
                                    frame=float(keyframe),
                                    value=rotation[i],
                                    options={'FAST'})
                                sca_fcurves[i].keyframe_points.insert(
                                    frame=float(keyframe),
                                    value=scale[i],
                                    options={'FAST'})

                        # SET LINEAR INTERPOLATION FOR ALL CURVES
                        color_mode = 'AUTO_RAINBOW'  # Or better 'AUTO_RGB'?
                        for curve in pos_fcurves:
                            curve.color_mode = color_mode
                            for keyframe in curve.keyframe_points:
                                keyframe.interpolation = 'LINEAR'
                        for curve in rot_fcurves:
                            curve.color_mode = color_mode
                            for keyframe in curve.keyframe_points:
                                keyframe.interpolation = 'LINEAR'
                        for curve in sca_fcurves:
                            curve.color_mode = color_mode
                            for keyframe in curve.keyframe_points:
                                keyframe.interpolation = 'LINEAR'

                        # euler filter avoids rotation flips between consecutive keyframes
                        for curve in rot_fcurves:
                            _animation_utils.apply_euler_filter(curve)

            # LOAD CUSTOM CHANNELS (ARMATURE OFFSET ANIMATION)
            custom_channels = _get_anim_channels(pia_container,
                                                 section_name="CustomChannel")
            if len(custom_channels) > 0:
                for channel_name in custom_channels:
                    # print(' >>> channel %r - %s' % (channel_name, str(custom_channels[channel_name])))
                    if channel_name == 'Prism Movement':
                        '''
                        NOTE: skipped for now as no data needs to be readed
                        stream_count = custom_channels[channel_name][0]
                        keyframe_count = custom_channels[channel_name][1]
                        '''
                        streams = custom_channels[channel_name][2]
                        # print('  channel %r - streams %s - keyframes %s' % (channel_name, stream_count, keyframe_count))

                        # CREATE ANIMATION GROUP
                        # anim_group = anim_action.groups.new(channel_name)
                        anim_group = anim_action.groups.new('Location')
                        # armature.[channel_name].rotation_mode = 'XYZ' ## Set rotation mode.
                        # active_bone = armature.data.bones[channel_name]
                        # parent_bone = active_bone.parent

                        # CREATE FCURVES
                        # pos_fcurves, rot_fcurves, sca_fcurves = _create_fcurves(anim_action, anim_group, anim_curve, rot_euler=True,
                        # types='LocRotSca')
                        # pos_fcurves, rot_fcurves, sca_fcurves = _create_fcurves(anim_action, anim_group, anim_curve, types='Loc')
                        fcurve_pos_x = anim_action.fcurves.new('location', 0)
                        fcurve_pos_y = anim_action.fcurves.new('location', 1)
                        fcurve_pos_z = anim_action.fcurves.new('location', 2)
                        fcurve_pos_x.group = anim_group
                        fcurve_pos_y.group = anim_group
                        fcurve_pos_z.group = anim_group
                        pos_fcurves = (fcurve_pos_x, fcurve_pos_y,
                                       fcurve_pos_z)

                        # stream stores per-keyframe offsets, accumulate them into absolute locations
                        location = None
                        for key_time_i, key_time in enumerate(streams[0]):
                            # print(' key_time: %s' % str(key_time[0]))
                            # keyframe = key_time_i * (key_time[0] * 10) ## TODO: Do proper timing...
                            keyframe = key_time_i + 1
                            scs_offset = _convert_utils.change_to_scs_xyz_coordinates(
                                custom_channels[channel_name][2][1]
                                [key_time_i], import_scale)
                            offset = Vector(scs_offset)
                            if location is None:
                                location = offset
                            else:
                                location = location + offset
                            # print(' > location: %s' % str(location))

                            # BUILD TRANSLATION CURVES
                            pos_fcurves[0].keyframe_points.insert(
                                frame=float(keyframe),
                                value=location[0],
                                options={'FAST'})
                            pos_fcurves[1].keyframe_points.insert(
                                frame=float(keyframe),
                                value=location[1],
                                options={'FAST'})
                            pos_fcurves[2].keyframe_points.insert(
                                frame=float(keyframe),
                                value=location[2],
                                options={'FAST'})

                        # SET LINEAR INTERPOLATION FOR ALL CURVES
                        for curve in pos_fcurves:
                            for keyframe in curve.keyframe_points:
                                keyframe.interpolation = 'LINEAR'
                    else:
                        lprint('W Unknown channel %r in "%s" file.',
                               (channel_name, os.path.basename(pia_filepath)))

            # CREATE SCS ANIMATION
            animation = _animation_utils.add_animation_to_root(
                root_object, animation_name)
            animation.export = True
            animation.action = anim_action.name
            animation.anim_start = anim_action.frame_range[0]
            animation.anim_end = anim_action.frame_range[1]

            if total_time:
                animation.length = total_time

                # WARNING PRINTOUTS
                # if piece_count < 0: Print(dump_level, '\nW More Pieces found than were declared!')
                # if piece_count > 0: Print(dump_level, '\nW Some Pieces not found, but were declared!')
                # if dump_level > 1: print('')

            imported_count += 1
        else:
            lprint('I    "%s" file REJECTED',
                   (os.path.basename(pia_filepath), ))

    # at the end of batch import make sure to select last animation always
    if imported_count > 0:
        root_object.scs_props.active_scs_animation = len(
            root_object.scs_object_animation_inventory) - 1

    print("************************************")
    return imported_count
Exemplo n.º 24
0
def get_texture(texture_path, texture_type, report_invalid=False):
    """Creates and setup Texture and Image data on active Material.

    :param texture_path: Texture path
    :type texture_path: str
    :param texture_type: Texture type keyword
    :type texture_type: str
    :param report_invalid: flag indicating if invalid texture should be reported in 3d view
    :type report_invalid: bool
    """

    # id name under which texture and image datablocks are searched/created
    teximag_id_name = _path.get_filename(texture_path, with_ext=False)

    abs_texture_filepath = _path.get_abs_path(texture_path)

    # bail out early when path can not be resolved to an existing file
    if not abs_texture_filepath or not os.path.isfile(abs_texture_filepath):
        return None

    if abs_texture_filepath.endswith(".tobj"):
        abs_texture_filepath = _path.get_texture_path_from_tobj(abs_texture_filepath)

        # TOBJ has to reference one of the supported texture formats
        unsupported = abs_texture_filepath is None or abs_texture_filepath[-4:] not in (".tga", ".png", ".dds")
        if unsupported:

            if report_invalid:
                lprint("", report_warnings=-1, report_errors=-1)

            # distinguish between wrong file type and completely missing reference
            if abs_texture_filepath:

                lprint("W Texture can't be displayed as TOBJ file: %r is referencing non texture file:\n\t   %r",
                       (texture_path, _path.readable_norm(abs_texture_filepath)))

            else:

                lprint("W Texture can't be displayed as TOBJ file: %r is referencing non existing texture file.",
                       (texture_path,))

            if report_invalid:
                lprint("", report_warnings=1, report_errors=1)

            return None

    texture = None
    if abs_texture_filepath and os.path.isfile(abs_texture_filepath):

        # try to reuse a texture datablock already pointing to the very same file
        if teximag_id_name in bpy.data.textures:

            suffix_i = 0
            candidate = teximag_id_name
            while candidate in bpy.data.textures:

                existing_tex = bpy.data.textures[candidate]
                if existing_tex.image is not None and _path.repair_path(existing_tex.image.filepath) == _path.repair_path(abs_texture_filepath):
                    texture = existing_tex
                    break

                suffix_i += 1
                candidate = teximag_id_name + "." + str(suffix_i).zfill(3)

        # nothing to reuse -> create a fresh texture datablock
        if not texture:

            texture = bpy.data.textures.new(teximag_id_name, 'IMAGE')

            # try to reuse an image datablock already loaded from the same file
            image = None
            suffix_i = 0
            candidate = teximag_id_name
            while candidate in bpy.data.images:

                if _path.repair_path(bpy.data.images[candidate].filepath) == _path.repair_path(abs_texture_filepath):
                    image = bpy.data.images[candidate]
                    break

                suffix_i += 1
                candidate = teximag_id_name + "." + str(suffix_i).zfill(3)

            # no matching image found -> load it from disk
            if image is None:
                image = bpy.data.images.load(abs_texture_filepath)
                image.name = teximag_id_name

                # prefer a path relative to the saved blend file, when one exists
                if bpy.data.filepath != '':  # empty file path means blender file is not saved
                    try:
                        rel_path = _path.relative_path(os.path.dirname(bpy.data.filepath), abs_texture_filepath)
                    except ValueError:  # catch different mount paths: "path is on mount 'C:', start on mount 'E:'"
                        rel_path = None

                    if rel_path:
                        image.filepath = rel_path

            # hook image up and enable its alpha usage
            texture.image = image
            image.use_alpha = True

        # color space: TGA normal maps use Non-Color as they should,
        # everything else (including 16-bits PNG normal maps) has to stay sRGB,
        # otherwise Blender completely messes up normals calculation
        is_nmap = (texture_type == "nmap")
        if is_nmap and texture.image.filepath.endswith(".tga"):
            texture.image.colorspace_settings.name = "Non-Color"
        else:
            texture.image.colorspace_settings.name = "sRGB"

        texture.use_normal_map = is_nmap

    # TOBJ resolved fine but the referenced file is missing from disk
    if texture is None and texture_path.endswith(".tobj"):
        if report_invalid:
            lprint("", report_warnings=-1, report_errors=-1)

        lprint("W Texture can't be displayed as TOBJ file: %r is referencing non existing texture file:\n\t   %r",
               (texture_path, _path.readable_norm(abs_texture_filepath)))

        if report_invalid:
            lprint("", report_warnings=1, report_errors=1)

    return texture
Exemplo n.º 25
0
def _log_game_objects_summary(title, entries):
    """Prints a framed summary list of per game object report lines via lprint.

    :param title: summary caption, e.g. "EXPORTED GAME OBJECTS"
    :type title: str
    :param entries: already formatted per game object report lines
    :type entries: list[str]
    """
    separator = "=" * 26
    header = title + " (" + str(len(entries)) + "):"
    # join produces: header, separator, all entries and closing separator,
    # each on its own indented line (matching lprint multi-line formatting)
    lprint("I " + "\n\t   ".join([header, separator] + entries + [separator]))


def batch_export(operator_instance, init_obj_list, menu_filepath=None):
    """This function calls other sorting functions and depending on the resulting output
    dictionary it exports all available 'SCS Game Objects' into specified locations.

    :param operator_instance: operator from within this function is called (used for report)
    :type operator_instance: bpy.types.Operator
    :param init_obj_list: initial object list which should be exported
    :type init_obj_list: tuple of Blender objects
    :param menu_filepath: filepath used from menu export
    :type menu_filepath: str
    :return: set with Blender operator result: {'FINISHED'} on success, {'CANCELLED'} otherwise
    :rtype: set[str]
    """

    lprint("", report_errors=-1, report_warnings=-1)  # Clear the 'error_messages' and 'warning_messages'
    game_objects_dict = _object_utils.sort_out_game_objects_for_export(init_obj_list)

    # exclude game objects that were manually omitted from export by property
    game_objects_dict = _object_utils.exclude_switched_off(game_objects_dict)

    if not game_objects_dict:
        message = "No 'SCS Root Object' present or all of them were manually excluded from export in their settings.\n\t   " \
                  "(For more information, please refer to 'SCS Blender Tools' documentation.)"
        lprint('E ' + message)
        operator_instance.report({'ERROR'}, message.replace("\n\t   ", "\n"))
        return {'CANCELLED'}

    scs_game_objects_exported = []
    scs_game_objects_rejected = []

    global_filepath = _path_utils.get_global_export_path()

    for root_object in game_objects_dict:

        # update root object location to invoke update tagging on it and
        # then update scene to make sure all children objects will have all transforms up to date
        # NOTE: needed because Blender doesn't update objects on invisible layers on it's own
        root_object.location = root_object.location
        for scene in bpy.data.scenes:
            scene.update()

        game_object_list = game_objects_dict[root_object]

        # GET CUSTOM FILE PATH
        custom_filepath = _path_utils.get_custom_scs_root_export_path(root_object)

        # MAKE FINAL FILEPATH
        # priority: file browser selection > per root custom path > global default path
        if menu_filepath:
            filepath = _path_utils.readable_norm(menu_filepath)
            filepath_message = "Export path selected in file browser:\n\t   \"" + filepath + "\""
        elif custom_filepath:
            filepath = _path_utils.readable_norm(custom_filepath)
            filepath_message = "Custom export path used for \"" + root_object.name + "\" is:\n\t   \"" + filepath + "\""
        else:
            filepath = _path_utils.readable_norm(global_filepath)
            filepath_message = "Default export path used for \"" + root_object.name + "\":\n\t   \"" + filepath + "\""

        # export path has to exist and has to be inside the SCS Project Base Path
        scs_project_path = _path_utils.readable_norm(_get_scs_globals().scs_project_path)
        if os.path.isdir(filepath) and _path_utils.startswith(filepath, scs_project_path) and scs_project_path != "":

            # EXPORT ENTRY POINT
            export_success = pix.export(filepath, root_object, game_object_list)

            if export_success:
                scs_game_objects_exported.append("> \"" + root_object.name + "\" exported to: '" + filepath + "'")
            else:
                scs_game_objects_rejected.append("> \"" + root_object.name + "\"")

        else:
            if filepath:
                message = (
                    "No valid export path found!\n\t   " +
                    "Export path does not exist or it's not inside SCS Project Base Path.\n\t   " +
                    "SCS Project Base Path:\n\t   \"" + scs_project_path + "\"\n\t   " +
                    filepath_message
                )
            else:
                message = "No valid export path found! Please check \"SCS Project Base Path\" first."
            lprint('E ' + message)
            operator_instance.report({'ERROR'}, message.replace("\t", "").replace("   ", ""))
            return {'CANCELLED'}

    # report success only if no errors and no warnings were logged during export
    if not lprint("\nI Export process completed, summaries are printed below!", report_errors=True, report_warnings=True):
        operator_instance.report({'INFO'}, "Export successfully completed, exported %s game object(s)!" % len(scs_game_objects_exported))
        bpy.ops.wm.show_3dview_report('INVOKE_DEFAULT', abort=True)  # abort 3d view reporting operator

    if scs_game_objects_exported:
        _log_game_objects_summary("EXPORTED GAME OBJECTS", scs_game_objects_exported)

    if scs_game_objects_rejected:
        _log_game_objects_summary("REJECTED GAME OBJECTS", scs_game_objects_rejected)

    if len(scs_game_objects_exported) + len(scs_game_objects_rejected) == 0:
        message = "Nothing to export! Please set at least one 'SCS Root Object'."
        lprint('E ' + message)
        operator_instance.report({'ERROR'}, message)
        return {'CANCELLED'}

    return {'FINISHED'}