Example #1
def test_ReadFileList_SimpleGlobPatternNonDynamic_Success(fake_waf_context, mock_globbing_files, tmpdir, input_waf_file_list, sample_files, dynamic_globbing, expected_results):
    
    # Arrange
    def _mock_is_option_true(option):
        assert option == 'enable_dynamic_file_globbing'
        return dynamic_globbing
    fake_waf_context.is_option_true = _mock_is_option_true

    # Act
    # Save and swap in the fake configuration context so read_file_list sees a
    # configure run; the finally block restores it even if the call raises
    old_config_context = Configure.ConfigurationContext
    try:
        Configure.ConfigurationContext = unit_test.FakeContext
        result = read_file_list(fake_waf_context, mock_globbing_files)
    finally:
        Configure.ConfigurationContext = old_config_context
    
    # Assert
    bintemp_path = fake_waf_context.bintemp_node.abspath()
    src_code_path = fake_waf_context.path
    expected_cached_waf_files = os.path.join(bintemp_path, src_code_path.name, mock_globbing_files)
    if not dynamic_globbing:
        assert os.path.isfile(expected_cached_waf_files)
        cached_waf_file_result = utils.parse_json_file(expected_cached_waf_files)
        assert cached_waf_file_result == expected_results
    else:
        assert not os.path.isfile(expected_cached_waf_files)
    
    assert result == expected_results
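The dynamic_globbing and expected_results arguments above are pytest fixtures. A minimal, hypothetical sketch of how dynamic_globbing could be parametrized (the real fixtures, including mock_globbing_files and sample_files, would live in the project's conftest.py):

import pytest

@pytest.fixture(params=[True, False])
def dynamic_globbing(request):
    # Drives the 'enable_dynamic_file_globbing' option the test toggles
    return request.param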
Example #2
def get_project_settings(ctx, project_setting_file):
    """
    Read, parse, and cache project settings files (by their absolute file path). The parsing is relaxed and will accept
    '#' as comment tags within the file.
    :param ctx:                     Context
    :param project_setting_file:    The absolute file path to the settings file
    :return: The parsed dictionary from the settings file
    """
    try:
        return ctx.project_settings[project_setting_file]
    except AttributeError:
        ctx.project_settings = {
            project_setting_file: utils.parse_json_file(project_setting_file, True)
        }
    except KeyError:
        ctx.project_settings[project_setting_file] = utils.parse_json_file(project_setting_file, True)
    return ctx.project_settings[project_setting_file]
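A minimal sketch of the lazy cache above (the path is illustrative): the first call parses the file and creates ctx.project_settings, the second returns the cached dictionary without touching utils.parse_json_file again.

settings = get_project_settings(ctx, '/abs/path/project_settings.json')  # parses and caches
settings = get_project_settings(ctx, '/abs/path/project_settings.json')  # served from cache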
Example #3
    def __init__(self, input: Path, checkpoint: Path, top_k: int,
                 category_names: Path, gpu: bool):
        self.image_path: Path = input
        self.checkpoint_path: Path = checkpoint
        self.top_k: int = top_k
        self.cat_name_map: Dict = parse_json_file(
            category_names) if category_names else None
        self.device = get_device(gpu)
        self.model = load_model(self.checkpoint_path)
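Hypothetical construction of this object; the enclosing class name is not shown in the excerpt, so Predictor and the paths below are placeholders:

predictor = Predictor(input=Path('flowers/test.jpg'),       # placeholder paths
                      checkpoint=Path('checkpoint.pth'),
                      top_k=5,
                      category_names=Path('cat_to_name.json'),
                      gpu=True)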
Example #4
def calculate_engine_path(ctx):
    """
    Determine the engine root path from SetupAssistantUserPreferences, if it exists

    :param ctx:     Context
    """
    def _make_engine_node_lst(node):
        engine_node_lst = []
        cur_node = node
        while cur_node.parent:
            # Replace spaces in path components (a no-op when none are present)
            engine_node_lst.append(cur_node.name.replace(' ', '_'))
            cur_node = cur_node.parent
        engine_node_lst.reverse()
        # For win32, remove the ':' if the node represents a drive letter
        if engine_node_lst and Utils.is_win32 and len(
                engine_node_lst[0]) == 2 and engine_node_lst[0].endswith(':'):
            engine_node_lst[0] = engine_node_lst[0][0]
        return engine_node_lst

    # Default the engine path, node, and version
    ctx.engine_node = ctx.path
    ctx.engine_path = ctx.path.abspath()
    ctx.engine_root_version = '0.0.0.0'
    ctx.engine_node_list = _make_engine_node_lst(ctx.engine_node)

    engine_json_file_path = ctx.path.make_node('engine.json').abspath()
    if os.path.exists(engine_json_file_path):
        engine_json = parse_json_file(engine_json_file_path)
        ctx.engine_root_version = engine_json.get('LumberyardVersion',
                                                  ctx.engine_root_version)
        if 'ExternalEnginePath' in engine_json:
            engine_root_abs = engine_json['ExternalEnginePath']
            if not os.path.exists(engine_root_abs):
                ctx.fatal(
                    '[ERROR] Invalid external engine path in engine.json: {}'.format(
                        engine_root_abs))
            if os.path.normcase(engine_root_abs) != os.path.normcase(
                    ctx.engine_path):
                ctx.engine_node = ctx.root.make_node(engine_root_abs)
                ctx.engine_path = engine_root_abs
                ctx.engine_node_list = _make_engine_node_lst(ctx.engine_node)

    if ctx.engine_path != ctx.path.abspath():
        last_build_engine_version_node = ctx.get_bintemp_folder_node(
        ).make_node(LAST_ENGINE_BUILD_VERSION_TAG_FILE)
        if os.path.exists(last_build_engine_version_node.abspath()):
            last_built_version = last_build_engine_version_node.read()
            if last_built_version != ctx.engine_root_version:
                Logs.warn(
                    '[WARN] The current engine version ({}) does not match the last version ({}) that this project was built against'
                    .format(ctx.engine_root_version, last_built_version))
                last_build_engine_version_node.write(ctx.engine_root_version)
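The engine.json consumed above has roughly this shape (a sketch showing only the two keys the function reads; both values are illustrative):

engine_json = {
    'LumberyardVersion': '1.28.0.0',            # version string, defaults to '0.0.0.0'
    'ExternalEnginePath': 'C:/lumberyard/dev',  # optional absolute path to an external engine
}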
Example #5
def restore_vs_version_from_cached_path(conf, vs_version, windows_kit, fingerprint, versions):
    """
    Attempt to restore the versions array (used for setting up the paths needed for visual studio) from the
    cached value in environment.json if possible.  If the cache flag is not set, the file does not exist, or the
    fingerprint has changed, then return False without restoring any cached value.

    :param conf:        Configuration Context
    :param vs_version:  The visual studio version the cache is being looked up for
    :param windows_kit: The windows kit version the cache is being looked up for
    :param fingerprint: The current input fingerprint to compare against any cached fingerprint if any
    :param versions:    The result array of version tuples to populate if a cached version is found
    :return: True if a cached version of versions is restored, False otherwise
    """

    try:
        if not getattr(conf, TOOL_CACHE_ATTR_USE_CACHE_FLAG, False):
            return False
        environment_json_path = os.path.join(conf.bldnode.abspath(), BINTEMP_CACHE_TOOLS, CACHED_TOOL_ENVIRONMENT_FILE)
        if not os.path.exists(environment_json_path):
            return False

        environment_json = parse_json_file(environment_json_path)
        if 'vs_compilers' not in environment_json:
            return False

        vs_compilers_node = environment_json.get('vs_compilers')
        # JSON object keys are strings, so normalize the key once and use it for
        # both the membership test and the lookup
        ver_winkit_key = str(_make_vsversion_winkit_key(vs_version, windows_kit))
        if ver_winkit_key not in vs_compilers_node:
            return False

        compiler_settings = vs_compilers_node.get(ver_winkit_key)
        previous_fingerprint = compiler_settings.get('fingerprint', '')
        if previous_fingerprint != fingerprint:
            return False

        cached_versions = compiler_settings.get('versions')
        for cached_version in cached_versions:
            versions.append(cached_version)

        # Mark the current vs_version as information that was read from a cache file (in case of error during tool detection)
        cache_read_dict = getattr(conf, TOOL_CACHE_ATTR_READ_DICTIONARY, {})
        cache_read_dict[vs_version] = True
        setattr(conf, TOOL_CACHE_ATTR_READ_DICTIONARY, cache_read_dict)

        return True

    except Exception as err:
        conf.warn_once('Unable to use visual studio environment cache.  Will run msvc tool detection scripts. ({})'.format(err))
        return False
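The cached environment.json read above has roughly this shape (a sketch inferred from the lookups; the key format produced by _make_vsversion_winkit_key and all values are illustrative):

environment_json = {
    'vs_compilers': {
        '15.0_10.0.17763.0': {                     # assumed vs_version/windows_kit key format
            'fingerprint': 'a1b2c3d4',             # illustrative fingerprint
            'versions': [['msvc 15.0', 'C:/VC']],  # illustrative version tuples
        },
    },
}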
Example #6
    def __init__(self, ctx, path, additional_aliases):

        self.path = path
        self.ctx = ctx

        # Parse the json input
        self.dict = utils.parse_json_file(path, True)

        # Read in any "includes" section to add a dependency
        self.included_settings = {}
        include_settings_files = self.dict.get('includes', [])
        for include_settings_file in include_settings_files:
            if include_settings_file in self.included_settings:
                continue
            include_settings = ctx.get_project_settings_file(include_settings_file, additional_aliases)
            self.included_settings[include_settings_file] = include_settings
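A sketch of a settings file handled by this constructor (only the 'includes' key is read explicitly here; the include path is illustrative):

settings_json = {
    'includes': ['Gems/common_settings.json'],  # each entry becomes an included_settings dependency
    # ... all remaining keys stay available through self.dict ...
}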
Example #7
def store_vs_version_to_cache(conf, vs_version, windows_kit, fingerprint, versions):
    """
    Store the version tuples for a visual studio environment to the environment.json file

    :param conf:        Configuration Context
    :param vs_version:  The visual studio version the cache is being stored for
    :param windows_kit: The windows kit value to store to the cache
    :param fingerprint: The current input fingerprint to store alongside the cached versions
    :param versions:    The version tuples to store to the cache
    """
    try:
        cache_path = os.path.join(conf.bldnode.abspath(), BINTEMP_CACHE_TOOLS)
        if not os.path.isdir(cache_path):
            os.makedirs(cache_path)

        environment_json_path = os.path.join(cache_path, CACHED_TOOL_ENVIRONMENT_FILE)
        if os.path.exists(environment_json_path):
            environment_json = parse_json_file(environment_json_path)
        else:
            environment_json = {}

        vs_compilers = environment_json.setdefault('vs_compilers', {})

        # Normalize the key to a string to match the lookup in the restore path
        ver_winkit_key = str(_make_vsversion_winkit_key(vs_version, windows_kit))
        vs_compiler_setting = vs_compilers.setdefault(ver_winkit_key, {})

        vs_compiler_setting['fingerprint'] = fingerprint
        vs_compiler_setting['versions'] = versions

        write_json_file(environment_json, environment_json_path)

    except Exception as err:
        conf.warn_once('Unable to use visual studio environment cache.  Will run msvc tool detection scripts. ({})'.format(err))
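A hedged round-trip sketch of the two cache functions together, assuming the tool-cache flag and constants are set up: a configure run stores the detected versions, and a later run with an unchanged fingerprint restores them instead of re-running msvc detection (the version, kit, and tuples are illustrative):

detected = [('msvc 15.0', 'C:/VC')]  # illustrative detection result
store_vs_version_to_cache(conf, '15.0', '10.0.17763.0', 'fp1', detected)

restored = []
if restore_vs_version_from_cached_path(conf, '15.0', '10.0.17763.0', 'fp1', restored):
    pass  # restored now mirrors detected and tool detection can be skipped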
Example #8
def read_file_list(bld, file):
    """
    Read and process a file list file (.waf_file), managing duplicate files and expanding any globbing patterns to
    prepare the list for ingestion by the project

    :param bld:     The build context
    :param file:    The .waf_file file list to process
    :return:        The processed file list
    """

    if not os.path.isfile(os.path.join(bld.path.abspath(), file)):
        raise Errors.WafError(
            "Invalid waf file list file: {}.  File not found.".format(file))

    # Manage duplicate files and glob hits.  glob_hits is held in a one-element
    # list so the nested _process_glob_entry helper can mutate it (closures
    # cannot rebind outer locals in Python 2)
    dup_set = set()
    glob_hits = [0]

    waf_file_node = bld.path.make_node(file)
    waf_file_node_abs = waf_file_node.abspath()
    base_path_abs = waf_file_node.parent.abspath()

    if not os.path.exists(waf_file_node_abs):
        raise Errors.WafError(
            'Invalid WAF file list: {}'.format(waf_file_node_abs))

    def _invalid_alias_callback(alias_key):
        error_message = "Invalid alias '{}' specified in {}".format(
            alias_key, file)
        raise Errors.WafError(error_message)

    def _alias_not_enabled_callback(alias_key, roles):
        required_checks = utils.convert_roles_to_setup_assistant_description(
            roles)
        error_message = "3rd Party alias '{}' specified in {} is not enabled. Make sure that at least one of the " \
                        "following items are checked in SetupAssistant: [{}]".format(alias_key, file, ', '.join(required_checks))
        raise Errors.WafError(error_message)

    def _determine_vs_filter(input_rel_folder_path, input_filter_name,
                             input_filter_pattern):
        """
        Calculate the vs filter based on the resulting relative path, the input filter name,
        and the pattern used to derive the input relative path
        """
        vs_filter = input_filter_name

        if len(input_rel_folder_path) > 0:
            # If the resulting relative path has a subfolder, base the filter on the following conditions

            if input_filter_name.lower() == 'root':
                # This is the root folder, use the relative folder subpath as the filter
                vs_filter = input_rel_folder_path
            else:
                # This is a named filter, the filter will place all results under this filter
                pattern_dirname = os.path.dirname(input_filter_pattern)
                if len(pattern_dirname) > 0:
                    if input_rel_folder_path != pattern_dirname:
                        # Strip out the base of the filter name
                        vs_filter = input_filter_name + '/' + input_rel_folder_path.replace(
                            pattern_dirname, '')
                    else:
                        vs_filter = input_filter_name
                else:
                    vs_filter = input_filter_name + '/' + input_rel_folder_path

        return vs_filter

    def _process_glob_entry(glob_content, filter_name, current_uber_dict):
        """
        Process a glob content from the input file list
        """
        if 'pattern' not in glob_content:
            raise Errors.WafError(
                'Missing keyword "pattern" from the glob entry')

        original_pattern = glob_content.pop('pattern').replace('\\', '/')
        if original_pattern.startswith('@'):

            ALIAS_PATTERN = re.compile('@.*@')
            alias_match = ALIAS_PATTERN.search(original_pattern)
            if alias_match:
                alias = alias_match.group(0)[1:-1]
                pattern = original_pattern[len(alias) + 2:]
                if alias == 'ENGINE':
                    search_node = bld.path
                else:
                    search_node = bld.root.make_node(bld.ThirdPartyPath(alias))
            else:
                pattern = original_pattern
                search_node = waf_file_node.parent
        else:
            pattern = original_pattern
            search_node = waf_file_node.parent

        while pattern.startswith('../'):
            pattern = pattern[3:]
            search_node = search_node.parent

        glob_results = search_node.ant_glob(pattern, **glob_content)
        # Count every glob hit so the warning threshold check below has data
        glob_hits[0] += len(glob_results)

        for globbed_file in glob_results:

            rel_path = globbed_file.path_from(waf_file_node.parent).replace(
                '\\', '/')
            abs_path = globbed_file.abspath().replace('\\', '/')
            rel_folder_path = os.path.dirname(rel_path)

            vs_filter = _determine_vs_filter(rel_folder_path, filter_name,
                                             original_pattern)

            if vs_filter not in current_uber_dict:
                current_uber_dict[vs_filter] = []
            if abs_path in dup_set:
                Logs.warn(
                    "[WARN] File '{}' specified by the pattern '{}' in waf file '{}' is a duplicate.  It will be ignored"
                    .format(abs_path, original_pattern, waf_file_node_abs))
            else:
                current_uber_dict[vs_filter].append(rel_path)
                dup_set.add(abs_path)

    def _clear_empty_uber_dict(current_uber_dict):
        """
        Perform housecleaning in case glob pattern overrides move all files out of a 'root' group.
        """
        empty_filters = []
        for filter_name, filter_contents in current_uber_dict.items():
            if len(filter_contents) == 0:
                empty_filters.append(filter_name)
        for empty_filter in empty_filters:
            current_uber_dict.pop(empty_filter)
        return current_uber_dict

    def _process_uber_dict(uber_section, uber_dict):
        """
        Process each uber dictionary value
        """
        processed_uber_dict = {}

        for filter_name, filter_contents in uber_dict.items():
            for filter_content in filter_contents:

                if isinstance(filter_content, str):

                    if '*' in filter_content or '?' in filter_content:
                        # If this is a raw glob pattern, stuff it into the expected glob dictionary
                        _process_glob_entry(dict(pattern=filter_content),
                                            filter_name, processed_uber_dict)
                    else:
                        # This is a straight up file reference.
                        # Do any processing on an aliased reference
                        if filter_content.startswith('@'):
                            processed_path = bld.PreprocessFilePath(
                                filter_content, _invalid_alias_callback,
                                _alias_not_enabled_callback)
                        else:
                            processed_path = os.path.normpath(
                                os.path.join(base_path_abs, filter_content))

                        if not os.path.exists(processed_path):
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' does not exist.  It will be ignored"
                                .format(processed_path, waf_file_node_abs))
                        elif not os.path.isfile(processed_path):
                            Logs.warn(
                                "[WARN] Path '{}' specified in '{}' is a folder, only files or glob patterns are "
                                "allowed.  It will be ignored".format(
                                    processed_path, waf_file_node_abs))
                        elif processed_path in dup_set:
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' is a duplicate.  It will be ignored"
                                .format(processed_path, waf_file_node_abs))
                        else:
                            if filter_name not in processed_uber_dict:
                                processed_uber_dict[filter_name] = []
                            processed_uber_dict[filter_name].append(
                                processed_path)
                            dup_set.add(processed_path)

                elif isinstance(filter_content, dict):
                    # Dictionaries automatically go through the glob pattern processing
                    _process_glob_entry(filter_content, filter_name,
                                        processed_uber_dict)
                else:
                    raise Errors.WafError(
                        "Invalid entry '{}' in file '{}', section '{}/{}'".
                        format(filter_content, file, uber_section,
                               filter_name))

        return _clear_empty_uber_dict(processed_uber_dict)

    def _get_cached_file_list():
        """
        Calculate the location of the cached waf_files path
        """
        bld_node = file_node.get_bld()
        return bld_node.abspath()

    file_node = bld.path.make_node(file)

    if not bld.is_option_true('enable_dynamic_file_globbing'):
        # Unless this is a configuration context (where we want to always calculate any potential glob patterns in the
        # waf_file list) check if the file list exists from any previous waf configure.  If the waf_files had changed
        # in between builds, auto-configure will pick up that change and force a re-write of the waf_files list
        processed_waf_files_path = _get_cached_file_list()
        if os.path.exists(processed_waf_files_path) and not isinstance(
                bld, Configure.ConfigurationContext):
            processed_file_list = utils.parse_json_file(
                processed_waf_files_path)
            return processed_file_list

    # Read the source waf_file list
    source_file_list = bld.parse_json_file(file_node)

    # Prepare a processed waf_file list
    processed_file_list = {}

    for uber_file_entry, uber_file_dict in source_file_list.items():
        processed_file_list[uber_file_entry] = _process_uber_dict(
            uber_file_entry, uber_file_dict)

    if glob_hits[0] > WAF_FILE_GLOB_WARNING_THRESHOLD:
        Logs.warn(
            '[WARN] Source file globbing for waf file {} resulted in over {} files.  If this is expected, '
            'consider increasing the warning limit value WAF_FILE_GLOB_WARNING_THRESHOLD in waf_branch_spec.py'
            .format(file_node.abspath(), WAF_FILE_GLOB_WARNING_THRESHOLD))

    if not bld.is_option_true('enable_dynamic_file_globbing') and isinstance(
            bld, Configure.ConfigurationContext):
        # If dynamic file globbing is off, then store the cached file list during every configure command
        processed_waf_files_path = _get_cached_file_list()
        processed_waf_files_dir = os.path.dirname(processed_waf_files_path)
        if not os.path.exists(processed_waf_files_dir):
            os.makedirs(processed_waf_files_dir)
        utils.write_json_file(processed_file_list, processed_waf_files_path)

    return processed_file_list
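A sketch of the .waf_file structure this function consumes, inferred from _process_uber_dict above: uber section -> filter name -> a list of plain file references, raw glob patterns, or explicit glob dictionaries (extra keys in a glob dictionary are forwarded to ant_glob). The paths are illustrative:

waf_files = {
    'none': {                                  # uber file section
        'Source': [                            # visual studio filter name
            'source/Editor.cpp',               # plain file reference
            'source/*.h',                      # raw glob pattern
            {'pattern': 'tests/**/*.cpp',      # explicit glob entry
             'excl': ['tests/disabled/**']},   # extra kwargs passed to ant_glob
        ],
    },
}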
Example #9
    def parse_json_file(self, node):
        return utils.parse_json_file(node.abspath())