def load_win_x64_win_x64_vs2015_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all win_x64_win_x64 configurations

    :param conf: Configuration context
    """
    v = conf.env
    global PLATFORM

    # Add defines to indicate a win64 build
    v['DEFINES'] += ['_WIN32', '_WIN64', 'NOMINMAX']

    # Make sure this is a supported platform
    if PLATFORM not in conf.get_supported_platforms():
        return

    # Attempt to detect the C++ compiler for VS 2015 ( msvs version 14.0 )
    windows_kit = conf.options.win_vs2015_winkit
    try:
        conf.auto_detect_msvc_compiler('msvc 14.0', 'x64', windows_kit)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed
        Logs.warn('Unable to find Visual Studio 2015 C++ compiler and/or Windows Kit {}, removing build target'.format(windows_kit))
        conf.mark_supported_platform_for_removal(PLATFORM)
        return

    # Detect the QT binaries
    conf.find_qt5_binaries(PLATFORM)

    # Introduce the linker to generate 64 bit code
    v['LINKFLAGS'] += ['/MACHINE:X64']
    v['ARFLAGS'] += ['/MACHINE:X64']

    VS2015_FLAGS = [
        '/FS',    # Fix for issue writing to pdb files
        '/Wv:18'  # Stick with 2013 warnings for the time being...
    ]

    v['CFLAGS'] += VS2015_FLAGS
    v['CXXFLAGS'] += VS2015_FLAGS

    if conf.options.use_uber_files:
        # Uber (unity) files can exceed the default COFF section limit
        v['CFLAGS'] += ['/bigobj']
        v['CXXFLAGS'] += ['/bigobj']

    azcg_dir = conf.Path('Tools/AzCodeGenerator/bin/vc140')
    if not os.path.exists(azcg_dir):
        conf.fatal('Unable to locate the AzCodeGenerator subfolder. Make sure that you have VS2015 AzCodeGenerator binaries available')
    v['CODE_GENERATOR_PATH'] = [azcg_dir]

    crcfix_dir = conf.Path('Tools/crcfix/bin/vc140')
    if not os.path.exists(crcfix_dir):
        Logs.warn('Unable to locate the crcfix subfolder. Make sure that you have VS2015 crcfix binaries available')
    v['CRCFIX_PATH'] = [crcfix_dir]

    conf.find_dx12(windows_kit)
def load_win_x64_win_x64_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all win_x64_win_x64 configurations

    :param conf: Configuration context
    """
    v = conf.env

    # Add defines to indicate a win64 build
    v['DEFINES'] += ['_WIN32', '_WIN64', 'NOMINMAX']

    # currently only the Visual Studio 2013 tool chain can build the win x64 target platform
    # Once we can compile across multiple ranges, enable the min and max versions properly
    try:
        conf.auto_detect_msvc_compiler('msvc 12.0', 'x64')
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed
        if 'win_x64' in conf.get_supported_platforms():
            Logs.warn('Unable to find Visual Studio 2013 for win_x64, removing build target')
            conf.mark_supported_platform_for_removal('win_x64')
        return

    # Introduce the linker to generate 64 bit code
    v['LINKFLAGS'] += ['/MACHINE:X64']
    v['ARFLAGS'] += ['/MACHINE:X64']

    VS2013_FLAGS = [
        '/FS'  # Fix for issue writing to pdb files
    ]

    v['CFLAGS'] += VS2013_FLAGS
    v['CXXFLAGS'] += VS2013_FLAGS

    if conf.options.use_uber_files:
        # Uber (unity) files can exceed the default COFF section limit
        v['CFLAGS'] += ['/bigobj']
        v['CXXFLAGS'] += ['/bigobj']
def detect_durango_sdk(required_durango_edition, required_durango_edition_name):
    """
    Attempt to detect the durango sdk folder, xdk folder based on a required durango edition.
    If the specific edition cannot be found, a system error is raised

    :param required_durango_edition:        The durango edition (string)
    :param required_durango_edition_name:   The durango edition name for error reporting
    :return: The xdk and sdk folder tuple
    """
    if not winreg_available:
        raise SystemError('Windows registry is not supported on this platform')

    # Cache hit: this edition was already resolved during this configure run
    cache_key = 'durango_xdk_{}'.format(required_durango_edition)
    if cache_key in cached_folders:
        return cached_folders[cache_key]

    # try to read the path from the registry
    try:
        durango_sdk_folder_entry = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
                                                   "Software\\Wow6432Node\\Microsoft\\Durango XDK",
                                                   0,
                                                   _winreg.KEY_READ)
        (durango_sdk_dir, reg_type) = _winreg.QueryValueEx(durango_sdk_folder_entry, 'InstallPath')
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed
        raise SystemError('Unable to read xdk installation folder from the registry')

    durango_sdk_dir = durango_sdk_dir.encode('ascii')  # Make asci string (as we get unicode)
    # get rid of trailing slash
    durango_sdk_dir = durango_sdk_dir[0:-1]

    (durango_edition, reg_type) = _winreg.QueryValueEx(durango_sdk_folder_entry, 'Latest')
    durango_edition = durango_edition.encode('ascii')  # Make asci string (as we get unicode)

    # make sure this is the correct xdk for the code
    if required_durango_edition not in durango_edition:
        Logs.debug('lumberyard: Durango Latest XDK Found is: {}. Looking for {}.'.format(durango_edition, required_durango_edition))
        durango_edition = required_durango_edition
        if not os.path.exists(durango_sdk_dir + '\\' + durango_edition + '\\xdk'):
            # BUGFIX: the original guarded this raise with
            # "if 'durango' in conf.get_supported_platforms():", but 'conf' is not in
            # scope in this function and would have raised a NameError instead of the
            # intended SystemError.  Raise the intended error directly.
            raise SystemError("Durango XDK {0} ({1}) not found! Version {0} ({1}) "
                              "is the latest offically supported xdk".format(required_durango_edition,
                                                                             required_durango_edition_name))

    durango_xdk_dir = durango_sdk_dir + '\\' + durango_edition + '\\xdk'
    cached_folders[cache_key] = (durango_xdk_dir, durango_sdk_dir)
    return durango_xdk_dir, durango_sdk_dir
def load_win_x64_win_x64_vs2013_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all win_x64_win_x64 configurations

    :param conf: Configuration context
    """
    v = conf.env
    global PLATFORM

    # Add defines to indicate a win64 build
    v['DEFINES'] += ['_WIN32', '_WIN64', 'NOMINMAX']

    # Make sure this is a supported platform
    if PLATFORM not in conf.get_supported_platforms():
        return

    # Attempt to detect the C++ compiler for VS 2013 ( msvs version 12.0 )
    try:
        conf.auto_detect_msvc_compiler('msvc 12.0', 'x64')
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed
        Logs.warn('Unable to detect find the C++ compiler for Visual Studio 2013, removing build target')
        conf.mark_supported_platform_for_removal(PLATFORM)
        return

    # Detect the QT binaries
    conf.find_qt5_binaries(PLATFORM)

    # Introduce the linker to generate 64 bit code
    v['LINKFLAGS'] += ['/MACHINE:X64']
    v['ARFLAGS'] += ['/MACHINE:X64']

    VS2013_FLAGS = [
        '/FS',  # Fix for issue writing to pdb files
        '/Zo'   # Enhanced optimized debugging (increases pdb size but has no effect on code size and improves debugging) - this is enabled by default for vs2015.
    ]

    v['CFLAGS'] += VS2013_FLAGS
    v['CXXFLAGS'] += VS2013_FLAGS

    if conf.options.use_uber_files:
        # Uber (unity) files can exceed the default COFF section limit
        v['CFLAGS'] += ['/bigobj']
        v['CXXFLAGS'] += ['/bigobj']

    azcg_dir = conf.srcnode.make_node('Tools/AzCodeGenerator/bin/vc120').abspath()
    if not os.path.exists(azcg_dir):
        conf.fatal('Unable to locate the AzCodeGenerator subfolder. Make sure that you have VS2013 AzCodeGenerator binaries available')
    v['CODE_GENERATOR_PATH'] = [azcg_dir]

    crcfix_dir = conf.srcnode.make_node('Tools/crcfix/bin/vc120').abspath()
    if not os.path.exists(crcfix_dir):
        Logs.warn('Unable to locate the crcfix subfolder. Make sure that you have VS2013 crcfix binaries available')
    v['CRCFIX_PATH'] = [crcfix_dir]
def load_win_x64_win_x64_vs2015_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all win_x64_win_x64 configurations

    :param conf: Configuration context
    """
    v = conf.env
    global PLATFORM

    # Add defines to indicate a win64 build
    v['DEFINES'] += ['_WIN32', '_WIN64', 'NOMINMAX']

    restricted_tool_list_macro_header = 'AZ_TOOLS_EXPAND_FOR_RESTRICTED_PLATFORMS='
    restricted_tool_list_macro = restricted_tool_list_macro_header

    # NOTE(review): restricted platforms would normally be appended to the macro here;
    # as written this condition can never be true since the macro equals its header.
    if len(restricted_tool_list_macro) > len(restricted_tool_list_macro_header):
        v['DEFINES'] += [restricted_tool_list_macro]

    # Make sure this is a supported platform
    if PLATFORM not in conf.get_supported_platforms():
        return

    # Attempt to detect the C++ compiler for VS 2015 ( msvs version 14.0 )
    windows_kit = conf.options.win_vs2015_winkit
    try:
        conf.auto_detect_msvc_compiler('msvc 14.0', 'x64', windows_kit)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed
        Logs.warn('Unable to find Visual Studio 2015 C++ compiler and/or Windows Kit {}, removing build target'.format(windows_kit))
        conf.mark_supported_platform_for_removal(PLATFORM)
        return

    # Detect the QT binaries, if the current capabilities selected requires it.
    _, enabled, _, _ = conf.tp.get_third_party_path(PLATFORM, 'qt')
    if enabled:
        conf.find_qt5_binaries(PLATFORM)

    # Introduce the linker to generate 64 bit code
    v['LINKFLAGS'] += ['/MACHINE:X64']
    v['ARFLAGS'] += ['/MACHINE:X64']

    VS2015_FLAGS = [
        '/FS'  # Fix for issue writing to pdb files
    ]

    v['CFLAGS'] += VS2015_FLAGS
    v['CXXFLAGS'] += VS2015_FLAGS

    if conf.options.use_uber_files:
        # Uber (unity) files can exceed the default COFF section limit
        v['CFLAGS'] += ['/bigobj']
        v['CXXFLAGS'] += ['/bigobj']

    crcfix_dir = conf.Path('Tools/crcfix/bin/vc140')
    if not os.path.exists(crcfix_dir):
        Logs.warn('Unable to locate the crcfix subfolder. Make sure that you have VS2015 crcfix binaries available')
    v['CRCFIX_PATH'] = [crcfix_dir]

    conf.find_dx12(windows_kit)
def load_android_armv8_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all android armv8 configurations

    :param conf: Configuration context
    """
    # Drop the armv8 android build target when it does not meet the min API
    # requirement; letting the platform finish configuring is harmless.
    api_is_valid = conf.is_android_armv8_api_valid()
    if not api_is_valid and 'android_armv8_clang' in conf.get_supported_platforms():
        Logs.warn('[WARN] Attempting to configure Android ARMv8 with an API that is lower than the min spec: API 21. Disabling the Android ARMv8 build target.')
        conf.remove_platform_from_available_platforms('android_armv8_clang')

    environment = conf.env
    environment['ANDROID_ARCH'] = 'arm64-v8a'

    ndk_home = environment['ANDROID_NDK_HOME']
    ndk_major_rev = environment['ANDROID_NDK_REV_MAJOR']

    append_to_unique_list(environment['DEFINES'], ['LINUX64', '__ARM_NEON__'])

    # NDK r19+ has a unified toolchain; older revisions require the sysroot
    # for compiling and linking to be wired up explicitly.
    if ndk_major_rev < 19:
        compile_sysroot = os.path.join(ndk_home, 'sysroot')
        link_sysroot = os.path.join(ndk_home, 'platforms', environment['ANDROID_NDK_PLATFORM'], 'arch-arm64')

        environment['INCLUDES'] += [os.path.join(compile_sysroot, 'usr', 'include')]

        shared_compile_flags = [
            '--sysroot={}'.format(compile_sysroot),
            '-isystem', os.path.join(compile_sysroot, 'usr', 'include', 'aarch64-linux-android'),
        ]
        environment['CFLAGS'] += list(shared_compile_flags)
        environment['CXXFLAGS'] += list(shared_compile_flags)

        environment['LIBPATH'] += [os.path.join(link_sysroot, 'usr', 'lib')]
        environment['LINKFLAGS'] += ['--sysroot={}'.format(link_sysroot)]
def load_win_x64_win_x64_vs2010_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all win_x64_win_x64_vs2010 configurations

    :param conf: Configuration context
    """
    try:
        conf.auto_detect_msvc_compiler('msvc 10.0', 'x64')
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed
        if 'win_x64_vs2010' in conf.get_supported_platforms():
            Logs.warn('Unable to find Visual Studio 2010, removing build target')
            conf.mark_supported_platform_for_removal('win_x64_vs2010')
        return

    v = conf.env

    # Add defines to indicate a win64 build
    v['DEFINES'] += ['_WIN32', '_WIN64', 'NOMINMAX']

    # Introduce the linker to generate 64 bit code
    v['LINKFLAGS'] += ['/MACHINE:X64']
    v['ARFLAGS'] += ['/MACHINE:X64']
def process_custom_configure_commands(conf):
    """
    Add any additional custom commands that need to be run during the configure phase

    :param conf: Configuration context
    """
    host = Utils.unversioned_sys_platform()

    if host == 'win32':
        # Win32 platform optional commands
        # Generate the visual studio projects & solution if specified
        if conf.is_option_true('generate_vs_projects_automatically'):
            Options.commands.insert(0, 'msvs')

    elif host == 'darwin':
        # Darwin/Mac platform optional commands

        # Create Xcode-iOS-Projects automatically during configure when running on mac
        if conf.is_option_true('generate_ios_projects_automatically'):
            # Workflow improvement: for all builds generate projects after the build
            # except when using the default build target 'utilities' then do it before
            if 'build' in Options.commands:
                build_cmd_idx = Options.commands.index('build')
                Options.commands.insert(build_cmd_idx, 'xcode_ios')
            else:
                Options.commands.append('xcode_ios')

        # Create Xcode-AppleTV-Projects automatically during configure when running on mac
        if conf.is_option_true('generate_appletv_projects_automatically'):
            # Workflow improvement: for all builds generate projects after the build
            # except when using the default build target 'utilities' then do it before
            if 'build' in Options.commands:
                build_cmd_idx = Options.commands.index('build')
                Options.commands.insert(build_cmd_idx, 'xcode_appletv')
            else:
                Options.commands.append('xcode_appletv')

        # Create Xcode-darwin-Projects automatically during configure when running on mac
        if conf.is_option_true('generate_mac_projects_automatically'):
            # Workflow improvement: for all builds generate projects after the build
            # except when using the default build target 'utilities' then do it before
            if 'build' in Options.commands:
                build_cmd_idx = Options.commands.index('build')
                Options.commands.insert(build_cmd_idx, 'xcode_mac')
            else:
                Options.commands.append('xcode_mac')

    # Android target platform commands
    if any(platform for platform in conf.get_supported_platforms() if 'android' in platform):

        # this is required for building any android projects. It adds the Android launchers
        # to the list of build directories
        android_builder_func = getattr(conf, 'create_and_add_android_launchers_to_build', None)
        if android_builder_func is not None and android_builder_func():
            SUBFOLDERS.append(conf.get_android_project_relative_path())

        # rebuild the project if invoked from android studio or specifically requested to do so
        if conf.options.from_android_studio or conf.is_option_true('generate_android_studio_projects_automatically'):
            if 'build' in Options.commands:
                build_cmd_idx = Options.commands.index('build')
                Options.commands.insert(build_cmd_idx, 'android_studio')
            else:
                Options.commands.append('android_studio')

        # generate the android_descriptor.h header consumed by the AndroidLauncher
        def _indent_text(indent_level, text, *args):
            # Format 'text % args' prefixed by 4 spaces per indent level
            indent_space = ' ' * indent_level * 4
            return str.format('{}{}', indent_space, text % args)

        # Index-aligned with the AllocationRecords::Mode enum values stored in Game.xml
        recordingMode = [
            'AZ::Debug::AllocationRecords::Mode::RECORD_NO_RECORDS',
            'AZ::Debug::AllocationRecords::Mode::RECORD_STACK_NEVER',
            'AZ::Debug::AllocationRecords::Mode::RECORD_STACK_IF_NO_FILE_LINE',
            'AZ::Debug::AllocationRecords::Mode::RECORD_FULL',
            'AZ::Debug::AllocationRecords::Mode::RECORD_MAX',
        ]

        outputString = ""
        outputString += "////////////////////////////////////////////////////////////////\n"
        outputString += "// This file was automatically created by WAF\n"
        outputString += "// WARNING! All modifications will be lost!\n"
        outputString += "////////////////////////////////////////////////////////////////\n\n"
        outputString += "void SetupAndroidDescriptor(const char* gameName, AZ::ComponentApplication::Descriptor &desc)\n{\n"

        for project in conf.get_enabled_game_project_list():
            targetFile = os.path.join(conf.path.abspath(), project, "Config", "Game.xml")
            tree = ET.parse(targetFile)
            root = tree.getroot()
            descriptor = root[0]

            def _field_value(field_name):
                # Read the 'value' attribute of the first descriptor child tagged with the field name.
                # Raises IndexError if the field is missing, matching the original fail-fast behavior.
                return descriptor.findall("*[@field='{}']".format(field_name))[0].get("value")

            outputString += _indent_text(1, "if(stricmp(gameName, \"%s\") == 0)\n", project)
            outputString += _indent_text(1, "{\n")
            outputString += _indent_text(2, "desc.m_useExistingAllocator = %s;\n", _field_value('useExistingAllocator'))
            outputString += _indent_text(2, "desc.m_grabAllMemory = %s;\n", _field_value('grabAllMemory'))
            outputString += _indent_text(2, "desc.m_allocationRecords = %s;\n", _field_value('allocationRecords'))
            outputString += _indent_text(2, "desc.m_autoIntegrityCheck = %s;\n", _field_value('autoIntegrityCheck'))
            outputString += _indent_text(2, "desc.m_markUnallocatedMemory = %s;\n", _field_value('markUnallocatedMemory'))
            outputString += _indent_text(2, "desc.m_doNotUsePools = %s;\n", _field_value('doNotUsePools'))
            outputString += _indent_text(2, "desc.m_pageSize = %s;\n", _field_value('pageSize'))
            outputString += _indent_text(2, "desc.m_poolPageSize = %s;\n", _field_value('poolPageSize'))
            outputString += _indent_text(2, "desc.m_memoryBlockAlignment = %s;\n", _field_value('blockAlignment'))
            outputString += _indent_text(2, "desc.m_memoryBlocksByteSize = %s;\n", _field_value('blockSize'))
            outputString += _indent_text(2, "desc.m_reservedOS = %s;\n", _field_value('reservedOS'))
            outputString += _indent_text(2, "desc.m_reservedDebug = %s;\n", _field_value('reservedDebug'))

            # Older Game.xml files used 'recordsMode' for what is now 'recordingMode'
            if descriptor.find("*[@field='recordingMode']") is not None:
                field = "recordingMode"
            else:
                field = "recordsMode"
            # renamed from 'id' to avoid shadowing the builtin
            mode_index = int(_field_value(field))
            outputString += _indent_text(2, "desc.m_recordingMode = %s;\n", recordingMode[mode_index])

            outputString += _indent_text(2, "desc.m_stackRecordLevels = %s;\n", _field_value('stackRecordLevels'))
            outputString += _indent_text(2, "desc.m_enableDrilling = %s;\n", _field_value('enableDrilling'))
            outputString += _indent_text(2, "desc.m_x360IsPhysicalMemory = %s;\n", _field_value('x360PhysicalMemory'))

            modulesElement = descriptor.findall("*[@field='modules']")[0]
            for moduleEntry in modulesElement.findall("*[@field='element']"):
                outputString += _indent_text(2, "desc.m_modules.push_back();\n")
                outputString += _indent_text(2, "desc.m_modules.back().m_dynamicLibraryPath = \"%s\";\n",
                                             moduleEntry.findall("*[@field='dynamicLibraryPath']")[0].get("value"))
            outputString += _indent_text(1, "}\n")

        outputString += "}\n"

        filePath = os.path.join(conf.path.abspath(), "Code", "Launcher", "AndroidLauncher", "android_descriptor.h")
        # 'with' guarantees the handle is closed even if the write fails
        # (original used open/write/close without a finally)
        with open(filePath, 'w') as fp:
            fp.write(outputString)

    # Make sure the intermediate files are generated and updated
    if len(Options.commands) == 0:
        Options.commands.insert(0, 'generate_uber_files')
        Options.commands.insert(1, 'generate_module_def_files')
    else:
        has_generate_uber_files = 'generate_uber_files' in Options.commands
        has_generate_module_def_files = 'generate_module_def_files' in Options.commands
        if not has_generate_uber_files:
            Options.commands.insert(0, 'generate_uber_files')
        if not has_generate_module_def_files:
            Options.commands.insert(1, 'generate_module_def_files')
def load_compile_rules_for_supported_platforms(conf, platform_configuration_filter):
    """
    Load the compile rules for all the supported target platforms for the current host platform

    :param conf:                            Configuration context
    :param platform_configuration_filter:   List of target platforms to filter out
    """
    host_platform = conf.get_waf_host_platform()

    absolute_lmbr_waf_tool_path = conf.path.make_node(LMBR_WAF_TOOL_DIR).abspath()

    vanilla_conf = conf.env.derive()  # grab a snapshot of conf before you pollute it.

    host_function_name = load_compile_rules_for_host(conf, host_platform)

    installed_platforms = []

    for platform in conf.get_available_platforms():

        # Each platform starts from the pristine snapshot, detached so later
        # mutations do not leak between platforms
        platform_spec_vanilla_conf = vanilla_conf.derive()
        platform_spec_vanilla_conf.detach()

        # Determine the compile rules module file and remove it and its support if it does not exist
        compile_rule_script = 'compile_rules_' + host_platform + '_' + platform
        if not os.path.exists(os.path.join(absolute_lmbr_waf_tool_path, compile_rule_script + '.py')):
            conf.remove_platform_from_available_platforms(platform)
            continue

        Logs.info('[INFO] Configure "%s - [%s]"' % (platform, ', '.join(conf.get_supported_configurations(platform))))
        conf.load(compile_rule_script, tooldir=LMBR_WAF_TOOL_DIR)

        # platform installed
        installed_platforms.append(platform)

        # Keep track of uselib's that we found in the 3rd party config files
        conf.env['THIRD_PARTY_USELIBS'] = [uselib_name for uselib_name in conf.read_and_mark_3rd_party_libs()]

        for configuration in conf.get_supported_configurations():
            # if the platform isn't going to generate a build command, don't require that the configuration exists either
            if platform in platform_configuration_filter:
                if configuration not in platform_configuration_filter[platform]:
                    continue

            # Switch to the per-(platform, configuration) environment variant
            conf.setenv(platform + '_' + configuration, platform_spec_vanilla_conf.derive())
            conf.init_compiler_settings()

            # add the host settings into the current env
            getattr(conf, host_function_name)()

            # make a copy of the config for certain variant loading redirection (e.g. test, dedicated)
            # this way we can pass the raw configuration to the third pary reader to properly configure
            # each library
            config_redirect = configuration

            # Use the normal configurations as a base for dedicated server
            is_dedicated = False
            if config_redirect.endswith('_dedicated'):
                config_redirect = config_redirect.replace('_dedicated', '')
                is_dedicated = True

            # Use the normal configurations as a base for test
            is_test = False
            if '_test' in config_redirect:
                config_redirect = config_redirect.replace('_test', '')
                is_test = True

            # Use the specialized function to load platform specifics
            function_name = 'load_%s_%s_%s_settings' % (config_redirect, host_platform, platform)
            if not hasattr(conf, function_name):
                conf.fatal('[ERROR] Required Configuration Function \'%s\' not found' % function_name)

            # Try to load the function
            getattr(conf, function_name)()

            # Apply specific dedicated server settings
            if is_dedicated:
                getattr(conf, 'load_dedicated_settings')()

            # Apply specific test settings
            if is_test:
                getattr(conf, 'load_test_settings')()

            if platform in conf.get_supported_platforms():
                # If the platform is still supported (it will be removed if the load settings function fails), then
                # continue to attempt to load the 3rd party uselib defs for the platform
                path_alias_map = {'ROOT': conf.srcnode.abspath()}

                # Two possible locations for the 3rd party configs: the legacy
                # '3rd_party' folder and the current '3rdParty' folder
                config_3rdparty_folder_legacy = conf.root.make_node(Context.launch_dir).make_node('_WAF_/3rd_party')
                config_3rdparty_folder_legacy_path = config_3rdparty_folder_legacy.abspath()

                config_3rdparty_folder = conf.root.make_node(Context.launch_dir).make_node('_WAF_/3rdParty')
                config_3rdparty_folder_path = config_3rdparty_folder.abspath()

                if os.path.exists(config_3rdparty_folder_legacy_path) and os.path.exists(config_3rdparty_folder_path):

                    has_legacy_configs = len(os.listdir(config_3rdparty_folder_legacy_path)) > 0

                    # Both legacy and current 3rd party exists. Print a warning and use the current 3rd party
                    if has_legacy_configs:
                        conf.warn_once('Legacy 3rd Party configuration path ({0}) will be ignored in favor of ({1}). '
                                       'Merge & remove the configuration files from the legacy path ({0}) to the current path ({1})'
                                       .format(config_3rdparty_folder_legacy_path, config_3rdparty_folder_path))

                    thirdparty_error_msgs, uselib_names = conf.detect_all_3rd_party_libs(config_3rdparty_folder, platform, configuration, path_alias_map)

                elif os.path.exists(config_3rdparty_folder_legacy_path):
                    # Only the legacy 3rd party config folder exists.
                    thirdparty_error_msgs, uselib_names = conf.detect_all_3rd_party_libs(config_3rdparty_folder_legacy, platform, configuration, path_alias_map)

                elif os.path.exists(config_3rdparty_folder_path):
                    # Only the current 3rd party config folder exists.
                    thirdparty_error_msgs, uselib_names = conf.detect_all_3rd_party_libs(config_3rdparty_folder, platform, configuration, path_alias_map)

                else:
                    # Neither folder exists, report a warning
                    thirdparty_error_msgs = ['Unable to find 3rd party configuration path ({}). No 3rd party libraries will '
                                             'be configured.'.format(config_3rdparty_folder_path)]

                for thirdparty_error_msg in thirdparty_error_msgs:
                    conf.warn_once(thirdparty_error_msg)
def load_win_x64_win_x64_vs2017_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all win_x64_win_x64 configurations

    :param conf: Configuration context
    """
    v = conf.env
    global PLATFORM

    # Add defines to indicate a win64 build and configure VS2017 warnings
    v['DEFINES'] += ['_WIN32', '_WIN64', 'NOMINMAX',
                     '_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING',
                     '_ENABLE_EXTENDED_ALIGNED_STORAGE']

    restricted_tool_list_macro_header = 'AZ_TOOLS_EXPAND_FOR_RESTRICTED_PLATFORMS='
    restricted_tool_list_macro = restricted_tool_list_macro_header

    # NOTE(review): restricted platforms would normally be appended to the macro here;
    # as written this condition can never be true since the macro equals its header.
    if len(restricted_tool_list_macro) > len(restricted_tool_list_macro_header):
        v['DEFINES'] += [restricted_tool_list_macro]

    # Make sure this is a supported platform
    if PLATFORM not in conf.get_supported_platforms():
        return

    # Attempt to detect the C++ compiler for VS 2017 ( msvs version 15.0 )
    windows_kit = conf.options.win_vs2017_winkit
    vcvarsall_args = windows_kit + ' ' + conf.options.win_vs2017_vcvarsall_args
    try:
        conf.auto_detect_msvc_compiler('msvc 15', 'x64', vcvarsall_args)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed
        Logs.warn('MSVS 2017 will be removed as a build target. We were unable to find an installation of Visual Studio 2017 that matches win_vs2017_vswhere_args=({}), Windows Kit with win_vs2017_winkit=({}), and win_vs2017_vcvarsall_args=({}).'
                  .format(conf.options.win_vs2017_vswhere_args, windows_kit, conf.options.win_vs2017_vcvarsall_args))
        Logs.warn('Lumberyard defaults use a known good set of options at the time of product release. If your project requires different configuration for MSVS 2017, you can modify these settings in _WAF_/user_settings.options under [Windows Options].')
        conf.mark_supported_platform_for_removal(PLATFORM)
        return

    # Detect the QT binaries, if the current capabilities selected requires it.
    _, enabled, _, _ = conf.tp.get_third_party_path(PLATFORM, 'qt')
    if enabled:
        conf.find_qt5_binaries(PLATFORM)

    # Introduce the linker to generate 64 bit code
    v['LINKFLAGS'] += ['/MACHINE:X64']
    v['ARFLAGS'] += ['/MACHINE:X64']

    VS2017_FLAGS = [
        '/FS'  # Fix for issue writing to pdb files
    ]

    v['CFLAGS'] += VS2017_FLAGS
    v['CXXFLAGS'] += VS2017_FLAGS

    if conf.options.use_uber_files:
        # Uber (unity) files can exceed the default COFF section limit
        v['CFLAGS'] += ['/bigobj']
        v['CXXFLAGS'] += ['/bigobj']

    # Prefer the vc141 binaries; fall back to vc140 (binary-compatible) when absent
    azcg_dir = conf.Path('Tools/AzCodeGenerator/bin/vc141')
    if not os.path.exists(azcg_dir):
        azcg_dir = conf.Path('Tools/AzCodeGenerator/bin/vc140')
        if not os.path.exists(azcg_dir):
            conf.fatal('Unable to locate the AzCodeGenerator subfolder. Make sure that you have VS2017 AzCodeGenerator binaries available')
    v['CODE_GENERATOR_PATH'] = [azcg_dir]

    crcfix_dir = conf.Path('Tools/crcfix/bin/vc141')
    if not os.path.exists(crcfix_dir):
        crcfix_dir = conf.Path('Tools/crcfix/bin/vc140')
        if not os.path.exists(crcfix_dir):
            Logs.warn('Unable to locate the crcfix subfolder. Make sure that you have VS2017 crcfix binaries available')
    v['CRCFIX_PATH'] = [crcfix_dir]

    conf.find_dx12(windows_kit)
def process_custom_configure_commands(conf):
    """
    Add any additional custom commands that need to be run during the configure phase

    :param conf: Configuration context
    """

    def _schedule_project_generation(command):
        # Workflow improvement: for all builds generate projects after the build
        # except when using the default build target 'utilities' then do it before
        if 'build' in Options.commands:
            Options.commands.insert(Options.commands.index('build'), command)
        else:
            Options.commands.append(command)

    host = Utils.unversioned_sys_platform()

    if host == 'win32':
        # Win32 platform optional commands
        # Generate the visual studio projects & solution if specified
        if conf.is_option_true('generate_vs_projects_automatically'):
            Options.commands.insert(0, 'msvs')

    elif host == 'darwin':
        # Darwin/Mac platform optional commands: create the Xcode projects for
        # iOS, AppleTV and mac automatically during configure when enabled
        if conf.is_option_true('generate_ios_projects_automatically'):
            _schedule_project_generation('xcode_ios')

        if conf.is_option_true('generate_appletv_projects_automatically'):
            _schedule_project_generation('xcode_appletv')

        if conf.is_option_true('generate_mac_projects_automatically'):
            _schedule_project_generation('xcode_mac')

    # Android target platform commands
    if any(platform for platform in conf.get_supported_platforms() if platform.startswith('android')):

        # this is required for building any android projects. It adds the Android launchers
        # to the list of build directories
        android_builder_func = getattr(conf, 'create_and_add_android_launchers_to_build', None)
        if android_builder_func is not None and android_builder_func():
            SUBFOLDERS.append(conf.get_android_project_absolute_path())

        # rebuild the project if invoked from android studio or specifically requested to do so
        if conf.options.from_android_studio or conf.is_option_true('generate_android_studio_projects_automatically'):
            _schedule_project_generation('android_studio')

    # Make sure the intermediate files are generated and updated
    if not Options.commands:
        Options.commands.insert(0, 'generate_uber_files')
        Options.commands.insert(1, 'generate_module_def_files')
    else:
        if 'generate_uber_files' not in Options.commands:
            Options.commands.insert(0, 'generate_uber_files')
        if 'generate_module_def_files' not in Options.commands:
            Options.commands.insert(1, 'generate_module_def_files')
def load_compile_rules_for_supported_platforms(conf, platform_configuration_filter):
    """
    Load the compile rules for all the supported target platforms for the current host platform

    :param conf:                            Configuration context
    :param platform_configuration_filter:   List of target platforms to filter out
    """
    host_platform = conf.get_waf_host_platform()

    absolute_lmbr_waf_tool_path = LMBR_WAF_TOOL_DIR if os.path.isabs(LMBR_WAF_TOOL_DIR) else conf.path.make_node(LMBR_WAF_TOOL_DIR).abspath()

    vanilla_conf = conf.env.derive()  # grab a snapshot of conf before you pollute it.

    host_function_name = load_compile_rules_for_host(conf, host_platform)

    installed_platforms = []

    for platform in conf.get_available_platforms():

        # Each platform starts from the pristine snapshot, detached so later
        # mutations do not leak between platforms
        platform_spec_vanilla_conf = vanilla_conf.derive()
        platform_spec_vanilla_conf.detach()

        # Determine the compile rules module file and remove it and its support if it does not exist
        compile_rule_script = 'compile_rules_' + host_platform + '_' + platform
        if not os.path.exists(os.path.join(absolute_lmbr_waf_tool_path, compile_rule_script + '.py')):
            conf.remove_platform_from_available_platforms(platform)
            continue

        Logs.info('[INFO] Configure "%s - [%s]"' % (platform, ', '.join(conf.get_supported_configurations(platform))))
        conf.load(compile_rule_script, tooldir=[LMBR_WAF_TOOL_DIR])

        # platform installed
        installed_platforms.append(platform)

        # Keep track of uselib's that we found in the 3rd party config files
        third_party_uselib_map = conf.read_and_mark_3rd_party_libs()

        conf.env['THIRD_PARTY_USELIBS'] = [uselib_name for uselib_name in third_party_uselib_map]

        # Save off configuration values from the uselib which are necessary during build for modules built with the uselib
        configuration_settings_map = {}
        for uselib_name in third_party_uselib_map:
            configuration_values = conf.get_configuration_settings(uselib_name)
            if configuration_values:
                configuration_settings_map[uselib_name] = configuration_values
        conf.env['THIRD_PARTY_USELIB_SETTINGS'] = configuration_settings_map

        for supported_configuration in conf.get_supported_configurations():
            # if the platform isn't going to generate a build command, don't require that the configuration exists either
            if platform in platform_configuration_filter:
                if supported_configuration not in platform_configuration_filter[platform]:
                    continue

            # Switch to the per-(platform, configuration) environment variant
            conf.setenv(platform + '_' + supported_configuration, platform_spec_vanilla_conf.derive())
            conf.init_compiler_settings()

            # add the host settings into the current env
            getattr(conf, host_function_name)()

            # make a copy of the config for certain variant loading redirection (e.g. test, dedicated)
            # this way we can pass the raw configuration to the third pary reader to properly configure
            # each library
            config_redirect = supported_configuration

            # Use the normal configurations as a base for dedicated server
            is_dedicated = False
            if config_redirect.endswith('_dedicated'):
                config_redirect = config_redirect.replace('_dedicated', '')
                is_dedicated = True

            # Use the normal configurations as a base for test
            is_test = False
            if '_test' in config_redirect:
                config_redirect = config_redirect.replace('_test', '')
                is_test = True

            # Use the specialized function to load platform specifics
            function_name = 'load_%s_%s_%s_settings' % (
                config_redirect,
                host_platform,
                platform
            )
            if not hasattr(conf, function_name):
                conf.fatal('[ERROR] Required Configuration Function \'%s\' not found' % function_name)

            # Try to load the function
            getattr(conf, function_name)()

            # Apply specific dedicated server settings
            if is_dedicated:
                getattr(conf, 'load_dedicated_settings')()

            # Apply specific test settings
            if is_test:
                getattr(conf, 'load_test_settings')()

            if platform in conf.get_supported_platforms():
                # If the platform is still supported (it will be removed if the load settings function fails), then
                # continue to attempt to load the 3rd party uselib defs for the platform
                for uselib_info in third_party_uselib_map:
                    # Each map entry holds (config file node, path alias map) for the uselib
                    third_party_config_file = third_party_uselib_map[uselib_info][0]
                    path_alias_map = third_party_uselib_map[uselib_info][1]
                    thirdparty_error_msgs, uselib_names = conf.read_3rd_party_config(third_party_config_file, platform, supported_configuration, path_alias_map)
                    for thirdparty_error_msg in thirdparty_error_msgs:
                        conf.warn_once(thirdparty_error_msg)