Example #1
def load_mods_zip(path='mods'):
	api.log(f'(loader) Loading mods from {path!r} directory...')
	mods = []
	if (exists(path) and isdir(path)):
		files = listdir(path)
		for filename in files:
			if (zipfile.is_zipfile(path + '/' + filename)):
				mod = load_mod_zip(path + '/' + filename)
				mods.append(mod)
	return mods
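The snippet relies on imports outside the excerpt; a minimal header it presumably needs (the exact layout of the original module is an assumption) would be:

# Presumed imports for load_mods_zip; the original module may organize these differently.
import zipfile
from os import listdir
from os.path import exists, isdir

import api  # project-specific logging helper used throughout these examples
# load_mod_zip, called above, is the function shown in Example #5.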
Example #2
File: gnu_make.py Project: ejulien/Smack
def generate(make, toolchains, output_path):
	api.log('GNU makefile generator', 0)
	api.log('Target directory: ' + output_path, 1)

	if not os.path.exists(output_path):
		os.makedirs(output_path)

	# retrieve all workspaces
	workspaces = make.getConfigurationKeyValues('workspace')

	# process all workspaces
	for workspace in workspaces:
		builds = make.getConfigurationKeyValuesFilterByContext('build', smack.context({'workspace': workspace}))  # all builds in this workspace

		for toolchain in toolchains:
			target = toolchain['target']
			if not isTargetSupported(target):
				print("Skipping unsupported target '%s'.\n" % target)
				continue

			for arch in toolchain['arch']:
				build_env = getTargetBuildEnv(target, arch)
				if not build_env:
					api.log('GNU make ignoring unsupported architecture ' + arch + ' for target ' + target + '.\n', 1)
					continue
				build_env['builds'] = builds

				with open(output_path + '/' + target + '-' + arch + '_' + workspace + '.mk', 'w') as f:
					outputHeader(f)
					outputGlobals(f, build_env)
					outputWorkspace(f, build_env, make, smack.context({'workspace': workspace, 'target': target, 'arch': arch}), output_path)
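The loops above imply the shape of the toolchain descriptors: each entry carries a 'target' string and a list of architectures under 'arch'. A hedged invocation sketch (the make object comes from Smack itself; the target and output path are illustrative assumptions):

# Hypothetical call into the generator; concrete values are illustrative only.
toolchains = [{'target': 'linux', 'arch': ['x86', 'x64']}]
generate(make, toolchains, 'build/gnu')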
Example #3
def generateMakefile(make, ctx, toolchains, output_path):
	api.log('Android makefile: ' + ctx['workspace'], 0)
	api.log('Target directory: ' + output_path, 1)
	api.warning('Build and architecture support with NDK-BUILD is done through Application.mk', 1)

	if not os.path.exists(output_path):
		os.makedirs(output_path)

	# convert projects
	groups = make.getConfigurationKeyValuesFilterByContext('group', ctx)

	mk_projects = []
	for group in groups:
		group_ctx = ctx.clone({'group': group})
		projects = make.getConfigurationKeyValuesFilterByContext('project', group_ctx)
		for project in projects:
			# filter out any build specific configuration keys, they will be added to Application.mk
			mk_projects.append(generateProject(make, group_ctx.clone({'project': project, 'build': '@Exclude'}), output_path))

	# write Android makefile
	api.log("Output makefile 'Android.mk'", 1)
	with open(output_path + '/' + 'Android.mk', 'w') as f:
		outputHeader(f)
		f.write('LOCAL_PATH := $(call my-dir)\n\n')
		for project in mk_projects:
			outputProject(f, make, project, mk_projects, output_path)

	# write Application makefile
	outputApplicationMk(make, ctx, toolchains, mk_projects, output_path)
Example #4
File: vs2010.py Project: ejulien/Smack
def generateProject(make, ctx, toolchains, output_path):
	api.log("Generating project '" + ctx['project'] + "'...", 1)

	# begin generation
	project = {}
	project['ctx'] = copy.deepcopy(ctx)
	project['group'] = ctx['group']
	project['name'] = ctx['project']
	project['guid'] = str(uuid.uuid4()).upper()

	# configurations
	project['configurations'] = []

	# get builds
	builds = make.getConfigurationKeyValuesFilterByContext('build', ctx)

	for build in builds:
		for toolchain in toolchains:

			# generate a configuration for each targeted architecture
			for arch in toolchain['arch']:
				target = toolchain['target']
				vsplatform = platformFromTargetArchitecture(target, arch)

				if vsplatform == None:
					api.log('Ignoring unsupported target ' + target + ' on architecture ' + arch, 1)
					continue

				# setup configuration
				cfg = {'ctx': ctx.clone({'arch': arch, 'build': build, 'target': target}), 'vsplatform': vsplatform}

				if platformImpliesArch(vsplatform) == True:
					cfg['name'] = build
				else:
					cfg['name'] = build + ' ' + vsplatform + ' ' + arch

				project['configurations'].append(cfg)

	return project
Example #5
def load_mod_zip(path):
	api.log(f'(loader) Mod from {path!r}')
	with TemporaryDirectory() as tmp_dir:
		with zipfile.ZipFile(path) as zip_ref:
			api._tmp_dir = tmp_dir
			zip_ref.extractall(tmp_dir)
			try:
				return load_mod(tmp_dir + '/main.py')
			except Exception as exception:
				api.log(f'(loader)[ERROR] An exception occurred while loading the mod from {path!r}:')
				api.log(f'(loader)[ERROR] {exception}')
				raise
Example #6
def outputApplicationMk(make, ctx, toolchains, mk_projects, output_path):
	api.log("Output makefile 'Application.mk'", 1)
	f = open(output_path + '/' + 'Application.mk', 'w')
	outputHeader(f)

	# grab builds
	builds = make.getConfigurationKeyValuesFilterByContext('build', ctx)
	if len(builds) == 0:
		api.warning("No build configuration to make for this application", 1)
		return

	# output build selection
	f.write('# Select the build configuration. Possible values: ')
	first = True
	for build in builds:
		if first == False:
			f.write(', ')
		f.write('"' + build + '"')
		first = False
	f.write('.\n')
	f.write('BUILD = "' + builds[0] + '"\n')

	# output architecture selection (ugh...)
	supported_archs = []
	for toolchain in toolchains:
		if toolchain['target'] == 'android':
			for arch in toolchain['arch']:
				if getArchABI(arch) != None:
					supported_archs.append(arch)

	f.write('# Select the target architecture. Possible values: ')
	first = True
	for arch in supported_archs:
		if first == False:
			f.write(', ')
		f.write('"' + arch + '"')
		first = False
	f.write('.\n')
	f.write('ARCH = "' + supported_archs[0] + '"\n')

	f.write('\n')

	# global settings.
	app_platform = make.get('android_app_platform', ctx)
	if app_platform:
		f.write('APP_PLATFORM := android-' + str(app_platform[0]) + '\n')

	# output build rules
	f.write('\n')

	for build in builds:
		for toolchain in toolchains:
			if toolchain['target'] != 'android':
				continue
			for arch in toolchain['arch']:
				abi = getArchABI(arch)
				if abi == None:
					api.warning('Unsupported architecture: ' + arch + '\n', 1)
					continue

				app_ctx = ctx.clone({'build': build, 'arch': arch, 'project': '@exclude'})	# in this build for this architecture, not specific to a project

				f.write('# Configuration for build ' + build + ' ' + arch + '\n')
				f.write('#------------------------------------------------\n')
				f.write('ifeq (${BUILD}, "' + build + '")\n')
				f.write('  ifeq (${ARCH}, "' + arch + '")\n\n')

				# app cflags
				app_cflags = ''
				defines = make.get('define', app_ctx)
				if defines != None:
					for define in defines:
						app_cflags += '-D' + define + ' '

				cflags = make.get('cflags', app_ctx)
				if cflags != None:
					if 'use-neon' in cflags:
						f.write('LOCAL_ARM_NEON := true\n')

					gflags = convertCFlags(cflags)
					for v in gflags:
						app_cflags += v + ' '

				if app_cflags != '':
					f.write('APP_CFLAGS := ' + app_cflags + '\n')

				f.write('APP_OPTIM := ' + ('debug' if cflags and 'debug' in cflags else 'release') + '\n')
				f.write('APP_ABI := ' + abi + '\n')

				if cflags and 'use-stlport' in cflags:
					f.write('APP_STL := stlport_static\n')

				f.write('\n  endif\n')
				f.write('endif\n\n')
Example #7
import pyglet
from pyglet.gl import *
from pyglet.window import key
import numpy

import api
import loader
import world
import camera
import entity

api.log('(game) Start init')
pyglet.clock.tick()

window = pyglet.window.Window(width=800,
                              height=800,
                              vsync=0,
                              caption='Tile game',
                              resizable=True)
main_batch = pyglet.graphics.Batch()
hud_batch = pyglet.graphics.Batch()
api.main_batch = main_batch

mods = loader.load_mods_zip()

objects_group = pyglet.graphics.OrderedGroup(0)
api.objects_group = objects_group
tex_group = pyglet.graphics.TextureGroup(api.images['default:wall'])

# opengl stuff
glEnable(GL_TEXTURE_2D)
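The excerpt stops right after the OpenGL setup; in a typical pyglet 1.x script the remainder would register a draw handler and start the main loop, roughly as sketched below (not part of the original source):

# Hypothetical continuation of the init code above (pyglet 1.x style).
@window.event
def on_draw():
    window.clear()
    main_batch.draw()  # world/tile geometry
    hud_batch.draw()   # HUD drawn on top

pyglet.app.run()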
Example #8
File: vs2010.py Project: ejulien/Smack
def generateSolution(make, ctx, toolchains, output_path):
	api.log('VS2010 solution: ' + ctx['workspace'], 0)
	api.log('Target directory: ' + output_path, 1)

	if not os.path.exists(output_path):
		os.makedirs(output_path)

	# convert projects
	groups = make.getConfigurationKeyValuesFilterByContext('group', ctx)

	vs_projects = []
	for group in groups:
		group_ctx = ctx.clone({'group': group})
		projects = make.getConfigurationKeyValuesFilterByContext('project', group_ctx)
		for project in projects:
			vs_projects.append(generateProject(make, group_ctx.clone({'project': project}), toolchains, output_path))

	# write projects
	for project in vs_projects:
		outputProject(make, project, vs_projects, output_path)

	# output solution filters (saved as one file per project)
	outputSolutionFilters(make, ctx, vs_projects, output_path)

	# output solutions
	if output_master_solution:
		api.log('Output Master Solution : Start...')
		outputSolution(make, ctx, vs_projects, output_path)
		api.log('Output Master Solution : Stop...')
	if output_per_project_solution:
		api.log('Output per project solution : Start...')
		outputPerProjectSolutions(make, ctx, vs_projects, output_path)
		api.log('Output per project solution : Stop...')
	if output_per_group_solution:
		api.log('Output per group solution : Start...')
		outputPerGroupSolutions(make, ctx, vs_projects, groups, output_path)
		api.log('Output per group solution : Stop...')
	if output_per_condition_solution and per_condition_solution_key:
		api.log('Output per name solution : Start...')
		outputPerConditionSolutions(make, ctx, vs_projects, per_condition_solution_key, output_path)
		api.log('Output per name solution : Stop...')
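The output_* switches tested above are module-level flags in vs2010.py that fall outside this excerpt; the defaults below are assumptions, listed only to make the snippet self-contained:

# Assumed module-level configuration flags (actual defaults are not visible in this excerpt).
output_master_solution = True
output_per_project_solution = False
output_per_group_solution = False
output_per_condition_solution = False
per_condition_solution_key = None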
Example #9
File: vs2010.py Project: ejulien/Smack
def outputSolution(make, ctx, projects, output_path, output_name = None):
	solution = {}
	solution['guid'] = str(uuid.uuid4()).upper()


	fullPath = output_path + '/' + (output_name if output_name else ctx['workspace']) + '.sln'
	api.log('output solution: ' + fullPath)

	f = open(fullPath, 'w')

	f.write('Microsoft Visual Studio Solution File, Format Version 11.00\n')
	f.write('# Visual Studio 2010\n')

	# output projects
	for prj in projects:
		f.write('Project("{' + solution['guid'] + '}") = "' + prj['name'] + '", "' + prj['name'] + '.vcxproj", "{' + prj['guid'] + '}"\n')
		# this does not seem mandatory and I'm not too sure how this plays with the conditional references in the project files
		# outputSolutionLevelProjectDependencies(f, prj, projects)
		f.write('EndProject\n')

	# solution folder
	groups = make.getConfigurationKeyValues('group')

	root_guid = '2150E333-8FDC-42A3-9474-1A3956D46DE8'	# solution folder guid
	group_projects = []
	for group in groups:
		if len([p for p in projects if p['group'] == group]) == 0:
			continue # make sure we have at least one project in this group before outputting it

		grp = {'name': group, 'root_guid': root_guid, 'guid': str(uuid.uuid4()).upper()}
		f.write('Project("{' + grp['root_guid'] + '}") = "' + group + '", "' + group + '", "{' + grp['guid'] + '}"\n')
		f.write('EndProject\n')
		group_projects.append(grp)

	f.write('Global\n')

	# pre solution
	f.write('	GlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
	platforms = getSolutionPlatformsFromProjects(projects)
	for platform in platforms:
		f.write('		' + platform + ' = ' + platform + '\n')
	f.write('	EndGlobalSection\n')

	# post solution
	f.write('	GlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
	for prj in projects:
		for cfg in prj['configurations']:
			f.write('		{' + prj['guid'] + '}.' + cfg['qualified_name'] + '.ActiveCfg = ' + cfg['qualified_name'] + '\n')
			if skipProjectBuild(cfg) == False:
				f.write('		{' + prj['guid'] + '}.' + cfg['qualified_name'] + '.Build.0 = ' + cfg['qualified_name'] + '\n')
	f.write('	EndGlobalSection\n')

	# project in solution folder
	if len(group_projects) > 0:
		f.write('	GlobalSection(NestedProjects) = preSolution\n')
		for group in group_projects:
			project_names = make.getConfigurationKeyValuesFilterByContext('project', ctx.clone({'group': group['name']}), False)
			if project_names != None:
				for name in project_names:
					prj = getProject(projects, name)
					if prj != None:
						f.write('		{' + prj['guid'] + '} = {' + group['guid'] + '}\n')
		f.write('	EndGlobalSection\n')

	f.write('EndGlobal\n')
	return solution
Example #10
File: vs2010.py Project: ejulien/Smack
def outputProject(make, project, projects, output_path):
	project_ctx = project['ctx']

	api.log("Output project '" + project['name'] + "'", 1)
	f = open(output_path + '/' + project['name'] + '.vcxproj', 'w')

	f.write('<?xml version="1.0" encoding="utf-8"?>\n')
	f.write('<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">\n')

	# configurations
	f.write('  <ItemGroup Label="ProjectConfigurations">\n')
	for cfg in project['configurations']:
		cfg['qualified_name'] = cfg['name'] + '|' + cfg['vsplatform']
		f.write('    <ProjectConfiguration Include="' + cfg['qualified_name'] + '">\n')
		f.write('      <Configuration>' + cfg['name'] + '</Configuration>\n')
		f.write('      <Platform>' + cfg['vsplatform'] + '</Platform>\n')
		f.write('    </ProjectConfiguration>\n')
	f.write('  </ItemGroup>\n')

	# global properties
	f.write('  <PropertyGroup Label="Globals">\n')
	f.write('    <ProjectGuid>{' + project['guid'] + '}</ProjectGuid>\n')
	f.write('    <RootNamespace>' + project['name'] + '</RootNamespace>\n')
	f.write('    <Keyword>' + getProjectKeyword(make, project_ctx) + '</Keyword>\n')
	f.write('  </PropertyGroup>\n')

	# store a few commonly used values directly in the configuration
	for cfg in project['configurations']:
		cfg['deps'] = make.getDependencies(cfg['ctx'])
		cfg['links'] = make.getLinksAcrossDependencies(cfg['deps'], cfg['ctx'])
		cfg['type'] = make.getBestMatch('type', cfg['ctx'])
		cfg['cflags'] = make.get('cflags', cfg['ctx'])
		cfg['pflags'] = make.get('pflags', cfg['ctx'])

	# build the project link list across all configurations (we'll disable unused ones on a per project basis in the solution)
	project['all_link'] = None
	for cfg in project['configurations']:
		project['all_link'] = api.appendToList(project['all_link'], cfg['links'])
	if project['all_link'] != None:
		project['all_link'] = list(set(project['all_link']))

	# cpp default properties
	f.write('  <Import Project="$(VCTargetsPath)\\Microsoft.Cpp.Default.props" />\n')

	# general project properties
	for cfg in project['configurations']:
		f.write('  <PropertyGroup ' + getCondition(cfg) + ' Label="Configuration">\n')
		outputGeneralProjectProperty(f, make, project, cfg)
		f.write('  </PropertyGroup>\n')

	# cpp extension settings
	f.write('  <Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props" />\n')
	f.write('  <ImportGroup Label="ExtensionSettings">\n')
	f.write('  </ImportGroup>\n')

	# default props
	for cfg in project['configurations']:
		f.write('  <ImportGroup Label="PropertySheets" ' + getCondition(cfg) + '>\n')
		f.write('    <Import Project="$(UserRootDir)\\Microsoft.Cpp.$(Platform).user.props" Condition="exists(\'$(UserRootDir)\\Microsoft.Cpp.$(Platform).user.props\')" Label="LocalAppDataPlatform" />\n')
		f.write('  </ImportGroup>\n')

	# user macros
	f.write('  <PropertyGroup Label="UserMacros" />\n')

	# binary output
	for cfg in project['configurations']:
		f.write('  <PropertyGroup ' + getCondition(cfg) + '>\n')
		f.write('    <OutDir>' + getBinaryPath(make, cfg) + '</OutDir>\n')
		f.write('    <IntDir>' + getIntermediatePath(make, cfg) + '</IntDir>\n')

		target_name = make.getBestMatch('target_name', cfg['ctx'])
		if not target_name:
			target_name = getBinaryName(project['name'], cfg['type'], cfg['ctx']['target'])

		suffix = make.getBestMatch('bin_suffix', cfg['ctx'])
		if suffix:
			target_name += suffix

		target_ext = make.getBestMatch('bin_ext', cfg['ctx'])
		if not target_ext:
			target_ext = getBinaryExt(cfg['type'], cfg['ctx']['target'])

		f.write('    <TargetName>' + target_name + '</TargetName>\n')
		f.write('    <TargetExt>' + target_ext + '</TargetExt>\n')
		f.write('  </PropertyGroup>\n')

	# compiler / linker properties
	for cfg in project['configurations']:
		ctx = cfg['ctx']
		suffix = make.get('bin_suffix', ctx)

		f.write('  <ItemDefinitionGroup ' + getCondition(cfg) + '>\n')

		# compiler
		cflags = cfg['cflags']

		outDir = '$(OutDir)' if 'debug' in cfg['cflags'] else '$(IntDir)'

		f.write('    <ClCompile>\n')
		f.write('      <PrecompiledHeader>NotUsing</PrecompiledHeader>\n')
		f.write('      <WarningLevel>' + getWarningLevel(cflags) + '</WarningLevel>\n')
		f.write('      <PreprocessorDefinitions>' + getDefines(make, cfg) + '</PreprocessorDefinitions>\n')
		f.write('      <AdditionalIncludeDirectories>' + getAdditionalIncludes(make, cfg, output_path) + '</AdditionalIncludeDirectories>\n')
		f.write('      <DebugInformationFormat>' + getDebugInformation(cflags) + '</DebugInformationFormat>\n')
		f.write('      <ProgramDataBaseFileName>'+ outDir + getPDBName(project['name'], suffix) + '.pdb</ProgramDataBaseFileName>\n')
		f.write('      <Optimization>' + getOptimization(cflags) + '</Optimization>\n')
		f.write('      <ExceptionHandling>' + getUseExceptions(cflags) + '</ExceptionHandling>\n')
		f.write('      <RuntimeTypeInfo>' + getUseRTTI(cflags) + '</RuntimeTypeInfo>\n')
		f.write('      <FloatingPointModel>' + getFloatingPointModel(cflags) + '</FloatingPointModel>\n')
		
		if 'omit-frame-pointers' in cflags:
			f.write('      <OmitFramePointers>true</OmitFramePointers>\n')

		f.write('      <RuntimeLibrary>' + getRuntimeLibrary(make, cfg) + '</RuntimeLibrary>\n')

		f.write('      <MultiProcessorCompilation>' + getMultiProcessorCompilation(cflags) + '</MultiProcessorCompilation>\n')
		f.write('      <MinimalRebuild>' + getMinimalRebuild(cflags) + '</MinimalRebuild>\n')

		additionalClOptions = getAdditionalClOptions(make, cfg)
		if additionalClOptions != None:
			f.write('      <AdditionalOptions>' + additionalClOptions + ' %(AdditionalOptions)</AdditionalOptions>\n')

		align_dict = {'struct-member-align-1': 1, 'struct-member-align-2': 2, 'struct-member-align-4': 4, 'struct-member-align-8': 8, 'struct-member-align-16': 16}
		for key in align_dict.keys():
			if key in cflags:
				f.write('      <StructMemberAlignment>' + str(align_dict[key]) + 'Bytes</StructMemberAlignment>\n')
				break

		if 'x64' not in ctx['arch'] and 'use-sse2' in cflags:
			f.write('      <EnableEnhancedInstructionSet>StreamingSIMDExtensions2</EnableEnhancedInstructionSet>\n')

		outputPrecompiledHeaderTags(f, make, cfg)
		f.write('    </ClCompile>\n')

		f.write('    <Link>\n')
		f.write('      <SubSystem>' + getSubSystem(make, cfg) + '</SubSystem>\n')
		f.write('      <AdditionalDependencies>' + getAdditionalDependencies(make, cfg, projects) + '</AdditionalDependencies>\n')
		f.write('      <AdditionalLibraryDirectories>' + getAdditionalLibraryDirectories(make, cfg, output_path) +'</AdditionalLibraryDirectories>\n')
		f.write('      <GenerateDebugInformation>' + ('True' if ('debug' in cfg['cflags'] or 'debug-info' in cfg['cflags']) else 'False') + '</GenerateDebugInformation>\n')

		ModuleDefFile = getModuleDefinitionFile(make, cfg)
		if (ModuleDefFile != None):
			f.write('      <ModuleDefinitionFile>' + ModuleDefFile + '</ModuleDefinitionFile>\n')

		additionalLinkOptions = getAdditionalLinkOptions(make, cfg)
		if additionalLinkOptions != None:
			f.write('      <AdditionalOptions>' + additionalLinkOptions + ' %(AdditionalOptions)</AdditionalOptions>\n')

		UACExecutionLevel = getUACExecutionLevel(make, cfg)
		if UACExecutionLevel != None:
			f.write('      <UACExecutionLevel>' + UACExecutionLevel + '</UACExecutionLevel>\n')

		outputAdditionalLinkTags(f, make, cfg)

		f.write('    </Link>\n')

		f.write('    <Lib>\n')

		additionalLibDependencies = getAdditionalLibDependencies(make, cfg)
		if additionalLibDependencies != None:
			f.write('      <AdditionalDependencies>' + additionalLibDependencies + '</AdditionalDependencies>\n')

		LibTargetMachine = getLibTargetMachine(make, cfg)
		if LibTargetMachine != None:
			f.write('      <TargetMachine>' + LibTargetMachine + '</TargetMachine>\n')

		f.write('    </Lib>\n')

		PreBuildEventCommand = getPreBuildEventCommand(make, cfg)
		PreBuildEventMessage = getPreBuildEventMessage(make, cfg)
		if (PreBuildEventCommand != None):
			f.write('    <PreBuildEvent>\n')
			f.write('      <Command>' + PreBuildEventCommand + '</Command>\n')
			if (PreBuildEventMessage != None):
				f.write('      <Message>' + PreBuildEventMessage + '</Message>\n')
			f.write('    </PreBuildEvent>\n')


		PreLinkEventCommand = getPreLinkEventCommand(make, cfg)
		PreLinkEventMessage = getPreLinkEventMessage(make, cfg)
		if (PreLinkEventCommand != None):
			f.write('    <PreLinkEvent>\n')
			f.write('      <Command>' + PreLinkEventCommand + '</Command>\n')
			if (PreLinkEventMessage != None):
				f.write('      <Message>' + PreLinkEventMessage + '</Message>\n')
			f.write('    </PreLinkEvent>\n')

		PostBuildEventCommand = getPostBuildEventCommand(make, cfg)
		PostBuildEventMessage = getPostBuildEventMessage(make, cfg)
		if (PostBuildEventCommand != None):
			f.write('    <PostBuildEvent>\n')
			f.write('      <Command>' + PostBuildEventCommand + '</Command>\n')
			if (PostBuildEventMessage != None):
				f.write('      <Message>' + PostBuildEventMessage + '</Message>\n')
			f.write('    </PostBuildEvent>\n')

		f.write('  </ItemDefinitionGroup>\n')

	# source files
	project['files'] = []
	for cfg in project['configurations']:	# grab across all configurations
		cfg['files'] = make.get('files', cfg['ctx'])
		if cfg['files']:
			cfg['files'] = [getSolutionFileName(file, output_path) for file in cfg['files']]
			project['files'].extend(cfg['files'])

	project['files'] = list(set(project['files']))
	project['files'] = [{'name': file, 'skip_cfg': [], 'bigobj_cfg': [], 'nopch_cfg': []} for file in project['files']]

	if len(project['files']) == 0:
		api.warning("No files added to project '" + project['name'] + "' in context " + str(project_ctx), 1)

	# skipped configurations per file
	def getProjectFile(name):
		for file in project['files']:
			if file['name'] == name:
				return file
		return None

	for cfg in project['configurations']:
		skips = make.get('skip_files', cfg['ctx'])
		if skips:
			for skip in skips:
				file = getProjectFile(getSolutionFileName(skip, output_path))
				if file:
					file['skip_cfg'].append(cfg)

	for file in project['files']:
		for cfg in project['configurations']:
			if file['name'] not in cfg['files']:
				file['skip_cfg'].append(cfg)

	# PCH creation per configuration
	for cfg in project['configurations']:
		create_pch = make.get('create_pch', cfg['ctx'])
		cfg['create_pch'] = api.getRelativePath(create_pch[0], output_path, 'windows') if create_pch != None else ''

	# big obj per file
	for cfg in project['configurations']:
		nopchs = make.get('big_obj', cfg['ctx'])
		if nopchs:
			for nopch in nopchs:
				file = getProjectFile(getSolutionFileName(nopch, output_path))
				if file:
					file['bigobj_cfg'].append(cfg)

	for file in project['files']:
		for cfg in project['configurations']:
			if file['name'] not in cfg['files']:
				file['bigobj_cfg'].append(cfg)
	
	# no pch per file
	for cfg in project['configurations']:
		nopchs = make.get('no_pch', cfg['ctx'])
		if nopchs:
			for nopch in nopchs:
				file = getProjectFile(getSolutionFileName(nopch, output_path))
				if file:
					file['nopch_cfg'].append(cfg)

	for file in project['files']:
		for cfg in project['configurations']:
			if file['name'] not in cfg['files']:
				file['nopch_cfg'].append(cfg)

	# distribute over file categories	
	distributeProjectFiles(make, project, output_path)

	# output include files
	f.write('  <ItemGroup>\n')
	for file in project['include_files']:
		openIncludeFileClDirective(f, project, file, output_path)
		outputIncludeFileClDirective(f, make, project, file, output_path)
		closeIncludeFileClDirective(f, project, file, output_path)
	f.write('  </ItemGroup>\n')

	# output compilation units
	f.write('  <ItemGroup>\n')
	for file in project['source_files']:
		openCompileFileClDirective(f, project, file, output_path)
		outputCompileFileClDirective(f, make, project, file, output_path)
		outputPCHDirective(f, project, file)
		outputBigObjDirective(f, project, file)
		outputNoPchDirective(f, file)
		outputExcludeFileFromBuildDirective(f, file)
		closeCompileFileClDirective(f, project, file, output_path)
	f.write('  </ItemGroup>\n')

	# output resource compilation
	f.write('  <ItemGroup>\n')
	for file in project['resource_files']:
		f.write('    <ResourceCompile Include=\"' + file['name'] + '\" />\n')
	f.write('  </ItemGroup>\n')

	# output custom units
	f.write('  <ItemGroup>\n')
	for file in project['custom_files']:
		openCustomFileClDirective(f, project, file, output_path)
		outputCustomFileClDirective(f, make, project, file, output_path)
		outputPCHDirective(f, project, file)
		outputBigObjDirective(f, project, file)
		outputNoPchDirective(f, file)
		outputExcludeFileFromBuildDirective(f, file)
		closeCustomFileClDirective(f, project, file, output_path)
	f.write('  </ItemGroup>\n')

	# project dependencies
	common_links = copy.deepcopy(project['all_link'])	# links common to all configurations
	for cfg in project['configurations']:
		if cfg['links'] != None:
			common_links = [link for link in common_links if link in cfg['links']]

	for cfg in project['configurations']:				# links specific to this configuration
		if cfg['links'] != None:
			cfg['cfg_links'] = [link for link in cfg['links'] if link not in common_links]

	if common_links and len(common_links) > 0:
		f.write('  <ItemGroup>\n')
		for link in common_links:
			prj = getProject(projects, link)
			if prj != None:
				f.write('    <ProjectReference Include="' + prj['name'] +'.vcxproj">\n')
				f.write('      <Project>{' + prj['guid'] +'}</Project>\n')
				f.write('    </ProjectReference>\n')
		f.write('  </ItemGroup>\n')

	for cfg in project['configurations']:
		if 'cfg_links' in cfg and len(cfg['cfg_links']) > 0:
			f.write('  <ItemGroup ' + getCondition(cfg) + '>\n')
			for link in cfg['cfg_links']:
				prj = getProject(projects, link)
				if prj != None:
					f.write('    <ProjectReference Include="' + prj['name'] +'.vcxproj">\n')
					f.write('      <Project>{' + prj['guid'] +'}</Project>\n')
					f.write('    </ProjectReference>\n')
			f.write('  </ItemGroup>\n')

	# extensions
	f.write('  <Import Project="$(VCTargetsPath)\\Microsoft.Cpp.targets" />\n')
	f.write('  <ImportGroup Label="ExtensionTargets">\n')
	f.write('  </ImportGroup>\n')

	f.write('  <ProjectExtensions>\n')
	outputProjectExtensionTag(f, make, project)
	f.write('  </ProjectExtensions>\n')

	# project done, next!
	f.write('</Project>\n')
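getCondition is used throughout but not shown in this excerpt; given how qualified_name is assembled ('name|vsplatform'), it presumably expands to the standard MSBuild condition attribute. A sketch under that assumption:

# Assumed shape of getCondition; the real Smack implementation may differ.
def getCondition(cfg):
	return 'Condition="\'$(Configuration)|$(Platform)\'==\'' + cfg['qualified_name'] + '\'"'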
Example #11
def load_world(name):
	api.log(f'(loader) loading world {name!r}...')
	with open('worlds/' + name, 'rb') as file:
		return pickle.load(file)
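The matching save routine is not part of the excerpt; a minimal counterpart under the same assumptions (pickle format, worlds/ directory) might be:

# Hypothetical counterpart to load_world; not in the original source.
def save_world(name, world_obj):
	api.log(f'(loader) saving world {name!r}...')
	with open('worlds/' + name, 'wb') as file:
		pickle.dump(world_obj, file)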
Example #12
def load_mod(filename):
	spec = spec_from_file_location(filename.split('.')[0], filename)
	mod = module_from_spec(spec)
	spec.loader.exec_module(mod)
	api.log(f'(loader/mod) {api.current_mod_name!r}')
	return mod
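spec_from_file_location and module_from_spec are used as bare names, so the module presumably imports them from importlib.util; a sketch of the header this snippet assumes:

# Presumed imports for load_mod; the original module may organize these differently.
from importlib.util import spec_from_file_location, module_from_spec

import api  # project-specific logging helper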