def writeVars(argList, argName):
    outData = []
    args = []
    whspc = re.compile(r"\s+")
    for arg in argList:
        tmpArgs = []
        for tmpArg in whspc.split(arg):
            # Strip surrounding quotes from quoted tokens.
            if len(tmpArg) > 1 and tmpArg[0] == '"' and tmpArg[-1] == '"':
                tmpArgs.append(tmpArg[1:-1])
            else:
                tmpArgs.append(tmpArg)
        # Drop empty tokens produced by leading/trailing whitespace.
        args.extend(filter(lambda x: len(x) > 0, tmpArgs))

    i = 0
    while i < len(args):
        if i + 1 >= len(args) or not (args[i + 1] == ":=" or args[i + 1] == "+="):
            userErrorExit("Missing assignment operator for '%s' for %s." % (args[i], argName))
        j = i + 2
        while j < len(args) and args[j] != ":=" and args[j] != "+=":
            j += 1
        if j < len(args):
            k = j - 1
        else:
            k = j
        spcLen = len(args[i]) + len(args[i + 1]) + 2
        values = [a.replace("$(_HYPHEN_)", "-") for a in args[i + 2:k]]
        assignment = "%s %s %s" % (args[i], args[i + 1],
                                   (" \\\n" + spcLen * " ").join(values))
        outData.append(assignment)
        outData.append("\n\n")
        i = k

    return "".join(outData)
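
# Illustrative sketch, not part of the original source: a hypothetical
# '--vars-app' value and the makefile assignment writeVars() would emit for it.
# Hyphens are passed in encoded as $(_HYPHEN_) and restored here.
#
#   writeVars(['LOCAL_CFLAGS := $(_HYPHEN_)O2 $(_HYPHEN_)Wall'], "--vars-app")
#
# would yield roughly:
#
#   LOCAL_CFLAGS := -O2 \
#                   -Wall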
def _preProcPath(_path):
    if os.path.isfile(_path):
        _p, _name = os.path.split(_path)
        if len(_p) == 0:
            userErrorExit("Bad path '%s'." % (_path))
        return absPath(_p)
    else:
        return absPath(_path)
def get_view_dir(args_view):
    caller_dir = os.path.abspath('.')
    view_dir = os.path.abspath(args_view)
    os.chdir(view_dir)
    view_dir = os.path.abspath('')
    while not dir_has_rspec(view_dir):
        os.chdir('..')
        if view_dir == os.path.abspath(''):
            userErrorExit('ctx could not find an rspec in the supplied argument or any parent directory')
        view_dir = os.path.abspath('')
    return view_dir
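
# Hedged sketch, not part of the original source: get_view_dir() above depends
# on a dir_has_rspec() helper defined elsewhere. Under the assumption that a
# view root is identified by the presence of an *.rspec file, a minimal
# stand-in could look like this:
#
# def dir_has_rspec(view_dir):
#     import glob
#     return len(glob.glob(os.path.join(view_dir, '*.rspec'))) > 0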
def cmd_updateview(args):
    if args.updates_only == True and args.checkouts_only == True:
        userErrorExit("Options '--updates-only' and '--checkouts-only' are mutually exclusive.")

    cview = ctx_view.CTXView(args.view, getAccessPolicy(args), updating=True, validate=True)

    if args.checkouts_only == False:
        cview.updateRepositories()

    if args.updates_only == False:
        cview.checkoutRepositories()
def check_unmerged(self):
    import subprocess
    for repo in self.git_repos:
        os.chdir(repo.getAbsLocalPath())
        args = [self.git, 'status', '--porcelain']
        p = subprocess.Popen(args, bufsize=4096, stdin=None,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stderr = p.stderr.read()
        stdout = p.stdout.read()
        #p.wait()
        for line in stdout.split('\n'):
            if line[1:2] == 'U' or line[:1] == 'U':
                msg = 'there are unmerged changes in \'' + os.path.basename(repo.getAbsLocalPath()) + '\', cannot continue'
                userErrorExit(msg)
    os.chdir(self.view_dir)
def cmd_updateview(args):
    if args.updates_only == True and args.checkouts_only == True:
        userErrorExit("Options '--updates-only' and '--checkouts-only' are mutually exclusive.")

    deprecated_nra_warning(args)

    view_dir = get_view_dir(args.view)

    cview = ctx_view.CTXView(view_dir, updating=True, validate=True)

    if args.checkouts_only == False:
        cview.updateRepositories()

    if args.updates_only == False:
        cview.checkoutRepositories()
def getPathsFromOption(option):
    user_paths = list()
    if os.path.isdir(option):
        user_paths.append(option)
    elif not os.path.isfile(option):
        userErrorExit("Cannot find option file or directory '%s'" % option)
    else:
        file = open(option, "r")
        for line in file.readlines():
            line = line.strip()
            user_paths += line.split(";")
        file.close()
    user_paths = filter(lambda x: x.strip(" ") != '', user_paths)
    # Resolve the listed paths relative to the location of the option file/dir.
    dirname = os.path.abspath(os.path.dirname(option))
    return map(lambda path: os.path.join(dirname, path), user_paths)
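
# Illustrative note (assumed file layout, not taken from the original source):
# the option file read by getPathsFromOption() is expected to contain
# semicolon-separated paths, one or more per line, resolved relative to the
# option file itself, for example:
#
#   ../externals/include;../externals/include/sys
#   vendor/headers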
def prebuiltMk(args):
    content = []

    def _localPath(path):
        return absPath(path.replace("\\", "/"))

    for preb in args.prebuilt:
        if not os.path.isabs(preb):
            preb = os.path.join(os.getcwd(), preb)
        if not os.path.isfile(preb):
            userErrorExit("Prebuilt library '%s' doesn't exist." % (preb))
        name, ext = os.path.splitext(preb)
        name = os.path.basename(name)
        content.append("include $(CLEAR_VARS)\n")
        content.append("LOCAL_PATH := %s\n" % (_localPath(os.path.dirname(preb))))
        content.append("LOCAL_MODULE := %s\n" % (name))
        content.append("LOCAL_PREBUILT_LIBS := %s\n" % (name + ext))
        content.append("include $(BUILD_MULTI_PREBUILT)\n")
    return "".join(content)
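
# Illustrative sketch, not part of the original source: for a hypothetical
# prebuilt library 'libs/libfoo.a', prebuiltMk() emits an Android.mk fragment
# along these lines (the absolute LOCAL_PATH depends on the working directory):
#
#   include $(CLEAR_VARS)
#   LOCAL_PATH := /abs/path/to/libs
#   LOCAL_MODULE := libfoo
#   LOCAL_PREBUILT_LIBS := libfoo.a
#   include $(BUILD_MULTI_PREBUILT)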
def check_unmerged(self):
    import subprocess
    for repo in self.git_repos:
        os.chdir(repo.getAbsLocalPath())
        args = [self.git, "status", "--porcelain"]
        p = subprocess.Popen(args, bufsize=4096, stdin=None,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stderr = p.stderr.read()
        stdout = p.stdout.read()
        # p.wait()
        for line in stdout.split("\n"):
            if line[1:2] == "U" or line[:1] == "U":
                msg = ("there are unmerged changes in '"
                       + os.path.basename(repo.getAbsLocalPath())
                       + "', cannot continue")
                userErrorExit(msg)
    os.chdir(self.view_dir)
def getBuildConfiguration(cview, args):
    from contexo import ctx_bc
    from contexo import config

    if args.bconf != None:
        bcFile = args.bconf
    else:
        if CTX_DEFAULT_BCONF != None:
            infoMessage("Using default build configuration '%s'" % (CTX_DEFAULT_BCONF), 2)
            bcFile = CTX_DEFAULT_BCONF
        else:
            userErrorExit("No build configuration specified.")

    # Ugliness:
    # Historically the BCFile class located both the bc file and the cdef file
    # on its own from a provided list of search locations. We work around this
    # by providing only the single paths to these items, which we get from the
    # view (maintaining backward compatibility).
    # Since we don't know the name of the CDEF yet, we have to violate some
    # good coding morale and extract it manually from the bc file. Some of this
    # code was copied from BCFile::__process_bc().
    # TODO: Make this a lot more pretty if possible.

    bcFilePath = cview.locateItem(bcFile, 'bconf')
    bcFilename = os.path.basename(bcFilePath)
    bcPath = os.path.dirname(bcFilePath)

    bcDict = config.Config(bcFilePath)
    section = bcDict.get_section('config')
    if not section.has_key('CDEF'):
        userErrorExit("Mandatory BC option 'CDEF' is missing.")

    cdefFilename = section['CDEF']
    cdefFilePath = cview.locateItem(cdefFilename, 'cdef')
    cdefPath = os.path.dirname(cdefFilePath)

    ctxAssert(os.path.basename(os.path.normcase(cdefFilePath)) == os.path.normcase(cdefFilename),
              "Something went wrong in our workaround..")

    bc = ctx_bc.BCFile(bcFilename, bcPath, cdefPath, cfgFile)

    return bc
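
# Illustrative sketch (assumed file layout, not taken from the original source):
# getBuildConfiguration() only requires that the located .bc file has a
# 'config' section with a CDEF entry pointing at a compiler definition file,
# for example:
#
#   [config]
#   CDEF = gcc_x86.cdef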
#
# Process commandline (the first option is implicit)
#
knownOptions = ['--comp', '--bc', '--o', '--sl', '--sh', '--sc', '--env', '--log', '--verb', '--optfile']
options = digestCommandline(sys.argv[1:], True, knownOptions)

#
# Check mandatory options
#
for opt in ['--comp', '--bc']:
    if opt not in options.keys():
        ctx_common.userErrorExit("Missing mandatory option: '%s'" % opt)

#
# Assign default values to omitted options
#
if not options.has_key('--o'):
    options['--o'] = [os.getcwd(),]

if not options.has_key('--sl'):
    options['--sl'] = ["",]

if not options.has_key('--sh'):
    options['--sh'] = ["",]

#
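# Illustrative note (hypothetical invocation, not from the original source):
# '--comp' and '--bc' are mandatory, while '--o', '--sl' and '--sh' fall back
# to the defaults assigned above, e.g.:
#
#   <script> --comp mylib.comp --bc gcc_debug.bc --o ./output --verb 2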
def cmd_parse( args ):
    import string
    infoMessage("Receiving export data from Contexo...", 1)
    package = ctx_export.CTXExportData()
    package.receive() # Reads pickled export data from stdin

    infoMessage("Received export data:", 4)
    for item in package.export_data.keys():
        infoMessage("%s: %s"%(item, str(package.export_data[item])), 4)

    # Retrieve build config from session
    bc_file = package.export_data['SESSION'].getBCFile()
    build_params = bc_file.getBuildParams()

    #TODO?
    debugmode = bool( not args.release )

    #
    # Add module paths/repositories as include directories
    #
    modTags = list()
    incPaths = list()
    depRoots = package.export_data['PATHS']['MODULES']
    depMgr = package.export_data['DEPMGR']
    for depRoot in depRoots:
        incPathCandidates = os.listdir( depRoot )
        for cand in incPathCandidates:
            path = os.path.join(depRoot, cand)
            if contexo.ctx_cmod.isContexoCodeModule( path ):
                rawMod = contexo.ctx_cmod.CTXRawCodeModule(path)
                incPaths.append( path )
                # Only include private headers for projects containing the specified module
                #incPaths.append( os.path.join(rawMod.getRootPath(), rawMod.getPrivHeaderDir()) )
                modTags.append( 'COMPILING_MOD_' + string.upper( rawMod.getName() ) )

    #
    # Determine if we're exporting components or modules, and do some related
    # sanity checks
    #
    comp_export = bool( package.export_data['COMPONENTS'] != None )

    if comp_export:
        # Exporting components
        pass
    else:
        # Exporting modules
        userErrorExit( "No components specified. Currently no support for module-export.")

    # Regardless if we export components or modules, all modules are located in export_data['MODULES']
    module_map = create_module_mapping_from_module_list( package.export_data['MODULES'].values() )

    allSources = [] # Used to find a common path.
    allCtxMods = {}
    staticLibs = []
    if comp_export:
        for comp in package.export_data['COMPONENTS']:
            for library, modules in comp.libraries.iteritems():
                ctxMods = [ mod for mod in module_map if mod['MODNAME'] in modules ]
                staticLibs.append( { 'PROJNAME': library, 'LIBNAME': library, 'MODULELIST': ctxMods } )
                for ctxMod in ctxMods:
                    allSources.extend(ctxMod["SOURCES"])
                    if not allCtxMods.has_key(ctxMod['MODNAME']):
                        allCtxMods[ctxMod['MODNAME']] = []
                    allCtxMods[ctxMod['MODNAME']].append(comp)

    for ctxModName, comps in allCtxMods.iteritems():
        if len(comps) > 1:
            warningMessage("Contexo module '%s' specified in multiple .comp files:" % (ctxModName))
            for comp in comps:
                warningMessage("  %s." % (comp.path))

    # Basic argument checks
    if args.ndk == None and args.mydroid == None:
        userErrorExit("Must specify either --ndk or --mydroid.")
    elif args.ndk <> None and args.mydroid <> None:
        userErrorExit("Specified both --ndk and --mydroid.")
    elif args.ndk <> None:
        if not os.path.isdir(args.ndk):
            userErrorExit("'%s' specified by --ndk does not exist or is not a directory." % (args.ndk))
    else:
        if not os.path.isdir(args.mydroid):
            userErrorExit("'%s' specified by --mydroid does not exist or is not a directory." % (args.mydroid))

    if args.app == None:
        userErrorExit("--app not specified.")

    if args.arm_mode <> None and not args.arm_mode in ["arm", "thumb"]:
        userErrorExit("Illegal arm mode '%s', specified with --arm-mode." % (args.arm_mode))

    if args.abs_sub <> None:
        if (len(args.abs_sub) % 2 != 0):
            userErrorExit("--abs-sub: number of arguments must be a 2-multiple.")
        global absPathSub
        absPathSub = args.abs_sub

    if args.rel_sub <> None:
        if (len(args.rel_sub) % 2 != 0):
            userErrorExit("--rel-sub: number of arguments must be a 2-multiple.")
        global relPathSub
        relPathSub = args.rel_sub

    # This will be used as LOCAL_PATH for all (android) modules.
    # By using this path we ensure that no paths contain any "..".
    # (They would mess up the android build system.)
    localPath = lowestCommonPath(allSources)

    # Returns a path to be used in a makefile.
    def getDstPath(*pathComps):
        if args.project <> None:
            if not os.path.isabs(args.project):
                return os.path.join(os.getcwd(), args.project, *pathComps).replace("\\", "/")
            else:
                return os.path.join(args.project, *pathComps).replace("\\", "/")
        elif args.ndk <> None:
            return os.path.join(args.ndk, "apps", args.app, "project", *pathComps).replace("\\", "/")
        else:
            return os.path.join(args.mydroid, args.app, *pathComps).replace("\\", "/")

    # Returns a path that locates where to actually put a file.
    def getOutPath(*pathComps):
        if args.output <> None:
            if not os.path.isabs(args.output):
                return os.path.join(os.getcwd(), args.output, "apps", args.app, "project", *pathComps).replace("\\", "/")
            else:
                return os.path.join(args.output, "apps", args.app, "project", *pathComps).replace("\\", "/")
        else:
            return getDstPath(*pathComps)

    if args.ndk <> None:
        # Determine location of the Application.mk.
        if args.output == None:
            applicationDir = os.path.join(args.ndk, "apps", args.app)
        else:
            if not os.path.isabs(args.output):
                applicationDir = os.path.join(os.getcwd(), args.output, "apps", args.app).replace("\\", "/")
            else:
                applicationDir = os.path.join(args.output, "apps", args.app).replace("\\", "/")
        libPath = args.mk_path
    else:
        # Source tree build, determine location of the main Android.mk.
        if args.output == None:
            applicationDir = os.path.join(args.mydroid, args.app)
        else:
            if not os.path.isabs(args.output):
                applicationDir = os.path.join(os.getcwd(), args.output, args.app).replace("\\", "/")
            else:
                applicationDir = os.path.join(args.output, args.app).replace("\\", "/")
        libPath = ""

    # Determine if anything is to be omitted.
    if args.no <> None:
        argOmits = [no.lower() for no in args.no]
        for omit in argOmits:
            if not omits.has_key(omit):
                userErrorExit("'%s' is not a valid argument to --no." % (omit))
            else:
                omits[omit] = True

    if args.mydroid <> None and args.project == None:
        omits["top"] = True

    # We generate one makefile per library.
    # This variable could be made changeable via the commandline.
    # However, it's more practical to subdivide into several
    # makefiles. If one of them is changed all others needn't be rebuilt.
    allInOne = False

    sharedObjLib = None
    if args.shared <> None:
        if len(args.shared) == 0:
            userErrorExit("No libraries specified by --shared.")
        partsOfShared = []
        for name in args.shared:
            for libMod in staticLibs:
                if libMod["LIBNAME"] == name:
                    break
            else:
                userErrorExit("Contexo library '%s', specified by --shared not found in export." % (name))
            del staticLibs[staticLibs.index(libMod)]
            partsOfShared.append(libMod)
        name = args.shared[0] if args.shared_name == None else args.shared_name
        sharedObjLib = { 'PROJNAME': name, 'LIBNAME': name, 'MODULELIST': [], 'SHAREDOBJECT' : True }
        for part in partsOfShared:
            sharedObjLib['MODULELIST'].extend(part['MODULELIST'])
    else:
        if args.ldlibs <> None:
            warningMessage("Ignoring option --ldlibs since --shared was not specified.")
        if args.shared_name <> None:
            warningMessage("Ignoring option --shared-name since --shared was not specified.")

    staticRelPath = "static"
    sharedRelPath = "shared"
    mkFileVerbosity = 1
    if not omits["static"] and len(staticLibs) > 0:
        if not allInOne:
            for staticLib in staticLibs:
                dirName = staticRelPath + "_" + staticLib['LIBNAME']
                lclDstDir = getDstPath(libPath, dirName)
                lclOutDir = getOutPath(libPath, dirName)
                if not os.path.exists(lclOutDir):
                    os.makedirs(lclOutDir)
                mkFileName = os.path.join(lclOutDir, "Android.mk")
                content = moduleMk(staticLib, build_params, staticLibs, None, depMgr, lclDstDir, args, localPath=localPath)
                createFile(mkFileName, content, args)
                infoMessage("Created %s" % (mkFileName), mkFileVerbosity)
        else:
            lclDstDir = getDstPath(libPath, staticRelPath)
            lclOutDir = getOutPath(libPath, staticRelPath)
            if not os.path.exists(lclOutDir):
                os.makedirs(lclOutDir)
            mkFileName = os.path.join(lclOutDir, "Android.mk")
            file = open(mkFileName, "wt")
            i = 0
            for staticLib in staticLibs:
                file.write(moduleMk(staticLib, build_params, staticLibs, None, depMgr, lclDstDir, args, localPath=localPath))
                file.write("#" * 60 + "\n")
                i += 1
            file.close()
            infoMessage("Created %s" % (mkFileName), mkFileVerbosity)

    if sharedObjLib <> None and not omits["shared"]:
        lclDstDir = getDstPath(libPath, sharedRelPath)
        lclOutDir = getOutPath(libPath, sharedRelPath)
        if not os.path.exists(lclOutDir):
            os.makedirs(lclOutDir)
        mkFileName = os.path.join(lclOutDir, "Android.mk")
        content = moduleMk(sharedObjLib, build_params, staticLibs, None, depMgr, lclDstDir, args, localPath=localPath)
        createFile(mkFileName, content, args)
        if args.static_libs == None and len(staticLibs) > 0:
            warningMessage("Computed link order is very likely not accurate.")
            warningMessage("See %s." % (mkFileName))
        infoMessage("Created %s" % (mkFileName), mkFileVerbosity)

    if args.prebuilt <> None and not omits["prebuilt"]:
        name = "prebuilt"
        lclDstDir = getDstPath(libPath, name)
        lclOutDir = getOutPath(libPath, name)
        if not os.path.exists(lclOutDir):
            os.makedirs(lclOutDir)
        mkFileName = os.path.join(lclOutDir, "Android.mk")
        content = prebuiltMk(args)
        createFile(mkFileName, content, args)

    if not omits["top"]:
        if not os.path.isdir(getOutPath(libPath)):
            os.makedirs(getOutPath(libPath))
        topMkFileName = getOutPath(libPath, "Android.mk")
        createFile(topMkFileName, "include $(call all-subdir-makefiles)", args)

    if not omits["app"]:
        if not os.path.isdir(applicationDir):
            os.makedirs(applicationDir)
        outData = []
        if args.ndk <> None:
            appMkFileName = os.path.join(applicationDir, "Application.mk")
            libNames = [staticLib['LIBNAME'] for staticLib in staticLibs]
            if sharedObjLib <> None:
                libNames.append(sharedObjLib['LIBNAME'])
            outData.append("APP_MODULES := %s\n" % (" ".join(libNames)))
            if args.project <> None:
                outData.append("APP_PROJECT_PATH := %s\n" % (absPath(getDstPath())))
            else:
                outData.append("APP_PROJECT_PATH := $(call my-dir)/project\n")
            if bc_file.dbgmode:
                outData.append("APP_OPTIM := debug\n")
            if args.vars_app <> None:
                outData.append(writeVars(args.vars_app, "--vars-app"))
        else:
            appMkFileName = os.path.join(applicationDir, "Android.mk")
            if args.project <> None:
                outData.append("include $(call all-makefiles-under,%s)\n" % (absPath(getDstPath())))
            else:
                outData.append("include $(call all-subdir-makefiles)\n")
        content = "".join(outData)
        createFile(appMkFileName, content, args)

    #
    # The End
    #
    infoMessage("Export done.", 1)
modules = package.export_data['MODULES']
module_map = create_module_mapping_from_module_list( package.export_data['MODULES'], depMgr)

if linkHeaders:
    headers = set()
    for mod in module_map:
        headers |= set(mod['PUBHDRS'])
        headers |= set(mod['PRIVHDRS'])
        headers |= set(mod['DEPHDRS'])
        if buildTests:
            headers |= set(mod['TESTHDRS'])
    if os.path.isfile('output'):
        userErrorExit('output must not be a file if using symlinks')
    if not os.path.isdir('output'):
        os.mkdir('output')
    hdrlinkOutputDir = 'output' + os.sep + 'hdrlinks'
    shutil.rmtree(hdrlinkOutputDir, True)
    os.mkdir('output' + os.sep + 'hdrlinks')
    for header in headers:
        os.symlink(header, 'output' + os.sep + 'hdrlinks' + os.sep + os.path.basename(header))

if not os.path.isfile("Makefile.inc"):
    incmakefile = open("Makefile.inc", 'w')
    incmakefile.write("### inc_all is built after all other projects are built\n")
    incmakefile.write("### add dependencies for inc_all to add further build steps\n")
    incmakefile.write("inc_all: $(LIBS)\n")
    incmakefile.write("\ttouch $@\n\n")
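
# Illustrative sketch, not part of the original source: the Makefile.inc stub
# generated above ends up containing these lines (recipe indented with a tab):
#
#   ### inc_all is built after all other projects are built
#   ### add dependencies for inc_all to add further build steps
#   inc_all: $(LIBS)
#   <tab>touch $@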
def cmd_parse(args):
    import string
    infoMessage("Receiving export data from Contexo...", 1)
    package = ctx_export.CTXExportData()
    package.receive()  # Reads pickled export data from stdin

    infoMessage("Received export data:", 4)
    for item in package.export_data.keys():
        infoMessage("%s: %s" % (item, str(package.export_data[item])), 4)

    # Retrieve build config from session
    bc_file = package.export_data["SESSION"].getBCFile()
    build_params = bc_file.getBuildParams()

    tests = package.export_data["TESTS"]

    #
    # Add module paths/repositories as include directories
    #
    modTags = list()
    incPaths = list()

    # TODO: the preprocessor define COMPILING_MOD_ is a legacy definition,
    # initially created to make sure private headers were not included in a
    # project.
    # DO NOT REMOVE until all previous releases compile without it.
    # /thomase
    depRoots = package.export_data["PATHS"]["MODULES"]
    for depRoot in depRoots:
        incPathCandidates = os.listdir(depRoot)
        for cand in incPathCandidates:
            path = os.path.join(depRoot, cand)
            if contexo.ctx_cmod.isContexoCodeModule(path):
                rawMod = contexo.ctx_cmod.CTXRawCodeModule(path)
                modTags.append("COMPILING_MOD_" + string.upper(rawMod.getName()))

    #
    # Collect additional include paths and additional library paths
    #
    def getPathsFromOption(option):
        user_paths = list()
        if os.path.isdir(option):
            user_paths.append(option)
        elif not os.path.isfile(option):
            userErrorExit("Cannot find option file or directory '%s'" % option)
        else:
            file = open(option, "r")
            for line in file.readlines():
                line = line.strip()
                user_paths += line.split(";")
            file.close()
        user_paths = filter(lambda x: x.strip(" ") != "", user_paths)
        dirname = os.path.abspath(os.path.dirname(filename))
        return map(lambda path: os.path.join(dirname, path), user_paths)

    if args.additional_includes != None:
        filename = args.additional_includes
        user_includepaths = getPathsFromOption(filename)
        # dirname = os.path.dirname(filename)
        for inc in user_includepaths:
            incPaths.append(inc)

    #
    # Determine if we're exporting components or modules, and do some related
    # sanity checks
    #
    comp_export = bool(package.export_data["COMPONENTS"] != None)

    if comp_export:
        # Exporting components
        pass
    else:
        # Exporting modules
        if package.export_data["MODULES"] == None:
            userErrorExit("No components or modules specified for export.")

    #
    # If exporting components we create one project per component library when
    # no merge name is given, otherwise we create one large library of all
    # code modules.
    #
    projList = list()  # list of dict['PROJNAME':string, 'LIBNAME':string, 'MODULELIST':listof( see doc of make_libvcproj7 )]

    # Regardless if we export components or modules, all modules are located in export_data['MODULES']
    depMgr = package.export_data["DEPMGR"]
    module_map = create_module_mapping_from_module_list(package.export_data["MODULES"], depMgr)

    if not args.mergename:
        for comp in package.export_data["COMPONENTS"]:
            for library, modules in comp.libraries.iteritems():
                lib_modules = [("", mod) for mod in module_map if mod["MODNAME"] in modules]
                projList.append({"PROJNAME": library, "LIBNAME": library, "MODULELIST": lib_modules})
    else:
        lib_modules = []
        for comp in package.export_data["COMPONENTS"]:
            for library, modules in comp.libraries.iteritems():
                lib_modules.extend([(library, mod) for mod in module_map if mod["MODNAME"] in modules])
        projList.append({"PROJNAME": args.mergename, "LIBNAME": args.mergename, "MODULELIST": lib_modules})

    #
    # Generate the projects
    #
    if not os.path.exists(args.output):
        os.makedirs(args.output)

    for proj in projList:
        # codeModules = list of dictionaries: { MODNAME: string, SOURCES: list(paths), PRIVHDRS: list(paths), PUBHDRS: list(paths), PRIVHDRDIR: string, TESTSOURCES: list }
        contexo.ctx_netbeans.make_libproj(
            proj["PROJNAME"],
            build_params.cflags,
            build_params.prepDefines + modTags,
            proj["MODULELIST"],
            proj["LIBNAME"] + ".lib",
            tests,
            incPaths,
            args.output,
        )

    #
    # The End
    #
    infoMessage("Export done.", 1)
def cmd_export(args):
    from contexo import ctx_cmod
    from contexo import ctx_base
    from contexo import ctx_envswitch
    from contexo.ctx_depmgr import CTXDepMgr
    from contexo.ctx_export import CTXExportData

    envLayout = None
    oldEnv = None
    if args.env != None:
        envLayout = EnvironmentLayout( cfgFile, args.env )
        oldEnv = switchEnvironment( envLayout, True )

    # Prepare all
    cview = ctx_view.CTXView( args.view, getAccessPolicy(args), validate=bool(args.repo_validation) )
    bc = getBuildConfiguration( cview, args )

    deprecated_tolerate_missing_headers_warning(args)

    depmgr = CTXDepMgr ( cview.getItemPaths('modules'), args.fail_on_missing_headers, bc.getArchPath() )

    session = ctx_base.CTXBuildSession( bc )
    session.setDependencyManager( depmgr )

    export_items = expand_list_files( cview, args.export_items )

    # Make sure we have only one type of item to export
    component_export = True
    for item in export_items:
        if item.endswith( '.comp' ):
            if component_export == False:
                userErrorExit("An export operation can either export a list of components OR a list of modules, not both.")
        else:
            component_export = False

    components = list()
    main_modules = list() # Excluding dependency modules

    if component_export:
        # Construct and validate component objects
        components = create_components( export_items, cview.getItemPaths('comp') )
        for comp in components:
            for library, compmodules in comp.libraries.items():
                depmgr.addCodeModules( compmodules, args.tests )
                main_modules.extend( compmodules )
    else:
        main_modules = export_items

    # Divert modules into main modules and dependency modules
    export_modules = depmgr.getCodeModulesWithDependencies() if args.deps else main_modules
    export_modules.sort()
    dep_modules = set(export_modules) - set(main_modules)

    ctx_modules = depmgr.createCodeModules( main_modules, args.tests )
    ctx_modules.extend ( depmgr.createCodeModules( dep_modules ) )

    module_map = dict()
    for mod in ctx_modules:
        module_map[mod.getName()] = mod

    depmgr.updateDependencyHash()

    # Dispatch export data to handler (through pipe)
    package = CTXExportData()
    package.setExportData( module_map, components, args.tests, session, depmgr, cview, envLayout, args )
    package.dispatch()

    # Restore environment
    if args.env != None:
        switchEnvironment( oldEnv, False )
def cmd_build(args):
    from contexo import ctx_cmod
    from contexo import ctx_base
    from contexo import ctx_envswitch
    from contexo.ctx_depmgr import CTXDepMgr
    from contexo.ctx_export import CTXExportData

    envLayout = None
    oldEnv = None
    if args.env != None:
        envLayout = EnvironmentLayout( cfgFile, args.env )
        oldEnv = switchEnvironment( envLayout, True )

    absIncDirs = map(os.path.abspath, args.incdirs)

    # Prepare all
    cview = ctx_view.CTXView( args.view, getAccessPolicy(args), validate=bool(args.repo_validation) )
    bc = getBuildConfiguration( cview, args )
    bc.buildParams.incPaths.extend( absIncDirs ) #TODO: accessing 'private' data?
    bc.buildParams.ldDirs.extend(args.libdirs)
    bc.buildParams.ldLibs.extend(args.libs)

    archPath = list()
    archPath = bc.getArchPath()

    deprecated_tolerate_missing_headers_warning(args)

    depmgr = CTXDepMgr ( codeModulePaths = cview.getItemPaths('modules'),
                         failOnMissingHeaders = args.fail_on_missing_headers,
                         archPath = bc.getArchPath(),
                         additionalIncDirs = absIncDirs,
                         legacyCompilingMod = args.legacy_compiling_mod )

    session = ctx_base.CTXBuildSession( bc )
    session.setDependencyManager( depmgr )

    items = expand_list_files( cview, args.items )

    # Make sure we have only one type of item to export
    #TODO: make a more robust recognition than file extension for .comp
    component_build = True
    for item in items:
        if item.endswith( '.comp' ):
            if component_build == False:
                userErrorExit("The operation can either work on a list of components OR a list of modules, not both.")
        else:
            component_build = False

    # Register build configuration in log handler
    ctx_log.ctxlogSetBuildConfig( bc.getTitle(),
                                  bc.getCompiler().cdefTitle,
                                  bc.getBuildParams().cflags,
                                  bc.getBuildParams().prepDefines,
                                  "N/A" )

    outputPath = args.output
    bin_dir = os.path.join( outputPath, args.bindir )
    header_dir = os.path.join( outputPath, args.headerdir )

    objs = list()

    # Process components
    if component_build:
        infoMessage("building components", 6)
        components = create_components( items, cview.getItemPaths('comp') )
        for comp in components:
            ctx_log.ctxlogBeginComponent( comp.name )

            # TODO: also unused, what does the workaround below fix?
            # Workaround to get header export to work
            #codemodule_map = dict()

            # Build component modules.
            for library, modules in comp.libraries.items():
                modules = expand_list_files( cview, modules )
                depmgr.addCodeModules( modules, args.tests )
                args.library_name = library
                infoMessage('args: %s'%args, 6)
                objs += buildmodules( depmgr, session, modules, args, bin_dir, session.bc.getTitle(), args.library_name)
                if (args.all_headers):
                    header_path = os.path.join(args.output, args.headerdir )
                    export_public_module_headers( depmgr, modules, header_path )
                depmgr.emptyCodeModules()

            export_headers( depmgr, comp.publicHeaders, header_dir )
            ctx_log.ctxlogEndComponent()

    # Process modules
    else:
        infoMessage("building modules", 6)
        depmgr.addCodeModules( items, args.tests )
        objs += buildmodules( depmgr, session, items, args, outputPath, bc.getTitle(), libraryName=args.library_name)
        export_public_module_headers( depmgr, items, header_dir )

    if args.executable_name:
        session.linkExecutable(objs, bin_dir, args.executable_name)

    # Write log if requested
    if args.logfile != None:
        logfilepath = os.path.join( args.output, args.logfile )
        logpath = os.path.normpath(os.path.dirname( logfilepath ))
        if len(logpath) and not os.path.isdir(logpath):
            os.makedirs( logpath )
        ctx_log.ctxlogWriteToFile( logfilepath, appendToExisting=False )

    # Restore environment
    if args.env != None:
        switchEnvironment( oldEnv, False )
def cmd_parse( args ):
    import string
    infoMessage("Receiving export data from Contexo...", 1)
    package = ctx_export.CTXExportData()
    package.receive() # Reads pickled export data from stdin

    infoMessage("Received export data:", 4)
    for item in package.export_data.keys():
        infoMessage("%s: %s"%(item, str(package.export_data[item])), 4)

    # Retrieve build config from session
    #bc_file = package.export_data['SESSION'].getBCFile()
    #build_params = bc_file.getBuildParams()
    #tests = package.export_data['TESTS']

    module_dicts = create_module_mapping_from_module_list( package.export_data['MODULES'].values() )

    def getFileContents(inputFilePath):
        f = open( inputFilePath, 'rb' )
        contents = f.read()
        f.close()
        return contents

    input_names = list( args.input_names )
    # init the dict with input_names and empty lists
    allNames = dict( zip(input_names, [[] for i in range(len(input_names))]) )

    for module in module_dicts:
        sources = module['SOURCES'] + module['TESTSOURCES']
        print >>sys.stderr, 'Analysing %s in %s'%( map(os.path.basename, sources), module['MODNAME'] )

        def readNames(name):
            from contexo.ctx_cparser import parseTengilTests
            # Get a list of lists of test names. One list of names for each source file.
            lista = map( lambda sourcefile: parseTengilTests( getFileContents(sourcefile), name), sources )
            # squash the list of lists into a simple list
            allNames[name] += reduce( lambda l1, l2: l1 + l2, lista, [])

        map(readNames, input_names)

    output_names = input_names if len(args.output_names) == 0 else list(args.output_names)

    if len(output_names) != len(input_names):
        userErrorExit("output names should map 1:1 to input names")

    nameMap = dict(zip(input_names, output_names))

    outputfile = open( args.output, 'wb' )

    def writeName( (inname, outname) ):
        def writeCall( arg ):
            outputfile.write( '%s(%s)\n'%( outname, arg ) )
            print >>sys.stderr, ( ' %s(%s)'%( outname, arg ) )
        map(writeCall, allNames[inname])

    map(writeName, nameMap.items())

    outputfile.close()

    #
    # The End
    #
    infoMessage("Export done.", 1)
def cmd_build(args):
    launch_path = os.path.abspath('.')
    view_dir = get_view_dir(args.view)
    obj_dir = view_dir + os.sep + '.ctx/obj'

    # test if not absolute path
    if args.output[0] != '/' and args.output[0] != '\\' and args.output[1:3] != ':\\':
        lib_output = os.path.join(view_dir, args.output)
    else:
        lib_output = args.output

    lib_dirs = map(os.path.abspath, args.libdirs)

    from contexo import ctx_cmod
    from contexo import ctx_base
    from contexo import ctx_envswitch
    from contexo.ctx_depmgr import CTXDepMgr
    from contexo.ctx_export import CTXExportData

    envLayout = None
    oldEnv = None
    if args.env != None:
        envLayout = EnvironmentLayout( cfgFile, args.env )
        oldEnv = switchEnvironment( envLayout, True )

    absIncDirs = map(os.path.abspath, args.incdirs)

    # Prepare all
    deprecated_nra_warning( args )
    deprecated_repo_validation_warning(args)

    cview = ctx_view.CTXView( view_dir, validate=False )
    bc = getBuildConfiguration( cview, args )
    bc.buildParams.incPaths.extend( absIncDirs ) #TODO: accessing 'private' data?
    bc.buildParams.ldDirs.extend(lib_dirs)
    bc.buildParams.ldLibs.extend(args.libs)

    archPath = list()
    archPath = bc.getArchPath()

    deprecated_tolerate_missing_headers_warning(args)

    depmgr = CTXDepMgr ( codeModulePaths = cview.getItemPaths('modules'),
                         failOnMissingHeaders = args.fail_on_missing_headers,
                         archPath = bc.getArchPath(),
                         additionalIncDirs = absIncDirs,
                         legacyCompilingMod = args.legacy_compiling_mod,
                         legacyDuplicateSources = args.legacy_duplicate_sources,
                         globalOutputDir = obj_dir,
                         bc = bc )

    session = ctx_base.CTXBuildSession( bc )
    session.setDependencyManager( depmgr )

    items = expand_list_files( cview, args.items )

    # Make sure we have only one type of item to export
    #TODO: make a more robust recognition than file extension for .comp
    component_build = True
    for item in items:
        if item.endswith( '.comp' ):
            if component_build == False:
                userErrorExit("The operation can either work on a list of components OR a list of modules, not both.")
        else:
            component_build = False

    # Register build configuration in log handler
    ctx_log.ctxlogSetBuildConfig( bc.getTitle(),
                                  bc.getCompiler().cdefTitle,
                                  bc.getBuildParams().cflags,
                                  bc.getBuildParams().prepDefines,
                                  "N/A" )

    outputPath = lib_output
    bin_dir = os.path.join( outputPath, args.bindir )
    header_dir = os.path.join( outputPath, args.headerdir )

    objs = list()

    ## TODO: place this in CTXCompiler where it belongs when the spaghetti code is gone
    ## changing working directory to .ctx/obj/[BUILDCONFIGNAME]
    dest_wd = os.path.join(obj_dir, bc.getTitle())
    try:
        os.makedirs(dest_wd)
    except:
        pass
    old_path = os.path.abspath('')
    os.chdir(dest_wd)

    # Process components
    if component_build:
        infoMessage("Building components...", 1)
        components = create_components( items, cview.getItemPaths('comp'), obj_dir, launch_path )
        allmods = set()
        for comp in components:
            for library, modules in comp.libraries.items():
                mods = expand_list_files( cview, modules )
                depmgr.addCodeModules( mods, args.tests )
                allmods |= set(modules)
        depMods = depmgr.getModuleDependencies(allmods)
        depmgr.emptyCodeModules()
        if len(depMods - set(allmods)) > 0:
            for module in depMods - set(allmods):
                warningMessage('The module "' + module + '" was not specified to be built in any specified .comp file, but at least one of its headers is included by another module; linker errors may arise')

        for comp in components:
            ctx_log.ctxlogBeginComponent( comp.name )

            # Build component modules.
            for library, modules in comp.libraries.items():
                modules = expand_list_files( cview, modules )
                depmgr.addCodeModules( modules, args.tests )
                args.library_name = library
                infoMessage('args: %s'%args, 6)
                objs += buildmodules( depmgr, session, modules, args, bin_dir, session.bc.getTitle(), args.library_name)
                if (args.all_headers):
                    header_path = os.path.join(lib_output, args.headerdir )
                    export_public_module_headers( depmgr, modules, header_path )
                depmgr.emptyCodeModules()

            export_headers( depmgr, comp.publicHeaders, header_dir, cview )
            ctx_log.ctxlogEndComponent()

    # Process modules
    else:
        infoMessage("Building modules...", 1)
        depmgr.addCodeModules( items, args.tests )
        if args.deps:
            print >>sys.stderr, "Dependencies for:",
            print >>sys.stderr, items
            print >>sys.stderr, depmgr.getCodeModulesWithDependencies()
        objs += buildmodules( depmgr, session, items, args, outputPath, bc.getTitle(), libraryName=args.library_name)
        export_public_module_headers( depmgr, items, header_dir )

    if args.executable_name:
        session.linkExecutable(objs, bin_dir, args.executable_name)

    # Write log if requested
    if args.logfile != None:
        logfilepath = os.path.join( lib_output, args.logfile )
        logpath = os.path.normpath(os.path.dirname( logfilepath ))
        if len(logpath) and not os.path.isdir(logpath):
            os.makedirs( logpath )
        ctx_log.ctxlogWriteToFile( logfilepath, appendToExisting=False )

    os.chdir(old_path)

    # Restore environment
    if args.env != None:
        switchEnvironment( oldEnv, False )
from contexo import ctx_common
from contexo.ctx_common import setInfoMessageVerboseLevel, infoMessage, userErrorExit, warningMessage, ctxAssert
from contexo.ctx_comp import ctx_log, COMPFile
from contexo import ctx_sysinfo

import locale
try:
    # this fails on windows, but it doesn't matter much
    locale.resetlocale()  # locale.LC_ALL, 'en_US.UTF-8')
except:
    pass

try:
    import platform
    if platform.architecture()[0] == '64bit' and platform.architecture()[1] == 'WindowsPE':
        userErrorExit("64-bit Python on Windows does not support 32-bit pysvn. Install 32-bit Python instead.")
except:
    # if we have an old Contexo installation we might wind up here due to
    # Contexo previously having a package named 'platform'
    pass

msgSender = 'ctx.py'

logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s',
                    datefmt='%H:%M:%S',
                    level=logging.DEBUG)

#
# Get configuration.
#
contexo_config_path = os.path.join( ctx_common.getUserCfgDir(), ctx_sysinfo.CTX_CONFIG_FILENAME )
infoMessage("Using config file '%s'"%contexo_config_path, 1)
cfgFile = ctx_cfg.CFGFile( contexo_config_path )
def cmd_info(args):
    userErrorExit("info is deprecated")
def cmd_parse( args ):
    import string
    infoMessage("Receiving export data from Contexo...", 1)
    package = ctx_export.CTXExportData()
    package.receive() # Reads pickled export data from stdin

    infoMessage("Received export data:", 4)
    for item in package.export_data.keys():
        infoMessage("%s: %s"%(item, str(package.export_data[item])), 4)

    # Retrieve build config from session
    bc_file = package.export_data['SESSION'].getBCFile()
    build_params = bc_file.getBuildParams()

    debugmode = bool( not args.release )
    tests = package.export_data['TESTS']

    #
    # Add module paths/repositories as include directories
    #
    modTags = list()
    incPaths = list()

    # the preprocessor define COMPILING_MOD_ is a legacy definition,
    # initially created to make sure private headers were not included in a
    # project.
    # this is kept to build legacy products
    if args.legacy_compiling_mod:
        depRoots = package.export_data['PATHS']['MODULES']
        for depRoot in depRoots:
            incPathCandidates = os.listdir( depRoot )
            for cand in incPathCandidates:
                path = os.path.join(depRoot, cand)
                if contexo.ctx_cmod.isContexoCodeModule( path ):
                    rawMod = contexo.ctx_cmod.CTXRawCodeModule(path)
                    modTags.append( 'COMPILING_MOD_' + string.upper( rawMod.getName() ) )

    #
    # Collect additional include paths and additional library paths
    #
    def getPathsFromOption(option):
        user_paths = list()
        if os.path.isdir( option ):
            user_paths.append(option)
        elif not os.path.isfile( option ):
            userErrorExit("Cannot find option file or directory '%s'"%option)
        else:
            file = open( option, "r" )
            for line in file.readlines():
                line = line.strip()
                user_paths += line.split(";")
            file.close()
        user_paths = filter(lambda x: x.strip(" ") != '', user_paths)
        dirname = os.path.abspath(os.path.dirname(filename))
        return map(lambda path: os.path.join(dirname, path), user_paths)

    if args.additional_includes != None:
        filename = args.additional_includes
        user_includepaths = getPathsFromOption(filename)
        #dirname = os.path.dirname(filename)
        for inc in user_includepaths:
            incPaths.append(inc)
        #print >>sys.stderr, 'incPaths %s' %incPaths

    libPaths = list()
    if args.additional_libdir != None:
        filename = args.additional_libdir
        user_librarypaths = getPathsFromOption(filename)
        libPaths += user_librarypaths

    # Additional dependencies
    libNames = list()
    user_libnames = list()
    if args.additional_dependencies != None:
        filename = args.additional_dependencies
        user_libnames = getPathsFromOption(filename)
        libNames += user_libnames

    #
    # Determine if we're exporting components or modules, and do some related
    # sanity checks
    #
    comp_export = bool( package.export_data['COMPONENTS'] != None )

    if comp_export:
        # Exporting components
        if args.mirror_components == True and args.project_name != None:
            warningMessage("Ignoring option --project-name (-pn) when option --mirror-components (-mc) is used")
            args.project_name = None
    else:
        # Exporting modules
        if args.mirror_components == True:
            warningMessage("Ignoring option --mirror-components (-mc) when exporting modules")
            args.mirror_components = False
        if package.export_data['MODULES'] == None:
            userErrorExit( "No components or modules specified for export.")

    project_name = args.project_name
    if project_name == None and args.mirror_components == False:
        project_name = default_projname

    # strip vcproj extension if user included it.
    if project_name != None and project_name[-7:].lower() == '.vcproj':
        project_name = project_name[0:-7]

    #
    # If exporting components and the user specified --mirror-components we
    # create one vcproj per component library, otherwise we create one large
    # library of all code modules.
    #
    vcprojList = list() # list of dict['PROJNAME':string, 'LIBNAME':string, 'MODULELIST':listof( see doc of make_libvcproj7 )]

    # Regardless if we export components or modules, all modules are located in export_data['MODULES']
    depMgr = package.export_data['DEPMGR']
    module_map = create_module_mapping_from_module_list( package.export_data['MODULES'], depMgr)

    if comp_export and args.mirror_components:
        for comp in package.export_data['COMPONENTS']:
            for library, modules in comp.libraries.iteritems():
                lib_modules = [ mod for mod in module_map if mod['MODNAME'] in modules ]
                vcprojList.append( { 'PROJNAME': library, 'LIBNAME': library, 'MODULELIST': lib_modules } )
    else:
        # Module export OR component export without mirroring component structure
        vcprojList.append( {'PROJNAME': project_name, 'LIBNAME': project_name, 'MODULELIST': module_map } )

    #
    # Generate the projects
    #
    if not os.path.exists( args.output ):
        os.makedirs( args.output )

    guidDict = dict()
    for proj in vcprojList:
        #codeModules = listof dictionaries: { MODNAME: string, SOURCES: list(paths), PRIVHDRS: list(paths), PUBHDRS: list(paths), PRIVHDRDIR: string, TESTSOURCES:list }
        guidDict[proj['PROJNAME']] = contexo.ctx_msvc.make_libvcproj8( proj['PROJNAME'],
                                                                       build_params.cflags,
                                                                       build_params.prepDefines + modTags,
                                                                       proj['MODULELIST'],
                                                                       proj['LIBNAME'] + '.lib',
                                                                       debugmode,
                                                                       tests,
                                                                       incPaths,
                                                                       args.output,
                                                                       args.platform,
                                                                       proj['PROJNAME'],
                                                                       args.configuration_type,
                                                                       libNames,
                                                                       libPaths )

    #
    # Handle external project if specified
    #
    external_vcproj = None
    if args.external_vcproj != None:
        external_vcproj = contexo.ctx_msvc.get_info_vcproj8( os.path.abspath( args.external_vcproj ) )
        external_vcproj['DEBUG'] = debugmode
        attrs = list()
        attrs.append( dict({ "DEBUG":debugmode, "TOOL":"VCCLCompilerTool", "KEY":"AdditionalIncludeDirectories", "VALUE":";".join(incPaths) }) )
        attrs.append( dict({ "DEBUG":debugmode, "TOOL":"VCLinkerTool", "KEY":"AdditionalLibraryDirectories", "VALUE":";".join(libPaths) }) )
        contexo.ctx_msvc.update_vcproj8(external_vcproj['FILENAME'], attrs)

    #
    # Create solution if specified
    #
    if args.solution != None:
        slnProjects = list()
        for proj in vcprojList:
            slnProjects.append( { 'PROJNAME': proj['PROJNAME'], 'PROJGUID': guidDict[proj['PROJNAME']], 'DEBUG': debugmode } )
        contexo.ctx_msvc.make_solution8( args.solution, args.output, slnProjects, external_vcproj, args.platform )

    #
    # The End
    #
    infoMessage("Export done.", 1)
def cmd_parse( args ):
    import string
    infoMessage("Receiving export data from Contexo...", 1)
    package = ctx_export.CTXExportData()
    package.receive() # Reads pickled export data from stdin

    infoMessage("Received export data:", 4)
    for item in package.export_data.keys():
        infoMessage("%s: %s"%(item, str(package.export_data[item])), 4)

    # Retrieve build config from session
    bc_file = package.export_data['SESSION'].getBCFile()
    build_params = bc_file.getBuildParams()

    #TODO?
    debugmode = bool( not args.release )

    #
    # Add module paths/repositories as include directories
    #
    modTags = list()
    incPaths = list()
    depRoots = package.export_data['PATHS']['MODULES']
    depMgr = package.export_data['DEPMGR']
    for depRoot in depRoots:
        incPathCandidates = os.listdir( depRoot )
        for cand in incPathCandidates:
            path = os.path.join(depRoot, cand)
            if contexo.ctx_cmod.isContexoCodeModule( path ):
                rawMod = contexo.ctx_cmod.CTXRawCodeModule(path)
                incPaths.append( path )
                # Only include private headers for projects containing the specified module
                #incPaths.append( os.path.join(rawMod.getRootPath(), rawMod.getPrivHeaderDir()) )
                modTags.append( 'COMPILING_MOD_' + string.upper( rawMod.getName() ) )

    #
    # Determine if we're exporting components or modules, and do some related
    # sanity checks
    #
    comp_export = bool( package.export_data['COMPONENTS'] != None )

    if comp_export:
        # Exporting components
        pass
    else:
        # Exporting modules
        userErrorExit( "No components specified. Currently no support for module-export.")

    # Regardless if we export components or modules, all modules are located in export_data['MODULES']
    module_map = create_module_mapping_from_module_list( package.export_data['MODULES'].values() )

    staticLibs = []
    if comp_export:
        for comp in package.export_data['COMPONENTS']:
            for library, modules in comp.libraries.iteritems():
                ctxMods = [ mod for mod in module_map if mod['MODNAME'] in modules ]
                staticLibs.append( { 'PROJNAME': library, 'LIBNAME': library, 'MODULELIST': ctxMods } )

    if args.ndk == None:
        userErrorExit("--ndk not specified.")
    if not os.path.isdir(args.ndk):
        userErrorExit("'%s' specified by --ndk does not exist or is not a directory." % (args.ndk))
    if args.app == None:
        userErrorExit("--app not specified.")

    if args.abs_sub <> None:
        if (len(args.abs_sub) % 2 != 0):
            userErrorExit("--abs-sub: number of arguments must be a 2-multiple.")
        global absPathSub
        absPathSub = args.abs_sub

    if args.rel_sub <> None:
        if (len(args.rel_sub) % 2 != 0):
            userErrorExit("--rel-sub: number of arguments must be a 2-multiple.")
        global relPathSub
        relPathSub = args.rel_sub

    # Set up paths.
    def getDstPath(*pathComps):
        if args.project <> None:
            if not os.path.isabs(args.project):
                return os.path.join(os.getcwd(), args.project, *pathComps).replace("\\", "/")
            else:
                return os.path.join(args.project, *pathComps).replace("\\", "/")
        else:
            return os.path.join(args.ndk, "apps", args.app, "project").replace("\\", "/")

    def getOutPath(*pathComps):
        if args.output <> None:
            if not os.path.isabs(args.output):
                return os.path.join(os.getcwd(), args.output, "apps", args.app, "project", *pathComps).replace("\\", "/")
            else:
                return os.path.join(args.output, "apps", args.app, "project", *pathComps).replace("\\", "/")
        else:
            return getDstPath(*pathComps)

    if args.output == None:
        applicationDir = os.path.join(args.ndk, "apps", args.app)
    else:
        if not os.path.isabs(args.output):
            applicationDir = os.path.join(os.getcwd(), args.output, "apps", args.app).replace("\\", "/")
        else:
            applicationDir = os.path.join(args.output, "apps", args.app).replace("\\", "/")

    #projectPath = "project"
    #libPath = os.path.join(projectPath, args.mk_path)
    libPath = args.mk_path

    # Determine if anything is to be omitted.
    omits = {"static": False, "shared": False, "top": False, "app": False}
    if args.no <> None:
        argOmits = [no.lower() for no in args.no]
        for omit in argOmits:
            if not omits.has_key(omit):
                userErrorExit("'%s' is not a valid argument to --no." % (omit))
            else:
                omits[omit] = True

    #
    # Generate the makefile
    #
    # if not os.path.exists( outDir ):
    #     os.makedirs( outDir )

    # There were some problems when one makefile per comp was created (with the android build).
    # I guess it should be possible to do it that way.
    # However this way has proved to work.
    # So, we set allInOne to True.
    allInOne = True

    sharedObjLib = None
    if args.shared <> None:
        if len(args.shared) == 0:
            userErrorExit("No libraries specified by --shared.")
        partsOfShared = []
        for name in args.shared:
            for libMod in staticLibs:
                if libMod["LIBNAME"] == name:
                    break
            else:
                userErrorExit("Contexo library '%s', specified by --shared not found in export." % (name))
            del staticLibs[staticLibs.index(libMod)]
            partsOfShared.append(libMod)
        name = args.shared[0] if args.shared_name == None else args.shared_name
        sharedObjLib = { 'PROJNAME': name, 'LIBNAME': name, 'MODULELIST': [], 'SHAREDOBJECT' : True }
        for part in partsOfShared:
            sharedObjLib['MODULELIST'].extend(part['MODULELIST'])
    else:
        if args.ldlibs <> None:
            warningMessage("Ignoring option --ldlibs since --shared was not specified.")
        if args.shared_name <> None:
            warningMessage("Ignoring option --shared-name since --shared was not specified.")

    ldlibs = args.ldlibs

    staticRelPath = "static"
    sharedRelPath = "shared"
    mkFileVerbosity = 1
    if not omits["static"] and len(staticLibs) > 0:
        if not allInOne:
            for staticLib in staticLibs:
                lclDstDir = getDstPath(libPath, staticLib['LIBNAME'])
                lclOutDir = getOutPath(libPath, staticLib['LIBNAME'])
                if not os.path.exists(lclOutDir):
                    os.makedirs(lclOutDir)
                mkFileName = os.path.join(lclOutDir, "Android.mk")
                file = open(mkFileName, "wt")
                file.write(moduleMk(staticLib, build_params, staticLibs, None, depMgr, lclDstDir))
                file.close()
                infoMessage("Created %s" % (mkFileName), mkFileVerbosity)
        else:
            lclDstDir = getDstPath(libPath, staticRelPath)
            lclOutDir = getOutPath(libPath, staticRelPath)
            if not os.path.exists(lclOutDir):
                os.makedirs(lclOutDir)
            mkFileName = os.path.join(lclOutDir, "Android.mk")
            file = open(mkFileName, "wt")
            i = 0
            for staticLib in staticLibs:
                file.write(moduleMk(staticLib, build_params, staticLibs, None, depMgr, lclDstDir, i == 0))
                file.write("#" * 60 + "\n")
                i += 1
            file.close()
            infoMessage("Created %s" % (mkFileName), mkFileVerbosity)

    if sharedObjLib <> None and not omits["shared"]:
        lclDstDir = getDstPath(libPath, sharedRelPath)
        lclOutDir = getOutPath(libPath, sharedRelPath)
        if not os.path.exists(lclOutDir):
            os.makedirs(lclOutDir)
        mkFileName = os.path.join(lclOutDir, "Android.mk")
        file = open(mkFileName, "wt")
        file.write(moduleMk(sharedObjLib, build_params, staticLibs, None, depMgr, lclDstDir,
                            localPath=True, ldlibs=ldlibs, staticLibs=args.static_libs))
        file.close()
        if args.static_libs == None:
            warningMessage("Computed link order is very likely not accurate.")
            warningMessage("See %s." % (mkFileName))
        infoMessage("Created %s" % (mkFileName), mkFileVerbosity)

    if not omits["top"]:
        topMkFileName = getOutPath(libPath, "Android.mk")
        file = open(topMkFileName, "wt")
        file.write("include $(call all-subdir-makefiles)")
        file.close()

    if not omits["app"]:
        appMkFileName = os.path.join(applicationDir, "Application.mk")
        file = open(appMkFileName, "wt")
        libNames = [staticLib['LIBNAME'] for staticLib in staticLibs]
        if sharedObjLib <> None:
            libNames.append(sharedObjLib['LIBNAME'])
        file.write("APP_PROJECT_PATH := $(call my-dir)/project\n")
        file.write("APP_MODULES := %s\n" % (" ".join(libNames)))
        if args.project <> None:
            file.write("APP_PROJECT_PATH := %s" % (absPath(getDstPath())))
        if bc_file.dbgmode:
            file.write("APP_OPTIM := debug\n")
        file.close()

    #
    # The End
    #
    infoMessage("Export done.", 1)