def projectFileGenerator(project_info):
    """Serialize the wizard's project settings into a pickled dict.

    Paths are stored relative to the project directory so the project file
    stays valid when the tree is moved.  NOTE(review): `relpath` is a
    project-local module here, not os.path.relpath — confirm its argument
    order against that module.
    """
    directory = project_info.info("PROJECT_PATH")
    project_data = {}
    # Keep only the modules the user actually enabled.
    enabled_modules = []
    for module, information in project_info.info("MODULES").items():
        if information["enabled"]:
            enabled_modules.append(module)
    project_data["ENABLED_MODULES"] = enabled_modules
    if project_info.info("PRESET"):
        # For presets save again the BERTOS_PATH into project file
        project_data["PRESET"] = True
        project_data["BERTOS_PATH"] = relpath.relpath(project_info.info("BERTOS_PATH"), directory)
    elif project_info.edit:
        # If in editing mode the BERTOS_PATH is maintained
        project_data["BERTOS_PATH"] = relpath.relpath(project_info.info("BERTOS_PATH"), directory)
    else:
        # Use the local BeRTOS version instead of the original one
        # project_data["BERTOS_PATH"] = project_info.info("BERTOS_PATH")
        project_data["BERTOS_PATH"] = "."
    project_data["PROJECT_NAME"] = project_info.info("PROJECT_NAME", os.path.basename(directory))
    project_src_relpath = relpath.relpath(project_info.info("PROJECT_SRC_PATH"), directory)
    project_data["PROJECT_SRC_PATH"] = project_src_relpath
    project_data["TOOLCHAIN"] = project_info.info("TOOLCHAIN")
    project_data["CPU_NAME"] = project_info.info("CPU_NAME")
    project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
    project_data["OUTPUT"] = project_info.info("OUTPUT")
    project_data["WIZARD_VERSION"] = WIZARD_VERSION
    # NOTE: in the PRESET branch above this was set to True; here it is
    # overwritten with whatever info("PRESET") returns.
    project_data["PRESET"] = project_info.info("PRESET")
    project_data["PROJECT_HW_PATH"] = relpath.relpath(project_info.info("PROJECT_HW_PATH"), directory)
    return pickle.dumps(project_data)
def pathRelativeToModel(path, model):
    """Return *path* relative to the model's base directory.

    When the model has no base directory, *path* is returned unchanged.
    """
    from relpath import relpath
    base_dir = getModelBaseDir(model)
    if not base_dir:
        return path
    return relpath(base_dir, path)
def readTestFunctions(self): """For each TestFile we are running tests in, load the list of test functions in that file""" #XXX TODO - error handling if a test is specified that does not exist sys.stderr.write("Finding tests...\n") def get_job(test_file): def get_tests(): def callback(rv, signal, proc, killed): test_file.updateTestLists(rv) if test_file.compile_failed: self.uncompiled_files.append(test_file) else: self.compiled_files.append(test_file) return test_file.getTests(self.engine), callback, None return get_tests test_finder = JobRunner(self.num_processes, timeout=30) for path, test_file in self.tests.iteritems(): self.all_files.append(relpath(path, test_path)) test_finder.queue([get_job(test_file)]) test_finder.run()
def importInShell(self):
    """Import this module into the editor's shell, returning a (message, level) pair.

    When the module belongs to an app in a different directory, a package-style
    'from pkg import mod' is built from the relative path; otherwise a plain
    'import mod' is used.  The exec directory is appended to sys.path on first
    use.  NOTE(review): 'successfull' is a typo inside a translatable string —
    left untouched here since changing it would break existing translations.
    """
    modDir, modFile = os.path.split(self.assertLocalFile())
    modName = os.path.splitext(modFile)[0]
    if self.app:
        execDir = os.path.dirname(self.app.assertLocalFile())
        if execDir != modDir:
            # Derive the package name from the path of the module relative
            # to the application's directory.
            p, m = os.path.split(relpath.relpath(execDir, self.assertLocalFile()))
            p = p.replace('/', '.')
            p = p.replace('\\', '.')
            pckName = p
            impExecStr = 'from %s import %s'%(pckName, modName)
        else:
            impExecStr = 'import %s'%modName
    else:
        execDir = modDir
        impExecStr = 'import %s'%modName
    shell = self.editor.shell
    if execDir not in sys.path:
        sys.path.append(execDir)
        shell.pushLine("print '## Appended to sys.path'")
    else:
        info = ''  # NOTE(review): dead assignment, kept for byte-compatibility
    shell.pushLine(impExecStr, impExecStr)
    if shell.lastResult != 'stderr':
        return _('Import of %s successfull')%modName, 'Info'
    else:
        return _('Import of %s failed')%modName, 'Error'
def get_lecture_date_folder(self):
    """Resolve the lecture-date folder, show its semester-relative path in
    the destination box, and return the absolute folder path."""
    folder_path = self._get_lecture_date_folder()
    relative = relpath.relpath(folder_path, base=semester_root)
    self.dest_dir_box.SetValue(relative)
    self.check_for_existing_lecture_folder()
    return folder_path
def RComp(env, rsc, rsg, rss, options, includes, fileinc, defines, extra_depends=None): """Utility for creating Command for Symbian resource compiler""" # Preprocess the resource file first rpp = ".".join(os.path.basename(rss).split(".")[:-1] + ["rpp"]) rpp = os.path.abspath(os.path.join(os.path.dirname(rsg), rpp)) import relpath rpp_build = cpp.Preprocess(env, rpp, rss, includes, fileinc, defines + ["_UNICODE"]) rss = relpath.relpath(os.path.abspath("."), os.path.abspath(rss)) # FIXME: For some strange reason, using the rcomp when creating bootup resource fails # if using the 'normal' way( colorizer.py must mess it up somehow ) def build(target, source, env): cmd = RCOMP + ' -u %s -o\"%s\" -h\"%s\" -s\"%s\" -i\"%s\" ' % \ ( options, rsc, rsg, rpp, rss ) os.system(cmd) if extra_depends is not None: for dep in extra_depends: env.Depends(rpp_build, dep) resource_build = env.Command([rsc, rsg], [rpp, rss], build) env.Depends(resource_build, rpp) return resource_build
def importInShell(self):
    """Import this module into the editor's shell; return a (message, level) pair.

    Duplicate of the other importInShell in this file, differing only in
    formatting.  NOTE(review): 'successfull' is a typo inside a translatable
    string and is deliberately left as-is.
    """
    modDir, modFile = os.path.split(self.assertLocalFile())
    modName = os.path.splitext(modFile)[0]
    if self.app:
        execDir = os.path.dirname(self.app.assertLocalFile())
        if execDir != modDir:
            # Build the dotted package name from the path relative to the app.
            p, m = os.path.split(
                relpath.relpath(execDir, self.assertLocalFile()))
            p = p.replace('/', '.')
            p = p.replace('\\', '.')
            pckName = p
            impExecStr = 'from %s import %s' % (pckName, modName)
        else:
            impExecStr = 'import %s' % modName
    else:
        execDir = modDir
        impExecStr = 'import %s' % modName
    shell = self.editor.shell
    if execDir not in sys.path:
        sys.path.append(execDir)
        shell.pushLine("print '## Appended to sys.path'")
    else:
        info = ''  # NOTE(review): dead assignment, kept unchanged
    shell.pushLine(impExecStr, impExecStr)
    if shell.lastResult != 'stderr':
        return _('Import of %s successfull') % modName, 'Info'
    else:
        return _('Import of %s failed') % modName, 'Error'
def h(x):
    """Make an absolute path relative to the current working directory.

    Paths that do not start with '/' are returned unchanged.
    """
    import relpath
    if x.startswith("/"):
        return relpath.relpath(os.getcwd(), x)
    return x
def __init__(self, abspath, basepath, rel_link_paths, level=1, \
             index_name='index.html', subclass=None, title=None, myskips=[]):
    """Set up a subfolder node rooted at *abspath* under *basepath*.

    NOTE(review): myskips=[] is a mutable default argument shared across
    calls; kept unchanged here since callers may rely on current behavior.
    """
    self.abspath = abspath
    rest, self.name = os.path.split(self.abspath)
    self.basepath = basepath
    if subclass is None:
        subclass = subfolder
    self.relpath = relpath.relpath(abspath, basepath)
    self.rel_link_paths = rel_link_paths
    # Keep only the links that live inside this folder's relative path.
    self.mylinks = [item for item in self.rel_link_paths \
                    if item.find(self.relpath+os.sep) == 0]
    # The base-class __init__ is expected to populate self.toplinks, which
    # is consumed immediately below.
    subfolder_abstract.__init__(self, index_name=index_name, \
                                title=title, subclass=subclass, \
                                myskips=myskips, level=level)
    self.topnavlinks = [relpath.relpath(item, self.relpath) for item in self.toplinks]#used for sidebar navigation in Krauss blog
def on_browse_dest(self, event=None):
    """Ask the user for a destination directory and store it, relative to
    the semester root, in the destination box."""
    start = self.get_lectures_dir()
    chosen = wx_utils.my_dir_dialog(parent=None,
                                    start_dir=start,
                                    msg="Choose destination directory",
                                    )
    if chosen:
        self.dest_dir_box.SetValue(relpath.relpath(chosen, base=semester_root))
def on_browse(self, event=None):
    """Ask the user for a root directory and store it, relative to the
    course directory, in the root-folder box."""
    course_dir = self.get_course_dir()
    chosen = wx_utils.my_dir_dialog(parent=None,
                                    start_dir=course_dir,
                                    msg="Choose root directory",
                                    )
    if not chosen:
        return
    self.root_folder_box.SetValue(relpath.relpath(chosen, base=course_dir))
def createFileSoftLink(self, name, target):
    """Create (or refresh) a relative symlink *name* pointing at *target*.

    Any existing file or link at *name* is removed first.  The link is only
    created when *target* exists; otherwise the function just returns after
    the cleanup.
    """
    if os.path.exists(name) or os.path.islink(name):
        os.remove(name)
    if os.path.exists(target):
        # Build a link path relative to name's directory so the tree stays
        # relocatable (3-arg form of the project-local relpath helper).
        t_relpath = relpath.relpath(os.path.dirname(name), os.path.dirname(target), '/')
        t = t_relpath + "/" + os.path.basename(target)
        # Use os.symlink instead of os.system("ln -s ..."): no shell is
        # involved, so paths containing spaces or shell metacharacters are
        # handled safely and failures raise instead of being ignored.
        os.symlink(t, name)
    return
def make_rel(self, pathin):
    """Convert *pathin* to a path relative to self.root, printing debug
    output along the way."""
    for line in ('MRMRMRMMRMRMRMR', '', 'relpath stuff:', ''):
        print(line)
    print('pathin = ' + pathin)
    print('self.root = ' + self.root)
    result = relpath.relpath(pathin, self.root)
    print('rp = ' + result)
    return result
def saveAs(self, filename):
    """Save the model under *filename*, rewriting every module's stored path
    to be relative to the new file's directory (in unix form)."""
    for mod in self.modules.keys():
        # Slot [2] of each module entry holds its stored path; rebase it
        # against the new save location.
        self.modules[mod][2] = self.convertToUnixPath(\
            relpath.relpath(os.path.dirname(filename),
            self.normaliseModuleRelativeToApp(self.modules[mod][2])))
    self.writeModules()
    ClassModel.saveAs(self, filename)
    self.notify()
def on_browse_source(self, event=None):
    """Ask the user for a source directory; store it relative to the
    semester root, then refresh the listing and the lecture number."""
    course_dir = self.get_course_dir()
    chosen = wx_utils.my_dir_dialog(parent=None,
                                    start_dir=course_dir,
                                    msg="Choose source directory",
                                    )
    if not chosen:
        return
    self.source_dir_box.SetValue(relpath.relpath(chosen, base=semester_root))
    self.list_source_dir()
    self.set_lecture_number()
def __init__(self, abspath, basepath, rel_link_paths, level=1):
    """Blog-year node: build one blog_month child per subfolder and index
    each month's first-day link by month name."""
    # Base class populates self.subfolders, consumed below.
    subfolder.__init__(self, abspath, basepath, rel_link_paths, level=level)
    self.months = [blog_month(item, self.basepath, self.rel_link_paths, \
                              level=self.level+1, parent=self) \
                   for item in self.subfolders]
    self.month_links = [relpath.relpath(item, self.abspath) for item in self.subfolders]
    self.first_day_links = [item.first_day for item in self.months]
    # Map month name -> first-day link for quick navigation lookups.
    keys = [item.name for item in self.months]
    vals = [item.first_day for item in self.months]
    self.firstdays_dict = dict(zip(keys, vals))
def userMkGenerator(project_info):
    """Generate the <project>_user.mk file from its template.

    Reads the template, substitutes the project-specific placeholders and
    writes the result into the project directory.
    """
    # Use with-statements so the file handles are closed deterministically
    # (the original leaked both handles via open(...).read()/.write()).
    template_path = os.path.join(const.DATA_DIR, "mktemplates/template_user.mk")
    with open(template_path, "r") as template_file:
        makefile = template_file.read()
    destination = os.path.join(project_info.prjdir,
                               os.path.basename(project_info.prjdir) + "_user.mk")
    # Deadly performances loss was here :(
    mk_data = {}
    mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
    mk_data["$ppath"] = relpath.relpath(project_info.info("PROJECT_SRC_PATH"),
                                        project_info.info("PROJECT_PATH"))
    mk_data["$main"] = "/".join(["$(%s_SRC_PATH)" % project_info.info("PROJECT_NAME"), "main.c"])
    for key in mk_data:
        makefile = makefile.replace(key, mk_data[key])
    with open(destination, "w") as out:
        out.write(makefile)
def projectFileGenerator(project_info):
    """Serialize the wizard's project settings into a pickled dict (newer
    variant: sorted module list, extra *_FROM_MAKEFILE keys, and only the
    toolchain path is persisted)."""
    directory = project_info.info("PROJECT_PATH")
    project_data = {}
    # Keep only the modules the user actually enabled; sorted for a stable
    # on-disk representation.
    enabled_modules = []
    for module, information in project_info.info("MODULES").items():
        if information["enabled"]:
            enabled_modules.append(module)
    project_data["ENABLED_MODULES"] = sorted(enabled_modules)
    if project_info.info("PRESET"):
        # For presets save again the BERTOS_PATH into project file
        project_data["PRESET"] = True
        project_data["BERTOS_PATH"] = relpath.relpath(
            project_info.info("BERTOS_PATH"), directory)
    elif project_info.edit:
        # If in editing mode the BERTOS_PATH is maintained
        project_data["BERTOS_PATH"] = relpath.relpath(
            project_info.info("BERTOS_PATH"), directory)
    else:
        # Use the local BeRTOS version instead of the original one
        # project_data["BERTOS_PATH"] = project_info.info("BERTOS_PATH")
        project_data["BERTOS_PATH"] = "."
    project_data["PROJECT_NAME"] = project_info.info(
        "PROJECT_NAME", os.path.basename(directory))
    project_src_relpath = relpath.relpath(
        project_info.info("PROJECT_SRC_PATH"), directory)
    project_data["PROJECT_SRC_PATH"] = project_src_relpath
    project_data["PROJECT_SRC_PATH_FROM_MAKEFILE"] = project_info.info(
        "PROJECT_SRC_PATH_FROM_MAKEFILE")
    # Persist only the toolchain path, not the whole toolchain dict.
    project_data["TOOLCHAIN"] = {
        'path': project_info.info("TOOLCHAIN")['path']
    }
    project_data["CPU_NAME"] = project_info.info("CPU_NAME")
    project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
    project_data["OUTPUT"] = project_info.info("OUTPUT")
    project_data["WIZARD_VERSION"] = WIZARD_VERSION
    # NOTE: overwrites the True set in the PRESET branch above.
    project_data["PRESET"] = project_info.info("PRESET")
    project_data["PROJECT_HW_PATH"] = relpath.relpath(
        project_info.info("PROJECT_HW_PATH"), directory)
    project_data["PROJECT_HW_PATH_FROM_MAKEFILE"] = project_info.info(
        "PROJECT_HW_PATH_FROM_MAKEFILE")
    return pickle.dumps(project_data)
def __init__(self, path): if not os.path.exists(path): print "Test file %s not found" % path sys.exit(1) self.path = path self.compile_failed = None self.get_tests_output = None self.tests_to_run = [] self.crashed = False #Did the current file crash (fast mode only) self.relative_path = relpath(path, test_path) self.run_tests_individually = False self.read_options()
def __init__(self, path): if not os.path.exists(path): print "Test file %s not found"%path sys.exit(1) self.path = path self.compile_failed = None self.get_tests_output = None self.tests_to_run = [] self.crashed = False #Did the current file crash (fast mode only) self.relative_path = relpath(path, test_path) self.run_tests_individually = False self.read_options()
def addModule(self, filename, descr, source=None):
    """Register a new module with the application.

    Raises when a module with the same basename is already registered.
    Paths are stored relative to the app file once the app has been saved.
    """
    name, ext = os.path.splitext(os.path.basename(filename))
    if self.modules.has_key(name):
        raise Exception(_('Module name exists in application'))
    if self.savedAs:
        relative = relpath.relpath(os.path.dirname(self.filename), filename)
    else:
        # Not saved yet: keep the path as given until a save establishes
        # the base directory.
        relative = filename
    # Module entry layout: [autocreated-flag, description, unix path].
    self.modules[name] = [0, descr, self.convertToUnixPath(relative)]
    self.idModel(name, source)
    self.writeModules()
def makefileGenerator(project_info):
    """
    Generate the Makefile for the current project.
    """
    # Use with-statements so file handles are closed deterministically
    # (the original leaked both handles via open(...).read()/.write()).
    with open(os.path.join(const.DATA_DIR, "mktemplates/Makefile"), "r") as template_file:
        makefile = template_file.read()
    destination = os.path.join(project_info.maindir, "Makefile")
    # TODO write a general function that works for both the mk file and the Makefile
    mk_data = {}
    mk_data["$pname"] = project_info.info("PROJECT_NAME")
    mk_data["$ppath"] = relpath.relpath(project_info.info("PROJECT_SRC_PATH"),
                                        project_info.info("PROJECT_PATH"))
    for key in mk_data:
        makefile = makefile.replace(key, mk_data[key])
    with open(destination, "w") as out:
        out.write(makefile)
def make_tests(test_list, opts):
    """Generate HTML test pages for every file in *test_list* and return a
    nested dict (component -> ... -> list of {name, href} entries)."""
    shutil.copy(os.path.join(opjsunit.harness_path, "opjsunit.js"),
                os.path.join(default_output_path, "opjsunit.js"))
    test_data = {}
    for test_file in test_list:
        current_test_data = test_data
        filename = os.path.split(test_file.path)[1]
        dir_name = os.path.join(default_output_path, os.path.splitext(filename)[0])
        out_path = os.path.join(dir_name, filename)
        # [4:] strips a fixed-length filename prefix before splitting into
        # category components; single-component names get an "other" bucket.
        components = [item for item in os.path.splitext(filename)[0][4:].split("_")]
        if len(components) == 1:
            components.append("other")
        # Walk/create the nested dict; the leaf level is a list of tests.
        for i, component in enumerate(components):
            if component not in current_test_data:
                if i == len(components) - 1:
                    current_test_data[component] = []
                else:
                    current_test_data[component] = {}
            current_test_data = current_test_data[component]
        if not ('write_tests' in opts) or opts.write_tests:
            if not os.path.exists(dir_name):
                os.mkdir(dir_name)
            assert os.path.isdir(dir_name)
            shutil.copy(test_file.path, out_path)
        tests = opjsunit.TestSet(test_file.iterkeys())
        for i, test_id in enumerate(tests.iterByName()):
            test = test_file[unicode(test_id)]
            code_filename = os.path.split(out_path)[1]
            # make_test sets test.rel_path, read below for the href.
            make_test(test, i, dir_name, code_filename, opts)
            props = {
                "name": test.id.function,
                "href": relpath.relpath(
                    os.path.join(default_output_path, test.rel_path),
                    opjsunit.base_path)
            }
            current_test_data.append(props)
    return test_data
def mkGenerator(project_info):
    """
    Generates the mk file for the current project.
    """
    # Use with-statements so file handles are closed deterministically
    # (the original leaked both handles via open(...).read()/.write()).
    with open(os.path.join(const.DATA_DIR, "mktemplates/template.mk"), "r") as template_file:
        makefile = template_file.read()
    prjdir = os.path.abspath(project_info.prjdir)
    destination = os.path.join(prjdir, os.path.basename(prjdir) + ".mk")
    mk_data = {}
    mk_data["$pname"] = project_info.info("PROJECT_NAME")
    mk_data["$ppath"] = relpath.relpath(project_info.info("PROJECT_SRC_PATH"),
                                        project_info.info("PROJECT_PATH"))
    mk_data["$cpuclockfreq"] = project_info.info("SELECTED_FREQ")
    # Emit one "<pname>_PARAM = value" line per MK_* CPU parameter.
    cpu_mk_parameters = []
    for key, value in project_info.info("CPU_INFOS").items():
        if key.startswith(const.MK_PARAM_ID):
            cpu_mk_parameters.append("%s = %s" % (key.replace("MK", mk_data["$pname"]), value))
    mk_data["$cpuparameters"] = "\n".join(cpu_mk_parameters)
    mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$cppasrc"], mk_data["$cxxsrc"], mk_data["$asrc"], mk_data["$constants"] = csrcGenerator(project_info)
    # Split the toolchain path around its trailing "gcc" into prefix/suffix.
    mk_data["$prefix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].rsplit("gcc", 1)[0])
    mk_data["$suffix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].rsplit("gcc", 1)[1])
    mk_data["$hwpath"] = relpath.relpath(project_info.info("PROJECT_HW_PATH"),
                                         project_info.info("PROJECT_PATH"))
    for key in mk_data:
        makefile = makefile.replace(key, mk_data[key])
    with open(destination, "w") as out:
        out.write(makefile)
def add_header_search_path(self, configuration):
    """Add the three20 Build/Products dir to HEADER_SEARCH_PATHS for *configuration*."""
    project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    build_path = os.path.join(os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products'), 'three20')
    rel_path = relpath(project_path, build_path)
    did_add_build_setting = self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"'+rel_path+'"')
    if not did_add_build_setting:
        return did_add_build_setting
    # Version 46 is Xcode 4's file format.
    try:
        primary_version = int(self._xcode_version.split('.')[0])
    except ValueError, e:
        primary_version = 0
    # NOTE(review): primary_version is unused in the visible code — the
    # function may be truncated here; confirm against the full source.
def get_all_tests():
    """Collect the relative paths of every *.js test under base_path,
    skipping .svn/tools directories, hidden/backup files and the harness
    files themselves."""
    found = set()
    for dirpath, dirnames, filenames in os.walk(base_path):
        if ".svn" in dirpath or "tools" in dirpath:
            continue
        for name in filenames:
            if os.path.splitext(name)[1] != ".js" or name[0] in ("#", "."):
                continue
            if name in ("mjsunit.js", "harness.js"):
                continue
            found.add(relpath.relpath(os.path.join(dirpath, name), base_path))
    return found
def _FindRelPath(self, abspath): maindir = self._FindMainDir() #print('self.defaultdir='+self.defaultdir) #print('abspath='+abspath) #print('maindir='+maindir) #pdb.set_trace() abspath = rwkos.FindFullPath(abspath)#this might break some stuff maindir = rwkos.FindFullPath(maindir) if not maindir: self.relpath = abspath #elif os.path.samefile(abspath, maindir): elif abspath == maindir: self.relpath = '' else: self.relpath = relpath.relpath(abspath, maindir) return self.relpath
def gen_nav(self, firstdays_dict):
    """Build (and cache in self.navlist) the month-navigation HTML list,
    expanding the current month's entries inline."""
    navlist = []
    self.dict_to_rst()
    for month in months:
        if firstdays_dict.has_key(month):
            month_link = firstdays_dict[month]
            month_rel_link = relpath.relpath(month_link, self.relpath)
            month_rel_link = month_rel_link.replace('\\','/')
            navlist.append(link_dec(month_rel_link, month))
            navlist.append('')
            # Expand this month's own links when rendering its page.
            # NOTE(review): nesting of this check reconstructed from a
            # whitespace-mangled source — confirm it belongs inside the
            # has_key branch.
            if month == self.name:
                navlist.append('')
                navlist.extend(self.rst_links)
                navlist.append('')
    self.navlist = rst_to_html_list(navlist)
    return self.navlist
def createNonndkHeaderFile(self):
    """Write nonndk.h (one '#define C C_mtk' per non-NDK class) and ensure
    mtkNonNDKConfig.h includes it via a relative path."""
    #path = os.path.dirname(os.path.realpath(__file__))
    #if not path: return
    path = self.OUT_PATH + "/" + "nonndk.h"
    if not os.path.exists(path):
        f = file(path,"w")
        # NOTE(review): file() raises on failure rather than returning a
        # falsy value, so this guard is effectively dead; kept unchanged.
        if not f:
            print "Create file %s failed!!!" %(path)
            return
        f.close()
    headerfile = file(path,"r+")
    lines = headerfile.readlines()
    defines = []
    for c in self.totalNdkClass:
        defines.append("#define %s %s_mtk \n" %(c,c))
    # Rewrite the header only when its contents actually changed, to avoid
    # touching the file (and triggering rebuilds) unnecessarily.
    if defines != lines:
        headerfile.seek(0)
        headerfile.truncate(0)
        for define in defines:
            headerfile.write(define)
    headerfile.close()
    configfile = file("system/core/include/arch/mtkNonNDKConfig.h", "r+")
    # Compute the include path of nonndk.h relative to the config header.
    sap = os.getcwd() + "/" + "system/core/include/arch/"
    if path.startswith('/'):
        tap = path
    else:
        tap = os.getcwd() + "/" + path
    rp = relpath.relpath(os.path.dirname(sap), os.path.dirname(tap), "/")
    rp = rp + "/" + os.path.basename(path)
    string = r'#include "'
    #string += "../../../../" + str(path)
    string += rp
    string += r'"'
    string += "\n"
    lines = configfile.readlines()
    # Append the include directive only once.
    if not string in lines:
        configfile.write(string)
    configfile.close()
def make_tests(test_list, opts):
    """Generate HTML test pages for every file in *test_list*; return a
    nested dict keyed by name components with {name, href} leaf entries.
    (Duplicate of the other make_tests in this file.)"""
    shutil.copy(os.path.join(opjsunit.harness_path, "opjsunit.js"),
                os.path.join(default_output_path, "opjsunit.js"))
    test_data = {}
    for test_file in test_list:
        current_test_data = test_data
        filename = os.path.split(test_file.path)[1]
        dir_name = os.path.join(default_output_path, os.path.splitext(filename)[0])
        out_path = os.path.join(dir_name, filename)
        # [4:] strips a fixed filename prefix before splitting on "_".
        components = [item for item in os.path.splitext(filename)[0][4:].split("_")]
        if len(components) == 1:
            components.append("other")
        # Descend/create the nested dict; the final level is a list.
        for i, component in enumerate(components):
            if component not in current_test_data:
                if i == len(components) - 1:
                    current_test_data[component] = []
                else:
                    current_test_data[component] = {}
            current_test_data = current_test_data[component]
        if not('write_tests' in opts) or opts.write_tests:
            if not os.path.exists(dir_name):
                os.mkdir(dir_name)
            assert os.path.isdir(dir_name)
            shutil.copy(test_file.path, out_path)
        tests = opjsunit.TestSet(test_file.iterkeys())
        for i, test_id in enumerate(tests.iterByName()):
            test = test_file[unicode(test_id)]
            code_filename = os.path.split(out_path)[1]
            # make_test sets test.rel_path, consumed below.
            make_test(test, i, dir_name, code_filename, opts)
            props = {"name":test.id.function,
                     "href":relpath.relpath(os.path.join(default_output_path, test.rel_path), opjsunit.base_path)}
            current_test_data.append(props)
    return test_data
def add_header_search_path(self, configuration):
    """Add the three20 Build/Products dir to HEADER_SEARCH_PATHS for
    *configuration*. (Duplicate of the other add_header_search_path.)"""
    project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    build_path = os.path.join(
        os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'),
                     'Products'),
        'three20')
    rel_path = relpath(project_path, build_path)
    did_add_build_setting = self.add_build_setting(configuration,
                                                   'HEADER_SEARCH_PATHS',
                                                   '"' + rel_path + '"')
    if not did_add_build_setting:
        return did_add_build_setting
    # Version 46 is Xcode 4's file format.
    try:
        primary_version = int(self._xcode_version.split('.')[0])
    except ValueError, e:
        primary_version = 0
    # NOTE(review): primary_version is unused in the visible code — the
    # function may be truncated here; confirm against the full source.
def add_bundle(self):
    """Add Three20.bundle as a file reference and hook it into both the
    resources group and the resources build phase."""
    hash_base = self.get_hash_base('Three20.bundle')
    project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    bundle_path = os.path.join(Paths.src_dir, 'Three20.bundle')
    rel_path = relpath(project_path, bundle_path)
    fileref_hash = self.add_filereference('Three20.bundle', 'plug-in',
                                          hash_base + '0', rel_path,
                                          'SOURCE_ROOT')
    libfile_hash = self.add_buildfile('Three20.bundle', fileref_hash,
                                      hash_base + '1')
    if not self.add_file_to_resources('Three20.bundle', fileref_hash):
        return False
    return bool(self.add_file_to_resources_phase('Three20.bundle', libfile_hash))
def OnBrowseButton(self,e): self._FindMainDir() #-------------------------------------------------- # wxLogMessage("dirchooser Browse Button Pushed") #-------------------------------------------------- dlg = wx.FileDialog(self,self.message,self.defaultdir,"", self.filter, self.style) success=dlg.ShowModal() if success== wx.ID_OK: path=dlg.GetPath() self.path = path if self.defaultdir: self.relpath = relpath.relpath(path, self.defaultdir) else: self.relpath = self.path self.DirectoryTextBox.SetValue(self.relpath) for method in self.postmethods: method() dlg.Destroy() return success
def main() : argv = sys.argv argc = len( argv ) if argc < 3 : print u"----- usage -----" print u"create_data_list [rootpath] [filename]" return; inDataDir = os.path.normpath( argv[1] ) outHeaderPath = os.path.normpath( argv[2] ) inDataDir = re.sub( r'\\', '/', inDataDir ) outHeaderPath = re.sub( r'\\', '/', outHeaderPath ) print "inDataDir : ", inDataDir print "outHeaderPath : ", outHeaderPath # データフォルダがなかったらエラー if not os.path.exists( inDataDir ) : print "directory not exists : ", inDataDir # 出力ディレクトリが無かったら作成 if not os.path.exists( os.path.dirname( outHeaderPath ) ) : os.mkdir( os.path.dirname( outHeaderPath ) ) file = createHeaderFile( outHeaderPath ) rootPath = re.sub( r'\\', '/', inDataDir ) for d in reprDirInfo( rootPath ) : d = re.sub( r'\\', '/', d ) #relative = os.path.relpath( d, rootPath ) relative = relpath.relpath( d, rootPath ) relative = re.sub( r'\\', '/', relative ) writeDefine( file, relative ) closeHeaderFile( file )
def __init__(self, main_dir, pats=['*.html','*.pdf'], \
             index_name = 'index.html', title=None, header=[], \
             subclass=subfolder, myskips=[]):
    """Site root node: gather all files matching *pats* under *main_dir*,
    filter them, and initialise the subfolder tree at level 0.

    NOTE(review): pats/header/myskips are mutable default arguments shared
    across calls; kept unchanged since callers may rely on it.
    """
    self.main_dir = main_dir
    self.basepath = main_dir
    self.abspath = main_dir
    self.relpath = ''
    self.pats = pats
    self.header = header
    # Collect every file matching any pattern, then sort for stable order.
    self.full_link_paths = []
    for pat in self.pats:
        curpaths = pyfind.findall(self.main_dir, pat)
        self.full_link_paths.extend(curpaths)
    self.full_link_paths.sort()#need to filter myrst2s5.py
    all_links = copy.copy(self.full_link_paths)
    self.full_link_paths = filter(skip_filter, all_links)
    self.rel_link_paths = [relpath.relpath(item, self.main_dir) \
                           for item in self.full_link_paths]
    subfolder_abstract.__init__(self, index_name=index_name, \
                                title=title, subclass=subclass, \
                                myskips=myskips, level=0)
def ToLatex(self, pdf=1, **kwargs):
    """Render this figure as a list of LaTeX lines (cached in self.latexlist).

    A floating figure environment is emitted only when a caption is set;
    when *pdf* is truthy the eps source is converted first.
    """
    #the figure path is a full path by default. This isn't
    #great. Should be relative.
    #
    #whether or not self.opts contains a non-empty caption
    #determines if a floating figure environment is used.
    latexlist = []
    hascaption = bool(self.opts['caption'])
    if hascaption:
        startstr = '\\begin{figure}'
        if self.opts['placestr']:
            startstr += self.opts['placestr']
        latexlist.append(startstr)
    if self.opts['center']:
        latexlist.append('\\begin{center}')
    myoptstr = FigWidthHeightStr(self.opts)
    igline = '\\includegraphics'
    if myoptstr:
        igline +='['+myoptstr+']'
    figpath = self.opts['filename']
    if pdf:
        #print('calling epstopdf')
        epstopdf(figpath)
    #else:
        #print('pdf = '+str(pdf))
    # Emit the graphics path relative to the document base, with forward
    # slashes so the output is portable.
    figrelpath = relpath.relpath(figpath, self.basepath)
    figrelpath = figrelpath.replace('\\','/')
    igline += '{'+figrelpath+'}'
    latexlist.append(igline)
    if hascaption:
        latexlist.append('\\caption{'+self.opts['caption']+'}')
        if self.opts['label']:#no sense labeling a non-floating
            #figure, so this is a nested if
            latexlist.append('\\label{'+self.opts['label']+'}')
    if self.opts['center']:
        latexlist.append('\\end{center}')
    if hascaption:
        latexlist.append('\\end{figure}')
    self.latexlist = latexlist
    return latexlist
def add_bundle(self):
    """Register Three20.bundle with the project and wire it into both
    resource phases.  (Duplicate of the other add_bundle.)"""
    base = self.get_hash_base('Three20.bundle')
    proj_dir = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    bundle_dir = os.path.join(Paths.src_dir, 'Three20.bundle')
    relative = relpath(proj_dir, bundle_dir)
    ref_hash = self.add_filereference('Three20.bundle', 'plug-in',
                                      base + '0', relative, 'SOURCE_ROOT')
    build_hash = self.add_buildfile('Three20.bundle', ref_hash, base + '1')
    ok = self.add_file_to_resources('Three20.bundle', ref_hash)
    if ok:
        ok = self.add_file_to_resources_phase('Three20.bundle', build_hash)
        if ok:
            return True
    return False
def make_test(test, file_number, out_dir, code_filename, opts):
    """Render one test's HTML harness page into *out_dir* and record its
    output-relative path on test.rel_path."""
    harness_code = make_harness_code(test, opts.iterations)
    filename = test.id.function + ".htm"
    # rel_path is consumed by the caller to build navigation links.
    test.rel_path = os.path.join(relpath.relpath(out_dir, default_output_path), filename)
    if not ('write_tests' in opts) or opts.write_tests:
        out_f = open(os.path.join(out_dir, filename), "w")
        template = MarkupTemplate(
            open(os.path.join(opjsunit.harness_path, "test.xml")))
        stream = template.generate(
            harness_code=harness_code,
            title=str(test.id.name),
            test_filename=code_filename,
            run_individually=test.file.run_tests_individually,
            function_code=test.function_code)
        out_f.write(stream.render('html', doctype='html5', encoding="utf-8"))
        out_f.close()
def moduleSaveAsNotify(self, module, oldFilename, newFilename):
    """React to a sibling module being saved under a new name/path: rebase
    its stored path, rename its entry, and fix up auto-create imports."""
    if module != self:
        newName, ext = os.path.splitext(os.path.basename(newFilename))
        oldName = os.path.splitext(os.path.basename(oldFilename))[0]
        if not self.modules.has_key(oldName):
            raise Exception, _('Module does not exists in application')
        if self.savedAs:
            relative = relpath.relpath(os.path.dirname(self.filename), newFilename)
        else:
            relative = newFilename
        if newName != oldName:
            self.modules[newName] = self.modules[oldName]
            del self.modules[oldName]
        self.modules[newName][2] = self.convertToUnixPath(relative)
        # Check if it's autocreated module
        if self.modules[newName][0]:
            if len(self.viewsModified):
                self.refreshFromViews()
            impIdx = self.updateAutoCreateImports(oldName, newName)
            if impIdx is not None:
                # check if it's the main module, first in the import list is
                # always the main module
                if not impIdx:
                    self.updateMainFrameModuleRefs(oldName, newName)
                # preserve modified modules
                mods = self.modules
                self.refreshFromModule()
                self.modules = mods
        self.writeModules()
        self.update()
def make_test(test, file_number, out_dir, code_filename, opts):
    """Render one test's HTML harness page into *out_dir* and record its
    output-relative path on test.rel_path.  (Duplicate of the other
    make_test in this file.)"""
    harness_code = make_harness_code(test, opts.iterations)
    filename = test.id.function + ".htm"
    # rel_path is read by the caller to build navigation hrefs.
    test.rel_path = os.path.join(relpath.relpath(out_dir, default_output_path), filename)
    if not('write_tests' in opts) or opts.write_tests:
        out_f = open(os.path.join(out_dir, filename), "w")
        template = MarkupTemplate(open(os.path.join(opjsunit.harness_path, "test.xml")))
        stream = template.generate(harness_code = harness_code,
                                   title = str(test.id.name),
                                   test_filename=code_filename,
                                   run_individually=test.file.run_tests_individually,
                                   function_code=test.function_code)
        out_f.write(stream.render('html', doctype='html5', encoding="utf-8"))
        out_f.close()
def MFProgramGenerator(source, target, env, for_signature):
    """SCons generator for a multi-file compile+link command.

    Decides which source files actually need recompiling and returns the
    matching compiler Action.  NOTE(review): indentation reconstructed from
    a whitespace-mangled source — verify the nesting of the inner loops
    against the original.
    """
    #Rebuild everything if
    # a) the number of dependencies has changed
    # b) any target does not exist
    # c) the build command has changed
    #Else rebuild only those c files that have changed_since_last_build
    #The signature of this builder should always be the same, because the
    #multifile compile is always functionally equivalent to rebuilding
    #everything
    if for_signature:
        pared_sources = source
    else:
        #First a sanity check
        assert len(set([os.path.splitext(str(i))[1] for i in source])) == 1, \
            "All source files must have the same extension."
        pared_sources = []
        src_names = [os.path.splitext(os.path.basename(str(i)))[0] for i in source]
        tgt_names = [os.path.splitext(os.path.basename(str(t)))[0] for t in target]
        ni = target[0].get_binfo()
        oi = target[0].get_stored_info().binfo
        if ni.bactsig != oi.bactsig:
            #Command line has changed
            pared_sources = source
        else:
            for i in range(len(tgt_names)):
                t = target[i]
                tgt_name = tgt_names[i]
                if not t.exists():
                    #a target does not exist
                    pared_sources = source
                    break
                bi = t.get_stored_info().binfo
                then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs
                children = t.children()
                if len(children) != len(then):
                    #the number of dependencies has changed
                    pared_sources = source
                    break
                for child, prev_ni in zip(children, then):
                    if child.changed_since_last_build(t, prev_ni) and \
                            not t in pared_sources:
                        #If child is a source file, not an explicit or implicit
                        #dependency, then it is not truly a dependency of any target
                        #except that with the same basename. This is a limitation
                        #of SCons.node, which assumes that all sources of a Node
                        #are dependencies of all targets. So we check for that case
                        #here and only rebuild as necessary.
                        src_name = os.path.splitext(os.path.basename(str(child)))[0]
                        if src_name not in tgt_names or src_name == tgt_name:
                            s = source[src_names.index(tgt_name)]
                            pared_sources.append(s)
    assert len(pared_sources) > 0
    destdir = str(target[0].dir)
    #finding sconscript_dir is a bit of a hack. It assumes that the source
    #files are always going to be in the same directory as the SConscript file
    #which is not necessarily true. BUG BY Alan Somers
    sconscript_dir = os.path.dirname(str(pared_sources[0]))
    prefixed_sources = [relpath(str(i), destdir) for i in pared_sources]
    prefixed_sources_str = ' '.join([str(i) for i in prefixed_sources])
    lang_ext = os.path.splitext(prefixed_sources[0])[1]
    tgt_names2 = [os.path.splitext(os.path.basename(str(t)))[0] for t in target]
    _CPPPATH = []
    if 'CPPPATH' in env:
        for i in env['CPPPATH']:
            #if i[0] == '#':
            ##_CPPPATH.append(relpath(i[1:], destdir))
            _CPPPATH.append(i)
            #else:
            # _CPPPATH.append(relpath(os.path.join(sconscript_dir, i),
            # destdir))
    defines = ""
    for t in env['CPPDEFINES']:
        defines += ("-D"+str(t)+" ")
    _CPPINCFLAGS = ['-I' + i for i in _CPPPATH]
    _CCOMCOM = '$CPPFLAGS $_CPPDEFFLAGS $defines %s' % ' '.join(_CPPINCFLAGS)
    libstr = ""
    for t in env['LIBS']:
        libstr += ("-l"+t+" ")
    # Pick the C or C++ command line based on the (single) source extension.
    if lang_ext == '.c' :
        _CCCOM = 'cd %s && $CC $CFLAGS $CCFLAGS %s %s $LINKFLAGS %s -o %s' % \
            (destdir, _CCOMCOM, prefixed_sources_str, libstr, tgt_names2[0])
        #XXX BUG BY Alan Somers. $CCCOMSTR gets substituted using the full list of target files,
        #not prefixed_sources
        cmd = SCons.Script.Action(env.subst(_CCCOM), "$CCCOMSTR")
    elif lang_ext in ['.cc', '.cpp']:
        _CXXCOM = 'cd %s && $CXX $CXXFLAGS $CCFLAGS %s %s $LINKFLAGS %s -o %s' % \
            (destdir, _CCOMCOM, prefixed_sources_str, libstr, tgt_names2[0])
        cmd = SCons.Script.Action(env.subst(_CXXCOM), "$CXXCOMSTR")
    else:
        assert False, "Unknown source file extension %s" % lang_ext
    return cmd
def get_rel_path_to_products_dir(self):
    """Return the Build/Products directory as a path relative to this
    .xcodeproj's directory."""
    project_dir = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    products_dir = os.path.join(
        os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products')
    return relpath(project_dir, products_dir)
def testTarget2DirsDown_c1(self):
    """Target is 2 directories down from current dir, base is in c1"""
    os.chdir('a')
    expected = FmtPath('../../../a/b/file2')
    actual = relpath.relpath('b/file2', self.TempDir+'/a1/b1/c1')
    self.assertEqual(actual, expected)
def Parse(self):
    """Parse the Symbian .mmp project file at self.source.

    Returns a dict keyed by the lowercased mmp keywords in KEYWORDS,
    with list values for multi-valued keywords plus a few
    specially-typed entries (epocallowdlldata: bool,
    epocstacksize: int, epocheapsize: tuple, uid: list).
    """
    # Read the whole file up front; paths are normalized to '/'.
    f = open(self.source)
    lines = f.readlines()
    f.close()
    workingfolder = os.path.dirname(os.path.abspath(self.source)).replace(
        "\\", "/")
    curdir = abspath(os.curdir)
    if os.name == "nt":
        # Remove drive letter if on same drive as target
        if curdir.split(":")[0].lower() == workingfolder.lower().split(
                ":")[0]:
            curdir = curdir.split(":")[1]
            workingfolder = workingfolder.split(":")[1]
    # SOURCE entries are resolved against the current SOURCEPATH; it
    # starts as the mmp file's own directory.
    sourcepath = workingfolder
    epocroot = os.environ["EPOCROOT"].replace("\\", "/")
    lines = [x for x in lines if len(x.strip()) > 0]
    result = {}
    # initialize
    for x in KEYWORDS:
        result[x] = []
    result[
        "epocallowdlldata"] = False  # Not enabled with regular scripts either
    # Defaults: 8 KB stack, 4 KB..1 MB heap, two unset UID slots.
    result["epocstacksize"].append(hex(8 * 1024))
    result["epocheapsize"] = (hex(4096), hex(1024 * 1024))
    result["uid"] += [None, None]
    for line in lines:
        # Fixes Issue-5: mmp parser cannot handle comments
        c_index = line.find("//")
        if c_index != -1:
            line = line[:c_index]
            if line == "":
                continue
        parts = line.split()
        keyword = parts[0].lower()
        if keyword in KEYWORDS:
            # Accumulate onto any values parsed for this keyword so far.
            items = result.get(keyword, [])
            if len(parts) > 1:
                if keyword == "source":
                    files = []
                    files = [
                        join(sourcepath, x).replace("\\", "/")
                        for x in parts[1:]
                    ]
                    items += files
                elif keyword == "library":
                    # Normalize library names: lowercase, no .lib suffix.
                    libs = [
                        x.lower().replace(".lib", "") for x in parts[1:]
                    ]
                    items += libs
                elif keyword == "uid":
                    # UID replaces the default [None, None] slots outright.
                    items = parts[1:]
                elif keyword in ["systeminclude", "userinclude"]:
                    for p in parts[1:]:
                        p = p.replace("\\", "/")
                        # Absolute / drive-qualified paths are anchored
                        # under EPOCROOT; '.' means the mmp's own folder.
                        if p[0] in ["/", "+"] or ":" in p:
                            items += [
                                (epocroot + p[1:]).replace("\\", "/")
                            ]
                            #print "1", items
                        elif p == ".":
                            items += [workingfolder]
                            #print "2", items
                        else:
                            items += [relpath(workingfolder, p)]
                            #print "3", items
                else:
                    items += parts[1:]
            else:
                # Bare keyword with no arguments; only this flag matters.
                if keyword == "epocallowdlldata":
                    result["epocallowdlldata"] = True
            result[keyword] = items
        elif keyword == "sourcepath":
            # Subsequent SOURCE lines resolve against this directory.
            sourcepath = parts[1].replace("\\", "/")
            sourcepath = relpath(curdir, abspath(sourcepath))
        elif keyword == "start":
            # START ... blocks declare resources; the resource folder is
            # also added as a user include.
            result["resources"] += [join(sourcepath, parts[-1])]
            result["userinclude"] += [sourcepath]
    # Take targettype from file extension instead. TODO: special dlls.
    result["targettype"] = result["target"][0].split(".")[-1]
    result["target"] = ".".join(
        result["target"][0].split(".")[:-1])  # Strip extension
    # Stack size was stored as a hex string; expose it as an int.
    result["epocstacksize"] = int(result["epocstacksize"][0], 16)
    return result
def __init__(self, filename, function=None, index=None):
    """Record a test's file, optional function name, and position.

    filename -- path to the test file.
    function -- optional specific test function within the file.
    index -- optional ordering index for this test.
    """
    self._filename = filename
    self._function = function
    # Path of the test file relative to the module-level test_path root.
    self._relative_path = relpath(filename, test_path)
    # NOTE(review): unicode(self) presumably renders filename/function via
    # __unicode__ (defined elsewhere) — it runs before self.index is
    # assigned, so __unicode__ must not depend on index; confirm.
    self.name = unicode(self)
    self.index = index
def make_rel(self, pathin):
    """Rewrite *pathin* as a path relative to this object's root."""
    return relpath.relpath(pathin, self.root)
def sc_relpath(src, destdir):
    """Like relpath but aware of SCons convention regarding '#' in pathnames.

    A leading '#' marks *src* as relative to the SConstruct top directory;
    the marker is stripped and the remainder is made relative to *destdir*.
    Any other path is first anchored under *destdir* and then re-expressed
    relative to it.
    """
    # startswith() instead of src[0]: the old index raised IndexError on an
    # empty src string; behavior for non-empty input is unchanged.
    if src.startswith('#'):
        return relpath(src[1:], destdir)
    else:
        return relpath(os.path.join(destdir, src), destdir)
def Export(self): attrs = [x for x in dir(self.MMPData) if x.isupper()] #import pdb;pdb.set_trace() result = [] targettype = self.MMPData.TARGETTYPE if targettype in [TARGETTYPE_PLUGIN, TARGETTYPE_DLL, TARGETTYPE_PYD]: targettype = TARGETTYPE_DLL result.append("TARGET %s.%s" % (self.MMPData.TARGET, self.MMPData.TARGETTYPE)) result.append("TARGETTYPE %s" % (targettype)) if targettype != TARGETTYPE_LIB: result.append("UID %s %s" % (self.MMPData.UID2, self.MMPData.UID3)) attrs.remove("TARGET") attrs.remove("TARGETTYPE") if self.MMPData.EPOCALLOWDLLDATA: result.append("EPOCALLOWDLLDATA") attrs.remove("EPOCALLOWDLLDATA") if self.MMPData.EPOCHEAPSIZE == 0: attrs.remove("EPOCHEAPSIZE") if self.MMPData.EPOCSTACKSIZE == 0: attrs.remove("EPOCSTACKSIZE") attrs.remove("UID2") attrs.remove("UID3") attrs.remove("CAPABILITY") attrs.append("CAPABILITY") data = self.MMPData #import pdb;pdb.set_trace() #data.USERINCLUDE = ["."] + [ relpath(self.TargetDir, x ) for x in data.USERINCLUDE ] for x in xrange(len(data.USERINCLUDE)): #print data.USERINCLUDE[x] #import pdb;pdb.set_trace() if not os.path.isabs(data.USERINCLUDE[x]): #print "USERINCLUDE", data.USERINCLUDE[x], try: data.USERINCLUDE[x] = relpath(self.TargetDir, data.USERINCLUDE[x]) except TypeError: data.USERINCLUDE[x] = "." 
pass # if same #print "=>", data.USERINCLUDE[x] data.USERINCLUDE.sort() data.SYSTEMINCLUDE.sort() data.SOURCEPATH.sort() #data.SYSTEMINCLUDE = [ relpath(self.TargetDir, x ) for x in data.SYSTEMINCLUDE ] order = [ "MACRO", "SYSTEMINCLUDE", "USERINCLUDE", "SOURCEPATH", "RESOURCE", "SOURCE", "LIBRARY" ] for o in order: if o in attrs: attrs.remove(o) attrs = order + attrs # Remove keywords not valid for LIB if targettype == TARGETTYPE_LIB: for l in ["LIBRARY", "STATICLIBRARY", "CAPABILITY"]: if l in attrs: attrs.remove(l) for a in attrs: result.append("") # Separate sections with empty line data = getattr(self.MMPData, a) if type(data) == list: if a == "CAPABILITY": #for item in data: result.append( "%-11s %s" % ("CAPABILITY", " ".join(self.MMPData.CAPABILITY))) elif a == "SOURCE": #import pdb;pdb.set_trace() for s in data: rpath = relpath(self.TargetDir, s) result.append("%-11s %s" % ("SOURCE", rpath)) elif a == "RESOURCE": for s in data: template = TEMPLATE_RESOURCE if "_reg" in s.lower(): template = TEMPLATE_RESOURCE_REG s = relpath(self.TargetDir, s) res = template % {"RESOURCE": s} result.append(res) else: for item in data: result.append("%-11s %s" % (a, item)) elif data: result.append("%-11s %s" % (a, data)) result.append("EXPORTUNFROZEN") self.MMPContents = "// Generated by SCons for Symbian\n" self.MMPContents += "\n".join(result).replace("\\", "/") return self.MMPContents
def add_dependency(self, dep):
    """Wire *dep* (another project wrapper) into this Xcode project as a
    build dependency by textually editing the pbxproj project data.

    Works in numbered steps, each inserting or reusing a section of the
    project file via regex search-and-splice: file reference, Frameworks
    group entry, PBXTargetDependency + PBXContainerItemProxy records,
    project references, product group, target container proxy, build
    file, and Frameworks build phase. Returns True on success, False if
    any required anchor section cannot be found.
    """
    project_data = self.get_project_data()
    dep_data = dep.get_project_data()
    project_target = self.active_target()
    dep_target = dep.active_target()
    if project_data is None or dep_data is None:
        return False
    logging.info("\nAdding " + str(dep) + "\nto\n" + str(self))
    project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    dep_path = os.path.abspath(dep.xcodeprojpath())
    rel_path = relpath(project_path, dep_path)
    logging.info("")
    logging.info("Project path: " + project_path)
    logging.info("Dependency path: " + dep_path)
    logging.info("Relative path: " + rel_path)
    # All generated object hashes derive from this base, suffixed with a
    # distinct digit per object kind ('0'..'6').
    tthash_base = self.get_hash_base(
        dep.uniqueid_for_target(dep._active_target))
    ###############################################
    logging.info("")
    logging.info("Step 1: Add file reference to the dependency...")
    pbxfileref_hash = self.add_filereference(
        dep._project_name + '.xcodeproj', 'pb-project', tthash_base + '0',
        rel_path, 'SOURCE_ROOT')
    project_data = self.get_project_data()
    logging.info("Done: Added file reference: " + pbxfileref_hash)
    self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 2: Add file to Frameworks group...")
    if not self.add_file_to_frameworks(dep._project_name + ".xcodeproj",
                                       pbxfileref_hash):
        return False
    project_data = self.get_project_data()
    logging.info("Done: Added file to Frameworks group.")
    self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 3: Add dependencies...")
    pbxtargetdependency_hash = None
    pbxcontaineritemproxy_hash = None
    # Locate (or create) the PBXTargetDependency section.
    match = re.search(
        '\/\* Begin PBXTargetDependency section \*\/\n((?:.|\n)+?)\/\* End PBXTargetDependency section \*\/',
        project_data)
    if not match:
        logging.info("\tAdding a PBXTargetDependency section...")
        match = re.search('\/\* End PBXSourcesBuildPhase section \*\/\n',
                          project_data)
        if not match:
            logging.error(
                "Couldn't find the PBXSourcesBuildPhase section.")
            return False
        project_data = project_data[:match.end(
        )] + "\n/* Begin PBXTargetDependency section */\n\n/* End PBXTargetDependency section */\n" + project_data[
            match.end():]
    else:
        (subtext, ) = match.groups()
        # Reuse the existing dependency record if one matches this name.
        match = re.search(
            '([A-Z0-9]+) \/\* PBXTargetDependency \*\/ = {\n[ \t]+isa = PBXTargetDependency;\n[ \t]+name = '
            + re.escape(dep._project_name) +
            ';\n[ \t]+targetProxy = ([A-Z0-9]+) \/\* PBXContainerItemProxy \*\/;',
            project_data)
        if match:
            (
                pbxtargetdependency_hash,
                pbxcontaineritemproxy_hash,
            ) = match.groups()
            logging.info("This dependency already exists.")
    self.set_project_data(project_data)
    # No existing record found: synthesize new hashes and insert one.
    if pbxtargetdependency_hash is None or pbxcontaineritemproxy_hash is None:
        match = re.search('\/\* Begin PBXTargetDependency section \*\/\n',
                          project_data)
        pbxtargetdependency_hash = tthash_base + '1'
        pbxcontaineritemproxy_hash = tthash_base + '2'
        pbxtargetdependency = "\t\t" + pbxtargetdependency_hash + " /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\tname = " + dep._project_name + ";\n\t\t\ttargetProxy = " + pbxcontaineritemproxy_hash + " /* PBXContainerItemProxy */;\n\t\t};\n"
        project_data = project_data[:match.end(
        )] + pbxtargetdependency + project_data[match.end():]
    logging.info("Done: Added dependency.")
    self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 3.1: Add container proxy for dependencies...")
    containerExists = False
    match = re.search(
        '\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/',
        project_data)
    if not match:
        logging.info("\tAdding a PBXContainerItemProxy section...")
        match = re.search('\/\* End PBXBuildFile section \*\/\n',
                          project_data)
        if not match:
            logging.error("Couldn't find the PBXBuildFile section.")
            return False
        project_data = project_data[:match.end(
        )] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[
            match.end():]
    else:
        (subtext, ) = match.groups()
        match = re.search(re.escape(pbxcontaineritemproxy_hash), subtext)
        if match:
            logging.info("This container proxy already exists.")
            containerExists = True
    self.set_project_data(project_data)
    if not containerExists:
        match = re.search(
            '\/\* Begin PBXContainerItemProxy section \*\/\n',
            project_data)
        # proxyType 1 = a target-dependency proxy into dep's project.
        pbxcontaineritemproxy = "\t\t" + pbxcontaineritemproxy_hash + " /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = " + pbxfileref_hash + " /* " + dep._project_name + ".xcodeproj */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = " + dep_target.guid(
        ) + ";\n\t\t\tremoteInfo = " + dep._project_name + ";\n\t\t};\n"
        project_data = project_data[:match.end(
        )] + pbxcontaineritemproxy + project_data[match.end():]
    logging.info("Done: Added container proxy.")
    self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 3.2: Add module to the dependency list...")
    match = re.search(
        project_target.guid() +
        ' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n((?:.|\n)+?)\);',
        project_data)
    dependency_exists = False
    if not match:
        logging.error("Couldn't find the dependency list.")
        return False
    else:
        (dependencylist, ) = match.groups()
        match = re.search(re.escape(pbxtargetdependency_hash),
                          dependencylist)
        if match:
            logging.info("This dependency has already been added.")
            dependency_exists = True
    if not dependency_exists:
        match = re.search(
            project_target.guid() +
            ' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n',
            project_data)
        if not match:
            logging.error("Couldn't find the dependency list.")
            return False
        dependency_item = '\t\t\t\t' + pbxtargetdependency_hash + ' /* PBXTargetDependency */,\n'
        project_data = project_data[:match.end(
        )] + dependency_item + project_data[match.end():]
    logging.info("Done: Added module to the dependency list.")
    self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 4: Create project references...")
    match = re.search(
        '\/\* Begin PBXProject section \*\/\n((?:.|\n)+?)\/\* End PBXProject section \*\/',
        project_data)
    if not match:
        logging.error("Couldn't find the project section.")
        return False
    # Edit the PBXProject section as a slice; splice it back only if it
    # actually changed (did_change).
    project_start = match.start(1)
    project_end = match.end(1)
    (project_section, ) = match.groups()
    reference_exists = False
    did_change = False
    productgroup_hash = None
    match = re.search('projectReferences = \(\n((?:.|\n)+?)\n[ \t]+\);',
                      project_section)
    if not match:
        logging.info("Creating project references...")
        match = re.search('projectDirPath = ".*?";\n', project_section)
        if not match:
            logging.error("Couldn't find project references anchor.")
            return False
        did_change = True
        project_section = project_section[:match.end(
        )] + '\t\t\tprojectReferences = (\n\t\t\t);\n' + project_section[
            match.end():]
    else:
        (refs, ) = match.groups()
        match = re.search(
            '\{\n[ \t]+ProductGroup = ([A-Z0-9]+) \/\* Products \*\/;\n[ \t]+ProjectRef = '
            + re.escape(pbxfileref_hash), refs)
        if match:
            (productgroup_hash, ) = match.groups()
            logging.info("This product group already exists: " +
                         productgroup_hash)
            reference_exists = True
    if not reference_exists:
        match = re.search('projectReferences = \(\n', project_section)
        if not match:
            logging.error("Missing the project references item.")
            return False
        productgroup_hash = tthash_base + '3'
        reference_text = '\t\t\t\t{\n\t\t\t\t\tProductGroup = ' + productgroup_hash + ' /* Products */;\n\t\t\t\t\tProjectRef = ' + pbxfileref_hash + ' /* ' + dep._project_name + '.xcodeproj */;\n\t\t\t\t},\n'
        project_section = project_section[:match.end(
        )] + reference_text + project_section[match.end():]
        did_change = True
    if did_change:
        project_data = project_data[:
                                    project_start] + project_section + project_data[
                                        project_end:]
    logging.info("Done: Created project reference.")
    self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 4.1: Create product group...")
    match = re.search('\/\* Begin PBXGroup section \*\/\n', project_data)
    if not match:
        logging.error("Couldn't find the group section.")
        return False
    group_start = match.end()
    lib_hash = None
    match = re.search(
        re.escape(productgroup_hash) +
        " \/\* Products \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);",
        project_data)
    if match:
        logging.info("This product group already exists.")
        (children, ) = match.groups()
        match = re.search(
            '([A-Z0-9]+) \/\* ' + re.escape(dep_target.product_name()) +
            ' \*\/', children)
        if not match:
            logging.error("No product found")
            return False
            # TODO: Add this product.
        else:
            (lib_hash, ) = match.groups()
    else:
        lib_hash = tthash_base + '4'
        productgrouptext = "\t\t" + productgroup_hash + " /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t" + lib_hash + " /* " + dep_target.product_name(
        ) + " */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n"
        project_data = project_data[:
                                    group_start] + productgrouptext + project_data[
                                        group_start:]
    logging.info("Done: Created product group: " + lib_hash)
    self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 4.2: Add container proxy for target product...")
    containerExists = False
    targetproduct_hash = tthash_base + '6'
    match = re.search(
        '\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/',
        project_data)
    if not match:
        logging.info("\tAdding a PBXContainerItemProxy section...")
        match = re.search('\/\* End PBXBuildFile section \*\/\n',
                          project_data)
        if not match:
            logging.error("Couldn't find the PBXBuildFile section.")
            return False
        project_data = project_data[:match.end(
        )] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[
            match.end():]
    else:
        (subtext, ) = match.groups()
        match = re.search(re.escape(targetproduct_hash), subtext)
        if match:
            logging.info("This container proxy already exists.")
            containerExists = True
    self.set_project_data(project_data)
    if not containerExists:
        match = re.search(
            '\/\* Begin PBXContainerItemProxy section \*\/\n',
            project_data)
        # proxyType 2 = a reference proxy to dep's built product.
        pbxcontaineritemproxy = "\t\t" + targetproduct_hash + " /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = " + pbxfileref_hash + " /* " + dep._project_name + ".xcodeproj */;\n\t\t\tproxyType = 2;\n\t\t\tremoteGlobalIDString = " + dep_target.guid(
        ) + ";\n\t\t\tremoteInfo = " + dep._project_name + ";\n\t\t};\n"
        project_data = project_data[:match.end(
        )] + pbxcontaineritemproxy + project_data[match.end():]
    logging.info("Done: Added target container proxy.")
    self.set_project_data(project_data)
    ###############################################
    # This code seems to break the xcode project but doesn't seem completely crucial.
    # Gr.
    #		logging.info("")
    #		logging.info("Step 4.3: Create reference proxy...")
    #
    #		referenceExists = False
    #
    #		match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n((?:.|\n)+?)\/\* End PBXReferenceProxy section \*\/', project_data)
    #		if not match:
    #			logging.info("\tAdding a PBXReferenceProxy section...")
    #			match = re.search('\/\* End PBXProject section \*\/\n', project_data)
    #
    #			if not match:
    #				logging.error("Couldn't find the PBXProject section.")
    #				return False
    #
    #			project_data = project_data[:match.end()] + "\n/* Begin PBXReferenceProxy section */\n\n/* End PBXReferenceProxy section */\n" + project_data[match.end():]
    #		else:
    #			(subtext, ) = match.groups()
    #			match = re.search(re.escape(lib_hash), subtext)
    #			if match:
    #				logging.info("This reference proxy already exists.")
    #				referenceExists = True
    #
    #		self.set_project_data(project_data)
    #
    #		if not referenceExists:
    #			match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n', project_data)
    #
    #			referenceproxytext = "\t\t"+lib_hash+" /* "+dep_target.product_name()+" */ = {\n\t\t\tisa = PBXReferenceProxy;\n\t\t\tfileType = archive.ar;\n\t\t\tpath = \""+dep_target.product_name()+"\";\n\t\t\tremoteRef = "+targetproduct_hash+" /* PBXContainerItemProxy */;\n\t\t\tsourceTree = BUILT_PRODUCTS_DIR;\n\t\t};\n"
    #			project_data = project_data[:match.end()] + referenceproxytext + project_data[match.end():]
    #
    #		logging.info("Done: Created reference proxy.")
    #		self.set_project_data(project_data)
    ###############################################
    logging.info("")
    logging.info("Step 5: Add target file...")
    libfile_hash = self.add_buildfile(dep_target.product_name(), lib_hash,
                                      tthash_base + '5')
    project_data = self.get_project_data()
    logging.info("Done: Added target file.")
    ###############################################
    logging.info("")
    logging.info("Step 6: Add frameworks...")
    self.add_file_to_frameworks_phase(dep_target.product_name(),
                                      libfile_hash)
    project_data = self.get_project_data()
    logging.info("Done: Adding module.")
    # Final write flushes the accumulated edits to disk.
    self.set_project_data(project_data, flush=True)
    return True