def feed(self, data):
    """Feed a chunk of text to the underlying parser, normalizing line endings.

    A '\\r\\n' pair may straddle two chunks: if the previous chunk ended in
    '\\r' and this one starts with '\\n', the leading '\\n' is dropped so the
    pair is not turned into two newlines.
    """
    if self.__last_was_cr and data[0:1] == '\n':
        data = data[1:]
    # Remember whether this chunk ends mid-CRLF for the next call.
    self.__last_was_cr = data[-1:] == '\r'
    # Normalize Windows then old-Mac line endings to '\n'.
    data = data.replace('\r\n', '\n')
    data = data.replace('\r', '\n')
    self.__parser.feed(data)
def callRiger(rigerTableFile, scoring='KSbyScore', output='', callRiger=True):
    '''
    calls riger using the KS scoring metric (default)

    rigerTableFile -- path to a '*_friger.txt' input table
    scoring        -- RIGER scoring method (default 'KSbyScore')
    output         -- output file path; derived from the input name if empty
    callRiger      -- when True, run the generated script via os.system
    Returns the path of the generated shell script.
    '''
    rigerDirectory = '/raider/temp/riger/'
    rigerTableAbsFile = os.path.abspath(rigerTableFile)
    outputFolder = utils.getParentFolder(rigerTableAbsFile)  # kept for parity; unused below
    if len(output) == 0:
        output = rigerTableAbsFile.replace('_friger.txt', '_friger_%s_out.txt' % (scoring))
    rigerBashFileName = rigerTableAbsFile.replace('_friger.txt', '_callRiger.sh')
    # Context manager guarantees the script is flushed/closed before use.
    rigerCmd = ('java -cp commons-cli-1.2.jar:rigerj-1.6.2.jar '
                'org.broadinstitute.rnai.rigerj.RigerJMain '
                '-scoringMethod %s -inputFile %s -outputFile %s'
                % (scoring, rigerTableAbsFile, output))
    with open(rigerBashFileName, 'w') as rigerBashFile:
        rigerBashFile.write('#!/usr/bin/bash\n')
        rigerBashFile.write('cd %s\n\n' % (rigerDirectory))
        rigerBashFile.write(rigerCmd)
        rigerBashFile.write('\n')
    print("WROTE RIGER CMD TO %s" % (rigerBashFileName))
    if callRiger == True:
        print("Calling RIGER with %s scoring method" % (scoring))
        print("RIGER CMD: %s" % (rigerCmd))
        # NOTE(review): the script file is executed directly, so it must be
        # executable; consider os.system('bash %s' % ...) if it lacks +x.
        os.system(rigerBashFileName)
    return rigerBashFileName
def expand_macros(feed, item, conf_items):
    """Expand %{...} macros in conf_items values using feed/item fields.

    Returns a new dict with the same keys as conf_items; a value containing a
    macro has it substituted, other values are copied unchanged.  NOTE: uses
    dict.setdefault, so missing keys are inserted as None into feed/item,
    mutating the arguments (preserved from the original).
    """
    macro_defns = {}
    macro_defns["%{host}"] = "TODO"
    macro_defns["%{user}"] = "TODO"
    macro_defns["%{rss-link}"] = feed.setdefault("href", None)
    macro_defns["%{last-modified}"] = "TODO"
    macro_defns["%{item:link}"] = item.setdefault("link", None)
    macro_defns["%{item:title}"] = item.setdefault("title", None)
    macro_defns["%{item:description}"] = item.setdefault("description", None)
    macro_defns["%{item:dc:date}"] = item.setdefault("date", None)
    macro_defns["%{item:dc:subject}"] = "TODO"
    macro_defns["%{item:dc:creator}"] = "TODO"
    # NOTE(review): if feed lacks a "feed" entry, setdefault inserts None and
    # the chained .setdefault raises AttributeError — same as the original.
    macro_defns["%{channel:link}"] = feed.setdefault("feed", None).setdefault("link", None)
    macro_defns["%{channel:title}"] = feed.setdefault("feed", None).setdefault("title", None)
    macro_defns["%{channel:description}"] = feed.setdefault("feed", None).setdefault("description", None)
    macro_defns["%{channel:dc:date}"] = feed.setdefault("feed", None).setdefault("date", None)
    expanded_config = {}
    # Hoisted out of the loop: the pattern never changes per-iteration.
    regex = re.compile("(?P<macro>%{.*})")
    for key, val in conf_items.items():
        matches = regex.search(val)
        if matches is not None:
            macro_label = matches.groupdict()["macro"]
            # need to escape folders
            if key == "folder":
                macro_defns[macro_label] = macro_defns[macro_label].replace(".", ":")
            expanded_config[key] = conf_items[key].replace(macro_label, macro_defns[macro_label])
        else:
            expanded_config[key] = conf_items[key]
    return expanded_config
def main():
    """Rewrite an ini file, substituting server placeholders.

    Reads -i inifile, replaces each 'dynamic' token with the next server from
    the -s JSON list, optionally maps credentials when --os is windows, and
    writes the result to -o outputFile.
    """
    print("in main")
    usage = "%prog -i inifile -o outputfile -s servers"
    parser = OptionParser(usage)
    parser.add_option("-s", "--servers", dest="servers")
    parser.add_option("-i", "--inifile", dest="inifile")
    parser.add_option("-o", "--outputFile", dest="outputFile")
    parser.add_option("-p", "--os", dest="os")
    options, args = parser.parse_args()
    print("the ini file is", options.inifile)
    print("the server info is", options.servers)
    servers = json.loads(options.servers)
    # Close the input file deterministically (original leaked the handle).
    with open(options.inifile) as f:
        data = f.readlines()
    for i in range(len(data)):
        if "dynamic" in data[i]:
            # consume one server per 'dynamic' line, in order
            data[i] = data[i].replace("dynamic", servers[0])
            servers.pop(0)
        if options.os == "windows":
            if "root" in data[i]:
                data[i] = data[i].replace("root", "Administrator")
            if "couchbase" in data[i]:
                data[i] = data[i].replace("couchbase", "Membase123")
    for d in data:
        print(d, end='')
    # Close the output file too (original left it open).
    with open(options.outputFile, "w") as f:
        f.writelines(data)
def parseFileName(name):
    """Normalize a media file name: drop bracketed content, surrounding
    punctuation and whitespace, lowercase, and force to ASCII.

    Returns "unknown" when encoding fails entirely, and " " when the result
    is empty, so callers always receive a non-empty string.
    """
    nameString = dropInsideContent(name, "[", "]")
    nameString = dropInsideContent(nameString, "{", "}")
    nameString = dropInsideContent(nameString, "(", ")")
    nameString = nameString.strip('()_{}[]!@#$^&*+=|\\/"\'?<>~`')
    nameString = nameString.lstrip(' ')
    nameString = nameString.rstrip(' ')
    nameString = dropInsideContent(nameString, "{", "}")
    nameString = nameString.lower()
    nameString = nameString.replace("\t", " ")
    # NOTE(review): as written this replaces a space with a space (no-op);
    # presumably the intent was collapsing double spaces — confirm upstream.
    nameString = nameString.replace(" ", " ")
    try:
        nameString = unicodedata.normalize('NFKD', nameString).encode()
        nameString = nameString.encode()
    except Exception:  # narrowed from bare except: don't swallow SystemExit
        try:
            nameString = nameString.encode('latin-1', 'ignore')
            nameString = unicodedata.normalize('NFKD', nameString).encode("ascii")
            nameString = str(nameString)
        except Exception:
            nameString = "unknown"
    if len(nameString) == 0:
        nameString = " "
    return nameString
def get_cast(self):
    """Extract the cast list from the fetched page into self.cast."""
    # Grab the block that follows the '/ob.gif"' marker.
    self.cast = "<%s" % gutils.trim(self.page, '/ob.gif"', "<br/>\n\t")
    self.cast = self.cast.replace("\n", '')
    self.cast = self.cast.replace("\t", '')
    # Each actor <div> starts a new output line.
    self.cast = self.cast.replace('<div class="filmActor">', "\n")
    self.cast = self.cast.replace(":", _(" as "))
    self.cast = gutils.strip_tags(self.cast)
def find(self, string, pattern):
    """Check that `pattern` occurs in `string`; raise via log.err otherwise.

    '*' in the pattern is treated literally and each space matches any run of
    whitespace.  BUGFIX: the original called string.replace(pattern, '*',
    '\\*') — i.e. str.replace with a string `count` argument, which raises
    TypeError unconditionally.  The evident intent (escape '*', loosen
    spaces) is restored.
    """
    word = pattern.replace('*', '\\*')    # escape literal asterisks
    words = word.replace(' ', '\\s*')     # spaces match any whitespace run
    if re.search(words, string):
        pass
    else:
        raise log.err(string)
def genmenu(start, directory):
    """Recursively print an Openbox pipe-menu for wallpapers under `directory`.

    Emits <menu> entries for subdirectories and <item> entries for files
    matching the module-level `filetypes` regex; items set the wallpaper via
    the module-level `program`, and optionally update the config file when
    the module-level `config` is "yes".
    """
    # get a directory list
    dirlist = os.listdir(directory)
    for d in dirlist:
        # set di to overall directory
        di = directory + "/" + d
        # if we get a dir, generate a menu
        if isdir(di):
            print("")
            print(' <menu id="' + di + '" label="' + d + '" >')
            genmenu(start, di)
            print(" </menu>")
        # if we get a file, check if it is a valid type
        else:
            # BUGFIX: comparing a Match/None against 0 is a py2-ism; test
            # for a match explicitly instead.
            if re.search(filetypes, di.lower()) is not None:
                # make fi variable just filename, without extension
                fi = di.replace(directory, "").replace("/", "")
                fi = fi[:fi.rfind(".")]
                # if so, add it to the pipe menu
                print(' <item label="' + fi + '">')
                # execute line to set wallpaper
                print(' <action name="Execute"><execute>' + program + ' "' + di + '"</execute></action>')
                # if we want to update config file, do so
                if config == "yes":
                    print(' <action name="Execute"><execute>~/.config/openbox/wallmenu.py ' + di + "</execute></action>")
                print(" </item>")
def get_director(self):
    """Extract the series director from self.page into self.director.

    When no match is found, self.director is left as the empty findall list
    (preserved from the original behavior).
    """
    # The director name sits between the 'yseria' and 'scenariusz' labels.
    self.director = re.findall(r'yseria\t+(.*)\t+scenariusz', self.page)
    if len(self.director) > 0:
        self.director = self.director[0]
        self.director = self.director.replace("\t", '')
        self.director = self.director.replace(",", ", ")
        # Strip the trailing "(more ...)" link (latin-2 encoded page text).
        self.director = self.director.replace(", (wi\xeacej ...)", '')
def regexp_escape(a_string):
    """Escape backslash, slash and dot for embedding in a regex literal.

    Backslashes are doubled first so the escapes added afterwards are not
    themselves re-escaped.  Deliberately narrower than re.escape: only
    '\\', '/' and '.' are handled, matching the original behavior.
    """
    r = a_string
    r = r.replace('\\', '\\\\')
    r = r.replace('/', '\\/')
    r = r.replace('.', '\\.')
    return r
def filter_message(message):
    """
    Filter a message body so it is suitable for learning from and
    replying to. This involves removing confusing characters and
    converting to lower case.
    """
    # to lowercase
    message = message.lower()
    # remove garbage
    message = message.replace("\"", "")   # remove "s
    message = message.replace("\n", " ")  # remove newlines
    message = message.replace("\r", " ")  # remove carriage returns
    # remove matching brackets (unmatched ones are likely smileys :-) *cough*
    # str.index raises ValueError when no '(' (or matching ')') remains,
    # which cleanly terminates the scan.
    index = 0
    try:
        while 1:
            index = message.index("(", index)
            # Remove matching ) bracket
            i = message.index(")", index + 1)
            message = message[0:i] + message[i + 1:]
            # And remove the (
            message = message[0:index] + message[index + 1:]
    except ValueError:
        pass
    # BUGFIX: the original fell off the end and implicitly returned None,
    # discarding the filtered text.
    return message
def createLSFScript(cfgfile):
    """Write a csh wrapper script that runs `cmsRun <cfgfile>` under LSF.

    The script (runlsf_<name>.csh) is created next to cfgfile.  Returns None.
    """
    fname = os.path.basename(cfgfile)  # renamed: don't shadow the builtin `file`
    absDir = os.path.abspath(os.path.dirname(cfgfile))
    outScript = "runlsf_" + fname.replace(".py", ".csh")
    outLog = "runlsf_" + fname.replace(".py", ".log")  # kept for parity; unused
    inScript = os.path.join(fname)
    outScript = os.path.join(absDir, outScript)
    outLog = os.path.join(absDir, outLog)
    # Context manager guarantees the handle is flushed and closed.
    with open(outScript, 'w') as oFile:
        oFile.write("#!/bin/csh" + "\n")
        oFile.write("cd " + absDir + "\n")
        oFile.write("eval `scram runtime -csh`" + "\n")
        oFile.write("cmsRun " + inScript + "\n")
    return
def sanitize(text):
    """
    Sanitizes text for referral URLS and for not found errors

    Strips angle brackets so the text cannot inject HTML tags.
    """
    return text.replace("<", "").replace(">", "")
def CleanFileData(self, data):
    """ clean the read file data and return it

    Removes newlines ('\\012' is octal for '\\n'), strips surrounding
    whitespace, and drops lines that are empty afterwards.
    """
    fresh = []
    for line in data:
        # compute the cleaned value once (the original computed it twice)
        cleaned = line.replace('\012', '').strip()
        if cleaned != '':
            fresh.append(cleaned)
    return fresh
def createCFGFiles(i, orgFile, basename, dir):
    """Write cfg copy #i of orgFile's lines with tagged input/output names.

    Lines starting with the module-level INPUTSTARTSWITH marker get
    `searchInput` replaced by str(i); lines starting with OUTPUTSTARTSWITH
    get '_<i>' inserted before `searchOutput`.  Exits the process when a
    marker line lacks its expected search string.  Returns the new file path.
    """
    newFile = basename + "_" + str(i) + ".py"
    newFile = os.path.join(dir, newFile)
    print(newFile)
    outFile = open(newFile, 'w')
    for iline in orgFile:
        # find(...) == 0 means the line starts with the marker
        if iline.find(INPUTSTARTSWITH) == 0:
            if iline.find(searchInput) < 0:
                print("Problem")
                sys.exit(1)
            else:
                iline = iline.replace(searchInput, str(i))
        if iline.find(OUTPUTSTARTSWITH) == 0:
            if iline.find(searchOutput) < 0:
                print("Problem")
                sys.exit(1)
            else:
                replString = "_" + str(i) + searchOutput
                iline = iline.replace(searchOutput, replString)
        outFile.write(iline + "\n")
    CloseFile(outFile)
    return newFile
def formatElement(el, path):
    """Write one annotated XSD element's documentation row to a worksheet.

    The worksheet is chosen from the element's path: the second segment when
    it names a known worksheet, otherwise the default 'scenario' sheet.
    Relies on the module-level `worksheets`, `appinfoOrder`, `docu_xf` and
    `normalizeNewlines`.
    """
    XS = "{http://www.w3.org/2001/XMLSchema}"
    annotation = el.find(XS + "annotation")
    if annotation is not None:
        splitPath = path.split('/')
        # default component "scenario": write to the "Scenario" worksheet
        component = "scenario"
        printPath = path.replace("/" + component, "")
        # update component if a known child element of scenario
        if len(splitPath) > 2:
            if splitPath[2] in worksheets:
                component = splitPath[2]
                printPath = printPath.replace("/" + component, "")
        sheet = worksheets[component][0]
        row = worksheets[component][1]
        sheet.write(row, 0, printPath.lstrip("/"))
        docuElem = annotation.find(XS + "documentation")
        if docuElem is not None:
            docu = docuElem.text
        else:
            docu = "TODO"
        content = docu.strip()
        sheet.write(row, 1, normalizeNewlines(content), docu_xf)
        appInfoElem = annotation.find(XS + "appinfo")
        if appInfoElem is not None:
            appInfo = appInfoElem.text.strip()
        else:
            appInfo = "name:TODO"
        # the trailing ';' yields an empty final element; drop it
        appInfoList = appInfo.split(";")[0:-1]
        for keyValue in appInfoList:
            splitPair = keyValue.split(":")
            colIndex = appinfoOrder.index(str(splitPair[0]).strip()) + 2
            sheet.write(row, colIndex, splitPair[1], docu_xf)
        # update next row to be written in that sheet
        worksheets[component][1] = worksheets[component][1] + 1
def get_rendereable_badgeset(account):
    """ Will return a badgset as follows:
    theme
    -badgeset
    name description alt key perm(issions)
    """
    badges = get_all_badges_for_account(account)
    badgeset = []
    for b in badges:
        # Badge id is theme-name-perm; spaces and "-" both become "_" so
        # that "-" can safely delimit the three id parts.
        name_for_id = b.name.replace(" ", "_").replace("-", "_")
        theme_for_id = b.theme.replace(" ", "_").replace("-", "_")
        badge_id = theme_for_id + "-" + name_for_id + "-" + b.permissions
        item = {"name": b.name,
                "description": b.description,
                "alt": b.altText,
                "key": b.key().name(),
                "perm": b.permissions,
                "theme": b.theme,
                "id": badge_id,
                "downloadLink": b.downloadLink}
        badgeset.append(item)
    return badgeset
def get_cast(self):
    """Build self.cast: one 'Actor as Role' line per entry of the cast table."""
    self.cast = ''
    tmp = gutils.trim(self.page_cast, '<h2>cast</h2>', '</table>')
    elements = tmp.split('<td class="name">')
    for element in elements:
        # '$$$' temporarily marks the actor/role boundary ('</a>') so it
        # survives tag cleanup, then becomes the localized ' as '.
        element = element.replace('</a>', '$$$')
        cleaned = gutils.clean(element).replace('\n', '')
        cleaned = re.sub('[ ]+', ' ', cleaned)
        cleaned = re.sub('[$][$][$]$', '', cleaned)  # marker at end = no role
        self.cast = self.cast + cleaned.replace('$$$', _(' as ')) + '\n'
def lurk(bot):
    """Poll the lab-status API and update channel topics when state changes.

    Compares the fetched open/closed state with bot.memory['lab_was_open'];
    on a change, rewrites every channel topic (German status strings) and
    stores the new state.
    """
    print('lurink...')
    handler = LabAPIHandler(bot.config.labstatus.api_url)
    if handler.update_data():
        lab_is_open = handler.get_lab_state()
        lab_was_open = bot.memory['lab_was_open']
        if lab_is_open and not lab_was_open:
            # state changed to 'open'
            status_str = 'geöffnet!'
        elif not lab_is_open and lab_was_open:
            # state changed to 'closed'
            status_str = 'geschlossen.'
        else:
            # nothing happend
            return
        topic = bot.config.labstatus.topic_draft.replace('$STATUS', status_str)
        for channel in bot.channels:
            bot.write(('TOPIC', channel + ' :' + topic.replace('$CHANNEL', channel)))
            bot.msg(channel, '+++ NEUER LAB-STATUS: ' + status_str + ' +++')
        bot.memory['lab_was_open'] = lab_is_open
    else:
        print('API call failed')
def loadFile(fileName, width, height):
    """
    Loads skin xml file and resolves embedded #include directives.
    Returns completely loaded file as a string.
    """
    log.debug("loadFile(%s, %d, %d)" % (fileName, width, height))
    s = ""
    # `with` replaces file()/close(): the handle is released even when a
    # recursive include raises.  NOTE(review): `sre` is the deprecated alias
    # of `re` (removed in Python 3); consider switching file-wide.
    with open(fileName) as f:
        for l in f.readlines():
            m = sre.match('^#include (.*)$', l)
            if m:
                incFile = m.group(1).strip()
                if sre.match('^\w+\.xml$', incFile):
                    # need to find skin file
                    incFile = findSkinFile(incFile, width, height)
                elif sre.match('\%SKINDIR\%', incFile):
                    incFile = incFile.replace("%SKINDIR%", getSkinDir())
                else:
                    # convert path separators to proper path separator - just in case
                    incFile = incFile.replace("/", os.sep)
                    incFile = incFile.replace("\\", os.sep)
                    # assume relative path provided
                    path = os.path.dirname(fileName)
                    path += os.sep + incFile
                    incFile = path
                s += loadFile(incFile, width, height)
            else:
                s += l
    return s
def get_payload(self):
    """Serialize the firewall rules as the JSON payload the API expects.

    The model's 'inlist'/'outlist' names are rewritten to the wire names
    'in'/'out' (presumably aliased because 'in' is a Python keyword —
    confirm against the model definition).
    """
    s = self.firewall.to_json()
    s = s.replace('inlist', "in")
    s = s.replace('outlist', "out")
    payload = {self._mn3: json.loads(s)}
    return json.dumps(payload, default=lambda o: o.__dict__, sort_keys=True,
                      indent=4)
def get_plot(self):
    """Extract and clean the plot summary from self.page into self.plot."""
    self.plot = gutils.trim(self.page, "<td valign=\"top\" align=\"left\">", "</td>")
    # page bytes are latin-1 (py2: str -> unicode before cleanup)
    self.plot = self.plot.decode('latin-1').strip()
    self.plot = self.plot.replace("<br>", " ")
    self.plot = self.plot.replace("<p>", " ")
    self.plot = self.plot.replace("'", "_")
    self.plot = gutils.strip_tags(self.plot).strip()
def four11Path(self, filename411):
    """Translates 411 file names into UNIX absolute filenames."""
    # Warning this is UNIX dependant
    # '.' is the 411 path separator; '..' encodes a literal dot, which
    # survives the first replace as '//' and is restored by the second.
    n = filename411.replace(".", "/")
    n = n.replace("//", ".")
    return os.path.normpath(os.path.join(self.rootdir, n))
def render(self):
    """Serve a result tarball for a job (or one test's detail results).

    Query params: `id` (required) plus either a sanitized `prefix` or a
    phase/test pair resolved to a detail id.  Returns an error string on
    failure; otherwise streams the file as an attachment.
    """
    form = self.request.params
    if "id" not in form:
        return "ERROR No job specified"
    id = int(form["id"])  # string.atoi is long gone; int() is equivalent
    if "prefix" in form:
        # strip characters that could escape the filename/HTML context
        prefix = form["prefix"]
        for ch in ('<', '>', '/', '&', '\\'):
            prefix = prefix.replace(ch, '')
    elif "phase" in form and "test" in form:
        id = self.lookup_detailid(id, form["phase"], form["test"])
        if id == -1:
            return "ERROR Specified test not found"
        prefix = "test"
    else:
        prefix = ""
    try:
        filename = app.utils.results_filename(prefix, id)
        self.request.response.body_file = open(filename, "r")
        self.request.response.content_type = "application/octet-stream"
        self.request.response.content_disposition = "attachment; filename=\"%d.tar.bz2\"" % (id)
        return self.request.response
    except Exception as e:
        if isinstance(e, IOError):
            # We can still report error to client at this point
            return "ERROR File missing"
        else:
            return "ERROR Internal error"
def precmd(self, line):
    """Handle alias expansion and ';;' separator."""
    if not line:
        return line
    args = line.split()
    # Expand aliases repeatedly: an alias may itself expand to another alias.
    while args[0] in self.aliases:
        line = self.aliases[args[0]]
        ii = 1
        for tmpArg in args[1:]:
            line = line.replace("%" + str(ii), tmpArg)  # positional %1, %2, ...
            ii = ii + 1
        line = line.replace("%*", ' '.join(args[1:]))   # %* -> all arguments
        args = line.split()
    # split into ';;' separated commands
    # unless it's an alias command
    if args[0] != 'alias':
        marker = line.find(';;')
        if marker >= 0:
            # queue up everything after marker
            next = line[marker + 2:].lstrip()
            self.cmdqueue.append(next)
            line = line[:marker].rstrip()
    return line
def search(self, pattern='', context=''):
    """Select rows whose context matches the glob `pattern`, case-folded.

    '*' and '?' are translated to the SQL LIKE wildcards '%' and '_'.
    With an empty pattern, all rows are returned ordered by context.
    """
    pattern = pattern.lower().replace('*', '%').replace('?', '_')
    if pattern:
        # SECURITY(review): `pattern` is interpolated into SQL unescaped;
        # if it can be user-supplied, use the ORM's parameter binding.
        return self.select("context LIKE '%s'" % pattern, orderBy="context")
    else:
        return self.select(orderBy="context")
def render_admin_panel(self, req, cat, page, path_info):
    """Render/handle the gitosis repository admin panel (TRAC_ADMIN only).

    Syncs the gitosis-admin clone, saves posted repo options on POST, and
    renders the current options (with boolean normalization) on GET.
    """
    req.perm.require('TRAC_ADMIN')
    status, message = init_admin(self.gitosis_user, self.gitosis_server,
                                 self.admrepo, self.env.path)
    data = {}
    if status != 0:
        add_warning(req, _('Error while cloning gitosis-admin repository. Please check your settings and/or passphrase free connection to this repository for the user running trac (in most cases, the web server user)'))
        message = 'return code: ' + str(status) + '\nmessage:\n' + message
    if message:
        add_warning(req, _(message))
    # repo name = repository_dir basename without its '.git' suffix
    repo = os.path.basename(self.config.get('trac', 'repository_dir')).replace('.git', '')
    if req.method == 'POST':
        config = {}
        self.log.debug('description: ' + req.args.get('description'))
        for option in ('daemon', 'gitweb', 'description', 'owner'):
            config[option] = req.args.get(option)
        self.set_config(repo, config)
        req.redirect(req.href.admin(cat, page))
    # recomputed after the POST branch, as in the original (redirect
    # normally aborts the request, so this runs on GET)
    repo = os.path.basename(self.config.get('trac', 'repository_dir')).replace('.git', '')
    if repo != '':
        data = self.get_config(repo)
        self.log.debug('data: %s', str(data))
    if not data:
        data = {}
    for option in ('daemon', 'gitweb', 'description', 'owner'):
        if option not in data:
            data[option] = ''
    data['gitweb'] = data['gitweb'] in _TRUE_VALUES
    data['daemon'] = data['daemon'] in _TRUE_VALUES
    return 'admin_tracgitosis_repo.html', {'repo': data}
def __normalize_dell_charset(self, page_content):
    """Strip obfuscated script-tag fragments and non-ASCII bytes from a page.

    NOTE(review): uses the Python 2 two-argument str.translate(table,
    deletechars) form; under Python 3 this signature raises TypeError.
    """
    # remove JS-obfuscated "<sc'+'ript" fragments (`replace` here is the
    # function imported from the `string` module)
    page_content = replace(page_content, "<sc'+'ript", '')
    page_content = replace(page_content, "</sc'+'ript", '')
    # identity translation table plus chars 128-255 as the delete set:
    # the translate call keeps only 7-bit ASCII bytes
    t = ''.join(map(chr, range(256)))
    d = ''.join(map(chr, range(128, 256)))
    page_content = page_content.translate(t, d)
    return page_content
def normalize(string):
    """Transliterate German umlauts and eszett, then force ASCII.

    Any character still outside ASCII after transliteration is dropped by
    the 'ignore' error handler of encode().
    """
    replacements = (
        (u"Ä", "Ae"), (u"ä", "ae"),
        (u"Ö", "Oe"), (u"ö", "oe"),
        (u"Ü", "Ue"), (u"ü", "ue"),
        (u"ß", "ss"),
    )
    for umlaut, ascii_form in replacements:
        string = string.replace(umlaut, ascii_form)
    return string.encode("ascii", "ignore")
def get_genre(self):
    """Extract a comma-separated genre list from self.page into self.genre."""
    self.genre = gutils.trim(self.page, "Genre(s):", "</table>")
    self.genre = self.genre.replace("<br>", ", ")
    self.genre = gutils.strip_tags(self.genre)
    self.genre = self.genre.replace("/", ", ")
    self.genre = gutils.clean(self.genre)
    self.genre = self.genre[0:-1]  # drop the trailing separator character
def _stringSource(target, source, env): import string return (str(target[0]) + ' <-\n |' + string.replace(source[0].get_contents(), '\n', "\n |"))
def format_prize_amount(payout):
    """Parse a prize string like '$1,234.56' into a float (1234.56).

    Raises ValueError when the remainder is not a valid number.
    """
    return float(payout.replace(',', '').replace('$', ''))
def wild2regex(string):
    """Convert a Unix wildcard glob into a regular expression"""
    # Order matters: '.' must be escaped before '*' introduces new dots.
    translations = (('.', '\.'), ('*', '.*'), ('?', '.'), ('!', '^'))
    result = string
    for glob_char, regex_form in translations:
        result = result.replace(glob_char, regex_form)
    return result
#! python
# -*- coding: utf-8 -*-
# (c) 2006 Werner Mayer LGPL

# FreeCAD report memory leaks script to get provide the log file of
# Visual Studio in more readable file.

import string, re

# Map each allocation id "(#  nnn)" to its allocation line; when a later
# "Leak" line names the same id, the remembered allocation line is reported.
with open("MemLog.txt") as log_file:  # `with` closes the handle; also avoids shadowing builtin `file`
    lines = log_file.readlines()

d = dict()
l = list()
for line in lines:
    r = re.search("\\(#\\s*\\d+\\)", line)
    if r is not None:
        s = line[r.start():r.end()]  # the "(#  nnn)" allocation id
        t = re.search("^Leak", line)
        if t is not None:
            # leak entry: report the allocation line recorded for this id
            m = d[s]
            l.append(m)
        else:
            d[s] = line

# rewrite the remembered allocation lines as leak reports
with open("MemLog_leaks.txt", "w") as out_file:
    for line in l:
        out_file.write(line.replace('Alloc', 'Leak'))
def str_attribute(self, k):
    """Render attribute k as 'name: value', or as a bare flag name.

    attlist[k] truthy means a valued attribute ('name: value'); otherwise
    the hyphenated name is emitted only when the stored value is truthy,
    else the empty string.
    """
    kt = k.replace('_', '-')  # python identifier -> hyphenated attribute name
    if self.attlist[k]:
        return '%s: %s' % (kt, str(self[k]))
    else:
        return self[k] and kt or ''
def strip_nick(message, sender):
    """Return `message` trimmed of surrounding whitespace, with the bot's own
    name (module-level `nickname`) replaced by `sender`."""
    return message.strip().replace(nickname, sender)
def get_right_sep(fullPath):
    """Return fullPath with Windows backslashes converted to the host os.sep."""
    return fullPath.replace("\\", os.sep)
def get_genre(self):
    """Extract the genre line (truncated before the 'more' link) into self.genre."""
    self.genre = gutils.regextrim(self.page, 'Genre[s]*:<[^>]+>', '</div>').replace('\n', '')
    self.genre = self.__before_more(self.genre)
def findFileName(filename):
    """Return the dataset (base) name of `filename`, accepting either
    Windows or POSIX path separators."""
    # normalize backslashes first so a single split handles both styles
    dataset_name = filename.replace('\\', '/').split('/')[-1]
    return dataset_name
return mg, mg_c, cr1, cr2, cr3, cr4, cr5 countnuv = np.vectorize(countnuv) # Function to find seperation in celestial coordinates. cc = SkyCoord(RA, DEC, unit=(u.hourangle, u.deg)) def cel_separation(a, b): coo = SkyCoord(a, b, frame='icrs', unit='deg') return coo.separation(cc) # To get on with MAST website queries. ra = string.replace(RA, ':', '+') dec = string.replace(DEC, ':', '+') # Mast website form data. mastdata = { '__EVENTTARGET': '""', '__EVENTARGUMENT': '""', '__VIEWSTATE': '/wEPDwUKMTUwNjg2NDc5Ng8WAh4TVmFsaWRhdGVSZXF1ZXN0TW9kZQIBFgQCAQ8WAh4JaW5uZXJodG1sBRNNQVNULkdhbGV4LlRpbGVMaXN0ZAIDD2QWAgIBDxYKHgtjZWxsc3BhY2luZwUBMB4LY2VsbHBhZGRpbmcFATAeBXdpZHRoBQM3NjAeBmJvcmRlcgUBMB4FYWxpZ24FBmNlbnRlchYGZg9kFgJmD2QWAmYPZBYOAgEPPCsABQEDFCsAARAWCB4GSXRlbUlEBRZfY3RsMl9NQVNULW1lbnVJdGVtMDAxHghJdGVtVGV4dAUETUFTVB4HSXRlbVVSTAUYaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1HhBNZW51SXRlbUNzc0NsYXNzBQt0b3BuYXZjb2xvcmRkZAIDDzwrAAUBAxQrAAEQFggfBwUXX2N0bDJfU1RTY0ktbWVudUl0ZW0wMDEfCAUFU1RTY0kfCQUUaHR0cDovL3d3dy5zdHNjaS5lZHUfCgULdG9wbmF2Y29sb3JkZGQCBQ88KwAFAQMUKwABEBYKHwcFIF9jdGwyX1NlYXJjaGVzX1Rvb2xzLW1lbnVJdGVtMDAxHwgFBVRvb2xzHwkFJmh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS9zZWFyY2hlcy5odG1sHg1JdGVtTGVmdEltYWdlBREuLi9NZW51cy9kb3duLmdpZh4SSXRlbUxlZnRJbWFnZUFsaWduCyokU3lzdGVtLldlYi5VSS5XZWJDb250cm9scy5JbWFnZUFsaWduAhQrAAkQFgYfBwU0X2N0bDJfU2VhcmNoZXNfVG9vbHMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMB8IBQZBbGFkaW4fCQU5aHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2NnaS1iaW4vbnBoLWFsYWRpbi5wbD9mcm9tPVNUU2NJZGQQFgYfBwU0X2N0bDJfU2VhcmNoZXNfVG9vbHMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMR8IBQlTY3JhcGJvb2sfCQUmaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L3NjcmFwYm9vay5waHBkZBAWBh8HBTRfY3RsMl9TZWFyY2hlc19Ub29scy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDAyHwgFEFZpemllUi9NQVNUIFhjb3IfCQUjaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L3Zpemllci5waHBkZBAWBh8HBTRfY3RsMl9TZWFyY2hlc19Ub29scy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDAzHwgFDU5FRC9NQVNUIFhjb3IfCQUgaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L25lZC5waHBkZBAWBh8HBTRfY3RsMl9TZWFyY2hlc19Ub29scy1tZW51SXRlbTAwMS1z
dWJNZW51LW1lbnVJdGVtMDA0HwgFCUNvcGxvdHRlch8JBSlodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvbWFzdF9jb3Bsb3QuaHRtbGRkEBYGHwcFNF9jdGwyX1NlYXJjaGVzX1Rvb2xzLW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDUfCAUIU3BlY3ZpZXcfCQU5aHR0cDovL3d3dy5zdHNjaS5lZHUvcmVzb3VyY2VzL3NvZnR3YXJlX2hhcmR3YXJlL3NwZWN2aWV3ZGQQFgYfBwU0X2N0bDJfU2VhcmNoZXNfVG9vbHMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwNh8IBQhTdGFyVmlldx8JBR9odHRwOi8vc3RhcnZpZXcuc3RzY2kuZWR1L2h0bWwvZGQQFgYfBwU0X2N0bDJfU2VhcmNoZXNfVG9vbHMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwNx8IBQlBYnN0cmFjdHMfCQUnaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2Fic3RyYWN0cy5odG1sZGQQFgYfBwU0X2N0bDJfU2VhcmNoZXNfVG9vbHMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwOB8IBQdtb3JlLi4uHwkFJmh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS9zZWFyY2hlcy5odG1sZGRkZAIHDzwrAAUBAxQrAAEQFgofBwUaX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEfCAUOTWlzc2lvbiBTZWFyY2gfCQUmaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L21pc3Npb25zLmh0bWwfCwURLi4vTWVudXMvZG93bi5naWYfDAsrBAIUKwAdEBYGHwcFLl9jdGwyX01pc3Npb25zLW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDAfCAURIDxiPiBIdWJibGUgPC9iPiAfCQUnaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2hzdC9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMR8IBSAgPGI+IEh1YmJsZSBMZWdhY3kgQXJjaGl2ZSA8L2I+IB8JBSFodHRwOi8vaGxhLnN0c2NpLmVkdS9obGF2aWV3Lmh0bWxkZBAWBh8HBS5fY3RsMl9NaXNzaW9ucy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDAyHwgFFCA8Yj4gSFNUb25saW5lIDwvYj4gHwkFLWh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS9oc3RvbmxpbmUvc2VhcmNoLnBocGRkEBYGHwcFLl9jdGwyX01pc3Npb25zLW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDMfCAUjIDxiPiBIU1QgUHJlc3MgUmVsZWFzZSBJbWFnZXMgPC9iPiAfCQUoaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L3N0cHIvc2VhcmNoLnBocGRkEBYGHwcFLl9jdGwyX01pc3Npb25zLW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDQfCAUOIDxiPiBEU1MgIDwvYj4fCQUqaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2NnaS1iaW4vZHNzX2Zvcm0vZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwNR8IBRQgPGI+IEdBTEVYVmlldyAgPC9iPh8JBQsvR2FsZXhWaWV3L2RkEBYGHwcFLl9jdGwyX01pc3Npb25zLW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDYfCAUQIDxiPiBHQUxFWCAgPC9iPh8JBRMvR1I2Lz9w
YWdlPW1hc3Rmb3JtZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwNx8IBRsgPGI+IEpXU1QgU0lEIEFyY2hpdmUgIDwvYj4fCQUzaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2p3c3Qvc2lkYXJjaGl2ZS9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwOB8IBRUgPGI+IEtlcGxlciBEYXRhIDwvYj4fCQU2aHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2tlcGxlci9kYXRhX3NlYXJjaC9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwOR8IBRggPGI+IEtlcGxlciBUYXJnZXRzIDwvYj4fCQU1aHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2tlcGxlci9rZXBsZXJfZm92L3NlYXJjaC5waHBkZBAWBh8HBS5fY3RsMl9NaXNzaW9ucy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDEwHwgFEyA8Yj4gU3dpZnRVVk9UIDwvYj4fCQUtaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L3N3aWZ0dXZvdC9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAxMR8IBREgPGI+IFhNTS1PTSAgPC9iPh8JBSpodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUveG1tLW9tL3NlYXJjaC5waHBkZBAWBh8HBS5fY3RsMl9NaXNzaW9ucy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDEyHwgFDiBCRUZTIChPUkZFVVMpHwkFKGh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS9iZWZzL3NlYXJjaC5waHBkZBAWBh8HBS5fY3RsMl9NaXNzaW9ucy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDEzHwgFDyBDb3Blcm5pY3VzLXJhdx8JBS5odHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvY29wZXJuaWN1cy9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAxNB8IBREgQ29wZXJuaWN1cy1jb2FkZB8JBTRodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvY29wZXJuaWN1cy9jb2FkZC9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAxNR8IBQYgRVBPQ0gfCQU4aHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2Vwb2NoL2Vwb2NoX21hc3RfZGlyZWN0b3J5Lmh0bWxkZBAWBh8HBS5fY3RsMl9NaXNzaW9ucy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDE2HwgFBiBFVVZFIB8JBShodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvZXV2ZS9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAxNx8IBRFGVVNFIE9ic2VydmF0aW9ucx8JBShodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvZnVzZS9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAxOB8IBQ5GVVNFIEV4cG9zdXJlcx8JBTFodHRwOi8vYXJjaGl2ZS5z
dHNjaS5lZHUvZnVzZS9leHBvc3VyZS9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAxOR8IBQUgR1NDIB8JBTdodHRwOi8vZ3Nzcy5zdHNjaS5lZHUvd2Vic2VydmljZXMvR1NDMi9HU0MyV2ViRm9ybS5hc3B4ZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAyMB8IBQYgSFBPTCAfCQUoaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2hwb2wvc2VhcmNoLnBocGRkEBYGHwcFLl9jdGwyX01pc3Npb25zLW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMjEfCAUFIEhVVCAfCQUnaHR0cDovL2FyY2hpdmUuc3RzY2kuZWR1L2h1dC9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAyMh8IBRAgSU1BUFMgKE9SRkVVUykgHwkFKWh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS9pbWFwcy9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAyMx8IBQUgSVVFIB8JBSdodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvaXVlL3NlYXJjaC5waHBkZBAWBh8HBS5fY3RsMl9NaXNzaW9ucy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDI0HwgFDyBUVUVTIChPUkZFVVMpIB8JBShodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvdHVlcy9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAyNR8IBQUgVUlUIB8JBSdodHRwOi8vYXJjaGl2ZS5zdHNjaS5lZHUvdWl0L3NlYXJjaC5waHBkZBAWBh8HBS5fY3RsMl9NaXNzaW9ucy1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDI2HwgFCyBWTEEtRklSU1QgHwkFLGh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS92bGFmaXJzdC9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAyNx8IBQcgV1VQUEUgHwkFKWh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS93dXBwZS9zZWFyY2gucGhwZGQQFgYfBwUuX2N0bDJfTWlzc2lvbnMtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAyOB8IBQdtb3JlLi4uHwkFL2h0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS9zZWFyY2hlcy5odG1sI21pc3Npb25zZGRkZAIJDzwrAAUBAxQrAAEQFgYfBwUbX2N0bDJfVHV0b3JpYWxzLW1lbnVJdGVtMDAxHwgFCFR1dG9yaWFsHwkFLGh0dHA6Ly9hcmNoaXZlLnN0c2NpLmVkdS90dXRvcmlhbC9pbmRleC5odG1sZGRkAgsPPCsABQEDFCsAARAWCB8HBRxfY3RsMl9TaXRlU2VhcmNoLW1lbnVJdGVtMDAxHwgFC1NpdGUgU2VhcmNoHwkFEi4vP3BhZ2U9c2l0ZXNlYXJjaB8KBQt0b3BuYXZjb2xvcmRkZAINDzwrAAUBAxQrAAEQFggfBwUaX2N0bDJfRm9sbG93VXMtbWVudUl0ZW0wMDAfCAUJRm9sbG93IFVzHwsFES4uL01lbnVzL2Rvd24uZ2lmHwwLKwQCFCsAAhAWBh8HBS5fY3RsMl9Gb2xsb3dVcy1t
ZW51SXRlbTAwMC1zdWJNZW51LW1lbnVJdGVtMDAwHwgFCiBGYWNlYm9vayAfCQUjaHR0cDovL3d3dy5mYWNlYm9vay5jb20vTUFTVEFyY2hpdmVkZBAWBh8HBS5fY3RsMl9Gb2xsb3dVcy1tZW51SXRlbTAwMC1zdWJNZW51LW1lbnVJdGVtMDAxHwgFCSBUd2l0dGVyIB8JBR5odHRwczovL3R3aXR0ZXIuY29tL01BU1RfTmV3cy9kZGRkAgIPZBYEZg9kFgJmD2QWAgIBDzwrAAUBAxQrAAoQFggfBwUaX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDEfCAUSU2VhcmNoICYgUmV0cmlldmFsHwsFEi4uL01lbnVzL2Fycm93LmdpZh8MCysEAhQrAAMQFgYfBwUuX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMB8IBRVTb3VyY2UgQ2F0YWxvZyBTZWFyY2gfCQUQLi8/cGFnZT1tYXN0Zm9ybWRkEBYGHwcFLl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDEfCAUKU1FMIFNlYXJjaB8JBQ8uLz9wYWdlPXNxbGZvcm1kZBAWCB8HBS5fY3RsOF9sZWZ0TWVudS1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDAyHwgFC1RpbGUgU2VhcmNoHwsFEi4uL01lbnVzL2Fycm93LmdpZh8MCysEAhQrAAgQFgYfBwVCX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMi1zdWJNZW51LW1lbnVJdGVtMDAwHwgFGjxiPkFJUzwvYj46IEFsbCBTa3kgU3VydmV5Hg9JdGVtQ29tbWFuZE5hbWUFA2Fpc2RkEBYGHwcFQl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDItc3ViTWVudS1tZW51SXRlbTAwMR8IBR88Yj5ESVM8L2I+OiBEZWVwIEltYWdpbmcgU3VydmV5Hw0FA2Rpc2RkEBYGHwcFQl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDItc3ViTWVudS1tZW51SXRlbTAwMh8IBSE8Yj5NSVM8L2I+OiBNZWRpdW0gSW1hZ2luZyBTdXJ2ZXkfDQUDbWlzZGQQFgYfBwVCX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMi1zdWJNZW51LW1lbnVJdGVtMDAzHwgFIjxiPk5HUzwvYj46IE5lYXJieSBHYWxheGllcyBTdXJ2ZXkfDQUDbmdzZGQQFgYfBwVCX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMi1zdWJNZW51LW1lbnVJdGVtMDA0HwgFIzxiPkdJSTwvYj46IEd1ZXN0IEludmVzdGlnYXRvciBEYXRhHw0FA2dpaWRkEBYGHwcFQl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDItc3ViTWVudS1tZW51SXRlbTAwNR8IBR48Yj5DQUk8L2I+OiBDYWxpYnJhdGlvbiBTdXJ2ZXkfDQUDY2FpZGQQFgYfBwVCX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDEtc3ViTWVudS1tZW51SXRlbTAwMi1zdWJNZW51LW1lbnVJdGVtMDA2HwgFKDxiPlNQRUNUUkE8L2I+OiBGcm9tIEFsbCBBdmFpbGFibGUgVGlsZXMfDQUHc3BlY3RyYWRkEBYGHwcFQl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDItc3Vi
TWVudS1tZW51SXRlbTAwNx8IBRI8Yj5BTEwgU1VSVkVZUzwvYj4fDQUKYWxsc3VydmV5c2RkZGQQFgofBwUaX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDMfCAUTR3Vlc3QgSW52ZXN0aWdhdG9ycx8JBQ4uLz9wYWdlPWdpbGlzdB8LBRMuLi9NZW51cy9zZWN1cmUuZ2lmHwwLKwQCZGQQFggfBwUaX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDUfCAUNRG9jdW1lbnRhdGlvbh8LBRIuLi9NZW51cy9hcnJvdy5naWYfDAsrBAIUKwACEBYIHwcFLl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDA1LXN1Yk1lbnUtbWVudUl0ZW0wMDAfCAULPGI+TUFTVDwvYj4fCwUSLi4vTWVudXMvYXJyb3cuZ2lmHwwLKwQCFCsABRAWBh8HBUJfY3RsOF9sZWZ0TWVudS1tZW51SXRlbTAwNS1zdWJNZW51LW1lbnVJdGVtMDAwLXN1Yk1lbnUtbWVudUl0ZW0wMDAfCAUKSGlnaCBMZXZlbB8JBRIuLz9wYWdlPWdlbmVyYWxmYXFkZBAWBh8HBUJfY3RsOF9sZWZ0TWVudS1tZW51SXRlbTAwNS1zdWJNZW51LW1lbnVJdGVtMDAwLXN1Yk1lbnUtbWVudUl0ZW0wMDEfCAUHUXVlcmllcx8JBQ4uLz9wYWdlPXNxbGZhcWRkEBYGHwcFQl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDA1LXN1Yk1lbnUtbWVudUl0ZW0wMDAtc3ViTWVudS1tZW51SXRlbTAwMh8IBRBEYXRhIERlc2NyaXB0aW9uHwkFDS4vP3BhZ2U9ZGRmYXFkZBAWBh8HBUJfY3RsOF9sZWZ0TWVudS1tZW51SXRlbTAwNS1zdWJNZW51LW1lbnVJdGVtMDAwLXN1Yk1lbnUtbWVudUl0ZW0wMDMfCAUOVXNlciBTdWJtaXR0ZWQfCQUPLi8/cGFnZT11c2VyZmFxZGQQFgYfBwVCX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDUtc3ViTWVudS1tZW51SXRlbTAwMC1zdWJNZW51LW1lbnVJdGVtMDA1HwgFCFR1dG9yaWFsHwkFEC4vP3BhZ2U9dHV0b3JpYWxkZGQQFggfBwUuX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDUtc3ViTWVudS1tZW51SXRlbTAwMR8IBRM8Yj5DYWx0ZWNoIEZBUXM8L2I+HwsFEi4uL01lbnVzL2Fycm93LmdpZh8MCysEAhQrAAIQFgYfBwVCX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDUtc3ViTWVudS1tZW51SXRlbTAwMS1zdWJNZW51LW1lbnVJdGVtMDAwHwgFEENhbHRlY2ggTWV0YWRhdGEfCQULLi8/cGFnZT1mYXFkZBAWBh8HBUJfY3RsOF9sZWZ0TWVudS1tZW51SXRlbTAwNS1zdWJNZW51LW1lbnVJdGVtMDAxLXN1Yk1lbnUtbWVudUl0ZW0wMDEfCAURQ2FsdGVjaCBUZWNoIERvY3MfCQU1aHR0cDovL3d3dy5nYWxleC5jYWx0ZWNoLmVkdS9yZXNlYXJjaGVyL3RlY2hkb2NzLmh0bWxkZGRkEBYGHwcFGl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDA3HwgFDURhdGFiYXNlIEluZm8fCQUMP3BhZ2U9ZGJpbmZvZGQQFgYfBwUaX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMDkfCAUUQ29udHJpYnV0ZWQgU29mdHdhcmUfCQUQLi8/cGFnZT1zb2Z0d2FyZWRkEBYIHwcFGl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDExHwgFF0d1ZXN0IEludmVzdGlnYXRvciBTaXRlHwsFEi4uL01lbnVzL2Fycm93LmdpZh8MCysEAhQr
AAMQFgYfBwUuX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMTEtc3ViTWVudS1tZW51SXRlbTAwMB8IBQlIb21lIFBhZ2UfCQUdaHR0cDovL2dhbGV4Z2kuZ3NmYy5uYXNhLmdvdi9kZBAWBh8HBS5fY3RsOF9sZWZ0TWVudS1tZW51SXRlbTAxMS1zdWJNZW51LW1lbnVJdGVtMDAxHwgFD0luc3RydW1lbnRhdGlvbh8JBUFodHRwOi8vZ2FsZXhnaS5nc2ZjLm5hc2EuZ292L0RvY3VtZW50cy9FUk9fZGF0YV9kZXNjcmlwdGlvbl8yLmh0bWRkEBYGHwcFLl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDExLXN1Yk1lbnUtbWVudUl0ZW0wMDIfCAUNRGF0YSBQaXBlbGluZR8JBUFodHRwOi8vZ2FsZXhnaS5nc2ZjLm5hc2EuZ292L0RvY3VtZW50cy9FUk9fZGF0YV9kZXNjcmlwdGlvbl8zLmh0bWRkZBAWBh8HBRpfY3RsOF9sZWZ0TWVudS1tZW51SXRlbTAxMx8IBQ1SZWxhdGVkIFNpdGVzHwkFFC4vP3BhZ2U9cmVsYXRlZHNpdGVzZGQQFgYfBwUaX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMTUfCAUPQWNrbm93bGVkZ21lbnRzHwkFFy4vP3BhZ2U9YWNrbm93bGVkZ21lbnRzZGQQFhQfCQULL0dhbGV4Vmlldy8eD01lbnVJdGVtVG9vbFRpcAUdR2FsZXhWaWV3IChRdWljayBTZWFyY2gpIFRvb2weCkl0ZW1UYXJnZXQFBl9ibGFuax4QSXRlbUltYWdlQWx0VGV4dAUdR2FsZXhWaWV3IChRdWljayBTZWFyY2gpIFRvb2wfCAUKR2FsZXhWaWV3Oh4NTWVudUl0ZW1XaWR0aBsAAAAAAMBiQAEAAAAfBwUaX2N0bDhfbGVmdE1lbnUtbWVudUl0ZW0wMTYeDkl0ZW1SaWdodEltYWdlBRlpbWFnZXMvR2FsZXhWaWV3VGh1bWIucG5nHhNJdGVtUmlnaHRJbWFnZUFsaWduCysEAR4OTWVudUl0ZW1IZWlnaHQbAAAAAADAYkABAAAAZGQQFhQfCQUJL2Nhc2pvYnMvHw4FG0Nhc0pvYnMgKERhdGFiYXNlIFNRTCkgVG9vbB8PBQZfYmxhbmsfEAUbQ2FzSm9icyAoRGF0YWJhc2UgU1FMKSBUb29sHwgFCENhc0pvYnM6HxEbAAAAAADAYkABAAAAHwcFGl9jdGw4X2xlZnRNZW51LW1lbnVJdGVtMDE3HxIFF2ltYWdlcy9DYXNKb2JzVGh1bWIucG5nHxMLKwQBHxQbAAAAAABAYEABAAAAZGRkAgEPZBYCZg8PFgQeCXNvcnRPcmRlcgUHcmFfY2VudB4Mc2hvd0FsbFRpbGVzBQVmYWxzZWQWBAIBDw8WAh4EVGV4dAXKAjxiPlRoZXJlIGFyZSA0NTE5NCB0b3RhbCB0aWxlcyBpbiBhbGwgdGhlIEdBTEVYIHN1cnZleXMuPC9iPjxicj48Zm9udCBzaXplPSctMScgY29sb3I9J2dyYXknPlBsZWFzZSBub3RlOiBTZWFyY2hlcyBpbiB0aGlzIHBhZ2UgYXBwbHkgb25seSB0byBUSUxFIGxldmVsIHByb2R1Y3RzLjxicj5JZiB5b3Ugd2FudCB0byBzZWFyY2ggR0FMRVggb2JqZWN0IGNhdGFsb2dzLCBwbGVhc2UgdXNlIGVpdGhlciB0aGUgPGEgaHJlZj0nP3BhZ2U9bWFzdGZvcm0nPkNhdGFsb2cgT2JqZWN0IFNlYXJjaDwvYT4gb3IgdGhlIDxhIGhyZWY9Jz9wYWdlPXNxbGZvcm0nPlNRTCBTZWFyY2g8L2E+LmRkAhUPDxYCHgdWaXNpYmxlaGQWAgIDDzwrAAsAZAIDD2QWAmYPZBYCZg9kFgICBQ8PFgIf
FwUrTGFzdCBNb2RpZmllZCBEYXRlOjxicj4xMi81LzIwMTYgMTo1MTozOSBQTWRkZBOt2pbUX66uvUbSuy3q9kQU8fEC', '__VIEWSTATEGENERATOR': 'C84C2718', '__EVENTVALIDATION': '/wEdAAue+6xrb6xgp2ityzurA/pfWsTF2CBs9ziYHlDmus7EnHXVqisK/ch+FuYDN4RJj9bNygAwoalISibjyjYgoB7/Pb1PMsXU2LG7o+i6/zoft2ZmqVWZEJyWTGlJer/5/ymk9SeG9Y8RLbkbyiuf4BcRXP2SoyGCMZyu6LfyUjL5ZgAB13huDNxtBirRDFLR6zW3raPnQUy5sK21W/3eiEs/KUQOVtp9GallVy/IsFMIp4yMEruOYx0KrV7GUndYi0m5y40+', '_ctl10:txtTargetName': '', '_ctl10:resolverDropList': 'SIMBAD', '_ctl10:txtRadius': '0.4', '_ctl10:txtRA': ra,
def Hopach(self, cluster_method, metric_gene, force_gene, metric_array, force_array):
    """Run HOPACH hierarchical clustering in R on the expression file for
    this object, clustering genes (rows), arrays (columns), or both.

    cluster_method: 'gene', 'array', or 'both' -- selects which dimension(s)
        to cluster.
    metric_gene / metric_array: distance-metric names, formatted for R via
        self.format_value_for_R().
    force_gene / force_array: when non-empty/non-zero, force the number of
        clusters by passing kmax/khigh to hopach().
    Side effects: writes cluster output files via R's makeoutput()/
    hopach2tree(); appends error strings to the module-level `errors` list.
    NOTE(review): `r` is presumably the rpy/PypeR bridge created at module
    level, and `forceError` is an undefined name used to abort via NameError
    -- confirm against the rest of the file.
    """
    # Make sure the "hopach" R package is available, installing it from
    # Bioconductor on first use.
    print_out = r('library("hopach")')
    if "Error" in print_out:
        print 'Installing the R package "hopach" in Config/R'
        print_out = r('source("http://bioconductor.org/biocLite.R"); biocLite("hopach")')
        if "Error" in print_out:
            print 'unable to download the package "hopach"'
            forceError  # deliberate NameError: abort when install failed
        print_out = r('library("hopach")')
    filename = self.File()
    #r('memory.limit(2000)')
    print "Begining to process", filename, "using HOPACH"
    metric_g = self.format_value_for_R(metric_gene)
    metric_a = self.format_value_for_R(metric_array)
    # Load the tab-delimited expression table into R as a numeric matrix.
    parse_line = 'data<-read.table(%s,sep="\t",as.is=T,row.names=1,header=T)' % filename
    checklinelengths(self._file)
    print_out = r(parse_line)
    #print parse_line
    dat = r['data']
    #print "Number of columns in input file:",len(dat)
    print_out = r('data<-as.matrix(data)')
    dat = r['data']
    #print "Number of columns in matrix:",len(dat)
    force1 = ''
    force2 = ''
    hopg = 'NULL'
    hopa = 'NULL'
    distmatg = 'NULL'
    distmata = 'NULL'  ### defaults for tree export
    # Optional forced cluster counts, appended to the hopach() call.
    if force_gene != '' and force_gene != 0:
        force1 = ',kmax=' + str(force_gene) + ', khigh=' + str(force_gene)
    if force_array != '' and force_array != 0:
        force2 = ',kmax=' + str(force_array) + ', khigh=' + str(force_array)
    # --- cluster genes (rows) ---
    if cluster_method == 'both' or cluster_method == 'gene':
        distance_matrix_line = 'distmatg<-distancematrix(data,d=%s)' % metric_g
        #print distance_matrix_line
        if len(dat) > 1:
            print_out1 = r(distance_matrix_line)
            print_out2 = r('hopg<-hopach(data,dmat=distmatg,ord="own"' + force1 + ')')
            #print 'hopg<-hopach(data,dmat=distmatg,ord="own"'+force1+')'
            try:
                hopach_run = r['hopg']
            except Exception:
                # hopach failed in R; show the captured R output and bail.
                print print_out1
                print print_out2
                sys.exit()
            hopg = 'hopg'
            distmatg = 'distmatg'
            gene_output = self.HopachGeneOutputFilename(metric_gene, str(force_gene))
            output = 'out<-makeoutput(data,hopg,file=%s)' % gene_output
            #print output
            print_out = r(output)
            output_file = r['out']
            # Only keep results when hopach produced a usable cluster order.
            status = 'stop'
            if 'clustering' in hopach_run:
                if 'order' in hopach_run['clustering']:
                    try:
                        if len(hopach_run['clustering']['order']) > 10:
                            status = 'continue'
                    except TypeError:
                        error = 'file: ' + filename + ": Hopach returned the array of cluster orders as blank while clustering GENES... can not process cluster... continuing with other files"
                        print error
                        errors.append(error)
            if status == 'continue':
                r(output_file)
                print 'hopach output written'
        else:
            error = 'file: ' + filename + " Hopach returned data-matrix length zero...ARRAY clusters can not be generated"
            print error
            errors.append(error)
    # --- cluster arrays (columns): same flow on the transposed matrix ---
    if cluster_method == 'both' or cluster_method == 'array':
        distance_matrix_line = 'distmata<-distancematrix(t(data),d=%s)' % metric_a
        if len(dat) > 1:
            dist = r(distance_matrix_line)
            #print distance_matrix_line
            print_out = r('hopa<-hopach(t(data),dmat=distmata,ord="own"' + force2 + ')')
            #print 'hopa<-hopach(t(data),dmat=distmata,ord="own"'+force2+')'
            hopach_run = r['hopa']
            hopa = 'hopa'
            distmata = 'distmata'
            array_output = self.HopachArrayOutputFilename(metric_array, str(force_array))
            output = 'out<-makeoutput(t(data),hopa,file=%s)' % array_output
            #print output
            print_out = r(output)
            output_file = r['out']
            status = 'stop'
            if 'clustering' in hopach_run:
                if 'order' in hopach_run['clustering']:
                    try:
                        if len(hopach_run['clustering']['order']) > 10:
                            status = 'continue'
                    except TypeError:
                        error = 'file: ' + filename + ": Hopach returned the array of cluster orders as blank while clustering ARRAYS... can not process cluster"
                        print error
                        errors.append(error)
            if status == 'continue':
                r(output_file)
                print 'hopach output written'
        else:
            error = 'file: ' + filename + "data-matrix length zero...ARRAY clusters can not be generated...continuing analysis"
            print error
            errors.append(error)
    # --- export a TreeView-compatible .cdt tree combining both results ---
    if len(metric_g) == 0:
        metric_g = 'NULL'
    if len(metric_a) == 0:
        metric_a = 'NULL'
    try:
        # gene_output is only bound when the gene branch ran; the except
        # below silently skips tree export otherwise.
        output_filename = string.replace(gene_output, 'rows.', '')
        cdt_output_line = 'hopach2tree(data, file = %s, hopach.genes = %s, hopach.arrays = %s, dist.genes = %s, dist.arrays = %s, d.genes = %s, d.arrays = %s, gene.wts = NULL, array.wts = NULL, gene.names = NULL)' % (output_filename, hopg, hopa, distmatg, distmata, metric_g, metric_a)  ###7 values
    except Exception:
        None
    #make_tree_line = 'makeTree(labels, ord, medoids, dist, side = "GENE")' ### Used internally by HOPACH
    #print cdt_output_line
    try:
        print_out = r(cdt_output_line)
    except Exception:
        None
def findParentDir(filename):
    """Return the directory portion of *filename*, up to and including the
    final '/'.

    Backslashes are normalized to forward slashes and doubled '//' collapsed
    first (single pass each, as before). Returns '' when the path contains
    no separator at all.

    Fixes two edge cases in the original, which computed the cut point as
    ``string.find(filename[::-1], '/') * -1``:
      * no separator: find() returns -1, so it returned the FIRST character;
      * trailing separator: find() returns 0, so it returned ''.
    Also replaces the Python-2-only string-module functions with str
    methods, which behave identically on Python 2 and 3.
    """
    filename = filename.replace('//', '/')
    filename = filename.replace('\\', '/')
    cut = filename.rfind('/')  # -1 when absent -> slice [:0] -> ''
    return filename[:cut + 1]
import math import random import copy import os import os.path try: from rpy import r print "\n---------Using RPY---------\n" except Exception: from pyper import * print "\n---------Using PypeR---------\n" r = R(use_numpy=True) ### Create a Directory for R packages in the AltAnalyze program directory (in non-existant) r_package_path = string.replace(os.getcwd() + '/Config/R', '\\', '/') ### R doesn't link \\ try: os.mkdir(r_package_path) except Exception: None ### Set an R-package installation path command = '.libPaths("' + r_package_path + '")' r(command) ### doesn't work with %s for some reason #print_out = r('.libPaths()');print print_out; sys.exit() def remoteMonocle(samplelogfile, genes=[], expPercent=50, pval=0.01,
def cleanUpLine(line):
    """Strip line terminators ('\\n', '\\r'), the literal two-character
    sequence backslash-'c', and double-quote characters from *line*.

    Behavior is unchanged from the original; only the Python-2-only
    ``string.replace`` calls are replaced with str methods, which work
    identically on Python 2 and 3. Note the original spelled the
    backslash-c sequence as '\\c' -- not a recognized escape, so it is the
    two characters backslash + 'c'; written explicitly here.
    """
    line = line.replace('\n', '')
    line = line.replace('\\c', '')  # literal backslash followed by 'c'
    data = line.replace('\r', '')
    data = data.replace('"', '')
    return data
def _checkFunction(module, func, c=None, main=0, in_class=0):
    "Return a list of Warnings found in a function/method."
    # NOTE(review): Python 2 only (py2 raise syntax, func_globals,
    # string-module functions). `module`/`func` are the checker's wrapper
    # objects; `c` is the enclosing class wrapper when checking a method;
    # `main` flags module top-level code; `in_class` flags a class body.
    # Returns a 5-tuple: (warnings, globalRefs, functionsCalled,
    # code objects, returnValues).

    # always push a new config object, so we can pop at end of function
    utils.pushConfig()

    code = CodeChecks.Code()
    code.init(func)
    if main:
        # Pre-seed module-level globals as "used" (-1 line) so they are not
        # reported as unused locals of the pseudo-function for main code.
        for key in func.function.func_globals.keys():
            code.unusedLocals[key] = -1
    codeSource = CodeChecks.CodeSource(module, func, c, main, in_class, code)
    try:
        _checkCode(code, codeSource)
        if not in_class:
            _findUnreachableCode(code)

        # handle lambdas and nested functions
        codeSource.calling_code.append(func)
        for func_code in code.codeObjects.values():
            _handleNestedCode(func_code, code, codeSource)
        del codeSource.calling_code[-1]

    except (SystemExit, KeyboardInterrupt):
        # user-initiated aborts propagate unchanged (Python 2 re-raise form)
        exc_type, exc_value, exc_tb = sys.exc_info()
        raise exc_type, exc_value
    except:
        # any other failure is a bug in the checker itself: report it as a
        # warning on the code being checked rather than crashing the run
        exc_type, exc_value, exc_tb = sys.exc_info()
        exc_list = traceback.format_exception(exc_type, exc_value, exc_tb)
        for index in range(0, len(exc_list)):
            exc_list[index] = string.replace(exc_list[index], "\n", "\n\t")
        code.addWarning(msgs.CHECKER_BROKEN % string.join(exc_list, ""))

    if cfg().checkReturnValues:
        _checkReturnWarnings(code)

    # Unused-local warnings: line > 0 means assigned but never read.
    if cfg().localVariablesUsed:
        for var, line in code.unusedLocals.items():
            if line is not None and line > 0 and _name_unused(var):
                code.addWarning(msgs.UNUSED_LOCAL % var, line)

    if cfg().argumentsUsed:
        # Skip the check when the body is a bare raise/return stub.
        op = code.getFirstOp()
        if not (OP.RAISE_VARARGS(op) or OP.RETURN_VALUE(op)):
            for var, line in code.unusedLocals.items():
                _checkUnusedParam(var, line, func, code)

    # Check code complexity:
    #    loops should be counted as one branch, but there are typically 3
    #    branches in byte code to setup a loop, so subtract off 2/3's of them
    #    / 2 to approximate real branches
    branches = (len(code.branches.keys()) - (2 * code.loops)) / 2
    lines = (code.getLineNum() - code.func_code.co_firstlineno)
    returns = len(code.returnValues)
    if not main and not in_class:
        # arg/local/line limits only make sense for real functions/methods
        args = code.func_code.co_argcount
        locals = len(code.func_code.co_varnames) - args
        _checkComplex(code, cfg().maxArgs, args, func, msgs.TOO_MANY_ARGS)
        _checkComplex(code, cfg().maxLocals, locals, func, msgs.TOO_MANY_LOCALS)
        _checkComplex(code, cfg().maxLines, lines, func, msgs.FUNC_TOO_LONG)
    _checkComplex(code, cfg().maxReturns, returns, func, msgs.TOO_MANY_RETURNS)
    _checkComplex(code, cfg().maxBranches, branches, func, msgs.TOO_MANY_BRANCHES)

    if not (main or in_class):
        utils.popConfig()
    func.returnValues = code.returnValues
    return (code.warnings, code.globalRefs, code.functionsCalled,
            code.codeObjects.values(), code.returnValues)
def AffyNormalization(self, normalization_method, probe_level, batch_effects): print 'Loading affy package in R' print_out = r('library("affy")') if "Error" in print_out: #print_out = r('install.packages("ggplot2", repos="http://cran.us.r-project.org")') print 'Installing the R package "affy" in Config/R' print_out = r( 'source("http://bioconductor.org/biocLite.R"); biocLite("affy")' ) if "Error" in print_out: print 'unable to download the package "affy"' forceError print_out = r('library("affy")') if 'gcrma' in normalization_method: print 'Loading gcrma package in R' print_out = r('library("gcrma")') if "Error" in print_out: print 'Installing the R package "gcrma" in Config/R' print_out = r( 'source("http://bioconductor.org/biocLite.R"); biocLite("gcrma")' ) if "Error" in print_out: print 'unable to download the package "gcrma"' forceError print_out = r('library("gcrma")') if batch_effects == 'remove': ### Import or download support for SVA/Combat print 'Loading sva package in R' print_out = r('library("sva")') if "Error" in print_out: print 'Installing the R package "sva" in Config/R' print_out = r( 'source("http://bioconductor.org/biocLite.R"); biocLite("sva")' ) if "Error" in print_out: print 'unable to download the package "sva"' forceError print_out = r('library("sva")') print "Reading Affy files..." 
print_out = r('rawdata<-ReadAffy()') print print_out r('setwd("ExpressionInput")') if probe_level: ### normalize at the level of probes rahter than probeset (e.g., alt.exon analysis of 3' array) print_out = r('PM<-probes(rawdata,which="pm")') print print_out print_out = r('AffyInfo<-dimnames(PM)[[1]]') print print_out print_out = r('cutpos<-regexpr("\\d+$",AffyInfo,perl=T)') print print_out print_out = r('AffyID<-substr(AffyInfo,1,cutpos-1)') print print_out print_out = r( 'probe<-as.numeric(substr(AffyInfo,cutpos,nchar(AffyInfo)))') print print_out print_out = r('data.bgc<-bg.correct(rawdata,method="rma")') print print_out print_out = r( 'data.bgc.q<-normalize.AffyBatch.quantiles(data.bgc,type="pmonly")' ) print print_out print_out = r('pm.bgc.q<-probes(data.bgc.q,which="pm")') print print_out print_out = r('normalized<-cbind(AffyID,probe,pm.bgc.q)') print print_out command = 'write.table(normalized,file=' + self.File( ) + ',sep="\t",row.names=FALSE, quote=FALSE)' print_out = r(command) print print_out print 'probe-level normalization complete' else: print "Begining %s normalization (will install array annotations if needed)... be patient" % normalization_method print_out = r('normalized<-%s(rawdata)') % normalization_method print print_out command = 'write.exprs(normalized,' + self.File() + ')' print_out = r(command) print print_out print self.File(), 'written...' if batch_effects == 'remove': ### Import data command = 'mod = model.matrix(~as.factor(cancer) + age, data=pheno)' print_out = r(command) command = 'cdata = ComBat(dat=normalized, batch=as.factor(pheno$batch), mod=mod, numCov=match("age", colnames(mod)))' print_out = r(command) command = 'write.table(cdata,file=' + self.File( ) + ',sep="\t",row.names=FALSE, quote=FALSE)' print_out = r(command) output_file = string.replace(self.File(), 'exp.', 'stats.') print_out = r('calls<-mas5calls(rawdata)') #print_out = r('pvals<-se.exprs(calls)') ### outdated? 
print_out = r('pvals<-assayData(calls)[["se.exprs"]]') command = 'write.table(pvals,' + output_file + ',sep = "\t", col.names = NA)' print_out = r(command) print output_file, 'written...'
def get_platform():
    """Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories
    and platform-specific built distributions. Typically includes the OS
    name and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    For non-POSIX platforms, currently just returns 'sys.platform'.

    Fix vs. original: ``string.lower``/``string.replace`` were removed in
    Python 3; the equivalent str methods used here behave identically on
    both Python 2 and 3.
    """
    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix
    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_').replace('/', '-')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":  # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix":  # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        rel_re = re.compile(r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        #
        # For our purposes, we'll assume that the system version from
        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
        # to. This makes the compatibility story a bit more sane because the
        # machine is going to compile and link as if it were
        # MACOSX_DEPLOYMENT_TARGET.
        from distutils.sysconfig import get_config_vars
        cfgvars = get_config_vars()

        macver = os.environ.get('MACOSX_DEPLOYMENT_TARGET')
        if not macver:
            macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')

        if not macver:
            # Get the system version. Reading this plist is a documented
            # way to get the system version (see the documentation for
            # the Gestalt Manager)
            try:
                f = open('/System/Library/CoreServices/SystemVersion.plist')
            except IOError:
                # We're on a plain darwin box, fall back to the default
                # behaviour.
                pass
            else:
                m = re.search(
                    r'<key>ProductUserVisibleVersion</key>\s*' +
                    r'<string>(.*?)</string>', f.read())
                f.close()
                if m is not None:
                    macver = '.'.join(m.group(1).split('.')[:2])
                # else: fall back to the default behaviour

        if macver:
            from distutils.sysconfig import get_config_vars
            release = macver
            osname = "macosx"

            if (release + '.') >= '10.4.' and \
                    get_config_vars().get('UNIVERSALSDK', '').strip():
                # The universal build will build fat binaries, but not on
                # systems before 10.4
                machine = 'fat'
            elif machine in ('PowerPC', 'Power_Macintosh'):
                # Pick a sane name for the PPC architecture.
                machine = 'ppc'

    return "%s-%s-%s" % (osname, release, machine)
# Extract translatable attribute strings from the XML files named on the
# command line (or found in directories named on the command line) and emit
# gettext .po stanzas (msgid/msgstr) on stdout. `parseXML`, `parser`,
# `no_comments` and `property_lexical_handler` are defined earlier in the
# file.
attrlist = set()
contentHandler = parseXML(attrlist)
parser.setContentHandler(contentHandler)
if not no_comments:
    # lexical handler is needed to receive XML comments ("#." translator notes)
    parser.setProperty(property_lexical_handler, contentHandler)
for arg in sys.argv[1:]:
    if os.path.isdir(arg):
        for file in os.listdir(arg):
            if (file.endswith(".xml")):
                parser.parse(os.path.join(arg, file))
    else:
        parser.parse(arg)
    # Sort collected (text, comment) pairs for a stable .po output order.
    attrlist = list(attrlist)
    attrlist.sort(key=lambda a: a[0])
    for (k, c) in attrlist:
        print()
        print('#: ' + arg)
        # FIX: the original called string.replace(k, ...) and discarded the
        # result (a no-op, and string.replace no longer exists in Python 3).
        # Escape embedded newlines so multi-line msgids stay valid .po.
        k = k.replace("\\n", "\"\n\"")
        if c:
            for l in c.split('\n'):
                print("#. ", l)
        print('msgid "' + str(k) + '"')
        print('msgstr ""')
    # Reset the accumulator so each input file gets its own stanza block.
    attrlist = set()
def cleanupWordsString(str):
    """Replace punctuation, markup characters, line terminators and the
    UTF-8 non-breaking-space bytes in *str* with single spaces.

    The parameter name shadows the builtin ``str``; it is kept for
    backward compatibility with existing callers.

    Fix vs. original: the 23 copy-pasted ``string.replace`` calls used the
    Python-2-only string module (removed in Python 3); a data-driven loop
    of ``str.replace`` calls performs the identical substitutions on both
    Python 2 and 3.
    """
    for ch in ('.', '!', '?', ',', "'", '"', ';', '(', ')', '{', '}',
               '<', '>', '=', '/', '*', ':', '#', '\\', '\n', '\r',
               '\xc2', '\xa0'):
        str = str.replace(ch, ' ')
    return str
#!/usr/bin/env python import sys import numpy as nmp from os import system from netCDF4 import Dataset from string import replace if len(sys.argv) != 2: print 'Usage: ' + sys.argv[0] + ' <nemo_bathy.nc>' sys.exit(0) cf_old = sys.argv[1] cf_new = replace(cf_old, '.nc', '_new.nc') cv_bathy = 'Bathymetry' # First, creating a copy: system('rm -f ' + cf_new) system('cp ' + cf_old + ' ' + cf_new) print '\n' # Opening the Netcdf file: f_new = Dataset(cf_new, 'r+') print 'File ', cf_new, 'is open...\n' Xbathy = f_new.variables[cv_bathy][:, :] # Edit zone:
def safely_replace(string: str, replaced_substring: str, new_substring: str) -> str:
    """Replace every occurrence of *replaced_substring* in *string* with
    *new_substring*.

    Unlike plain ``str.replace``, this refuses to silently do nothing:
    a ValueError is raised when *replaced_substring* does not occur in
    *string* at all.
    """
    if replaced_substring in string:
        return string.replace(replaced_substring, new_substring)
    raise ValueError(f'Expecting {replaced_substring} to be in {string}.')
for (extdirpath, extdirnames, extfilenames) in os.walk(extractionDir): for extfname in extfilenames: #TODO check for actual output name because there will be more than 1 bin file for t16 if extfname[-4:] == ".bin": runLine = False if (not os.path.isdir(extractionDir) or runLine) and rightTracker: mmfFile = dirpath + os.sep + filename pathComponents = dirpath.split(os.sep) dirLength = len(pathComponents) lineHandle = functions.sanitizeForFileName(lineName) batchScriptDir = os.path.dirname(fullProjectDirPath) + os.sep + settings.batchScriptDirProjectsSuffix + os.sep + 'extraction' if not os.path.exists(batchScriptDir): os.makedirs(batchScriptDir) shfile = batchScriptDir + '/' + lineHandle + '_' + tracker + '.sh' if not os.path.isdir(extractionDir): os.makedirs(extractionDir) shfile = batchScriptDir + '/' + lineHandle + '_' + tracker + '.sh' extractFile = extractionDir + os.sep + lineHandle + '.bxx' functions.createExtractFile(extractFile, mmfFile, extractionDir, extractionOptions) functions.createQsubExtractFile(shfile, extractFile, settings.settingsDir, os.path.dirname(mmfFile)) jobName = 'ext' + lineHandle + '_' + str(njob) ; qsubOut = shfile[:-3] + '.log' #qsubCommand = 'qsub -l short -N ' + settings.supplementaryQsubParams[tracker] + 'ext_' + string.replace(lineName[:11],'@','_') + ' -j y -b y -o ' + qsubOut + ' -cwd ' + shfile qsubCommand = 'bsub -J ' + settings.supplementaryQsubParams[tracker] + 'ext_' + string.replace(lineName[:11],'@','_') + ' -o ' + qsubOut + ' ' + shfile os.system("chmod 755 " + shfile) #qsubCommand = shfile njob = njob + 1 functions.addQsubToQueue(qsubCommand) functions.runAllQsubsToCompletion()
def create(self):
    """Build a DepMan artifact (tar.gz + .md5 [+ descriptor .xml]) from
    self._path and optionally upload it to an SSH server via scp.

    Coordinates come either from interactive prompts (self._is_Interactive)
    or from the <package> descriptor node (self._packageNode); they are then
    validated against the DepMan plugin's supported values.
    Returns False on any validation/setup error, None otherwise.
    NOTE(review): Python 2 only (print statements, raw_input, has_key,
    string.replace).
    """
    self._dmplugin = PlugInManager().getPlugInInstance("DepManPlugIn")
    if self._dmplugin == None:
        self.reportError("PlugIn `depman` not found")
        return False
    #user questions
    if self._is_Interactive:
        # Prompt for every coordinate; empty answers fall back to the
        # OS-detected default shown in the "[*]" placeholder.
        self._group = raw_input("Group: ")
        self._artifact = raw_input("Artifact: ")
        self._version = raw_input("Version: ")
        tmpstr = ""
        for p in self._dmplugin.getSupportedPlatforms():
            tmpstr = tmpstr + p + " "
        print "( ", tmpstr, ")"
        tmstr = "Platform [*]:"
        self._platform = raw_input(
            string.replace(tmstr, "*", self._dmplugin.getOSPlatform()))
        if len(self._platform) == 0:
            self._platform = self._dmplugin.getOSPlatform()
        tmpstr = ""
        for p in self._dmplugin.getSupportedCompilers():
            tmpstr = tmpstr + p + " "
        print "( ", tmpstr, ")"
        tmstr = "Compiler [*]:"
        self._compiler = raw_input(
            string.replace(tmstr, "*", self._dmplugin.getOSCompiler()))
        if len(self._compiler) == 0:
            self._compiler = self._dmplugin.getOSCompiler()
        tmpstr = ""
        for p in self._dmplugin.getSupportedArchs():
            tmpstr = tmpstr + p + " "
        print "( ", tmpstr, ")"
        tmstr = "Architecture [*]:"
        self._arch = raw_input(
            string.replace(tmstr, "*", self._dmplugin.getOSArch()))
        if len(self._arch) == 0:
            self._arch = self._dmplugin.getOSArch()
        tmpstr = ""
        for p in self._dmplugin.getSupportedLibraryTypes():
            tmpstr = tmpstr + p + " "
        print "( ", tmpstr, ")"
        self._ltype = raw_input("Library Type: ")
        upload_response = raw_input("Upload to server? (y/n): ")
        if upload_response == "y" or upload_response == "yes":
            self._upload = True
    if self._packageNode != None:
        # Non-interactive: read coordinates from the XML descriptor. A
        # "platform" attribute restricts a <package> node to listed
        # platforms; otherwise it always applies.
        for n in self._packageNode.childNodes:
            if n.localName == "package":
                processPackage = True
                if n.hasAttributes():
                    if n.attributes.has_key("platform"):
                        values = n.attributes.get("platform").value.split(",")
                        if self._dmplugin.getOSPlatform() in values:
                            processPackage = True
                        else:
                            processPackage = False
                if processPackage:
                    print "Processing for platform..."
                    for p in n.childNodes:
                        if p.localName == "group":
                            self._group = p.childNodes[0].nodeValue
                        if p.localName == "artifact":
                            self._artifact = p.childNodes[0].nodeValue
                        if p.localName == "version":
                            self._version = p.childNodes[0].nodeValue
                        if p.localName == "platform":
                            self._platform = p.childNodes[0].nodeValue
                        if p.localName == "compiler":
                            self._compiler = p.childNodes[0].nodeValue
                        if p.localName == "arch":
                            self._arch = p.childNodes[0].nodeValue
                        if p.localName == "libraryType":
                            self._ltype = p.childNodes[0].nodeValue
                        if p.localName == "upload":
                            #TODO: Maybe upload should be an external plugin
                            for k in p.childNodes:
                                if k.localName == "sshserver":
                                    self._default_ssh = k.childNodes[0].nodeValue
                                if k.localName == "destdir":
                                    self._default_destdir = k.childNodes[0].nodeValue
                                if k.localName == "username":
                                    self._default_login = k.childNodes[0].nodeValue
    # Validate mandatory coordinates.
    if self._group == "":
        self.reportError("Group cannot be empty")
        return False
    if self._artifact == "":
        self.reportError("Artifact cannot be empty")
        return False
    if self._version == "":
        # NOTE(review): message has a typo ("cannog") and this branch
        # returns None where the others return False.
        self.reportError("Version cannog be empty")
        return
    # Group is dotted (java-style); convert to a path.
    self._group = self._group.replace(".", "/")
    if self._platform == "default":
        self._platform = self._dmplugin.getOSPlatform()
    if self._compiler == "default":
        self._compiler = self._dmplugin.getOSCompiler()
    if self._arch == "default":
        self._arch = self._dmplugin.getOSArch()
    #let's check user input consistency
    if self._platform not in self._dmplugin.getSupportedPlatforms():
        self.reportError("Platform not supported: " + self._platform +
                         ". Supported platforms:" +
                         str(self._dmplugin.getSupportedPlatforms()))
        return False
    if self._compiler not in self._dmplugin.getSupportedCompilers():
        self.reportError("Compiler not supported: " + self._compiler +
                         ". Supported compilers:" +
                         str(self._dmplugin.getSupportedCompilers()))
        return False
    if self._arch not in self._dmplugin.getSupportedArchs():
        self.reportError("Architecture not supported: " + self._arch +
                         ". Supported archs:" +
                         str(self._dmplugin.getSupportedArchs()))
        return False
    if self._ltype not in self._dmplugin.getSupportedLibraryTypes():
        self.reportError("Library type not supported: " + self._ltype +
                         ". Supported libraries:" +
                         str(self._dmplugin.getSupportedLibraryTypes()))
        return False
    #artifact and md5 generation
    # Canonical artifact name: artifact-version-platform-compiler-arch-ltype
    file_name = self._artifact + "-" + self._version + "-" + self._platform + "-" + self._compiler + "-" + self._arch + "-" + self._ltype
    tarname = file_name + ".tar.gz"
    md5name = tarname + ".md5"
    dmfile = file_name + ".xml"
    dmutil = BMUtil()
    # Local cache layout mirrors the coordinate hierarchy.
    tmpdir = self._dmplugin.getDepManPath() + os.path.sep + ".cache" + os.path.sep + self._group + os.path.sep + self._artifact + os.path.sep + self._version + os.path.sep + self._platform + os.path.sep + self._compiler + os.path.sep + self._arch + os.path.sep + self._ltype
    dmutil.mkdir(tmpdir)
    print tmpdir + os.path.sep + tarname, self._path
    dmutil.targz(os.path.join(tmpdir, tarname), self._path)
    #print "targz ",tmpdir+os.path.sep+tarname
    dmutil.createMD5(tmpdir + os.path.sep + tarname, tmpdir + os.path.sep + md5name)
    if self._xmlfile != "":
        shutil.copyfile(self._xmlfile, tmpdir + os.path.sep + dmfile)
    print "Artifact " + tarname + " created in:\n" + tmpdir
    if self._upload:
        # Stage the artifact under a throwaway directory so scp -r can
        # recreate the coordinate hierarchy on the server.
        dmutil.rmdir(".dmn_tmp")  # Prevent for uploading bad previous compilations!
        sshtmpdir = ".dmn_tmp" + os.path.sep + self._group + os.path.sep + self._artifact + os.path.sep + self._version + os.path.sep + self._platform + os.path.sep + self._compiler + os.path.sep + self._arch + os.path.sep + self._ltype
        dmutil.mkdir(sshtmpdir)
        shutil.copyfile(tmpdir + os.path.sep + tarname, sshtmpdir + os.path.sep + tarname)
        shutil.copyfile(tmpdir + os.path.sep + md5name, sshtmpdir + os.path.sep + md5name)
        if self._xmlfile != "":
            shutil.copyfile(tmpdir + os.path.sep + dmfile, sshtmpdir + os.path.sep + dmfile)
        url = self._default_ssh
        destdir = self._default_destdir
        login = self._default_login
        if self._is_Interactive:
            # Same prompt-with-default pattern as above.
            tmstr = "SSH Server [*]:"
            url = raw_input(string.replace(tmstr, "*", self._default_ssh))
            if len(url) == 0:
                url = self._default_ssh
            tmstr = "Destination Directory [*]:"
            destdir = raw_input(
                string.replace(tmstr, "*", self._default_destdir))
            if len(destdir) == 0:
                destdir = self._default_destdir
            tmstr = "Login [*]:"
            login = raw_input(
                string.replace(tmstr, "*", self._default_login))
            if len(login) == 0:
                login = self._default_login
        # NOTE(review): download_dir is computed but never used below.
        download_dir = self._group + "/" + self._artifact + "/" + self._version + "/" + self._platform + "/" + self._compiler + "/" + self._arch + "/" + self._ltype
        print "* Uploading ", tarname
        print "* Uploading ", md5name
        #scp
        base_ssh = destdir
        url_ssh = url
        scpcommand = "scp"
        pscppath = ""  # NOTE(review): unused
        if sys.platform == "win32":
            # Use the bundled PuTTY pscp.exe on Windows.
            scpcommand = self._dmplugin.getDepManDataPath() + os.path.sep + "win32" + os.path.sep + "pscp.exe"
            scpcommand = '"' + os.path.normpath(scpcommand) + '"'
        cmdstr = scpcommand + " -r " + ".dmn_tmp" + os.path.sep + "*" + " " + login + "@" + url_ssh + ":" + base_ssh
        print cmdstr
        os.system(cmdstr)
        #scp
        dmutil.rmdir(".dmn_tmp")
colon_from_end = st.find(line[1][::-1], ':') if colon_from_end >= 0: start_i = len(line[1]) - colon_from_end if line[1][start_i] == '0': lookahead = 1 while line[1][start_i + lookahead] == '0': lookahead += 1 if line[1][start_i + lookahead] == '.': start_i += lookahead - 1 else: start_i += lookahead precolon = line[1][:start_i] line[1] = line[1][start_i:] else: precolon = "" params.append([line[0], np.float(st.replace(line[1], 'D', 'e')), \ np.float(st.replace(line[3], 'D', 'e')), precolon]) # Convert frequency and frequency derivatives to period and period # derivatives if flags['-p'] = True already_gave_F0_warning = False f0 = 0.0 deriv = -1 if flags['-p']: for i in range(0, len(params)): success = True freq = True name = params[i][0] ndigits = len(name) - 1 if name[0] == 'F' and (ndigits == 1 or ndigits == 2): if ndigits == 1: deriv = np.int(name[1])
def doRun(self, PassedCommString):
    """This method is responsible for the processing of the 'run' command.

    General command shape:

        run [ssh-flags] "command" [on <server|group>[, ...]] [now] [reverse] [single] [threads]

    Returns:
        False  on a syntax error, a failed confirmation prompt, or CTRL-C.
        True   after running against an explicit list of server names.
        int    (number of commands issued) after running against server group(s).

    FIX: the KeyboardInterrupt handler in the server-name branch used the
    misspelled attribute ``self._globalConifg`` — an AttributeError whenever
    the user pressed CTRL-C.  Corrected to ``self._globalConfig`` to match
    the identical handler in the server-group branch.
    """
    # Tokenize!
    self._commString = PassedCommString
    self._commTokens = PassedCommString.split()

    myCommandCount = 0
    myServerGroupList = []
    myServerNameList = []
    myRunTarget = ''
    myIsNow = False
    myIsReverse = False
    myIsSingle = False
    myIsRunInThreads = False

    #
    # Step 1: Create our own tokens, and check for SSH flags and
    # the special-case keywords. (i.e. now, reverse, single)
    #
    # The command body must be enclosed in double quotes.
    if (self._commString.find('"') == -1):
        myError = "Command Syntax Error. Try 'help run' for more information."
        self._globalConfig.getMultiLogger().LogMsgWarn(myError)
        return False

    # Get substr indexes.
    myFirstQuoteIndex = self._commString.find('"')
    myLastQuoteIndex = self._commString.rfind('"')

    # prefix = flags before the quotes, body = quoted command (quotes kept),
    # suffix = target / keyword section after the quotes.
    myPrefixStr = self._commString[0:myFirstQuoteIndex]
    myBodyStr = self._commString[myFirstQuoteIndex:(myLastQuoteIndex + 1)]
    mySuffixStr = self._commString[myLastQuoteIndex + 1:]

    # Check for pass-through SSH flags
    if (myPrefixStr.find('-') != -1):
        myFlagStr = ' ' + myPrefixStr[myPrefixStr.find('-'):]
        myFlagStr = myFlagStr.rstrip()
    else:
        myFlagStr = ''

    # Check for special-case keywords.
    if (mySuffixStr.find(' now') != -1):
        myIsNow = True
        mySuffixStr = string.replace(mySuffixStr, ' now', '')

    if (mySuffixStr.find(' reverse') != -1):
        myIsReverse = True
        mySuffixStr = string.replace(mySuffixStr, ' reverse', '')

    if (mySuffixStr.find(' single') != -1):
        myIsSingle = True
        mySuffixStr = string.replace(mySuffixStr, ' single', '')

    if (mySuffixStr.find(' threads') != -1):
        myIsRunInThreads = True
        mySuffixStr = string.replace(mySuffixStr, ' threads', '')

    #
    # Step 2: Try to determine what the target of the command is
    # and set a state-tracking variable accordingly.
    #
    if (len(mySuffixStr) == 0):
        # run "uptime"
        # run -t "uptime"
        myRunTarget = 'current_server_group'
    # Check for syntax errors.
    elif (mySuffixStr.find(' on ') == -1):
        myError = "Command Syntax Error. Try 'help run' for more information."
        self._globalConfig.getMultiLogger().LogMsgWarn(myError)
        return False
    elif (mySuffixStr.find(',') == -1):
        # run "uptime" on app
        # run -t "uptime" on app
        # run "uptime" on app01
        # run -t "uptime" on app01
        myRunTarget = 'single_server_group'
    else:
        # run "uptime" on app, www
        # run -t "uptime" on app, www
        # run "uptime" on app01, www01
        # run -t "uptime" on app01, www01
        myRunTarget = 'multiple_server_group'

    # Assuming no error up until my point we can now
    # throw out the " on " part of our command.
    myGroupStr = mySuffixStr[mySuffixStr.find(' on ') + 4:]
    myGroupStr = myGroupStr.strip()

    #
    # Step 3: Assemble two lists based on command syntax.
    #
    # myServerNameList will contain a list of server names.
    # -or-
    # myServerGroupList will contain a list of server groups.
    #
    if (myRunTarget == 'current_server_group'):
        myGroupStr = self._globalConfig.getCurrentServerGroup().getName()
        myServerGroupList.append(myGroupStr)

    elif (myRunTarget == 'single_server_group'):
        # Check for server name match.
        myServer = self._globalConfig.getCurrentEnv().getServerByName(
            myGroupStr)

        if (myServer):
            myServerNameList.append(myServer.getName())
        else:
            # Check for server group match, with and without attributes.
            myServerGroup = self._globalConfig.getCurrentEnv(
            ).getServerGroupByName(myGroupStr)

            # Validate.
            if (not myServerGroup):
                myError = "No matching server name or group '" + \
                    self._globalConfig.getCurrentEnv().getServerGroupName(myGroupStr) + "'."
                self._globalConfig.getMultiLogger().LogMsgError(myError)
                return False
            else:
                myServerGroupList.append(myGroupStr)

    elif (myRunTarget == 'multiple_server_group'):
        myGroupList = myGroupStr.split(',')

        for myLoopStr in myGroupList:
            myLoopStr = myLoopStr.strip()

            # Check for server name match.
            myServer = self._globalConfig.getCurrentEnv().getServerByName(
                myLoopStr)

            if (myServer):
                myServerNameList.append(myServer.getName())
                continue

            # Check for server group match, with and without attributes.
            myServerGroup = self._globalConfig.getCurrentEnv(
            ).getServerGroupByName(myLoopStr)

            # Validate.
            if (not myServerGroup):
                if (not self._globalConfig.isBatchMode()):
                    myError = "No matching server name or group '" + \
                        self._globalConfig.getCurrentEnv().getServerGroupName(myLoopStr) + "'."
                    self._globalConfig.getMultiLogger().LogMsgError(
                        myError)
                    return False
            else:
                myServerGroupList.append(myLoopStr)

    #
    # Step 4: Make sure noone's trying to mix
    # server hostnames and server group names together.
    #
    if ((len(myServerNameList) > 0) and (len(myServerGroupList) > 0)):
        myError = "Mixing of server name(s) and server group(s) is unsupported."
        self._globalConfig.getMultiLogger().LogMsgError(myError)
        return False

    #
    # Step 5: Must make sure...are you sure you're sure?
    #
    if ((not self._globalConfig.isBatchMode()) and (not myIsNow)):
        myDisplayStr = ''

        if (len(myServerNameList) > 0):
            for myNameStr in myServerNameList:
                myDisplayStr = myDisplayStr + myNameStr + ','

            myDisplayStr = myDisplayStr.rstrip(',')

            # Are you sure?
            myInfo = "Run command " + myBodyStr + " on server(s) " + \
                myDisplayStr + "?"
            self._globalConfig.getMultiLogger().LogMsgInfo(myInfo)

            if (not self.doAreYouSure()):
                myInfo = "Aborting command."
                self._globalConfig.getMultiLogger().LogMsgInfo(myInfo)
                return False
        else:
            for myGroupStr in myServerGroupList:
                myDisplayStr = myDisplayStr + myGroupStr + ','

            myDisplayStr = myDisplayStr.rstrip(',')

            # Are you sure?
            myInfo = "Run command " + myBodyStr + " on server group(s) " + \
                myDisplayStr + "?"
            self._globalConfig.getMultiLogger().LogMsgInfo(myInfo)

            if (not self.doAreYouSure()):
                myInfo = "Aborting command."
                self._globalConfig.getMultiLogger().LogMsgInfo(myInfo)
                return False

    #
    # Step 6: If we found server name(s), then run with that.
    # Otherwise, do the same with the server group(s) given.
    #
    threadList = {}
    threadCounter = 0

    if (len(myServerNameList) > 0):
        if (myIsReverse):
            myServerNameList.reverse()

        try:
            for myNameStr in myServerNameList:
                myServer = self._globalConfig.getCurrentEnv(
                ).getServerByName(myNameStr)
                myPinger = engine.misc.HostPinger.HostPinger(
                    self._globalConfig)

                if (myPinger.ping(myNameStr) == 0):
                    myExternalCommand = engine.data.ExternalCommand.ExternalCommand(
                        self._globalConfig)

                    # Build It.
                    if (myServer.getVersion() != None):
                        myExternalCommand.setCommand( \
                            self._globalConfig.getSshBinary() + myFlagStr + \
                            " -" + myServer.getVersion() + \
                            " -l " + myServer.getUsername() + " " + \
                            myServer.getName() + " " + \
                            myBodyStr )
                    else:
                        myExternalCommand.setCommand( \
                            self._globalConfig.getSshBinary() + myFlagStr + \
                            " -l " + myServer.getUsername() + " " + \
                            myServer.getName() + " " + \
                            myBodyStr )

                    # Run in threads
                    if myIsRunInThreads:
                        # set to true to prompt user before running
                        # NOTE(review): the elif/else arms below run the
                        # command synchronously *in addition to* the thread
                        # created afterwards — looks suspect; confirm intent
                        # before changing.
                        if (self._globalConfig.isBatchMode()):
                            PassedIsInteractive = False
                        elif (len(myFlagStr) > 0):
                            myExternalCommand.run(True)
                        else:
                            myExternalCommand.run()
                        PassedIsInteractive = False

                        # create the thread object
                        externalCommandThread = threading.Thread(
                            target=myExternalCommand.run,
                            kwargs={
                                'PassedIsInteractive': PassedIsInteractive
                            },
                            name=myNameStr)

                        # dictionary containing thread objects and their
                        # count from 0 to ... the key is the machine name
                        threadCounter += 1
                        threadList[myNameStr] = {
                            'thread': externalCommandThread,
                            'number': threadCounter
                        }

                        # join thread n to thread n - maxThreads
                        if threadList[myNameStr]['number'] >= maxThreads:
                            threadNumberToJoin = threadList[myNameStr][
                                'number'] - maxThreads
                            threadToJoin = [
                                threadList[ns] for ns in threadList.keys()
                                if threadList[ns]['number'] == threadNumberToJoin
                            ][0]['thread']
                            threadToJoin.join()

                        externalCommandThread.start()
                    else:
                        if (self._globalConfig.isBatchMode()):
                            myExternalCommand.run()
                        elif (len(myFlagStr) > 0):
                            myExternalCommand.run(True)
                        else:
                            myExternalCommand.run()

                    myCommandCount = myCommandCount + 1
                    threadCounter += 1

                    if (myIsSingle):
                        break
                else:
                    myError = "Server '" + myServer.getName() + \
                        "' appears to be down. Continuing..."
                    self._globalConfig.getMultiLogger().LogMsgWarn(myError)

                self._globalConfig.getMultiLogger(
                ).LogMsgDebugSeperator()

            # join the controlling thread to all the running threads so it
            # can't exit until they are all finished
            def masterThreadJoiner():
                pass

            masterThread = threading.Thread(target=masterThreadJoiner)
            [t['thread'].join(threadTimeout) for t in threadList.values()]
            masterThread.start()

        except EOFError:
            pass
        except KeyboardInterrupt:
            myInfo = "Caught CTRL-C keystroke. Attempting to abort..."
            self._globalConfig.getMultiLogger().LogMsgInfo(myInfo)
            # FIX: was self._globalConifg (typo) — AttributeError on CTRL-C.
            self._globalConfig.setBreakState(True)
            return False

        return True
    else:
        #
        # If we found server group names, then run with that.
        #
        for myGroupStr in myServerGroupList:
            # Check for server group match, with and without attributes.
            myServerGroup = self._globalConfig.getCurrentEnv(
            ).getServerGroupByName(myGroupStr)

            if (myGroupStr.find('[') == -1):
                myServerList = myServerGroup.getServerList()
            else:
                myServerList = myServerGroup.getAttribValueServerList(
                    myGroupStr)

            if (myIsReverse):
                myServerList.reverse()

            try:
                for myServer in myServerList:
                    myPinger = engine.misc.HostPinger.HostPinger(
                        self._globalConfig)

                    if (myPinger.ping(myServer.getName()) == 0):
                        myExternalCommand = engine.data.ExternalCommand.ExternalCommand(
                            self._globalConfig)

                        # Build It.
                        if (myServer.getVersion() != None):
                            myExternalCommand.setCommand( \
                                self._globalConfig.getSshBinary() + myFlagStr + \
                                " -" + myServer.getVersion() + \
                                " -l " + myServer.getUsername() + " " + \
                                myServer.getName() + " " + \
                                myBodyStr )
                        else:
                            myExternalCommand.setCommand( \
                                self._globalConfig.getSshBinary() + myFlagStr + \
                                " -l " + myServer.getUsername() + " " + \
                                myServer.getName() + " " + \
                                myBodyStr )

                        # Run in threads
                        if myIsRunInThreads:
                            # set to true to prompt user before running
                            # NOTE(review): same suspect synchronous-run
                            # pattern as the server-name branch above.
                            if (self._globalConfig.isBatchMode()):
                                PassedIsInteractive = False
                            elif (len(myFlagStr) > 0):
                                myExternalCommand.run(True)
                            else:
                                myExternalCommand.run()
                            PassedIsInteractive = False

                            # create the thread object
                            externalCommandThread = threading.Thread(
                                target=myExternalCommand.run,
                                kwargs={
                                    'PassedIsInteractive': PassedIsInteractive
                                },
                                name=myServer)

                            # dictionary containing thread objects and their
                            # count from 0 to ... the key is the machine name
                            # NOTE(review): unlike the name branch, the
                            # counter is NOT incremented before this entry —
                            # kept as-is; verify whether that is intended.
                            threadList[myServer] = {
                                'thread': externalCommandThread,
                                'number': threadCounter
                            }

                            # join thread n to thread n - maxThreads
                            if threadList[myServer]['number'] >= maxThreads:
                                threadNumberToJoin = threadList[myServer][
                                    'number'] - maxThreads
                                threadToJoin = [
                                    threadList[ns] for ns in threadList.keys()
                                    if threadList[ns]['number'] == threadNumberToJoin
                                ][0]['thread']
                                threadToJoin.join()

                            externalCommandThread.start()
                        else:
                            if (self._globalConfig.isBatchMode()):
                                myExternalCommand.run()
                            elif (len(myFlagStr) > 0):
                                myExternalCommand.run(True)
                            else:
                                myExternalCommand.run()

                        myCommandCount = myCommandCount + 1
                        threadCounter += 1

                        if (myIsSingle):
                            break
                    else:
                        myError = "Server '" + myServer.getName() + \
                            "' appears to be down. Continuing..."
                        self._globalConfig.getMultiLogger().LogMsgWarn(
                            myError)

                    self._globalConfig.getMultiLogger(
                    ).LogMsgDebugSeperator()

                # join the controlling thread to all the running threads so
                # it can't exit until they are all finished
                def masterThreadJoiner():
                    pass

                masterThread = threading.Thread(target=masterThreadJoiner)
                [
                    t['thread'].join(threadTimeout)
                    for t in threadList.values()
                ]
                masterThread.start()

            except EOFError:
                pass
            except KeyboardInterrupt:
                myInfo = "Caught CTRL-C keystroke. Attempting to abort..."
                self._globalConfig.getMultiLogger().LogMsgInfo(myInfo)
                self._globalConfig.setBreakState(True)
                return False

        if (myIsReverse):
            myServerList.sort()

        return myCommandCount
def _open(self):
    """Parse the text header of an IM file.

    Reads "key: value" header lines from self.fp until a terminator
    byte (NUL or ctrl-Z) or EOF, filling self.info and self.rawmode.
    Raises SyntaxError when the stream does not look like an IM header.
    Relies on module-level names defined elsewhere in this file:
    the `split` regex, `number`, and the OPEN/TAGS/COMMENT/MODE/SIZE/
    FRAMES/SCALE constants.
    """
    # Quick rejection: if there's not an LF among the first
    # 100 bytes, this is (probably) not a text header.
    if not "\n" in self.fp.read(100):
        raise SyntaxError, "not an IM file"
    self.fp.seek(0)

    # n counts recognized TAGS entries seen so far.
    n = 0

    # Default values
    self.info[MODE] = "L"
    self.info[SIZE] = (512, 512)
    self.info[FRAMES] = 1

    self.rawmode = "L"

    while 1:
        # Read one byte first so a lone CR can be skipped before
        # committing to readline().
        s = self.fp.read(1)

        # Some versions of IFUNC uses \n\r instead of \r\n...
        if s == "\r":
            continue

        # NUL or ctrl-Z terminates the text header.
        if not s or s[0] == chr(0) or s[0] == chr(26):
            break

        # FIXME: this may read whole file if not a text file
        s = s + self.fp.readline()

        # Header lines are short; anything longer means binary data.
        if len(s) > 100:
            raise SyntaxError, "not an IM file"

        # Strip the line ending (either CRLF or LF).
        if s[-2:] == '\r\n':
            s = s[:-2]
        elif s[-1:] == '\n':
            s = s[:-1]

        try:
            # `split` is a module-level compiled regex matching
            # "key: value" — defined elsewhere in this file.
            m = split.match(s)
        except re.error, v:
            raise SyntaxError, "not an IM file"

        if m:
            k, v = m.group(1, 2)

            # Convert value as appropriate
            if k in [FRAMES, SCALE, SIZE]:
                # "640*480" and "640,480" are both accepted.
                v = string.replace(v, "*", ",")
                v = tuple(map(number, string.split(v, ",")))
                if len(v) == 1:
                    # Single number: unwrap the 1-tuple.
                    v = v[0]
            elif k == MODE and OPEN.has_key(v):
                # Translate the file's mode name into (mode, rawmode).
                v, self.rawmode = OPEN[v]

            # Add to dictionary. Note that COMMENT tags are
            # combined into a list of strings.
            if k == COMMENT:
                if self.info.has_key(k):
                    self.info[k].append(v)
                else:
                    self.info[k] = [v]
            else:
                self.info[k] = v

            if TAGS.has_key(k):
                n = n + 1
        else:
            raise SyntaxError, "Syntax error in IM header: " + s
# Verify the plain-compile output, then exercise the F77 tool through a
# Python wrapper to prove the wrapped environment invokes wrapper.py.
test.must_match('test10' + _exe, " -c -x\nThis is a .F77 file.\n")

g77 = test.detect('F77', 'g77')
FTN_LIB = TestSCons.fortran_lib

if g77:

    # wrapper.py records that it ran, then re-executes the real
    # compiler command line it was handed.
    test.write("wrapper.py", """import os
import string
import sys
open('%s', 'wb').write("wrapper.py\\n")
os.system(string.join(sys.argv[1:], " "))
""" % string.replace(test.workpath('wrapper.out'), '\\', '\\\\'))

    # BUG FIX: the original template said "LIBS = %FTN_LIBs", which is an
    # invalid %-conversion ('%F') and raises ValueError when formatted
    # with locals(); the named form %(FTN_LIB)s was clearly intended.
    test.write('SConstruct', """
foo = Environment(LIBS = %(FTN_LIB)s)
f77 = foo.Dictionary('F77')
bar = foo.Clone(F77 = r'%(_python_)s wrapper.py ' + f77, F77FLAGS = '-Ix')
foo.Program(target = 'foo', source = 'foo.f')
bar.Program(target = 'bar', source = 'bar.f')
""" % locals())

    # Fixed-form Fortran source: statements start at column 7.
    test.write('foo.f', r"""
      PROGRAM FOO
      PRINT *,'foo.f'
      STOP
      END
""")
control_info = msg.split(control_sep) assert len(control_info) == 4, 'len(control_info) = %d' % len(control_info) control_info[2] = eval(control_info[2]) #Convert back to int control_info[3] = eval(control_info[3]) #Convert back to tuple return control_info #---------------------------------------------------------------------------- # Initialise module #---------------------------------------------------------------------------- # Take care of situation where module is part of package import sys, os, string, os.path dirname = os.path.dirname(string.replace(__name__, '.', os.sep)).strip() if not dirname: dirname = '.' if dirname[-1] != os.sep: dirname += os.sep # Import MPI extension # # Verify existence of mpiext.so. try: import mpiext except: errmsg = 'ERROR: C extension mpiext could not be imported.\n'
def replacechars(filename, fileExt, isCrowdin):
    """Post-process a translation file downloaded from Crowdin.

    For XML resources (fileExt != '.csv'): rewrites the file line by line,
    replacing the XML declaration with a declaration + GPL header comment,
    fixing Crowdin quirks, and escaping single quotes.  For CSV input:
    converts question<separator>answer rows into a complete Android
    string-array resources XML file.

    The processed copy replaces the original file on disk either way.
    Returns True on success, False when a suspicious pattern was found
    (e.g. a malformed positional format specifier).
    """
    # NOTE(review): opened "r+" but only ever read from — "r" would do.
    s = open(filename, "r+")
    newfilename = filename + ".tmp"
    fin = open(newfilename, "w")
    errorOccured = False
    if fileExt != '.csv':
        for line in s.readlines():
            if line.startswith("<?xml"):
                # Replace the bare XML declaration with declaration +
                # GPL license header block.
                line = "<?xml version=\"1.0\" encoding=\"utf-8\"?> \n <!-- \n ~ Copyright (c) 2009 Andrew <andrewdubya@gmail> \n ~ Copyright (c) 2009 Edu Zamora <*****@*****.**> \n ~ Copyright (c) 2009 Daniel Svaerd <*****@*****.**> \n ~ Copyright (c) 2009 Nicolas Raoul <*****@*****.**> \n ~ Copyright (c) 2010 Norbert Nagold <*****@*****.**> \n ~ This program is free software; you can redistribute it and/or modify it under \n ~ the terms of the GNU General Public License as published by the Free Software \n ~ Foundation; either version 3 of the License, or (at your option) any later \n ~ version. \n ~ \n ~ This program is distributed in the hope that it will be useful, but WITHOUT ANY \n ~ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A \n ~ PARTICULAR PURPOSE. See the GNU General Public License for more details. \n ~ \n ~ You should have received a copy of the GNU General Public License along with \n ~ this program. If not, see <http://www.gnu.org/licenses/>. \n --> \n \n"
            else:
                # some people outwitted crowdin's "0"-bug by filling in "0 ", this changes it back:
                if line.startswith(" <item>0 </item>"):
                    line = " <item>0</item>\n"
                # Escape single quotes, then collapse any double-escaping
                # that produced (order matters here).
                line = string.replace(line, '\'', '\\\'')
                line = string.replace(line, '\\\\\'', '\\\'')
                # NOTE(review): '\n\s' is a literal newline followed by
                # backslash-s (string.replace is not regex); looks like a
                # regex was intended — confirm before changing.
                line = string.replace(line, '\n\s', '\\n')
                line = string.replace(line, 'amp;', '')
                # Flag malformed positional specifiers like "%1 $s"/"%1$ s".
                if re.search('%[0-9]\\s\\$|%[0-9]\\$\\s', line) != None:
                    errorOccured = True
                    # print line
            fin.write(line)
    else:
        # CSV (tutorial deck) input: emit a full resources XML file with
        # license header and open the questions string-array.
        fin.write(
            "<?xml version=\"1.0\" encoding=\"utf-8\"?> \n <!-- \n ~ Copyright (c) 2011 Norbert Nagold <*****@*****.**> \n ~ This program is free software; you can redistribute it and/or modify it under \n ~ the terms of the GNU General Public License as published by the Free Software \n ~ Foundation; either version 3 of the License, or (at your option) any later \n ~ version. \n ~ \n ~ This program is distributed in the hope that it will be useful, but WITHOUT ANY \n ~ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A \n ~ PARTICULAR PURPOSE. See the GNU General Public License for more details. \n ~ \n ~ You should have received a copy of the GNU General Public License along with \n ~ this program. If not, see <http://www.gnu.org/licenses/>. \n --> \n \n \n<resources> \n <string-array name=\"tutorial_questions\"> \n"
        )
        # Re-join lines that were broken inside quoted fields, then split
        # the stream into one CSV row per entry.
        content = re.sub('([^\"])\n', "\\1", s.read()).split("\n")
        length = len(content)
        line = []
        for i in range(length - 1):
            if isCrowdin:
                # Crowdin CSV: the translation is the last quoted column.
                start = content[i].rfind('\",\"') + 3
            else:
                start = content[i].find('\"') + 1
            # Drop the trailing closing quote.
            contentLine = content[i][start:len(content[i]) - 1]
            sepPos = contentLine.find('<separator>')
            if sepPos == -1:
                # No question/answer separator: only tolerable for
                # (near-)empty rows, otherwise record an error.
                if len(contentLine) > 2:
                    errorOccured = True
                    print contentLine
                continue
            # Split into [question, answer], each CDATA-wrapped.
            line.append([
                "<![CDATA[" + contentLine[:sepPos] + "]]>",
                "<![CDATA[" + contentLine[sepPos + 11:] + "]]>"
            ])
        for fi in line:
            # Collapse runs of quotes/backslashes and escape them before
            # writing the question items.
            fi[0] = re.sub('\"+', '\\\"', fi[0])
            fi[0] = re.sub('\'+', '\\\'', fi[0])
            fi[0] = re.sub('\\\\{2,}', '\\\\', fi[0])
            fin.write(" <item>" + fi[0] + "</item> \n")
        fin.write(
            " </string-array>\n <string-array name=\"tutorial_answers\">\n")
        for fi in line:
            # Same escaping for the answer items.
            fi[1] = re.sub('\"+', '\\\"', fi[1])
            fi[1] = re.sub('\'+', '\\\'', fi[1])
            fi[1] = re.sub('\\\\{2,}', '\\\\', fi[1])
            fin.write(" <item>" + fi[1] + "</item> \n")
        fin.write(" </string-array>\n</resources>")
    s.close()
    fin.close()
    # Replace the original file with the processed copy.
    shutil.move(newfilename, filename)
    if errorOccured:
        #os.remove(filename)
        print 'Error in file ' + filename
        return False
    else:
        # print 'File ' + filename + ' successfully copied'
        # Disabled, makes output too large.
        return True
def fixEndings(str):
    """Return *str* with all line endings normalized to LF.

    CRLF pairs are collapsed first (so each becomes a single LF), then
    any remaining bare CR (classic Mac endings) is converted as well.
    """
    # Use the str.replace() method instead of string.replace(): the
    # module-level function has been deprecated since Python 2 and was
    # removed in Python 3; the method form behaves identically.
    # (Parameter name shadows the builtin ``str`` — kept for backward
    # compatibility with existing callers.)
    return str.replace('\r\n', '\n').replace('\r', '\n')