def testContext(self):
    """Verify that a Template's service exposes a Context and that the
    TRIM / EVAL_PYTHON options propagate into it; also check template
    fetching (and its failure mode) through Context.template()."""
    tt = Template({'INCLUDE_PATH': 'test/src:test/lib', 'TRIM': True,
                   'POST_CHOMP': 1})
    ttpython = Template({'INCLUDE_PATH': 'test/src:test/lib', 'TRIM': True,
                         'POST_CHOMP': 1, 'EVAL_PYTHON': True})
    # The context reached via the service must be the same object the
    # Template itself hands out.
    ctx = tt.service().context()
    self.failUnless(ctx)
    self.assertEquals(ctx, tt.context())
    self.failUnless(ctx.trim())
    self.failUnless(not ctx.eval_python())
    ctx = ttpython.service().context()
    self.failUnless(ctx)
    self.failUnless(ctx.trim())
    self.failUnless(ctx.eval_python())
    # template()
    # Test that we can fetch a template via template()
    tmpl = ctx.template('header')
    self.failUnless(tmpl)
    self.failUnless(isinstance(tmpl, Document))
    # Test that non-existence of a template is reported
    error = None
    try:
        tmpl = ctx.template('no_such_template')
    except Exception, e:
        error = e
def testCompile(self): ttcfg = { "POST_CHOMP": 1, "INCLUDE_PATH": "test/src", "COMPILE_EXT": ".ttc" } # Check that compiled template files exist. compiled = "test/src/foo.ttc" self.assert_(os.path.exists(compiled)) self.assert_(os.path.exists("test/src/complex.ttc")) # Ensure template metadata is saved in compiled file. output = Template(ttcfg).process("baz", { "showname": 1 }) self.assertNotEqual(-1, output.find("name: baz")) # We're going to hack on the foo.ttc file to change some key text. # This way we can tell that the template was loaded from the compiled # version and not the source. fh = open(compiled, "r+") stat = os.fstat(fh.fileno()) foo = fh.read() fh.seek(0) fh.write(foo.replace("the foo file", "the hacked foo file")) fh.close() os.utime(compiled, (stat.st_atime, stat.st_mtime)) self.Expect(DATA, ttcfg)
def testInclude(self):
    """Exercise INCLUDE processing with nested replacement data and two
    differently-configured Template processors."""
    callsign = self._callsign()
    replace = {"a": callsign["a"], "b": callsign["b"],
               "c": {"d": callsign["d"], "e": callsign["e"],
                     "f": {"g": callsign["g"], "h": callsign["h"]}},
               "r": callsign["r"], "s": callsign["s"], "t": callsign["t"]}
    tproc = Template({"INTERPOLATE": True,
                      "INCLUDE_PATH": "test/src:test/lib",
                      "TRIM": True,
                      "AUTO_RESET": False,
                      "DEFAULT": "default"})
    # incpath starts with a bogus second entry on purpose...
    incpath = ["test/src", "/nowhere"]
    tt_reset = Template({"INTERPOLATE": True,
                         "INCLUDE_PATH": incpath,
                         "TRIM": True,
                         "RECURSION": True,
                         "DEFAULT": "bad_default"})
    # ...then is patched in place: the Template holds a live reference to
    # the list, so this checks that INCLUDE_PATH is read lazily.
    incpath[1] = "test/lib"
    replace["metaout"] = tproc.process("metadata", replace)
    replace["metamod"] = os.stat("test/src/metadata").st_mtime
    self.Expect(DATA, (('default', tproc), ('reset', tt_reset)), replace)
def generate(self):
    """Parse the prepared header and render mock .h/.c files from the
    templates in ../tmpl.

    Side effects: changes the working directory while running (restored
    on exit, even on error) and writes the generated mock files.
    """
    print("=== Generating mocks for %s" % self.header_file)
    cwd_stored = os.getcwd()
    scriptdir = os.path.dirname(os.path.realpath(__file__))
    # Fix: the original never restored the cwd if parsing/rendering
    # raised; the try/finally guarantees restoration.
    try:
        print("Parsing file...")
        os.chdir(self.tempdir)
        fdv = FuncDeclVisitor(self.args)
        fdv.parse(os.path.realpath(self.header_prep))
        if self.args.print_ast:
            # pprint(v.funcdecls)
            fdv.ast.show(attrnames=True, nodenames=True)
        if fdv.funcdecls:
            HFile = collections.namedtuple('File', ['name', 'incl', 'funcs'])
            file = HFile(name=self.header_name,
                         incl=self.header_file,
                         funcs=fdv.funcdecls)
            # Templates are resolved relative to the tmpl directory.
            os.chdir(scriptdir + "/../tmpl")
            print("Generating %s" % (self.filemock_h))
            self.mockh = Template("file.h.tmpl")
            self.mockh.render({'file': file})
            print("Generating %s" % (self.filemock_c))
            self.mockc = Template("file.c.tmpl")
            self.mockc.render({'file': file})
        else:
            print("No function declarations found in %s" % (self.header_file))
    finally:
        # restore cwd
        os.chdir(cwd_stored)
def create_user(ldap_server, user): old_user = find_user(ldap_server, config, user['USER_NAME']) if old_user is None: user['USER_ID'] = str(get_free_user_id(ldap_server)) user['USER_UID'] = str(uuid.uuid4()) else: user['USER_ID'] = old_user['uidNumber'][0] user['USER_UID'] = old_user['apple-generateduid'][0] group = find_group(l, config, user['USER_GROUP_NAME']) user['USER_GROUP_ID'] = group['gidNumber'][0] user['USER_PASSWORD_HASH'] = "{KERBEROS} " + user['USER_NAME'] t = Template("../../config/plab-ldap/templates/user.ldif") t.replace(config) t.replace(user) insert_ldif(ldap_server, str(t)) add_to_group(ldap_server, config, user['USER_NAME'], user['USER_GROUP_NAME']) # Get user and show details group = find_group(ldap_server, config, user['USER_GROUP_NAME']) user = find_user(ldap_server, config, user['USER_NAME']) pp = pprint.PrettyPrinter(indent = 4) print "Added user (" + user['uid'][0] + ") with id (" + str(user['uidNumber']) + ")"
def make_admin_staff_panel(self):
    """Render the staff-management panel: every account, ordered by
    class then username, annotated with its most recent logged action."""
    session = model.Session()
    table = model.account
    query = session.execute(
        table.select().order_by(table.c.account.asc(),
                                table.c.username.asc()))
    users = [dict(row.items()) for row in query]
    rowtype = 1
    for entry in users:
        # Alternate between values 1 and 2.
        rowtype ^= 0x3
        entry["rowtype"] = rowtype
        # Fetch this user's most recent activity record, if any.
        action_table = model.activity
        latest_sql = (action_table.select()
                      .where(action_table.c.username == entry["username"])
                      .order_by(action_table.c.date.desc()))
        last_action = session.execute(latest_sql).fetchone()
        if last_action:
            entry["action"] = last_action["action"]
            entry["actiondate"] = last_action["date"]
        else:
            entry["action"] = None
            entry["actiondate"] = None
    Template.__init__(self, "staff_management", users=users)
def writeVhost(settings, domain, templateName, values): """Writes a site's vhost configuration file""" # Open the file conf = str(os.path.join(settings['confdir'], domain)) outFile = open(conf, 'w') # Write the configuration t = Template() try: tplDir = os.path.join(settings['templates'], templateName) templateFilename = os.path.join(tplDir, templateName + '.tpl') inFile = open(templateFilename, 'r') source = inFile.read() # While there are variables left unprocessed, process the string expression = r'\[\%.+\%\]' while re.search(expression, source) != None: source = t.processString(source, values) # Write what we have outFile.write(source) except TemplateException, e: print "ERROR: %s" % e
def _generate(self, path=None, output=None):
    """Build a Template and run its generator, defaulting the path and
    output destination (plus seed/quiet) from the parsed CLI args."""
    output = output or self.args.output
    path = path or self.args.template
    tmpl = Template(path=path,
                    output=output,
                    seed=self.args.seed,
                    quiet=self.args.quiet)
    tmpl.generate()
def testError(self): tmpl = Template({ 'BLOCKS': { 'badinc': '[% INCLUDE nosuchfile %]' } }) try: tmpl.process("badinc") self.fail("Failed to raise exception") except TemplateException, e: self.assertEquals('file', e.type()) self.assertEquals('nosuchfile: not found', e.info())
def generate(self, section=None):
    """Recursively generate output for `section` (defaulting to the
    navigation root) and every descendant section."""
    if section is None:
        section = self.navigation.root
    Template(self, section).generate()
    for subsection in section.childs:
        self.generate(subsection)
def make_admin_post_search_panel(self, search, text, caller='internal'):
    """Search the current board's posts (by IP, text, author, or post
    number) and render the paginated results panel.

    Raises WakaError on an invalid IP or when nothing matches.
    """
    board = self.board
    session = model.Session()
    table = board.table
    board.check_access(self.user)
    # Render as a popup unless invoked from the board page itself.
    popup = caller != 'board'
    # Dispatch on the human-readable search-mode label.
    if search.find('IP Address') != -1:
        try:
            sql = table.select()\
                .where(table.c.ip == misc.dot_to_dec(text))
        except ValueError:
            raise WakaError('Please enter a valid IP.')
        search_type = 'IP'
    elif search.find('Text String') != -1:
        sql = table.select().where(table.c.comment.like('%'+text+'%'))
        search_type = 'text string'
    elif search.find('Author') != -1:
        sql = table.select().where(or_(table.c.name.like('%'+text+'%'),
                                       table.c.trip.like('%'+text+'%')))
        search_type = 'author'
    else:
        sql = table.select().where(table.c.num == text)
        search_type = 'ID'
    if search_type != 'ID':
        # Multi-result searches are paginated.
        page = model.Page(sql, self.page, self.perpage)
        rowcount = page.total_entries
        total_pages = page.total_pages
        posts = page.rows
        if not posts:
            raise WakaError("No posts found for %s %s" % (search_type, text))
    else:
        # ID lookup returns at most one row.
        rowcount = total_pages = 1
        row = session.execute(sql).fetchone()
        if not row:
            raise WakaError("Post not found. (It may have just been"
                            " deleted.)")
        posts = [row]
    # Hidden form inputs so the search can be re-submitted.
    inputs = [
        {'name': 'board', 'value': board.name},
        {'name': 'task', 'value': 'searchposts'},
        {'name': 'text', 'value': text},
        {'name': 'caller', 'value': caller},
        {'name': 'search', 'value': search}
    ]
    rooturl = misc.make_script_url(task='searchposts', board=board.name,
                                  caller=caller, search=search, text=text,
                                  _amp=True)
    # NOTE(review): num=id passes the *builtin* id function — no post
    # number is in scope here, so this looks like a bug; confirm what the
    # template actually expects for `num`.
    Template.__init__(self, 'post_search', num=id, posts=posts,
                      search=search, text=text, inputs=inputs,
                      number_of_pages=total_pages, rooturl=rooturl,
                      rowcount=rowcount, popup=popup)
def make_edit_staff_window(self, username):
    """Render the staff-editing window for `username`, marking the
    boards the member already has authority over."""
    boards = interboard.get_all_boards()
    edited_user = staff.StaffMember.get(username)
    for entry in boards:
        if entry in edited_user.reign:
            entry["underpower"] = True
    Template.__init__(self, "staff_edit_template",
                      user_to_edit=username,
                      boards=boards)
def Expect(self, data, tproc=None, vars=None):
    """Data-driven test harness: split `data` into '-- test --' sections,
    process each input with a Template, and compare against the
    '-- expect --' half.

    tproc may be a config dict (a Template is built), a sequence of
    (name, Template) pairs selectable per-test via '-- use NAME --', an
    existing Template, or None (default Template).
    """
    vars = vars or {}
    # Strip comment lines and clip to the -- start -- / -- stop -- window.
    data = re.sub(r"(?m)^#.*\n", "", data)
    match = re.search(r"\s*--\s*start\s*--\s*", data)
    if match:
        data = data[match.end():]
    match = re.search(r"\s*--\s*stop\s*--\s*", data)
    if match:
        data = data[:match.start()]
    tests = re.split(r"(?mi)^\s*--\s*test\s*--\s*", data)
    if not tests[0]:
        tests.pop(0)
    ttprocs = None
    if isinstance(tproc, dict):
        tproc = Template(tproc)
    elif isinstance(tproc, (tuple, list)):
        # Named processors; the first pair supplies the default.
        ttprocs = dict(tproc)
        tproc = tproc[0][1]
    elif not isinstance(tproc, Template):
        tproc = Template()
    for count, test in enumerate(tests):
        # Optional per-test name annotation.
        match = re.search(r"(?mi)^\s*-- name:? (.*?) --\s*\n", test)
        if match:
            name = match.group(1)
            test = test[:match.start()] + test[match.end():]
        else:
            name = "template text %d" % (count + 1)
        # Split input from expected output.
        match = re.search(r"(?mi)^\s*--\s*expect\s*--\s*\n", test)
        if match:
            input, expect = test[:match.start()], test[match.end():]
        else:
            input, expect = test, ""
        # Optional '-- use NAME --' switches the active processor.
        match = re.match(r"(?mi)^\s*--\s*use\s+(\S+)\s*--\s*\n", input)
        if match:
            ttname = match.group(1)
            ttlookup = ttprocs.get(ttname)
            if ttlookup:
                tproc = ttlookup
            else:
                self.fail("no such template object to use: %s\n" % ttname)
            input = input[:match.start()] + input[match.end():]
        try:
            out = tproc.processString(input, vars)
        except Exception, e:
            self.fail("Test #%d: %s process FAILED: %s\n%s" % (
                count + 1, name, subtext(input), e))
        # '-- process --' means the expected text itself is a template.
        match = re.match(r"(?i)\s*--+\s*process\s*--+\s*\n", expect)
        if match:
            expect = expect[match.end():]
            try:
                expect = tproc.processString(expect, vars)
            except TemplateException, e:
                self.fail("Test #%d: Template process failed (expect): %s" % (
                    count + 1, e))
def count_templates(self):
    """
    Retrieve count of existing templates created by owner of API-key

    API call used is v1/template (GET)

    @return: Integer
    """
    return Template(api_client=self.api_client).count()
def testView(self):
    """Views created from a template context honour their config
    (e.g. prefix) and the DATA expectations."""
    data = {"foo": Foo(pi=3.14, e=2.718),
            "blessed_list": MyList("Hello", "World")}
    context = Template().context()
    view = context.view()
    self.assert_(view)
    view = context.view({"prefix": "my"})
    self.assert_(view)
    self.assertEquals("my", view.prefix())
    self.Expect(DATA, None, data)
def __init__(self, cookie, board=None, dest=None, page=None,
             perpage=50, **kwargs):
    """Authenticate the staff cookie, normalise pagination parameters,
    dispatch to the requested admin panel builder, then attach the
    common form variables.

    On login failure the login template is rendered and nothing else
    happens.
    """
    try:
        self.user = staff.StaffMember.get_from_cookie(cookie)
    except staff.LoginError:
        # Not logged in: show the login page instead of the panel.
        Template.__init__(self, "admin_login_template", login_task=dest)
        return
    if not dest:
        dest = HOME_PANEL
    self.admin = cookie
    # TODO: Check if mod is banned.
    if not page:
        if dest in (HOME_PANEL, TRASH_PANEL):
            # Adjust for different pagination scheme. (Blame Wakaba.)
            page = 0
        else:
            page = 1
    if not str(perpage).isdigit():
        perpage = 50
    # The page attribute is not always a pure integer (thread pages).
    if str(page).isdigit():
        page = int(page)
    self.page = page
    self.perpage = int(perpage)
    self.board = local.environ["waka.board"]
    # Unknown destinations fall back to the home panel.
    if dest not in INTERFACE_MAPPING:
        dest = HOME_PANEL
    INTERFACE_MAPPING[dest](self, **kwargs)
    # Convert user reign list into a list of dictionaries, for
    # templating.
    reign = []
    if self.user.account == staff.MODERATOR:
        reign = [{"board_entry": entry} for entry in self.user.reign]
    else:
        if self.board:
            reign = interboard.get_all_boards(
                check_board_name=self.board.name)
        else:
            reign = interboard.get_all_boards()
    # Set global form variables.
    Template.update_parameters(
        self,
        username=self.user.username,
        type=self.user.account,
        admin=cookie,
        boards_select=reign,
        boards=reign,
        page=self.page,
        perpage=self.perpage,
    )
def render_template(content, basename):
    """Render the template.

    Processes pan.tt (from the tt/ directory next to this module) with
    the given content and basename.

    Raises TemplateException('render', ...) on failure, chained to the
    original error.
    """
    name = 'pan.tt'
    try:
        template = Template({'INCLUDE_PATH':
                             os.path.join(os.path.dirname(__file__), 'tt')})
        output = template.process(name, {'content': content,
                                         'basename': basename})
    except TemplateException as e:
        msg = "Failed to render template %s with data %s: %s." % (
            name, content, e)
        logger.error(msg)
        # Chain the cause so the original traceback is preserved.
        raise TemplateException('render', msg) from e
    return output
def make_admin_spam_panel(self):
    """Render the spam-filter panel from the configured spam files."""
    # TODO: Paginate this, too.
    entries = []
    for path in config.SPAM_FILES:
        with open(path, "r") as spam_file:
            for line in spam_file:
                entries.append(str_format.clean_string(line))
    Template.__init__(self, "spam_panel_template",
                      spam="".join(entries),
                      spamlines=len(entries))
def show(self, http_handler, param, form=None):
    """Render the control-test page for the user in param[1], including
    their full report values."""
    page = Template('base.html')
    page.setTemplate('CONTENT', 'control_test.html')
    page.setTemplate('INFO_BLOCK', 'admin_menue.html')
    page.setData('TEST', storage.userControlTestHtml(param[1]))
    report = storage.userReport(param[1])
    for key, value in report.items():
        page.setData(key, value)
    self.answer(http_handler, page.show())
def delete_template(self, template_id=None):
    """
    Delete Template specified by template_id

    API call used is v1/template (DELETE)

    @type template_id: int
    @param template_id: ID of the template to delete
    @return: None
    """
    handle = Template(api_client=self.api_client)
    handle.delete(template_id)
def testTemplate(self): out = StringBuffer() tt = Template({"INCLUDE_PATH": "test/src:test/lib", "OUTPUT": out}) tt.process("header") self.assert_(out.get()) out.clear() try: tt.process("this_file_does_not_exist") self.fail("exception not raised") except TemplateException, e: self.assertEquals("file", e.type()) self.assertEquals("this_file_does_not_exist: not found", e.info())
def __init__(self):
    """Instruction pattern for the peephole matcher: a load from the
    CRunTime memory array followed by index arithmetic, an array load
    and a store.  Entries marked optional-class may be absent in a
    matching sequence."""
    Template.__init__(self, [
        {"class": Getstatic, "match": "getstatic CRunTime/memory [I"},
        {"class": Iload},
        {"class": Iconst},
        {"class": Iushr},
        {"optional-class": Iconst},
        {"optional-class": Iadd},
        {"class": Iaload},
        {"class": Istore},
    ])
def make_admin_ban_panel(self, ip=""):
    """Render the ban-management panel: every admin-table entry joined
    with the activity log, grouped by type with dividers and humanised
    expiry dates."""
    session = model.Session()
    table = model.admin
    sql = select(
        [
            model.activity.c.username,
            table.c.num,
            table.c.type,
            table.c.comment,
            table.c.ival1,
            table.c.ival2,
            table.c.sval1,
            table.c.total,
            table.c.expiration,
        ],
        from_obj=[
            table.outerjoin(
                model.activity,
                and_(table.c.num == model.activity.c.admin_id,
                     table.c.type == model.activity.c.action),
            )
        ],
    ).order_by(table.c.type.asc(), table.c.num.asc())
    # TODO: We should be paginating, but the page needs to be
    # adjusted first.
    # res = model.Page(sql, self.page, self.perpage)
    query = session.execute(sql)
    bans = [dict(row.items()) for row in query]
    rowtype = 1
    prevtype = ""
    for row in bans:
        # Insert a divider whenever the ban type changes.
        # BUG FIX: the original did `prevtype = row` before comparing, so
        # a dict was compared with a string — always unequal, putting a
        # divider on every row. Track the previous *type* instead.
        if prevtype != row["type"]:
            row["divider"] = 1
            prevtype = row["type"]
        # Alternate between values 1 and 2.
        rowtype ^= 0x3
        row["rowtype"] = rowtype
        if row["expiration"]:
            row["expirehuman"] = misc.make_date(row["expiration"])
        else:
            row["expirehuman"] = "Never"
        if row["total"] == "yes":
            row["browsingban"] = "No"
        else:
            row["browsingban"] = "Yes"
    Template.__init__(self, "ban_panel_template", bans=bans, ip=ip)
def write_toc(toc, location):
    """Write the toc to disk.

    Renders toc.tt with the given toc and writes the result to
    location/mkdocs.yml.

    Raises TemplateException('render', ...) on failure, chained to the
    original error.
    """
    name = 'toc.tt'
    try:
        template = Template({'INCLUDE_PATH':
                             os.path.join(os.path.dirname(__file__), 'tt')})
        tocfile = template.process(name, {'toc': toc})
    except TemplateException as e:
        msg = "Failed to render template %s with data %s: %s." % (
            name, toc, e)
        logger.error(msg)
        # Chain the cause so the original traceback is preserved.
        raise TemplateException('render', msg) from e
    with open(os.path.join(location, "mkdocs.yml"), 'w') as fih:
        fih.write(tocfile)
def process_template(self, template_file, vars):
    """Render `template_file` with `vars` using the site-wide Template
    Toolkit configuration (absolute paths, interpolation, post-chomp,
    embedded python enabled)."""
    tt_config = {
        'ABSOLUTE': 1,
        'INCLUDE_PATH': HTDOCS_ABS_TEMPLATE_PATH,
        'INTERPOLATE': 1,
        'POST_CHOMP': 1,
        'EVAL_PYTHON': 1,
        # 'PRE_PROCESS': 'header'
    }
    return Template(tt_config).process(template_file, vars)
def response(self, msg, **kwargs):
    '''Build a WeChat reply from a parsed message dict.

    ex: response(message, type='yourType')
    optional kwargs:
        type='legal_types', content='yourContent', handler=foo, count=1
    Passing content= forces the reply type to text; handler= is a
    function that receives and returns the (possibly modified) msg
    dict. When type is news the count kwarg is required.
    support types: text, image, voice, video, music, news
    '''
    # Swap the endpoints so the reply goes back to the sender.
    msg['receiver'], msg['sender'] = msg['sender'], msg['receiver']
    legal_types = ['text', 'music', 'image', 'voice', 'video', 'news']
    # Reply type: explicit kwarg wins, else echo the incoming type.
    # NOTE: `type` shadows the builtin throughout this function.
    if kwargs.get('type'):
        type = kwargs.get('type')
    else:
        type = msg['type']
    if type == 'music':
        # Fall back to the standard-quality URL when no HQ URL exists.
        if not msg['hq_musurl']:
            msg['hq_musurl'] = msg['musurl']
    # content= forces a plain-text reply with that content.
    if kwargs.get('content'):
        msg['type'] = type = 'text'
        msg['content'] = to_unicode(kwargs.get('content'))
    if not type in legal_types:
        raise Exception("Illgal type!You could only choose one type from legal_types!")
    else:
        msg['type'] = type
        # handler= post-processes the message dict before formatting.
        if kwargs.get('handler'):
            msg = kwargs.get('handler')(msg)
    # Pick the reply template: an attribute of Template named after the
    # type, except news which is built per item count.
    if not type == 'news':
        template = to_unicode(getattr(Template(), type))
    else:
        count = kwargs.get('count')
        if count:
            temp = Template()
            template = to_unicode(temp.news(count))
        else:
            raise Exception('When type is set to news,the count kwarg is necessary!')
    logging.info(template.format(**msg))
    # NOTE(review): bare except hides the real formatting error — the
    # raised message only hints at missing template args.
    try:
        retdata = template.format(**msg)
    except:
        raise Exception("You did't pass enough args or pass wrong args,please check args which template needed.Read template.py maybe inspire your mind")
    return retdata
def show(self, http_handler, param, form=None):
    """Render the results page listing all recorded users."""
    page = Template('base.html')
    page.setTemplate('CONTENT', 'results.html')
    page.setTemplate('INFO_BLOCK', 'admin_menue.html')
    page.setData('USERS', storage.recordsHtmlInfo())
    self.answer(http_handler, page.show())
def show(self, http_handler, param, form=None):
    """Render a holding page that auto-refreshes to /test after 3s."""
    page = Template('base.html')
    page.setTemplate('CONTENT', 'wait.html')
    page.setTemplate('INFO_BLOCK', 'test_dummy.html')
    page.setData('META',
                 '<meta http-equiv="Refresh" content="3; url=/test">')
    self.answer(http_handler, page.show())
class TemplateTest(unittest.TestCase):
    """ unit test for template """

    def setUp(self):
        """ set up data """
        # Field spec: tag -> field name, or [name, subfield-map] /
        # {code: name} for structured fields.
        self.spec_yaml = """
001: id
200: [ authors, { a: name, b: firstname } ]
300: { a: title, b: subtitle }
700: [ auth_author, { a: name, b: firstname } ]
701: [ auth_author, { a: name, b: firstname } ]
002: code
400: { a: ville, b: adresse }
"""
        # MARC-like intermediate representation: [tag, value-or-subfields].
        self.mir_yaml = """
- [001, PPNxxxx ]
- [002, toto ]
- [200, [ [a, Doe], [b, elias], [b, frederik], [b, john] ]]
- [200, [ [a, Doe], [b, jane] ]]
- [300, [ [a, "i can haz title"], [b, "also subs"] ]]
- [400, [ [a, "Strasbourg"], [b, "rue 1"], [b, "rue 2"] ]]
"""
        # Expected flattened data form of the same record.
        self.data_yaml = """
authors:
- { name: Doe, firstname: [elias, frederik, john] }
- { name: Doe, firstname: jane }
title: "i can haz title"
subtitle: "also subs"
id: PPNxxxx
code: toto
adresse: [rue 1, rue 2]
ville: Strasbourg
"""
        self.template = Template(yaml.safe_load(self.spec_yaml))

    def test_build_data_from_mir(self):
        """ test to build data from mir """
        self.template.build_data_from_mir(yaml.safe_load(self.mir_yaml))
        # Compare via YAML dumps for order-insensitive structural equality.
        template_data_dump = yaml.dump(self.template.data)
        test_data_dump = yaml.dump(yaml.safe_load(self.data_yaml))
        self.assertEqual(template_data_dump, test_data_dump)

    def test_build_mir_from_data(self):
        """ test to build mir from data """
        self.template.build_mir_from_data(yaml.safe_load(self.data_yaml))
        template_mir_dump = yaml.dump(self.template.mir)
        test_mir_dump = yaml.dump(yaml.safe_load(self.mir_yaml))
        self.assertEqual(template_mir_dump, test_mir_dump)
def make_admin_home_panel(self):
    """Render the moderation home panel: either a single thread view
    (page of the form 't<num>') or a paginated board view."""
    # Update perpage attribute: it is determined here by board options.
    board = self.board
    self.perpage = board.options['IMAGES_PER_PAGE']
    # Get reports.
    reports = board.get_local_reports()
    # Send to Template initializer.
    kwargs = {}
    threads = []
    if str(self.page).startswith('t'):
        # Thread view: strip the 't' prefix to get the thread number.
        self.page = self.page[1:]
        posts = board.get_thread_posts(self.page)
        threads.append({'posts': posts})
        kwargs = {'lockedthread': posts[0].locked,
                  'parent': self.page,
                  'thread': self.page}
    else:
        # Grab count of all threads.
        table = board.table
        session = model.Session()
        sql = select([func.count()], table.c.parent == 0)
        thread_count = session.execute(sql).fetchone()[0]
        # Ceiling division. NOTE(review): relies on Python 2 integer '/';
        # under Python 3 this would yield a float page count.
        total = (thread_count + self.perpage - 1) / self.perpage
        if total <= self.page and total > 0:
            # Set page number to last page if exceeding total.
            # Pages are 0-indexed.
            self.page = total - 1
        # Get partial board posts.
        pagethreads = board.get_some_threads(self.page)
        (pages, prevpage, nextpage)\
            = board.get_board_page_data(self.page, total,
                                        admin_page='mpanel')
        threads = board.parse_page_threads(pagethreads)
        kwargs = {'pages': pages,
                  'prevpage': prevpage,
                  'nextpage': nextpage}
    Template.__init__(self, 'post_panel_template',
                      postform=board.options['ALLOW_TEXTONLY']
                      or board.options['ALLOW_IMAGES'],
                      image_inp=board.options['ALLOW_IMAGES'],
                      threads=threads,
                      reportedposts=reports,
                      **kwargs)
class RequestHandler(object):
    """Base handler tying together the application, the incoming request,
    an HTTPResponse, and the template engine."""

    def __init__(self, app=None, request=None, **kwargs):
        self._app = app
        self._request = request
        if not hasattr(self, "response"):
            self._response = HTTPResponse()
        self._template = Template(config.config.get('template_dir'))

    def respond(self, file, data):
        """Render template `file` with `data` into the response body and
        return the response."""
        self._response.body = self._template.render(file, data)
        return self._response

    def text_respond(self, data):
        """Return `data` as a text/plain response."""
        self._response._headers = [('Content-Type', 'text/plain')]
        self._response.body = data
        return self._response

    @property
    def request(self):
        return self._request

    @property
    def app(self):
        # BUG FIX: previously returned self._response, so handlers could
        # never reach their application object through this property.
        return self._app
def generateGroupedIeDataType(grpIeDetails, grpIe):
    """Accumulate grouped-IE type/instance descriptors into the
    module-level grpIeDataTypeData dict and (re)render the grouped-IE
    header via the TT template.

    NOTE(review): depends on module-level state (grpIeDataTypeData) and
    re-renders the full header on every call — confirm callers expect
    the cumulative behaviour.
    """
    grpIeInfo = {}
    grpIeInfo['iePresenceList'] = []
    # Register the grouped IE's type name/value once.
    grpIeTypeInfo = {}
    grpIeTypeInfo['grpIeTypeName'] = grpIe + 'IeType'
    grpIeTypeInfo['grpIeTypeValue'] = grpIeDetails['ieTypeValue']
    if grpIeTypeInfo not in grpIeDataTypeData['grpTypeList']:
        grpIeDataTypeData['grpTypeList'].append(grpIeTypeInfo)
    for inst in grpIeDetails['instList']:
        instDetails = grpIeDetails[inst]
        grpIeInfo = {}
        grpIeInfo['iePresenceList'] = []
        grpIeInfo['grpIeName'] = inst + 'Data'
        grpIeInfo['ieList'] = []
        for ieDict in instDetails['ieDetails']:
            for ieDetails in ieDict:
                # Non-mandatory IEs get a presence flag member.
                if ieDict[ieDetails]['presence'] != 'M':
                    grpIeInfo['iePresenceList'].append(
                        ieDict[ieDetails]['ieVarName'] + 'IePresent')
                ieInfo = {}
                ieInfo['ieTypeName'] = ieDict[ieDetails]['ieTypeName'] + 'IeData'
                ieInfo['ieVarName'] = ieDict[ieDetails]['ieVarName']
                ieInfo['ieCardinality'] = ieDict[ieDetails]['cardinality']
                ieInfo['grouped'] = ieDict[ieDetails]['grouped']
                if ieInfo['grouped'] == 'Yes':
                    # Capitalise the group type name for the class name.
                    ieInfo['grpIeInstClassName'] = \
                        ieDict[ieDetails]['ieGroupTypeName'][0].upper() + \
                        ieDict[ieDetails]['ieGroupTypeName'][1:]
                grpIeInfo['ieList'].append(ieInfo)
        if grpIeInfo not in grpIeDataTypeData['grpList']:
            grpIeDataTypeData['grpList'].append(grpIeInfo)
    template = Template()
    ttFileNamefactoryH = 'tts/grpIeDataTypetemplate.h.tt'
    outputDir = '../../src/gtpV2Codec/ieClasses/'
    if not os.path.exists(outputDir):
        os.makedirs(outputDir)
    outputFileName = 'gtpV2GrpIeDataTypes.h'
    # Re-initialise the Template with output destination config.
    template.__init__({'OUTPUT': outputFileName, 'OUTPUT_PATH': outputDir})
    template.process(ttFileNamefactoryH, {'tempdata': grpIeDataTypeData})
def testListVmethods(self):
    """Register custom list virtual methods (both via the global
    Stash.LIST_OPS table and via context.define_vmethod) and run the
    DATA expectations against a varied parameter set."""
    def odd(seq):
        # Keep only odd numbers.
        return [x for x in seq if x % 2 != 0]
    # Globally-registered list vmethods.
    Stash.LIST_OPS["sum"] = lambda seq: sum(seq, 0)
    Stash.LIST_OPS["odd"] = odd
    Stash.LIST_OPS["jumble"] = jumble
    params = {"metavars": ("foo", "bar", "baz", "qux", "wiz", "waz", "woz"),
              "people": [{"id": "tom", "name": "Tom"},
                         {"id": "dick", "name": "Richard"},
                         {"id": "larry", "name": "Larry"}],
              "primes": (13, 11, 17, 19, 2, 3, 5, 7),
              "phones": {"3141": "Leon", "5131": "Andy", "4131": "Simon"},
              "groceries": {"Flour": 3, "Milk": 1, "Peanut Butter": 21},
              "names": [MyObject(x) for x in ("Tom", "Dick", "Larry")],
              "numbers": [MyObject(x) for x in ("1", "02", "10", "12", "021")],
              "duplicates": (1, 1, 2, 2, 3, 3, 4, 4, 5, 5)}
    # Context-scoped registrations ("list" and "array" are aliases).
    context = Template().context()
    context.define_vmethod("list", "oddnos", odd)
    context.define_vmethod("array", "jumblate", jumble)
    self.Expect(DATA, None, params)
def generateIeDataType(self, currentType, typeVal, varList):
    """Accumulate one IE's data-type and type-enum descriptors into the
    module-level ieDataTypeData dict and (re)render the IE header via
    the TT template."""
    ieinfo = {}
    ieTypeInfo = {}
    ieinfo['ieName'] = currentType + 'IeData'
    ieinfo['varList'] = varList
    ieTypeInfo['ieName'] = currentType + 'IeType'
    ieTypeInfo['value'] = typeVal
    # De-duplicate: only append descriptors not seen before.
    if ieinfo not in ieDataTypeData['ieList']:
        ieDataTypeData['ieList'].append(ieinfo)
    if ieTypeInfo not in ieDataTypeData['ieTypeList']:
        ieDataTypeData['ieTypeList'].append(ieTypeInfo)
    # print(ieDataTypeData)
    template = Template()
    ttFileNamefactoryH = 'tts/ieDataTypetemplate.h.tt'
    outputDir = '../../src/gtpV2Codec/ieClasses'
    if not os.path.exists(outputDir):
        os.makedirs(outputDir)
    outputFileName = 'gtpV2IeDataTypes.h'
    # NOTE(review): Template() is constructed and then explicitly
    # re-initialised with the output config — presumably the project's
    # Template supports this; constructing with the dict directly would
    # be cleaner. Confirm before changing.
    template.__init__({'OUTPUT': outputFileName, 'OUTPUT_PATH': outputDir})
    template.process(ttFileNamefactoryH, {'tempdata': ieDataTypeData})
def format_msg(self):
    """Assemble a multipart/alternative MIME message from the plain-text
    and/or HTML templates and return it as a string."""
    msg = MIMEMultipart('alternative')
    msg['From'] = self.from_email
    msg['To'] = ", ".join(self.to_emails)
    msg['Subject'] = self.subject
    # Attach the plain-text part when a text template is configured.
    if self.template_name is not None:
        plain_tmpl = Template(self.template_name, context=self.context)
        msg.attach(MIMEText(plain_tmpl.render(), "plain"))
    # Attach the HTML part when an HTML template is configured.
    if self.template_html is not None:
        html_tmpl = Template(self.template_html, context=self.context)
        msg.attach(MIMEText(html_tmpl.render(), 'html'))
    return msg.as_string()
$if d: <table class="$kls"\ $if id: id="$id"\ ><thead><tr><th>Variable</th><th>Value</th></tr></thead> <tbody> $ temp = d.items() $temp.sort() $for kv in temp: <tr><td>$kv[0]</td><td class="code"><div>$prettify(kv[1])</div></td></tr> </tbody> </table> $else: <p>No data.</p> """ dicttable_r = Template(dicttable_t, filter=websafe) djangoerror_r = Template(djangoerror_t, filter=websafe) def djangoerror(): def _get_lines_from_file(filename, lineno, context_lines): """ Returns context_lines before and after lineno from file. Returns (pre_context_lineno, pre_context, context_line, post_context). """ try: source = open(filename).readlines() lower_bound = max(0, lineno - context_lines) upper_bound = lineno + context_lines pre_context = \
def mark_resolved(task_data, delete, posts):
    """Resolve (and optionally delete) reported posts.

    posts: mapping of board name -> list of post numbers.
    Accumulates per-post errors instead of aborting, and restores the
    active board in the WSGI environ before returning the results page.
    """
    referer = local.environ['HTTP_REFERER']
    user = task_data.user
    errors = []
    board_obj = None
    old_board_obj = local.environ['waka.board']
    # NOTE: the loop variable deliberately shadows the `posts` mapping —
    # inside the loop body `posts` is the per-board list of post numbers.
    for (board_name, posts) in posts.iteritems():
        # Access rights enforcement.
        if user.account == staff.MODERATOR and board_name not in user.reign:
            errors.append({'error': '/%s/*: Sorry, you lack access rights.'\
                           % (board_name)})
            continue
        for post in posts:
            session = model.Session()
            table = model.report
            sql = table.select().where(
                and_(table.c.postnum == post,
                     table.c.board == board_name))
            row = session.execute(sql).fetchone()
            if not row:
                errors.append({'error': '%s,%d: Report not found.'\
                               % (board_name, int(post))})
                continue
            sql = table.delete().where(
                and_(table.c.postnum == post,
                     table.c.board == board_name))
            session.execute(sql)
            # Log the resolved post.
            task_data.contents.append('/'.join(['', board_name, post]))
        if delete:
            # Deletion runs with the target board active in the environ.
            try:
                board_obj = board.Board(board_name)
                local.environ['waka.board'] = board_obj
            except WakaError:
                errors.append({'error': '%s,*: Error loading board.'\
                               % (board_name)})
                continue
            try:
                board_obj.delete_stuff(posts, '', False, False,
                                       admindelete=True,
                                       admin_task_data=task_data)
            except WakaError:
                errors.append({'error': '%s,%d: Post already deleted.'\
                               % (board_name, int(post))})
    local.environ['waka.board'] = old_board_obj
    # TODO: This probably should be refactored into StaffInterface.
    return Template('report_resolved', errors=errors,
                    error_occurred=len(errors) > 0,
                    admin=user.login_data.cookie,
                    username=user.username,
                    type=user.account,
                    boards_select=user.reign,
                    referer=referer)
def add_admin_entry(task_data, option, comment, ip='',
                    mask='255.255.255.255', sval1='', total='',
                    expiration=0, caller=''):
    """Add an entry to the admin table: an IP ban/whitelist (option in
    ('ipban', 'whitelist')) or a string-based ban (any other option).

    Duplicate checks raise WakaError; the created row's id and a
    human-readable summary are recorded on task_data. Returns either the
    'edit_successful' template (popup caller) or an HTTP redirect to the
    bans panel.
    """
    session = model.Session()
    table = model.admin
    ival1 = ival2 = 0
    if not comment:
        raise WakaError(strings.COMMENT_A_MUST)
    if option in ('ipban', 'whitelist'):
        if not ip:
            raise WakaError('IP address required.')
        if not mask:
            mask = '255.255.255.255'
        # Convert to decimal.
        (ival1, ival2) = (misc.dot_to_dec(ip), misc.dot_to_dec(mask))
        # Reject entries whose masked network matches an existing one.
        sql = table.select().where(table.c.type == option)
        query = session.execute(sql)
        for row in query:
            try:
                if int(row.ival1) & int(row.ival2) == ival1 & ival2:
                    raise WakaError('IP address and mask match ban #%d.' % \
                                    (row.num))
            except ValueError:
                raise WakaError("Entry #%s on ban table is inconsistent. "
                                "This shouldn't happen." % row.num)
        # Add info to task data.
        content = ip + (' (' + mask + ')' if mask else '')
        if total == 'yes':
            # Total ban: also block browsing via .htaccess.
            add_htaccess_entry(ip)
            content += ' (no browse)'
        content += ' "' + comment + '"'
        task_data.contents.append(content)
    else:
        # String-based ban (word filter etc.).
        if not sval1:
            raise WakaError(strings.STRINGFIELDMISSING)
        sql = table.select().where(
            and_(table.c.sval1 == sval1, table.c.type == option))
        row = session.execute(sql).fetchone()
        if row:
            raise WakaError('Duplicate String in ban #%d.' % (row.num))
        # Add ifno to task data.
        task_data.contents.append(sval1)
    comment = str_format.clean_string(\
        str_format.decode_string(comment, config.CHARSET))
    # Expiration is given as a duration; convert to an absolute time.
    expiration = int(expiration) if expiration else 0
    if expiration:
        expiration = expiration + time.time()
    sql = table.insert().values(type=option, comment=comment,
                                ival1=int(ival1), ival2=int(ival2),
                                sval1=sval1, total=total,
                                expiration=expiration)
    result = session.execute(sql)
    task_data.admin_id = result.inserted_primary_key[0]
    # Add specific action name to task data.
    task_data.action = option
    board = local.environ['waka.board']
    forward_url = misc.make_script_url(task='bans', board=board.name)
    if caller == 'window':
        return Template('edit_successful')
    return util.make_http_forward(forward_url, config.ALTERNATE_REDIRECT)
def test_lambda(self):
    """Lambdas are usable inside template expressions."""
    source = '{{ list(map(lambda x: x * 2, [1, 2, 3])) }}'
    result = Template(source).render()
    self.assertEqual(result, '[2, 4, 6]')
def testForeach(self):
    """FOREACH directive tests: build a parameter set covering scalars,
    tuples, lists of dicts, nested lists, hashes and a callable, then
    run the DATA expectations against a default and a debug-enabled
    Template."""
    # NATO-alphabet letters used throughout the expected output.
    a = 'alpha'
    b = 'bravo'
    c = 'charlie'
    d = 'delta'
    l = 'lima'
    o = 'oscar'
    r = 'romeo'
    u = 'uncle'
    w = 'whisky'
    day = -1
    days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
            'Saturday', 'Sunday']
    months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug',
              'sep', 'oct', 'nov', 'dec']
    people = ({'id': 'abw', 'name': 'Andy Wardley'},
              {'id': 'sam', 'name': 'Simon Matthews'})
    seta = (a, b, w)
    setb = (c, l, o, u, d)
    params = {
        'a': a,
        'b': b,
        'c': c,
        'C': c.upper(),
        'd': d,
        'l': l,
        'o': o,
        'r': r,
        'u': u,
        'w': w,
        'seta': seta,
        'setb': setb,
        'users': people,
        'item': 'foo',
        'items': ['foo', 'bar'],
        'days': days,
        # Exposed as a callable: invoking it yields the month list.
        'months': lambda *_: months,
        'format': format,
        'people': [{'id': 'abw', 'code': 'abw', 'name': 'Andy Wardley'},
                   {'id': 'aaz', 'code': 'zaz',
                    'name': 'Azbaz Azbaz Zazbazzer'},
                   {'id': 'bcd', 'code': 'dec',
                    'name': 'Binary Coded Decimal'},
                   {'id': 'efg', 'code': 'zzz', 'name': 'Extra Fine Grass'}],
        'sections': {'one': 'Section One',
                     'two': 'Section Two',
                     'three': 'Section Three',
                     'four': 'Section Four'},
        'nested': [['a', 'b', 'c'], ['x', 'y', 'z']]
    }
    tmpl = Template({'INTERPOLATE': True, 'POST_CHOMP': 1,
                     'ANYCASE': False})
    ttdebug = Template({'DEBUG': True, 'DEBUG_FORMAT': ''})
    self.Expect(DATA, (('default', tmpl), ('debug', ttdebug)), params)
def test_index(self):
    """Subscript syntax indexes into context variables."""
    result = Template("{{ a[2] }}").render(a=[1, 2, 3])
    self.assertEqual(result, '3')
def __init__(self, channame, dim, lPos=3, iPos=11): self._dim = dim #dimension (x,y,z) plotvars = {'x': 'c*', 'y': '|x_{F}|', 'z': 'M', '1D': 'Template bin'} self._plotvar = plotvars[self._dim] #string of plot variable self._lPos = lPos self._iPos = iPos self._channame = channame #name of the channel this plot belongs to self._canvsize = (1100, 900) if self._channame.startswith('t3'): self._canvsize = (5500, 900) #pieces of the plot #MC stack self._MC_stack = THStack(channame + '_' + self._dim + '_stack', ';;Events/bin') #list of MC histograms for legend self._MC_histos = [] #make a new template to clone histogram shapes from dummy_template_name = channame + '__' + self._dim + '_dummy_template' dummy_template = Template(dummy_template_name, dummy_template_name, None) #clone histogram for MC error histograms, residual plots, and MC err residuals self._MC_err_histo = None self._resid = None self._MC_err_resid = None if self._dim == 'x': self._MC_err_histo = dummy_template.getHistoX().Clone() self._resid = dummy_template.getHistoX().Clone() self._MC_err_resid = dummy_template.getHistoX().Clone() elif self._dim == 'y': self._MC_err_histo = dummy_template.getHistoY().Clone() self._resid = dummy_template.getHistoY().Clone() self._MC_err_resid = dummy_template.getHistoY().Clone() elif self._dim == 'z': self._MC_err_histo = dummy_template.getHistoZ().Clone() self._resid = dummy_template.getHistoZ().Clone() self._MC_err_resid = dummy_template.getHistoZ().Clone() elif self._dim == '1D': self._MC_err_histo = dummy_template.convertTo1D().Clone() self._resid = dummy_template.convertTo1D().Clone() self._MC_err_resid = dummy_template.convertTo1D().Clone() if self._MC_err_histo == None: print 'ERROR: could not get histogram cloned for plot dimension %s in channel %s' % ( self._dim, self._channame) #Set attributes and directories #MC err histograms self._MC_err_histo.SetFillColor(kBlack) self._MC_err_histo.SetMarkerStyle(1) self._MC_err_histo.SetFillStyle(3013) 
self._MC_err_histo.SetStats(0) self._MC_err_histo.SetDirectory(0) #residual plots self._resid.SetTitle(';' + self._plotvar + ';Data/MC') self._resid.SetStats(0) self._resid.SetMarkerStyle(20) self._resid.SetDirectory(0) #MC err residuals self._MC_err_resid.SetTitle(';' + self._plotvar + ';Data/MC') self._MC_err_resid.SetFillColor(kBlack) self._MC_err_resid.SetStats(0) self._MC_err_resid.SetMarkerStyle(1) self._MC_err_resid.SetFillStyle(3013) self._MC_err_resid.SetDirectory(0) #"oneline" to go at y=1. on the residuals plot self._oneline = TLine( self._resid.GetXaxis().GetBinLowEdge(1), 1., self._resid.GetXaxis().GetBinUpEdge(self._resid.GetNbinsX()), 1.) self._oneline.SetLineWidth(3) self._oneline.SetLineStyle(2) #initialize the legend legwidth = 0.622 legheight = 0.272 if self._channame.startswith('t3'): legwidth = 0.561 legheight = 0.183 x2 = 0.922 y2 = 0.857 #if self._lPos==1 : # x2 = 0.44 #elif self._lPos==2 : # x2 = 0.7 self._leg = TLegend(x2 - legwidth, y2 - legheight, x2, y2) if self._channame.startswith('t3'): self._leg.SetNColumns(3) else: self._leg.SetNColumns(2) self._lumi_obj = None self._pnames_added_to_leg = [] #make the channel identifier text (and the key for the plot info) self._chanTxt = 'Type-' + self._channame.split('_')[0].split('t')[1] self._ckey = 't' + self._channame.split('_')[0].split('t')[1] + '_' if self._channame.split('_')[1].startswith('mu'): self._chanTxt += ' #mu+jets' self._ckey += 'mu' elif self._channame.split('_')[1].startswith('el'): self._chanTxt += ' e+jets' self._ckey += 'el' self._chanTxt += ' (' if self._channame.split('_')[1].endswith('plus'): self._chanTxt += 'Q>0 ' self._ckey += 'plus' elif self._channame.split('_')[1].endswith('minus'): self._chanTxt += 'Q<0 ' self._ckey += 'minus' if self._channame.find('SR') != -1: self._chanTxt += 'SR)' self._ckey += '_SR' elif self._channame.find('WJets_CR') != -1: self._chanTxt += 'W+jets CR)' self._ckey += '_CR'
'pre_context_lineno': pre_context_lineno, })) tback = tback.tb_next frames.reverse() urljoin = urlparse.urljoin def prettify(x): try: out = pprint.pformat(x) except Exception, e: out = '[could not display: <' + e.__class__.__name__ + \ ': '+str(e)+'>]' return out global djangoerror_r if djangoerror_r is None: djangoerror_r = Template(djangoerror_t, filename=__file__, filter=websafe) t = djangoerror_r globals = {'ctx': web.ctx, 'web':web, 'dict':dict, 'str':str, 'prettify': prettify} t.t.func_globals.update(globals) return t(exception_type, exception_value, frames) def debugerror(): """ A replacement for `internalerror` that presents a nice page with lots of debug information for the programmer. (Based on the beautiful 500 page from [Django](http://djangoproject.com/), designed by [Wilson Miner](http://wilsonminer.com/).) """ return web._InternalError(djangoerror())
def parseLineAndCreateTemplate(line):
    """Parse a raw text line into its title and components and wrap them
    in a Template."""
    return Template(parseTitle(line), parseComponents(line))
def __init__(self, infile, outfile):
    """Initialise a PyMOL-flavoured template: delegate to the base class,
    then set the template type and the PyMOL command used to load a
    structure file."""
    Template.__init__(self, infile, outfile)
    self._setTemplateType("pymol")
    self._setLoadStructureString('cmd.do("load %s")')
class EnvironmentBase(object): """ EnvironmentBase encapsulates functionality required to build and deploy a network and common resources for object storage within a specified region """ def __init__(self, view=None, env_config=EnvConfig(), config_filename=(res.DEFAULT_CONFIG_FILENAME + res.EXTENSIONS[0]), config_file_override=None): """ Init method for environment base creates all common objects for a given environment within the CloudFormation template including a network, s3 bucket and requisite policies to allow ELB Access log aggregation and CloudTrail log storage. :param view: View object to use. :param create_missing_files: Specifies policy to use when local files are missing. When disabled missing files will cause an IOException :param config_filename: The name of the config file to load by default. Note: User can still override this value from the CLI with '--config-file'. :param config: Override loading config values from file by providing config setting directly to the constructor """ self.config_filename = config_filename self.env_config = env_config self.config_file_override = config_file_override self.config = {} self.globals = {} self.template_args = {} self.template = None self.deploy_parameter_bindings = [] self.ignore_outputs = ['templateValidationHash', 'dateGenerated'] self.stack_outputs = {} self._config_handlers = [] self.stack_monitor = None self._ami_cache = None self.cfn_connection = None self.sts_credentials = None self.boto_session = None # self.env_config = env_config for config_handler in env_config.config_handlers: self._add_config_handler(config_handler) self.add_config_hook() # Load the user interface self.view = view if view else cli.CLI() # The view may override the config file location (i.e. 
command line arguments) if hasattr( self.view, 'config_filename') and self.view.config_filename is not None: self.config_filename = self.view.config_filename # Allow the view to execute the user's requested action self.view.process_request(self) def create_hook(self): """ Override in your subclass for custom resource creation. Called after config is loaded and template is initialized. After the hook completes the templates are serialized and written to file and uploaded to S3. """ pass def add_config_hook(self): """ Override in your subclass for adding custom config handlers. Called after the other config handlers have been added. After the hook completes the view is loaded and started. """ pass def deploy_hook(self): """ Extension point for modifying behavior of deploy action. Called after config is loaded and before cloudformation deploy_stack is called. Some things you can do in deploy_hook include modifying config or deploy_parameter_bindings or run arbitrary commands with boto. """ pass def delete_hook(self): """ Extension point for modifying behavior of delete action. Called after config is loaded and before cloudformation deploy_stack is called. Can be used to manage out-of-band resources with boto. """ pass def stack_event_hook_wrapper(self, event_data): """ Write the stack outputs to file before calling the stack_event_hook that the user overrides """ if self.config.get('global').get('write_stack_outputs'): self.write_stack_outputs_to_file(event_data) self.stack_event_hook(event_data) def stack_event_hook(self, event_data): """ Extension point for reacting to the cloudformation stack event stream. If global.monitor_stack is enabled in config this function is used to react to stack events. Once a stack is created a notification topic will begin emitting events to a queue. Each event is passed to this call for further processing. 
Details about the event data can be read here: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-listing-event-history.html :param event_data: The event_data hash provided the following mappings from the raw cloudformation event: "status" = "ResourceStatus" "type" = "ResourceType" "name" = "LogicalResourceId" "id" = "PhysicalResourceId" "reason" = "ResourceStatusReason" "props" = "ResourceProperties" :return bool: Indicates that processing is complete, false indicates that you are not yet done """ return True def init_action(self): """ Default init_action invoked by the CLI Generates config and ami_cache files Override in your subclass for custom initialization steps """ self.generate_config() self.generate_ami_cache() def s3_prefix(self): """ Allows subclasses to modify the default s3 prefix """ return self.config.get('template').get('s3_prefix') def stack_outputs_directory(self): """ Allows subclasses to modify the default stack outputs directory """ return self.config.get('global').get( 'stack_outputs_directory') or 'stack_outputs' def _ensure_template_dir_exists(self): template_dir = self.s3_prefix() if not os.path.exists(template_dir): os.makedirs(template_dir) return template_dir @staticmethod def serialize_templates_helper(template, s3_client, s3_upload=True): # Create stack resources for template and all child templates raw_json = template.to_template_json() # Recursively iterate through each child template to serialize it and process its children for child, _, _, _, _ in template._child_templates: EnvironmentBase.serialize_templates_helper(template=child, s3_client=s3_client, s3_upload=s3_upload) if s3_upload: # Upload the template to the s3 bucket under the template_prefix s3_client.Bucket(Template.template_bucket_default).put_object( Key=template.resource_path, Body=raw_json, ACL=Template.upload_acl) # Save the template locally with the same file hierarchy as on s3 with open(template.resource_path, 'w') as output_file: 
reloaded_template = json.loads(raw_json) output_file.write( json.dumps(reloaded_template, indent=4, separators=(',', ':'))) print "Generated {} template".format(template.name) if s3_upload: print "S3:\t{}".format( utility.get_template_s3_url(Template.template_bucket_default, template.resource_path)) print "Local:\t{}\n".format(template.resource_path) def serialize_templates(self): s3_client = utility.get_boto_resource(self.config, 's3') local_file_path = self._ensure_template_dir_exists() s3_upload = self.config.get('template').get('s3_upload', True) EnvironmentBase.serialize_templates_helper(template=self.template, s3_client=s3_client, s3_upload=s3_upload) def estimate_cost(self, template_name=None, template_url=None, stack_params=None): cfn_conn = utility.get_boto_client(self.config, 'cloudformation') if not template_url: return None estimate_cost_url = cfn_conn.estimate_template_cost( TemplateURL=template_url, Parameters=stack_params) # else: # template_body = self._load_template(template_name) # estimate_cost_url = cfn_conn.estimate_template_cost( # TemplateBody=template_body, # Parameters=stack_params) return estimate_cost_url.get('Url') def _root_template_path(self): """ Construct the root template resource path It never includes a timestamp because we need to find it by convention in the deploy step """ return utility.get_template_s3_resource_path( prefix=self.s3_prefix(), template_name=self.globals.get('environment_name'), include_timestamp=False) def _root_template_url(self): """ Construct the root template S3 URL """ return utility.get_template_s3_url( bucket_name=self.template_args.get('s3_bucket'), resource_path=self._root_template_path()) def create_action(self): """ Default create_action invoked by the CLI Loads and validates config, initializes a new template instance, and writes it to file. 
Override the create_hook in your environment to inject all of your cloudformation resources """ self.load_config() self.initialize_template() # Do custom troposphere resource creation in your overridden copy of this method self.create_hook() self.serialize_templates() def _ensure_stack_is_deployed(self, stack_name='UnnamedStack', sns_topic=None, stack_params=[]): """ Deploys the root template to cloudformation using boto First attempts to issue an update stack command If this fails because the stack does not yet exist, then issues a create stack command """ is_successful = False notification_arns = [] if sns_topic: notification_arns.append(sns_topic.arn) template_url = self._root_template_url() cfn_conn = utility.get_boto_client(self.config, 'cloudformation') try: cfn_conn.update_stack(StackName=stack_name, TemplateURL=template_url, Parameters=stack_params, NotificationARNs=notification_arns, Capabilities=['CAPABILITY_IAM']) is_successful = True print "\nSuccessfully issued update stack command for %s\n" % stack_name # Else stack doesn't currently exist, create a new stack except botocore.exceptions.ClientError as update_e: if "does not exist" in update_e.message: try: cfn_conn.create_stack(StackName=stack_name, TemplateURL=template_url, Parameters=stack_params, NotificationARNs=notification_arns, Capabilities=['CAPABILITY_IAM'], DisableRollback=True, TimeoutInMinutes=TIMEOUT) is_successful = True print "\nSuccessfully issued create stack command for %s\n" % stack_name except botocore.exceptions.ClientError as create_e: print "Deploy failed: \n\n%s\n" % create_e.message else: raise return is_successful def add_parameter_binding(self, key, value): """ Deployment parameters are used to provide values for parameterized templates The deploy_parameter_bindings is populated with hashes of the form: { 'ParameterKey': <key>, 'ParameterValue': <value> } :param key: String representing an input Parameter name in the root template :param value: Troposphere value for the 
Parameter """ self.deploy_parameter_bindings.append({ 'ParameterKey': key, 'ParameterValue': value }) def deploy_action(self): """ Default deploy_action invoked by the CLI. Loads and validates config, then deploys the root template to cloudformation using boto Override the deploy_hook in your environment to intercept the deployment process This can be useful for creating resources using boto outside of cloudformation """ self.load_config() self.deploy_hook() stack_name = self.config['global']['environment_name'] # initialize stack event monitor topic = None queue = None if self.stack_monitor and self.stack_monitor.has_handlers(): (topic, queue) = self.stack_monitor.setup_stack_monitor(self.config) try: # First try to do an update-stack... if it doesn't exist, then try create-stack is_successful = self._ensure_stack_is_deployed( stack_name, sns_topic=topic, stack_params=self.deploy_parameter_bindings) if self.stack_monitor and is_successful: self.stack_monitor.start_stack_monitor( queue, stack_name, debug=self.globals['print_debug']) except KeyboardInterrupt: if self.stack_monitor: print 'KeyboardInterrupt: calling cleanup' self.stack_monitor.cleanup_stack_monitor(topic, queue) raise if self.stack_monitor: self.stack_monitor.cleanup_stack_monitor(topic, queue) def delete_action(self): """ Default delete_action invoked by CLI Loads and validates config, then issues the delete stack command to the root stack Override the delete_hook in your environment to intercept the delete process with your own code This can be useful for deleting any resources that were created outside of cloudformation """ self.load_config() self.delete_hook() cfn_conn = utility.get_boto_client(self.config, 'cloudformation') stack_name = self.config['global']['environment_name'] cfn_conn.delete_stack(StackName=stack_name) print "\nSuccessfully issued delete stack command for %s\n" % stack_name def _validate_config_helper(self, schema, config, path): # Check each requirement for (req_key, 
req_value) in schema.iteritems(): # Check for key match, usually only one match but parametrized keys can have multiple matches # Uses 'filename' match, similar to regex but only supports '?', '*', [XYZ], [!XYZ] filter_fun = lambda candidate_key: fnmatch(candidate_key, req_key) # Find all config keys matching the requirement matches = filter(filter_fun, config.keys()) if not matches: message = "Config file missing section " + str(path) + ( '.' if path is not '' else '') + req_key raise ValidationError(message) # Validate each matching config entry for matching_key in matches: new_path = path + ('.' if path is not '' else '') + matching_key # ------------ value check ----------- if isinstance(req_value, basestring): req_type = res.get_type(req_value) if not isinstance(config[matching_key], req_type): message = "Type mismatch in config, %s should be of type %s, not %s" % \ (new_path, req_value, type(config[matching_key]).__name__) raise ValidationError(message) # else: # print "%s validated: %s == %s" % (new_path, req_value, type(config[matching_key]).__name__) # if the schema is nested another level .. we must go deeper elif isinstance(req_value, dict): matching_value = config[matching_key] if not isinstance(matching_value, dict): message = "Type mismatch in config, %s should be a dict, not %s" % \ (new_path, type(matching_value).__name__) raise ValidationError(message) self._validate_config_helper(req_value, matching_value, new_path) elif isinstance(req_value, list): matching_value = config[matching_key] if not isinstance(matching_value, list): message = "Type mismatch in config, %s should be a list, not %s" % \ (new_path, type(matching_value).__name__) raise ValidationError(message) def _validate_region(self, config): """ Checks boto.region_name against the list of valid regions raising an exception if not. 
""" valid_regions = config['global']['valid_regions'] region_name = config['boto']['region_name'] if region_name not in valid_regions: raise ValidationError('Unrecognized region name: ' + region_name) def _validate_config(self, config, factory_schema=res.CONFIG_REQUIREMENTS): """ Compares provided dict against TEMPLATE_REQUIREMENTS. Checks that required all sections and values are present and that the required types match. Throws ValidationError if not valid. :param config: dict to be validated """ config_reqs_copy = copy.deepcopy(factory_schema) # Merge in any requirements provided by config handlers for handler in self._config_handlers: config_reqs_copy.update(handler.get_config_schema()) self._validate_config_helper(config_reqs_copy, config, '') # # Validate region # self._validate_region(config) def _add_config_handler(self, handler): """ Register classes that will augment the configuration defaults and/or validation logic here """ if not hasattr(handler, 'get_factory_defaults') or not callable( getattr(handler, 'get_factory_defaults')): raise ValidationError( 'Class %s cannot be a config handler, missing get_factory_defaults()' % type(handler).__name__) if not hasattr(handler, 'get_config_schema') or not callable( getattr(handler, 'get_config_schema')): raise ValidationError( 'Class %s cannot be a config handler, missing get_config_schema()' % type(handler).__name__) self._config_handlers.append(handler) @staticmethod def _config_env_override(config, path, print_debug=False): """ Update config value with values from the environment variables. If the environment variable exists the config value is replaced with its value. For config parameters like template.ec2_key_default this function will expect an environment variable matching the <section label>_<config_key> in all caps (e.g. TEMPLATE_EC2_KEY_DEFAULT). For environment variables containing multiple subsections the same pattern applies. 
For example: self._update_config_from_env('db', 'password') for the config file: { ... 'db': { 'label1': { ... 'password': '******' }, 'label2': { ... 'password': '******' } } } Would replace those two database passwords if the following is run from the shell: > export DB_LABEL1_PASSWORD=myvoiceismypassword12345 > export DB_LABEL2_PASSWORD=myvoiceismyotherpassword12345 """ for key, val in config.iteritems(): new_path = path + ('.' if path is not '' else '') + key env_name = '_'.join(new_path.split('.')).upper() if not isinstance(val, dict): env_value = os.environ.get(env_name) if print_debug: print "Checking %s (%s)" % (env_name, new_path) if env_value is None: continue # TODO: Need better schema validation for non-string values from env vars # Convert true/false strings to booleans for schema validation if env_value.lower() == 'true': env_value = True elif env_value.lower() == 'false': env_value = False default_value = config.get(key) config[ key] = env_value if env_value is not None else default_value if env_value is not None: print "* Updating %s from '%s' to value of '%s'" % ( new_path, default_value, env_name) else: EnvironmentBase._config_env_override(config[key], new_path, print_debug=print_debug) def generate_config(self): """ Generate config dictionary from defaults Add defaults from all registered config handlers (added patterns, etc.) Write file to self.config_filename """ if os.path.isfile(self.config_filename): overwrite = raw_input("%s already exists. Overwrite? 
(y/n) " % self.config_filename).lower() print if not overwrite == 'y': return config = copy.deepcopy(res.FACTORY_DEFAULT_CONFIG) # Merge in any defaults provided by registered config handlers for handler in self._config_handlers: config.update(handler.get_factory_defaults()) with open(self.config_filename, 'w') as f: f.write( json.dumps(config, indent=4, sort_keys=True, separators=(',', ': '))) print 'Generated config file at %s\n' % self.config_filename def load_config(self, view=None, config=None): """ Load config from self.config_filename, break if it doesn't exist Load any overrides from environment variables Validate all loaded values """ # Allow overriding the view for testing purposes if not view: view = self.view # Allow overriding of the entire config object if self.config_file_override: config = self.config_file_override # Else read from file else: config = res.load_file('', self.config_filename) # Load in cli config overrides view.update_config(config) # record value of the debug variable debug = config['global']['print_debug'] # Check the environment variables for any overrides self._config_env_override(config, '', print_debug=debug) # Validate and save results self._validate_config(config) self.config = config # Save shortcut references to commonly referenced config sections self.globals = self.config.get('global', {}) self.template_args = self.config.get('template', {}) # Register all stack handlers if self.globals['monitor_stack']: self.stack_monitor = monitor.StackMonitor( self.globals['environment_name']) self.stack_monitor.add_handler(self) def initialize_template(self): """ Create new Template instance, set description and common parameters and load AMI cache. 
""" print '\nGenerating templates for {} stack\n'.format( self.globals['environment_name']) # Configure Template class with S3 settings from config Template.template_bucket_default = self.template_args.get('s3_bucket') Template.s3_path_prefix = self.s3_prefix() Template.stack_timeout = self.template_args.get("timeout_in_minutes") Template.upload_acl = self.template_args.get('s3_upload_acl') Template.include_timestamp = self.template_args.get( 'include_timestamp') Template.include_templateValidationHash_output = self.template_args.get( 'include_templateValidationHash_output') Template.include_dateGenerated_output = self.template_args.get( 'include_dateGenerated_output') # Create the root template object self.template = Template( self.globals.get('environment_name', 'default_template')) self.template.description = self.template_args.get( 'description', 'No Description Specified') self.template.resource_path = self._root_template_path() ec2_key = self.config.get('template').get('ec2_key_default', 'default-key') self.template._ec2_key = self.template.add_parameter( Parameter( 'ec2Key', Type='String', Default=ec2_key, Description= 'Name of an existing EC2 KeyPair to enable SSH access to the instances', AllowedPattern=res.get_str('ec2_key'), MinLength=1, MaxLength=255, ConstraintDescription=res.get_str('ec2_key_message'))) bucket_name = self.config.get('logging').get('s3_bucket') self.template.add_utility_bucket(name=bucket_name) self.template.add_log_group() self.template.add_vpcflowlogs_role() ami_filename = self.config['template'].get('ami_map_file') if ami_filename: ami_cache = res.load_yaml_file(ami_filename) self.template.add_ami_mapping(ami_cache) def generate_ami_cache(self): """ Generate ami_cache.json file from defaults """ ami_cache_filename = res.DEFAULT_AMI_CACHE_FILENAME + res.EXTENSIONS[0] if os.path.isfile(ami_cache_filename): overwrite = raw_input("%s already exists. Overwrite? 
(y/n) " % ami_cache_filename).lower() print if not overwrite == 'y': return with open(ami_cache_filename, 'w') as f: f.write( json.dumps(res.FACTORY_DEFAULT_AMI_CACHE, indent=4, separators=(',', ': '))) print "Generated AMI cache file at %s\n" % ami_cache_filename def to_json(self): """ Centralized method for outputting the root template with a timestamp identifying when it was generated and for creating a SHA256 hash representing the template for validation purposes Also recursively processess all child templates """ return self.template.to_template_json() # Called after add_child_template() has attached common parameters and some instance attributes: # - RegionMap: Region to AMI map, allows template to be deployed in different regions without updating AMI ids # - ec2Key: keyname to use for ssh authentication # - vpcCidr: IP block claimed by whole VPC # - vpcId: resource id of VPC # - commonSecurityGroup: sg identifier for common allowed ports (22 in from VPC) # - utilityBucket: S3 bucket name used to send logs to # - [public|private]Subnet[0-9]: indexed and classified subnet identifiers # # and some instance attributes referencing the attached parameters: # - self.vpc_cidr # - self.vpc_id # - self.common_security_group # - self.utility_bucket # - self.subnets: keyed by type, layer, and AZ index (e.g. self.subnets['public']['web'][1]) def add_child_template(self, child_template, merge=False, depends_on=[]): """ Saves reference to provided template. References are processed in write_template_to_file(). 
:param child_template: The Environmentbase Template you want to associate with the current instances :param depends_on: List of upstream resources that must be processes before the provided template :param merge: Determines whether the resource is attached as a child template or all of its resources merged into the current template """ return self.template.add_child_template(child_template, merge=merge, depends_on=depends_on) def write_stack_outputs_to_file(self, event_data): """ Given the stack event data, determine if the stack has finished executing (CREATE_COMPLETE or UPDATE_COMPLETE) If it has, write the stack outputs to file """ if event_data['type'] == 'AWS::CloudFormation::Stack' and \ (event_data['status'] == 'CREATE_COMPLETE' or event_data['status'] == 'UPDATE_COMPLETE'): self.write_stack_output_to_file(stack_id=event_data['id'], stack_name=event_data['name']) def write_stack_output_to_file(self, stack_id, stack_name): """ Given a CFN stack's physical resource ID, query the stack for its outputs Save outputs to file as JSON at ./<stack_outputs_dir>/<stack_name>.json """ # Grab all the outputs from the cfn stack object as k:v pairs stack_outputs = {} for output in self.get_cfn_stack_obj(stack_id).outputs: stack_outputs[output.key] = output.value stack_outputs_dir = self.stack_outputs_directory() # Ensure <stack_outputs_dir> directory exists if not os.path.isdir(stack_outputs_dir): os.mkdir(stack_outputs_dir) # Write the JSON-formatted stack outputs to ./<stack_outputs_dir>/<stack_name>.json stack_output_filename = os.path.join(stack_outputs_dir, stack_name + '.json') with open(stack_output_filename, 'w') as output_file: output_file.write( json.dumps(stack_outputs, indent=4, separators=(',', ':'))) if self.globals['print_debug']: print "Outputs for {0} written to {1}\n".format( stack_name, stack_output_filename) def get_stack_output(self, stack_id, output_name): """ Given the PhysicalResourceId of a Stack and a specific output key, return the output value 
Raise an exception if the output key is not found Example: def stack_event_hook(self, event_data): elb_dns_name = self.get_stack_output(event_data['id'], 'ElbDnsName') """ stack_obj = self.get_cfn_stack_obj(stack_id) for output in stack_obj.outputs: if output.key == output_name: return output.value # If the output wasn't found in the stack, raise an exception raise Exception("%s did not output %s" % (stack_obj.stack_name, output_name)) def get_cfn_stack_obj(self, stack_id): """ Given the unique physical stack ID, return exactly one cloudformation stack object """ return self.get_cfn_connection().describe_stacks(stack_id)[0] def get_cfn_connection(self): """ We persist the CFN connection so that we don't create a new session with each request """ if not self.cfn_connection: self.cfn_connection = cloudformation.connect_to_region( self.config.get('boto').get('region_name')) return self.cfn_connection def get_sts_credentials(self, role_session_name, role_arn): """ We persist the STS credentials so that we don't create a new session with each request """ if not self.sts_credentials: sts_connection = sts.STSConnection() assumed_role = sts_connection.assume_role( role_arn=role_arn, role_session_name=role_session_name) self.sts_credentials = assumed_role.credentials return self.sts_credentials
def test_simple_2(self):
    """len() is callable inside a template expression."""
    output = Template('{{ len([1, 2, 3]) }}').render()
    self.assertEqual(output, '3')
def test_elif(self):
    """An elif branch is rendered when taken, and the else branch when
    neither condition holds."""
    taken = Template('{% if 2 > 3 %}2{% elif 3 > 2 %}3{% else %}1').render()
    self.assertEqual(taken, '3')
    fallthrough = Template('{% if 2 > 3 %}2{% elif 3 < 2 %}3{% else %}0').render()
    self.assertEqual(fallthrough, '0')
class CodeGenerater(object):
    """Generate project source/header files for a compiled Graph via templates."""

    def __init__(self, graph: Graph, params, config: Config) -> None:
        """Bind the graph, parameters and config, and prime the template context.

        :param graph: computation graph to emit code for (single input only)
        :param params: parameter data passed through to the templates
        :param config: build configuration; supplies output_pj_path
        """
        self.graph = graph
        self.params = params
        self.config = config
        # The emitted runtime handles exactly one graph input.
        assert len(self.graph.get_inputs()
                   ) == 1, 'Codegenerator does not support multiple inputs.'
        # Template context shared by every generated file.
        self.template = Template({
            'graph': self.graph,
            'params': self.params,
            'config': self.config,
            'graph_input': self.graph.get_inputs()[0],
            # The last non-variable node is treated as the graph output.
            'graph_output': self.graph.non_variables[-1],
        })
        self.src_dir = path.join(self.config.output_pj_path, 'src')
        self.header_dir = path.join(self.config.output_pj_path, 'include')

    def generate_files_from_template(self) -> None:
        """Walk the template tree and emit every file into the output project.

        Files whose basename contains 'tpl' (and doesn't start with '.') are
        rendered through the template engine; everything else is copied verbatim.
        """
        src_dir_path = self.template.root_dir
        # '/templates/manual' entries are generated explicitly by the
        # generate_* methods below, so they are excluded from the bulk pass.
        file_pathes = utils.get_files(src_dir_path, excepts='/templates/manual')
        for src_file_path in file_pathes:
            src_file = Path(src_file_path)
            if src_file.is_file():
                relative_file_path = str(src_file.relative_to(src_dir_path))
                dest_file_path = path.join(self.config.output_pj_path,
                                           relative_file_path)
                dest_file_dir_path = path.dirname(dest_file_path)
                # if the file's dir not exist, make it
                utils.make_dirs([dest_file_dir_path])
                if 'tpl' in path.basename(src_file_path) and path.basename(
                        src_file_path)[0] != '.':
                    relative_src_file_path = str(
                        src_file.relative_to(self.template.root_dir))
                    self.template.generate(relative_src_file_path,
                                           dest_file_dir_path)
                else:
                    # Non-template asset: copy with metadata preserved.
                    shutil.copy2(src_file_path, dest_file_path)

    def generate_inputs(self) -> None:
        """Emit one .cpp/.h pair per constant node of the graph."""
        input_src_dir_path = path.join(self.src_dir, 'inputs')
        input_header_dir_path = path.join(self.header_dir, 'inputs')
        utils.make_dirs([input_src_dir_path, input_header_dir_path])
        input_src_template_path = path.join('consts', 'input.tpl.cpp')
        input_header_template_path = path.join('consts', 'input.tpl.h')
        for node in self.graph.consts:
            self.template.manual_generate(input_src_template_path,
                                          input_src_dir_path,
                                          new_name=node.name + '.cpp',
                                          node=node)
            self.template.manual_generate(input_header_template_path,
                                          input_header_dir_path,
                                          new_name=node.name + '.h',
                                          node=node)

    def generate_thresholds(self):
        """Emit threshold tables for quantized Conv nodes that carry thresholds."""
        src_template_path = path.join('manual', 'consts', 'thresholds.tpl.cpp')
        header_template_path = path.join('manual', 'consts', 'thresholds.tpl.h')
        qconvs_with_ts = [
            x for x in self.graph.non_variables
            if x.op_type == 'Conv' and cast(Conv, x).is_quantized
            and cast(Conv, x).has_thresholds
        ]
        self.template.generate(src_template_path,
                               self.src_dir,
                               quantized_convs=qconvs_with_ts)
        self.template.generate(header_template_path,
                               self.header_dir,
                               quantized_convs=qconvs_with_ts)

    def generate_scaling_factors(self):
        """Emit scaling-factor tables for every quantized Conv in the graph."""
        src_template_path = path.join('manual', 'consts',
                                      'scaling_factors.tpl.cpp')
        header_template_path = path.join('manual', 'consts',
                                         'scaling_factors.tpl.h')
        qconvs_convs = self.graph.convs(quantized_only=True)
        self.template.generate(src_template_path,
                               self.src_dir,
                               quantized_convs=qconvs_convs)
        self.template.generate(header_template_path,
                               self.header_dir,
                               quantized_convs=qconvs_convs)

    def reuse_output_buffers(self):
        """Let later operators reuse output buffers of earlier, dead operators.

        Pass 1 collects, per operator, the earlier buffers that are large
        enough, type-compatible, and no longer read by any remaining operator.
        Pass 2 greedily assigns each operator at most one such buffer, never
        handing the same buffer out twice.
        """
        operations = self.graph.non_variables
        candidates = defaultdict(set)
        for idx, op in enumerate(operations):
            prev_ops = operations[:idx]
            # NOTE: next_ops includes op itself, so buffers op still reads
            # are counted as live and excluded below.
            next_ops = operations[idx:]
            next_inputs = []
            for x in next_ops:
                for i in x.input_ops.values():
                    next_inputs.append(i.name)
            # Buffers aliased by Reshape/Split (and their inputs) must never
            # be reused — they share storage with other tensors.
            aliased = set()
            for prev_op in prev_ops:
                if prev_op.op_type in ['Reshape', 'Split']:
                    aliased.add(prev_op.name)
                    for i in prev_op.input_ops.values():
                        aliased.add(i.name)
                # Dead (not read again), large enough, and dtype-compatible.
                if prev_op.name not in next_inputs and prev_op.size >= op.size and prev_op.dtype == op.dtype:
                    candidates[op.name].add(prev_op.name)
            candidates[op.name] = candidates[op.name].difference(aliased)
        being_reused = []
        reusing = []
        for op in operations:
            cs = candidates[op.name]
            if cs:
                reusable_buffer = None
                for option in cs:
                    # Each buffer may back at most one reuser, and an op that
                    # is itself being reused cannot donate its buffer.
                    if option not in being_reused and option not in reusing:
                        reusable_buffer = option
                        break
                if reusable_buffer:
                    op.available_buffer = reusable_buffer
                    being_reused.append(reusable_buffer)
                    reusing.append(op.name)
# Copy the SCF input next to the generated outputs so the run is
# self-contained.
scf_file_copy = output_path + os.sep + os.path.basename(scf_file_name)
shutil.copy2(scf_file_name, scf_file_copy)

# Python 2 print statements: report which inputs this run operates on.
print 'Working on the following files'
print 'Mol file: ' + mol_file_name
print 'SCF file: ' + scf_file_name
print 'Output folder: ' + output_path

# end of file creation/manipulation module

# write_integrate_dia_file(integrated_dia_file, scf_file, mol_file)
# THE HANDLING OF THESE FILES WERE DELEGATED TO THE SPECIFIC CODE
# integrated_dia_file, integrate_para_file, integrate_total_file,

try:
    # assemble input
    template = Template()
    # NOTE: rebinds scf_file from a filename/handle to a parsed Scf object.
    scf_file = Scf(template, scf_file)
    molecule = Molecule(mol_file)

    # assemble output: render each calculation section through the template.
    london_text = write_london(template, scf_file, molecule)
    j = J(template, scf_file, molecule)
    jdia_text = j.write_j_dia(template)
    jpara_text = j.write_j_para(template)
    jtotal_text = j.write_j_total(template)
    integrate = Integrate(template, j.scf, molecule)
    (int_dia_text_1, int_dia_text_2) = integrate.write_integrate_dia()
    (int_para_text_1, int_para_text_2) = integrate.write_integrate_para()
def template(self): return Template(self.client)
def _create_default_template(self): default_template = Template("default") self.template_list["default"] = default_template
# Defaults for every setting a template *might* provide, so downstream code
# can read them without guarding against missing names.
pkg_service_deps = ''
file_metadata = ''
file_data = ''
services = ''
service_action = ''
service_pkg_deps = ''

if not arguments['--template']:
    # Command-line driven run: hosts and exactly one action come straight
    # from the parsed CLI arguments.
    hosts = arguments['--hosts']
    if arguments['--run-cmd']:
        commands = arguments['--run-cmd']
    elif arguments['--install-pkg']:
        install_pkgs = arguments['--install-pkg']
    elif arguments['--uninstall-pkg']:
        uninstall_pkgs = arguments['--uninstall-pkg']
else:
    # Template driven run: load the YAML template and take hosts plus the
    # single action it declares.
    t = Template()
    template = arguments['--template']
    parsed = t.loadYamlFile(template, "templates")
    hosts = parsed['hosts']
    if 'run-cmd' in parsed['action'].keys():
        commands = parsed['action']['run-cmd']
    elif 'install-pkg' in parsed['action'].keys():
        install_pkgs = parsed['action']['install-pkg']
        # NOTE(review): assumes any install-pkg template always carries
        # 'service_deps' — confirm, otherwise this raises KeyError.
        pkg_service_deps = parsed['action']['service_deps']
    elif 'uninstall-pkg' in parsed['action'].keys():
        uninstall_pkgs = parsed['action']['uninstall-pkg']
    elif 'upgrade-pkg' in parsed['action'].keys():
        upgrade_pkgs = parsed['action']['upgrade-pkg']
    elif 'create-file' in parsed['action'].keys():
        file_metadata = parsed['action']['create-file']['metadata']
        file_data = parsed['action']['create-file']['data']
def test_simple_1(self): rendered = Template('{{ abs(-3) }}').render() self.assertEqual(rendered, '3')
def make_delete_all_window(self, **kwargs): Template.__init__(self, 'delete_crap_confirm', **kwargs)
def test_not_escape(self): rendered = Template("{{ content }}", autoescape=False).render( content="<p>hello escape</p>") self.assertEqual(rendered, '<p>hello escape</p>')
def test_dict_2(self): rendered = Template( "{{ a.get('hello') }}").render(a={'hello': 'flango'}) self.assertEqual(rendered, 'flango')
def test_simple_3(self): rendered = Template('{{ [1, 2, 3].index(2) }}').render() self.assertEqual(rendered, '1')
def setUp(self): self.__underTest = Template()
def test_escape(self): rendered = Template("{{ content }}", autoescape=True).render( content="<p>hello escape</p>") self.assertEqual(rendered, '<p>hello escape</p>')