def pagingSQL(self, sql, paging, NS):
    """Wrap *sql* in the engine's paging template.

    Paging parameters (page number, rows per page, sort column, order)
    are looked up in the namespace *NS* using the key names configured
    in *paging*.  When no sort column is present the statement cannot be
    paged and *sql* is returned unchanged.
    """
    # Resolve all paging inputs first — int() must still run (and may
    # raise) even when the sort guard below returns early.
    page_no = int(NS.get(paging['pagename'], 1))
    page_size = int(NS.get(paging['rowsname'], 10))
    sort_col = NS.get(paging.get('sortname', 'sort'), None)
    sort_order = NS.get(paging.get('ordername', 'asc'), 'asc')
    if not sort_col:
        # Paging needs a deterministic ordering; without one, pass through.
        return sql
    if page_no < 1:
        page_no = 1
    first_row = (page_no - 1) * page_size + 1
    last_row = page_no * page_size + 1
    template = self.pagingSQLmodel()
    converter = ArgsConvert('$[', ']$')
    rendered = converter.convert(template, {
        'from_line': first_row,
        'end_line': last_row,
        'rows': page_size,
        'sort': sort_col,
        'order': sort_order,
    })
    # The template contains a single %s slot for the inner statement.
    return rendered % sql
def maskingSQL(self, org_sql, NS):
    """Replace every ``${X}$``-style variable (except ``__mainsql__``)
    in *org_sql* with a driver placeholder and collect its value.

    *org_sql* is a SQL statement containing variables delimited by the
    engine's configured markers (``self.sqltp``/``self.sqlts`` for text
    substitution, ``self.sqlvp``/``self.sqlvs`` for bound variables).
    The ``__mainsql__`` variable only identifies the main data-producing
    statement and is never bound.  *NS* is the namespace dictionary the
    variable values are looked up in.

    Returns a tuple ``(masked_sql, value_list)`` where *masked_sql* has
    each bound variable replaced by its placeholder and *value_list*
    holds the corresponding values in placeholder order.
    """
    sqltextAC = ArgsConvert(self.sqltp, self.sqlts)
    sqlargsAC = ArgsConvert(self.sqlvp, self.sqlvs)
    # First pass: plain text substitution, then conditional sections.
    sql1 = sqltextAC.convert(org_sql, NS)
    cc = ConditionConvert()
    sql1 = cc.convert(sql1, NS)
    variables = sqlargsAC.findAllVariables(sql1)
    # Dict comprehension instead of the original side-effecting list
    # comprehension ([phnamespace.update(...) for ...]); same mapping,
    # no throwaway list of Nones.  ('variables' also avoids shadowing
    # the builtin 'vars'.)
    phnamespace = {v: self.placeHolder(v, i) for i, v in enumerate(variables)}
    m_sql = sqlargsAC.convert(sql1, phnamespace)
    newdata = []
    for v in variables:
        if v != '__mainsql__':
            value = sqlargsAC.getVarValue(v, NS, None)
            newdata += self.dataList(v, value)
    return (m_sql, newdata)
def filterSQL(self, sql, filters, NS):
    """Wrap *sql* in a subquery filtered by the expressions in *filters*.

    Each filter may reference ``$[var]$`` variables; a filter whose
    variables are not all present (and truthy) in *NS* is neutralised
    to ``1=1`` so the remaining predicate stays syntactically valid.
    Filters with no variables are used verbatim.
    """
    converter = ArgsConvert('$[', ']$')
    rendered = []
    for expr in filters:
        names = converter.findAllVariables(expr)
        if len(names) > 0:
            # Only substitute when every referenced variable resolves
            # to a truthy value in NS; otherwise disable this filter.
            if all(NS.get(name, False) for name in names):
                expr = converter.convert(expr, NS)
            else:
                expr = '1=1'
        rendered.append(expr)
    predicate = ' '.join(rendered)
    return u"""select * from (%s) filter_table where %s""" % (sql, predicate)
def __init__(self, jsonholder, keytype='ansi', NS=None):
    """Load a JSON definition into this DictObject.

    *jsonholder* is either a filesystem path (str) or an already-open
    file-like object; *NS*, when given, is a namespace used to expand
    ``$[var]$`` references in the loaded data via ArgsConvert.
    *keytype* is accepted for interface compatibility and not used here.

    Raises whatever ``json.load`` raises on malformed input, after
    printing a diagnostic.
    """
    self.__jsonholder__ = jsonholder
    self.NS = NS
    # Track whether we opened the handle ourselves: the original guard
    # (type(jsonholder) == type("")) did not match the open condition,
    # which also accepted unicode paths, so such handles leaked.
    opened = isinstance(jsonholder, str)
    f = open(jsonholder, 'r') if opened else jsonholder
    try:
        a = json.load(f)
    except Exception as e:
        print("exception:", self.__jsonholder__, e)
        # Bare re-raise preserves the original traceback ('raise e' did not).
        raise
    finally:
        if opened:
            f.close()
    if self.NS is not None:
        ac = ArgsConvert('$[', ']$')
        a = ac.convert(a, self.NS)
    DictObject.__init__(self, **a)
def renderAll(namespace):
    """Render every xlsx/xls workbook found via namespace['sourcePath']
    through the templates in namespace['outputMapping'] ({template: output
    filename pattern}), writing each rendered result as a UTF-8 file.

    NOTE(review): the .decode('utf8').encode('gb2312') and s.decode(...)
    calls below only work on Python 2 byte strings — on Python 3, str has
    no .decode(); confirm the target interpreter before running this.
    """
    workdir = ''
    xlsxs = []
    # A single workbook path is used as-is; a directory is scanned for
    # both .xlsx and .xls files.
    if namespace['sourcePath'][-len('.xlsx'):] == '.xlsx' or namespace['sourcePath'][-len('.xls'):] == '.xls':
        workdir = os.path.dirname(namespace['sourcePath'])
        xlsxs = [namespace['sourcePath']]
    else:
        workdir = namespace['sourcePath']
        xlsxs = [ f for f in listFile(namespace['sourcePath'],'.xlsx')] + [ f for f in listFile(namespace['sourcePath'],'.xls')]
    ac = ArgsConvert('${','}$')
    engines = {}  # NOTE(review): never used below
    e = MyTemplateEngine(namespace['tmplPaths'])
    #print( "tmpl paths = ",namespace['tmplPaths'])
    #print( xlsxs)
    for xlsx in xlsxs:
        print( xlsx," handling ...")
        a = CRUDData(xlsx)
        data = a.read()
        # Optional shared data merged into every workbook's namespace.
        g = namespace.get('global',False)
        if g:
            data.update(g)
        for tmpl,fn in namespace['outputMapping'].items():
            # basename = workbook filename without its extension; made
            # available to the output-filename pattern via the namespace.
            base = os.path.basename(xlsx)
            bs = base.split('.')
            basename = '.'.join(bs[:-1])
            namespace.update({'basename' : basename})
            vs = ac.findAllVariables(fn)  # NOTE(review): unused
            # NOTE(review): Python-2-only transcoding (utf8 -> gb2312)
            # of the expanded output filename — TODO confirm.
            filename = ac.convert(fn,namespace).decode('utf8').encode('gb2312');
            #print( vs,fn,filename)
            namespace.update({'filename' : filename })
            namespace.update({'tmplname':tmpl})
            s = e.render(tmpl,data)
            # Rendered bytes decoded with the configured source coding,
            # then re-written as UTF-8 via codecs.open below.
            out_s = s.decode(namespace['coding'])
            _mkdir(os.path.dirname(namespace['filename']))
            f = codecs.open(namespace['filename'],"w","utf-8")
            f.write(out_s)
            f.close()
        # NOTE(review): this chunk begins mid-dict — the opening of the
        # (presumably) sqldesc literal is outside this view.
        "kwargs": {}
    },
    "sql_string": summarysSQL,
    "default": {}
}
# Descriptor for the per-table CSV export: csv writer with a header row,
# capped at 100000 rows per file; sql_string is re-templated per table below.
grapdesc = {
    "writer": {
        "filetype": "csv",
        "kwargs": {
            "outheader": True,
            "maxWrite": 100000,
        }
    },
    "ignore_error": 1,
    "sql_string": fieldsSQL,
}
# List the tables to export, then dump each one to <name>_data.csv
# under the directory given as the second CLI argument.
tables = db.sqlExecute(sqldesc, {})
ac = ArgsConvert('$[', ']$')
cur_t = None  # NOTE(review): never used below
for t in tables:
    fn = os.path.join(sys.argv[2], t['name'].lower() + '_data.csv')
    grapdesc['writer']['kwargs']['outfile'] = fn
    grapdesc['sql_string'] = ac.convert(fieldsSQL, {'table_name': t['name']})
    try:
        # NOTE(review): rebinding 'tables' here does not disturb the
        # in-progress loop (its iterator is already bound), but the
        # name reuse is confusing — consider a different variable.
        tables = db.sqlExecute(grapdesc, {})
    except:
        # NOTE(review): bare except swallows everything, including
        # KeyboardInterrupt — consider 'except Exception'.
        print(t['name'], "data grap error")
conn.close()