def connect (self,conf=None): print >> sys.stderr,self.db if conf is not None: self.conf=conf try: self.db.index(".db") self.conn = sqlite3.connect(self.db) self.cur = self.conn.cursor() self.conn.execute("PRAGMA foreign_keys = ON") self.dbtype="sqlite" self.now="datetime('now')" self.desc="PRAGMA table_info(users)" return True except Exception,e: try: self.conn = psycopg2.connect(self.db) self.cur = self.conn.cursor() self.dbtype="PG" self.now="now()" import authenticate.service as auth import datastores.postgis.pgConnection as pg self.prefix=auth.getPrefix(conf) print >> sys.stderr,self.prefix self.desc=pg.getDesc(self.cur,self.prefix+"users") return True except Exception,e: print >> sys.stderr,e return False
def connect(self, conf=None):
    """Open the service database connection, trying SQLite first and
    falling back to PostgreSQL.

    Like the earlier connect() definition in this file, but also records
    the DB-API paramstyle of the chosen driver on self.paramstyle.
    Sets on success: conn, cur, paramstyle, dbtype ("sqlite"/"PG"),
    now (SQL current-timestamp expression), desc (users-table
    description request/result) and, for PostgreSQL, prefix.

    @param conf optional service configuration; stored on self.conf
    @return True on success, False when both back-ends fail
    """
    if conf is not None:
        self.conf = conf
    try:
        # .index raises ValueError when the connection string does not
        # contain ".db", which sends us to the PostgreSQL branch.
        self.db.index(".db")
        self.paramstyle = sqlite3.paramstyle
        self.conn = sqlite3.connect(self.db)
        self.cur = self.conn.cursor()
        self.conn.execute("PRAGMA foreign_keys = ON")
        self.dbtype = "sqlite"
        self.now = "datetime('now')"
        self.desc = "PRAGMA table_info(users)"
        return True
    except Exception, e:
        try:
            self.conn = psycopg2.connect(self.db)
            self.paramstyle = psycopg2.paramstyle
            self.cur = self.conn.cursor()
            self.dbtype = "PG"
            self.now = "now()"
            import authenticate.service as auth
            import datastores.postgis.pgConnection as pg
            # NOTE(review): uses the conf *argument* (possibly None),
            # not self.conf -- confirm this is intended.
            self.prefix = auth.getPrefix(conf)
            self.desc = pg.getDesc(self.cur, self.prefix + "users")
            return True
        except Exception, e:
            print >> sys.stderr, e
            return False
def getTableFeatures(conf, inputs, outputs):
    """ZOO service: return one page of rows (with group info) from a table.

    Reads from inputs: table (required), cols, fields, clause, search,
    sortname/sortorder, limit, offset. Writes outputs["Count"]["value"]
    (total row count as a string) and outputs["Result"]["value"] (JSON
    object {"total": ..., "rows": [{"id", "group", "cell"}, ...]}).

    @return zoo.SERVICE_SUCCEEDED or zoo.SERVICE_FAILED

    Fixes over the previous revision:
      * "clause" and "search" together produced two WHERE keywords
        (invalid SQL) -- conditions are now collected and ANDed;
      * the search branch read inputs["fields"] without checking it
        exists (KeyError) -- now guarded;
      * "LIMIT x LIMIT 10" could be emitted when limit was given but
        offset was not -- the default LIMIT 10 now applies only when no
        limit is supplied;
      * redundant LIKE patterns dropped ('%v%' subsumes 'v%' and '%v').
    """
    c = auth.getCon(conf)
    prefix = auth.getPrefix(conf)
    table = inputs["table"]["value"]
    # SECURITY: table/column names and clauses from the request are
    # concatenated straight into SQL (injection risk). Identifiers
    # cannot be bound as DB-API parameters, so they must be validated
    # upstream against the catalog.
    if c.dbtype != "PG":
        req = "PRAGMA table_info(" + table + ")"
    else:
        import datastores.postgis.pgConnection as pg
        req = pg.getDesc(c.cur, prefix + table)
    c.cur.execute(req)
    res = c.cur.fetchall()
    fields = []
    pkey = 0
    pfield = "id"
    vfields = None
    if inputs.has_key("cols") and inputs["cols"]["value"] != "NULL":
        vfields = inputs["cols"]["value"].split(",")
    if vfields is None:
        # Describe every column; description column 4 flags the pkey.
        for i in range(0, len(res)):
            fields += [{"name": res[i][1], "type": res[i][2], "pkey": res[i][4]}]
            if res[i][4] == 1:
                pkey = i
    else:
        # Keep only the requested columns, in the requested order.
        for j in range(0, len(vfields)):
            for i in range(0, len(res)):
                if res[i][1] == vfields[j]:
                    fields += [{"name": res[i][1], "type": res[i][2], "pkey": res[i][4]}]
                    if res[i][4] == 1 or res[i][3] == 'PRI':
                        pkey = i
                        pfield = res[i][1]
    req = "select count(*) from " + prefix + table
    # req1 fetches the primary key of each returned row and must carry
    # exactly the same WHERE/ORDER/LIMIT/OFFSET as the row query so the
    # ids line up with the rows.
    req1 = "SELECT " + pfield + " from " + prefix + table
    c.cur.execute(req)
    res = c.cur.fetchall()
    if res != False:
        total = res[0][0]
        req = "select "
        if inputs.has_key("fields") and inputs["fields"]["value"] != "NULL":
            req += inputs["fields"]["value"]
        else:
            req += "*"
        req += " from " + prefix + table
        # Collect all WHERE conditions, then emit a single WHERE.
        conds = []
        if inputs.has_key("clause") and inputs["clause"]["value"] != "NULL":
            conds += ["(" + inputs["clause"]["value"] + ")"]
        if inputs.has_key("search") and inputs["search"]["value"] != "asc" \
           and inputs["search"]["value"] != "desc" \
           and inputs.has_key("fields") and inputs["fields"]["value"] != "NULL":
            search = inputs["search"]["value"]
            mfields = inputs["fields"]["value"].split(',')
            likes = []
            for i in range(0, len(mfields)):
                # '%search%' already matches prefix and suffix hits.
                likes += [mfields[i] + " LIKE '%" + search + "%'"]
            conds += ["(" + " OR ".join(likes) + ")"]
        if len(conds) > 0:
            where = " WHERE " + " AND ".join(conds)
            req += where
            req1 += where
        if inputs.has_key("sortname") and inputs["sortname"]["value"] != "NULL" \
           and inputs["sortname"]["value"] != "undefined" \
           and inputs["sortorder"]["value"] != "undefined":
            order = " ORDER BY " + inputs["sortname"]["value"] + " " \
                    + inputs["sortorder"]["value"]
            req += order
            req1 += order
        if inputs.has_key("limit") and inputs["limit"]["value"] != "NULL":
            tail = " LIMIT " + inputs["limit"]["value"]
            if inputs.has_key("offset") and inputs["offset"]["value"] != "":
                tail += " OFFSET " + inputs["offset"]["value"]
            req += tail
            req1 += tail
        else:
            # No explicit limit: cap the page at 10 rows.
            req += " LIMIT 10"
            req1 += " LIMIT 10"
        c.cur.execute(req)
        res = c.cur.fetchall()
        c.cur.execute(req1)
        resId = c.cur.fetchall()
        if res != False:
            rows = []
            for i in range(0, len(res)):
                res0 = []
                for k in range(0, len(res[i])):
                    # Character columns are passed through (utf-8 encoded
                    # when possible); everything else is stringified.
                    if res[i][k] is not None and fields[k]["type"].count("char") > 0:
                        try:
                            res0 += [res[i][k].encode("utf-8")]
                        except:
                            res0 += [res[i][k]]
                    else:
                        res0 += [str(res[i][k])]
                rows += [{"id": resId[i][0],
                          "group": c.get_groups_user_by_id(int(resId[i][0])),
                          "cell": res0}]
            outputs["Count"]["value"] = str(total)
            outputs["Result"]["value"] = json.dumps({"total": total, "rows": rows})
            return zoo.SERVICE_SUCCEEDED
        else:
            print >> sys.stderr, "unable to run request"
            return zoo.SERVICE_FAILED
def getTableContent(conf, inputs, outputs):
    """ZOO service: return one page of rows from a table.

    Reads from inputs: table (required), cols, clause, sortname/
    sortorder, limit, page. Writes outputs["Result"]["value"] as a JSON
    object {"page": ..., "total": ..., "rows": [{"id", "cell"}, ...]}.

    @return zoo.SERVICE_SUCCEEDED or zoo.SERVICE_FAILED

    Fixes over the previous revision:
      * the primary-key query did not repeat the WHERE/ORDER/LIMIT/
        OFFSET of the row query, so ids were paired with the wrong rows
        whenever a clause, sort or paging was active (getTableFeatures
        already mirrored the two queries -- now consistent);
      * "LIMIT x LIMIT 10" could be emitted when limit was given but
        page was not;
      * removed a leftover debug print of the table description.
    """
    c = auth.getCon(conf)
    prefix = auth.getPrefix(conf)
    table = inputs["table"]["value"]
    # SECURITY: identifiers and clauses from the request are
    # concatenated straight into SQL (injection risk); they must be
    # validated upstream.
    if c.dbtype != "PG":
        req = "PRAGMA table_info(" + table + ")"
    else:
        import datastores.postgis.pgConnection as pg
        req = pg.getDesc(c.cur, prefix + table)
    c.cur.execute(req)
    res = c.cur.fetchall()
    fields = []
    pkey = 0
    pfield = "id"
    vfields = None
    if inputs.has_key("cols") and inputs["cols"]["value"] != "NULL":
        vfields = inputs["cols"]["value"].split(",")
    if vfields is None:
        # Describe every column; description column 4 flags the pkey.
        for i in range(0, len(res)):
            fields += [{"name": res[i][1], "type": res[i][2], "pkey": res[i][4]}]
            if res[i][4] == 1:
                pkey = i
    else:
        # Keep only the requested columns, in the requested order.
        for j in range(0, len(vfields)):
            for i in range(0, len(res)):
                if res[i][1] == vfields[j]:
                    fields += [{"name": res[i][1], "type": res[i][2], "pkey": res[i][4]}]
                    if res[i][4] == 1 or res[i][3] == 'PRI':
                        pkey = i
                        pfield = res[i][1]
    req = "select count(*) from " + prefix + table
    c.cur.execute(req)
    res = c.cur.fetchall()
    if res != False:
        total = res[0][0]
        req = "select "
        if inputs.has_key("cols") and inputs["cols"]["value"] != "NULL":
            req += inputs["cols"]["value"]
        else:
            req += "*"
        req += " from " + prefix + table
        # req1 fetches the primary key of each returned row; it must
        # carry the same tail (WHERE/ORDER/LIMIT/OFFSET) as req so the
        # ids line up with the rows.
        req1 = "SELECT " + pfield + " from " + prefix + table
        tail = ""
        if inputs.has_key("clause") and inputs["clause"]["value"] != "NULL":
            tail += " WHERE " + inputs["clause"]["value"]
        if inputs.has_key("sortname") and inputs["sortname"]["value"] != "NULL" \
           and inputs["sortname"]["value"] != "undefined" \
           and inputs["sortorder"]["value"] != "undefined":
            tail += " ORDER BY " + inputs["sortname"]["value"] + " " \
                    + inputs["sortorder"]["value"]
        if inputs.has_key("limit") and inputs["limit"]["value"] != "NULL":
            tail += " LIMIT " + inputs["limit"]["value"]
            if inputs.has_key("page") and inputs["page"]["value"] != "":
                # page is 1-based; translate to an OFFSET.
                tail += " OFFSET " + str((int(inputs["page"]["value"]) - 1)
                                         * int(inputs["limit"]["value"]))
                page = inputs["page"]["value"]
            else:
                page = 1
        else:
            # No explicit limit: first page, capped at 10 rows.
            page = 1
            tail += " LIMIT 10"
        req += tail
        req1 += tail
        c.cur.execute(req)
        res = c.cur.fetchall()
        c.cur.execute(req1)
        resId = c.cur.fetchall()
        if res != False:
            rows = []
            for i in range(0, len(res)):
                res0 = []
                for k in range(0, len(res[i])):
                    # Character columns are passed through (utf-8 encoded
                    # when possible); everything else is stringified.
                    if res[i][k] is not None and fields[k]["type"].count("char") > 0:
                        try:
                            res0 += [res[i][k].encode("utf-8")]
                        except:
                            res0 += [res[i][k]]
                    else:
                        res0 += [str(res[i][k])]
                rows += [{"id": resId[i][0], "cell": res0}]
            outputs["Result"]["value"] = json.dumps(
                {"page": page, "total": total, "rows": rows})
            return zoo.SERVICE_SUCCEEDED
        else:
            print >> sys.stderr, "unable to run request"
            return zoo.SERVICE_FAILED