def get_fields(params):
    """Return the field/column list of a table for the backend named in params.

    Expects on params: db (backend name, case-insensitive), tableName,
    plus dataSetName for BigQuery and schema for PostgreSQL.  The result
    is wrapped with comm.format_response; an unrecognized backend yields
    a failure response carrying the offending db value.
    """
    table = params.tableName
    backend = params.db.lower()  # normalize once instead of per branch

    if backend == 'bigquery':
        fields = bqhandler.get_fields(params.dataSetName, table)
        return comm.format_response(True, fields, "", exception=None)
    if backend == 'mssql':
        fields = mssqlhandler.get_fields(table)
        return comm.format_response(True, fields, "", exception=None)
    if backend == 'postgresql':
        fields = pgsqlhandler.get_fields(table, params.schema)
        return comm.format_response(True, fields, "", exception=None)
    if backend == 'mysql':
        fields = mysqlhandler.get_fields(params.tableName)
        return comm.format_response(True, fields, "", exception=None)

    # No handler module for this backend.
    return comm.format_response(False, params.db, "DB not implemented!", exception=None)
query += ' LIMIT ' + str(limit_) data = pgsqlhandler.execute_query(query) try: logger.info('Inserting to cache..') # p = Process(target=MEM_insert,args=(cache_key,json.dumps(data),query,cache_timeout)) # p.start() t = threading.Thread(target=MEM_insert, args=(cache_key,json.dumps(data),query,cache_timeout)) t.start() except Exception, err: logger.error("Cache insertion failed. %s" % err) pass return comm.format_response(True,data,query,exception=None) elif db.lower() == 'mysql': try: resultSet = mysqlhandler.execute_query(query,params.db_name) except Exception, err: print err raise try: logger.info('Inserting to cache..') # p = Process(target=MEM_insert,args=(cache_key,json.dumps(resultSet),query,cache_timeout)) # p.start() t = threading.Thread(target=MEM_insert, args=(cache_key,json.dumps(resultSet),query,cache_timeout)) t.start() except Exception, err: logger.error("Cache insertion failed. %s" % err) pass return comm.format_response(True,resultSet,query,exception=None) else: return "db not implemented"