def dbtdel(desc):
    """Delete a data table (DELETE handler).

    Deletion is allowed when the requester is the table's owner, the
    owner's leader, or an admin (``dmp_group_id == 1``).

    Error codes: 101 missing id, 404 table not found, 302 forbidden.
    """
    if request.method == "DELETE":
        try:
            auth_token = request.headers.get('Authorization')
            current_user_id = Users.decode_auth_token(auth_token)
            del_data_table_id = request.json.get("dmp_data_table_id")
            if not del_data_table_id:
                return resp_hanlder(code=101)
            del_data_table = DataTable.get(del_data_table_id)
            if not del_data_table:
                return resp_hanlder(code=404)
            # BUGFIX(perf): the original re-ran DataTable.get and Users.get
            # for every permission check; fetch the owner exactly once.
            owner = Users.get(del_data_table.dmp_user_id)
            is_user = owner.id == current_user_id
            is_user_leader = owner.leader_dmp_user_id == current_user_id
            is_admin = Users.get(current_user_id).dmp_group_id == 1
            if is_user or is_user_leader or is_admin:
                del_data_table.delete()
                current_app.logger.info("del db table complete!")
                return resp_hanlder(result="OK")
            return resp_hanlder(code=302)
        except Exception as err:
            return resp_hanlder(err=err)
def hive2kylin(hive_table_id):
    """Link or register a Hive data table with Kylin.

    If Kylin already knows the table, attach the existing KylinTable
    record; otherwise load the Hive table into Kylin, then create a
    model and a cube for it and start the initial build.

    :param hive_table_id: primary key of the DataTable to register.
    :raises Exception: when the table's database is not Hive (id != 1).
    """
    if not DataTable.exist_item_by_id(hive_table_id):
        return resp_hanlder(code=999, msg="数据表不存在或已被删除")
    hive_table = DataTable.get(hive_table_id)
    if hive_table.dmp_database_id != 1:
        raise Exception("数据库类型不符")
    table_name = "%s.%s" % (hive_table.dmp_database_name,
                            hive_table.db_table_name)
    if KylinTable.exist_item_by_db_table_name(table_name):
        # Already registered: just link the existing Kylin record.
        kt_item = KylinTable.query.filter_by(db_table_name=table_name).first()
        hive_table.kylin_info_id = kt_item.id
        hive_table.save()
        return
    # Load the Hive table into Kylin.
    # BUGFIX: the original passed the undefined name `load_table_name`
    # (NameError at runtime); the table being loaded is `table_name`.
    load_hive_table_res = kt.api_load_hive_tables(tables=table_name,
                                                  project=project)
    # NOTE(review): assumes the response dict has a flat "result.loaded"
    # key holding the upper-cased table name -- confirm against kt API.
    if load_hive_table_res.get("result.loaded") != table_name.upper():
        return
    # Create the model.
    # BUGFIX: the original was missing the comma before `description`,
    # which made this whole module a SyntaxError.
    modelName = "model_%s_%s" % (table_name, uuid_str())
    modelDescData = modelDescData_hanlder(project=project,
                                          tableName=table_name,
                                          modelName=modelName,
                                          description="")
    # NOTE(review): modelDescData is built but never passed to
    # api_create_model -- confirm whether the call should receive it.
    kt.api_create_model(modelName=modelName, projectName=project)
    # Create the cube.
    cubeName = "cube_%s_%s" % (table_name, uuid_str())
    cubeDescData = cubeDescDataHanlder(cubeName=cubeName,
                                       description="",
                                       tableName=table_name,
                                       project=project,
                                       modelsName=modelName)
    kt.api_create_cube(cubeName=cubeName,
                       cubeDescData=cubeDescData,
                       projectName=project)
    # Trigger the initial cube build.
    kt.api_build_cube(cubeName=cubeName, buildType="BUILD")
def auto_connect(db_id=None, table_id=None):
    """Create a database connection from a Database id or a DataTable id.

    When only ``table_id`` is given, the table's database is looked up,
    and Hive tables flagged ``is_kylin`` are routed through the Kylin
    engine (db_type 4). When ``db_id`` is given, the database record is
    used directly.

    :returns: a connected engine instance.
    :raises Exception: when the table/database does not exist or neither
        id was supplied.
    """
    try:
        from dmp.models import Database, DataTable
        # BUGFIX(idiom): None comparisons now use `is` / `is not`.
        if table_id is not None and db_id is None:
            if DataTable.exist_item_by_id(table_id):
                table = DataTable.get(table_id)
                db_id = table.dmp_database_id
                if Database.exist_item_by_id(db_id):
                    db = Database.get(db_id)
                    # Hive (db_type 1) tables backed by Kylin use the
                    # Kylin engine and app-level Kylin credentials.
                    db_type = 4 if db.db_type == 1 and table.is_kylin else db.db_type
                    Engine = engines.get(db_type)
                    if db_type == 4:
                        conn = Engine(
                            host=current_app.config.get("KYLIN_HOST"),
                            port=current_app.config.get("KYLIN_PORT"),
                            user=current_app.config.get("KYLIN_USER"),
                            passwd=current_app.config.get("KYLIN_PASSWD"),
                            db=current_app.config.get("KYLIN_PROJECT"))
                    else:
                        conn = Engine(host=db.db_host,
                                      port=db.db_port,
                                      user=db.db_username,
                                      passwd=db.db_passwd,
                                      db=db.db_name)
                    return conn
            else:
                print("数据表不存在")
                raise Exception("数据表不存在")
        elif db_id is not None:
            if Database.exist_item_by_id(db_id):
                db = Database.get(db_id)
                Engine = engines.get(db.db_type)
                conn = Engine(host=db.db_host,
                              port=db.db_port,
                              user=db.db_username,
                              passwd=db.db_passwd,
                              db=db.db_name)
                return conn
            else:
                print("数据库不存在")
                raise Exception("数据库不存在")
        else:
            raise Exception("缺少必要参数")
    except Exception:
        # BUGFIX: bare re-raise keeps the original traceback intact
        # (the original `raise e` re-raised from the except frame).
        raise
def retrieve(desc):
    """Return the rows of the data table named in the GET body.

    Expects a JSON body with ``dmp_data_table_id``; connects to the
    table's source database and fetches its contents.
    """
    if request.method == "GET":
        payload = request.json
        table = DataTable.get(payload.get("dmp_data_table_id"))
        connection = auto_connect(db_id=table.dmp_database_id)
        rows = connection.retrieve(table_name=table.db_table_name)
        # current_app.logger.info(rows)
        return resp_hanlder(result=rows)
def post(dmp_data_table_name, db_table_name, description, dmp_user_id,
         dmp_database_id, dmp_case_id):
    """Create and persist a data table record.

    Saves the new DataTable, refreshes its row count, and returns its
    JSON representation.
    """
    record = DataTable(dmp_data_table_name=dmp_data_table_name,
                       db_table_name=db_table_name,
                       description=description,
                       dmp_user_id=dmp_user_id,
                       dmp_database_id=dmp_database_id,
                       dmp_case_id=dmp_case_id)
    record.save()
    print("new data table saved")
    record.data_count()
    return record.__json__()
def column(desc):
    """Return the merged column list of a data table (GET handler).

    Live columns are read from the underlying database; any column whose
    name also has saved DataTableColumn records is replaced by those
    saved versions (presumably carrying user-edited metadata -- confirm).
    """
    if request.method == "GET":
        try:
            dmp_data_table_id = request.json.get("dmp_data_table_id")
            if DataTable.exist_item_by_id(dmp_data_table_id):
                data_table_info = DataTable.get(dmp_data_table_id)
            else:
                return resp_hanlder(code=999, msg="数据表不存在或已被删除")
            db_table_name = data_table_info.db_table_name
            colums4sdb = DataTableColumn.query.filter_by(
                dmp_data_table_id=dmp_data_table_id)
            # BUGFIX(perf): the original issued the COUNT query twice.
            saved_count = colums4sdb.count()
            current_app.logger.info(saved_count)
            column4sdb_array = []
            if saved_count > 0:
                column4sdb_array = [col.__json__() for col in colums4sdb.all()]
            columns4db = auto_connect(
                db_id=data_table_info.dmp_database_id).columns(db_table_name)
            current_app.logger.info(column4sdb_array)
            # BUGFIX(perf): group saved columns by name once -- O(n+m)
            # instead of the original O(n*m) nested scan. Order and
            # multiplicity of matches are preserved exactly.
            saved_by_name = {}
            for saved in column4sdb_array:
                saved_by_name.setdefault(
                    saved.get("dmp_data_table_column_name"), []).append(saved)
            columns = []
            for live in columns4db:
                matches = saved_by_name.get(
                    live.get("dmp_data_table_column_name"))
                if matches:
                    columns.extend(matches)
                else:
                    columns.append(live)
            current_app.logger.info(columns)
            return resp_hanlder(result=columns)
        except Exception as err:
            current_app.logger.error(err)
            return resp_hanlder(code=999, err=err)
def info(desc):
    """Look up data-table info (GET handler).

    With only ``dmp_case_id``: return every table of that case.
    With only ``dmp_data_table_id``: return that single table.
    Any other combination falls through without a response.
    """
    if request.method == "GET":
        try:
            body = request.json
            table_id = body.get("dmp_data_table_id")
            case_id = body.get("dmp_case_id")
            if case_id and not table_id:
                rows = DataTable.query.filter_by(dmp_case_id=case_id).all()
                # current_app.logger.info(rows)
                return resp_hanlder(result=[row.__json__() for row in rows])
            if table_id and not case_id:
                # current_app.logger.info(...)
                return resp_hanlder(result=DataTable.get(table_id).__json__())
        except Exception as err:
            return resp_hanlder(err=err)
def put(desc):
    """Update a data table's name/description (PUT handler).

    Permitted for user id 1, the table's owner, or the owner's leader.
    Error codes: 404 table missing, 301 forbidden.
    """
    if request.method == "PUT":
        token = request.headers.get('Authorization')
        requester_id = Users.decode_auth_token(token)
        payload = request.json
        table = DataTable.get(payload.get("dmp_data_table_id"))
        if not table:
            return resp_hanlder(code=404)
        owner_id = table.dmp_user_id
        # Short-circuits so the Users lookup only runs when needed.
        allowed = (requester_id == 1 or requester_id == owner_id
                   or Users.get(owner_id).leader_dmp_user_id == requester_id)
        if not allowed:
            return resp_hanlder(code=301)
        if "dmp_data_table_name" in payload:
            table.dmp_data_table_name = payload.get("dmp_data_table_name")
        if "description" in payload:
            table.description = payload.get("description")
        table.put()
        return resp_hanlder(result="OK!")
def hct():
    """Ad-hoc helper: refresh the row count of DataTable id 1."""
    from dmp.models import DataTable
    first_table = DataTable.get(1)
    first_table.data_count()
def approve(desc):
    """Record an approval decision for a form and run the approved action.

    PUT handler. The decision (approver, result, answer, timestamp) is
    saved on the Forms record first; the handler then dispatches on
    ``dmp_form_type``:

      1 - register an existing database table as a DMP data table
      2 - ingest an uploaded file (csv/excel/json) into a target database
      3 - migrate a data table from one database to another
      4 - export a data table to a csv made available over FTP

    Types 2-4 build a reader/writer pair and enqueue an asynchronous
    transfer job via ``job_hanlder.delay``; job outcomes are written back
    into ``approve_form.result``.
    """
    if request.method == "PUT":
        try:
            # Request parameters.
            approve_form_info = request.json
            auth_token = request.headers.get('Authorization')
            approve_user_id = Users.decode_auth_token(auth_token)
            form_type = approve_form_info.get("dmp_form_type", None)
            form_id = approve_form_info.get("dmp_form_id", None)
            approve_result = approve_form_info.get("approve_result", None)
            answer = approve_form_info.get("answer", None)
            # Persist the approval decision on the form.
            approve_form = Forms.get(form_id)
            approve_form.approve_dmp_user_id = approve_user_id
            approve_form.approve_result = approve_result
            approve_form.approve_on = datetime.datetime.now()
            approve_form.answer = answer
            approve_form.put()
            # Execute the approved action.
            if form_type == 1:
                # Type 1: register a table that already exists in a database.
                try:
                    if approve_result == 1:
                        post(
                            dmp_data_table_name=approve_form.info_form.
                            dmp_data_table_name,
                            db_table_name=approve_form.info_form.db_table_name,
                            dmp_user_id=approve_form.submit_dmp_user_id,
                            dmp_database_id=approve_form.info_form.
                            dmp_database_id,
                            dmp_case_id=approve_form.info_form.dmp_case_id,
                            description=approve_form.description)
                        approve_form.result = "SUCCEED!"
                except Exception as err:
                    approve_form.result = "ASSOCIATION FAILED,ERROR_MSG:%s" % str(
                        err)
                # NOTE(review): this action is synchronous, so the form is
                # marked finished whether it succeeded or failed.
                approve_form.finish = True
            elif form_type == 2:
                # Type 2: ingest an uploaded file as a new data table.
                upload_path = current_app.config.get("UPLOADED_PATH")
                # NOTE(review): file_path already includes UPLOADED_PATH yet
                # is joined with it again below for csv_filepath -- confirm.
                file_path = os.path.join(upload_path,
                                         approve_form.info_form.filepath)
                file_type = approve_form.info_form.filetype
                column_line = approve_form.info_form.column_line
                # User-supplied column names arrive comma-separated.
                column = approve_form.info_form.column.split(",") if type(
                    approve_form.info_form.column) == str else []
                destination_dmp_database_id = approve_form.info_form.destination_dmp_database_id
                destination_db_table_name = approve_form.info_form.destination_db_table_name
                dmp_data_table_name = approve_form.info_form.dmp_data_table_name
                method = approve_form.info_form.method
                description = approve_form.description
                submit_dmp_user_id = approve_form.submit_dmp_user_id
                dmp_case_id = approve_form.info_form.dmp_case_id
                destination_database = Database.get(
                    destination_dmp_database_id)
                destination_database_type = destination_database.db_type
                destination_db_host = destination_database.db_host
                destination_db_port = destination_database.db_port
                destination_db_username = destination_database.db_username
                destination_db_passwd = destination_database.db_passwd
                destination_db_name = destination_database.db_name
                try:
                    reader = []
                    text_column = []
                    csv_filepath = os.path.join(
                        current_app.config.get("UPLOADED_PATH"), file_path)
                    if file_type == 1 or file_type == 3:
                        # csv / excel: read the header row, transliterating
                        # Chinese column names to pinyin identifiers.
                        dt = pd.read_csv(csv_filepath, header=column_line)
                        csv_column = [
                            "_".join(lazy_pinyin(d))
                            if is_contains_chinese(d) else d
                            for d in list(dt.columns)
                        ]
                        # Prefer user-supplied names when counts match.
                        text_column = column if column and len(column) == len(
                            csv_column) else csv_column
                        csv_column_d = [{
                            "index": i,
                            "type": "string"
                        } for i, cc in enumerate(text_column)]
                        reader = textfile_reader(filepath=csv_filepath,
                                                 column=csv_column_d)
                    elif file_type == 2:
                        # json -- NOTE(review): parsed with read_csv exactly
                        # like the csv branch; confirm this is intended.
                        dt = pd.read_csv(csv_filepath, header=0)
                        csv_column = [
                            "_".join(lazy_pinyin(d))
                            if is_contains_chinese(d) else d
                            for d in list(dt.columns)
                        ]
                        text_column = column if column and len(column) == len(
                            csv_column) else csv_column
                        csv_column_d = [{
                            "index": i,
                            "type": "string"
                        } for i, cc in enumerate(text_column)]
                        reader = textfile_reader(filepath=csv_filepath,
                                                 column=csv_column_d)
                        pass
                    writer = []
                    if destination_database_type == 1:
                        # hive_writer: create the target table, then write
                        # directly into its warehouse directory over HDFS.
                        hive_columns = [{
                            "name": col,
                            "type": "string"
                        } for col in text_column]
                        hive_path = "/user/hive/warehouse/%s.db/%s" % (
                            destination_db_name, destination_db_table_name)
                        hive_conn = auto_connect(
                            db_id=destination_dmp_database_id)
                        create_table_sql = create_table_query_handler(
                            table_name=destination_db_table_name,
                            fields=text_column,
                            uniform_type="string",
                            id_primary_key=False,
                            semicolon=False,
                            fieldDelimiter=",")
                        print(create_table_sql)
                        # method: 1 = create, 3 = drop then recreate.
                        if method == 1:
                            hive_conn.execsql(create_table_sql)
                        elif method == 3:
                            del_table_sql = "drop table {table_name}"
                            hive_conn.execsql(
                                del_table_sql.format(
                                    table_name=destination_db_table_name))
                            hive_conn.execsql(create_table_sql)
                        else:
                            pass
                        writer = hive_writer(
                            host=destination_db_host,
                            port=8020,
                            path=hive_path,
                            filename=destination_db_table_name,
                            column=hive_columns,
                            fieldDelimiter=",",
                        )
                    elif destination_database_type == 2:
                        # mysql_writer: create the target table first.
                        create_table_sql = create_table_query_handler(
                            table_name=destination_db_table_name,
                            fields=text_column,
                            uniform_type="text",
                            id_primary_key=True,
                            semicolon=True,
                            fieldDelimiter=None)
                        current_app.logger.info(create_table_sql)
                        mysql_conn = auto_connect(
                            db_id=destination_dmp_database_id)
                        del_table_sql = "drop table {table_name};"
                        # method: 1 = create, 2 = append as-is, 3 = recreate.
                        if method == 1:
                            mysql_conn.execsql(sql=create_table_sql)
                        elif method == 2:
                            pass
                        elif method == 3:
                            mysql_conn.execsql(
                                del_table_sql.format(
                                    table_name=destination_db_table_name))
                            mysql_conn.execsql(create_table_sql)
                        column = text_column
                        writer = mysql_writer(
                            model=1,
                            username=destination_db_username,
                            password=destination_db_passwd,
                            column=column,
                            host=destination_db_host,
                            port=destination_db_port,
                            db=destination_db_name,
                            table=destination_db_table_name,
                        )
                    elif destination_database_type == 3:
                        # mongo_writer: optionally drop the collection first.
                        mongo_conn = auto_connect(
                            db_id=destination_dmp_database_id)
                        if method == 3:
                            mongo_conn.del_table(
                                table_name=destination_db_table_name)
                        column = [{
                            "name": col,
                            "type": "string"
                        } for col in text_column]
                        writer = mongodb_writer(
                            host=destination_db_host,
                            port=destination_db_port,
                            username=destination_db_username,
                            password=destination_db_passwd,
                            db_name=destination_db_name,
                            collection_name=destination_db_table_name,
                            column=column,
                        )
                    # Metadata handed to the async job; postfunc registers
                    # the DataTable once the transfer completes.
                    meta = {
                        "form_id": approve_form.id,
                        "csv_file_path": csv_filepath,
                        "dmp_data_table_name": dmp_data_table_name,
                        "db_table_name": destination_db_table_name,
                        "dmp_user_id": submit_dmp_user_id,
                        "dmp_database_id": destination_dmp_database_id,
                        "dmp_case_id": dmp_case_id,
                        "description": description,
                    }
                    job_hanlder.delay(reader=reader,
                                      writer=writer,
                                      channel=3,
                                      func=postfunc,
                                      meta=meta)
                    approve_form.result = "SUCCEED!"
                except Exception as err:
                    approve_form.result = "CREATE UPLOAD JOB FAILED,ERROR MESSAGE:%s" % str(
                        err)
                    # NOTE(review): on success the finish flag is left for
                    # the async job; reconstructed scope -- confirm.
                    approve_form.finish = True
            elif form_type == 3:
                # Type 3: migrate a data table between databases.
                if approve_result == 1:
                    origin_data_table = DataTable.get(
                        approve_form.info_form.origin_dmp_table_id)
                    origin_database = Database.get(
                        origin_data_table.dmp_database_id)
                    origin_database_type = origin_database.db_type
                    origin_db_host = origin_database.db_host
                    origin_db_port = origin_database.db_port
                    origin_db_username = origin_database.db_username
                    origin_db_passwd = origin_database.db_passwd
                    origin_db_name = origin_database.db_name
                    origin_db_table_name = origin_data_table.db_table_name
                    destination_database = Database.get(
                        approve_form.info_form.destination_dmp_database_id)
                    destination_database_type = destination_database.db_type
                    destination_db_host = destination_database.db_host
                    destination_db_port = destination_database.db_port
                    destination_db_username = destination_database.db_username
                    destination_db_passwd = destination_database.db_passwd
                    destination_db_name = destination_database.db_name
                    destination_db_table_name = approve_form.info_form.new_table_name
                    try:
                        base_column = auto_connect(
                            db_id=origin_data_table.dmp_database_id).columns(
                                origin_data_table.db_table_name)
                        # current_app.logger.info(base_column)
                        # Build the reader for the origin database type.
                        reader = []
                        if origin_database_type == 1:
                            # hive_reader: read the warehouse files directly.
                            reader = hive_reader(
                                host=origin_db_host,
                                port=8020,
                                path="/user/hive/warehouse/%s.db/%s" %
                                (origin_db_name, origin_db_table_name),
                                fileType="text",
                                column=["*"],
                                fieldDelimiter=',',
                                encoding="utf-8")
                        elif origin_database_type == 2:
                            # mysql_reader
                            column = [
                                col.get("dmp_data_table_column_name")
                                for col in base_column
                            ]
                            reader = mysql_reader(
                                username=origin_db_username,
                                password=origin_db_passwd,
                                column=column,
                                host=origin_db_host,
                                port=origin_db_port,
                                db=origin_db_name,
                                table=origin_db_table_name,
                                where=None,
                            )
                        elif origin_database_type == 3:
                            # mongodb
                            column = [{
                                "index": i + 1,
                                "name": col.get("dmp_data_table_column_name"),
                                "type": col.get("dmp_data_table_column_type")
                            } for i, col in enumerate(base_column)]
                            reader = mongodb_reader(
                                host=origin_db_host,
                                port=origin_db_port,
                                db_name=origin_db_name,
                                collection_name=origin_db_table_name,
                                column=column,
                                username=origin_db_username,
                                password=origin_db_passwd)
                            pass
                        # Build the writer for the destination database type.
                        writer = []
                        if destination_database_type == 1:
                            # hive_writer
                            hive_col = [
                                col.get("dmp_data_table_column_name")
                                for col in base_column
                            ]
                            hive_columns = [{
                                "name": col,
                                "type": "string"
                            } for col in hive_col]
                            hive_path = "/user/hive/warehouse/%s.db/%s" % (
                                destination_db_name, destination_db_table_name)
                            hive_conn = auto_connect(
                                db_id=approve_form.info_form.
                                destination_dmp_database_id)
                            create_table_sql = create_table_query_handler(
                                table_name=destination_db_table_name,
                                fields=hive_col,
                                uniform_type="string",
                                id_primary_key=False,
                                semicolon=False,
                                fieldDelimiter=",")
                            hive_conn.execsql(create_table_sql)
                            writer = hive_writer(
                                host=destination_db_host,
                                port=8020,
                                path=hive_path,
                                filename=destination_db_table_name,
                                column=hive_columns,
                                fieldDelimiter=",",
                            )
                        elif destination_database_type == 2:
                            # mysql_writer
                            column = [
                                col.get("dmp_data_table_column_name")
                                for col in base_column
                            ]
                            create_table_sql = create_table_query_handler(
                                table_name=destination_db_table_name,
                                fields=column,
                                uniform_type="text",
                                id_primary_key=True,
                                semicolon=True,
                                fieldDelimiter=None)
                            mysql_conn = auto_connect(
                                db_id=approve_form.info_form.
                                destination_dmp_database_id)
                            mysql_conn.execsql(sql=create_table_sql)
                            # preSQL = []
                            writer = mysql_writer(
                                model=1,
                                username=destination_db_username,
                                password=destination_db_passwd,
                                column=column,
                                host=destination_db_host,
                                port=destination_db_port,
                                db=destination_db_name,
                                table=destination_db_table_name,
                            )
                        elif destination_database_type == 3:
                            # mongo_writer
                            column = [{
                                "name": col.get("dmp_data_table_column_name"),
                                "type": "string"
                            } for col in base_column]
                            writer = mongodb_writer(
                                host=destination_db_host,
                                port=destination_db_port,
                                username=destination_db_username
                                if destination_db_username else None,
                                password=destination_db_passwd
                                if destination_db_passwd else None,
                                db_name=destination_db_name,
                                collection_name=destination_db_table_name,
                                column=column,
                            )
                        # job_finish marks the form done asynchronously.
                        meta = {"form_id": approve_form.id}
                        job_hanlder.delay(reader=reader,
                                          writer=writer,
                                          channel=3,
                                          func=job_finish,
                                          meta=meta)
                    except Exception as err:
                        approve_form.result = "CREATE MIGRATE JOB FAILED,ERROR MESSAGE:%s" % str(
                            err)
                        approve_form.finish = True
                        # Re-raised so the outer handler reports the error.
                        raise err
            elif form_type == 4:
                # Type 4: export a data table to csv, served over FTP.
                if approve_result == 1:
                    origin_data_table = DataTable.get(
                        approve_form.info_form.dmp_data_table_id)
                    if origin_data_table:
                        origin_database = Database.get(
                            origin_data_table.dmp_database_id)
                        origin_database_type = origin_database.db_type
                        origin_db_host = origin_database.db_host
                        origin_db_port = origin_database.db_port
                        origin_db_username = origin_database.db_username
                        origin_db_passwd = origin_database.db_passwd
                        origin_db_name = origin_database.db_name
                        origin_db_table_name = origin_data_table.db_table_name
                        try:
                            base_column = auto_connect(
                                db_id=origin_data_table.dmp_database_id
                            ).columns(origin_data_table.db_table_name)
                            # Build the reader for the origin database type
                            # (same dispatch as the migration branch above).
                            reader = []
                            if origin_database_type == 1:
                                # hive_reader
                                reader = hive_reader(
                                    host=origin_db_host,
                                    port=8020,
                                    path="/user/hive/warehouse/%s.db/%s" %
                                    (origin_db_name, origin_db_table_name),
                                    fileType="text",
                                    column=["*"],
                                    fieldDelimiter=',',
                                    encoding="utf-8")
                            elif origin_database_type == 2:
                                # mysql_reader
                                column = [
                                    col.get("dmp_data_table_column_name")
                                    for col in base_column
                                ]
                                reader = mysql_reader(
                                    username=origin_db_username,
                                    password=origin_db_passwd,
                                    column=column,
                                    host=origin_db_host,
                                    port=origin_db_port,
                                    db=origin_db_name,
                                    table=origin_db_table_name,
                                    where=None,
                                )
                            elif origin_database_type == 3:
                                # mongodb
                                column = [{
                                    "index": i + 1,
                                    "name":
                                    col.get("dmp_data_table_column_name"),
                                    "type":
                                    col.get("dmp_data_table_column_type")
                                } for i, col in enumerate(base_column)]
                                reader = mongodb_reader(
                                    host=origin_db_host,
                                    port=origin_db_port,
                                    db_name=origin_db_name,
                                    collection_name=origin_db_table_name,
                                    column=column,
                                    username=origin_db_username,
                                    password=origin_db_passwd)
                                pass
                            # Write a csv into the per-user download folder.
                            writer = []
                            download_path = os.path.join(
                                current_app.config.get("DOWNLOAD_PATH"),
                                approve_form.submit_dmp_username)
                            file_name = origin_db_table_name + uuid_str(
                            ) + ".csv"
                            finally_name = origin_db_table_name + "-" + uuid_str(
                            ) + ".csv"
                            headers = [
                                col.get("dmp_data_table_column_name")
                                for col in base_column
                            ]
                            writer = textfile_writer(filepath=download_path,
                                                     filename=file_name,
                                                     header=headers)
                            # Advertise the result via this host's FTP server.
                            ip = socket.gethostbyname(socket.gethostname())
                            ftp_url = "ftp://%s:21/%s" % (
                                str(ip),
                                str(
                                    os.path.join(
                                        approve_form.submit_dmp_username,
                                        finally_name)))
                            meta = {
                                "form_id": approve_form.id,
                                "file_name": file_name,
                                "finally_name": finally_name,
                                "download_path": download_path,
                                "ftp_url": ftp_url,
                            }
                            job_hanlder.delay(reader=reader,
                                              writer=writer,
                                              channel=1,
                                              func=dlfunc,
                                              meta=meta)
                        except Exception as err:
                            approve_form.result = "CREATE DOWNLOAD JOB FAILED,ERROR MESSAGE:%s" % str(
                                err)
                            approve_form.finish = True
                    else:
                        approve_form.result = "The original data sheet information is not obtained"
                        approve_form.finish = True
            # Persist whatever result/finish state the branch produced.
            approve_form.put()
            return resp_hanlder(result="OK!")
        except Exception as err:
            return resp_hanlder(code=999, err=err, msg=str(err))
def origin_data_table_name(self):
    """Name of the origin data table, or "-" when it no longer exists."""
    from dmp.models import DataTable
    origin = DataTable.get(self.origin_dmp_table_id)
    return origin.dmp_data_table_name if origin else "-"
def update_data_service_by_id(id, desc):
    """
    Update a data service
    ---
    tags:
      - DataService
    parameters:
      - name: id
        in: path
        type: integer
        required: true
        description: URL path parameter, ID of the data service to update
      - name: data_service_name
        in: json
        type: string
        required: false
        description: Data service name
      - name: api_path
        in: json
        type: string
        required: false
        description: Data service API path
      - name: request_method
        in: json
        type: integer
        required: false
        description: Request method, 1=GET 2=POST
      - name: description
        in: json
        type: string
        required: false
        description: Description
      - name: source_dmp_data_table_id
        in: json
        type: integer
        required: false
        description: Source data table ID
      - name: query_sql
        in: json
        type: string
        required: false
        description: Query statement (MySQL)
      - name: query_params
        in: json
        type: integer
        required: false
        description: Query parameters (MongoDB)
      - name: state
        in: json
        type: integer
        required: false
        description: Whether the service is enabled
    responses:
      500:
        description: Error The language is not awesome!
      0:
        msg: 'ok'
    """
    if request.method == 'PUT':
        try:
            auth_token = request.headers.get("Authorization")
            current_user_id = Users.decode_auth_token(auth_token)
            # Both a valid token and an id are required.
            if not current_user_id or not id:
                return resp_hanlder(code=201)
            if DataService.exist_item_by_id(id):
                data_service = DataService.get(id)
                # Only the creator may update the service.
                if current_user_id == data_service.created_dmp_user_id:
                    data = request.json
                    valid = Update_dataservice_validator(data)
                    if not valid.is_valid():
                        return resp_hanlder(code=201, msg=valid.str_errors)
                    # Apply only the fields present in the request body.
                    if "data_service_name" in data.keys():
                        data_service.data_service_name = data.get(
                            "data_service_name")
                    if "api_path" in data.keys():
                        api_path = data.get("api_path")
                        # A path that already exists is only acceptable if
                        # it is this service's own current path.
                        if DataService.exsit_data_service_by_apipath(
                                apipath=api_path):
                            item_api_path = data_service.api_path
                            if item_api_path == api_path:
                                data_service.api_path = api_path
                            else:
                                return resp_hanlder(code=101, msg="API路径已存在")
                        else:
                            data_service.api_path = api_path
                    if "request_method" in data.keys():
                        data_service.request_method = data.get(
                            "request_method")
                    if "description" in data.keys():
                        data_service.description = data.get("description")
                    if "source_dmp_data_table_id" in data.keys():
                        # NOTE(review): the id is assigned before existence
                        # is checked; harmless only because put() is never
                        # reached on the error path -- consider reordering.
                        data_service.source_dmp_data_table_id = data.get(
                            "source_dmp_data_table_id")
                        if not DataTable.exist_item_by_id(
                                data.get("source_dmp_data_table_id")):
                            return resp_hanlder(code=101, msg="源数据表不存在")
                    if "query_sql" in data.keys():
                        data_service.query_sql = data.get("query_sql")
                    if "query_params" in data.keys():
                        data_service.query_params = data.get("query_params")
                    if "state" in data.keys():
                        data_service.state = data.get("state")
                    data_service.changed_dmp_user_id = current_user_id
                    data_service.put()
                    return resp_hanlder(
                        result={"update_data_service": "complete!"})
                else:
                    return resp_hanlder(code=301)
            else:
                return resp_hanlder(code=7401, msg="数据服务不存在或在操作期间被删除")
        except Exception as err:
            return resp_hanlder(code=999, error=err)
def ds_retrieve(desc):
    """
    Debug a data-service query
    ---
    tags:
      - SQL
    parameters:
      - name: source_dmp_data_table_id
        in:
        type: int
        required: True
        description: Data table ID
      - name: query_sql_tmp
        in:
        type: string
        required: True
        description: Query template to debug
      - name: parameters
        in:
        type: dict
        required: true
        description: Parameters, {"name":"value"}
    responses:
      0:
        description: OK
    """
    from .dataservice import format_sql
    try:
        request_json = request.json if request.json else {}
        data_table_id = request_json.get("source_dmp_data_table_id")
        query_sql_tmp = request_json.get("query_sql_tmp", None)
        parameters = request_json.get("parameters", {})
        code = 0
        if all([data_table_id, query_sql_tmp]):
            if DataTable.exist_item_by_id(data_table_id):
                current_data_table = DataTable.get(data_table_id)
            else:
                code = 999
                msg = "数据表不存在"
                return resp_hanlder(code=code, msg=msg)
            db_type = current_data_table.dmp_database_type
            qp = {}
            conn = auto_connect(table_id=data_table_id)
            if db_type in [1, 2]:
                # SQL backends: substitute parameters into the template.
                query_sql = format_sql(
                    query_sql_tmp, parameters) if parameters else query_sql_tmp
                qp["sql"] = query_sql
                print(qp)
                data = conn.exec_query(**qp)
                return resp_hanlder(code=code, result=data)
            elif db_type == 3:
                # MongoDB: the template is a dict literal in string form.
                # BUGFIX: the original read `current_data_sevice.query_params`,
                # an undefined name (NameError); this endpoint debugs the
                # template supplied in the request, so use it directly.
                # SECURITY: eval() on request-supplied text executes
                # arbitrary code -- consider ast.literal_eval once the
                # templates are guaranteed to be literals.
                query_sql = eval(query_sql_tmp)
                # BUGFIX: the original used the builtin `filter` as the dict
                # key; the Mongo query filter belongs under "filter".
                query_sql["filter"] = parameters
                qp = query_sql
                print(qp)
                data = conn.exec_query(**qp)
                return resp_hanlder(code=code, result=data)
            else:
                code = 8107
                msg = "源数据库异常"
                return resp_hanlder(code=code, msg=msg)
        else:
            code = 201
            msg = "参数异常"
            return resp_hanlder(code=code, msg=msg)
    except Exception as err:
        code = 999
        msg = str(err)
        return resp_hanlder(code=code, msg=msg)
def chart_retrieve(desc):
    """
    Chart query
    ---
    tags:
      - SQL
    parameters:
      - name: dmp_data_table_id
        in:
        type: int
        required: True
        description: Data table ID
      - name: dimension
        in:
        type: array
        required: True
        description: Dimensions, format [{"name":"xxx"},{"name":"xxxxx"}]
      - name: measure
        in:
        type: array
        required: true
        description: Measures, format [{"name":"xxx", "method":"count/sum/avg"},{......}]
    responses:
      0:
        description: OK
    """
    request_json = request.json if request.json else {}
    data_table_id = request_json.get("dmp_data_table_id")
    dimension = request_json.get("dimension")
    measure = request_json.get("measure")
    # NOTE(review): `limit` is read but never used below -- confirm intent.
    limit = request_json.get("limit")
    if data_table_id and DataTable.exist_item_by_id(data_table_id):
        data_table = DataTable.get(data_table_id)
        data_table_name = data_table.db_table_name
        db_type = data_table.dmp_database_type
        dimension_names = [d.get("name") for d in dimension]
        # Measures with a method become aggregate calls, e.g. sum(x).
        measure_names = [
            m.get("name") if m.get("method") == None else "%s(%s)" %
            (m.get("method"), m.get("name")) for m in measure
        ]
        # Parallel list of result-field labels, e.g. x_sum.
        measure_names_methods = [
            m.get("name") if m.get("method") == None else "%s_%s" %
            (m.get("name"), m.get("method")) for m in measure
        ]
        # GROUP BY is emitted when any measure has an aggregate method.
        groupby = bool(
            sum([True if m.get("method") else False for m in measure]))
        # SECURITY(review): the SQL is built by string interpolation from
        # request-supplied names -- injectable; sanitize or whitelist.
        sql = "select {p1} from {p2} {p3} {p4}".format(
            p1=",".join(dimension_names + measure_names),
            p2=data_table_name,
            p3="group by " + ",".join(dimension_names) if groupby else "",
            p4=";" if db_type == 2 else "")
        # NOTE(review): the whole request body (plus "sql") is splatted
        # into exec_query as keyword arguments -- confirm its signature.
        request_json["sql"] = sql
        try:
            conn = auto_connect(table_id=data_table_id)
            _data = conn.exec_query(**request_json)
            # Round numeric cells to 2 decimals; leave other types as-is.
            func2f = lambda x: round(float(x), 2) if type(
                x) in [float, Decimal] else x
            if type(_data) == list or type(_data) == tuple:
                result = {}
                # Zip each row with the dimension + measure field labels.
                result["data"] = [
                    dict(
                        zip(dimension_names + measure_names_methods,
                            map(func2f, d))) for d in _data
                ]
                result["query_string"] = {
                    "sql": sql,
                    "fields": dimension_names + measure_names_methods
                }
                return resp_hanlder(code=0, result=result)
            else:
                return resp_hanlder(code=999, msg="查询出错 你的查询SQl:%s" % sql)
        except Exception as e:
            return resp_hanlder(code=999, msg=str(e))
    else:
        return resp_hanlder(code=999, msg="缺少dmp_data_table_id")