Code Example #1
def get_executions_data(flow_name):
    ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"],
               MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
    try:
        sql = "select id from merce_flow_execution where flow_name like '%s%%%%' order by create_time desc limit 1" % flow_name
        flow_info = ms.ExecuQuery(sql.encode('utf-8'))
        print(sql)
        print('execution_Id:', flow_info[0]["id"])
    except:
        return
    new_data = {
        "fieldList": [{
            "fieldName": "executionId",
            "fieldValue": flow_info[0]["id"],
            "comparatorOperator": "EQUAL",
            "logicalOperator": "AND"
        }],
        "sortObject": {
            "field": "lastModifiedTime",
            "orderDirection": "DESC"
        },
        "offset":
        0,
        "limit":
        8
    }
    return new_data
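Every snippet on this page relies on the MYSQL helper imported from util.Open_DB, whose source is not shown here. As a rough sketch only, a pymysql-based wrapper consistent with the ExecuQuery/ExecuNoQuery calls above might look like this (connection options such as the charset are assumptions):

import pymysql

class MYSQL:
    """Hypothetical stand-in for util.Open_DB.MYSQL, not the project's actual code."""
    def __init__(self, host, user, password, db, port=3306):
        self.conn = pymysql.connect(host=host, user=user, password=password,
                                    db=db, port=int(port), charset="utf8mb4",
                                    cursorclass=pymysql.cursors.DictCursor)

    def ExecuQuery(self, sql):
        # SELECT: return all rows as dicts, matching flow_info[0]["id"]-style access.
        with self.conn.cursor() as cur:
            cur.execute(sql)
            return cur.fetchall()

    def ExecuNoQuery(self, sql):
        # INSERT/UPDATE/DELETE: execute and commit, returning the affected row count.
        with self.conn.cursor() as cur:
            affected = cur.execute(sql)
        self.conn.commit()
        return affected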
Code Example #2
def set_upsert_data():
    print("开始执行set_upsert_data")
    ms = MYSQL(MySQL_CONFIG1["HOST"], MySQL_CONFIG1["USER"],
               MySQL_CONFIG1["PASSWORD"], MySQL_CONFIG1["DB"])
    try:
        sql = "INSERT INTO `test_flow`.`training`(`ts`, `code`, `total`, `forward_total`, `reverse_total`, `sum_flow`, `sum_inst`, `inst_num`, `max_inst`, `max_inst_ts`, `min_inst`, `min_inst_ts`) VALUES ( CURRENT_TIMESTAMP, 'code1', 310001, 50, 5, 48, 2222, 42, 55, '2020-05-01 00:09:00', 23, '2020-01-01 00:09:00')"
        ms.ExecuNoQuery(sql.encode('utf-8'))
        sql = "UPDATE `test_flow`.`training`  set ts=CURRENT_TIMESTAMP "
        ms.ExecuNoQuery(sql.encode('utf-8'))
    except Exception:
        return None  # swallow DB errors; callers treat None as "no data prepared"
Code Example #3
def delete_autotest_datas():
    print("------开始删除测试数据-------")
    ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"],
               MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
    try:
        flow_sql = "delete from merce_flow where name like 'test%' or name like 'gjb%' or  name like 'auto_api_test_%'"
        dataset_sql = "delete from merce_dataset where name like 'test%' or  name like 'merce%' or  name like 'sink%' or  name like 'gjb_test_%'  "
        schema_sql = "delete from merce_schema where name like 'test%' or  name like 'apitest%' or  name like  'gtest%'"
        tenant_sql = "delete from merce_tenant where name like 'api_tenants%' order by create_time desc limit 1"
        print("删除flow表测试数据 ", flow_sql)
        ms.ExecuNoQuery(flow_sql.encode('utf-8'))
        print("删除dataset表测试数据 ", dataset_sql)
        ms.ExecuNoQuery(dataset_sql.encode('utf-8'))
        print("删除schema表测试数据 ", schema_sql)
        ms.ExecuNoQuery(schema_sql.encode('utf-8'))
        print("删除tenant表测试数据 ", tenant_sql)
        ms.ExecuNoQuery(tenant_sql.encode('utf-8'))
    except:
        return


#delete_autotest_datas()
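The cleanup statements above splice name patterns straight into the SQL text. Where the underlying driver is reachable, a parameterized query is the safer habit; a minimal pymysql-style sketch (delete_flows_by_prefix is illustrative, not part of this project):

def delete_flows_by_prefix(conn, prefix):
    # Let the driver escape the LIKE pattern instead of formatting it by hand.
    with conn.cursor() as cur:
        cur.execute("DELETE FROM merce_flow WHERE name LIKE %s", (prefix + "%",))
    conn.commit()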
Code Example #4
# coding:utf-8
from basic_info.get_auth_token import get_headers
import unittest
import requests
from util.Open_DB import MYSQL
from basic_info.setting import MySQL_CONFIG, zmod_id, host
ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"],
           MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])


class CasesForZmod(unittest.TestCase):
    """Analysis task info API tests"""
    from basic_info.url_info import query_zdaf
    def test_create_Zdaf_flow(self):
        """创建分析任务-flow"""
        from basic_info.url_info import create_zmod_flow_url
        # data = ["e2dbfd88-0e2d-4fa2-b145-75c1a13ab455"]  # 分析模板id
        response = requests.post(url=create_zmod_flow_url,
                                 headers=get_headers(host),
                                 json=zmod_id)
        self.assertEqual(200, response.status_code, 'failed to create the analysis task')
        self.assertEqual(zmod_id[0],
                         response.json()["modelId"], "modelId of the analysis task does not match")

    def test_query_zdaf_all(self):
        """查询所有的分析任务"""
        from basic_info.url_info import query_zdaf
        data = {
            "fieldList": [],
            "sortObject": {
                "field": "lastModifiedTime",
Code Example #5
import random
from basic_info.setting import Dsp_MySQL_CONFIG
from util.Open_DB import MYSQL
from util.logs import Logger

ms = MYSQL(Dsp_MySQL_CONFIG["HOST"], Dsp_MySQL_CONFIG["USER"],
           Dsp_MySQL_CONFIG["PASSWORD"], Dsp_MySQL_CONFIG["DB"],
           Dsp_MySQL_CONFIG["PORT"])
log = Logger().get_log()


def deal_parameters(data):
    try:
        if data:
            if '随机数' in data:
                # '随机数' ("random number") is a placeholder token in the test data
                data = data.replace('随机数', str(random.randint(0, 999)))
                return deal_parameters(data)
            if 'select id from' in data:
                data_select_result = ms.ExecuQuery(data.encode('utf-8'))
                new_data = []
                if data_select_result:
                    if len(data_select_result) > 1:
                        for i in range(len(data_select_result)):
                            new_data.append(data_select_result[i]["id"])
                        if "select id from merce_dss" in data:
                            return new_data
                        elif "select id from merce_schema" in data:
                            return new_data
                        elif "select id from merce_role" in data:
                            return new_data
                        elif "select id from merce_user" in data:
Code Example #6
import random

from util.format_res import dict_res
from util.timestamp_13 import *
from basic_info.setting import Compass_MySQL_CONFIG
import os
from util.Open_DB import MYSQL
from util.logs import Logger

ms = MYSQL(Compass_MySQL_CONFIG["HOST"], Compass_MySQL_CONFIG["USER"],
           Compass_MySQL_CONFIG["PASSWORD"], Compass_MySQL_CONFIG["DB"],
           Compass_MySQL_CONFIG["PORT"])
ab_dir = lambda n: os.path.abspath(os.path.join(os.path.dirname(__file__), n))
log = Logger().get_log()


def deal_parameters(data):
    try:
        if data:
            if '随机数' in data:
                # '随机数' ("random number") placeholder token
                data = data.replace('随机数',
                                    str(random.randint(0, 999999999999999)))
                return deal_parameters(data)
            elif '监控开始时间' in data:
                # '监控开始时间' ("monitoring start time") placeholder token
                data = data.replace('监控开始时间', get_now_time()[0])
                return deal_parameters(data)
            elif '监控结束时间' in data:
                # '监控结束时间' ("monitoring end time") placeholder token
                data = data.replace('监控结束时间', get_now_time()[1])
                return deal_parameters(data)
            elif 'select job_pool_oid' in data:
                data_select_result = ms.ExecuQuery(data.encode('utf-8'))
Code Example #7
def get_dataflow_data(flow_name):
    ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"],
               MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
    try:
        sql = "select id, name,flow_type from merce_flow where name like '%s%%%%' order by create_time desc limit 1" % flow_name
        flow_info = ms.ExecuQuery(sql.encode('utf-8'))
        print("flow_id,flow_name,flow_type", flow_info[0]["id"],
              flow_info[0]["name"], flow_info[0]["flow_type"])
    except:
        return

    if "auto_api_test" in flow_name:
        data = {
            "configurations": {
                "arguments": [],
                "properties": [{
                    "name": "all.debug",
                    "value": "false",
                    "input": "false"
                }, {
                    "name": "all.dataset-nullable",
                    "value": "false",
                    "input": "false"
                }, {
                    "name": "all.optimized.enable",
                    "value": "true",
                    "input": "true"
                }, {
                    "name": "all.lineage.enable",
                    "value": "false",
                    "input": "false"
                }, {
                    "name": "all.debug-rows",
                    "value": "20",
                    "input": "20"
                }, {
                    "name": "all.runtime.cluster-id",
                    "value": "cluster1",
                    "input": ["cluster1"]
                }, {
                    "name": "dataflow.master",
                    "value": "yarn",
                    "input": "yarn"
                }, {
                    "name": "dataflow.deploy-mode",
                    "value": "client",
                    "input": ["client", "cluster"]
                }, {
                    "name": "dataflow.queue",
                    "value": "default",
                    "input": ["default"]
                }, {
                    "name": "dataflow.num-executors",
                    "value": "2",
                    "input": "2"
                }, {
                    "name": "dataflow.driver-memory",
                    "value": "512M",
                    "input": "512M"
                }, {
                    "name": "dataflow.executor-memory",
                    "value": "1G",
                    "input": "1G"
                }, {
                    "name": "dataflow.executor-cores",
                    "value": "2",
                    "input": "2"
                }, {
                    "name": "dataflow.verbose",
                    "value": "true",
                    "input": "true"
                }, {
                    "name": "dataflow.local-dirs",
                    "value": "",
                    "input": ""
                }, {
                    "name": "dataflow.sink.concat-files",
                    "value": "true",
                    "input": "true"
                }, {
                    "name": "dataflow.tempDirectory",
                    "value": "/tmp/dataflow/spark",
                    "input": "/tmp/dataflow/spark"
                }],
                "dependencies":
                "",
                "extraConfigurations": {},
                "startTime":
                1593964800000
            },
            "flowId": flow_info[0]["id"],
            "flowName": flow_info[0]["name"],
            "flowType": flow_info[0]["flow_type"],
            "name": "auto_api_test_随机数",
            "schedulerId": "once",
            "source": "rhinos"
        }
        return data
    elif "ES-upsert" in flow_name:
        data = {
            "configurations": {
                "startTime":
                1600617600000,
                "arguments": [],
                "dependencies": [],
                "extraConfigurations": {},
                "properties": [{
                    "name": "all.debug",
                    "value": "false",
                    "input": "false"
                }, {
                    "name": "all.dataset-nullable",
                    "value": "false",
                    "input": "false"
                }, {
                    "name": "all.optimized.enable",
                    "value": "true",
                    "input": "true"
                }, {
                    "name": "all.lineage.enable",
                    "value": "false",
                    "input": "false"
                }, {
                    "name": "all.debug-rows",
                    "value": "20",
                    "input": "20"
                }, {
                    "name": "all.runtime.cluster-id",
                    "value": "cluster1",
                    "input": ["cluster1"]
                }, {
                    "name": "dataflow.master",
                    "value": "yarn",
                    "input": "yarn"
                }, {
                    "name": "dataflow.deploy-mode",
                    "value": "client",
                    "input": ["client", "cluster"]
                }, {
                    "name": "dataflow.queue",
                    "value": "default",
                    "input": ["default"]
                }, {
                    "name": "dataflow.num-executors",
                    "value": "2",
                    "input": "2"
                }, {
                    "name": "dataflow.driver-memory",
                    "value": "512M",
                    "input": "512M"
                }, {
                    "name": "dataflow.executor-memory",
                    "value": "1G",
                    "input": "1G"
                }, {
                    "name": "dataflow.executor-cores",
                    "value": "2",
                    "input": "2"
                }, {
                    "name": "dataflow.verbose",
                    "value": "true",
                    "input": "true"
                }, {
                    "name": "dataflow.local-dirs",
                    "value": "",
                    "input": ""
                }, {
                    "name": "dataflow.sink.concat-files",
                    "value": "true",
                    "input": "true"
                }, {
                    "name": "dataflow.tempDirectory",
                    "value": "/tmp/dataflow/spark",
                    "input": "/tmp/dataflow/spark"
                }],
                "retry": {
                    "enable": "false",
                    "limit": 1,
                    "timeInterval": 1,
                    "intervalUnit": "MINUTES"
                }
            },
            "schedulerId": "once",
            "ource": "rhinos",
            "version": 1,
            "flowId": flow_info[0]["id"],
            "flowType": flow_info[0]["flow_type"],
            "name": "ES-upsert_随机数",
            "creator": "admin",
            "oldName": flow_info[0]["name"]
        }
        return data
    else:
        return
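Code Example #11 below shows how these payloads are actually submitted; condensed, the call pattern is roughly the following (create_scheduler_url, get_headers and HOST_189 come from the project's basic_info modules, and the flow name is illustrative):

data = get_dataflow_data("auto_api_test_demo")
res = requests.post(url=create_scheduler_url, headers=get_headers(HOST_189), json=data)
assert res.status_code == 201, res.text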
Code Example #8
File: get_execution_log.py  Project: lisalitester/API
 def __init__(self, execution_id, host):
     self.ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"], MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
     self.execution_id = execution_id
     self.host = host
Code Example #9
 def __init__(self):
     """初始化数据库连接"""
     self.ms = MYSQL(mysql_config["HOST"], mysql_config["USER"], mysql_config["PASSWORD"], mysql_config["DB"])
Code Example #10
 def __init__(self):
     """初始化数据库连接"""
     self.ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"], MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
     self.host = host
     self.table = "flow_dataset_info.xlsx"
     self.table_sheet = 'flow_info'
Code Example #11
class ExecuteWeirdDataflow(unittest.TestCase):

    # def setUp(self):
    #     self.ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"], MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
    #     self.expected_result = ['[{"name":"james","id":"6","age":"50"}]', '[{"name":"xiaowang","id":"3","age":"30"}]', '[{"name":"xiaoming","id":"1","age":"18"}]', '[{"name":"tyest","id":"4","age":"12"}]', '[{"name":"xiaohong","id":"2","age":"20"}]', '[{"name":"空","id":"5","age":"空"}]']
    #
    # def tearDown(self):
    #     pass
    ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"], MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
    expected_result = ['[{"name":"james","id":"6","age":"50"}]', '[{"name":"xiaowang","id":"3","age":"30"}]', '[{"name":"xiaoming","id":"1","age":"18"}]', '[{"name":"tyest","id":"4","age":"12"}]', '[{"name":"xiaohong","id":"2","age":"20"}]', '[{"name":"空","id":"5","age":"空"}]']
    #
    def test_create_scheduler(self):
        print("开始执行test_create_scheduler(self)")
        data = get_dataflow_data('tc_auto_df_sink_hdfs_path使用$进行分区、使用sliceTimeColumn1545633382888')
        res = requests.post(url=create_scheduler_url, headers=get_headers(HOST_189), json=data)
        print(res.url)
        print(res.status_code)
        self.assertEqual(201, res.status_code, 'failed to create scheduler: %s' % res.text)
        self.assertNotEqual(res.json().get('id', 'scheduler creation may have failed'), 'scheduler creation may have failed')
        # scheduler_id = res.json()['id']
        # print('---------scheduler_id-------', scheduler_id)
        # print(res.json()["id"])
        return res.json()['id']

    def test_get_execution_info(self):
        print("开始执行get_execution_info(self)")
        scheduler_id = self.test_create_scheduler()
        e_status_format = {'type': 'READY'}
        while e_status_format["type"] in ("READY", "RUNNING"):
            time.sleep(5)
            execution_sql = 'select id, status_type, flow_id, flow_scheduler_id from merce_flow_execution where flow_scheduler_id = "%s"' % scheduler_id
            time.sleep(20)
            print(execution_sql)
            select_result = self.ms.ExecuQuery(execution_sql)
            print(select_result)
            e_status_format["type"] = select_result[0]["status_type"]
            # e_status_format = dict_res(e_status)
        if e_status_format['type'] == 'SUCCEEDED':
            self.assertEqual('SUCCEEDED', e_status_format['type'])  # always true here; kept so the test report records the status
            print('select_result: \n', select_result)
            return select_result
        else:
            return None

    def test_get_dataset_id(self):
        """Get the execution id and status; return the dataset ids once the execution has succeeded"""

        e_info = self.test_get_execution_info()
        if e_info:
            data_json_sql = 'select b.dataset_json from merce_flow_execution as a  LEFT JOIN merce_flow_execution_output as b on a.id = b.execution_id where a.id ="%s"' % e_info[0]["id"]
            data_json = self.ms.ExecuQuery(data_json_sql)
            sink_dataset_list = []
            for n in range(len(data_json)):
                sink_dataset = data_json[n]["dataset_json"]  # 返回结果为元祖
                sink_dataset_id = dict_res(sink_dataset)["id"]  # 取出json串中的dataset id
                sink_dataset_list.append(sink_dataset_id)
            print('----------sink_dataset_list----------', sink_dataset_list)
            return sink_dataset_list
        else:
            return None
    def test_test_check_result(self):
        ''' 返回多dataset且ID会变,对该flow的校验 '''
        sink_dataset_list = self.test_get_dataset_id()
        if sink_dataset_list:
            L = []
            for dataset_id in sink_dataset_list:
                preview_url = "%s/api/datasets/%s/preview?rows=5000&tenant=2d7ad891-41c5-4fba-9ff2-03aef3c729e5" % (HOST_189, dataset_id)
                result = requests.get(url=preview_url, headers=get_headers(HOST_189))
                L.append(result.text)
            different_result = [i for i in self.expected_result if i not in L]
            self.assertEqual(len(self.expected_result), len(L))
            self.assertEqual(different_result, [])
        else:
            return None
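test_get_execution_info above polls merce_flow_execution until the status leaves READY/RUNNING. Factored out, that loop might look like the sketch below; wait_for_execution, interval and timeout are illustrative additions, not project code:

import time

def wait_for_execution(ms, scheduler_id, interval=5, timeout=600):
    # Poll the execution row until its status is neither READY nor RUNNING, or time out.
    sql = ('select id, status_type from merce_flow_execution '
           'where flow_scheduler_id = "%s"' % scheduler_id)
    deadline = time.time() + timeout
    while time.time() < deadline:
        rows = ms.ExecuQuery(sql)
        if rows and rows[0]["status_type"] not in ("READY", "RUNNING"):
            return rows
        time.sleep(interval)
    return None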
Code Example #12
File: clean_test_data.py  Project: xu-hn/For_API
 def __init__(self):
     self.ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"], MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
Code Example #13
def get_dataflow_data(flow_name):
    print("开始执行get_dataflow_data(flow_name)")
    ms = MYSQL(MySQL_CONFIG["HOST"], MySQL_CONFIG["USER"],
               MySQL_CONFIG["PASSWORD"], MySQL_CONFIG["DB"])
    try:
        sql = 'select id, flow_type from merce_flow where name = "%s"' % flow_name
        flow_info = ms.ExecuQuery(sql)
        print(sql)
        print('flow_info:', flow_info)
    except Exception:
        raise  # re-raise, preserving the original traceback
    else:
        try:
            flow_id = flow_info[0]["id"]
            flow_type = flow_info[0]["flow_type"]
            # print(flow_name, flow_type)
        except KeyError:
            raise  # flow_info rows lack the expected id/flow_type columns

    data = {
        "configurations": {
            "arguments": [],
            "properties": [{
                "name": "all.debug",
                "value": "false"
            }, {
                "name": "all.dataset-nullable",
                "value": "false"
            }, {
                "name": "all.lineage.enable",
                "value": "true"
            }, {
                "name": "all.notify-output",
                "value": "false"
            }, {
                "name": "all.debug-rows",
                "value": "20"
            }, {
                "name": "dataflow.master",
                "value": "yarn"
            }, {
                "name": "dataflow.deploy-mode",
                "value": "client"
            }, {
                "name": "dataflow.queue",
                "value": "merce.normal"
            }, {
                "name": "dataflow.num-executors",
                "value": "2"
            }, {
                "name": "dataflow.driver-memory",
                "value": "512M"
            }, {
                "name": "dataflow.executor-memory",
                "value": "1G"
            }, {
                "name": "dataflow.executor-cores",
                "value": "2"
            }, {
                "name": "dataflow.verbose",
                "value": "true"
            }, {
                "name": "dataflow.local-dirs",
                "value": ""
            }, {
                "name": "dataflow.sink.concat-files",
                "value": "true"
            }],
            "startTime":
            get_time()
        },
        "flowId": flow_id,
        "flowName": flow_name,
        "flowType": flow_type,
        "name": flow_name + str(random.randint(0, 99999)),
        "schedulerId": "once",
        "source": "rhinos"
    }
    return data
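get_time() is defined elsewhere in the project (compare the util.timestamp_13 import in Code Example #6); for startTime it presumably returns a 13-digit millisecond timestamp, along the lines of:

import time

def get_time():
    # Hypothetical stand-in: the current time as a 13-digit millisecond timestamp.
    return int(time.time() * 1000)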
Code Example #14
 def __init__(self, execution_id):
     self.ms = MYSQL(mysql_config["HOST"], mysql_config["USER"],
                     mysql_config["PASSWORD"], mysql_config["DB"])
     self.execution_id = execution_id
     self.host = HOST
Code Example #15
from openpyxl import load_workbook  # load_workbook is used below
from new_api_cases.get_statementId import statementId_flow_use, preview_result_flow_use
from util.Open_DB import MYSQL
from basic_info.setting import MySQL_CONFIG
from basic_info.setting import host
from util.format_res import dict_res

host_for_url = host
# excel_dir=os.path.join(os.path.abspath(".."),r"api_test_cases\flow_dataset_info.xlsx")
table = abs_dir("flow_dataset_info.xlsx")
host = MySQL_CONFIG["HOST"]
port = MySQL_CONFIG["PORT"]
user = MySQL_CONFIG["USER"]
pwd = MySQL_CONFIG["PASSWORD"]
db = MySQL_CONFIG["DB"]

ms = MYSQL(host, user, pwd, db, port)
flow_table = load_workbook(table)
sheet = flow_table.get_sheet_by_name("flink")
max_rows = sheet.max_row
print(max_rows)


def get_flow_id():
    flow_id_list = []
    for i in range(2, max_rows + 1):
        flow_id_list.append(sheet.cell(row=i, column=2).value)
    return flow_id_list


def data_for_exe():
    data_for_exe_list = []