def setup_IE(self):
    localreadconfig = rConfig.ReadConfig()
    baseurl = localreadconfig.get_http("baseURL")
    ieaction = IEAction()
    driver = ieaction.setup_IE()
    driver.get(baseurl)
    driver.maximize_window()
    time.sleep(3)
    # ieaction.do_screenshot(driver)
    return driver
def add_book(self, filename, begincase, casenum):
    booklist = []
    # Load the test case data from the Excel file
    conxls = ConnectExcelData(filename, exlpath)
    caselist = conxls.get_case_List(conxls.absFile, begincase, casenum)
    # Call the interface for each test case
    for casedict in caselist:
        caseid = casedict['id']
        print(caseid)
        caseintf = casedict['interface']
        datadict = {}
        datadict['id'] = str(int(casedict['bookid']))
        datadict['name'] = casedict['bookname']
        datadict['author'] = casedict['bookauthor']
        datadict['year'] = str(int(casedict['bookyear']))
        datadict['digest'] = casedict['bookdigest']
        datadict['caseid'] = caseid
        booklist.append(datadict)
        print(datadict)
        serviceparams = {}
        # Build the request parameters; bookValues is passed as a JSON string
        serviceparams['bookValues'] = json.dumps(datadict)
        localreadconfig = rConfig.ReadConfig()
        baseurl = localreadconfig.get_http("baseURL")
        serviceurl = baseurl + caseintf
        callurl = CallUrl()
        respdict = callurl.post_serviceurl(serviceurl, serviceparams)
        tmpjson = respdict['rjson']
        restime = respdict['rtime']
        print(tmpjson)
        success1 = tmpjson['success']
        errorNo1 = tmpjson['errorNo']
        errorInfo1 = tmpjson['errorInfo']
        print(tmpjson['success'])
        print(tmpjson['errorNo'])
        print(tmpjson['errorInfo'])
        # Write the result back to the 'dataout' sheet for this case
        therow = conxls.get_row_num(conxls.absFile, 'dataout', caseid)
        print(therow)
        conxls.write_case_result(errorInfo1, restime, success1, errorNo1, conxls.absFile, therow + 1)
    return booklist
def get_book_list(self, filename, begincase, casenum, bookid):
    # Load the test case data from the Excel file
    conxls = ConnectExcelData(filename, exlpath)
    caselist = conxls.get_case_List(conxls.absFile, begincase, casenum)
    # Call the interface for each test case
    for casedict in caselist:
        caseid = casedict['id']
        print(caseid)
        caseintf = casedict['interface']
        localreadconfig = rConfig.ReadConfig()
        baseurl = localreadconfig.get_http("baseURL")
        serviceurl = baseurl + caseintf
        serviceparams = {}
        callurl = CallUrl()
        respdict = callurl.post_serviceurl(serviceurl, serviceparams)
        tmpjson = respdict['rjson']
        restime = respdict['rtime']
        resjson = 0
        print(tmpjson)
        booknum = tmpjson['totalProperty']
        rescode = 0
        # Write the overall result back to the 'dataout' sheet for this case
        therow = conxls.get_row_num(conxls.absFile, 'dataout', caseid)
        print(therow)
        conxls.write_case_result(resjson, restime, booknum, rescode, conxls.absFile, therow + 1)
        # If a bookid was given, locate that book in the returned list and record it
        if bookid > 0 and int(booknum) > 0:
            books = tmpjson['root']
            for book in books:
                if int(book['id']) == int(bookid):
                    print(book['id'])
                    conxls.write_book_result(book, conxls.absFile, therow + 1)
def testgetbooklist(self):
    # Basic parameters for this case
    filename = 'caseData.xlsx'
    begincase = '00003'
    casenum = 1
    # Load the test case data from the Excel file
    conxls = ConnectExcelData(filename, exlpath)
    caselist = conxls.get_case_List(conxls.absFile, begincase, casenum)
    print(caselist)
    # Call the interface for each test case
    for casedict in caselist:
        caseid = casedict['id']
        print(caseid)
        caseintf = casedict['interface']
        localreadconfig = rConfig.ReadConfig()
        baseurl = localreadconfig.get_http("baseURL")
        serviceurl = baseurl + caseintf
        serviceparams = {}
        callurl = CallUrl()
        respdict = callurl.get_serviceurl(serviceurl, serviceparams)
        tmpjson = respdict['rjson']
        restime = respdict['rtime']
        print(tmpjson)
        resjson = tmpjson['error']
        booknum = 0
        rescode = tmpjson['status']
        # Write the result back to the 'dataout' sheet for this case
        therow = conxls.get_row_num(conxls.absFile, 'dataout', caseid)
        print(therow)
        conxls.write_case_result(resjson, restime, booknum, rescode, conxls.absFile, therow + 1)
import peewee as pw
from peewee import *

from common import ReadConfig

config = ReadConfig.getConfig()
db_host = config.get("database", "database.host")
db_port = config.get("database", "database.port")
db_name = config.get("database", "database.db")
db_user = config.get("database", "database.user")
db_pass = config.get("database", "database.password")

db = pw.MySQLDatabase(db_name, user=db_user, password=db_pass,
                      host=db_host, port=int(db_port))
db.connect(reuse_if_open=True)


class BaseModel(Model):
    """A base model that will use our MySQL database"""
    class Meta:
        database = db
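# Usage sketch (assumption, not from the source): concrete tables only need to
# subclass BaseModel to pick up the shared MySQL connection configured above.
# The model name and fields below are hypothetical.
class ExampleBook(BaseModel):
    name = pw.CharField(max_length=128)    # book title
    author = pw.CharField(max_length=64)   # author name
    year = pw.IntegerField(null=True)      # publication year

# Creating the table and inserting a row would then look like:
# db.create_tables([ExampleBook])
# ExampleBook.create(name="test", author="tester", year=2020)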
class CreateJson(object):
    def __init__(self, dirname):
        self.dirname = dirname  # directory that holds this table's config
        self.sql_table = os.path.basename(dirname)  # the folder name is the writer-side table name
        # Read configuration parameters
        self.__dbconfig = ReadConfig()
        self.reader = self.__dbconfig.get_db("reader")
        self.writer = self.__dbconfig.get_db("writer")
        # reader database parameters
        self.host_r = self.__dbconfig.get_db("host_r")
        self.user_r = self.__dbconfig.get_db("user_r")
        self.password_r = self.__dbconfig.get_db("password_r")
        self.database_r = self.__dbconfig.get_db("database_r")
        self.port_r = self.__dbconfig.get_db("port_r")
        self.schema_r = self.__dbconfig.get_db("schema_r")
        # writer database parameters
        self.host_w = self.__dbconfig.get_db("host_w")
        self.user_w = self.__dbconfig.get_db("user_w")
        self.password_w = self.__dbconfig.get_db("password_w")
        self.database_w = self.__dbconfig.get_db("database_w")
        self.port_w = self.__dbconfig.get_db("port_w")
        self.schema_w = self.__dbconfig.get_db("schema_w")
        # DataX home directory
        self.datax_home = self.__dbconfig.get_other("datax_home")
        # Path where the generated json file is saved
        self.output_json_path = os.path.join(self.dirname, "output_json/%s.json" % self.sql_table)

    def get_json_template(self):
        # Pick the reader and writer json templates that match the configured plugins
        reader_json = [row for row in json_dict["reader"] if row["name"] == "%sreader" % self.reader]
        writer_json = [row for row in json_dict["writer"] if row["name"] == "%swriter" % self.writer]
        # DataX job json template
        template_json = {
            "job": {
                "setting": {
                    "speed": {
                        "byte": 1048576,
                        "channel": "4",
                    }
                },
                "content": [
                    {
                        "reader": reader_json[0],
                        "writer": writer_json[0]
                    }
                ]
            }
        }
        return template_json

    def get_readsql(self):
        # Read the reader querySql from the config file
        with open(os.path.join(self.dirname, "conf/read.sql"), "r") as f:
            select_sql = f.read()
        return select_sql

    def get_presql(self):
        with open(os.path.join(self.dirname, "conf/pre.sql"), "r") as f:
            pre_sql = f.read()
        return pre_sql

    def get_postsql(self):
        with open(os.path.join(self.dirname, "conf/post.sql"), "r") as f:
            post_sql = f.read()
        return post_sql

    def get_col_list(self):
        # Get the columns of the target (writer) table from its metadata
        writer_col_list = []
        meta_param = (self.database_w, self.sql_table) if self.schema_w == "" else (self.schema_w, self.sql_table)
        sql = metasql_dict[self.writer].format(meta_param[0], meta_param[1])
        rs = getSQLResult(self.writer, sql, self.host_w, self.user_w, self.password_w,
                          self.database_w, int(self.port_w))
        for row in rs:
            colName = row[0]
            colType = row[1]
            colComment = row[2]
            writer_col_list.append(colName)
        return writer_col_list

    def set_json_parma(self):
        template_json = self.get_json_template()
        select_sql = self.get_readsql()
        pre_sql = self.get_presql()
        post_sql = self.get_postsql()
        writer_col_list = self.get_col_list()
        # Fill in the reader-side json parameters
        template_json["job"]["content"][0]["reader"]["parameter"]["username"] = self.user_r
        template_json["job"]["content"][0]["reader"]["parameter"]["password"] = self.password_r
        reader_jdbc = template_json["job"]["content"][0]["reader"]["parameter"]["connection"][0]["jdbcUrl"]
        template_json["job"]["content"][0]["reader"]["parameter"]["connection"][0]["jdbcUrl"] = [reader_jdbc[0].format(
            host=self.host_r, port=self.port_r, database=self.database_r)]
        template_json["job"]["content"][0]["reader"]["parameter"]["connection"][0]["querySql"] = [select_sql]
        # Fill in the writer-side json parameters
        template_json["job"]["content"][0]["writer"]["parameter"]["username"] = self.user_w
        template_json["job"]["content"][0]["writer"]["parameter"]["password"] = self.password_w
        template_json["job"]["content"][0]["writer"]["parameter"]["column"] = writer_col_list
        template_json["job"]["content"][0]["writer"]["parameter"]["preSql"] = [] if pre_sql.strip() == "" else [pre_sql]
        template_json["job"]["content"][0]["writer"]["parameter"]["postSql"] = [] if post_sql.strip() == "" else [post_sql]
        writer_jdbc = template_json["job"]["content"][0]["writer"]["parameter"]["connection"][0]["jdbcUrl"]
        template_json["job"]["content"][0]["writer"]["parameter"]["connection"][0]["jdbcUrl"] = writer_jdbc.format(
            host=self.host_w, port=self.port_w, database=self.database_w)
        sql_table_param = self.sql_table if self.schema_w == "" else "%s.%s" % (self.schema_w, self.sql_table)
        template_json["job"]["content"][0]["writer"]["parameter"]["connection"][0]["table"] = [sql_table_param]
        return template_json

    def del_json_files(self):
        # Delete every json file under the output_json folder
        output_json_dir = os.path.join(os.path.dirname(self.output_json_path), "*")
        os.system("rm -rf %s" % output_json_dir)
        print("All json files under the output_json folder have been deleted...")

    def save_json(self):
        self.del_json_files()  # clear the output_json directory before saving
        template_json = self.set_json_parma()
        # Save the json file
        print("Generating the DataX json template for table {sql_table} of database {sql_db}...".format(
            sql_db=self.database_w, sql_table=self.sql_table))
        with open(self.output_json_path, 'w') as f:
            json.dump(template_json, f, indent=4, ensure_ascii=False)
        print("%s.json has been generated..." % self.sql_table)
        return "Save json file successfully..."

    def run(self):
        self.save_json()
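# Usage sketch (assumption, not from the source): given a table directory that
# contains conf/read.sql, conf/pre.sql and conf/post.sql, generating the DataX
# job json looks roughly like this; the path below is hypothetical.
if __name__ == "__main__":
    creator = CreateJson("/data/datax_jobs/my_table")  # hypothetical table directory
    creator.run()  # writes output_json/my_table.json under that directory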
# -*- coding:utf-8 -*-
import requests

from common.Log import Log
from common.ReadConfig import *

rc = ReadConfig()
log = Log()
headkey = rc.get_headers('key')


def get(url, params, Authonvalue=''):
    """
    GET request: build the auth header from the config file and send the request.
    """
    if Authonvalue == '':
        header = {}
    else:
        if Authonvalue != 'om':
            headvalue = rc.get_headers('market')
        else:
            headvalue = rc.get_headers('om')
        header = {headkey: headvalue}
    try:
        # When passing cookies through headers, change the key from id to cookies
        response = requests.get(url, params=params, headers=header, timeout=10)
        return response
    except Exception as e:
        log.error(e)


def post(url, data, Authonvalue=''):
    """
    POST request. The original snippet is cut off here, so this body is an
    assumed mirror of get() above.
    """
    if Authonvalue == '':
        header = {}
    else:
        if Authonvalue != 'om':
            headvalue = rc.get_headers('market')
        else:
            headvalue = rc.get_headers('om')
        header = {headkey: headvalue}
    try:
        response = requests.post(url, data=data, headers=header, timeout=10)
        return response
    except Exception as e:
        log.error(e)
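# Usage sketch (assumption, not from the source): callers build the full service
# URL from the configured baseURL; the path and parameters below are hypothetical.
# resp = get(rc.get_http("baseURL") + "/bookQuery", params={"id": "1"}, Authonvalue="om")
# if resp is not None:
#     print(resp.status_code, resp.json())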
from flask import Flask, jsonify, request
from flask_cors import CORS

from dataModels.MachineGPUResourceInfo import MachineGPUResourceInfo
from dataModels.BaseModel import *
from dataModels.DPU import DPU
from dataModels.GlusterFSVolume import GlusterFSVolume
from dataModels.KubeCluster import KubeCluster
from dataModels.MachinePool import MachinePool
from dataModels.MLClusterInfo import MLClusterInfo
from dataModels.User import User
from common.Utils import *  # presumably also provides conf, log, Paths and ClusterManagement

# Make sure the tables for all models exist before serving requests
db.create_tables([
    KubeCluster,
    GlusterFSVolume,
    DPU,
    MachinePool,
    User,
    MLClusterInfo,
    MachineGPUResourceInfo
])

config = conf.getConfig()

app = Flask(__name__)
CORS(app, resources={r"/*": {"origins": "*"}})


@app.route(Paths.CREATE_CLUSTER_PATH, methods=['PUT'])
def create_kube_cluster():
    payload = request.get_json(force=True)
    log.debug("Create cluster payload is {0}".format(payload))
    ClusterManagement.create_cluster(payload)
    out = {
        "status": "SUCCESS",
        "message": "Successfully processed",
        "data": None
    }
    return jsonify(out)
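# Usage sketch (assumption, not from the source): the PUT endpoint can be exercised
# with Flask's built-in test client; the payload fields below are hypothetical.
# client = app.test_client()
# resp = client.put(Paths.CREATE_CLUSTER_PATH,
#                   json={"clusterName": "demo", "machinePool": "default"})
# print(resp.get_json())  # expected: {"status": "SUCCESS", ...}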
from selenium import webdriver
from selenium.webdriver.support.select import Select
import sys, time, unittest, re

import common.ReadConfig as co

sys.path.append('..')
from pages.loginpage import LoginPage
from pages.zf_userpage import UserPagezf
from pages.jqxxpage import JqxxPage
from pages.xfgk1cyed1xfedpage import Cyed1xfedPage
import utils.util as ut

file1 = ut.DATA_PATH + '/user_data.csv'
data1 = ut.get_data(file1)
file2 = ut.DATA_PATH + '/zfzjq_data.csv'
data2 = ut.get_data(file2)
url = co.getbrowsername('Url')
mark1 = 1


class TestCyed(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.driver = webdriver.Firefox()
        cls.driver.maximize_window()
        cls.driver.get(url)
        time.sleep(3)
        cls.dl = LoginPage(cls.driver, Select)
        cls.zfuser = UserPagezf(cls.driver, Select)
        cls.jqxx = JqxxPage(cls.driver, Select)
        cls.cyed1 = Cyed1xfedPage(cls.driver, Select)

    @classmethod
import logging as log
# import DbCon
from common import ReadConfig as config

config = config.getConfig()
log.basicConfig(filename=config.get("logging", "fileName"),
                level=log.DEBUG,
                format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
log.debug("This is ")


def get_log():
    return log


def info(message):
    log.info(msg=message)


def debug(message):
    log.debug(msg=message)


def exception(exp, msg=None):
    if msg is not None:
        exp = "Message is: " + msg + "\n Stacktrace is : " + str(exp)
    log.error(exp, exc_info=True)
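# Usage sketch (assumption, not from the source): other modules call these helpers
# instead of configuring logging themselves; the import name below is hypothetical.
# import Logger as logger   # hypothetical module name for this file
# logger.info("job started")
# try:
#     1 / 0
# except Exception as e:
#     logger.exception(e, msg="division failed")  # logs the message plus the stack trace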