class ExcelHandler(object):
    """Helpers for reading test-case rows out of the Excel workbook."""

    oper_j = operate_json.OperateJson()
    con = Config.Config()

    def get_excel_data(self, case_desc):
        """Return the sheet rows whose values contain *case_desc*.

        :param case_desc: case_description value used to select test cases
        :return: list of dicts mapping header-row titles to cell values
        """
        workbook = xlrd.open_workbook(Config.TEST_CASE_PATH)
        sheet = workbook.sheet_by_index(0)
        # First row is the header; it supplies the dict keys below.
        header = sheet.row_values(0)
        matched = []
        for row_idx in range(1, sheet.nrows):
            values = sheet.row_values(row_idx)
            if case_desc in values:
                matched.append(dict(zip(header, values)))
        return matched
def __init__(self, headers, cookies):
    """Store request context and configure file + console logging."""
    # NOTE(review): 'herders' and 'repeson' look like typos of 'headers'
    # and 'response', but the names are kept — callers may read them as-is.
    self.herders = headers
    self.cookies = cookies
    self.repeson = None
    self.con = Config.Config()
    conf_name = 'token.ini'
    # Full DEBUG log goes to myapp.log (truncated on each run).
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
        datefmt='%a, %d %b %Y %H:%M:%S',
        filename='myapp.log',
        filemode='w')
    # Mirror INFO-and-above records to the console via the root logger.
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    console.setFormatter(logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s'))
    logging.getLogger('').addHandler(console)
    # Per-section crypto material (appname/key/iv) read from token.ini.
    self.appname = {}
    self.key = {}
    self.iv = {}
    config_data = self.con.readconfig(conf_name)
    for section in config_data.keys():
        entry = config_data.get(section)
        self.appname[section] = entry.get('appname')
        self.key[section] = entry.get('key')
        self.iv[section] = entry.get('iv')
    # Token/timestamp helper.
    self.t = Token.crypt()
def __init__(self):
    """Create the collaborators this test helper relies on."""
    # Infrastructure: configuration and logging.
    self.conf = Config.Config()
    self.log = Log.MyLog()
    # Data access and request plumbing.
    self.oper_j = operate_json.OperateJson()
    self.excel = ExcelHandler.ExcelHandler()
    self.reqe = req_reload.ReqReload()
    # Assertion helper used to verify responses.
    self.test = Assert.Assertions()
def checkDB_for_period():
    """Scan the candle table and verify that consecutive rows are exactly
    one candle period apart.

    The period comes from the config: candleDiff scaled by candlePeriod
    ('M' = minutes, 'H' = hours, otherwise seconds).

    :return: True when an irregular interval is found, False otherwise.
    """
    conf = Config.Config()
    dbConf = DbConfig.DbConfig()
    candleDiff = conf.candleDiff
    if conf.candlePeriod == 'M':
        candleDiff = candleDiff * 60
    if conf.candlePeriod == 'H':
        candleDiff = candleDiff * 3600
    expected = timedelta(seconds=candleDiff)
    connect = psycopg2.connect(database=dbConf.dbname, user=dbConf.user,
                               host=dbConf.address, password=dbConf.password)
    error = False
    try:
        cursor = connect.cursor()
        # NOTE(review): itersize only affects *named* (server-side) cursors
        # in psycopg2; kept for parity with the original code.
        cursor.itersize = 1000
        print('Successfully connected')
        tName = conf.insName.lower()
        cursor.execute('SELECT * FROM {0} ORDER BY datetimestamp;'.format(tName))
        lastTimeStamp = datetime.min  # sentinel: no previous row yet
        for row in cursor:
            timeStamp = row[0]
            if lastTimeStamp != datetime.min:
                delta = timeStamp - lastTimeStamp
                if delta != expected:
                    print('Error: difference in time is ', delta, row)
                    error = True
                    break
            lastTimeStamp = timeStamp
    finally:
        # BUG FIX: the connection used to leak when execute() or the row
        # iteration raised; close it on every path.
        connect.close()
    return error
class ExcelHandler(object):
    """Excel workbook access for the test-case sheet."""

    oper_j = operate_json.OperateJson()
    con = Config.Config()

    def get_excel_data(self, case_desc):
        """Select case rows by *case_desc*; the special tag 'all' selects
        every row.

        :param case_desc: case_description filter value (or 'all')
        :return: list of dicts keyed by the header row
        """
        workbook = xlrd.open_workbook(Config.TEST_CASE_PATH)
        sheet = workbook.sheet_by_index(0)
        header = sheet.row_values(0)
        run_all = case_desc == 'all'
        selected = []
        for row_idx in range(1, sheet.nrows):
            values = sheet.row_values(row_idx)
            # A row qualifies when running everything or when it carries
            # the requested case description.
            if run_all or case_desc in values:
                selected.append(dict(zip(header, values)))
        return selected
def test_oanda_fx_history(self):
    """One day of EUR_USD S5 history from OANDA should be non-empty."""
    conf = Config.Config()
    api = oandapyV20.API(environment="practice", access_token=conf.token)
    downloader = StockDataDownloader.StockDataDownloader()
    dateFrom = datetime.utcnow() - timedelta(days=1)
    dateTo = datetime.utcnow()
    candles = downloader.get_data_from_oanda_fx(api, 'EUR_USD', 'S5',
                                                dateFrom, dateTo)
    self.assertTrue(len(candles) > 0)
def __init__(self):
    """Load config/log helpers, test data, and default request headers."""
    self.config = Config.Config()
    self.log = Log.MyLog()
    self.data = ConfRelevance.ConfRelevance(CONF_PATH,
                                            "test_data").get_relevance_conf()
    # Default headers for JSON API calls; token comes from the config.
    ua = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
          "(KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36")
    self.headers = {
        "User-Agent": ua,
        "Content-Type": "application/json",
        "X-Auth-Token": self.config.token,
    }
def test_accounts(self):
    """Account listing and EUR_USD pricing should both return data."""
    conf = Config.Config()
    api = oandapyV20.API(environment="practice", access_token=conf.token)
    account_list = AccountList()
    api.request(account_list)
    self.assertTrue(len(account_list.response.get('accounts')) > 0)
    pricing = PricingInfo(conf.account_id, "instruments=EUR_USD")
    api.request(pricing)
    print(pricing.response)
    self.assertTrue(len(pricing.response.get('prices')) > 0)
def test_market_orders(self):
    """Open a 1-unit EUR_USD market order, close it, and verify the
    account balance stays positive.

    Intended for demo/practice accounts only.
    """
    conf = Config.Config()
    token = conf.token
    accId = conf.account_id
    oanda = oandapyV20.API(environment="practice", access_token=token)
    mktOrder = MarketOrderRequest(instrument='EUR_USD', units=1)
    r = orders.OrderCreate(accId, data=mktOrder.data)
    resp = oanda.request(r)
    print(resp)
    # Flatten the position we just opened.
    r = positions.PositionClose(accId, 'EUR_USD', {"longUnits": "ALL"})
    resp = oanda.request(r)
    print(resp)
    r = AccountDetails(accId)
    balance = oanda.request(r).get('account').get('balance')
    # BUG FIX: the v20 REST API returns 'balance' as a string; comparing a
    # str with 0 raises TypeError on Python 3. Convert before asserting.
    self.assertTrue(float(balance) > 0)
def fix_missing(delta, row):
    """Back-fill candles missing before *row* by bulk-inserting copies of
    its values, one per candle period covered by *delta*.

    :param delta: time gap between *row* and the previous row
    :param row: DB row after the gap; row[0] is the datetimestamp,
        row[1..3] the values duplicated into each inserted candle
    """
    conf = Config.Config()
    dbConf = DbConfig.DbConfig()
    # Candle spacing in seconds ('M' = minutes, 'H' = hours in the config).
    candleDiff = conf.candleDiff
    if conf.candlePeriod == 'M':
        candleDiff = candleDiff * 60
    if conf.candlePeriod == 'H':
        candleDiff = candleDiff * 3600
    tName = conf.insName.lower()
    cmd = ('INSERT INTO {0} VALUES').format(tName)
    cmd_bulk = ''
    dumpback = delta
    mcount = 0
    md = row[0]
    # Walk backwards from row's timestamp, one period at a time, building
    # one VALUES tuple per missing candle.
    while dumpback > timedelta(seconds=candleDiff):
        #cmdel = ('DELETE FROM {0} WHERE ').format(tName)
        md -= timedelta(seconds=candleDiff)
        #cmdel = cmdel + ("(datetimestamp) = '{0}';".format(md))
        cmd_bulk = cmd_bulk + ("(TIMESTAMP '{0}',{1},{2},{3}),\n".format(
            md, row[1], row[2], row[3]))
        print(md)
        #connect = psycopg2.connect(database=dbConf.dbname, user=dbConf.user, host=dbConf.address, password=dbConf.password)
        #curdel = connect.cursor()
        #print(cmdel)
        #curdel.execute(cmdel)
        #connect.close()
        dumpback -= timedelta(seconds=candleDiff)
        mcount += 1
    connect = psycopg2.connect(database=dbConf.dbname, user=dbConf.user,
                               host=dbConf.address, password=dbConf.password)
    cursor = connect.cursor()
    if len(cmd_bulk) > 0:
        # Strip the trailing ",\n" and terminate the statement.
        cmd = cmd + cmd_bulk[:-2] + ';'
        cursor.execute(cmd)
        # (Russian) "Inserted missing rows. Count: <mcount>"
        print("Вставка пропушенных. Количество: ", mcount)
        # (Russian) "Cycle at <timestamp>"
        print("Цикл на ", row[0])
        connect.commit()
        connect.close()
    else:
        # (Russian) "No missing rows"
        # NOTE(review): the connection is not closed on this branch.
        print("Нет пропущенных")
    # (Russian) "Insertion of missing rows finished"
    print("Вставка пропущенных завершена")
def __init__(self):
    """Prepare config, logging, relevance data, default headers and the
    address template.

    Values wrapped in ``${...}$`` are placeholders substituted later from
    the relevance configuration.
    """
    self.config = Config.Config()
    self.log = Log.MyLog()
    self.relevance = ConfRelevance.ConfRelevance(
        CONF_PATH, "test_data").get_relevance_conf()
    ua = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
          "(KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36")
    self.headers = {
        "User-Agent": ua,
        "Content-Type": "application/json",
        "X-Auth-Token": "${token_id}$",
    }
    self.address = {
        "address": "/v3/projects/${project_id}$/users/${admin_id}$/roles/${admin_role_id}$",
    }
def __init__(self):
    """Read debug DB settings and build the kwargs dict used to connect.

    NOTE: ``self.config`` ends up bound to the connection-parameter dict,
    not to the Config object it was read from.
    """
    cfg = Config()
    self.db = None
    self.cursor = None
    # Individual settings kept as attributes for later inspection.
    self.host = cfg.dbhost_debug
    self.username = cfg.dbuser_debug
    self.password = cfg.dbpassword_debug
    self.port = cfg.dbport_debug
    self.database = cfg.dbdatabase_debug
    self.config = {
        'host': str(self.host),
        'user': self.username,
        'passwd': self.password,
        'port': int(self.port),
        'db': self.database,
    }
def get_patterns_for_window_and_num(window, length, limit=None):
    """Slide a (length + window) frame over the candle table and collect
    buy/sell patterns.

    The first ``length`` candles form the candidate pattern; the following
    ``window`` candles are scanned for the first real (non-zero volume)
    candle whose open/close breaks out of the candidate's last candle,
    labelling the pattern 'buy' or 'sell'.

    :param window: number of look-ahead candles
    :param length: number of candles per pattern
    :param limit: optional SQL LIMIT (all rows when None)
    :return: list of Pattern objects
    """
    conf = Config.Config()
    dbConf = DbConfig.DbConfig()
    connect = psycopg2.connect(database=dbConf.dbname, user=dbConf.user,
                               host=dbConf.address, password=dbConf.password)
    cursor = connect.cursor()
    print('Successfully connected')
    tName = conf.insName.lower()
    cmd = 'SELECT COUNT(*) FROM {0};'.format(tName)
    cursor.execute(cmd)
    totalCount = cursor.fetchone()[0]
    print('Total items count {0}'.format(totalCount))
    cmd = 'SELECT * FROM {0} ORDER BY open_time'.format(tName)
    if limit is None:
        cmd = '{0};'.format(cmd)
    else:
        cmd = '{0} LIMIT {1};'.format(cmd, limit)
    cursor.execute(cmd)
    wl = list()          # sliding window of Candle objects
    patterns = list()
    profits = list()
    indicies = list()
    i = 1
    for row in cursor:
        nextCandle = Candle(open_price=row[0], high_price=row[1],
                            low_price=row[2], close_price=row[3],
                            volume=row[4], open_time=row[5])
        wl.append(nextCandle)
        print('Row {0} of {1}, {2:.3f}% total'.format(
            i, totalCount, 100 * (float(i) / float(totalCount))))
        if len(wl) == window + length:
            # find pattern of 0..length elements that indicates price
            # falls / grows in the next window elements to get profit
            candle = wl[length - 1]
            ind = length + 1
            # take real data only
            if candle.volume != 0:
                while ind <= window + length:
                    iCandle = wl[ind - 1]
                    # define patterns for analyzing iCandle
                    if iCandle.volume != 0:
                        # if iCandle.low_price > candle.high_price:
                        if iCandle.open_price > candle.close_price:
                            # buy pattern
                            p = Pattern(wl[:length], 'buy')
                            patterns.append(p)
                            indicies.append(ind - length)
                            profits.append(iCandle.open_price - candle.close_price)
                            break
                        # if iCandle.high_price < candle.low_price:
                        if iCandle.close_price < candle.open_price:
                            # sell pattern
                            p = Pattern(wl[:length], 'sell')
                            patterns.append(p)
                            indicies.append(ind - length)
                            profits.append(candle.open_price - iCandle.close_price)
                            break
                    ind = ind + 1
            # Slide the window forward by one candle.
            wl.pop(0)
        i = i + 1
    # BUG FIX: this literal originally contained a raw line break inside
    # the single-quoted string, which is a syntax error.
    print('Total patterns: {0}'.format(len(patterns)))
    print('Mean index[after]: {0}'.format(numpy.mean(indicies)))
    print('Mean profit: {0}'.format(numpy.mean(profits)))
    connect.close()
    return patterns
def __init__(self):
    """Load configuration, logging and the relevance test data."""
    self.config = Config.Config()
    self.log = Log.MyLog()
    relevance_conf = ConfRelevance.ConfRelevance(CONF_PATH, "test_data")
    self.relevance = relevance_conf.get_relevance_conf()
def send_request(data, host, address, port, relevance, _path, success):
    """
    Dispatch one test-case request (POST/GET/PUT/DELETE) via confighttp,
    recording it in the log and the allure report.

    :param data: test case dict (test_name, headers, parameter, ...)
    :param host: default host (may be overridden by the case / config)
    :param address: default interface address
    :param port: port
    :param relevance: parameter-correlation object
    :param _path: case path
    :param success: global result flag
    :return: response dict from confighttp, or {"code": False, "data": False}
        for an unknown request_type
    """
    config = Config.Config()
    logging.info("=" * 100)
    # Resolve placeholders in headers and parameters.
    header = ReadParam.read_param(data["test_name"], data["headers"],
                                  relevance, _path, success)
    logging.debug("请求头处理结果: %s" % header)
    parameter = ReadParam.read_param(data["test_name"], data["parameter"],
                                     relevance, _path, success)
    # BUG FIX: this debug line used to log `header` instead of `parameter`.
    logging.debug("请求参数处理结果: %s" % parameter)
    # Per-case host/address override the defaults when present.
    try:
        host = data["host"]
    except KeyError:
        pass
    try:
        address = data["address"]
    except KeyError:
        pass
    # NOTE(review): this unconditionally replaces host with the configured
    # one, defeating the per-case override above — kept as-is; confirm.
    host = config.host
    address = ParamManage.manage(address, relevance)
    logging.debug("host处理结果: %s" % host)
    if not host:
        raise failureException("接口请求地址为空 %s" % data["headers"])
    # Full request URL, built once and reused below.
    url = data["http_type"] + "://" + host + ":" + port + address
    logging.info("请求接口:%s" % str(data["test_name"]))
    logging.info("请求地址:%s" % url)
    logging.info("请求头: %s" % str(header))
    logging.info("请求参数: %s" % str(parameter))

    def _attach_request():
        # Record the request details in the allure report.
        allure.attach("请求接口:", str(data["test_name"]))
        allure.attach("请求地址", url)
        allure.attach("请求头", str(header))
        allure.attach("请求参数", str(parameter))

    method = data["request_type"].lower()
    if method == 'post':
        if data["file"]:
            with allure.step("POST上传文件"):
                _attach_request()
                result = confighttp.post(header=header, address=url,
                                         request_parameter_type=data["parameter_type"],
                                         files=parameter, timeout=data["timeout"])
        else:
            with allure.step("POST请求接口"):
                _attach_request()
                logging.info("POST请求接口")
                result = confighttp.post(header=header, address=url,
                                         request_parameter_type=data["parameter_type"],
                                         data=parameter, timeout=data["timeout"])
    elif method == 'get':
        with allure.step("GET请求接口"):
            _attach_request()
            logging.info("GET请求接口")
            result = confighttp.get(header=header, address=url,
                                    data=parameter, timeout=data["timeout"])
    elif method == "put":
        if data["file"]:
            with allure.step("PUT上传文件"):
                _attach_request()
                logging.info("PUT上传文件")
                result = confighttp.put(header=header, address=url,
                                        request_parameter_type=data["parameter_type"],
                                        files=parameter, timeout=data["timeout"])
        else:
            with allure.step("PUT请求接口"):
                _attach_request()
                logging.info("PUT请求接口")
                result = confighttp.put(header=header, address=url,
                                        request_parameter_type=data["parameter_type"],
                                        data=parameter, timeout=data["timeout"])
    elif method == "delete":
        with allure.step("DELETE请求接口"):
            _attach_request()
            logging.info("DELETE请求接口")
            result = confighttp.delete(header=header, address=url,
                                       data=parameter, timeout=data["timeout"])
    else:
        result = {"code": False, "data": False}
    logging.info("接口请求结果:\n %s" % str(result))
    return result
class OperateSqlAl(object):
    """MySQL access helper whose connection settings depend on the
    target environment."""

    oj = operate_json.OperateJson()
    con = Config.Config()

    def __init__(self, envir):
        """Pick DB connection settings for the given environment tag.

        :param envir: one of 'ysy_test', 'ysy_release'/'ysy_pro_release',
            'yhz_test', 'yhz_release', 'ysy_o2o'
        """
        if 'ysy_test' == envir:
            # Yishengyue test environment
            self.dbhost = self.con.tysy_db_host
            self.dbport = int(self.con.tysy_db_port)
            self.dbname = self.con.tysy_db_name
            self.db_user = self.con.tysy_db_user
            self.pwd = self.con.tysy_db_pwd
        elif 'ysy_release' == envir or 'ysy_pro_release' == envir:
            # Yishengyue production environment
            self.dbhost = self.con.ysy_db_host
            self.dbport = int(self.con.ysy_db_port)
            self.dbname = self.con.ysy_db_name
            self.db_user = self.con.ysy_db_user
            self.pwd = self.con.ysy_db_pwd
        elif 'yhz_test' == envir:
            # Yuhua test environment
            self.dbhost = self.con.ysy_db_host
            self.dbport = int(self.con.ysy_db_port)
            self.dbname = self.con.tysy_db_name
            self.db_user = self.con.tyhz_user
            self.pwd = self.con.tysy_db_pwd
        elif 'yhz_release' == envir:
            # Yuhua production environment
            self.dbhost = self.con.ysy_db_host
            self.dbport = int(self.con.ysy_db_port)
            self.dbname = self.con.yhz_db_name
            self.db_user = self.con.ysy_db_user
            self.pwd = self.con.ysy_db_pwd
        elif 'ysy_o2o' == envir:
            # Xiaozhu production environment -- shares most settings with
            # the Yishengyue production environment
            self.dbhost = self.con.ysy_db_host
            self.dbport = int(self.con.ysy_db_port)
            self.dbname = self.con.db_name_o2o
            self.db_user = self.con.ysy_db_user
            self.pwd = self.con.ysy_db_pwd

    def re_sql(self, var_str):
        """Expand a templated SQL string that embeds a '.format(...)' call.

        :param var_str: string possibly containing a format expression
        :return: the formatted SQL, or var_str unchanged
        """
        if 'format' in var_str:
            p1 = re.compile(r"[(](.*?)[')]", re.S)
            split_str = var_str.split('format')
            var_1 = re.findall(p1, split_str[1])
            # NOTE(review): self.ut is never assigned in this class, so
            # this branch raises AttributeError as written — confirm where
            # the helper is supposed to come from.
            var_1 = self.ut.circular_processing_data(var_1)
            # Only the extracted values are substituted into the template.
            sql_resutl = split_str[0].format(*var_1)
            return sql_resutl
        else:
            return var_str

    def execute_sql(self, sql_str):
        """Run *sql_str* and return the first column of the first row
        (bytes decoded to str). Returns None when the query fails.

        :param sql_str: SQL statement to execute
        """
        db = None
        cursor = None
        try:
            db = pymysql.connect(host=self.dbhost, port=self.dbport,
                                 user=self.db_user, passwd=self.pwd,
                                 db=self.dbname, charset='utf8')
            cursor = db.cursor()
            cursor.execute(sql_str)
            data = cursor.fetchone()
            data = self.bytes_to_str(data[0])
            return data
        except Exception:
            print('\033[1;33m"sql执行异常,请检查"\033[0m \n')
        finally:
            # BUG FIX: the original finally referenced cursor/db
            # unconditionally, raising NameError when connect() failed.
            if cursor is not None:
                cursor.close()
            if db is not None:
                db.close()

    def bytes_to_str(self, val1):
        """Decode bytes to a UTF-8 str; other values pass through."""
        if type(val1) == bytes:
            val1 = val1.decode(encoding='utf-8')
        return val1
def __init__(self):
    """Initialise configuration and pin the environment to 'debug'.

    The environment used to be taken from sys.argv[1]; it is hard-coded
    for now.
    """
    self.env = 'debug'
    self.config = Config.Config()
def __init__(self):
    """Set up config, logging and per-purpose HTTP sessions."""
    self.config = Config.Config()
    self.log = Log.Log()
    # Independent cookie jars for the options / post / home flows.
    self.optionsSession, self.postSession, self.homeSession = (
        requests.session(), requests.session(), requests.session())
def __init__(self):
    """Attach the Excel data source and the global configuration."""
    self.con = Config.Config()
    self.data_resource = ExcelHandler.ExcelHandler()
# '--allure_severities=critical, blocker' # '--allure_stories=测试模块_demo1, 测试模块_demo2' # '--allure_features=测试features' """ import sys import pytest from Common import Log from Common import Shell from Conf import Config if __name__ == '__main__': conf = Config.Config() log = Log.MyLog() log.info('初始化配置文件, path=' + conf.conf_path) shell = Shell.Shell() xml_report_path = conf.xml_report_path html_report_path = conf.html_report_path # 定义测试集 allure_list = '--allure_features=Home,Personal' args = ['-s', '-q', '--alluredir', xml_report_path, allure_list] log.info('执行用例集为:%s' % allure_list) self_args = sys.argv[1:] pytest.main(args) cmd = 'allure generate %s -o %s' % (xml_report_path, html_report_path)
class GeneralTests(unittest.TestCase):
    """Test methods for StockDataDownloader"""

    # Shared configuration (OANDA token / account id, etc.).
    conf = Config.Config()

    def test_downloader(self):
        # Three months of 5-minute SPFB.SI futures data from Finam must
        # yield exactly 910 rows of 7 columns.
        downloader = StockDataDownloader.StockDataDownloader()
        data = downloader.get_data_from_finam('SPFB.SI-9.16', 5, 17, 420658,
                                              date(2016, 6, 13),
                                              date(2016, 9, 12))
        self.assertTrue(data.__len__() == 910, 'Invalid number of rows!')
        self.assertTrue(data.shape[1] == 7, 'Invalid number of columns!')

    def test_accounts(self):
        # Account listing and EUR_USD pricing should both return data.
        conf = Config.Config()
        token = conf.token
        accId = conf.account_id
        oanda = oandapyV20.API(environment="practice", access_token=token)
        r = AccountList()
        oanda.request(r)
        accsInfo = r.response.get('accounts')
        self.assertTrue(len(accsInfo) > 0)
        p = PricingInfo(accId, "instruments=EUR_USD")
        oanda.request(p)
        print(p.response)
        self.assertTrue(len(p.response.get('prices')) > 0)

    # for demo accounts only!
    def test_market_orders(self):
        # Open a 1-unit EUR_USD market order, close it, check the balance.
        conf = Config.Config()
        token = conf.token
        accId = conf.account_id
        oanda = oandapyV20.API(environment="practice", access_token=token)
        mktOrder = MarketOrderRequest(instrument='EUR_USD', units=1)
        r = orders.OrderCreate(accId, data=mktOrder.data)
        resp = oanda.request(r)
        print(resp)
        r = positions.PositionClose(accId, 'EUR_USD', {"longUnits": "ALL"})
        resp = oanda.request(r)
        print(resp)
        r = AccountDetails(accId)
        balance = oanda.request(r).get('account').get('balance')
        # NOTE(review): the v20 API usually returns 'balance' as a string;
        # str > 0 raises TypeError on Python 3 — confirm.
        self.assertTrue(balance > 0)

    # do not forget UTC now!
    def test_oanda_fx_history(self):
        # One day of EUR_USD S5 candles should be non-empty.
        conf = Config.Config()
        token = conf.token
        oanda = oandapyV20.API(environment="practice", access_token=token)
        downloader = StockDataDownloader.StockDataDownloader()
        dateFrom = datetime.utcnow() - timedelta(days=1)
        dateTo = datetime.utcnow()
        result = downloader.get_data_from_oanda_fx(oanda, 'EUR_USD', 'S5',
                                                   dateFrom, dateTo)
        self.assertTrue(len(result) > 0)

    def test_pattern_serie_to_vector(self):
        # A two-candle pattern maps to the expected 2-vector.
        c1 = Candle(datetime.now(), 1, 2, 3)
        c2 = Candle(datetime.now(), 4, 5, 6)
        p = Pattern([c1, c2], 'test')
        self.assertTrue(numpy.allclose(pattern_serie_to_vector(p),
                                       [0.5, 1.5]))

    def test_get_x_y_for_patterns(self):
        # Labels are 0/1 against the target class; X rows are the vectors.
        c1 = Candle(datetime.now(), 1, 2, 3)
        c2 = Candle(datetime.now(), 4, 5, 6)
        p = Pattern([c1, c2], 'test1')
        c3 = Candle(datetime.now(), 7, 8, 9)
        c4 = Candle(datetime.now(), 10, 11, 12)
        p1 = Pattern([c3, c4], 'test2')
        X, y = get_x_y_for_patterns([p, p1], 'test2')
        self.assertEqual(y, [0, 1])
        self.assertTrue(numpy.allclose(X[0], [0.5, 1.5]))
        self.assertTrue(numpy.allclose(X[1], [0.83333333, 1.16666667]))
def __init__(self):
    """Wire up configuration, logging and the hash/encryption helper."""
    self.config = Config.Config()
    self.log = Log.MyLog()
    self.encrypt = Hash.base()
def __init__(self):
    """Load the shared configuration and logger."""
    self.log = Log.MyLog()
    self.config = Config.Config()
def __init__(self):
    """Load configuration and obtain a logger instance."""
    self.log = Logger().get_logger()
    self.config = Config.Config()