def __init__(self, username: str, password: str):
    """Remember the credentials, build the API client and log in."""
    self.__username = username
    self.__password = password
    self.api = API()
    self.api.login(username, password)
def __init__(self):
    """Wire up the uvloop policy, worker pool, storage, API and task processor."""
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    self.executor = ThreadPoolExecutor(max_workers=2)
    self.storage = Storage()
    self.api = API(self.storage, self.executor)
    processor = TaskProcessor(self.storage)
    self.task_processor = processor
    # The API drives task execution through these two hooks.
    self.api.set_execute_and_callback(processor.execute, processor.callback)
def do(self, *args, **kwargs):
    """Run the API call for this task and wrap each payload as a follow-up Task.

    Appends the new Task objects to ``self.outputs`` and returns it.
    NOTE(review): the call uses ``self.args``/``self.kwargs``, not the
    ``*args``/``**kwargs`` passed in — confirm that is intentional.
    """
    data = API(self.symbol, self.name, self.args, **self.kwargs).outputs
    if isinstance(data, list):  # was: type(data) == list — use isinstance
        self.outputs.extend(
            Task('post', 'FireTask', self.symbol, output['uri'], output['df'])
            for output in data)
    elif 'collection' in data:  # dict membership — no need for .keys()
        # NOTE(review): this branch passes one more positional argument to
        # Task than the other branches — confirm Task's signature allows it.
        self.outputs.append(
            Task('post', 'FireTask', self.symbol, data['collection'],
                 data['key'], data['data']))
    else:
        self.outputs.append(
            Task('post', 'FireTask', self.symbol, data['uri'], data['df']))
    return self.outputs
def invest(config):
    """Build the API, classifier and bidder from *config*, then place bids."""
    api = API(config["AccessToken"])
    classifier = LoanClassifier(
        float(config["ConfidenceThreshold"]),
        float(config["MinInterest"]),
        config["ModelPath"],
        api,
    )
    Bidder(
        float(config["MinInvestment"]),
        float(config["MaxInvestment"]),
        api,
        classifier,
    ).bid()
def handle(self):
    """Serve one TCP request: decode the GBK payload, dispatch the test
    case named by ``case_id`` and send back the result dict, GBK-encoded.
    """
    logger.info(self.request)
    try:
        data = self.request.recv(BUFFER_SIZE)
        clean_data = data.decode('GBK').strip()
        if not clean_data:
            logger.warning('no data !!!')
            return
        str_input = clean_data.replace(';', ',')
        # SECURITY: eval() on data received over the network executes
        # arbitrary code — replace with ast.literal_eval (or JSON input).
        d_input = eval(str_input)
        case_id = d_input.get('case_id')
        if not case_id:
            logger.error('Unidentified case_id !')
            result = dict(
                case_id=case_id,
                date=now(),
                status='未执行,无法识别的case_id',
                exec_point='connect',
                result_code=-1,
            )
            self.request.sendall(str(result).encode('GBK'))
            return
        api = API(d_input)
        res = wait(600, api.connect)  # stop reconnecting after 10 minutes
        if res.get('result_code') != 1:
            # Could not connect to the service: skip the case.
            result = dict(
                case_id=d_input.get('case_id'),
                date=now(),
                status='未结束',
                exec_point='connect',
                result_code=res.get('result_code'),
            )
            self.request.sendall(str(result).encode('GBK'))
        else:
            funcname = case_id.split('_')[0]
            func = CASES_DICT.get(funcname)
            if not func:
                # BUGFIX: previously execution fell through and called
                # ``func(d_input)`` with func=None, relying on the broad
                # except below; report the bad case_id explicitly instead.
                logger.error('invalid case_id: %s' % case_id)
                result = dict(
                    case_id=case_id,
                    date=now(),
                    status='未执行,无法识别的case_id',
                    exec_point='connect',
                    result_code=-1,
                )
                self.request.sendall(str(result).encode('GBK'))
                return
            logger.critical('Run Test: %s' % case_id)
            try:
                result = func(d_input)
            except Exception:  # narrowed from bare except:
                result = dict(
                    case_id=case_id,
                    date=now(),
                    status='未知异常,查看日志',
                    exec_point='exec_cases',
                    result_code=-1,
                )
                logger.error(traceback.format_exc())
            self.request.sendall(str(result).encode('GBK'))
    except Exception as e:
        # Last-resort boundary: log the traceback, report the error text.
        logger.error(traceback.format_exc())
        self.request.sendall(str(e).encode('GBK'))
class AutomatedInstagramUser:
    """High-level wrapper around the Instagram API client."""

    __user_id = None
    __instagram_api = None

    def __init__(self):
        self.__instagram_api = API()

    def login(self, username, password):
        """Authenticate and remember the numeric id of *username*."""
        if self.__instagram_api.login(username=username, password=password):
            self.__user_id = self.get_user_id_from_username(username)

    def __initialize_with_user(self, user_id):
        # Fall back to the logged-in account when no id is supplied.
        return user_id or self.__user_id

    def get_user_id_from_username(self, username):
        """Look up the numeric user id ("pk") for *username*."""
        self.__instagram_api.searchUsername(username)
        return self.__instagram_api.LastJson["user"]["pk"]

    def logout(self):
        self.__instagram_api.logout()

    def get_followers(self, user_id=None):
        """Followers of *user_id* (defaults to the logged-in user)."""
        return self.__instagram_api.getTotalFollowers(
            self.__initialize_with_user(user_id))

    def get_followings(self, user_id=None):
        """Accounts *user_id* follows (defaults to the logged-in user)."""
        return self.__instagram_api.getTotalFollowings(
            self.__initialize_with_user(user_id))

    def get_difference_between_followings_and_followers(self, user_id=None):
        """Usernames the account follows that do not follow back."""
        user_id = self.__initialize_with_user(user_id)
        followers = {
            user["username"] for user in self.get_followers(user_id=user_id)
        }
        followings = {
            user["username"] for user in self.get_followings(user_id=user_id)
        }
        return list(followings - followers)

    def unfollow(self, username):
        """Unfollow *username*; returns the API result."""
        return self.__instagram_api.unfollow(
            self.get_user_id_from_username(username))

    def follow(self, username):
        """Follow *username*; returns the API result."""
        return self.__instagram_api.follow(
            self.get_user_id_from_username(username))
class Main:
    """Application wiring: event loop policy, worker pool, storage,
    API front-end and task processor."""

    def __init__(self):
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
        self.executor = ThreadPoolExecutor(max_workers=2)
        self.storage = Storage()
        self.api = API(self.storage, self.executor)
        self.task_processor = TaskProcessor(self.storage)
        self.api.set_execute_and_callback(self.task_processor.execute,
                                          self.task_processor.callback)

    async def init(self):
        """Open storage connections and re-submit any queued launches."""
        await self.storage.init_connections()
        for _ in self.storage.launch_queue():
            future = self.executor.submit(self.task_processor.execute)
            future.add_done_callback(self.task_processor.callback)

    def run(self):
        """Drive ``init`` to completion, then hand control to the API."""
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.init())
        self.api.run()
def api():
    """Factory returning a fresh API instance."""
    return API()
from api.api import API
from aisy.sca_database import *

# Analysis id (this can be retrieved from the web application).
analysis_id = 1

# Open the sqlite database (path is hard-coded for this local setup).
db_location = "C:/Users/guilh/PycharmProjects/aisy/databases"
db_name = "database_ascad.sqlite"
db = ScaDatabase("{}/{}".format(db_location, db_name))

# Start the api and list the metric names stored for this analysis.
api = API(db, analysis_id)
metrics = api.get_metric_names()
print(metrics)

# Optional follow-ups: api.get_all_key_ranks() and
# api.get_all_success_rates() return series that can be plotted with
# matplotlib (Traces vs Key Rank / Success Rate).
from api.api import API
from api.middleware import Middleware

# Application instance; templates are rendered from ./templates.
app = API(templates_dir="templates")


@app.route("/home")
def home(request, response):
    # Rendered template goes to ``body`` while the other handlers set
    # ``text`` — NOTE(review): confirm the response object exposes both.
    response.body = app.template("home.html",
                                 context={
                                     "title": "Py-restfull Framework",
                                     "name": "py-restfull"
                                 })


@app.route("/about")
def about(request, response):
    response.text = "Hello from the ABOUT page"


@app.route("/hello/{name}")
def say_hello(request, response, name):
    # The ``{name}`` path parameter arrives as a handler argument.
    response.text = f"Hello, {name}"


@app.route("/tell/{age:d}")
def tell(request, response, age):
    # ``:d`` converter — presumably yields an int path parameter.
    response.text = f"Your age: {age}"


# NOTE(review): the decorated handler for this route is not visible in
# this chunk — the definition continues past this view of the file.
@app.route("/book")
def api(request) -> API:
    """Pytest fixture: yield an API client authenticated with the
    key/token read from the file given by ``--config``."""
    config_path = request.config.getoption('--config')
    key, token = get_api_key_token(conf_file=config_path)
    yield API(key=key, token=token)
def sync_get_history_data_object_record_sets(default=None):
    """Test case: synchronously query history data record sets.

    Flow: create/initialise the API instance, connect, build the request,
    issue the synchronous call, verify the response, report statistics.

    :param default: one row of csv input data, dict
    :return: test-result dict
    """
    # BUGFIX: was ``default={}`` — a shared mutable default argument.
    default = {} if default is None else default
    case_id = default.get('case_id')
    date = now()
    check_case = default.get('check')  # verification rule (expected to be a dict)
    check_result = '未校验'
    api = API(default)  # instantiate and initialise the API wrapper
    # Build the PB-format request for this interface from the helper module.
    request = history_pb_requests.get_history_data_object_record_set_request(
        default)
    res = api.sync_pb_blob_request_call(request)  # synchronous call
    result_code = res.get('result_code')
    totle_time = res.get('et', 0) - res.get('st', 0)  # elapsed time of the call
    if result_code == 1:
        # Success code: run the verification logic; otherwise just report.
        response = res.get('response')
        # pb_response = history_pb2.GetHistoryDataObjectRecordSetsResponse()
        # For simplicity the request message type is reused as the response
        # here; this only exercises the plumbing.
        pb_request = history_pb2.GetHistoryDataObjectRecordSetsRequest()
        # pb_response.ParseFromString(response.responseBody.data)
        pb_request.ParseFromString(response.responseBody.data)
        # object_recordsets = pb_response.object_recordsets
        # data_count = len(object_recordsets)  # adapt to the real structure
        data_count = len(pb_request.history_data_object_names)
        # Verify the member values.
        if (response.responseHeader.retCode == 666
                and pb_request.history_data_object_names[0] == 'obj_name'):
            check_result = 'success'
        else:
            check_result = 'failed'
        result = dict(
            case_id=case_id,            # case id
            date=date,                  # timestamp
            status='结束',               # execution status
            exec_point='end',           # where execution stopped (on failure)
            result_code=result_code,    # return code
            totle_time=totle_time,      # elapsed time
            check_case=check_case,      # verification rule
            check_result=check_result,  # verification outcome
            data_count=data_count,      # number of records
        )
        return result
    else:
        result = dict(
            case_id=case_id,
            date=date,
            status='结束,未校验',
            exec_point='end',
            result_code=result_code,
            totle_time=totle_time,
            check_case=check_case,
            check_result=check_result,
        )
        return result
def __init__(self, w: int, h: int, title: str):
    """Create the GLFW window, OpenGL context, renderers and game object.

    :param w: initial window width in pixels
    :param h: initial window height in pixels
    :param title: window title
    """
    self.title = title
    self.screenSize = vec2(w, h)
    if not glfw.init():
        # GLFW failed to initialise; object is left half-built.
        # NOTE(review): callers get no signal of failure — consider raising.
        return
    self.api = API()
    # Request an OpenGL 3.3 core-profile context.
    glfw.window_hint(glfw.CONTEXT_VERSION_MAJOR, 3)
    glfw.window_hint(glfw.CONTEXT_VERSION_MINOR, 3)
    glfw.window_hint(glfw.OPENGL_PROFILE, glfw.OPENGL_CORE_PROFILE)
    glfw.window_hint(glfw.RESIZABLE, glfw.TRUE)
    self.window = glfw.create_window(w, h, title, None, None)
    if not self.window:
        glfw.terminate()
        return
    glfw.make_context_current(self.window)
    glfw.swap_interval(1)  # vsync on
    value = [0]
    # NOTE(review): PyOpenGL's glGetInteger normally takes only the pname —
    # confirm this two-argument form; GL_MAX_TEXTURE_UNITS is also a legacy
    # fixed-function limit.
    MaxTextures = glGetInteger(GL_MAX_TEXTURE_UNITS, value)
    self.render = Render2D(VertexShader(), FragmentShader(MaxTextures),
                           MaxTextures)
    self.text_render = Render2D(VertexShaderText(),
                                FragmentShaderText(MaxTextures), MaxTextures)
    self.game = Game()
    self.game.screenSize = self.screenSize
    self.game.api = self.api

    # callback functions
    def resize_callback(window, w, h):
        # Keep the viewport centred and refresh both orthographic projections.
        self.screenSize = vec2(w, h)
        glViewport(w // 2 - int(self.screenSize.x // 2),
                   h // 2 - int(self.screenSize.y // 2),
                   int(self.screenSize.x), int(self.screenSize.y))
        self.game.on_resize(w, h)
        self.render.shader.SetUniMat4(
            "u_ViewProj", identity(mat4))  # Camera projection here
        self.render.shader.SetUniMat4(
            "u_Transform",
            ortho(0.0, self.screenSize.x, 0.0, self.screenSize.y, -1.0, 1.0))
        self.text_render.shader.SetUniMat4("u_ViewProj", identity(mat4))
        self.text_render.shader.SetUniMat4(
            "u_Transform",
            ortho(0.0, self.screenSize.x, 0.0, self.screenSize.y, 0.0, 1.0))

    # TODO: Maybe pass the Mouse and Keyboard to Game, maybe
    def on_mouse_scroll_callback(window, xOffSet, yOffSet):
        Mouse.handleScroll(window, xOffSet, yOffSet)
        self.game.on_mouse_scroll(xOffSet, yOffSet)

    def on_cursor_move_callback(window, xpos, ypos):
        ypos = -(ypos - self.screenSize.y)  # adjust to the OpenGL viewport
        Mouse.handleMove(window, xpos, ypos)
        self.game.on_cursor_move(xpos, ypos)

    def on_mouse_button_callback(window, key, action, mods):
        Mouse.handleClicks(window, key, action, mods)
        self.game.on_mouse_click(key, action, mods)

    def on_keyboard_click_callback(window, key, scancode, action, mods):
        Keyboard.handleClicks(window, key, scancode, action, mods)
        self.game.on_keyboard_click(key, scancode, action, mods)

    # Set callbacks from glfw
    glfw.set_window_size_callback(self.window, resize_callback)
    glfw.set_scroll_callback(self.window, on_mouse_scroll_callback)
    glfw.set_cursor_pos_callback(self.window, on_cursor_move_callback)
    glfw.set_mouse_button_callback(self.window, on_mouse_button_callback)
    glfw.set_key_callback(self.window, on_keyboard_click_callback)
    Initialize rethinkdb connection with backoff
    """
    # TODO(rabrams) generic retry functional would be nice
    # Try with increasing delays; re-raise after the final attempt fails.
    for sleepsecs in [1, 2, 4, 10]:
        try:
            return r.connect(DB_HOST, DB_PORT).repl()
        except Exception:
            LOGGER.exception("Could not connect to db")
            time.sleep(sleepsecs)
    raise Exception("Retry cap reached for connecting to db")


def _init_db():
    """
    Initialize rethinkdb schema
    """
    # TODO(rabrams) consider looking into schema migration tools
    # Idempotent startup: create the database and tables only when missing.
    if not r.db_list().contains(DB_NAME).run():
        r.db_create(DB_NAME).run()
    if not r.db(DB_NAME).table_list().contains("schemata").run():
        r.db(DB_NAME).table_create("schemata", primary_key="name").run()
    if not r.db(DB_NAME).table_list().contains("data").run():
        r.db(DB_NAME).table_create("data", primary_key="key").run()


# Import-time side effects: block until the DB is reachable, ensure the
# schema exists, then expose the app object.
_wait_for_db()
_init_db()
app = API(r, DB_NAME).app  # pylint: disable=invalid-name
from django.shortcuts import render
from api import rpc
import api.views
from faucet.faucet_settings import faucet_settings
from api.api import API
from api.models import Log, LoyaltyLevel
import urllib.request
import urllib.parse
import json

# Module-level API client bound to an RPC session at import time.
api = API(rpc.get_session())


# Create your views here.
def index(request):
    """Faucet landing view: on POST, determine the client IP and verify
    the reCAPTCHA before processing ``payout_address``."""
    params = {'faucet_settings': faucet_settings}
    payout_address = request.POST.get('payout_address')
    if payout_address:
        #get user ip
        # Honour the proxy header when present, else use the direct peer.
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            ip = x_forwarded_for.split(',')[0]
        else:
            ip = request.META.get('REMOTE_ADDR')
        #verify captcha
        if faucet_settings['use_recaptcha']:
            captcha = request.POST.get('g-recaptcha-response', '')
            verify_post = [('secret', faucet_settings['recaptcha_secret']),
                           ('response', captcha), ('remoteip', ip)]
            # NOTE(review): the call below is truncated in this chunk —
            # the statement continues past this view of the file.
            verify_result = urllib.request.urlopen(
def async_get_history_data_object_record_sets(default=None):
    """Test case: asynchronously query history data record sets.

    Flow: define the callback, create/initialise the API instance,
    connect, build the request, send the asynchronous request, wait for
    the callback (or time out), then report the result statistics.

    :param default: one row of csv input data, dict
    :return: test-result dict
    """
    # BUGFIX: was ``default={}`` — a shared mutable default argument.
    default = {} if default is None else default
    case_id = default.get('case_id')
    date = now()
    flag = False  # set when the callback fires
    check_case = str(default.get('check'))
    check_result = '未校验'
    data_count = 0  # must be pre-bound: the callback declares it nonlocal

    def callback(handle, pb_response):
        """Verify the asynchronous response.

        !!! The callback must never raise — otherwise the main process
        cannot exit !!!

        :param handle: c_void_p
        :param pb_response: pointer to a PBResponse
        """
        nonlocal flag, check_result, test_et, data_count
        flag = True
        test_et = time.time()
        data = pb_response.contents  # dereference the pointer
        data_length = data.responseBody.len
        byte_data = data.responseBody.data[:data_length]  # raw PB response bytes
        # pb_response = history_pb2.GetHistoryDataObjectRecordSetsResponse()
        # Temporary for testing: the request type is reused as the response.
        pb_request = history_pb2.GetHistoryDataObjectRecordSetsRequest()
        # pb_response.ParseFromString(byte_data)
        try:
            print(byte_data)
            pb_request.ParseFromString(byte_data)  # temporary, see above
        except Exception:  # deliberately broad: the callback must not raise
            check_result = 'failed'
            logger.error('unserialize error !!!')
            logger.error(traceback.format_exc())
            return
        # object_recordsets = pb_response.object_recordsets
        # data_count = len(object_recordsets)
        # Verify member values, record counts, etc.
        if data.responseHeader.retCode == 666:
            # Local testing shortcut; real verification should follow the
            # actual api implementation.
            check_result = 'success'
        else:
            check_result = 'failed'

    api = API(default)
    request = history_pb_requests.get_history_data_object_record_set_request(
        default)
    print(request.get('data'))
    res = api.async_pb_blob_request_call(callback, request)
    result_code = res.get('result_code')
    test_st = res.get('st', 0)
    test_et = res.get('et', 0)
    if result_code == 1:
        st = time.time()
        et = time.time()
        while et - st < 600:  # wait up to 10 minutes for the callback
            if flag:
                break
            time.sleep(1)
            et = time.time()
        result = dict(
            case_id=case_id,
            date=date,
            status='结束',
            exec_point='end',
            result_code=result_code,
            totle_time=test_et - test_st,
            check_case=check_case,
            check_result=check_result,
            data_count=data_count,
        )
        return result
    else:
        result = dict(
            case_id=default.get('case_id'),
            date=date,
            status='结束,未校验',
            exec_point='end',
            result_code=result_code,
            check_case=check_case,
            check_result=check_result,
        )
        return result
def connect(self):
    """Init and/or connect database"""
    # Returns a live mysql.connector connection. On first run (no local DB
    # file found in the server's data directory) it bootstraps the schema,
    # imports products from the OpenFoodFacts API and creates the default
    # user account; otherwise it simply selects the existing database.
    database = None
    try:
        database = mysql.connector.connect(
            user=self.user_name,
            password=self.user_password,
            host=self.host_name,
        )
        mycursor = database.cursor()
        # Locate the server's data directory to check whether our database
        # files already exist on disk.
        mycursor.execute(queries.SQL_DB_DIRECTORY)
        path = mycursor.fetchone()
        if len(path) != 0:
            url_db = path[0] + self.database_name
            if os.path.exists(url_db):
                mycursor.execute(queries.SQL_USE_DB)
            else:
                # import data from OpenFoodFacts API
                api = API()
                try:
                    imported_products = api.products
                except Error as err:
                    print(f"L'erreur '{err}' est survenue")
                    # Single retry after a short pause.
                    time.sleep(5)
                    imported_products = api.products
                # create database and tables
                mycursor.execute(queries.SQL_CREATE_DB)
                print(Fore.GREEN + "> Base de données créée avec succès <")
                mycursor.execute(queries.SQL_USE_DB)
                for name, query in queries.TABLES.items():
                    mycursor.execute(query)
                    print(f"> La table '{name}' a été créée avec succès")
                # insert data in DB
                print("\n----> Insertion des données en base <----\n")
                products = []  # store product objects
                prod_mng = ProductManager(database)
                with Bar("Progression", max=len(imported_products)) as bar:
                    for imported_product in imported_products:
                        cat_mng = CategoryManager(database)
                        city_mng = CityManager(database)
                        store_mng = StoreManager(database)
                        catprod_mng = CategoryProductManager(database)
                        prodloc_mng = ProductLocationManager(database)
                        categories = []  # store category objects
                        cities = []  # store city objects
                        stores = []  # store store objects
                        # filter & insert categories
                        tmp_categories = imported_product.get(
                            "categories", "").split(",")
                        for tmp_category in tmp_categories:
                            category = cat_filter(tmp_category)
                            if category is not None:
                                categories.append(cat_mng.find(category))
                        # filter & insert cities
                        tmp_cities = imported_product.get(
                            "purchase_places", "").split(",")
                        if len(tmp_cities) != 0:
                            for tmp_city in tmp_cities:
                                city = city_filter(tmp_city)
                                cities.append(city_mng.find(city))
                        # filter & insert stores
                        # NOTE(review): original comment said "cities" here,
                        # but this section handles stores.
                        tmp_stores = imported_product.get("stores",
                                                          "").split(",")
                        if len(tmp_stores) != 0:
                            for tmp_store in tmp_stores:
                                store = store_filter(tmp_store)
                                stores.append(store_mng.find(store))
                        # filter & insert products
                        tmp_name = imported_product.get(
                            "product_name_fr", "")
                        tmp_brand = imported_product.get("brands", "")
                        tmp_nutrition_grade = imported_product.get(
                            "nutrition_grades", "z")
                        tmp_energy_100g = imported_product.get(
                            "nutriments", "").get("energy_100g", "999999")
                        tmp_url = imported_product.get("url", "")
                        tmp_code = imported_product.get("code", (13 * "0"))
                        (
                            name,
                            brand,
                            nutrition_grade,
                            energy_100g,
                            url,
                            code,
                        ) = prod_filters(
                            tmp_name,
                            tmp_brand,
                            tmp_nutrition_grade,
                            tmp_energy_100g,
                            tmp_url,
                            tmp_code,
                        )
                        tmp_product_object = Product(
                            name,
                            brand,
                            nutrition_grade,
                            energy_100g,
                            url,
                            code,
                            stores,
                            cities,
                            categories,
                        )
                        product_object = prod_mng.insert(
                            tmp_product_object)
                        # if the product is really stored in DB
                        if product_object is not None:
                            products.append(product_object)
                        bar.next()
                # insert data in CategoryProduct table
                categoryproducts = []
                productlocations = []
                for product in products:
                    prod_id = product.id
                    for category in product.categories:
                        cat_id = category.id
                        categoryproducts.append(
                            CategoryProduct(prod_id, cat_id))
                    # insert data in ProductLocation table
                    for city in product.cities:
                        city_id = city.id
                        for store in product.stores:
                            store_id = store.id
                            productlocations.append(
                                ProductLocation(prod_id, store_id, city_id))
                catprod_mng.insert(categoryproducts)
                prodloc_mng.insert(productlocations)
                # insert default user account
                users_mng = UsersManager(database)
                # convert pwd in bytes
                pwd_hashed = bcrypt.hashpw(
                    bytes(users_mng.default_pw, "utf-8"), bcrypt.gensalt())
                # serialize the serial_pwd_hashed object
                serial_pwd_hashed = pickle.dumps(pwd_hashed)
                users_mng.create(users_mng.default_username,
                                 serial_pwd_hashed)
    except Error as err:
        print(f"L'erreur '{err}' est survenue")
    # NOTE(review): if the initial connect() call raises, ``mycursor`` is
    # unbound here and this line raises NameError — confirm intended.
    mycursor.close()
    print(Style.RESET_ALL)
    return database
# Hand-written fixture data for one async-history test case.
d_input['instance_count'] = 1
d_input['instance_names'] = ['instance_name1']
d_input['instance_ips'] = ['127.0.0.1']
d_input['instance_backupips'] = ['172.16.3.80']
d_input['instance_redundancy_ips'] = ['172.16.3.81']
d_input['instance_redundancy_backupips'] = ['172.16.3.82']
d_input['case_id'] = 'ASyncGetHistoryDataObjectRecordSets_1'
d_input['check'] = '暂时没想到咋校验'
d_input['obj_names'] = ['obj_name']
d_input['data_versions'] = [1]
d_input['start_times'] = ['2019-08-06 13:57:32.265']
d_input['end_times'] = ['2019-08-06 13:57:42.265']
d_input['time_realtions'] = [14]
# The values above are test data.
api = API(d_input)
res = wait(600, api.connect)  # stop reconnecting after 10 minutes
if res.get('result_code') != 1:
    # Connecting to the service failed: skip the case.
    result = dict(
        case_id=d_input.get('case_id'),
        date=now(),
        status='未结束',
        exec_point='connect',
        result_code=res.get('result_code'),
    )
else:
    func_name = d_input.get('case_id').split('_')[0]
    # Map the case_id prefix to its test function.
    func = CASES_DICT.get(func_name)
    case_id = d_input.get('case_id')
    if not func or not callable(func):
        # badcase: filter out invalid case_ids
        # NOTE(review): statement truncated in this chunk — the dict
        # literal continues past this view of the file.
        result = dict(
def app():
    """Factory: an API instance configured for the test templates."""
    instance = API(templates_dir="tests/templates", debug=False)
    return instance
class BotBuilder:
    """
    A tool for building devRant bots. Extend this class
    """

    # Background thread driving the polling loop.
    __runner_thread: Thread
    # Loop-control flag; set False by stop() to end the polling loop.
    running: bool
    __username: str
    __password: str
    # Polling period in seconds.
    __interval: int
    __raw_notifs: list

    def __init__(self, username: str, password: str):
        """Create the API client, remember the credentials and log in."""
        self.api = API()
        self.__username = username
        self.__password = password
        self.api.login(username, password)

    def onNotif(self, type: NotifType, content: dict):
        """Override point: called once per unread notification."""
        pass

    def start(self, interval_secs: int, threaded=False):
        """Start polling every *interval_secs* seconds.

        When *threaded* is False this call blocks until the bot stops.
        """
        self.running = True
        self.__interval = interval_secs
        # Create a thread for the bot
        self.__runner_thread = Thread(target=self.__run)
        # Start the bot thread
        self.__runner_thread.start()
        # Check if we should join the bot thread or not
        if not threaded:
            self.__runner_thread.join()

    def stop(self):
        """Ask the polling loop to exit after its current iteration."""
        self.running = False

    def __run(self):
        # Poll notifications, normalise them and dispatch to onNotif.
        while self.running:
            # Check for expired auth
            if self.api.hasAuthExpired():
                # Log in again.
                # BUGFIX: was ``self.username``/``self.password`` — those
                # attributes do not exist (the fields are name-mangled
                # ``__username``/``__password``), so re-login raised
                # AttributeError.
                self.api.login(self.__username, self.__password)
            # Fetch notifs
            self.__raw_notifs = self.api.getNotifs()
            # Clear notifs
            self.api.clearNotifs()
            # Convert raw notif data to something more useful
            notif_feed = []
            for notif in self.__raw_notifs:
                # skip if already read
                if notif["read"]:
                    continue
                # Just doing some type conversion
                del notif["read"]
                notif["type"] = toNotifType(notif["type"])
                # Convert a user ID to a username
                notif["username"] = self.api.getUsername(notif["uid"])
                del notif["uid"]
                # Add to list
                notif_feed.append(notif)
            # Call onNotif with new data
            for notif in notif_feed:
                # Store type in buffer
                notif_type = notif["type"]
                # Strip type from notif
                del notif["type"]
                # Call callback
                self.onNotif(notif_type, notif)
            # Sleep for interval
            time.sleep(self.__interval)
def __init__(self):
    """Create the underlying Instagram API client."""
    client = API()
    self.__instagram_api = client
def async_subscribe_history_data_object_record_sets(default={}):
    """Test case: asynchronously subscribe to history data record sets.

    Flow: register the subscription callback, define the async callback,
    create/initialise the API instance, connect, build the request, send
    the asynchronous request, wait for the callback (or time out), verify
    the first subscription result, trigger the subscription by adding
    records, verify again, then report the statistics.

    :param default: one row of csv input data, dict
    :return: test-result dict
    """
    # NOTE(review): mutable default argument ``{}`` — safe only while this
    # function never mutates ``default``; prefer ``default=None``.
    case_id = default.get('case_id')
    date = now()
    flag = False  # set when the async (first) callback fires
    sub_flag = False  # set when the subscription (push) callback fires
    check_case = str(default.get('check'))
    first_check_result = '未校验'
    second_check_result = '未校验'
    first_data_count = 0
    second_data_count = 0

    def subscribe_callback(handle, pb_response_batch):
        # Verifies the pushed (second) subscription results.
        nonlocal sub_flag, second_data_count, second_check_result, sub_et
        sub_flag = True
        # Timing depends on the push mechanism: correct if the batch is not
        # split (single call); with split batches, stop the clock only when
        # the record count reaches the expected value.
        sub_et = time.time()
        data = pb_response_batch.contents
        response_count = data.responseCount
        response_array = data.responseArray
        record_sets = []
        try:
            for response in response_array:
                data_length = response.responseBody.len
                byte_data = response.responseBody.data[:data_length]
                pb_response = history_pb2.SubscribeHistoryDataObjectRecordSetsResponse(
                )
                pb_response.ParseFromString(byte_data)  # deserialize to a PB object
                object_recordsets = pb_response.object_recordsets
                record_sets.extend(object_recordsets)
        except:
            second_check_result = 'failed'
            logger.error('unserialize error !!!')
            logger.error(traceback.format_exc())
            return
        # Pseudo-code: adapt to the real object structure.
        second_data_count = len(record_sets)
        # Verify record_sets member values, record counts, etc.
        if 1:  # placeholder check — always succeeds
            second_check_result = 'success'
        else:
            second_check_result = 'failed'

    def callback(handle, pb_response):
        # Verifies the first subscription response.
        nonlocal flag, first_check_result, test_et, first_data_count
        flag = True
        test_et = time.time()
        data = pb_response.contents  # dereference the pointer
        data_length = data.responseBody.len
        byte_data = data.responseBody.data[:data_length]  # raw PB response bytes
        pb_response = history_pb2.SubscribeHistoryDataObjectRecordSetsResponse(
        )
        try:
            pb_response.ParseFromString(byte_data)  # deserialize to a PB object
        except:
            first_check_result = 'failed'
            logger.error('unserialize error !!!')
            logger.error(traceback.format_exc())
            return
        object_recordsets = pb_response.object_recordsets
        # Pseudo-code: adapt to the real object structure.
        first_data_count = len(object_recordsets)
        # Verify object_recordsets member values, record counts, etc.
        if 1:  # placeholder check — always succeeds
            first_check_result = 'success'
        else:
            first_check_result = 'failed'

    api = API(default)
    res = api.register_subscribe_callback(subscribe_callback, default)
    result_code = res.get('result_code')
    if result_code != 1:  # registering the callback failed
        result = dict(
            case_id=default.get('case_id'),
            date=date,
            status='未结束',
            exec_point='注册回调',
            result_code=result_code,
            check_case=check_case,
            check_result=first_check_result,
        )
        return result
    request = history_pb_requests.subscribe_history_data_object_record_sets(
        default)
    res = api.async_pb_blob_request_call(callback, request)
    result_code = res.get('result_code')
    test_st = res.get('st', 0)
    test_et = res.get('et', 0)
    if result_code == 1:
        st = time.time()
        et = time.time()
        while et - st < 600:  # wait for the first subscription result
            if flag:
                break
            time.sleep(3)
            et = time.time()
        if flag and (first_check_result == 'success'):
            # Trigger a second subscription push by adding records.
            request = history_pb_requests.add_history_data_object_record_sets(
                default)
            res = api.sync_pb_blob_request_call(request)
            # The add's start time starts the push timer.
            sub_st = res.get('st', 0)
            sub_et = res.get('et', 0)
            add_code = res.get('result_code')
            if add_code == 1:  # records added successfully
                query_time = test_et - test_st
                st = time.time()
                et = time.time()
                while et - st < 600:  # wait for the second subscription result
                    if sub_flag:
                        break
                    time.sleep(3)
                    et = time.time()
                if sub_flag:
                    sub_time = sub_et - sub_st
                else:
                    sub_time = sub_et - sub_st + 600
                # Unsubscribe logic goes here.
                result = dict(
                    case_id=case_id,
                    date=date,
                    status='结束',
                    exec_point='end',
                    result_code=result_code,
                    query_time=query_time,
                    sub_time=sub_time,
                    check_case=check_case,
                    first_check_result=first_check_result,
                    first_data_count=first_data_count,
                    second_check_result=second_check_result,
                    second_data_count=second_data_count,
                )
                return result
            else:
                # Unsubscribe logic goes here.
                result = dict(
                    case_id=case_id,
                    date=date,
                    status='未结束',
                    exec_point='增加记录触发二次订阅',
                    result_code=add_code,
                    query_time=test_et - test_st,
                    check_case=check_case,
                    first_check_result=first_check_result,
                    first_data_count=first_data_count,
                    second_check_result=second_check_result,
                )
                return result
        else:
            # Unsubscribe logic goes here.
            result = dict(
                case_id=case_id,
                date=date,
                status='未结束',
                exec_point='首次订阅',
                result_code=result_code,
                query_time=test_et - test_st,
                check_case=check_case,
                check_result=first_check_result,
                data_count=first_data_count,
            )
            return result
    else:
        result = dict(
            case_id=default.get('case_id'),
            date=date,
            status='未结束',
            exec_point='首次订阅',
            result_code=result_code,
            check_case=check_case,
            check_result=first_check_result,
        )
        return result