def makeSelectSql(self, selects, filters=None, groups=None, orders=None, **kwargs):
    """Build a SELECT statement object from its parts.

    :param selects: list of select targets (required, non-empty)
    :param filters: optional filter conditions (was a mutable default ``[]``)
    :param groups: optional group-by items
    :param orders: optional order-by items
    :param kwargs: ``distinct`` -> apply DISTINCT; ``graph`` == 'gis' means
        the full data set is used for plotting
    :raises Exception: when no select targets are given
    :return: the composed SQL statement object
    """
    if not selects:
        raise Exception('no selected content')
    select_part = self.cvtSelect(selects)
    sql_obj = select(select_part)
    if filters:
        where_part = self.cvtWhere(filters)
        sql_obj = sql_obj.where(where_part)
    if groups:
        group_part = self.cvtGroup(groups)
        sql_obj = sql_obj.group_by(*group_part)
    if orders:
        order_part = self.cvtOrder(orders)
        sql_obj = sql_obj.order_by(*order_part)
    if kwargs.get('distinct'):
        sql_obj = sql_obj.distinct()
    # GIS graphs take the whole result set for plotting.
    # NOTE: a leftover debugging breakpoint (pdb.set_trace()) was removed here.
    if 'gis' == kwargs.get('graph'):
        pass
    logger.info(sql_obj.compile(compile_kwargs={"literal_binds": True}))
    return sql_obj
def add_cli(self, username: str, port: str, tree_sent: str, transport):
    """Register a new client instance for *username* and sync its tree
    against the stored one; returns the Client, or None if unknown user.
    """
    stored_tree = database.find_user(username, 'pass')
    if stored_tree is None:
        logger.info('user {} does not exists'.format(username))
        return None
    sent_tree = tree.Tree(tree_sent)
    db_tree = tree.Tree(stored_tree)
    cli = Client(username, port, sent_tree, transport)
    # one user may have several live client instances
    self.clients.setdefault(username, []).append(cli)
    logger.debug("new instance of client {}".format(username))
    self.transports[transport] = cli
    tree.Tree.process_subtree(cli, sent_tree.raw_tree, db_tree.raw_tree)
    # TODO: think about client conflicts + database sync between instances
    logger.debug('tree has been processed for {}'.format(username))
    database.save_new_tree(username, sent_tree.raw_tree)
    return cli
def view(request, id):
    """Theme browsing view.

    GET param 'pattern' selects the layout: '1' normal (default),
    '2' list view, '3' full-screen view.

    :raises Http404: for any non-GET method
    """
    pattern = request.GET.get('pattern', '1')
    # normal browsing mode (default)
    template_name = 'theme/view.html'
    if pattern == '3':
        # full-screen browsing mode
        template_name = 'theme/fullview.html'
    elif pattern == '2':
        # list browsing mode
        template_name = 'theme/listview.html'
    if u'GET' == request.method:
        # fixed: replaced the placeholder log message "xxxxxxxxxxxxx"
        logger.info('theme view: id=%s pattern=%s', id, pattern)
        context = RequestContext(request)
        theme = get_object_or_404(ThemeModel, pk=id)
        theme_scene_rla_set = theme.t2r_set.all().order_by(u'm_order')
        data = {
            u"theme": theme,
            u"theme_scene_rla": theme_scene_rla_set
        }
        return render_to_response(template_name, data, context)
    else:
        raise Http404()
def get(self,tablename):
    """Query up to 100 newest rows of *tablename* and write them as JSON.

    Request arguments become equality filters; the primary-key column
    (first column) is hidden from the output.  Python 2 code
    (``except BaseException, e`` / ``unicode``).
    """
    try:
        session = Session()
        M = make_table_model(tablename.encode('utf-8'))
        #filter_args = [ getattr(M,k)==v[0] for k,v in self.request.arguments.items()]
        _params = {}
        # flatten tornado's {key: [value, ...]} arguments into {key: value}
        [_params.update({k: v[0]}) for k, v in self.request.arguments.items()]
        logger.info(tablename+str(_params))
        filter_args = utils.get_filter_args(M, _params)
        if filter_args:
            models = session.query(M).filter(*filter_args).order_by(desc('id')).limit(100)
        else:
            models = session.query(M).order_by(desc('id')).limit(100)
        logger.debug(models)
        # materialize each row as a plain list of column values
        models = [ [ getattr(model,c.name) for c in M.__table__._columns] for model in models]
        clms = map(lambda x:x.name, M.__table__._columns)
        # hide the primary_key
        result = map(lambda x: dict(zip(clms[1:], x[1:])), models)
        # normalize values for JSON: format datetimes, strip unicode strings
        for item in result:
            for k in item:
                if type(item[k])==datetime:
                    item[k] = item[k].strftime("%Y-%m-%d %H:%M:%S")
                elif type(item[k])==unicode:
                    item[k] = item[k].strip()
        self.write(json.dumps(result))
    except BaseException, e:
        self.write(json.dumps({'msg':'Request Error'}))
        logger.error(traceback.format_exc(e))
def __init__(self, username: str, port: str, tree: tree.Tree, transport):
    """Remember the connection details of a newly joined client."""
    logger.info('{} has joined on port {}'.format(username, port))
    self.transport = transport
    self.user_tree = tree
    self.port = port
    self.username = username
def pushToClient(event_name, user_name, result):
    """Push a result payload to the front end over SSE."""
    logger.info('name is {}, result is {}, username is {}'.format(event_name, result, user_name))
    payload = json.dumps({'result': result})
    channel = SSE_CHANNEL_TEMPLATE.format(user_name)
    send_event(event_name, payload, channel=channel)
def connection_lost(self, ext):
    """Forget a disconnected client; tolerate clients we never registered."""
    try:
        manager = shared.climanager
        username = manager.transports[self.transp]
        del manager.clients[username]
        del manager.transports[self.transp]
        logger.info('{} has disconnected'.format(username))
        NetManager.connection_lost(self, ext)
    except KeyError:
        # transport was never associated with a logged-in client
        logger.info('connection with some unidentified client lost')
def FADD(client, args):
    """Handle a client FADD message: add a file into the user's tree and
    persist the updated tree.
    """
    logger.debug('fadd : {}'.format(args))
    file_tree = json.loads(args.decode())
    directory, filename = client.find_in_tree(file_tree)
    engine.FADD(directory, 'client', filename, file_tree, client)
    logger.info('new user tree has been saved for {}'.format(client.username))
    database.save_new_tree(client.username, client.user_tree.raw_tree)
def verify_bk_login(self, bk_token):
    """Ask the login platform whether *bk_token* is still valid.

    Returns (True, data) when valid, otherwise (False, {}).
    """
    result, resp = http_get(self.BK_LOGIN_VERIFY_URL, {'bk_token': bk_token})
    if not (result and resp):
        resp = {}
    if resp.get('result', False):
        return True, resp.get('data', {})
    # token rejected by the platform
    logger.info(u"验证用户登录token无效:%s" % resp.get('message', ''))
    return False, {}
def handleField(request):
    """Handle a custom data-table request.

    Restores the SQL executor bound to the session key 'hk'; responds
    with a failure JSON when the external DB record no longer exists.
    Python 2 code (``except ..., e``).
    """
    hk = request.session.get('hk')
    st = SqlExecutorMgr.stRestore(hk)
    logger.info('hk = {}'.format(hk))
    try:
        conn = ExternalDbModel.objects.get(pk = hk)
    except ExternalDbModel.DoesNotExist, e:
        # NOTE(review): 'xxxxx' looks like a placeholder message — confirm
        return MyHttpJsonResponse({'succ': False, 'msg': 'xxxxx'})
def get(self):
    '''
    Returns an array of movies in JSON format matching a given search
    query. The query must be included in a parameter called "query"
    '''
    query_text = self.get_argument('query', strip=True)
    logger.info('Processing request /search/movies with query "{0}"'.format(query_text))
    matches = yield movies.search_by_title(query_text)
    self.set_header('Content-Type', 'application/json')
    self.write(json.dumps(matches))
def get(self, movie_id):
    '''
    Returns JSON data with a movie given an id; 404 when not found.
    '''
    logger.info('Processing request /movies/{0}'.format(movie_id))
    found = yield movies.get(movie_id)
    if found is None:
        self.set_status(404)
    else:
        self.write(json.dumps(found))
        self.set_header('Content-Type', 'application/json')
def CSND(params):
    """Dispatch a chunk transfer request of the form '<send> <hash> <ip>:<port>'."""
    send, hsh, addr = params.decode().split(' ', 2)
    ip, port = addr.split(':')
    if send != '1':
        logger.info('getting chunk {} from {}'.format(hsh, ip))
        # TODO
        return
    logger.info('sending chunk {} to {} on port {}'.format(hsh, ip, port))
    send_CSTR(ip, int(port), hsh)
def get(self, name):
    """Render the chart page for *name*.

    Request arguments matching a known config key (width/height/
    timeformat) override the chart config; everything else becomes a
    query parameter.  Python 2 code (``has_key`` / ``except ..., e``).
    """
    configMap = {"width": 1024, "height": 400, "timeformat": TIME_FORMAT}
    params = {}
    try:
        for k, v in self.request.arguments.items():
            if not configMap.has_key(k):
                params[k] = v[0]
            else:
                configMap[k] = v[0]
        logger.info(name + str(params))
        data = make_query(name, params, configMap)
        self.render("chart/chart.html", data=data, conf=configMap)
    except BaseException, e:
        logerror.error(traceback.format_exc(e))
def cvtTimeField(self, obj, time_type):
    """Wrap *obj* in an EXTRACT(<time_type>) expression.

    Supported granularities: year / month / day / hour.  Anything else
    is logged and *obj* is returned unchanged.
    """
    if time_type in ('year', 'month', 'day', 'hour'):
        return extract(time_type, obj)
    # fixed: this used to log sys.exc_info(), which is (None, None, None)
    # outside an except block — log the unsupported value instead
    logger.info('unsupported time_type: %s', time_type)
    return obj
def post(self, tablename):
    """Queue posted rows for *tablename* into redis for async insertion."""
    logger.info(str(self.request.arguments))
    cli = self.redis_cli
    apidata = self.get_arguments('apidata')
    if apidata:
        # bulk mode: 'apidata' carries a JSON list of row dicts
        logger.debug(str(apidata))
        for row in json.loads(apidata[0]):
            cli.rpush('ag:post', {'tablename': tablename.encode('utf-8'), 'data': row})
    else:
        # single-row mode: each request argument is one column value
        row = {key: values[0] for key, values in self.request.arguments.items()}
        logger.debug('redis cli start rpush %s'%time.time())
        cli.rpush(CACHE_NAME, {'tablename': tablename.encode('utf-8'), 'data': row})
        logger.debug('redis cli end rpush %s'%time.time())
    self.write({'status':'OK'})
def connection_made(self, transp):
    """Record the master-server transport and attempt the login handshake."""
    NetworkClient.connection_made(self, transp)
    global server_transport
    logger.info("connected to master server")
    server_transport = transp
    try:
        protocol.login(self)
    except Exception as err:
        logger.error(err)
        logger.error('{} was raised'.format(log.nomore(err)))
        for tb_line in traceback.format_tb(err.__traceback__):
            logger.debug(tb_line)
        raise err
def listen(port):
    """Run the asyncio server on *port* until interrupted, then clean up.

    :param port: TCP port bound on all interfaces
    """
    eloop = asyncio.new_event_loop()
    asyncio.set_event_loop(eloop)
    coro = eloop.create_server(Server, '0.0.0.0', port)
    server = eloop.run_until_complete(coro)
    logger.info('listening on {}'.format(server.sockets[0].getsockname()))
    try:
        eloop.run_forever()
    except KeyboardInterrupt:
        # fixed: logger.warn is a deprecated alias of logger.warning
        logger.warning('keyboard interrupt')
    server.close()
    eloop.run_until_complete(server.wait_closed())
    eloop.close()
def selectButton(self, wid, _id):
    """Pick a random row of control *_id* on page *wid*, click it and
    return its name.
    """
    js = "return Rb.Pages.Page.s_pages['" + wid + "'].getControl('" + _id + "')._data['length'];"
    _length = self.driver.execute_script(js)
    _num = random.randint(0, (_length - 1)) if _length else 0
    js = "return Rb.Pages.Page.s_pages['" + wid + "'].getControl('" + _id + "')._data;"
    _name = self.driver.execute_script(js)
    # record the executed script in the run log
    logger.info(js)
    _name = _name[_num]['name']
    for row in self.driver.find_elements_by_class_name("rowData"):
        if row.text == _name:
            row.click()
            break
    return _name
def restore(self, hk):
    """Restore a DB connection from the ExternalDbModel record *hk*.

    :raises Exception: when no record exists for *hk*
    :return: self, with the connection re-established
    """
    try:
        logger.info('restore hk = {0}'.format(hk))
        externaldb = ExternalDbModel.objects.get(pk=hk)
    except ExternalDbModel.DoesNotExist:
        # fixed message: was u"can''t resotre" (typo + doubled quote
        # producing "cant resotre")
        raise Exception(u"can't restore")
    else:
        conn_nt = ConnNamedtuple(
            ip=externaldb.m_ip, port=externaldb.m_port,
            kind=externaldb.m_kind, db=externaldb.m_db,
            user=externaldb.m_user, pwd=externaldb.m_pwd,
        )
        self.connDb(conn_nt)
        return self
def getExpr(self, factor):
    '''
    Convert one part of a condition expression into its SQL variable
    form; the returned boolean tags which kind of fragment it is.
    '''
    fragment = factor.mapIntoSql()
    # Whole-row, new-row and constant factors each need different handling.
    if isinstance(factor, (SeriesFactor, RangeFactor)):
        return False, fragment
    elif 'ElementFactor' == factor.__class__.__name__ \
            and 'rgl' == factor.cmd:
        # NOTE(review): 'fragment' here is literal text inside the string,
        # not interpolated — confirm this is intentional.
        return False, 'TD["new"][fragment]'
    else:
        logger.info('factor type is {}'.format(type(factor)))
        # Define a SQL function name to compute the whole-row result.
        # NOTE(review): time.time() truncated to seconds can collide when
        # two names are generated within the same second.
        funcname = 'prefunc' + str(int(time.time()))
        return True, funcname
def run_worker(cls, app):
    """Consume tasks from the normal queue until the app stops running."""
    task_queue = app.task_normal_queue
    while app.is_running:
        try:
            task = task_queue.get(block=False)
        except queue.Empty:
            time.sleep(0.1)
            continue
        outcome = task.execute()
        # fan out any generated sub-tasks
        for sub in (outcome.sub_tasks or []):
            app.task_normal_queue.put(sub)
            logger.info('Add to task queue(normal):{}, {}'.format(
                sub.url, app.task_normal_queue.qsize()))
        if outcome.data:
            app.data_queue.put((task, outcome.data))
    logger.info('Task 1 thread done !')
def parse_node_search_all(self, rule_node, content):
    """Apply every regex of *rule_node* to *content*, collecting ALL
    matches, then either output the values or recurse into child rules.

    :return: (value, new_links) where new_links maps extracted URLs to
        their link rules
    """
    key, value, new_links = rule_node.name, [], {}
    m = []
    for item in rule_node.regex_items:
        # find the first match, used to keep $0
        search_ret = re.search(item.regex, content)
        if not search_ret:
            logger.info('Regex match failed, re: {}, {}'.format(
                item.regex, self._url))
            continue
        # find-all scenario: returns a list of group tuples
        result = re.findall(item.regex, content)
        if not result:
            logger.info('{} parse failed'.format(rule_node.name))
        else:
            # pull $0, $1 ... values out of each match
            result = [
                self.query_content_for_multi(item.query, groups, groups)
                for groups in result
            ]
            m.extend(result)
    # post-processing: strip HTML tags etc.
    m = [self.post_process(rule_node, content=i) for i in m]
    if not rule_node.children:
        # no child rules: output the matched content directly
        # NOTE(review): json.loads(..., encoding=...) was removed in
        # Python 3.9 — confirm the target runtime.
        value = m if not rule_node.jsonfied else [
            json.loads(i, encoding='utf8') for i in m
        ]
        # this node extracts links
        if rule_node.type == NodeType.LINK:
            for m_item in m:
                new_links[m_item] = rule_node.link_rule
    else:
        # child rules exist: apply them to every match and merge results
        for m_item in m:
            child_rt, child_links = self.parse_child_nodes(
                rule_node.children, m_item)
            value.append(child_rt)
            new_links = {**new_links, **child_links}
    return value, new_links
def upload_etc_fee_deduction(body: OBUModel2):
    """Accept an ETC fee-deduction upload request.

    Builds the signed payload and returns a flag/errorCode/errorMessage
    dict; the actual upload and persistence are currently disabled
    (see note below).
    :param body: the deduction record posted by the lane device
    :return: result dict with flag/errorCode/errorMessage/data
    """
    logger.info('===============接收etc扣费上传请求===============')
    logger.info(body.json(ensure_ascii=False))
    params = json.loads(body.json())
    sign_combine = 'card_net_no:{},card_serial_no:{},card_sn:{},card_type:{},exit_time:{},obu_id:{},park_code:{},' \
                   'plate_no:{},tac:{}'.format(body.card_net_no, body.card_serial_no, body.card_sn,
                                               body.card_type, body.exit_time, body.obu_id,
                                               body.park_code, body.plate_no, body.tac)
    # fixed: debug print() calls replaced with logger.debug
    logger.debug(sign_combine)
    sign = XlapiSignature.to_sign_with_private_key(
        text=sign_combine,
        private_key=CommonConf.ETC_CONF_DICT['thirdApi']['private_key']).decode(encoding='utf8')
    logger.debug(sign)
    # NOTE(review): 'sign' is computed but never attached to the payload —
    # confirm whether it should be part of etc_deduct_info_dict.
    etc_deduct_info_dict = {"method": "etcPayUpload", "params": params}
    # NOTE(review): the real upload (ThirdEtcApi.etc_deduct_upload) and the
    # sqlite persistence of the payload are disabled; restore them when
    # re-enabling the feature.
    result = dict(flag=True, errorCode='', errorMessage='', data=None)
    # obu_id == '0' marks a failed deduction upload
    upload_flag = body.obu_id != '0'
    if not upload_flag:
        result['flag'] = False
        result['errorCode'] = '1'
        result['errorMessage'] = 'etc扣费上传失败'
    return result
def post(self, request, *args, **kwargs):
    """Handle a post-edit form submission.

    Validates permissions and form input, copies the cleaned fields onto
    the post, saves it, and redirects back to the post page.
    """
    pk = int(self.kwargs['pk'])
    post = Post.objects.get(pk=pk)
    post = auth.post_permissions(request=request, post=post)
    # For historical reasons we had posts with iframes
    # these cannot be edited because the content would be lost in the front end
    if "<iframe" in post.content:
        logger.error("This post is not editable because of an iframe! Contact if you must edit it (Request: %s)", request)
        return HttpResponseRedirect(post.get_absolute_url())
    # Check and exit if not a valid edit.
    if not post.is_editable:
        logger.error("This user may not modify the post (Request: %s)", request)
        return HttpResponseRedirect(post.get_absolute_url())
    # Posts with a parent are not toplevel
    form_class = LongForm if post.is_toplevel else ShortForm
    form = form_class(request.POST)
    if not form.is_valid():
        # Invalid form submission.
        return render(request, self.template_name, {'form': form})
    # Valid forms start here.
    data = form.cleaned_data
    # Set the form attributes.
    for field in form_class.FIELDS:
        setattr(post, field, data[field])
    # TODO: fix this oversight!
    post.type = int(data.get('post_type', post.type))
    # This is needed to validate some fields.
    post.save()
    if post.is_toplevel:
        post.add_tags(post.tag_val)
    logger.info("Post updated (Request: %s)", request)
    return HttpResponseRedirect(post.get_absolute_url())
def init():
    """Initialize GPIO pin mappings, optionally overridden from
    client_hw_setup.cfg, and configure the pins as outputs.
    """
    global GPIO_PAPOWER, GPIO_SPEAKER, GPIO_PAPOWER_ACTIVE_HIGH, GPIO_SPEAKER_ACTIVE_HIGH
    global GPIO_LED_A, GPIO_LED_A_ACTIVE_HIGH, GPIO_LED_B, GPIO_LED_B_ACTIVE_HIGH
    if GPIO:
        try:
            with open('client_hw_setup.cfg') as f:
                config = json.loads(f.read())
            # each mapping is optional; keep the default when missing
            try:
                GPIO_PAPOWER = config['papower']['pin']
                GPIO_PAPOWER_ACTIVE_HIGH = config['papower']['active']
            except:
                pass
            try:
                GPIO_SPEAKER = config['speaker']['pin']
                GPIO_SPEAKER_ACTIVE_HIGH = config['speaker']['active']
            except:
                pass
            try:
                GPIO_LED_A = config['ledA']['pin']
                GPIO_LED_A_ACTIVE_HIGH = config['ledA']['active']
            except:
                pass
            try:
                GPIO_LED_B = config['ledB']['pin']
                GPIO_LED_B_ACTIVE_HIGH = config['ledB']['active']
            except:
                pass
            log.debug(
                f'speaker on gpio={GPIO_SPEAKER} polarity={GPIO_SPEAKER_ACTIVE_HIGH}'
            )
            log.debug(
                f'papower on gpio={GPIO_PAPOWER} polarity={GPIO_PAPOWER_ACTIVE_HIGH}'
            )
        except:
            # missing/invalid config file: keep the compiled-in defaults
            log.info('using default i/o mappings')
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        GPIO.setup(GPIO_SPEAKER, GPIO.OUT)
        GPIO.setup(GPIO_PAPOWER, GPIO.OUT)
        GPIO.setup(GPIO_LED_A, GPIO.OUT)
        GPIO.setup(GPIO_LED_B, GPIO.OUT)
        # NOTE(review): GPIO_GPIO is not among the globals declared above —
        # confirm it is defined at module level, otherwise this raises
        # NameError at runtime.
        GPIO.setup(GPIO_GPIO, GPIO.OUT)
def parse_node_search_1st(self, node: RuleNode, content):
    """Apply *node*'s regexes to *content*, keeping only the FIRST match,
    then output the value or recurse into child rules.

    :return: (value, new_links); (None, {}) when nothing matched
    """
    key, value, new_links = node.name, {}, {}
    m = None
    for item in node.regex_items:
        # find the first match
        search_ret = re.search(item.regex, content)
        if not search_ret:
            logger.info('Regex match failed, re: {}, {}'.format(
                item.regex, self._url))
            continue
        g0, groups = search_ret.group(0), search_ret.groups()
        # pull $0, $1 ... values out of the match
        m = self.query_content(item.query, g0, groups)
        if m:
            break
    if not m:
        return None, {}
    # post-processing: strip HTML tags etc.
    m = self.post_process(node, m)
    if not node.children:
        # no child rules: output the matched content directly
        if node.jsonfied:
            try:
                # NOTE(review): json.loads(..., encoding=...) was removed
                # in Python 3.9 — confirm the target runtime.
                m = json.loads(m, encoding='utf8')
            except Exception as e:
                logger.error('Convert to json failed ! {}, {}'.format(
                    m, e))
                m = None
        value = m
        if node.type == NodeType.VARIOUS:
            self.set_var(node.name, m)
        # this node extracts links
        if node.type == NodeType.LINK:
            if isinstance(m, str):
                new_links[m] = node.link_rule
            elif isinstance(m, list):
                for mi in m:
                    new_links[mi] = node.link_rule
    else:
        # child rules exist: apply them to the matched content
        value, new_links = self.parse_child_nodes(node.children, m)
    return value, new_links
def shutdown_all_threads(self):
    """Signal the app to stop and join every worker/store thread."""
    logger.info('Start shut down all threads')
    self.is_running = False
    for thread in self.worker_normal_threads:
        # fixed: the thread name was passed as a stray logging arg with no
        # %s placeholder and never rendered; isAlive() was also removed in
        # Python 3.9 in favour of is_alive().
        logger.info('Wait work1 thread end: %s', thread.name)
        if thread.is_alive():
            thread.join()
    logger.info('Wait store thread end')
    if self.store_thread and self.store_thread.is_alive():
        self.store_thread.join()
    logger.info('End shut down all threads')
def get_text(self, path):
    """OCR the image at *path* via Baidu's general_basic API and return
    the highest-confidence line of text, or '' on any failure.
    """
    # fixed: the file handle was previously leaked (open(...).read())
    with open(path, 'rb') as img:
        image_bytes = img.read()
    data = {
        'image': base64.b64encode(image_bytes).decode(),
        'language_type': 'ENG',
        'probability': 'true'
    }
    while True:
        params = {
            'access_token': OCR.access_token,
            'aipSdk': 'python',
            'aipVersion': '2_2_18'
        }
        try:
            # NOTE(review): verify=False disables TLS verification
            r = requests.post(
                'https://aip.baidubce.com/rest/2.0/ocr/v1/general_basic',
                data=data, params=params, headers={}, verify=False)
            r.raise_for_status()
        except BaseException as e:
            logger.error(f'ocr请求失败, {e}')
            return ''
        try:
            ret = json.loads(r.content.decode('utf8'))
        except BaseException as e:
            logger.error(f"json不正确: {r.content.decode('utf8')}")
            return ''
        logger.info(ret)
        if ret.get('error_code', 0) == 111:
            # access token expired: refresh it and retry the request
            response = requests.get(OCR.host)
            if response:
                OCR.access_token = response.json()['access_token']
                logger.info('access_token 过期,已重新获取')
                continue
            logger.error(f'access_token 更新失败')
            return ''
        # fixed: a response without 'words_result' (other error codes)
        # used to raise KeyError here; an empty list raised IndexError
        if not ret.get('words_result'):
            logger.error(f'ocr返回异常: {ret}')
            return ''
        words_result = [(w['words'], w['probability']['average'])
                        for w in ret['words_result']]
        words_result.sort(key=lambda x: x[1], reverse=True)
        return words_result[0][0]
def on_data(data):
    """Oplog callback: log the entry and checkpoint the resume position.

    Persists the previously completed timestamp via mongo2es before
    remembering the current one, so replay can resume safely.
    NOTE(review): this is a closure — `self` and the name-mangled
    `self.__done` come from the enclosing method's class.
    """
    logger.info({
        'a_ts': data.get('ts'),
        'b_op': data.get('op'),
        'c_ns': data.get('ns'),
        'd_time': arrow.get(
            data.get('ts').time).format('YYYY-MM-DD hh:mm:ss.SSS')
    })
    # save the last fully processed position, then record the new one
    mongo2es.set(self.key, json.dumps(self.__done))
    self.__done = {
        "ts": {
            "time": data['ts'].time,
            "inc": data['ts'].inc
        }
    }
def transfer_move_req(peer_id, chain_id):
    """Forward a shard-adjustment request to the machine hosting *peer_id*.

    :raises Exception: when the peer is unknown, already on *chain_id*,
        or the remote adjustment reports failure
    """
    peer = config.get_peer_by_id(peer_id)
    if not peer:
        raise Exception('该节点不存在')
    if peer.chain_id == chain_id:
        raise Exception('该节点已属于该分片,无需调整')
    # forward the adjustment request to the service on the peer's host
    data = {'peer_id': peer_id, 'chain_id': chain_id}
    url = str.format('http://{}:{}/api/system/doAdjust', peer.ip, config.PORT)
    # note: the former `try: ... except Exception as e: raise e` wrapper
    # was a no-op and has been removed
    res_str = request_util.post(url=url, data=data)
    res = json_util.un_marshal(res_str)
    if res['flag']:
        logger.info('调整分片操作执行成功')
    else:
        # fixed: res is a dict — res.message raised AttributeError
        raise Exception('调整分片操作执行失败: ' + res.get('message', ''))
def __enter_transfer(self, xml):
    """Drive the ABC-bank app from the pre-transfer page into the real
    transfer page, returning the page XML once there.

    Retries for up to 50s; the timer resets whenever a random popup page
    is handled.  Raises MyError on timeout.
    """
    start_time = time.time()
    while time.time() - start_time < 50:
        cur_activity = self.driver.get_cur_activity()
        xml = self.driver.get_xml()
        if self.__handle_random_page(cur_activity, xml):
            start_time = time.time()
            continue
        elif cur_activity == 'com.android.bankabc.MainActivity':
            # the pre-transfer page and the transfer page share this activity
            if re.search(self.__pattern_reciver, xml):
                logger.info('进入正式转账页面')
                return xml
            ret = re.findall(self.__pattern_transfer_btn, xml)
            if len(ret) == 2 and '他行转本行' in xml:
                logger.info('点击待转账页面的转账按钮')
                self.__click(ret[1])
        else:
            logger.warning('未知 activity %s' % cur_activity)
    raise MyError('从待转账页面进入转账页面超时')
def __init__(self, source_config, streaming_config):
    """Wire up all audio sources (fifo, tcp, spotifyd, optional mopidy
    and alsa), the shared event queue, and start the gst mainloop thread.
    """
    super(InputMux, self).__init__(name='inputmux')
    log.info('inputmux is starting sources')
    self.queue = queue.Queue()
    self.source_event_lock = threading.Lock()
    self.log_first_audio = LOG_FIRST_AUDIO_COUNT
    self.now_playing = None
    self.timeout_counter = None
    # audiotimeout is given in seconds; the counter ticks 10x per second
    self.timeout_preset_ticks = int(streaming_config['audiotimeout']) * 10
    self.audio_buffer = bytearray()
    self.sourcefifo = sourcefifo.SourceFifo()
    self.sourcefifo.connect('event', self.source_event)
    self.sourcetcp = sourcetcp.SourceTCP(source_config['gstreamer_port'])
    self.sourcetcp.connect('event', self.source_event)
    self.sourcespotifyd = sourcespotifyd.SourceSpotifyd()
    self.sourcespotifyd.connect('event', self.source_event)
    if source_config['mopidy_ws_enabled'] == 'on':
        self.source_mopidy = sourcemopidy.SourceMopidy(
            source_config['mopidy_ws_address'],
            source_config['mopidy_ws_port'],
            source_config['mopidy_gst_port'])
        self.source_mopidy.connect('event', self.source_event)
    else:
        self.source_mopidy = None
    # the alsa source is best-effort: missing config keys disable it
    try:
        self.alsasrc = None
        if source_config['alsasource']['enabled'] == 'true':
            self.alsasrc = sourcealsa.SourceAlsa(
                source_config['alsasource'])
            self.alsasrc.connect('event', self.source_event)
    except:
        pass
    self.audiominblocksize = int(source_config['audiominblocksize'])
    threading.Thread(target=self.gst_mainloop_thread).start()
    self.start()
def send(self, method, path, **content):
    """
    Thin wrapper over requests with url/header/proxy defaults.

    Same semantics as requests.request; merges instance headers with any
    per-call headers, disables TLS verification, applies a 20s default
    timeout, and returns an empty Response on connection errors.

    :param method: HTTP method
    :param path: path appended to the base url
    :param content: passed through to requests
    :return: the requests.Response (empty Response on connection error)
    """
    url = self._get_url(path)
    if content.get('headers'):
        # merge per-call headers over the instance defaults
        headers = copy.deepcopy(self.headers)
        headers.update(content['headers'])
        content["headers"] = headers
    else:
        content["headers"] = self.headers
    content["proxies"] = self.proxy
    content['verify'] = False
    if 'timeout' not in content:
        content["timeout"] = 20
    date_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    logger.info("time={},Requesting url={}, method={}, args={}".format(
        date_time, url, method, content))
    urllib3.disable_warnings()
    try:
        response = s.request(method, url, **content)
    except Exception as e:
        logger.error(color.red(e))
        return requests.Response()
    # fixed off-by-one: status 300 was treated as success (`> 300`);
    # only 2xx is a success now
    if not (200 <= response.status_code < 300):
        logger.error(
            color.red("response code={}, text={}".format(
                response.status_code, response.text)))
    else:
        logger.info(
            color.green("response code={}".format(response.status_code)))
    return response
def run_worker_thread(cls, job):
    """Worker loop: drain the task queue, execute tasks, and enqueue the
    resulting sub-tasks and collected data until the job stops.
    """
    while job.is_running:
        batch = job.task_queue.get_many()
        if not batch:
            time.sleep(5)
            continue
        collected = []
        for current in batch:
            outcome = current.execute()
            if outcome.sub_tasks:
                job.task_queue.put_many(outcome.sub_tasks)
                logger.info('Add {} new tasks, total task count:{}'.format(
                    len(outcome.sub_tasks), job.task_queue.qsize()))
            if outcome.data:
                collected.append((current, outcome.data))
        job.data_queue.put_many(collected)
def __init__(self):
    """Locate a readable mmssms.db among the known candidate paths.

    :raises MyError: when no candidate database is connectable and
        contains sent sms rows
    """
    # note: the first and third entries used to be duplicates
    db_path = [
        '/data/user_de/0/com.android.providers.telephony/databases/mmssms.db',
        '/data/data/com.android.providers.telephony/databases/mmssms.db',
    ]
    for db in db_path:
        # candidates are absolute, so join() just yields the path itself
        self.db_path = os.path.join('/data', db)
        logger.info(f'尝试 {self.db_path}')
        db_obj = Sqlite(self.db_path)
        if not db_obj.connected():
            continue
        if db_obj.select(
                'select address, date, date, body from sms where type=1 limit 1'
        )[0] is True:
            logger.info(f'使用 {self.db_path}')
            return
    # fixed: the failure message referenced the undefined name `db_list`
    raise MyError(f'sms db 未找到: {db_path}')
def delete_saas_app(app_code, username): """ 清除应用 """ # app = App.objects.get(code=app_code) saas_app = SaaSApp.objects.get(code=app_code) if saas_app.state not in [1]: return False, "应用已经部署过, 无法进行删除操作" saas_app.app = None saas_app.current_version = None saas_app.online_version = None saas_app.save() # 删除 app 表中的应用 try: SecureInfo.objects.filter(app_code=app_code).delete() App.objects.filter(code=app_code).delete() # 将APP的发布记录保存为上一次,避免下次创建时显示冲突 Record.objects.filter(app_code=app_code).update(version='last') logger.info("[app:%s] 删除成功", app_code) except Exception: message = "应用删除失败!" logger.exception("[app:%s] %s", app_code, message) return False, message try: SaaSAppVersion.objects.delete_all_versions(saas_app) saas_app.delete() except Exception: message = "删除 SaaSApp 相关数据失败, app_code=%s" % app_code logger.exception(message) return False, message # 操作流水日志 extra_data = {"username": username} record_user_operate(app_code=app_code, username=username, operate_type=UserOperateTypeEnum.APP_DELETE.value, extra_data=extra_data) return True, "应用删除成功"
def sendmessage():
    """
    @summary: Celery periodic task that sends a notification e-mail
    @note: once Celery starts, this task is auto-registered in djcelery.
        Python 2 code (print statement / ``except ..., e``).
    """
    now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    mail_title = u'Blueking Info'
    mailto_list = ['*****@*****.**'] ## separate multiple recipients with semicolons
    mail_content = u"辛苦了,继续加油! {}".format(now)
    # perform the actual mail delivery
    try:
        s = send.SendMail()
        s.send_mail(to_list=mailto_list, sub=mail_title, content=mail_content)
        logger.info(u"celery 周期任务调用成功,当前时间:{}".format(now))
        print 'Send Success...',
    except Exception, e:
        print 'Send Failed!', e
        logger.error(u"收集失败,当前时间:{}".format(now))
def check_surplus(self):
    """Query the account balance through the CCB app.

    Returns (balance, '') on success or ('', error_message) on failure.
    """
    self.driver.power_on_screen()
    self.driver.unlock()
    logger.info('开始通过建设银行查询余额')
    try:
        # navigate: home page -> pre-transfer page -> transfer page
        page_xml = self._enter_main_page()
        page_xml = self.__enter_prepare_transfer(page_xml)
        page_xml = self.__enter_transfer(page_xml)
        # read the balance off the transfer page
        balance = self.__transfer(page_xml, '', '', '', '', True)[0]
        return float(balance), ''
    except BaseException as e:
        logger.error(f'查询余额失败,{e}')
        return '', str(e)
def __enter_transfer(self, xml):
    """Drive the CCB app from the pre-transfer page into the smart
    transfer page, returning the page XML once there.

    Retries for up to 50s; the timer resets whenever a random popup page
    is handled.  Raises MyError on timeout.
    """
    start_time = time.time()
    while time.time() - start_time < 50:
        cur_activity = self.driver.get_cur_activity()
        xml = self.driver.get_xml()
        if self.__handle_random_page(cur_activity, xml):
            start_time = time.time()
            continue
        elif cur_activity == 'com.ccb.transfer.transfer_home.view.TransferHomeAct':
            # pre-transfer page: tap the transfer button when present
            ret = re.findall(self.__pattern_transfer_btn, xml)
            if len(ret) == 1:
                logger.info('点击待转账页面的转账按钮')
                self.__click(ret[0])
        elif cur_activity == 'com.ccb.transfer.smarttransfer.view.SmartTransferMainAct':
            logger.info('进入正式转账页面')
            return xml
        else:
            logger.warning('未知 activity %s' % cur_activity)
    raise MyError('从待转账页面进入转账页面超时')
def is_value_in_element(self, locator, value):
    """Wait until *value* appears in the element's `value` attribute.

    Returns True when found within the timeout, False otherwise (an
    empty *value* always yields False).

    :param locator: tuple like ('id', 'value1')
    :raises TypeError: when locator is not a tuple
    """
    if not isinstance(locator, tuple):
        # fixed: the original did `raise logger.info(...)`, which raises
        # None and produces a confusing TypeError instead of the message
        logger.info("参数类型错误,locator必须是元祖类型:loc = ('id','value1')")
        raise TypeError("locator must be a tuple, e.g. loc = ('id','value1')")
    try:
        result = WebDriverWait(self.driver, self.timeout, self.t).until(
            EC.text_to_be_present_in_element_value(locator, value))
        return result
    except Exception:
        # narrowed from a bare except (which also caught KeyboardInterrupt)
        return False
def test_loan_success(self, loan):
    """Submit a loan application and assert the success message."""
    page = self.loan_page
    page.loan_amount(loan["amount"])
    page.loan_business(loan["rate"], loan["bidding"], loan["uses"],
                       loan["limit"], loan["day_mouth"],
                       loan["payment"], loan["address"])
    actual = page.get_loan_success_info
    try:
        self.assertEqual(loan["expected"], actual)
    except AssertionError as e:
        logger.debug("测试用例:{}->失败:{}".format(inspect.stack()[0][3], e))
        page.screen_shot("loan_success")
        raise e
    else:
        logger.info("测试用例:{}->通过".format(inspect.stack()[0][3]))
def get_user(uid, allusers):
    """Return the User owning *uid*'s pubkey, creating it on first sight.

    :param uid: key id to look up in *allusers*
    :param allusers: iterable of rows shaped (uid, name, pubkey, ...)
    :raises LookupError: when *uid* is not present in *allusers*
        (previously this fell through to a NameError on `pubkey`)
    """
    for row in allusers:
        if row[0] == uid:
            pubkey = row[2]
            break
    else:
        raise LookupError('uid %s not found' % uid)
    try:
        user = User.objects.get(pubkey=pubkey)
        logger.info('Fetched user: %s' % user.pubkey)
    except Exception:
        # narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit; any lookup failure creates the user
        user = User(pubkey=pubkey)
        user.save()
        logger.info('Created user: %s' % user.pubkey)
    return user
def create_task(request, template_id, bk_biz_id):
    """APIGW endpoint: create a task flow instance from a task template.

    Validates the JSON body against APIGW_CREATE_TASK_PARAMS, builds a
    pipeline instance excluding the requested nodes, and returns the new
    task id as JSON.
    """
    biz = Business.objects.get(cc_id=bk_biz_id)
    tmpl = TaskTemplate.objects.select_related('pipeline_template').get(id=template_id, business=biz)
    params = json.loads(request.body)
    logger.info('apigw create_task info, template_id: %s, bk_biz_id: %s, params: %s' % (template_id, bk_biz_id, params))
    try:
        params.setdefault('flow_type', 'common')
        params.setdefault('constants', {})
        params.setdefault('exclude_task_nodes_id', [])
        jsonschema.validate(params, APIGW_CREATE_TASK_PARAMS)
    except jsonschema.ValidationError as e:
        logger.warning(u"apigw create_task raise prams error: %s" % e)
        message = 'task params is invalid: %s' % e
        return JsonResponse({'result': False, 'message': message})
    pipeline_instance_kwargs = {
        'name': params['name'],
        'creator': request.user.username,
    }
    if 'description' in params:
        pipeline_instance_kwargs['description'] = params['description']
    try:
        result, data = TaskFlowInstance.objects.create_pipeline_instance_exclude_task_nodes(
            tmpl, pipeline_instance_kwargs, params['constants'], params['exclude_task_nodes_id'])
    except PipelineException as e:
        # NOTE(review): e.message is Python 2 style — confirm
        # PipelineException still defines .message on the target runtime.
        return JsonResponse({'result': False, 'message': e.message})
    if not result:
        return JsonResponse({'result': False, 'message': data})
    task = TaskFlowInstance.objects.create(
        business=biz,
        pipeline_instance=data,
        category=tmpl.category,
        template_id=template_id,
        create_method='api',
        create_info=request.jwt.app.app_code if hasattr(request, 'jwt') else request.META.get('HTTP_BK_APP_CODE'),
        flow_type=params.get('flow_type', 'common'),
        current_flow='execute_task' if params.get('flow_type', 'common') == 'common' else 'func_claim',
    )
    return JsonResponse({'result': True, 'data': {'task_id': task.id}})
def init_scheduler():
    """Initialize and start the APScheduler instance."""
    def my_listener(event):
        """Job event listener: log crashes and successes."""
        if event.exception:
            logger.exception('========== The job crashed :( ==========')
            logger.exception(str(event.exception))
        else:
            logger.info('============ The job worked :) ===========')
    job_sqlite_path = os.path.join(CommonConf.SQLITE_DIR, 'jobs.sqlite')
    # remove the job store on every start so stale jobs are not resumed
    os.remove(job_sqlite_path) if os.path.exists(job_sqlite_path) else None
    jobstores = {
        'default': SQLAlchemyJobStore(
            url='sqlite:///' + job_sqlite_path)  # SQLAlchemyJobStore storage url
    }
    executors = {
        'default': {
            'type': 'threadpool',
            'max_workers': 10
        },  # maximum worker threads
        'processpool': ProcessPoolExecutor(max_workers=1)  # maximum worker processes
    }
    job_defaults = {'coalesce': True, 'max_instances': 3}
    scheduler._logger = logger
    scheduler.configure(jobstores=jobstores,
                        executors=executors,
                        job_defaults=job_defaults)
    # watch the antenna heartbeat; restart the antenna when it stalls too long
    scheduler.add_job(RsuStatus.check_rsu_heartbeat,
                      trigger='cron',
                      minute='*/3',
                      id='check_rsu_heartbeat',
                      kwargs={'callback': ThirdEtcApi.tianxian_heartbeat},
                      max_instances=2)
    scheduler.add_listener(my_listener,
                           events.EVENT_JOB_EXECUTED | events.EVENT_JOB_ERROR)
    logger.info("启动调度器...")
    scheduler.start()
def server_connector(self):
    """Background loop that (re)establishes the server connection.

    Tears down a terminated socket, then multicasts get_server_socket
    requests with a delay growing from 0.1s until an endpoint is known
    or the client terminates.  Always returns False.
    """
    delay = 0.1
    while not self.terminated:
        if self.terminate_socket:
            self.socket_lock.acquire()
            self.terminate_socket = False
            if self.socket:
                self.socket.join()
                self.socket = None
                self.server_endpoint = None
                self.player.process_server_message({'command': 'stopping'})
                log.info('waiting for server connection')
                self.server_offline_counter = 10
            self.socket_lock.release()
        if not self.server_endpoint:
            if self.server_offline_counter:
                self.server_offline_counter -= 1
                if not self.server_offline_counter:
                    log.critical('server seems to be offline')
            try:
                self.multicast.send({
                    'command': 'get_server_socket',
                    'to': 'server',
                    'from': self.id,
                    'version': util.LUDIT_VERSION
                })
            except OSError:
                log.warning('got a multicast send exception. Bummer.')
            # back off by 0.1s per attempt; past 5s the delay resets to 0.1s
            if delay < 5.0:
                delay += 0.1
            else:
                delay = 0.1
        # sleep in 0.1s slices so termination is noticed quickly
        _delay = delay
        while _delay > 0.0 and not self.terminated:
            time.sleep(0.1)
            _delay -= 0.1
    log.debug('server connector exits')
    return False
def init_rsu(self): """ 初始化rsu, 初始化耗时大约1s :return: """ # 天线开关开启 self.rsu_on_or_off = StatusFlagConfig.RSU_ON # if 'socket_client' in dir(self): # del self.socket_client # 创建一个客户端的socket对象 self.socket_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # 连接服务端 self.socket_client.connect( (self.rsu_conf['ip'], self.rsu_conf['port'])) logger.info( '=============================天线初始化=============================') # 设置连接超时 # self.socket_client.settimeout(CommonConf.ETC_CONF_DICT['socket_connect_time_out']) # 天线功率, 十进制转16进制 tx_power = hex(self.rsu_conf['tx_power'])[2:] if len(tx_power) == 1: tx_power = '0' + tx_power # 发送c0初始化指令,以二进制的形式发送数据,所以需要进行编码 c0 = CommandSendSet.combine_c0( lane_mode=self.rsu_conf['lane_mode'], wait_time=self.rsu_conf['wait_time'], tx_power=tx_power, pll_channel_id=self.rsu_conf['pll_channel_id'], trans_mode=self.rsu_conf['trans_mode']).strip() logger.info('发送c0初始化指令: %s' % (c0, )) self.socket_client.send(bytes.fromhex(c0)) # 接收数据 msg_bytes = self.socket_client.recv(1024) msg_str = CommonUtil.transfer_recv_command(msg_bytes.hex()) # 字节转十六进制 logger.info('接收数据: {}'.format(repr(msg_str))) # b0 天线设备状态信息帧 if msg_str[6:8] == 'b0': self.command_recv_set.parse_b0(msg_str) # 解析b0指令 if self.command_recv_set.info_b0['RSUStatus'] == '00': self.rsu_status = StatusFlagConfig.RSU_NORMAL self.rsu_heartbeat_time = datetime.datetime.now() else: self.rsu_status = StatusFlagConfig.RSU_FAILURE elif msg_str == '' and self.recreate_socket_count < 2: # 可能由于上次没有正常关闭,导致mst_st为空 self.recreate_socket_count += 1 self.close_socket() logger.info('==============再试一次初始化天线==============') # 再试一次初始化天线 self.init_rsu() else: self.recreate_socket_count = 0
def my_job_result(task_inst_id, bk_biz_id, client, create_user, max_retries=15, sleep_time=2):
    """
    Poll a job instance until it finishes and report success/failure.
    :param task_inst_id: job instance to poll
    :param bk_biz_id: business id
    :param client: API client
    :param create_user: operator
    :param max_retries: maximum polling rounds
    :param sleep_time: seconds between rounds
    :return: True when the job finished successfully, False on failure
        or timeout
    """
    attempt = 0
    while attempt <= max_retries:
        logger.info(u'【%s】等待作业完成(%s/%s)' % (task_inst_id, attempt, max_retries))
        is_finished, is_ok = my_ijob_result(task_inst_id, bk_biz_id, client, create_user)
        if not is_finished:
            # still running: wait and poll again
            attempt += 1
            time.sleep(sleep_time)
            continue
        if is_ok:
            logger.info(u'【%s】作业执行成功' % task_inst_id)
            return True
        logger.error(u'执行失败')
        return False
    # polling rounds exhausted
    return False
def verify_bk_login(self, bk_token, use_cache=True):
    """Ask the login platform whether the given bk_token is still valid.

    Results (both positive and negative) are cached for 15 seconds under
    a token-specific key.

    :param bk_token: login token to verify
    :param use_cache: when True, reuse a previously cached result
    :return: tuple (is_valid, user_data_dict)
    """
    CACHE_KEY = "%s_verify_bk_login_%s" % (CACHE_PREFIX, bk_token)
    data = cache.get(CACHE_KEY)
    if not (use_cache and data):
        param = {'bk_token': bk_token}
        result, resp = http_get(self.BK_LOGIN_VERIFY_URL, param)
        resp = resp if result and resp else {}
        ret = resp.get('result', False)
        # NOTE(review): the original comment here said "验证失败"
        # (verification failed), but this branch is the SUCCESS path —
        # ret True means the token is valid.
        if ret:
            data = True, resp.get('data', {})
        else:
            data = False, {}
            logger.info(_(u"验证用户登录token无效:%s") % resp.get('message', ''))
        # Cache the outcome for 15 seconds
        cache.set(CACHE_KEY, data, 15)
    return data
def check_param_perm(func): @functools.wraps(func) def wrapper(*args, **kwargs): request = kwargs.get("request") or args[0] cc_biz_id = (request.POST.get('cc_biz_id') or request.POST.get('biz_id') or request.GET.get('cc_biz_id') or request.GET.get('biz_id')) if not cc_biz_id: try: param = json.loads(request.POST['param']) cc_biz_id = param.get('cc_biz_id') or param.get('biz_id') except Exception, e: logger.exception(e) cc_biz_id = None logger.info("check_param_perm biz_id: %s" % cc_biz_id) try: check_perm_with_raise(request.user, cc_biz_id) except Exception, e: raise PermissionDenied(unicode(e))
def themeEdit(request, id):
    """
    Theme edit view (主题编辑).

    GET: render the edit form with the theme, all distributed scenes,
    and the theme's current scene relations ordered by m_order.
    POST: update the theme's fields, then rebuild its scene relations
    from the JSON 'scences' payload (delete existing, bulk-create new).
    """
    if request.method == u'GET':
        logger.info("id:" + id)
        context = RequestContext(request)
        theme = get_object_or_404(ThemeModel, pk = id)
        # Only scenes already distributed may be attached to a theme
        sceneList = SceneModel.objects.filter(m_is_distributed = True)\
            .values(u'pk', u'm_snapshot', u'm_name')
        th_sc_ral_set = theme.t2r_set.all().order_by("m_order")
        data = {
            u'theme': theme,
            u'allowed_scenes': sceneList,
            u't2s_ral' : th_sc_ral_set
        }
        return render_to_response('theme/add.html', data, context)
    else:
        name, switch_effect,description = map(lambda x: request.POST.get(x), \
            ('name', 'switch_effect', 'description'))
        # NOTE: 'scences' (sic) is the field name the client actually posts
        scences = json.loads(request.POST.get('scences'))
        theme = ThemeModel.objects.get(pk = id)
        ThemeModel.objects.filter(pk = id).update(\
            m_name = name, m_switch_effect = switch_effect,\
            description = description)
        # Build the new theme->scene relation rows with their ordering
        rla_list = []
        for sc in scences:
            scence = SceneModel.objects.get(pk = sc.get(u'id'))
            rla = TheToScnRelationModel( \
                m_sub = theme, m_scn = scence,m_order=sc.get(u'order') )
            rla_list.append(rla)
        # Replace all existing relations: delete then bulk-create.
        # NOTE(review): not wrapped in a transaction — a failure between the
        # delete and the bulk_create would drop the theme's relations; verify
        # whether ATOMIC_REQUESTS covers this view.
        TheToScnRelationModel.objects.filter(m_sub = theme).delete()
        TheToScnRelationModel.objects.bulk_create(rla_list)
        return MyHttpJsonResponse({u'succ': True, u'id': theme.pk, \
            u'msg': u'编辑成功'})
def loop(protocol_factory, ip, port):
    """Run an asyncio TCP server until interrupted, then shut it down.

    :param protocol_factory: callable producing a protocol instance per
        client connection
    :param ip: interface address to bind
    :param port: TCP port to listen on
    """
    event_loop = asyncio.get_event_loop()  # renamed: original shadowed this function's name
    # Each client connection will create a new protocol instance
    coro = event_loop.create_server(protocol_factory, ip, port)
    server = None
    try:
        server = event_loop.run_until_complete(coro)
        logger.info('Serving on {}'.format(server.sockets[0].getsockname()))
        # Serve requests until Ctrl+C is pressed
        event_loop.run_forever()
    except KeyboardInterrupt:
        # BUG FIX: KeyboardInterrupt is not a subclass of Exception, so the
        # original 'except Exception' never caught Ctrl+C and the shutdown
        # code was effectively unreachable on interrupt.
        logger.warning('keyboard interrupt')
    except Exception:
        logger.debug("something happened")
    finally:
        # BUG FIX: if create_server failed, 'server' was unbound and the
        # original cleanup raised NameError; guard before closing.
        if server is not None:
            server.close()
            event_loop.run_until_complete(server.wait_closed())
        event_loop.close()
def connection_lost(self, ext):
    """Log that the connection with the peer was dropped.

    :param ext: presumably the exception that caused the loss, or None on
        a clean EOF (asyncio protocol callback contract — the value is
        unused here; TODO confirm)
    """
    logger.info('connection with peer was lost')
# --- stdlib ---
import logging.config
import sys
from os import path

# --- third-party ---
import yaml
from tornado.ioloop import IOLoop
from tornado.web import Application, url

# --- local ---
from common.config import config
from common.log import logger
from services.handlers import movies_handler

if __name__ == '__main__':
    # Load the logging configuration from a YAML file located next to the
    # entry-point script, if present.
    logging_conf = path.join(path.dirname(path.abspath(sys.modules['__main__'].__file__)),
                             'movies-logging.yaml')
    if path.exists(logging_conf):
        with open(logging_conf) as conf:
            # SECURITY/DEPRECATION FIX: yaml.load without an explicit Loader
            # is unsafe and warns on PyYAML >= 5.1; a logging config only
            # needs plain YAML, so safe_load is sufficient.
            logging.config.dictConfig(yaml.safe_load(conf))

    # Route table: movie lookup by id and movie search
    app = Application([
        url(r'/api/movies/([a-z0-9]+)', movies_handler.GetMovieByIdHandler),
        url(r'/api/search/movies', movies_handler.MoviesSearchHandler)
    ])
    port = config['web_app']['port']
    logger.info('Starting Tornado server in port {0}'.format(port))
    app.listen(port)
    IOLoop.current().start()