def commit_sql(self, sql, args=None):
    """Execute an INSERT/UPDATE/DELETE statement and report success.

    :param sql: the insert/update/delete SQL statement to execute
    :param args: optional parameters for the SQL placeholders (tuple); may be
                 omitted, default None
    :return: True if the statement committed successfully, False otherwise
    """
    conn = pymysql.connect(host=self.host, port=self.port, user=self.user,
                           passwd=self.password, charset='utf8', db=self.db_name)
    cursor_sql = conn.cursor()
    try:
        cursor_sql.execute(sql, args)
        conn.commit()
        # BUG FIX: the original line was `log_print = '...'.format(sql)`,
        # which rebound the name `log_print` to a string (shadowing the
        # logging function) instead of calling it.
        log_print('exec sql successfully :{}'.format(sql))
        exec_result = True
    except Exception as e:
        log_print('something wrong when connect to the :', e)
        conn.rollback()
        exec_result = False
    finally:
        # release the cursor before closing the connection (was leaked before)
        cursor_sql.close()
        conn.close()
    return exec_result
def kafka_consume_data(self, *topic):
    """Consume the pending messages from one or more Kafka topics.

    :param topic: one or more topic names to read from
    :return: list of the pending message values, decoded as utf-8 strings
    """
    consumer = KafkaConsumer(group_id='test_group', bootstrap_servers=self.server)
    consumer.subscribe(topics=topic)
    poll_num = 5  # number of poll rounds; each round sleeps 0.2s so late messages can arrive
    data_read = []
    try:
        for _ in range(poll_num):
            msg = consumer.poll(timeout_ms=5)
            # One poll may return records for several topic-partitions.
            # IDIOM FIX: iterate the dict values directly instead of the
            # original index loops that rebuilt list(msg.values()) on every
            # pass; behavior is unchanged.
            for records in msg.values():
                for record in records:
                    data_read.append(record.value.decode('utf-8'))
            time.sleep(0.2)
    except Exception as e:
        log_print('receive data error', e)
    finally:
        consumer.commit()
        consumer.close()
    return data_read
def kafka_produce_data(self, topic, send_msg, send_partition=None):
    """Publish one message to a Kafka topic.

    :param topic: destination topic name
    :param send_msg: message payload (str); encoded as utf-8 before sending
    :param send_partition: target partition; default None lets Kafka assign one
    :return: True if the message was handed to the producer, False on KafkaError
    """
    # BUG FIX: the producer was created inside the try block, so a failing
    # constructor left `producer` unbound and the finally clause then raised
    # NameError instead of the real connection error.
    producer = KafkaProducer(bootstrap_servers=self.server)
    try:
        producer.send(topic, send_msg.encode('utf-8'), partition=send_partition)
        send_result = True  # also fixes the `send_resut` typo (local name only)
    except KafkaError as e:
        send_result = False
        log_print('send data wrong ', e)
    finally:
        producer.flush()  # make sure the buffered message actually goes out
        producer.close()
    return send_result
def delete_redis_value(self, key):
    """Delete a redis key.

    :param key: the redis key to delete
    :return: the number of keys removed on success, False on failure
    """
    self.key = key
    try:
        r = redis.Redis(host=self.host, port=self.port, db=0, password=self.password)
    except Exception as e:
        # BUG FIX: the original swallowed the error and fell through with `r`
        # unbound, which raised NameError on the delete call below.
        log_print('Something wrong when connect the redis ...', e)
        return False
    try:
        delete_res = r.delete(self.key)
    except Exception as e:
        log_print('Delete redis values failed ... ', e)
        delete_res = False
    return delete_res
def get_redis_value(self, key):
    """Fetch the value stored under a redis key.

    :param key: the redis key to look up
    :return: the value decoded as utf-8; the string 'Not exist' when the key
             is absent; None when the redis connection could not be created
    """
    self.key = key
    try:
        r = redis.Redis(host=self.host, port=self.port, db=0, password=self.password)
    except Exception as e:
        # BUG FIX: the original swallowed the error and fell through with `r`
        # unbound, which raised NameError on r.exists() below.
        log_print('Something wrong when connect the redis ...', e)
        return None
    if r.exists(self.key):
        get_res = r.get(self.key).decode('utf8')
    else:
        log_print('the redis_key is not exist')
        get_res = 'Not exist'
    return get_res
def query_sql(self, sql, args=None):
    """Execute a SELECT statement and return all rows.

    :param sql: the query SQL statement to execute
    :param args: optional parameters for the SQL placeholders (tuple); may be
                 omitted, default None
    :return: all fetched rows on success, the string 'query failed' on error
    """
    conn = pymysql.connect(host=self.host, port=self.port, user=self.user,
                           passwd=self.password, charset='utf8', db=self.db_name)
    cursor_sql = conn.cursor()
    try:
        cursor_sql.execute(sql, args)
        query_result = cursor_sql.fetchall()
        log_print('query sql is: :', sql)
        log_print('the query result is:', query_result)
    except Exception as e:
        log_print('something wrong when connect to the :', e)
        query_result = 'query failed'
    finally:
        # release the cursor before closing the connection (was leaked before)
        cursor_sql.close()
        conn.close()
    return query_result
def set_redis_value(self, key, values):
    """Store a value under a redis key.

    :param key: the redis key to write
    :param values: the string value to store (encoded as utf-8)
    :return: True on success, False on failure
    """
    self.key = key
    self.values = values.encode('utf8')
    try:
        r = redis.Redis(host=self.host, port=self.port, db=0, password=self.password)
    except Exception as e:
        # BUG FIX: the original swallowed the error and fell through with `r`
        # unbound, which raised NameError on the set call below.
        log_print('Something wrong when connect the redis ...', e)
        return False
    try:
        set_res = r.set(self.key, self.values)
    except Exception as e:
        log_print('Update redis values failed ...:', e)
        set_res = False
    return set_res
# RF keyword: write data into kafka
def python_kafka_produce_data(topic, send_msg, send_partition):
    return kafka_func.kafka_produce_data(topic, send_msg, send_partition)


# RF keyword: read the pending (unconsumed) data from kafka
def python_kafka_consume_data(*topic):
    return kafka_func.kafka_consume_data(*topic)


if __name__ == '__main__':
    log_print('Enter python environment')
    # flag = check_running_case()
    # print(flag)
    sql1 = "select count(1) from t_order_table where phone_number = '13618254716'"
    # sql2 = "select * from test.t_user_table"
    sql_result = python_sql_query(sql1)
    print(sql_result)
    # set_res = python_set_redis_value('osh.app.oprNum_19926359:2059a0caa3d9:39_month_7_01:a1d263f249e54b4fbb0243da8352328b','osh.app.oprNum_19926359:2059a0caa3d9:39_month_7_01:a1d263f249e54b4fbb0243da8352328b')
    # print(set_res)
    # redis_get = python_get_redis_value('osh.app.oprNum_19926359:2059a0caa3d9:39_month_7_01:a1d263f249e54b4fbb0243da8352328b')
    # print(redis_get)
    # redis_del = python_delete_redis_value('osh.app.oprNum_19926359:2059a0caa3d9:39_month_7_01:a1d263f249e54b4fbb0243da8352328b')
    # print(redis_del)
    # redis_get = python_get_redis_value('osh.app.oprNum_19926359:2059a0caa3d9:39_month_7_01:a1d263f249e54b4fbb0243da8352328b')
    # print(redis_get)