def step_impl(context):
    """
    :type context behave.runner.Context
    """
    rows = mysql.select(context.dev_xmart, "select sourcelistingid from xmart.stg_listing;")
    mysql.close_connection(con=context.dev_xmart)
    # Pull the id out of each row's repr: the text between the first quote
    # and the trailing comma, e.g. "('ABC123',)" -> "ABC123".
    staged_ids = []
    for record in rows:
        text = str(record)
        start = text.find("'")
        stop = text.find(",")
        staged_ids.append(text[start + 1:stop - 1])
    # Every id recorded in the exceptions table must be present in staging.
    for exc_id in context.id_exception:
        assert_that(exc_id, is_in(staged_ids))
    # Tally occurrences; each staged listing id must appear exactly once.
    tally = {}
    for listing_id in staged_ids:
        tally[listing_id] = tally.get(listing_id, 0) + 1
    for _, occurrences in tally.items():
        assert_that(occurrences, is_(1))
    print("Assertion complete (2 of 3). Listings that are in the Exceptions table appear only once in Stg table. \n")
def step_impl(context):
    """
    :type context behave.runner.Context
    """
    rows = mysql.select(context.dev_xmart, "select sourcelistingid from xmart.listing;")
    mysql.close_connection(context.dev_xmart)
    # Extract the id from each row's repr (between the first quote and the comma).
    ids = []
    for record in rows:
        text = str(record)
        ids.append(text[text.find("'") + 1:text.find(",") - 1])
    # Every source listing id must occur exactly once in the permanent table.
    tally = {}
    for listing_id in ids:
        tally[listing_id] = tally.get(listing_id, 0) + 1
    for _, occurrences in tally.items():
        assert_that(occurrences, is_(1))
    print("Assertion complete (3 of 3). Listing Table contains only unique source listing id's. \n")
def records_all_sql(id, page):
    """Return up to 11 rank-record rows for supporter *id*, newest first.

    Pages are 10 rows wide; the 11th row lets the caller detect a next page.
    """
    offset = max(page * 10 - 1, 0)
    tail = ('LEFT JOIN quicks AS b ON a.uid = b.uid '
            'LEFT JOIN rank_records AS c ON b.rankid = c.rankid '
            'WHERE a.uid={} '.format(id) +
            'order by date desc limit 11 offset {}'.format(offset))
    return mysql.select(
        'supporters as a',
        'b.rankid, c.date - interval 1 day as date, c.rank_global, c.rank_country, c.pp',
        tail)
def article_detail_tags(self):
    """Compare the hot-tag labels rendered on the article page with the DB."""
    tags = pq(self.txt)('.hot-tag-box .hot-tag')
    # Tag label set, one single-element tuple per rendered tag.
    actual = [(pq(tag).text(), ) for tag in tags]
    expect = select(sqls.article_hot_tags)
    equal('article_detail_tags', expect, actual)
def records_sql(id, page):
    """Return up to 11 rank-history rows (deduplicated by pp) for supporter *id*, newest first."""
    offset = max(page * 10 - 1, 0)
    derived = ('(SELECT b.rankid, min(c.date) - interval 1 day date, min(c.rank_global)rank_global, '
               'min(c.rank_country)rank_country, c.pp FROM supporters AS a '
               'LEFT JOIN quicks AS b ON a.uid = b.uid '
               'LEFT JOIN rank_records AS c ON b.rankid = c.rankid '
               'WHERE a.uid={} '.format(id) +
               'GROUP BY c.pp)t')
    return mysql.select(derived, '*',
                        'order by date desc limit 11 offset {}'.format(offset))
def ask_total_count(self):
    """Derive the QA total count from the pager and verify it against the DB.

    total = (pages * 20) - 20 + items on the last page.
    """
    box = self.txt('.zypage')
    pages = pq(box)('#page-pane2').text()  # total number of pages
    last_page = pq(requests.get(self.url + '?page=' + pages).text)
    tail_count = len(last_page('.QA-list>.item'))
    total_count = int(pages) * 20 + tail_count - 20
    actual = [(total_count, )]
    expect = select(sqls.ask_count)
    equal('ask_total_count', expect, actual)
def article_list_expect(self, article_type):
    """Load and normalize the expected article-list rows for *article_type*.

    Column 2: stripped title (49 chars); column 3: stripped summary (80 chars,
    spaces and NBSP removed); column 4: commas removed; column 8: date reformatted.
    """
    raw = select(sqls.article_list.replace('lilang_type', article_type))
    rows = [list(r) for r in raw]
    for row in rows:
        row[2] = pq(content_deal(row[2])).text()[0:49]
        row[3] = pq(content_deal(row[3])).text().replace(' ', '').replace(u'\xa0', '')[0:80]
        row[4] = row[4].replace(',', '') if row[4] else ''
        row[8] = time_deal(row[8])
    return rows
def get_excellent_recom(self):
    """Verify the recommended-courses API payload against the database."""
    res = requests.get(self.url + 'v3/getExcellentRecom/?UUID=&userId=')
    payload = json.loads(res.text)
    actual = []
    for course in payload['data']['list']:
        actual.append((
            course['course_id'],                      # recommended course id
            course['img_url'].split('uploads/')[-1],  # course image path
            course['name'],                           # course name
        ))
    expect = select(sqls.get_excellent_recom)
    equal('get_excellent_recom', expect, actual)
def ask_hot_qa(self):
    """Compare the hot-QA sidebar items with the DB."""
    actual = []
    for entry in self.txt('#hotQA .item'):
        entry = pq(entry)
        link = entry('a').attr('href').split('/')[-1]  # QA link id
        count = entry('a>span').text().split()[0]      # answer count (read before span removal)
        title = entry('a').remove('span').text()       # QA title
        actual.append((link, title, count))
    self.driver.close()
    expect = select(sqls.ask_hot_qa)
    equal('ask_hot_qa', expect, actual)
def submit_consult_info(self):
    """Submit a randomized consult request and verify it is persisted."""
    name = '李朗' + str(random.randint(0, 100))            # random name
    phone = 18000000000 + random.randint(0, 1000000000)    # random phone number
    res = requests.get(self.url + 'v3/submitConsultInfo/?realName=' + name
                       + '&phoneNum=' + str(phone))
    payload = json.loads(res.text)
    # Response message plus the data we submitted.
    actual = [(payload['message'], name, phone)]
    expect = select(sqls.submit_consult_info)
    equal('submit_consult_info', expect, actual)
def task__list_schedule(self):
    """Compare the per-stage task schedule page with the task list in the DB."""
    session = login_by_res('18008062322', '11111111')
    page = pq(session.get(self.url).text)
    actual = []
    for stage in page('#lineList>ul>li'):
        stage = pq(stage)
        stage_name = stage('.zyStageTit').text()
        for task in stage('.YaHei'):
            actual.append((stage_name, pq(task).text()))
    expect = select(sqls.task_list)
    equal('task_list', expect, actual)
def talk_page(self):
    """Verify pagination of the talk list (article type 5).

    Expected page count is ceil(total / 10).  With more than one page the
    pager renders (count, '1', '2', '1'); otherwise just ('1', ).
    """
    actual = Article().article_page(self.talk_url)
    total = select(sqls.article_total_count.replace('lilang_type', '5'))
    # BUG FIX: use floor division.  Plain "/" is true division on Python 3 and
    # yields a float page count; "//" keeps the original Python 2 semantics.
    if total[0][0] % 10 > 0:
        count = total[0][0] // 10 + 1
    else:
        count = total[0][0] // 10
    expect = []
    if count > 1:
        expect.append((count, '1', '2', '1'))
    else:
        expect.append(('1', ))
    equal('talk_page', expect, actual)
def rank_record():
    """Snapshot current rank stats for every supporter into rank_records."""
    # Supporter list joined with each supporter's registered scoreboard id.
    supporters = mysql.select('supporters as a', 'a.*, b.rankid',
                              'JOIN quicks as b ON a.uid = b.uid')
    if len(supporters) > 0:
        for supporter in supporters:
            rankid = supporter['rankid']
            req = urllib.request.Request('http://saber.tarcle.kr/api/profile/' + rankid,
                                         headers={'api': 'beatsaber'})
            body = urllib.request.urlopen(req).read().decode('utf-8')
            profile = json.loads(body)
            mysql.insert('rank_records',
                         ['rankid', 'rank_global', 'rank_country', 'pp'],
                         [rankid, profile['rank_global'], profile['rank_country'], profile['pp']])
def article_detail_relate(self):
    """Related-articles box must match the DB for the current article."""
    actual = []
    for entry in pq(self.txt)('.hot-article-list>.item'):
        entry = pq(entry)
        link = entry('a').attr('href').split('/')[-1]  # link id
        actual.append((link, entry.text()))            # (link, title)
    if not actual:
        actual = ['无相关问答']
    expect = select(
        sqls.article_detail_relate.replace('lilang_id', self.article_id))
    if not expect:
        expect = ['无相关问答']
    equal('article_detail_relate', expect, actual)
def get_excellent_course_ad(self):
    """Verify the ad entries returned alongside the excellent-course list."""
    res = requests.get(self.url + 'v3/getExcellentCourse/?orderBy=2&loadAd=1')
    payload = json.loads(res.text)
    actual = []
    for ad in payload['data']['ad']:
        actual.append((
            ad['url'].split('uploads/')[-1],  # ad image path
            ad['out_url'],                    # ad callback url
            ad['target_id'],                  # target id
            ad['ad_type'],                    # ad type
            ad['name'],                       # ad title
        ))
    expect = select(sqls.get_excellent_course_ad)
    equal('get_excellent_course_ad', expect, actual)
def step_impl(context, db_list_cnt, s3_list_cnt):
    """
    :type context behave.runner.Context
    """
    # Listing count from the mart.  The result tuple is stringified and the
    # number is sliced out from between "(" and "L" -- this relies on the
    # Python 2 long repr, e.g. "((123L,),)".
    # NOTE(review): breaks on Python 3, where ints carry no trailing "L";
    # confirm the interpreter version before reuse.
    db_list_cnt = str(mysql.select(context.conn, "Select count(*) from xmart.listing;"))
    frst_range = db_list_cnt.find("(")
    second_range = db_list_cnt.find("L")
    db_list_cnt = int(db_list_cnt[frst_range +2 : second_range])
    # Expected count: one listing per entry in the S3 feed file.
    s3_list_cnt = len(s3_connect.cc_list)
    assert_that(db_list_cnt, equal_to(s3_list_cnt))
    print("Assertion complete (1 of 3). Listing Counts Match between S3 input and Database. \n")
# NOTE(review): the credential-prompt string arguments below were redacted to
# "******" by the hosting platform, which leaves the input()/getpass lines
# syntactically broken -- recover the original source before use.
# Intent (from the surviving text): give the user up to 3 login attempts,
# report remaining chances on failure, announce success via the macOS `say`
# command, then launch the game with run_game().
def old_user(): countnum = 0 while countnum <= 2: user = input("\033[1;37m%s\033[0m" % "username:"******"\033[1;37m%s\033[0m" % "password:"******"\033[0;31m%s\033[0m" % "\nLogin incorrent,there are %s/3 chances left.\n" % (3 - countnum)) else: print("\033[0;31m%s\033[0m" % "\n%s Login successful!\n" % user) os.system("say 'welcome to login,%s'" % user) run_game() break
def get_career_course(self):
    """Verify the career-course API list against the database."""
    res = requests.get(self.url + 'v3/getCareerCourse/?count=0')
    payload = json.loads(res.text)
    actual = []
    for course in payload['data']['list']:
        actual.append((
            course['course_count'],                   # number of courses
            course['student_count'],                  # number of students
            course['name'],                           # career-class course name
            course['class_count'],                    # number of classes opened
            course['career_id'],                      # course id
            course['img_url'].split('uploads/')[-1],  # course image path
        ))
    expect = select(sqls.get_career_course)
    equal('get_career_course', expect, actual)
def get_excellent_course(self):
    """Verify the excellent-course API list (no ads) against the database."""
    res = requests.get(self.url + 'v3/getExcellentCourse/?orderBy=2&loadAd=0')
    payload = json.loads(res.text)
    actual = []
    for course in payload['data']['list']:
        actual.append((
            course['student_count'],                  # course click count
            course['lesson_count'],                   # number of course videos
            course['course_id'],                      # course id
            course['course_name'],                    # course name
            course['img_url'].split('uploads/')[-1],  # course image link
            course['teacher'],                        # teacher nickname
            course['updating'],                       # updating flag, 0 = not updating
        ))
    expect = select(sqls.get_excellent_course)
    equal('get_excellent_course', expect, actual)
def classify(word, dict):
    """Classify *word* into one of the global `categories` and persist it.

    Builds a TF-IDF corpus with one document per category (all stored keyword
    explanations concatenated), fits an SVC on those documents, predicts a
    category for the new word's fetched definition, inserts the word into
    T_Keywords, then crawls and inserts related policy links into T_Links.

    NOTE(review): the parameter ``dict`` shadows the builtin; it is a user
    dictionary path passed to fool.load_userdict.
    WARNING: values are %-interpolated straight into the INSERT statements --
    SQL-injection-prone; switch to parameterized queries.
    """
    corpus = []
    sql = "select * from T_Keywords"
    results = mysql.select(sql)
    for category in categories:
        words = ""
        for result in results:
            if result[2] == category:
                fool.load_userdict(dict)
                # Join each category's tokens into one document string.
                line = " ".join(fool.cut(result[3])[0])
                words = words + line
        corpus.append(words)
    exp = get_parses(word)  # fetch the definition of the current word
    fool.load_userdict(dict)
    expwords = " ".join(fool.cut(exp)[0])  # tokenize the definition
    corpus.append(expwords)
    vectorizer = CountVectorizer()
    csr_mat = vectorizer.fit_transform(corpus)
    transformer = TfidfTransformer()
    tfidf = transformer.fit_transform(csr_mat)
    y = np.array(categories)
    model = SVC()
    length = categories.__len__()
    # Train on the per-category documents; predict on the trailing new document.
    model.fit(tfidf[0:length], y)
    predicted = model.predict(tfidf[length:])
    # Insert the newly queried word with its predicted category.
    sql = "insert into T_Keywords(keyword,category,weight,explanation) values('%s','%s','%s','%s')" % (
        word, predicted[0], 1, exp)
    kid = mysql.exec(sql)
    # Crawl related links and insert them, keyed by the new keyword id.
    hrefs = get_policy(word)
    for href in hrefs:
        title = href.get('title')
        url = href.get('url')
        sql = "insert into T_Links(title,href,kid) values('%s','%s','%s')" % (
            title, url, kid)
        mysql.exec(sql)
def step_impl(context, db_attr_cnt, s3_attr_cnt):
    """
    :type context behave.runner.Context
    """
    # Attribute-row count from the mart; the number is sliced out of the
    # stringified result between "(" and "L".
    # NOTE(review): relies on the Python 2 long repr (e.g. "((42L,),)") --
    # breaks on Python 3; confirm interpreter version.
    db_attr_cnt = str(mysql.select(context.conn, "Select count(*) from xmart.listing_attribute_value;"))
    first_range = db_attr_cnt.find("(")
    sec_range = db_attr_cnt.find("L")
    db_attr_cnt = int(db_attr_cnt[first_range +2 : sec_range])
    mysql.close_connection(context.conn)
    # Expected count: one row per attribute in the S3 attributes file.
    s3_attr_cnt = len(s3_connect.cc_attr)
    assert_that(db_attr_cnt, equal_to(s3_attr_cnt))
    print("Assertion complete (2 of 3). Attribute Counts Match between S3 attributes file and Database. \n")
def get_career_detail(self, career_id):
    """Verify the career-detail API (description, landing page, stages, courses)."""
    res = requests.get(self.url + 'v3/getCareerDetail/?careerId=' + career_id
                       + '&UUID=a489516c4e73436bbbeb1a695940fdcf&userId=108')
    payload = json.loads(res.text)
    actual = []
    course_desc = payload['data']['desc']        # career course description
    index_html = payload['data']['index_html']   # landing-page address
    for stage in payload['data']['stage']:
        stage_desc = stage['stage_desc']         # stage description
        stage_id = stage['stage_id']             # stage id
        stage_name = stage['stage_name']         # stage name
        for course in stage['list']:
            actual.append((course_desc, index_html, stage_desc, stage_id,
                           stage_name,
                           course['course_id'],                      # course id
                           course['img_url'].split('uploads/')[-1],  # course image
                           course['name'],                           # course name
                           course['updating']))                      # updating flag
    expect = select(sqls.get_career_detail.replace('my_id', career_id))
    equal('get_career_detail', expect, actual)
def ask_rank(self):
    """Compare the answerer-ranking list rendered on the page with the DB."""
    driver = self.driver
    container = driver.find_element_by_xpath(
        '/html/body/div[7]/div[3]/div/div/div[2]/div[6]')
    actual = []
    for entry in container.find_elements_by_tag_name('li'):
        link = entry.find_element_by_tag_name('a').get_attribute('href').split(
            '/')[-1]  # user link id
        ava = entry.find_element_by_tag_name('img').get_attribute('src').split(
            'http://192.168.1.142//uploads/')[-1]  # avatar path
        nick_name = entry.find_element_by_class_name('a1').text  # nickname
        desc = entry.find_element_by_class_name('a2').text       # user intro
        count = entry.find_element_by_class_name(
            'huoBox_ul_font2').text.split()[0]  # answer count
        actual.append((link, ava, nick_name, desc, count))
    driver.close()
    expect = select(sqls.ask_rank)
    if not expect:
        expect = ['无数据']
    equal('ask_rank', expect, actual)
def step_impl(context, mart_ids, s3_ids):
    """
    :type context behave.runner.Context
    """
    rows = mysql.select(context.conn, "select * from xmart.listing")
    # Column 3 of each listing row holds the source listing id.
    mart_ids = [row[3] for row in rows]
    mart_ids.sort()
    mysql.close_connection(context.conn)
    s3_ids = s3_connect.listing_id
    s3_ids.sort()
    # The sorted id sets from the feed file and the DB must match exactly.
    assert_that(mart_ids, equal_to(s3_ids))
    print("Assertion complete (3 of 3). Source Listing ID's Match between S3 feed file and the DB listing table. \n")
def setting(driver):
    """UI test FPS_010: log in, open personal settings, upload an avatar,
    update nickname/position/description/QQ, save, then verify the nickname
    persisted in mz_forum_entity for user 2218.
    """
    username = '******'  # NOTE(review): credentials redacted by the platform
    password = '******'
    login.login_fps(username, password)
    time.sleep(5)
    driver.find_element_by_link_text('个人设置').click()  # open "personal settings"
    time.sleep(5)
    driver.find_element_by_xpath(
        '//*[@id="user_info_save"]/div[1]/div/span[3]/a').click()
    # Avatar upload via the file input (local Windows path).
    driver.find_element_by_id("file_upload").send_keys("E:\\Pictures\\lyp.jpg")
    time.sleep(5)
    driver.find_element_by_xpath(
        '//*[@id="upload-pane"]/div[2]/button').click()
    time.sleep(3)
    driver.find_element_by_id('id_nick_name').clear()
    time.sleep(3)
    driver.find_element_by_id('id_nick_name').send_keys(u'漆来平猪')
    driver.find_element_by_id('id_position').clear()
    time.sleep(3)
    driver.find_element_by_id('id_position').send_keys(u'测试工程师猪')
    time.sleep(2)
    driver.find_element_by_id('id_description').clear()
    driver.find_element_by_id('id_description').send_keys(u'打酱油工程师')
    # Scroll to the bottom so the remaining fields and save button are reachable.
    js = "document.body.scrollTop=10000;"
    driver.execute_script(js)
    time.sleep(2)
    driver.find_element_by_id('id_qq').clear()
    driver.find_element_by_id('id_qq').send_keys('123456')
    time.sleep(2)
    driver.find_element_by_id('user_save').click()
    time.sleep(2)
    # Latest nickname written for user 2218.
    actual = select(
        'select nickname from mz_forum_entity where user=2218 order BY date_publish DESC limit 0,1 ;'
    )
    print(actual)
    update_output('FPS_010', '漆来平猪', actual)
def step_impl(context):
    """
    :type context behave.runner.Context
    """
    n_agent = list()  # ids checked so far (reported at the end)
    notfound = 0      # number of ids missing from xmart.agent
    for agent in context.new_agent:
        # Look the agent up by its source id.
        # NOTE(review): the id is spliced into the SQL directly -- acceptable
        # for controlled test data, but not injection-safe.
        agt_query = "select sourceagentid from xmart.agent nolock where sourceagentid = '%s';" % agent
        agt_select = mysql.select(mysql.dev_mart, agt_query)
        # NOTE(review): the connection is closed inside the loop yet reused on
        # the next iteration -- presumably mysql.select reconnects; confirm.
        mysql.close_connection(mysql.dev_mart)
        n_agent.append(agent)
        # An empty result set stringifies to '()'.
        if str(agt_select) == '()':
            print("** Fail. Agent not found: ", agent)
            notfound = notfound + 1
    assert_that(notfound, is_(0))
    print("Assertion complete (1 of 2). New Agent(s) appear in Agent Table... ID's:", n_agent, "\n")
def step_impl(context):
    """
    :type context behave.runner.Context
    """
    rows = mysql.select(context.dev_xmart,
                        "select distinct sourcelistingid from xmart.load_exception;")
    mysql.close_connection(con=context.dev_xmart)
    # Extract the id from each row's repr (between the first quote and the comma).
    found = []
    for record in rows:
        text = str(record)
        found.append(text[text.find("'") + 1:text.find(",") - 1])
    found.sort()
    context.id_exception = found
    # The exceptions table must hold exactly the duplicate ids from the feed file.
    assert_that(context.id_exception, equal_to(context.dup_feed_id))
    print("Assertion complete (1 of 3). Duplicate ID's from the Feed File have been found in the Exceptions table. \n")
def step_impl(context):
    """
    :type context behave.runner.Context
    """
    n_office = list()  # ids checked so far (reported at the end)
    notfound = 0       # number of ids missing from xmart.office
    for office in context.new_office:
        # Look the office up by its source id.
        # NOTE(review): the id is spliced into the SQL directly -- acceptable
        # for controlled test data, but not injection-safe.
        off_query = "select sourceofficeid from xmart.office nolock where sourceofficeid = '%s';" % office
        off_select = mysql.select(mysql.dev_mart, off_query)
        # NOTE(review): the connection is closed inside the loop yet reused on
        # the next iteration -- presumably mysql.select reconnects; confirm.
        mysql.close_connection(mysql.dev_mart)
        n_office.append(office)
        # An empty result set stringifies to '()'.
        if str(off_select) == '()':
            print("** Fail. Office not found: ", office)
            notfound = notfound + 1
    assert_that(notfound, is_(0))
    print("Assertion complete (2 of 2). New Office(s) appear in Office Table... ID's:", n_office, "\n")
def step_impl(context, stg_attr_del, stg_list_del, perm_attr_del, perm_list_del):
    """
    :type context behave.runner.Context
    """
    # Current listing count, taken as a single character of the stringified
    # result (e.g. "((5L,),)"[2] == "5").
    # NOTE(review): only the first digit is captured -- sufficient for the
    # ">= 1" emptiness check below, but not a real count; and the index
    # depends on the exact tuple repr.
    cur_count = str(mysql.select(context.conn, "Select count(*) from xmart.listing;"))
    cur_count = cur_count[2]
    # Cleanup statements: attribute rows are deleted before their listing
    # rows (staging first, then permanent) to respect foreign keys.
    stg_attr_del = "delete from xmart.stg_listing_attribute_value;"
    stg_list_del = "delete from xmart.stg_listing;"
    perm_attr_del = "delete from xmart.listing_attribute_value;"
    perm_list_del = "delete from xmart.listing;"
    if int(cur_count) >= 1:
        mysql.execute(con=context.conn, statement= stg_attr_del)
        mysql.execute(con=context.conn, statement= stg_list_del)
        mysql.execute(con=context.conn, statement= perm_attr_del)
        mysql.execute(con=context.conn, statement= perm_list_del)
        mysql.close_connection(context.conn)
    else:
        # Nothing to delete; still release the connection.
        mysql.close_connection(context.conn)
        pass
def ask_list(self):
    """Compare the rendered QA list (avatar, author, title, latest reply,
    tags, source, publish time, view/reply/like counters) with the
    normalized DB rows.

    NOTE(review): uses the Python 2 print statement -- this module targets
    Python 2.
    """
    item = self.txt('.QA-list>.item')
    actual = list()
    for i in item:
        i = pq(i)
        ava = i('.col-left').find('img').attr('src').split('uploads/')[
            -1]  # avatar path
        nick_name = i('.u-name').text()  # nickname
        title = i('.col-right h3>a').text()  # QA title
        reply = i('.item-r-A').text().split('[最新回答]')[-1].replace(
            ' 查看详细', '')[0:100]  # latest reply, markers stripped
        tags = i('.hot-tag-group .hot-tag').text()  # QA tags
        tags = tags if tags else None
        source = i('.hot-tag-group .from-where').text().split('源自: ')[
            -1]  # "from xx" source
        date_publish = i('.datetime').text()  # publish time
        review_count = i('.icon-browse').text()  # view count
        reply_count = i('.icon-ask').text()  # reply count
        forward_count = i('.icon-good').text()  # like count
        actual.append(
            (ava, nick_name, title, reply, tags, source, date_publish,
             review_count, reply_count, forward_count))
    self.driver.close()
    expect = select(sqls.ask_list)
    expect = list(expect)
    # Normalize DB rows to match the page rendering.
    for i in range(len(expect)):
        expect[i] = list(expect[i])
        expect[i][2] = pq(content_deal(expect[i][2])).text()[0:49]
        print i, expect[i][3]
        # Placeholder text when a question has no replies yet.
        expect[i][3] = expect[i][3] if expect[i][3] else u'还没有人回答这个问题呢...'
        expect[i][3] = pq(content_deal(expect[i][3])).text()[0:100]
        expect[i][4] = expect[i][4].replace(
            ',', '') if expect[i][4] else expect[i][4]
        expect[i][6] = time_deal(expect[i][6])
    equal('ask_list', expect, actual)
def get_career_price(self, career_id):
    """Verify down-payment/full price and class roster info for a career course."""
    res = requests.get(self.url + 'v3/getCareerPrice/?careerId=' + career_id
                       + '&UUID=a489516c4e73436bbbeb1a695940fdcf&userId=108')
    payload = json.loads(res.text)
    first_pay = payload['data']['pay']['first_pay']  # down-payment price
    price = payload['data']['pay']['price']          # full price
    actual = []
    for klass in payload['data']['class_list']:
        actual.append((
            first_pay,
            price,
            klass['class_id'],      # class id
            klass['curr_student'],  # current enrollment
            klass['teacher'],       # teacher nickname
            klass['class_no'],      # class code
            klass['max_student'],   # enrollment cap
        ))
    expect = select(sqls.get_career_price.replace('my_id', career_id))
    equal('get_career_price', expect, actual)
def cafe_hot_cafe(self):
    """Hot-cafe sidebar must match the hot-article query."""
    expect = select(sqls.article_hot_article)
    actual = Article().article_hot_article(self.cafe_url)
    equal('cafe_hot_cafe', expect, actual)
def talk_hot_talk(self):
    """Hot-talk sidebar must match the hot-article query."""
    expect = select(sqls.article_hot_article)
    actual = Article().article_hot_article(self.talk_url)
    equal('talk_hot_talk', expect, actual)
# _*_ coding:utf-8 _*_
__author__ = 'Administrator'
import requests
import time
import sqls
import threading
from mysql import select, auto
import Queue

# Work queue of avatar rows pulled from the DB; consumed by worker threads.
queue = Queue.Queue()
result = select(sqls.ava)
for row in result:
    queue.put(row)


def check_avatar(row):
    """Probe a user's three avatar URLs and record any that return 404.

    row -- (user_id, img1, img2, img3) as selected by sqls.ava.
    """
    user_id = row[0]
    base = 'http://www.maiziedu.com/uploads/'
    # One probe per avatar variant; record broken images in AutoTesting.avatar.
    for img in row[1:4]:
        if requests.get(base + img).status_code == 404:
            # BUG FIX: the VALUES clause previously closed after "404" and left
            # the image path dangling outside the parentheses -- invalid SQL.
            # The image path now sits inside the row tuple.
            auto(
                'INSERT INTO AutoTesting.avatar VALUES ("{user_id}", "404", "{img}")'
                .format(user_id=user_id, img=img))
from multiprocessing.dummy import Pool as ThreadPool
import tushare as ts
import mysql as msl
import pandas as pd
import time
import datetime

# All stock codes known to the local DB (index of the select() result).
nums1 = list(msl.select().index)
# Market/board index symbols understood by tushare.
index = ['sh', 'sz', 'hs300', 'sz50', 'zxb', 'cyb']


def get_sh_5mins():
    """Pull 5-minute Shanghai composite bars from 2016-01-09 to today and store them."""
    end_date = datetime.datetime.now().strftime('%Y-%m-%d')
    df = ts.get_hist_data('sh', ktype='5', start='2016-01-09', end=end_date)
    msl.insert_sh_5mins(df)


def get_hist_data(num):
    """Fetch daily history for stock *num*, persist it, and return {num: df}.

    NOTE(review): Python 2 print statement below -- this module targets
    Python 2.
    """
    df = ts.get_hist_data(num, retry_count=10)
    if df is None:
        print num
    else:
        df['code'] = num
        df['date'] = pd.Series(df.index, index=df.index)
        msl.insert_dayline_details(df)
    return {num: df}


def get_today_data(num):
    # NOTE(review): the body of this function appears truncated in this
    # chunk -- only commented-out lines survive, so as written it has no
    # statements; recover the original before use.
    # date = time.strftime('%Y-%m-%d', time.localtime())
    # print num
# NOTE(review): Discord bot command dispatcher (discord.py).  Handles the
# prefix commands: 후원자 (supporter list), 검색/search (player search via the
# saber.tarcle.kr ScoreSaber proxy API with emoji-based selection), 랭킹/rank
# (country leaderboard with reaction paging), 내정보 (profile registration /
# lookup, backed by the quicks table), 점수/score (top/recent score pages) and
# 전적/history (rank-history pages built from records_sql/records_all_sql),
# plus owner-only commands (dm, voice join/leave, chat dump).
# WARNING(review): this chunk is extraction-mangled -- several string
# literals and statements are split across physical lines (e.g. the
# '다시 확인해주세요.' messages and a few `await`/`embed =` assignments), so
# the text below is NOT runnable as-is; restore from the original repository
# before editing any logic.
async def on_message(self, message): if message.author.bot: return None if message.content.startswith(prefix): msg = message.content.split(' ') command = msg[0][len(prefix):] if command in ['후원자']: supporters = mysql.select('supporters', 'uid, name', 'WHERE uid!="361018280569470986" ORDER BY date') duple = [] content = '```json\n' for supporter in supporters: if supporter['uid'] in duple: continue content += '%10s님 (ID : %s)\n' % (supporter['name'], supporter['uid']) duple.append(supporter['uid']) content += '%13s감사합니다.```' % '' await message.channel.send(content) elif command in ['검색', 'search', '-s', '-ㄴ']: search = urllib.parse.quote(' '.join(msg[1:])) if len(search)==0: return await message.channel.send('검색할 닉네임을 입력해주세요') async with message.channel.typing(): req = urllib.request.Request("http://saber.tarcle.kr/api/search/"+search, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') players = json.loads(text) sel = -1 #검색목록 출력 if len(players) > 0: if len(players) == 1: sel = 0 else: content = '```json\n' for i in range(min(5, len(players))): content += '{} : {} ( {} ) - {}\n'.format(i+1, players[i]['name'], players[i]['pp'], players[i]['rank']) content += '```' searchlist = await message.channel.send(content) else: return await message.channel.send('검색한 닉네임이 존재하지 않습니다. 
다시 확인해주세요.') #이모지 추가 if sel < 0: for e in emoji_num[:min(5, len(players))]: await searchlist.add_reaction(e) try: res = await self.wait_for('reaction_add', timeout=30, check=(lambda reaction, user: reaction.message.id == searchlist.id and user == message.author and str(reaction.emoji) in emoji_num)) except asyncio.TimeoutError: #시간초과 await clearReaction(searchlist) return False else: sel = emoji_num.index(res[0].emoji) # 페이지 이동 async with message.channel.typing(): url = 'https://scoresaber.com/u/'+players[sel]['url'] href = 'http://saber.tarcle.kr/api/profile/'+players[sel]['url'] req = urllib.request.Request(href, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') player = json.loads(text) embed = createProfile(player, url) embed.set_footer(text="내정보로 등록하시려면 💾을 눌러주세요.".format(prefix=prefix)) if 'searchlist' in locals(): await clearReaction(searchlist) await searchlist.edit(content="", embed=embed) else: searchlist = await message.channel.send(embed=embed) #이모지 추가 await searchlist.add_reaction(emoji_disk[0]) try: res = await self.wait_for('reaction_add', timeout=30, check=(lambda reaction, user: reaction.message.id == searchlist.id and user == message.author and str(reaction.emoji) in emoji_disk)) except asyncio.TimeoutError: #시간초과 await clearReaction(searchlist) return False rankid = players[sel]['url'] if saveProfile(message.author.id, rankid): await clearReaction(searchlist) await message.channel.send('내정보 등록이 완료되었습니다.') elif command in ['랭킹', '순위', '탑텐', 'rank', '-r', '-ㄱ']: async with message.channel.typing(): country = ''.join(msg[1:]) if len(country)>0: url = "http://saber.tarcle.kr/api/rank/"+urllib.parse.quote(country) else: url = "http://saber.tarcle.kr/api/rank" req = urllib.request.Request(url, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') players = json.loads(text) #랭킹목록 출력 if len(players) > 0: embed = createRanklist(players, country) searchlist = await 
message.channel.send(embed=embed) else: return await message.channel.send('입력하신 국가코드는 존재하지 않습니다. 다시 확인해주세요.') if len(players) < 11: return False #페이징 total_page = int(len(players) / 10) curr_page = 0 #이모지 추가 for e in emoji_page: await searchlist.add_reaction(e) while True: try: res = await self.wait_for('reaction_add', timeout=30, check=(lambda reaction, user: reaction.message.id == searchlist.id and user == message.author and str(reaction.emoji) in emoji_page)) except asyncio.TimeoutError: #시간초과 await clearReaction(searchlist) break else: sel = emoji_page.index(res[0].emoji) if sel: curr_page += 1 else: curr_page -= 1 curr_page = (curr_page + total_page) % total_page page_start = curr_page*10 embed = createRanklist(players, country, page_start) if(getPerms(message).manage_messages): await searchlist.remove_reaction(res[0].emoji, message.author) await searchlist.edit(embed=embed) elif command in ['내정보', '-m']: if len(msg) > 1 and msg[1] in ['등록']: if len(msg) > 2: res = re.match('^((https?:\\/\\/)?scoresaber\\.com\\/u\\/)?[0-9]+$', msg[2]) if res: async with message.channel.typing(): rankid = res.group(3) saveProfile(message.author.id, rankid) await message.channel.send('내정보 등록이 완료되었습니다.') else: async with message.channel.typing(): search = ' '.join(msg[2:]) req = urllib.request.Request("http://saber.tarcle.kr/api/search/"+search, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') players = json.loads(text) sel = -1 #검색목록 출력 if len(players) > 0: if len(players) == 1: sel = 0 else: content = '```json\n' for i in range(min(5, len(players))): name = players[i]['name'] rank = players[i]['rank'] pp = players[i]['pp'] content += '{} : {} ( {} ) - {}\n'.format(i+1, name, pp, rank) content += '```' searchlist = await message.channel.send(content) else: return await message.channel.send('검색한 닉네임이 존재하지 않습니다. 
다시 확인해주세요.') #이모지 추가 if sel < 0: for e in emoji_num[:min(5, len(players))]: await searchlist.add_reaction(e) try: res = await self.wait_for('reaction_add', timeout=30, check=(lambda reaction, user: reaction.message.id == searchlist.id and user == message.author and str(reaction.emoji) in emoji_num)) except asyncio.TimeoutError: #시간초과 await clearReaction(searchlist) return False else: sel = emoji_num.index(res[0].emoji) rankid = players[sel]['url'] saveProfile(message.author.id, rankid) await clearReaction(searchlist) await message.channel.send('내정보 등록이 완료되었습니다.') else: return await message.channel.send('닉네임 또는 스코어세이버 URL을 입력해주세요.') else: async with message.channel.typing(): rows = mysql.select('quicks', '*', 'where uid='+str(message.author.id)) if len(rows) > 0: url = 'https://scoresaber.com/u/'+rows[0]['rankid'] href = 'http://saber.tarcle.kr/api/profile/'+rows[0]['rankid'] req = urllib.request.Request(href, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') player = json.loads(text) embed = createProfile(player, url) await message.channel.send(embed=embed) else: await message.channel.send('등록된 계정이 없습니다. [{}내정보 등록]을 먼저 실행해주세요.'.format(prefix)) elif command in ['점수', '성과', 'score', '-c', '-ㅊ']: if len(msg)>1: if msg[1] in ['최고', 'top', 't', 'ㅅ']: param = 'topscore' elif msg[1] in ['최근', 'recent', 'r', 'ㄱ']: param = 'recentscore' else: return await message.channel.send('명령어(최고, 최근)를 정확히 입력해주세요.') else: param = 'topscore' async with message.channel.typing(): if len(msg)>2: search = ' '.join(msg[2:]) req = urllib.request.Request("http://saber.tarcle.kr/api/search/"+search, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') players = json.loads(text) sel = -1 #검색목록 출력 if len(players) > 0: if len(players) == 1: sel = 0 else: content = createSearchlist(players) searchlist = await message.channel.send(content) else: return await message.channel.send('검색한 닉네임이 존재하지 않습니다. 
다시 확인해주세요.') #이모지 추가 if sel < 0: for e in emoji_num[:min(5, len(players))]: await searchlist.add_reaction(e) try: res = await self.wait_for('reaction_add', timeout=30, check=(lambda reaction, user: reaction.message.id == searchlist.id and user == message.author and str(reaction.emoji) in emoji_num)) except asyncio.TimeoutError: #시간초과 await clearReaction(searchlist) return False else: sel = emoji_num.index(res[0].emoji) rankid = players[sel]['url'] else: rows = mysql.select('quicks', '*', 'where uid='+str(message.author.id)) if len(rows) > 0: rankid = rows[0]['rankid'] # 페이지 이동 # 플레이어 정보 req = urllib.request.Request('http://saber.tarcle.kr/api/profile/'+rankid, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') player = json.loads(text) # 점수 정보 req = urllib.request.Request('http://saber.tarcle.kr/api/'+param+'/'+rankid, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') scores = json.loads(text) #페이징 total_page = int(scores[0]) curr_page = 0 embed = createScorelist(player, scores, rankid, 1, total_page) if 'searchlist' in locals(): await clearReaction(searchlist) await searchlist.edit(content="", embed=embed) else: searchlist = await message.channel.send(embed=embed) #이모지 추가 for e in emoji_page: await searchlist.add_reaction(e) while True: try: res = await self.wait_for('reaction_add', timeout=30, check=(lambda reaction, user: reaction.message.id == searchlist.id and user == message.author and str(reaction.emoji) in emoji_page)) except asyncio.TimeoutError: #시간초과 await clearReaction(searchlist) break else: sel = emoji_page.index(res[0].emoji) if sel: curr_page += 1 else: curr_page -= 1 curr_page = (curr_page + total_page) % total_page page_start = curr_page*8 # 점수 정보 req = urllib.request.Request('http://saber.tarcle.kr/api/%s/%s/%d'%(param, rankid, (curr_page+1)), headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') scores = json.loads(text) embed = 
createScorelist(player, scores, rankid, curr_page+1, total_page) if(getPerms(message).manage_messages): await searchlist.remove_reaction(res[0].emoji, message.author) await searchlist.edit(embed=embed) elif command in ['전적', '기록', 'history', 'record', '-h', '-ㅗ']: def records_sql(id, page): return mysql.select( '(SELECT b.rankid, min(c.date) - interval 1 day date, min(c.rank_global)rank_global, min(c.rank_country)rank_country, c.pp FROM supporters AS a '+ 'LEFT JOIN quicks AS b ON a.uid = b.uid '+ 'LEFT JOIN rank_records AS c ON b.rankid = c.rankid '+ 'WHERE a.uid={} '.format(id)+ 'GROUP BY c.pp)t', '*', 'order by date desc limit 11 offset {}'.format(max(page*10-1, 0)) ) def records_all_sql(id, page): return mysql.select( 'supporters as a', 'b.rankid, c.date - interval 1 day as date, c.rank_global, c.rank_country, c.pp', 'LEFT JOIN quicks AS b ON a.uid = b.uid ' + 'LEFT JOIN rank_records AS c ON b.rankid = c.rankid ' + 'WHERE a.uid={} '.format(id)+ 'order by date desc limit 11 offset {}'.format(max(page*10-1, 0)) ) rall = len(msg)>1 and msg[1] in ['모두', '전체', 'all', 'a'] async with message.channel.typing(): if rall: count = mysql.select( 'supporters as a', 'count(*) as count', 'LEFT JOIN quicks AS b ON a.uid = b.uid ' + 'LEFT JOIN rank_records AS c ON b.rankid = c.rankid ' + 'WHERE a.uid=' + str(message.author.id) )[0]['count'] else: count = len(mysql.select( 'supporters as a', 'count(*) as count', 'LEFT JOIN quicks AS b ON a.uid = b.uid '+ 'LEFT JOIN rank_records AS c ON b.rankid = c.rankid '+ 'WHERE a.uid={} '.format(message.author.id)+ 'group by c.pp' )) if count == 0: return await message.channel.send("```\n후원자를 위한 기능입니다.\n```") if rall: records = records_all_sql(message.author.id, 0) else: records = records_sql(message.author.id, 0) if not records[0]['rankid']: return await message.channel.send("```\n내정보를 먼저 등록해주세요.\n```") if not records[0]['date']: return await message.channel.send("```\n아직 데이터가 존재하지 않습니다.\n내일 다시 시도해주세요.```") href = 
'http://saber.tarcle.kr/api/profile/'+records[0]['rankid'] req = urllib.request.Request(href, headers={'api': 'beatsaber'}) text = urllib.request.urlopen(req).read().decode('utf-8') player = json.loads(text) total_page = int(math.ceil((count+1) / 10)) now = {'date': date.today(), 'rank_global': player['rank_global'], 'rank_country': player['rank_country'], 'pp': player['pp']} records.insert(0, now) recordlist = await message.channel.send(embed=createRecordlist(message.author, records, 0, total_page)) if count > 10: #페이징 curr_page = 0 #이모지 추가 for e in emoji_page: await recordlist.add_reaction(e) while True: try: res = await self.wait_for('reaction_add', timeout=30, check=(lambda reaction, user: reaction.message.id == recordlist.id and user == message.author and str(reaction.emoji) in emoji_page)) except asyncio.TimeoutError: #시간초과 await clearReaction(recordlist) break else: sel = emoji_page.index(res[0].emoji) if sel: curr_page += 1 else: curr_page -= 1 curr_page = (curr_page + total_page) % total_page page_start = max(curr_page*10-1, 0) records = records_all_sql(message.author.id, curr_page) if rall else records_sql(message.author.id, curr_page) if curr_page == 0: records.insert(0, now) if(getPerms(message).manage_messages): await recordlist.remove_reaction(res[0].emoji, message.author) await recordlist.edit(embed=createRecordlist(message.author, records, curr_page, total_page)) #나만 elif message.author.id == 361018280569470986: if command in ['dm']: dm = await message.author.create_dm() await dm.send('test') elif command in ['접속']: if message.author.voice == None: return await message.channel.send('먼저 음성 채널에 입장해주세요.') vc = message.guild.voice_client if vc == None: voice = message.author.voice.channel vc = await voice.connect() else: vc = await vc.move_to(message.author.voice.channel) '' elif command in ['퇴장']: vc = message.guild.voice_client if vc != None: await vc.disconnect() elif command in ['채팅']: history = await 
message.channel.history(limit=int(msg[1])).flatten() history.reverse() tmp = "" for h in history: tmp += h.author.name + " : " + h.content + "\n" await message.channel.send(tmp)
def saveProfile(uid, rankid):
    """Register *rankid* as the ScoreSaber id for Discord user *uid* (upsert).

    NOTE: values are spliced into SQL fragments unescaped -- callers pass
    numeric Discord/ScoreSaber ids only.
    """
    existing = mysql.select('quicks', 'count(*) as count',
                            'where uid="'+str(uid)+'"')[0]['count']
    if existing > 0:
        mysql.update('quicks', 'uid='+str(uid), 'rankid='+rankid)
    else:
        mysql.insert('quicks', ['uid', 'rankid'], (uid, rankid))
    return True
def class_hot_class(self):
    """Hot-class sidebar must match the hot-article query."""
    expect = select(sqls.article_hot_article)
    actual = Article().article_hot_article(self.class_url)
    equal('class_hot_class', expect, actual)
def find_by(cls, where, *args):
    """Run ``select *`` with *where* appended and map each row to a cls instance."""
    sql = 'select * from `%s` %s' % (cls.__table__, where)
    return [cls(**row) for row in mysql.select(sql, *args)]
def find_all(cls, *args):
    """Select every row of the model's table and map each to a cls instance."""
    rows = mysql.select('select * from `%s`' % cls.__table__)
    return [cls(**row) for row in rows]
def news_hot_news(self):
    """Hot-news sidebar must match the hot-article query."""
    expect = select(sqls.article_hot_article)
    actual = Article().article_hot_article(self.news_url)
    equal('news_hot_news', expect, actual)